@kodus/kodus-graph 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +62 -0
- package/src/analysis/blast-radius.ts +54 -0
- package/src/analysis/communities.ts +135 -0
- package/src/analysis/diff.ts +120 -0
- package/src/analysis/flows.ts +112 -0
- package/src/analysis/review-context.ts +141 -0
- package/src/analysis/risk-score.ts +62 -0
- package/src/analysis/search.ts +76 -0
- package/src/analysis/test-gaps.ts +21 -0
- package/src/cli.ts +192 -0
- package/src/commands/analyze.ts +66 -0
- package/src/commands/communities.ts +19 -0
- package/src/commands/context.ts +69 -0
- package/src/commands/diff.ts +96 -0
- package/src/commands/flows.ts +19 -0
- package/src/commands/parse.ts +100 -0
- package/src/commands/search.ts +41 -0
- package/src/commands/update.ts +166 -0
- package/src/graph/builder.ts +170 -0
- package/src/graph/edges.ts +101 -0
- package/src/graph/loader.ts +100 -0
- package/src/graph/merger.ts +25 -0
- package/src/graph/types.ts +218 -0
- package/src/parser/batch.ts +74 -0
- package/src/parser/discovery.ts +42 -0
- package/src/parser/extractor.ts +37 -0
- package/src/parser/extractors/generic.ts +87 -0
- package/src/parser/extractors/python.ts +127 -0
- package/src/parser/extractors/ruby.ts +142 -0
- package/src/parser/extractors/typescript.ts +329 -0
- package/src/parser/languages.ts +122 -0
- package/src/resolver/call-resolver.ts +179 -0
- package/src/resolver/import-map.ts +27 -0
- package/src/resolver/import-resolver.ts +72 -0
- package/src/resolver/languages/csharp.ts +7 -0
- package/src/resolver/languages/go.ts +7 -0
- package/src/resolver/languages/java.ts +7 -0
- package/src/resolver/languages/php.ts +7 -0
- package/src/resolver/languages/python.ts +35 -0
- package/src/resolver/languages/ruby.ts +21 -0
- package/src/resolver/languages/rust.ts +7 -0
- package/src/resolver/languages/typescript.ts +168 -0
- package/src/resolver/symbol-table.ts +53 -0
- package/src/shared/file-hash.ts +7 -0
- package/src/shared/filters.ts +243 -0
- package/src/shared/logger.ts +14 -0
- package/src/shared/qualified-name.ts +5 -0
- package/src/shared/safe-path.ts +31 -0
- package/src/shared/schemas.ts +31 -0
- package/src/shared/temp.ts +17 -0
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import type { IndexedGraph } from '../graph/loader';
|
|
2
|
+
import type { GraphNode } from '../graph/types';
|
|
3
|
+
|
|
4
|
+
/**
 * Filter criteria for searchNodes. All fields are optional; an omitted
 * field applies no filtering on that dimension.
 */
export interface SearchOptions {
  /** Name / qualified-name pattern: substring, glob (`*`), or `/regex/flags`. */
  query?: string;
  /** Exact node kind to match (e.g. "Function", "Method", "Class"). */
  kind?: string;
  /** File-path pattern (same matching rules as `query`). */
  file?: string;
  /** Maximum number of results returned (defaults to 50 in searchNodes). */
  limit?: number;
}
|
|
10
|
+
|
|
11
|
+
export function searchNodes(graph: IndexedGraph, opts: SearchOptions): GraphNode[] {
|
|
12
|
+
const { query, kind, file, limit = 50 } = opts;
|
|
13
|
+
let results = graph.nodes;
|
|
14
|
+
|
|
15
|
+
if (query) {
|
|
16
|
+
const matcher = buildMatcher(query);
|
|
17
|
+
results = results.filter((n) => matcher(n.name) || matcher(n.qualified_name));
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
if (kind) {
|
|
21
|
+
results = results.filter((n) => n.kind === kind);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
if (file) {
|
|
25
|
+
const fileMatcher = buildMatcher(file);
|
|
26
|
+
results = results.filter((n) => fileMatcher(n.file_path));
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
results.sort((a, b) => a.file_path.localeCompare(b.file_path) || a.line_start - b.line_start);
|
|
30
|
+
|
|
31
|
+
return results.slice(0, limit);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export function findCallers(graph: IndexedGraph, qualifiedName: string): GraphNode[] {
|
|
35
|
+
const edges = graph.reverseAdjacency.get(qualifiedName) || [];
|
|
36
|
+
const callers: GraphNode[] = [];
|
|
37
|
+
for (const e of edges) {
|
|
38
|
+
if (e.kind !== 'CALLS') continue;
|
|
39
|
+
const node = graph.byQualified.get(e.source_qualified);
|
|
40
|
+
if (node) callers.push(node);
|
|
41
|
+
}
|
|
42
|
+
return callers;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export function findCallees(graph: IndexedGraph, qualifiedName: string): GraphNode[] {
|
|
46
|
+
const edges = graph.adjacency.get(qualifiedName) || [];
|
|
47
|
+
const callees: GraphNode[] = [];
|
|
48
|
+
for (const e of edges) {
|
|
49
|
+
if (e.kind !== 'CALLS') continue;
|
|
50
|
+
const node = graph.byQualified.get(e.target_qualified);
|
|
51
|
+
if (node) callees.push(node);
|
|
52
|
+
}
|
|
53
|
+
return callees;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
function buildMatcher(pattern: string): (text: string) => boolean {
|
|
57
|
+
// Regex: /pattern/flags
|
|
58
|
+
if (pattern.startsWith('/')) {
|
|
59
|
+
const lastSlash = pattern.lastIndexOf('/');
|
|
60
|
+
if (lastSlash > 0) {
|
|
61
|
+
const regex = new RegExp(pattern.slice(1, lastSlash), pattern.slice(lastSlash + 1));
|
|
62
|
+
return (text) => regex.test(text);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
// Glob: contains *
|
|
67
|
+
if (pattern.includes('*')) {
|
|
68
|
+
const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, '\\$&').replace(/\*/g, '.*');
|
|
69
|
+
const regex = new RegExp(`^${escaped}$`, 'i');
|
|
70
|
+
return (text) => regex.test(text);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// Substring (case-insensitive)
|
|
74
|
+
const lower = pattern.toLowerCase();
|
|
75
|
+
return (text) => text.toLowerCase().includes(lower);
|
|
76
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { GraphData, TestGap } from '../graph/types';
|
|
2
|
+
|
|
3
|
+
export function findTestGaps(graph: GraphData, changedFiles: string[]): TestGap[] {
|
|
4
|
+
const changedSet = new Set(changedFiles);
|
|
5
|
+
|
|
6
|
+
const testedFiles = new Set(graph.edges.filter((e) => e.kind === 'TESTED_BY').map((e) => e.source_qualified));
|
|
7
|
+
|
|
8
|
+
return graph.nodes
|
|
9
|
+
.filter(
|
|
10
|
+
(n) =>
|
|
11
|
+
changedSet.has(n.file_path) &&
|
|
12
|
+
(n.kind === 'Function' || n.kind === 'Method') &&
|
|
13
|
+
!n.is_test &&
|
|
14
|
+
!testedFiles.has(n.file_path),
|
|
15
|
+
)
|
|
16
|
+
.map((n) => ({
|
|
17
|
+
function: n.qualified_name,
|
|
18
|
+
file_path: n.file_path,
|
|
19
|
+
line_start: n.line_start,
|
|
20
|
+
}));
|
|
21
|
+
}
|
package/src/cli.ts
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import { Command } from 'commander';
|
|
3
|
+
import { existsSync } from 'fs';
|
|
4
|
+
import { resolve } from 'path';
|
|
5
|
+
import { executeAnalyze } from './commands/analyze';
|
|
6
|
+
import { executeCommunities } from './commands/communities';
|
|
7
|
+
import { executeContext } from './commands/context';
|
|
8
|
+
import { executeDiff } from './commands/diff';
|
|
9
|
+
import { executeFlows } from './commands/flows';
|
|
10
|
+
import { executeParse } from './commands/parse';
|
|
11
|
+
import { executeSearch } from './commands/search';
|
|
12
|
+
import { executeUpdate } from './commands/update';
|
|
13
|
+
|
|
14
|
+
const program = new Command();
|
|
15
|
+
|
|
16
|
+
program.name('kodus-graph').description('Code graph builder for Kodus code review').version('0.1.0');
|
|
17
|
+
|
|
18
|
+
program
|
|
19
|
+
.command('parse')
|
|
20
|
+
.description('Parse source files and generate nodes + edges')
|
|
21
|
+
.option('--all', 'Parse all files in repo')
|
|
22
|
+
.option('--files <paths...>', 'Parse specific files')
|
|
23
|
+
.option('--repo-dir <path>', 'Repository root directory', '.')
|
|
24
|
+
.requiredOption('--out <path>', 'Output JSON file path')
|
|
25
|
+
.action(async (opts) => {
|
|
26
|
+
const repoDir = resolve(opts.repoDir);
|
|
27
|
+
if (!existsSync(repoDir)) {
|
|
28
|
+
process.stderr.write(`Error: --repo-dir does not exist: ${repoDir}\n`);
|
|
29
|
+
process.exit(1);
|
|
30
|
+
}
|
|
31
|
+
await executeParse({
|
|
32
|
+
repoDir: opts.repoDir,
|
|
33
|
+
files: opts.files,
|
|
34
|
+
all: opts.all ?? false,
|
|
35
|
+
out: opts.out,
|
|
36
|
+
});
|
|
37
|
+
});
|
|
38
|
+
|
|
39
|
+
program
|
|
40
|
+
.command('analyze')
|
|
41
|
+
.description('Compute blast radius, risk score, and test gaps')
|
|
42
|
+
.requiredOption('--files <paths...>', 'Changed files to analyze')
|
|
43
|
+
.option('--repo-dir <path>', 'Repository root directory', '.')
|
|
44
|
+
.option('--graph <path>', 'Path to main graph JSON')
|
|
45
|
+
.requiredOption('--out <path>', 'Output JSON file path')
|
|
46
|
+
.action(async (opts) => {
|
|
47
|
+
const repoDir = resolve(opts.repoDir);
|
|
48
|
+
if (!existsSync(repoDir)) {
|
|
49
|
+
process.stderr.write(`Error: --repo-dir does not exist: ${repoDir}\n`);
|
|
50
|
+
process.exit(1);
|
|
51
|
+
}
|
|
52
|
+
await executeAnalyze({
|
|
53
|
+
repoDir: opts.repoDir,
|
|
54
|
+
files: opts.files,
|
|
55
|
+
graph: opts.graph,
|
|
56
|
+
out: opts.out,
|
|
57
|
+
});
|
|
58
|
+
});
|
|
59
|
+
|
|
60
|
+
program
|
|
61
|
+
.command('context')
|
|
62
|
+
.description('Generate enriched review context for agents')
|
|
63
|
+
.requiredOption('--files <paths...>', 'Changed files')
|
|
64
|
+
.option('--repo-dir <path>', 'Repository root directory', '.')
|
|
65
|
+
.option('--graph <path>', 'Path to main graph JSON')
|
|
66
|
+
.requiredOption('--out <path>', 'Output JSON file path')
|
|
67
|
+
.action(async (opts) => {
|
|
68
|
+
const repoDir = resolve(opts.repoDir);
|
|
69
|
+
if (!existsSync(repoDir)) {
|
|
70
|
+
process.stderr.write(`Error: --repo-dir does not exist: ${repoDir}\n`);
|
|
71
|
+
process.exit(1);
|
|
72
|
+
}
|
|
73
|
+
await executeContext({
|
|
74
|
+
repoDir: opts.repoDir,
|
|
75
|
+
files: opts.files,
|
|
76
|
+
graph: opts.graph,
|
|
77
|
+
out: opts.out,
|
|
78
|
+
});
|
|
79
|
+
});
|
|
80
|
+
|
|
81
|
+
program
|
|
82
|
+
.command('diff')
|
|
83
|
+
.description('Compare changed files against an existing graph')
|
|
84
|
+
.option('--base <ref>', 'Git ref to diff against')
|
|
85
|
+
.option('--files <paths...>', 'Explicit list of changed files')
|
|
86
|
+
.option('--repo-dir <path>', 'Repository root directory', '.')
|
|
87
|
+
.option('--graph <path>', 'Previous graph JSON', '.kodus-graph/graph.json')
|
|
88
|
+
.requiredOption('--out <path>', 'Output JSON file path')
|
|
89
|
+
.action(async (opts) => {
|
|
90
|
+
if (!opts.base && !opts.files) {
|
|
91
|
+
process.stderr.write('Error: one of --base or --files is required\n');
|
|
92
|
+
process.exit(1);
|
|
93
|
+
}
|
|
94
|
+
const repoDir = resolve(opts.repoDir);
|
|
95
|
+
if (!existsSync(repoDir)) {
|
|
96
|
+
process.stderr.write(`Error: --repo-dir does not exist: ${repoDir}\n`);
|
|
97
|
+
process.exit(1);
|
|
98
|
+
}
|
|
99
|
+
await executeDiff({
|
|
100
|
+
repoDir: opts.repoDir,
|
|
101
|
+
base: opts.base,
|
|
102
|
+
files: opts.files,
|
|
103
|
+
graph: opts.graph,
|
|
104
|
+
out: opts.out,
|
|
105
|
+
});
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
program
|
|
109
|
+
.command('update')
|
|
110
|
+
.description('Incrementally update graph (only re-parse changed files)')
|
|
111
|
+
.option('--repo-dir <path>', 'Repository root directory', '.')
|
|
112
|
+
.option('--graph <path>', 'Previous graph JSON (default: .kodus-graph/graph.json)')
|
|
113
|
+
.option('--out <path>', 'Output path (default: same as --graph)')
|
|
114
|
+
.action(async (opts) => {
|
|
115
|
+
const repoDir = resolve(opts.repoDir);
|
|
116
|
+
if (!existsSync(repoDir)) {
|
|
117
|
+
process.stderr.write(`Error: --repo-dir does not exist: ${repoDir}\n`);
|
|
118
|
+
process.exit(1);
|
|
119
|
+
}
|
|
120
|
+
await executeUpdate({
|
|
121
|
+
repoDir: opts.repoDir,
|
|
122
|
+
graph: opts.graph,
|
|
123
|
+
out: opts.out,
|
|
124
|
+
});
|
|
125
|
+
});
|
|
126
|
+
|
|
127
|
+
program
|
|
128
|
+
.command('communities')
|
|
129
|
+
.description('Detect module clusters and coupling between them')
|
|
130
|
+
.requiredOption('--graph <path>', 'Path to graph JSON')
|
|
131
|
+
.requiredOption('--out <path>', 'Output JSON file path')
|
|
132
|
+
.option('--min-size <n>', 'Minimum nodes per community', '2')
|
|
133
|
+
.option('--depth <n>', 'Directory grouping depth', '2')
|
|
134
|
+
.action((opts) => {
|
|
135
|
+
executeCommunities({
|
|
136
|
+
graph: opts.graph,
|
|
137
|
+
out: opts.out,
|
|
138
|
+
minSize: parseInt(opts.minSize, 10),
|
|
139
|
+
depth: parseInt(opts.depth, 10),
|
|
140
|
+
});
|
|
141
|
+
});
|
|
142
|
+
|
|
143
|
+
program
|
|
144
|
+
.command('flows')
|
|
145
|
+
.description('Detect entry points and trace execution paths')
|
|
146
|
+
.requiredOption('--graph <path>', 'Path to graph JSON')
|
|
147
|
+
.requiredOption('--out <path>', 'Output JSON file path')
|
|
148
|
+
.option('--max-depth <n>', 'Max BFS trace depth', '10')
|
|
149
|
+
.option('--type <kind>', 'Filter: test, http, all', 'all')
|
|
150
|
+
.action((opts) => {
|
|
151
|
+
executeFlows({
|
|
152
|
+
graph: opts.graph,
|
|
153
|
+
out: opts.out,
|
|
154
|
+
maxDepth: parseInt(opts.maxDepth, 10),
|
|
155
|
+
type: opts.type as 'test' | 'http' | 'all',
|
|
156
|
+
});
|
|
157
|
+
});
|
|
158
|
+
|
|
159
|
+
program
|
|
160
|
+
.command('search')
|
|
161
|
+
.description('Search the graph by name, kind, file, or relations')
|
|
162
|
+
.requiredOption('--graph <path>', 'Path to graph JSON')
|
|
163
|
+
.option('--query <pattern>', 'Search by name/qualified_name (glob or /regex/)')
|
|
164
|
+
.option('--kind <type>', 'Filter by kind: Function, Method, Class, Interface, Enum, Test')
|
|
165
|
+
.option('--file <pattern>', 'Filter by file path (glob)')
|
|
166
|
+
.option('--callers-of <qualified>', 'Find callers of this node')
|
|
167
|
+
.option('--callees-of <qualified>', 'Find callees of this node')
|
|
168
|
+
.option('--limit <n>', 'Max results', '50')
|
|
169
|
+
.option('--out <path>', 'Output file (default: stdout)')
|
|
170
|
+
.action((opts) => {
|
|
171
|
+
const modes = [opts.query, opts.callersOf, opts.calleesOf].filter(Boolean).length;
|
|
172
|
+
if (modes === 0) {
|
|
173
|
+
process.stderr.write('Error: one of --query, --callers-of, or --callees-of is required\n');
|
|
174
|
+
process.exit(1);
|
|
175
|
+
}
|
|
176
|
+
if (modes > 1) {
|
|
177
|
+
process.stderr.write('Error: --query, --callers-of, and --callees-of are mutually exclusive\n');
|
|
178
|
+
process.exit(1);
|
|
179
|
+
}
|
|
180
|
+
executeSearch({
|
|
181
|
+
graph: opts.graph,
|
|
182
|
+
query: opts.query,
|
|
183
|
+
kind: opts.kind,
|
|
184
|
+
file: opts.file,
|
|
185
|
+
callersOf: opts.callersOf,
|
|
186
|
+
calleesOf: opts.calleesOf,
|
|
187
|
+
limit: parseInt(opts.limit, 10),
|
|
188
|
+
out: opts.out,
|
|
189
|
+
});
|
|
190
|
+
});
|
|
191
|
+
|
|
192
|
+
program.parse();
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { readFileSync, writeFileSync } from 'fs';
|
|
2
|
+
import { resolve } from 'path';
|
|
3
|
+
import { computeBlastRadius } from '../analysis/blast-radius';
|
|
4
|
+
import { computeRiskScore } from '../analysis/risk-score';
|
|
5
|
+
import { findTestGaps } from '../analysis/test-gaps';
|
|
6
|
+
import { buildGraphData } from '../graph/builder';
|
|
7
|
+
import { mergeGraphs } from '../graph/merger';
|
|
8
|
+
import type { AnalysisOutput, MainGraphInput } from '../graph/types';
|
|
9
|
+
import { parseBatch } from '../parser/batch';
|
|
10
|
+
import { discoverFiles } from '../parser/discovery';
|
|
11
|
+
import { GraphInputSchema } from '../shared/schemas';
|
|
12
|
+
|
|
13
|
+
interface AnalyzeOptions {
|
|
14
|
+
repoDir: string;
|
|
15
|
+
files: string[];
|
|
16
|
+
graph?: string;
|
|
17
|
+
out: string;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export async function executeAnalyze(opts: AnalyzeOptions): Promise<void> {
|
|
21
|
+
const repoDir = resolve(opts.repoDir);
|
|
22
|
+
|
|
23
|
+
// Load main graph if provided
|
|
24
|
+
let mainGraph: MainGraphInput | null = null;
|
|
25
|
+
if (opts.graph) {
|
|
26
|
+
let raw: unknown;
|
|
27
|
+
try {
|
|
28
|
+
raw = JSON.parse(readFileSync(opts.graph, 'utf-8'));
|
|
29
|
+
} catch (_err) {
|
|
30
|
+
process.stderr.write(`Error: Failed to read --graph file: ${opts.graph}\n`);
|
|
31
|
+
process.exit(1);
|
|
32
|
+
}
|
|
33
|
+
const validated = GraphInputSchema.safeParse(raw);
|
|
34
|
+
if (!validated.success) {
|
|
35
|
+
process.stderr.write(`Error: Invalid graph JSON: ${validated.error.message}\n`);
|
|
36
|
+
process.exit(1);
|
|
37
|
+
}
|
|
38
|
+
mainGraph = {
|
|
39
|
+
repo_id: '',
|
|
40
|
+
sha: '',
|
|
41
|
+
nodes: validated.data.nodes,
|
|
42
|
+
edges: validated.data.edges,
|
|
43
|
+
};
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Parse changed files locally
|
|
47
|
+
const localFiles = discoverFiles(repoDir, opts.files);
|
|
48
|
+
const rawGraph = await parseBatch(localFiles, repoDir);
|
|
49
|
+
const localGraphData = buildGraphData(rawGraph, [], [], repoDir, new Map());
|
|
50
|
+
|
|
51
|
+
// Merge with main graph (or use local only)
|
|
52
|
+
const mergedGraph = mainGraph ? mergeGraphs(mainGraph, localGraphData, opts.files) : localGraphData;
|
|
53
|
+
|
|
54
|
+
// Analyze
|
|
55
|
+
const blastRadius = computeBlastRadius(mergedGraph, opts.files);
|
|
56
|
+
const riskScore = computeRiskScore(mergedGraph, opts.files, blastRadius);
|
|
57
|
+
const testGaps = findTestGaps(mergedGraph, opts.files);
|
|
58
|
+
|
|
59
|
+
const output: AnalysisOutput = {
|
|
60
|
+
blast_radius: blastRadius,
|
|
61
|
+
risk_score: riskScore,
|
|
62
|
+
test_gaps: testGaps,
|
|
63
|
+
};
|
|
64
|
+
|
|
65
|
+
writeFileSync(opts.out, JSON.stringify(output, null, 2));
|
|
66
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { writeFileSync } from 'fs';
|
|
2
|
+
import { detectCommunities } from '../analysis/communities';
|
|
3
|
+
import { loadGraph } from '../graph/loader';
|
|
4
|
+
|
|
5
|
+
interface CommunitiesCommandOptions {
|
|
6
|
+
graph: string;
|
|
7
|
+
out: string;
|
|
8
|
+
minSize: number;
|
|
9
|
+
depth: number;
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export function executeCommunities(opts: CommunitiesCommandOptions): void {
|
|
13
|
+
const graph = loadGraph(opts.graph);
|
|
14
|
+
const result = detectCommunities(graph, { depth: opts.depth, minSize: opts.minSize });
|
|
15
|
+
writeFileSync(opts.out, JSON.stringify(result, null, 2));
|
|
16
|
+
process.stderr.write(
|
|
17
|
+
`Communities: ${result.summary.total_communities} detected, avg cohesion ${result.summary.avg_cohesion}, ${result.summary.high_coupling_pairs} high-coupling pairs\n`,
|
|
18
|
+
);
|
|
19
|
+
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import { readFileSync, rmSync, writeFileSync } from 'fs';
|
|
2
|
+
import { resolve } from 'path';
|
|
3
|
+
import { buildReviewContext } from '../analysis/review-context';
|
|
4
|
+
import { mergeGraphs } from '../graph/merger';
|
|
5
|
+
import type { ContextOutput, GraphData, MainGraphInput } from '../graph/types';
|
|
6
|
+
import { log } from '../shared/logger';
|
|
7
|
+
import { GraphInputSchema } from '../shared/schemas';
|
|
8
|
+
import { createSecureTempFile } from '../shared/temp';
|
|
9
|
+
import { executeParse } from './parse';
|
|
10
|
+
|
|
11
|
+
interface ContextOptions {
|
|
12
|
+
repoDir: string;
|
|
13
|
+
files: string[];
|
|
14
|
+
graph?: string;
|
|
15
|
+
out: string;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export async function executeContext(opts: ContextOptions): Promise<void> {
|
|
19
|
+
const repoDir = resolve(opts.repoDir);
|
|
20
|
+
|
|
21
|
+
// Parse changed files using secure temp
|
|
22
|
+
const tmp = createSecureTempFile('ctx');
|
|
23
|
+
try {
|
|
24
|
+
await executeParse({
|
|
25
|
+
repoDir,
|
|
26
|
+
files: opts.files,
|
|
27
|
+
all: false,
|
|
28
|
+
out: tmp.filePath,
|
|
29
|
+
});
|
|
30
|
+
const parseResult = JSON.parse(readFileSync(tmp.filePath, 'utf-8'));
|
|
31
|
+
|
|
32
|
+
// Load and merge with main graph if provided
|
|
33
|
+
let mergedGraph: GraphData;
|
|
34
|
+
if (opts.graph) {
|
|
35
|
+
let raw: unknown;
|
|
36
|
+
try {
|
|
37
|
+
raw = JSON.parse(readFileSync(opts.graph, 'utf-8'));
|
|
38
|
+
} catch (_err) {
|
|
39
|
+
process.stderr.write(`Error: Failed to read --graph file: ${opts.graph}\n`);
|
|
40
|
+
process.exit(1);
|
|
41
|
+
}
|
|
42
|
+
const validated = GraphInputSchema.safeParse(raw);
|
|
43
|
+
if (!validated.success) {
|
|
44
|
+
process.stderr.write(`Error: Invalid graph JSON: ${validated.error.message}\n`);
|
|
45
|
+
process.exit(1);
|
|
46
|
+
}
|
|
47
|
+
const mainGraph: MainGraphInput = {
|
|
48
|
+
repo_id: '',
|
|
49
|
+
sha: '',
|
|
50
|
+
nodes: validated.data.nodes,
|
|
51
|
+
edges: validated.data.edges,
|
|
52
|
+
};
|
|
53
|
+
mergedGraph = mergeGraphs(mainGraph, parseResult, opts.files);
|
|
54
|
+
} else {
|
|
55
|
+
mergedGraph = { nodes: parseResult.nodes, edges: parseResult.edges };
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Build review context
|
|
59
|
+
const contextOutput: ContextOutput = buildReviewContext(mergedGraph, opts.files);
|
|
60
|
+
|
|
61
|
+
writeFileSync(opts.out, JSON.stringify(contextOutput, null, 2));
|
|
62
|
+
} finally {
|
|
63
|
+
try {
|
|
64
|
+
rmSync(tmp.dir, { recursive: true, force: true });
|
|
65
|
+
} catch (err) {
|
|
66
|
+
log.debug('Failed to clean up temp dir', { dir: tmp.dir, error: String(err) });
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
}
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { execSync } from 'child_process';
|
|
2
|
+
import { existsSync, writeFileSync } from 'fs';
|
|
3
|
+
import { relative, resolve } from 'path';
|
|
4
|
+
import { performance } from 'perf_hooks';
|
|
5
|
+
import { computeStructuralDiff } from '../analysis/diff';
|
|
6
|
+
import { buildGraphData } from '../graph/builder';
|
|
7
|
+
import { loadGraph } from '../graph/loader';
|
|
8
|
+
import type { ImportEdge } from '../graph/types';
|
|
9
|
+
import { parseBatch } from '../parser/batch';
|
|
10
|
+
import { discoverFiles } from '../parser/discovery';
|
|
11
|
+
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
12
|
+
import { createImportMap } from '../resolver/import-map';
|
|
13
|
+
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
14
|
+
import { createSymbolTable } from '../resolver/symbol-table';
|
|
15
|
+
import { computeFileHash } from '../shared/file-hash';
|
|
16
|
+
|
|
17
|
+
interface DiffCommandOptions {
|
|
18
|
+
repoDir: string;
|
|
19
|
+
base?: string;
|
|
20
|
+
files?: string[];
|
|
21
|
+
graph: string;
|
|
22
|
+
out: string;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export async function executeDiff(opts: DiffCommandOptions): Promise<void> {
|
|
26
|
+
const t0 = performance.now();
|
|
27
|
+
const repoDir = resolve(opts.repoDir);
|
|
28
|
+
|
|
29
|
+
// Resolve changed files
|
|
30
|
+
let changedFiles: string[];
|
|
31
|
+
if (opts.base) {
|
|
32
|
+
try {
|
|
33
|
+
const output = execSync(`git diff --name-only ${opts.base}`, { cwd: repoDir, encoding: 'utf-8' });
|
|
34
|
+
changedFiles = output.trim().split('\n').filter(Boolean);
|
|
35
|
+
} catch (err) {
|
|
36
|
+
process.stderr.write(`Error: failed to run git diff with base "${opts.base}": ${String(err)}\n`);
|
|
37
|
+
process.exit(1);
|
|
38
|
+
}
|
|
39
|
+
} else {
|
|
40
|
+
changedFiles = opts.files!;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
process.stderr.write(`[1/4] ${changedFiles.length} changed files\n`);
|
|
44
|
+
|
|
45
|
+
// Load old graph
|
|
46
|
+
const graphPath = resolve(opts.graph);
|
|
47
|
+
if (!existsSync(graphPath)) {
|
|
48
|
+
process.stderr.write(`Error: graph file not found: ${graphPath}\n`);
|
|
49
|
+
process.exit(1);
|
|
50
|
+
}
|
|
51
|
+
const oldGraph = loadGraph(graphPath);
|
|
52
|
+
process.stderr.write(`[2/4] Loaded previous graph (${oldGraph.nodes.length} nodes)\n`);
|
|
53
|
+
|
|
54
|
+
// Re-parse changed files
|
|
55
|
+
const absFiles = discoverFiles(repoDir, changedFiles);
|
|
56
|
+
const rawGraph = await parseBatch(absFiles, repoDir);
|
|
57
|
+
|
|
58
|
+
const tsconfigAliases = loadTsconfigAliases(repoDir);
|
|
59
|
+
const symbolTable = createSymbolTable();
|
|
60
|
+
const importMap = createImportMap();
|
|
61
|
+
const importEdges: ImportEdge[] = [];
|
|
62
|
+
|
|
63
|
+
for (const f of rawGraph.functions) symbolTable.add(f.file, f.name, f.qualified);
|
|
64
|
+
for (const c of rawGraph.classes) symbolTable.add(c.file, c.name, c.qualified);
|
|
65
|
+
for (const i of rawGraph.interfaces) symbolTable.add(i.file, i.name, i.qualified);
|
|
66
|
+
|
|
67
|
+
for (const imp of rawGraph.imports) {
|
|
68
|
+
const langKey = imp.lang === 'python' ? 'python' : imp.lang === 'ruby' ? 'ruby' : 'typescript';
|
|
69
|
+
const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
|
|
70
|
+
const resolvedRel = resolved ? relative(repoDir, resolved) : null;
|
|
71
|
+
importEdges.push({ source: imp.file, target: resolvedRel || imp.module, resolved: !!resolvedRel, line: imp.line });
|
|
72
|
+
const target = resolvedRel || imp.module;
|
|
73
|
+
for (const name of imp.names) importMap.add(imp.file, name, target);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
const { callEdges } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
|
|
77
|
+
|
|
78
|
+
const fileHashes = new Map<string, string>();
|
|
79
|
+
for (const f of absFiles) {
|
|
80
|
+
try {
|
|
81
|
+
fileHashes.set(relative(repoDir, f), computeFileHash(f));
|
|
82
|
+
} catch {}
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
const newGraphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes);
|
|
86
|
+
process.stderr.write(`[3/4] Re-parsed ${absFiles.length} files (${newGraphData.nodes.length} nodes)\n`);
|
|
87
|
+
|
|
88
|
+
// Compute diff
|
|
89
|
+
const relChangedFiles = changedFiles.map((f) => (f.startsWith('/') ? relative(repoDir, f) : f));
|
|
90
|
+
const result = computeStructuralDiff(oldGraph, newGraphData.nodes, newGraphData.edges, relChangedFiles);
|
|
91
|
+
process.stderr.write(
|
|
92
|
+
`[4/4] Diff: +${result.summary.added} -${result.summary.removed} ~${result.summary.modified} nodes (${Math.round(performance.now() - t0)}ms)\n`,
|
|
93
|
+
);
|
|
94
|
+
|
|
95
|
+
writeFileSync(opts.out, JSON.stringify(result, null, 2));
|
|
96
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { writeFileSync } from 'fs';
|
|
2
|
+
import { detectFlows } from '../analysis/flows';
|
|
3
|
+
import { loadGraph } from '../graph/loader';
|
|
4
|
+
|
|
5
|
+
interface FlowsCommandOptions {
|
|
6
|
+
graph: string;
|
|
7
|
+
out: string;
|
|
8
|
+
maxDepth: number;
|
|
9
|
+
type: 'test' | 'http' | 'all';
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export function executeFlows(opts: FlowsCommandOptions): void {
|
|
13
|
+
const graph = loadGraph(opts.graph);
|
|
14
|
+
const result = detectFlows(graph, { maxDepth: opts.maxDepth, type: opts.type });
|
|
15
|
+
writeFileSync(opts.out, JSON.stringify(result, null, 2));
|
|
16
|
+
process.stderr.write(
|
|
17
|
+
`Flows: ${result.summary.total_flows} detected (test:${result.summary.by_type.test} http:${result.summary.by_type.http}), avg depth ${result.summary.avg_depth}\n`,
|
|
18
|
+
);
|
|
19
|
+
}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { writeFileSync } from 'fs';
|
|
2
|
+
import { relative, resolve } from 'path';
|
|
3
|
+
import { performance } from 'perf_hooks';
|
|
4
|
+
import { buildGraphData } from '../graph/builder';
|
|
5
|
+
import type { ImportEdge, ParseOutput } from '../graph/types';
|
|
6
|
+
import { parseBatch } from '../parser/batch';
|
|
7
|
+
import { discoverFiles } from '../parser/discovery';
|
|
8
|
+
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
9
|
+
import { createImportMap } from '../resolver/import-map';
|
|
10
|
+
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
11
|
+
import { createSymbolTable } from '../resolver/symbol-table';
|
|
12
|
+
import { computeFileHash } from '../shared/file-hash';
|
|
13
|
+
import { log } from '../shared/logger';
|
|
14
|
+
|
|
15
|
+
/** Options for the `parse` CLI command. */
interface ParseOptions {
  /** Repository root directory (resolved to absolute inside executeParse). */
  repoDir: string;
  /** Explicit file list; ignored when `all` is true. */
  files?: string[];
  /** When true, discover every supported file under repoDir instead. */
  all: boolean;
  /** Output JSON file path. */
  out: string;
}

/**
 * `parse` command: run the full five-phase pipeline — discover files, parse
 * and extract symbols, resolve imports, resolve calls, and build the graph —
 * then write a ParseOutput JSON (metadata + nodes + edges) to `opts.out`.
 * Per-phase progress is written to stderr.
 */
export async function executeParse(opts: ParseOptions): Promise<void> {
  const t0 = performance.now();
  const repoDir = resolve(opts.repoDir);

  // Phase 1: Discover files (undefined file list => scan the whole repo).
  const files = discoverFiles(repoDir, opts.all ? undefined : opts.files);
  process.stderr.write(`[1/5] Discovered ${files.length} files\n`);

  // Phase 2: Parse + extract
  const rawGraph = await parseBatch(files, repoDir);
  process.stderr.write(
    `[2/5] Parsed ${rawGraph.functions.length} functions, ${rawGraph.classes.length} classes, ${rawGraph.rawCalls.length} call sites\n`,
  );

  // Phase 3: Resolve imports
  const tsconfigAliases = loadTsconfigAliases(repoDir);
  const symbolTable = createSymbolTable();
  const importMap = createImportMap();
  const importEdges: ImportEdge[] = [];

  // Populate symbol table with every extracted definition so call
  // resolution can look names up by file.
  for (const f of rawGraph.functions) symbolTable.add(f.file, f.name, f.qualified);
  for (const c of rawGraph.classes) symbolTable.add(c.file, c.name, c.qualified);
  for (const i of rawGraph.interfaces) symbolTable.add(i.file, i.name, i.qualified);

  // Resolve each import. Unresolved imports (external packages, missing
  // files) keep the raw module specifier as the edge target.
  for (const imp of rawGraph.imports) {
    const langKey = imp.lang === 'python' ? 'python' : imp.lang === 'ruby' ? 'ruby' : 'typescript';
    const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
    const resolvedRel = resolved ? relative(repoDir, resolved) : null;
    importEdges.push({
      source: imp.file,
      target: resolvedRel || imp.module,
      resolved: !!resolvedRel,
      line: imp.line,
    });
    const target = resolvedRel || imp.module;
    for (const name of imp.names) importMap.add(imp.file, name, target);
  }

  process.stderr.write(
    `[3/5] Resolved ${importEdges.filter((e) => e.resolved).length}/${importEdges.length} imports\n`,
  );

  // Phase 4: Resolve calls
  const { callEdges, stats } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
  process.stderr.write(
    `[4/5] Resolved ${callEdges.length} calls (DI:${stats.di} same:${stats.same} import:${stats.import} unique:${stats.unique} ambiguous:${stats.ambiguous} noise:${stats.noise})\n`,
  );

  // Phase 5: Build output. File hashes are best-effort: a file that cannot
  // be hashed is logged and skipped rather than failing the whole parse.
  const fileHashes = new Map<string, string>();
  for (const f of files) {
    try {
      fileHashes.set(relative(repoDir, f), computeFileHash(f));
    } catch (err) {
      log.warn('Failed to compute file hash', { file: f, error: String(err) });
    }
  }

  const graphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes);
  process.stderr.write(`[5/5] Built graph: ${graphData.nodes.length} nodes, ${graphData.edges.length} edges\n`);

  const output: ParseOutput = {
    metadata: {
      repo_dir: repoDir,
      files_parsed: files.length,
      total_nodes: graphData.nodes.length,
      total_edges: graphData.edges.length,
      duration_ms: Math.round(performance.now() - t0),
      parse_errors: rawGraph.parseErrors,
      extract_errors: rawGraph.extractErrors,
    },
    nodes: graphData.nodes,
    edges: graphData.edges,
  };

  writeFileSync(opts.out, JSON.stringify(output, null, 2));
}
|