@kodus/kodus-graph 0.2.8 → 0.2.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +252 -0
- package/dist/analysis/blast-radius.d.ts +2 -0
- package/dist/analysis/blast-radius.js +55 -0
- package/dist/analysis/communities.d.ts +28 -0
- package/dist/analysis/communities.js +100 -0
- package/dist/analysis/context-builder.d.ts +34 -0
- package/dist/analysis/context-builder.js +92 -0
- package/dist/analysis/diff.d.ts +41 -0
- package/dist/analysis/diff.js +155 -0
- package/dist/analysis/enrich.d.ts +5 -0
- package/dist/analysis/enrich.js +126 -0
- package/dist/analysis/flows.d.ts +27 -0
- package/dist/analysis/flows.js +86 -0
- package/dist/analysis/inheritance.d.ts +3 -0
- package/dist/analysis/inheritance.js +31 -0
- package/dist/analysis/prompt-formatter.d.ts +2 -0
- package/dist/analysis/prompt-formatter.js +173 -0
- package/dist/analysis/risk-score.d.ts +4 -0
- package/dist/analysis/risk-score.js +51 -0
- package/dist/analysis/search.d.ts +11 -0
- package/dist/analysis/search.js +64 -0
- package/dist/analysis/test-gaps.d.ts +2 -0
- package/dist/analysis/test-gaps.js +14 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +210 -0
- package/dist/commands/analyze.d.ts +9 -0
- package/dist/commands/analyze.js +116 -0
- package/dist/commands/communities.d.ts +8 -0
- package/dist/commands/communities.js +9 -0
- package/dist/commands/context.d.ts +12 -0
- package/dist/commands/context.js +130 -0
- package/dist/commands/diff.d.ts +9 -0
- package/dist/commands/diff.js +89 -0
- package/dist/commands/flows.d.ts +8 -0
- package/dist/commands/flows.js +9 -0
- package/dist/commands/parse.d.ts +11 -0
- package/dist/commands/parse.js +101 -0
- package/dist/commands/search.d.ts +12 -0
- package/dist/commands/search.js +27 -0
- package/dist/commands/update.d.ts +7 -0
- package/dist/commands/update.js +154 -0
- package/dist/graph/builder.d.ts +6 -0
- package/dist/graph/builder.js +248 -0
- package/dist/graph/edges.d.ts +23 -0
- package/dist/graph/edges.js +159 -0
- package/dist/graph/json-writer.d.ts +9 -0
- package/dist/graph/json-writer.js +38 -0
- package/dist/graph/loader.d.ts +13 -0
- package/dist/graph/loader.js +101 -0
- package/dist/graph/merger.d.ts +7 -0
- package/dist/graph/merger.js +18 -0
- package/dist/graph/types.d.ts +252 -0
- package/dist/graph/types.js +1 -0
- package/dist/parser/batch.d.ts +5 -0
- package/dist/parser/batch.js +93 -0
- package/dist/parser/discovery.d.ts +7 -0
- package/dist/parser/discovery.js +61 -0
- package/dist/parser/extractor.d.ts +4 -0
- package/dist/parser/extractor.js +33 -0
- package/dist/parser/extractors/generic.d.ts +8 -0
- package/dist/parser/extractors/generic.js +471 -0
- package/dist/parser/extractors/python.d.ts +8 -0
- package/dist/parser/extractors/python.js +133 -0
- package/dist/parser/extractors/ruby.d.ts +8 -0
- package/dist/parser/extractors/ruby.js +153 -0
- package/dist/parser/extractors/typescript.d.ts +10 -0
- package/dist/parser/extractors/typescript.js +365 -0
- package/dist/parser/languages.d.ts +32 -0
- package/dist/parser/languages.js +304 -0
- package/dist/resolver/call-resolver.d.ts +36 -0
- package/dist/resolver/call-resolver.js +178 -0
- package/dist/resolver/external-detector.d.ts +11 -0
- package/dist/resolver/external-detector.js +820 -0
- package/dist/resolver/fs-cache.d.ts +8 -0
- package/dist/resolver/fs-cache.js +36 -0
- package/dist/resolver/import-map.d.ts +12 -0
- package/dist/resolver/import-map.js +21 -0
- package/dist/resolver/import-resolver.d.ts +19 -0
- package/dist/resolver/import-resolver.js +310 -0
- package/dist/resolver/languages/csharp.d.ts +3 -0
- package/dist/resolver/languages/csharp.js +94 -0
- package/dist/resolver/languages/go.d.ts +3 -0
- package/dist/resolver/languages/go.js +197 -0
- package/dist/resolver/languages/java.d.ts +1 -0
- package/dist/resolver/languages/java.js +193 -0
- package/dist/resolver/languages/php.d.ts +3 -0
- package/dist/resolver/languages/php.js +75 -0
- package/dist/resolver/languages/python.d.ts +11 -0
- package/dist/resolver/languages/python.js +127 -0
- package/dist/resolver/languages/ruby.d.ts +24 -0
- package/dist/resolver/languages/ruby.js +110 -0
- package/dist/resolver/languages/rust.d.ts +1 -0
- package/dist/resolver/languages/rust.js +197 -0
- package/dist/resolver/languages/typescript.d.ts +35 -0
- package/dist/resolver/languages/typescript.js +416 -0
- package/dist/resolver/re-export-resolver.d.ts +24 -0
- package/dist/resolver/re-export-resolver.js +57 -0
- package/dist/resolver/symbol-table.d.ts +17 -0
- package/dist/resolver/symbol-table.js +60 -0
- package/dist/shared/extract-calls.d.ts +26 -0
- package/dist/shared/extract-calls.js +57 -0
- package/dist/shared/file-hash.d.ts +3 -0
- package/dist/shared/file-hash.js +10 -0
- package/dist/shared/filters.d.ts +3 -0
- package/dist/shared/filters.js +240 -0
- package/dist/shared/logger.d.ts +6 -0
- package/dist/shared/logger.js +17 -0
- package/dist/shared/qualified-name.d.ts +1 -0
- package/dist/shared/qualified-name.js +9 -0
- package/dist/shared/safe-path.d.ts +6 -0
- package/dist/shared/safe-path.js +29 -0
- package/dist/shared/schemas.d.ts +43 -0
- package/dist/shared/schemas.js +30 -0
- package/dist/shared/temp.d.ts +11 -0
- package/{src/shared/temp.ts → dist/shared/temp.js} +4 -5
- package/package.json +20 -6
- package/src/analysis/blast-radius.ts +0 -54
- package/src/analysis/communities.ts +0 -135
- package/src/analysis/context-builder.ts +0 -130
- package/src/analysis/diff.ts +0 -169
- package/src/analysis/enrich.ts +0 -110
- package/src/analysis/flows.ts +0 -112
- package/src/analysis/inheritance.ts +0 -34
- package/src/analysis/prompt-formatter.ts +0 -175
- package/src/analysis/risk-score.ts +0 -62
- package/src/analysis/search.ts +0 -76
- package/src/analysis/test-gaps.ts +0 -21
- package/src/cli.ts +0 -210
- package/src/commands/analyze.ts +0 -128
- package/src/commands/communities.ts +0 -19
- package/src/commands/context.ts +0 -182
- package/src/commands/diff.ts +0 -96
- package/src/commands/flows.ts +0 -19
- package/src/commands/parse.ts +0 -124
- package/src/commands/search.ts +0 -41
- package/src/commands/update.ts +0 -166
- package/src/graph/builder.ts +0 -209
- package/src/graph/edges.ts +0 -101
- package/src/graph/json-writer.ts +0 -43
- package/src/graph/loader.ts +0 -113
- package/src/graph/merger.ts +0 -25
- package/src/graph/types.ts +0 -283
- package/src/parser/batch.ts +0 -82
- package/src/parser/discovery.ts +0 -75
- package/src/parser/extractor.ts +0 -37
- package/src/parser/extractors/generic.ts +0 -132
- package/src/parser/extractors/python.ts +0 -133
- package/src/parser/extractors/ruby.ts +0 -147
- package/src/parser/extractors/typescript.ts +0 -350
- package/src/parser/languages.ts +0 -122
- package/src/resolver/call-resolver.ts +0 -244
- package/src/resolver/import-map.ts +0 -27
- package/src/resolver/import-resolver.ts +0 -72
- package/src/resolver/languages/csharp.ts +0 -7
- package/src/resolver/languages/go.ts +0 -7
- package/src/resolver/languages/java.ts +0 -7
- package/src/resolver/languages/php.ts +0 -7
- package/src/resolver/languages/python.ts +0 -35
- package/src/resolver/languages/ruby.ts +0 -21
- package/src/resolver/languages/rust.ts +0 -7
- package/src/resolver/languages/typescript.ts +0 -168
- package/src/resolver/re-export-resolver.ts +0 -66
- package/src/resolver/symbol-table.ts +0 -67
- package/src/shared/extract-calls.ts +0 -75
- package/src/shared/file-hash.ts +0 -12
- package/src/shared/filters.ts +0 -243
- package/src/shared/logger.ts +0 -17
- package/src/shared/qualified-name.ts +0 -5
- package/src/shared/safe-path.ts +0 -31
- package/src/shared/schemas.ts +0 -32
package/src/commands/context.ts
DELETED
|
@@ -1,182 +0,0 @@
|
|
|
1
|
-
import { readFileSync, rmSync, writeFileSync } from 'fs';
|
|
2
|
-
import { resolve } from 'path';
|
|
3
|
-
import { buildContextV2 } from '../analysis/context-builder';
|
|
4
|
-
import { formatPrompt } from '../analysis/prompt-formatter';
|
|
5
|
-
import { mergeGraphs } from '../graph/merger';
|
|
6
|
-
import type { GraphData, MainGraphInput } from '../graph/types';
|
|
7
|
-
import { log } from '../shared/logger';
|
|
8
|
-
import { GraphInputSchema } from '../shared/schemas';
|
|
9
|
-
import { createSecureTempFile } from '../shared/temp';
|
|
10
|
-
import { executeParse } from './parse';
|
|
11
|
-
|
|
12
|
-
interface ContextOptions {
|
|
13
|
-
repoDir: string;
|
|
14
|
-
files: string[];
|
|
15
|
-
graph?: string;
|
|
16
|
-
out: string;
|
|
17
|
-
minConfidence: number;
|
|
18
|
-
maxDepth: number;
|
|
19
|
-
format: 'json' | 'prompt';
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
export async function executeContext(opts: ContextOptions): Promise<void> {
|
|
23
|
-
const repoDir = resolve(opts.repoDir);
|
|
24
|
-
|
|
25
|
-
log.info('context: starting', {
|
|
26
|
-
files: opts.files,
|
|
27
|
-
repoDir,
|
|
28
|
-
graph: opts.graph ?? null,
|
|
29
|
-
format: opts.format,
|
|
30
|
-
minConfidence: opts.minConfidence,
|
|
31
|
-
maxDepth: opts.maxDepth,
|
|
32
|
-
});
|
|
33
|
-
|
|
34
|
-
// Parse changed files using secure temp
|
|
35
|
-
const tmp = createSecureTempFile('ctx');
|
|
36
|
-
try {
|
|
37
|
-
await executeParse({
|
|
38
|
-
repoDir,
|
|
39
|
-
files: opts.files,
|
|
40
|
-
all: false,
|
|
41
|
-
out: tmp.filePath,
|
|
42
|
-
});
|
|
43
|
-
const parseResult = JSON.parse(readFileSync(tmp.filePath, 'utf-8'));
|
|
44
|
-
|
|
45
|
-
log.info('context: parse done', {
|
|
46
|
-
nodes: parseResult.nodes?.length ?? 0,
|
|
47
|
-
edges: parseResult.edges?.length ?? 0,
|
|
48
|
-
});
|
|
49
|
-
|
|
50
|
-
// Load and merge with main graph if provided
|
|
51
|
-
let mergedGraph: GraphData;
|
|
52
|
-
let oldGraph: GraphData | null = null;
|
|
53
|
-
|
|
54
|
-
if (opts.graph) {
|
|
55
|
-
let raw: unknown;
|
|
56
|
-
try {
|
|
57
|
-
raw = JSON.parse(readFileSync(opts.graph, 'utf-8'));
|
|
58
|
-
} catch (_err) {
|
|
59
|
-
process.stderr.write(`Error: Failed to read --graph file: ${opts.graph}\n`);
|
|
60
|
-
process.exit(1);
|
|
61
|
-
}
|
|
62
|
-
const validated = GraphInputSchema.safeParse(raw);
|
|
63
|
-
if (!validated.success) {
|
|
64
|
-
process.stderr.write(`Error: Invalid graph JSON: ${validated.error.message}\n`);
|
|
65
|
-
process.exit(1);
|
|
66
|
-
}
|
|
67
|
-
const changedSet = new Set(opts.files);
|
|
68
|
-
const sameBranch = detectSameBranch(validated.data.nodes, parseResult.nodes, changedSet);
|
|
69
|
-
|
|
70
|
-
log.info('context: baseline graph loaded', {
|
|
71
|
-
graphNodes: validated.data.nodes.length,
|
|
72
|
-
graphEdges: validated.data.edges.length,
|
|
73
|
-
sameBranch,
|
|
74
|
-
});
|
|
75
|
-
|
|
76
|
-
if (sameBranch) {
|
|
77
|
-
// --graph was built from the same commit (e.g. kodus-ai's parse --all on PR branch).
|
|
78
|
-
// Exclude changed files from oldGraph so diff detects their functions as "added"
|
|
79
|
-
// instead of falsely marking everything "unchanged".
|
|
80
|
-
oldGraph = {
|
|
81
|
-
nodes: validated.data.nodes.filter((n: { file_path: string }) => !changedSet.has(n.file_path)),
|
|
82
|
-
edges: validated.data.edges.filter((e: { file_path: string }) => !changedSet.has(e.file_path)),
|
|
83
|
-
};
|
|
84
|
-
log.debug('Same-branch detected: excluding changed files from baseline', {
|
|
85
|
-
changedFiles: opts.files.length,
|
|
86
|
-
});
|
|
87
|
-
} else {
|
|
88
|
-
oldGraph = { nodes: validated.data.nodes, edges: validated.data.edges };
|
|
89
|
-
}
|
|
90
|
-
|
|
91
|
-
const mainGraph: MainGraphInput = {
|
|
92
|
-
repo_id: '',
|
|
93
|
-
sha: '',
|
|
94
|
-
nodes: validated.data.nodes,
|
|
95
|
-
edges: validated.data.edges,
|
|
96
|
-
};
|
|
97
|
-
mergedGraph = mergeGraphs(mainGraph, parseResult, opts.files);
|
|
98
|
-
} else {
|
|
99
|
-
mergedGraph = { nodes: parseResult.nodes, edges: parseResult.edges };
|
|
100
|
-
}
|
|
101
|
-
|
|
102
|
-
// Build V2 context
|
|
103
|
-
const output = buildContextV2({
|
|
104
|
-
mergedGraph,
|
|
105
|
-
oldGraph,
|
|
106
|
-
changedFiles: opts.files,
|
|
107
|
-
minConfidence: opts.minConfidence,
|
|
108
|
-
maxDepth: opts.maxDepth,
|
|
109
|
-
});
|
|
110
|
-
|
|
111
|
-
log.info('context: analysis done', {
|
|
112
|
-
changedFunctions: output.analysis.changed_functions.length,
|
|
113
|
-
diff: output.analysis.structural_diff.summary,
|
|
114
|
-
blastRadius: output.analysis.blast_radius.total_functions,
|
|
115
|
-
risk: `${output.analysis.risk.level} (${output.analysis.risk.score})`,
|
|
116
|
-
testGaps: output.analysis.test_gaps.length,
|
|
117
|
-
affectedFlows: output.analysis.affected_flows.length,
|
|
118
|
-
duration_ms: output.analysis.metadata.duration_ms,
|
|
119
|
-
});
|
|
120
|
-
|
|
121
|
-
if (opts.format === 'prompt') {
|
|
122
|
-
writeFileSync(opts.out, formatPrompt(output));
|
|
123
|
-
} else {
|
|
124
|
-
writeFileSync(opts.out, JSON.stringify(output, null, 2));
|
|
125
|
-
}
|
|
126
|
-
} finally {
|
|
127
|
-
try {
|
|
128
|
-
rmSync(tmp.dir, { recursive: true, force: true });
|
|
129
|
-
} catch (err) {
|
|
130
|
-
log.debug('Failed to clean up temp dir', { dir: tmp.dir, error: String(err) });
|
|
131
|
-
}
|
|
132
|
-
}
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
/**
|
|
136
|
-
* Detect if --graph was built from the same commit as the current repo.
|
|
137
|
-
* Compares file_hash values for changed files between the graph and the fresh parse.
|
|
138
|
-
* When hashes match, the graph can't serve as a baseline for diff — it IS the new state.
|
|
139
|
-
*/
|
|
140
|
-
function detectSameBranch(
|
|
141
|
-
graphNodes: Array<{ file_path: string; file_hash: string }>,
|
|
142
|
-
parseNodes: Array<{ file_path: string; file_hash: string }>,
|
|
143
|
-
changedFiles: Set<string>,
|
|
144
|
-
): boolean {
|
|
145
|
-
const graphHashes = new Map<string, string>();
|
|
146
|
-
for (const n of graphNodes) {
|
|
147
|
-
if (changedFiles.has(n.file_path) && n.file_hash && !graphHashes.has(n.file_path)) {
|
|
148
|
-
graphHashes.set(n.file_path, n.file_hash);
|
|
149
|
-
}
|
|
150
|
-
}
|
|
151
|
-
|
|
152
|
-
// No overlap means graph has no nodes for changed files — not same-branch scenario
|
|
153
|
-
if (graphHashes.size === 0) {
|
|
154
|
-
log.debug('detectSameBranch: no graph hashes for changed files');
|
|
155
|
-
return false;
|
|
156
|
-
}
|
|
157
|
-
|
|
158
|
-
const parseHashes = new Map<string, string>();
|
|
159
|
-
for (const n of parseNodes) {
|
|
160
|
-
if (n.file_hash && !parseHashes.has(n.file_path)) {
|
|
161
|
-
parseHashes.set(n.file_path, n.file_hash);
|
|
162
|
-
}
|
|
163
|
-
}
|
|
164
|
-
|
|
165
|
-
// If any overlapping file has different hash → different branch
|
|
166
|
-
for (const [file, hash] of graphHashes) {
|
|
167
|
-
const parseHash = parseHashes.get(file);
|
|
168
|
-
if (parseHash && parseHash !== hash) {
|
|
169
|
-
log.debug('detectSameBranch: hash mismatch → different branch', {
|
|
170
|
-
file,
|
|
171
|
-
graphHash: hash.substring(0, 8),
|
|
172
|
-
parseHash: parseHash.substring(0, 8),
|
|
173
|
-
});
|
|
174
|
-
return false;
|
|
175
|
-
}
|
|
176
|
-
}
|
|
177
|
-
|
|
178
|
-
log.debug('detectSameBranch: all hashes match → same branch', {
|
|
179
|
-
filesCompared: graphHashes.size,
|
|
180
|
-
});
|
|
181
|
-
return true;
|
|
182
|
-
}
|
package/src/commands/diff.ts
DELETED
|
@@ -1,96 +0,0 @@
|
|
|
1
|
-
import { execSync } from 'child_process';
|
|
2
|
-
import { existsSync, writeFileSync } from 'fs';
|
|
3
|
-
import { relative, resolve } from 'path';
|
|
4
|
-
import { performance } from 'perf_hooks';
|
|
5
|
-
import { computeStructuralDiff } from '../analysis/diff';
|
|
6
|
-
import { buildGraphData } from '../graph/builder';
|
|
7
|
-
import { loadGraph } from '../graph/loader';
|
|
8
|
-
import type { ImportEdge } from '../graph/types';
|
|
9
|
-
import { parseBatch } from '../parser/batch';
|
|
10
|
-
import { discoverFiles } from '../parser/discovery';
|
|
11
|
-
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
12
|
-
import { createImportMap } from '../resolver/import-map';
|
|
13
|
-
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
14
|
-
import { createSymbolTable } from '../resolver/symbol-table';
|
|
15
|
-
import { computeFileHash } from '../shared/file-hash';
|
|
16
|
-
|
|
17
|
-
interface DiffCommandOptions {
|
|
18
|
-
repoDir: string;
|
|
19
|
-
base?: string;
|
|
20
|
-
files?: string[];
|
|
21
|
-
graph: string;
|
|
22
|
-
out: string;
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
export async function executeDiff(opts: DiffCommandOptions): Promise<void> {
|
|
26
|
-
const t0 = performance.now();
|
|
27
|
-
const repoDir = resolve(opts.repoDir);
|
|
28
|
-
|
|
29
|
-
// Resolve changed files
|
|
30
|
-
let changedFiles: string[];
|
|
31
|
-
if (opts.base) {
|
|
32
|
-
try {
|
|
33
|
-
const output = execSync(`git diff --name-only ${opts.base}`, { cwd: repoDir, encoding: 'utf-8' });
|
|
34
|
-
changedFiles = output.trim().split('\n').filter(Boolean);
|
|
35
|
-
} catch (err) {
|
|
36
|
-
process.stderr.write(`Error: failed to run git diff with base "${opts.base}": ${String(err)}\n`);
|
|
37
|
-
process.exit(1);
|
|
38
|
-
}
|
|
39
|
-
} else {
|
|
40
|
-
changedFiles = opts.files!;
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
process.stderr.write(`[1/4] ${changedFiles.length} changed files\n`);
|
|
44
|
-
|
|
45
|
-
// Load old graph
|
|
46
|
-
const graphPath = resolve(opts.graph);
|
|
47
|
-
if (!existsSync(graphPath)) {
|
|
48
|
-
process.stderr.write(`Error: graph file not found: ${graphPath}\n`);
|
|
49
|
-
process.exit(1);
|
|
50
|
-
}
|
|
51
|
-
const oldGraph = loadGraph(graphPath);
|
|
52
|
-
process.stderr.write(`[2/4] Loaded previous graph (${oldGraph.nodes.length} nodes)\n`);
|
|
53
|
-
|
|
54
|
-
// Re-parse changed files
|
|
55
|
-
const absFiles = discoverFiles(repoDir, changedFiles);
|
|
56
|
-
const rawGraph = await parseBatch(absFiles, repoDir);
|
|
57
|
-
|
|
58
|
-
const tsconfigAliases = loadTsconfigAliases(repoDir);
|
|
59
|
-
const symbolTable = createSymbolTable();
|
|
60
|
-
const importMap = createImportMap();
|
|
61
|
-
const importEdges: ImportEdge[] = [];
|
|
62
|
-
|
|
63
|
-
for (const f of rawGraph.functions) symbolTable.add(f.file, f.name, f.qualified);
|
|
64
|
-
for (const c of rawGraph.classes) symbolTable.add(c.file, c.name, c.qualified);
|
|
65
|
-
for (const i of rawGraph.interfaces) symbolTable.add(i.file, i.name, i.qualified);
|
|
66
|
-
|
|
67
|
-
for (const imp of rawGraph.imports) {
|
|
68
|
-
const langKey = imp.lang === 'python' ? 'python' : imp.lang === 'ruby' ? 'ruby' : 'typescript';
|
|
69
|
-
const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
|
|
70
|
-
const resolvedRel = resolved ? relative(repoDir, resolved) : null;
|
|
71
|
-
importEdges.push({ source: imp.file, target: resolvedRel || imp.module, resolved: !!resolvedRel, line: imp.line });
|
|
72
|
-
const target = resolvedRel || imp.module;
|
|
73
|
-
for (const name of imp.names) importMap.add(imp.file, name, target);
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
const { callEdges } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
|
|
77
|
-
|
|
78
|
-
const fileHashes = new Map<string, string>();
|
|
79
|
-
for (const f of absFiles) {
|
|
80
|
-
try {
|
|
81
|
-
fileHashes.set(relative(repoDir, f), computeFileHash(f));
|
|
82
|
-
} catch {}
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
const newGraphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes);
|
|
86
|
-
process.stderr.write(`[3/4] Re-parsed ${absFiles.length} files (${newGraphData.nodes.length} nodes)\n`);
|
|
87
|
-
|
|
88
|
-
// Compute diff
|
|
89
|
-
const relChangedFiles = changedFiles.map((f) => (f.startsWith('/') ? relative(repoDir, f) : f));
|
|
90
|
-
const result = computeStructuralDiff(oldGraph, newGraphData.nodes, newGraphData.edges, relChangedFiles);
|
|
91
|
-
process.stderr.write(
|
|
92
|
-
`[4/4] Diff: +${result.summary.added} -${result.summary.removed} ~${result.summary.modified} nodes (${Math.round(performance.now() - t0)}ms)\n`,
|
|
93
|
-
);
|
|
94
|
-
|
|
95
|
-
writeFileSync(opts.out, JSON.stringify(result, null, 2));
|
|
96
|
-
}
|
package/src/commands/flows.ts
DELETED
|
@@ -1,19 +0,0 @@
|
|
|
1
|
-
import { writeFileSync } from 'fs';
|
|
2
|
-
import { detectFlows } from '../analysis/flows';
|
|
3
|
-
import { loadGraph } from '../graph/loader';
|
|
4
|
-
|
|
5
|
-
interface FlowsCommandOptions {
|
|
6
|
-
graph: string;
|
|
7
|
-
out: string;
|
|
8
|
-
maxDepth: number;
|
|
9
|
-
type: 'test' | 'http' | 'all';
|
|
10
|
-
}
|
|
11
|
-
|
|
12
|
-
export function executeFlows(opts: FlowsCommandOptions): void {
|
|
13
|
-
const graph = loadGraph(opts.graph);
|
|
14
|
-
const result = detectFlows(graph, { maxDepth: opts.maxDepth, type: opts.type });
|
|
15
|
-
writeFileSync(opts.out, JSON.stringify(result, null, 2));
|
|
16
|
-
process.stderr.write(
|
|
17
|
-
`Flows: ${result.summary.total_flows} detected (test:${result.summary.by_type.test} http:${result.summary.by_type.http}), avg depth ${result.summary.avg_depth}\n`,
|
|
18
|
-
);
|
|
19
|
-
}
|
package/src/commands/parse.ts
DELETED
|
@@ -1,124 +0,0 @@
|
|
|
1
|
-
import { resolve, relative } from 'path';
|
|
2
|
-
import { performance } from 'perf_hooks';
|
|
3
|
-
import { buildGraphData } from '../graph/builder';
|
|
4
|
-
import { writeGraphJSON } from '../graph/json-writer';
|
|
5
|
-
import type { ImportEdge } from '../graph/types';
|
|
6
|
-
import { parseBatch } from '../parser/batch';
|
|
7
|
-
import { discoverFiles } from '../parser/discovery';
|
|
8
|
-
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
9
|
-
import { createImportMap } from '../resolver/import-map';
|
|
10
|
-
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
11
|
-
import { buildReExportMap } from '../resolver/re-export-resolver';
|
|
12
|
-
import { createSymbolTable } from '../resolver/symbol-table';
|
|
13
|
-
import { computeFileHash } from '../shared/file-hash';
|
|
14
|
-
import { log } from '../shared/logger';
|
|
15
|
-
|
|
16
|
-
export interface ParseOptions {
|
|
17
|
-
repoDir: string;
|
|
18
|
-
files?: string[];
|
|
19
|
-
all: boolean;
|
|
20
|
-
out: string;
|
|
21
|
-
include?: string[];
|
|
22
|
-
exclude?: string[];
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
export async function executeParse(opts: ParseOptions): Promise<void> {
|
|
26
|
-
const t0 = performance.now();
|
|
27
|
-
const repoDir = resolve(opts.repoDir);
|
|
28
|
-
|
|
29
|
-
// Phase 1: Discover files
|
|
30
|
-
const files = discoverFiles(repoDir, opts.all ? undefined : opts.files, opts.include, opts.exclude);
|
|
31
|
-
process.stderr.write(`[1/5] Discovered ${files.length} files\n`);
|
|
32
|
-
|
|
33
|
-
// Phase 2: Parse + extract
|
|
34
|
-
let rawGraph = await parseBatch(files, repoDir);
|
|
35
|
-
process.stderr.write(
|
|
36
|
-
`[2/5] Parsed ${rawGraph.functions.length} functions, ${rawGraph.classes.length} classes, ${rawGraph.rawCalls.length} call sites\n`,
|
|
37
|
-
);
|
|
38
|
-
|
|
39
|
-
// Phase 3: Resolve imports
|
|
40
|
-
const tsconfigAliases = loadTsconfigAliases(repoDir);
|
|
41
|
-
let symbolTable = createSymbolTable();
|
|
42
|
-
let importMap = createImportMap();
|
|
43
|
-
let importEdges: ImportEdge[] = [];
|
|
44
|
-
|
|
45
|
-
for (const f of rawGraph.functions) symbolTable.add(f.file, f.name, f.qualified);
|
|
46
|
-
for (const c of rawGraph.classes) symbolTable.add(c.file, c.name, c.qualified);
|
|
47
|
-
for (const i of rawGraph.interfaces) symbolTable.add(i.file, i.name, i.qualified);
|
|
48
|
-
|
|
49
|
-
// Pre-resolve re-exports so barrel imports follow through to actual definitions
|
|
50
|
-
const barrelMap = buildReExportMap(rawGraph.reExports, repoDir, tsconfigAliases);
|
|
51
|
-
|
|
52
|
-
for (const imp of rawGraph.imports) {
|
|
53
|
-
const langKey = imp.lang === 'python' ? 'python' : imp.lang === 'ruby' ? 'ruby' : 'typescript';
|
|
54
|
-
const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
|
|
55
|
-
const resolvedRel = resolved ? relative(repoDir, resolved) : null;
|
|
56
|
-
importEdges.push({
|
|
57
|
-
source: imp.file,
|
|
58
|
-
target: resolvedRel || imp.module,
|
|
59
|
-
resolved: !!resolvedRel,
|
|
60
|
-
line: imp.line,
|
|
61
|
-
});
|
|
62
|
-
const target = resolvedRel || imp.module;
|
|
63
|
-
for (const name of imp.names) {
|
|
64
|
-
// If target is a barrel file, follow re-exports to find the actual definition
|
|
65
|
-
let finalTarget = target;
|
|
66
|
-
if (resolvedRel) {
|
|
67
|
-
const reExportedFiles = barrelMap.get(resolvedRel);
|
|
68
|
-
if (reExportedFiles) {
|
|
69
|
-
for (const reFile of reExportedFiles) {
|
|
70
|
-
if (symbolTable.lookupExact(reFile, name)) {
|
|
71
|
-
finalTarget = reFile;
|
|
72
|
-
break;
|
|
73
|
-
}
|
|
74
|
-
}
|
|
75
|
-
}
|
|
76
|
-
}
|
|
77
|
-
importMap.add(imp.file, name, finalTarget);
|
|
78
|
-
}
|
|
79
|
-
}
|
|
80
|
-
|
|
81
|
-
process.stderr.write(
|
|
82
|
-
`[3/5] Resolved ${importEdges.filter((e) => e.resolved).length}/${importEdges.length} imports\n`,
|
|
83
|
-
);
|
|
84
|
-
|
|
85
|
-
// Phase 4: Resolve calls
|
|
86
|
-
let { callEdges, stats } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
|
|
87
|
-
process.stderr.write(
|
|
88
|
-
`[4/5] Resolved ${callEdges.length} calls (DI:${stats.di} same:${stats.same} import:${stats.import} unique:${stats.unique} ambiguous:${stats.ambiguous} noise:${stats.noise})\n`,
|
|
89
|
-
);
|
|
90
|
-
|
|
91
|
-
// Phase 5: Build output
|
|
92
|
-
const fileHashes = new Map<string, string>();
|
|
93
|
-
for (const f of files) {
|
|
94
|
-
try {
|
|
95
|
-
fileHashes.set(relative(repoDir, f), computeFileHash(f));
|
|
96
|
-
} catch (err) {
|
|
97
|
-
log.warn('Failed to compute file hash', { file: f, error: String(err) });
|
|
98
|
-
}
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
const parseErrors = rawGraph.parseErrors;
|
|
102
|
-
const extractErrors = rawGraph.extractErrors;
|
|
103
|
-
const graphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes);
|
|
104
|
-
process.stderr.write(`[5/5] Built graph: ${graphData.nodes.length} nodes, ${graphData.edges.length} edges\n`);
|
|
105
|
-
|
|
106
|
-
// Release intermediaries — no longer needed after buildGraphData
|
|
107
|
-
rawGraph = null as any;
|
|
108
|
-
symbolTable = null as any;
|
|
109
|
-
importMap = null as any;
|
|
110
|
-
callEdges = null as any;
|
|
111
|
-
importEdges = null as any;
|
|
112
|
-
|
|
113
|
-
const metadata = {
|
|
114
|
-
repo_dir: repoDir,
|
|
115
|
-
files_parsed: files.length,
|
|
116
|
-
total_nodes: graphData.nodes.length,
|
|
117
|
-
total_edges: graphData.edges.length,
|
|
118
|
-
duration_ms: Math.round(performance.now() - t0),
|
|
119
|
-
parse_errors: parseErrors,
|
|
120
|
-
extract_errors: extractErrors,
|
|
121
|
-
};
|
|
122
|
-
|
|
123
|
-
writeGraphJSON(opts.out, metadata, graphData.nodes, graphData.edges);
|
|
124
|
-
}
|
package/src/commands/search.ts
DELETED
|
@@ -1,41 +0,0 @@
|
|
|
1
|
-
import { writeFileSync } from 'fs';
|
|
2
|
-
import { findCallees, findCallers, searchNodes } from '../analysis/search';
|
|
3
|
-
import { loadGraph } from '../graph/loader';
|
|
4
|
-
import type { GraphNode } from '../graph/types';
|
|
5
|
-
|
|
6
|
-
interface SearchCommandOptions {
|
|
7
|
-
graph: string;
|
|
8
|
-
query?: string;
|
|
9
|
-
kind?: string;
|
|
10
|
-
file?: string;
|
|
11
|
-
callersOf?: string;
|
|
12
|
-
calleesOf?: string;
|
|
13
|
-
limit: number;
|
|
14
|
-
out?: string;
|
|
15
|
-
}
|
|
16
|
-
|
|
17
|
-
export function executeSearch(opts: SearchCommandOptions): void {
|
|
18
|
-
const graph = loadGraph(opts.graph);
|
|
19
|
-
|
|
20
|
-
let results: GraphNode[];
|
|
21
|
-
let queryInfo: Record<string, string | null>;
|
|
22
|
-
|
|
23
|
-
if (opts.callersOf) {
|
|
24
|
-
results = findCallers(graph, opts.callersOf);
|
|
25
|
-
queryInfo = { callers_of: opts.callersOf, kind: null, file: null };
|
|
26
|
-
} else if (opts.calleesOf) {
|
|
27
|
-
results = findCallees(graph, opts.calleesOf);
|
|
28
|
-
queryInfo = { callees_of: opts.calleesOf, kind: null, file: null };
|
|
29
|
-
} else {
|
|
30
|
-
results = searchNodes(graph, { query: opts.query, kind: opts.kind, file: opts.file, limit: opts.limit });
|
|
31
|
-
queryInfo = { pattern: opts.query || null, kind: opts.kind || null, file: opts.file || null };
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
const output = JSON.stringify({ results, total: results.length, query: queryInfo }, null, 2);
|
|
35
|
-
|
|
36
|
-
if (opts.out) {
|
|
37
|
-
writeFileSync(opts.out, output);
|
|
38
|
-
} else {
|
|
39
|
-
process.stdout.write(`${output}\n`);
|
|
40
|
-
}
|
|
41
|
-
}
|
package/src/commands/update.ts
DELETED
|
@@ -1,166 +0,0 @@
|
|
|
1
|
-
import { existsSync, mkdirSync, writeFileSync } from 'fs';
|
|
2
|
-
import { dirname, relative, resolve } from 'path';
|
|
3
|
-
import { performance } from 'perf_hooks';
|
|
4
|
-
import { buildGraphData } from '../graph/builder';
|
|
5
|
-
import { loadGraph } from '../graph/loader';
|
|
6
|
-
import type { GraphEdge, GraphNode, ImportEdge, ParseOutput } from '../graph/types';
|
|
7
|
-
import { parseBatch } from '../parser/batch';
|
|
8
|
-
import { discoverFiles } from '../parser/discovery';
|
|
9
|
-
import { resolveAllCalls } from '../resolver/call-resolver';
|
|
10
|
-
import { createImportMap } from '../resolver/import-map';
|
|
11
|
-
import { loadTsconfigAliases, resolveImport } from '../resolver/import-resolver';
|
|
12
|
-
import { createSymbolTable } from '../resolver/symbol-table';
|
|
13
|
-
import { computeFileHash } from '../shared/file-hash';
|
|
14
|
-
|
|
15
|
-
const DEFAULT_GRAPH_PATH = '.kodus-graph/graph.json';
|
|
16
|
-
|
|
17
|
-
interface UpdateCommandOptions {
|
|
18
|
-
repoDir: string;
|
|
19
|
-
graph?: string;
|
|
20
|
-
out?: string;
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
export async function executeUpdate(opts: UpdateCommandOptions): Promise<void> {
|
|
24
|
-
const t0 = performance.now();
|
|
25
|
-
const repoDir = resolve(opts.repoDir);
|
|
26
|
-
const graphPath = resolve(repoDir, opts.graph || DEFAULT_GRAPH_PATH);
|
|
27
|
-
const outPath = resolve(repoDir, opts.out || opts.graph || DEFAULT_GRAPH_PATH);
|
|
28
|
-
|
|
29
|
-
if (!existsSync(graphPath)) {
|
|
30
|
-
process.stderr.write(`Error: graph file not found: ${graphPath}. Run "kodus-graph parse" first.\n`);
|
|
31
|
-
process.exit(1);
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
const oldGraph = loadGraph(graphPath);
|
|
35
|
-
process.stderr.write(`[1/5] Loaded previous graph (${oldGraph.nodes.length} nodes)\n`);
|
|
36
|
-
|
|
37
|
-
// Build file hash index from old graph
|
|
38
|
-
const oldHashes = new Map<string, string>();
|
|
39
|
-
for (const node of oldGraph.nodes) {
|
|
40
|
-
if (node.file_hash && !oldHashes.has(node.file_path)) {
|
|
41
|
-
oldHashes.set(node.file_path, node.file_hash);
|
|
42
|
-
}
|
|
43
|
-
}
|
|
44
|
-
|
|
45
|
-
// Discover current files
|
|
46
|
-
const allFiles = discoverFiles(repoDir);
|
|
47
|
-
const allRel = allFiles.map((f) => relative(repoDir, f));
|
|
48
|
-
const currentFiles = new Set(allRel);
|
|
49
|
-
const oldFiles = new Set(oldHashes.keys());
|
|
50
|
-
|
|
51
|
-
// Classify files
|
|
52
|
-
const added: string[] = [];
|
|
53
|
-
const modified: string[] = [];
|
|
54
|
-
const deleted: string[] = [];
|
|
55
|
-
const unchanged: string[] = [];
|
|
56
|
-
|
|
57
|
-
for (const file of currentFiles) {
|
|
58
|
-
const absPath = resolve(repoDir, file);
|
|
59
|
-
if (!oldHashes.has(file)) {
|
|
60
|
-
added.push(file);
|
|
61
|
-
} else {
|
|
62
|
-
const currentHash = computeFileHash(absPath);
|
|
63
|
-
if (currentHash !== oldHashes.get(file)) {
|
|
64
|
-
modified.push(file);
|
|
65
|
-
} else {
|
|
66
|
-
unchanged.push(file);
|
|
67
|
-
}
|
|
68
|
-
}
|
|
69
|
-
}
|
|
70
|
-
|
|
71
|
-
for (const file of oldFiles) {
|
|
72
|
-
if (!currentFiles.has(file)) deleted.push(file);
|
|
73
|
-
}
|
|
74
|
-
|
|
75
|
-
const toReparse = [...added, ...modified];
|
|
76
|
-
process.stderr.write(
|
|
77
|
-
`[2/5] Files: ${added.length} added, ${modified.length} modified, ${deleted.length} deleted, ${unchanged.length} unchanged\n`,
|
|
78
|
-
);
|
|
79
|
-
|
|
80
|
-
if (toReparse.length === 0 && deleted.length === 0) {
|
|
81
|
-
process.stderr.write('[3/5] No changes detected, graph is up to date\n');
|
|
82
|
-
const output: ParseOutput = {
|
|
83
|
-
metadata: {
|
|
84
|
-
...oldGraph.metadata,
|
|
85
|
-
duration_ms: Math.round(performance.now() - t0),
|
|
86
|
-
files_unchanged: unchanged.length,
|
|
87
|
-
incremental: true,
|
|
88
|
-
},
|
|
89
|
-
nodes: oldGraph.nodes,
|
|
90
|
-
edges: oldGraph.edges,
|
|
91
|
-
};
|
|
92
|
-
ensureDir(outPath);
|
|
93
|
-
writeFileSync(outPath, JSON.stringify(output, null, 2));
|
|
94
|
-
return;
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
// Re-parse changed files
|
|
98
|
-
const absToReparse = toReparse.map((f) => resolve(repoDir, f));
|
|
99
|
-
const rawGraph = await parseBatch(absToReparse, repoDir);
|
|
100
|
-
process.stderr.write(`[3/5] Re-parsed ${toReparse.length} files\n`);
|
|
101
|
-
|
|
102
|
-
// Resolve imports and calls for new files
|
|
103
|
-
const tsconfigAliases = loadTsconfigAliases(repoDir);
|
|
104
|
-
const symbolTable = createSymbolTable();
|
|
105
|
-
const importMap = createImportMap();
|
|
106
|
-
const importEdges: ImportEdge[] = [];
|
|
107
|
-
|
|
108
|
-
for (const f of rawGraph.functions) symbolTable.add(f.file, f.name, f.qualified);
|
|
109
|
-
for (const c of rawGraph.classes) symbolTable.add(c.file, c.name, c.qualified);
|
|
110
|
-
for (const i of rawGraph.interfaces) symbolTable.add(i.file, i.name, i.qualified);
|
|
111
|
-
|
|
112
|
-
for (const imp of rawGraph.imports) {
|
|
113
|
-
const langKey = imp.lang === 'python' ? 'python' : imp.lang === 'ruby' ? 'ruby' : 'typescript';
|
|
114
|
-
const resolved = resolveImport(resolve(repoDir, imp.file), imp.module, langKey, repoDir, tsconfigAliases);
|
|
115
|
-
const resolvedRel = resolved ? relative(repoDir, resolved) : null;
|
|
116
|
-
importEdges.push({ source: imp.file, target: resolvedRel || imp.module, resolved: !!resolvedRel, line: imp.line });
|
|
117
|
-
const target = resolvedRel || imp.module;
|
|
118
|
-
for (const name of imp.names) importMap.add(imp.file, name, target);
|
|
119
|
-
}
|
|
120
|
-
|
|
121
|
-
const { callEdges } = resolveAllCalls(rawGraph.rawCalls, rawGraph.diMaps, symbolTable, importMap);
|
|
122
|
-
|
|
123
|
-
const fileHashes = new Map<string, string>();
|
|
124
|
-
for (const f of absToReparse) {
|
|
125
|
-
try {
|
|
126
|
-
fileHashes.set(relative(repoDir, f), computeFileHash(f));
|
|
127
|
-
} catch {}
|
|
128
|
-
}
|
|
129
|
-
|
|
130
|
-
const newGraphData = buildGraphData(rawGraph, callEdges, importEdges, repoDir, fileHashes);
|
|
131
|
-
process.stderr.write(`[4/5] Built new graph fragment (${newGraphData.nodes.length} nodes)\n`);
|
|
132
|
-
|
|
133
|
-
// Merge: keep old nodes/edges NOT in changed/deleted files, add new ones
|
|
134
|
-
const changedOrDeleted = new Set([...toReparse, ...deleted]);
|
|
135
|
-
const mergedNodes: GraphNode[] = oldGraph.nodes.filter((n) => !changedOrDeleted.has(n.file_path));
|
|
136
|
-
const mergedEdges: GraphEdge[] = oldGraph.edges.filter((e) => !changedOrDeleted.has(e.file_path));
|
|
137
|
-
|
|
138
|
-
mergedNodes.push(...newGraphData.nodes);
|
|
139
|
-
mergedEdges.push(...newGraphData.edges);
|
|
140
|
-
|
|
141
|
-
process.stderr.write(`[5/5] Merged: ${mergedNodes.length} nodes, ${mergedEdges.length} edges\n`);
|
|
142
|
-
|
|
143
|
-
const output: ParseOutput = {
|
|
144
|
-
metadata: {
|
|
145
|
-
repo_dir: repoDir,
|
|
146
|
-
files_parsed: toReparse.length,
|
|
147
|
-
files_unchanged: unchanged.length,
|
|
148
|
-
total_nodes: mergedNodes.length,
|
|
149
|
-
total_edges: mergedEdges.length,
|
|
150
|
-
duration_ms: Math.round(performance.now() - t0),
|
|
151
|
-
parse_errors: rawGraph.parseErrors,
|
|
152
|
-
extract_errors: rawGraph.extractErrors,
|
|
153
|
-
incremental: true,
|
|
154
|
-
},
|
|
155
|
-
nodes: mergedNodes,
|
|
156
|
-
edges: mergedEdges,
|
|
157
|
-
};
|
|
158
|
-
|
|
159
|
-
ensureDir(outPath);
|
|
160
|
-
writeFileSync(outPath, JSON.stringify(output, null, 2));
|
|
161
|
-
}
function ensureDir(filePath: string): void {
|
|
164
|
-
const dir = dirname(filePath);
|
|
165
|
-
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
|
166
|
-
}
|