@kodus/kodus-graph 0.2.12 → 0.2.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analysis/context-builder.d.ts +3 -0
- package/dist/analysis/context-builder.js +18 -0
- package/dist/analysis/diff-lines.d.ts +27 -0
- package/dist/analysis/diff-lines.js +71 -0
- package/dist/analysis/prompt-formatter.js +59 -0
- package/dist/cli.js +2 -0
- package/dist/commands/context.d.ts +1 -0
- package/dist/commands/context.js +21 -0
- package/package.json +1 -1
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import type { AffectedFlow, ContextAnalysisMetadata, GraphData, GraphEdge, GraphNode, ParseMetadata } from '../graph/types';
|
|
2
2
|
import { computeBlastRadius } from './blast-radius';
|
|
3
3
|
import { type DiffResult } from './diff';
|
|
4
|
+
import { type DiffHunk } from './diff-lines';
|
|
4
5
|
import { enrichChangedFunctions } from './enrich';
|
|
5
6
|
import { extractInheritance } from './inheritance';
|
|
6
7
|
import { computeRiskScore } from './risk-score';
|
|
@@ -29,6 +30,8 @@ interface BuildContextV2Options {
|
|
|
29
30
|
minConfidence: number;
|
|
30
31
|
maxDepth: number;
|
|
31
32
|
skipTests?: boolean;
|
|
33
|
+
/** Parsed diff hunks per file — used to filter changed functions in fallback mode (no oldGraph) */
|
|
34
|
+
diffHunks?: Map<string, DiffHunk[]>;
|
|
32
35
|
}
|
|
33
36
|
export declare function buildContextV2(opts: BuildContextV2Options): ContextV2Output;
|
|
34
37
|
export {};
|
|
@@ -1,7 +1,9 @@
|
|
|
1
1
|
import { performance } from 'perf_hooks';
|
|
2
2
|
import { indexGraph } from '../graph/loader';
|
|
3
|
+
import { log } from '../shared/logger';
|
|
3
4
|
import { computeBlastRadius } from './blast-radius';
|
|
4
5
|
import { computeStructuralDiff } from './diff';
|
|
6
|
+
import { overlapsWithDiff } from './diff-lines';
|
|
5
7
|
import { enrichChangedFunctions } from './enrich';
|
|
6
8
|
import { detectFlows } from './flows';
|
|
7
9
|
import { extractInheritance } from './inheritance';
|
|
@@ -24,6 +26,22 @@ export function buildContextV2(opts) {
|
|
|
24
26
|
...structuralDiff.nodes.modified.map((n) => n.qualified_name),
|
|
25
27
|
...structuralDiff.nodes.removed.map((n) => n.qualified_name),
|
|
26
28
|
]);
|
|
29
|
+
// In fallback mode (no oldGraph), ALL functions are "added" by structural diff.
|
|
30
|
+
// If we have actual diff hunks, filter to only functions whose lines overlap with real changes.
|
|
31
|
+
if (!oldGraph && opts.diffHunks && opts.diffHunks.size > 0) {
|
|
32
|
+
const before = trulyChangedQN.size;
|
|
33
|
+
for (const qn of [...trulyChangedQN]) {
|
|
34
|
+
const node = indexed.byQualified.get(qn);
|
|
35
|
+
if (node && !overlapsWithDiff(node.file_path, node.line_start, node.line_end, opts.diffHunks)) {
|
|
36
|
+
trulyChangedQN.delete(qn);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
log.info('context: diff-hunk filter applied (fallback mode)', {
|
|
40
|
+
before,
|
|
41
|
+
after: trulyChangedQN.size,
|
|
42
|
+
filtered: before - trulyChangedQN.size,
|
|
43
|
+
});
|
|
44
|
+
}
|
|
27
45
|
const blastRadius = computeBlastRadius(mergedGraph, [...trulyChangedQN], maxDepth, minConfidence);
|
|
28
46
|
const allFlows = detectFlows(indexed, { maxDepth: 10, type: 'all' });
|
|
29
47
|
const testGaps = opts.skipTests ? [] : findTestGaps(mergedGraph, changedFiles);
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
 * Parse unified diff output to extract changed line ranges per file.
 * Used in fallback mode (no DB baseline) to filter which AST functions
 * are truly "changed" vs just "present in the file".
 */
export interface DiffHunk {
    /** First changed line on the new (post-change) side (1-based, per unified-diff convention) */
    newStart: number;
    /** Number of lines in this hunk on the new side; 0 marks a pure deletion with nothing on the new side */
    newCount: number;
}
/**
 * Parse a unified diff string into per-file hunk ranges.
 *
 * Accepts standard `git diff` / `git format-patch` output as well as
 * GitHub-style patches (individual file patches concatenated).
 *
 * @returns Map from file path (relative, no leading slash) to its hunks
 */
export declare function parseDiffHunks(diffContent: string): Map<string, DiffHunk[]>;
/**
 * Check if a node's line range overlaps with any diff hunk in the same file.
 *
 * A node [lineStart, lineEnd] overlaps with a hunk [hunkStart, hunkEnd] when:
 *   lineStart <= hunkEnd AND lineEnd >= hunkStart
 * (all bounds inclusive). Hunks with newCount === 0 never overlap.
 */
export declare function overlapsWithDiff(filePath: string, lineStart: number, lineEnd: number, diffHunks: Map<string, DiffHunk[]>): boolean;
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Parse unified diff output to extract changed line ranges per file.
|
|
3
|
+
* Used in fallback mode (no DB baseline) to filter which AST functions
|
|
4
|
+
* are truly "changed" vs just "present in the file".
|
|
5
|
+
*/
|
|
6
|
+
/**
|
|
7
|
+
* Parse a unified diff string into per-file hunk ranges.
|
|
8
|
+
*
|
|
9
|
+
* Accepts standard `git diff` / `git format-patch` output as well as
|
|
10
|
+
* GitHub-style patches (individual file patches concatenated).
|
|
11
|
+
*
|
|
12
|
+
* @returns Map from file path (relative, no leading slash) to its hunks
|
|
13
|
+
*/
|
|
14
|
+
export function parseDiffHunks(diffContent) {
|
|
15
|
+
const result = new Map();
|
|
16
|
+
let currentFile = null;
|
|
17
|
+
for (const line of diffContent.split('\n')) {
|
|
18
|
+
// Match file header: +++ b/path/to/file
|
|
19
|
+
if (line.startsWith('+++ b/')) {
|
|
20
|
+
currentFile = line.slice(6);
|
|
21
|
+
if (!result.has(currentFile)) {
|
|
22
|
+
result.set(currentFile, []);
|
|
23
|
+
}
|
|
24
|
+
continue;
|
|
25
|
+
}
|
|
26
|
+
// Also handle +++ path (without b/ prefix, e.g. GitHub patch format)
|
|
27
|
+
if (line.startsWith('+++ ') && !line.startsWith('+++ /dev/null')) {
|
|
28
|
+
const path = line.slice(4).replace(/^a\/|^b\//, '');
|
|
29
|
+
if (path) {
|
|
30
|
+
currentFile = path;
|
|
31
|
+
if (!result.has(currentFile)) {
|
|
32
|
+
result.set(currentFile, []);
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
continue;
|
|
36
|
+
}
|
|
37
|
+
// Match hunk header: @@ -oldStart,oldCount +newStart,newCount @@
|
|
38
|
+
if (line.startsWith('@@') && currentFile) {
|
|
39
|
+
const match = line.match(/@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
|
|
40
|
+
if (match) {
|
|
41
|
+
const newStart = parseInt(match[1], 10);
|
|
42
|
+
const newCount = match[2] !== undefined ? parseInt(match[2], 10) : 1;
|
|
43
|
+
result.get(currentFile).push({ newStart, newCount });
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
return result;
|
|
48
|
+
}
|
|
49
|
+
/**
 * Check whether a node's [lineStart, lineEnd] range touches any diff hunk
 * recorded for the same file.
 *
 * Two closed intervals [a, b] and [c, d] overlap exactly when
 * a <= d AND b >= c; hunks with newCount === 0 (pure deletion markers,
 * nothing on the new side) never overlap anything.
 *
 * @param {string} filePath - File the node lives in, as keyed in diffHunks.
 * @param {number} lineStart - First line of the node (inclusive).
 * @param {number} lineEnd - Last line of the node (inclusive).
 * @param {Map<string, Array<{newStart: number, newCount: number}>>} diffHunks
 * @returns {boolean} true when at least one non-empty hunk overlaps the range.
 */
export function overlapsWithDiff(filePath, lineStart, lineEnd, diffHunks) {
    const fileHunks = diffHunks.get(filePath) ?? [];
    return fileHunks.some((hunk) => {
        // Pure-deletion hunk: no lines exist on the new side to overlap.
        if (hunk.newCount === 0) {
            return false;
        }
        const lastHunkLine = hunk.newStart + hunk.newCount - 1;
        return lineStart <= lastHunkLine && lineEnd >= hunk.newStart;
    });
}
|
|
@@ -90,6 +90,15 @@ export function formatPrompt(output) {
|
|
|
90
90
|
lines.push('');
|
|
91
91
|
}
|
|
92
92
|
}
|
|
93
|
+
// ── Imports for changed files (helps agent spot missing/new dependencies) ──
|
|
94
|
+
const importLines = buildImportsSection(output, analysis);
|
|
95
|
+
if (importLines.length > 0) {
|
|
96
|
+
lines.push('IMPORTS:');
|
|
97
|
+
for (const line of importLines) {
|
|
98
|
+
lines.push(line);
|
|
99
|
+
}
|
|
100
|
+
lines.push('');
|
|
101
|
+
}
|
|
93
102
|
// ── Hierarchy (compact) ──
|
|
94
103
|
if (analysis.inheritance.length > 0) {
|
|
95
104
|
lines.push('HIERARCHY:');
|
|
@@ -216,3 +225,53 @@ function buildSiblingMap(analysis, output) {
|
|
|
216
225
|
}
|
|
217
226
|
return result;
|
|
218
227
|
}
|
|
228
|
+
/**
 * Build compact IMPORTS section for changed files.
 * Shows each import edge from a changed file with:
 *  - NEW tag if the import was added in this change (not in oldGraph)
 *  - ⚠ UNRESOLVED if the import target has no corresponding node in the graph
 * Groups by source file for readability.
 *
 * @param output - Context output carrying output.graph.{nodes,edges}.
 * @param analysis - Analysis carrying analysis.structural_diff.
 * @returns {string[]} One formatted line per import edge (may be empty).
 */
function buildImportsSection(output, analysis) {
    const changedFiles = new Set(analysis.structural_diff.changed_files);
    // Collect IMPORTS edges originating in changed files
    const importEdges = output.graph.edges.filter((e) => e.kind === 'IMPORTS' && changedFiles.has(e.file_path));
    if (importEdges.length === 0) {
        return [];
    }
    // Keys of import edges added in this diff — these get the NEW tag
    const newImportKeys = new Set(analysis.structural_diff.edges.added
        .filter((e) => e.kind === 'IMPORTS')
        .map((e) => `${e.source_qualified}→${e.target_qualified}`));
    // All node qualified names, plus every "::"-boundary prefix of each name.
    // Precomputing prefixes replaces the previous O(edges × nodes) rescan
    // ([...allNodes].some(qn => qn.startsWith(`${target}::`)) per edge)
    // with O(nodes) setup and O(1) lookups — same resolution semantics.
    const allNodes = new Set();
    const nodePrefixes = new Set();
    for (const n of output.graph.nodes) {
        const qn = n.qualified_name;
        allNodes.add(qn);
        let idx = qn.indexOf('::');
        while (idx !== -1) {
            nodePrefixes.add(qn.slice(0, idx));
            idx = qn.indexOf('::', idx + 1);
        }
    }
    // Group by source file
    const byFile = new Map();
    for (const edge of importEdges) {
        const existing = byFile.get(edge.file_path) || [];
        existing.push(edge);
        byFile.set(edge.file_path, existing);
    }
    const lines = [];
    for (const [filePath, edges] of byFile) {
        for (const edge of edges) {
            const key = `${edge.source_qualified}→${edge.target_qualified}`;
            const tags = [];
            if (newImportKeys.has(key)) {
                tags.push('NEW');
            }
            // Target resolves when it exists as a node itself, or when some
            // node lives "inside" it (its qualified name starts with
            // `${target}::`, e.g. a file-level reference like "src/utils.ts").
            const targetExists = allNodes.has(edge.target_qualified) ||
                nodePrefixes.has(edge.target_qualified);
            if (!targetExists) {
                tags.push('⚠ UNRESOLVED');
            }
            const tagStr = tags.length > 0 ? ` (${tags.join(', ')})` : '';
            lines.push(` ${filePath} → ${shortName(edge.target_qualified)}${tagStr}`);
        }
    }
    return lines;
}
|
package/dist/cli.js
CHANGED
|
@@ -71,6 +71,7 @@ program
|
|
|
71
71
|
.requiredOption('--files <paths...>', 'Changed files')
|
|
72
72
|
.option('--repo-dir <path>', 'Repository root directory', '.')
|
|
73
73
|
.option('--graph <path>', 'Path to main graph JSON')
|
|
74
|
+
.option('--diff <path>', 'Path to unified diff file (filters changed functions in fallback mode)')
|
|
74
75
|
.requiredOption('--out <path>', 'Output JSON file path')
|
|
75
76
|
.option('--min-confidence <n>', 'Minimum CALLS edge confidence', '0.5')
|
|
76
77
|
.option('--max-depth <n>', 'Blast radius BFS depth', '3')
|
|
@@ -90,6 +91,7 @@ program
|
|
|
90
91
|
repoDir: opts.repoDir,
|
|
91
92
|
files: opts.files,
|
|
92
93
|
graph: opts.graph,
|
|
94
|
+
diff: opts.diff,
|
|
93
95
|
out: opts.out,
|
|
94
96
|
minConfidence: Number.parseFloat(opts.minConfidence),
|
|
95
97
|
maxDepth: Number.parseInt(opts.maxDepth, 10),
|
package/dist/commands/context.js
CHANGED
|
@@ -2,6 +2,7 @@ import { execSync } from 'child_process';
|
|
|
2
2
|
import { readFileSync, rmSync, writeFileSync } from 'fs';
|
|
3
3
|
import { resolve } from 'path';
|
|
4
4
|
import { buildContextV2 } from '../analysis/context-builder';
|
|
5
|
+
import { parseDiffHunks } from '../analysis/diff-lines';
|
|
5
6
|
import { formatPrompt } from '../analysis/prompt-formatter';
|
|
6
7
|
import { mergeGraphs } from '../graph/merger';
|
|
7
8
|
import { log } from '../shared/logger';
|
|
@@ -94,6 +95,25 @@ export async function executeContext(opts) {
|
|
|
94
95
|
else {
|
|
95
96
|
mergedGraph = { nodes: parseResult.nodes, edges: parseResult.edges };
|
|
96
97
|
}
|
|
98
|
+
// Parse diff hunks if provided (used in fallback mode to filter changed functions)
|
|
99
|
+
let diffHunks;
|
|
100
|
+
if (opts.diff) {
|
|
101
|
+
try {
|
|
102
|
+
const diffContent = readFileSync(opts.diff, 'utf-8');
|
|
103
|
+
diffHunks = parseDiffHunks(diffContent);
|
|
104
|
+
log.info('context: diff hunks loaded', {
|
|
105
|
+
path: opts.diff,
|
|
106
|
+
files: diffHunks.size,
|
|
107
|
+
totalHunks: [...diffHunks.values()].reduce((s, h) => s + h.length, 0),
|
|
108
|
+
});
|
|
109
|
+
}
|
|
110
|
+
catch (err) {
|
|
111
|
+
log.warn('context: failed to read --diff file, proceeding without it', {
|
|
112
|
+
path: opts.diff,
|
|
113
|
+
error: String(err),
|
|
114
|
+
});
|
|
115
|
+
}
|
|
116
|
+
}
|
|
97
117
|
// Build V2 context
|
|
98
118
|
const output = buildContextV2({
|
|
99
119
|
mergedGraph,
|
|
@@ -102,6 +122,7 @@ export async function executeContext(opts) {
|
|
|
102
122
|
minConfidence: opts.minConfidence,
|
|
103
123
|
maxDepth: opts.maxDepth,
|
|
104
124
|
skipTests: opts.skipTests,
|
|
125
|
+
diffHunks,
|
|
105
126
|
});
|
|
106
127
|
log.info('context: analysis done', {
|
|
107
128
|
changedFunctions: output.analysis.changed_functions.length,
|
package/package.json
CHANGED