@syke1/mcp-server 1.4.17 → 1.4.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai/realtime-analyzer.js +1 -1
- package/dist/git/change-coupling.d.ts +41 -0
- package/dist/git/change-coupling.js +250 -0
- package/dist/graph/incremental.d.ts +35 -0
- package/dist/graph/incremental.js +319 -0
- package/dist/graph/memo-cache.d.ts +47 -0
- package/dist/graph/memo-cache.js +176 -0
- package/dist/graph/scc.d.ts +57 -0
- package/dist/graph/scc.js +206 -0
- package/dist/graph.d.ts +6 -0
- package/dist/graph.js +17 -1
- package/dist/index.js +151 -11
- package/dist/scoring/pagerank.d.ts +67 -0
- package/dist/scoring/pagerank.js +221 -0
- package/dist/scoring/risk-scorer.d.ts +99 -0
- package/dist/scoring/risk-scorer.js +623 -0
- package/dist/tools/analyze-impact.d.ts +36 -1
- package/dist/tools/analyze-impact.js +278 -2
- package/dist/tools/gate-build.d.ts +7 -2
- package/dist/tools/gate-build.js +179 -13
- package/dist/watcher/file-cache.d.ts +9 -0
- package/dist/watcher/file-cache.js +40 -0
- package/dist/web/server.js +20 -3
- package/package.json +1 -1
|
@@ -112,7 +112,7 @@ async function analyzeChangeRealtime(change, graph, getFileContent) {
|
|
|
112
112
|
const absPath = path.normalize(path.join(graph.sourceDir, relPath));
|
|
113
113
|
let affectedNodes = [];
|
|
114
114
|
if (graph.files.has(absPath)) {
|
|
115
|
-
const impact = (0, analyze_impact_1.analyzeImpact)(absPath, graph);
|
|
115
|
+
const impact = await (0, analyze_impact_1.analyzeImpact)(absPath, graph);
|
|
116
116
|
affectedNodes = [...impact.directDependents, ...impact.transitiveDependents];
|
|
117
117
|
}
|
|
118
118
|
// Build context: changed file + top 5 connected files' smart context
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
/** A pair of files that repeatedly changed together in git history. */
export interface ChangeCoupling {
    file1: string;
    file2: string;
    /** Commits in which both files changed together. */
    coChangeCount: number;
    /** Total commits that touched file1 (co-changes included). */
    file1Changes: number;
    /** Total commits that touched file2 (co-changes included). */
    file2Changes: number;
    /** coChangeCount / max(file1Changes, file2Changes), in [0, 1]. */
    confidence: number;
    /** Same value as coChangeCount (association-rule "support"). */
    support: number;
}
/** Result of mining git history for co-change couplings. */
export interface CouplingResult {
    /** All couplings passing the thresholds, sorted by confidence descending. */
    couplings: ChangeCoupling[];
    /** Couplings indexed by file path; each coupling is listed under both of its files. */
    fileCouplings: Map<string, ChangeCoupling[]>;
    /** Number of commits examined (skipped mega/single-file commits included). */
    totalCommitsAnalyzed: number;
    /** Epoch-ms timestamp of the analysis; used for the cache TTL. */
    analyzedAt: number;
}
/** Tuning knobs for git-history mining. */
export interface CouplingOptions {
    /** How many commits back to analyze (default 500). */
    maxCommits?: number;
    /** Minimum co-change count for a pair to be kept (default 3). */
    minSupport?: number;
    /** Minimum confidence for a pair to be kept (default 0.3). */
    minConfidence?: number;
    /** Commits touching more files than this are skipped as noise (default 20). */
    maxFilesPerCommit?: number;
}
/**
 * Invalidate the coupling cache. Call this when the graph is refreshed
 * or when git history may have changed.
 */
export declare function invalidateCouplingCache(): void;
/**
 * Mine git history to find files that frequently co-change.
 *
 * Runs `git log --name-only` and analyzes pairwise file combinations
 * within each commit to identify hidden logical dependencies.
 */
export declare function mineGitHistory(projectRoot: string, options?: CouplingOptions): Promise<CouplingResult>;
/**
 * Get all significant couplings for a given file path.
 * Returns an empty array if no couplings are found.
 *
 * The filePath should be a relative path matching git log output format
 * (forward slashes, relative to project root).
 */
export declare function getCoupledFiles(filePath: string, result: CouplingResult): ChangeCoupling[];
|
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.invalidateCouplingCache = invalidateCouplingCache;
|
|
4
|
+
exports.mineGitHistory = mineGitHistory;
|
|
5
|
+
exports.getCoupledFiles = getCoupledFiles;
|
|
6
|
+
const child_process_1 = require("child_process");
|
|
7
|
+
// ── Defaults ──
|
|
8
|
+
const DEFAULT_MAX_COMMITS = 500;
|
|
9
|
+
const DEFAULT_MIN_SUPPORT = 3;
|
|
10
|
+
const DEFAULT_MIN_CONFIDENCE = 0.3;
|
|
11
|
+
const DEFAULT_MAX_FILES_PER_COMMIT = 20;
|
|
12
|
+
const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
|
|
13
|
+
// ── Cache ──
|
|
14
|
+
let cachedResult = null;
|
|
15
|
+
let cachedProjectRoot = null;
|
|
16
|
+
/**
 * Invalidate the coupling cache. Call this when the graph is refreshed
 * or when git history may have changed.
 */
function invalidateCouplingCache() {
    // Drop both halves of the single-entry cache.
    cachedProjectRoot = null;
    cachedResult = null;
}
|
|
24
|
+
// ── Git History Mining ──
|
|
25
|
+
/**
 * Report whether `projectRoot` lies inside a git work tree.
 * Any failure (git binary missing, not a repo, bad cwd) counts as "no".
 */
function isGitRepo(projectRoot) {
    const execOptions = {
        cwd: projectRoot,
        encoding: "utf-8",
        stdio: ["pipe", "pipe", "pipe"],
    };
    try {
        (0, child_process_1.execSync)("git rev-parse --is-inside-work-tree", execOptions);
        return true;
    }
    catch {
        return false;
    }
}
|
|
41
|
+
/**
 * Parse `git log --name-only --format="COMMIT:%H"` output into a list of
 * commits, where each commit is the list of file paths it touched.
 * Commits that touched no files are omitted.
 */
function parseGitLog(raw) {
    const commits = [];
    for (const segment of raw.split("COMMIT:")) {
        const body = segment.trim();
        if (!body) {
            continue;
        }
        // The first line of a segment is the commit hash; the rest are file paths.
        const [, ...pathLines] = body.split("\n");
        const files = pathLines
            .map((line) => line.trim())
            .filter((line) => line.length > 0);
        if (files.length > 0) {
            commits.push(files);
        }
    }
    return commits;
}
|
|
67
|
+
/**
 * Normalize a path to forward slashes so it compares cleanly against
 * git log output and graph-stored paths.
 */
function normalizePath(filePath) {
    // Git already emits forward slashes; this only rewrites Windows-style input.
    return filePath.split("\\").join("/");
}
|
|
75
|
+
// Patterns for files that are clearly not analyzable source code
// (lockfiles, minified bundles, source maps, type declarations, binary assets).
// Hoisted to module scope so the regexes are compiled once, not on every call.
const NON_SOURCE_PATTERNS = [
    /\.lock$/,
    /package-lock\.json$/,
    /yarn\.lock$/,
    /\.min\.(js|css)$/,
    /\.map$/,
    /\.d\.ts$/,
    /\.png$/,
    /\.jpg$/,
    /\.jpeg$/,
    /\.gif$/,
    /\.svg$/,
    /\.ico$/,
    /\.woff2?$/,
    /\.ttf$/,
    /\.eot$/,
    /\.pdf$/,
    /\.zip$/,
    /\.tar$/,
    /\.gz$/,
];
/**
 * Check if a file path looks like a source file (not binary, not config noise).
 * Kept deliberately broad — the dependency-graph comparison does the real
 * filtering. Matching is case-insensitive (the path is lowercased first).
 */
function isSourceFile(filePath) {
    const normalized = filePath.toLowerCase();
    return !NON_SOURCE_PATTERNS.some((p) => p.test(normalized));
}
|
|
105
|
+
/**
 * Create a canonical, order-independent key for a pair of files.
 * NUL is a safe separator because it cannot appear in git file paths.
 */
function pairKey(a, b) {
    const [lo, hi] = a < b ? [a, b] : [b, a];
    return `${lo}\0${hi}`;
}
|
|
111
|
+
/**
 * Mine git history to find files that frequently co-change.
 *
 * Runs `git log --name-only` and counts pairwise file combinations within
 * each commit to surface hidden logical dependencies. Results are cached
 * per project root for CACHE_TTL_MS.
 */
async function mineGitHistory(projectRoot, options) {
    // Serve from the single-entry cache while it is still fresh.
    const cacheFresh = cachedResult &&
        cachedProjectRoot === projectRoot &&
        Date.now() - cachedResult.analyzedAt < CACHE_TTL_MS;
    if (cacheFresh) {
        return cachedResult;
    }
    const maxCommits = options?.maxCommits ?? DEFAULT_MAX_COMMITS;
    const minSupport = options?.minSupport ?? DEFAULT_MIN_SUPPORT;
    const minConfidence = options?.minConfidence ?? DEFAULT_MIN_CONFIDENCE;
    const maxFilesPerCommit = options?.maxFilesPerCommit ?? DEFAULT_MAX_FILES_PER_COMMIT;
    const cacheAndReturn = (result) => {
        cachedResult = result;
        cachedProjectRoot = projectRoot;
        return result;
    };
    // Used for non-git projects and git failures alike.
    const emptyResult = {
        couplings: [],
        fileCouplings: new Map(),
        totalCommitsAnalyzed: 0,
        analyzedAt: Date.now(),
    };
    if (!isGitRepo(projectRoot)) {
        return cacheAndReturn(emptyResult);
    }
    let raw;
    try {
        raw = (0, child_process_1.execSync)(`git log --name-only --format="COMMIT:%H" --max-count=${maxCommits}`, {
            cwd: projectRoot,
            encoding: "utf-8",
            maxBuffer: 10 * 1024 * 1024,
            stdio: ["pipe", "pipe", "pipe"],
        });
    }
    catch {
        // git failed (permissions, odd repo state, …) — behave like a non-repo.
        return cacheAndReturn(emptyResult);
    }
    // Tally per-file change counts and per-pair co-change counts.
    const fileChangeCount = new Map();
    const pairCoChangeCount = new Map();
    const bump = (map, key) => map.set(key, (map.get(key) || 0) + 1);
    let totalCommitsAnalyzed = 0;
    for (const commitFiles of parseGitLog(raw)) {
        const filtered = commitFiles.map(normalizePath).filter(isSourceFile);
        totalCommitsAnalyzed++;
        if (filtered.length > maxFilesPerCommit || filtered.length < 2) {
            // Mega-commits (merges, sweeping refactors) would drown the signal;
            // single-file commits still count toward per-file totals.
            if (filtered.length === 1) {
                bump(fileChangeCount, filtered[0]);
            }
            continue;
        }
        for (const file of filtered) {
            bump(fileChangeCount, file);
        }
        for (let i = 0; i < filtered.length; i++) {
            for (let j = i + 1; j < filtered.length; j++) {
                bump(pairCoChangeCount, pairKey(filtered[i], filtered[j]));
            }
        }
    }
    // Keep only pairs above the support and confidence thresholds.
    // Confidence divides by the *more frequently changed* file, i.e. the
    // weaker of the two conditional co-change probabilities.
    const couplings = [];
    for (const [key, coCount] of pairCoChangeCount) {
        if (coCount < minSupport) {
            continue;
        }
        const [file1, file2] = key.split("\0");
        const file1Changes = fileChangeCount.get(file1) || 0;
        const file2Changes = fileChangeCount.get(file2) || 0;
        const maxChanges = Math.max(file1Changes, file2Changes);
        const confidence = maxChanges > 0 ? coCount / maxChanges : 0;
        if (confidence >= minConfidence) {
            couplings.push({
                file1,
                file2,
                coChangeCount: coCount,
                file1Changes,
                file2Changes,
                confidence,
                support: coCount,
            });
        }
    }
    couplings.sort((a, b) => b.confidence - a.confidence);
    // Per-file lookup: every coupling is listed under both of its files.
    const fileCouplings = new Map();
    for (const coupling of couplings) {
        for (const file of [coupling.file1, coupling.file2]) {
            const list = fileCouplings.get(file);
            if (list) {
                list.push(coupling);
            }
            else {
                fileCouplings.set(file, [coupling]);
            }
        }
    }
    for (const [, list] of fileCouplings) {
        list.sort((a, b) => b.confidence - a.confidence);
    }
    return cacheAndReturn({
        couplings,
        fileCouplings,
        totalCommitsAnalyzed,
        analyzedAt: Date.now(),
    });
}
|
|
240
|
+
/**
 * Look up all significant couplings recorded for `filePath`.
 * Returns an empty array when the file has no recorded couplings.
 *
 * The path should be relative to the project root with forward slashes
 * (git log format); backslashes are normalized before the lookup.
 */
function getCoupledFiles(filePath, result) {
    return result.fileCouplings.get(normalizePath(filePath)) || [];
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
/**
 * Incremental Graph Updates for SYKE.
 *
 * Instead of rebuilding the entire dependency graph when a single file changes,
 * this module re-parses only the changed file's imports and updates the
 * forward/reverse maps in place. SCC and PageRank are recomputed fully
 * (both are O(V+E) and fast enough) only when edges actually change.
 *
 * This brings update latency from O(N * parse) down to O(1 * parse + V+E)
 * for large codebases (10K+ files).
 */
import { DependencyGraph } from "../graph";
/** Outcome of a single incremental graph mutation. */
export interface IncrementalUpdateResult {
    /** Normalized path of the file that was updated/added/removed. */
    updatedFile: string;
    /** New [importer, imported] edges introduced by this mutation. */
    addedEdges: [string, string][];
    /** [importer, imported] edges dropped by this mutation. */
    removedEdges: [string, string][];
    /** True when any edge was added or removed (graph metrics were recomputed). */
    edgesChanged: boolean;
    /** Files whose cached impact results may now be stale (reverse transitive closure). */
    affectedFiles: string[];
}
/**
 * Update the graph for a single changed file.
 * Re-parses only that file's imports and updates forward/reverse maps.
 * Returns info about what changed for cache invalidation.
 */
export declare function updateGraphForFile(graph: DependencyGraph, filePath: string, projectRoot: string): IncrementalUpdateResult;
/**
 * Add a new file to the graph.
 * Initializes forward/reverse entries, parses imports, and adds edges.
 */
export declare function addFileToGraph(graph: DependencyGraph, filePath: string, projectRoot: string): IncrementalUpdateResult;
/**
 * Remove a file from the graph.
 * Cleans up all forward edges, reverse edges, and the files set.
 */
export declare function removeFileFromGraph(graph: DependencyGraph, filePath: string): IncrementalUpdateResult;
|
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Incremental Graph Updates for SYKE.
|
|
4
|
+
*
|
|
5
|
+
* Instead of rebuilding the entire dependency graph when a single file changes,
|
|
6
|
+
* this module re-parses only the changed file's imports and updates the
|
|
7
|
+
* forward/reverse maps in place. SCC and PageRank are recomputed fully
|
|
8
|
+
* (both are O(V+E) and fast enough) only when edges actually change.
|
|
9
|
+
*
|
|
10
|
+
* This brings update latency from O(N * parse) down to O(1 * parse + V+E)
|
|
11
|
+
* for large codebases (10K+ files).
|
|
12
|
+
*/
|
|
13
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
14
|
+
if (k2 === undefined) k2 = k;
|
|
15
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
16
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
17
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
18
|
+
}
|
|
19
|
+
Object.defineProperty(o, k2, desc);
|
|
20
|
+
}) : (function(o, m, k, k2) {
|
|
21
|
+
if (k2 === undefined) k2 = k;
|
|
22
|
+
o[k2] = m[k];
|
|
23
|
+
}));
|
|
24
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
25
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
26
|
+
}) : function(o, v) {
|
|
27
|
+
o["default"] = v;
|
|
28
|
+
});
|
|
29
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
30
|
+
var ownKeys = function(o) {
|
|
31
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
32
|
+
var ar = [];
|
|
33
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
34
|
+
return ar;
|
|
35
|
+
};
|
|
36
|
+
return ownKeys(o);
|
|
37
|
+
};
|
|
38
|
+
return function (mod) {
|
|
39
|
+
if (mod && mod.__esModule) return mod;
|
|
40
|
+
var result = {};
|
|
41
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
42
|
+
__setModuleDefault(result, mod);
|
|
43
|
+
return result;
|
|
44
|
+
};
|
|
45
|
+
})();
|
|
46
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
47
|
+
exports.updateGraphForFile = updateGraphForFile;
|
|
48
|
+
exports.addFileToGraph = addFileToGraph;
|
|
49
|
+
exports.removeFileFromGraph = removeFileFromGraph;
|
|
50
|
+
const path = __importStar(require("path"));
|
|
51
|
+
const plugin_1 = require("../languages/plugin");
|
|
52
|
+
const scc_1 = require("./scc");
|
|
53
|
+
const pagerank_1 = require("../scoring/pagerank");
|
|
54
|
+
const risk_scorer_1 = require("../scoring/risk-scorer");
|
|
55
|
+
// ── Core Functions ──
|
|
56
|
+
/**
 * Update the graph for a single changed file.
 *
 * Re-parses only that file's imports, diffs them against the stored forward
 * edges, and patches the forward/reverse maps in place. SCC/PageRank are
 * recomputed only when the edge set actually changed. Returns what changed
 * so callers can invalidate caches.
 */
function updateGraphForFile(graph, filePath, projectRoot) {
    const normalized = path.normalize(filePath);
    // Unknown file → this is really an addition.
    if (!graph.files.has(normalized)) {
        return addFileToGraph(graph, filePath, projectRoot);
    }
    const plugin = (0, plugin_1.getPluginForFile)(normalized);
    if (!plugin) {
        // No language plugin claims this extension — nothing to re-parse.
        return {
            updatedFile: normalized,
            addedEdges: [],
            removedEdges: [],
            edgesChanged: false,
            affectedFiles: [],
        };
    }
    // Re-parse this file's imports, keeping only edges to files already in
    // the graph (internal dependencies).
    const sourceDir = findSourceDirForFile(normalized, graph);
    const newDeps = plugin
        .parseImports(normalized, projectRoot, sourceDir)
        .filter((imp) => graph.files.has(imp));
    // Diff new imports against the stored forward edges.
    const previousDeps = graph.forward.get(normalized) || [];
    const previousSet = new Set(previousDeps);
    const currentSet = new Set(newDeps);
    const addedEdges = newDeps
        .filter((dep) => !previousSet.has(dep))
        .map((dep) => [normalized, dep]);
    const removedEdges = previousDeps
        .filter((dep) => !currentSet.has(dep))
        .map((dep) => [normalized, dep]);
    const edgesChanged = addedEdges.length > 0 || removedEdges.length > 0;
    // Replace the forward list wholesale, then fix up reverse lists edge by edge.
    graph.forward.set(normalized, newDeps);
    for (const [, dep] of removedEdges) {
        const revList = graph.reverse.get(dep);
        if (revList) {
            const idx = revList.indexOf(normalized);
            if (idx !== -1) {
                revList.splice(idx, 1);
            }
        }
    }
    for (const [, dep] of addedEdges) {
        const revList = graph.reverse.get(dep);
        if (!revList) {
            graph.reverse.set(dep, [normalized]);
        }
        else if (!revList.includes(normalized)) {
            revList.push(normalized);
        }
    }
    // Everything that transitively depends on this file may have stale caches.
    const affectedFiles = computeAffectedFiles(normalized, graph);
    if (edgesChanged) {
        recomputeGraphMetrics(graph);
    }
    return {
        updatedFile: normalized,
        addedEdges,
        removedEdges,
        edgesChanged,
        affectedFiles,
    };
}
|
|
141
|
+
/**
 * Add a new file to the graph.
 * Initializes forward/reverse entries, parses imports, and wires up edges.
 *
 * Existing files that import the *new* file are not re-parsed here — that
 * would require re-parsing everything; the next full refresh picks those
 * edges up.
 */
function addFileToGraph(graph, filePath, projectRoot) {
    const normalized = path.normalize(filePath);
    // Already tracked → delegate to the update path.
    if (graph.files.has(normalized)) {
        return updateGraphForFile(graph, filePath, projectRoot);
    }
    // Register the node with empty edge lists first.
    graph.files.add(normalized);
    graph.forward.set(normalized, []);
    if (!graph.reverse.has(normalized)) {
        graph.reverse.set(normalized, []);
    }
    const plugin = (0, plugin_1.getPluginForFile)(normalized);
    if (!plugin) {
        // Unsupported extension: the file is tracked but contributes no edges.
        return {
            updatedFile: normalized,
            addedEdges: [],
            removedEdges: [],
            edgesChanged: false,
            affectedFiles: [],
        };
    }
    // Parse imports, keeping only internal dependencies.
    const sourceDir = findSourceDirForFile(normalized, graph);
    const newDeps = plugin
        .parseImports(normalized, projectRoot, sourceDir)
        .filter((imp) => graph.files.has(imp));
    graph.forward.set(normalized, newDeps);
    const addedEdges = [];
    for (const dep of newDeps) {
        addedEdges.push([normalized, dep]);
        const revList = graph.reverse.get(dep);
        if (!revList) {
            graph.reverse.set(dep, [normalized]);
        }
        else if (!revList.includes(normalized)) {
            revList.push(normalized);
        }
    }
    const edgesChanged = addedEdges.length > 0;
    const affectedFiles = computeAffectedFiles(normalized, graph);
    if (edgesChanged) {
        recomputeGraphMetrics(graph);
    }
    return {
        updatedFile: normalized,
        addedEdges,
        removedEdges: [],
        edgesChanged,
        affectedFiles,
    };
}
|
|
209
|
+
/**
 * Remove a file from the graph: drop its forward edges, the reverse edges
 * pointing at it, and its entry in the files set.
 */
function removeFileFromGraph(graph, filePath) {
    const normalized = path.normalize(filePath);
    if (!graph.files.has(normalized)) {
        // Not tracked — nothing to remove.
        return {
            updatedFile: normalized,
            addedEdges: [],
            removedEdges: [],
            edgesChanged: false,
            affectedFiles: [],
        };
    }
    // Snapshot affected files while the reverse graph is still intact.
    const affectedFiles = computeAffectedFiles(normalized, graph);
    const removedEdges = [];
    // Remove `normalized` from an edge list, if present.
    const dropFrom = (list) => {
        if (!list) {
            return;
        }
        const idx = list.indexOf(normalized);
        if (idx !== -1) {
            list.splice(idx, 1);
        }
    };
    // Edges this file owns (it imports dep) …
    for (const dep of graph.forward.get(normalized) || []) {
        removedEdges.push([normalized, dep]);
        dropFrom(graph.reverse.get(dep));
    }
    // … and edges owned by importers (src imports this file).
    for (const src of graph.reverse.get(normalized) || []) {
        removedEdges.push([src, normalized]);
        dropFrom(graph.forward.get(src));
    }
    graph.forward.delete(normalized);
    graph.reverse.delete(normalized);
    graph.files.delete(normalized);
    const edgesChanged = removedEdges.length > 0;
    if (edgesChanged) {
        recomputeGraphMetrics(graph);
    }
    return {
        updatedFile: normalized,
        addedEdges: [],
        removedEdges,
        edgesChanged,
        affectedFiles,
    };
}
|
|
270
|
+
// ── Internal Helpers ──
|
|
271
|
+
/**
 * Find the configured source directory that contains the given file.
 * Falls back to graph.sourceDir when no configured directory matches.
 *
 * Matches on whole path segments: `/proj/src2/a.ts` is NOT inside
 * `/proj/src`. (A plain startsWith prefix check would wrongly match
 * sibling directories that share a name prefix.)
 */
function findSourceDirForFile(filePath, graph) {
    for (const dir of graph.sourceDirs) {
        // Accept the directory itself or anything strictly under it.
        const prefix = dir.endsWith(path.sep) ? dir : dir + path.sep;
        if (filePath === dir || filePath.startsWith(prefix)) {
            return dir;
        }
    }
    return graph.sourceDir;
}
|
|
283
|
+
/**
 * Compute the set of files whose cached BFS/impact results might be stale.
 * This is the reverse transitive closure: all files that transitively depend
 * on the changed file, including the changed file itself.
 */
function computeAffectedFiles(filePath, graph) {
    const affected = new Set([filePath]);
    // BFS over reverse edges. An index cursor instead of Array#shift()
    // keeps the traversal O(V + E); shift() is O(n) per dequeue, which
    // makes the original loop quadratic on large closures.
    const queue = [filePath];
    for (let head = 0; head < queue.length; head++) {
        const dependents = graph.reverse.get(queue[head]) || [];
        for (const dependent of dependents) {
            if (!affected.has(dependent)) {
                affected.add(dependent);
                queue.push(dependent);
            }
        }
    }
    return [...affected];
}
|
|
304
|
+
/**
 * Recompute SCC and PageRank after edge changes.
 * Both are O(V+E) and fast (<100ms for 10K files).
 */
function recomputeGraphMetrics(graph) {
    // Recompute SCC from the current edge sets.
    graph.scc = (0, scc_1.computeSCC)(graph);
    // Recompute PageRank (drop any memoized ranks first).
    (0, pagerank_1.invalidatePageRank)();
    graph.pageRank = (0, pagerank_1.computePageRank)(graph);
    // Invalidate project metrics (will be recomputed lazily)
    (0, risk_scorer_1.invalidateProjectMetrics)();
    const cyclicCount = graph.scc.condensed.nodes.filter(n => n.isCyclic).length;
    // NOTE(review): logs via console.error — presumably to keep stdout free
    // for the MCP protocol channel; confirm against the server entry point.
    console.error(`[syke:incremental] Graph metrics recomputed: ${graph.files.size} files, ` +
        `${graph.scc.components.length} SCCs (${cyclicCount} cyclic)`);
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/**
 * Memoized BFS Result Cache for SYKE.
 *
 * Caches impact analysis results (BFS reverse traversals) so that
 * repeated queries for the same file return instantly.
 *
 * Smart invalidation: when a file changes, only cache entries that
 * could be affected are evicted. A reverse index maps each file to
 * the set of cache keys whose impactSet contains it, making
 * invalidation O(affected) instead of O(cache_size).
 *
 * Uses LRU eviction when the cache exceeds maxSize.
 */
/** One cached impact-analysis result (a reverse-BFS traversal from one file). */
export interface MemoEntry {
    /** Files reached by the reverse BFS; feeds the invalidation reverse index. */
    impactSet: string[];
    directCount: number;
    transitiveCount: number;
    riskLevel: string;
    // NOTE(review): presumably maps file → cascade depth; confirm against the producer.
    cascadeLevels?: Map<string, number>;
    /** Epoch-ms timestamp of when this entry was computed. */
    computedAt: number;
}
/** Counters exposed for cache introspection. */
export interface MemoCacheStats {
    size: number;
    hits: number;
    misses: number;
}
/** Impact-result cache with LRU eviction and targeted invalidation. */
export interface MemoCache {
    get(filePath: string): MemoEntry | undefined;
    set(filePath: string, entry: MemoEntry): void;
    /** Evict entries affected by the given files; returns a count (presumably entries evicted — confirm). */
    invalidate(affectedFiles: string[]): number;
    invalidateAll(): void;
    stats(): MemoCacheStats;
}
/**
 * Create a new MemoCache with LRU eviction and reverse-index invalidation.
 *
 * @param maxSize Maximum number of cached entries (default 500).
 */
export declare function createMemoCache(maxSize?: number): MemoCache;
/**
 * Get the global memo cache instance (lazy initialization).
 */
export declare function getMemoCache(): MemoCache;
/**
 * Reset the global memo cache (e.g., on full graph rebuild).
 */
export declare function resetMemoCache(): void;
|