@syke1/mcp-server 1.4.17 → 1.4.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,319 @@
1
"use strict";
/**
 * Incremental Graph Updates for SYKE.
 *
 * Instead of rebuilding the entire dependency graph when a single file changes,
 * this module re-parses only the changed file's imports and updates the
 * forward/reverse maps in place. SCC and PageRank are recomputed fully
 * (both are O(V+E) and fast enough) only when edges actually change.
 *
 * This brings update latency from O(N * parse) down to O(1 * parse + V+E)
 * for large codebases (10K+ files).
 */
// NOTE(review): the three helpers below (__createBinding, __setModuleDefault,
// __importStar) are standard tsc-generated CommonJS interop boilerplate for
// `import * as ...` — generated code, do not hand-edit.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Public API: single-file update, addition, and removal (see function docs below).
exports.updateGraphForFile = updateGraphForFile;
exports.addFileToGraph = addFileToGraph;
exports.removeFileFromGraph = removeFileFromGraph;
// Node stdlib + project-local modules (language plugins, SCC, PageRank, risk scoring).
const path = __importStar(require("path"));
const plugin_1 = require("../languages/plugin");
const scc_1 = require("./scc");
const pagerank_1 = require("../scoring/pagerank");
const risk_scorer_1 = require("../scoring/risk-scorer");
// ── Core Functions ──
55
+ // ── Core Functions ──
56
/**
 * Update the graph for a single changed file.
 * Re-parses only that file's imports and updates forward/reverse maps.
 * Returns info about what changed for cache invalidation.
 */
function updateGraphForFile(graph, filePath, projectRoot) {
    const normalized = path.normalize(filePath);
    // If file is not in the graph, treat as a new file addition
    if (!graph.files.has(normalized)) {
        return addFileToGraph(graph, filePath, projectRoot);
    }
    // 1. Get old forward edges for this file
    const oldDeps = graph.forward.get(normalized) || [];
    const oldDepsSet = new Set(oldDeps);
    // 2. Determine which language plugin handles this file
    const plugin = (0, plugin_1.getPluginForFile)(normalized);
    if (!plugin) {
        // No plugin can handle this file extension - nothing to update
        return {
            updatedFile: normalized,
            addedEdges: [],
            removedEdges: [],
            edgesChanged: false,
            affectedFiles: [],
        };
    }
    // 3. Find the appropriate source directory for this file
    const sourceDir = findSourceDirForFile(normalized, graph);
    // 4. Re-parse imports for this file
    const rawImports = plugin.parseImports(normalized, projectRoot, sourceDir);
    // 5. Keep only internal deps (files present in the graph) and DEDUPE.
    //    Fix: a file importing the same module twice must not create duplicate
    //    forward edges — the reverse-map update below is dedup-guarded via
    //    includes(), so duplicates here would leave forward/reverse inconsistent
    //    and inflate edge counts.
    const newDepsSet = new Set(rawImports.filter(imp => graph.files.has(imp)));
    const newDeps = [...newDepsSet];
    // 6. Compute diff
    const addedEdges = [];
    const removedEdges = [];
    for (const dep of newDeps) {
        if (!oldDepsSet.has(dep)) {
            addedEdges.push([normalized, dep]);
        }
    }
    for (const dep of oldDeps) {
        if (!newDepsSet.has(dep)) {
            removedEdges.push([normalized, dep]);
        }
    }
    const edgesChanged = addedEdges.length > 0 || removedEdges.length > 0;
    // 7. Update forward map
    graph.forward.set(normalized, newDeps);
    // 8. Update reverse map for removed edges (splice in place to preserve array identity)
    for (const [, dep] of removedEdges) {
        const revList = graph.reverse.get(dep);
        if (revList) {
            const idx = revList.indexOf(normalized);
            if (idx !== -1) {
                revList.splice(idx, 1);
            }
        }
    }
    // 9. Update reverse map for added edges
    for (const [, dep] of addedEdges) {
        const revList = graph.reverse.get(dep);
        if (revList) {
            if (!revList.includes(normalized)) {
                revList.push(normalized);
            }
        }
        else {
            graph.reverse.set(dep, [normalized]);
        }
    }
    // 10. Compute affected files (reverse transitive closure of the changed file)
    const affectedFiles = computeAffectedFiles(normalized, graph);
    // 11. If edges changed, recompute SCC and PageRank
    if (edgesChanged) {
        recomputeGraphMetrics(graph);
    }
    return {
        updatedFile: normalized,
        addedEdges,
        removedEdges,
        edgesChanged,
        affectedFiles,
    };
}
141
/**
 * Add a new file to the graph.
 * Initializes forward/reverse entries, parses imports, and adds edges.
 */
function addFileToGraph(graph, filePath, projectRoot) {
    const normalized = path.normalize(filePath);
    // Already in graph? Treat as an update instead
    if (graph.files.has(normalized)) {
        return updateGraphForFile(graph, filePath, projectRoot);
    }
    // 1. Add to the files set
    graph.files.add(normalized);
    // 2. Initialize forward entry
    graph.forward.set(normalized, []);
    // 3. Initialize reverse entry if not exists
    if (!graph.reverse.has(normalized)) {
        graph.reverse.set(normalized, []);
    }
    // 4. Determine which plugin handles this file
    const plugin = (0, plugin_1.getPluginForFile)(normalized);
    if (!plugin) {
        return {
            updatedFile: normalized,
            addedEdges: [],
            removedEdges: [],
            edgesChanged: false,
            affectedFiles: [],
        };
    }
    // 5. Find source directory
    const sourceDir = findSourceDirForFile(normalized, graph);
    // 6. Parse imports. DEDUPE (fix): duplicate import statements must not
    //    create duplicate forward edges — the reverse-map update below is
    //    already dedup-guarded via includes(), so duplicates here would leave
    //    forward/reverse inconsistent.
    const rawImports = plugin.parseImports(normalized, projectRoot, sourceDir);
    const newDeps = [...new Set(rawImports.filter(imp => graph.files.has(imp)))];
    const addedEdges = [];
    // 7. Set forward edges
    graph.forward.set(normalized, newDeps);
    // 8. Update reverse maps for new edges
    for (const dep of newDeps) {
        addedEdges.push([normalized, dep]);
        const revList = graph.reverse.get(dep);
        if (revList) {
            if (!revList.includes(normalized)) {
                revList.push(normalized);
            }
        }
        else {
            graph.reverse.set(dep, [normalized]);
        }
    }
    // 9. Check if any existing file imports this new file
    // (their forward edges might now resolve to this file)
    // This is hard to detect without re-parsing all files,
    // so we skip it — the next full refresh will catch it.
    // The conservative approach is to just note that edges changed.
    const edgesChanged = addedEdges.length > 0;
    const affectedFiles = computeAffectedFiles(normalized, graph);
    if (edgesChanged) {
        recomputeGraphMetrics(graph);
    }
    return {
        updatedFile: normalized,
        addedEdges,
        removedEdges: [],
        edgesChanged,
        affectedFiles,
    };
}
209
/**
 * Remove a file from the graph.
 * Cleans up all forward edges, reverse edges, and the files set.
 */
function removeFileFromGraph(graph, filePath) {
    const target = path.normalize(filePath);
    if (!graph.files.has(target)) {
        // Unknown file — report a no-op result.
        return {
            updatedFile: target,
            addedEdges: [],
            removedEdges: [],
            edgesChanged: false,
            affectedFiles: [],
        };
    }
    // Capture affected files BEFORE any mutation (the reverse graph must be intact).
    const affectedFiles = computeAffectedFiles(target, graph);
    // Remove the first occurrence of `item` from `list` in place,
    // preserving the array's identity for any other holders.
    const dropInPlace = (list, item) => {
        if (!list)
            return;
        const pos = list.indexOf(item);
        if (pos !== -1)
            list.splice(pos, 1);
    };
    const removedEdges = [];
    // Outgoing edges: target imports dep → unlink target from dep's reverse list.
    for (const dep of graph.forward.get(target) || []) {
        removedEdges.push([target, dep]);
        dropInPlace(graph.reverse.get(dep), target);
    }
    // Incoming edges: src imports target → unlink target from src's forward list.
    for (const src of graph.reverse.get(target) || []) {
        removedEdges.push([src, target]);
        dropInPlace(graph.forward.get(src), target);
    }
    // Drop the node itself from all maps.
    graph.forward.delete(target);
    graph.reverse.delete(target);
    graph.files.delete(target);
    const edgesChanged = removedEdges.length > 0;
    if (edgesChanged) {
        recomputeGraphMetrics(graph);
    }
    return {
        updatedFile: target,
        addedEdges: [],
        removedEdges,
        edgesChanged,
        affectedFiles,
    };
}
270
// ── Internal Helpers ──
/**
 * Find the source directory that contains the given file.
 * Falls back to graph.sourceDir if no match found.
 */
function findSourceDirForFile(filePath, graph) {
    for (const dir of graph.sourceDirs) {
        // Fix: require a path-separator boundary so a prefix-colliding sibling
        // directory is not matched (e.g. "/src" must NOT claim "/src2/foo.ts").
        const prefix = dir.endsWith(path.sep) ? dir : dir + path.sep;
        if (filePath.startsWith(prefix)) {
            return dir;
        }
    }
    return graph.sourceDir;
}
283
/**
 * Compute the set of files whose cached BFS/impact results might be stale.
 * This is the reverse transitive closure: all files that transitively depend
 * on the changed file (including the changed file itself).
 */
function computeAffectedFiles(filePath, graph) {
    const affected = new Set();
    affected.add(filePath);
    const queue = [filePath];
    // Fix: use an index cursor instead of Array.shift(). shift() is O(n) per
    // call (re-indexes the array), which made this BFS O(n^2) on large
    // closures; the cursor keeps it O(V+E). Visit order is unchanged.
    for (let head = 0; head < queue.length; head++) {
        const current = queue[head];
        const dependents = graph.reverse.get(current) || [];
        for (const dep of dependents) {
            if (!affected.has(dep)) {
                affected.add(dep);
                queue.push(dep);
            }
        }
    }
    return [...affected];
}
304
/**
 * Recompute SCC and PageRank after edge changes.
 * Both are O(V+E) and fast (<100ms for 10K files).
 */
function recomputeGraphMetrics(graph) {
    // Full SCC recomputation on the updated graph.
    graph.scc = (0, scc_1.computeSCC)(graph);
    // Drop the stale PageRank, then recompute from scratch.
    (0, pagerank_1.invalidatePageRank)();
    graph.pageRank = (0, pagerank_1.computePageRank)(graph);
    // Project metrics are invalidated here and recomputed lazily on demand.
    (0, risk_scorer_1.invalidateProjectMetrics)();
    // Count multi-file SCCs (isCyclic) for the diagnostic line.
    let cyclicCount = 0;
    for (const node of graph.scc.condensed.nodes) {
        if (node.isCyclic) {
            cyclicCount += 1;
        }
    }
    const summary = `[syke:incremental] Graph metrics recomputed: ${graph.files.size} files, ` +
        `${graph.scc.components.length} SCCs (${cyclicCount} cyclic)`;
    console.error(summary);
}
@@ -0,0 +1,47 @@
1
/**
 * Memoized BFS Result Cache for SYKE.
 *
 * Caches impact analysis results (BFS reverse traversals) so that
 * repeated queries for the same file return instantly.
 *
 * Smart invalidation: when a file changes, only cache entries that
 * could be affected are evicted. A reverse index maps each file to
 * the set of cache keys whose impactSet contains it, making
 * invalidation O(affected) instead of O(cache_size).
 *
 * Uses LRU eviction when the cache exceeds maxSize.
 */
/** A single cached impact-analysis result, keyed by the queried file path. */
export interface MemoEntry {
    /** Files in the impact set of the queried file (drives invalidation). */
    impactSet: string[];
    directCount: number;
    transitiveCount: number;
    riskLevel: string;
    /** Per-file cascade depth — presumably BFS level; confirm with the producer. */
    cascadeLevels?: Map<string, number>;
    /** When the entry was computed — presumably Date.now() ms; confirm with callers. */
    computedAt: number;
}
/** Cumulative cache diagnostics (hits/misses survive invalidateAll). */
export interface MemoCacheStats {
    size: number;
    hits: number;
    misses: number;
}
export interface MemoCache {
    /** Returns the cached entry for filePath, or undefined on a miss. */
    get(filePath: string): MemoEntry | undefined;
    /** Stores (or replaces) the entry for filePath; may trigger LRU eviction. */
    set(filePath: string, entry: MemoEntry): void;
    /**
     * Evicts every entry whose impactSet (or key) contains one of
     * affectedFiles. Returns the number of entries evicted.
     */
    invalidate(affectedFiles: string[]): number;
    /** Drops every entry; cumulative hit/miss counters are preserved. */
    invalidateAll(): void;
    stats(): MemoCacheStats;
}
/**
 * Create a new MemoCache with LRU eviction and reverse-index invalidation.
 *
 * @param maxSize Maximum number of cached entries (default 500).
 */
export declare function createMemoCache(maxSize?: number): MemoCache;
/**
 * Get the global memo cache instance (lazy initialization).
 */
export declare function getMemoCache(): MemoCache;
/**
 * Reset the global memo cache (e.g., on full graph rebuild).
 */
export declare function resetMemoCache(): void;
@@ -0,0 +1,176 @@
1
"use strict";
/**
 * Memoized BFS Result Cache for SYKE.
 *
 * Caches impact analysis results (BFS reverse traversals) so that
 * repeated queries for the same file return instantly.
 *
 * Smart invalidation: when a file changes, only cache entries that
 * could be affected are evicted. A reverse index maps each file to
 * the set of cache keys whose impactSet contains it, making
 * invalidation O(affected) instead of O(cache_size).
 *
 * Uses LRU eviction when the cache exceeds maxSize.
 */
Object.defineProperty(exports, "__esModule", { value: true });
// Public API: factory plus the lazily-created global singleton accessors.
exports.createMemoCache = createMemoCache;
exports.getMemoCache = getMemoCache;
exports.resetMemoCache = resetMemoCache;
// ── Implementation ──
20
+ /**
21
+ * Create a new MemoCache with LRU eviction and reverse-index invalidation.
22
+ *
23
+ * @param maxSize Maximum number of cached entries (default 500).
24
+ */
25
+ function createMemoCache(maxSize = 500) {
26
+ // Main cache: filePath -> MemoEntry
27
+ const cache = new Map();
28
+ // LRU tracking: most recently accessed key moves to the end
29
+ const accessOrder = [];
30
+ // Reverse index: maps each file to the set of cache keys whose
31
+ // impactSet contains that file. Used for O(affected) invalidation.
32
+ const reverseIndex = new Map();
33
+ // Stats
34
+ let hits = 0;
35
+ let misses = 0;
36
+ /**
37
+ * Move a key to the end of the access order (most recently used).
38
+ */
39
+ function touchKey(key) {
40
+ const idx = accessOrder.indexOf(key);
41
+ if (idx !== -1) {
42
+ accessOrder.splice(idx, 1);
43
+ }
44
+ accessOrder.push(key);
45
+ }
46
+ /**
47
+ * Remove a single entry from the cache and clean up the reverse index.
48
+ */
49
+ function removeEntry(key) {
50
+ const entry = cache.get(key);
51
+ if (!entry)
52
+ return;
53
+ // Remove from reverse index
54
+ for (const file of entry.impactSet) {
55
+ const keys = reverseIndex.get(file);
56
+ if (keys) {
57
+ keys.delete(key);
58
+ if (keys.size === 0) {
59
+ reverseIndex.delete(file);
60
+ }
61
+ }
62
+ }
63
+ // Also remove the key itself from the reverse index
64
+ const selfKeys = reverseIndex.get(key);
65
+ if (selfKeys) {
66
+ selfKeys.delete(key);
67
+ if (selfKeys.size === 0) {
68
+ reverseIndex.delete(key);
69
+ }
70
+ }
71
+ cache.delete(key);
72
+ const orderIdx = accessOrder.indexOf(key);
73
+ if (orderIdx !== -1) {
74
+ accessOrder.splice(orderIdx, 1);
75
+ }
76
+ }
77
+ /**
78
+ * Evict the least recently used entry when cache exceeds maxSize.
79
+ */
80
+ function evictLRU() {
81
+ while (cache.size > maxSize && accessOrder.length > 0) {
82
+ const lruKey = accessOrder.shift();
83
+ removeEntry(lruKey);
84
+ }
85
+ }
86
+ /**
87
+ * Add a file -> cacheKey mapping to the reverse index.
88
+ */
89
+ function addToReverseIndex(file, cacheKey) {
90
+ let keys = reverseIndex.get(file);
91
+ if (!keys) {
92
+ keys = new Set();
93
+ reverseIndex.set(file, keys);
94
+ }
95
+ keys.add(cacheKey);
96
+ }
97
+ return {
98
+ get(filePath) {
99
+ const entry = cache.get(filePath);
100
+ if (entry) {
101
+ hits++;
102
+ touchKey(filePath);
103
+ return entry;
104
+ }
105
+ misses++;
106
+ return undefined;
107
+ },
108
+ set(filePath, entry) {
109
+ // If already cached, remove old reverse index entries first
110
+ if (cache.has(filePath)) {
111
+ removeEntry(filePath);
112
+ }
113
+ // Store the entry
114
+ cache.set(filePath, entry);
115
+ touchKey(filePath);
116
+ // Build reverse index: map each file in impactSet -> this cache key
117
+ for (const file of entry.impactSet) {
118
+ addToReverseIndex(file, filePath);
119
+ }
120
+ // Also index the key itself (if the queried file changes, its own
121
+ // cached result is stale)
122
+ addToReverseIndex(filePath, filePath);
123
+ // Evict LRU if over capacity
124
+ evictLRU();
125
+ },
126
+ invalidate(affectedFiles) {
127
+ const keysToInvalidate = new Set();
128
+ for (const file of affectedFiles) {
129
+ // Find all cache keys whose impactSet contains this file
130
+ const keys = reverseIndex.get(file);
131
+ if (keys) {
132
+ for (const key of keys) {
133
+ keysToInvalidate.add(key);
134
+ }
135
+ }
136
+ }
137
+ // Remove all identified entries
138
+ for (const key of keysToInvalidate) {
139
+ removeEntry(key);
140
+ }
141
+ return keysToInvalidate.size;
142
+ },
143
+ invalidateAll() {
144
+ cache.clear();
145
+ accessOrder.length = 0;
146
+ reverseIndex.clear();
147
+ // Do NOT reset hits/misses — they are cumulative diagnostics
148
+ },
149
+ stats() {
150
+ return {
151
+ size: cache.size,
152
+ hits,
153
+ misses,
154
+ };
155
+ },
156
+ };
157
+ }
158
// ── Singleton Instance ──
let globalMemoCache = null;
/**
 * Get the global memo cache instance (lazy initialization).
 */
function getMemoCache() {
    // Create on first use; subsequent calls return the same instance.
    globalMemoCache = globalMemoCache ?? createMemoCache();
    return globalMemoCache;
}
/**
 * Reset the global memo cache (e.g., on full graph rebuild).
 */
function resetMemoCache() {
    // No-op when the cache was never created.
    globalMemoCache?.invalidateAll();
}
@@ -0,0 +1,57 @@
1
/**
 * Strongly Connected Components (SCC) via Tarjan's algorithm,
 * graph condensation into a DAG, and topological sort via Kahn's algorithm.
 *
 * Used to detect circular dependencies and provide accurate cascade-level
 * impact analysis on the condensed (acyclic) dependency graph.
 */
import { DependencyGraph } from "../graph";
export interface SCCResult {
    /** Each SCC as an array of absolute file paths */
    components: string[][];
    /** Maps each file to its SCC index in `components` */
    nodeToComponent: Map<string, number>;
    /** The condensed DAG built from the SCCs */
    condensed: CondensedDAG;
}
export interface CondensedDAG {
    /** One node per SCC */
    nodes: CondensedNode[];
    /**
     * SCC index -> list of SCC indices that depend on it (forward = same direction as file imports)
     * NOTE(review): this comment and the one on `reverse` appear mutually
     * inconsistent — "same direction as file imports" should point at
     * dependencies, not dependents. Verify against the scc implementation.
     */
    forward: Map<number, number[]>;
    /** SCC index -> list of SCC indices it depends on */
    reverse: Map<number, number[]>;
    /** SCCs in topological order (dependencies before dependents) */
    topologicalOrder: number[];
}
export interface CondensedNode {
    /** Index in the `nodes` array, matches the SCC index */
    index: number;
    /** Absolute file paths belonging to this SCC */
    files: string[];
    /** Number of files in this SCC */
    size: number;
    /** True if this SCC has more than one file (circular dependency) */
    isCyclic: boolean;
}
/**
 * Compute all Strongly Connected Components of the dependency graph
 * using Tarjan's algorithm. Returns SCCs, a file-to-SCC mapping,
 * and the condensed DAG with topological ordering.
 */
export declare function computeSCC(graph: DependencyGraph): SCCResult;
/**
 * Build a DAG where each node represents one SCC.
 * Edges between SCCs are derived from the original graph's edges
 * between files belonging to different SCCs.
 */
export declare function condenseGraph(graph: DependencyGraph, components: string[][], nodeToComponent: Map<string, number>): CondensedDAG;
/**
 * Compute a topological ordering of the condensed DAG using Kahn's algorithm.
 * The condensed graph is guaranteed to be acyclic after SCC condensation.
 *
 * Returns SCC indices in dependency order: dependencies come before dependents.
 * This uses the `forward` edges (file A imports B means A -> B in forward),
 * so we process nodes with no incoming forward edges first (leaf dependencies).
 */
export declare function topologicalSort(dag: CondensedDAG): number[];