@syke1/mcp-server 1.4.17 → 1.4.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -113,6 +113,88 @@ SYKE supports three AI providers for semantic analysis. Bring your own key:
113
113
  **Auto-selection:** SYKE uses the first available key (Gemini > OpenAI > Anthropic).
114
114
  **Force provider:** Set `aiProvider` in config (or `SYKE_AI_PROVIDER` env var) to override.
115
115
 
116
+ ### Advanced Graph Algorithms
117
+
118
+ SYKE goes beyond simple dependency counting. Five production-grade algorithms work together to deliver precise, fast, and context-rich impact analysis — all running **locally with zero AI token cost**.
119
+
120
+ #### 1. SCC Condensation + Topological Sort
121
+
122
+ Circular dependencies are the #1 source of misleading impact analysis. SYKE uses **Tarjan's algorithm** to detect all Strongly Connected Components, condenses them into a clean DAG, then runs topological sort to compute correct cascade levels.
123
+
124
+ ```
125
+ Before: "47 files affected" (inflated by cycles)
126
+ After: "3 files in circular cluster (Level 0) → 5 files (Level 1) → 4 files (Level 2)"
127
+ ```
128
+
129
+ - O(V+E) computation — runs in single-digit milliseconds
130
+ - Every SCC with size > 1 is flagged as a circular dependency cluster
131
+ - Cascade levels are accurate even in heavily cyclic codebases
132
+
133
+ #### 2. Composite Risk Scoring
134
+
135
+ Five signals combined into a single 0–1 risk score:
136
+
137
+ | Signal | Weight | What it measures |
138
+ |--------|--------|-----------------|
139
+ | **Fan-in** | 30% | How many files depend on this one |
140
+ | **Stability Index** | 20% | Instability I = Ce/(Ca+Ce) — a lower value marks a foundation file that many others depend on, making it riskier to change |
141
+ | **Cyclomatic Complexity** | 20% | Internal branching complexity (regex-based, 8 languages) |
142
+ | **Cascade Depth** | 15% | How many layers deep the impact propagates |
143
+ | **PageRank** | 15% | Recursive importance in the dependency graph |
144
+
145
+ ```
146
+ auth_service.ts → Risk: 0.82 (CRITICAL)
147
+ Fan-in: 24, Stability: 0.12, Complexity: 47, Cascade: 4 levels, PageRank: 99th
148
+
149
+ string_utils.ts → Risk: 0.31 (LOW)
150
+ Fan-in: 18, Stability: 0.85, Complexity: 3, Cascade: 1 level, PageRank: 42nd
151
+ ```
152
+
153
+ AI agents can now make threshold decisions: proceed if < 0.3, warn if 0.3–0.7, block if > 0.7.
154
+
155
+ #### 3. Historical Change Coupling
156
+
157
+ Static imports miss **hidden dependencies** — files that always change together but have no import relationship. SYKE mines your git history (last 500 commits) to find these logical couplings.
158
+
159
+ ```
160
+ auth_service.ts changed →
161
+ [Dependency Graph] auth_provider.ts, login_screen.ts
162
+ [Git Coupling — Hidden Dependencies]
163
+ config/auth_config.json (85% confidence, 12 co-changes)
164
+ styles/auth.css (72% confidence, 8 co-changes)
165
+ ```
166
+
167
+ - Catches 15–30% of impacted files that static analysis misses entirely
168
+ - Filters mega-commits (>20 files) to avoid noise
169
+ - 5-minute cache with auto-refresh
170
+
171
+ #### 4. PageRank for File Importance
172
+
173
+ Simple fan-in counts treat all dependents equally. **PageRank** computes recursive importance — a file imported by many *important* files ranks higher than one imported by many leaf files.
174
+
175
+ ```
176
+ Before: utils.ts ranked #1 (25 dependents — but all are leaf components)
177
+ After: auth.ts ranked #1 (20 dependents — 15 of which are core modules)
178
+ ```
179
+
180
+ - Standard Power Iteration with damping factor 0.85
181
+ - Precomputed at startup, incrementally updated on file changes
182
+ - Every file gets a rank position and percentile (e.g., "rank #3 of 245, 99th percentile")
183
+
184
+ #### 5. Incremental Graph Updates + Memoized Queries
185
+
186
+ For large codebases (10K+ files), full graph rebuilds are too slow. SYKE now updates **only the changed file's edges** and invalidates **only the affected cache entries**.
187
+
188
+ ```
189
+ Before: 1 file changed → re-parse all 500 files → 2+ seconds
190
+ After: 1 file changed → re-parse 1 file → edge diff → 50ms
191
+ Same file queried again → cache hit → O(1) instant
192
+ ```
193
+
194
+ - Reverse index enables O(affected) cache invalidation instead of O(cache_size)
195
+ - SCC and PageRank recompute after edge changes (still < 100ms for 10K files)
196
+ - 500-entry LRU cache with hit/miss diagnostics
197
+
116
198
  ### Language Support
117
199
 
118
200
  Auto-detected, zero-config: **Dart/Flutter**, **TypeScript/JavaScript**, **Python**, **Go**, **Rust**, **Java**, **C++**, **Ruby**.
@@ -112,7 +112,7 @@ async function analyzeChangeRealtime(change, graph, getFileContent) {
112
112
  const absPath = path.normalize(path.join(graph.sourceDir, relPath));
113
113
  let affectedNodes = [];
114
114
  if (graph.files.has(absPath)) {
115
- const impact = (0, analyze_impact_1.analyzeImpact)(absPath, graph);
115
+ const impact = await (0, analyze_impact_1.analyzeImpact)(absPath, graph);
116
116
  affectedNodes = [...impact.directDependents, ...impact.transitiveDependents];
117
117
  }
118
118
  // Build context: changed file + top 5 connected files' smart context
@@ -0,0 +1,41 @@
1
+ export interface ChangeCoupling {
2
+ file1: string;
3
+ file2: string;
4
+ coChangeCount: number;
5
+ file1Changes: number;
6
+ file2Changes: number;
7
+ confidence: number;
8
+ support: number;
9
+ }
10
+ export interface CouplingResult {
11
+ couplings: ChangeCoupling[];
12
+ fileCouplings: Map<string, ChangeCoupling[]>;
13
+ totalCommitsAnalyzed: number;
14
+ analyzedAt: number;
15
+ }
16
+ export interface CouplingOptions {
17
+ maxCommits?: number;
18
+ minSupport?: number;
19
+ minConfidence?: number;
20
+ maxFilesPerCommit?: number;
21
+ }
22
+ /**
23
+ * Invalidate the coupling cache. Call this when the graph is refreshed
24
+ * or when git history may have changed.
25
+ */
26
+ export declare function invalidateCouplingCache(): void;
27
+ /**
28
+ * Mine git history to find files that frequently co-change.
29
+ *
30
+ * Runs `git log --name-only` and analyzes pairwise file combinations
31
+ * within each commit to identify hidden logical dependencies.
32
+ */
33
+ export declare function mineGitHistory(projectRoot: string, options?: CouplingOptions): Promise<CouplingResult>;
34
+ /**
35
+ * Get all significant couplings for a given file path.
36
+ * Returns an empty array if no couplings are found.
37
+ *
38
+ * The filePath should be a relative path matching git log output format
39
+ * (forward slashes, relative to project root).
40
+ */
41
+ export declare function getCoupledFiles(filePath: string, result: CouplingResult): ChangeCoupling[];
@@ -0,0 +1,250 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.invalidateCouplingCache = invalidateCouplingCache;
4
+ exports.mineGitHistory = mineGitHistory;
5
+ exports.getCoupledFiles = getCoupledFiles;
6
+ const child_process_1 = require("child_process");
7
+ // ── Defaults ──
8
+ const DEFAULT_MAX_COMMITS = 500;
9
+ const DEFAULT_MIN_SUPPORT = 3;
10
+ const DEFAULT_MIN_CONFIDENCE = 0.3;
11
+ const DEFAULT_MAX_FILES_PER_COMMIT = 20;
12
+ const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
13
+ // ── Cache ──
14
+ let cachedResult = null;
15
+ let cachedProjectRoot = null;
16
+ /**
17
+ * Invalidate the coupling cache. Call this when the graph is refreshed
18
+ * or when git history may have changed.
19
+ */
20
+ function invalidateCouplingCache() {
21
+ cachedResult = null;
22
+ cachedProjectRoot = null;
23
+ }
24
+ // ── Git History Mining ──
25
+ /**
26
+ * Check whether the given directory is inside a git repository.
27
+ */
28
+ function isGitRepo(projectRoot) {
29
+ try {
30
+ (0, child_process_1.execSync)("git rev-parse --is-inside-work-tree", {
31
+ cwd: projectRoot,
32
+ encoding: "utf-8",
33
+ stdio: ["pipe", "pipe", "pipe"],
34
+ });
35
+ return true;
36
+ }
37
+ catch {
38
+ return false;
39
+ }
40
+ }
41
+ /**
42
+ * Parse git log output into a list of commits, each containing
43
+ * the list of files changed in that commit.
44
+ */
45
+ function parseGitLog(raw) {
46
+ const commits = [];
47
+ const segments = raw.split("COMMIT:");
48
+ for (const segment of segments) {
49
+ const trimmed = segment.trim();
50
+ if (!trimmed)
51
+ continue;
52
+ // First line is the commit hash, remaining lines are file paths
53
+ const lines = trimmed.split("\n");
54
+ const files = [];
55
+ for (let i = 1; i < lines.length; i++) {
56
+ const fileLine = lines[i].trim();
57
+ if (fileLine) {
58
+ files.push(fileLine);
59
+ }
60
+ }
61
+ if (files.length > 0) {
62
+ commits.push(files);
63
+ }
64
+ }
65
+ return commits;
66
+ }
67
+ /**
68
+ * Normalize a git-output path (forward slashes) to be consistent
69
+ * with how the dependency graph stores paths.
70
+ */
71
+ function normalizePath(filePath) {
72
+ // Git always outputs forward slashes; normalize for consistency
73
+ return filePath.replace(/\\/g, "/");
74
+ }
75
+ /**
76
+ * Check if a file path looks like a source file (not binary, not config noise).
77
+ * We keep this broad — the dependency graph comparison will handle the real filtering.
78
+ */
79
+ function isSourceFile(filePath) {
80
+ // Skip obviously non-source files
81
+ const skipPatterns = [
82
+ /\.lock$/,
83
+ /package-lock\.json$/,
84
+ /yarn\.lock$/,
85
+ /\.min\.(js|css)$/,
86
+ /\.map$/,
87
+ /\.d\.ts$/,
88
+ /\.png$/,
89
+ /\.jpg$/,
90
+ /\.jpeg$/,
91
+ /\.gif$/,
92
+ /\.svg$/,
93
+ /\.ico$/,
94
+ /\.woff2?$/,
95
+ /\.ttf$/,
96
+ /\.eot$/,
97
+ /\.pdf$/,
98
+ /\.zip$/,
99
+ /\.tar$/,
100
+ /\.gz$/,
101
+ ];
102
+ const normalized = filePath.toLowerCase();
103
+ return !skipPatterns.some((p) => p.test(normalized));
104
+ }
105
+ /**
106
+ * Create a canonical pair key for two files (order-independent).
107
+ */
108
+ function pairKey(a, b) {
109
+ return a < b ? `${a}\0${b}` : `${b}\0${a}`;
110
+ }
111
+ /**
112
+ * Mine git history to find files that frequently co-change.
113
+ *
114
+ * Runs `git log --name-only` and analyzes pairwise file combinations
115
+ * within each commit to identify hidden logical dependencies.
116
+ */
117
+ async function mineGitHistory(projectRoot, options) {
118
+ // Return cached result if still valid
119
+ if (cachedResult &&
120
+ cachedProjectRoot === projectRoot &&
121
+ Date.now() - cachedResult.analyzedAt < CACHE_TTL_MS) {
122
+ return cachedResult;
123
+ }
124
+ const maxCommits = options?.maxCommits ?? DEFAULT_MAX_COMMITS;
125
+ const minSupport = options?.minSupport ?? DEFAULT_MIN_SUPPORT;
126
+ const minConfidence = options?.minConfidence ?? DEFAULT_MIN_CONFIDENCE;
127
+ const maxFilesPerCommit = options?.maxFilesPerCommit ?? DEFAULT_MAX_FILES_PER_COMMIT;
128
+ // Empty result for non-git projects
129
+ const emptyResult = {
130
+ couplings: [],
131
+ fileCouplings: new Map(),
132
+ totalCommitsAnalyzed: 0,
133
+ analyzedAt: Date.now(),
134
+ };
135
+ if (!isGitRepo(projectRoot)) {
136
+ cachedResult = emptyResult;
137
+ cachedProjectRoot = projectRoot;
138
+ return emptyResult;
139
+ }
140
+ // Run git log
141
+ let raw;
142
+ try {
143
+ raw = (0, child_process_1.execSync)(`git log --name-only --format="COMMIT:%H" --max-count=${maxCommits}`, {
144
+ cwd: projectRoot,
145
+ encoding: "utf-8",
146
+ maxBuffer: 10 * 1024 * 1024,
147
+ stdio: ["pipe", "pipe", "pipe"],
148
+ });
149
+ }
150
+ catch {
151
+ cachedResult = emptyResult;
152
+ cachedProjectRoot = projectRoot;
153
+ return emptyResult;
154
+ }
155
+ const commits = parseGitLog(raw);
156
+ // Track per-file change counts and per-pair co-change counts
157
+ const fileChangeCount = new Map();
158
+ const pairCoChangeCount = new Map();
159
+ let totalCommitsAnalyzed = 0;
160
+ for (const commitFiles of commits) {
161
+ // Filter to source files and normalize paths
162
+ const filtered = commitFiles
163
+ .map(normalizePath)
164
+ .filter(isSourceFile);
165
+ // Skip mega-commits (merge commits, large refactors)
166
+ if (filtered.length > maxFilesPerCommit || filtered.length < 2) {
167
+ if (filtered.length === 1) {
168
+ // Still count single-file commits for per-file totals
169
+ const file = filtered[0];
170
+ fileChangeCount.set(file, (fileChangeCount.get(file) || 0) + 1);
171
+ }
172
+ totalCommitsAnalyzed++;
173
+ continue;
174
+ }
175
+ totalCommitsAnalyzed++;
176
+ // Count per-file changes
177
+ for (const file of filtered) {
178
+ fileChangeCount.set(file, (fileChangeCount.get(file) || 0) + 1);
179
+ }
180
+ // Count pairwise co-changes
181
+ for (let i = 0; i < filtered.length; i++) {
182
+ for (let j = i + 1; j < filtered.length; j++) {
183
+ const key = pairKey(filtered[i], filtered[j]);
184
+ pairCoChangeCount.set(key, (pairCoChangeCount.get(key) || 0) + 1);
185
+ }
186
+ }
187
+ }
188
+ // Build coupling results, filtering by thresholds
189
+ const couplings = [];
190
+ for (const [key, coCount] of pairCoChangeCount) {
191
+ if (coCount < minSupport)
192
+ continue;
193
+ const [file1, file2] = key.split("\0");
194
+ const file1Changes = fileChangeCount.get(file1) || 0;
195
+ const file2Changes = fileChangeCount.get(file2) || 0;
196
+ const maxChanges = Math.max(file1Changes, file2Changes);
197
+ const confidence = maxChanges > 0 ? coCount / maxChanges : 0;
198
+ if (confidence < minConfidence)
199
+ continue;
200
+ couplings.push({
201
+ file1,
202
+ file2,
203
+ coChangeCount: coCount,
204
+ file1Changes,
205
+ file2Changes,
206
+ confidence,
207
+ support: coCount,
208
+ });
209
+ }
210
+ // Sort by confidence descending
211
+ couplings.sort((a, b) => b.confidence - a.confidence);
212
+ // Build the per-file lookup map
213
+ const fileCouplings = new Map();
214
+ for (const coupling of couplings) {
215
+ // Add to file1's list
216
+ if (!fileCouplings.has(coupling.file1)) {
217
+ fileCouplings.set(coupling.file1, []);
218
+ }
219
+ fileCouplings.get(coupling.file1).push(coupling);
220
+ // Add to file2's list
221
+ if (!fileCouplings.has(coupling.file2)) {
222
+ fileCouplings.set(coupling.file2, []);
223
+ }
224
+ fileCouplings.get(coupling.file2).push(coupling);
225
+ }
226
+ // Sort each file's couplings by confidence descending
227
+ for (const [, list] of fileCouplings) {
228
+ list.sort((a, b) => b.confidence - a.confidence);
229
+ }
230
+ const result = {
231
+ couplings,
232
+ fileCouplings,
233
+ totalCommitsAnalyzed,
234
+ analyzedAt: Date.now(),
235
+ };
236
+ cachedResult = result;
237
+ cachedProjectRoot = projectRoot;
238
+ return result;
239
+ }
240
+ /**
241
+ * Get all significant couplings for a given file path.
242
+ * Returns an empty array if no couplings are found.
243
+ *
244
+ * The filePath should be a relative path matching git log output format
245
+ * (forward slashes, relative to project root).
246
+ */
247
+ function getCoupledFiles(filePath, result) {
248
+ const normalized = normalizePath(filePath);
249
+ return result.fileCouplings.get(normalized) || [];
250
+ }
@@ -0,0 +1,35 @@
1
+ /**
2
+ * Incremental Graph Updates for SYKE.
3
+ *
4
+ * Instead of rebuilding the entire dependency graph when a single file changes,
5
+ * this module re-parses only the changed file's imports and updates the
6
+ * forward/reverse maps in place. SCC and PageRank are recomputed fully
7
+ * (both are O(V+E) and fast enough) only when edges actually change.
8
+ *
9
+ * This brings update latency from O(N * parse) down to O(1 * parse + V+E)
10
+ * for large codebases (10K+ files).
11
+ */
12
+ import { DependencyGraph } from "../graph";
13
+ export interface IncrementalUpdateResult {
14
+ updatedFile: string;
15
+ addedEdges: [string, string][];
16
+ removedEdges: [string, string][];
17
+ edgesChanged: boolean;
18
+ affectedFiles: string[];
19
+ }
20
+ /**
21
+ * Update the graph for a single changed file.
22
+ * Re-parses only that file's imports and updates forward/reverse maps.
23
+ * Returns info about what changed for cache invalidation.
24
+ */
25
+ export declare function updateGraphForFile(graph: DependencyGraph, filePath: string, projectRoot: string): IncrementalUpdateResult;
26
+ /**
27
+ * Add a new file to the graph.
28
+ * Initializes forward/reverse entries, parses imports, and adds edges.
29
+ */
30
+ export declare function addFileToGraph(graph: DependencyGraph, filePath: string, projectRoot: string): IncrementalUpdateResult;
31
+ /**
32
+ * Remove a file from the graph.
33
+ * Cleans up all forward edges, reverse edges, and the files set.
34
+ */
35
+ export declare function removeFileFromGraph(graph: DependencyGraph, filePath: string): IncrementalUpdateResult;