@syke1/mcp-server 1.4.17 → 1.4.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -50,6 +50,9 @@ const graph_1 = require("./graph");
50
50
  const plugin_1 = require("./languages/plugin");
51
51
  const analyze_impact_1 = require("./tools/analyze-impact");
52
52
  const gate_build_1 = require("./tools/gate-build");
53
+ const change_coupling_1 = require("./git/change-coupling");
54
+ const risk_scorer_1 = require("./scoring/risk-scorer");
55
+ const pagerank_1 = require("./scoring/pagerank");
53
56
  const analyzer_1 = require("./ai/analyzer");
54
57
  const provider_1 = require("./ai/provider");
55
58
  const server_1 = require("./web/server");
@@ -248,7 +251,7 @@ async function main() {
248
251
  case "gate_build": {
249
252
  const graph = (0, graph_1.getGraph)(currentProjectRoot, currentPackageName);
250
253
  const files = args.files?.map((f) => resolveFilePath(f, currentProjectRoot, graph.sourceDir));
251
- const result = (0, gate_build_1.gateCheck)(graph, files);
254
+ const result = await (0, gate_build_1.gateCheck)(graph, files);
252
255
  return {
253
256
  content: [
254
257
  { type: "text", text: appendDashboardFooter((0, gate_build_1.formatGateResult)(result)) },
@@ -273,24 +276,84 @@ async function main() {
273
276
  if (!isFileInFreeSet(resolved, graph)) {
274
277
  return { content: [{ type: "text", text: PRO_UPGRADE_MSG }] };
275
278
  }
276
- const result = (0, analyze_impact_1.analyzeImpact)(resolved, graph);
279
+ const result = await (0, analyze_impact_1.analyzeImpact)(resolved, graph, { includeRiskScore: true, includeCoupling: true });
280
+ const cachedTag = result.fromCache ? " (cached)" : "";
277
281
  const lines = [
278
- `## Impact Analysis: ${result.relativePath}`,
282
+ `## Impact Analysis: ${result.relativePath}${cachedTag}`,
279
283
  `**Risk Level:** ${result.riskLevel}`,
280
284
  `**Total impacted files:** ${result.totalImpacted}`,
281
285
  "",
282
286
  ];
287
+ // Show composite risk score
288
+ if (result.riskScore) {
289
+ lines.push("### Composite Risk Score");
290
+ lines.push((0, risk_scorer_1.formatRiskScore)(result.riskScore));
291
+ lines.push("");
292
+ }
293
+ // Show circular dependency warning if file is in a cyclic SCC
294
+ if (result.circularCluster && result.circularCluster.length > 0) {
295
+ lines.push("### Circular Dependency Cluster");
296
+ lines.push(`This file is part of a circular dependency with ${result.circularCluster.length} other file(s):`);
297
+ for (const f of result.circularCluster) {
298
+ lines.push(`- ${f}`);
299
+ }
300
+ lines.push("**All files in this cluster are immediately affected by any change.**");
301
+ lines.push("");
302
+ }
283
303
  if (result.directDependents.length > 0) {
284
304
  lines.push(`### Direct Dependents (${result.directDependents.length})`);
285
305
  for (const d of result.directDependents) {
286
- lines.push(`- ${d}`);
306
+ const level = result.cascadeLevels?.get(d);
307
+ const levelStr = level !== undefined ? `, cascade level ${level}` : "";
308
+ // Add PageRank percentile if available
309
+ let prStr = "";
310
+ if (graph.pageRank) {
311
+ const absPath = path.normalize(path.join(graph.sourceDir, d));
312
+ const prData = (0, pagerank_1.getFileRank)(absPath, graph.pageRank);
313
+ if (prData) {
314
+ prStr = `, PageRank ${prData.percentile}th percentile`;
315
+ }
316
+ }
317
+ const annotationParts = [levelStr, prStr].filter(Boolean).join("");
318
+ lines.push(`- ${d}${annotationParts ? ` (${annotationParts.replace(/^, /, "")})` : ""}`);
287
319
  }
288
320
  }
289
321
  if (result.transitiveDependents.length > 0) {
290
322
  lines.push("");
291
323
  lines.push(`### Transitive Dependents (${result.transitiveDependents.length})`);
292
324
  for (const d of result.transitiveDependents) {
293
- lines.push(`- ${d}`);
325
+ const level = result.cascadeLevels?.get(d);
326
+ const levelStr = level !== undefined ? `, cascade level ${level}` : "";
327
+ // Add PageRank percentile if available
328
+ let prStr = "";
329
+ if (graph.pageRank) {
330
+ const absPath = path.normalize(path.join(graph.sourceDir, d));
331
+ const prData = (0, pagerank_1.getFileRank)(absPath, graph.pageRank);
332
+ if (prData) {
333
+ prStr = `, PageRank ${prData.percentile}th percentile`;
334
+ }
335
+ }
336
+ const annotationParts = [levelStr, prStr].filter(Boolean).join("");
337
+ lines.push(`- ${d}${annotationParts ? ` (${annotationParts.replace(/^, /, "")})` : ""}`);
338
+ }
339
+ }
340
+ // Show historical change coupling (hidden dependencies)
341
+ if (result.coupledFiles && result.coupledFiles.length > 0) {
342
+ lines.push("");
343
+ lines.push("### Historical Change Coupling (hidden dependencies)");
344
+ for (const cf of result.coupledFiles) {
345
+ const pct = Math.round(cf.confidence * 100);
346
+ lines.push(` - ${cf.relativePath} (confidence: ${pct}%, co-changed ${cf.coChangeCount} times)`);
347
+ }
348
+ lines.push("These files frequently change together but have no import relationship.");
349
+ }
350
+ // SCC summary stats
351
+ if (result.sccCount !== undefined) {
352
+ lines.push("");
353
+ lines.push("### Graph Structure");
354
+ lines.push(`- SCCs in project: ${result.sccCount}`);
355
+ if (result.cyclicSCCs !== undefined && result.cyclicSCCs > 0) {
356
+ lines.push(`- Circular dependency clusters: ${result.cyclicSCCs}`);
294
357
  }
295
358
  }
296
359
  return { content: [{ type: "text", text: appendDashboardFooter(lines.join("\n")) }] };
@@ -312,13 +375,37 @@ async function main() {
312
375
  if (!isFileInFreeSet(resolved, graph)) {
313
376
  return { content: [{ type: "text", text: PRO_UPGRADE_MSG }] };
314
377
  }
315
- const result = (0, analyze_impact_1.analyzeImpact)(resolved, graph);
378
+ const result = await (0, analyze_impact_1.analyzeImpact)(resolved, graph, { includeRiskScore: true, includeCoupling: true });
316
379
  const rel = path.relative(graph.sourceDir, resolved).replace(/\\/g, "/");
380
+ const safeCachedTag = result.fromCache ? " (cached)" : "";
381
+ // Enhanced output with composite risk score
382
+ let output = `${result.riskLevel} — ${rel} impacts ${result.totalImpacted} file(s)${safeCachedTag}`;
383
+ // Show file importance via PageRank
384
+ if (graph.pageRank) {
385
+ const prData = (0, pagerank_1.getFileRank)(resolved, graph.pageRank);
386
+ if (prData) {
387
+ output += `\nFile importance: rank #${prData.rank} of ${graph.files.size} files (${prData.percentile}th percentile)`;
388
+ }
389
+ }
390
+ if (result.riskScore) {
391
+ output += `\n${(0, risk_scorer_1.formatRiskScore)(result.riskScore)}`;
392
+ }
393
+ // Mention high-confidence couplings as a warning
394
+ if (result.coupledFiles && result.coupledFiles.length > 0) {
395
+ const highConf = result.coupledFiles.filter((cf) => cf.confidence >= 0.5);
396
+ if (highConf.length > 0) {
397
+ output += `\n\nHistorical coupling warning: ${highConf.length} file(s) frequently co-change with this file but have no import relationship:`;
398
+ for (const cf of highConf) {
399
+ const pct = Math.round(cf.confidence * 100);
400
+ output += `\n - ${cf.relativePath} (${pct}%, ${cf.coChangeCount} times)`;
401
+ }
402
+ }
403
+ }
317
404
  return {
318
405
  content: [
319
406
  {
320
407
  type: "text",
321
- text: appendDashboardFooter(`${result.riskLevel} — ${rel} impacts ${result.totalImpacted} file(s)`),
408
+ text: appendDashboardFooter(output),
322
409
  },
323
410
  ],
324
411
  };
@@ -368,6 +455,54 @@ async function main() {
368
455
  const requestedN = args.top_n || 10;
369
456
  const graph = (0, graph_1.getGraph)(currentProjectRoot, currentPackageName);
370
457
  const hubs = (0, analyze_impact_1.getHubFiles)(graph, requestedN);
458
+ // If PageRank is available, build enriched entries sorted by PageRank
459
+ const pageRankAvailable = !!graph.pageRank;
460
+ if (pageRankAvailable) {
461
+ // Enrich hub data with PageRank and re-sort by PageRank score
462
+ const enriched = hubs.map(h => {
463
+ const absPath = path.normalize(path.join(graph.sourceDir, h.relativePath));
464
+ const prData = graph.pageRank ? (0, pagerank_1.getFileRank)(absPath, graph.pageRank) : null;
465
+ return { ...h, prData };
466
+ });
467
+ // Sort by PageRank score descending (fallback to dependent count)
468
+ enriched.sort((a, b) => {
469
+ const scoreA = a.prData?.score ?? 0;
470
+ const scoreB = b.prData?.score ?? 0;
471
+ if (scoreB !== scoreA)
472
+ return scoreB - scoreA;
473
+ return b.dependentCount - a.dependentCount;
474
+ });
475
+ // Compute risk scores for hub files
476
+ try {
477
+ (0, risk_scorer_1.computeProjectMetrics)(graph);
478
+ }
479
+ catch { /* non-critical */ }
480
+ const lines = [
481
+ `## Hub Files (Top ${enriched.length}, ranked by PageRank)`,
482
+ "",
483
+ ];
484
+ enriched.forEach((h, i) => {
485
+ const prScore = h.prData?.score?.toFixed(6) ?? "N/A";
486
+ const prRank = h.prData?.rank ?? "?";
487
+ const prPercentile = h.prData?.percentile ?? "?";
488
+ lines.push(`**#${i + 1} ${h.relativePath}**`);
489
+ lines.push(` PageRank: ${prScore} (rank #${prRank}, ${prPercentile}th percentile)`);
490
+ lines.push(` Dependents: ${h.dependentCount} (direct)`);
491
+ // Try to get risk score
492
+ const absPath = path.normalize(path.join(graph.sourceDir, h.relativePath));
493
+ try {
494
+ const rs = (0, risk_scorer_1.getRiskScore)(absPath, graph);
495
+ lines.push(` Risk Score: ${rs.composite.toFixed(2)} (${rs.riskLevel})`);
496
+ }
497
+ catch {
498
+ lines.push(` Risk: ${h.riskLevel}`);
499
+ }
500
+ lines.push("");
501
+ });
502
+ lines.push(`Total files in graph: ${graph.files.size}`);
503
+ return { content: [{ type: "text", text: lines.join("\n") }] };
504
+ }
505
+ // Fallback: original table format (no PageRank data)
371
506
  const lines = [
372
507
  `## Hub Files (Top ${hubs.length})`,
373
508
  "",
@@ -383,11 +518,13 @@ async function main() {
383
518
  }
384
519
  case "refresh_graph": {
385
520
  const graph = (0, graph_1.refreshGraph)(currentProjectRoot, currentPackageName);
521
+ (0, change_coupling_1.invalidateCouplingCache)();
522
+ const cacheStats = (0, analyze_impact_1.getImpactMemoCache)().stats();
386
523
  return {
387
524
  content: [
388
525
  {
389
526
  type: "text",
390
- text: `Graph refreshed (${graph.languages.join("+")}): ${graph.files.size} files scanned.`,
527
+ text: `Graph refreshed (${graph.languages.join("+")}): ${graph.files.size} files scanned. Change coupling cache invalidated. Memo cache cleared (was ${cacheStats.size} entries, ${cacheStats.hits} hits / ${cacheStats.misses} misses).`,
391
528
  },
392
529
  ],
393
530
  };
@@ -416,7 +553,7 @@ async function main() {
416
553
  if (!isFileInFreeSet(resolved, graph)) {
417
554
  return { content: [{ type: "text", text: PRO_UPGRADE_MSG }] };
418
555
  }
419
- const impactResult = (0, analyze_impact_1.analyzeImpact)(resolved, graph);
556
+ const impactResult = await (0, analyze_impact_1.analyzeImpact)(resolved, graph);
420
557
  const aiResult = await (0, analyzer_1.analyzeWithAI)(resolved, impactResult, graph);
421
558
  // Free tier: append partial analysis warning
422
559
  let resultText = aiResult;
@@ -523,6 +660,7 @@ async function main() {
523
660
  // Initialize file cache (load ALL source files into memory)
524
661
  fileCache = new file_cache_1.FileCache(currentProjectRoot);
525
662
  fileCache.initialize();
663
+ fileCache.setGraph(graph); // Enable incremental graph updates on file changes
526
664
  fileCache.startWatching();
527
665
  }
528
666
  // Web server handle (set after server starts)
@@ -537,13 +675,15 @@ async function main() {
537
675
  fileCache.stop();
538
676
  fileCache = new file_cache_1.FileCache(newRoot);
539
677
  fileCache.initialize();
678
+ // Rebuild graph
679
+ const graph = (0, graph_1.refreshGraph)(newRoot, currentPackageName);
680
+ // Enable incremental updates on the new cache
681
+ fileCache.setGraph(graph);
540
682
  fileCache.startWatching();
541
683
  // Re-wire SSE events to the new FileCache
542
684
  if (webServerHandle) {
543
685
  webServerHandle.setFileCache(fileCache);
544
686
  }
545
- // Rebuild graph
546
- const graph = (0, graph_1.refreshGraph)(newRoot, currentPackageName);
547
687
  console.error(`[syke] Switched to project: ${newRoot}`);
548
688
  console.error(`[syke] Languages: ${plugins.map(p => p.name).join(", ")}`);
549
689
  console.error(`[syke] Package: ${currentPackageName}`);
@@ -0,0 +1,67 @@
1
/**
 * PageRank scoring for SYKE dependency graphs.
 *
 * Uses the Power Iteration algorithm to compute recursive importance scores.
 * A file imported by many important files ranks higher than one imported by
 * many leaf files. This provides a more nuanced importance signal than
 * simple reverse dependent count (fan-in).
 *
 * In dependency graph terms:
 *   - If A imports B, the forward edge is A -> B.
 *   - B receives importance from A (B is important because A depends on it).
 *   - So we iterate over graph.reverse to find incoming "importance links".
 *
 * Dangling nodes (files that import nothing) distribute their rank
 * equally to all nodes, preventing rank from leaking out of the graph.
 */
import { DependencyGraph } from "../graph";
/** Tuning knobs for the power-iteration solver. All optional. */
export interface PageRankOptions {
    /** Damping factor d (probability of following a link); default 0.85. */
    dampingFactor?: number;
    /** Hard cap on power iterations; default 100. */
    maxIterations?: number;
    /** Convergence threshold on the max per-node rank delta; default 1e-6. */
    tolerance?: number;
}
/** Result of a full PageRank computation over the dependency graph. */
export interface PageRankResult {
    /** File path (as stored in graph.files) -> PageRank score. Scores sum to ~1. */
    scores: Map<string, number>;
    /** All files sorted by score descending (most important first). */
    ranked: RankedFile[];
    /** Number of power iterations actually performed before convergence/cap. */
    iterations: number;
    /** Epoch milliseconds when the result was computed. */
    computedAt: number;
}
/** Per-file entry in the sorted `ranked` list. */
export interface RankedFile {
    filePath: string;
    /** Path relative to graph.sourceDir, normalized to forward slashes. */
    relativePath: string;
    score: number;
    /** 1-based position in the ranked list (1 = most important). */
    rank: number;
    /** 0-100; higher means more important (top-ranked file is 100). */
    percentile: number;
}
/**
 * Invalidate the cached PageRank result.
 * Call this when the dependency graph is rebuilt.
 */
export declare function invalidatePageRank(): void;
/**
 * Compute PageRank scores for all files in the dependency graph
 * using the Power Iteration method.
 *
 * The algorithm:
 *   1. Initialize rank[i] = 1/N for all N files.
 *   2. Repeat until convergence or maxIterations:
 *        For each file i:
 *          newRank[i] = (1 - d) / N + d * SUM(rank[j] / outDegree[j])
 *          for all j that link to i (j imports i -> j is in reverse[i])
 *        Handle dangling nodes: files with outDegree=0 distribute rank to all.
 *        If max|newRank - rank| < tolerance: break
 *        rank = newRank
 *
 * Direction clarification:
 *   - forward[A] = [B] means "A imports B" (A -> B).
 *   - reverse[B] = [A] means "A imports B" — A gives importance to B.
 *   - outDegree of A = forward[A].length (how many files A imports).
 *   - When computing rank for B, sum over reverse[B]: each file A that
 *     imports B contributes rank[A] / outDegree[A] to B.
 */
export declare function computePageRank(graph: DependencyGraph, options?: PageRankOptions): PageRankResult;
/**
 * Look up a file's PageRank data from a precomputed result.
 * Returns null if the file is not in the result.
 */
export declare function getFileRank(filePath: string, result: PageRankResult): RankedFile | null;
@@ -0,0 +1,221 @@
1
+ "use strict";
2
+ /**
3
+ * PageRank scoring for SYKE dependency graphs.
4
+ *
5
+ * Uses the Power Iteration algorithm to compute recursive importance scores.
6
+ * A file imported by many important files ranks higher than one imported by
7
+ * many leaf files. This provides a more nuanced importance signal than
8
+ * simple reverse dependent count (fan-in).
9
+ *
10
+ * In dependency graph terms:
11
+ * - If A imports B, the forward edge is A -> B.
12
+ * - B receives importance from A (B is important because A depends on it).
13
+ * - So we iterate over graph.reverse to find incoming "importance links".
14
+ *
15
+ * Dangling nodes (files that import nothing) distribute their rank
16
+ * equally to all nodes, preventing rank from leaking out of the graph.
17
+ */
18
// ── tsc-generated CommonJS interop helpers (standard TypeScript emit; do not hand-edit) ──
// __createBinding re-exports a property from one module object onto another,
// preserving live-binding semantics via a getter where supported.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// __setModuleDefault attaches the CommonJS module as the `default` export
// of the namespace object built by __importStar.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// __importStar emulates `import * as ns from "..."` for CommonJS modules:
// copies all own keys (except "default") and sets the module as default.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Public API of this module (documented signatures live in pagerank.d.ts).
exports.invalidatePageRank = invalidatePageRank;
exports.computePageRank = computePageRank;
exports.getFileRank = getFileRank;
// Node stdlib path module: used below to derive relative paths for ranked output.
const path = __importStar(require("path"));
56
+ // ── Module-level Cache ──
57
+ let cachedResult = null;
58
+ /**
59
+ * Invalidate the cached PageRank result.
60
+ * Call this when the dependency graph is rebuilt.
61
+ */
62
+ function invalidatePageRank() {
63
+ cachedResult = null;
64
+ }
65
+ // ── Core Algorithm ──
66
+ /**
67
+ * Compute PageRank scores for all files in the dependency graph
68
+ * using the Power Iteration method.
69
+ *
70
+ * The algorithm:
71
+ * 1. Initialize rank[i] = 1/N for all N files.
72
+ * 2. Repeat until convergence or maxIterations:
73
+ * For each file i:
74
+ * newRank[i] = (1 - d) / N + d * SUM(rank[j] / outDegree[j])
75
+ * for all j that link to i (j imports i -> j is in reverse[i])
76
+ * Handle dangling nodes: files with outDegree=0 distribute rank to all.
77
+ * If max|newRank - rank| < tolerance: break
78
+ * rank = newRank
79
+ *
80
+ * Direction clarification:
81
+ * - forward[A] = [B] means "A imports B" (A -> B).
82
+ * - reverse[B] = [A] means "A imports B" — A gives importance to B.
83
+ * - outDegree of A = forward[A].length (how many files A imports).
84
+ * - When computing rank for B, sum over reverse[B]: each file A that
85
+ * imports B contributes rank[A] / outDegree[A] to B.
86
+ */
87
+ function computePageRank(graph, options) {
88
+ // Return cached result if available
89
+ if (cachedResult) {
90
+ return cachedResult;
91
+ }
92
+ const d = options?.dampingFactor ?? 0.85;
93
+ const maxIter = options?.maxIterations ?? 100;
94
+ const tol = options?.tolerance ?? 1e-6;
95
+ const files = [...graph.files];
96
+ const N = files.length;
97
+ // Handle empty graph
98
+ if (N === 0) {
99
+ const result = {
100
+ scores: new Map(),
101
+ ranked: [],
102
+ iterations: 0,
103
+ computedAt: Date.now(),
104
+ };
105
+ cachedResult = result;
106
+ return result;
107
+ }
108
+ // Build index maps for fast array-based iteration
109
+ const fileToIdx = new Map();
110
+ for (let i = 0; i < N; i++) {
111
+ fileToIdx.set(files[i], i);
112
+ }
113
+ // Compute out-degree for each file (number of files it imports)
114
+ const outDegree = new Float64Array(N);
115
+ for (let i = 0; i < N; i++) {
116
+ const deps = graph.forward.get(files[i]);
117
+ outDegree[i] = deps ? deps.length : 0;
118
+ }
119
+ // Identify dangling nodes (files with no outgoing edges / no imports)
120
+ const danglingNodes = [];
121
+ for (let i = 0; i < N; i++) {
122
+ if (outDegree[i] === 0) {
123
+ danglingNodes.push(i);
124
+ }
125
+ }
126
+ // Build reverse adjacency list as index arrays for performance
127
+ // reverseAdj[i] = list of indices j where file j imports file i
128
+ const reverseAdj = new Array(N);
129
+ for (let i = 0; i < N; i++) {
130
+ reverseAdj[i] = [];
131
+ }
132
+ for (const [target, sources] of graph.reverse) {
133
+ const targetIdx = fileToIdx.get(target);
134
+ if (targetIdx === undefined)
135
+ continue;
136
+ for (const source of sources) {
137
+ const sourceIdx = fileToIdx.get(source);
138
+ if (sourceIdx !== undefined) {
139
+ reverseAdj[targetIdx].push(sourceIdx);
140
+ }
141
+ }
142
+ }
143
+ // Initialize ranks
144
+ let rank = new Float64Array(N);
145
+ const initRank = 1.0 / N;
146
+ for (let i = 0; i < N; i++) {
147
+ rank[i] = initRank;
148
+ }
149
+ let iterations = 0;
150
+ const baseTeleport = (1.0 - d) / N;
151
+ for (let iter = 0; iter < maxIter; iter++) {
152
+ iterations = iter + 1;
153
+ // Compute dangling rank sum: total rank held by dangling nodes
154
+ let danglingSum = 0;
155
+ for (const idx of danglingNodes) {
156
+ danglingSum += rank[idx];
157
+ }
158
+ // Each node gets an equal share of the dangling rank (redistributed)
159
+ const danglingContribution = d * danglingSum / N;
160
+ const newRank = new Float64Array(N);
161
+ for (let i = 0; i < N; i++) {
162
+ // Sum contributions from all files that import file i
163
+ let incomingSum = 0;
164
+ for (const j of reverseAdj[i]) {
165
+ incomingSum += rank[j] / outDegree[j];
166
+ }
167
+ newRank[i] = baseTeleport + danglingContribution + d * incomingSum;
168
+ }
169
+ // Check convergence: max absolute difference
170
+ let maxDiff = 0;
171
+ for (let i = 0; i < N; i++) {
172
+ const diff = Math.abs(newRank[i] - rank[i]);
173
+ if (diff > maxDiff)
174
+ maxDiff = diff;
175
+ }
176
+ rank = newRank;
177
+ if (maxDiff < tol) {
178
+ break;
179
+ }
180
+ }
181
+ // Build scores map
182
+ const scores = new Map();
183
+ for (let i = 0; i < N; i++) {
184
+ scores.set(files[i], rank[i]);
185
+ }
186
+ // Build sorted ranked list
187
+ const indexedScores = [];
188
+ for (let i = 0; i < N; i++) {
189
+ indexedScores.push({ idx: i, score: rank[i] });
190
+ }
191
+ indexedScores.sort((a, b) => b.score - a.score);
192
+ const ranked = indexedScores.map((entry, position) => ({
193
+ filePath: files[entry.idx],
194
+ relativePath: path.relative(graph.sourceDir, files[entry.idx]).replace(/\\/g, "/"),
195
+ score: entry.score,
196
+ rank: position + 1,
197
+ percentile: Math.round(((N - 1 - position) / Math.max(1, N - 1)) * 100),
198
+ }));
199
+ const result = {
200
+ scores,
201
+ ranked,
202
+ iterations,
203
+ computedAt: Date.now(),
204
+ };
205
+ cachedResult = result;
206
+ console.error(`[syke:pagerank] Computed PageRank for ${N} files in ${iterations} iterations`);
207
+ return result;
208
+ }
209
+ // ── Lookup ──
210
+ /**
211
+ * O(1) lookup of a file's PageRank data from a precomputed result.
212
+ * Returns null if the file is not in the result.
213
+ */
214
+ function getFileRank(filePath, result) {
215
+ const score = result.scores.get(filePath);
216
+ if (score === undefined)
217
+ return null;
218
+ // Find the ranked entry (scores map guarantees it exists in ranked array)
219
+ const entry = result.ranked.find(r => r.filePath === filePath);
220
+ return entry || null;
221
+ }
@@ -0,0 +1,99 @@
1
/**
 * Composite Risk Scoring for SYKE.
 *
 * Combines multiple signals (fan-in, instability, cyclomatic complexity,
 * cascade depth) into a single 0-1 risk score using weighted normalization.
 *
 * Based on Robert C. Martin's stability metrics and standard software
 * engineering coupling/cohesion analysis.
 */
import { DependencyGraph } from "../graph";
/** Raw coupling counts for a single file. */
export interface CouplingMetrics {
    /** Afferent coupling Ca: number of files that depend on this file. */
    fanIn: number;
    /** Efferent coupling Ce: number of files this file depends on. */
    fanOut: number;
    /** Dependents reachable transitively — presumably the full reachable set; confirm against computeCouplingMetrics. */
    transitiveFanIn: number;
}
export type CompositeRiskLevel = "CRITICAL" | "HIGH" | "MEDIUM" | "LOW" | "SAFE";
/** Per-file risk breakdown; `composite` is the weighted 0-1 aggregate. */
export interface RiskScore {
    /** Weighted composite score in [0, 1] (see RISK_WEIGHTS). */
    composite: number;
    fanIn: number;
    fanOut: number;
    transitiveFanIn: number;
    /** Martin instability I = Ce / (Ca + Ce), in [0, 1]; 0 = stable/dangerous to change. */
    instability: number;
    /** Raw decision-point count (base complexity of 1 NOT added). */
    complexity: number;
    /** Complexity scaled against project bounds — see computeProjectMetrics. */
    normalizedComplexity: number;
    /** Longest reverse path through the condensed SCC DAG from this file. */
    cascadeDepth: number;
    riskLevel: CompositeRiskLevel;
    /** Present only when PageRank data is available for the file. */
    pageRank?: number;
    pageRankPercentile?: number;
}
/** Project-wide normalization bounds plus cached per-file scores. */
export interface ProjectMetrics {
    maxFanIn: number;
    maxTransitiveFanIn: number;
    maxComplexity: number;
    maxCascadeDepth: number;
    fileMetrics: Map<string, RiskScore>;
}
/** Relative weight of each signal in the composite score. */
export declare const RISK_WEIGHTS: {
    fanIn: number;
    stability: number;
    complexity: number;
    cascadeDepth: number;
    pageRank: number;
};
/**
 * Invalidate cached project metrics. Call when the graph is rebuilt.
 */
export declare function invalidateProjectMetrics(): void;
/**
 * Compute coupling metrics for a single file from the dependency graph.
 */
export declare function computeCouplingMetrics(filePath: string, graph: DependencyGraph): CouplingMetrics;
/**
 * Robert C. Martin's Instability Index.
 *
 * I = Ce / (Ca + Ce) where Ca = fanIn, Ce = fanOut
 * 0 = maximally stable (everything depends on it, dangerous to change)
 * 1 = maximally unstable (leaf node, safe to change)
 */
export declare function computeInstability(fanIn: number, fanOut: number): number;
/**
 * Compute cyclomatic complexity of source code using regex-based
 * decision point counting.
 *
 * Returns the raw count of decision points (base complexity of 1 is NOT added).
 */
export declare function computeComplexity(content: string, language: string): number;
/**
 * Compute the maximum cascade depth from the condensed DAG.
 * This is the longest path from the file's SCC through the reverse edges
 * of the condensed DAG (i.e., how many layers deep the impact propagates).
 */
export declare function computeCascadeDepth(filePath: string, graph: DependencyGraph): number;
/**
 * Map a composite score (0-1) to a risk level.
 */
export declare function classifyCompositeRisk(score: number): CompositeRiskLevel;
/**
 * Compute the composite risk score for a single file.
 *
 * If `projectMetrics` is provided, uses pre-computed normalization bounds.
 * Otherwise, uses raw metrics without normalization (less accurate but functional).
 */
export declare function computeRiskScore(filePath: string, graph: DependencyGraph, fileContent: string | null, projectMetrics?: ProjectMetrics): RiskScore;
/**
 * Pre-compute metrics for all files in the project.
 * This establishes normalization bounds and caches per-file scores.
 *
 * Uses lazy initialization and caches results until invalidated.
 */
export declare function computeProjectMetrics(graph: DependencyGraph, getFileContent?: (path: string) => string | null): ProjectMetrics;
/**
 * Get the cached risk score for a file, or compute it on the fly.
 * Uses project-wide normalization when available.
 */
export declare function getRiskScore(filePath: string, graph: DependencyGraph, fileContent?: string | null): RiskScore;
/**
 * Format a risk score for display in MCP tool output.
 */
export declare function formatRiskScore(score: RiskScore): string;