@syke1/mcp-server 1.4.17 → 1.4.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
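The headline addition in this release is a composite risk scorer (the new scoring/risk-scorer module in the diff below). The score is a weighted sum of normalized signals, clamped to 0-1 and mapped to a risk level. As a minimal illustration only: the weights below are copied from RISK_WEIGHTS in the diff, while the input values are made up and assumed to be already normalized to the 0-1 range.

// Illustrative sketch only: recomputes the weighted composite the way the new
// risk-scorer module does, using hypothetical inputs already normalized to 0-1.
const RISK_WEIGHTS = { fanIn: 0.30, stability: 0.20, complexity: 0.20, cascadeDepth: 0.15, pageRank: 0.15 };

const normalizedFanIn = 0.9;        // transitive fan-in, min-max normalized (hypothetical)
const instability = 0.25;           // I = Ce / (Ca + Ce); low means stable, i.e. risky to change
const normalizedComplexity = 0.6;   // decision-point count, min-max normalized (hypothetical)
const normalizedCascadeDepth = 0.4; // BFS depth on the condensed DAG, normalized (hypothetical)
const normalizedPageRank = 0.8;     // PageRank percentile / 100 (hypothetical)

const composite =
  RISK_WEIGHTS.fanIn * normalizedFanIn +
  RISK_WEIGHTS.stability * (1 - instability) +   // stable files (low instability) score higher
  RISK_WEIGHTS.complexity * normalizedComplexity +
  RISK_WEIGHTS.cascadeDepth * normalizedCascadeDepth +
  RISK_WEIGHTS.pageRank * normalizedPageRank;

console.log(composite.toFixed(2)); // 0.72, which classifies as "HIGH" against the 0.8/0.6/0.4/0.2 thresholds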
@@ -0,0 +1,623 @@
1
+ "use strict";
2
+ /**
3
+ * Composite Risk Scoring for SYKE.
4
+ *
5
+ * Combines multiple signals (fan-in, instability, cyclomatic complexity,
6
+ * cascade depth) into a single 0-1 risk score using weighted normalization.
7
+ *
8
+ * Based on Robert C. Martin's stability metrics and standard software
9
+ * engineering coupling/cohesion analysis.
10
+ */
11
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
12
+ if (k2 === undefined) k2 = k;
13
+ var desc = Object.getOwnPropertyDescriptor(m, k);
14
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
15
+ desc = { enumerable: true, get: function() { return m[k]; } };
16
+ }
17
+ Object.defineProperty(o, k2, desc);
18
+ }) : (function(o, m, k, k2) {
19
+ if (k2 === undefined) k2 = k;
20
+ o[k2] = m[k];
21
+ }));
22
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
23
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
24
+ }) : function(o, v) {
25
+ o["default"] = v;
26
+ });
27
+ var __importStar = (this && this.__importStar) || (function () {
28
+ var ownKeys = function(o) {
29
+ ownKeys = Object.getOwnPropertyNames || function (o) {
30
+ var ar = [];
31
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
32
+ return ar;
33
+ };
34
+ return ownKeys(o);
35
+ };
36
+ return function (mod) {
37
+ if (mod && mod.__esModule) return mod;
38
+ var result = {};
39
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
40
+ __setModuleDefault(result, mod);
41
+ return result;
42
+ };
43
+ })();
44
+ Object.defineProperty(exports, "__esModule", { value: true });
45
+ exports.RISK_WEIGHTS = void 0;
46
+ exports.invalidateProjectMetrics = invalidateProjectMetrics;
47
+ exports.computeCouplingMetrics = computeCouplingMetrics;
48
+ exports.computeInstability = computeInstability;
49
+ exports.computeComplexity = computeComplexity;
50
+ exports.computeCascadeDepth = computeCascadeDepth;
51
+ exports.classifyCompositeRisk = classifyCompositeRisk;
52
+ exports.computeRiskScore = computeRiskScore;
53
+ exports.computeProjectMetrics = computeProjectMetrics;
54
+ exports.getRiskScore = getRiskScore;
55
+ exports.formatRiskScore = formatRiskScore;
56
+ const fs = __importStar(require("fs"));
57
+ const plugin_1 = require("../languages/plugin");
58
+ const pagerank_1 = require("./pagerank");
59
+ // ── Weights (tunable constants) ──
60
+ exports.RISK_WEIGHTS = {
61
+ fanIn: 0.30, // Most important: how many things break
62
+ stability: 0.20, // Foundation files are riskier
63
+ complexity: 0.20, // Complex files are harder to change safely
64
+ cascadeDepth: 0.15, // Deep cascades mean wider blast radius
65
+ pageRank: 0.15, // Recursive importance from the dependency graph
66
+ };
67
+ // ── Risk Level Thresholds ──
68
+ const RISK_THRESHOLDS = {
69
+ CRITICAL: 0.8,
70
+ HIGH: 0.6,
71
+ MEDIUM: 0.4,
72
+ LOW: 0.2,
73
+ };
74
+ // ── Cached Project Metrics ──
75
+ let cachedProjectMetrics = null;
76
+ let cachedProjectRoot = null;
77
+ /**
78
+ * Invalidate cached project metrics. Call when the graph is rebuilt.
79
+ */
80
+ function invalidateProjectMetrics() {
81
+ cachedProjectMetrics = null;
82
+ cachedProjectRoot = null;
83
+ }
84
+ // ── Core Metric Computations ──
85
+ /**
86
+ * Compute coupling metrics for a single file from the dependency graph.
87
+ */
88
+ function computeCouplingMetrics(filePath, graph) {
89
+ const fanIn = (graph.reverse.get(filePath) || []).length;
90
+ const fanOut = (graph.forward.get(filePath) || []).length;
91
+ const transitiveFanIn = computeTransitiveFanIn(filePath, graph);
92
+ return { fanIn, fanOut, transitiveFanIn };
93
+ }
94
+ /**
95
+ * BFS reverse traversal to count all transitive dependents.
96
+ */
97
+ function computeTransitiveFanIn(filePath, graph) {
98
+ const visited = new Set();
99
+ const queue = [];
100
+ const directDeps = graph.reverse.get(filePath) || [];
101
+ for (const dep of directDeps) {
102
+ if (!visited.has(dep)) {
103
+ visited.add(dep);
104
+ queue.push(dep);
105
+ }
106
+ }
107
+ while (queue.length > 0) {
108
+ const current = queue.shift();
109
+ const dependents = graph.reverse.get(current) || [];
110
+ for (const dep of dependents) {
111
+ if (!visited.has(dep) && dep !== filePath) {
112
+ visited.add(dep);
113
+ queue.push(dep);
114
+ }
115
+ }
116
+ }
117
+ return visited.size;
118
+ }
119
+ /**
120
+ * Robert C. Martin's Instability Index.
121
+ *
122
+ * I = Ce / (Ca + Ce) where Ca = fanIn, Ce = fanOut
123
+ * 0 = maximally stable (everything depends on it, dangerous to change)
124
+ * 1 = maximally unstable (leaf node, safe to change)
125
+ */
126
+ function computeInstability(fanIn, fanOut) {
127
+ if (fanIn + fanOut === 0)
128
+ return 0.5; // isolated file
129
+ return fanOut / (fanIn + fanOut);
130
+ }
131
+ // ── Cyclomatic Complexity (regex-based) ──
132
+ /**
133
+ * Language-specific regex patterns for counting decision points.
134
+ * Each pattern matches one decision point in source code.
135
+ */
136
+ const COMPLEXITY_PATTERNS = {
137
+ typescript: [
138
+ /\bif\s*\(/g,
139
+ /\belse\s+if\s*\(/g,
140
+ /\bfor\s*\(/g,
141
+ /\bwhile\s*\(/g,
142
+ /\bdo\s*\{/g,
143
+ /\bswitch\s*\(/g,
144
+ /\bcase\s+/g,
145
+ /\bcatch\s*\(/g,
146
+ /\?\s*[^:?]/g, // ternary (simplified)
147
+ /&&/g,
148
+ /\|\|/g,
149
+ /\?\?/g,
150
+ ],
151
+ javascript: [], // same as typescript, populated below
152
+ dart: [
153
+ /\bif\s*\(/g,
154
+ /\belse\s+if\s*\(/g,
155
+ /\bfor\s*\(/g,
156
+ /\bwhile\s*\(/g,
157
+ /\bdo\s*\{/g,
158
+ /\bswitch\s*\(/g,
159
+ /\bcase\s+/g,
160
+ /\bcatch\s*\(/g,
161
+ /\?\s*[^:?]/g,
162
+ /&&/g,
163
+ /\|\|/g,
164
+ /\?\?/g,
165
+ /\blate\s+/g,
166
+ ],
167
+ python: [
168
+ /\bif\s+/g,
169
+ /\belif\s+/g,
170
+ /\bfor\s+/g,
171
+ /\bwhile\s+/g,
172
+ /\bexcept\s*/g,
173
+ /\band\b/g,
174
+ /\bor\b/g,
175
+ /\bwith\s+/g,
176
+ ],
177
+ go: [
178
+ /\bif\s+/g,
179
+ /\bfor\s+/g,
180
+ /\bselect\s*\{/g,
181
+ /\bcase\s+/g,
182
+ /&&/g,
183
+ /\|\|/g,
184
+ ],
185
+ rust: [
186
+ /\bif\s+/g,
187
+ /\belse\s+if\s+/g,
188
+ /\bfor\s+/g,
189
+ /\bwhile\s+/g,
190
+ /\bloop\s*\{/g,
191
+ /\bmatch\s+/g,
192
+ /=>/g,
193
+ /&&/g,
194
+ /\|\|/g,
195
+ ],
196
+ java: [
197
+ /\bif\s*\(/g,
198
+ /\belse\s+if\s*\(/g,
199
+ /\bfor\s*\(/g,
200
+ /\bwhile\s*\(/g,
201
+ /\bdo\s*\{/g,
202
+ /\bswitch\s*\(/g,
203
+ /\bcase\s+/g,
204
+ /\bcatch\s*\(/g,
205
+ /\?\s*[^:?]/g,
206
+ /&&/g,
207
+ /\|\|/g,
208
+ ],
209
+ cpp: [
210
+ /\bif\s*\(/g,
211
+ /\belse\s+if\s*\(/g,
212
+ /\bfor\s*\(/g,
213
+ /\bwhile\s*\(/g,
214
+ /\bdo\s*\{/g,
215
+ /\bswitch\s*\(/g,
216
+ /\bcase\s+/g,
217
+ /\bcatch\s*\(/g,
218
+ /\?\s*[^:?]/g,
219
+ /&&/g,
220
+ /\|\|/g,
221
+ ],
222
+ ruby: [
223
+ /\bif\s+/g,
224
+ /\belsif\s+/g,
225
+ /\bunless\s+/g,
226
+ /\bwhile\s+/g,
227
+ /\buntil\s+/g,
228
+ /\bfor\s+/g,
229
+ /\bwhen\s+/g,
230
+ /\brescue\b/g,
231
+ /&&/g,
232
+ /\|\|/g,
233
+ ],
234
+ };
235
+ // JavaScript uses same patterns as TypeScript
236
+ COMPLEXITY_PATTERNS.javascript = COMPLEXITY_PATTERNS.typescript;
237
+ /**
238
+ * Map language plugin IDs to complexity pattern keys.
239
+ */
240
+ const LANGUAGE_ID_MAP = {
241
+ typescript: "typescript",
242
+ dart: "dart",
243
+ python: "python",
244
+ go: "go",
245
+ rust: "rust",
246
+ java: "java",
247
+ cpp: "cpp",
248
+ ruby: "ruby",
249
+ };
250
+ /**
251
+ * Detect the language for a file based on extension or plugin ID.
252
+ */
253
+ function detectLanguageForFile(filePath) {
254
+ const plugin = (0, plugin_1.getPluginForFile)(filePath);
255
+ if (plugin) {
256
+ return LANGUAGE_ID_MAP[plugin.id] || "typescript";
257
+ }
258
+ // Fallback: detect by extension
259
+ const ext = filePath.split(".").pop()?.toLowerCase() || "";
260
+ const extMap = {
261
+ ts: "typescript", tsx: "typescript", js: "javascript", jsx: "javascript",
262
+ dart: "dart", py: "python", go: "go", rs: "rust",
263
+ java: "java", cpp: "cpp", cc: "cpp", cxx: "cpp", c: "cpp", h: "cpp", hpp: "cpp",
264
+ rb: "ruby",
265
+ };
266
+ return extMap[ext] || "typescript";
267
+ }
268
+ /**
269
+ * Strip comments and string literals from source code to avoid
270
+ * false positives in complexity counting.
271
+ */
272
+ function stripCommentsAndStrings(content) {
273
+ // Remove block comments
274
+ let stripped = content.replace(/\/\*[\s\S]*?\*\//g, "");
275
+ // Remove line comments
276
+ stripped = stripped.replace(/\/\/.*$/gm, "");
277
+ // Remove Python/Ruby line comments
278
+ stripped = stripped.replace(/#.*$/gm, "");
279
+ // Remove double-quoted strings (simple approach)
280
+ stripped = stripped.replace(/"(?:[^"\\]|\\.)*"/g, '""');
281
+ // Remove single-quoted strings
282
+ stripped = stripped.replace(/'(?:[^'\\]|\\.)*'/g, "''");
283
+ // Remove template literals
284
+ stripped = stripped.replace(/`(?:[^`\\]|\\.)*`/g, "``");
285
+ return stripped;
286
+ }
287
+ /**
288
+ * Compute cyclomatic complexity of source code using regex-based
289
+ * decision point counting.
290
+ *
291
+ * Returns the raw count of decision points (base complexity of 1 is NOT added).
292
+ */
293
+ function computeComplexity(content, language) {
294
+ const patterns = COMPLEXITY_PATTERNS[language] || COMPLEXITY_PATTERNS.typescript;
295
+ const stripped = stripCommentsAndStrings(content);
296
+ let count = 0;
297
+ for (const pattern of patterns) {
298
+ // Reset lastIndex for global regexes
299
+ pattern.lastIndex = 0;
300
+ const matches = stripped.match(pattern);
301
+ if (matches) {
302
+ count += matches.length;
303
+ }
304
+ }
305
+ return count;
306
+ }
307
+ // ── Cascade Depth ──
308
+ /**
309
+ * Compute the maximum cascade depth from the condensed DAG.
310
+ * This is the maximum BFS depth from the file's SCC along the reverse edges
311
+ * of the condensed DAG (i.e., how many layers deep the impact propagates).
312
+ */
313
+ function computeCascadeDepth(filePath, graph) {
314
+ const scc = graph.scc;
315
+ if (!scc) {
316
+ // Fallback: use simple BFS depth on raw graph
317
+ return computeRawCascadeDepth(filePath, graph);
318
+ }
319
+ const sccIndex = scc.nodeToComponent.get(filePath);
320
+ if (sccIndex === undefined)
321
+ return 0;
322
+ // BFS on condensed DAG reverse edges to find max depth
323
+ const visited = new Map(); // SCC index -> depth
324
+ visited.set(sccIndex, 0);
325
+ const queue = [
326
+ { idx: sccIndex, depth: 0 },
327
+ ];
328
+ let maxDepth = 0;
329
+ while (queue.length > 0) {
330
+ const { idx, depth } = queue.shift();
331
+ const dependentSCCs = scc.condensed.reverse.get(idx) || [];
332
+ for (const depSCC of dependentSCCs) {
333
+ if (!visited.has(depSCC)) {
334
+ const newDepth = depth + 1;
335
+ visited.set(depSCC, newDepth);
336
+ maxDepth = Math.max(maxDepth, newDepth);
337
+ queue.push({ idx: depSCC, depth: newDepth });
338
+ }
339
+ }
340
+ }
341
+ return maxDepth;
342
+ }
343
+ /**
344
+ * Fallback: simple BFS depth on the raw reverse graph.
345
+ */
346
+ function computeRawCascadeDepth(filePath, graph) {
347
+ const visited = new Set();
348
+ visited.add(filePath);
349
+ let currentLevel = [filePath];
350
+ let depth = 0;
351
+ while (currentLevel.length > 0) {
352
+ const nextLevel = [];
353
+ for (const file of currentLevel) {
354
+ const dependents = graph.reverse.get(file) || [];
355
+ for (const dep of dependents) {
356
+ if (!visited.has(dep)) {
357
+ visited.add(dep);
358
+ nextLevel.push(dep);
359
+ }
360
+ }
361
+ }
362
+ if (nextLevel.length > 0) {
363
+ depth++;
364
+ }
365
+ currentLevel = nextLevel;
366
+ }
367
+ return depth;
368
+ }
369
+ // ── Normalization ──
370
+ /**
371
+ * Min-max normalize a value to the 0-1 range.
372
+ * Returns 0 if max equals min (all values identical).
373
+ */
374
+ function normalize(value, min, max) {
375
+ if (max <= min)
376
+ return 0;
377
+ return Math.min(1, Math.max(0, (value - min) / (max - min)));
378
+ }
379
+ // ── Risk Level Classification ──
380
+ /**
381
+ * Map a composite score (0-1) to a risk level.
382
+ */
383
+ function classifyCompositeRisk(score) {
384
+ if (score >= RISK_THRESHOLDS.CRITICAL)
385
+ return "CRITICAL";
386
+ if (score >= RISK_THRESHOLDS.HIGH)
387
+ return "HIGH";
388
+ if (score >= RISK_THRESHOLDS.MEDIUM)
389
+ return "MEDIUM";
390
+ if (score >= RISK_THRESHOLDS.LOW)
391
+ return "LOW";
392
+ return "SAFE";
393
+ }
394
+ // ── Main Scoring Functions ──
395
+ /**
396
+ * Read file content from disk (used when no content is passed).
397
+ */
398
+ function readFileContent(filePath) {
399
+ try {
400
+ return fs.readFileSync(filePath, "utf-8");
401
+ }
402
+ catch {
403
+ return null;
404
+ }
405
+ }
406
+ /**
407
+ * Compute the composite risk score for a single file.
408
+ *
409
+ * If `projectMetrics` is provided, uses pre-computed normalization bounds.
410
+ * Otherwise, falls back to simple heuristic normalization caps (less accurate but functional).
411
+ */
412
+ function computeRiskScore(filePath, graph, fileContent, projectMetrics) {
413
+ // Compute coupling metrics
414
+ const coupling = computeCouplingMetrics(filePath, graph);
415
+ // Compute instability
416
+ const instability = computeInstability(coupling.fanIn, coupling.fanOut);
417
+ // Compute complexity
418
+ const content = fileContent ?? readFileContent(filePath);
419
+ const language = detectLanguageForFile(filePath);
420
+ const complexity = content ? computeComplexity(content, language) : 0;
421
+ // Compute cascade depth
422
+ const cascadeDepth = computeCascadeDepth(filePath, graph);
423
+ // Normalize using project-wide bounds (or use raw values)
424
+ let normalizedFanIn;
425
+ let normalizedComplexity;
426
+ let normalizedCascadeDepth;
427
+ if (projectMetrics) {
428
+ normalizedFanIn = normalize(coupling.transitiveFanIn, 0, projectMetrics.maxTransitiveFanIn);
429
+ normalizedComplexity = normalize(complexity, 0, projectMetrics.maxComplexity);
430
+ normalizedCascadeDepth = normalize(cascadeDepth, 0, projectMetrics.maxCascadeDepth);
431
+ }
432
+ else {
433
+ // Without project metrics, use simple heuristic normalization
434
+ normalizedFanIn = normalize(coupling.transitiveFanIn, 0, Math.max(coupling.transitiveFanIn, 20));
435
+ normalizedComplexity = normalize(complexity, 0, Math.max(complexity, 50));
436
+ normalizedCascadeDepth = normalize(cascadeDepth, 0, Math.max(cascadeDepth, 5));
437
+ }
438
+ // Retrieve PageRank data if available on the graph
439
+ let pageRankScore;
440
+ let pageRankPercentile;
441
+ let normalizedPageRank = 0;
442
+ let hasPageRank = false;
443
+ if (graph.pageRank) {
444
+ const prData = (0, pagerank_1.getFileRank)(filePath, graph.pageRank);
445
+ if (prData) {
446
+ pageRankScore = prData.score;
447
+ pageRankPercentile = prData.percentile;
448
+ normalizedPageRank = prData.percentile / 100; // 0-1 range
449
+ hasPageRank = true;
450
+ }
451
+ }
452
+ // Composite score — include PageRank if available, otherwise normalize
453
+ // remaining weights to sum to 1 for backward compatibility
454
+ let composite;
455
+ if (hasPageRank) {
456
+ composite =
457
+ exports.RISK_WEIGHTS.fanIn * normalizedFanIn +
458
+ exports.RISK_WEIGHTS.stability * (1 - instability) +
459
+ exports.RISK_WEIGHTS.complexity * normalizedComplexity +
460
+ exports.RISK_WEIGHTS.cascadeDepth * normalizedCascadeDepth +
461
+ exports.RISK_WEIGHTS.pageRank * normalizedPageRank;
462
+ }
463
+ else {
464
+ // Fallback: redistribute pageRank weight proportionally among other signals
465
+ const baseSum = exports.RISK_WEIGHTS.fanIn + exports.RISK_WEIGHTS.stability +
466
+ exports.RISK_WEIGHTS.complexity + exports.RISK_WEIGHTS.cascadeDepth;
467
+ composite =
468
+ (exports.RISK_WEIGHTS.fanIn / baseSum) * normalizedFanIn +
469
+ (exports.RISK_WEIGHTS.stability / baseSum) * (1 - instability) +
470
+ (exports.RISK_WEIGHTS.complexity / baseSum) * normalizedComplexity +
471
+ (exports.RISK_WEIGHTS.cascadeDepth / baseSum) * normalizedCascadeDepth;
472
+ }
473
+ // Clamp to 0-1
474
+ const clampedComposite = Math.min(1, Math.max(0, composite));
475
+ return {
476
+ composite: Math.round(clampedComposite * 100) / 100,
477
+ fanIn: coupling.fanIn,
478
+ fanOut: coupling.fanOut,
479
+ transitiveFanIn: coupling.transitiveFanIn,
480
+ instability: Math.round(instability * 100) / 100,
481
+ complexity,
482
+ normalizedComplexity: Math.round(normalizedComplexity * 100) / 100,
483
+ cascadeDepth,
484
+ riskLevel: classifyCompositeRisk(clampedComposite),
485
+ pageRank: pageRankScore,
486
+ pageRankPercentile,
487
+ };
488
+ }
489
+ /**
490
+ * Pre-compute metrics for all files in the project.
491
+ * This establishes normalization bounds and caches per-file scores.
492
+ *
493
+ * Uses lazy initialization and caches results until invalidated.
494
+ */
495
+ function computeProjectMetrics(graph, getFileContent) {
496
+ // Return cached if available and for the same project
497
+ if (cachedProjectMetrics && cachedProjectRoot === graph.projectRoot) {
498
+ return cachedProjectMetrics;
499
+ }
500
+ const contentGetter = getFileContent || readFileContent;
501
+ // Phase 1: Compute raw metrics for all files
502
+ const rawMetrics = new Map();
503
+ let maxFanIn = 0;
504
+ let maxTransitiveFanIn = 0;
505
+ let maxComplexity = 0;
506
+ let maxCascadeDepth = 0;
507
+ for (const filePath of graph.files) {
508
+ const coupling = computeCouplingMetrics(filePath, graph);
509
+ const instability = computeInstability(coupling.fanIn, coupling.fanOut);
510
+ const content = contentGetter(filePath);
511
+ const language = detectLanguageForFile(filePath);
512
+ const complexity = content ? computeComplexity(content, language) : 0;
513
+ const cascadeDepth = computeCascadeDepth(filePath, graph);
514
+ rawMetrics.set(filePath, { coupling, instability, complexity, cascadeDepth });
515
+ maxFanIn = Math.max(maxFanIn, coupling.fanIn);
516
+ maxTransitiveFanIn = Math.max(maxTransitiveFanIn, coupling.transitiveFanIn);
517
+ maxComplexity = Math.max(maxComplexity, complexity);
518
+ maxCascadeDepth = Math.max(maxCascadeDepth, cascadeDepth);
519
+ }
520
+ // Phase 2: Compute normalized composite scores for all files
521
+ const projectMetrics = {
522
+ maxFanIn,
523
+ maxTransitiveFanIn,
524
+ maxComplexity,
525
+ maxCascadeDepth,
526
+ fileMetrics: new Map(),
527
+ };
528
+ for (const [filePath, raw] of rawMetrics) {
529
+ const normalizedFanIn = normalize(raw.coupling.transitiveFanIn, 0, maxTransitiveFanIn);
530
+ const normalizedComplexity = normalize(raw.complexity, 0, maxComplexity);
531
+ const normalizedCascadeDepth = normalize(raw.cascadeDepth, 0, maxCascadeDepth);
532
+ // Retrieve PageRank data if available
533
+ let pageRankScore;
534
+ let pageRankPercentile;
535
+ let normalizedPageRank = 0;
536
+ let hasPageRank = false;
537
+ if (graph.pageRank) {
538
+ const prData = (0, pagerank_1.getFileRank)(filePath, graph.pageRank);
539
+ if (prData) {
540
+ pageRankScore = prData.score;
541
+ pageRankPercentile = prData.percentile;
542
+ normalizedPageRank = prData.percentile / 100;
543
+ hasPageRank = true;
544
+ }
545
+ }
546
+ let composite;
547
+ if (hasPageRank) {
548
+ composite =
549
+ exports.RISK_WEIGHTS.fanIn * normalizedFanIn +
550
+ exports.RISK_WEIGHTS.stability * (1 - raw.instability) +
551
+ exports.RISK_WEIGHTS.complexity * normalizedComplexity +
552
+ exports.RISK_WEIGHTS.cascadeDepth * normalizedCascadeDepth +
553
+ exports.RISK_WEIGHTS.pageRank * normalizedPageRank;
554
+ }
555
+ else {
556
+ const baseSum = exports.RISK_WEIGHTS.fanIn + exports.RISK_WEIGHTS.stability +
557
+ exports.RISK_WEIGHTS.complexity + exports.RISK_WEIGHTS.cascadeDepth;
558
+ composite =
559
+ (exports.RISK_WEIGHTS.fanIn / baseSum) * normalizedFanIn +
560
+ (exports.RISK_WEIGHTS.stability / baseSum) * (1 - raw.instability) +
561
+ (exports.RISK_WEIGHTS.complexity / baseSum) * normalizedComplexity +
562
+ (exports.RISK_WEIGHTS.cascadeDepth / baseSum) * normalizedCascadeDepth;
563
+ }
564
+ const clampedComposite = Math.min(1, Math.max(0, composite));
565
+ projectMetrics.fileMetrics.set(filePath, {
566
+ composite: Math.round(clampedComposite * 100) / 100,
567
+ fanIn: raw.coupling.fanIn,
568
+ fanOut: raw.coupling.fanOut,
569
+ transitiveFanIn: raw.coupling.transitiveFanIn,
570
+ instability: Math.round(raw.instability * 100) / 100,
571
+ complexity: raw.complexity,
572
+ normalizedComplexity: Math.round(normalizedComplexity * 100) / 100,
573
+ cascadeDepth: raw.cascadeDepth,
574
+ riskLevel: classifyCompositeRisk(clampedComposite),
575
+ pageRank: pageRankScore,
576
+ pageRankPercentile,
577
+ });
578
+ }
579
+ // Cache the result
580
+ cachedProjectMetrics = projectMetrics;
581
+ cachedProjectRoot = graph.projectRoot;
582
+ console.error(`[syke:scoring] Project metrics computed for ${graph.files.size} files ` +
583
+ `(maxFanIn=${maxFanIn}, maxTransFanIn=${maxTransitiveFanIn}, ` +
584
+ `maxComplexity=${maxComplexity}, maxCascadeDepth=${maxCascadeDepth})`);
585
+ return projectMetrics;
586
+ }
587
+ /**
588
+ * Get the cached risk score for a file, or compute it on the fly.
589
+ * Uses project-wide normalization when available.
590
+ */
591
+ function getRiskScore(filePath, graph, fileContent) {
592
+ // Try to use cached project metrics for accurate normalization
593
+ if (cachedProjectMetrics && cachedProjectRoot === graph.projectRoot) {
594
+ const cached = cachedProjectMetrics.fileMetrics.get(filePath);
595
+ if (cached)
596
+ return cached;
597
+ // File not in cache (possibly new), compute with project bounds
598
+ return computeRiskScore(filePath, graph, fileContent ?? null, cachedProjectMetrics);
599
+ }
600
+ // No project metrics available, compute without normalization
601
+ return computeRiskScore(filePath, graph, fileContent ?? null);
602
+ }
603
+ /**
604
+ * Format a risk score for display in MCP tool output.
605
+ */
606
+ function formatRiskScore(score) {
607
+ const stabilityDesc = score.instability <= 0.2 ? "very stable -- dangerous to change" :
608
+ score.instability <= 0.4 ? "stable -- be cautious" :
609
+ score.instability <= 0.6 ? "balanced" :
610
+ score.instability <= 0.8 ? "unstable -- relatively safe" :
611
+ "very unstable -- safe to change";
612
+ const lines = [
613
+ `Risk Score: ${score.composite.toFixed(2)} (${score.riskLevel})`,
614
+ ` Fan-in: ${score.transitiveFanIn} (direct: ${score.fanIn}, fan-out: ${score.fanOut})`,
615
+ ` Stability: ${score.instability.toFixed(2)} (${stabilityDesc})`,
616
+ ` Complexity: ${score.complexity} (normalized: ${score.normalizedComplexity.toFixed(2)})`,
617
+ ` Cascade depth: ${score.cascadeDepth} level(s)`,
618
+ ];
619
+ if (score.pageRank !== undefined && score.pageRankPercentile !== undefined) {
620
+ lines.push(` PageRank: ${score.pageRank.toFixed(6)} (${score.pageRankPercentile}th percentile)`);
621
+ }
622
+ return lines.join("\n");
623
+ }
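Before the impact-analyzer type changes below, here is a minimal, hypothetical usage sketch of the exports added above. The import specifier and the shape of the graph object are assumptions (the scorer only reads the forward/reverse adjacency Maps, a files collection, and projectRoot); nothing in this sketch is part of the published diff.

// Hypothetical consumer of the new scoring API (export names taken from the diff above).
const { computeProjectMetrics, getRiskScore, formatRiskScore, invalidateProjectMetrics } =
  require("./scoring/risk-scorer"); // module path is an assumption

// Minimal stand-in for a dependency graph: b.ts and c.ts both import a.ts.
const graph = {
  projectRoot: "/repo",
  files: new Set(["/repo/a.ts", "/repo/b.ts", "/repo/c.ts"]),
  forward: new Map([["/repo/b.ts", ["/repo/a.ts"]], ["/repo/c.ts", ["/repo/a.ts"]]]),
  reverse: new Map([["/repo/a.ts", ["/repo/b.ts", "/repo/c.ts"]]]),
};

computeProjectMetrics(graph);                    // computes and caches project-wide normalization bounds
const score = getRiskScore("/repo/a.ts", graph); // served from the cached per-file metrics
console.log(formatRiskScore(score));             // prints a "Risk Score: 0.xx (LEVEL)" summary block

invalidateProjectMetrics();                      // call after rebuilding the graph to drop the cache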
@@ -1,5 +1,13 @@
1
1
  import { DependencyGraph } from "../graph";
2
+ import { RiskScore } from "../scoring/risk-scorer";
3
+ import { MemoCache } from "../graph/memo-cache";
2
4
  export type RiskLevel = "HIGH" | "MEDIUM" | "LOW" | "NONE";
5
+ export interface CoupledFileInfo {
6
+ relativePath: string;
7
+ confidence: number;
8
+ coChangeCount: number;
9
+ inDependencyGraph: boolean;
10
+ }
3
11
  export interface ImpactResult {
4
12
  filePath: string;
5
13
  relativePath: string;
@@ -7,11 +15,38 @@ export interface ImpactResult {
7
15
  directDependents: string[];
8
16
  transitiveDependents: string[];
9
17
  totalImpacted: number;
18
+ /** Cascade depth of each impacted file in the condensed DAG */
19
+ cascadeLevels?: Map<string, number>;
20
+ /** Files in the same cyclic SCC as the changed file (if any) */
21
+ circularCluster?: string[];
22
+ /** Total number of SCCs in the project */
23
+ sccCount?: number;
24
+ /** Number of SCCs with more than one file (circular dependencies) */
25
+ cyclicSCCs?: number;
26
+ /** Composite risk score (0-1) combining multiple signals */
27
+ riskScore?: RiskScore;
28
+ /** Files that historically co-change but may not be in the dependency graph */
29
+ coupledFiles?: CoupledFileInfo[];
30
+ /** True if the BFS result came from the memo cache (fast path) */
31
+ fromCache?: boolean;
10
32
  }
11
33
  /**
12
34
  * BFS reverse traversal to find all files impacted by modifying `filePath`.
35
+ * When SCC data is available, uses the condensed DAG for more accurate
36
+ * cascade-level analysis and circular dependency detection.
37
+ *
38
+ * Optionally computes a composite risk score when `includeRiskScore` is true.
39
+ * Optionally computes historical change coupling when `includeCoupling` is true.
40
+ */
41
+ export declare function analyzeImpact(filePath: string, graph: DependencyGraph, options?: {
42
+ includeRiskScore?: boolean;
43
+ fileContent?: string | null;
44
+ includeCoupling?: boolean;
45
+ }): Promise<ImpactResult>;
46
+ /**
47
+ * Get the memo cache instance for diagnostics (cache stats, etc.).
13
48
  */
14
- export declare function analyzeImpact(filePath: string, graph: DependencyGraph): ImpactResult;
49
+ export declare function getImpactMemoCache(): MemoCache;
15
50
  export declare function classifyRisk(count: number): RiskLevel;
16
51
  /**
17
52
  * Rank files by number of reverse dependents (hub score).