@getmikk/core 1.8.3 → 1.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/package.json +6 -4
  2. package/src/constants.ts +285 -0
  3. package/src/contract/contract-generator.ts +7 -0
  4. package/src/contract/index.ts +2 -3
  5. package/src/contract/lock-compiler.ts +66 -35
  6. package/src/contract/lock-reader.ts +30 -5
  7. package/src/contract/schema.ts +21 -0
  8. package/src/error-handler.ts +432 -0
  9. package/src/graph/cluster-detector.ts +52 -22
  10. package/src/graph/confidence-engine.ts +85 -0
  11. package/src/graph/graph-builder.ts +298 -255
  12. package/src/graph/impact-analyzer.ts +132 -119
  13. package/src/graph/index.ts +4 -0
  14. package/src/graph/memory-manager.ts +186 -0
  15. package/src/graph/query-engine.ts +76 -0
  16. package/src/graph/risk-engine.ts +86 -0
  17. package/src/graph/types.ts +89 -65
  18. package/src/index.ts +2 -0
  19. package/src/parser/change-detector.ts +99 -0
  20. package/src/parser/go/go-extractor.ts +18 -8
  21. package/src/parser/go/go-parser.ts +2 -0
  22. package/src/parser/index.ts +86 -36
  23. package/src/parser/javascript/js-extractor.ts +1 -1
  24. package/src/parser/javascript/js-parser.ts +2 -0
  25. package/src/parser/oxc-parser.ts +708 -0
  26. package/src/parser/oxc-resolver.ts +83 -0
  27. package/src/parser/tree-sitter/parser.ts +19 -10
  28. package/src/parser/types.ts +100 -73
  29. package/src/parser/typescript/ts-extractor.ts +229 -589
  30. package/src/parser/typescript/ts-parser.ts +16 -171
  31. package/src/parser/typescript/ts-resolver.ts +11 -1
  32. package/src/search/bm25.ts +16 -4
  33. package/src/utils/minimatch.ts +1 -1
  34. package/tests/contract.test.ts +2 -2
  35. package/tests/dead-code.test.ts +7 -7
  36. package/tests/esm-resolver.test.ts +75 -0
  37. package/tests/graph.test.ts +20 -20
  38. package/tests/helpers.ts +11 -6
  39. package/tests/impact-classified.test.ts +37 -41
  40. package/tests/parser.test.ts +7 -5
  41. package/tests/ts-parser.test.ts +27 -52
  42. package/test-output.txt +0 -373
@@ -1,144 +1,157 @@
1
- import type { DependencyGraph, GraphEdge, ImpactResult, ClassifiedImpact, RiskLevel } from './types.js'
1
+ import type { DependencyGraph, ImpactResult, ClassifiedImpact } from './types.js'
2
+ import { RiskEngine } from './risk-engine.js'
3
+ import { ConfidenceEngine } from './confidence-engine.js'
2
4
 
3
5
  /**
4
- * ImpactAnalyzer Given changed nodes, walks the graph backwards (BFS)
5
- * to find everything that depends on them.
6
- * Powers "what breaks if I change X?"
7
- *
8
- * Risk classification:
9
- * CRITICAL = direct caller (depth 1) that crosses a module boundary
10
- * HIGH = direct caller (depth 1) within the same module
11
- * MEDIUM = depth 2
12
- * LOW = depth 3+
13
- *
14
- * Confidence is derived from the quality of resolved edges in the traversal
15
- * path, not from the size of the result set. A small impact set built from
16
- * low-confidence (unresolved/fuzzy) edges is still LOW confidence.
6
+ * Mikk 2.0: Impact Analyzer
7
+ * Given changed nodes, walks the graph backwards (reverse dependency)
8
+ * to find everything impacted, computing quantitative risk and confidence.
17
9
  */
18
10
  export class ImpactAnalyzer {
19
- constructor(private graph: DependencyGraph) { }
11
+ private riskEngine: RiskEngine;
12
+ private confidenceEngine: ConfidenceEngine;
13
+
14
+ constructor(private graph: DependencyGraph) {
15
+ this.riskEngine = new RiskEngine(graph);
16
+ this.confidenceEngine = new ConfidenceEngine(graph);
17
+ }
20
18
 
21
19
  /** Given a list of changed node IDs, find everything impacted */
22
- analyze(changedNodeIds: string[]): ImpactResult {
23
- const visited = new Set<string>()
24
- const depthMap = new Map<string, number>()
25
- // Track the minimum confidence seen along the path to each node
26
- const pathConfidence = new Map<string, number>()
27
-
28
- const queue: { id: string; depth: number; confidence: number }[] =
29
- changedNodeIds.map(id => ({ id, depth: 0, confidence: 1.0 }))
30
- // Use an index pointer instead of queue.shift() to avoid O(n) cost per dequeue.
31
- let queueHead = 0
32
- let maxDepth = 0
33
-
34
- const changedSet = new Set(changedNodeIds)
35
-
36
- // Collect module IDs of the changed nodes — filter out undefined so that
37
- // nodes without a moduleId don't accidentally match every other unmoduled node
38
- // and cause everything to appear "same module".
39
- const changedModules = new Set<string>()
40
- for (const id of changedNodeIds) {
41
- const node = this.graph.nodes.get(id)
42
- if (node?.moduleId) changedModules.add(node.moduleId)
43
- }
20
+ public analyze(changedNodeIds: string[]): ImpactResult {
21
+ // depth and shortest-path tracking per visited node
22
+ const visited = new Map<string, { depth: number, paths: string[][] }>();
23
+ // Use an index pointer instead of queue.shift() to avoid O(n) dequeue cost.
24
+ const queue: { id: string, depth: number, path: string[], pathSet: Set<string> }[] =
25
+ changedNodeIds.map(id => ({ id, depth: 0, path: [id], pathSet: new Set([id]) }));
26
+ let queueHead = 0;
27
+
28
+ let maxDepth = 0;
29
+ const entryPoints = new Set<string>();
30
+ const criticalModules = new Set<string>();
44
31
 
45
32
  while (queueHead < queue.length) {
46
- const { id: current, depth, confidence: pathConf } = queue[queueHead++]
47
- if (visited.has(current)) continue
48
- visited.add(current)
49
- depthMap.set(current, depth)
50
- pathConfidence.set(current, pathConf)
51
- maxDepth = Math.max(maxDepth, depth)
52
-
53
- // Find everything that depends on current (incoming edges)
54
- const dependents = this.graph.inEdges.get(current) || []
55
- for (const edge of dependents) {
56
- if (!visited.has(edge.source) && edge.type !== 'contains') {
57
- // Propagate the minimum confidence seen so far on this path.
58
- // A chain is only as trustworthy as its weakest link.
59
- const edgeConf = edge.confidence ?? 1.0
60
- const newPathConf = Math.min(pathConf, edgeConf)
61
- queue.push({ id: edge.source, depth: depth + 1, confidence: newPathConf })
33
+ const { id: current, depth, path, pathSet } = queue[queueHead++];
34
+
35
+ if (!visited.has(current)) {
36
+ visited.set(current, { depth, paths: [path] });
37
+ } else {
38
+ visited.get(current)!.paths.push(path);
39
+ if (depth < visited.get(current)!.depth) {
40
+ visited.get(current)!.depth = depth;
62
41
  }
63
42
  }
64
- }
65
43
 
66
- const impacted = [...visited].filter(id => !changedSet.has(id))
44
+ maxDepth = Math.max(maxDepth, depth);
45
+ const node = this.graph.nodes.get(current);
67
46
 
68
- // Classify each impacted node by risk level
69
- const classified: ImpactResult['classified'] = {
70
- critical: [],
71
- high: [],
72
- medium: [],
73
- low: [],
74
- }
47
+ if (node?.metadata?.isExported) {
48
+ entryPoints.add(current);
49
+ }
75
50
 
76
- for (const id of impacted) {
77
- const node = this.graph.nodes.get(id)
78
- if (!node) continue
51
+ const dependents = this.graph.inEdges.get(current) || [];
52
+ for (const edge of dependents) {
53
+ // Allow 'contains' edges so if a function is changed, the file it belongs to is impacted,
54
+ // which then allows traversing 'imports' edges from other files.
55
+ if (!pathSet.has(edge.from)) {
56
+ const newPathSet = new Set(pathSet);
57
+ newPathSet.add(edge.from);
58
+ queue.push({
59
+ id: edge.from,
60
+ depth: depth + 1,
61
+ path: [...path, edge.from],
62
+ pathSet: newPathSet,
63
+ });
64
+ }
65
+ }
66
+ }
79
67
 
80
- const depth = depthMap.get(id) ?? 999
81
- // A node crosses a module boundary when its module differs from ALL changed modules.
82
- // If the node has no moduleId, treat it as crossing a boundary (unknown module ≠ known).
83
- const crossesBoundary = !node.moduleId || !changedModules.has(node.moduleId)
68
+ const impactedIds = Array.from(visited.keys()).filter(id =>
69
+ !changedNodeIds.includes(id) && id.startsWith('fn:')
70
+ );
71
+
72
+ let totalRisk = 0;
73
+ let totalConfidence = 0;
74
+
75
+ const classified = {
76
+ critical: [] as ClassifiedImpact[],
77
+ high: [] as ClassifiedImpact[],
78
+ medium: [] as ClassifiedImpact[],
79
+ low: [] as ClassifiedImpact[]
80
+ };
81
+
82
+ for (const id of impactedIds) {
83
+ const context = visited.get(id)!;
84
+ const node = this.graph.nodes.get(id);
85
+ let risk = this.riskEngine.scoreNode(id);
86
+
87
+ // Path reversal for confidence calculation (since BFS walks backwards)
88
+ const reversedPaths = context.paths.map(p => [...p].reverse());
89
+ const confidence = this.confidenceEngine.calculateNodeAggregatedConfidence(reversedPaths);
90
+
91
+ // Mikk 2.0 Hybrid Risk: Boost if boundary crossed at depth 1
92
+ // Check if ANY changed node crosses module boundary (not just first one)
93
+ if (context.depth === 1 && node?.moduleId) {
94
+ const crossesBoundary = changedNodeIds.some(id => {
95
+ const changedNode = this.graph.nodes.get(id);
96
+ // Add proper null checks for module IDs
97
+ if (!changedNode?.moduleId || !node.moduleId) {
98
+ return false;
99
+ }
100
+ return changedNode.moduleId !== node.moduleId;
101
+ });
102
+ if (crossesBoundary) {
103
+ risk = Math.max(risk, 80);
104
+ }
105
+ }
84
106
 
85
- const risk: RiskLevel =
86
- depth === 1 && crossesBoundary ? 'critical' :
87
- depth === 1 ? 'high' :
88
- depth === 2 ? 'medium' :
89
- 'low'
107
+ totalRisk += risk;
108
+ totalConfidence += confidence;
90
109
 
91
- const entry: ClassifiedImpact = {
110
+ const impactEntry: ClassifiedImpact = {
92
111
  nodeId: id,
93
- label: node.label,
94
- file: node.file,
95
- moduleId: node.moduleId,
96
- risk,
97
- depth,
112
+ label: node?.name || 'unknown',
113
+ file: node?.file || 'unknown',
114
+ risk: (risk >= 80 ? 'CRITICAL' : risk >= 60 ? 'HIGH' : risk >= 40 ? 'MEDIUM' : 'LOW'),
115
+ riskScore: risk,
116
+ depth: context.depth
117
+ };
118
+
119
+ if (risk >= 80) classified.critical.push(impactEntry);
120
+ else if (risk >= 60) classified.high.push(impactEntry);
121
+ else if (risk >= 40) classified.medium.push(impactEntry);
122
+ else classified.low.push(impactEntry);
123
+
124
+ if (risk > 70 && node?.moduleId) {
125
+ criticalModules.add(node.moduleId);
98
126
  }
99
-
100
- classified[risk].push(entry)
101
- }
102
-
103
- return {
104
- changed: changedNodeIds,
105
- impacted,
106
- depth: maxDepth,
107
- confidence: this.computeConfidence(impacted, pathConfidence),
108
- classified,
109
127
  }
110
- }
111
128
 
112
- /**
113
- * Derive confidence from the actual quality of edges traversed, not from
114
- * result size. A small result built from fuzzy/unresolved edges is LOW
115
- * confidence; a large result built from high-confidence AST edges is HIGH.
116
- *
117
- * Algorithm:
118
- * - Compute the average minimum-path-confidence across all impacted nodes.
119
- * - Penalise for deep chains (they amplify uncertainty).
120
- * - Map the combined score to HIGH / MEDIUM / LOW.
121
- */
122
- private computeConfidence(
123
- impacted: string[],
124
- pathConfidence: Map<string, number>,
125
- ): 'high' | 'medium' | 'low' {
126
- if (impacted.length === 0) return 'high'
127
-
128
- // Average path confidence across all impacted nodes
129
- let total = 0
130
- for (const id of impacted) {
131
- total += pathConfidence.get(id) ?? 1.0
132
- }
133
- const avgConf = total / impacted.length
129
+ const avgConfidence = impactedIds.length > 0
130
+ ? totalConfidence / impactedIds.length
131
+ : 1.0;
134
132
 
135
- // Penalise for large impact sets: confidence erodes with result size
136
- const sizePenalty = impacted.length > 20 ? 0.15 : impacted.length > 10 ? 0.08 : 0
133
+ const riskScore = impactedIds.length > 0
134
+ ? Math.min(Math.max(totalRisk / impactedIds.length, 0), 100)
135
+ : 0;
137
136
 
138
- const score = avgConf - sizePenalty
137
+ const allImpacted: ClassifiedImpact[] = [
138
+ ...classified.critical,
139
+ ...classified.high,
140
+ ...classified.medium,
141
+ ...classified.low
142
+ ];
139
143
 
140
- if (score >= 0.75) return 'high'
141
- if (score >= 0.50) return 'medium'
142
- return 'low'
144
+ return {
145
+ changed: changedNodeIds,
146
+ impacted: impactedIds,
147
+ allImpacted,
148
+ depth: maxDepth,
149
+ entryPoints: Array.from(entryPoints),
150
+ criticalModules: Array.from(criticalModules),
151
+ paths: Array.from(visited.values()).flatMap(v => v.paths),
152
+ confidence: Number(avgConfidence.toFixed(3)),
153
+ riskScore: Math.round(riskScore),
154
+ classified
155
+ };
143
156
  }
144
157
  }
@@ -4,4 +4,8 @@ export { ImpactAnalyzer } from './impact-analyzer.js'
4
4
  export { ClusterDetector } from './cluster-detector.js'
5
5
  export { DeadCodeDetector } from './dead-code-detector.js'
6
6
  export type { DeadCodeResult, DeadCodeEntry } from './dead-code-detector.js'
7
+ export { RiskEngine } from './risk-engine.js'
8
+ export type { RiskContext, RiskModifiers } from './risk-engine.js'
9
+ export { ConfidenceEngine } from './confidence-engine.js'
10
+ export { QueryEngine } from './query-engine.js'
7
11
 
@@ -0,0 +1,186 @@
1
+ /**
2
+ * MemoryManager — monitors and limits heap usage during graph operations.
3
+ *
4
+ * Design notes:
5
+ * - No console.log/warn in production paths. All diagnostics are exposed
6
+ * via getMemoryStats() so callers can decide how to surface them.
7
+ * - The auto-GC timer is ref-unref'd so it doesn't keep the Node process alive.
8
+ * - dispose() must be called when the manager is no longer needed.
9
+ */
10
+
11
+ const MEMORY_THRESHOLDS = {
12
+ WARNING: 100 * 1024 * 1024, // 100 MB
13
+ CRITICAL: 200 * 1024 * 1024, // 200 MB
14
+ EMERGENCY: 400 * 1024 * 1024, // 400 MB
15
+ } as const
16
+
17
+ const DEFAULT_CONFIG = {
18
+ maxAge: 30 * 60 * 1000, // 30 minutes
19
+ maxNodes: 10_000,
20
+ gcInterval: 60 * 1000, // 1 minute
21
+ } as const
22
+
23
+ export interface MemoryStats {
24
+ heapUsed: number
25
+ heapTotal: number
26
+ external: number
27
+ rss: number
28
+ percentage: number
29
+ status: 'normal' | 'warning' | 'critical' | 'emergency'
30
+ }
31
+
32
+ export interface MemoryManagerConfig {
33
+ maxAge?: number
34
+ maxNodes?: number
35
+ gcInterval?: number
36
+ enableAutoGC?: boolean
37
+ }
38
+
39
+ export class MemoryManager {
40
+ private readonly maxAge: number
41
+ private readonly maxNodes: number
42
+ private readonly gcInterval: number
43
+ private nodeCache = new Map<string, { data: unknown; timestamp: number }>()
44
+ private gcTimer?: ReturnType<typeof setInterval>
45
+
46
+ constructor(config: MemoryManagerConfig = {}) {
47
+ this.maxAge = config.maxAge ?? DEFAULT_CONFIG.maxAge
48
+ this.maxNodes = config.maxNodes ?? DEFAULT_CONFIG.maxNodes
49
+ this.gcInterval = config.gcInterval ?? DEFAULT_CONFIG.gcInterval
50
+
51
+ if (config.enableAutoGC !== false) this.startAutoGC()
52
+ }
53
+
54
+ getMemoryStats(): MemoryStats {
55
+ const u = process.memoryUsage()
56
+ const percentage = (u.heapUsed / u.heapTotal) * 100
57
+
58
+ let status: MemoryStats['status'] = 'normal'
59
+ if (u.heapUsed > MEMORY_THRESHOLDS.EMERGENCY) status = 'emergency'
60
+ else if (u.heapUsed > MEMORY_THRESHOLDS.CRITICAL) status = 'critical'
61
+ else if (u.heapUsed > MEMORY_THRESHOLDS.WARNING) status = 'warning'
62
+
63
+ return { heapUsed: u.heapUsed, heapTotal: u.heapTotal, external: u.external, rss: u.rss, percentage, status }
64
+ }
65
+
66
+ isMemoryCritical(): boolean {
67
+ const { status } = this.getMemoryStats()
68
+ return status === 'critical' || status === 'emergency'
69
+ }
70
+
71
+ forceGC(): void {
72
+ if (typeof global.gc === 'function') global.gc()
73
+ }
74
+
75
+ cacheNode(id: string, data: unknown): void {
76
+ if (this.nodeCache.size >= this.maxNodes) {
77
+ this.evictOldest(Math.ceil(this.maxNodes * 0.1))
78
+ }
79
+ this.nodeCache.set(id, { data, timestamp: Date.now() })
80
+ }
81
+
82
+ getCachedNode(id: string): unknown | null {
83
+ const entry = this.nodeCache.get(id)
84
+ if (!entry) return null
85
+ if (Date.now() - entry.timestamp > this.maxAge) {
86
+ this.nodeCache.delete(id)
87
+ return null
88
+ }
89
+ return entry.data
90
+ }
91
+
92
+ clearCache(): void {
93
+ this.nodeCache.clear()
94
+ }
95
+
96
+ cleanup(): void {
97
+ const now = Date.now()
98
+ for (const [id, e] of this.nodeCache) {
99
+ if (now - e.timestamp > this.maxAge) this.nodeCache.delete(id)
100
+ }
101
+ this.forceGC()
102
+ }
103
+
104
+ stopAutoGC(): void {
105
+ if (this.gcTimer) { clearInterval(this.gcTimer); this.gcTimer = undefined }
106
+ }
107
+
108
+ dispose(): void {
109
+ this.stopAutoGC()
110
+ this.clearCache()
111
+ this.forceGC()
112
+ }
113
+
114
+ private evictOldest(count: number): void {
115
+ const sorted = [...this.nodeCache.entries()].sort((a, b) => a[1].timestamp - b[1].timestamp)
116
+ for (let i = 0; i < Math.min(count, sorted.length); i++) {
117
+ this.nodeCache.delete(sorted[i][0])
118
+ }
119
+ }
120
+
121
+ private startAutoGC(): void {
122
+ this.gcTimer = setInterval(() => {
123
+ if (this.isMemoryCritical()) this.cleanup()
124
+ }, this.gcInterval)
125
+
126
+ // Don't keep the Node process alive just for GC checks
127
+ if (this.gcTimer.unref) this.gcTimer.unref()
128
+ }
129
+ }
130
+
131
+ /**
132
+ * MemoryAwareGraphBuilder — builds a graph from a lock with memory monitoring.
133
+ * Builds purely from the in-memory lock; does NOT re-parse source files.
134
+ */
135
+ export class MemoryAwareGraphBuilder {
136
+ private memoryManager: MemoryManager
137
+
138
+ constructor(config?: MemoryManagerConfig) {
139
+ this.memoryManager = new MemoryManager(config)
140
+ }
141
+
142
+ buildGraph(lock: {
143
+ functions?: Record<string, { name: string; file: string; moduleId: string; isExported?: boolean; isAsync?: boolean; calls?: string[] }>
144
+ }) {
145
+ if (this.memoryManager.isMemoryCritical()) this.memoryManager.cleanup()
146
+
147
+ try {
148
+ return this.buildInternal(lock)
149
+ } finally {
150
+ this.memoryManager.cleanup()
151
+ }
152
+ }
153
+
154
+ getMemoryStats(): MemoryStats { return this.memoryManager.getMemoryStats() }
155
+ dispose(): void { this.memoryManager.dispose() }
156
+
157
+ private buildInternal(lock: {
158
+ functions?: Record<string, { name: string; file: string; moduleId: string; isExported?: boolean; isAsync?: boolean; calls?: string[] }>
159
+ }) {
160
+ const nodes = new Map<string, unknown>()
161
+ const edges: unknown[] = []
162
+ const outEdges = new Map<string, unknown[]>()
163
+ const inEdges = new Map<string, unknown[]>()
164
+
165
+ for (const [id, fn] of Object.entries(lock.functions ?? {})) {
166
+ nodes.set(id, {
167
+ id, name: fn.name, file: fn.file, type: 'function', moduleId: fn.moduleId,
168
+ metadata: { isExported: fn.isExported, isAsync: fn.isAsync },
169
+ })
170
+ outEdges.set(id, [])
171
+ inEdges.set(id, [])
172
+ }
173
+
174
+ for (const [id, fn] of Object.entries(lock.functions ?? {})) {
175
+ for (const targetId of fn.calls ?? []) {
176
+ if (!nodes.has(targetId)) continue
177
+ const edge = { from: id, to: targetId, type: 'calls', confidence: 1.0 }
178
+ edges.push(edge)
179
+ outEdges.get(id)!.push(edge)
180
+ inEdges.get(targetId)!.push(edge)
181
+ }
182
+ }
183
+
184
+ return { nodes, edges, outEdges, inEdges }
185
+ }
186
+ }
@@ -0,0 +1,76 @@
1
+ import type { DependencyGraph } from './types.js'
2
+
3
+ /**
4
+ * QueryEngine — high-performance graph traversal and path-finding.
5
+ *
6
+ * All BFS loops use an index pointer instead of Array.shift() to avoid
7
+ * the O(n) cost of shifting the underlying array on each dequeue.
8
+ */
9
+ export class QueryEngine {
10
+ constructor(private graph: DependencyGraph) {}
11
+
12
+ /** Find all direct dependents (who calls this node?) */
13
+ getDependents(nodeId: string): string[] {
14
+ return (this.graph.inEdges.get(nodeId) ?? [])
15
+ .filter(e => e.type !== 'contains')
16
+ .map(e => e.from)
17
+ }
18
+
19
+ /** Find all direct dependencies (what does this node call?) */
20
+ getDependencies(nodeId: string): string[] {
21
+ return (this.graph.outEdges.get(nodeId) ?? [])
22
+ .filter(e => e.type !== 'contains')
23
+ .map(e => e.to)
24
+ }
25
+
26
+ /**
27
+ * Find the shortest path between two nodes using BFS.
28
+ * Returns an ordered array of node IDs, or null if no path exists.
29
+ */
30
+ findPath(start: string, end: string): string[] | null {
31
+ if (!this.graph.nodes.has(start) || !this.graph.nodes.has(end)) return null
32
+ if (start === end) return [start]
33
+
34
+ const visited = new Set<string>([start])
35
+ // Each entry: [nodeId, pathSoFar]
36
+ const queue: Array<[string, string[]]> = [[start, [start]]]
37
+ let head = 0
38
+
39
+ while (head < queue.length) {
40
+ const [id, path] = queue[head++]
41
+
42
+ for (const edge of this.graph.outEdges.get(id) ?? []) {
43
+ if (edge.type === 'contains') continue
44
+ if (edge.to === end) return [...path, end]
45
+ if (!visited.has(edge.to)) {
46
+ visited.add(edge.to)
47
+ queue.push([edge.to, [...path, edge.to]])
48
+ }
49
+ }
50
+ }
51
+
52
+ return null
53
+ }
54
+
55
+ /**
56
+ * Get the full downstream (transitive dependents) of a node.
57
+ * Answers "What would break if I change X?"
58
+ */
59
+ getDownstreamImpact(nodeId: string): string[] {
60
+ const visited = new Set<string>()
61
+ const queue: string[] = [nodeId]
62
+ let head = 0
63
+
64
+ while (head < queue.length) {
65
+ const current = queue[head++]
66
+ for (const dep of this.getDependents(current)) {
67
+ if (!visited.has(dep) && dep !== nodeId) {
68
+ visited.add(dep)
69
+ queue.push(dep)
70
+ }
71
+ }
72
+ }
73
+
74
+ return [...visited]
75
+ }
76
+ }
@@ -0,0 +1,86 @@
1
+ import type { DependencyGraph, GraphNode } from './types.js'
2
+
3
+ export interface RiskContext {
4
+ connectedNodesCount: number;
5
+ dependencyDepth: number;
6
+ }
7
+
8
+ export interface RiskModifiers {
9
+ isAuthOrSecurity: boolean;
10
+ isDatabaseOrState: boolean;
11
+ isPublicAPI: boolean;
12
+ }
13
+
14
+ /**
15
+ * Mikk 2.0: Risk Engine
16
+ * Computes risk scores based on a quantitative mathematical model.
17
+ */
18
+ export class RiskEngine {
19
+ constructor(private graph: DependencyGraph) {}
20
+
21
+ /**
22
+ * Compute the absolute risk score (0-100) for modifying a specific node.
23
+ * Formula: Base Risk = (Connected Nodes * 1.5) + (Depth * 2) + Modifiers
24
+ */
25
+ public scoreNode(nodeId: string): number {
26
+ const node = this.graph.nodes.get(nodeId);
27
+ if (!node) return 0;
28
+
29
+ const context = this.analyzeContext(nodeId);
30
+ const modifiers = this.analyzeModifiers(node);
31
+
32
+ let score = (context.connectedNodesCount * 1.5) + (context.dependencyDepth * 2);
33
+
34
+ // Apply strict modifiers
35
+ if (modifiers.isAuthOrSecurity) score += 30;
36
+ if (modifiers.isDatabaseOrState) score += 20;
37
+ if (modifiers.isPublicAPI) score += 15;
38
+
39
+ return Math.min(Math.max(score, 0), 100);
40
+ }
41
+
42
+ private analyzeContext(nodeId: string): RiskContext {
43
+ const visited = new Set<string>();
44
+ let maxDepth = 0;
45
+
46
+ // Use index pointer instead of queue.shift() — avoids O(n) array shift per pop.
47
+ const queue: Array<{ id: string, depth: number }> = [{ id: nodeId, depth: 0 }];
48
+ let queueHead = 0;
49
+ visited.add(nodeId);
50
+
51
+ let connectedNodesCount = 0;
52
+
53
+ while (queueHead < queue.length) {
54
+ const current = queue[queueHead++];
55
+ maxDepth = Math.max(maxDepth, current.depth);
56
+
57
+ const inEdges = this.graph.inEdges.get(current.id) || [];
58
+ connectedNodesCount += inEdges.length;
59
+
60
+ for (const edge of inEdges) {
61
+ if (!visited.has(edge.from)) {
62
+ visited.add(edge.from);
63
+ queue.push({ id: edge.from, depth: current.depth + 1 });
64
+ }
65
+ }
66
+ }
67
+
68
+ return {
69
+ connectedNodesCount,
70
+ dependencyDepth: maxDepth
71
+ };
72
+ }
73
+
74
+ private analyzeModifiers(node: GraphNode): RiskModifiers {
75
+ const nameAndFile = `${node.name} ${node.file}`.toLowerCase();
76
+
77
+ const authKeywords = ['auth', 'login', 'jwt', 'verify', 'token', 'crypt', 'hash', 'password'];
78
+ const dbKeywords = ['db', 'query', 'sql', 'insert', 'update', 'delete', 'redis', 'cache', 'transaction'];
79
+
80
+ return {
81
+ isAuthOrSecurity: authKeywords.some(kw => nameAndFile.includes(kw)),
82
+ isDatabaseOrState: dbKeywords.some(kw => nameAndFile.includes(kw)),
83
+ isPublicAPI: !!node.metadata?.isExported
84
+ };
85
+ }
86
+ }