@syke1/mcp-server 1.4.17 → 1.4.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai/realtime-analyzer.js +1 -1
- package/dist/git/change-coupling.d.ts +41 -0
- package/dist/git/change-coupling.js +250 -0
- package/dist/graph/incremental.d.ts +35 -0
- package/dist/graph/incremental.js +319 -0
- package/dist/graph/memo-cache.d.ts +47 -0
- package/dist/graph/memo-cache.js +176 -0
- package/dist/graph/scc.d.ts +57 -0
- package/dist/graph/scc.js +206 -0
- package/dist/graph.d.ts +6 -0
- package/dist/graph.js +17 -1
- package/dist/index.js +151 -11
- package/dist/scoring/pagerank.d.ts +67 -0
- package/dist/scoring/pagerank.js +221 -0
- package/dist/scoring/risk-scorer.d.ts +99 -0
- package/dist/scoring/risk-scorer.js +623 -0
- package/dist/tools/analyze-impact.d.ts +36 -1
- package/dist/tools/analyze-impact.js +278 -2
- package/dist/tools/gate-build.d.ts +7 -2
- package/dist/tools/gate-build.js +179 -13
- package/dist/watcher/file-cache.d.ts +9 -0
- package/dist/watcher/file-cache.js +40 -0
- package/dist/web/server.js +20 -3
- package/package.json +1 -1
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Memoized BFS Result Cache for SYKE.
|
|
4
|
+
*
|
|
5
|
+
* Caches impact analysis results (BFS reverse traversals) so that
|
|
6
|
+
* repeated queries for the same file return instantly.
|
|
7
|
+
*
|
|
8
|
+
* Smart invalidation: when a file changes, only cache entries that
|
|
9
|
+
* could be affected are evicted. A reverse index maps each file to
|
|
10
|
+
* the set of cache keys whose impactSet contains it, making
|
|
11
|
+
* invalidation O(affected) instead of O(cache_size).
|
|
12
|
+
*
|
|
13
|
+
* Uses LRU eviction when the cache exceeds maxSize.
|
|
14
|
+
*/
|
|
15
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
16
|
+
exports.createMemoCache = createMemoCache;
|
|
17
|
+
exports.getMemoCache = getMemoCache;
|
|
18
|
+
exports.resetMemoCache = resetMemoCache;
|
|
19
|
+
// ── Implementation ──
|
|
20
|
+
/**
 * Create a new MemoCache with LRU eviction and reverse-index invalidation.
 *
 * The returned cache exposes:
 *   get(filePath)        -> cached entry or undefined (counts hit/miss)
 *   set(filePath, entry) -> store an entry; entry.impactSet is indexed
 *   invalidate(files)    -> evict every entry whose impactSet touches any
 *                           given file; returns the eviction count
 *   invalidateAll()      -> drop everything (hit/miss counters kept)
 *   stats()              -> { size, hits, misses }
 *
 * @param maxSize Maximum number of cached entries (default 500).
 */
function createMemoCache(maxSize = 500) {
    // Main cache: filePath -> MemoEntry.
    // LRU tracking piggybacks on Map's guaranteed insertion-order iteration:
    // touching a key re-inserts it at the end, so the first key is always the
    // least recently used. This makes get/set/evict O(1), replacing the O(n)
    // indexOf/splice bookkeeping a separate access-order array would need.
    const cache = new Map();
    // Reverse index: maps each file to the set of cache keys whose
    // impactSet contains that file. Used for O(affected) invalidation.
    const reverseIndex = new Map();
    // Stats
    let hits = 0;
    let misses = 0;
    /**
     * Mark a key as most recently used by re-inserting it at the Map's end.
     */
    function touchKey(key) {
        const entry = cache.get(key);
        if (entry !== undefined) {
            cache.delete(key);
            cache.set(key, entry);
        }
    }
    /**
     * Remove a single entry from the cache and clean up the reverse index.
     */
    function removeEntry(key) {
        const entry = cache.get(key);
        if (!entry)
            return;
        // Remove from reverse index
        for (const file of entry.impactSet) {
            const keys = reverseIndex.get(file);
            if (keys) {
                keys.delete(key);
                if (keys.size === 0) {
                    reverseIndex.delete(file);
                }
            }
        }
        // Also remove the key itself from the reverse index
        const selfKeys = reverseIndex.get(key);
        if (selfKeys) {
            selfKeys.delete(key);
            if (selfKeys.size === 0) {
                reverseIndex.delete(key);
            }
        }
        cache.delete(key);
    }
    /**
     * Evict least recently used entries while the cache exceeds maxSize.
     * Map iteration yields keys oldest-first, so keys().next() is the LRU key.
     */
    function evictLRU() {
        while (cache.size > maxSize) {
            const lruKey = cache.keys().next().value;
            if (lruKey === undefined)
                break;
            removeEntry(lruKey);
        }
    }
    /**
     * Add a file -> cacheKey mapping to the reverse index.
     */
    function addToReverseIndex(file, cacheKey) {
        let keys = reverseIndex.get(file);
        if (!keys) {
            keys = new Set();
            reverseIndex.set(file, keys);
        }
        keys.add(cacheKey);
    }
    return {
        get(filePath) {
            const entry = cache.get(filePath);
            if (entry) {
                hits++;
                touchKey(filePath);
                return entry;
            }
            misses++;
            return undefined;
        },
        set(filePath, entry) {
            // If already cached, remove old reverse index entries first
            if (cache.has(filePath)) {
                removeEntry(filePath);
            }
            // Store the entry; fresh insertion position == most recently used
            cache.set(filePath, entry);
            // Build reverse index: map each file in impactSet -> this cache key
            for (const file of entry.impactSet) {
                addToReverseIndex(file, filePath);
            }
            // Also index the key itself (if the queried file changes, its own
            // cached result is stale)
            addToReverseIndex(filePath, filePath);
            // Evict LRU if over capacity
            evictLRU();
        },
        invalidate(affectedFiles) {
            const keysToInvalidate = new Set();
            for (const file of affectedFiles) {
                // Find all cache keys whose impactSet contains this file
                const keys = reverseIndex.get(file);
                if (keys) {
                    for (const key of keys) {
                        keysToInvalidate.add(key);
                    }
                }
            }
            // Remove all identified entries
            for (const key of keysToInvalidate) {
                removeEntry(key);
            }
            return keysToInvalidate.size;
        },
        invalidateAll() {
            cache.clear();
            reverseIndex.clear();
            // Do NOT reset hits/misses — they are cumulative diagnostics
        },
        stats() {
            return {
                size: cache.size,
                hits,
                misses,
            };
        },
    };
}
|
|
158
|
+
// ── Singleton Instance ──
let globalMemoCache = null;
/**
 * Return the process-wide memo cache, creating it on first access.
 */
function getMemoCache() {
    if (globalMemoCache === null) {
        globalMemoCache = createMemoCache();
    }
    return globalMemoCache;
}
|
|
169
|
+
/**
 * Clear the global memo cache (e.g., on full graph rebuild).
 * No-op when the cache has never been created.
 */
function resetMemoCache() {
    if (globalMemoCache === null) {
        return;
    }
    globalMemoCache.invalidateAll();
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
 * Strongly Connected Components (SCC) via Tarjan's algorithm,
 * graph condensation into a DAG, and topological sort via Kahn's algorithm.
 *
 * Used to detect circular dependencies and provide accurate cascade-level
 * impact analysis on the condensed (acyclic) dependency graph.
 */
import { DependencyGraph } from "../graph";
export interface SCCResult {
    /** Each SCC as an array of absolute file paths */
    components: string[][];
    /** Maps each file to its SCC index in `components` */
    nodeToComponent: Map<string, number>;
    /** The condensed DAG built from the SCCs */
    condensed: CondensedDAG;
}
export interface CondensedDAG {
    /** One node per SCC */
    nodes: CondensedNode[];
    /** SCC index -> SCC indices it depends on (same direction as file imports: A imports B gives edge A -> B) */
    forward: Map<number, number[]>;
    /** SCC index -> SCC indices that depend on it (its importers) */
    reverse: Map<number, number[]>;
    /** SCCs in topological order (dependencies before dependents) */
    topologicalOrder: number[];
}
export interface CondensedNode {
    /** Index in the `nodes` array, matches the SCC index */
    index: number;
    /** Absolute file paths belonging to this SCC */
    files: string[];
    /** Number of files in this SCC */
    size: number;
    /** True if this SCC has more than one file (circular dependency) */
    isCyclic: boolean;
}
/**
 * Compute all Strongly Connected Components of the dependency graph
 * using Tarjan's algorithm. Returns SCCs, a file-to-SCC mapping,
 * and the condensed DAG with topological ordering.
 */
export declare function computeSCC(graph: DependencyGraph): SCCResult;
/**
 * Build a DAG where each node represents one SCC.
 * Edges between SCCs are derived from the original graph's edges
 * between files belonging to different SCCs.
 */
export declare function condenseGraph(graph: DependencyGraph, components: string[][], nodeToComponent: Map<string, number>): CondensedDAG;
/**
 * Compute a topological ordering of the condensed DAG using Kahn's algorithm.
 * The condensed graph is guaranteed to be acyclic after SCC condensation.
 *
 * Returns SCC indices in dependency order: dependencies come before dependents.
 * This uses the `forward` edges (file A imports B means A -> B in forward),
 * so SCCs with no outgoing forward edges (i.e. no dependencies) come first.
 */
export declare function topologicalSort(dag: CondensedDAG): number[];
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Strongly Connected Components (SCC) via Tarjan's algorithm,
|
|
4
|
+
* graph condensation into a DAG, and topological sort via Kahn's algorithm.
|
|
5
|
+
*
|
|
6
|
+
* Used to detect circular dependencies and provide accurate cascade-level
|
|
7
|
+
* impact analysis on the condensed (acyclic) dependency graph.
|
|
8
|
+
*/
|
|
9
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
10
|
+
exports.computeSCC = computeSCC;
|
|
11
|
+
exports.condenseGraph = condenseGraph;
|
|
12
|
+
exports.topologicalSort = topologicalSort;
|
|
13
|
+
// ── Tarjan's SCC Algorithm ──
|
|
14
|
+
/**
 * Compute all Strongly Connected Components of the dependency graph
 * using Tarjan's algorithm. Returns SCCs, a file-to-SCC mapping,
 * and the condensed DAG with topological ordering.
 *
 * Implemented iteratively with an explicit work stack so very deep
 * dependency chains cannot overflow the JS call stack (the recursive
 * formulation crashed once the chain exceeded the engine's recursion
 * limit). Visit order — and therefore SCC emission order — matches the
 * classic recursive algorithm exactly.
 *
 * @param graph Dependency graph with `files: Set<string>` and
 *              `forward: Map<string, string[]>` (file -> its imports).
 * @returns `{ components, nodeToComponent, condensed }`.
 */
function computeSCC(graph) {
    // Handle empty graph
    if (graph.files.size === 0) {
        const emptyDAG = {
            nodes: [],
            forward: new Map(),
            reverse: new Map(),
            topologicalOrder: [],
        };
        return {
            components: [],
            nodeToComponent: new Map(),
            condensed: emptyDAG,
        };
    }
    const components = [];
    // Tarjan state
    let indexCounter = 0;
    const nodeIndex = new Map(); // node -> DFS discovery index
    const nodeLowlink = new Map(); // node -> lowest index reachable from node
    const onStack = new Set();
    const stack = []; // Tarjan's component stack (not the DFS stack)
    /**
     * Iterative replacement for the recursive strongConnect. Each work frame
     * remembers how many successors it has already scanned (succIdx), so a
     * frame can be suspended while a child is explored and resumed after.
     */
    function strongConnect(root) {
        const work = [{ node: root, succIdx: 0 }];
        while (work.length > 0) {
            const frame = work[work.length - 1];
            const node = frame.node;
            if (frame.succIdx === 0) {
                // First visit: assign index/lowlink, push on the Tarjan stack.
                nodeIndex.set(node, indexCounter);
                nodeLowlink.set(node, indexCounter);
                indexCounter++;
                stack.push(node);
                onStack.add(node);
            }
            const successors = graph.forward.get(node) || [];
            let descended = false;
            while (frame.succIdx < successors.length) {
                const successor = successors[frame.succIdx];
                frame.succIdx++;
                // Only process nodes that exist in the graph
                if (!graph.files.has(successor))
                    continue;
                if (!nodeIndex.has(successor)) {
                    // Successor not yet visited — "recurse" by pushing a frame.
                    work.push({ node: successor, succIdx: 0 });
                    descended = true;
                    break;
                }
                else if (onStack.has(successor)) {
                    // Successor is on the stack — part of current SCC
                    nodeLowlink.set(node, Math.min(nodeLowlink.get(node), nodeIndex.get(successor)));
                }
            }
            if (descended)
                continue;
            // All successors processed. If node is an SCC root, pop the stack
            // down to node to form a component.
            if (nodeLowlink.get(node) === nodeIndex.get(node)) {
                const component = [];
                let w;
                do {
                    w = stack.pop();
                    onStack.delete(w);
                    component.push(w);
                } while (w !== node);
                components.push(component);
            }
            // Return to the parent frame, propagating lowlink exactly as the
            // recursive version does after strongConnect(successor) returns.
            work.pop();
            if (work.length > 0) {
                const parent = work[work.length - 1].node;
                nodeLowlink.set(parent, Math.min(nodeLowlink.get(parent), nodeLowlink.get(node)));
            }
        }
    }
    // Visit all nodes (handles disconnected components)
    for (const file of graph.files) {
        if (!nodeIndex.has(file)) {
            strongConnect(file);
        }
    }
    // Build file-to-component mapping
    const nodeToComponent = new Map();
    for (let i = 0; i < components.length; i++) {
        for (const file of components[i]) {
            nodeToComponent.set(file, i);
        }
    }
    // Build the condensed DAG
    const condensed = condenseGraph(graph, components, nodeToComponent);
    return { components, nodeToComponent, condensed };
}
|
|
91
|
+
// ── Graph Condensation ──
/**
 * Collapse the dependency graph into a DAG with one node per SCC.
 * An edge between two SCCs exists when any file of one imports any file
 * of the other; edges inside a single SCC are dropped, duplicates merged.
 */
function condenseGraph(graph, components, nodeToComponent) {
    // One condensed node per SCC; a multi-file SCC is a dependency cycle.
    const nodes = components.map((memberFiles, sccIndex) => ({
        index: sccIndex,
        files: memberFiles,
        size: memberFiles.length,
        isCyclic: memberFiles.length > 1,
    }));
    // Collect cross-SCC edges into Sets first so they come out deduplicated.
    const outgoing = Array.from({ length: components.length }, () => new Set());
    const incoming = Array.from({ length: components.length }, () => new Set());
    for (const [sourceFile, dependencies] of graph.forward) {
        const sourceSCC = nodeToComponent.get(sourceFile);
        if (sourceSCC === undefined)
            continue;
        for (const dependency of dependencies) {
            const targetSCC = nodeToComponent.get(dependency);
            // Drop unknown targets and intra-SCC self-edges.
            if (targetSCC === undefined || targetSCC === sourceSCC)
                continue;
            outgoing[sourceSCC].add(targetSCC);
            incoming[targetSCC].add(sourceSCC);
        }
    }
    // Materialize the Sets as arrays, one Map entry per SCC index.
    const forward = new Map(outgoing.map((edgeSet, sccIndex) => [sccIndex, [...edgeSet]]));
    const reverse = new Map(incoming.map((edgeSet, sccIndex) => [sccIndex, [...edgeSet]]));
    const dag = {
        nodes,
        forward,
        reverse,
        topologicalOrder: [],
    };
    // Order the SCCs (dependencies first) now that edges are in place.
    dag.topologicalOrder = topologicalSort(dag);
    return dag;
}
|
|
147
|
+
// ── Topological Sort (Kahn's Algorithm) ──
/**
 * Compute a topological ordering of the condensed DAG using Kahn's algorithm.
 * The condensed graph is guaranteed to be acyclic after SCC condensation.
 *
 * Returns SCC indices in dependency order: dependencies come before dependents.
 * This uses the `forward` edges (file A imports B means A -> B in forward),
 * so SCCs with no outgoing forward edges (i.e. no dependencies) come first.
 *
 * Uses an index cursor for the queue instead of Array.shift() so the sort is
 * O(V + E) overall instead of O(V^2) (shift is linear in queue length).
 *
 * @param dag Condensed DAG with `nodes`, `forward`, and `reverse`.
 * @returns SCC indices, dependencies first.
 */
function topologicalSort(dag) {
    const numNodes = dag.nodes.length;
    if (numNodes === 0)
        return [];
    // In-degree = number of dependencies (forward edges out of each node).
    // forward[A] = [B] means A imports B, so A depends on B.
    // For "dependencies first" ordering, nodes with zero dependencies come first.
    const inDegree = new Map();
    for (let i = 0; i < numNodes; i++) {
        inDegree.set(i, 0);
    }
    for (const [src, dsts] of dag.forward) {
        inDegree.set(src, (inDegree.get(src) || 0) + dsts.length);
    }
    // Start with nodes that have no dependencies (in-degree 0 in forward)
    const queue = [];
    for (const [node, degree] of inDegree) {
        if (degree === 0) {
            queue.push(node);
        }
    }
    const order = [];
    let head = 0; // dequeue cursor — avoids O(n) Array.shift()
    while (head < queue.length) {
        const current = queue[head];
        head++;
        order.push(current);
        // current has no remaining dependencies.
        // For all nodes that depend on current (reverse edges: who imports current),
        // decrement their in-degree.
        const dependents = dag.reverse.get(current) || [];
        for (const dependent of dependents) {
            const newDegree = (inDegree.get(dependent) || 0) - 1;
            inDegree.set(dependent, newDegree);
            if (newDegree === 0) {
                queue.push(dependent);
            }
        }
    }
    // If order doesn't contain all nodes, there's a bug (shouldn't happen after SCC condensation)
    if (order.length !== numNodes) {
        console.error(`[syke:scc] WARNING: Topological sort produced ${order.length}/${numNodes} nodes. ` +
            `This indicates a bug in SCC condensation.`);
        // Add remaining nodes at the end
        const ordered = new Set(order);
        for (let i = 0; i < numNodes; i++) {
            if (!ordered.has(i)) {
                order.push(i);
            }
        }
    }
    return order;
}
|
package/dist/graph.d.ts
CHANGED
|
@@ -1,3 +1,5 @@
|
|
|
1
|
+
import { SCCResult } from "./graph/scc";
|
|
2
|
+
import { PageRankResult } from "./scoring/pagerank";
|
|
1
3
|
export interface DependencyGraph {
|
|
2
4
|
forward: Map<string, string[]>;
|
|
3
5
|
reverse: Map<string, string[]>;
|
|
@@ -7,6 +9,10 @@ export interface DependencyGraph {
|
|
|
7
9
|
sourceDirs: string[];
|
|
8
10
|
/** backward compat: first source directory */
|
|
9
11
|
sourceDir: string;
|
|
12
|
+
/** Strongly Connected Components — computed after graph build */
|
|
13
|
+
scc?: SCCResult;
|
|
14
|
+
/** PageRank importance scores — computed after graph build */
|
|
15
|
+
pageRank?: PageRankResult;
|
|
10
16
|
}
|
|
11
17
|
export declare function buildGraph(projectRoot: string, packageName?: string): DependencyGraph;
|
|
12
18
|
export declare function getGraph(projectRoot: string, packageName?: string): DependencyGraph;
|
package/dist/graph.js
CHANGED
|
@@ -39,6 +39,10 @@ exports.refreshGraph = refreshGraph;
|
|
|
39
39
|
const path = __importStar(require("path"));
|
|
40
40
|
const plugin_1 = require("./languages/plugin");
|
|
41
41
|
const typescript_1 = require("./languages/typescript");
|
|
42
|
+
const scc_1 = require("./graph/scc");
|
|
43
|
+
const risk_scorer_1 = require("./scoring/risk-scorer");
|
|
44
|
+
const pagerank_1 = require("./scoring/pagerank");
|
|
45
|
+
const memo_cache_1 = require("./graph/memo-cache");
|
|
42
46
|
let cachedGraph = null;
|
|
43
47
|
function buildGraph(projectRoot, packageName) {
|
|
44
48
|
const detectedPlugins = (0, plugin_1.detectLanguages)(projectRoot);
|
|
@@ -85,8 +89,17 @@ function buildGraph(projectRoot, packageName) {
|
|
|
85
89
|
sourceDirs: allSourceDirs,
|
|
86
90
|
sourceDir,
|
|
87
91
|
};
|
|
92
|
+
// Invalidate memo cache (full rebuild means all cached BFS results are stale)
|
|
93
|
+
(0, memo_cache_1.resetMemoCache)();
|
|
94
|
+
// Compute SCC and attach to graph
|
|
95
|
+
const scc = (0, scc_1.computeSCC)(graph);
|
|
96
|
+
graph.scc = scc;
|
|
97
|
+
// Compute PageRank importance scores
|
|
98
|
+
(0, pagerank_1.invalidatePageRank)();
|
|
99
|
+
graph.pageRank = (0, pagerank_1.computePageRank)(graph);
|
|
100
|
+
const cyclicCount = scc.condensed.nodes.filter(n => n.isCyclic).length;
|
|
88
101
|
cachedGraph = graph;
|
|
89
|
-
console.error(`[syke] Graph built (${languages.join("+")}): ${files.size} files, ${countEdges(forward)} edges`);
|
|
102
|
+
console.error(`[syke] Graph built (${languages.join("+")}): ${files.size} files, ${countEdges(forward)} edges, ${scc.components.length} SCCs (${cyclicCount} cyclic)`);
|
|
90
103
|
return graph;
|
|
91
104
|
}
|
|
92
105
|
function countEdges(forward) {
|
|
@@ -105,5 +118,8 @@ function getGraph(projectRoot, packageName) {
|
|
|
105
118
|
/**
 * Discard the cached graph plus every derived cache and rebuild from scratch.
 *
 * @param projectRoot  Project root path forwarded to buildGraph.
 * @param packageName  Optional package filter forwarded to buildGraph.
 * @returns The freshly built DependencyGraph.
 */
function refreshGraph(projectRoot, packageName) {
    cachedGraph = null;
    (0, typescript_1.clearAliasCache)();
    // Derived artifacts were computed from the old graph; reset them so the
    // rebuild (and later queries) cannot observe stale data.
    (0, risk_scorer_1.invalidateProjectMetrics)();
    (0, pagerank_1.invalidatePageRank)();
    (0, memo_cache_1.resetMemoCache)();
    return buildGraph(projectRoot, packageName);
}
|