callgraph-mcp 1.3.0 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -21
- package/dist/index.js +7 -3
- package/dist/server.js +71 -0
- package/dist/tools/analyzeFile.js +109 -0
- package/dist/tools/analyzeWorkspace.js +95 -0
- package/dist/tools/findCycles.d.ts +2 -0
- package/dist/tools/findCycles.js +177 -0
- package/dist/tools/findDuplicates.d.ts +2 -0
- package/dist/tools/findDuplicates.js +254 -0
- package/dist/tools/findOrphans.js +100 -0
- package/dist/tools/getCallees.js +116 -0
- package/dist/tools/getCallers.js +117 -0
- package/dist/tools/getFlow.js +140 -0
- package/dist/tools/listEntryPoints.js +96 -0
- package/dist/utils/analysis.js +113 -0
- package/dist/utils/cache.js +28 -0
- package/dist/utils/fileDiscovery.js +94 -0
- package/dist/utils/formatGraph.js +38 -0
- package/dist/utils/toolHelper.js +11 -0
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -29,7 +29,7 @@ Most AI coding tools answer structural questions about your codebase by reading
|
|
|
29
29
|
|
|
30
30
|
## Setup
|
|
31
31
|
|
|
32
|
-
### Option 1 — VS Code
|
|
32
|
+
### Option 1 — VS Code via `npx` (no install required)
|
|
33
33
|
|
|
34
34
|
Add to your project's `.vscode/mcp.json`:
|
|
35
35
|
|
|
@@ -79,17 +79,21 @@ Then point your client at it:
|
|
|
79
79
|
|
|
80
80
|
## Tools Reference
|
|
81
81
|
|
|
82
|
-
|
|
83
|
-
|------|----------------|----------|-----------------|
|
|
84
|
-
| `flowmap_analyze_workspace` | `workspacePath` | `exclude`, `language` | Full call graph: all nodes, edges, flows, orphans |
|
|
85
|
-
| `flowmap_analyze_file` | `filePath` | — | Functions and call sites in a single file |
|
|
86
|
-
| `flowmap_get_callers` | `functionName`, `workspacePath` | — | Every function across the workspace that directly calls the named function |
|
|
87
|
-
| `flowmap_get_callees` | `functionName`, `workspacePath` | — | Every function the named function directly calls |
|
|
88
|
-
| `flowmap_get_flow` | `functionName`, `workspacePath` | `maxDepth` (default 10) | Full BFS subgraph reachable from a function — the complete execution path |
|
|
89
|
-
| `flowmap_list_entry_points` | `workspacePath` | — | All entry points: mains, route handlers, CLI commands, React roots |
|
|
90
|
-
| `flowmap_find_orphans` | `workspacePath` | — | Functions unreachable from any entry point — potential dead code |
|
|
82
|
+
Optional parameters shown in `[brackets]`.
|
|
91
83
|
|
|
92
|
-
|
|
84
|
+
| Tool | Parameters | Returns |
|
|
85
|
+
|------|-----------|---------|
|
|
86
|
+
| `flowmap_analyze_workspace` | `workspacePath`, [`exclude`], [`language`] | Full call graph: nodes, edges, flows, orphans |
|
|
87
|
+
| `flowmap_analyze_file` | `filePath` | Functions and call sites in one file |
|
|
88
|
+
| `flowmap_get_callers` | `functionName`, `workspacePath` | Direct callers of the function |
|
|
89
|
+
| `flowmap_get_callees` | `functionName`, `workspacePath` | Functions the named function calls |
|
|
90
|
+
| `flowmap_get_flow` | `functionName`, `workspacePath`, [`maxDepth`=10] | Full BFS subgraph reachable from a function |
|
|
91
|
+
| `flowmap_list_entry_points` | `workspacePath` | Mains, route handlers, CLI commands, React roots |
|
|
92
|
+
| `flowmap_find_orphans` | `workspacePath` | Functions unreachable from any entry point |
|
|
93
|
+
| `flowmap_find_cycles` | `workspacePath`, [`minCycleLength`], [`exclude`] | All circular call chains with exact edges |
|
|
94
|
+
| `flowmap_find_duplicates` *(experimental)* | `workspacePath`, [`similarityThreshold`=0.75], [`minCallees`=2], [`exclude`] | Function clusters with similar callee sets |
|
|
95
|
+
|
|
96
|
+
**`workspacePath`** — absolute path to the repo root (e.g. `/home/user/my-project` or `C:\projects\my-app`).
|
|
93
97
|
|
|
94
98
|
---
|
|
95
99
|
|
|
@@ -97,29 +101,29 @@ Then point your client at it:
|
|
|
97
101
|
|
|
98
102
|
| Variable | Default | Description |
|
|
99
103
|
|----------|---------|-------------|
|
|
100
|
-
| `FLOWMAP_TRANSPORT` | `stdio` |
|
|
101
|
-
| `FLOWMAP_PORT` | `3100` | HTTP
|
|
102
|
-
| `FLOWMAP_GRAMMARS` | *(bundled)* | Override path to
|
|
104
|
+
| `FLOWMAP_TRANSPORT` | `stdio` | `stdio` or `http` |
|
|
105
|
+
| `FLOWMAP_PORT` | `3100` | HTTP port (http transport only) |
|
|
106
|
+
| `FLOWMAP_GRAMMARS` | *(bundled)* | Override path to WASM grammar files |
|
|
107
|
+
| `FLOWMAP_DUP_THRESHOLD` | `0.75` | Jaccard similarity threshold for `find_duplicates` (0–1) |
|
|
108
|
+
| `FLOWMAP_DUP_MIN_CALLEES` | `2` | Min callee count for `find_duplicates` |
|
|
103
109
|
|
|
104
110
|
---
|
|
105
111
|
|
|
106
112
|
## Example Use Cases
|
|
107
113
|
|
|
108
|
-
These prompts work because the answers come from the call graph index — not from the model's memory of what your code might look like. Every result is exact, reproducible, and complete regardless of codebase size.
|
|
109
|
-
|
|
110
114
|
---
|
|
111
115
|
|
|
112
116
|
### PR review and change safety
|
|
113
117
|
|
|
114
118
|
> *"I just modified `processPayment`. Without reading any code, tell me every function that could break and rank them by how many hops away they are from the change."*
|
|
115
119
|
|
|
116
|
-
The agent calls `flowmap_get_callers("processPayment", workspacePath)` for the direct impact radius (1 hop), then recursively traverses callers-of-callers to build a ranked list by distance.
|
|
120
|
+
The agent calls `flowmap_get_callers("processPayment", workspacePath)` for the direct impact radius (1 hop), then recursively traverses callers-of-callers to build a ranked list by distance.
|
|
117
121
|
|
|
118
122
|
---
|
|
119
123
|
|
|
120
124
|
> *"We're about to merge a PR that touches `validateCart`. Give me an impact report — what's the worst case if this function throws."*
|
|
121
125
|
|
|
122
|
-
The agent calls `flowmap_get_flow("validateCart", workspacePath)` to map every function reachable downstream, then `flowmap_get_callers("validateCart", workspacePath)` to map every upstream caller.
|
|
126
|
+
The agent calls `flowmap_get_flow("validateCart", workspacePath)` to map every function reachable downstream, then `flowmap_get_callers("validateCart", workspacePath)` to map every upstream caller.
|
|
123
127
|
|
|
124
128
|
---
|
|
125
129
|
|
|
@@ -133,7 +137,7 @@ The agent calls `flowmap_analyze_workspace(workspacePath)` to get the full graph
|
|
|
133
137
|
|
|
134
138
|
> *"Find every cycle in the call graph. For each one tell me which file I should break the dependency in to resolve it cleanly."*
|
|
135
139
|
|
|
136
|
-
The agent calls `
|
|
140
|
+
The agent calls `flowmap_find_cycles(workspacePath)`. Each cycle is returned as an ordered list of functions with file paths and the exact call edges forming the loop — no post-processing needed. Because the graph is exact, cycle membership is exact — not a guess about which modules "seem" circular.
|
|
137
141
|
|
|
138
142
|
---
|
|
139
143
|
|
|
@@ -149,7 +153,7 @@ The agent calls `flowmap_find_orphans(workspacePath)`. This returns every functi
|
|
|
149
153
|
|
|
150
154
|
> *"I just joined this team. Walk me through this codebase starting from the entry points — explain each major flow in plain English without me having to read a single file."*
|
|
151
155
|
|
|
152
|
-
The agent calls `flowmap_list_entry_points(workspacePath)` to find every main, route handler, CLI command, and React root. Then it calls `flowmap_get_flow` on each one to trace the execution.
|
|
156
|
+
The agent calls `flowmap_list_entry_points(workspacePath)` to find every main, route handler, CLI command, and React root. Then it calls `flowmap_get_flow` on each one to trace the execution.
|
|
153
157
|
|
|
154
158
|
---
|
|
155
159
|
|
|
@@ -157,7 +161,7 @@ The agent calls `flowmap_list_entry_points(workspacePath)` to find every main, r
|
|
|
157
161
|
|
|
158
162
|
> *"I want to extract the payment logic into its own module. Based purely on call relationships, which functions naturally belong together and which ones would need to stay behind."*
|
|
159
163
|
|
|
160
|
-
The agent calls `flowmap_analyze_workspace(workspacePath)` and uses the graph to find the connected component of functions reachable from payment-related entry points.
|
|
164
|
+
The agent calls `flowmap_analyze_workspace(workspacePath)` and uses the graph to find the connected component of functions reachable from payment-related entry points.
|
|
161
165
|
|
|
162
166
|
---
|
|
163
167
|
|
|
@@ -184,6 +188,28 @@ When an agent is generating new code, it can call `flowmap_analyze_workspace` be
|
|
|
184
188
|
- No existing entry points were broken
|
|
185
189
|
- The intended call relationships were actually created
|
|
186
190
|
|
|
191
|
+
---
|
|
192
|
+
|
|
193
|
+
### Catching agent-introduced duplication before it compounds
|
|
194
|
+
|
|
195
|
+
> *"We've been using an AI agent to build this codebase for 3 months. How much logic has it silently duplicated?"*
|
|
196
|
+
|
|
197
|
+
Agents optimize for the current instruction, not long-term architecture. When a task requires shared logic, the agent rarely stops to ask "does this already exist?" — refactoring requires understanding dependencies and predicting side effects, which is harder than just writing new code that works. So it copies, tweaks slightly, and moves on. It satisfied the step. It made the problem worse.
|
|
198
|
+
|
|
199
|
+
The agent calls `flowmap_find_duplicates(workspacePath)`. Each cluster in the result is a group of functions with different names — often in different components — that call the same set of dependencies. That's the callee-set fingerprint of duplicated business logic. The output names exactly which functions to merge and which file to consolidate them into.
|
|
200
|
+
|
|
201
|
+
---
|
|
202
|
+
|
|
203
|
+
### Detecting circular dependencies introduced by agent-generated code
|
|
204
|
+
|
|
205
|
+
> *"The agent has been adding features for weeks. Are there any circular call dependencies I should know about before this becomes a production problem?"*
|
|
206
|
+
|
|
207
|
+
Agents don't track long-term call structure. When told to wire up a feature quickly, they take the fastest path: call whatever is needed, from wherever is convenient. Over time this creates circular dependencies — `A` calls `B` calls `C` calls `A` — that are invisible until something breaks or hangs at runtime.
|
|
208
|
+
|
|
209
|
+
The agent calls `flowmap_find_cycles(workspacePath)`. Every cycle is returned with the exact functions involved, their file locations, and the specific edges forming the loop. No guessing about which modules "seem" circular. The result tells you precisely where to break the chain.
|
|
210
|
+
|
|
211
|
+
---
|
|
212
|
+
|
|
187
213
|
## How It Works
|
|
188
214
|
|
|
189
215
|
1. Tree-sitter WASM grammars parse each source file into an AST — no runtime execution, no imports
|
package/dist/index.js
CHANGED
|
@@ -1,4 +1,8 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
"use strict";var me=Object.create;var P=Object.defineProperty;var ge=Object.getOwnPropertyDescriptor;var ue=Object.getOwnPropertyNames;var ye=Object.getPrototypeOf,he=Object.prototype.hasOwnProperty;var xe=(e,t,r,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of ue(t))!he.call(e,n)&&n!==r&&P(e,n,{get:()=>t[n],enumerable:!(o=ge(t,n))||o.enumerable});return e};var g=(e,t,r)=>(r=e!=null?me(ye(e)):{},xe(t||!e||!e.__esModule?P(r,"default",{value:e,enumerable:!0}):r,e));var ie=require("http"),ae=require("@modelcontextprotocol/sdk/server/mcp.js"),ce=require("@modelcontextprotocol/sdk/server/stdio.js"),le=require("@modelcontextprotocol/sdk/server/streamableHttp.js"),pe=require("crypto");var b=require("zod"),$=g(require("fs"));var v=g(require("path")),M=g(require("fs")),h=require("@codeflow-map/core");var D=new Map,Se=3e4;function U(e){let t=D.get(e);return t?Date.now()-t.cachedAt>Se?(D.delete(e),null):t.graph:null}function z(e,t){D.set(e,{graph:t,cachedAt:Date.now(),workspacePath:e})}var w=g(require("path")),I=g(require("fast-glob")),E=require("@codeflow-map/core"),_e=["**/node_modules/**","**/venv/**","**/.venv/**","**/__pycache__/**","**/vendor/**","**/target/**","**/.git/**","**/dist/**","**/build/**","**/.next/**","**/.turbo/**","**/coverage/**","**/.gradle/**","**/.cache/**","**/site-packages/**","**/.mypy_cache/**","**/.pytest_cache/**","**/out/**","**/bin/**","**/obj/**","**/tests/**","**/__tests__/**","**/spec/**","**/__specs__/**","**/test/**"];async function J(e,t={}){let{exclude:r=[],language:o}=t,n;if(o){let c=Object.entries(E.FILE_EXTENSION_MAP).filter(([,f])=>f===o).map(([f])=>f.replace(".",""));n=c.length>0?c:[]}else n=Object.keys(E.FILE_EXTENSION_MAP).map(c=>c.replace(".",""));if(n.length===0)return[];let s=n.length===1?`**/*.${n[0]}`:`**/*.{${n.join(",")}}`,a=[..._e,...r],l=e.replace(/\\/g,"/"),p=await(0,I.default)(s,{cwd:l,ignore:a,absolute:!1,dot:!1,onlyFiles:!0}),i=[];for(let c of p){let 
f=w.extname(c),x=E.FILE_EXTENSION_MAP[f];x&&i.push({filePath:c.replace(/\\/g,"/"),absPath:w.resolve(e,c),languageId:x})}return i}var G=50,W=!1;function R(){if(process.env.FLOWMAP_GRAMMARS)return process.env.FLOWMAP_GRAMMARS;let e=[v.resolve(__dirname,"..","grammars"),v.resolve(__dirname,"..","..","grammars")];for(let t of e)if(M.existsSync(v.join(t,"tree-sitter.wasm")))return t;return e[0]}async function ve(){if(!W){let e=R(),t=v.join(e,"tree-sitter.wasm"),r=M.existsSync(t);console.error(`[flowmap] Grammar directory: ${e} (tree-sitter.wasm ${r?"found":"missing"})`),await(0,h.initTreeSitter)(e),W=!0}}async function u(e,t={}){let r=U(e);if(r)return r;await ve();let o=R(),n=Date.now(),s=await J(e,t),a=[],l=[],p=0;for(let S=0;S<s.length;S+=G){let y=s.slice(S,S+G),_=await Promise.all(y.map(m=>(0,h.parseFile)(m.filePath,m.absPath,o,m.languageId).catch(()=>null)));for(let m of _)m&&(a.push(...m.functions),l.push(...m.calls),p++)}let i=(0,h.buildCallGraph)(a,l);(0,h.detectEntryPoints)(a,i);let{flows:c,orphans:f}=(0,h.partitionFlows)(a,i),x={nodes:a,edges:i,flows:c,orphans:f,scannedFiles:p,durationMs:Date.now()-n};return z(e,x),x}function d(e,t,r,o,n){e.tool(t,r,o,n)}var Oe=["typescript","javascript","python","java","go","rust","tsx","jsx"],Ne=["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"];function j(e){d(e,"flowmap_analyze_workspace","Scan an entire codebase and return a full call graph \u2014 all functions, their parameters, and all call relationships between them. Use this first when exploring an unfamiliar codebase.",{workspacePath:b.z.string().describe("Absolute path to the repository root"),exclude:b.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*"),language:b.z.string().optional().describe("Filter to a single language: typescript, javascript, python, java, go, rust, tsx, jsx. 
Omit to scan all.")},async({workspacePath:t,exclude:r,language:o})=>{try{if(!$.existsSync(t))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${t}`,workspacePath:t})}]};let n=r?r.split(",").map(l=>l.trim()).filter(Boolean):Ne,s=o&&Oe.includes(o)?o:void 0,a=await u(t,{exclude:n,language:s});return{content:[{type:"text",text:JSON.stringify(a)}]}}catch(n){let s=n instanceof Error?n.message:String(n);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:s,workspacePath:t})}]}}})}var H=require("zod"),X=g(require("fs")),T=g(require("path")),O=require("@codeflow-map/core");var K=!1;function k(e){d(e,"flowmap_analyze_file","Scan a single file and return all functions defined in it, their parameters, and calls made within the file.",{filePath:H.z.string().describe("Absolute path to the file to analyse")},async({filePath:t})=>{try{if(!X.existsSync(t))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FILE_NOT_FOUND",message:`File does not exist: ${t}`})}]};let r=T.extname(t),o=O.FILE_EXTENSION_MAP[r];if(!o)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"UNSUPPORTED_LANGUAGE",message:`Unsupported file extension: ${r}`})}]};let n=R();K||(await(0,O.initTreeSitter)(n),K=!0);let s=Date.now(),a=T.basename(t),l=await(0,O.parseFile)(a,t,n,o);return{content:[{type:"text",text:JSON.stringify({filePath:a,functions:l.functions,calls:l.calls,durationMs:Date.now()-s})}]}}catch(r){let o=r instanceof Error?r.message:String(r);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:o})}]}}})}var C=require("zod"),B=g(require("fs"));function Z(e){d(e,"flowmap_get_callers","Return all functions that directly call the named function. 
Use this for impact analysis \u2014 to understand what breaks if you change a function's signature.",{functionName:C.z.string().describe("The function name to find callers of"),workspacePath:C.z.string().describe("Absolute path to the repository root")},async({functionName:t,workspacePath:r})=>{try{if(!B.existsSync(r))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${r}`,workspacePath:r})}]};let o=await u(r),n=o.nodes.filter(i=>i.name===t);if(n.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${t}" found in the codebase.`,workspacePath:r})}]};let s=n[0],a=new Set(n.map(i=>i.id)),p=o.edges.filter(i=>a.has(i.to)).map(i=>{let c=o.nodes.find(f=>f.id===i.from);return{id:i.from,name:c?.name??"unknown",filePath:c?.filePath??"unknown",startLine:c?.startLine??0,callLine:i.line}});return{content:[{type:"text",text:JSON.stringify({target:t,targetId:s.id,callers:p,count:p.length})}]}}catch(o){let n=o instanceof Error?o.message:String(o);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:n,workspacePath:r})}]}}})}var L=require("zod"),V=g(require("fs"));function Y(e){d(e,"flowmap_get_callees","Return all functions directly called by the named function. 
Use this to understand what a function depends on.",{functionName:L.z.string().describe("The function name to find callees of"),workspacePath:L.z.string().describe("Absolute path to the repository root")},async({functionName:t,workspacePath:r})=>{try{if(!V.existsSync(r))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${r}`,workspacePath:r})}]};let o=await u(r),n=o.nodes.filter(i=>i.name===t);if(n.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${t}" found in the codebase.`,workspacePath:r})}]};let s=n[0],a=new Set(n.map(i=>i.id)),p=o.edges.filter(i=>a.has(i.from)).map(i=>{let c=o.nodes.find(f=>f.id===i.to);return{id:i.to,name:c?.name??"unknown",filePath:c?.filePath??"unknown",startLine:c?.startLine??0,callLine:i.line}});return{content:[{type:"text",text:JSON.stringify({target:t,targetId:s.id,callees:p,count:p.length})}]}}catch(o){let n=o instanceof Error?o.message:String(o);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:n,workspacePath:r})}]}}})}var F=require("zod"),q=g(require("fs"));function Q(e){d(e,"flowmap_get_flow","Return the complete sub-graph reachable from a given function \u2014 every function it calls, every function those call, and so on recursively. Use this to understand the full execution path of a feature or entry point.",{functionName:F.z.string().describe("The starting function name"),workspacePath:F.z.string().describe("Absolute path to the repository root"),maxDepth:F.z.number().optional().describe("Maximum recursion depth. 
Default 10.")},async({functionName:t,workspacePath:r,maxDepth:o})=>{let n=o??10;try{if(!q.existsSync(r))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${r}`,workspacePath:r})}]};let s=await u(r),a=s.nodes.filter(y=>y.name===t);if(a.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${t}" found in the codebase.`,workspacePath:r})}]};let l=a[0],p=new Map;for(let y of s.edges){let _=p.get(y.from)||[],m=s.nodes.find(A=>A.id===y.to);m&&(_.push({edge:y,node:m}),p.set(y.from,_))}let i=new Set,c=[],f=[],x=0,S=[l.id];for(i.add(l.id),c.push(l);S.length>0&&x<n;){let y=[];for(let _ of S){let m=p.get(_)||[];for(let{edge:A,node:N}of m)f.push(A),i.has(N.id)||(i.add(N.id),c.push(N),y.push(N.id))}S=y,x++}return{content:[{type:"text",text:JSON.stringify({entryFunction:t,nodes:c,edges:f,depth:x,totalFunctions:c.length})}]}}catch(s){let a=s instanceof Error?s.message:String(s);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:a,workspacePath:r})}]}}})}var ee=require("zod"),te=g(require("fs"));function re(e){d(e,"flowmap_list_entry_points","Return all detected entry points in the codebase \u2014 main functions, HTTP route handlers, React root renders, CLI commands, etc. 
Always call this first when exploring a new codebase to understand where execution begins.",{workspacePath:ee.z.string().describe("Absolute path to the repository root")},async({workspacePath:t})=>{try{if(!te.existsSync(t))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${t}`,workspacePath:t})}]};let r=await u(t),n=r.nodes.filter(s=>s.isEntryPoint).map(s=>({id:s.id,name:s.name,filePath:s.filePath,startLine:s.startLine,language:s.language,isExported:s.isExported,isAsync:s.isAsync}));return{content:[{type:"text",text:JSON.stringify({entryPoints:n,count:n.length,durationMs:r.durationMs})}]}}catch(r){let o=r instanceof Error?r.message:String(r);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:o,workspacePath:t})}]}}})}var ne=require("zod"),oe=g(require("fs"));function se(e){d(e,"flowmap_find_orphans","Return all functions that are never called from any entry point \u2014 potential dead code. 
Use this during refactoring to identify code that can safely be removed.",{workspacePath:ne.z.string().describe("Absolute path to the repository root")},async({workspacePath:t})=>{try{if(!oe.existsSync(t))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${t}`,workspacePath:t})}]};let r=await u(t),o=r.orphans.map(n=>{let s=r.nodes.find(a=>a.id===n);return s?{id:s.id,name:s.name,filePath:s.filePath,startLine:s.startLine,language:s.language,isExported:s.isExported}:{id:n,name:"unknown",filePath:"unknown",startLine:0}});return{content:[{type:"text",text:JSON.stringify({orphans:o,count:o.length,durationMs:r.durationMs,note:"Exported functions may be used by external consumers \u2014 verify before deleting."})}]}}catch(r){let o=r instanceof Error?r.message:String(r);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:o,workspacePath:t})}]}}})}function fe(){let e=new ae.McpServer({name:"callgraph-mcp",version:"1.0.0"});return Ee(e),e}function Ee(e){j(e),k(e),Z(e),Y(e),Q(e),re(e),se(e)}async function de(){let e=(process.env.FLOWMAP_TRANSPORT||"stdio").toLowerCase();e==="http"||e==="sse"?await Re():await we()}async function we(){let e=fe(),t=new ce.StdioServerTransport;await e.connect(t)}async function Re(){let e=parseInt(process.env.FLOWMAP_PORT||"3100",10),t=fe(),r=new le.StreamableHTTPServerTransport({sessionIdGenerator:()=>(0,pe.randomUUID)()}),o=(0,ie.createServer)(async(n,s)=>{let a=n.url||"/";a==="/mcp"||a==="/"?await r.handleRequest(n,s):s.writeHead(404).end("Not Found")});await t.connect(r),o.listen(e,()=>{process.stderr.write(`FlowMap MCP server listening on http://localhost:${e}/mcp
|
|
3
|
-
|
|
4
|
-
|
|
2
|
+
"use strict";
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
const server_1 = require("./server");
|
|
5
|
+
(0, server_1.startServer)().catch((err) => {
|
|
6
|
+
process.stderr.write(`FlowMap MCP server failed to start: ${err}\n`);
|
|
7
|
+
process.exit(1);
|
|
8
|
+
});
|
package/dist/server.js
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createMcpServer = createMcpServer;
|
|
4
|
+
exports.startServer = startServer;
|
|
5
|
+
const http_1 = require("http");
|
|
6
|
+
const mcp_js_1 = require("@modelcontextprotocol/sdk/server/mcp.js");
|
|
7
|
+
const stdio_js_1 = require("@modelcontextprotocol/sdk/server/stdio.js");
|
|
8
|
+
const streamableHttp_js_1 = require("@modelcontextprotocol/sdk/server/streamableHttp.js");
|
|
9
|
+
const crypto_1 = require("crypto");
|
|
10
|
+
const analyzeWorkspace_1 = require("./tools/analyzeWorkspace");
|
|
11
|
+
const analyzeFile_1 = require("./tools/analyzeFile");
|
|
12
|
+
const getCallers_1 = require("./tools/getCallers");
|
|
13
|
+
const getCallees_1 = require("./tools/getCallees");
|
|
14
|
+
const getFlow_1 = require("./tools/getFlow");
|
|
15
|
+
const listEntryPoints_1 = require("./tools/listEntryPoints");
|
|
16
|
+
const findOrphans_1 = require("./tools/findOrphans");
|
|
17
|
+
const findCycles_1 = require("./tools/findCycles");
|
|
18
|
+
const findDuplicates_1 = require("./tools/findDuplicates");
|
|
19
|
+
function createMcpServer() {
|
|
20
|
+
const server = new mcp_js_1.McpServer({
|
|
21
|
+
name: 'callgraph-mcp',
|
|
22
|
+
version: '1.0.0',
|
|
23
|
+
});
|
|
24
|
+
registerTools(server);
|
|
25
|
+
return server;
|
|
26
|
+
}
|
|
27
|
+
function registerTools(server) {
|
|
28
|
+
(0, analyzeWorkspace_1.registerAnalyzeWorkspace)(server);
|
|
29
|
+
(0, analyzeFile_1.registerAnalyzeFile)(server);
|
|
30
|
+
(0, getCallers_1.registerGetCallers)(server);
|
|
31
|
+
(0, getCallees_1.registerGetCallees)(server);
|
|
32
|
+
(0, getFlow_1.registerGetFlow)(server);
|
|
33
|
+
(0, listEntryPoints_1.registerListEntryPoints)(server);
|
|
34
|
+
(0, findOrphans_1.registerFindOrphans)(server);
|
|
35
|
+
(0, findCycles_1.registerFindCycles)(server);
|
|
36
|
+
(0, findDuplicates_1.registerFindDuplicates)(server);
|
|
37
|
+
}
|
|
38
|
+
async function startServer() {
|
|
39
|
+
const mode = (process.env.FLOWMAP_TRANSPORT || 'stdio').toLowerCase();
|
|
40
|
+
if (mode === 'http' || mode === 'sse') {
|
|
41
|
+
await startHttpServer();
|
|
42
|
+
}
|
|
43
|
+
else {
|
|
44
|
+
await startStdioServer();
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
async function startStdioServer() {
|
|
48
|
+
const server = createMcpServer();
|
|
49
|
+
const transport = new stdio_js_1.StdioServerTransport();
|
|
50
|
+
await server.connect(transport);
|
|
51
|
+
}
|
|
52
|
+
async function startHttpServer() {
|
|
53
|
+
const port = parseInt(process.env.FLOWMAP_PORT || '3100', 10);
|
|
54
|
+
const server = createMcpServer();
|
|
55
|
+
const transport = new streamableHttp_js_1.StreamableHTTPServerTransport({
|
|
56
|
+
sessionIdGenerator: () => (0, crypto_1.randomUUID)(),
|
|
57
|
+
});
|
|
58
|
+
const httpServer = (0, http_1.createServer)(async (req, res) => {
|
|
59
|
+
const url = req.url || '/';
|
|
60
|
+
if (url === '/mcp' || url === '/') {
|
|
61
|
+
await transport.handleRequest(req, res);
|
|
62
|
+
}
|
|
63
|
+
else {
|
|
64
|
+
res.writeHead(404).end('Not Found');
|
|
65
|
+
}
|
|
66
|
+
});
|
|
67
|
+
await server.connect(transport);
|
|
68
|
+
httpServer.listen(port, () => {
|
|
69
|
+
process.stderr.write(`FlowMap MCP server listening on http://localhost:${port}/mcp\n`);
|
|
70
|
+
});
|
|
71
|
+
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.registerAnalyzeFile = registerAnalyzeFile;
|
|
37
|
+
const zod_1 = require("zod");
|
|
38
|
+
const fs = __importStar(require("fs"));
|
|
39
|
+
const path = __importStar(require("path"));
|
|
40
|
+
const core_1 = require("@codeflow-map/core");
|
|
41
|
+
const analysis_1 = require("../utils/analysis");
|
|
42
|
+
const toolHelper_1 = require("../utils/toolHelper");
|
|
43
|
+
let treeSitterInitialized = false;
|
|
44
|
+
function registerAnalyzeFile(server) {
|
|
45
|
+
(0, toolHelper_1.registerTool)(server, 'flowmap_analyze_file', 'Scan a single file and return all functions defined in it, their parameters, and calls made within the file.', {
|
|
46
|
+
filePath: zod_1.z.string().describe('Absolute path to the file to analyse'),
|
|
47
|
+
}, async ({ filePath: absolutePath }) => {
|
|
48
|
+
try {
|
|
49
|
+
if (!fs.existsSync(absolutePath)) {
|
|
50
|
+
return {
|
|
51
|
+
content: [{
|
|
52
|
+
type: 'text',
|
|
53
|
+
text: JSON.stringify({
|
|
54
|
+
error: true,
|
|
55
|
+
code: 'FILE_NOT_FOUND',
|
|
56
|
+
message: `File does not exist: ${absolutePath}`,
|
|
57
|
+
}),
|
|
58
|
+
}],
|
|
59
|
+
};
|
|
60
|
+
}
|
|
61
|
+
const ext = path.extname(absolutePath);
|
|
62
|
+
const languageId = core_1.FILE_EXTENSION_MAP[ext];
|
|
63
|
+
if (!languageId) {
|
|
64
|
+
return {
|
|
65
|
+
content: [{
|
|
66
|
+
type: 'text',
|
|
67
|
+
text: JSON.stringify({
|
|
68
|
+
error: true,
|
|
69
|
+
code: 'UNSUPPORTED_LANGUAGE',
|
|
70
|
+
message: `Unsupported file extension: ${ext}`,
|
|
71
|
+
}),
|
|
72
|
+
}],
|
|
73
|
+
};
|
|
74
|
+
}
|
|
75
|
+
const wasmDir = (0, analysis_1.resolveWasmDir)();
|
|
76
|
+
if (!treeSitterInitialized) {
|
|
77
|
+
await (0, core_1.initTreeSitter)(wasmDir);
|
|
78
|
+
treeSitterInitialized = true;
|
|
79
|
+
}
|
|
80
|
+
const startTime = Date.now();
|
|
81
|
+
const relativePath = path.basename(absolutePath);
|
|
82
|
+
const result = await (0, core_1.parseFile)(relativePath, absolutePath, wasmDir, languageId);
|
|
83
|
+
return {
|
|
84
|
+
content: [{
|
|
85
|
+
type: 'text',
|
|
86
|
+
text: JSON.stringify({
|
|
87
|
+
filePath: relativePath,
|
|
88
|
+
functions: result.functions,
|
|
89
|
+
calls: result.calls,
|
|
90
|
+
durationMs: Date.now() - startTime,
|
|
91
|
+
}),
|
|
92
|
+
}],
|
|
93
|
+
};
|
|
94
|
+
}
|
|
95
|
+
catch (err) {
|
|
96
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
97
|
+
return {
|
|
98
|
+
content: [{
|
|
99
|
+
type: 'text',
|
|
100
|
+
text: JSON.stringify({
|
|
101
|
+
error: true,
|
|
102
|
+
code: 'PARSE_ERROR',
|
|
103
|
+
message,
|
|
104
|
+
}),
|
|
105
|
+
}],
|
|
106
|
+
};
|
|
107
|
+
}
|
|
108
|
+
});
|
|
109
|
+
}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.registerAnalyzeWorkspace = registerAnalyzeWorkspace;
|
|
37
|
+
const zod_1 = require("zod");
|
|
38
|
+
const fs = __importStar(require("fs"));
|
|
39
|
+
const analysis_1 = require("../utils/analysis");
|
|
40
|
+
const toolHelper_1 = require("../utils/toolHelper");
|
|
41
|
+
const SUPPORTED_LANGUAGES = ['typescript', 'javascript', 'python', 'java', 'go', 'rust', 'tsx', 'jsx'];
|
|
42
|
+
const DEFAULT_EXCLUDES = ['node_modules', 'dist', '.git', '__pycache__', '*.test.*', '*.spec.*'];
|
|
43
|
+
/**
 * Register the `flowmap_analyze_workspace` MCP tool on the given server.
 *
 * The tool scans a repository with the analysis core and returns the
 * resulting call graph as one JSON-encoded text content block. Failures
 * (missing directory, parse errors) are reported as JSON payloads carrying
 * an `error` flag and a `code`, never thrown to the caller.
 *
 * @param server - MCP server instance passed through to `registerTool`.
 */
function registerAnalyzeWorkspace(server) {
    // Wrap any payload in the MCP text-content envelope.
    const asTextContent = (payload) => ({
        content: [{
                type: 'text',
                text: JSON.stringify(payload),
            }],
    });
    (0, toolHelper_1.registerTool)(server, 'flowmap_analyze_workspace', 'Scan an entire codebase and return a full call graph — all functions, their parameters, and all call relationships between them. Use this first when exploring an unfamiliar codebase.', {
        workspacePath: zod_1.z.string().describe('Absolute path to the repository root'),
        exclude: zod_1.z.string().optional().describe('Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*'),
        language: zod_1.z.string().optional().describe('Filter to a single language: typescript, javascript, python, java, go, rust, tsx, jsx. Omit to scan all.'),
    }, async ({ workspacePath, exclude, language }) => {
        try {
            // Fail fast when the workspace directory does not exist.
            if (!fs.existsSync(workspacePath)) {
                return asTextContent({
                    error: true,
                    code: 'WORKSPACE_NOT_FOUND',
                    message: `Directory does not exist: ${workspacePath}`,
                    workspacePath,
                });
            }
            // User-supplied excludes replace (not extend) the defaults.
            const excludeList = exclude
                ? exclude.split(',').map(s => s.trim()).filter(Boolean)
                : DEFAULT_EXCLUDES;
            // Unknown language names are silently ignored → scan everything.
            const lang = language && SUPPORTED_LANGUAGES.includes(language)
                ? language
                : undefined;
            const graph = await (0, analysis_1.analyzeWorkspace)(workspacePath, {
                exclude: excludeList,
                language: lang,
            });
            return asTextContent(graph);
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            return asTextContent({
                error: true,
                code: 'PARSE_ERROR',
                message,
                workspacePath,
            });
        }
    });
}
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.registerFindCycles = registerFindCycles;
|
|
37
|
+
const zod_1 = require("zod");
|
|
38
|
+
const fs = __importStar(require("fs"));
|
|
39
|
+
const analysis_1 = require("../utils/analysis");
|
|
40
|
+
const toolHelper_1 = require("../utils/toolHelper");
|
|
41
|
+
/**
 * Tarjan's strongly-connected-components algorithm — returns groups of node
 * IDs that form cycles in the directed call graph.
 *
 * Implemented with an explicit work stack instead of recursion so that deep
 * call chains in large workspaces cannot overflow the JS call stack — the
 * previous recursive `strongConnect` threw `RangeError: Maximum call stack
 * size exceeded` on graphs with long paths.
 *
 * @param {string[]} nodeIds - IDs of every node in the graph.
 * @param {{from: string, to: string}[]} edges - Directed call edges; edges
 *   touching nodes not present in `nodeIds` are ignored.
 * @returns {string[][]} SCCs in reverse topological order. Single-node
 *   components are included even without a self-edge (callers filter those).
 */
function findStronglyConnectedComponents(nodeIds, edges) {
    const index = new Map();   // discovery order of each visited node
    const lowlink = new Map(); // smallest index reachable from the node
    const onStack = new Set(); // nodes currently on the Tarjan stack
    const stack = [];
    const sccs = [];
    let counter = 0;
    // Build adjacency list, dropping edges whose endpoints are unknown.
    const adj = new Map();
    for (const id of nodeIds)
        adj.set(id, []);
    for (const e of edges) {
        if (adj.has(e.from) && adj.has(e.to)) {
            adj.get(e.from).push(e.to);
        }
    }
    for (const root of nodeIds) {
        if (index.has(root))
            continue;
        // Each frame is [node, nextNeighborIndex]; simulates the recursion.
        const work = [[root, 0]];
        while (work.length > 0) {
            const frame = work[work.length - 1];
            const v = frame[0];
            if (frame[1] === 0) {
                // First visit of v.
                index.set(v, counter);
                lowlink.set(v, counter);
                counter++;
                stack.push(v);
                onStack.add(v);
            }
            const neighbors = adj.get(v) ?? [];
            let descended = false;
            while (frame[1] < neighbors.length) {
                const w = neighbors[frame[1]];
                frame[1]++;
                if (!index.has(w)) {
                    work.push([w, 0]); // "recurse" into w
                    descended = true;
                    break;
                }
                if (onStack.has(w)) {
                    lowlink.set(v, Math.min(lowlink.get(v), index.get(w)));
                }
            }
            if (descended)
                continue;
            // All neighbors of v processed: retire the frame and propagate
            // v's lowlink to its DFS parent (the recursive post-call step).
            work.pop();
            if (work.length > 0) {
                const parent = work[work.length - 1][0];
                lowlink.set(parent, Math.min(lowlink.get(parent), lowlink.get(v)));
            }
            if (lowlink.get(v) === index.get(v)) {
                // v is an SCC root — pop its whole component off the stack.
                const scc = [];
                let w;
                do {
                    w = stack.pop();
                    onStack.delete(w);
                    scc.push(w);
                } while (w !== v);
                sccs.push(scc);
            }
        }
    }
    return sccs;
}
|
|
90
|
+
/**
 * Collect the call edges that stay entirely inside one cycle.
 *
 * @param {string[]} cycle - Node IDs belonging to a single SCC.
 * @param {{from: string, to: string, line: number}[]} edges - All graph edges.
 * @returns {{from: string, to: string, line: number}[]} Edges whose endpoints
 *   are both cycle members, reduced to their from/to/line fields.
 */
function describeCycleEdges(cycle, edges) {
    const members = new Set(cycle);
    const internal = [];
    for (const edge of edges) {
        if (members.has(edge.from) && members.has(edge.to)) {
            internal.push({ from: edge.from, to: edge.to, line: edge.line });
        }
    }
    return internal;
}
|
|
96
|
+
/**
 * Register the `flowmap_find_cycles` MCP tool on the given server.
 *
 * The tool analyses the workspace, runs Tarjan's SCC over the call graph and
 * reports every cycle (self-recursion counts when minCycleLength <= 1). Both
 * success and failure are returned as a single JSON-encoded text content
 * block; errors carry an `error` flag and a `code` instead of being thrown.
 *
 * @param server - MCP server instance passed through to `registerTool`.
 */
function registerFindCycles(server) {
    // Wrap any payload in the MCP text-content envelope.
    const asTextContent = (payload) => ({
        content: [{
                type: 'text',
                text: JSON.stringify(payload),
            }],
    });
    (0, toolHelper_1.registerTool)(server, 'flowmap_find_cycles', 'Detect all call cycles (circular dependencies / mutual recursion) in the codebase. Returns each cycle as an ordered list of functions that call each other in a loop, along with the exact call edges forming the cycle. Use this to identify architectural problems, infinite-recursion risks, or tightly coupled modules.', {
        workspacePath: zod_1.z.string().describe('Absolute path to the repository root'),
        minCycleLength: zod_1.z.number().int().min(1).optional().describe('Minimum number of functions in a cycle to report (default: 1, includes self-recursion)'),
        exclude: zod_1.z.string().optional().describe('Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*'),
    }, async ({ workspacePath, minCycleLength = 1, exclude }) => {
        try {
            // Fail fast when the workspace directory does not exist.
            if (!fs.existsSync(workspacePath)) {
                return asTextContent({
                    error: true,
                    code: 'WORKSPACE_NOT_FOUND',
                    message: `Directory does not exist: ${workspacePath}`,
                    workspacePath,
                });
            }
            const DEFAULT_EXCLUDES = ['node_modules', 'dist', '.git', '__pycache__', '*.test.*', '*.spec.*'];
            const excludeList = exclude
                ? exclude.split(',').map(s => s.trim()).filter(Boolean)
                : DEFAULT_EXCLUDES;
            const graph = await (0, analysis_1.analyzeWorkspace)(workspacePath, { exclude: excludeList });
            const nodeIds = graph.nodes.map(n => n.id);
            const components = findStronglyConnectedComponents(nodeIds, graph.edges);
            // A self-loop counts as a cycle of length 1.
            const selfLoopIds = new Set(graph.edges.filter(e => e.from === e.to).map(e => e.from));
            const cyclicComponents = components.filter(component => {
                if (component.length > 1)
                    return component.length >= minCycleLength;
                // Single-node SCC — only a cycle if there is a self-edge.
                return minCycleLength <= 1 && selfLoopIds.has(component[0]);
            });
            const nodeById = new Map(graph.nodes.map(n => [n.id, n]));
            const cycles = cyclicComponents.map((component, position) => ({
                cycleIndex: position + 1,
                length: component.length,
                members: component.map(id => {
                    const node = nodeById.get(id);
                    if (!node) {
                        // Edge referenced a node the scanner never recorded.
                        return { id, name: 'unknown', filePath: 'unknown', startLine: 0, language: 'unknown' };
                    }
                    return { id, name: node.name, filePath: node.filePath, startLine: node.startLine, language: node.language };
                }),
                edges: describeCycleEdges(component, graph.edges),
            }));
            const note = cycles.length === 0
                ? 'No cycles detected — the call graph is acyclic.'
                : `${cycles.length} cycle(s) found. Cycles involving many functions or cross-module calls are the highest priority to review.`;
            return asTextContent({
                cycles,
                totalCycles: cycles.length,
                durationMs: graph.durationMs,
                scannedFiles: graph.scannedFiles,
                note,
            });
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            return asTextContent({
                error: true,
                code: 'PARSE_ERROR',
                message,
                workspacePath,
            });
        }
    });
}
|