callgraph-mcp 1.7.1 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -127,6 +127,8 @@ Optional parameters shown in `[brackets]`.
127
127
  | `FLOWMAP_TRANSPORT` | `stdio` | `stdio` or `http` |
128
128
  | `FLOWMAP_PORT` | `3100` | HTTP port (http transport only) |
129
129
  | `FLOWMAP_GRAMMARS` | *(bundled)* | Override path to WASM grammar files |
130
+ | `FLOWMAP_BATCH_SIZE` | `50` | Files per parallel parsing batch (must be ≥ 1) |
131
+ | `FLOWMAP_CACHE_TTL_MS` | `30000` | Result cache time-to-live in milliseconds (0 to disable) |
130
132
  | `FLOWMAP_DUP_THRESHOLD` | `0.75` | Jaccard similarity threshold for `find_duplicates` (0–1) |
131
133
  | `FLOWMAP_DUP_MIN_CALLEES` | `2` | Min callee count for `find_duplicates` |
132
134
 
@@ -221,6 +223,10 @@ The agent calls `flowmap_find_cycles(workspacePath)`. Every cycle is returned wi
221
223
 
222
224
  ---
223
225
 
226
+ ![Duplicates](https://raw.githubusercontent.com/devricky-codes/callgraph-mcp/refs/heads/main/assets/duplicates.gif)
227
+
228
+ *Duplicates flags distinct functions that could be combined into a single reusable function*
229
+
224
230
  ## How It Works
225
231
 
226
232
  1. Tree-sitter WASM grammars parse each source file into an AST — no runtime execution, no imports
package/dist/index.js CHANGED
@@ -1,4 +1,7 @@
1
1
  #!/usr/bin/env node
2
- "use strict";var Te=Object.create;var q=Object.defineProperty;var Pe=Object.getOwnPropertyDescriptor;var Ue=Object.getOwnPropertyNames;var Ie=Object.getPrototypeOf,ze=Object.prototype.hasOwnProperty;var We=(t,e,n,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of Ue(e))!ze.call(t,s)&&s!==n&&q(t,s,{get:()=>e[s],enumerable:!(r=Pe(e,s))||r.enumerable});return t};var O=(t,e,n)=>(n=t!=null?Te(Ie(t)):{},We(e||!t||!t.__esModule?q(n,"default",{value:t,enumerable:!0}):n,t));var be=require("http"),Me=require("@modelcontextprotocol/sdk/server/mcp.js"),Fe=require("@modelcontextprotocol/sdk/server/stdio.js"),De=require("@modelcontextprotocol/sdk/server/streamableHttp.js"),Le=require("crypto");var J=require("zod"),re=O(require("fs"));var D=O(require("path")),K=O(require("fs")),N=require("@codeflow-map/core");var k=new Map,Je=3e4;function V(t){let e=k.get(t);return e?Date.now()-e.cachedAt>Je?(k.delete(t),null):e.graph:null}function Y(t,e){k.set(t,{graph:e,cachedAt:Date.now(),workspacePath:t})}var z=O(require("path")),Q=O(require("fast-glob")),I=require("@codeflow-map/core"),$e=["**/node_modules/**","**/venv/**","**/.venv/**","**/__pycache__/**","**/vendor/**","**/target/**","**/.git/**","**/dist/**","**/build/**","**/.next/**","**/.turbo/**","**/coverage/**","**/.gradle/**","**/.cache/**","**/site-packages/**","**/.mypy_cache/**","**/.pytest_cache/**","**/out/**","**/bin/**","**/obj/**","**/tests/**","**/__tests__/**","**/spec/**","**/__specs__/**","**/test/**"];async function ee(t,e={}){let{exclude:n=[],language:r}=e,s;if(r){let i=Object.entries(I.FILE_EXTENSION_MAP).filter(([,p])=>p===r).map(([p])=>p.replace(".",""));s=i.length>0?i:[]}else s=Object.keys(I.FILE_EXTENSION_MAP).map(i=>i.replace(".",""));if(s.length===0)return[];let o=s.length===1?`**/*.${s[0]}`:`**/*.{${s.join(",")}}`,c=[...$e,...n],m=t.replace(/\\/g,"/"),g=await(0,Q.default)(o,{cwd:m,ignore:c,absolute:!1,dot:!1,onlyFiles:!0}),a=[];for(let i of g){let 
p=z.extname(i),f=I.FILE_EXTENSION_MAP[p];f&&a.push({filePath:i.replace(/\\/g,"/"),absPath:z.resolve(t,i),languageId:f})}return a}var te=50,ne=!1;function W(){if(process.env.FLOWMAP_GRAMMARS)return process.env.FLOWMAP_GRAMMARS;let t=[D.resolve(__dirname,"..","grammars"),D.resolve(__dirname,"..","..","grammars")];for(let e of t)if(K.existsSync(D.join(e,"tree-sitter.wasm")))return e;return t[0]}async function Ge(){if(!ne){let t=W(),e=D.join(t,"tree-sitter.wasm"),n=K.existsSync(e);console.error(`[flowmap] Grammar directory: ${t} (tree-sitter.wasm ${n?"found":"missing"})`),await(0,N.initTreeSitter)(t),ne=!0}}async function _(t,e={}){let n=V(t);if(n)return n;await Ge();let r=W(),s=Date.now(),o=await ee(t,e),c=[],m=[],g=0;for(let d=0;d<o.length;d+=te){let u=o.slice(d,d+te),E=await Promise.all(u.map(S=>(0,N.parseFile)(S.filePath,S.absPath,r,S.languageId).catch(()=>null)));for(let S of E)S&&(c.push(...S.functions),m.push(...S.calls),g++)}let a=(0,N.buildCallGraph)(c,m);(0,N.detectEntryPoints)(c,a);let{flows:i,orphans:p}=(0,N.partitionFlows)(c,a),f={nodes:c,edges:a,flows:i,orphans:p,scannedFiles:g,durationMs:Date.now()-s};return Y(t,f),f}function y(t,e,n,r,s){t.tool(e,n,r,s)}var je=["typescript","javascript","python","java","go","rust","tsx","jsx"],He=["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"];function se(t){y(t,"flowmap_analyze_workspace","Scan an entire codebase and return a full call graph \u2014 all functions, their parameters, and all call relationships between them. Use this first when exploring an unfamiliar codebase.",{workspacePath:J.z.string().describe("Absolute path to the repository root"),exclude:J.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*"),language:J.z.string().optional().describe("Filter to a single language: typescript, javascript, python, java, go, rust, tsx, jsx. 
Omit to scan all.")},async({workspacePath:e,exclude:n,language:r})=>{try{if(!re.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let s=n?n.split(",").map(m=>m.trim()).filter(Boolean):He,o=r&&je.includes(r)?r:void 0,c=await _(e,{exclude:s,language:o});return{content:[{type:"text",text:JSON.stringify(c)}]}}catch(s){let o=s instanceof Error?s.message:String(s);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:o,workspacePath:e})}]}}})}var ie=require("zod"),ae=O(require("fs")),$=O(require("path")),L=require("@codeflow-map/core");var oe=!1;function ce(t){y(t,"flowmap_analyze_file","Scan a single file and return all functions defined in it, their parameters, and calls made within the file.",{filePath:ie.z.string().describe("Absolute path to the file to analyse")},async({filePath:e})=>{try{if(!ae.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FILE_NOT_FOUND",message:`File does not exist: ${e}`})}]};let n=$.extname(e),r=L.FILE_EXTENSION_MAP[n];if(!r)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"UNSUPPORTED_LANGUAGE",message:`Unsupported file extension: ${n}`})}]};let s=W();oe||(await(0,L.initTreeSitter)(s),oe=!0);let o=Date.now(),c=$.basename(e),m=await(0,L.parseFile)(c,e,s,r);return{content:[{type:"text",text:JSON.stringify({filePath:c,functions:m.functions,calls:m.calls,durationMs:Date.now()-o})}]}}catch(n){let r=n instanceof Error?n.message:String(n);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:r})}]}}})}var X=require("zod"),le=O(require("fs"));function de(t){y(t,"flowmap_get_callers","Return all functions that directly call the named function. 
Use this for impact analysis \u2014 to understand what breaks if you change a function's signature.",{functionName:X.z.string().describe("The function name to find callers of"),workspacePath:X.z.string().describe("Absolute path to the repository root")},async({functionName:e,workspacePath:n})=>{try{if(!le.existsSync(n))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${n}`,workspacePath:n})}]};let r=await _(n),s=r.nodes.filter(a=>a.name===e);if(s.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${e}" found in the codebase.`,workspacePath:n})}]};let o=s[0],c=new Set(s.map(a=>a.id)),g=r.edges.filter(a=>c.has(a.to)).map(a=>{let i=r.nodes.find(p=>p.id===a.from);return{id:a.from,name:i?.name??"unknown",filePath:i?.filePath??"unknown",startLine:i?.startLine??0,callLine:a.line}});return{content:[{type:"text",text:JSON.stringify({target:e,targetId:o.id,callers:g,count:g.length})}]}}catch(r){let s=r instanceof Error?r.message:String(r);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:s,workspacePath:n})}]}}})}var B=require("zod"),fe=O(require("fs"));function pe(t){y(t,"flowmap_get_callees","Return all functions directly called by the named function. 
Use this to understand what a function depends on.",{functionName:B.z.string().describe("The function name to find callees of"),workspacePath:B.z.string().describe("Absolute path to the repository root")},async({functionName:e,workspacePath:n})=>{try{if(!fe.existsSync(n))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${n}`,workspacePath:n})}]};let r=await _(n),s=r.nodes.filter(a=>a.name===e);if(s.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${e}" found in the codebase.`,workspacePath:n})}]};let o=s[0],c=new Set(s.map(a=>a.id)),g=r.edges.filter(a=>c.has(a.from)).map(a=>{let i=r.nodes.find(p=>p.id===a.to);return{id:a.to,name:i?.name??"unknown",filePath:i?.filePath??"unknown",startLine:i?.startLine??0,callLine:a.line}});return{content:[{type:"text",text:JSON.stringify({target:e,targetId:o.id,callees:g,count:g.length})}]}}catch(r){let s=r instanceof Error?r.message:String(r);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:s,workspacePath:n})}]}}})}var G=require("zod"),me=O(require("fs"));function ge(t){y(t,"flowmap_get_flow","Return the complete sub-graph reachable from a given function \u2014 every function it calls, every function those call, and so on recursively. Use this to understand the full execution path of a feature or entry point.",{functionName:G.z.string().describe("The starting function name"),workspacePath:G.z.string().describe("Absolute path to the repository root"),maxDepth:G.z.number().optional().describe("Maximum recursion depth. 
Default 10.")},async({functionName:e,workspacePath:n,maxDepth:r})=>{let s=r??10;try{if(!me.existsSync(n))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${n}`,workspacePath:n})}]};let o=await _(n),c=o.nodes.filter(u=>u.name===e);if(c.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${e}" found in the codebase.`,workspacePath:n})}]};let m=c[0],g=new Map;for(let u of o.edges){let E=g.get(u.from)||[],S=o.nodes.find(b=>b.id===u.to);S&&(E.push({edge:u,node:S}),g.set(u.from,E))}let a=new Set,i=[],p=[],f=0,d=[m.id];for(a.add(m.id),i.push(m);d.length>0&&f<s;){let u=[];for(let E of d){let S=g.get(E)||[];for(let{edge:b,node:h}of S)p.push(b),a.has(h.id)||(a.add(h.id),i.push(h),u.push(h.id))}d=u,f++}return{content:[{type:"text",text:JSON.stringify({entryFunction:e,nodes:i,edges:p,depth:f,totalFunctions:i.length})}]}}catch(o){let c=o instanceof Error?o.message:String(o);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:c,workspacePath:n})}]}}})}var ue=require("zod"),he=O(require("fs"));function ye(t){y(t,"flowmap_list_entry_points","Return all detected entry points in the codebase \u2014 main functions, HTTP route handlers, React root renders, CLI commands, etc. 
Always call this first when exploring a new codebase to understand where execution begins.",{workspacePath:ue.z.string().describe("Absolute path to the repository root")},async({workspacePath:e})=>{try{if(!he.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let n=await _(e),s=n.nodes.filter(o=>o.isEntryPoint).map(o=>({id:o.id,name:o.name,filePath:o.filePath,startLine:o.startLine,language:o.language,isExported:o.isExported,isAsync:o.isAsync}));return{content:[{type:"text",text:JSON.stringify({entryPoints:s,count:s.length,durationMs:n.durationMs})}]}}catch(n){let r=n instanceof Error?n.message:String(n);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:r,workspacePath:e})}]}}})}var Se=require("zod"),xe=O(require("fs"));function _e(t){y(t,"flowmap_find_orphans","Return all functions that are never called from any entry point \u2014 potential dead code. 
Use this during refactoring to identify code that can safely be removed.",{workspacePath:Se.z.string().describe("Absolute path to the repository root")},async({workspacePath:e})=>{try{if(!xe.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let n=await _(e),r=n.orphans.map(s=>{let o=n.nodes.find(c=>c.id===s);return o?{id:o.id,name:o.name,filePath:o.filePath,startLine:o.startLine,language:o.language,isExported:o.isExported}:{id:s,name:"unknown",filePath:"unknown",startLine:0}});return{content:[{type:"text",text:JSON.stringify({orphans:r,count:r.length,durationMs:n.durationMs,note:"Exported functions may be used by external consumers \u2014 verify before deleting."})}]}}catch(n){let r=n instanceof Error?n.message:String(n);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:r,workspacePath:e})}]}}})}var j=require("zod"),ve=O(require("fs"));function ke(t,e){let n=new Map,r=new Map,s=new Map,o=[],c=[],m=0,g=new Map;for(let i of t)g.set(i,[]);for(let i of e)g.has(i.from)&&g.has(i.to)&&g.get(i.from).push(i.to);function a(i){n.set(i,m),r.set(i,m),m++,o.push(i),s.set(i,!0);for(let p of g.get(i)??[])n.has(p)?s.get(p)&&r.set(i,Math.min(r.get(i),n.get(p))):(a(p),r.set(i,Math.min(r.get(i),r.get(p))));if(r.get(i)===n.get(i)){let p=[],f;do f=o.pop(),s.set(f,!1),p.push(f);while(f!==i);c.push(p)}}for(let i of t)n.has(i)||a(i);return c}function Ke(t,e){let n=new Set(t);return e.filter(r=>n.has(r.from)&&n.has(r.to)).map(r=>({from:r.from,to:r.to,line:r.line}))}function Oe(t){y(t,"flowmap_find_cycles","Detect all call cycles (circular dependencies / mutual recursion) in the codebase. Returns each cycle as an ordered list of functions that call each other in a loop, along with the exact call edges forming the cycle. 
Use this to identify architectural problems, infinite-recursion risks, or tightly coupled modules.",{workspacePath:j.z.string().describe("Absolute path to the repository root"),minCycleLength:j.z.number().int().min(1).optional().describe("Minimum number of functions in a cycle to report (default: 1, includes self-recursion)"),exclude:j.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*")},async({workspacePath:e,minCycleLength:n=1,exclude:r})=>{try{if(!ve.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let o=r?r.split(",").map(d=>d.trim()).filter(Boolean):["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"],c=await _(e,{exclude:o}),m=c.nodes.map(d=>d.id),g=ke(m,c.edges),a=new Set(c.edges.filter(d=>d.from===d.to).map(d=>d.from)),i=g.filter(d=>d.length>1?d.length>=n:n<=1&&a.has(d[0])),p=new Map(c.nodes.map(d=>[d.id,d])),f=i.map((d,u)=>{let E=d.map(b=>{let h=p.get(b);return h?{id:b,name:h.name,filePath:h.filePath,startLine:h.startLine,language:h.language}:{id:b,name:"unknown",filePath:"unknown",startLine:0,language:"unknown"}}),S=Ke(d,c.edges);return{cycleIndex:u+1,length:d.length,members:E,edges:S}});return{content:[{type:"text",text:JSON.stringify({cycles:f,totalCycles:f.length,durationMs:c.durationMs,scannedFiles:c.scannedFiles,note:f.length===0?"No cycles detected \u2014 the call graph is acyclic.":`${f.length} cycle(s) found. 
Cycles involving many functions or cross-module calls are the highest priority to review.`})}]}}catch(s){let o=s instanceof Error?s.message:String(s);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:o,workspacePath:e})}]}}})}var T=require("zod"),we=O(require("fs"));function Ee(t,e){if(t.size===0&&e.size===0)return 1;let n=0;for(let s of t)e.has(s)&&n++;let r=t.size+e.size-n;return r===0?0:n/r}var Z=class{parent=new Map;find(e){return this.parent.get(e)!==e&&this.parent.set(e,this.find(this.parent.get(e))),this.parent.get(e)}union(e,n){this.parent.has(e)||this.parent.set(e,e),this.parent.has(n)||this.parent.set(n,n);let r=this.find(e),s=this.find(n);r!==s&&this.parent.set(r,s)}init(e){this.parent.has(e)||this.parent.set(e,e)}clusters(){let e=new Map;for(let n of this.parent.keys()){let r=this.find(n);e.has(r)||e.set(r,[]),e.get(r).push(n)}return e}};function Xe(){let t=parseFloat(process.env.FLOWMAP_DUP_THRESHOLD??"");return isFinite(t)&&t>=0&&t<=1?t:.75}function Be(){let t=parseInt(process.env.FLOWMAP_DUP_MIN_CALLEES??"",10);return isFinite(t)&&t>=1?t:2}function Ne(t){y(t,"flowmap_find_duplicates","Identify functionally duplicate functions \u2014 different names, potentially in different files or components, but calling the same set of dependencies (same business logic). Uses callee-set Jaccard similarity: two functions are flagged as duplicates when the overlap of what they call exceeds the similarity threshold. Results are grouped into clusters so you can see when 3+ functions are all doing the same thing. Use this to find refactoring opportunities and candidates for a shared utility. Default thresholds can be tuned via FLOWMAP_DUP_THRESHOLD and FLOWMAP_DUP_MIN_CALLEES environment variables.",{workspacePath:T.z.string().describe("Absolute path to the repository root"),similarityThreshold:T.z.number().min(0).max(1).optional().describe("Jaccard similarity threshold (0\u20131). Default: 0.75 (or FLOWMAP_DUP_THRESHOLD env var). 
Lower = more matches, higher = stricter. 1.0 = identical callee sets."),minCallees:T.z.number().int().min(1).optional().describe("Minimum number of distinct callees a function must have to be considered. Default: 2 (or FLOWMAP_DUP_MIN_CALLEES env var). Raising this avoids matching trivial one-liner wrappers."),exclude:T.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*")},async({workspacePath:e,similarityThreshold:n,minCallees:r,exclude:s})=>{let o=n??Xe(),c=r??Be();try{if(!we.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let g=s?s.split(",").map(l=>l.trim()).filter(Boolean):["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"],a=await _(e,{exclude:g}),i=o,p=c,f=new Map,d=new Map(a.nodes.map(l=>[l.id,l]));for(let l of a.nodes)f.set(l.id,new Set);for(let l of a.edges){if(l.from===l.to)continue;let F=d.get(l.to)?.name??l.to;f.get(l.from)?.add(F)}let u=a.nodes.filter(l=>(f.get(l.id)?.size??0)>=p),E=new Z;for(let l of u)E.init(l.id);let S=new Map;for(let l=0;l<u.length;l++){let x=u[l],F=f.get(x.id);for(let A=l+1;A<u.length;A++){let M=u[A];if(x.name===M.name&&x.filePath===M.filePath)continue;let C=f.get(M.id),R=Ee(F,C);if(R>=i){E.union(x.id,M.id);let H=[x.id,M.id].sort().join("|||");S.set(H,R)}}}let b=E.clusters(),h=[],Re=1;for(let[,l]of b){if(l.length<2)continue;let x=l.map(v=>{let w=d.get(v),P=[...f.get(v)??[]].sort();return{id:v,name:w?.name??"unknown",filePath:w?.filePath??"unknown",startLine:w?.startLine??0,language:w?.language??"unknown",calleeCount:P.length,callees:P}}),F=l.map(v=>f.get(v)),A=[...F[0]].filter(v=>F.every(w=>w.has(v))).sort(),M=1,C=0;for(let v=0;v<l.length;v++)for(let w=v+1;w<l.length;w++){let P=[l[v],l[w]].sort().join("|||"),U=S.get(P)??Ee(f.get(l[v]),f.get(l[w]));U<M&&(M=U),U>C&&(C=U)}let R=new 
Set(x.map(v=>v.filePath)).size,H=R>1?`These ${x.length} functions across ${R} files share the same core logic. Consider extracting a shared utility that accepts parameters for any behavioural differences.`:`These ${x.length} functions in the same file appear to duplicate logic. Consider merging them or extracting a private helper.`;h.push({clusterIndex:Re++,size:x.length,members:x,sharedCallees:A,minSimilarity:Math.round(M*100)/100,maxSimilarity:Math.round(C*100)/100,suggestion:H})}return h.sort((l,x)=>x.size-l.size||x.sharedCallees.length-l.sharedCallees.length),{content:[{type:"text",text:JSON.stringify({duplicateClusters:h,totalClusters:h.length,totalFunctionsInvolved:h.reduce((l,x)=>l+x.size,0),parameters:{similarityThreshold:i,minCallees:p,envOverrides:{FLOWMAP_DUP_THRESHOLD:process.env.FLOWMAP_DUP_THRESHOLD??null,FLOWMAP_DUP_MIN_CALLEES:process.env.FLOWMAP_DUP_MIN_CALLEES??null}},durationMs:a.durationMs,scannedFiles:a.scannedFiles,note:h.length===0?"No functionally duplicate functions detected at the current threshold. Try lowering similarityThreshold or minCallees.":`${h.length} duplicate cluster(s) found. 
Each cluster is a group of functions that call the same logical dependencies and are candidates for generalisation.`})}]}}catch(m){let g=m instanceof Error?m.message:String(m);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:g,workspacePath:e})}]}}})}function Ae(){let t=new Me.McpServer({name:"callgraph-mcp",version:"1.0.0"});return Ze(t),t}function Ze(t){se(t),ce(t),de(t),pe(t),ge(t),ye(t),_e(t),Oe(t),Ne(t)}async function Ce(){let t=(process.env.FLOWMAP_TRANSPORT||"stdio").toLowerCase();t==="http"||t==="sse"?await Ve():await qe()}async function qe(){let t=Ae(),e=new Fe.StdioServerTransport;await t.connect(e)}async function Ve(){let t=parseInt(process.env.FLOWMAP_PORT||"3100",10),e=Ae(),n=new De.StreamableHTTPServerTransport({sessionIdGenerator:()=>(0,Le.randomUUID)()}),r=(0,be.createServer)(async(s,o)=>{let c=s.url||"/";c==="/mcp"||c==="/"?await n.handleRequest(s,o):o.writeHead(404).end("Not Found")});await e.connect(n),r.listen(t,()=>{process.stderr.write(`FlowMap MCP server listening on http://localhost:${t}/mcp
3
- `)})}Ce().catch(t=>{process.stderr.write(`FlowMap MCP server failed to start: ${t}
2
+ "use strict";var Ue=Object.create;var Y=Object.defineProperty;var Ie=Object.getOwnPropertyDescriptor;var ze=Object.getOwnPropertyNames;var We=Object.getPrototypeOf,Je=Object.prototype.hasOwnProperty;var $e=(t,e,r,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of ze(e))!Je.call(t,n)&&n!==r&&Y(t,n,{get:()=>e[n],enumerable:!(s=Ie(e,n))||s.enumerable});return t};var O=(t,e,r)=>(r=t!=null?Ue(We(t)):{},$e(e||!t||!t.__esModule?Y(r,"default",{value:t,enumerable:!0}):r,t));var Te=require("http"),Ne=require("@modelcontextprotocol/sdk/server/mcp.js"),Ae=require("@modelcontextprotocol/sdk/server/stdio.js"),Fe=require("@modelcontextprotocol/sdk/server/streamableHttp.js"),De=require("crypto");var k=require("zod"),oe=O(require("fs"));var F=O(require("path")),K=O(require("fs")),b=require("@codeflow-map/core");var V=new Map;function ke(){let t=process.env.FLOWMAP_CACHE_TTL_MS;if(!t)return 3e4;let e=parseInt(t,10);return!isFinite(e)||e<0?(process.stderr.write(`[flowmap] Invalid FLOWMAP_CACHE_TTL_MS: "${t}" (must be non-negative integer). Using default 30000ms.
3
+ `),3e4):e}var Ge=ke();function Q(t){let e=V.get(t);return e?Date.now()-e.cachedAt>Ge?(V.delete(t),null):e.graph:null}function ee(t,e){V.set(t,{graph:e,cachedAt:Date.now(),workspacePath:t})}var J=O(require("path")),te=O(require("fast-glob")),W=require("@codeflow-map/core"),He=["**/node_modules/**","**/venv/**","**/.venv/**","**/__pycache__/**","**/vendor/**","**/target/**","**/.git/**","**/dist/**","**/build/**","**/.next/**","**/.turbo/**","**/coverage/**","**/.gradle/**","**/.cache/**","**/site-packages/**","**/.mypy_cache/**","**/.pytest_cache/**","**/out/**","**/bin/**","**/obj/**","**/tests/**","**/__tests__/**","**/spec/**","**/__specs__/**","**/test/**"];async function re(t,e={}){let{exclude:r=[],language:s}=e,n;if(s){let o=Object.entries(W.FILE_EXTENSION_MAP).filter(([,p])=>p===s).map(([p])=>p.replace(".",""));n=o.length>0?o:[]}else n=Object.keys(W.FILE_EXTENSION_MAP).map(o=>o.replace(".",""));if(n.length===0)return[];let a=n.length===1?`**/*.${n[0]}`:`**/*.{${n.join(",")}}`,i=[...He,...r],l=t.replace(/\\/g,"/"),g=await(0,te.default)(a,{cwd:l,ignore:i,absolute:!1,dot:!1,onlyFiles:!0}),d=[];for(let o of g){let p=J.extname(o),f=W.FILE_EXTENSION_MAP[p];f&&d.push({filePath:o.replace(/\\/g,"/"),absPath:J.resolve(t,o),languageId:f})}return d}function je(){let t=process.env.FLOWMAP_BATCH_SIZE;if(!t)return 50;let e=parseInt(t,10);return!isFinite(e)||e<1?(process.stderr.write(`[flowmap] Invalid FLOWMAP_BATCH_SIZE: "${t}" (must be positive integer). Using default 50.
4
+ `),50):e}var se=je(),ne=!1;function $(){if(process.env.FLOWMAP_GRAMMARS)return process.env.FLOWMAP_GRAMMARS;let t=[F.resolve(__dirname,"..","grammars"),F.resolve(__dirname,"..","..","grammars")];for(let e of t)if(K.existsSync(F.join(e,"tree-sitter.wasm")))return e;return t[0]}async function Be(){if(!ne){let t=$(),e=F.join(t,"tree-sitter.wasm"),r=K.existsSync(e);console.error(`[flowmap] Grammar directory: ${t} (tree-sitter.wasm ${r?"found":"missing"})`),await(0,b.initTreeSitter)(t),ne=!0}}async function v(t,e={}){let r=Q(t);if(r)return r;await Be();let s=$(),n=Date.now(),a=await re(t,e),i=[],l=[],g=0;for(let u=0;u<a.length;u+=se){let m=a.slice(u,u+se),y=await Promise.all(m.map(S=>(0,b.parseFile)(S.filePath,S.absPath,s,S.languageId).catch(()=>null)));for(let S of y)S&&(i.push(...S.functions),l.push(...S.calls),g++)}let d=(0,b.buildCallGraph)(i,l);(0,b.detectEntryPoints)(i,d);let{flows:o,orphans:p}=(0,b.partitionFlows)(i,d),f={nodes:i,edges:d,flows:o,orphans:p,scannedFiles:g,durationMs:Date.now()-n};return ee(t,f),f}function x(t,e,r,s,n){t.tool(e,r,s,n)}var h=class{constructor(e){this.toolName=e;this.logToStderr(`[${e}] Starting analysis...`)}steps=[];startTime=Date.now();stageStartTime=Date.now();currentStep=0;reportProgress(e){let r=Date.now(),s=r-this.stageStartTime;this.currentStep++,this.steps.push({step:this.currentStep,stage:e,timestamp:r,durationMs:s}),this.logToStderr(`[${this.toolName}] Step ${this.currentStep}: ${e} (${s}ms)`),this.stageStartTime=r}getProgress(){return this.steps}getTotalDurationMs(){return Date.now()-this.startTime}getSummary(){let e=this.getTotalDurationMs();return`${this.steps.length} steps in ${e}ms`}logToStderr(e){process.stderr.write(e+`
5
+ `)}};var Ve=["typescript","javascript","python","java","go","rust","tsx","jsx"],Ke=["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"];function ie(t){x(t,"flowmap_analyze_workspace","Scan an entire codebase and return a full call graph \u2014 all functions, their parameters, and all call relationships between them. Use this first when exploring an unfamiliar codebase.",{workspacePath:k.z.string().describe("Absolute path to the repository root"),exclude:k.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*"),language:k.z.string().optional().describe("Filter to a single language: typescript, javascript, python, java, go, rust, tsx, jsx. Omit to scan all.")},async({workspacePath:e,exclude:r,language:s})=>{let n=new h("flowmap_analyze_workspace");try{if(n.reportProgress("Validating workspace path"),!oe.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let a=r?r.split(",").map(g=>g.trim()).filter(Boolean):Ke,i=s&&Ve.includes(s)?s:void 0;n.reportProgress("Starting codebase analysis");let l=await v(e,{exclude:a,language:i});return n.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({...l,progress:{steps:n.getProgress(),summary:n.getSummary()}})}]}}catch(a){let i=a instanceof Error?a.message:String(a);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:i,workspacePath:e})}]}}})}var ce=require("zod"),le=O(require("fs")),G=O(require("path")),D=require("@codeflow-map/core");var ae=!1;function pe(t){x(t,"flowmap_analyze_file","Scan a single file and return all functions defined in it, their parameters, and calls made within the file.",{filePath:ce.z.string().describe("Absolute path to the file to analyse")},async({filePath:e})=>{let r=new h("flowmap_analyze_file");try{if(r.reportProgress("Validating file 
path"),!le.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FILE_NOT_FOUND",message:`File does not exist: ${e}`})}]};let s=G.extname(e),n=D.FILE_EXTENSION_MAP[s];if(!n)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"UNSUPPORTED_LANGUAGE",message:`Unsupported file extension: ${s}`})}]};let a=$();ae||(r.reportProgress("Initializing TreeSitter"),await(0,D.initTreeSitter)(a),ae=!0);let i=Date.now(),l=G.basename(e);r.reportProgress("Parsing file");let g=await(0,D.parseFile)(l,e,a,n);return r.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({filePath:l,functions:g.functions,calls:g.calls,durationMs:Date.now()-i,progress:{steps:r.getProgress(),summary:r.getSummary()}})}]}}catch(s){let n=s instanceof Error?s.message:String(s);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:n})}]}}})}var X=require("zod"),ge=O(require("fs"));function me(t){x(t,"flowmap_get_callers","Return all functions that directly call the named function. 
Use this for impact analysis \u2014 to understand what breaks if you change a function's signature.",{functionName:X.z.string().describe("The function name to find callers of"),workspacePath:X.z.string().describe("Absolute path to the repository root")},async({functionName:e,workspacePath:r})=>{let s=new h("flowmap_get_callers");try{if(s.reportProgress("Validating workspace path"),!ge.existsSync(r))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${r}`,workspacePath:r})}]};s.reportProgress("Building call graph");let n=await v(r);s.reportProgress("Searching for target function");let a=n.nodes.filter(o=>o.name===e);if(a.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${e}" found in the codebase.`,workspacePath:r})}]};let i=a[0],l=new Set(a.map(o=>o.id));s.reportProgress("Filtering callers");let d=n.edges.filter(o=>l.has(o.to)).map(o=>{let p=n.nodes.find(f=>f.id===o.from);return{id:o.from,name:p?.name??"unknown",filePath:p?.filePath??"unknown",startLine:p?.startLine??0,callLine:o.line}});return s.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({target:e,targetId:i.id,callers:d,count:d.length,progress:{steps:s.getProgress(),summary:s.getSummary()}})}]}}catch(n){let a=n instanceof Error?n.message:String(n);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:a,workspacePath:r})}]}}})}var Z=require("zod"),de=O(require("fs"));function ue(t){x(t,"flowmap_get_callees","Return all functions directly called by the named function. 
Use this to understand what a function depends on.",{functionName:Z.z.string().describe("The function name to find callees of"),workspacePath:Z.z.string().describe("Absolute path to the repository root")},async({functionName:e,workspacePath:r})=>{let s=new h("flowmap_get_callees");try{if(s.reportProgress("Validating workspace path"),!de.existsSync(r))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${r}`,workspacePath:r})}]};s.reportProgress("Building call graph");let n=await v(r);s.reportProgress("Searching for target function");let a=n.nodes.filter(o=>o.name===e);if(a.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${e}" found in the codebase.`,workspacePath:r})}]};let i=a[0],l=new Set(a.map(o=>o.id));s.reportProgress("Filtering callees");let d=n.edges.filter(o=>l.has(o.from)).map(o=>{let p=n.nodes.find(f=>f.id===o.to);return{id:o.to,name:p?.name??"unknown",filePath:p?.filePath??"unknown",startLine:p?.startLine??0,callLine:o.line}});return s.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({target:e,targetId:i.id,callees:d,count:d.length,progress:{steps:s.getProgress(),summary:s.getSummary()}})}]}}catch(n){let a=n instanceof Error?n.message:String(n);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:a,workspacePath:r})}]}}})}var H=require("zod"),fe=O(require("fs"));function he(t){x(t,"flowmap_get_flow","Return the complete sub-graph reachable from a given function \u2014 every function it calls, every function those call, and so on recursively. Use this to understand the full execution path of a feature or entry point.",{functionName:H.z.string().describe("The starting function name"),workspacePath:H.z.string().describe("Absolute path to the repository root"),maxDepth:H.z.number().optional().describe("Maximum recursion depth. 
Default 10.")},async({functionName:e,workspacePath:r,maxDepth:s})=>{let n=new h("flowmap_get_flow"),a=s??10;try{if(n.reportProgress("Validating workspace path"),!fe.existsSync(r))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${r}`,workspacePath:r})}]};n.reportProgress("Building call graph");let i=await v(r);n.reportProgress("Locating start function");let l=i.nodes.filter(y=>y.name===e);if(l.length===0)return{content:[{type:"text",text:JSON.stringify({error:!0,code:"FUNCTION_NOT_FOUND",message:`No function named "${e}" found in the codebase.`,workspacePath:r})}]};let g=l[0];n.reportProgress("Tracing call flow");let d=new Map;for(let y of i.edges){let S=d.get(y.from)||[],M=i.nodes.find(T=>T.id===y.to);M&&(S.push({edge:y,node:M}),d.set(y.from,S))}let o=new Set,p=[],f=[],u=0,m=[g.id];for(o.add(g.id),p.push(g);m.length>0&&u<a;){let y=[];for(let S of m){let M=d.get(S)||[];for(let{edge:T,node:_}of M)f.push(T),o.has(_.id)||(o.add(_.id),p.push(_),y.push(_.id))}m=y,u++}return n.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({entryFunction:e,nodes:p,edges:f,depth:u,totalFunctions:p.length,progress:{steps:n.getProgress(),summary:n.getSummary()}})}]}}catch(i){let l=i instanceof Error?i.message:String(i);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:l,workspacePath:r})}]}}})}var ye=require("zod"),Se=O(require("fs"));function _e(t){x(t,"flowmap_list_entry_points","Return all detected entry points in the codebase \u2014 main functions, HTTP route handlers, React root renders, CLI commands, etc. 
Always call this first when exploring a new codebase to understand where execution begins.",{workspacePath:ye.z.string().describe("Absolute path to the repository root")},async({workspacePath:e})=>{let r=new h("flowmap_list_entry_points");try{if(r.reportProgress("Validating workspace path"),!Se.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};r.reportProgress("Building call graph");let s=await v(e);r.reportProgress("Filtering entry points");let a=s.nodes.filter(i=>i.isEntryPoint).map(i=>({id:i.id,name:i.name,filePath:i.filePath,startLine:i.startLine,language:i.language,isExported:i.isExported,isAsync:i.isAsync}));return r.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({entryPoints:a,count:a.length,durationMs:s.durationMs,progress:{steps:r.getProgress(),summary:r.getSummary()}})}]}}catch(s){let n=s instanceof Error?s.message:String(s);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:n,workspacePath:e})}]}}})}var xe=require("zod"),we=O(require("fs"));function ve(t){x(t,"flowmap_find_orphans","Return all functions that are never called from any entry point \u2014 potential dead code. 
Use this during refactoring to identify code that can safely be removed.",{workspacePath:xe.z.string().describe("Absolute path to the repository root")},async({workspacePath:e})=>{let r=new h("flowmap_find_orphans");try{if(r.reportProgress("Validating workspace path"),!we.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};r.reportProgress("Building call graph");let s=await v(e);r.reportProgress("Identifying orphan functions");let n=s.orphans.map(a=>{let i=s.nodes.find(l=>l.id===a);return i?{id:i.id,name:i.name,filePath:i.filePath,startLine:i.startLine,language:i.language,isExported:i.isExported}:{id:a,name:"unknown",filePath:"unknown",startLine:0}});return r.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({orphans:n,count:n.length,durationMs:s.durationMs,note:"Exported functions may be used by external consumers \u2014 verify before deleting.",progress:{steps:r.getProgress(),summary:r.getSummary()}})}]}}catch(s){let n=s instanceof Error?s.message:String(s);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:n,workspacePath:e})}]}}})}var j=require("zod"),Pe=O(require("fs"));function Xe(t,e){let r=new Map,s=new Map,n=new Map,a=[],i=[],l=0,g=new Map;for(let o of t)g.set(o,[]);for(let o of e)g.has(o.from)&&g.has(o.to)&&g.get(o.from).push(o.to);function d(o){r.set(o,l),s.set(o,l),l++,a.push(o),n.set(o,!0);for(let p of g.get(o)??[])r.has(p)?n.get(p)&&s.set(o,Math.min(s.get(o),r.get(p))):(d(p),s.set(o,Math.min(s.get(o),s.get(p))));if(s.get(o)===r.get(o)){let p=[],f;do f=a.pop(),n.set(f,!1),p.push(f);while(f!==o);i.push(p)}}for(let o of t)r.has(o)||d(o);return i}function Ze(t,e){let r=new Set(t);return e.filter(s=>r.has(s.from)&&r.has(s.to)).map(s=>({from:s.from,to:s.to,line:s.line}))}function Oe(t){x(t,"flowmap_find_cycles","Detect all call cycles (circular dependencies / mutual recursion) in the 
codebase. Returns each cycle as an ordered list of functions that call each other in a loop, along with the exact call edges forming the cycle. Use this to identify architectural problems, infinite-recursion risks, or tightly coupled modules.",{workspacePath:j.z.string().describe("Absolute path to the repository root"),minCycleLength:j.z.number().int().min(1).optional().describe("Minimum number of functions in a cycle to report (default: 1, includes self-recursion)"),exclude:j.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*")},async({workspacePath:e,minCycleLength:r=1,exclude:s})=>{let n=new h("flowmap_find_cycles");try{if(n.reportProgress("Validating workspace path"),!Pe.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let i=s?s.split(",").map(m=>m.trim()).filter(Boolean):["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"];n.reportProgress("Building call graph");let l=await v(e,{exclude:i});n.reportProgress("Detecting cycle patterns");let g=l.nodes.map(m=>m.id),d=Xe(g,l.edges),o=new Set(l.edges.filter(m=>m.from===m.to).map(m=>m.from)),p=d.filter(m=>m.length>1?m.length>=r:r<=1&&o.has(m[0])),f=new Map(l.nodes.map(m=>[m.id,m]));n.reportProgress("Building cycle details");let u=p.map((m,y)=>{let S=m.map(T=>{let _=f.get(T);return _?{id:T,name:_.name,filePath:_.filePath,startLine:_.startLine,language:_.language}:{id:T,name:"unknown",filePath:"unknown",startLine:0,language:"unknown"}}),M=Ze(m,l.edges);return{cycleIndex:y+1,length:m.length,members:S,edges:M}});return n.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({cycles:u,totalCycles:u.length,durationMs:l.durationMs,scannedFiles:l.scannedFiles,progress:{steps:n.getProgress(),summary:n.getSummary()},note:u.length===0?"No cycles detected \u2014 the call graph is 
acyclic.":`${u.length} cycle(s) found. Cycles involving many functions or cross-module calls are the highest priority to review.`})}]}}catch(a){let i=a instanceof Error?a.message:String(a);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:i,workspacePath:e})}]}}})}var U=require("zod"),be=O(require("fs"));function Ee(t,e){if(t.size===0&&e.size===0)return 1;let r=0;for(let n of t)e.has(n)&&r++;let s=t.size+e.size-r;return s===0?0:r/s}var q=class{parent=new Map;find(e){return this.parent.get(e)!==e&&this.parent.set(e,this.find(this.parent.get(e))),this.parent.get(e)}union(e,r){this.parent.has(e)||this.parent.set(e,e),this.parent.has(r)||this.parent.set(r,r);let s=this.find(e),n=this.find(r);s!==n&&this.parent.set(s,n)}init(e){this.parent.has(e)||this.parent.set(e,e)}clusters(){let e=new Map;for(let r of this.parent.keys()){let s=this.find(r);e.has(s)||e.set(s,[]),e.get(s).push(r)}return e}};function qe(){let t=parseFloat(process.env.FLOWMAP_DUP_THRESHOLD??"");return isFinite(t)&&t>=0&&t<=1?t:.75}function Ye(){let t=parseInt(process.env.FLOWMAP_DUP_MIN_CALLEES??"",10);return isFinite(t)&&t>=1?t:2}function Me(t){x(t,"flowmap_find_duplicates","Identify functionally duplicate functions \u2014 different names, potentially in different files or components, but calling the same set of dependencies (same business logic). Uses callee-set Jaccard similarity: two functions are flagged as duplicates when the overlap of what they call exceeds the similarity threshold. Results are grouped into clusters so you can see when 3+ functions are all doing the same thing. Use this to find refactoring opportunities and candidates for a shared utility. Default thresholds can be tuned via FLOWMAP_DUP_THRESHOLD and FLOWMAP_DUP_MIN_CALLEES environment variables.",{workspacePath:U.z.string().describe("Absolute path to the repository root"),similarityThreshold:U.z.number().min(0).max(1).optional().describe("Jaccard similarity threshold (0\u20131). 
Default: 0.75 (or FLOWMAP_DUP_THRESHOLD env var). Lower = more matches, higher = stricter. 1.0 = identical callee sets."),minCallees:U.z.number().int().min(1).optional().describe("Minimum number of distinct callees a function must have to be considered. Default: 2 (or FLOWMAP_DUP_MIN_CALLEES env var). Raising this avoids matching trivial one-liner wrappers."),exclude:U.z.string().optional().describe("Comma-separated glob patterns to exclude. Defaults: node_modules,dist,.git,__pycache__,*.test.*,*.spec.*")},async({workspacePath:e,similarityThreshold:r,minCallees:s,exclude:n})=>{let a=new h("flowmap_find_duplicates"),i=r??qe(),l=s??Ye();try{if(a.reportProgress("Validating workspace path"),!be.existsSync(e))return{content:[{type:"text",text:JSON.stringify({error:!0,code:"WORKSPACE_NOT_FOUND",message:`Directory does not exist: ${e}`,workspacePath:e})}]};let d=n?n.split(",").map(c=>c.trim()).filter(Boolean):["node_modules","dist",".git","__pycache__","*.test.*","*.spec.*"];a.reportProgress("Building call graph");let o=await v(e,{exclude:d});a.reportProgress("Computing callee signatures");let p=i,f=l,u=new Map,m=new Map(o.nodes.map(c=>[c.id,c]));for(let c of o.nodes)u.set(c.id,new Set);for(let c of o.edges){if(c.from===c.to)continue;let A=m.get(c.to)?.name??c.to;u.get(c.from)?.add(A)}let y=o.nodes.filter(c=>(u.get(c.id)?.size??0)>=f);a.reportProgress("Comparing function signatures");let S=new q;for(let c of y)S.init(c.id);let M=new Map;for(let c=0;c<y.length;c++){let w=y[c],A=u.get(w.id);for(let C=c+1;C<y.length;C++){let N=y[C];if(w.name===N.name&&w.filePath===N.filePath)continue;let L=u.get(N.id),R=Ee(A,L);if(R>=p){S.union(w.id,N.id);let B=[w.id,N.id].sort().join("|||");M.set(B,R)}}}let T=S.clusters(),_=[],Re=1;for(let[,c]of T){if(c.length<2)continue;let w=c.map(P=>{let 
E=m.get(P),I=[...u.get(P)??[]].sort();return{id:P,name:E?.name??"unknown",filePath:E?.filePath??"unknown",startLine:E?.startLine??0,language:E?.language??"unknown",calleeCount:I.length,callees:I}}),A=c.map(P=>u.get(P)),C=[...A[0]].filter(P=>A.every(E=>E.has(P))).sort(),N=1,L=0;for(let P=0;P<c.length;P++)for(let E=P+1;E<c.length;E++){let I=[c[P],c[E]].sort().join("|||"),z=M.get(I)??Ee(u.get(c[P]),u.get(c[E]));z<N&&(N=z),z>L&&(L=z)}let R=new Set(w.map(P=>P.filePath)).size,B=R>1?`These ${w.length} functions across ${R} files share the same core logic. Consider extracting a shared utility that accepts parameters for any behavioural differences.`:`These ${w.length} functions in the same file appear to duplicate logic. Consider merging them or extracting a private helper.`;_.push({clusterIndex:Re++,size:w.length,members:w,sharedCallees:C,minSimilarity:Math.round(N*100)/100,maxSimilarity:Math.round(L*100)/100,suggestion:B})}return _.sort((c,w)=>w.size-c.size||w.sharedCallees.length-c.sharedCallees.length),a.reportProgress("Analysis complete"),{content:[{type:"text",text:JSON.stringify({duplicateClusters:_,totalClusters:_.length,totalFunctionsInvolved:_.reduce((c,w)=>c+w.size,0),parameters:{similarityThreshold:p,minCallees:f,envOverrides:{FLOWMAP_DUP_THRESHOLD:process.env.FLOWMAP_DUP_THRESHOLD??null,FLOWMAP_DUP_MIN_CALLEES:process.env.FLOWMAP_DUP_MIN_CALLEES??null}},durationMs:o.durationMs,scannedFiles:o.scannedFiles,progress:{steps:a.getProgress(),summary:a.getSummary()},note:_.length===0?"No functionally duplicate functions detected at the current threshold. Try lowering similarityThreshold or minCallees.":`${_.length} duplicate cluster(s) found. 
Each cluster is a group of functions that call the same logical dependencies and are candidates for generalisation.`})}]}}catch(g){let d=g instanceof Error?g.message:String(g);return{content:[{type:"text",text:JSON.stringify({error:!0,code:"PARSE_ERROR",message:d,workspacePath:e})}]}}})}function Ce(){let t=new Ne.McpServer({name:"callgraph-mcp",version:"1.0.0"});return Qe(t),t}function Qe(t){ie(t),pe(t),me(t),ue(t),he(t),_e(t),ve(t),Oe(t),Me(t)}async function Le(){let t=(process.env.FLOWMAP_TRANSPORT||"stdio").toLowerCase();t==="http"||t==="sse"?await tt():await et()}async function et(){let t=Ce(),e=new Ae.StdioServerTransport;await t.connect(e)}async function tt(){let t=parseInt(process.env.FLOWMAP_PORT||"3100",10),e=Ce(),r=new Fe.StreamableHTTPServerTransport({sessionIdGenerator:()=>(0,De.randomUUID)()}),s=(0,Te.createServer)(async(n,a)=>{let i=n.url||"/";i==="/mcp"||i==="/"?await r.handleRequest(n,a):a.writeHead(404).end("Not Found")});await e.connect(r),s.listen(t,()=>{process.stderr.write(`FlowMap MCP server listening on http://localhost:${t}/mcp
6
+ `)})}Le().catch(t=>{process.stderr.write(`FlowMap MCP server failed to start: ${t}
4
7
  `),process.exit(1)});
@@ -0,0 +1,22 @@
1
+ /**
2
+ * Progress tracker for tools to report operation stages and timing
3
+ */
4
+ export interface ProgressStep {
5
+ step: number;
6
+ stage: string;
7
+ timestamp: number;
8
+ durationMs: number;
9
+ }
10
+ export declare class ProgressTracker {
11
+ private toolName;
12
+ private steps;
13
+ private startTime;
14
+ private stageStartTime;
15
+ private currentStep;
16
+ constructor(toolName: string);
17
+ reportProgress(stage: string): void;
18
+ getProgress(): ProgressStep[];
19
+ getTotalDurationMs(): number;
20
+ getSummary(): string;
21
+ private logToStderr;
22
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "callgraph-mcp",
3
- "version": "1.7.1",
3
+ "version": "1.8.0",
4
4
  "description": "MCP server for codebase call-flow analysis. Local, deterministic, language-agnostic. Powered by @codeflow-map/core.",
5
5
  "keywords": [
6
6
  "mcp",
@@ -8,33 +8,7 @@
8
8
  "call-graph",
9
9
  "static-analysis",
10
10
  "code-analysis",
11
- "flowmap",
12
- "callsight",
13
- "tree-sitter",
14
- "ast",
15
- "code-visualization",
16
- "call-graph-analysis",
17
- "codebase-analysis",
18
- "code-intelligence",
19
- "developer-tools",
20
- "typescript",
21
- "javascript",
22
- "python",
23
- "golang",
24
- "dead-code",
25
- "code-review",
26
- "refactoring",
27
- "dependency-analysis",
28
- "cursor",
29
- "claude",
30
- "copilot",
31
- "agentic",
32
- "impact-analysis",
33
- "entry-points",
34
- "execution-flow",
35
- "claude-mcp",
36
- "cursor-mcp",
37
- "vscode"
11
+ "flowmap"
38
12
  ],
39
13
  "license": "MIT",
40
14
  "author": "devricky-codes",