trace-mcp 1.2.1 → 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -2522,6 +2522,16 @@ function buildProjectContext(rootPath) {
2522
2522
  } catch {
2523
2523
  }
2524
2524
  }
2525
+ try {
2526
+ const ghWorkflowDir = path.resolve(rootPath, ".github/workflows");
2527
+ const entries = fs.readdirSync(ghWorkflowDir);
2528
+ for (const entry of entries) {
2529
+ if (entry.endsWith(".yml") || entry.endsWith(".yaml")) {
2530
+ configFiles.push(`.github/workflows/${entry}`);
2531
+ }
2532
+ }
2533
+ } catch {
2534
+ }
2525
2535
  return {
2526
2536
  rootPath,
2527
2537
  packageJson,
@@ -3310,69 +3320,557 @@ function buildNode(store, comp, filePath, remainingDepth, visited, budget) {
3310
3320
  return node;
3311
3321
  }
3312
3322
 
3323
+ // src/tools/git-analysis.ts
3324
+ import { execFileSync } from "child_process";
3325
+
3326
// src/logger.ts
import pino from "pino";
// Log level is overridable at runtime via TRACE_MCP_LOG_LEVEL; defaults to "info".
var level = process.env.TRACE_MCP_LOG_LEVEL ?? "info";
// Logger writes to stderr — presumably so stdout stays free for MCP protocol
// traffic; confirm against the server transport setup.
var logger = pino({
  name: "trace-mcp",
  level
}, process.stderr);
3333
+
3334
+ // src/tools/git-analysis.ts
3335
// Returns true when `cwd` lies inside a git work tree. Any failure —
// git binary missing, invalid cwd, not a repo, or the 5s timeout —
// is treated as "not a repo" rather than an error.
function isGitRepo(cwd) {
  const execOpts = { cwd, stdio: "pipe", timeout: 5e3 };
  try {
    execFileSync("git", ["rev-parse", "--is-inside-work-tree"], execOpts);
  } catch {
    return false;
  }
  return true;
}
3347
// Aggregates per-file commit statistics by parsing `git log` run in `cwd`.
// Each commit is emitted as a "__COMMIT__<hash>|<iso-date>|<author>" header
// followed by the file paths it touched (--name-only).
// Returns Map<path, { file, commits, authors: Set, firstDate, lastDate }>;
// an empty Map when git fails. `sinceDays`, if given, bounds the history.
function getGitFileStats(cwd, sinceDays) {
  const gitArgs = [
    "log",
    "--pretty=format:__COMMIT__%H|%aI|%aN",
    "--name-only",
    "--no-merges",
    "--diff-filter=ACDMR"
  ];
  if (sinceDays !== void 0) gitArgs.push(`--since=${sinceDays} days ago`);
  let raw;
  try {
    raw = execFileSync("git", gitArgs, {
      cwd,
      stdio: "pipe",
      maxBuffer: 10 * 1024 * 1024, // 10 MB
      timeout: 3e4
    }).toString("utf-8");
  } catch (e) {
    // Best-effort: callers get an empty result instead of an exception.
    logger.warn({ error: e }, "git log failed");
    return /* @__PURE__ */ new Map();
  }
  const fileStats = /* @__PURE__ */ new Map();
  let commitDate = null;
  let commitAuthor = null;
  for (const rawLine of raw.split("\n")) {
    if (rawLine.startsWith("__COMMIT__")) {
      // Commit header: hash | author date (ISO) | author name.
      const [, isoDate, author] = rawLine.slice("__COMMIT__".length).split("|");
      commitDate = new Date(isoDate);
      commitAuthor = author;
      continue;
    }
    const filePath = rawLine.trim();
    if (!filePath || !commitDate || !commitAuthor) continue;
    const entry = fileStats.get(filePath);
    if (entry) {
      entry.commits++;
      entry.authors.add(commitAuthor);
      if (commitDate < entry.firstDate) entry.firstDate = commitDate;
      if (commitDate > entry.lastDate) entry.lastDate = commitDate;
    } else {
      fileStats.set(filePath, {
        file: filePath,
        commits: 1,
        authors: /* @__PURE__ */ new Set([commitAuthor]),
        firstDate: commitDate,
        lastDate: commitDate
      });
    }
  }
  return fileStats;
}
3401
// Ranks files by commit frequency ("churn"). Options:
//   sinceDays   — bound the git history window
//   limit       — max rows returned (default 50)
//   filePattern — keep only paths containing this substring
// Returns [] when `cwd` is not a git repository.
function getChurnRate(cwd, options = {}) {
  const { sinceDays, limit = 50, filePattern } = options;
  if (!isGitRepo(cwd)) return [];
  const WEEK_MS = 7 * 24 * 60 * 60 * 1e3;
  const results = [];
  for (const [file, data] of getGitFileStats(cwd, sinceDays)) {
    if (filePattern && !file.includes(filePattern)) continue;
    // Clamp lifespan to >= 1 week so brand-new files don't divide by ~0.
    const lifespanWeeks = Math.max(
      (data.lastDate.getTime() - data.firstDate.getTime()) / WEEK_MS,
      1
    );
    const churnPerWeek = Math.round(data.commits / lifespanWeeks * 100) / 100;
    const assessment = churnPerWeek <= 1 ? "stable" : churnPerWeek <= 3 ? "active" : "volatile";
    results.push({
      file,
      commits: data.commits,
      unique_authors: data.authors.size,
      first_seen: data.firstDate.toISOString().split("T")[0],
      last_modified: data.lastDate.toISOString().split("T")[0],
      churn_per_week: churnPerWeek,
      assessment
    });
  }
  results.sort((a, b) => b.commits - a.commits);
  return results.slice(0, limit);
}
3430
// Combines per-file max cyclomatic complexity with recent git activity into
// a hotspot score: complexity * ln(1 + commits). Files below `minCyclomatic`
// or with zero recent commits are dropped. Falls back to a complexity-only
// ranking when `cwd` is not a git repository.
function getHotspots(store, cwd, options = {}) {
  const { sinceDays = 90, limit = 20, minCyclomatic = 3 } = options;
  if (!isGitRepo(cwd)) {
    return getComplexityOnlyHotspots(store, limit, minCyclomatic);
  }
  const gitStats = getGitFileStats(cwd, sinceDays);
  const hotspots = [];
  for (const [file, maxCyclomatic] of getMaxCyclomaticPerFile(store)) {
    if (maxCyclomatic < minCyclomatic) continue;
    const commits = gitStats.get(file)?.commits ?? 0;
    const score = Math.round(maxCyclomatic * Math.log(1 + commits) * 100) / 100;
    // log(1 + 0) == 0, so untouched files score 0 and are excluded.
    if (score <= 0) continue;
    const assessment = score <= 3 ? "low" : score <= 10 ? "medium" : "high";
    hotspots.push({
      file,
      max_cyclomatic: maxCyclomatic,
      commits,
      score,
      assessment
    });
  }
  hotspots.sort((a, b) => b.score - a.score);
  return hotspots.slice(0, limit);
}
3459
// Queries the symbol index for the highest cyclomatic complexity recorded
// in each file. Returns Map<filePath, maxCyclomatic>.
function getMaxCyclomaticPerFile(store) {
  const rows = store.db.prepare(`
    SELECT f.path, MAX(s.cyclomatic) as max_cyclomatic
    FROM symbols s
    JOIN files f ON s.file_id = f.id
    WHERE s.cyclomatic IS NOT NULL
    GROUP BY f.path
  `).all();
  const byFile = /* @__PURE__ */ new Map();
  for (const { path: filePath, max_cyclomatic: maxCyclomatic } of rows) {
    byFile.set(filePath, maxCyclomatic);
  }
  return byFile;
}
3473
// Hotspot fallback when git history is unavailable: rank purely by
// cyclomatic complexity. Commits are reported as 0 and the score equals
// the complexity itself.
function getComplexityOnlyHotspots(store, limit, minCyclomatic) {
  const hotspots = [];
  for (const [file, maxCyclomatic] of getMaxCyclomaticPerFile(store)) {
    if (maxCyclomatic < minCyclomatic) continue;
    let assessment = "high";
    if (maxCyclomatic <= 3) assessment = "low";
    else if (maxCyclomatic <= 10) assessment = "medium";
    hotspots.push({
      file,
      max_cyclomatic: maxCyclomatic,
      commits: 0,
      score: maxCyclomatic,
      // score = complexity alone
      assessment
    });
  }
  hotspots.sort((a, b) => b.score - a.score);
  return hotspots.slice(0, limit);
}
3490
+
3313
3491
  // src/tools/impact.ts
3314
- function getChangeImpact(store, opts, depth = 3, maxDependents = 200) {
3315
- let startNodeId;
3492
+ import { execSync } from "child_process";
3493
// Maps a repo-relative path to its "module": the first `depth` path
// segments joined back together, or just the first segment when the path
// has `depth` or fewer segments (e.g. "src/index.ts" -> "src").
function getModule(filePath, depth = 2) {
  const segments = filePath.split("/");
  if (segments.length <= depth) return segments[0];
  return segments.slice(0, depth).join("/");
}
3497
// Buckets a 0..1 risk score into a severity label.
function riskLevel(score) {
  const bands = [
    [0.75, "critical"],
    [0.5, "high"],
    [0.25, "medium"]
  ];
  for (const [threshold, label] of bands) {
    if (score >= threshold) return label;
  }
  return "low";
}
3503
// Rounds `v` to `decimals` places (default 2) by scaling, rounding to the
// nearest integer, and scaling back.
function round(v, decimals = 2) {
  const scale = 10 ** decimals;
  return Math.round(v * scale) / scale;
}
3507
// Normalizes `v` against `ceiling` and caps the result at 1, producing a
// 0..1 score component for the risk model.
function clamp01(v, ceiling) {
  return Math.min(1, v / ceiling);
}
3510
// Collects every file id that has at least one incoming `test_covers`
// edge, whether the edge targets the file node directly or one of the
// file's symbol nodes. Returns Set<fileId>.
function getTestedFileIds(store) {
  const rows = store.db.prepare(`
    SELECT DISTINCT
      CASE
        WHEN n.node_type = 'file' THEN n.ref_id
        WHEN n.node_type = 'symbol' THEN (SELECT file_id FROM symbols WHERE id = n.ref_id)
      END AS fid
    FROM edges e
    JOIN edge_types et ON e.edge_type_id = et.id
    JOIN nodes n ON e.target_node_id = n.id
    WHERE et.name = 'test_covers'
  `).all();
  const tested = /* @__PURE__ */ new Set();
  for (const row of rows) {
    if (row.fid != null) tested.add(row.fid);
  }
  return tested;
}
3526
// Looks up historical co-change partners for `filePath` in the co_changes
// table (confidence >= 0.3, co-changed >= 3 times, top 15 by confidence).
// Each entry records whether the partner is already in `graphFiles`, i.e.
// whether the coupling is visible in the dependency graph or hidden.
// Returns [] if the table is missing or the query fails.
function getCoChangesForFile(store, filePath, graphFiles) {
  let rows;
  try {
    rows = store.db.prepare(`
      SELECT
        CASE WHEN file_a = ? THEN file_b ELSE file_a END AS co_file,
        confidence
      FROM co_changes
      WHERE (file_a = ? OR file_b = ?)
        AND confidence >= 0.3
        AND co_change_count >= 3
      ORDER BY confidence DESC
      LIMIT 15
    `).all(filePath, filePath, filePath);
  } catch {
    return [];
  }
  return rows.map((row) => ({
    file: row.co_file,
    confidence: round(row.confidence),
    inGraph: graphFiles.has(row.co_file)
  }));
}
3548
// Finds the test files that cover the target or any of its dependents,
// by following incoming `test_covers` edges into each file node and each
// of the file's symbol nodes. Returns { total, files } with paths sorted.
function findAffectedTests(store, targetPath, dependentPaths) {
  const testFiles = /* @__PURE__ */ new Set();
  for (const candidate of [targetPath, ...dependentPaths]) {
    const file = store.getFile(candidate);
    if (!file) continue;
    const fileNodeId = store.getNodeId("file", file.id);
    if (fileNodeId != null) {
      collectTestFiles(store, fileNodeId, testFiles);
    }
    for (const sym of store.getSymbolsByFile(file.id)) {
      const symNodeId = store.getNodeId("symbol", sym.id);
      if (symNodeId != null) {
        collectTestFiles(store, symNodeId, testFiles);
      }
    }
  }
  const files = [...testFiles].sort();
  return { total: files.length, files };
}
3569
// Adds to `seen` the path of every file whose tests cover `nodeId`:
// walks incoming `test_covers` edges, resolving symbol sources to their
// containing file.
function collectTestFiles(store, nodeId, seen) {
  for (const edge of store.getIncomingEdges(nodeId)) {
    if (edge.edge_type_name !== "test_covers") continue;
    const ref = store.getNodeRef(edge.source_node_id);
    if (!ref) continue;
    let fileId;
    if (ref.nodeType === "file") {
      fileId = ref.refId;
    } else if (ref.nodeType === "symbol") {
      const sym = store.getSymbolById(ref.refId);
      if (sym) fileId = sym.file_id;
    }
    if (fileId == null) continue;
    const file = store.getFileById(fileId);
    if (file) seen.add(file.path);
  }
}
3588
// Counts commits touching `filePath` over the last `days` days.
// Returns 0 on any git failure (not a repo, git missing, bad cwd, timeout).
//
// Fixed: the previous implementation interpolated `filePath` into a shell
// command string via execSync (`git log ... -- "<path>" | wc -l`), which is
// a shell-injection risk for untrusted paths and depends on `wc` existing.
// Now uses execFileSync with an argv array (no shell) and counts the
// `--oneline` output lines (one per commit) in JS — same result, portable.
function getFileChurn(cwd, filePath, days) {
  try {
    const since = new Date(Date.now() - days * 864e5).toISOString().slice(0, 10);
    const output = execFileSync(
      "git",
      ["log", `--since=${since}`, "--oneline", "--", filePath],
      { cwd, encoding: "utf8", timeout: 5e3, stdio: ["pipe", "pipe", "pipe"] }
    );
    return output.split("\n").filter((line) => line.length > 0).length;
  } catch {
    return 0;
  }
}
3600
// Computes the blast radius of changing a file or symbol(s): walks incoming
// dependency edges up to `depth`, aggregates dependents per file/module,
// scores overall risk, and suggests mitigations.
// Targets: opts.symbolIds (scoped multi-symbol), opts.symbolId, or
// opts.filePath (seeded with the file node plus all of its symbols).
// `cwd`, when provided and a git repo, enables churn-based risk scoring.
function getChangeImpact(store, opts, depth = 3, maxDependents = 200, cwd) {
  let startNodeIds = [];
  let targetPath;
  let targetSymbolId;
  let targetSymbolName;
  let targetKind;
  let scopedToSymbols;
  if (opts.symbolIds && opts.symbolIds.length > 0) {
    // Multi-symbol mode: the first resolvable symbol provides the reported
    // target metadata; all resolvable symbols seed the traversal.
    scopedToSymbols = opts.symbolIds;
    let firstSym;
    for (const sid of opts.symbolIds) {
      const sym = store.getSymbolBySymbolId(sid);
      if (!sym) continue;
      if (!firstSym) firstSym = sym;
      const nid = store.getNodeId("symbol", sym.id);
      if (nid != null) startNodeIds.push(nid);
    }
    if (!firstSym) {
      return err(notFound(opts.symbolIds[0]));
    }
    const file = store.getFileById(firstSym.file_id);
    targetPath = file?.path ?? "unknown";
    targetSymbolId = firstSym.symbol_id;
    targetSymbolName = firstSym.name;
    targetKind = firstSym.kind;
    startNodeIds = [...new Set(startNodeIds)];
  } else if (opts.symbolId) {
    const sym = store.getSymbolBySymbolId(opts.symbolId);
    if (!sym) {
      return err(notFound(opts.symbolId));
    }
    const nodeId = store.getNodeId("symbol", sym.id);
    if (nodeId != null) startNodeIds.push(nodeId);
    const file = store.getFileById(sym.file_id);
    targetPath = file?.path ?? "unknown";
    targetSymbolId = opts.symbolId;
    targetSymbolName = sym.name;
    targetKind = sym.kind;
  } else if (opts.filePath) {
    const file = store.getFile(opts.filePath);
    if (!file) {
      return err(notFound(opts.filePath));
    }
    targetPath = file.path;
    const fileNodeId = store.getNodeId("file", file.id);
    if (fileNodeId != null) startNodeIds.push(fileNodeId);
    const symbols = store.getSymbolsByFile(file.id);
    // A class, if present, is treated as the file's primary symbol for the
    // reported target metadata.
    const primarySymbol = symbols.find((s) => s.kind === "class") ?? symbols[0];
    if (primarySymbol) {
      targetSymbolId = primarySymbol.symbol_id;
      targetSymbolName = primarySymbol.name;
      targetKind = primarySymbol.kind;
    }
    for (const sym of symbols) {
      const nid = store.getNodeId("symbol", sym.id);
      if (nid != null) startNodeIds.push(nid);
    }
    startNodeIds = [...new Set(startNodeIds)];
  } else {
    return err(notFound("", ["Provide either filePath, symbolId, or symbolIds"]));
  }
  const pennant = getPennantImpact(store, opts.symbolId ?? opts.filePath ?? "");
  if (startNodeIds.length === 0) {
    // Target resolved but has no graph nodes: return a well-formed empty
    // result rather than an error.
    const emptySummary = {
      totalFiles: 0,
      totalSymbols: 0,
      maxDepth: 0,
      crossBoundary: false,
      publicApiAffected: 0,
      untestedDependents: 0,
      highComplexityDependents: 0,
      sentence: "No dependents found."
    };
    const emptyRisk = {
      score: 0,
      level: "low",
      publicApiBreaking: false,
      untestedRatio: 0,
      maxComplexity: 0,
      mitigations: []
    };
    return ok({
      target: { path: targetPath, symbolId: targetSymbolId, symbolName: targetSymbolName, kind: targetKind },
      summary: emptySummary,
      risk: emptyRisk,
      dependents: [],
      affectedTests: { total: 0, files: [] },
      totalAffected: 0
    });
  }
  // BFS over incoming edges; rawDeps gets one entry per (edge, depth) hit.
  const testedFileIds = getTestedFileIds(store);
  const rawDeps = [];
  const visited = /* @__PURE__ */ new Set();
  for (const nid of startNodeIds) visited.add(nid);
  traverseIncoming(store, startNodeIds, maxDependents, depth, visited, rawDeps, testedFileIds);
  const truncated = rawDeps.length >= maxDependents;
  const dependents = deduplicateByFile(rawDeps);
  // Histograms over the raw (pre-dedup) hits.
  const byEdgeType = {};
  const byDepth = {};
  for (const raw of rawDeps) {
    byEdgeType[raw.edgeType] = (byEdgeType[raw.edgeType] ?? 0) + 1;
    byDepth[raw.depth] = (byDepth[raw.depth] ?? 0) + 1;
  }
  // Per-module aggregation and risk inputs.
  const moduleMap = /* @__PURE__ */ new Map();
  const graphFiles = /* @__PURE__ */ new Set();
  const targetModule = getModule(targetPath);
  let crossBoundary = false;
  let publicApiAffected = 0;
  let untestedDependents = 0;
  let highComplexityDependents = 0;
  let maxComplexity = 0;
  let maxDepthSeen = 0;
  for (const dep of dependents) {
    const mod = getModule(dep.path);
    if (mod !== targetModule) crossBoundary = true;
    let modEntry = moduleMap.get(mod);
    if (!modEntry) {
      modEntry = { files: /* @__PURE__ */ new Set(), maxDepth: 0, hasUntested: false };
      moduleMap.set(mod, modEntry);
    }
    modEntry.files.add(dep.path);
    modEntry.maxDepth = Math.max(modEntry.maxDepth, dep.depth);
    if (dep.hasTests === false) modEntry.hasUntested = true;
    graphFiles.add(dep.path);
    if (dep.hasTests === false) untestedDependents++;
    if (dep.depth > maxDepthSeen) maxDepthSeen = dep.depth;
    for (const sym of dep.symbols ?? []) {
      if (sym.isExported) publicApiAffected++;
      // Cyclomatic > 15 is this tool's "high complexity" threshold.
      if ((sym.complexity ?? 0) > 15) highComplexityDependents++;
      if ((sym.complexity ?? 0) > maxComplexity) maxComplexity = sym.complexity ?? 0;
    }
  }
  const byModule = [...moduleMap.entries()].map(([mod, data]) => ({
    module: mod,
    count: data.files.size,
    files: [...data.files],
    maxDepth: data.maxDepth,
    hasUntested: data.hasUntested
  })).sort((a, b) => b.count - a.count);
  // Only the closest 50 dependents are scanned for affected tests.
  const affectedTests = findAffectedTests(
    store,
    targetPath,
    dependents.slice(0, 50).map((d) => d.path)
  );
  graphFiles.add(targetPath);
  // "Hidden" couplings: files that co-change with the target in git history
  // but are not reachable through the dependency graph.
  const coChangeAll = getCoChangesForFile(store, targetPath, graphFiles);
  const coChangeHidden = coChangeAll.filter((c) => !c.inGraph);
  const targetFile = store.getFile(targetPath);
  const targetHasTests = targetFile ? testedFileIds.has(targetFile.id) : false;
  let churnCommits = 0;
  if (cwd && isGitRepo(cwd)) {
    churnCommits = getFileChurn(cwd, targetPath, 180);
  }
  // Weighted risk score from five 0..1 components.
  const totalFiles = dependents.length || 1;
  const untestedRatio = round(untestedDependents / totalFiles);
  const blastScore = clamp01(dependents.length, 50);
  const complexityScore = clamp01(maxComplexity, 20);
  const testGapScore = targetHasTests ? 0 : 0.8;
  const churnScore = clamp01(churnCommits, 30);
  const publicApiScore = publicApiAffected > 0 ? 0.3 : 0;
  const riskScore = round(
    0.3 * blastScore + 0.2 * complexityScore + 0.2 * testGapScore + 0.15 * churnScore + 0.15 * publicApiScore
  );
  const mitigations = [];
  if (!targetHasTests) mitigations.push("Add test coverage for the target before modifying");
  if (blastScore > 0.5) mitigations.push(`High blast radius (${dependents.length} files) \u2014 consider incremental rollout`);
  if (complexityScore > 0.7) mitigations.push("High complexity in dependents \u2014 review carefully for regressions");
  if (untestedRatio > 0.5) mitigations.push(`${untestedDependents}/${totalFiles} dependents lack tests \u2014 add integration tests`);
  if (publicApiAffected > 0) mitigations.push(`${publicApiAffected} public API symbol(s) affected \u2014 check for breaking changes`);
  if (churnCommits > 15) mitigations.push(`High churn (${churnCommits} commits/180d) \u2014 review recent history`);
  if (coChangeHidden.length > 0) {
    mitigations.push(`${coChangeHidden.length} hidden coupling(s) via git history: ${coChangeHidden.slice(0, 3).map((c) => c.file).join(", ")}`);
  }
  const risk = {
    score: riskScore,
    level: riskLevel(riskScore),
    publicApiBreaking: publicApiAffected > 0,
    untestedRatio,
    maxComplexity,
    mitigations
  };
  const modCount = byModule.length;
  const symCount = dependents.reduce((s, d) => s + (d.symbols?.length ?? 0), 0);
  const parts = [];
  parts.push(`${dependents.length} file(s) across ${modCount} module(s)`);
  if (publicApiAffected > 0) parts.push(`${publicApiAffected} public API`);
  if (untestedDependents > 0) parts.push(`${untestedDependents} untested`);
  if (highComplexityDependents > 0) parts.push(`${highComplexityDependents} high-complexity`);
  if (affectedTests.total > 0) parts.push(`${affectedTests.total} test(s) to run`);
  if (coChangeHidden.length > 0) parts.push(`${coChangeHidden.length} hidden coupling(s)`);
  const summary = {
    totalFiles: dependents.length,
    totalSymbols: symCount,
    maxDepth: maxDepthSeen,
    crossBoundary,
    publicApiAffected,
    untestedDependents,
    highComplexityDependents,
    sentence: `Impact: ${parts.join(", ")}.${risk.level !== "low" ? ` Risk: ${risk.level}.` : ""}`
  };
  // Breaking-change detection appends to mitigations/summary after the fact.
  const breakingChanges = detectBreakingChanges(store, targetPath, scopedToSymbols);
  if (breakingChanges.length > 0) {
    const totalConsumers = breakingChanges.reduce((s, b) => s + b.consumers, 0);
    mitigations.push(`${breakingChanges.length} exported symbol(s) with ${totalConsumers} consumer(s) \u2014 signature change = breaking`);
    parts.push(`${breakingChanges.length} breaking risk(s)`);
    summary.sentence = `Impact: ${parts.join(", ")}.${risk.level !== "low" ? ` Risk: ${risk.level}.` : ""}`;
  }
  // Optional sections are only attached when non-empty.
  const result = {
    target: { path: targetPath, symbolId: targetSymbolId, symbolName: targetSymbolName, kind: targetKind },
    summary,
    risk,
    affectedTests,
    dependents,
    totalAffected: dependents.length
  };
  if (breakingChanges.length > 0) result.breakingChanges = breakingChanges;
  if (byModule.length > 0) result.byModule = byModule;
  if (Object.keys(byEdgeType).length > 0) result.byEdgeType = byEdgeType;
  if (Object.keys(byDepth).length > 0) result.byDepth = byDepth;
  if (coChangeHidden.length > 0) result.coChangeHidden = coChangeHidden;
  if (truncated) result.truncated = true;
  if (pennant) result.pennant = pennant;
  if (scopedToSymbols) result.scopedToSymbols = scopedToSymbols;
  return ok(result);
}
3365
- function traverseIncoming(store, startNodeId, _currentDepth, maxDepth, visited, dependents, maxDependents) {
3366
- let frontier = [startNodeId];
3825
// Collapses raw per-edge dependency hits into one entry per file:
// - edgeTypes: union of edge types seen for the file
// - depth: shallowest depth at which the file was reached
// - hasTests: last non-null value observed wins
// - symbols: unique by symbolId, keeping complexity/isExported when known
// Output is sorted by depth, then path.
function deduplicateByFile(rawDeps) {
  const byPath = /* @__PURE__ */ new Map();
  for (const hit of rawDeps) {
    let entry = byPath.get(hit.path);
    if (entry === undefined) {
      entry = { edgeTypes: /* @__PURE__ */ new Set(), depth: hit.depth, hasTests: hit.hasTests, symbols: [] };
      byPath.set(hit.path, entry);
    }
    entry.edgeTypes.add(hit.edgeType);
    if (hit.depth < entry.depth) entry.depth = hit.depth;
    if (hit.hasTests != null) entry.hasTests = hit.hasTests;
    const describesSymbol = hit.symbolId && hit.symbolName && hit.symbolKind;
    if (describesSymbol && !entry.symbols.some((s) => s.symbolId === hit.symbolId)) {
      const sym = {
        symbolId: hit.symbolId,
        symbolName: hit.symbolName,
        symbolKind: hit.symbolKind
      };
      if (hit.complexity != null) sym.complexity = hit.complexity;
      if (hit.isExported != null) sym.isExported = hit.isExported;
      entry.symbols.push(sym);
    }
  }
  const deduped = [];
  for (const [filePath, entry] of byPath) {
    const dep = {
      path: filePath,
      edgeTypes: [...entry.edgeTypes],
      depth: entry.depth
    };
    if (entry.hasTests != null) dep.hasTests = entry.hasTests;
    if (entry.symbols.length > 0) dep.symbols = entry.symbols;
    deduped.push(dep);
  }
  deduped.sort((a, b) => a.depth - b.depth || a.path.localeCompare(b.path));
  return deduped;
}
3863
+ function traverseIncoming(store, startNodeIds, maxDependents, maxDepth, visited, rawDeps, testedFileIds) {
3864
+ let frontier = startNodeIds;
3367
3865
  for (let depth = 1; depth <= maxDepth; depth++) {
3368
- if (frontier.length === 0 || dependents.length >= maxDependents) break;
3866
+ if (frontier.length === 0 || rawDeps.length >= maxDependents) break;
3369
3867
  const allEdges = store.getEdgesForNodesBatch(frontier);
3370
3868
  const frontierSet = new Set(frontier);
3371
3869
  const newFrontier = [];
3372
3870
  const sourceNodeIds = [];
3373
3871
  const edgeBySource = /* @__PURE__ */ new Map();
3374
3872
  for (const edge of allEdges) {
3375
- if (dependents.length + sourceNodeIds.length >= maxDependents) break;
3873
+ if (rawDeps.length + sourceNodeIds.length >= maxDependents) break;
3376
3874
  if (!frontierSet.has(edge.target_node_id)) continue;
3377
3875
  if (edge.source_node_id === edge.target_node_id) continue;
3378
3876
  const srcId = edge.source_node_id;
@@ -3395,32 +3893,112 @@ function traverseIncoming(store, startNodeId, _currentDepth, maxDepth, visited,
3395
3893
  const symFileIds = /* @__PURE__ */ new Set();
3396
3894
  for (const sym of symbolMap.values()) symFileIds.add(sym.file_id);
3397
3895
  const symFiles = symFileIds.size > 0 ? store.getFilesByIds([...symFileIds]) : /* @__PURE__ */ new Map();
3896
+ const complexityMap = /* @__PURE__ */ new Map();
3897
+ if (symbolIds.length > 0) {
3898
+ const placeholders = symbolIds.map(() => "?").join(",");
3899
+ const rows = store.db.prepare(
3900
+ `SELECT id, cyclomatic FROM symbols WHERE id IN (${placeholders}) AND cyclomatic IS NOT NULL`
3901
+ ).all(...symbolIds);
3902
+ for (const r of rows) complexityMap.set(r.id, r.cyclomatic);
3903
+ }
3398
3904
  for (const srcId of sourceNodeIds) {
3399
- if (dependents.length >= maxDependents) break;
3905
+ if (rawDeps.length >= maxDependents) break;
3400
3906
  const ref = nodeRefs.get(srcId);
3401
3907
  if (!ref) continue;
3402
- let filePath;
3403
- let symbolId;
3908
+ let filePath;
3909
+ let symbolId;
3910
+ let symbolName;
3911
+ let symbolKind;
3912
+ let complexity;
3913
+ let isExported;
3914
+ let fileId;
3915
+ if (ref.nodeType === "symbol") {
3916
+ const sym = symbolMap.get(ref.refId);
3917
+ if (sym) {
3918
+ symbolId = sym.symbol_id;
3919
+ symbolName = sym.name;
3920
+ symbolKind = sym.kind;
3921
+ filePath = symFiles.get(sym.file_id)?.path;
3922
+ fileId = sym.file_id;
3923
+ complexity = complexityMap.get(sym.id);
3924
+ if (sym.metadata) {
3925
+ try {
3926
+ const meta = JSON.parse(sym.metadata);
3927
+ isExported = meta.exported === true || meta.exported === 1;
3928
+ } catch {
3929
+ }
3930
+ }
3931
+ }
3932
+ } else if (ref.nodeType === "file") {
3933
+ const f = fileMap.get(ref.refId);
3934
+ filePath = f?.path;
3935
+ fileId = f?.id;
3936
+ }
3937
+ if (filePath) {
3938
+ rawDeps.push({
3939
+ path: filePath,
3940
+ edgeType: edgeBySource.get(srcId) ?? "unknown",
3941
+ depth,
3942
+ symbolId,
3943
+ symbolName,
3944
+ symbolKind,
3945
+ complexity,
3946
+ hasTests: fileId != null ? testedFileIds.has(fileId) : void 0,
3947
+ isExported,
3948
+ fileId
3949
+ });
3950
+ }
3951
+ }
3952
+ frontier = newFrontier;
3953
+ }
3954
+ }
3955
// Flags exported symbols in `targetPath` that other files consume through
// non-test edges: changing such a symbol's signature is a likely breaking
// change. Optionally restricted to `scopedSymbolIds`. Results are sorted
// by consumer count; consumer file lists are capped at 10.
function detectBreakingChanges(store, targetPath, scopedSymbolIds) {
  const file = store.getFile(targetPath);
  if (!file) return [];
  const breaking = [];
  for (const sym of store.getSymbolsByFile(file.id)) {
    if (!sym.metadata) continue;
    let meta;
    try {
      meta = JSON.parse(sym.metadata);
    } catch {
      continue;
    }
    // Exported flag may be stored as boolean or SQLite integer.
    const exported = meta.exported === true || meta.exported === 1;
    if (!exported) continue;
    if (scopedSymbolIds && !scopedSymbolIds.includes(sym.symbol_id)) continue;
    const nodeId = store.getNodeId("symbol", sym.id);
    if (nodeId == null) continue;
    const consumerFiles = /* @__PURE__ */ new Set();
    for (const edge of store.getIncomingEdges(nodeId)) {
      if (edge.edge_type_name === "test_covers") continue;
      const ref = store.getNodeRef(edge.source_node_id);
      if (!ref) continue;
      let consumer;
      if (ref.nodeType === "symbol") {
        const src = store.getSymbolById(ref.refId);
        if (src) consumer = store.getFileById(src.file_id);
      } else if (ref.nodeType === "file") {
        consumer = store.getFileById(ref.refId);
      }
      // Self-references within the target file are not breaking consumers.
      if (consumer && consumer.path !== targetPath) consumerFiles.add(consumer.path);
    }
    if (consumerFiles.size > 0) {
      breaking.push({
        symbolId: sym.symbol_id,
        symbolName: sym.name,
        kind: sym.kind,
        consumers: consumerFiles.size,
        consumerFiles: [...consumerFiles].slice(0, 10)
        // cap for readability
      });
    }
  }
  breaking.sort((a, b) => b.consumers - a.consumers);
  return breaking;
}
3425
4003
  function getPennantImpact(store, name) {
3426
4004
  if (!name) return null;
@@ -3750,14 +4328,6 @@ import path9 from "path";
3750
4328
  import { cpus } from "os";
3751
4329
  import fg2 from "fast-glob";
3752
4330
 
3753
- // src/logger.ts
3754
- import pino from "pino";
3755
- var level = process.env.TRACE_MCP_LOG_LEVEL ?? "info";
3756
- var logger = pino({
3757
- name: "trace-mcp",
3758
- level
3759
- }, process.stderr);
3760
-
3761
4331
  // src/plugin-api/executor.ts
3762
4332
  var DEFAULT_TIMEOUT_MS = 3e4;
3763
4333
  var MAX_SYMBOLS_PER_FILE = 1e4;
@@ -4289,166 +4859,6 @@ var GitignoreMatcher = class {
4289
4859
 
4290
4860
  // src/tools/history.ts
4291
4861
  import { execFileSync as execFileSync2 } from "child_process";
4292
-
4293
- // src/tools/git-analysis.ts
4294
- import { execFileSync } from "child_process";
4295
- function isGitRepo(cwd) {
4296
- try {
4297
- execFileSync("git", ["rev-parse", "--is-inside-work-tree"], {
4298
- cwd,
4299
- stdio: "pipe",
4300
- timeout: 5e3
4301
- });
4302
- return true;
4303
- } catch {
4304
- return false;
4305
- }
4306
- }
4307
- function getGitFileStats(cwd, sinceDays) {
4308
- const args = [
4309
- "log",
4310
- "--pretty=format:__COMMIT__%H|%aI|%aN",
4311
- "--name-only",
4312
- "--no-merges",
4313
- "--diff-filter=ACDMR"
4314
- ];
4315
- if (sinceDays !== void 0) {
4316
- args.push(`--since=${sinceDays} days ago`);
4317
- }
4318
- let output;
4319
- try {
4320
- output = execFileSync("git", args, {
4321
- cwd,
4322
- stdio: "pipe",
4323
- maxBuffer: 10 * 1024 * 1024,
4324
- // 10 MB
4325
- timeout: 3e4
4326
- }).toString("utf-8");
4327
- } catch (e) {
4328
- logger.warn({ error: e }, "git log failed");
4329
- return /* @__PURE__ */ new Map();
4330
- }
4331
- const fileStats = /* @__PURE__ */ new Map();
4332
- let currentDate = null;
4333
- let currentAuthor = null;
4334
- for (const line of output.split("\n")) {
4335
- if (line.startsWith("__COMMIT__")) {
4336
- const parts = line.slice("__COMMIT__".length).split("|");
4337
- currentDate = new Date(parts[1]);
4338
- currentAuthor = parts[2];
4339
- continue;
4340
- }
4341
- const trimmed = line.trim();
4342
- if (!trimmed || !currentDate || !currentAuthor) continue;
4343
- const existing = fileStats.get(trimmed);
4344
- if (existing) {
4345
- existing.commits++;
4346
- existing.authors.add(currentAuthor);
4347
- if (currentDate < existing.firstDate) existing.firstDate = currentDate;
4348
- if (currentDate > existing.lastDate) existing.lastDate = currentDate;
4349
- } else {
4350
- fileStats.set(trimmed, {
4351
- file: trimmed,
4352
- commits: 1,
4353
- authors: /* @__PURE__ */ new Set([currentAuthor]),
4354
- firstDate: currentDate,
4355
- lastDate: currentDate
4356
- });
4357
- }
4358
- }
4359
- return fileStats;
4360
- }
4361
- function getChurnRate(cwd, options = {}) {
4362
- const { sinceDays, limit = 50, filePattern } = options;
4363
- if (!isGitRepo(cwd)) {
4364
- return [];
4365
- }
4366
- const stats = getGitFileStats(cwd, sinceDays);
4367
- let entries = [];
4368
- for (const [file, data] of stats) {
4369
- if (filePattern && !file.includes(filePattern)) continue;
4370
- const lifespanMs = data.lastDate.getTime() - data.firstDate.getTime();
4371
- const lifespanWeeks = Math.max(lifespanMs / (7 * 24 * 60 * 60 * 1e3), 1);
4372
- const churnPerWeek = Math.round(data.commits / lifespanWeeks * 100) / 100;
4373
- let assessment;
4374
- if (churnPerWeek <= 1) assessment = "stable";
4375
- else if (churnPerWeek <= 3) assessment = "active";
4376
- else assessment = "volatile";
4377
- entries.push({
4378
- file,
4379
- commits: data.commits,
4380
- unique_authors: data.authors.size,
4381
- first_seen: data.firstDate.toISOString().split("T")[0],
4382
- last_modified: data.lastDate.toISOString().split("T")[0],
4383
- churn_per_week: churnPerWeek,
4384
- assessment
4385
- });
4386
- }
4387
- entries.sort((a, b) => b.commits - a.commits);
4388
- return entries.slice(0, limit);
4389
- }
4390
- function getHotspots(store, cwd, options = {}) {
4391
- const { sinceDays = 90, limit = 20, minCyclomatic = 3 } = options;
4392
- if (!isGitRepo(cwd)) {
4393
- return getComplexityOnlyHotspots(store, limit, minCyclomatic);
4394
- }
4395
- const gitStats = getGitFileStats(cwd, sinceDays);
4396
- const fileComplexity = getMaxCyclomaticPerFile(store);
4397
- const entries = [];
4398
- for (const [file, maxCyclomatic] of fileComplexity) {
4399
- if (maxCyclomatic < minCyclomatic) continue;
4400
- const git = gitStats.get(file);
4401
- const commits = git?.commits ?? 0;
4402
- const score = Math.round(maxCyclomatic * Math.log(1 + commits) * 100) / 100;
4403
- if (score <= 0) continue;
4404
- let assessment;
4405
- if (score <= 3) assessment = "low";
4406
- else if (score <= 10) assessment = "medium";
4407
- else assessment = "high";
4408
- entries.push({
4409
- file,
4410
- max_cyclomatic: maxCyclomatic,
4411
- commits,
4412
- score,
4413
- assessment
4414
- });
4415
- }
4416
- entries.sort((a, b) => b.score - a.score);
4417
- return entries.slice(0, limit);
4418
- }
4419
- function getMaxCyclomaticPerFile(store) {
4420
- const rows = store.db.prepare(`
4421
- SELECT f.path, MAX(s.cyclomatic) as max_cyclomatic
4422
- FROM symbols s
4423
- JOIN files f ON s.file_id = f.id
4424
- WHERE s.cyclomatic IS NOT NULL
4425
- GROUP BY f.path
4426
- `).all();
4427
- const result = /* @__PURE__ */ new Map();
4428
- for (const row of rows) {
4429
- result.set(row.path, row.max_cyclomatic);
4430
- }
4431
- return result;
4432
- }
4433
- function getComplexityOnlyHotspots(store, limit, minCyclomatic) {
4434
- const fileComplexity = getMaxCyclomaticPerFile(store);
4435
- const entries = [];
4436
- for (const [file, maxCyclomatic] of fileComplexity) {
4437
- if (maxCyclomatic < minCyclomatic) continue;
4438
- entries.push({
4439
- file,
4440
- max_cyclomatic: maxCyclomatic,
4441
- commits: 0,
4442
- score: maxCyclomatic,
4443
- // score = complexity alone
4444
- assessment: maxCyclomatic <= 3 ? "low" : maxCyclomatic <= 10 ? "medium" : "high"
4445
- });
4446
- }
4447
- entries.sort((a, b) => b.score - a.score);
4448
- return entries.slice(0, limit);
4449
- }
4450
-
4451
- // src/tools/history.ts
4452
4862
  function sampleFileCommits(cwd, filePath, sinceDays, count) {
4453
4863
  const args = [
4454
4864
  "log",
@@ -7690,7 +8100,7 @@ function registerAITools(server, ctx) {
7690
8100
  },
7691
8101
  async ({ file_path, diff }) => {
7692
8102
  const impactResult = getChangeImpact(store, { filePath: file_path });
7693
- const blastRadius = impactResult.isOk() ? impactResult.value.dependents.map((d) => `${d.edgeType}: ${d.path}`).join("\n") : "";
8103
+ const blastRadius = impactResult.isOk() ? impactResult.value.dependents.map((d) => `${d.edgeTypes.join(", ")}: ${d.path}`).join("\n") : "";
7694
8104
  const prompt = PROMPTS.review_change.build({
7695
8105
  filePath: file_path,
7696
8106
  diff: diff ?? "",
@@ -8901,7 +9311,14 @@ var CALL_EDGE_TYPES = /* @__PURE__ */ new Set([
8901
9311
  "routes_to",
8902
9312
  "validates_with",
8903
9313
  "nest_injects",
8904
- "graphql_resolves"
9314
+ "graphql_resolves",
9315
+ // Import-based edges (fallback when no call edges exist)
9316
+ "esm_imports",
9317
+ "imports",
9318
+ "uses",
9319
+ // Component/rendering edges
9320
+ "renders_component",
9321
+ "uses_composable"
8905
9322
  ]);
8906
9323
  var MAX_DEPTH = 10;
8907
9324
  function getCallGraph(store, opts, depth = 2) {
@@ -11214,6 +11631,10 @@ async function getTaskContext(store, rootPath, opts, ai) {
11214
11631
  const recency = computeRecency(file.indexed_at, now);
11215
11632
  const typeBonus = getTypeBonus(sym.kind);
11216
11633
  let score = hybridScore({ relevance, pagerank: pr, recency, typeBonus });
11634
+ const NON_CODE_LANGUAGES = /* @__PURE__ */ new Set(["markdown", "yaml", "json", "toml", "xml", "html", "csv", "text", "ini"]);
11635
+ if (file.language && NON_CODE_LANGUAGES.has(file.language.toLowerCase())) {
11636
+ score *= 0.2;
11637
+ }
11217
11638
  if (walkInfo.depth > 0) {
11218
11639
  score *= 1 / (1 + 0.3 * walkInfo.depth);
11219
11640
  }
@@ -11478,7 +11899,7 @@ function rankPercentile(values) {
11478
11899
  function clampNormalize(value, ceiling) {
11479
11900
  return Math.min(1, value / ceiling);
11480
11901
  }
11481
- function riskLevel(score) {
11902
+ function riskLevel2(score) {
11482
11903
  if (score < 0.3) return "low";
11483
11904
  if (score < 0.5) return "medium";
11484
11905
  if (score < 0.75) return "high";
@@ -11491,7 +11912,7 @@ function debtGrade(score) {
11491
11912
  if (score < 0.8) return "D";
11492
11913
  return "F";
11493
11914
  }
11494
- function getModule(filePath, depth) {
11915
+ function getModule2(filePath, depth) {
11495
11916
  const parts = filePath.split("/");
11496
11917
  return parts.slice(0, Math.min(depth, parts.length - 1)).join("/") || filePath;
11497
11918
  }
@@ -11548,14 +11969,14 @@ function predictBugs(store, cwd, options = {}) {
11548
11969
  predictions.push({
11549
11970
  file,
11550
11971
  score: Math.round(score * 1e3) / 1e3,
11551
- risk: riskLevel(score),
11972
+ risk: riskLevel2(score),
11552
11973
  factors: [
11553
- { signal: "churn", raw_value: round(git?.churnPerWeek ?? 0), normalized: round(sChurn), weight: w.churn, contribution: round(w.churn * sChurn) },
11554
- { signal: "fix_ratio", raw_value: round(git?.fixRatio ?? 0), normalized: round(sFixRatio), weight: w.fix_ratio, contribution: round(w.fix_ratio * sFixRatio) },
11555
- { signal: "complexity", raw_value: complexityMap.get(file) ?? 0, normalized: round(sComplexity), weight: w.complexity, contribution: round(w.complexity * sComplexity) },
11556
- { signal: "coupling", raw_value: round(coupling?.instability ?? 0), normalized: round(sCoupling), weight: w.coupling, contribution: round(w.coupling * sCoupling) },
11557
- { signal: "pagerank", raw_value: round(pagerankMap.get(file)?.score ?? 0), normalized: round(sPagerank), weight: w.pagerank, contribution: round(w.pagerank * sPagerank) },
11558
- { signal: "authors", raw_value: git?.authors ?? 0, normalized: round(sAuthors), weight: w.authors, contribution: round(w.authors * sAuthors) }
11974
+ { signal: "churn", raw_value: round2(git?.churnPerWeek ?? 0), normalized: round2(sChurn), weight: w.churn, contribution: round2(w.churn * sChurn) },
11975
+ { signal: "fix_ratio", raw_value: round2(git?.fixRatio ?? 0), normalized: round2(sFixRatio), weight: w.fix_ratio, contribution: round2(w.fix_ratio * sFixRatio) },
11976
+ { signal: "complexity", raw_value: complexityMap.get(file) ?? 0, normalized: round2(sComplexity), weight: w.complexity, contribution: round2(w.complexity * sComplexity) },
11977
+ { signal: "coupling", raw_value: round2(coupling?.instability ?? 0), normalized: round2(sCoupling), weight: w.coupling, contribution: round2(w.coupling * sCoupling) },
11978
+ { signal: "pagerank", raw_value: round2(pagerankMap.get(file)?.score ?? 0), normalized: round2(sPagerank), weight: w.pagerank, contribution: round2(w.pagerank * sPagerank) },
11979
+ { signal: "authors", raw_value: git?.authors ?? 0, normalized: round2(sAuthors), weight: w.authors, contribution: round2(w.authors * sAuthors) }
11559
11980
  ]
11560
11981
  });
11561
11982
  }
@@ -11590,7 +12011,7 @@ function detectDrift(store, cwd, options = {}) {
11590
12011
  for (const file of files) {
11591
12012
  fileCommitCount.set(file, (fileCommitCount.get(file) ?? 0) + 1);
11592
12013
  fileTotalCount.set(file, (fileTotalCount.get(file) ?? 0) + 1);
11593
- modulesTouched.add(getModule(file, moduleDepth));
12014
+ modulesTouched.add(getModule2(file, moduleDepth));
11594
12015
  }
11595
12016
  const isShotgun = modulesTouched.size >= 3;
11596
12017
  if (isShotgun) {
@@ -11611,8 +12032,8 @@ function detectDrift(store, cwd, options = {}) {
11611
12032
  const anomalies = [];
11612
12033
  for (const [key, count] of coChangeCount) {
11613
12034
  const [fileA, fileB] = key.split("|");
11614
- const moduleA = getModule(fileA, moduleDepth);
11615
- const moduleB = getModule(fileB, moduleDepth);
12035
+ const moduleA = getModule2(fileA, moduleDepth);
12036
+ const moduleB = getModule2(fileB, moduleDepth);
11616
12037
  if (moduleA === moduleB) continue;
11617
12038
  const commitsA = fileCommitCount.get(fileA) ?? 0;
11618
12039
  const commitsB = fileCommitCount.get(fileB) ?? 0;
@@ -11624,7 +12045,7 @@ function detectDrift(store, cwd, options = {}) {
11624
12045
  file_a: fileA,
11625
12046
  file_b: fileB,
11626
12047
  co_change_count: count,
11627
- confidence: round(jaccard2),
12048
+ confidence: round2(jaccard2),
11628
12049
  module_a: moduleA,
11629
12050
  module_b: moduleB
11630
12051
  });
@@ -11639,7 +12060,7 @@ function detectDrift(store, cwd, options = {}) {
11639
12060
  file,
11640
12061
  shotgun_commits: shotgunCommits,
11641
12062
  total_commits: total,
11642
- ratio: round(ratio)
12063
+ ratio: round2(ratio)
11643
12064
  });
11644
12065
  }
11645
12066
  }
@@ -11686,7 +12107,7 @@ function getTechDebt(store, cwd, options = {}) {
11686
12107
  const allFiles = store.getAllFiles();
11687
12108
  const moduleFiles = /* @__PURE__ */ new Map();
11688
12109
  for (const f of allFiles) {
11689
- const mod = getModule(f.path, moduleDepth);
12110
+ const mod = getModule2(f.path, moduleDepth);
11690
12111
  if (!moduleFiles.has(mod)) moduleFiles.set(mod, []);
11691
12112
  moduleFiles.get(mod).push(f.path);
11692
12113
  }
@@ -11722,7 +12143,7 @@ function getTechDebt(store, cwd, options = {}) {
11722
12143
  }
11723
12144
  if (sCoupling > 0.7) {
11724
12145
  recommendations.push({
11725
- action: `Reduce coupling: module has high average instability (${round(sCoupling)})`,
12146
+ action: `Reduce coupling: module has high average instability (${round2(sCoupling)})`,
11726
12147
  target: mod,
11727
12148
  priority: "medium"
11728
12149
  });
@@ -11746,13 +12167,13 @@ function getTechDebt(store, cwd, options = {}) {
11746
12167
  }
11747
12168
  modules.push({
11748
12169
  module: mod,
11749
- score: round(score),
12170
+ score: round2(score),
11750
12171
  grade: debtGrade(score),
11751
12172
  breakdown: {
11752
- complexity: round(sComplexity),
11753
- coupling: round(sCoupling),
11754
- test_gap: round(sTestGap),
11755
- churn: round(sChurn)
12173
+ complexity: round2(sComplexity),
12174
+ coupling: round2(sCoupling),
12175
+ test_gap: round2(sTestGap),
12176
+ churn: round2(sChurn)
11756
12177
  },
11757
12178
  file_count: files.length,
11758
12179
  recommendations
@@ -11762,7 +12183,7 @@ function getTechDebt(store, cwd, options = {}) {
11762
12183
  const totalScore = modules.length > 0 ? modules.reduce((s, m) => s + m.score, 0) / modules.length : 0;
11763
12184
  return ok({
11764
12185
  modules: modules.slice(0, 50),
11765
- project_score: round(totalScore),
12186
+ project_score: round2(totalScore),
11766
12187
  project_grade: debtGrade(totalScore)
11767
12188
  });
11768
12189
  }
@@ -11859,15 +12280,15 @@ function assessChangeRisk(store, cwd, opts) {
11859
12280
  if (sChurn > 0.7) mitigations.push("Frequently changed file \u2014 review recent change history for context");
11860
12281
  return ok({
11861
12282
  target: { file: targetFile, symbol_id: targetSymbolId },
11862
- risk_score: round(riskScore),
11863
- risk_level: riskLevel(riskScore),
11864
- confidence: round(confidence),
12283
+ risk_score: round2(riskScore),
12284
+ risk_level: riskLevel2(riskScore),
12285
+ confidence: round2(confidence),
11865
12286
  factors: [
11866
- { signal: "blast_radius", value: round(sBlast), weight: w.blast_radius, contribution: round(w.blast_radius * sBlast), detail: `${blastFiles} files, ${blastSymbols} symbols in blast radius` },
11867
- { signal: "complexity", value: round(sComplexity), weight: w.complexity, contribution: round(w.complexity * sComplexity), detail: `Max cyclomatic: ${complexityRow?.max_cyc ?? 0}` },
11868
- { signal: "churn", value: round(sChurn), weight: w.churn, contribution: round(w.churn * sChurn), detail: gitAvailable ? `Churn percentile: ${round(sChurn * 100)}%` : "Git unavailable" },
11869
- { signal: "test_gap", value: sTestGap, weight: w.test_gap, contribution: round(w.test_gap * sTestGap), detail: hasTestCoverage ? "Has test coverage" : "No test coverage" },
11870
- { signal: "coupling", value: round(sCoupling), weight: w.coupling, contribution: round(w.coupling * sCoupling), detail: `Instability: ${round(sCoupling)}` }
12287
+ { signal: "blast_radius", value: round2(sBlast), weight: w.blast_radius, contribution: round2(w.blast_radius * sBlast), detail: `${blastFiles} files, ${blastSymbols} symbols in blast radius` },
12288
+ { signal: "complexity", value: round2(sComplexity), weight: w.complexity, contribution: round2(w.complexity * sComplexity), detail: `Max cyclomatic: ${complexityRow?.max_cyc ?? 0}` },
12289
+ { signal: "churn", value: round2(sChurn), weight: w.churn, contribution: round2(w.churn * sChurn), detail: gitAvailable ? `Churn percentile: ${round2(sChurn * 100)}%` : "Git unavailable" },
12290
+ { signal: "test_gap", value: sTestGap, weight: w.test_gap, contribution: round2(w.test_gap * sTestGap), detail: hasTestCoverage ? "Has test coverage" : "No test coverage" },
12291
+ { signal: "coupling", value: round2(sCoupling), weight: w.coupling, contribution: round2(w.coupling * sCoupling), detail: `Instability: ${round2(sCoupling)}` }
11871
12292
  ],
11872
12293
  mitigations,
11873
12294
  blast_radius: { files: blastFiles, symbols: blastSymbols }
@@ -11950,7 +12371,7 @@ function getCachedBugPredictions(store, limit, minScore, filePattern, ttlMs = 60
11950
12371
  predictions: rows.map((r) => ({
11951
12372
  file: r.file_path,
11952
12373
  score: r.score,
11953
- risk: riskLevel(r.score),
12374
+ risk: riskLevel2(r.score),
11954
12375
  factors: JSON.parse(r.factors || "[]")
11955
12376
  })),
11956
12377
  total_files_analyzed: snapshotFull?.file_count ?? rows.length,
@@ -12020,7 +12441,7 @@ function saveBugPredictionCache(store, predictions, cwd) {
12020
12441
  return null;
12021
12442
  }
12022
12443
  }
12023
- function round(v, decimals = 3) {
12444
+ function round2(v, decimals = 3) {
12024
12445
  const m = 10 ** decimals;
12025
12446
  return Math.round(v * m) / m;
12026
12447
  }
@@ -13855,7 +14276,7 @@ function searchText(store, projectRoot, opts) {
13855
14276
  filePattern,
13856
14277
  language,
13857
14278
  maxResults = 50,
13858
- contextLines = 2,
14279
+ contextLines = 0,
13859
14280
  caseSensitive = false
13860
14281
  } = opts;
13861
14282
  if (!query || query.length === 0) {
@@ -13878,7 +14299,7 @@ function searchText(store, projectRoot, opts) {
13878
14299
  files = store.db.prepare("SELECT * FROM files WHERE status != ?").all("error");
13879
14300
  }
13880
14301
  if (filePattern) {
13881
- const isMatch = (0, import_picomatch.default)(filePattern, { matchBase: true });
14302
+ const isMatch = (0, import_picomatch.default)(filePattern, { dot: true });
13882
14303
  files = files.filter((f) => isMatch(f.path));
13883
14304
  }
13884
14305
  const matches = [];
@@ -13992,7 +14413,7 @@ function buildGraphData(store, opts) {
13992
14413
  } else {
13993
14414
  const allFiles = store.getAllFiles();
13994
14415
  if (scope.includes("*")) {
13995
- const isMatch = (0, import_picomatch2.default)(scope, { matchBase: true });
14416
+ const isMatch = (0, import_picomatch2.default)(scope, { dot: true });
13996
14417
  seedFiles = allFiles.filter((f) => isMatch(f.path));
13997
14418
  } else if (scope.endsWith("/") || !scope.includes(".")) {
13998
14419
  seedFiles = allFiles.filter((f) => f.path.startsWith(scope.replace(/\/$/, "")));
@@ -15362,11 +15783,11 @@ function getCrossServiceImpact(topoStore, projectRoot, additionalRepos, opts) {
15362
15783
  });
15363
15784
  }
15364
15785
  }
15365
- const riskLevel2 = affected.length >= 3 ? "high" : affected.length >= 1 ? "medium" : "low";
15786
+ const riskLevel3 = affected.length >= 3 ? "high" : affected.length >= 1 ? "medium" : "low";
15366
15787
  return ok({
15367
15788
  target: { service: opts.service, endpoint: opts.endpoint, event: opts.event },
15368
15789
  affected_services: affected,
15369
- risk_level: riskLevel2
15790
+ risk_level: riskLevel3
15370
15791
  });
15371
15792
  }
15372
15793
  function getApiContract(topoStore, projectRoot, additionalRepos, opts) {
@@ -15960,7 +16381,7 @@ var FederationManager = class {
15960
16381
  }
15961
16382
  }
15962
16383
  const uniqueRepos = new Set(clients.map((c) => c.repo));
15963
- const riskLevel2 = uniqueRepos.size >= 3 ? "critical" : uniqueRepos.size >= 2 ? "high" : clients.length >= 3 ? "medium" : "low";
16384
+ const riskLevel3 = uniqueRepos.size >= 3 ? "critical" : uniqueRepos.size >= 2 ? "high" : clients.length >= 3 ? "medium" : "low";
15964
16385
  const svc = this.topoStore.getAllServices().find((s) => s.id === ep.service_id);
15965
16386
  const repo = svc ? this.topoStore.getFederatedRepo(svc.repo_root) : void 0;
15966
16387
  results.push({
@@ -15971,7 +16392,7 @@ var FederationManager = class {
15971
16392
  repo: repo?.name ?? svc?.repo_root ?? "unknown"
15972
16393
  },
15973
16394
  clients,
15974
- riskLevel: riskLevel2,
16395
+ riskLevel: riskLevel3,
15975
16396
  summary: `${ep.method ?? "*"} ${ep.path} is called by ${clients.length} client(s) in ${uniqueRepos.size} repo(s)`
15976
16397
  });
15977
16398
  }
@@ -16467,6 +16888,34 @@ var hintGenerators = {
16467
16888
  }
16468
16889
  }
16469
16890
  return hints;
16891
+ },
16892
+ batch(r) {
16893
+ const hints = [];
16894
+ const results = arr(dig(r, "batch_results"));
16895
+ if (results.length > 0) {
16896
+ hints.push({ tool: "get_session_stats", args: {}, why: "Check token savings from this batch vs individual calls" });
16897
+ }
16898
+ return hints;
16899
+ },
16900
+ get_optimization_report(r) {
16901
+ const hints = [];
16902
+ const opts = arr(dig(r, "optimizations"));
16903
+ const hasRepeated = opts.some((o) => str(o?.rule) === "repeated-file-read");
16904
+ if (hasRepeated) {
16905
+ hints.push({ tool: "get_outline", args: { path: "<frequently_read_file>" }, why: "Use get_outline + get_symbol instead of repeated full-file reads" });
16906
+ }
16907
+ hints.push({ tool: "get_real_savings", args: { period: "today" }, why: "See actual per-file token savings breakdown" });
16908
+ return hints;
16909
+ },
16910
+ get_real_savings(r) {
16911
+ const hints = [];
16912
+ hints.push({ tool: "get_session_stats", args: {}, why: "See per-tool call counts and savings for this session" });
16913
+ return hints;
16914
+ },
16915
+ get_session_stats(r) {
16916
+ const hints = [];
16917
+ hints.push({ tool: "get_optimization_report", args: { period: "today" }, why: "Find specific waste patterns to fix" });
16918
+ return hints;
16470
16919
  }
16471
16920
  };
16472
16921
  function getHints(toolName, result, max = 3) {
@@ -18942,14 +19391,14 @@ function generatePrTemplate(input, affected, risk) {
18942
19391
  }
18943
19392
 
18944
19393
  // src/tools/co-changes.ts
18945
- import { execSync } from "child_process";
19394
+ import { execSync as execSync2 } from "child_process";
18946
19395
  function collectCoChanges(rootPath, windowDays = 180) {
18947
19396
  const sinceDate = /* @__PURE__ */ new Date();
18948
19397
  sinceDate.setDate(sinceDate.getDate() - windowDays);
18949
19398
  const since = sinceDate.toISOString().split("T")[0];
18950
19399
  let gitOutput;
18951
19400
  try {
18952
- gitOutput = execSync(
19401
+ gitOutput = execSync2(
18953
19402
  `git log --name-only --pretty=format:"COMMIT:%H:%aI" --since="${since}" --diff-filter=AMRD`,
18954
19403
  { cwd: rootPath, maxBuffer: 50 * 1024 * 1024, encoding: "utf-8", timeout: 3e4 }
18955
19404
  );
@@ -19061,12 +19510,12 @@ function getCoChanges(store, opts) {
19061
19510
  }
19062
19511
 
19063
19512
  // src/tools/changed-symbols.ts
19064
- import { execSync as execSync2 } from "child_process";
19513
+ import { execSync as execSync3 } from "child_process";
19065
19514
  function getChangedSymbols(store, rootPath, opts) {
19066
19515
  const until = opts.until ?? "HEAD";
19067
19516
  let diffNameStatus;
19068
19517
  try {
19069
- diffNameStatus = execSync2(
19518
+ diffNameStatus = execSync3(
19070
19519
  `git diff --name-status --diff-filter=AMRD ${opts.since}..${until}`,
19071
19520
  { cwd: rootPath, encoding: "utf-8", maxBuffer: 10 * 1024 * 1024, timeout: 15e3 }
19072
19521
  ).trim();
@@ -19092,7 +19541,7 @@ function getChangedSymbols(store, rootPath, opts) {
19092
19541
  }
19093
19542
  let diffUnified = "";
19094
19543
  try {
19095
- diffUnified = execSync2(
19544
+ diffUnified = execSync3(
19096
19545
  `git diff --unified=0 ${opts.since}..${until}`,
19097
19546
  { cwd: rootPath, encoding: "utf-8", maxBuffer: 50 * 1024 * 1024, timeout: 3e4 }
19098
19547
  );
@@ -19213,14 +19662,14 @@ function compareBranches(store, rootPath, opts) {
19213
19662
  const branch = opts.branch;
19214
19663
  let mergeBase;
19215
19664
  try {
19216
- mergeBase = execSync2(
19665
+ mergeBase = execSync3(
19217
19666
  `git merge-base ${base} ${branch}`,
19218
19667
  { cwd: rootPath, encoding: "utf-8", timeout: 1e4 }
19219
19668
  ).trim();
19220
19669
  } catch (e) {
19221
19670
  if (base === "main") {
19222
19671
  try {
19223
- mergeBase = execSync2(
19672
+ mergeBase = execSync3(
19224
19673
  `git merge-base master ${branch}`,
19225
19674
  { cwd: rootPath, encoding: "utf-8", timeout: 1e4 }
19226
19675
  ).trim();
@@ -19233,7 +19682,7 @@ function compareBranches(store, rootPath, opts) {
19233
19682
  }
19234
19683
  let commitCount = 0;
19235
19684
  try {
19236
- const countOutput = execSync2(
19685
+ const countOutput = execSync3(
19237
19686
  `git rev-list --count ${mergeBase}..${branch}`,
19238
19687
  { cwd: rootPath, encoding: "utf-8", timeout: 1e4 }
19239
19688
  ).trim();
@@ -19291,6 +19740,7 @@ var SAVINGS_PATH = path33.join(TRACE_MCP_HOME, "savings.json");
19291
19740
  var RAW_COST_ESTIMATES = {
19292
19741
  get_symbol: 800,
19293
19742
  search: 600,
19743
+ search_text: 3e3,
19294
19744
  get_outline: 1200,
19295
19745
  get_change_impact: 2e3,
19296
19746
  get_feature_context: 4e3,
@@ -20684,6 +21134,40 @@ function benchmarkCallGraph(store, symbols, count, rand) {
20684
21134
  });
20685
21135
  return buildScenario("call_graph", "Trace call graph (baseline: read all caller/callee files)", details);
20686
21136
  }
21137
+ function benchmarkTaskContext(store, symbols, files, count, rand) {
21138
+ const sampled = sample(symbols.filter((s) => s.kind === "function" || s.kind === "class"), count, rand);
21139
+ const details = sampled.map((s) => {
21140
+ const nodeRow = store.db.prepare(
21141
+ "SELECT n.id FROM nodes n JOIN symbols sym ON n.ref_id = sym.id AND n.node_type = ? WHERE sym.symbol_id = ?"
21142
+ ).get("symbol", s.symbol_id);
21143
+ let relatedFileBytes = 0;
21144
+ let relatedFileCount = 0;
21145
+ if (nodeRow) {
21146
+ const related = store.db.prepare(`
21147
+ SELECT DISTINCT f.byte_length FROM (
21148
+ SELECT source_node_id AS nid FROM edges WHERE target_node_id = ?
21149
+ UNION
21150
+ SELECT target_node_id AS nid FROM edges WHERE source_node_id = ?
21151
+ ) r
21152
+ JOIN nodes n2 ON r.nid = n2.id AND n2.node_type = 'symbol'
21153
+ JOIN symbols s2 ON n2.ref_id = s2.id
21154
+ JOIN files f ON s2.file_id = f.id
21155
+ LIMIT 10
21156
+ `).all(nodeRow.id, nodeRow.id);
21157
+ relatedFileBytes = related.reduce((sum, d) => sum + (d.byte_length || 0), 0);
21158
+ relatedFileCount = related.length;
21159
+ }
21160
+ const targetFileBytes = s.file_byte_length;
21161
+ const additionalFiles = Math.min(relatedFileCount, 5);
21162
+ const avgRelatedSize = relatedFileCount > 0 ? relatedFileBytes / relatedFileCount : s.file_byte_length;
21163
+ const totalReadBytes = targetFileBytes + additionalFiles * avgRelatedSize;
21164
+ const grepOverhead = 2 * 5 * 20 * 80;
21165
+ const bl = estimateTokens2(totalReadBytes + grepOverhead);
21166
+ const tm = estimateTokens2(Math.round(totalReadBytes * 0.08));
21167
+ return { query: `task: understand ${s.name}`, file: s.file_path, baseline_tokens: bl, trace_mcp_tokens: tm, reduction_pct: reductionPct(bl, tm) };
21168
+ });
21169
+ return buildScenario("composite_task", "NL task \u2192 optimal code context (baseline: search + read 5-8 files + grep)", details);
21170
+ }
20687
21171
  function runBenchmark(store, opts = {}) {
20688
21172
  const n = opts.queries ?? 10;
20689
21173
  const rand = seededRandom(opts.seed ?? 42);
@@ -20694,7 +21178,8 @@ function runBenchmark(store, opts = {}) {
20694
21178
  benchmarkFileExploration(files, n, rand),
20695
21179
  benchmarkSearch(symbols, n, rand),
20696
21180
  benchmarkImpactAnalysis(store, symbols, n, rand),
20697
- benchmarkCallGraph(store, symbols, n, rand)
21181
+ benchmarkCallGraph(store, symbols, n, rand),
21182
+ benchmarkTaskContext(store, symbols, files, n, rand)
20698
21183
  ];
20699
21184
  const totalQueries = scenarios.reduce((s, sc) => s + sc.queries, 0);
20700
21185
  const totalBaseline = scenarios.reduce((s, sc) => s + sc.baseline_tokens, 0);
@@ -21712,8 +22197,8 @@ function registerPrompts(server, ctx) {
21712
22197
  `);
21713
22198
  let changedFiles = [];
21714
22199
  try {
21715
- const { execSync: execSync3 } = await import("child_process");
21716
- const diff = execSync3(`git diff --name-only ${baseRef}...${branch}`, {
22200
+ const { execSync: execSync4 } = await import("child_process");
22201
+ const diff = execSync4(`git diff --name-only ${baseRef}...${branch}`, {
21717
22202
  cwd: projectRoot,
21718
22203
  encoding: "utf-8",
21719
22204
  timeout: 1e4
@@ -21934,8 +22419,8 @@ Analyze this project's architecture health. Identify the most critical issues an
21934
22419
  `);
21935
22420
  let changedFiles = [];
21936
22421
  try {
21937
- const { execSync: execSync3 } = await import("child_process");
21938
- const diff = execSync3(`git diff --name-only ${baseRef}...${branch}`, {
22422
+ const { execSync: execSync4 } = await import("child_process");
22423
+ const diff = execSync4(`git diff --name-only ${baseRef}...${branch}`, {
21939
22424
  cwd: projectRoot,
21940
22425
  encoding: "utf-8",
21941
22426
  timeout: 1e4
@@ -23072,7 +23557,7 @@ var FileWatcher = class {
23072
23557
  var require2 = createRequire(import.meta.url);
23073
23558
  var { version: PKG_VERSION } = require2("../package.json");
23074
23559
  function j2(value) {
23075
- return JSON.stringify(value);
23560
+ return JSON.stringify(value, (_key, val) => val === null || val === void 0 ? void 0 : val);
23076
23561
  }
23077
23562
  function extractResultCount(response) {
23078
23563
  if (response?.isError) return 0;
@@ -23149,6 +23634,15 @@ function createServer2(store, registry, config, rootPath) {
23149
23634
  "- State stores \u2192 `get_state_stores` (Zustand/Redux/Pinia)",
23150
23635
  "- Event graph \u2192 `get_event_graph` (event emitters/listeners)",
23151
23636
  "",
23637
+ "Token optimization (IMPORTANT \u2014 saves 40-85% tokens):",
23638
+ "- **Batch multiple queries** \u2192 `batch` combines up to 10 tool calls into 1 MCP request. Use whenever you need 2+ independent queries:",
23639
+ ' `batch({ calls: [{ tool: "get_outline", args: { path: "a.ts" } }, { tool: "get_outline", args: { path: "b.ts" } }] })`',
23640
+ "- **Bundle symbol + imports** \u2192 `get_context_bundle` returns a symbol's source + its import dependencies in one call (supports batch via `symbol_ids[]`)",
23641
+ "- **Avoid repeated file reads** \u2192 use `get_outline` once to understand structure, then `get_symbol` for specific symbols. NEVER read the same file multiple times.",
23642
+ "- **Use `get_task_context` instead of Agent subagents** \u2192 it returns focused context within a token budget, replacing manual search chains",
23643
+ "- Check token waste \u2192 `get_optimization_report` detects repeated reads, Bash grep, and unused trace-mcp tools",
23644
+ "- Track savings \u2192 `get_session_stats` shows per-tool token savings; `get_real_savings` shows actual vs achievable token usage",
23645
+ "",
23152
23646
  "WHEN TO USE native tools (Read/Grep/Glob):",
23153
23647
  "- Non-code files (.md, .json, .yaml, .toml, config) \u2192 Read/Grep",
23154
23648
  "- Reading a file before editing (Edit needs full content) \u2192 Read",
@@ -23171,6 +23665,7 @@ function createServer2(store, registry, config, rootPath) {
23171
23665
  }
23172
23666
  const _originalTool = server.tool.bind(server);
23173
23667
  const registeredToolNames = [];
23668
+ const toolHandlers = /* @__PURE__ */ new Map();
23174
23669
  const descriptionOverrides = config.tools?.descriptions ?? {};
23175
23670
  const sharedParamOverrides = typeof descriptionOverrides._shared === "object" && descriptionOverrides._shared !== null ? descriptionOverrides._shared : {};
23176
23671
  server.tool = ((...args) => {
@@ -23202,6 +23697,9 @@ function createServer2(store, registry, config, rootPath) {
23202
23697
  const cbIdx = args.length - 1;
23203
23698
  const originalCb = args[cbIdx];
23204
23699
  if (typeof originalCb === "function") {
23700
+ toolHandlers.set(name, async (params) => {
23701
+ return await originalCb(params);
23702
+ });
23205
23703
  args[cbIdx] = async (...cbArgs) => {
23206
23704
  savings.recordCall(name);
23207
23705
  const params = cbArgs[0] && typeof cbArgs[0] === "object" ? cbArgs[0] : {};
@@ -23238,10 +23736,12 @@ function createServer2(store, registry, config, rootPath) {
23238
23736
  process.on("SIGINT", flushSavings);
23239
23737
  process.on("SIGTERM", flushSavings);
23240
23738
  process.on("exit", flushSavings);
23739
+ let budgetWarningShown = false;
23241
23740
  function jh(toolName, value) {
23242
23741
  const hinted = withHints(toolName, value);
23243
23742
  const stats = savings.getSessionStats();
23244
- if (stats.total_calls >= 15) {
23743
+ if (stats.total_calls >= 15 && !budgetWarningShown) {
23744
+ budgetWarningShown = true;
23245
23745
  const obj = hinted !== null && typeof hinted === "object" && !Array.isArray(hinted) ? hinted : { data: hinted };
23246
23746
  obj._budget_warning = `${stats.total_calls} tool calls this session (~${stats.total_raw_tokens} raw tokens). Consider using get_task_context or get_feature_context for consolidated context instead of many small queries.`;
23247
23747
  return j2(obj);
@@ -23453,13 +23953,27 @@ function createServer2(store, registry, config, rootPath) {
23453
23953
  }));
23454
23954
  const response = { items, total: result.total, search_mode: result.search_mode };
23455
23955
  if (items.length === 0) {
23456
- const stats = store.getStats();
23457
- response.evidence = buildNegativeEvidence(
23458
- stats.totalFiles,
23459
- stats.totalSymbols,
23460
- result.search_mode === "fuzzy" || !!fuzzy,
23461
- "search"
23462
- );
23956
+ const textResult = searchText(store, projectRoot, {
23957
+ query,
23958
+ filePattern: file_pattern,
23959
+ language,
23960
+ maxResults: Math.min(limit ?? 20, 10),
23961
+ contextLines: 1
23962
+ });
23963
+ if (textResult.isOk() && textResult.value.matches.length > 0) {
23964
+ const tv = textResult.value;
23965
+ response.fallback_text_matches = tv.matches;
23966
+ response.fallback_total = tv.total_matches;
23967
+ response.search_mode = "symbol_miss_text_fallback";
23968
+ } else {
23969
+ const stats = store.getStats();
23970
+ response.evidence = buildNegativeEvidence(
23971
+ stats.totalFiles,
23972
+ stats.totalSymbols,
23973
+ result.search_mode === "fuzzy" || !!fuzzy,
23974
+ "search"
23975
+ );
23976
+ }
23463
23977
  }
23464
23978
  return { content: [{ type: "text", text: jh("search", response) }] };
23465
23979
  }
@@ -23482,23 +23996,25 @@ function createServer2(store, registry, config, rootPath) {
23482
23996
  );
23483
23997
  server.tool(
23484
23998
  "get_change_impact",
23485
- "Determine what depends on a file or symbol (reverse dependency analysis). Use before making changes to understand blast radius.",
23999
+ "Full change impact report: risk score + mitigations, breaking change detection, enriched dependents (complexity, coverage, exports), module groups, affected tests, co-change hidden couplings. Supports diff-aware mode via symbol_ids to scope analysis to only changed symbols.",
23486
24000
  {
23487
24001
  file_path: z4.string().max(512).optional().describe("Relative file path to analyze"),
23488
24002
  symbol_id: z4.string().max(512).optional().describe("Symbol ID to analyze"),
24003
+ symbol_ids: z4.array(z4.string().max(512)).max(50).optional().describe("Diff-aware: only analyze impact of these specific symbols (e.g. from get_changed_symbols)"),
23489
24004
  depth: z4.number().int().min(1).max(20).optional().describe("Max traversal depth (default 3)"),
23490
24005
  max_dependents: z4.number().int().min(1).max(5e3).optional().describe("Cap on returned dependents (default 200)")
23491
24006
  },
23492
- async ({ file_path, symbol_id, depth, max_dependents }) => {
24007
+ async ({ file_path, symbol_id, symbol_ids, depth, max_dependents }) => {
23493
24008
  if (file_path) {
23494
24009
  const blocked = guardPath(file_path);
23495
24010
  if (blocked) return blocked;
23496
24011
  }
23497
24012
  const result = getChangeImpact(
23498
24013
  store,
23499
- { filePath: file_path, symbolId: symbol_id },
24014
+ { filePath: file_path, symbolId: symbol_id, symbolIds: symbol_ids },
23500
24015
  depth ?? 3,
23501
- max_dependents ?? 200
24016
+ max_dependents ?? 200,
24017
+ projectRoot
23502
24018
  );
23503
24019
  if (result.isErr()) {
23504
24020
  return { content: [{ type: "text", text: j2(formatToolError(result.error)) }], isError: true };
@@ -24830,7 +25346,7 @@ function createServer2(store, registry, config, rootPath) {
24830
25346
  file_pattern: z4.string().max(512).optional().describe('Glob filter, e.g. "src/**/*.ts"'),
24831
25347
  language: z4.string().max(64).optional().describe('Filter by language (e.g. "typescript", "python")'),
24832
25348
  max_results: z4.number().int().min(1).max(200).optional().describe("Max matches to return (default 50)"),
24833
- context_lines: z4.number().int().min(0).max(10).optional().describe("Lines of context before/after each match (default 2)"),
25349
+ context_lines: z4.number().int().min(0).max(10).optional().describe("Lines of context before/after each match (default 0 \u2014 set higher if you need surrounding code)"),
24834
25350
  case_sensitive: z4.boolean().optional().describe("Case-sensitive search (default false)")
24835
25351
  },
24836
25352
  async ({ query, is_regex, file_pattern, language, max_results, context_lines, case_sensitive }) => {
@@ -25436,6 +25952,43 @@ function createServer2(store, registry, config, rootPath) {
25436
25952
  return { content: [{ type: "text", text: j2(summary) }] };
25437
25953
  }
25438
25954
  );
25955
+ _originalTool(
25956
+ "batch",
25957
+ "Execute multiple trace-mcp tools in a single MCP request. Returns results for all calls. Use to reduce round-trips when you need several independent queries (e.g., get_outline for 3 files, or search + get_symbol together).",
25958
+ {
25959
+ calls: z4.array(z4.object({
25960
+ tool: z4.string().describe('Tool name (e.g., "get_outline", "get_symbol", "search")'),
25961
+ args: z4.record(z4.unknown()).describe("Tool arguments")
25962
+ })).min(1).max(10).describe("Array of tool calls to execute (max 10)")
25963
+ },
25964
+ async ({ calls }) => {
25965
+ const results = [];
25966
+ for (const call of calls) {
25967
+ const handler = toolHandlers.get(call.tool);
25968
+ if (!handler) {
25969
+ results.push({ tool: call.tool, error: `Unknown tool: ${call.tool}` });
25970
+ continue;
25971
+ }
25972
+ try {
25973
+ savings.recordCall(call.tool);
25974
+ const response = await handler(call.args);
25975
+ const text = response.content?.[0]?.text;
25976
+ if (text) {
25977
+ try {
25978
+ results.push({ tool: call.tool, result: JSON.parse(text) });
25979
+ } catch {
25980
+ results.push({ tool: call.tool, result: text });
25981
+ }
25982
+ } else {
25983
+ results.push({ tool: call.tool, result: response });
25984
+ }
25985
+ } catch (e) {
25986
+ results.push({ tool: call.tool, error: e instanceof Error ? e.message : String(e) });
25987
+ }
25988
+ }
25989
+ return { content: [{ type: "text", text: j2({ batch_results: results, total: results.length }) }] };
25990
+ }
25991
+ );
25439
25992
  registerPrompts(server, { store, registry, config, projectRoot });
25440
25993
  return server;
25441
25994
  }