hanzo-mcp 0.9.0__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hanzo-mcp might be problematic. Click here for more details.

Files changed (135) hide show
  1. hanzo_mcp/__init__.py +1 -1
  2. hanzo_mcp/analytics/posthog_analytics.py +14 -1
  3. hanzo_mcp/cli.py +108 -4
  4. hanzo_mcp/server.py +11 -0
  5. hanzo_mcp/tools/__init__.py +3 -16
  6. hanzo_mcp/tools/agent/__init__.py +5 -0
  7. hanzo_mcp/tools/agent/agent.py +5 -0
  8. hanzo_mcp/tools/agent/agent_tool.py +3 -17
  9. hanzo_mcp/tools/agent/agent_tool_v1_deprecated.py +623 -0
  10. hanzo_mcp/tools/agent/clarification_tool.py +7 -1
  11. hanzo_mcp/tools/agent/claude_desktop_auth.py +16 -6
  12. hanzo_mcp/tools/agent/cli_agent_base.py +5 -0
  13. hanzo_mcp/tools/agent/cli_tools.py +26 -0
  14. hanzo_mcp/tools/agent/code_auth_tool.py +5 -0
  15. hanzo_mcp/tools/agent/critic_tool.py +7 -1
  16. hanzo_mcp/tools/agent/iching_tool.py +5 -0
  17. hanzo_mcp/tools/agent/network_tool.py +5 -0
  18. hanzo_mcp/tools/agent/review_tool.py +7 -1
  19. hanzo_mcp/tools/agent/swarm_alias.py +5 -0
  20. hanzo_mcp/tools/agent/swarm_tool.py +701 -0
  21. hanzo_mcp/tools/agent/swarm_tool_v1_deprecated.py +554 -0
  22. hanzo_mcp/tools/agent/unified_cli_tools.py +5 -0
  23. hanzo_mcp/tools/common/auto_timeout.py +234 -0
  24. hanzo_mcp/tools/common/base.py +4 -0
  25. hanzo_mcp/tools/common/batch_tool.py +5 -0
  26. hanzo_mcp/tools/common/config_tool.py +5 -0
  27. hanzo_mcp/tools/common/critic_tool.py +5 -0
  28. hanzo_mcp/tools/common/paginated_base.py +4 -0
  29. hanzo_mcp/tools/common/permissions.py +38 -12
  30. hanzo_mcp/tools/common/personality.py +673 -980
  31. hanzo_mcp/tools/common/stats.py +5 -0
  32. hanzo_mcp/tools/common/thinking_tool.py +5 -0
  33. hanzo_mcp/tools/common/timeout_parser.py +103 -0
  34. hanzo_mcp/tools/common/tool_disable.py +5 -0
  35. hanzo_mcp/tools/common/tool_enable.py +5 -0
  36. hanzo_mcp/tools/common/tool_list.py +5 -0
  37. hanzo_mcp/tools/config/config_tool.py +5 -0
  38. hanzo_mcp/tools/config/mode_tool.py +5 -0
  39. hanzo_mcp/tools/database/graph.py +5 -0
  40. hanzo_mcp/tools/database/graph_add.py +5 -0
  41. hanzo_mcp/tools/database/graph_query.py +5 -0
  42. hanzo_mcp/tools/database/graph_remove.py +5 -0
  43. hanzo_mcp/tools/database/graph_search.py +5 -0
  44. hanzo_mcp/tools/database/graph_stats.py +5 -0
  45. hanzo_mcp/tools/database/sql.py +5 -0
  46. hanzo_mcp/tools/database/sql_query.py +2 -0
  47. hanzo_mcp/tools/database/sql_search.py +5 -0
  48. hanzo_mcp/tools/database/sql_stats.py +5 -0
  49. hanzo_mcp/tools/editor/neovim_command.py +5 -0
  50. hanzo_mcp/tools/editor/neovim_edit.py +7 -2
  51. hanzo_mcp/tools/editor/neovim_session.py +5 -0
  52. hanzo_mcp/tools/filesystem/__init__.py +23 -26
  53. hanzo_mcp/tools/filesystem/ast_tool.py +2 -3
  54. hanzo_mcp/tools/filesystem/base.py +0 -16
  55. hanzo_mcp/tools/filesystem/batch_search.py +825 -0
  56. hanzo_mcp/tools/filesystem/content_replace.py +5 -3
  57. hanzo_mcp/tools/filesystem/diff.py +5 -0
  58. hanzo_mcp/tools/filesystem/directory_tree.py +34 -281
  59. hanzo_mcp/tools/filesystem/directory_tree_paginated.py +345 -0
  60. hanzo_mcp/tools/filesystem/edit.py +5 -4
  61. hanzo_mcp/tools/filesystem/find.py +177 -311
  62. hanzo_mcp/tools/filesystem/find_files.py +370 -0
  63. hanzo_mcp/tools/filesystem/git_search.py +5 -3
  64. hanzo_mcp/tools/filesystem/grep.py +454 -0
  65. hanzo_mcp/tools/filesystem/multi_edit.py +5 -4
  66. hanzo_mcp/tools/filesystem/read.py +11 -8
  67. hanzo_mcp/tools/filesystem/rules_tool.py +5 -3
  68. hanzo_mcp/tools/filesystem/search_tool.py +728 -0
  69. hanzo_mcp/tools/filesystem/symbols_tool.py +510 -0
  70. hanzo_mcp/tools/filesystem/tree.py +273 -0
  71. hanzo_mcp/tools/filesystem/watch.py +6 -1
  72. hanzo_mcp/tools/filesystem/write.py +12 -6
  73. hanzo_mcp/tools/jupyter/jupyter.py +30 -2
  74. hanzo_mcp/tools/jupyter/notebook_edit.py +298 -0
  75. hanzo_mcp/tools/jupyter/notebook_read.py +148 -0
  76. hanzo_mcp/tools/llm/consensus_tool.py +8 -6
  77. hanzo_mcp/tools/llm/llm_manage.py +5 -0
  78. hanzo_mcp/tools/llm/llm_tool.py +2 -0
  79. hanzo_mcp/tools/llm/llm_unified.py +5 -0
  80. hanzo_mcp/tools/llm/provider_tools.py +5 -0
  81. hanzo_mcp/tools/lsp/lsp_tool.py +475 -622
  82. hanzo_mcp/tools/mcp/mcp_add.py +7 -2
  83. hanzo_mcp/tools/mcp/mcp_remove.py +15 -2
  84. hanzo_mcp/tools/mcp/mcp_stats.py +5 -0
  85. hanzo_mcp/tools/mcp/mcp_tool.py +5 -0
  86. hanzo_mcp/tools/memory/knowledge_tools.py +14 -0
  87. hanzo_mcp/tools/memory/memory_tools.py +17 -0
  88. hanzo_mcp/tools/search/find_tool.py +5 -3
  89. hanzo_mcp/tools/search/unified_search.py +3 -1
  90. hanzo_mcp/tools/shell/__init__.py +2 -14
  91. hanzo_mcp/tools/shell/base_process.py +4 -2
  92. hanzo_mcp/tools/shell/bash_tool.py +2 -0
  93. hanzo_mcp/tools/shell/command_executor.py +7 -7
  94. hanzo_mcp/tools/shell/logs.py +5 -0
  95. hanzo_mcp/tools/shell/npx.py +5 -0
  96. hanzo_mcp/tools/shell/npx_background.py +5 -0
  97. hanzo_mcp/tools/shell/npx_tool.py +5 -0
  98. hanzo_mcp/tools/shell/open.py +5 -0
  99. hanzo_mcp/tools/shell/pkill.py +5 -0
  100. hanzo_mcp/tools/shell/process_tool.py +5 -0
  101. hanzo_mcp/tools/shell/processes.py +5 -0
  102. hanzo_mcp/tools/shell/run_background.py +5 -0
  103. hanzo_mcp/tools/shell/run_command.py +2 -0
  104. hanzo_mcp/tools/shell/run_command_windows.py +5 -0
  105. hanzo_mcp/tools/shell/streaming_command.py +5 -0
  106. hanzo_mcp/tools/shell/uvx.py +5 -0
  107. hanzo_mcp/tools/shell/uvx_background.py +5 -0
  108. hanzo_mcp/tools/shell/uvx_tool.py +5 -0
  109. hanzo_mcp/tools/shell/zsh_tool.py +3 -0
  110. hanzo_mcp/tools/todo/todo.py +5 -0
  111. hanzo_mcp/tools/todo/todo_read.py +142 -0
  112. hanzo_mcp/tools/todo/todo_write.py +367 -0
  113. hanzo_mcp/tools/vector/__init__.py +42 -95
  114. hanzo_mcp/tools/vector/index_tool.py +5 -0
  115. hanzo_mcp/tools/vector/vector.py +5 -0
  116. hanzo_mcp/tools/vector/vector_index.py +5 -0
  117. hanzo_mcp/tools/vector/vector_search.py +5 -0
  118. {hanzo_mcp-0.9.0.dist-info → hanzo_mcp-0.9.1.dist-info}/METADATA +1 -1
  119. hanzo_mcp-0.9.1.dist-info/RECORD +195 -0
  120. hanzo_mcp/tools/common/path_utils.py +0 -34
  121. hanzo_mcp/tools/compiler/__init__.py +0 -8
  122. hanzo_mcp/tools/compiler/sandboxed_compiler.py +0 -681
  123. hanzo_mcp/tools/environment/__init__.py +0 -8
  124. hanzo_mcp/tools/environment/environment_detector.py +0 -594
  125. hanzo_mcp/tools/filesystem/search.py +0 -1160
  126. hanzo_mcp/tools/framework/__init__.py +0 -8
  127. hanzo_mcp/tools/framework/framework_modes.py +0 -714
  128. hanzo_mcp/tools/memory/conversation_memory.py +0 -636
  129. hanzo_mcp/tools/shell/run_tool.py +0 -56
  130. hanzo_mcp/tools/vector/node_tool.py +0 -538
  131. hanzo_mcp/tools/vector/unified_vector.py +0 -384
  132. hanzo_mcp-0.9.0.dist-info/RECORD +0 -191
  133. {hanzo_mcp-0.9.0.dist-info → hanzo_mcp-0.9.1.dist-info}/WHEEL +0 -0
  134. {hanzo_mcp-0.9.0.dist-info → hanzo_mcp-0.9.1.dist-info}/entry_points.txt +0 -0
  135. {hanzo_mcp-0.9.0.dist-info → hanzo_mcp-0.9.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,825 @@
1
+ """Batch search tool that runs multiple search queries in parallel.
2
+
3
+ This tool allows running multiple searches of different types concurrently:
4
+ - grep: Fast pattern/regex search
5
+ - grep_ast: AST-aware code search
6
+ - vector_search: Semantic similarity search
7
+ - git_search: Search through git history
8
+
9
+ Results are combined and ranked for comprehensive search coverage.
10
+ Perfect for complex research and refactoring tasks where you need
11
+ to find all occurrences across different dimensions.
12
+ """
13
+
14
+ import re
15
+ import asyncio
16
+ from enum import Enum
17
+ from typing import Any, Dict, List, Tuple, Optional
18
+ from pathlib import Path
19
+ from dataclasses import asdict, dataclass
20
+ from typing_extensions import Unpack, Annotated, TypedDict, final, override
21
+
22
+ from pydantic import Field
23
+ from mcp.server import FastMCP
24
+
25
+ from hanzo_mcp.tools.common.auto_timeout import auto_timeout
26
+ from mcp.server.fastmcp import Context as MCPContext
27
+
28
+ from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
29
+ from hanzo_mcp.tools.filesystem.grep import Grep
30
+ from hanzo_mcp.tools.common.permissions import PermissionManager
31
+ from hanzo_mcp.tools.filesystem.ast_tool import ASTTool
32
+ from hanzo_mcp.tools.vector.ast_analyzer import Symbol, ASTAnalyzer
33
+ from hanzo_mcp.tools.vector.vector_search import VectorSearchTool
34
+ from hanzo_mcp.tools.filesystem.git_search import GitSearchTool
35
+ from hanzo_mcp.tools.vector.project_manager import ProjectVectorManager
36
+
37
+
38
class SearchType(Enum):
    """Kinds of search backends a batch query can target."""

    GREP = "grep"      # exact text / regex matching
    VECTOR = "vector"  # semantic similarity search
    AST = "ast"        # structure-aware source search
    SYMBOL = "symbol"  # declared function/class symbol names
    GIT = "git"        # repository history search
47
+
48
@dataclass
class SearchResult:
    """A single hit produced by one of the batch search backends."""

    file_path: str
    line_number: Optional[int]
    content: str
    search_type: SearchType
    score: float  # Relevance score (0-1)
    context: Optional[str] = None  # AST/function context
    symbol_info: Optional[Symbol] = None
    project: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        payload = asdict(self)
        # Enum members are not JSON-friendly; store the plain string value.
        payload["search_type"] = self.search_type.value
        if self.symbol_info:
            payload["symbol_info"] = asdict(self.symbol_info)
        return payload
68
+
69
+
70
@dataclass
class BatchSearchResults:
    """Container for all batch search results."""

    query: str
    total_results: int
    results_by_type: Dict[SearchType, List[SearchResult]]
    combined_results: List[SearchResult]
    search_time_ms: float

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        by_type = {
            search_type.value: [item.to_dict() for item in items]
            for search_type, items in self.results_by_type.items()
        }
        return {
            "query": self.query,
            "total_results": self.total_results,
            "results_by_type": by_type,
            "combined_results": [item.to_dict() for item in self.combined_results],
            "search_time_ms": self.search_time_ms,
        }
89
+
90
+
91
# Annotated parameter aliases shared by BatchSearchParams and the
# register() wrapper; the pydantic Field carries the description and
# default shown to MCP clients.
Queries = Annotated[
    List[Dict[str, Any]],
    Field(description="List of search queries with types", min_length=1),
]
SearchPath = Annotated[str, Field(description="Path to search in", default=".")]
Include = Annotated[str, Field(description="File pattern to include", default="*")]
MaxResults = Annotated[int, Field(description="Maximum results per query", default=20)]
IncludeContext = Annotated[bool, Field(description="Include function/method context", default=True)]
CombineResults = Annotated[bool, Field(description="Combine and deduplicate results", default=True)]
100
+
101
+
102
class BatchSearchParams(TypedDict):
    """Parameters for batch search.

    queries format: [
        {"type": "grep", "pattern": "TODO"},
        {"type": "vector_search", "query": "error handling"},
        {"type": "grep_ast", "pattern": "def.*test"},
        {"type": "git_search", "pattern": "bug fix", "search_type": "commits"}
    ]
    """

    # NOTE(review): every key is declared required here, yet call() reads
    # all keys except "queries" via params.get(...) with a default —
    # consider total=False / NotRequired for the optional keys.
    queries: Queries
    path: SearchPath
    include: Include
    max_results: MaxResults
    include_context: IncludeContext
    combine_results: CombineResults
119
+
120
+
121
@final
class BatchSearchTool(FilesystemBaseTool):
    """Search tool combining multiple search strategies.

    Wraps the grep, AST, symbol, vector, and git search tools and exposes
    a single ``batch_search`` MCP tool that runs several queries
    concurrently (``asyncio.gather``) and merges the ranked results.
    """

    def __init__(
        self,
        permission_manager: PermissionManager,
        project_manager: Optional[ProjectVectorManager] = None,
    ):
        """Initialize the search tool.

        Args:
            permission_manager: Controls which paths may be searched.
            project_manager: Optional vector-store manager; when omitted,
                vector_search queries are reported as unavailable.
        """
        super().__init__(permission_manager)
        self.project_manager = project_manager

        # Initialize component search tools
        self.grep_tool = Grep(permission_manager)
        self.grep_ast_tool = ASTTool(permission_manager)
        self.git_search_tool = GitSearchTool(permission_manager)
        self.ast_analyzer = ASTAnalyzer()

        # Vector search is optional
        self.vector_tool = None
        if project_manager:
            self.vector_tool = VectorSearchTool(permission_manager, project_manager)

        # Caches keyed by file path so repeated queries in one batch do not
        # re-run AST analysis on the same file.
        self._ast_cache: Dict[str, Any] = {}
        self._symbol_cache: Dict[str, List[Symbol]] = {}

    @property
    @override
    def name(self) -> str:
        """Get the tool name."""
        return "batch_search"

    @property
    @override
    def description(self) -> str:
        """Get the tool description."""
        return """Run multiple search queries in parallel across different search types.

Supports running concurrent searches:
- Multiple grep patterns
- Multiple vector queries
- Multiple AST searches
- Combined with git history search

Examples:
- Search for 'config' in code + 'configuration' in docs + 'CONFIG' in constants
- Find all references to a function across code, comments, and git history
- Search for concept across different naming conventions

Results are intelligently combined, deduplicated, and ranked by relevance.
Perfect for comprehensive code analysis and refactoring tasks."""

    def _detect_search_intent(self, pattern: str) -> Tuple[bool, bool, bool]:
        """Analyze pattern to determine which search types to enable.

        NOTE(review): currently not called by ``call()``; kept as a public
        helper for callers that want automatic query-type selection.

        Returns:
            Tuple of (should_use_vector, should_use_ast, should_use_symbol)
        """
        # Default to all enabled
        use_vector = True
        use_ast = True
        use_symbol = True

        # If pattern looks like regex, focus on text search
        regex_indicators = [
            ".*",
            "\\w",
            "\\d",
            "\\s",
            "[",
            "]",
            "(",
            ")",
            "|",
            "^",
            "$",
        ]
        if any(indicator in pattern for indicator in regex_indicators):
            use_vector = False  # Regex patterns don't work well with vector search

        # If pattern looks like a function/class name, prioritize symbol search
        if re.match(r"^[a-zA-Z_][a-zA-Z0-9_]*$", pattern):
            use_symbol = True
            use_ast = True

        # If pattern contains natural language, prioritize vector search
        words = pattern.split()
        if len(words) > 2 and not any(c in pattern for c in ["(", ")", "{", "}", "[", "]"]):
            use_vector = True

        return use_vector, use_ast, use_symbol

    async def _run_grep_search(
        self, pattern: str, path: str, include: str, tool_ctx, max_results: int
    ) -> List[SearchResult]:
        """Run grep search and convert its text output into SearchResults."""
        await tool_ctx.info(f"Running grep search for: {pattern}")

        try:
            # Use the existing grep tool
            grep_result = await self.grep_tool.call(tool_ctx.mcp_context, pattern=pattern, path=path, include=include)

            results = []
            if "Found" in grep_result and "matches" in grep_result:
                # Parse grep results (expected shape: "file:line:content")
                lines = grep_result.split("\n")
                for line in lines[2:]:  # Skip header lines
                    if ":" in line and len(line.strip()) > 0:
                        try:
                            parts = line.split(":", 2)
                            if len(parts) >= 3:
                                file_path = parts[0]
                                line_num = int(parts[1])
                                content = parts[2].strip()

                                result = SearchResult(
                                    file_path=file_path,
                                    line_number=line_num,
                                    content=content,
                                    search_type=SearchType.GREP,
                                    score=1.0,  # Grep results are exact matches
                                )
                                results.append(result)

                                if len(results) >= max_results:
                                    break
                        except (ValueError, IndexError):
                            continue

            await tool_ctx.info(f"Grep search found {len(results)} results")
            return results

        except Exception as e:
            await tool_ctx.error(f"Grep search failed: {str(e)}")
            return []

    async def _run_vector_search(self, pattern: str, path: str, tool_ctx, max_results: int) -> List[SearchResult]:
        """Run vector search and convert results."""
        if not self.vector_tool:
            return []

        await tool_ctx.info(f"Running vector search for: {pattern}")

        try:
            # Determine search scope based on path
            if path == ".":
                search_scope = "current"
            else:
                search_scope = "all"  # Could be enhanced to detect project

            vector_result = await self.vector_tool.call(
                tool_ctx.mcp_context,
                query=pattern,
                limit=max_results,
                score_threshold=0.3,
                search_scope=search_scope,
                include_content=True,
            )

            results = []
            # Parse vector search results - this would need to be enhanced
            # based on the actual format returned by vector_tool
            if "Found" in vector_result:
                # This is a simplified parser - would need to match actual format
                lines = vector_result.split("\n")
                current_file = None
                current_score = 0.0

                for line in lines:
                    if "Result" in line and "Score:" in line:
                        # Extract score
                        score_match = re.search(r"Score: ([\d.]+)%", line)
                        if score_match:
                            current_score = float(score_match.group(1)) / 100.0

                        # Extract file path
                        if " - " in line:
                            parts = line.split(" - ")
                            if len(parts) > 1:
                                current_file = parts[-1].strip()

                    elif current_file and line.strip() and not line.startswith("-"):
                        # This is content
                        result = SearchResult(
                            file_path=current_file,
                            line_number=None,
                            content=line.strip(),
                            search_type=SearchType.VECTOR,
                            score=current_score,
                        )
                        results.append(result)

                        if len(results) >= max_results:
                            break

            await tool_ctx.info(f"Vector search found {len(results)} results")
            return results

        except Exception as e:
            await tool_ctx.error(f"Vector search failed: {str(e)}")
            return []

    async def _run_ast_search(
        self, pattern: str, path: str, include: str, tool_ctx, max_results: int
    ) -> List[SearchResult]:
        """Run AST-aware search and convert results."""
        await tool_ctx.info(f"Running AST search for: {pattern}")

        try:
            ast_result = await self.grep_ast_tool.call(
                tool_ctx.mcp_context,
                pattern=pattern,
                path=path,
                ignore_case=False,
                line_number=True,
            )

            results = []
            if ast_result and not ast_result.startswith("No matches"):
                # Parse AST results - they include structural context
                current_file = None
                context_lines = []

                for line in ast_result.split("\n"):
                    if line.endswith(":") and "/" in line:
                        # This is a file header
                        current_file = line[:-1]
                        context_lines = []
                    elif current_file and line.strip():
                        if ":" in line and line.strip()[0].isdigit():
                            # This looks like a line with number
                            try:
                                parts = line.split(":", 1)
                                line_num = int(parts[0].strip())
                                content = parts[1].strip() if len(parts) > 1 else ""

                                result = SearchResult(
                                    file_path=current_file,
                                    line_number=line_num,
                                    content=content,
                                    search_type=SearchType.AST,
                                    score=0.9,  # High score for AST matches
                                    context=("\n".join(context_lines) if context_lines else None),
                                )
                                results.append(result)

                                if len(results) >= max_results:
                                    break

                            except ValueError:
                                context_lines.append(line)
                        else:
                            context_lines.append(line)

            await tool_ctx.info(f"AST search found {len(results)} results")
            return results

        except Exception as e:
            await tool_ctx.error(f"AST search failed: {str(e)}")
            return []

    async def _run_symbol_search(self, pattern: str, path: str, tool_ctx, max_results: int) -> List[SearchResult]:
        """Run symbol search using AST analysis."""
        await tool_ctx.info(f"Running symbol search for: {pattern}")

        try:
            results = []
            path_obj = Path(path)

            # Find files to analyze
            files_to_check = []
            if path_obj.is_file():
                files_to_check.append(str(path_obj))
            elif path_obj.is_dir():
                # Look for source files
                for ext in [".py", ".js", ".ts", ".java", ".cpp", ".c"]:
                    files_to_check.extend(path_obj.rglob(f"*{ext}"))
                files_to_check = [str(f) for f in files_to_check[:50]]  # Limit for performance

            # Analyze files for symbols
            for file_path in files_to_check:
                if not self.is_path_allowed(file_path):
                    continue

                # Check cache first
                if file_path in self._symbol_cache:
                    symbols = self._symbol_cache[file_path]
                else:
                    # Analyze file
                    file_ast = self.ast_analyzer.analyze_file(file_path)
                    symbols = file_ast.symbols if file_ast else []
                    self._symbol_cache[file_path] = symbols

                # Search symbols
                for symbol in symbols:
                    if re.search(pattern, symbol.name, re.IGNORECASE):
                        result = SearchResult(
                            file_path=symbol.file_path,
                            line_number=symbol.line_start,
                            content=f"{symbol.type} {symbol.name}"
                            + (f" - {symbol.docstring[:100]}..." if symbol.docstring else ""),
                            search_type=SearchType.SYMBOL,
                            score=0.95,  # Very high score for symbol matches
                            symbol_info=symbol,
                            context=symbol.signature,
                        )
                        results.append(result)

                        if len(results) >= max_results:
                            break

                if len(results) >= max_results:
                    break

            await tool_ctx.info(f"Symbol search found {len(results)} results")
            return results

        except Exception as e:
            await tool_ctx.error(f"Symbol search failed: {str(e)}")
            return []

    async def _add_function_context(self, results: List[SearchResult], tool_ctx) -> List[SearchResult]:
        """Add function/method context to results where relevant."""
        enhanced_results = []

        for result in results:
            enhanced_result = result

            if result.line_number and not result.context:
                try:
                    # Read the file and find surrounding function
                    file_path = Path(result.file_path)
                    if file_path.exists() and self.is_path_allowed(str(file_path)):
                        # Check if we have AST analysis cached
                        if str(file_path) not in self._ast_cache:
                            file_ast = self.ast_analyzer.analyze_file(str(file_path))
                            self._ast_cache[str(file_path)] = file_ast
                        else:
                            file_ast = self._ast_cache[str(file_path)]

                        if file_ast:
                            # Find symbol containing this line
                            for symbol in file_ast.symbols:
                                if symbol.line_start <= result.line_number <= symbol.line_end and symbol.type in [
                                    "function",
                                    "method",
                                ]:
                                    enhanced_result = SearchResult(
                                        file_path=result.file_path,
                                        line_number=result.line_number,
                                        content=result.content,
                                        search_type=result.search_type,
                                        score=result.score,
                                        context=f"In {symbol.type} {symbol.name}(): {symbol.signature or ''}",
                                        symbol_info=symbol,
                                        project=result.project,
                                    )
                                    break
                except Exception as e:
                    await tool_ctx.warning(f"Could not add context for {result.file_path}: {str(e)}")

            enhanced_results.append(enhanced_result)

        return enhanced_results

    def _combine_and_rank_results(self, results_by_type: Dict[SearchType, List[SearchResult]]) -> List[SearchResult]:
        """Combine results from different search types and rank by relevance.

        NOTE(review): not used by ``call()`` (which uses ``_combine_results``);
        kept for callers that want type-priority-aware merging.
        """
        all_results = []
        seen_combinations = set()

        # Combine all results, avoiding duplicates
        for _search_type, results in results_by_type.items():
            for result in results:
                # Create a key to identify duplicates
                key = (result.file_path, result.line_number)

                if key not in seen_combinations:
                    seen_combinations.add(key)
                    all_results.append(result)
                else:
                    # Merge with existing result based on score and type priority
                    type_priority = {
                        SearchType.SYMBOL: 4,
                        SearchType.GREP: 3,
                        SearchType.AST: 2,
                        SearchType.VECTOR: 1,
                    }

                    for existing in all_results:
                        existing_key = (existing.file_path, existing.line_number)
                        if existing_key == key:
                            # Update if the new result has higher priority or better score
                            result_priority = type_priority[result.search_type]
                            existing_priority = type_priority[existing.search_type]

                            # Replace existing if: higher priority type, or same priority but higher score
                            if result_priority > existing_priority or (
                                result_priority == existing_priority and result.score > existing.score
                            ):
                                # Replace the entire result to preserve type
                                idx = all_results.index(existing)
                                all_results[idx] = result
                            else:
                                # Still merge useful information
                                existing.context = existing.context or result.context
                                existing.symbol_info = existing.symbol_info or result.symbol_info
                            break

        # Sort by score (descending) then by search type priority
        type_priority = {
            SearchType.SYMBOL: 4,
            SearchType.GREP: 3,
            SearchType.AST: 2,
            SearchType.VECTOR: 1,
        }

        all_results.sort(key=lambda r: (r.score, type_priority[r.search_type]), reverse=True)

        return all_results

    @override
    @auto_timeout("batch_search")
    async def call(self, ctx: MCPContext, **params: Unpack[BatchSearchParams]) -> str:
        """Execute batch search with multiple queries in parallel.

        Returns a human-readable report; errors in individual queries are
        logged and skipped rather than aborting the whole batch.
        """
        import time

        start_time = time.time()

        tool_ctx = self.create_tool_context(ctx)

        # Extract parameters
        queries = params["queries"]
        path = params.get("path", ".")
        include = params.get("include", "*")
        max_results = params.get("max_results", 20)
        include_context = params.get("include_context", True)
        combine_results = params.get("combine_results", True)

        # Validate path
        path_validation = self.validate_path(path)
        if path_validation.is_error:
            await tool_ctx.error(path_validation.error_message)
            return f"Error: {path_validation.error_message}"

        # Check path permissions and existence
        allowed, error_msg = await self.check_path_allowed(path, tool_ctx)
        if not allowed:
            return error_msg

        exists, error_msg = await self.check_path_exists(path, tool_ctx)
        if not exists:
            return error_msg

        await tool_ctx.info(f"Starting batch search with {len(queries)} queries in {path}")

        # Build one coroutine per runnable query. BUGFIX: query_info is
        # appended only when a task is actually scheduled, so it stays
        # aligned with asyncio.gather's results below. The previous code
        # appended every query up front, which mis-paired queries with
        # results whenever a query was skipped (missing pattern or
        # unavailable backend).
        search_tasks = []
        query_info = []  # Queries actually executed, aligned with search_tasks

        for query in queries:
            query_type = query.get("type", "grep")
            task = None

            if query_type == "grep":
                pattern = query.get("pattern")
                if pattern:
                    task = self._run_grep_search(pattern, path, include, tool_ctx, max_results)

            elif query_type == "grep_ast":
                pattern = query.get("pattern")
                if pattern:
                    task = self._run_ast_search(pattern, path, include, tool_ctx, max_results)

            elif query_type == "vector_search" and self.vector_tool:
                search_query = query.get("query") or query.get("pattern")
                if search_query:
                    task = self._run_vector_search(search_query, path, tool_ctx, max_results)

            elif query_type == "symbol":
                # Previously _run_symbol_search existed but was never
                # dispatched; "symbol" queries now reach it.
                pattern = query.get("pattern")
                if pattern:
                    task = self._run_symbol_search(pattern, path, tool_ctx, max_results)

            elif query_type == "git_search":
                pattern = query.get("pattern")
                search_type = query.get("search_type", "content")
                if pattern:
                    task = self._run_git_search(pattern, path, search_type, tool_ctx, max_results)

            else:
                await tool_ctx.warning(f"Unknown or unavailable search type: {query_type}")

            if task is not None:
                search_tasks.append(task)
                query_info.append(query)

        # Execute all searches in parallel
        search_results = await asyncio.gather(*search_tasks, return_exceptions=True)

        # Collect all results
        all_results = []
        results_by_query = {}

        for i, (query, result) in enumerate(zip(query_info, search_results)):
            if isinstance(result, Exception):
                await tool_ctx.error(f"Query {i + 1} failed: {str(result)}")
                results_by_query[i] = []
            else:
                results_by_query[i] = result
                all_results.extend(result)

        # Combine and deduplicate results if requested
        if combine_results:
            combined_results = self._combine_results(all_results)
        else:
            combined_results = all_results

        # Add context if requested
        if include_context:
            combined_results = await self._add_context_to_results(combined_results, tool_ctx)

        end_time = time.time()
        search_time_ms = (end_time - start_time) * 1000

        # Sort by relevance score
        combined_results.sort(key=lambda r: r.score, reverse=True)

        # Limit total results
        combined_results = combined_results[: max_results * 2]  # Allow more when combining

        # Create batch results object. Bucket every SearchType (the
        # previous dict omitted SYMBOL, so symbol hits vanished from the
        # per-type summary).
        batch_results = BatchSearchResults(
            query=f"Batch search with {len(queries)} queries",
            total_results=len(combined_results),
            results_by_type={
                search_type: [r for r in combined_results if r.search_type == search_type]
                for search_type in SearchType
            },
            combined_results=combined_results,
            search_time_ms=search_time_ms,
        )

        # Format output
        return self._format_batch_results(batch_results, query_info)

    async def _run_git_search(
        self, pattern: str, path: str, search_type: str, tool_ctx, max_results: int
    ) -> List[SearchResult]:
        """Run git search and convert results."""
        await tool_ctx.info(f"Running git search for: {pattern} (type: {search_type})")

        try:
            # Use the git search tool
            git_result = await self.git_search_tool.call(
                tool_ctx.mcp_context,
                pattern=pattern,
                path=path,
                search_type=search_type,
                max_count=max_results,
            )

            results = []
            if "Found" in git_result:
                # Parse git search results - simplified parser
                lines = git_result.split("\n")

                for line in lines:
                    if line.strip():
                        # Extract file path and content
                        if ":" in line:
                            parts = line.split(":", 2)
                            if len(parts) >= 2:
                                file_path = parts[0].strip()
                                content = parts[-1].strip() if len(parts) > 2 else line

                                result = SearchResult(
                                    file_path=file_path,
                                    line_number=None,
                                    content=content,
                                    search_type=SearchType.GIT,
                                    score=0.8,  # Git results are relevant
                                )
                                results.append(result)

                                if len(results) >= max_results:
                                    break

            await tool_ctx.info(f"Git search found {len(results)} results")
            return results

        except Exception as e:
            await tool_ctx.error(f"Git search failed: {str(e)}")
            return []

    def _combine_results(self, results: List[SearchResult]) -> List[SearchResult]:
        """Combine and deduplicate search results."""
        # Use file path and line number as key for deduplication
        seen = {}
        combined = []

        for result in results:
            key = (result.file_path, result.line_number)

            if key not in seen:
                seen[key] = result
                combined.append(result)
            else:
                # If we've seen this location, keep the one with higher score
                existing = seen[key]
                if result.score > existing.score:
                    # Replace with higher scored result
                    idx = combined.index(existing)
                    combined[idx] = result
                    seen[key] = result

        return combined

    async def _add_context_to_results(self, results: List[SearchResult], tool_ctx) -> List[SearchResult]:
        """Add function/method context to results."""
        # This is a simplified version - you could enhance with full AST context
        return await self._add_function_context(results, tool_ctx)

    def _format_batch_results(self, results: BatchSearchResults, query_info: List[Dict]) -> str:
        """Format batch search results for display."""
        output = []

        # Header
        output.append("=== Batch Search Results ===")
        output.append(f"Queries: {len(query_info)}")
        output.append(f"Total results: {results.total_results}")
        output.append(f"Search time: {results.search_time_ms:.1f}ms\n")

        # Summary by type
        output.append("Results by type:")
        for search_type, type_results in results.results_by_type.items():
            if type_results:
                output.append(f"  {search_type.value}: {len(type_results)} results")
        output.append("")

        # Query summary
        output.append("Queries executed:")
        for i, query in enumerate(query_info):
            query_type = query.get("type", "grep")
            pattern = query.get("pattern") or query.get("query", "")
            output.append(f"  {i + 1}. {query_type}: {pattern}")
        output.append("")

        # Results
        if results.combined_results:
            output.append("=== Top Results ===\n")

            # Group by file
            results_by_file = {}
            for result in results.combined_results[:50]:  # Limit display
                if result.file_path not in results_by_file:
                    results_by_file[result.file_path] = []
                results_by_file[result.file_path].append(result)

            # Display results by file
            for file_path, file_results in results_by_file.items():
                output.append(f"{file_path}")
                output.append("-" * len(file_path))

                # Sort by line number if available
                file_results.sort(key=lambda r: r.line_number or 0)

                for result in file_results:
                    score_str = f"[{result.search_type.value} {result.score:.2f}]"

                    if result.line_number:
                        output.append(f"  {result.line_number:>4}: {score_str} {result.content}")
                    else:
                        output.append(f"  {score_str} {result.content}")

                    if result.context:
                        output.append(f"    Context: {result.context}")

                output.append("")
        else:
            output.append("No results found.")

        return "\n".join(output)

    @override
    def register(self, mcp_server: FastMCP) -> None:
        """Register the batch search tool with the MCP server."""
        tool_self = self

        @mcp_server.tool(name=self.name, description=self.description)
        async def batch_search(
            ctx: MCPContext,
            queries: Queries,
            path: SearchPath = ".",
            include: Include = "*",
            max_results: MaxResults = 20,
            include_context: IncludeContext = True,
            combine_results: CombineResults = True,
        ) -> str:
            return await tool_self.call(
                ctx,
                queries=queries,
                path=path,
                include=include,
                max_results=max_results,
                include_context=include_context,
                combine_results=combine_results,
            )