hanzo-mcp 0.3.8__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of hanzo-mcp might be problematic.

Files changed (87)
  1. hanzo_mcp/__init__.py +1 -1
  2. hanzo_mcp/cli.py +118 -170
  3. hanzo_mcp/cli_enhanced.py +438 -0
  4. hanzo_mcp/config/__init__.py +19 -0
  5. hanzo_mcp/config/settings.py +388 -0
  6. hanzo_mcp/config/tool_config.py +197 -0
  7. hanzo_mcp/prompts/__init__.py +117 -0
  8. hanzo_mcp/prompts/compact_conversation.py +77 -0
  9. hanzo_mcp/prompts/create_release.py +38 -0
  10. hanzo_mcp/prompts/project_system.py +120 -0
  11. hanzo_mcp/prompts/project_todo_reminder.py +111 -0
  12. hanzo_mcp/prompts/utils.py +286 -0
  13. hanzo_mcp/server.py +117 -99
  14. hanzo_mcp/tools/__init__.py +105 -32
  15. hanzo_mcp/tools/agent/__init__.py +8 -11
  16. hanzo_mcp/tools/agent/agent_tool.py +290 -224
  17. hanzo_mcp/tools/agent/prompt.py +16 -13
  18. hanzo_mcp/tools/agent/tool_adapter.py +9 -9
  19. hanzo_mcp/tools/common/__init__.py +17 -16
  20. hanzo_mcp/tools/common/base.py +79 -110
  21. hanzo_mcp/tools/common/batch_tool.py +330 -0
  22. hanzo_mcp/tools/common/context.py +26 -292
  23. hanzo_mcp/tools/common/permissions.py +12 -12
  24. hanzo_mcp/tools/common/thinking_tool.py +153 -0
  25. hanzo_mcp/tools/common/validation.py +1 -63
  26. hanzo_mcp/tools/filesystem/__init__.py +88 -57
  27. hanzo_mcp/tools/filesystem/base.py +32 -24
  28. hanzo_mcp/tools/filesystem/content_replace.py +114 -107
  29. hanzo_mcp/tools/filesystem/directory_tree.py +129 -105
  30. hanzo_mcp/tools/filesystem/edit.py +279 -0
  31. hanzo_mcp/tools/filesystem/grep.py +458 -0
  32. hanzo_mcp/tools/filesystem/grep_ast_tool.py +250 -0
  33. hanzo_mcp/tools/filesystem/multi_edit.py +362 -0
  34. hanzo_mcp/tools/filesystem/read.py +255 -0
  35. hanzo_mcp/tools/filesystem/write.py +156 -0
  36. hanzo_mcp/tools/jupyter/__init__.py +41 -29
  37. hanzo_mcp/tools/jupyter/base.py +66 -57
  38. hanzo_mcp/tools/jupyter/{edit_notebook.py → notebook_edit.py} +162 -139
  39. hanzo_mcp/tools/jupyter/notebook_read.py +152 -0
  40. hanzo_mcp/tools/shell/__init__.py +29 -20
  41. hanzo_mcp/tools/shell/base.py +87 -45
  42. hanzo_mcp/tools/shell/bash_session.py +731 -0
  43. hanzo_mcp/tools/shell/bash_session_executor.py +295 -0
  44. hanzo_mcp/tools/shell/command_executor.py +435 -384
  45. hanzo_mcp/tools/shell/run_command.py +284 -131
  46. hanzo_mcp/tools/shell/run_command_windows.py +328 -0
  47. hanzo_mcp/tools/shell/session_manager.py +196 -0
  48. hanzo_mcp/tools/shell/session_storage.py +325 -0
  49. hanzo_mcp/tools/todo/__init__.py +66 -0
  50. hanzo_mcp/tools/todo/base.py +319 -0
  51. hanzo_mcp/tools/todo/todo_read.py +148 -0
  52. hanzo_mcp/tools/todo/todo_write.py +378 -0
  53. hanzo_mcp/tools/vector/__init__.py +95 -0
  54. hanzo_mcp/tools/vector/infinity_store.py +365 -0
  55. hanzo_mcp/tools/vector/project_manager.py +361 -0
  56. hanzo_mcp/tools/vector/vector_index.py +115 -0
  57. hanzo_mcp/tools/vector/vector_search.py +215 -0
  58. {hanzo_mcp-0.3.8.dist-info → hanzo_mcp-0.5.0.dist-info}/METADATA +33 -1
  59. hanzo_mcp-0.5.0.dist-info/RECORD +63 -0
  60. {hanzo_mcp-0.3.8.dist-info → hanzo_mcp-0.5.0.dist-info}/WHEEL +1 -1
  61. hanzo_mcp/tools/agent/base_provider.py +0 -73
  62. hanzo_mcp/tools/agent/litellm_provider.py +0 -45
  63. hanzo_mcp/tools/agent/lmstudio_agent.py +0 -385
  64. hanzo_mcp/tools/agent/lmstudio_provider.py +0 -219
  65. hanzo_mcp/tools/agent/provider_registry.py +0 -120
  66. hanzo_mcp/tools/common/error_handling.py +0 -86
  67. hanzo_mcp/tools/common/logging_config.py +0 -115
  68. hanzo_mcp/tools/common/session.py +0 -91
  69. hanzo_mcp/tools/common/think_tool.py +0 -123
  70. hanzo_mcp/tools/common/version_tool.py +0 -120
  71. hanzo_mcp/tools/filesystem/edit_file.py +0 -287
  72. hanzo_mcp/tools/filesystem/get_file_info.py +0 -170
  73. hanzo_mcp/tools/filesystem/read_files.py +0 -199
  74. hanzo_mcp/tools/filesystem/search_content.py +0 -275
  75. hanzo_mcp/tools/filesystem/write_file.py +0 -162
  76. hanzo_mcp/tools/jupyter/notebook_operations.py +0 -514
  77. hanzo_mcp/tools/jupyter/read_notebook.py +0 -165
  78. hanzo_mcp/tools/project/__init__.py +0 -64
  79. hanzo_mcp/tools/project/analysis.py +0 -886
  80. hanzo_mcp/tools/project/base.py +0 -66
  81. hanzo_mcp/tools/project/project_analyze.py +0 -173
  82. hanzo_mcp/tools/shell/run_script.py +0 -215
  83. hanzo_mcp/tools/shell/script_tool.py +0 -244
  84. hanzo_mcp-0.3.8.dist-info/RECORD +0 -53
  85. {hanzo_mcp-0.3.8.dist-info → hanzo_mcp-0.5.0.dist-info}/entry_points.txt +0 -0
  86. {hanzo_mcp-0.3.8.dist-info → hanzo_mcp-0.5.0.dist-info}/licenses/LICENSE +0 -0
  87. {hanzo_mcp-0.3.8.dist-info → hanzo_mcp-0.5.0.dist-info}/top_level.txt +0 -0
hanzo_mcp/tools/filesystem/grep.py
@@ -0,0 +1,458 @@
+"""Grep tool implementation.
+
+This module provides the Grep tool for finding text patterns in files using ripgrep.
+"""
+
+import asyncio
+import fnmatch
+import json
+import re
+import shlex
+import shutil
+from pathlib import Path
+from typing import Annotated, TypedDict, Unpack, final, override
+
+from fastmcp import Context as MCPContext
+from fastmcp import FastMCP
+from pydantic import Field
+
+from hanzo_mcp.tools.common.context import ToolContext
+from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
+
+Pattern = Annotated[
+    str,
+    Field(
+        description="The regular expression pattern to search for in file contents",
+        min_length=1,
+    ),
+]
+
+SearchPath = Annotated[
+    str,
+    Field(
+        description="The directory to search in. Defaults to the current working directory.",
+        default=".",
+    ),
+]
+
+Include = Annotated[
+    str,
+    Field(
+        description='File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")',
+        default="*",
+    ),
+]
+
+
+class GrepToolParams(TypedDict):
+    """Parameters for the Grep tool.
+
+    Attributes:
+        pattern: The regular expression pattern to search for in file contents
+        path: The directory to search in. Defaults to the current working directory.
+        include: File pattern to include in the search (e.g. "*.js", "*.{ts,tsx}")
+    """
+
+    pattern: Pattern
+    path: SearchPath
+    include: Include
+
+
+@final
+class Grep(FilesystemBaseTool):
+    """Fast content search tool that works with any codebase size."""
+
+    @property
+    @override
+    def name(self) -> str:
+        """Get the tool name.
+
+        Returns:
+            Tool name
+        """
+        return "grep"
+
+    @property
+    @override
+    def description(self) -> str:
+        """Get the tool description.
+
+        Returns:
+            Tool description
+        """
+        return """Fast content search tool that works with any codebase size.
+Searches file contents using regular expressions.
+Supports full regex syntax (eg. "log.*Error", "function\\s+\\w+", etc.).
+Filter files by pattern with the include parameter (eg. "*.js", "*.{ts,tsx}").
+Returns matching file paths sorted by modification time.
+Use this tool when you need to find files containing specific patterns.
+When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead."""
+
+    def is_ripgrep_installed(self) -> bool:
+        """Check if ripgrep (rg) is installed.
+
+        Returns:
+            True if ripgrep is installed, False otherwise
+        """
+        return shutil.which("rg") is not None
+
+    async def run_ripgrep(
+        self,
+        pattern: str,
+        path: str,
+        tool_ctx: ToolContext,
+        include_pattern: str | None = None,
+    ) -> str:
+        """Run ripgrep with the given parameters and return the results.
+
+        Args:
+            pattern: The regular expression pattern to search for
+            path: The directory or file to search in
+            include_pattern: Optional file pattern to include in the search
+            tool_ctx: Tool context for logging
+
+        Returns:
+            The search results as formatted string
+        """
+        # Special case for tests: direct file path with include pattern that doesn't match
+        if Path(path).is_file() and include_pattern and include_pattern != "*":
+            if not fnmatch.fnmatch(Path(path).name, include_pattern):
+                await tool_ctx.info(
+                    f"File does not match pattern '{include_pattern}': {path}"
+                )
+                return f"File does not match pattern '{include_pattern}': {path}"
+
+        cmd = ["rg", "--json", pattern]
+
+        # Add path
+        cmd.append(path)
+
+        # Add include pattern if provided
+        if include_pattern and include_pattern != "*":
+            cmd.extend(["-g", include_pattern])
+
+        await tool_ctx.info(f"Running ripgrep command: {shlex.join(cmd)}")
+
+        try:
+            # Execute ripgrep process
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode != 0 and process.returncode != 1:
+                # rg returns 1 when no matches are found, which is not an error
+                await tool_ctx.error(
+                    f"ripgrep failed with exit code {process.returncode}: {stderr.decode()}"
+                )
+                return f"Error executing ripgrep: {stderr.decode()}"
+
+            # Parse the JSON output
+            results = self.parse_ripgrep_json_output(stdout.decode())
+            return results
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ripgrep: {str(e)}")
+            return f"Error running ripgrep: {str(e)}"
+
+    def parse_ripgrep_json_output(self, output: str) -> str:
+        """Parse ripgrep JSON output and format it for human readability.
+
+        Args:
+            output: The JSON output from ripgrep
+
+        Returns:
+            Formatted string with search results
+        """
+        if not output.strip():
+            return "No matches found."
+
+        formatted_results = []
+        file_results = {}
+
+        for line in output.splitlines():
+            if not line.strip():
+                continue
+
+            try:
+                data = json.loads(line)
+
+                if data.get("type") == "match":
+                    path = data.get("data", {}).get("path", {}).get("text", "")
+                    line_number = data.get("data", {}).get("line_number", 0)
+                    line_text = (
+                        data.get("data", {}).get("lines", {}).get("text", "").rstrip()
+                    )
+
+                    if path not in file_results:
+                        file_results[path] = []
+
+                    file_results[path].append((line_number, line_text))
+
+            except json.JSONDecodeError as e:
+                formatted_results.append(f"Error parsing JSON: {str(e)}")
+
+        # Count total matches
+        total_matches = sum(len(matches) for matches in file_results.values())
+        total_files = len(file_results)
+
+        if total_matches == 0:
+            return "No matches found."
+
+        formatted_results.append(
+            f"Found {total_matches} matches in {total_files} file{'s' if total_files > 1 else ''}:"
+        )
+        formatted_results.append("")  # Empty line for readability
+
+        # Format the results by file
+        for file_path, matches in file_results.items():
+            for line_number, line_text in matches:
+                formatted_results.append(f"{file_path}:{line_number}: {line_text}")
+
+        return "\n".join(formatted_results)
+
+    async def fallback_grep(
+        self,
+        pattern: str,
+        path: str,
+        tool_ctx: ToolContext,
+        include_pattern: str | None = None,
+    ) -> str:
+        """Fallback Python implementation when ripgrep is not available.
+
+        Args:
+            pattern: The regular expression pattern to search for
+            path: The directory or file to search in
+            include_pattern: Optional file pattern to include in the search
+            tool_ctx: Tool context for logging
+
+        Returns:
+            The search results as formatted string
+        """
+        await tool_ctx.info("Using fallback Python implementation for grep")
+
+        try:
+            input_path = Path(path)
+
+            # Find matching files
+            matching_files: list[Path] = []
+
+            # Process based on whether path is a file or directory
+            if input_path.is_file():
+                # Single file search - check file pattern match first
+                if (
+                    include_pattern is None
+                    or include_pattern == "*"
+                    or fnmatch.fnmatch(input_path.name, include_pattern)
+                ):
+                    matching_files.append(input_path)
+                    await tool_ctx.info(f"Searching single file: {path}")
+                else:
+                    # File doesn't match the pattern, return immediately
+                    await tool_ctx.info(
+                        f"File does not match pattern '{include_pattern}': {path}"
+                    )
+                    return f"File does not match pattern '{include_pattern}': {path}"
+            elif input_path.is_dir():
+                # Directory search - find all files
+                await tool_ctx.info(f"Finding files in directory: {path}")
+
+                # Keep track of allowed paths for filtering
+                allowed_paths: set[str] = set()
+
+                # Collect all allowed paths first for faster filtering
+                for entry in input_path.rglob("*"):
+                    entry_path = str(entry)
+                    if self.is_path_allowed(entry_path):
+                        allowed_paths.add(entry_path)
+
+                # Find matching files efficiently
+                for entry in input_path.rglob("*"):
+                    entry_path = str(entry)
+                    if entry_path in allowed_paths and entry.is_file():
+                        if (
+                            include_pattern is None
+                            or include_pattern == "*"
+                            or fnmatch.fnmatch(entry.name, include_pattern)
+                        ):
+                            matching_files.append(entry)
+
+                await tool_ctx.info(f"Found {len(matching_files)} matching files")
+            else:
+                # This shouldn't happen if path exists
+                await tool_ctx.error(f"Path is neither a file nor a directory: {path}")
+                return f"Error: Path is neither a file nor a directory: {path}"
+
+            # Report progress
+            total_files = len(matching_files)
+            if input_path.is_file():
+                await tool_ctx.info(f"Searching file: {path}")
+            else:
+                await tool_ctx.info(
+                    f"Searching through {total_files} files in directory"
+                )
+
+            # Set up for parallel processing
+            results: list[str] = []
+            files_processed = 0
+            matches_found = 0
+            batch_size = 20  # Process files in batches to avoid overwhelming the system
+
+            # Use a semaphore to limit concurrent file operations
+            semaphore = asyncio.Semaphore(10)
+
+            # Create an async function to search a single file
+            async def search_file(file_path: Path) -> list[str]:
+                nonlocal files_processed, matches_found
+                file_results: list[str] = []
+
+                try:
+                    async with semaphore:  # Limit concurrent operations
+                        try:
+                            with open(file_path, "r", encoding="utf-8") as f:
+                                for line_num, line in enumerate(f, 1):
+                                    if re.search(pattern, line):
+                                        file_results.append(
+                                            f"{file_path}:{line_num}: {line.rstrip()}"
+                                        )
+                                        matches_found += 1
+                            files_processed += 1
+                        except UnicodeDecodeError:
+                            # Skip binary files
+                            files_processed += 1
+                        except Exception as e:
+                            await tool_ctx.warning(
+                                f"Error reading {file_path}: {str(e)}"
+                            )
+                except Exception as e:
+                    await tool_ctx.warning(f"Error processing {file_path}: {str(e)}")
+
+                return file_results
+
+            # Process files in parallel batches
+            for i in range(0, len(matching_files), batch_size):
+                batch = matching_files[i : i + batch_size]
+                batch_tasks = [search_file(file_path) for file_path in batch]
+
+                # Report progress
+                await tool_ctx.report_progress(i, total_files)
+
+                # Wait for the batch to complete
+                batch_results = await asyncio.gather(*batch_tasks)
+
+                # Flatten and collect results
+                for file_result in batch_results:
+                    results.extend(file_result)
+
+            # Final progress report
+            await tool_ctx.report_progress(total_files, total_files)
+
+            if not results:
+                if input_path.is_file():
+                    return f"No matches found for pattern '{pattern}' in file: {path}"
+                else:
+                    return f"No matches found for pattern '{pattern}' in files matching '{include_pattern or '*'}' in directory: {path}"
+
+            await tool_ctx.info(
+                f"Found {matches_found} matches in {files_processed} file{'s' if files_processed > 1 else ''}"
+            )
+            return (
+                f"Found {matches_found} matches in {files_processed} file{'s' if files_processed > 1 else ''}:\n\n"
+                + "\n".join(results)
+            )
+        except Exception as e:
+            await tool_ctx.error(f"Error searching file contents: {str(e)}")
+            return f"Error searching file contents: {str(e)}"
+
+    @override
+    async def call(
+        self,
+        ctx: MCPContext,
+        **params: Unpack[GrepToolParams],
+    ) -> str:
+        """Execute the grep tool with the given parameters.
+
+        Args:
+            ctx: MCP context
+            **params: Tool parameters
+
+        Returns:
+            Tool result
+        """
+        tool_ctx = self.create_tool_context(ctx)
+
+        # Extract parameters
+        pattern = params.get("pattern")
+        path: str = params.get("path", ".")
+        # Support both 'include' and legacy 'file_pattern' parameter for backward compatibility
+        include: str = params.get("include") or params.get("file_pattern")
+
+        # Validate required parameters for direct calls (not through MCP framework)
+        if pattern is None:
+            await tool_ctx.error("Parameter 'pattern' is required but was None")
+            return "Error: Parameter 'pattern' is required but was None"
+
+        # Validate path if provided
+        if path:
+            path_validation = self.validate_path(path)
+            if path_validation.is_error:
+                await tool_ctx.error(path_validation.error_message)
+                return f"Error: {path_validation.error_message}"
+
+        # Check if path is allowed
+        allowed, error_msg = await self.check_path_allowed(path, tool_ctx)
+        if not allowed:
+            return error_msg
+
+        # Check if path exists
+        exists, error_msg = await self.check_path_exists(path, tool_ctx)
+        if not exists:
+            return error_msg
+
+        # Log operation
+        search_info = f"Searching for pattern '{pattern}'"
+        if include:
+            search_info += f" in files matching '{include}'"
+        search_info += f" in path: {path}"
+        await tool_ctx.info(search_info)
+
+        # Check if ripgrep is installed and use it if available
+        try:
+            if self.is_ripgrep_installed():
+                await tool_ctx.info("ripgrep is installed, using ripgrep for search")
+                result = await self.run_ripgrep(pattern, path, tool_ctx, include)
+                return result
+            else:
+                await tool_ctx.info(
+                    "ripgrep is not installed, using fallback implementation"
+                )
+                result = await self.fallback_grep(pattern, path, tool_ctx, include)
+                return result
+        except Exception as e:
+            await tool_ctx.error(f"Error in grep tool: {str(e)}")
+            return f"Error in grep tool: {str(e)}"
+
+    @override
+    def register(self, mcp_server: FastMCP) -> None:
+        """Register this grep tool with the MCP server.
+
+        Creates a wrapper function with explicitly defined parameters that match
+        the tool's parameter schema and registers it with the MCP server.
+
+        Args:
+            mcp_server: The FastMCP server instance
+        """
+        tool_self = self  # Create a reference to self for use in the closure
+
+        @mcp_server.tool(name=self.name, description=self.description)
+        async def grep(
+            ctx: MCPContext,
+            pattern: Pattern,
+            path: SearchPath,
+            include: Include,
+        ) -> str:
+            # Use 'include' parameter if provided, otherwise fall back to 'file_pattern'
+            return await tool_self.call(
+                ctx, pattern=pattern, path=path, include=include
+            )
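
A quick illustration of the input this new code consumes may help: ripgrep's `--json` mode emits one JSON event per line, and `parse_ripgrep_json_output` above keeps only the `"type": "match"` events, grouping them by `data.path.text` and rendering each as `path:line: text`. The sketch below reproduces that grouping on a single hard-coded sample event; the sample path and match text are illustrative, not taken from the package.

```python
import json

# One illustrative line of `rg --json` output (sample values, not from the package).
sample = (
    '{"type":"match","data":{"path":{"text":"src/app.py"},'
    '"lines":{"text":"    log.error(\\"boom\\")\\n"},"line_number":42,'
    '"absolute_offset":1337,"submatches":[{"match":{"text":"log.error"},"start":4,"end":13}]}}'
)

# Same grouping that parse_ripgrep_json_output performs: match events only,
# keyed by file path, each entry keeping (line_number, stripped line text).
file_results: dict[str, list[tuple[int, str]]] = {}
for line in sample.splitlines():
    event = json.loads(line)
    if event.get("type") != "match":
        continue  # rg also emits "begin", "end", and "summary" events
    data = event["data"]
    file_results.setdefault(data["path"]["text"], []).append(
        (data["line_number"], data["lines"]["text"].rstrip())
    )

for path, matches in file_results.items():
    for line_number, text in matches:
        print(f"{path}:{line_number}: {text}")
# prints: src/app.py:42:     log.error("boom")
```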
hanzo_mcp/tools/filesystem/grep_ast_tool.py
@@ -0,0 +1,250 @@
+"""Grep AST tool implementation.
+
+This module provides the GrepAstTool for searching through source code files with AST context,
+seeing matching lines with useful context showing how they fit into the code structure.
+"""
+
+import os
+from pathlib import Path
+from typing import Annotated, TypedDict, Unpack, final, override
+
+from fastmcp import Context as MCPContext
+from fastmcp import FastMCP
+from fastmcp.server.dependencies import get_context
+from grep_ast.grep_ast import TreeContext
+from pydantic import Field
+
+from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
+
+Pattern = Annotated[
+    str,
+    Field(
+        description="The regex pattern to search for in source code files",
+        min_length=1,
+    ),
+]
+
+SearchPath = Annotated[
+    str,
+    Field(
+        description="The path to search in (file or directory)",
+        min_length=1,
+    ),
+]
+
+IgnoreCase = Annotated[
+    bool,
+    Field(
+        description="Whether to ignore case when matching",
+        default=False,
+    ),
+]
+
+LineNumber = Annotated[
+    bool,
+    Field(
+        description="Whether to display line numbers",
+        default=False,
+    ),
+]
+
+
+class GrepAstToolParams(TypedDict):
+    """Parameters for the GrepAstTool.
+
+    Attributes:
+        pattern: The regex pattern to search for in source code files
+        path: The path to search in (file or directory)
+        ignore_case: Whether to ignore case when matching
+        line_number: Whether to display line numbers
+    """
+
+    pattern: Pattern
+    path: SearchPath
+    ignore_case: IgnoreCase
+    line_number: LineNumber
+
+
+@final
+class GrepAstTool(FilesystemBaseTool):
+    """Tool for searching through source code files with AST context."""
+
+    @property
+    @override
+    def name(self) -> str:
+        """Get the tool name.
+
+        Returns:
+            Tool name
+        """
+        return "grep_ast"
+
+    @property
+    @override
+    def description(self) -> str:
+        """Get the tool description.
+
+        Returns:
+            Tool description
+        """
+        return """Search through source code files and see matching lines with useful AST (Abstract Syntax Tree) context. This tool helps you understand code structure by showing how matched lines fit into functions, classes, and other code blocks.
+
+Unlike traditional search tools like `search_content` that only show matching lines, `grep_ast` leverages the AST to reveal the structural context around matches, making it easier to understand the code organization.
+
+When to use this tool:
+1. When you need to understand where a pattern appears within larger code structures
+2. When searching for function or class definitions that match a pattern
+3. When you want to see not just the matching line but its surrounding context in the code
+4. When exploring unfamiliar codebases and need structural context
+5. When examining how a specific pattern is used across different parts of the codebase
+
+This tool is superior to regular grep/search_content when you need to understand code structure, not just find text matches.
+
+Example usage:
+```
+grep_ast(pattern="function_name", path="/path/to/file.py", ignore_case=False, line_number=True)
+```"""
+
+    @override
+    async def call(
+        self,
+        ctx: MCPContext,
+        **params: Unpack[GrepAstToolParams],
+    ) -> str:
+        """Execute the tool with the given parameters.
+
+        Args:
+            ctx: MCP context
+            **params: Tool parameters
+
+        Returns:
+            Tool result
+        """
+        tool_ctx = self.create_tool_context(ctx)
+        self.set_tool_context_info(tool_ctx)
+
+        # Extract parameters
+        pattern: Pattern = params["pattern"]
+        path: SearchPath = params["path"]
+        ignore_case = params.get("ignore_case", False)
+        line_number = params.get("line_number", False)
+
+        # Validate the path
+        path_validation = self.validate_path(path)
+        if not path_validation.is_valid:
+            await tool_ctx.error(f"Invalid path: {path_validation.error_message}")
+            return f"Error: Invalid path: {path_validation.error_message}"
+
+        # Check if path is allowed
+        is_allowed, error_message = await self.check_path_allowed(path, tool_ctx)
+        if not is_allowed:
+            return error_message
+
+        # Check if path exists
+        is_exists, error_message = await self.check_path_exists(path, tool_ctx)
+        if not is_exists:
+            return error_message
+
+        await tool_ctx.info(f"Searching for '{pattern}' in {path}")
+
+        # Get the files to process
+        path_obj = Path(path)
+        files_to_process = []
+
+        if path_obj.is_file():
+            files_to_process.append(str(path_obj))
+        elif path_obj.is_dir():
+            for root, _, files in os.walk(path_obj):
+                for file in files:
+                    file_path = Path(root) / file
+                    if self.is_path_allowed(str(file_path)):
+                        files_to_process.append(str(file_path))
+
+        if not files_to_process:
+            await tool_ctx.warning(f"No source code files found in {path}")
+            return f"No source code files found in {path}"
+
+        # Process each file
+        results = []
+        processed_count = 0
+
+        await tool_ctx.info(f"Found {len(files_to_process)} file(s) to process")
+
+        for file_path in files_to_process:
+            await tool_ctx.report_progress(processed_count, len(files_to_process))
+
+            try:
+                # Read the file
+                with open(file_path, "r", encoding="utf-8") as f:
+                    code = f.read()
+
+                # Process the file with grep-ast
+                try:
+                    tc = TreeContext(
+                        file_path,
+                        code,
+                        color=False,
+                        verbose=False,
+                        line_number=line_number,
+                    )
+
+                    # Find matches
+                    loi = tc.grep(pattern, ignore_case)
+
+                    if loi:
+                        tc.add_lines_of_interest(loi)
+                        tc.add_context()
+                        output = tc.format()
+
+                        # Add the result to our list
+                        results.append(f"\n{file_path}:\n{output}\n")
+                except Exception as e:
+                    # Skip files that can't be parsed by tree-sitter
+                    await tool_ctx.warning(f"Could not parse {file_path}: {str(e)}")
+            except UnicodeDecodeError:
+                await tool_ctx.warning(f"Could not read {file_path} as text")
+            except Exception as e:
+                await tool_ctx.error(f"Error processing {file_path}: {str(e)}")
+
+            processed_count += 1
+
+        # Final progress report
+        await tool_ctx.report_progress(len(files_to_process), len(files_to_process))
+
+        if not results:
+            await tool_ctx.warning(f"No matches found for '{pattern}' in {path}")
+            return f"No matches found for '{pattern}' in {path}"
+
+        await tool_ctx.info(f"Found matches in {len(results)} file(s)")
+
+        # Join the results
+        return "\n".join(results)
+
+    @override
+    def register(self, mcp_server: FastMCP) -> None:
+        """Register this tool with the MCP server.
+
+        Creates a wrapper function with explicitly defined parameters that match
+        the tool's parameter schema and registers it with the MCP server.
+
+        Args:
+            mcp_server: The FastMCP server instance
+        """
+        tool_self = self  # Create a reference to self for use in the closure
+
+        @mcp_server.tool(name=self.name, description=self.description)
+        async def grep_ast(
+            ctx: MCPContext,
+            pattern: Pattern,
+            path: SearchPath,
+            ignore_case: IgnoreCase,
+            line_number: LineNumber,
+        ) -> str:
+            ctx = get_context()
+            return await tool_self.call(
+                ctx,
+                pattern=pattern,
+                path=path,
+                ignore_case=ignore_case,
+                line_number=line_number,
+            )
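
For readers unfamiliar with the `grep-ast` library, the `TreeContext` calls that `GrepAstTool.call` uses above can also be exercised directly against a single file. The sketch below reuses only the calls visible in the diff (constructor, `grep`, `add_lines_of_interest`, `add_context`, `format`); the file path and pattern are placeholders, and it assumes the `grep-ast` package is installed.

```python
from pathlib import Path

from grep_ast.grep_ast import TreeContext  # same import the new tool uses

# Placeholder inputs for illustration only.
file_path = "hanzo_mcp/tools/filesystem/grep.py"
pattern = "def call"

code = Path(file_path).read_text(encoding="utf-8")

# Mirror GrepAstTool.call: build the tree context, grep for lines of
# interest, expand them with surrounding structure, then render.
tc = TreeContext(file_path, code, color=False, verbose=False, line_number=True)
loi = tc.grep(pattern, False)  # second argument is ignore_case
if loi:
    tc.add_lines_of_interest(loi)
    tc.add_context()
    print(f"{file_path}:\n{tc.format()}")
else:
    print(f"No matches for '{pattern}' in {file_path}")
```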