hanzo-mcp 0.5.2__py3-none-any.whl → 0.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hanzo-mcp might be problematic.
- hanzo_mcp/__init__.py +1 -1
- hanzo_mcp/cli.py +32 -0
- hanzo_mcp/dev_server.py +246 -0
- hanzo_mcp/prompts/__init__.py +1 -1
- hanzo_mcp/prompts/project_system.py +43 -7
- hanzo_mcp/server.py +5 -1
- hanzo_mcp/tools/__init__.py +66 -35
- hanzo_mcp/tools/agent/__init__.py +1 -1
- hanzo_mcp/tools/agent/agent.py +401 -0
- hanzo_mcp/tools/agent/agent_tool.py +3 -4
- hanzo_mcp/tools/common/__init__.py +1 -1
- hanzo_mcp/tools/common/base.py +2 -2
- hanzo_mcp/tools/common/batch_tool.py +3 -5
- hanzo_mcp/tools/common/config_tool.py +1 -1
- hanzo_mcp/tools/common/context.py +1 -1
- hanzo_mcp/tools/common/palette.py +344 -0
- hanzo_mcp/tools/common/palette_loader.py +108 -0
- hanzo_mcp/tools/common/stats.py +1 -1
- hanzo_mcp/tools/common/thinking_tool.py +3 -5
- hanzo_mcp/tools/common/tool_disable.py +1 -1
- hanzo_mcp/tools/common/tool_enable.py +1 -1
- hanzo_mcp/tools/common/tool_list.py +49 -52
- hanzo_mcp/tools/config/__init__.py +10 -0
- hanzo_mcp/tools/config/config_tool.py +212 -0
- hanzo_mcp/tools/config/index_config.py +176 -0
- hanzo_mcp/tools/config/palette_tool.py +166 -0
- hanzo_mcp/tools/database/__init__.py +1 -1
- hanzo_mcp/tools/database/graph.py +482 -0
- hanzo_mcp/tools/database/graph_add.py +1 -1
- hanzo_mcp/tools/database/graph_query.py +1 -1
- hanzo_mcp/tools/database/graph_remove.py +1 -1
- hanzo_mcp/tools/database/graph_search.py +1 -1
- hanzo_mcp/tools/database/graph_stats.py +1 -1
- hanzo_mcp/tools/database/sql.py +411 -0
- hanzo_mcp/tools/database/sql_query.py +1 -1
- hanzo_mcp/tools/database/sql_search.py +1 -1
- hanzo_mcp/tools/database/sql_stats.py +1 -1
- hanzo_mcp/tools/editor/neovim_command.py +1 -1
- hanzo_mcp/tools/editor/neovim_edit.py +1 -1
- hanzo_mcp/tools/editor/neovim_session.py +1 -1
- hanzo_mcp/tools/filesystem/__init__.py +42 -13
- hanzo_mcp/tools/filesystem/base.py +1 -1
- hanzo_mcp/tools/filesystem/batch_search.py +4 -4
- hanzo_mcp/tools/filesystem/content_replace.py +3 -5
- hanzo_mcp/tools/filesystem/diff.py +193 -0
- hanzo_mcp/tools/filesystem/directory_tree.py +3 -5
- hanzo_mcp/tools/filesystem/edit.py +3 -5
- hanzo_mcp/tools/filesystem/find.py +443 -0
- hanzo_mcp/tools/filesystem/find_files.py +1 -1
- hanzo_mcp/tools/filesystem/git_search.py +1 -1
- hanzo_mcp/tools/filesystem/grep.py +2 -2
- hanzo_mcp/tools/filesystem/multi_edit.py +3 -5
- hanzo_mcp/tools/filesystem/read.py +17 -5
- hanzo_mcp/tools/filesystem/{grep_ast_tool.py → symbols.py} +17 -27
- hanzo_mcp/tools/filesystem/symbols_unified.py +376 -0
- hanzo_mcp/tools/filesystem/tree.py +268 -0
- hanzo_mcp/tools/filesystem/unified_search.py +711 -0
- hanzo_mcp/tools/filesystem/unix_aliases.py +99 -0
- hanzo_mcp/tools/filesystem/watch.py +174 -0
- hanzo_mcp/tools/filesystem/write.py +3 -5
- hanzo_mcp/tools/jupyter/__init__.py +9 -12
- hanzo_mcp/tools/jupyter/base.py +1 -1
- hanzo_mcp/tools/jupyter/jupyter.py +326 -0
- hanzo_mcp/tools/jupyter/notebook_edit.py +3 -4
- hanzo_mcp/tools/jupyter/notebook_read.py +3 -5
- hanzo_mcp/tools/llm/__init__.py +4 -0
- hanzo_mcp/tools/llm/consensus_tool.py +1 -1
- hanzo_mcp/tools/llm/llm_manage.py +1 -1
- hanzo_mcp/tools/llm/llm_tool.py +1 -1
- hanzo_mcp/tools/llm/llm_unified.py +851 -0
- hanzo_mcp/tools/llm/provider_tools.py +1 -1
- hanzo_mcp/tools/mcp/__init__.py +4 -0
- hanzo_mcp/tools/mcp/mcp_add.py +1 -1
- hanzo_mcp/tools/mcp/mcp_remove.py +1 -1
- hanzo_mcp/tools/mcp/mcp_stats.py +1 -1
- hanzo_mcp/tools/mcp/mcp_unified.py +503 -0
- hanzo_mcp/tools/shell/__init__.py +20 -42
- hanzo_mcp/tools/shell/base.py +1 -1
- hanzo_mcp/tools/shell/base_process.py +303 -0
- hanzo_mcp/tools/shell/bash_unified.py +134 -0
- hanzo_mcp/tools/shell/logs.py +1 -1
- hanzo_mcp/tools/shell/npx.py +1 -1
- hanzo_mcp/tools/shell/npx_background.py +1 -1
- hanzo_mcp/tools/shell/npx_unified.py +101 -0
- hanzo_mcp/tools/shell/open.py +107 -0
- hanzo_mcp/tools/shell/pkill.py +1 -1
- hanzo_mcp/tools/shell/process_unified.py +131 -0
- hanzo_mcp/tools/shell/processes.py +1 -1
- hanzo_mcp/tools/shell/run_background.py +1 -1
- hanzo_mcp/tools/shell/run_command.py +3 -4
- hanzo_mcp/tools/shell/run_command_windows.py +3 -4
- hanzo_mcp/tools/shell/uvx.py +1 -1
- hanzo_mcp/tools/shell/uvx_background.py +1 -1
- hanzo_mcp/tools/shell/uvx_unified.py +101 -0
- hanzo_mcp/tools/todo/__init__.py +1 -1
- hanzo_mcp/tools/todo/base.py +1 -1
- hanzo_mcp/tools/todo/todo.py +265 -0
- hanzo_mcp/tools/todo/todo_read.py +3 -5
- hanzo_mcp/tools/todo/todo_write.py +3 -5
- hanzo_mcp/tools/vector/__init__.py +1 -1
- hanzo_mcp/tools/vector/index_tool.py +1 -1
- hanzo_mcp/tools/vector/project_manager.py +27 -5
- hanzo_mcp/tools/vector/vector.py +311 -0
- hanzo_mcp/tools/vector/vector_index.py +1 -1
- hanzo_mcp/tools/vector/vector_search.py +1 -1
- hanzo_mcp-0.6.2.dist-info/METADATA +336 -0
- hanzo_mcp-0.6.2.dist-info/RECORD +134 -0
- hanzo_mcp-0.6.2.dist-info/entry_points.txt +3 -0
- hanzo_mcp-0.5.2.dist-info/METADATA +0 -276
- hanzo_mcp-0.5.2.dist-info/RECORD +0 -106
- hanzo_mcp-0.5.2.dist-info/entry_points.txt +0 -2
- {hanzo_mcp-0.5.2.dist-info → hanzo_mcp-0.6.2.dist-info}/WHEEL +0 -0
- {hanzo_mcp-0.5.2.dist-info → hanzo_mcp-0.6.2.dist-info}/licenses/LICENSE +0 -0
- {hanzo_mcp-0.5.2.dist-info → hanzo_mcp-0.6.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,443 @@
+"""Unified find tool implementation.
+
+This module provides the FindTool for finding text patterns in files using
+multiple search backends in order of preference: rg > ag > ack > grep.
+"""
+
+import asyncio
+import fnmatch
+import json
+import re
+import shlex
+import shutil
+from pathlib import Path
+from typing import Annotated, TypedDict, Unpack, final, override, Optional, List, Dict, Any
+
+from mcp.server.fastmcp import Context as MCPContext
+from pydantic import Field
+
+from hanzo_mcp.tools.common.context import ToolContext
+from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
+
+
+# Parameter types
+Pattern = Annotated[
+    str,
+    Field(
+        description="Pattern to search for (regex or literal)",
+        min_length=1,
+    ),
+]
+
+SearchPath = Annotated[
+    str,
+    Field(
+        description="Path to search in",
+        default=".",
+    ),
+]
+
+Include = Annotated[
+    Optional[str],
+    Field(
+        description='File pattern to include (e.g. "*.js")',
+        default=None,
+    ),
+]
+
+Exclude = Annotated[
+    Optional[str],
+    Field(
+        description='File pattern to exclude',
+        default=None,
+    ),
+]
+
+CaseSensitive = Annotated[
+    bool,
+    Field(
+        description="Case sensitive search",
+        default=True,
+    ),
+]
+
+FixedStrings = Annotated[
+    bool,
+    Field(
+        description="Treat pattern as literal string, not regex",
+        default=False,
+    ),
+]
+
+ShowContext = Annotated[
+    int,
+    Field(
+        description="Lines of context to show around matches",
+        default=0,
+    ),
+]
+
+Backend = Annotated[
+    Optional[str],
+    Field(
+        description="Force specific backend: rg, ag, ack, grep",
+        default=None,
+    ),
+]
+
+
+class FindParams(TypedDict, total=False):
+    """Parameters for find tool."""
+    pattern: str
+    path: str
+    include: Optional[str]
+    exclude: Optional[str]
+    case_sensitive: bool
+    fixed_strings: bool
+    show_context: int
+    backend: Optional[str]
+
+
+@final
+class FindTool(FilesystemBaseTool):
+    """Unified find tool with multiple backend support."""
+
+    def __init__(self, permission_manager):
+        """Initialize the find tool."""
+        super().__init__(permission_manager)
+        self._backend_order = ["rg", "ag", "ack", "grep"]
+        self._available_backends = None
+
+    @property
+    @override
+    def name(self) -> str:
+        """Get the tool name."""
+        return "find"
+
+    @property
+    @override
+    def description(self) -> str:
+        """Get the tool description."""
+        backends = self._get_available_backends()
+        backend_str = ", ".join(backends) if backends else "fallback grep"
+
+        return f"""Find pattern in files (like ffind). Available: {backend_str}.
+
+Usage:
+find "TODO"
+find "error.*fatal" ./src
+find "config" --include "*.json"
+find "password" --exclude "*.log"
+
+Fast, intuitive file content search."""
+
+    def _get_available_backends(self) -> List[str]:
+        """Get list of available search backends."""
+        if self._available_backends is None:
+            self._available_backends = []
+            for backend in self._backend_order:
+                if shutil.which(backend):
+                    self._available_backends.append(backend)
+        return self._available_backends
+
+    @override
+    async def call(
+        self,
+        ctx: MCPContext,
+        **params: Unpack[FindParams],
+    ) -> str:
+        """Execute find operation."""
+        tool_ctx = self.create_tool_context(ctx)
+
+        # Extract parameters
+        pattern = params.get("pattern")
+        if not pattern:
+            return "Error: pattern is required"
+
+        path = params.get("path", ".")
+        include = params.get("include")
+        exclude = params.get("exclude")
+        case_sensitive = params.get("case_sensitive", True)
+        fixed_strings = params.get("fixed_strings", False)
+        show_context = params.get("show_context", 0)
+        backend = params.get("backend")
+
+        # Validate path
+        path_validation = self.validate_path(path)
+        if path_validation.is_error:
+            await tool_ctx.error(path_validation.error_message)
+            return f"Error: {path_validation.error_message}"
+
+        # Check permissions
+        allowed, error_msg = await self.check_path_allowed(path, tool_ctx)
+        if not allowed:
+            return error_msg
+
+        # Check existence
+        exists, error_msg = await self.check_path_exists(path, tool_ctx)
+        if not exists:
+            return error_msg
+
+        # Select backend
+        available = self._get_available_backends()
+
+        if backend:
+            # User specified backend
+            if backend not in available and backend != "grep":
+                return f"Error: Backend '{backend}' not available. Available: {', '.join(available + ['grep'])}"
+            selected_backend = backend
+        elif available:
+            # Use first available
+            selected_backend = available[0]
+        else:
+            # Fallback
+            selected_backend = "grep"
+
+        await tool_ctx.info(f"Using {selected_backend} to search for '{pattern}' in {path}")
+
+        # Execute search
+        if selected_backend == "rg":
+            return await self._run_ripgrep(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+        elif selected_backend == "ag":
+            return await self._run_silver_searcher(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+        elif selected_backend == "ack":
+            return await self._run_ack(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+        else:
+            return await self._run_fallback_grep(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+
+    async def _run_ripgrep(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Run ripgrep backend."""
+        cmd = ["rg", "--json"]
+
+        if not case_sensitive:
+            cmd.append("-i")
+        if fixed_strings:
+            cmd.append("-F")
+        if show_context > 0:
+            cmd.extend(["-C", str(show_context)])
+        if include:
+            cmd.extend(["-g", include])
+        if exclude:
+            cmd.extend(["-g", f"!{exclude}"])
+
+        cmd.extend([pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode not in [0, 1]:  # 1 = no matches
+                await tool_ctx.error(f"ripgrep failed: {stderr.decode()}")
+                return f"Error: {stderr.decode()}"
+
+            return self._parse_ripgrep_output(stdout.decode())
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ripgrep: {str(e)}")
+            return f"Error running ripgrep: {str(e)}"
+
+    async def _run_silver_searcher(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Run silver searcher (ag) backend."""
+        cmd = ["ag", "--nocolor", "--nogroup"]
+
+        if not case_sensitive:
+            cmd.append("-i")
+        if fixed_strings:
+            cmd.append("-F")
+        if show_context > 0:
+            cmd.extend(["-C", str(show_context)])
+        if include:
+            cmd.extend(["-G", include])
+        if exclude:
+            cmd.extend(["--ignore", exclude])
+
+        cmd.extend([pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode not in [0, 1]:
+                await tool_ctx.error(f"ag failed: {stderr.decode()}")
+                return f"Error: {stderr.decode()}"
+
+            output = stdout.decode()
+            if not output.strip():
+                return "No matches found."
+
+            lines = output.strip().split('\n')
+            return f"Found {len(lines)} matches:\n\n" + output
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ag: {str(e)}")
+            return f"Error running ag: {str(e)}"
+
+    async def _run_ack(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Run ack backend."""
+        cmd = ["ack", "--nocolor", "--nogroup"]
+
+        if not case_sensitive:
+            cmd.append("-i")
+        if fixed_strings:
+            cmd.append("-Q")
+        if show_context > 0:
+            cmd.extend(["-C", str(show_context)])
+        if include:
+            # ack uses different syntax for file patterns
+            cmd.extend(["--type-add", f"custom:ext:{include.replace('*.', '')}", "--type=custom"])
+
+        cmd.extend([pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode not in [0, 1]:
+                await tool_ctx.error(f"ack failed: {stderr.decode()}")
+                return f"Error: {stderr.decode()}"
+
+            output = stdout.decode()
+            if not output.strip():
+                return "No matches found."
+
+            lines = output.strip().split('\n')
+            return f"Found {len(lines)} matches:\n\n" + output
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ack: {str(e)}")
+            return f"Error running ack: {str(e)}"
+
+    async def _run_fallback_grep(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Fallback Python implementation."""
+        await tool_ctx.info("Using fallback Python grep implementation")
+
+        try:
+            input_path = Path(path)
+            matching_files = []
+
+            # Get files to search
+            if input_path.is_file():
+                if self._match_file_pattern(input_path.name, include, exclude):
+                    matching_files.append(input_path)
+            else:
+                for entry in input_path.rglob("*"):
+                    if entry.is_file() and self.is_path_allowed(str(entry)):
+                        if self._match_file_pattern(entry.name, include, exclude):
+                            matching_files.append(entry)
+
+            if not matching_files:
+                return "No matching files found."
+
+            # Compile pattern
+            if fixed_strings:
+                pattern_re = re.escape(pattern)
+            else:
+                pattern_re = pattern
+
+            if not case_sensitive:
+                flags = re.IGNORECASE
+            else:
+                flags = 0
+
+            regex = re.compile(pattern_re, flags)
+
+            # Search files
+            results = []
+            total_matches = 0
+
+            for file_path in matching_files:
+                try:
+                    with open(file_path, "r", encoding="utf-8") as f:
+                        lines = f.readlines()
+
+                    for i, line in enumerate(lines, 1):
+                        if regex.search(line):
+                            # Format result with context if requested
+                            if show_context > 0:
+                                start = max(0, i - show_context - 1)
+                                end = min(len(lines), i + show_context)
+
+                                context_lines = []
+                                for j in range(start, end):
+                                    prefix = ":" if j + 1 == i else "-"
+                                    context_lines.append(f"{file_path}:{j+1}{prefix}{lines[j].rstrip()}")
+                                results.extend(context_lines)
+                                results.append("")  # Separator
+                            else:
+                                results.append(f"{file_path}:{i}:{line.rstrip()}")
+                            total_matches += 1
+
+                except UnicodeDecodeError:
+                    pass  # Skip binary files
+                except Exception as e:
+                    await tool_ctx.warning(f"Error reading {file_path}: {str(e)}")
+
+            if not results:
+                return "No matches found."
+
+            return f"Found {total_matches} matches:\n\n" + "\n".join(results)
+
+        except Exception as e:
+            await tool_ctx.error(f"Error in fallback grep: {str(e)}")
+            return f"Error in fallback grep: {str(e)}"
+
+    def _match_file_pattern(self, filename: str, include: Optional[str], exclude: Optional[str]) -> bool:
+        """Check if filename matches include/exclude patterns."""
+        if include and not fnmatch.fnmatch(filename, include):
+            return False
+        if exclude and fnmatch.fnmatch(filename, exclude):
+            return False
+        return True
+
+    def _parse_ripgrep_output(self, output: str) -> str:
+        """Parse ripgrep JSON output."""
+        if not output.strip():
+            return "No matches found."
+
+        results = []
+        total_matches = 0
+
+        for line in output.splitlines():
+            if not line.strip():
+                continue
+
+            try:
+                data = json.loads(line)
+
+                if data.get("type") == "match":
+                    match_data = data.get("data", {})
+                    path = match_data.get("path", {}).get("text", "")
+                    line_number = match_data.get("line_number", 0)
+                    line_text = match_data.get("lines", {}).get("text", "").rstrip()
+
+                    results.append(f"{path}:{line_number}:{line_text}")
+                    total_matches += 1
+
+                elif data.get("type") == "context":
+                    context_data = data.get("data", {})
+                    path = context_data.get("path", {}).get("text", "")
+                    line_number = context_data.get("line_number", 0)
+                    line_text = context_data.get("lines", {}).get("text", "").rstrip()
+
+                    results.append(f"{path}:{line_number}-{line_text}")
+
+            except json.JSONDecodeError:
+                pass
+
+        if not results:
+            return "No matches found."
+
+        return f"Found {total_matches} matches:\n\n" + "\n".join(results)
+
+    def register(self, mcp_server) -> None:
+        """Register this tool with the MCP server."""
+        pass
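The hunk above adds hanzo_mcp/tools/filesystem/find.py in full. As a rough orientation only, the sketch below shows how the new FindTool might be exercised once a permission manager and an MCP context are available; those two objects, and the demo function itself, are assumptions not contained in this diff.

```python
# Hypothetical usage sketch (not part of the package). `permission_manager`
# and `ctx` are assumed to come from the surrounding hanzo-mcp server setup.
from hanzo_mcp.tools.filesystem.find import FindTool

async def demo(ctx, permission_manager):
    tool = FindTool(permission_manager)
    # No backend forced: the first of rg > ag > ack found on PATH is used,
    # otherwise the pure-Python fallback grep runs.
    print(await tool.call(ctx, pattern="TODO", path="./src", include="*.py"))
    # Force the fallback explicitly:
    print(await tool.call(ctx, pattern="TODO", path="./src", backend="grep"))
```

Backend preference is fixed at rg > ag > ack, with the pure-Python fallback used when none of those binaries is installed.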
@@ -4,7 +4,7 @@ import os
 from typing import Annotated, Optional, TypedDict, Unpack, final, override
 from pathlib import Path
 
-from fastmcp import Context as MCPContext
+from mcp.server.fastmcp import Context as MCPContext
 from pydantic import Field
 
 from hanzo_mcp.tools.common.base import BaseTool
@@ -5,7 +5,7 @@ import subprocess
 import re
 from typing import Annotated, TypedDict, Unpack, final, override
 
-from fastmcp import Context as MCPContext
+from mcp.server.fastmcp import Context as MCPContext
 from pydantic import Field
 
 from hanzo_mcp.tools.common.base import BaseTool
@@ -12,8 +12,8 @@ import shutil
 from pathlib import Path
 from typing import Annotated, TypedDict, Unpack, final, override
 
-from fastmcp import Context as MCPContext
-from
+from mcp.server.fastmcp import Context as MCPContext
+from mcp.server import FastMCP
 from pydantic import Field
 
 from hanzo_mcp.tools.common.context import ToolContext
@@ -7,9 +7,8 @@ from difflib import unified_diff
 from pathlib import Path
 from typing import Annotated, TypedDict, Unpack, final, override
 
-from fastmcp import Context as MCPContext
-from
-from fastmcp.server.dependencies import get_context
+from mcp.server.fastmcp import Context as MCPContext
+from mcp.server import FastMCP
 from pydantic import Field
 
 from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
@@ -350,11 +349,10 @@ If you want to create a new file, use:
 
         @mcp_server.tool(name=self.name, description=self.description)
         async def multi_edit(
-            ctx: MCPContext,
             file_path: FilePath,
             edits: Edits,
+            ctx: MCPContext
         ) -> str:
-            ctx = get_context()
             return await tool_self.call(
                 ctx,
                 file_path=file_path,
@@ -6,9 +6,8 @@ This module provides the ReadTool for reading the contents of files.
 from pathlib import Path
 from typing import Annotated, TypedDict, Unpack, final, override
 
-from fastmcp import Context as MCPContext
-from
-from fastmcp.server.dependencies import get_context
+from mcp.server.fastmcp import Context as MCPContext
+from mcp.server import FastMCP
 from pydantic import Field
 
 from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
@@ -230,6 +229,20 @@ Usage:
             await tool_ctx.error(f"Error reading file: {str(e)}")
             return f"Error: {str(e)}"
 
+    async def run(self, ctx: MCPContext, file_path: str, offset: int = 0, limit: int = 2000) -> str:
+        """Run method for backwards compatibility with test scripts.
+
+        Args:
+            ctx: MCP context
+            file_path: Path to file to read
+            offset: Line offset to start reading
+            limit: Maximum lines to read
+
+        Returns:
+            File contents
+        """
+        return await self.call(ctx, file_path=file_path, offset=offset, limit=limit)
+
     @override
     def register(self, mcp_server: FastMCP) -> None:
         """Register this tool with the MCP server.
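The run() method added above simply forwards to call(). A hypothetical test-style caller (the permission manager argument and the ctx object are assumptions, not shown in this diff) might look like:

```python
# Hypothetical test-style usage of the backwards-compatible run() wrapper.
# ReadTool's constructor argument and the ctx object are assumed from the
# surrounding hanzo-mcp code, not from this diff.
from hanzo_mcp.tools.filesystem.read import ReadTool

async def read_snippet(ctx, permission_manager):
    tool = ReadTool(permission_manager)
    # Equivalent to: await tool.call(ctx, file_path=..., offset=0, limit=100)
    return await tool.run(ctx, file_path="/tmp/example.txt", offset=0, limit=100)
```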
@@ -244,12 +257,11 @@ Usage:
 
         @mcp_server.tool(name=self.name, description=self.description)
         async def read(
-            ctx: MCPContext,
             file_path: FilePath,
             offset: Offset,
             limit: Limit,
+            ctx: MCPContext
         ) -> str:
-            ctx = get_context()
             return await tool_self.call(
                 ctx, file_path=file_path, offset=offset, limit=limit
             )
@@ -1,16 +1,16 @@
-"""
+"""Symbols tool implementation.
 
-This module provides the
-
+This module provides the SymbolsTool for searching, indexing, and querying code symbols
+using tree-sitter AST parsing. It can find function definitions, class declarations,
+and other code structures with full context.
 """
 
 import os
 from pathlib import Path
 from typing import Annotated, TypedDict, Unpack, final, override
 
-from fastmcp import Context as MCPContext
-from
-from fastmcp.server.dependencies import get_context
+from mcp.server.fastmcp import Context as MCPContext
+from mcp.server import FastMCP
 from grep_ast.grep_ast import TreeContext
 from pydantic import Field
 
@@ -66,8 +66,8 @@ class GrepAstToolParams(TypedDict):
 
 
 @final
-class
-    """Tool for searching
+class SymbolsTool(FilesystemBaseTool):
+    """Tool for searching and querying code symbols using tree-sitter AST parsing."""
 
     @property
     @override
@@ -77,7 +77,7 @@ class GrepAstTool(FilesystemBaseTool):
         Returns:
             Tool name
         """
-        return "
+        return "symbols"
 
     @property
     @override
@@ -87,23 +87,14 @@ class GrepAstTool(FilesystemBaseTool):
         Returns:
             Tool description
         """
-        return """
+        return """Code symbols search with tree-sitter AST. Actions: search (default), index, query.
 
-
+Usage:
+symbols "function_name" ./src
+symbols --action index --path ./src
+symbols --action query --type function --path ./src
 
-
-1. When you need to understand where a pattern appears within larger code structures
-2. When searching for function or class definitions that match a pattern
-3. When you want to see not just the matching line but its surrounding context in the code
-4. When exploring unfamiliar codebases and need structural context
-5. When examining how a specific pattern is used across different parts of the codebase
-
-This tool is superior to regular grep/search_content when you need to understand code structure, not just find text matches.
-
-Example usage:
-```
-grep_ast(pattern="function_name", path="/path/to/file.py", ignore_case=False, line_number=True)
-```"""
+Finds code structures (functions, classes, methods) with full context."""
 
     @override
     async def call(
@@ -233,14 +224,13 @@ grep_ast(pattern="function_name", path="/path/to/file.py", ignore_case=False, li
         tool_self = self  # Create a reference to self for use in the closure
 
         @mcp_server.tool(name=self.name, description=self.description)
-        async def
-            ctx: MCPContext,
+        async def symbols(
             pattern: Pattern,
             path: SearchPath,
             ignore_case: IgnoreCase,
             line_number: LineNumber,
+            ctx: MCPContext
         ) -> str:
-            ctx = get_context()
             return await tool_self.call(
                 ctx,
                 pattern=pattern,