hanzo-mcp 0.5.1__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hanzo-mcp might be problematic.
- hanzo_mcp/__init__.py +1 -1
- hanzo_mcp/cli.py +32 -0
- hanzo_mcp/dev_server.py +246 -0
- hanzo_mcp/prompts/__init__.py +1 -1
- hanzo_mcp/prompts/project_system.py +43 -7
- hanzo_mcp/server.py +5 -1
- hanzo_mcp/tools/__init__.py +168 -6
- hanzo_mcp/tools/agent/__init__.py +1 -1
- hanzo_mcp/tools/agent/agent.py +401 -0
- hanzo_mcp/tools/agent/agent_tool.py +3 -4
- hanzo_mcp/tools/common/__init__.py +1 -1
- hanzo_mcp/tools/common/base.py +9 -4
- hanzo_mcp/tools/common/batch_tool.py +3 -5
- hanzo_mcp/tools/common/config_tool.py +1 -1
- hanzo_mcp/tools/common/context.py +1 -1
- hanzo_mcp/tools/common/palette.py +344 -0
- hanzo_mcp/tools/common/palette_loader.py +108 -0
- hanzo_mcp/tools/common/stats.py +261 -0
- hanzo_mcp/tools/common/thinking_tool.py +3 -5
- hanzo_mcp/tools/common/tool_disable.py +144 -0
- hanzo_mcp/tools/common/tool_enable.py +182 -0
- hanzo_mcp/tools/common/tool_list.py +260 -0
- hanzo_mcp/tools/config/__init__.py +10 -0
- hanzo_mcp/tools/config/config_tool.py +212 -0
- hanzo_mcp/tools/config/index_config.py +176 -0
- hanzo_mcp/tools/config/palette_tool.py +166 -0
- hanzo_mcp/tools/database/__init__.py +71 -0
- hanzo_mcp/tools/database/database_manager.py +246 -0
- hanzo_mcp/tools/database/graph.py +482 -0
- hanzo_mcp/tools/database/graph_add.py +257 -0
- hanzo_mcp/tools/database/graph_query.py +536 -0
- hanzo_mcp/tools/database/graph_remove.py +267 -0
- hanzo_mcp/tools/database/graph_search.py +348 -0
- hanzo_mcp/tools/database/graph_stats.py +345 -0
- hanzo_mcp/tools/database/sql.py +411 -0
- hanzo_mcp/tools/database/sql_query.py +229 -0
- hanzo_mcp/tools/database/sql_search.py +296 -0
- hanzo_mcp/tools/database/sql_stats.py +254 -0
- hanzo_mcp/tools/editor/__init__.py +11 -0
- hanzo_mcp/tools/editor/neovim_command.py +272 -0
- hanzo_mcp/tools/editor/neovim_edit.py +290 -0
- hanzo_mcp/tools/editor/neovim_session.py +356 -0
- hanzo_mcp/tools/filesystem/__init__.py +52 -13
- hanzo_mcp/tools/filesystem/base.py +1 -1
- hanzo_mcp/tools/filesystem/batch_search.py +812 -0
- hanzo_mcp/tools/filesystem/content_replace.py +3 -5
- hanzo_mcp/tools/filesystem/diff.py +193 -0
- hanzo_mcp/tools/filesystem/directory_tree.py +3 -5
- hanzo_mcp/tools/filesystem/edit.py +3 -5
- hanzo_mcp/tools/filesystem/find.py +443 -0
- hanzo_mcp/tools/filesystem/find_files.py +348 -0
- hanzo_mcp/tools/filesystem/git_search.py +505 -0
- hanzo_mcp/tools/filesystem/grep.py +2 -2
- hanzo_mcp/tools/filesystem/multi_edit.py +3 -5
- hanzo_mcp/tools/filesystem/read.py +17 -5
- hanzo_mcp/tools/filesystem/{grep_ast_tool.py → symbols.py} +17 -27
- hanzo_mcp/tools/filesystem/symbols_unified.py +376 -0
- hanzo_mcp/tools/filesystem/tree.py +268 -0
- hanzo_mcp/tools/filesystem/unified_search.py +465 -443
- hanzo_mcp/tools/filesystem/unix_aliases.py +99 -0
- hanzo_mcp/tools/filesystem/watch.py +174 -0
- hanzo_mcp/tools/filesystem/write.py +3 -5
- hanzo_mcp/tools/jupyter/__init__.py +9 -12
- hanzo_mcp/tools/jupyter/base.py +1 -1
- hanzo_mcp/tools/jupyter/jupyter.py +326 -0
- hanzo_mcp/tools/jupyter/notebook_edit.py +3 -4
- hanzo_mcp/tools/jupyter/notebook_read.py +3 -5
- hanzo_mcp/tools/llm/__init__.py +31 -0
- hanzo_mcp/tools/llm/consensus_tool.py +351 -0
- hanzo_mcp/tools/llm/llm_manage.py +413 -0
- hanzo_mcp/tools/llm/llm_tool.py +346 -0
- hanzo_mcp/tools/llm/llm_unified.py +851 -0
- hanzo_mcp/tools/llm/provider_tools.py +412 -0
- hanzo_mcp/tools/mcp/__init__.py +15 -0
- hanzo_mcp/tools/mcp/mcp_add.py +263 -0
- hanzo_mcp/tools/mcp/mcp_remove.py +127 -0
- hanzo_mcp/tools/mcp/mcp_stats.py +165 -0
- hanzo_mcp/tools/mcp/mcp_unified.py +503 -0
- hanzo_mcp/tools/shell/__init__.py +21 -23
- hanzo_mcp/tools/shell/base.py +1 -1
- hanzo_mcp/tools/shell/base_process.py +303 -0
- hanzo_mcp/tools/shell/bash_unified.py +134 -0
- hanzo_mcp/tools/shell/logs.py +265 -0
- hanzo_mcp/tools/shell/npx.py +194 -0
- hanzo_mcp/tools/shell/npx_background.py +254 -0
- hanzo_mcp/tools/shell/npx_unified.py +101 -0
- hanzo_mcp/tools/shell/open.py +107 -0
- hanzo_mcp/tools/shell/pkill.py +262 -0
- hanzo_mcp/tools/shell/process_unified.py +131 -0
- hanzo_mcp/tools/shell/processes.py +279 -0
- hanzo_mcp/tools/shell/run_background.py +326 -0
- hanzo_mcp/tools/shell/run_command.py +3 -4
- hanzo_mcp/tools/shell/run_command_windows.py +3 -4
- hanzo_mcp/tools/shell/uvx.py +187 -0
- hanzo_mcp/tools/shell/uvx_background.py +249 -0
- hanzo_mcp/tools/shell/uvx_unified.py +101 -0
- hanzo_mcp/tools/todo/__init__.py +1 -1
- hanzo_mcp/tools/todo/base.py +1 -1
- hanzo_mcp/tools/todo/todo.py +265 -0
- hanzo_mcp/tools/todo/todo_read.py +3 -5
- hanzo_mcp/tools/todo/todo_write.py +3 -5
- hanzo_mcp/tools/vector/__init__.py +6 -1
- hanzo_mcp/tools/vector/git_ingester.py +3 -0
- hanzo_mcp/tools/vector/index_tool.py +358 -0
- hanzo_mcp/tools/vector/infinity_store.py +98 -0
- hanzo_mcp/tools/vector/project_manager.py +27 -5
- hanzo_mcp/tools/vector/vector.py +311 -0
- hanzo_mcp/tools/vector/vector_index.py +1 -1
- hanzo_mcp/tools/vector/vector_search.py +12 -7
- hanzo_mcp-0.6.1.dist-info/METADATA +336 -0
- hanzo_mcp-0.6.1.dist-info/RECORD +134 -0
- hanzo_mcp-0.6.1.dist-info/entry_points.txt +3 -0
- hanzo_mcp-0.5.1.dist-info/METADATA +0 -276
- hanzo_mcp-0.5.1.dist-info/RECORD +0 -68
- hanzo_mcp-0.5.1.dist-info/entry_points.txt +0 -2
- {hanzo_mcp-0.5.1.dist-info → hanzo_mcp-0.6.1.dist-info}/WHEEL +0 -0
- {hanzo_mcp-0.5.1.dist-info → hanzo_mcp-0.6.1.dist-info}/licenses/LICENSE +0 -0
- {hanzo_mcp-0.5.1.dist-info → hanzo_mcp-0.6.1.dist-info}/top_level.txt +0 -0
hanzo_mcp/tools/filesystem/find.py
@@ -0,0 +1,443 @@
+"""Unified find tool implementation.
+
+This module provides the FindTool for finding text patterns in files using
+multiple search backends in order of preference: rg > ag > ack > grep.
+"""
+
+import asyncio
+import fnmatch
+import json
+import re
+import shlex
+import shutil
+from pathlib import Path
+from typing import Annotated, TypedDict, Unpack, final, override, Optional, List, Dict, Any
+
+from mcp.server.fastmcp import Context as MCPContext
+from pydantic import Field
+
+from hanzo_mcp.tools.common.context import ToolContext
+from hanzo_mcp.tools.filesystem.base import FilesystemBaseTool
+
+
+# Parameter types
+Pattern = Annotated[
+    str,
+    Field(
+        description="Pattern to search for (regex or literal)",
+        min_length=1,
+    ),
+]
+
+SearchPath = Annotated[
+    str,
+    Field(
+        description="Path to search in",
+        default=".",
+    ),
+]
+
+Include = Annotated[
+    Optional[str],
+    Field(
+        description='File pattern to include (e.g. "*.js")',
+        default=None,
+    ),
+]
+
+Exclude = Annotated[
+    Optional[str],
+    Field(
+        description='File pattern to exclude',
+        default=None,
+    ),
+]
+
+CaseSensitive = Annotated[
+    bool,
+    Field(
+        description="Case sensitive search",
+        default=True,
+    ),
+]
+
+FixedStrings = Annotated[
+    bool,
+    Field(
+        description="Treat pattern as literal string, not regex",
+        default=False,
+    ),
+]
+
+ShowContext = Annotated[
+    int,
+    Field(
+        description="Lines of context to show around matches",
+        default=0,
+    ),
+]
+
+Backend = Annotated[
+    Optional[str],
+    Field(
+        description="Force specific backend: rg, ag, ack, grep",
+        default=None,
+    ),
+]
+
+
+class FindParams(TypedDict, total=False):
+    """Parameters for find tool."""
+    pattern: str
+    path: str
+    include: Optional[str]
+    exclude: Optional[str]
+    case_sensitive: bool
+    fixed_strings: bool
+    show_context: int
+    backend: Optional[str]
+
+
+@final
+class FindTool(FilesystemBaseTool):
+    """Unified find tool with multiple backend support."""
+
+    def __init__(self, permission_manager):
+        """Initialize the find tool."""
+        super().__init__(permission_manager)
+        self._backend_order = ["rg", "ag", "ack", "grep"]
+        self._available_backends = None
+
+    @property
+    @override
+    def name(self) -> str:
+        """Get the tool name."""
+        return "find"
+
+    @property
+    @override
+    def description(self) -> str:
+        """Get the tool description."""
+        backends = self._get_available_backends()
+        backend_str = ", ".join(backends) if backends else "fallback grep"
+
+        return f"""Find pattern in files (like ffind). Available: {backend_str}.
+
+Usage:
+find "TODO"
+find "error.*fatal" ./src
+find "config" --include "*.json"
+find "password" --exclude "*.log"
+
+Fast, intuitive file content search."""
+
+    def _get_available_backends(self) -> List[str]:
+        """Get list of available search backends."""
+        if self._available_backends is None:
+            self._available_backends = []
+            for backend in self._backend_order:
+                if shutil.which(backend):
+                    self._available_backends.append(backend)
+        return self._available_backends
+
+    @override
+    async def call(
+        self,
+        ctx: MCPContext,
+        **params: Unpack[FindParams],
+    ) -> str:
+        """Execute find operation."""
+        tool_ctx = self.create_tool_context(ctx)
+
+        # Extract parameters
+        pattern = params.get("pattern")
+        if not pattern:
+            return "Error: pattern is required"
+
+        path = params.get("path", ".")
+        include = params.get("include")
+        exclude = params.get("exclude")
+        case_sensitive = params.get("case_sensitive", True)
+        fixed_strings = params.get("fixed_strings", False)
+        show_context = params.get("show_context", 0)
+        backend = params.get("backend")
+
+        # Validate path
+        path_validation = self.validate_path(path)
+        if path_validation.is_error:
+            await tool_ctx.error(path_validation.error_message)
+            return f"Error: {path_validation.error_message}"
+
+        # Check permissions
+        allowed, error_msg = await self.check_path_allowed(path, tool_ctx)
+        if not allowed:
+            return error_msg
+
+        # Check existence
+        exists, error_msg = await self.check_path_exists(path, tool_ctx)
+        if not exists:
+            return error_msg
+
+        # Select backend
+        available = self._get_available_backends()
+
+        if backend:
+            # User specified backend
+            if backend not in available and backend != "grep":
+                return f"Error: Backend '{backend}' not available. Available: {', '.join(available + ['grep'])}"
+            selected_backend = backend
+        elif available:
+            # Use first available
+            selected_backend = available[0]
+        else:
+            # Fallback
+            selected_backend = "grep"
+
+        await tool_ctx.info(f"Using {selected_backend} to search for '{pattern}' in {path}")
+
+        # Execute search
+        if selected_backend == "rg":
+            return await self._run_ripgrep(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+        elif selected_backend == "ag":
+            return await self._run_silver_searcher(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+        elif selected_backend == "ack":
+            return await self._run_ack(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+        else:
+            return await self._run_fallback_grep(pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx)
+
+    async def _run_ripgrep(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Run ripgrep backend."""
+        cmd = ["rg", "--json"]
+
+        if not case_sensitive:
+            cmd.append("-i")
+        if fixed_strings:
+            cmd.append("-F")
+        if show_context > 0:
+            cmd.extend(["-C", str(show_context)])
+        if include:
+            cmd.extend(["-g", include])
+        if exclude:
+            cmd.extend(["-g", f"!{exclude}"])
+
+        cmd.extend([pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode not in [0, 1]:  # 1 = no matches
+                await tool_ctx.error(f"ripgrep failed: {stderr.decode()}")
+                return f"Error: {stderr.decode()}"
+
+            return self._parse_ripgrep_output(stdout.decode())
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ripgrep: {str(e)}")
+            return f"Error running ripgrep: {str(e)}"
+
+    async def _run_silver_searcher(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Run silver searcher (ag) backend."""
+        cmd = ["ag", "--nocolor", "--nogroup"]
+
+        if not case_sensitive:
+            cmd.append("-i")
+        if fixed_strings:
+            cmd.append("-F")
+        if show_context > 0:
+            cmd.extend(["-C", str(show_context)])
+        if include:
+            cmd.extend(["-G", include])
+        if exclude:
+            cmd.extend(["--ignore", exclude])
+
+        cmd.extend([pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode not in [0, 1]:
+                await tool_ctx.error(f"ag failed: {stderr.decode()}")
+                return f"Error: {stderr.decode()}"
+
+            output = stdout.decode()
+            if not output.strip():
+                return "No matches found."
+
+            lines = output.strip().split('\n')
+            return f"Found {len(lines)} matches:\n\n" + output
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ag: {str(e)}")
+            return f"Error running ag: {str(e)}"
+
+    async def _run_ack(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Run ack backend."""
+        cmd = ["ack", "--nocolor", "--nogroup"]
+
+        if not case_sensitive:
+            cmd.append("-i")
+        if fixed_strings:
+            cmd.append("-Q")
+        if show_context > 0:
+            cmd.extend(["-C", str(show_context)])
+        if include:
+            # ack uses different syntax for file patterns
+            cmd.extend(["--type-add", f"custom:ext:{include.replace('*.', '')}", "--type=custom"])
+
+        cmd.extend([pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+
+            stdout, stderr = await process.communicate()
+
+            if process.returncode not in [0, 1]:
+                await tool_ctx.error(f"ack failed: {stderr.decode()}")
+                return f"Error: {stderr.decode()}"
+
+            output = stdout.decode()
+            if not output.strip():
+                return "No matches found."
+
+            lines = output.strip().split('\n')
+            return f"Found {len(lines)} matches:\n\n" + output
+
+        except Exception as e:
+            await tool_ctx.error(f"Error running ack: {str(e)}")
+            return f"Error running ack: {str(e)}"
+
+    async def _run_fallback_grep(self, pattern, path, include, exclude, case_sensitive, fixed_strings, show_context, tool_ctx) -> str:
+        """Fallback Python implementation."""
+        await tool_ctx.info("Using fallback Python grep implementation")
+
+        try:
+            input_path = Path(path)
+            matching_files = []
+
+            # Get files to search
+            if input_path.is_file():
+                if self._match_file_pattern(input_path.name, include, exclude):
+                    matching_files.append(input_path)
+            else:
+                for entry in input_path.rglob("*"):
+                    if entry.is_file() and self.is_path_allowed(str(entry)):
+                        if self._match_file_pattern(entry.name, include, exclude):
+                            matching_files.append(entry)
+
+            if not matching_files:
+                return "No matching files found."
+
+            # Compile pattern
+            if fixed_strings:
+                pattern_re = re.escape(pattern)
+            else:
+                pattern_re = pattern
+
+            if not case_sensitive:
+                flags = re.IGNORECASE
+            else:
+                flags = 0
+
+            regex = re.compile(pattern_re, flags)
+
+            # Search files
+            results = []
+            total_matches = 0
+
+            for file_path in matching_files:
+                try:
+                    with open(file_path, "r", encoding="utf-8") as f:
+                        lines = f.readlines()
+
+                    for i, line in enumerate(lines, 1):
+                        if regex.search(line):
+                            # Format result with context if requested
+                            if show_context > 0:
+                                start = max(0, i - show_context - 1)
+                                end = min(len(lines), i + show_context)
+
+                                context_lines = []
+                                for j in range(start, end):
+                                    prefix = ":" if j + 1 == i else "-"
+                                    context_lines.append(f"{file_path}:{j+1}{prefix}{lines[j].rstrip()}")
+                                results.extend(context_lines)
+                                results.append("")  # Separator
+                            else:
+                                results.append(f"{file_path}:{i}:{line.rstrip()}")
+                            total_matches += 1
+
+                except UnicodeDecodeError:
+                    pass  # Skip binary files
+                except Exception as e:
+                    await tool_ctx.warning(f"Error reading {file_path}: {str(e)}")
+
+            if not results:
+                return "No matches found."
+
+            return f"Found {total_matches} matches:\n\n" + "\n".join(results)
+
+        except Exception as e:
+            await tool_ctx.error(f"Error in fallback grep: {str(e)}")
+            return f"Error in fallback grep: {str(e)}"
+
+    def _match_file_pattern(self, filename: str, include: Optional[str], exclude: Optional[str]) -> bool:
+        """Check if filename matches include/exclude patterns."""
+        if include and not fnmatch.fnmatch(filename, include):
+            return False
+        if exclude and fnmatch.fnmatch(filename, exclude):
+            return False
+        return True
+
+    def _parse_ripgrep_output(self, output: str) -> str:
+        """Parse ripgrep JSON output."""
+        if not output.strip():
+            return "No matches found."
+
+        results = []
+        total_matches = 0
+
+        for line in output.splitlines():
+            if not line.strip():
+                continue
+
+            try:
+                data = json.loads(line)
+
+                if data.get("type") == "match":
+                    match_data = data.get("data", {})
+                    path = match_data.get("path", {}).get("text", "")
+                    line_number = match_data.get("line_number", 0)
+                    line_text = match_data.get("lines", {}).get("text", "").rstrip()
+
+                    results.append(f"{path}:{line_number}:{line_text}")
+                    total_matches += 1
+
+                elif data.get("type") == "context":
+                    context_data = data.get("data", {})
+                    path = context_data.get("path", {}).get("text", "")
+                    line_number = context_data.get("line_number", 0)
+                    line_text = context_data.get("lines", {}).get("text", "").rstrip()
+
+                    results.append(f"{path}:{line_number}-{line_text}")
+
+            except json.JSONDecodeError:
+                pass
+
+        if not results:
+            return "No matches found."
+
+        return f"Found {total_matches} matches:\n\n" + "\n".join(results)
+
+    def register(self, mcp_server) -> None:
+        """Register this tool with the MCP server."""
+        pass