hanzo-mcp 0.6.12__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (117)
  1. hanzo_mcp/__init__.py +2 -2
  2. hanzo_mcp/analytics/__init__.py +5 -0
  3. hanzo_mcp/analytics/posthog_analytics.py +364 -0
  4. hanzo_mcp/cli.py +5 -5
  5. hanzo_mcp/cli_enhanced.py +7 -7
  6. hanzo_mcp/cli_plugin.py +91 -0
  7. hanzo_mcp/config/__init__.py +1 -1
  8. hanzo_mcp/config/settings.py +70 -7
  9. hanzo_mcp/config/tool_config.py +20 -6
  10. hanzo_mcp/dev_server.py +3 -3
  11. hanzo_mcp/prompts/project_system.py +1 -1
  12. hanzo_mcp/server.py +40 -3
  13. hanzo_mcp/server_enhanced.py +69 -0
  14. hanzo_mcp/tools/__init__.py +140 -31
  15. hanzo_mcp/tools/agent/__init__.py +85 -4
  16. hanzo_mcp/tools/agent/agent_tool.py +104 -6
  17. hanzo_mcp/tools/agent/agent_tool_v2.py +459 -0
  18. hanzo_mcp/tools/agent/clarification_protocol.py +220 -0
  19. hanzo_mcp/tools/agent/clarification_tool.py +68 -0
  20. hanzo_mcp/tools/agent/claude_cli_tool.py +125 -0
  21. hanzo_mcp/tools/agent/claude_desktop_auth.py +508 -0
  22. hanzo_mcp/tools/agent/cli_agent_base.py +191 -0
  23. hanzo_mcp/tools/agent/code_auth.py +436 -0
  24. hanzo_mcp/tools/agent/code_auth_tool.py +194 -0
  25. hanzo_mcp/tools/agent/codex_cli_tool.py +123 -0
  26. hanzo_mcp/tools/agent/critic_tool.py +376 -0
  27. hanzo_mcp/tools/agent/gemini_cli_tool.py +128 -0
  28. hanzo_mcp/tools/agent/grok_cli_tool.py +128 -0
  29. hanzo_mcp/tools/agent/iching_tool.py +380 -0
  30. hanzo_mcp/tools/agent/network_tool.py +273 -0
  31. hanzo_mcp/tools/agent/prompt.py +62 -20
  32. hanzo_mcp/tools/agent/review_tool.py +433 -0
  33. hanzo_mcp/tools/agent/swarm_tool.py +535 -0
  34. hanzo_mcp/tools/agent/swarm_tool_v2.py +594 -0
  35. hanzo_mcp/tools/common/__init__.py +15 -1
  36. hanzo_mcp/tools/common/base.py +5 -4
  37. hanzo_mcp/tools/common/batch_tool.py +103 -11
  38. hanzo_mcp/tools/common/config_tool.py +2 -2
  39. hanzo_mcp/tools/common/context.py +2 -2
  40. hanzo_mcp/tools/common/context_fix.py +26 -0
  41. hanzo_mcp/tools/common/critic_tool.py +196 -0
  42. hanzo_mcp/tools/common/decorators.py +208 -0
  43. hanzo_mcp/tools/common/enhanced_base.py +106 -0
  44. hanzo_mcp/tools/common/fastmcp_pagination.py +369 -0
  45. hanzo_mcp/tools/common/forgiving_edit.py +243 -0
  46. hanzo_mcp/tools/common/mode.py +116 -0
  47. hanzo_mcp/tools/common/mode_loader.py +105 -0
  48. hanzo_mcp/tools/common/paginated_base.py +230 -0
  49. hanzo_mcp/tools/common/paginated_response.py +307 -0
  50. hanzo_mcp/tools/common/pagination.py +226 -0
  51. hanzo_mcp/tools/common/permissions.py +1 -1
  52. hanzo_mcp/tools/common/personality.py +936 -0
  53. hanzo_mcp/tools/common/plugin_loader.py +287 -0
  54. hanzo_mcp/tools/common/stats.py +4 -4
  55. hanzo_mcp/tools/common/tool_list.py +4 -1
  56. hanzo_mcp/tools/common/truncate.py +101 -0
  57. hanzo_mcp/tools/common/validation.py +1 -1
  58. hanzo_mcp/tools/config/__init__.py +3 -1
  59. hanzo_mcp/tools/config/config_tool.py +1 -1
  60. hanzo_mcp/tools/config/mode_tool.py +209 -0
  61. hanzo_mcp/tools/database/__init__.py +1 -1
  62. hanzo_mcp/tools/editor/__init__.py +1 -1
  63. hanzo_mcp/tools/filesystem/__init__.py +48 -14
  64. hanzo_mcp/tools/filesystem/ast_multi_edit.py +562 -0
  65. hanzo_mcp/tools/filesystem/batch_search.py +3 -3
  66. hanzo_mcp/tools/filesystem/diff.py +2 -2
  67. hanzo_mcp/tools/filesystem/directory_tree_paginated.py +338 -0
  68. hanzo_mcp/tools/filesystem/rules_tool.py +235 -0
  69. hanzo_mcp/tools/filesystem/{unified_search.py → search_tool.py} +12 -12
  70. hanzo_mcp/tools/filesystem/{symbols_unified.py → symbols_tool.py} +104 -5
  71. hanzo_mcp/tools/filesystem/watch.py +3 -2
  72. hanzo_mcp/tools/jupyter/__init__.py +2 -2
  73. hanzo_mcp/tools/jupyter/jupyter.py +1 -1
  74. hanzo_mcp/tools/llm/__init__.py +3 -3
  75. hanzo_mcp/tools/llm/llm_tool.py +648 -143
  76. hanzo_mcp/tools/lsp/__init__.py +5 -0
  77. hanzo_mcp/tools/lsp/lsp_tool.py +512 -0
  78. hanzo_mcp/tools/mcp/__init__.py +2 -2
  79. hanzo_mcp/tools/mcp/{mcp_unified.py → mcp_tool.py} +3 -3
  80. hanzo_mcp/tools/memory/__init__.py +76 -0
  81. hanzo_mcp/tools/memory/knowledge_tools.py +518 -0
  82. hanzo_mcp/tools/memory/memory_tools.py +456 -0
  83. hanzo_mcp/tools/search/__init__.py +6 -0
  84. hanzo_mcp/tools/search/find_tool.py +581 -0
  85. hanzo_mcp/tools/search/unified_search.py +953 -0
  86. hanzo_mcp/tools/shell/__init__.py +11 -6
  87. hanzo_mcp/tools/shell/auto_background.py +203 -0
  88. hanzo_mcp/tools/shell/base_process.py +57 -29
  89. hanzo_mcp/tools/shell/bash_session_executor.py +1 -1
  90. hanzo_mcp/tools/shell/{bash_unified.py → bash_tool.py} +18 -34
  91. hanzo_mcp/tools/shell/command_executor.py +2 -2
  92. hanzo_mcp/tools/shell/{npx_unified.py → npx_tool.py} +16 -33
  93. hanzo_mcp/tools/shell/open.py +2 -2
  94. hanzo_mcp/tools/shell/{process_unified.py → process_tool.py} +1 -1
  95. hanzo_mcp/tools/shell/run_command_windows.py +1 -1
  96. hanzo_mcp/tools/shell/streaming_command.py +594 -0
  97. hanzo_mcp/tools/shell/uvx.py +47 -2
  98. hanzo_mcp/tools/shell/uvx_background.py +47 -2
  99. hanzo_mcp/tools/shell/{uvx_unified.py → uvx_tool.py} +16 -33
  100. hanzo_mcp/tools/todo/__init__.py +14 -19
  101. hanzo_mcp/tools/todo/todo.py +22 -1
  102. hanzo_mcp/tools/vector/__init__.py +1 -1
  103. hanzo_mcp/tools/vector/infinity_store.py +2 -2
  104. hanzo_mcp/tools/vector/project_manager.py +1 -1
  105. hanzo_mcp/types.py +23 -0
  106. hanzo_mcp-0.7.0.dist-info/METADATA +516 -0
  107. hanzo_mcp-0.7.0.dist-info/RECORD +180 -0
  108. {hanzo_mcp-0.6.12.dist-info → hanzo_mcp-0.7.0.dist-info}/entry_points.txt +1 -0
  109. hanzo_mcp/tools/common/palette.py +0 -344
  110. hanzo_mcp/tools/common/palette_loader.py +0 -108
  111. hanzo_mcp/tools/config/palette_tool.py +0 -179
  112. hanzo_mcp/tools/llm/llm_unified.py +0 -851
  113. hanzo_mcp-0.6.12.dist-info/METADATA +0 -339
  114. hanzo_mcp-0.6.12.dist-info/RECORD +0 -135
  115. hanzo_mcp-0.6.12.dist-info/licenses/LICENSE +0 -21
  116. {hanzo_mcp-0.6.12.dist-info → hanzo_mcp-0.7.0.dist-info}/WHEEL +0 -0
  117. {hanzo_mcp-0.6.12.dist-info → hanzo_mcp-0.7.0.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ b/hanzo_mcp/tools/filesystem/ast_multi_edit.py
@@ -0,0 +1,562 @@
+"""AST-aware multi-edit tool using treesitter for accurate code modifications."""
+
+import os
+import json
+from typing import List, Dict, Any, Optional, Tuple, Set
+from pathlib import Path
+from dataclasses import dataclass
+from collections import defaultdict
+
+from hanzo_mcp.tools.common.base import BaseTool
+from hanzo_mcp.tools.common.decorators import with_context_normalization
+from hanzo_mcp.tools.common.paginated_response import AutoPaginatedResponse
+from hanzo_mcp.types import MCPResourceDocument
+
+try:
+    import tree_sitter
+    import tree_sitter_python
+    import tree_sitter_javascript
+    import tree_sitter_typescript
+    import tree_sitter_go
+    import tree_sitter_rust
+    import tree_sitter_java
+    import tree_sitter_cpp
+    TREESITTER_AVAILABLE = True
+except ImportError:
+    TREESITTER_AVAILABLE = False
+
+
+@dataclass
+class ASTMatch:
+    """Represents an AST match with context."""
+    file_path: str
+    line_start: int
+    line_end: int
+    column_start: int
+    column_end: int
+    node_type: str
+    text: str
+    parent_context: Optional[str] = None
+    semantic_context: Optional[str] = None
+
+
+@dataclass
+class EditOperation:
+    """Enhanced edit operation with AST awareness."""
+    old_string: str
+    new_string: str
+    node_types: Optional[List[str]] = None  # Restrict to specific AST node types
+    semantic_match: bool = False  # Use semantic matching
+    expect_count: Optional[int] = None  # Expected number of matches
+    context_lines: int = 5  # Lines of context for uniqueness
+
+
+class ASTMultiEdit(BaseTool):
+    """Multi-edit tool with AST awareness and automatic reference finding."""
+
+    name = "ast_multi_edit"
+    description = """Enhanced multi-edit with AST awareness and reference finding.
+
+Features:
+- AST-based search for accurate matches
+- Automatic reference finding across codebase
+- Semantic matching (find all usages of a symbol)
+- Result pagination to avoid token limits
+- Context-aware replacements
+
+Examples:
+1. Rename a function and all its calls:
+   ast_multi_edit("file.py", [
+       {"old_string": "oldFunc", "new_string": "newFunc", "semantic_match": true}
+   ])
+
+2. Update specific node types only:
+   ast_multi_edit("file.go", [
+       {"old_string": "StopTracking", "new_string": "StopTrackingWithContext",
+        "node_types": ["call_expression"]}
+   ])
+"""
+
+    def __init__(self):
+        super().__init__()
+        self.parsers = {}
+        self.languages = {}
+        self._language_objects = {}
+
+        if TREESITTER_AVAILABLE:
+            self._init_parsers()
+
+    def _init_parsers(self):
+        """Initialize treesitter parsers for supported languages."""
+        language_mapping = {
+            '.py': (tree_sitter_python, 'python'),
+            '.js': (tree_sitter_javascript, 'javascript'),
+            '.jsx': (tree_sitter_javascript, 'javascript'),
+            '.ts': (tree_sitter_typescript.typescript, 'typescript'),
+            '.tsx': (tree_sitter_typescript.tsx, 'tsx'),
+            '.go': (tree_sitter_go, 'go'),
+            '.rs': (tree_sitter_rust, 'rust'),
+            '.java': (tree_sitter_java, 'java'),
+            '.cpp': (tree_sitter_cpp, 'cpp'),
+            '.cc': (tree_sitter_cpp, 'cpp'),
+            '.cxx': (tree_sitter_cpp, 'cpp'),
+            '.h': (tree_sitter_cpp, 'cpp'),
+            '.hpp': (tree_sitter_cpp, 'cpp'),
+        }
+
+        for ext, (module, name) in language_mapping.items():
+            try:
+                # The typescript entries are language functions themselves;
+                # the other entries are modules exposing language().
+                get_language = module.language if hasattr(module, 'language') else module
+                language = get_language()
+                if not isinstance(language, tree_sitter.Language):
+                    # Older tree-sitter bindings return a raw pointer that
+                    # must be wrapped in a Language object.
+                    language = tree_sitter.Language(language, name)
+                parser = tree_sitter.Parser()
+                parser.set_language(language)
+                self.parsers[ext] = parser
+                self.languages[ext] = name
+                self._language_objects[ext] = language
+            except Exception as e:
+                print(f"Failed to initialize parser for {ext}: {e}")
+
+    def _get_parser(self, file_path: str) -> Optional[tree_sitter.Parser]:
+        """Get parser for file type."""
+        ext = Path(file_path).suffix.lower()
+        return self.parsers.get(ext)
+
+    def _parse_file(self, file_path: str, content: str) -> Optional[tree_sitter.Tree]:
+        """Parse file content into AST."""
+        parser = self._get_parser(file_path)
+        if not parser:
+            return None
+
+        return parser.parse(bytes(content, 'utf-8'))
+
+    def _find_references(self,
+                         symbol: str,
+                         file_path: str,
+                         project_root: Optional[str] = None) -> List[ASTMatch]:
+        """Find all references to a symbol across the project."""
+        matches = []
+
+        if not project_root:
+            project_root = self._find_project_root(file_path)
+
+        # Get language-specific reference patterns
+        patterns = self._get_reference_patterns(symbol, file_path)
+
+        # Search across all relevant files
+        for pattern in patterns:
+            # Use grep_ast tool for efficient AST-aware search
+            results = self._search_with_ast(pattern, project_root)
+            matches.extend(results)
+
+        return matches
+
+    def _get_reference_patterns(self, symbol: str, file_path: str) -> List[Dict[str, Any]]:
+        """Get language-specific patterns for finding references."""
+        ext = Path(file_path).suffix.lower()
+        lang = self.languages.get(ext, 'generic')
+
+        patterns = []
+
+        if lang == 'go':
+            # Go-specific patterns
+            patterns.extend([
+                # Function calls
+                {"query": f"(call_expression function: (identifier) @func (#eq? @func \"{symbol}\"))", "type": "call"},
+                # Method calls
+                {"query": f"(call_expression function: (selector_expression field: (field_identifier) @method (#eq? @method \"{symbol}\")))", "type": "method_call"},
+                # Function declarations
+                {"query": f"(function_declaration name: (identifier) @name (#eq? @name \"{symbol}\"))", "type": "declaration"},
+                # Type references
+                {"query": f"(type_identifier) @type (#eq? @type \"{symbol}\")", "type": "type_ref"},
+            ])
+        elif lang in ['javascript', 'typescript', 'tsx']:
+            patterns.extend([
+                # Function calls
+                {"query": f"(call_expression function: (identifier) @func (#eq? @func \"{symbol}\"))", "type": "call"},
+                # Method calls
+                {"query": f"(call_expression function: (member_expression property: (property_identifier) @prop (#eq? @prop \"{symbol}\")))", "type": "method_call"},
+                # Function declarations
+                {"query": f"(function_declaration name: (identifier) @name (#eq? @name \"{symbol}\"))", "type": "declaration"},
+                # Variable declarations
+                {"query": f"(variable_declarator name: (identifier) @var (#eq? @var \"{symbol}\"))", "type": "variable"},
+            ])
+        elif lang == 'python':
+            patterns.extend([
+                # Function calls
+                {"query": f"(call function: (identifier) @func (#eq? @func \"{symbol}\"))", "type": "call"},
+                # Method calls
+                {"query": f"(call function: (attribute attribute: (identifier) @attr (#eq? @attr \"{symbol}\")))", "type": "method_call"},
+                # Function definitions
+                {"query": f"(function_definition name: (identifier) @name (#eq? @name \"{symbol}\"))", "type": "declaration"},
+                # Class definitions
+                {"query": f"(class_definition name: (identifier) @name (#eq? @name \"{symbol}\"))", "type": "class"},
+            ])
+        else:
+            # Generic patterns
+            patterns.append({"query": symbol, "type": "text"})
+
+        return patterns
+
+    def _search_with_ast(self, pattern: Dict[str, Any], root: str) -> List[ASTMatch]:
+        """Search using AST patterns."""
+        matches = []
+
+        # This would integrate with the grep_ast tool; for now, scan
+        # files directly.
+        import glob
+
+        for file_path in glob.glob(f"{root}/**/*.*", recursive=True):
+            if self._should_skip_file(file_path):
+                continue
+
+            try:
+                with open(file_path, 'r', encoding='utf-8') as f:
+                    content = f.read()
+
+                tree = self._parse_file(file_path, content)
+                if tree and pattern["type"] != "text":
+                    # Use treesitter query
+                    matches.extend(self._query_ast(tree, pattern, file_path, content))
+                else:
+                    # Fallback to text search
+                    matches.extend(self._text_search(content, pattern["query"], file_path))
+
+            except Exception:
+                continue
+
+        return matches
+
+    def _query_ast(self,
+                   tree: tree_sitter.Tree,
+                   pattern: Dict[str, Any],
+                   file_path: str,
+                   content: str) -> List[ASTMatch]:
+        """Query AST with treesitter pattern."""
+        matches = []
+
+        try:
+            # Get the Language object for this file type
+            language = self._language_objects.get(Path(file_path).suffix.lower())
+            if not language:
+                return matches
+
+            # Execute query
+            query = language.query(pattern["query"])
+            captures = query.captures(tree.root_node)
+
+            for node, name in captures:
+                match = ASTMatch(
+                    file_path=file_path,
+                    line_start=node.start_point[0] + 1,
+                    line_end=node.end_point[0] + 1,
+                    column_start=node.start_point[1],
+                    column_end=node.end_point[1],
+                    node_type=node.type,
+                    text=content[node.start_byte:node.end_byte],
+                    parent_context=self._get_parent_context(node, content),
+                    semantic_context=pattern["type"]
+                )
+                matches.append(match)
+
+        except Exception:
+            # Malformed query or parse failure: return what matched so far
+            pass
+
+        return matches
+
+    def _get_parent_context(self, node: tree_sitter.Node, content: str) -> Optional[str]:
+        """Get parent context for better understanding."""
+        parent = node.parent
+        if parent:
+            # Get parent function/class name
+            if parent.type in ['function_declaration', 'function_definition', 'method_definition']:
+                for child in parent.children:
+                    if child.type == 'identifier':
+                        return f"function: {content[child.start_byte:child.end_byte]}"
+            elif parent.type in ['class_declaration', 'class_definition']:
+                for child in parent.children:
+                    if child.type == 'identifier':
+                        return f"class: {content[child.start_byte:child.end_byte]}"
+
+        return None
+
+    def _text_search(self, content: str, pattern: str, file_path: str) -> List[ASTMatch]:
+        """Fallback text search."""
+        matches = []
+        lines = content.split('\n')
+
+        for i, line in enumerate(lines):
+            if pattern in line:
+                col = line.find(pattern)
+                match = ASTMatch(
+                    file_path=file_path,
+                    line_start=i + 1,
+                    line_end=i + 1,
+                    column_start=col,
+                    column_end=col + len(pattern),
+                    node_type='text',
+                    text=pattern,
+                    semantic_context='text_match'
+                )
+                matches.append(match)
+
+        return matches
+
+    def _should_skip_file(self, file_path: str) -> bool:
+        """Check if file should be skipped."""
+        skip_dirs = {'.git', 'node_modules', '__pycache__', '.pytest_cache', 'venv', '.env'}
+        skip_extensions = {'.pyc', '.pyo', '.so', '.dylib', '.dll', '.exe'}
+
+        path = Path(file_path)
+
+        # Check directories
+        for part in path.parts:
+            if part in skip_dirs:
+                return True
+
+        # Check extensions
+        if path.suffix in skip_extensions:
+            return True
+
+        # Check if binary
+        try:
+            with open(file_path, 'rb') as f:
+                chunk = f.read(512)
+                if b'\0' in chunk:
+                    return True
+        except OSError:
+            return True
+
+        return False
+
+    def _find_project_root(self, file_path: str) -> str:
+        """Find project root by looking for markers."""
+        markers = {'.git', 'package.json', 'go.mod', 'Cargo.toml', 'pyproject.toml', 'setup.py'}
+
+        path = Path(file_path).resolve()
+        for parent in path.parents:
+            for marker in markers:
+                if (parent / marker).exists():
+                    return str(parent)
+
+        return str(path.parent)
+
+    def _group_matches_by_file(self, matches: List[ASTMatch]) -> Dict[str, List[ASTMatch]]:
+        """Group matches by file for efficient editing."""
+        grouped = defaultdict(list)
+        for match in matches:
+            grouped[match.file_path].append(match)
+        return grouped
+
+    def _create_unique_context(self,
+                               content: str,
+                               match: ASTMatch,
+                               context_lines: int) -> str:
+        """Create unique context for edit identification."""
+        lines = content.split('\n')
+
+        start_line = max(0, match.line_start - context_lines - 1)
+        end_line = min(len(lines), match.line_end + context_lines)
+
+        return '\n'.join(lines[start_line:end_line])
+
+    async def run(self,
+                  file_path: str,
+                  edits: List[Dict[str, Any]],
+                  find_references: bool = False,
+                  page_size: int = 50,
+                  preview_only: bool = False,
+                  **kwargs) -> MCPResourceDocument:
+        """Execute AST-aware multi-edit operation.
+
+        Args:
+            file_path: Primary file to edit
+            edits: List of edit operations
+            find_references: Whether to find and edit references across codebase
+            page_size: Number of results per page
+            preview_only: Show what would be changed without applying
+        """
+
+        if not TREESITTER_AVAILABLE:
+            return await self._fallback_to_basic_edit(file_path, edits)
+
+        results = {
+            "primary_file": file_path,
+            "edits_requested": len(edits),
+            "files_analyzed": 0,
+            "matches_found": 0,
+            "edits_applied": 0,
+            "errors": [],
+            "changes": []
+        }
+
+        # Convert edits to EditOperation objects
+        edit_ops = []
+        for edit in edits:
+            edit_ops.append(EditOperation(
+                old_string=edit["old_string"],
+                new_string=edit["new_string"],
+                node_types=edit.get("node_types"),
+                semantic_match=edit.get("semantic_match", False),
+                expect_count=edit.get("expect_count"),
+                context_lines=edit.get("context_lines", 5)
+            ))
+
+        # Find all matches
+        all_matches = []
+
+        # First, analyze the primary file
+        try:
+            with open(file_path, 'r', encoding='utf-8') as f:
+                content = f.read()
+
+            tree = self._parse_file(file_path, content)
+
+            for edit_op in edit_ops:
+                if edit_op.semantic_match and find_references:
+                    # Find all references across codebase
+                    matches = self._find_references(edit_op.old_string, file_path)
+                else:
+                    # Just search in the current file
+                    if tree:
+                        pattern = {"query": edit_op.old_string, "type": "text"}
+                        matches = self._query_ast(tree, pattern, file_path, content)
+                    else:
+                        matches = self._text_search(content, edit_op.old_string, file_path)
+
+                # Filter by node types if specified
+                if edit_op.node_types:
+                    matches = [m for m in matches if m.node_type in edit_op.node_types]
+
+                # Check expected count
+                if edit_op.expect_count is not None and len(matches) != edit_op.expect_count:
+                    results["errors"].append({
+                        "edit": edit_op.old_string,
+                        "expected": edit_op.expect_count,
+                        "found": len(matches),
+                        "locations": [f"{m.file_path}:{m.line_start}" for m in matches[:5]]
+                    })
+                    continue
+
+                all_matches.extend([(edit_op, match) for match in matches])
+
+        except Exception as e:
+            results["errors"].append({
+                "file": file_path,
+                "error": str(e)
+            })
+            return MCPResourceDocument(data=results)
+
+        results["matches_found"] = len(all_matches)
+        results["files_analyzed"] = len(set(m[1].file_path for m in all_matches))
+
+        if preview_only:
+            # Return preview of changes
+            preview = self._generate_preview(all_matches, page_size)
+            results["preview"] = preview
+            return MCPResourceDocument(data=results)
+
+        # Apply edits
+        changes_by_file = self._group_changes(all_matches)
+
+        for target_path, changes in changes_by_file.items():
+            try:
+                success = await self._apply_file_changes(target_path, changes)
+                if success:
+                    results["edits_applied"] += len(changes)
+                    results["changes"].append({
+                        "file": target_path,
+                        "edits": len(changes)
+                    })
+            except Exception as e:
+                results["errors"].append({
+                    "file": target_path,
+                    "error": str(e)
+                })
+
+        return MCPResourceDocument(data=results)
+
+    def _group_changes(self, matches: List[Tuple[EditOperation, ASTMatch]]) -> Dict[str, List[Tuple[EditOperation, ASTMatch]]]:
+        """Group changes by file."""
+        grouped = defaultdict(list)
+        for edit_op, match in matches:
+            grouped[match.file_path].append((edit_op, match))
+        return grouped
+
+    async def _apply_file_changes(self,
+                                  file_path: str,
+                                  changes: List[Tuple[EditOperation, ASTMatch]]) -> bool:
+        """Apply changes to a single file."""
+        with open(file_path, 'r', encoding='utf-8') as f:
+            content = f.read()
+
+        # Sort changes by position (reverse order to maintain positions)
+        changes.sort(key=lambda x: (x[1].line_start, x[1].column_start), reverse=True)
+
+        lines = content.split('\n')
+
+        for edit_op, match in changes:
+            # Apply the edit
+            if match.line_start == match.line_end:
+                # Single-line edit
+                line = lines[match.line_start - 1]
+                before = line[:match.column_start]
+                after = line[match.column_end:]
+                lines[match.line_start - 1] = before + edit_op.new_string + after
+            else:
+                # Multi-line edit: remove old lines, insert new content
+                del lines[match.line_start - 1:match.line_end]
+                lines.insert(match.line_start - 1, edit_op.new_string)
+
+        # Write back
+        with open(file_path, 'w', encoding='utf-8') as f:
+            f.write('\n'.join(lines))
+
+        return True
+
+    def _generate_preview(self,
+                          matches: List[Tuple[EditOperation, ASTMatch]],
+                          page_size: int) -> List[Dict[str, Any]]:
+        """Generate preview of changes."""
+        preview = []
+
+        for edit_op, match in matches[:page_size]:
+            preview.append({
+                "file": match.file_path,
+                "line": match.line_start,
+                "column": match.column_start,
+                "node_type": match.node_type,
+                "context": match.parent_context,
+                "old": edit_op.old_string,
+                "new": edit_op.new_string,
+                "semantic_type": match.semantic_context
+            })
+
+        if len(matches) > page_size:
+            preview.append({
+                "note": f"... and {len(matches) - page_size} more matches"
+            })
+
+        return preview
+
+    async def _fallback_to_basic_edit(self, file_path: str, edits: List[Dict[str, Any]]) -> MCPResourceDocument:
+        """Fallback to basic multi-edit when treesitter is not available."""
+        # Delegate to the existing multi_edit tool
+        from hanzo_mcp.tools.filesystem.multi_edit import MultiEdit
+        basic_tool = MultiEdit()
+        return await basic_tool.run(file_path, edits)
+
+
+# Tool registration
+def create_ast_multi_edit_tool():
+    """Factory function to create the AST multi-edit tool."""
+    return ASTMultiEdit()
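
For orientation, a minimal usage sketch of the new tool (not part of the diff). It exercises the run() signature defined above and mirrors the docstring's Go example; the file name "service.go" and the expect_count value are hypothetical.

# Hypothetical sketch: drive ast_multi_edit directly, assuming the new
# module above is importable. preview_only reports matches without
# writing any files.
import asyncio

from hanzo_mcp.tools.filesystem.ast_multi_edit import ASTMultiEdit

async def main():
    tool = ASTMultiEdit()
    result = await tool.run(
        file_path="service.go",
        edits=[{
            "old_string": "StopTracking",
            "new_string": "StopTrackingWithContext",
            "node_types": ["call_expression"],  # only rewrite call sites
            "expect_count": 3,  # a mismatched count is reported as an error
        }],
        find_references=False,
        preview_only=True,
    )
    print(result)

asyncio.run(main())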
--- a/hanzo_mcp/tools/filesystem/batch_search.py
+++ b/hanzo_mcp/tools/filesystem/batch_search.py
@@ -45,7 +45,7 @@ class SearchType(Enum):
 
 @dataclass
 class SearchResult:
-    """Unified search result combining different search types."""
+    """Search result combining different search types."""
     file_path: str
     line_number: Optional[int]
    content: str
@@ -112,11 +112,11 @@ class BatchSearchParams(TypedDict):
 
 @final
 class BatchSearchTool(FilesystemBaseTool):
-    """Unified search tool combining multiple search strategies."""
+    """Search tool combining multiple search strategies."""
 
     def __init__(self, permission_manager: PermissionManager,
                  project_manager: Optional[ProjectVectorManager] = None):
-        """Initialize the unified search tool."""
+        """Initialize the search tool."""
         super().__init__(permission_manager)
         self.project_manager = project_manager
 
--- a/hanzo_mcp/tools/filesystem/diff.py
+++ b/hanzo_mcp/tools/filesystem/diff.py
@@ -98,7 +98,7 @@ diff a.json b.json --ignore-whitespace"""
 
         # Generate diff
         if unified:
-            # Unified diff format
+            # diff format
             diff_lines = list(difflib.unified_diff(
                 lines1,
                 lines2,
@@ -183,7 +183,7 @@ diff a.json b.json --ignore-whitespace"""
             unified: bool = True,
             context: int = 3,
             ignore_whitespace: bool = False,
-            show_line_numbers: bool = True,
+            show_line_numbers: bool = True
         ) -> str:
             """Handle diff tool calls."""
             return await tool_self.run(
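
As a side note on the diff.py hunks above (not part of the diff): the difflib.unified_diff call they touch is a standard-library API, so its behavior can be sketched independently. The inputs and file labels below are invented for illustration.

# Standalone sketch of the stdlib difflib call used by the diff tool.
import difflib

lines1 = ["alpha\n", "beta\n"]
lines2 = ["alpha\n", "gamma\n"]

diff_lines = list(difflib.unified_diff(
    lines1,
    lines2,
    fromfile="a.json",
    tofile="b.json",
    n=3,  # lines of context, cf. the tool's context parameter
))
print("".join(diff_lines))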