hanzo-mcp 0.8.8__py3-none-any.whl → 0.8.13__py3-none-any.whl
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Potentially problematic release: this version of hanzo-mcp might be problematic (details are available on the registry page).
- hanzo_mcp/__init__.py +1 -3
- hanzo_mcp/analytics/posthog_analytics.py +4 -17
- hanzo_mcp/bridge.py +9 -25
- hanzo_mcp/cli.py +8 -17
- hanzo_mcp/cli_enhanced.py +5 -14
- hanzo_mcp/cli_plugin.py +3 -9
- hanzo_mcp/config/settings.py +6 -20
- hanzo_mcp/config/tool_config.py +2 -4
- hanzo_mcp/core/base_agent.py +88 -88
- hanzo_mcp/core/model_registry.py +238 -210
- hanzo_mcp/dev_server.py +5 -15
- hanzo_mcp/prompts/__init__.py +2 -6
- hanzo_mcp/prompts/project_todo_reminder.py +3 -9
- hanzo_mcp/prompts/tool_explorer.py +1 -3
- hanzo_mcp/prompts/utils.py +7 -21
- hanzo_mcp/server.py +6 -7
- hanzo_mcp/tools/__init__.py +13 -29
- hanzo_mcp/tools/agent/__init__.py +2 -1
- hanzo_mcp/tools/agent/agent.py +10 -30
- hanzo_mcp/tools/agent/agent_tool.py +6 -17
- hanzo_mcp/tools/agent/agent_tool_v1_deprecated.py +15 -42
- hanzo_mcp/tools/agent/claude_desktop_auth.py +3 -9
- hanzo_mcp/tools/agent/cli_agent_base.py +7 -24
- hanzo_mcp/tools/agent/cli_tools.py +76 -75
- hanzo_mcp/tools/agent/code_auth.py +1 -3
- hanzo_mcp/tools/agent/code_auth_tool.py +2 -6
- hanzo_mcp/tools/agent/critic_tool.py +8 -24
- hanzo_mcp/tools/agent/iching_tool.py +12 -36
- hanzo_mcp/tools/agent/network_tool.py +7 -18
- hanzo_mcp/tools/agent/prompt.py +1 -5
- hanzo_mcp/tools/agent/review_tool.py +10 -25
- hanzo_mcp/tools/agent/swarm_alias.py +1 -3
- hanzo_mcp/tools/agent/swarm_tool.py +16 -41
- hanzo_mcp/tools/agent/swarm_tool_v1_deprecated.py +11 -39
- hanzo_mcp/tools/agent/unified_cli_tools.py +38 -38
- hanzo_mcp/tools/common/batch_tool.py +15 -45
- hanzo_mcp/tools/common/config_tool.py +9 -28
- hanzo_mcp/tools/common/context.py +1 -3
- hanzo_mcp/tools/common/critic_tool.py +1 -3
- hanzo_mcp/tools/common/decorators.py +2 -6
- hanzo_mcp/tools/common/enhanced_base.py +2 -6
- hanzo_mcp/tools/common/fastmcp_pagination.py +4 -12
- hanzo_mcp/tools/common/forgiving_edit.py +9 -28
- hanzo_mcp/tools/common/mode.py +1 -5
- hanzo_mcp/tools/common/paginated_base.py +3 -11
- hanzo_mcp/tools/common/paginated_response.py +10 -30
- hanzo_mcp/tools/common/pagination.py +3 -9
- hanzo_mcp/tools/common/permissions.py +3 -9
- hanzo_mcp/tools/common/personality.py +9 -34
- hanzo_mcp/tools/common/plugin_loader.py +3 -15
- hanzo_mcp/tools/common/stats.py +7 -19
- hanzo_mcp/tools/common/thinking_tool.py +1 -3
- hanzo_mcp/tools/common/tool_disable.py +2 -6
- hanzo_mcp/tools/common/tool_list.py +2 -6
- hanzo_mcp/tools/common/validation.py +1 -3
- hanzo_mcp/tools/config/config_tool.py +7 -13
- hanzo_mcp/tools/config/index_config.py +1 -3
- hanzo_mcp/tools/config/mode_tool.py +5 -15
- hanzo_mcp/tools/database/database_manager.py +3 -9
- hanzo_mcp/tools/database/graph.py +1 -3
- hanzo_mcp/tools/database/graph_add.py +3 -9
- hanzo_mcp/tools/database/graph_query.py +11 -34
- hanzo_mcp/tools/database/graph_remove.py +3 -9
- hanzo_mcp/tools/database/graph_search.py +6 -20
- hanzo_mcp/tools/database/graph_stats.py +11 -33
- hanzo_mcp/tools/database/sql.py +4 -12
- hanzo_mcp/tools/database/sql_query.py +6 -10
- hanzo_mcp/tools/database/sql_search.py +2 -6
- hanzo_mcp/tools/database/sql_stats.py +5 -15
- hanzo_mcp/tools/editor/neovim_command.py +1 -3
- hanzo_mcp/tools/editor/neovim_session.py +7 -13
- hanzo_mcp/tools/filesystem/__init__.py +2 -3
- hanzo_mcp/tools/filesystem/ast_multi_edit.py +14 -43
- hanzo_mcp/tools/filesystem/base.py +4 -12
- hanzo_mcp/tools/filesystem/batch_search.py +35 -115
- hanzo_mcp/tools/filesystem/content_replace.py +4 -12
- hanzo_mcp/tools/filesystem/diff.py +2 -10
- hanzo_mcp/tools/filesystem/directory_tree.py +9 -27
- hanzo_mcp/tools/filesystem/directory_tree_paginated.py +5 -15
- hanzo_mcp/tools/filesystem/edit.py +6 -18
- hanzo_mcp/tools/filesystem/find.py +3 -9
- hanzo_mcp/tools/filesystem/find_files.py +2 -6
- hanzo_mcp/tools/filesystem/git_search.py +9 -24
- hanzo_mcp/tools/filesystem/grep.py +9 -27
- hanzo_mcp/tools/filesystem/multi_edit.py +6 -18
- hanzo_mcp/tools/filesystem/read.py +8 -26
- hanzo_mcp/tools/filesystem/rules_tool.py +6 -17
- hanzo_mcp/tools/filesystem/search_tool.py +18 -62
- hanzo_mcp/tools/filesystem/symbols_tool.py +5 -15
- hanzo_mcp/tools/filesystem/tree.py +1 -3
- hanzo_mcp/tools/filesystem/watch.py +1 -3
- hanzo_mcp/tools/filesystem/write.py +1 -3
- hanzo_mcp/tools/jupyter/base.py +6 -20
- hanzo_mcp/tools/jupyter/jupyter.py +4 -12
- hanzo_mcp/tools/jupyter/notebook_edit.py +11 -35
- hanzo_mcp/tools/jupyter/notebook_read.py +2 -6
- hanzo_mcp/tools/llm/consensus_tool.py +8 -24
- hanzo_mcp/tools/llm/llm_manage.py +2 -6
- hanzo_mcp/tools/llm/llm_tool.py +17 -58
- hanzo_mcp/tools/llm/llm_unified.py +18 -59
- hanzo_mcp/tools/llm/provider_tools.py +1 -3
- hanzo_mcp/tools/lsp/lsp_tool.py +5 -17
- hanzo_mcp/tools/mcp/mcp_add.py +3 -5
- hanzo_mcp/tools/mcp/mcp_remove.py +1 -1
- hanzo_mcp/tools/mcp/mcp_stats.py +1 -3
- hanzo_mcp/tools/mcp/mcp_tool.py +9 -23
- hanzo_mcp/tools/memory/__init__.py +33 -40
- hanzo_mcp/tools/memory/knowledge_tools.py +7 -25
- hanzo_mcp/tools/memory/memory_tools.py +7 -19
- hanzo_mcp/tools/search/find_tool.py +10 -32
- hanzo_mcp/tools/search/unified_search.py +27 -81
- hanzo_mcp/tools/shell/__init__.py +2 -2
- hanzo_mcp/tools/shell/auto_background.py +2 -6
- hanzo_mcp/tools/shell/base.py +1 -5
- hanzo_mcp/tools/shell/base_process.py +5 -7
- hanzo_mcp/tools/shell/bash_session.py +7 -24
- hanzo_mcp/tools/shell/bash_session_executor.py +5 -15
- hanzo_mcp/tools/shell/bash_tool.py +3 -7
- hanzo_mcp/tools/shell/command_executor.py +26 -79
- hanzo_mcp/tools/shell/logs.py +4 -16
- hanzo_mcp/tools/shell/npx.py +2 -8
- hanzo_mcp/tools/shell/npx_tool.py +1 -3
- hanzo_mcp/tools/shell/pkill.py +4 -12
- hanzo_mcp/tools/shell/process_tool.py +2 -8
- hanzo_mcp/tools/shell/processes.py +5 -17
- hanzo_mcp/tools/shell/run_background.py +1 -3
- hanzo_mcp/tools/shell/run_command.py +1 -3
- hanzo_mcp/tools/shell/run_command_windows.py +1 -3
- hanzo_mcp/tools/shell/session_manager.py +2 -6
- hanzo_mcp/tools/shell/session_storage.py +2 -6
- hanzo_mcp/tools/shell/streaming_command.py +7 -23
- hanzo_mcp/tools/shell/uvx.py +4 -14
- hanzo_mcp/tools/shell/uvx_background.py +2 -6
- hanzo_mcp/tools/shell/uvx_tool.py +1 -3
- hanzo_mcp/tools/shell/zsh_tool.py +12 -20
- hanzo_mcp/tools/todo/todo.py +1 -3
- hanzo_mcp/tools/todo/todo_read.py +3 -9
- hanzo_mcp/tools/todo/todo_write.py +6 -18
- hanzo_mcp/tools/vector/__init__.py +3 -9
- hanzo_mcp/tools/vector/ast_analyzer.py +6 -20
- hanzo_mcp/tools/vector/git_ingester.py +10 -30
- hanzo_mcp/tools/vector/index_tool.py +3 -9
- hanzo_mcp/tools/vector/infinity_store.py +11 -30
- hanzo_mcp/tools/vector/mock_infinity.py +159 -0
- hanzo_mcp/tools/vector/project_manager.py +4 -12
- hanzo_mcp/tools/vector/vector.py +2 -6
- hanzo_mcp/tools/vector/vector_index.py +8 -8
- hanzo_mcp/tools/vector/vector_search.py +7 -21
- {hanzo_mcp-0.8.8.dist-info → hanzo_mcp-0.8.13.dist-info}/METADATA +2 -2
- hanzo_mcp-0.8.13.dist-info/RECORD +193 -0
- hanzo_mcp-0.8.8.dist-info/RECORD +0 -192
- {hanzo_mcp-0.8.8.dist-info → hanzo_mcp-0.8.13.dist-info}/WHEEL +0 -0
- {hanzo_mcp-0.8.8.dist-info → hanzo_mcp-0.8.13.dist-info}/entry_points.txt +0 -0
- {hanzo_mcp-0.8.8.dist-info → hanzo_mcp-0.8.13.dist-info}/top_level.txt +0 -0
hanzo_mcp/tools/database/graph_query.py
CHANGED

@@ -102,9 +102,7 @@ class GraphQueryParams(TypedDict, total=False):
 class GraphQueryTool(BaseTool):
     """Tool for querying the graph database."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the graph query tool.
 
         Args:

@@ -192,10 +190,7 @@ Examples:
             return f"Error: Invalid query '{query}'. Must be one of: {', '.join(valid_queries)}"
 
         # Validate required parameters
-        if (
-            query in ["neighbors", "subgraph", "connected", "ancestors", "descendants"]
-            and not node_id
-        ):
+        if query in ["neighbors", "subgraph", "connected", "ancestors", "descendants"] and not node_id:
             return f"Error: node_id is required for '{query}' query"
 
         if query == "path" and (not node_id or not target_id):

@@ -225,27 +220,17 @@ Examples:
 
         try:
             if query == "neighbors":
-                return self._query_neighbors(
-                    graph_conn, node_id, relationship, node_type, direction
-                )
+                return self._query_neighbors(graph_conn, node_id, relationship, node_type, direction)
             elif query == "path":
                 return self._query_path(graph_conn, node_id, target_id, relationship)
             elif query == "subgraph":
-                return self._query_subgraph(
-                    graph_conn, node_id, depth, relationship, node_type, direction
-                )
+                return self._query_subgraph(graph_conn, node_id, depth, relationship, node_type, direction)
             elif query == "connected":
-                return self._query_connected(
-                    graph_conn, node_id, relationship, node_type, direction
-                )
+                return self._query_connected(graph_conn, node_id, relationship, node_type, direction)
             elif query == "ancestors":
-                return self._query_ancestors(
-                    graph_conn, node_id, depth, relationship, node_type
-                )
+                return self._query_ancestors(graph_conn, node_id, depth, relationship, node_type)
             elif query == "descendants":
-                return self._query_descendants(
-                    graph_conn, node_id, depth, relationship, node_type
-                )
+                return self._query_descendants(graph_conn, node_id, depth, relationship, node_type)
 
         except Exception as e:
             await tool_ctx.error(f"Failed to execute query: {str(e)}")

@@ -331,13 +316,9 @@ Examples:
         output = [f"Neighbors of '{node_id}' ({node_info[0]}):\n"]
         for n in neighbors:
             arrow = "<--" if n["direction"] == "incoming" else "-->"
-            output.append(
-                f" {node_id} {arrow}[{n['relationship']}]--> {n['node_id']} ({n['node_type']})"
-            )
+            output.append(f" {node_id} {arrow}[{n['relationship']}]--> {n['node_id']} ({n['node_type']})")
             if n["properties"]:
-                output.append(
-                    f" Properties: {json.dumps(n['properties'], indent=6)[:100]}"
-                )
+                output.append(f" Properties: {json.dumps(n['properties'], indent=6)[:100]}")
 
         output.append(f"\nTotal neighbors: {len(neighbors)}")
         return "\n".join(output)

@@ -590,9 +571,7 @@ Examples:
         node_type: Optional[str],
     ) -> str:
         """Find nodes that point TO this node (incoming edges only)."""
-        return self._query_subgraph(
-            conn, node_id, depth, relationship, node_type, "incoming"
-        )
+        return self._query_subgraph(conn, node_id, depth, relationship, node_type, "incoming")
 
     def _query_descendants(
         self,

@@ -603,9 +582,7 @@ Examples:
         node_type: Optional[str],
     ) -> str:
         """Find nodes that this node points TO (outgoing edges only)."""
-        return self._query_subgraph(
-            conn, node_id, depth, relationship, node_type, "outgoing"
-        )
+        return self._query_subgraph(conn, node_id, depth, relationship, node_type, "outgoing")
 
     def register(self, mcp_server) -> None:
         """Register this tool with the MCP server."""
hanzo_mcp/tools/database/graph_remove.py
CHANGED

@@ -74,9 +74,7 @@ class GraphRemoveParams(TypedDict, total=False):
 class GraphRemoveTool(BaseTool):
     """Tool for removing nodes and edges from graph database."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the graph remove tool.
 
         Args:

@@ -220,9 +218,7 @@ Examples:
             # Remove edge(s)
             if relationship:
                 # Remove specific edge
-                await tool_ctx.info(
-                    f"Removing edge: {source} --[{relationship}]--> {target}"
-                )
+                await tool_ctx.info(f"Removing edge: {source} --[{relationship}]--> {target}")
 
                 cursor = graph_conn.cursor()
                 cursor.execute(

@@ -242,9 +238,7 @@ Examples:
                 return f"Successfully removed edge: {source} --[{relationship}]--> {target}"
             else:
                 # Remove all edges between nodes
-                await tool_ctx.info(
-                    f"Removing all edges between {source} and {target}"
-                )
+                await tool_ctx.info(f"Removing all edges between {source} and {target}")
 
                 cursor = graph_conn.cursor()
                 cursor.execute(
hanzo_mcp/tools/database/graph_search.py
CHANGED

@@ -76,9 +76,7 @@ class GraphSearchParams(TypedDict, total=False):
 class GraphSearchTool(BaseTool):
     """Tool for searching nodes and edges in graph database."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the graph search tool.
 
         Args:

@@ -315,13 +313,8 @@ Examples:
             output.append(f"Nodes ({len(nodes)}):")
             for node in nodes[:20]:  # Show first 20
                 output.append(f" {node['id']} ({node['node_type']})")
-                if (
-                    node["match_field"] == "properties"
-                    and "matching_properties" in node
-                ):
-                    output.append(
-                        f" Matched in: {list(node['matching_properties'].keys())}"
-                    )
+                if node["match_field"] == "properties" and "matching_properties" in node:
+                    output.append(f" Matched in: {list(node['matching_properties'].keys())}")
                 if node["properties"] and node["match_field"] != "properties":
                     props_str = json.dumps(node["properties"], indent=6)[:100]
                     if len(props_str) == 100:

@@ -335,16 +328,9 @@ Examples:
         if edges:
             output.append(f"Edges ({len(edges)}):")
             for edge in edges[:20]:  # Show first 20
-                output.append(
-                    f" {edge['source']} --[{edge['relationship']}]--> {edge['target']}"
-                )
-                if (
-                    edge["match_field"] == "properties"
-                    and "matching_properties" in edge
-                ):
-                    output.append(
-                        f" Matched in: {list(edge['matching_properties'].keys())}"
-                    )
+                output.append(f" {edge['source']} --[{edge['relationship']}]--> {edge['target']}")
+                if edge["match_field"] == "properties" and "matching_properties" in edge:
+                    output.append(f" Matched in: {list(edge['matching_properties'].keys())}")
                 if edge["weight"] != 1.0:
                     output.append(f" Weight: {edge['weight']}")
                 if edge["properties"]:
hanzo_mcp/tools/database/graph_stats.py
CHANGED

@@ -58,9 +58,7 @@ class GraphStatsParams(TypedDict, total=False):
 class GraphStatsTool(BaseTool):
     """Tool for getting graph database statistics."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the graph stats tool.
 
         Args:

@@ -139,9 +137,7 @@ Examples:
         except Exception as e:
             return f"Error accessing project database: {str(e)}"
 
-        await tool_ctx.info(
-            f"Getting graph statistics for project: {project_db.project_path}"
-        )
+        await tool_ctx.info(f"Getting graph statistics for project: {project_db.project_path}")
 
         # Get graph connection
         graph_conn = project_db.get_graph_connection()

@@ -156,9 +152,7 @@ Examples:
 
             # Basic counts
             if node_type_filter:
-                cursor.execute(
-                    "SELECT COUNT(*) FROM nodes WHERE type = ?", (node_type_filter,)
-                )
+                cursor.execute("SELECT COUNT(*) FROM nodes WHERE type = ?", (node_type_filter,))
                 node_count = cursor.fetchone()[0]
                 output.append(f"Nodes (type='{node_type_filter}'): {node_count:,}")
             else:

@@ -172,9 +166,7 @@ Examples:
                     (relationship_filter,),
                 )
                 edge_count = cursor.fetchone()[0]
-                output.append(
-                    f"Edges (relationship='{relationship_filter}'): {edge_count:,}"
-                )
+                output.append(f"Edges (relationship='{relationship_filter}'): {edge_count:,}")
             else:
                 cursor.execute("SELECT COUNT(*) FROM edges")
                 edge_count = cursor.fetchone()[0]

@@ -188,9 +180,7 @@ Examples:
 
             # Node type distribution
             output.append("=== Node Types ===")
-            cursor.execute(
-                "SELECT type, COUNT(*) as count FROM nodes GROUP BY type ORDER BY count DESC"
-            )
+            cursor.execute("SELECT type, COUNT(*) as count FROM nodes GROUP BY type ORDER BY count DESC")
             node_types = cursor.fetchall()
 
             for n_type, count in node_types[:10]:

@@ -285,31 +275,21 @@ Examples:
                 output.append("\n=== Detailed Analysis ===")
 
                 # Node properties usage
-                cursor.execute(
-                    "SELECT COUNT(*) FROM nodes WHERE properties IS NOT NULL"
-                )
+                cursor.execute("SELECT COUNT(*) FROM nodes WHERE properties IS NOT NULL")
                 nodes_with_props = cursor.fetchone()[0]
                 if nodes_with_props > 0:
                     props_pct = (nodes_with_props / node_count) * 100
-                    output.append(
-                        f"Nodes with properties: {nodes_with_props} ({props_pct:.1f}%)"
-                    )
+                    output.append(f"Nodes with properties: {nodes_with_props} ({props_pct:.1f}%)")
 
                 # Edge properties usage
-                cursor.execute(
-                    "SELECT COUNT(*) FROM edges WHERE properties IS NOT NULL"
-                )
+                cursor.execute("SELECT COUNT(*) FROM edges WHERE properties IS NOT NULL")
                 edges_with_props = cursor.fetchone()[0]
                 if edges_with_props > 0 and edge_count > 0:
                     props_pct = (edges_with_props / edge_count) * 100
-                    output.append(
-                        f"Edges with properties: {edges_with_props} ({props_pct:.1f}%)"
-                    )
+                    output.append(f"Edges with properties: {edges_with_props} ({props_pct:.1f}%)")
 
                 # Weight distribution
-                cursor.execute(
-                    "SELECT MIN(weight), MAX(weight), AVG(weight) FROM edges"
-                )
+                cursor.execute("SELECT MIN(weight), MAX(weight), AVG(weight) FROM edges")
                 weight_stats = cursor.fetchone()
                 if weight_stats[0] is not None:
                     output.append(f"\nEdge weights:")

@@ -338,9 +318,7 @@ Examples:
                 if patterns:
                     output.append("Most common connections:")
                     for src_type, rel, tgt_type, count in patterns:
-                        output.append(
-                            f" {src_type} --[{rel}]--> {tgt_type}: {count} times"
-                        )
+                        output.append(f" {src_type} --[{rel}]--> {tgt_type}: {count} times")
 
                 # Component analysis (simplified)
                 output.append("\n=== Graph Structure ===")
hanzo_mcp/tools/database/sql.py
CHANGED

@@ -75,9 +75,7 @@ class SQLParams(TypedDict, total=False):
 class SQLTool(BaseTool):
     """Unified SQL database tool."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the SQL tool."""
         super().__init__(permission_manager)
         self.db_manager = db_manager
@@ -296,9 +294,7 @@ sql --action stats --table users
             output.append("-" * 60)
 
             for col in columns:
-                output.append(
-                    f"{col[1]} | {col[2]} | {col[3]} | {col[4]} | {col[5]}"
-                )
+                output.append(f"{col[1]} | {col[2]} | {col[3]} | {col[4]} | {col[5]}")
 
             # Get indexes
             cursor = conn.execute(f"PRAGMA index_list({table})")
@@ -336,9 +332,7 @@ sql --action stats --table users
             # Get columns
             cursor = conn.execute(f"PRAGMA table_info({table_name})")
             columns = cursor.fetchall()
-            output.append(
-                f"Columns: {', '.join([col[1] for col in columns])}"
-            )
+            output.append(f"Columns: {', '.join([col[1] for col in columns])}")
 
             return "\n".join(output)
 

@@ -402,9 +396,7 @@ sql --action stats --table users
                     """
                 )
                 stats = cursor.fetchone()
-                output.append(
-                    f" {col_name}: distinct={stats[0]}, nulls={stats[1]}"
-                )
+                output.append(f" {col_name}: distinct={stats[0]}, nulls={stats[1]}")
 
             else:
                 # Overall database stats
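For context on the PRAGMA calls in the sql.py hunks above: SQLite's PRAGMA table_info returns one row per column in the order (cid, name, type, notnull, dflt_value, pk), which is why the tool prints col[1] through col[5]. A minimal standalone sketch of the same lookup (the users table here is hypothetical, not part of hanzo-mcp):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT NOT NULL, email TEXT DEFAULT '')")

# Each PRAGMA table_info row is (cid, name, type, notnull, dflt_value, pk).
for col in conn.execute("PRAGMA table_info(users)"):
    # Same field order the tool prints: name | type | notnull | dflt_value | pk
    print(f"{col[1]} | {col[2]} | {col[3]} | {col[4]} | {col[5]}")

conn.close()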
hanzo_mcp/tools/database/sql_query.py
CHANGED

@@ -48,9 +48,7 @@ class SqlQueryParams(TypedDict, total=False):
 class SqlQueryTool(BaseTool):
     """Tool for executing SQL queries on project databases."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the SQL query tool.
 
         Args:

@@ -140,11 +138,11 @@ Note: Use sql_search for text search operations."""
             query_upper = query.upper()
             for keyword in write_keywords:
                 if keyword in query_upper:
-                    return f"Error: Query contains {keyword} operation. Set --read-only false to allow write operations."
+                    return (
+                        f"Error: Query contains {keyword} operation. Set --read-only false to allow write operations."
+                    )
 
-        await tool_ctx.info(
-            f"Executing SQL query on project: {project_db.project_path}"
-        )
+        await tool_ctx.info(f"Executing SQL query on project: {project_db.project_path}")
 
         # Execute query
         conn = None
@@ -209,9 +207,7 @@ Note: Use sql_search for text search operations."""
             output_rows = []
             for row in rows[:1000]:  # Limit to 1000 rows
                 row_str = " | ".join(
-                    self._truncate(str(val) if val is not None else "NULL", width).ljust(
-                        width
-                    )
+                    self._truncate(str(val) if val is not None else "NULL", width).ljust(width)
                     for val, width in zip(row, col_widths)
                 )
                 output_rows.append(row_str)
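The read-only guard in the sql_query.py hunks above is a plain keyword screen: uppercase the query and reject it if any write keyword appears as a substring. A minimal sketch of the same pattern, with an assumed keyword list (not necessarily hanzo-mcp's exact values):

from typing import Optional

WRITE_KEYWORDS = ["INSERT", "UPDATE", "DELETE", "DROP", "ALTER", "CREATE"]  # assumed, illustrative list

def check_read_only(query: str) -> Optional[str]:
    """Return an error message if the query looks like a write operation, else None."""
    query_upper = query.upper()
    for keyword in WRITE_KEYWORDS:
        if keyword in query_upper:
            return f"Error: Query contains {keyword} operation. Set --read-only false to allow write operations."
    return None

print(check_read_only("SELECT * FROM files"))  # None
print(check_read_only("DELETE FROM files"))    # error message naming DELETE

Note that a substring check of this kind will also flag read-only queries whose identifiers merely contain a keyword (for example a created_at column matches CREATE).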
hanzo_mcp/tools/database/sql_search.py
CHANGED

@@ -66,9 +66,7 @@ class SqlSearchParams(TypedDict, total=False):
 class SqlSearchTool(BaseTool):
     """Tool for searching text in SQLite database."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the SQL search tool.
 
         Args:

@@ -253,9 +251,7 @@ Use sql_query for complex queries with joins, conditions, etc."""
 
         return text_columns
 
-    def _format_results(
-        self, table: str, results: list, pattern: str, search_columns: list[str]
-    ) -> str:
+    def _format_results(self, table: str, results: list, pattern: str, search_columns: list[str]) -> str:
         """Format search results based on table type."""
         output = []
 
hanzo_mcp/tools/database/sql_stats.py
CHANGED

@@ -39,9 +39,7 @@ class SqlStatsParams(TypedDict, total=False):
 class SqlStatsTool(BaseTool):
     """Tool for getting SQLite database statistics."""
 
-    def __init__(
-        self, permission_manager: PermissionManager, db_manager: DatabaseManager
-    ):
+    def __init__(self, permission_manager: PermissionManager, db_manager: DatabaseManager):
         """Initialize the SQL stats tool.
 
         Args:

@@ -115,9 +113,7 @@ Examples:
         except Exception as e:
             return f"Error accessing project database: {str(e)}"
 
-        await tool_ctx.info(
-            f"Getting statistics for project: {project_db.project_path}"
-        )
+        await tool_ctx.info(f"Getting statistics for project: {project_db.project_path}")
 
         # Collect statistics
         conn = None

@@ -136,9 +132,7 @@ Examples:
             output.append("")
 
             # Get table statistics
-            cursor.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
-            )
+            cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
             tables = cursor.fetchall()
 
             output.append("=== Tables ===")

@@ -182,9 +176,7 @@ Examples:
 
                 # Show sample data for specific tables
                 if table_name == "files" and row_count > 0:
-                    cursor.execute(
-                        f"SELECT COUNT(DISTINCT SUBSTR(path, -3)) as ext_count FROM {table_name}"
-                    )
+                    cursor.execute(f"SELECT COUNT(DISTINCT SUBSTR(path, -3)) as ext_count FROM {table_name}")
                    ext_count = cursor.fetchone()[0]
                    output.append(f" File types: ~{ext_count}")
 
|
|
|
206
198
|
output.append(f"\nTotal Rows: {total_rows:,}")
|
|
207
199
|
|
|
208
200
|
# Get index statistics
|
|
209
|
-
cursor.execute(
|
|
210
|
-
"SELECT name FROM sqlite_master WHERE type='index' AND sql IS NOT NULL ORDER BY name"
|
|
211
|
-
)
|
|
201
|
+
cursor.execute("SELECT name FROM sqlite_master WHERE type='index' AND sql IS NOT NULL ORDER BY name")
|
|
212
202
|
indexes = cursor.fetchall()
|
|
213
203
|
if indexes:
|
|
214
204
|
output.append(f"\n=== Indexes ===")
|
|
hanzo_mcp/tools/editor/neovim_command.py
CHANGED

@@ -158,9 +158,7 @@ Note: Requires Neovim to be installed.
             return "Error: Must provide either 'command', 'commands', or 'macro'"
 
         if sum(bool(x) for x in [command, commands, macro]) > 1:
-            return (
-                "Error: Can only use one of 'command', 'commands', or 'macro' at a time"
-            )
+            return "Error: Can only use one of 'command', 'commands', or 'macro' at a time"
 
         # Check if Neovim is available
         nvim_cmd = shutil.which("nvim")
hanzo_mcp/tools/editor/neovim_session.py
CHANGED

@@ -168,9 +168,7 @@ Note: Requires Neovim to be installed.
                 project_path,
             )
         elif action == "restore":
-            return await self._restore_session(
-                tool_ctx, session_name, project_session_dir
-            )
+            return await self._restore_session(tool_ctx, session_name, project_session_dir)
         elif action == "list":
             return self._list_sessions(project_session_dir, project_path)
         elif action == "delete":

@@ -214,9 +212,7 @@ Note: Requires Neovim to be installed.
             # Run Neovim to save session
             # First, check if Neovim is already running
             # For now, we'll create a new instance
-            result = subprocess.run(
-                ["nvim", "-c", vim_script.strip()], capture_output=True, text=True
-            )
+            result = subprocess.run(["nvim", "-c", vim_script.strip()], capture_output=True, text=True)
 
             if result.returncode != 0 and result.stderr:
                 return f"Error saving session: {result.stderr}"
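The save path in the hunk above shells out to nvim with -c so the generated vim_script runs at startup; the script's contents are not shown in this diff. As a hedged sketch of the general idea (not hanzo-mcp's exact implementation), a session can be written headlessly with Vim's mksession command; the session path below is hypothetical:

import subprocess
from pathlib import Path

session_file = Path.home() / ".cache/nvim/sessions/demo.vim"  # hypothetical location
session_file.parent.mkdir(parents=True, exist_ok=True)

# --headless skips the UI, -c runs an Ex command after startup, mksession! overwrites an existing file.
result = subprocess.run(
    ["nvim", "--headless", "-c", f"mksession! {session_file}", "-c", "qa"],
    capture_output=True,
    text=True,
)
if result.returncode != 0 and result.stderr:
    print(f"Error saving session: {result.stderr}")

Restoring such a file is typically done with nvim -S <session.vim>.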
@@ -246,15 +242,15 @@ Or manually in Neovim:
         except Exception as e:
             return f"Error saving session: {str(e)}"
 
-    async def _restore_session(
-        self, tool_ctx, session_name: Optional[str], project_dir: Path
-    ) -> str:
+    async def _restore_session(self, tool_ctx, session_name: Optional[str], project_dir: Path) -> str:
         """Restore Neovim session."""
         if not session_name:
             # List available sessions
             sessions = list(project_dir.glob("*.vim"))
             if not sessions:
-                return "Error: No sessions found for this project. Use 'neovim_session --action list' to see all sessions."
+                return (
+                    "Error: No sessions found for this project. Use 'neovim_session --action list' to see all sessions."
+                )
 
             # Use most recent
             sessions.sort(key=lambda x: x.stat().st_mtime, reverse=True)

@@ -330,9 +326,7 @@ Or manually in Neovim:
         if len(other_sessions) > 10:
             output.append(f" ... and {len(other_sessions) - 10} more")
 
-        output.append(
-            "\nUse 'neovim_session --action restore --session-name <name>' to restore a session."
-        )
+        output.append("\nUse 'neovim_session --action restore --session-name <name>' to restore a session.")
 
         return "\n".join(output)
 
hanzo_mcp/tools/filesystem/__init__.py
CHANGED

@@ -93,9 +93,7 @@ def get_read_only_filesystem_tools(
     return tools
 
 
-def get_filesystem_tools(
-    permission_manager: PermissionManager, project_manager=None
-) -> list[BaseTool]:
+def get_filesystem_tools(permission_manager: PermissionManager, project_manager=None) -> list[BaseTool]:
     """Create instances of all filesystem tools.
 
     Args:

@@ -226,6 +224,7 @@ def register_filesystem_tools(
     try:
         ast_tool = next((t for t in tools if getattr(t, "name", "") == "ast"), None)
         if ast_tool is not None:
+
             class _SymbolsAlias(ASTTool):  # type: ignore[misc]
                 @property
                 def name(self) -> str:  # type: ignore[override]
hanzo_mcp/tools/filesystem/ast_multi_edit.py
CHANGED

@@ -129,9 +129,7 @@ class ASTMultiEdit(BaseTool):
 
         return parser.parse(bytes(content, "utf-8"))
 
-    def _find_references(
-        self, symbol: str, file_path: str, project_root: Optional[str] = None
-    ) -> List[ASTMatch]:
+    def _find_references(self, symbol: str, file_path: str, project_root: Optional[str] = None) -> List[ASTMatch]:
         """Find all references to a symbol across the project."""
         matches = []
 

@@ -149,9 +147,7 @@ class ASTMultiEdit(BaseTool):
 
         return matches
 
-    def _get_reference_patterns(
-        self, symbol: str, file_path: str
-    ) -> List[Dict[str, Any]]:
+    def _get_reference_patterns(self, symbol: str, file_path: str) -> List[Dict[str, Any]]:
         """Get language-specific patterns for finding references."""
         ext = Path(file_path).suffix.lower()
         lang = self.languages.get(ext, "generic")

@@ -262,9 +258,7 @@ class ASTMultiEdit(BaseTool):
                     matches.extend(self._query_ast(tree, pattern, file_path, content))
                 else:
                     # Fallback to text search
-                    matches.extend(
-                        self._text_search(content, pattern["query"], file_path)
-                    )
+                    matches.extend(self._text_search(content, pattern["query"], file_path))
 
             except Exception:
                 continue

@@ -313,9 +307,7 @@ class ASTMultiEdit(BaseTool):
 
         return matches
 
-    def _get_parent_context(
-        self, node: tree_sitter.Node, content: str
-    ) -> Optional[str]:
+    def _get_parent_context(self, node: tree_sitter.Node, content: str) -> Optional[str]:
         """Get parent context for better understanding."""
         parent = node.parent
         if parent:

@@ -335,9 +327,7 @@ class ASTMultiEdit(BaseTool):
 
         return None
 
-    def _text_search(
-        self, content: str, pattern: str, file_path: str
-    ) -> List[ASTMatch]:
+    def _text_search(self, content: str, pattern: str, file_path: str) -> List[ASTMatch]:
         """Fallback text search."""
         matches = []
         lines = content.split("\n")

@@ -412,18 +402,14 @@ class ASTMultiEdit(BaseTool):
 
         return str(path.parent)
 
-    def _group_matches_by_file(
-        self, matches: List[ASTMatch]
-    ) -> Dict[str, List[ASTMatch]]:
+    def _group_matches_by_file(self, matches: List[ASTMatch]) -> Dict[str, List[ASTMatch]]:
         """Group matches by file for efficient editing."""
         grouped = defaultdict(list)
         for match in matches:
             grouped[match.file_path].append(match)
         return grouped
 
-    def _create_unique_context(
-        self, content: str, match: ASTMatch, context_lines: int
-    ) -> str:
+    def _create_unique_context(self, content: str, match: ASTMatch, context_lines: int) -> str:
         """Create unique context for edit identification."""
         lines = content.split("\n")
 

@@ -499,27 +485,20 @@ class ASTMultiEdit(BaseTool):
                     pattern = {"query": edit_op.old_string, "type": "text"}
                     matches = self._query_ast(tree, pattern, file_path, content)
                 else:
-                    matches = self._text_search(
-                        content, edit_op.old_string, file_path
-                    )
+                    matches = self._text_search(content, edit_op.old_string, file_path)
 
                 # Filter by node types if specified
                 if edit_op.node_types:
                     matches = [m for m in matches if m.node_type in edit_op.node_types]
 
                 # Check expected count
-                if (
-                    edit_op.expect_count is not None
-                    and len(matches) != edit_op.expect_count
-                ):
+                if edit_op.expect_count is not None and len(matches) != edit_op.expect_count:
                     results["errors"].append(
                         {
                             "edit": edit_op.old_string,
                             "expected": edit_op.expect_count,
                             "found": len(matches),
-                            "locations": [
-                                f"{m.file_path}:{m.line_start}" for m in matches[:5]
-                            ],
+                            "locations": [f"{m.file_path}:{m.line_start}" for m in matches[:5]],
                         }
                     )
                     continue

@@ -547,9 +526,7 @@ class ASTMultiEdit(BaseTool):
                 success = await self._apply_file_changes(file_path, changes)
                 if success:
                     results["edits_applied"] += len(changes)
-                    results["changes"].append(
-                        {"file": file_path, "edits": len(changes)}
-                    )
+                    results["changes"].append({"file": file_path, "edits": len(changes)})
             except Exception as e:
                 results["errors"].append({"file": file_path, "error": str(e)})
 

@@ -564,9 +541,7 @@ class ASTMultiEdit(BaseTool):
             grouped[match.file_path].append((edit_op, match))
         return grouped
 
-    async def _apply_file_changes(
-        self, file_path: str, changes: List[Tuple[EditOperation, ASTMatch]]
-    ) -> bool:
+    async def _apply_file_changes(self, file_path: str, changes: List[Tuple[EditOperation, ASTMatch]]) -> bool:
         """Apply changes to a single file."""
         with open(file_path, "r", encoding="utf-8") as f:
             content = f.read()

@@ -600,9 +575,7 @@ class ASTMultiEdit(BaseTool):
 
         return True
 
-    def _generate_preview(
-        self, matches: List[Tuple[EditOperation, ASTMatch]], page_size: int
-    ) -> List[Dict[str, Any]]:
+    def _generate_preview(self, matches: List[Tuple[EditOperation, ASTMatch]], page_size: int) -> List[Dict[str, Any]]:
         """Generate preview of changes."""
         preview = []
 

@@ -625,9 +598,7 @@ class ASTMultiEdit(BaseTool):
 
         return preview
 
-    def _fallback_to_basic_edit(
-        self, file_path: str, edits: List[Dict[str, Any]]
-    ) -> MCPResourceDocument:
+    def _fallback_to_basic_edit(self, file_path: str, edits: List[Dict[str, Any]]) -> MCPResourceDocument:
         """Fallback to basic multi-edit when treesitter not available."""
         # Delegate to existing multi_edit tool
         from hanzo_mcp.tools.filesystem.multi_edit import MultiEdit