ctrlcode-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ctrlcode/__init__.py +8 -0
- ctrlcode/agents/__init__.py +29 -0
- ctrlcode/agents/cleanup.py +388 -0
- ctrlcode/agents/communication.py +439 -0
- ctrlcode/agents/observability.py +421 -0
- ctrlcode/agents/react_loop.py +297 -0
- ctrlcode/agents/registry.py +211 -0
- ctrlcode/agents/result_parser.py +242 -0
- ctrlcode/agents/workflow.py +723 -0
- ctrlcode/analysis/__init__.py +28 -0
- ctrlcode/analysis/ast_diff.py +163 -0
- ctrlcode/analysis/bug_detector.py +149 -0
- ctrlcode/analysis/code_graphs.py +329 -0
- ctrlcode/analysis/semantic.py +205 -0
- ctrlcode/analysis/static.py +183 -0
- ctrlcode/analysis/synthesizer.py +281 -0
- ctrlcode/analysis/tests.py +189 -0
- ctrlcode/cleanup/__init__.py +16 -0
- ctrlcode/cleanup/auto_merge.py +350 -0
- ctrlcode/cleanup/doc_gardening.py +388 -0
- ctrlcode/cleanup/pr_automation.py +330 -0
- ctrlcode/cleanup/scheduler.py +356 -0
- ctrlcode/config.py +380 -0
- ctrlcode/embeddings/__init__.py +6 -0
- ctrlcode/embeddings/embedder.py +192 -0
- ctrlcode/embeddings/vector_store.py +213 -0
- ctrlcode/fuzzing/__init__.py +24 -0
- ctrlcode/fuzzing/analyzer.py +280 -0
- ctrlcode/fuzzing/budget.py +112 -0
- ctrlcode/fuzzing/context.py +665 -0
- ctrlcode/fuzzing/context_fuzzer.py +506 -0
- ctrlcode/fuzzing/derived_orchestrator.py +732 -0
- ctrlcode/fuzzing/oracle_adapter.py +135 -0
- ctrlcode/linters/__init__.py +11 -0
- ctrlcode/linters/hand_rolled_utils.py +221 -0
- ctrlcode/linters/yolo_parsing.py +217 -0
- ctrlcode/metrics/__init__.py +6 -0
- ctrlcode/metrics/dashboard.py +283 -0
- ctrlcode/metrics/tech_debt.py +663 -0
- ctrlcode/paths.py +68 -0
- ctrlcode/permissions.py +179 -0
- ctrlcode/providers/__init__.py +15 -0
- ctrlcode/providers/anthropic.py +138 -0
- ctrlcode/providers/base.py +77 -0
- ctrlcode/providers/openai.py +197 -0
- ctrlcode/providers/parallel.py +104 -0
- ctrlcode/server.py +871 -0
- ctrlcode/session/__init__.py +6 -0
- ctrlcode/session/baseline.py +57 -0
- ctrlcode/session/manager.py +967 -0
- ctrlcode/skills/__init__.py +10 -0
- ctrlcode/skills/builtin/commit.toml +29 -0
- ctrlcode/skills/builtin/docs.toml +25 -0
- ctrlcode/skills/builtin/refactor.toml +33 -0
- ctrlcode/skills/builtin/review.toml +28 -0
- ctrlcode/skills/builtin/test.toml +28 -0
- ctrlcode/skills/loader.py +111 -0
- ctrlcode/skills/registry.py +139 -0
- ctrlcode/storage/__init__.py +19 -0
- ctrlcode/storage/history_db.py +708 -0
- ctrlcode/tools/__init__.py +220 -0
- ctrlcode/tools/bash.py +112 -0
- ctrlcode/tools/browser.py +352 -0
- ctrlcode/tools/executor.py +153 -0
- ctrlcode/tools/explore.py +486 -0
- ctrlcode/tools/mcp.py +108 -0
- ctrlcode/tools/observability.py +561 -0
- ctrlcode/tools/registry.py +193 -0
- ctrlcode/tools/todo.py +291 -0
- ctrlcode/tools/update.py +266 -0
- ctrlcode/tools/webfetch.py +147 -0
- ctrlcode-0.1.0.dist-info/METADATA +93 -0
- ctrlcode-0.1.0.dist-info/RECORD +75 -0
- ctrlcode-0.1.0.dist-info/WHEEL +4 -0
- ctrlcode-0.1.0.dist-info/entry_points.txt +3 -0
ctrlcode/tools/executor.py
@@ -0,0 +1,153 @@
"""Tool execution coordinator."""

import logging
from typing import Any
from dataclasses import dataclass

from .registry import ToolRegistry

logger = logging.getLogger(__name__)


@dataclass
class ToolCallResult:
    """Result of a tool call."""

    tool_name: str
    call_id: str
    success: bool
    result: Any = None
    error: str | None = None


class ToolExecutor:
    """Executes tool calls (both MCP and built-in)."""

    def __init__(self, registry: ToolRegistry):
        """
        Initialize tool executor.

        Args:
            registry: Tool registry with MCP clients and built-in tools
        """
        self.registry = registry

    async def execute(
        self,
        tool_name: str,
        arguments: dict[str, Any],
        call_id: str,
    ) -> ToolCallResult:
        """
        Execute a tool call.

        Args:
            tool_name: Name of tool to call
            arguments: Tool arguments
            call_id: Unique call identifier

        Returns:
            ToolCallResult with execution result
        """
        # Get tool definition
        tool = self.registry.get_tool(tool_name)
        if not tool:
            logger.error(f"Tool not found: {tool_name}")
            return ToolCallResult(
                tool_name=tool_name,
                call_id=call_id,
                success=False,
                error=f"Tool '{tool_name}' not found",
            )

        # Execute based on tool type
        try:
            if self.registry.is_builtin(tool_name):
                # Built-in tool - call function directly
                from .registry import BuiltinTool
                import inspect
                builtin_tool = tool
                assert isinstance(builtin_tool, BuiltinTool)

                # Check if function is async and await if needed
                if inspect.iscoroutinefunction(builtin_tool.function):
                    result = await builtin_tool.function(**arguments)
                else:
                    result = builtin_tool.function(**arguments)

                # Check if tool returned an error (error key exists and is not None)
                if isinstance(result, dict) and "error" in result and result["error"] is not None:
                    logger.error(f"Built-in tool '{tool_name}' returned error: {result['error']}")
                    return ToolCallResult(
                        tool_name=tool_name,
                        call_id=call_id,
                        success=False,
                        error=result["error"],
                    )

                logger.info(f"Built-in tool '{tool_name}' executed successfully")
                return ToolCallResult(
                    tool_name=tool_name,
                    call_id=call_id,
                    success=True,
                    result=result,
                )

            else:
                # MCP tool - call via client
                from .mcp import MCPTool
                mcp_tool = tool
                assert isinstance(mcp_tool, MCPTool)

                client = self.registry.get_client(mcp_tool.server_name)
                if not client:
                    logger.error(f"Client not found for server: {mcp_tool.server_name}")
                    return ToolCallResult(
                        tool_name=tool_name,
                        call_id=call_id,
                        success=False,
                        error=f"Server '{mcp_tool.server_name}' not available",
                    )

                result = await client.call_tool(tool_name, arguments)
                logger.info(f"MCP tool '{tool_name}' executed successfully")

                return ToolCallResult(
                    tool_name=tool_name,
                    call_id=call_id,
                    success=True,
                    result=result,
                )

        except Exception as e:
            logger.error(f"Tool execution failed: {e}")
            return ToolCallResult(
                tool_name=tool_name,
                call_id=call_id,
                success=False,
                error=str(e),
            )

    async def execute_batch(
        self,
        tool_calls: list[dict[str, Any]]
    ) -> list[ToolCallResult]:
        """
        Execute multiple tool calls.

        Args:
            tool_calls: List of tool call dicts with name, arguments, call_id

        Returns:
            List of results
        """
        results = []
        for call in tool_calls:
            result = await self.execute(
                tool_name=call["name"],
                arguments=call["arguments"],
                call_id=call["call_id"],
            )
            results.append(result)

        return results
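To make the control flow above concrete, here is a minimal smoke-test sketch (not part of the package). It exercises only the unknown-tool error path, so it needs nothing beyond what this diff shows: ToolExecutor stores whatever registry it is given and calls get_tool first. The _EmptyRegistry stand-in, the "does_not_exist" tool name, and the call id are all hypothetical; the real ToolRegistry lives in ctrlcode/tools/registry.py, which is not part of this diff.

"""Hypothetical smoke test for ToolExecutor; names below are illustrative only."""
import asyncio

from ctrlcode.tools.executor import ToolExecutor


class _EmptyRegistry:
    """Duck-typed stand-in exposing only the methods the executor calls."""

    def get_tool(self, name):
        return None  # unknown tool -> executor takes the not-found branch

    def is_builtin(self, name):
        return False

    def get_client(self, server_name):
        return None


async def main() -> None:
    # The type hint says ToolRegistry, but Python does not enforce it,
    # so a stand-in is enough to observe the error path.
    executor = ToolExecutor(_EmptyRegistry())
    result = await executor.execute("does_not_exist", {}, call_id="call-1")
    print(result.success, result.error)  # False  "Tool 'does_not_exist' not found"


asyncio.run(main())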
ctrlcode/tools/explore.py
@@ -0,0 +1,486 @@
"""Built-in exploration tools for codebase navigation."""

import os
import subprocess
from pathlib import Path
from dataclasses import dataclass
from typing import Any

from ..permissions import request_permission


@dataclass
class SearchResult:
    """Result from code search."""

    file: str
    line: int
    content: str
    context_before: list[str] | None = None
    context_after: list[str] | None = None


class ExploreTools:
    """Built-in tools for exploring codebases."""

    def __init__(self, workspace_root: str | Path):
        """
        Initialize exploration tools.

        Args:
            workspace_root: Root directory for exploration
        """
        self.workspace_root = Path(workspace_root).resolve()

    def search_files(self, pattern: str, max_results: int = 100) -> list[str]:
        """
        Search for files matching glob pattern.

        Args:
            pattern: Glob pattern (e.g., "**/*.py", "src/**/*.ts")
            max_results: Maximum results to return

        Returns:
            List of file paths relative to workspace root
        """
        try:
            matches = []
            for path in self.workspace_root.glob(pattern):
                if path.is_file():
                    rel_path = path.relative_to(self.workspace_root)
                    matches.append(str(rel_path))

                    if len(matches) >= max_results:
                        break

            return sorted(matches)

        except Exception as e:
            return [f"Error: {e}"]

    def search_code(
        self,
        query: str,
        file_pattern: str = "**/*",
        context_lines: int = 2,
        max_results: int = 50,
    ) -> list[dict[str, Any]]:
        """
        Search code content using ripgrep.

        Args:
            query: Search query (regex supported)
            file_pattern: Glob pattern to limit search
            context_lines: Lines of context before/after match
            max_results: Maximum results to return

        Returns:
            List of search results with file, line, content, context
        """
        try:
            # Use ripgrep for fast search
            cmd = [
                "rg",
                "--json",
                "-C", str(context_lines),
                "--max-count", str(max_results),
                query,
            ]

            # Add file pattern if specified
            if file_pattern != "**/*":
                cmd.extend(["-g", file_pattern])

            # Add explicit search path (required for subprocess to work correctly)
            cmd.append(".")

            result = subprocess.run(
                cmd,
                cwd=self.workspace_root,
                capture_output=True,
                text=True,
            )

            if result.returncode != 0 and result.returncode != 1:
                return [{"error": result.stderr}]

            # Parse JSON output
            results = []
            for line in result.stdout.splitlines():
                if not line.strip():
                    continue

                import json
                data = json.loads(line)

                if data.get("type") == "match":
                    match_data = data["data"]
                    # Path from rg is relative to cwd, resolve to absolute then make relative to workspace
                    abs_path = (self.workspace_root / match_data["path"]["text"]).resolve()
                    rel_path = abs_path.relative_to(self.workspace_root)
                    results.append({
                        "file": str(rel_path),
                        "line": match_data["line_number"],
                        "content": match_data["lines"]["text"].rstrip(),
                    })

            return results[:max_results]

        except FileNotFoundError:
            # Fallback to basic grep if ripgrep not available
            return self._fallback_search(query, file_pattern, max_results)

        except Exception as e:
            return [{"error": str(e)}]

    def _fallback_search(
        self, query: str, file_pattern: str, max_results: int
    ) -> list[dict[str, Any]]:
        """Fallback to Python-based search if ripgrep unavailable."""
        results = []

        try:
            for path in self.workspace_root.glob(file_pattern):
                if not path.is_file():
                    continue

                # Skip binary files
                try:
                    with open(path, "r", encoding="utf-8") as f:
                        for line_num, line in enumerate(f, 1):
                            if query.lower() in line.lower():
                                results.append({
                                    "file": str(path.relative_to(self.workspace_root)),
                                    "line": line_num,
                                    "content": line.rstrip(),
                                })

                                if len(results) >= max_results:
                                    return results
                except (UnicodeDecodeError, PermissionError):
                    continue

        except Exception as e:
            return [{"error": str(e)}]

        return results

    def read_file(
        self,
        path: str,
        start_line: int | None = None,
        end_line: int | None = None,
    ) -> dict[str, Any]:
        """
        Read file contents.

        Args:
            path: File path relative to workspace root
            start_line: Start line (1-indexed, inclusive)
            end_line: End line (1-indexed, inclusive)

        Returns:
            Dict with content, total_lines, and metadata
        """
        try:
            file_path = (self.workspace_root / path).resolve()

            # Security: ensure path is within workspace
            if not str(file_path).startswith(str(self.workspace_root)):
                return {"error": "Path outside workspace"}

            if not file_path.exists():
                return {"error": "File not found"}

            if not file_path.is_file():
                return {"error": "Not a file"}

            with open(file_path, "r", encoding="utf-8") as f:
                lines = f.readlines()

            total_lines = len(lines)

            # Apply line range
            if start_line is not None or end_line is not None:
                start = (start_line - 1) if start_line else 0
                end = end_line if end_line else total_lines
                lines = lines[start:end]

            content = "".join(lines)

            return {
                "content": content,
                "total_lines": total_lines,
                "lines_returned": len(lines),
                "path": str(path),
            }

        except UnicodeDecodeError:
            return {"error": "File is binary or uses unsupported encoding"}
        except Exception as e:
            return {"error": str(e)}

    async def write_file(self, path: str, content: str) -> dict[str, Any]:
        """
        Write content to a file.

        Args:
            path: File path relative to workspace root
            content: Content to write to file

        Returns:
            Dict with success status and metadata
        """
        try:
            file_path = (self.workspace_root / path).resolve()

            # Check if path is outside workspace - request permission if so
            outside_workspace = not str(file_path).startswith(str(self.workspace_root))

            if outside_workspace:
                # Request user approval for writing outside workspace
                approved = await request_permission(
                    operation="write_file",
                    path=str(file_path),
                    reason=f"Write file outside workspace ({self.workspace_root})",
                    details={"workspace": str(self.workspace_root), "requested_path": path}
                )

                if not approved:
                    return {"error": "Permission denied: path outside workspace"}

            # Create parent directories if needed
            file_path.parent.mkdir(parents=True, exist_ok=True)

            # Write file
            with open(file_path, "w", encoding="utf-8") as f:
                f.write(content)

            return {
                "success": True,
                "path": str(file_path),  # Return absolute path so TUI can read it back
                "bytes_written": len(content.encode("utf-8")),
            }

        except Exception as e:
            return {"error": str(e)}

    def list_directory(
        self, path: str = ".", max_depth: int = 1
    ) -> dict[str, Any]:
        """
        List directory contents.

        Args:
            path: Directory path relative to workspace root
            max_depth: How deep to recurse (1 = immediate children only)

        Returns:
            Dict with directories and files
        """
        try:
            dir_path = (self.workspace_root / path).resolve()

            # Security: ensure path is within workspace
            if not str(dir_path).startswith(str(self.workspace_root)):
                return {"error": "Path outside workspace"}

            if not dir_path.exists():
                return {"error": "Directory not found"}

            if not dir_path.is_dir():
                return {"error": "Not a directory"}

            directories = []
            files = []

            # Use os.walk for recursive listing
            for root, dirs, filenames in os.walk(dir_path):
                root_path = Path(root)
                depth = len(root_path.relative_to(dir_path).parts)

                if depth >= max_depth:
                    dirs.clear()  # Don't recurse deeper
                    continue

                for d in sorted(dirs):
                    rel_path = (root_path / d).relative_to(self.workspace_root)
                    directories.append(str(rel_path))

                for f in sorted(filenames):
                    rel_path = (root_path / f).relative_to(self.workspace_root)
                    file_path = root_path / f
                    files.append({
                        "path": str(rel_path),
                        "size": file_path.stat().st_size,
                    })

            return {
                "path": str(Path(path)),
                "directories": directories[:100],  # Limit results
                "files": files[:100],
            }

        except Exception as e:
            return {"error": str(e)}

    def get_file_info(self, path: str) -> dict[str, Any]:
        """
        Get file metadata.

        Args:
            path: File path relative to workspace root

        Returns:
            Dict with size, modified time, type, etc.
        """
        try:
            file_path = (self.workspace_root / path).resolve()

            # Security: ensure path is within workspace
            if not str(file_path).startswith(str(self.workspace_root)):
                return {"error": "Path outside workspace"}

            if not file_path.exists():
                return {"error": "File not found"}

            stat = file_path.stat()

            return {
                "path": str(path),
                "size": stat.st_size,
                "modified": stat.st_mtime,
                "is_file": file_path.is_file(),
                "is_directory": file_path.is_dir(),
                "extension": file_path.suffix,
            }

        except Exception as e:
            return {"error": str(e)}


# Tool schemas for LLM providers (Anthropic/OpenAI format)
EXPLORE_TOOL_SCHEMAS = [
    {
        "name": "search_files",
        "description": "Search for files in the codebase using glob patterns. Use this to find files by name or extension.",
        "input_schema": {
            "type": "object",
            "properties": {
                "pattern": {
                    "type": "string",
                    "description": "Glob pattern to match files (e.g., '**/*.py' for all Python files, 'src/**/*.ts' for TypeScript in src/)",
                },
                "max_results": {
                    "type": "integer",
                    "description": "Maximum number of results to return",
                    "default": 100,
                },
            },
            "required": ["pattern"],
        },
    },
    {
        "name": "search_code",
        "description": "Search for code content across files. Use this to find where specific functions, classes, or patterns are used.",
        "input_schema": {
            "type": "object",
            "properties": {
                "query": {
                    "type": "string",
                    "description": "Search query (supports regex). E.g., 'class FuzzingOrchestrator', 'def process_turn'",
                },
                "file_pattern": {
                    "type": "string",
                    "description": "Limit search to files matching this glob pattern",
                    "default": "**/*",
                },
                "context_lines": {
                    "type": "integer",
                    "description": "Number of context lines before/after match",
                    "default": 2,
                },
                "max_results": {
                    "type": "integer",
                    "description": "Maximum number of results to return",
                    "default": 50,
                },
            },
            "required": ["query"],
        },
    },
    {
        "name": "read_file",
        "description": "Read contents of a file. Use this to examine specific files found through search.",
        "input_schema": {
            "type": "object",
            "properties": {
                "path": {
                    "type": "string",
                    "description": "File path relative to workspace root",
                },
                "start_line": {
                    "type": "integer",
                    "description": "Start line number (1-indexed, inclusive). Omit to read from beginning.",
                },
                "end_line": {
                    "type": "integer",
                    "description": "End line number (1-indexed, inclusive). Omit to read to end.",
                },
            },
            "required": ["path"],
        },
    },
    {
        "name": "write_file",
        "description": "Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Parent directories are created automatically.",
        "input_schema": {
            "type": "object",
            "properties": {
                "path": {
                    "type": "string",
                    "description": "File path relative to workspace root (e.g., 'fizzbuzz.py', 'src/utils.py'). NEVER use absolute paths.",
                },
                "content": {
                    "type": "string",
                    "description": "Content to write to the file",
                },
            },
            "required": ["path", "content"],
        },
    },
    {
        "name": "list_directory",
        "description": "List contents of a directory. Use this to understand project structure.",
        "input_schema": {
            "type": "object",
            "properties": {
                "path": {
                    "type": "string",
                    "description": "Directory path relative to workspace root",
                    "default": ".",
                },
                "max_depth": {
                    "type": "integer",
                    "description": "How deep to recurse (1 = immediate children only)",
                    "default": 1,
                },
            },
            "required": [],
        },
    },
    {
        "name": "get_file_info",
        "description": "Get metadata about a file (size, modified time, type).",
        "input_schema": {
            "type": "object",
            "properties": {
                "path": {
                    "type": "string",
                    "description": "File path relative to workspace root",
                },
            },
            "required": ["path"],
        },
    },
]
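A short, hypothetical usage sketch of the methods defined above (not from the package): the workspace path, the scratch file name, and the search query are example values only. All signatures used here appear in the diff; note that write_file is async, while the other methods are synchronous, and that search_code shells out to ripgrep and falls back to a pure-Python scan when the rg binary is missing.

"""Hypothetical ExploreTools walkthrough; paths and queries are illustrative."""
import asyncio

from ctrlcode.tools.explore import ExploreTools


async def main() -> None:
    # Any project directory works as the workspace root.
    tools = ExploreTools(workspace_root=".")

    print(tools.search_files("**/*.py", max_results=10))
    print(tools.list_directory(".", max_depth=1))

    # Content search: each hit is a dict with file, line, and content keys.
    for hit in tools.search_code("class ExploreTools", file_pattern="**/*.py"):
        print(hit)

    # Writing inside the workspace does not trigger the permission prompt;
    # only paths resolving outside workspace_root go through request_permission.
    print(await tools.write_file("scratch/example.txt", "hello\n"))
    print(tools.read_file("scratch/example.txt"))


asyncio.run(main())

The EXPLORE_TOOL_SCHEMAS list mirrors these methods in the name/description/input_schema shape used for Anthropic-style tool calling; presumably the provider modules (e.g., ctrlcode/providers/anthropic.py in the manifest) consume it, though that wiring is outside this diff.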