amd-gaia 0.14.3__py3-none-any.whl → 0.15.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {amd_gaia-0.14.3.dist-info → amd_gaia-0.15.1.dist-info}/METADATA +223 -223
- amd_gaia-0.15.1.dist-info/RECORD +178 -0
- {amd_gaia-0.14.3.dist-info → amd_gaia-0.15.1.dist-info}/entry_points.txt +1 -0
- {amd_gaia-0.14.3.dist-info → amd_gaia-0.15.1.dist-info}/licenses/LICENSE.md +20 -20
- gaia/__init__.py +29 -29
- gaia/agents/__init__.py +19 -19
- gaia/agents/base/__init__.py +9 -9
- gaia/agents/base/agent.py +2177 -2177
- gaia/agents/base/api_agent.py +120 -120
- gaia/agents/base/console.py +1841 -1841
- gaia/agents/base/errors.py +237 -237
- gaia/agents/base/mcp_agent.py +86 -86
- gaia/agents/base/tools.py +83 -83
- gaia/agents/blender/agent.py +556 -556
- gaia/agents/blender/agent_simple.py +133 -135
- gaia/agents/blender/app.py +211 -211
- gaia/agents/blender/app_simple.py +41 -41
- gaia/agents/blender/core/__init__.py +16 -16
- gaia/agents/blender/core/materials.py +506 -506
- gaia/agents/blender/core/objects.py +316 -316
- gaia/agents/blender/core/rendering.py +225 -225
- gaia/agents/blender/core/scene.py +220 -220
- gaia/agents/blender/core/view.py +146 -146
- gaia/agents/chat/__init__.py +9 -9
- gaia/agents/chat/agent.py +835 -835
- gaia/agents/chat/app.py +1058 -1058
- gaia/agents/chat/session.py +508 -508
- gaia/agents/chat/tools/__init__.py +15 -15
- gaia/agents/chat/tools/file_tools.py +96 -96
- gaia/agents/chat/tools/rag_tools.py +1729 -1729
- gaia/agents/chat/tools/shell_tools.py +436 -436
- gaia/agents/code/__init__.py +7 -7
- gaia/agents/code/agent.py +549 -549
- gaia/agents/code/cli.py +377 -0
- gaia/agents/code/models.py +135 -135
- gaia/agents/code/orchestration/__init__.py +24 -24
- gaia/agents/code/orchestration/checklist_executor.py +1763 -1763
- gaia/agents/code/orchestration/checklist_generator.py +713 -713
- gaia/agents/code/orchestration/factories/__init__.py +9 -9
- gaia/agents/code/orchestration/factories/base.py +63 -63
- gaia/agents/code/orchestration/factories/nextjs_factory.py +118 -118
- gaia/agents/code/orchestration/factories/python_factory.py +106 -106
- gaia/agents/code/orchestration/orchestrator.py +841 -841
- gaia/agents/code/orchestration/project_analyzer.py +391 -391
- gaia/agents/code/orchestration/steps/__init__.py +67 -67
- gaia/agents/code/orchestration/steps/base.py +188 -188
- gaia/agents/code/orchestration/steps/error_handler.py +314 -314
- gaia/agents/code/orchestration/steps/nextjs.py +828 -828
- gaia/agents/code/orchestration/steps/python.py +307 -307
- gaia/agents/code/orchestration/template_catalog.py +469 -469
- gaia/agents/code/orchestration/workflows/__init__.py +14 -14
- gaia/agents/code/orchestration/workflows/base.py +80 -80
- gaia/agents/code/orchestration/workflows/nextjs.py +186 -186
- gaia/agents/code/orchestration/workflows/python.py +94 -94
- gaia/agents/code/prompts/__init__.py +11 -11
- gaia/agents/code/prompts/base_prompt.py +77 -77
- gaia/agents/code/prompts/code_patterns.py +2036 -2036
- gaia/agents/code/prompts/nextjs_prompt.py +40 -40
- gaia/agents/code/prompts/python_prompt.py +109 -109
- gaia/agents/code/schema_inference.py +365 -365
- gaia/agents/code/system_prompt.py +41 -41
- gaia/agents/code/tools/__init__.py +42 -42
- gaia/agents/code/tools/cli_tools.py +1138 -1138
- gaia/agents/code/tools/code_formatting.py +319 -319
- gaia/agents/code/tools/code_tools.py +769 -769
- gaia/agents/code/tools/error_fixing.py +1347 -1347
- gaia/agents/code/tools/external_tools.py +180 -180
- gaia/agents/code/tools/file_io.py +845 -845
- gaia/agents/code/tools/prisma_tools.py +190 -190
- gaia/agents/code/tools/project_management.py +1016 -1016
- gaia/agents/code/tools/testing.py +321 -321
- gaia/agents/code/tools/typescript_tools.py +122 -122
- gaia/agents/code/tools/validation_parsing.py +461 -461
- gaia/agents/code/tools/validation_tools.py +806 -806
- gaia/agents/code/tools/web_dev_tools.py +1758 -1758
- gaia/agents/code/validators/__init__.py +16 -16
- gaia/agents/code/validators/antipattern_checker.py +241 -241
- gaia/agents/code/validators/ast_analyzer.py +197 -197
- gaia/agents/code/validators/requirements_validator.py +145 -145
- gaia/agents/code/validators/syntax_validator.py +171 -171
- gaia/agents/docker/__init__.py +7 -7
- gaia/agents/docker/agent.py +642 -642
- gaia/agents/emr/__init__.py +8 -8
- gaia/agents/emr/agent.py +1506 -1506
- gaia/agents/emr/cli.py +1322 -1322
- gaia/agents/emr/constants.py +475 -475
- gaia/agents/emr/dashboard/__init__.py +4 -4
- gaia/agents/emr/dashboard/server.py +1974 -1974
- gaia/agents/jira/__init__.py +11 -11
- gaia/agents/jira/agent.py +894 -894
- gaia/agents/jira/jql_templates.py +299 -299
- gaia/agents/routing/__init__.py +7 -7
- gaia/agents/routing/agent.py +567 -570
- gaia/agents/routing/system_prompt.py +75 -75
- gaia/agents/summarize/__init__.py +11 -0
- gaia/agents/summarize/agent.py +885 -0
- gaia/agents/summarize/prompts.py +129 -0
- gaia/api/__init__.py +23 -23
- gaia/api/agent_registry.py +238 -238
- gaia/api/app.py +305 -305
- gaia/api/openai_server.py +575 -575
- gaia/api/schemas.py +186 -186
- gaia/api/sse_handler.py +373 -373
- gaia/apps/__init__.py +4 -4
- gaia/apps/llm/__init__.py +6 -6
- gaia/apps/llm/app.py +173 -169
- gaia/apps/summarize/app.py +116 -633
- gaia/apps/summarize/html_viewer.py +133 -133
- gaia/apps/summarize/pdf_formatter.py +284 -284
- gaia/audio/__init__.py +2 -2
- gaia/audio/audio_client.py +439 -439
- gaia/audio/audio_recorder.py +269 -269
- gaia/audio/kokoro_tts.py +599 -599
- gaia/audio/whisper_asr.py +432 -432
- gaia/chat/__init__.py +16 -16
- gaia/chat/app.py +430 -430
- gaia/chat/prompts.py +522 -522
- gaia/chat/sdk.py +1228 -1225
- gaia/cli.py +5481 -5621
- gaia/database/__init__.py +10 -10
- gaia/database/agent.py +176 -176
- gaia/database/mixin.py +290 -290
- gaia/database/testing.py +64 -64
- gaia/eval/batch_experiment.py +2332 -2332
- gaia/eval/claude.py +542 -542
- gaia/eval/config.py +37 -37
- gaia/eval/email_generator.py +512 -512
- gaia/eval/eval.py +3179 -3179
- gaia/eval/groundtruth.py +1130 -1130
- gaia/eval/transcript_generator.py +582 -582
- gaia/eval/webapp/README.md +167 -167
- gaia/eval/webapp/package-lock.json +875 -875
- gaia/eval/webapp/package.json +20 -20
- gaia/eval/webapp/public/app.js +3402 -3402
- gaia/eval/webapp/public/index.html +87 -87
- gaia/eval/webapp/public/styles.css +3661 -3661
- gaia/eval/webapp/server.js +415 -415
- gaia/eval/webapp/test-setup.js +72 -72
- gaia/llm/__init__.py +9 -2
- gaia/llm/base_client.py +60 -0
- gaia/llm/exceptions.py +12 -0
- gaia/llm/factory.py +70 -0
- gaia/llm/lemonade_client.py +3236 -3221
- gaia/llm/lemonade_manager.py +294 -294
- gaia/llm/providers/__init__.py +9 -0
- gaia/llm/providers/claude.py +108 -0
- gaia/llm/providers/lemonade.py +120 -0
- gaia/llm/providers/openai_provider.py +79 -0
- gaia/llm/vlm_client.py +382 -382
- gaia/logger.py +189 -189
- gaia/mcp/agent_mcp_server.py +245 -245
- gaia/mcp/blender_mcp_client.py +138 -138
- gaia/mcp/blender_mcp_server.py +648 -648
- gaia/mcp/context7_cache.py +332 -332
- gaia/mcp/external_services.py +518 -518
- gaia/mcp/mcp_bridge.py +811 -550
- gaia/mcp/servers/__init__.py +6 -6
- gaia/mcp/servers/docker_mcp.py +83 -83
- gaia/perf_analysis.py +361 -0
- gaia/rag/__init__.py +10 -10
- gaia/rag/app.py +293 -293
- gaia/rag/demo.py +304 -304
- gaia/rag/pdf_utils.py +235 -235
- gaia/rag/sdk.py +2194 -2194
- gaia/security.py +163 -163
- gaia/talk/app.py +289 -289
- gaia/talk/sdk.py +538 -538
- gaia/testing/__init__.py +87 -87
- gaia/testing/assertions.py +330 -330
- gaia/testing/fixtures.py +333 -333
- gaia/testing/mocks.py +493 -493
- gaia/util.py +46 -46
- gaia/utils/__init__.py +33 -33
- gaia/utils/file_watcher.py +675 -675
- gaia/utils/parsing.py +223 -223
- gaia/version.py +100 -100
- amd_gaia-0.14.3.dist-info/RECORD +0 -168
- gaia/agents/code/app.py +0 -266
- gaia/llm/llm_client.py +0 -729
- {amd_gaia-0.14.3.dist-info → amd_gaia-0.15.1.dist-info}/WHEEL +0 -0
- {amd_gaia-0.14.3.dist-info → amd_gaia-0.15.1.dist-info}/top_level.txt +0 -0
gaia/agents/code/tools/file_io.py
@@ -1,845 +1,845 @@
#!/usr/bin/env python
# Copyright(C) 2024-2025 Advanced Micro Devices, Inc. All rights reserved.
# SPDX-License-Identifier: MIT
"""
File I/O tools mixin for code agents.

This module provides a mixin class with file I/O operations that can be
inherited by agents that need file manipulation capabilities.
"""

import ast
import difflib
import os
from typing import Any, Dict, Optional

from gaia.agents.base.tools import tool


class FileIOToolsMixin:
    """Mixin class providing file I/O tools for code agents.

    This class provides a collection of file I/O operations as tools that can be
    registered and used by agents. It includes reading, writing, editing, searching,
    and diffing capabilities for Python files.

    Attributes (provided by CodeAgent via ValidationAndParsingMixin):
        _validate_python_syntax: Method to validate Python syntax
        _parse_python_code: Method to parse Python code and extract structure

    NOTE: This mixin expects the agent to also have ValidationAndParsingMixin
    for _validate_python_syntax() and _parse_python_code() methods.
    """

    def register_file_io_tools(self) -> None:
        """Register all file I/O tools."""

        @tool
        def read_file(file_path: str) -> Dict[str, Any]:
            """Read any file and intelligently analyze based on file type.

            Automatically detects file type and provides appropriate analysis:
            - Python files (.py): Syntax validation + symbol extraction (functions/classes)
            - Markdown files (.md): Headers + code blocks + links
            - Other text files: Raw content

            Args:
                file_path: Path to the file to read

            Returns:
                Dictionary with file content and type-specific metadata
            """
            try:
                # Security check
                if not self.path_validator.is_path_allowed(file_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {file_path} is not in allowed paths",
                    }

                if not os.path.exists(file_path):
                    return {"status": "error", "error": f"File not found: {file_path}"}

                # Read file content
                try:
                    with open(file_path, "r", encoding="utf-8") as f:
                        content = f.read()
                except UnicodeDecodeError:
                    # Binary file
                    with open(file_path, "rb") as f:
                        content_bytes = f.read()
                    return {
                        "status": "success",
                        "file_path": file_path,
                        "file_type": "binary",
                        "content": f"[Binary file, {len(content_bytes)} bytes]",
                        "is_binary": True,
                        "size_bytes": len(content_bytes),
                    }

                # Detect file type by extension
                ext = os.path.splitext(file_path)[1].lower()

                # Base result with common fields
                result = {
                    "status": "success",
                    "file_path": file_path,
                    "content": content,
                    "line_count": len(content.splitlines()),
                    "size_bytes": len(content.encode("utf-8")),
                }

                # Python file - add syntax validation and symbol extraction
                if ext == ".py":
                    import re

                    result["file_type"] = "python"

                    # Validate syntax using mixin method
                    validation = self._validate_python_syntax(content)
                    result["is_valid"] = validation["is_valid"]
                    result["errors"] = validation.get("errors", [])

                    # Extract symbols using mixin method
                    if validation["is_valid"]:
                        parsed = self._parse_python_code(content)
                        # Handle both ParsedCode object and dict (for backward compat)
                        if hasattr(parsed, "symbols"):
                            result["symbols"] = [
                                {"name": s.name, "type": s.type, "line": s.line}
                                for s in parsed.symbols
                            ]
                        elif hasattr(parsed, "ast_tree"):
                            # ParsedCode object
                            tree = parsed.ast_tree
                            symbols = []
                            for node in ast.walk(tree):
                                if isinstance(
                                    node, (ast.FunctionDef, ast.AsyncFunctionDef)
                                ):
                                    symbols.append(
                                        {
                                            "name": node.name,
                                            "type": "function",
                                            "line": node.lineno,
                                        }
                                    )
                                elif isinstance(node, ast.ClassDef):
                                    symbols.append(
                                        {
                                            "name": node.name,
                                            "type": "class",
                                            "line": node.lineno,
                                        }
                                    )
                            result["symbols"] = symbols

                # Markdown file - extract structure
                elif ext == ".md":
                    import re

                    result["file_type"] = "markdown"

                    # Extract headers
                    headers = re.findall(r"^#{1,6}\s+(.+)$", content, re.MULTILINE)
                    result["headers"] = headers

                    # Extract code blocks
                    code_blocks = re.findall(r"```(\w*)\n(.*?)```", content, re.DOTALL)
                    result["code_blocks"] = [
                        {"language": lang, "code": code} for lang, code in code_blocks
                    ]

                    # Extract links
                    links = re.findall(r"\[([^\]]+)\]\(([^)]+)\)", content)
                    result["links"] = [
                        {"text": text, "url": url} for text, url in links
                    ]

                # Other text files
                else:
                    result["file_type"] = ext[1:] if ext else "text"

                return result

            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def write_python_file(
            file_path: str,
            content: str,
            validate: bool = True,
            create_dirs: bool = True,
        ) -> Dict[str, Any]:
            """Write Python code to a file.

            Args:
                file_path: Path where to write the file
                content: Python code content
                validate: Whether to validate syntax before writing
                create_dirs: Whether to create parent directories

            Returns:
                Dictionary with write operation results
            """
            try:
                # Validate syntax if requested (using mixin method)
                if validate:
                    validation = self._validate_python_syntax(content)
                    if not validation["is_valid"]:
                        return {
                            "status": "error",
                            "error": "Invalid Python syntax",
                            "syntax_errors": validation.get("errors", []),
                        }

                # Security check
                if not self.path_validator.is_path_allowed(file_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {file_path} is not in allowed paths",
                    }

                # Create parent directories if needed
                if create_dirs and os.path.dirname(file_path):
                    os.makedirs(os.path.dirname(file_path), exist_ok=True)

                # Write the file
                with open(file_path, "w", encoding="utf-8") as f:
                    f.write(content)

                return {
                    "status": "success",
                    "file_path": file_path,
                    "bytes_written": len(content.encode("utf-8")),
                    "line_count": len(content.splitlines()),
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def edit_python_file(
            file_path: str,
            old_content: str,
            new_content: str,
            backup: bool = True,
            dry_run: bool = False,
        ) -> Dict[str, Any]:
            """Edit a Python file by replacing content.

            Args:
                file_path: Path to the file to edit
                old_content: Content to find and replace
                new_content: New content to insert
                backup: Whether to create a backup
                dry_run: Whether to only simulate the edit

            Returns:
                Dictionary with edit operation results
            """
            try:
                # Security check
                if not self.path_validator.is_path_allowed(file_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {file_path} is not in allowed paths",
                    }

                # Read current content
                if not os.path.exists(file_path):
                    return {"status": "error", "error": f"File not found: {file_path}"}

                with open(file_path, "r", encoding="utf-8") as f:
                    current_content = f.read()

                # Check if old content exists
                if old_content not in current_content:
                    return {
                        "status": "error",
                        "error": "Content to replace not found in file",
                    }

                # Create new content
                modified_content = current_content.replace(old_content, new_content, 1)

                # Validate new content (using mixin method)
                validation = self._validate_python_syntax(modified_content)
                if not validation["is_valid"]:
                    return {
                        "status": "error",
                        "error": "Edit would result in invalid Python syntax",
                        "syntax_errors": validation.get("errors", []),
                    }

                # Generate diff
                diff = "\n".join(
                    difflib.unified_diff(
                        current_content.splitlines(keepends=True),
                        modified_content.splitlines(keepends=True),
                        fromfile=file_path,
                        tofile=file_path,
                    )
                )

                if dry_run:
                    return {
                        "status": "success",
                        "dry_run": True,
                        "diff": diff,
                        "would_change": current_content != modified_content,
                    }

                # Create backup if requested
                if backup:
                    backup_path = f"{file_path}.bak"
                    with open(backup_path, "w", encoding="utf-8") as f:
                        f.write(current_content)

                # Write the modified content
                with open(file_path, "w", encoding="utf-8") as f:
                    f.write(modified_content)

                return {
                    "status": "success",
                    "file_path": file_path,
                    "diff": diff,
                    "backup_created": backup,
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def search_code(
            directory: str = ".",
            pattern: str = "",
            file_extension: str = ".py",
            max_results: int = 100,
        ) -> Dict[str, Any]:
            """Search for patterns in code files.

            Args:
                directory: Directory to search in
                pattern: Pattern to search for
                file_extension: File extension to filter
                max_results: Maximum number of results

            Returns:
                Dictionary with search results
            """
            try:
                # Security check
                if not self.path_validator.is_path_allowed(directory):
                    return {
                        "status": "error",
                        "error": f"Access denied: {directory} is not in allowed paths",
                    }

                results = []
                files_searched = 0
                files_with_matches = 0

                for root, _, files in os.walk(directory):
                    for file in files:
                        if not file.endswith(file_extension):
                            continue

                        file_path = os.path.join(root, file)
                        files_searched += 1

                        try:
                            with open(file_path, "r", encoding="utf-8") as f:
                                content = f.read()

                            if pattern in content:
                                files_with_matches += 1
                                # Find line numbers with matches
                                matches = []
                                for i, line in enumerate(content.splitlines(), 1):
                                    if pattern in line:
                                        matches.append(
                                            {"line": i, "content": line.strip()}
                                        )

                                results.append(
                                    {
                                        "file": os.path.relpath(file_path, directory),
                                        "matches": matches[
                                            :10
                                        ],  # Limit matches per file
                                    }
                                )

                                if len(results) >= max_results:
                                    break
                        except Exception:
                            continue

                    if len(results) >= max_results:
                        break

                return {
                    "status": "success",
                    "pattern": pattern,
                    "directory": directory,
                    "files_searched": files_searched,
                    "files_with_matches": files_with_matches,
                    "results": results,
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def generate_diff(
            file_path: str, new_content: str, context_lines: int = 3
        ) -> Dict[str, Any]:
            """Generate a unified diff for a file.

            Args:
                file_path: Path to the original file
                new_content: New content to compare
                context_lines: Number of context lines in diff

            Returns:
                Dictionary with diff information
            """
            try:
                # Security check
                if not self.path_validator.is_path_allowed(file_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {file_path} is not in allowed paths",
                    }

                # Read original content
                if os.path.exists(file_path):
                    with open(file_path, "r", encoding="utf-8") as f:
                        original_content = f.read()
                else:
                    original_content = ""

                # Generate unified diff
                diff = list(
                    difflib.unified_diff(
                        original_content.splitlines(keepends=True),
                        new_content.splitlines(keepends=True),
                        fromfile=file_path,
                        tofile=file_path,
                        n=context_lines,
                    )
                )

                # Count changes
                additions = sum(
                    1
                    for line in diff
                    if line.startswith("+") and not line.startswith("+++")
                )
                deletions = sum(
                    1
                    for line in diff
                    if line.startswith("-") and not line.startswith("---")
                )

                return {
                    "status": "success",
                    "file_path": file_path,
                    "diff": "".join(diff),
                    "additions": additions,
                    "deletions": deletions,
                    "has_changes": bool(diff),
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def write_markdown_file(
            file_path: str, content: str, create_dirs: bool = True
        ) -> Dict[str, Any]:
            """Write content to a markdown file.

            Args:
                file_path: Path where to write the file
                content: Markdown content
                create_dirs: Whether to create parent directories

            Returns:
                Dictionary with write operation results
            """
            try:
                # Security check
                if not self.path_validator.is_path_allowed(file_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {file_path} is not in allowed paths",
                    }

                # Create parent directories if needed
                if create_dirs:
                    os.makedirs(os.path.dirname(file_path), exist_ok=True)

                # Write the file
                with open(file_path, "w", encoding="utf-8") as f:
                    f.write(content)

                return {
                    "status": "success",
                    "file_path": file_path,
                    "bytes_written": len(content.encode("utf-8")),
                    "line_count": len(content.splitlines()),
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def write_file(
            file_path: str,
            content: str,
            create_dirs: bool = True,
            project_dir: Optional[str] = None,
        ) -> Dict[str, Any]:
            """Write content to any file (TypeScript, JavaScript, JSON, etc.) without syntax validation.

            Use this tool for non-Python files like .tsx, .ts, .js, .json, etc.

            Args:
                file_path: Path where to write the file
                content: Content to write to the file
                create_dirs: Whether to create parent directories if they don't exist
                project_dir: Project root directory for resolving relative paths

            Returns:
                dict: Status and file information
            """
            try:
                from pathlib import Path

                path = Path(file_path)
                if project_dir:
                    base = Path(project_dir).resolve()
                    if not path.is_absolute():
                        path = base / path
                path = path.resolve()

                # Create parent directories if requested
                if create_dirs and not path.parent.exists():
                    path.parent.mkdir(parents=True, exist_ok=True)

                # Write content to file
                path.write_text(content, encoding="utf-8")

                console = getattr(self, "console", None)
                if console:
                    if content.strip():
                        console.print_prompt(
                            content,
                            title=f"✏️ write_file → {path}",
                        )
                    else:
                        console.print_info(
                            f"write_file: {path} was created but no content was written."
                        )

                return {
                    "status": "success",
                    "file_path": str(path),
                    "size_bytes": len(content),
                    "file_type": path.suffix[1:] if path.suffix else "unknown",
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def edit_file(
            file_path: str,
            old_content: str,
            new_content: str,
            project_dir: Optional[str] = None,
        ) -> Dict[str, Any]:
            """Edit any file by replacing old content with new content (no syntax validation).

            Use this tool for non-Python files like .tsx, .ts, .js, .json, etc.

            Args:
                file_path: Path to the file to edit
                old_content: Exact content to find and replace
                new_content: New content to replace with
                project_dir: Project root directory for resolving relative paths

            Returns:
                dict: Status and edit information
            """
            try:
                from pathlib import Path

                path = Path(file_path)
                if project_dir:
                    base = Path(project_dir).resolve()
                    if not path.is_absolute():
                        path = base / path
                path = path.resolve()

                if not path.exists():
                    return {"status": "error", "error": f"File not found: {file_path}"}

                # Read current content
                current_content = path.read_text(encoding="utf-8")

                # Check if old_content exists in file
                if old_content not in current_content:
                    return {
                        "status": "error",
                        "error": f"Content to replace not found in {file_path}",
                    }

                # Replace content
                updated_content = current_content.replace(old_content, new_content, 1)

                # Generate diff before writing
                diff = "\n".join(
                    difflib.unified_diff(
                        current_content.splitlines(keepends=True),
                        updated_content.splitlines(keepends=True),
                        fromfile=f"a/{os.path.basename(str(path))}",
                        tofile=f"b/{os.path.basename(str(path))}",
                        lineterm="",
                    )
                )

                # Write updated content
                path.write_text(updated_content, encoding="utf-8")

                console = getattr(self, "console", None)
                if console:
                    if diff.strip():
                        console.print_diff(diff, os.path.basename(str(path)))
                    else:
                        console.print_info(f"edit_file: No changes were made to {path}")

                return {
                    "status": "success",
                    "file_path": str(path),
                    "old_size": len(current_content),
                    "new_size": len(updated_content),
                    "file_type": path.suffix[1:] if path.suffix else "unknown",
                    "diff": diff,
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def update_gaia_md(
            project_root: str = ".",
            project_name: str = None,
            description: str = None,
            structure: Dict[str, Any] = None,
            instructions: str = None,
        ) -> Dict[str, Any]:
            """Create or update GAIA.md file for project context.

            Args:
                project_root: Root directory of the project
                project_name: Name of the project
                description: Project description
                structure: Project structure dictionary
                instructions: Special instructions for GAIA

            Returns:
                Dictionary with update results
            """
            try:
                from datetime import datetime

                gaia_path = os.path.join(project_root, "GAIA.md")

                # Security check
                if not self.path_validator.is_path_allowed(gaia_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {gaia_path} is not in allowed paths",
                    }

                # Start building content
                content = "# GAIA.md\n\n"
                content += "This file provides guidance to GAIA Code Agent when working with code in this project.\n\n"

                if project_name:
                    content += f"## Project: {project_name}\n\n"

                if description:
                    content += f"## Description\n{description}\n\n"

                content += f"**Last Updated:** {datetime.now().isoformat()}\n\n"

                if structure:
                    content += "## Project Structure\n```\n"

                    def format_structure(struct, indent=""):
                        result = ""
                        if isinstance(struct, dict):
                            for key, value in struct.items():
                                if isinstance(value, dict):
                                    result += f"{indent}{key}\n"
                                    result += format_structure(value, indent + " ")
                                else:
                                    result += f"{indent}{key} - {value}\n"
                        return result

                    content += format_structure(structure)
                    content += "```\n\n"

                if instructions:
                    content += f"## Special Instructions\n{instructions}\n\n"

                # Add default sections
                content += "## Development Guidelines\n"
                content += "- Follow PEP 8 style guidelines\n"
                content += "- Add docstrings to all functions and classes\n"
                content += "- Include type hints where appropriate\n"
                content += "- Write unit tests for new functionality\n\n"

                content += "## Code Quality\n"
                content += "- All code should pass pylint checks\n"
                content += "- Use Black formatter for consistent style\n"
                content += "- Ensure proper error handling\n\n"

                # Write the file
                with open(gaia_path, "w", encoding="utf-8") as f:
                    f.write(content)

                return {
                    "status": "success",
                    "file_path": gaia_path,
                    "created": not os.path.exists(gaia_path),
                    "message": f"GAIA.md {'created' if not os.path.exists(gaia_path) else 'updated'} at {gaia_path}",
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        @tool
        def replace_function(
            file_path: str,
            function_name: str,
            new_implementation: str,
            backup: bool = True,
        ) -> Dict[str, Any]:
            """Replace a specific function in a Python file.

            Args:
                file_path: Path to the Python file
                function_name: Name of the function to replace
                new_implementation: New function implementation
                backup: Whether to create backup

            Returns:
                Dictionary with replacement result
            """
            try:
                # Security check
                if not self.path_validator.is_path_allowed(file_path):
                    return {
                        "status": "error",
                        "error": f"Access denied: {file_path} is not in allowed paths",
                    }

                if not os.path.exists(file_path):
                    return {"status": "error", "error": f"File not found: {file_path}"}

                with open(file_path, "r", encoding="utf-8") as f:
                    content = f.read()

                # Parse the file to find the function
                try:
                    tree = ast.parse(content)
                except SyntaxError as e:
                    return {"status": "error", "error": f"File has syntax errors: {e}"}

                # Find the function node
                function_node = None
                for node in ast.walk(tree):
                    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                        if node.name == function_name:
                            function_node = node
                            break

                if not function_node:
                    return {
                        "status": "error",
                        "error": f"Function '{function_name}' not found in file",
                    }

                # Get line range of the function
                lines = content.splitlines(keepends=True)
                start_line = function_node.lineno - 1

                # Find end of function (simplified - finds next def or class at same indent)
                end_line = len(lines)
                indent_level = len(lines[start_line]) - len(lines[start_line].lstrip())

                for i in range(start_line + 1, len(lines)):
                    line = lines[i]
                    if line.strip() and not line.lstrip().startswith("#"):
                        current_indent = len(line) - len(line.lstrip())
                        if current_indent <= indent_level and line.strip():
                            if line.lstrip().startswith(
                                ("def ", "class ", "async def ")
                            ):
                                end_line = i
                                break

                # Create backup if requested
                if backup:
                    backup_path = f"{file_path}.bak"
                    with open(backup_path, "w", encoding="utf-8") as f:
                        f.write(content)

                # Replace the function
                new_lines = (
                    lines[:start_line] + [new_implementation + "\n"] + lines[end_line:]
                )
                modified_content = "".join(new_lines)

                # Validate new content (using mixin method)
                validation = self._validate_python_syntax(modified_content)
                if not validation["is_valid"]:
                    return {
                        "status": "error",
                        "error": "Replacement would result in invalid syntax",
                        "syntax_errors": validation.get("errors", []),
                    }

                # Write the modified content
                with open(file_path, "w", encoding="utf-8") as f:
                    f.write(modified_content)

                # Generate diff
                diff = "\n".join(
                    difflib.unified_diff(
                        content.splitlines(keepends=True),
                        modified_content.splitlines(keepends=True),
                        fromfile=file_path,
                        tofile=file_path,
                    )
                )

                return {
                    "status": "success",
                    "file_path": file_path,
                    "function_replaced": function_name,
                    "backup_path": backup_path if backup else None,
                    "diff": diff,
                }
            except Exception as e:
                return {"status": "error", "error": str(e)}

        # Return the list of registered tools for tracking
        return [
            "read_file",
            "write_python_file",
            "edit_python_file",
            "search_code",
            "generate_diff",
            "write_markdown_file",
            "update_gaia_md",
            "replace_function",
        ]
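
A minimal usage sketch to illustrate how the mixin's docstring expects to be satisfied before calling register_file_io_tools(). Everything outside the import line is an assumption for illustration: the stub path validator, the stand-in _validate_python_syntax/_parse_python_code methods, and the MinimalCodeAgent host class are not part of amd-gaia, and the sketch assumes the @tool decorator can register the closures without further agent wiring (that wiring is not visible in this diff).

# Illustrative sketch only; the stub classes below are assumptions, not amd-gaia code.
from gaia.agents.code.tools.file_io import FileIOToolsMixin


class _AllowAllPaths:
    """Hypothetical stand-in for the agent's path validator."""

    def is_path_allowed(self, path: str) -> bool:
        return True  # a real agent would restrict this to its workspace


class MinimalCodeAgent(FileIOToolsMixin):
    """Hypothetical host supplying what FileIOToolsMixin's docstring requires."""

    def __init__(self):
        self.path_validator = _AllowAllPaths()
        self.console = None  # the tools look this up with getattr() and skip it if absent

    # Simplified stand-ins for the ValidationAndParsingMixin methods.
    def _validate_python_syntax(self, code: str) -> dict:
        import ast

        try:
            ast.parse(code)
            return {"is_valid": True, "errors": []}
        except SyntaxError as exc:
            return {"is_valid": False, "errors": [str(exc)]}

    def _parse_python_code(self, code: str):
        import ast

        return ast.parse(code)  # the real mixin works with a richer ParsedCode object


agent = MinimalCodeAgent()
tool_names = agent.register_file_io_tools()
print(tool_names)  # ["read_file", "write_python_file", ..., "replace_function"]

Note that register_file_io_tools() is annotated -> None but, as the file above shows, it actually returns the tool-name list, which is what the sketch prints.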
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# Copyright(C) 2024-2025 Advanced Micro Devices, Inc. All rights reserved.
|
|
3
|
+
# SPDX-License-Identifier: MIT
|
|
4
|
+
"""
|
|
5
|
+
File I/O tools mixin for code agents.
|
|
6
|
+
|
|
7
|
+
This module provides a mixin class with file I/O operations that can be
|
|
8
|
+
inherited by agents that need file manipulation capabilities.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import ast
|
|
12
|
+
import difflib
|
|
13
|
+
import os
|
|
14
|
+
from typing import Any, Dict, Optional
|
|
15
|
+
|
|
16
|
+
from gaia.agents.base.tools import tool
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class FileIOToolsMixin:
|
|
20
|
+
"""Mixin class providing file I/O tools for code agents.
|
|
21
|
+
|
|
22
|
+
This class provides a collection of file I/O operations as tools that can be
|
|
23
|
+
registered and used by agents. It includes reading, writing, editing, searching,
|
|
24
|
+
and diffing capabilities for Python files.
|
|
25
|
+
|
|
26
|
+
Attributes (provided by CodeAgent via ValidationAndParsingMixin):
|
|
27
|
+
_validate_python_syntax: Method to validate Python syntax
|
|
28
|
+
_parse_python_code: Method to parse Python code and extract structure
|
|
29
|
+
|
|
30
|
+
NOTE: This mixin expects the agent to also have ValidationAndParsingMixin
|
|
31
|
+
for _validate_python_syntax() and _parse_python_code() methods.
|
|
32
|
+
"""
|
|
33
|
+
|
|
34
|
+
def register_file_io_tools(self) -> None:
|
|
35
|
+
"""Register all file I/O tools."""
|
|
36
|
+
|
|
37
|
+
@tool
|
|
38
|
+
def read_file(file_path: str) -> Dict[str, Any]:
|
|
39
|
+
"""Read any file and intelligently analyze based on file type.
|
|
40
|
+
|
|
41
|
+
Automatically detects file type and provides appropriate analysis:
|
|
42
|
+
- Python files (.py): Syntax validation + symbol extraction (functions/classes)
|
|
43
|
+
- Markdown files (.md): Headers + code blocks + links
|
|
44
|
+
- Other text files: Raw content
|
|
45
|
+
|
|
46
|
+
Args:
|
|
47
|
+
file_path: Path to the file to read
|
|
48
|
+
|
|
49
|
+
Returns:
|
|
50
|
+
Dictionary with file content and type-specific metadata
|
|
51
|
+
"""
|
|
52
|
+
try:
|
|
53
|
+
# Security check
|
|
54
|
+
if not self.path_validator.is_path_allowed(file_path):
|
|
55
|
+
return {
|
|
56
|
+
"status": "error",
|
|
57
|
+
"error": f"Access denied: {file_path} is not in allowed paths",
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
if not os.path.exists(file_path):
|
|
61
|
+
return {"status": "error", "error": f"File not found: {file_path}"}
|
|
62
|
+
|
|
63
|
+
# Read file content
|
|
64
|
+
try:
|
|
65
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
|
66
|
+
content = f.read()
|
|
67
|
+
except UnicodeDecodeError:
|
|
68
|
+
# Binary file
|
|
69
|
+
with open(file_path, "rb") as f:
|
|
70
|
+
content_bytes = f.read()
|
|
71
|
+
return {
|
|
72
|
+
"status": "success",
|
|
73
|
+
"file_path": file_path,
|
|
74
|
+
"file_type": "binary",
|
|
75
|
+
"content": f"[Binary file, {len(content_bytes)} bytes]",
|
|
76
|
+
"is_binary": True,
|
|
77
|
+
"size_bytes": len(content_bytes),
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
# Detect file type by extension
|
|
81
|
+
ext = os.path.splitext(file_path)[1].lower()
|
|
82
|
+
|
|
83
|
+
# Base result with common fields
|
|
84
|
+
result = {
|
|
85
|
+
"status": "success",
|
|
86
|
+
"file_path": file_path,
|
|
87
|
+
"content": content,
|
|
88
|
+
"line_count": len(content.splitlines()),
|
|
89
|
+
"size_bytes": len(content.encode("utf-8")),
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
# Python file - add syntax validation and symbol extraction
|
|
93
|
+
if ext == ".py":
|
|
94
|
+
import re
|
|
95
|
+
|
|
96
|
+
result["file_type"] = "python"
|
|
97
|
+
|
|
98
|
+
# Validate syntax using mixin method
|
|
99
|
+
validation = self._validate_python_syntax(content)
|
|
100
|
+
result["is_valid"] = validation["is_valid"]
|
|
101
|
+
result["errors"] = validation.get("errors", [])
|
|
102
|
+
|
|
103
|
+
# Extract symbols using mixin method
|
|
104
|
+
if validation["is_valid"]:
|
|
105
|
+
parsed = self._parse_python_code(content)
|
|
106
|
+
# Handle both ParsedCode object and dict (for backward compat)
|
|
107
|
+
if hasattr(parsed, "symbols"):
|
|
108
|
+
result["symbols"] = [
|
|
109
|
+
{"name": s.name, "type": s.type, "line": s.line}
|
|
110
|
+
for s in parsed.symbols
|
|
111
|
+
]
|
|
112
|
+
elif hasattr(parsed, "ast_tree"):
|
|
113
|
+
# ParsedCode object
|
|
114
|
+
tree = parsed.ast_tree
|
|
115
|
+
symbols = []
|
|
116
|
+
for node in ast.walk(tree):
|
|
117
|
+
if isinstance(
|
|
118
|
+
node, (ast.FunctionDef, ast.AsyncFunctionDef)
|
|
119
|
+
):
|
|
120
|
+
symbols.append(
|
|
121
|
+
{
|
|
122
|
+
"name": node.name,
|
|
123
|
+
"type": "function",
|
|
124
|
+
"line": node.lineno,
|
|
125
|
+
}
|
|
126
|
+
)
|
|
127
|
+
elif isinstance(node, ast.ClassDef):
|
|
128
|
+
symbols.append(
|
|
129
|
+
{
|
|
130
|
+
"name": node.name,
|
|
131
|
+
"type": "class",
|
|
132
|
+
"line": node.lineno,
|
|
133
|
+
}
|
|
134
|
+
)
|
|
135
|
+
result["symbols"] = symbols
|
|
136
|
+
|
|
137
|
+
# Markdown file - extract structure
|
|
138
|
+
elif ext == ".md":
|
|
139
|
+
import re
|
|
140
|
+
|
|
141
|
+
result["file_type"] = "markdown"
|
|
142
|
+
|
|
143
|
+
# Extract headers
|
|
144
|
+
headers = re.findall(r"^#{1,6}\s+(.+)$", content, re.MULTILINE)
|
|
145
|
+
result["headers"] = headers
|
|
146
|
+
|
|
147
|
+
# Extract code blocks
|
|
148
|
+
code_blocks = re.findall(r"```(\w*)\n(.*?)```", content, re.DOTALL)
|
|
149
|
+
result["code_blocks"] = [
|
|
150
|
+
{"language": lang, "code": code} for lang, code in code_blocks
|
|
151
|
+
]
|
|
152
|
+
|
|
153
|
+
# Extract links
|
|
154
|
+
links = re.findall(r"\[([^\]]+)\]\(([^)]+)\)", content)
|
|
155
|
+
result["links"] = [
|
|
156
|
+
{"text": text, "url": url} for text, url in links
|
|
157
|
+
]
|
|
158
|
+
|
|
159
|
+
# Other text files
|
|
160
|
+
else:
|
|
161
|
+
result["file_type"] = ext[1:] if ext else "text"
|
|
162
|
+
|
|
163
|
+
return result
|
|
164
|
+
|
|
165
|
+
except Exception as e:
|
|
166
|
+
return {"status": "error", "error": str(e)}
|
|
167
|
+
|
|
168
|
+
@tool
|
|
169
|
+
def write_python_file(
|
|
170
|
+
file_path: str,
|
|
171
|
+
content: str,
|
|
172
|
+
validate: bool = True,
|
|
173
|
+
create_dirs: bool = True,
|
|
174
|
+
) -> Dict[str, Any]:
|
|
175
|
+
"""Write Python code to a file.
|
|
176
|
+
|
|
177
|
+
Args:
|
|
178
|
+
file_path: Path where to write the file
|
|
179
|
+
content: Python code content
|
|
180
|
+
validate: Whether to validate syntax before writing
|
|
181
|
+
create_dirs: Whether to create parent directories
|
|
182
|
+
|
|
183
|
+
Returns:
|
|
184
|
+
Dictionary with write operation results
|
|
185
|
+
"""
|
|
186
|
+
try:
|
|
187
|
+
# Validate syntax if requested (using mixin method)
|
|
188
|
+
if validate:
|
|
189
|
+
validation = self._validate_python_syntax(content)
|
|
190
|
+
if not validation["is_valid"]:
|
|
191
|
+
return {
|
|
192
|
+
"status": "error",
|
|
193
|
+
"error": "Invalid Python syntax",
|
|
194
|
+
"syntax_errors": validation.get("errors", []),
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
# Security check
|
|
198
|
+
if not self.path_validator.is_path_allowed(file_path):
|
|
199
|
+
return {
|
|
200
|
+
"status": "error",
|
|
201
|
+
"error": f"Access denied: {file_path} is not in allowed paths",
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
# Create parent directories if needed
|
|
205
|
+
if create_dirs and os.path.dirname(file_path):
|
|
206
|
+
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
|
207
|
+
|
|
208
|
+
# Write the file
|
|
209
|
+
with open(file_path, "w", encoding="utf-8") as f:
|
|
210
|
+
f.write(content)
|
|
211
|
+
|
|
212
|
+
return {
|
|
213
|
+
"status": "success",
|
|
214
|
+
"file_path": file_path,
|
|
215
|
+
"bytes_written": len(content.encode("utf-8")),
|
|
216
|
+
"line_count": len(content.splitlines()),
|
|
217
|
+
}
|
|
218
|
+
except Exception as e:
|
|
219
|
+
return {"status": "error", "error": str(e)}
|
|
220
|
+
|
|
221
|
+
@tool
|
|
222
|
+
def edit_python_file(
|
|
223
|
+
file_path: str,
|
|
224
|
+
old_content: str,
|
|
225
|
+
new_content: str,
|
|
226
|
+
backup: bool = True,
|
|
227
|
+
dry_run: bool = False,
|
|
228
|
+
) -> Dict[str, Any]:
|
|
229
|
+
"""Edit a Python file by replacing content.
|
|
230
|
+
|
|
231
|
+
Args:
|
|
232
|
+
file_path: Path to the file to edit
|
|
233
|
+
old_content: Content to find and replace
|
|
234
|
+
new_content: New content to insert
|
|
235
|
+
backup: Whether to create a backup
|
|
236
|
+
dry_run: Whether to only simulate the edit
|
|
237
|
+
|
|
238
|
+
Returns:
|
|
239
|
+
Dictionary with edit operation results
|
|
240
|
+
"""
|
|
241
|
+
try:
|
|
242
|
+
# Security check
|
|
243
|
+
if not self.path_validator.is_path_allowed(file_path):
|
|
244
|
+
return {
|
|
245
|
+
"status": "error",
|
|
246
|
+
"error": f"Access denied: {file_path} is not in allowed paths",
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
# Read current content
|
|
250
|
+
if not os.path.exists(file_path):
|
|
251
|
+
return {"status": "error", "error": f"File not found: {file_path}"}
|
|
252
|
+
|
|
253
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
|
254
|
+
current_content = f.read()
|
|
255
|
+
|
|
256
|
+
# Check if old content exists
|
|
257
|
+
if old_content not in current_content:
|
|
258
|
+
return {
|
|
259
|
+
"status": "error",
|
|
260
|
+
"error": "Content to replace not found in file",
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
# Create new content
|
|
264
|
+
modified_content = current_content.replace(old_content, new_content, 1)
|
|
265
|
+
|
|
266
|
+
# Validate new content (using mixin method)
|
|
267
|
+
validation = self._validate_python_syntax(modified_content)
|
|
268
|
+
if not validation["is_valid"]:
|
|
269
|
+
return {
|
|
270
|
+
"status": "error",
|
|
271
|
+
"error": "Edit would result in invalid Python syntax",
|
|
272
|
+
"syntax_errors": validation.get("errors", []),
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
# Generate diff
|
|
276
|
+
diff = "\n".join(
|
|
277
|
+
difflib.unified_diff(
|
|
278
|
+
current_content.splitlines(keepends=True),
|
|
279
|
+
modified_content.splitlines(keepends=True),
|
|
280
|
+
fromfile=file_path,
|
|
281
|
+
tofile=file_path,
|
|
282
|
+
)
|
|
283
|
+
)
|
|
284
|
+
|
|
285
|
+
if dry_run:
|
|
286
|
+
return {
|
|
287
|
+
"status": "success",
|
|
288
|
+
"dry_run": True,
|
|
289
|
+
"diff": diff,
|
|
290
|
+
"would_change": current_content != modified_content,
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
# Create backup if requested
|
|
294
|
+
if backup:
|
|
295
|
+
backup_path = f"{file_path}.bak"
|
|
296
|
+
with open(backup_path, "w", encoding="utf-8") as f:
|
|
297
|
+
f.write(current_content)
|
|
298
|
+
|
|
299
|
+
# Write the modified content
|
|
300
|
+
with open(file_path, "w", encoding="utf-8") as f:
|
|
301
|
+
f.write(modified_content)
|
|
302
|
+
|
|
303
|
+
return {
|
|
304
|
+
"status": "success",
|
|
305
|
+
"file_path": file_path,
|
|
306
|
+
"diff": diff,
|
|
307
|
+
"backup_created": backup,
|
|
308
|
+
}
|
|
309
|
+
except Exception as e:
|
|
310
|
+
return {"status": "error", "error": str(e)}
|
|
311
|
+
|
|
312
|
+
@tool
|
|
313
|
+
def search_code(
|
|
314
|
+
directory: str = ".",
|
|
315
|
+
pattern: str = "",
|
|
316
|
+
file_extension: str = ".py",
|
|
317
|
+
max_results: int = 100,
|
|
318
|
+
) -> Dict[str, Any]:
|
|
319
|
+
"""Search for patterns in code files.
|
|
320
|
+
|
|
321
|
+
Args:
|
|
322
|
+
directory: Directory to search in
|
|
323
|
+
pattern: Pattern to search for
|
|
324
|
+
file_extension: File extension to filter
|
|
325
|
+
max_results: Maximum number of results
|
|
326
|
+
|
|
327
|
+
Returns:
|
|
328
|
+
Dictionary with search results
|
|
329
|
+
"""
|
|
330
|
+
try:
|
|
331
|
+
# Security check
|
|
332
|
+
if not self.path_validator.is_path_allowed(directory):
|
|
333
|
+
return {
|
|
334
|
+
"status": "error",
|
|
335
|
+
"error": f"Access denied: {directory} is not in allowed paths",
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
results = []
|
|
339
|
+
files_searched = 0
|
|
340
|
+
files_with_matches = 0
|
|
341
|
+
|
|
342
|
+
for root, _, files in os.walk(directory):
|
|
343
|
+
for file in files:
|
|
344
|
+
if not file.endswith(file_extension):
|
|
345
|
+
continue
|
|
346
|
+
|
|
347
|
+
file_path = os.path.join(root, file)
|
|
348
|
+
files_searched += 1
|
|
349
|
+
|
|
350
|
+
try:
|
|
351
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
|
352
|
+
content = f.read()
|
|
353
|
+
|
|
354
|
+
if pattern in content:
|
|
355
|
+
files_with_matches += 1
|
|
356
|
+
# Find line numbers with matches
|
|
357
|
+
matches = []
|
|
358
|
+
for i, line in enumerate(content.splitlines(), 1):
|
|
359
|
+
if pattern in line:
|
|
360
|
+
matches.append(
|
|
361
|
+
{"line": i, "content": line.strip()}
|
|
362
|
+
)
|
|
363
|
+
|
|
364
|
+
results.append(
|
|
365
|
+
{
|
|
366
|
+
"file": os.path.relpath(file_path, directory),
|
|
367
|
+
"matches": matches[
|
|
368
|
+
:10
|
|
369
|
+
], # Limit matches per file
|
|
370
|
+
}
|
|
371
|
+
)
|
|
372
|
+
|
|
373
|
+
if len(results) >= max_results:
|
|
374
|
+
break
|
|
375
|
+
except Exception:
|
|
376
|
+
continue
|
|
377
|
+
|
|
378
|
+
if len(results) >= max_results:
|
|
379
|
+
break
|
|
380
|
+
|
|
381
|
+
return {
|
|
382
|
+
"status": "success",
|
|
383
|
+
"pattern": pattern,
|
|
384
|
+
"directory": directory,
|
|
385
|
+
"files_searched": files_searched,
|
|
386
|
+
"files_with_matches": files_with_matches,
|
|
387
|
+
"results": results,
|
|
388
|
+
}
|
|
389
|
+
except Exception as e:
|
|
390
|
+
return {"status": "error", "error": str(e)}
|
|
391
|
+
|
|
392
|
+
@tool
|
|
393
|
+
def generate_diff(
|
|
394
|
+
file_path: str, new_content: str, context_lines: int = 3
|
|
395
|
+
) -> Dict[str, Any]:
|
|
396
|
+
"""Generate a unified diff for a file.
|
|
397
|
+
|
|
398
|
+
Args:
|
|
399
|
+
file_path: Path to the original file
|
|
400
|
+
new_content: New content to compare
|
|
401
|
+
context_lines: Number of context lines in diff
|
|
402
|
+
|
|
403
|
+
Returns:
|
|
404
|
+
Dictionary with diff information
|
|
405
|
+
"""
|
|
406
|
+
try:
|
|
407
|
+
# Security check
|
|
408
|
+
if not self.path_validator.is_path_allowed(file_path):
|
|
409
|
+
return {
|
|
410
|
+
"status": "error",
|
|
411
|
+
"error": f"Access denied: {file_path} is not in allowed paths",
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
# Read original content
|
|
415
|
+
if os.path.exists(file_path):
|
|
416
|
+
with open(file_path, "r", encoding="utf-8") as f:
|
|
417
|
+
original_content = f.read()
|
|
418
|
+
else:
|
|
419
|
+
original_content = ""
|
|
420
|
+
|
|
421
|
+
# Generate unified diff
|
|
422
|
+
diff = list(
|
|
423
|
+
difflib.unified_diff(
|
|
424
|
+
original_content.splitlines(keepends=True),
|
|
425
|
+
new_content.splitlines(keepends=True),
|
|
426
|
+
fromfile=file_path,
|
|
427
|
+
tofile=file_path,
|
|
428
|
+
n=context_lines,
|
|
429
|
+
)
|
|
430
|
+
)
|
|
431
|
+
|
|
432
|
+
# Count changes
|
|
433
|
+
additions = sum(
|
|
434
|
+
1
|
|
435
|
+
for line in diff
|
|
436
|
+
if line.startswith("+") and not line.startswith("+++")
|
|
437
|
+
)
|
|
438
|
+
deletions = sum(
|
|
439
|
+
1
|
|
440
|
+
for line in diff
|
|
441
|
+
if line.startswith("-") and not line.startswith("---")
|
|
442
|
+
)
|
|
443
|
+
|
|
444
|
+
return {
|
|
445
|
+
"status": "success",
|
|
446
|
+
"file_path": file_path,
|
|
447
|
+
"diff": "".join(diff),
|
|
448
|
+
"additions": additions,
|
|
449
|
+
"deletions": deletions,
|
|
450
|
+
"has_changes": bool(diff),
|
|
451
|
+
}
|
|
452
|
+
except Exception as e:
|
|
453
|
+
return {"status": "error", "error": str(e)}
|
|
454
|
+
|
|
455
|
+
@tool
|
|
456
|
+
def write_markdown_file(
|
|
457
|
+
file_path: str, content: str, create_dirs: bool = True
|
|
458
|
+
) -> Dict[str, Any]:
|
|
459
|
+
"""Write content to a markdown file.
|
|
460
|
+
|
|
461
|
+
Args:
|
|
462
|
+
file_path: Path where to write the file
|
|
463
|
+
content: Markdown content
|
|
464
|
+
create_dirs: Whether to create parent directories
|
|
465
|
+
|
|
466
|
+
Returns:
|
|
467
|
+
Dictionary with write operation results
|
|
468
|
+
"""
|
|
469
|
+
try:
|
|
470
|
+
# Security check
|
|
471
|
+
if not self.path_validator.is_path_allowed(file_path):
|
|
472
|
+
return {
|
|
473
|
+
"status": "error",
|
|
474
|
+
"error": f"Access denied: {file_path} is not in allowed paths",
|
|
475
|
+
}
|
|
476
|
+
|
|
477
|
+
# Create parent directories if needed
|
|
478
|
+
if create_dirs:
|
|
479
|
+
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
|
480
|
+
|
|
481
|
+
# Write the file
|
|
482
|
+
with open(file_path, "w", encoding="utf-8") as f:
|
|
483
|
+
f.write(content)
|
|
484
|
+
|
|
485
|
+
return {
|
|
486
|
+
"status": "success",
|
|
487
|
+
"file_path": file_path,
|
|
488
|
+
"bytes_written": len(content.encode("utf-8")),
|
|
489
|
+
"line_count": len(content.splitlines()),
|
|
490
|
+
}
|
|
491
|
+
except Exception as e:
|
|
492
|
+
return {"status": "error", "error": str(e)}
|
|
493
|
+
|
|
494
|
+
+        @tool
+        def write_file(
+            file_path: str,
+            content: str,
+            create_dirs: bool = True,
+            project_dir: Optional[str] = None,
+        ) -> Dict[str, Any]:
+            """Write content to any file (TypeScript, JavaScript, JSON, etc.) without syntax validation.
+
+            Use this tool for non-Python files like .tsx, .ts, .js, .json, etc.
+
+            Args:
+                file_path: Path where to write the file
+                content: Content to write to the file
+                create_dirs: Whether to create parent directories if they don't exist
+                project_dir: Project root directory for resolving relative paths
+
+            Returns:
+                dict: Status and file information
+            """
+            try:
+                from pathlib import Path
+
+                path = Path(file_path)
+                if project_dir:
+                    base = Path(project_dir).resolve()
+                    if not path.is_absolute():
+                        path = base / path
+                path = path.resolve()
+
+                # Create parent directories if requested
+                if create_dirs and not path.parent.exists():
+                    path.parent.mkdir(parents=True, exist_ok=True)
+
+                # Write content to file
+                path.write_text(content, encoding="utf-8")
+
+                console = getattr(self, "console", None)
+                if console:
+                    if content.strip():
+                        console.print_prompt(
+                            content,
+                            title=f"✏️ write_file → {path}",
+                        )
+                    else:
+                        console.print_info(
+                            f"write_file: {path} was created but no content was written."
+                        )
+
+                return {
+                    "status": "success",
+                    "file_path": str(path),
+                    "size_bytes": len(content),
+                    "file_type": path.suffix[1:] if path.suffix else "unknown",
+                }
+            except Exception as e:
+                return {"status": "error", "error": str(e)}
+
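A usage sketch for `write_file`, again assuming direct invocation with illustrative values. Relative paths are resolved against `project_dir`; unlike `write_markdown_file`, this tool does not route through `path_validator` in the code above.

```python
result = write_file(
    file_path="src/components/Button.tsx",                        # illustrative
    content="export const Button = () => <button>OK</button>;\n",
    project_dir="/tmp/my-app",                                     # illustrative root
)
print(result["file_path"], result["file_type"])  # absolute resolved path, "tsx"
```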
+        @tool
+        def edit_file(
+            file_path: str,
+            old_content: str,
+            new_content: str,
+            project_dir: Optional[str] = None,
+        ) -> Dict[str, Any]:
+            """Edit any file by replacing old content with new content (no syntax validation).
+
+            Use this tool for non-Python files like .tsx, .ts, .js, .json, etc.
+
+            Args:
+                file_path: Path to the file to edit
+                old_content: Exact content to find and replace
+                new_content: New content to replace with
+                project_dir: Project root directory for resolving relative paths
+
+            Returns:
+                dict: Status and edit information
+            """
+            try:
+                from pathlib import Path
+
+                path = Path(file_path)
+                if project_dir:
+                    base = Path(project_dir).resolve()
+                    if not path.is_absolute():
+                        path = base / path
+                path = path.resolve()
+
+                if not path.exists():
+                    return {"status": "error", "error": f"File not found: {file_path}"}
+
+                # Read current content
+                current_content = path.read_text(encoding="utf-8")
+
+                # Check if old_content exists in file
+                if old_content not in current_content:
+                    return {
+                        "status": "error",
+                        "error": f"Content to replace not found in {file_path}",
+                    }
+
+                # Replace content
+                updated_content = current_content.replace(old_content, new_content, 1)
+
+                # Generate diff before writing
+                diff = "\n".join(
+                    difflib.unified_diff(
+                        current_content.splitlines(keepends=True),
+                        updated_content.splitlines(keepends=True),
+                        fromfile=f"a/{os.path.basename(str(path))}",
+                        tofile=f"b/{os.path.basename(str(path))}",
+                        lineterm="",
+                    )
+                )
+
+                # Write updated content
+                path.write_text(updated_content, encoding="utf-8")
+
+                console = getattr(self, "console", None)
+                if console:
+                    if diff.strip():
+                        console.print_diff(diff, os.path.basename(str(path)))
+                    else:
+                        console.print_info(f"edit_file: No changes were made to {path}")
+
+                return {
+                    "status": "success",
+                    "file_path": str(path),
+                    "old_size": len(current_content),
+                    "new_size": len(updated_content),
+                    "file_type": path.suffix[1:] if path.suffix else "unknown",
+                    "diff": diff,
+                }
+            except Exception as e:
+                return {"status": "error", "error": str(e)}
+
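A usage sketch for `edit_file` with illustrative values. Because the implementation uses `str.replace(old, new, 1)`, only the first occurrence of `old_content` is rewritten, and the returned `diff` reflects exactly that single replacement.

```python
result = edit_file(
    file_path="package.json",                # illustrative
    old_content='"version": "0.1.0"',
    new_content='"version": "0.2.0"',
    project_dir="/tmp/my-app",               # illustrative root
)
if result["status"] == "success":
    print(result["diff"])                    # unified diff of the one replacement
```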
+        @tool
+        def update_gaia_md(
+            project_root: str = ".",
+            project_name: str = None,
+            description: str = None,
+            structure: Dict[str, Any] = None,
+            instructions: str = None,
+        ) -> Dict[str, Any]:
+            """Create or update GAIA.md file for project context.
+
+            Args:
+                project_root: Root directory of the project
+                project_name: Name of the project
+                description: Project description
+                structure: Project structure dictionary
+                instructions: Special instructions for GAIA
+
+            Returns:
+                Dictionary with update results
+            """
+            try:
+                from datetime import datetime
+
+                gaia_path = os.path.join(project_root, "GAIA.md")
+
+                # Security check
+                if not self.path_validator.is_path_allowed(gaia_path):
+                    return {
+                        "status": "error",
+                        "error": f"Access denied: {gaia_path} is not in allowed paths",
+                    }
+
+                # Start building content
+                content = "# GAIA.md\n\n"
+                content += "This file provides guidance to GAIA Code Agent when working with code in this project.\n\n"
+
+                if project_name:
+                    content += f"## Project: {project_name}\n\n"
+
+                if description:
+                    content += f"## Description\n{description}\n\n"
+
+                content += f"**Last Updated:** {datetime.now().isoformat()}\n\n"
+
+                if structure:
+                    content += "## Project Structure\n```\n"
+
+                    def format_structure(struct, indent=""):
+                        result = ""
+                        if isinstance(struct, dict):
+                            for key, value in struct.items():
+                                if isinstance(value, dict):
+                                    result += f"{indent}{key}\n"
+                                    result += format_structure(value, indent + "  ")
+                                else:
+                                    result += f"{indent}{key} - {value}\n"
+                        return result
+
+                    content += format_structure(structure)
+                    content += "```\n\n"
+
+                if instructions:
+                    content += f"## Special Instructions\n{instructions}\n\n"
+
+                # Add default sections
+                content += "## Development Guidelines\n"
+                content += "- Follow PEP 8 style guidelines\n"
+                content += "- Add docstrings to all functions and classes\n"
+                content += "- Include type hints where appropriate\n"
+                content += "- Write unit tests for new functionality\n\n"
+
+                content += "## Code Quality\n"
+                content += "- All code should pass pylint checks\n"
+                content += "- Use Black formatter for consistent style\n"
+                content += "- Ensure proper error handling\n\n"
+
+                # Write the file
+                with open(gaia_path, "w", encoding="utf-8") as f:
+                    f.write(content)
+
+                return {
+                    "status": "success",
+                    "file_path": gaia_path,
+                    "created": not os.path.exists(gaia_path),
+                    "message": f"GAIA.md {'created' if not os.path.exists(gaia_path) else 'updated'} at {gaia_path}",
+                }
+            except Exception as e:
+                return {"status": "error", "error": str(e)}
+
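A usage sketch for `update_gaia_md`; all values are illustrative. As written, the `created` flag and the message are computed after GAIA.md has already been written, so `os.path.exists(gaia_path)` is true at that point and the tool reports "updated" even on first creation.

```python
result = update_gaia_md(
    project_root="/tmp/my-app",                      # illustrative
    project_name="my-app",
    description="Demo project for the GAIA code agent.",
    structure={"src": {"app.py": "entry point"}, "tests": {}},
    instructions="Prefer small, typed functions.",
)
print(result["message"])
```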
+        @tool
+        def replace_function(
+            file_path: str,
+            function_name: str,
+            new_implementation: str,
+            backup: bool = True,
+        ) -> Dict[str, Any]:
+            """Replace a specific function in a Python file.
+
+            Args:
+                file_path: Path to the Python file
+                function_name: Name of the function to replace
+                new_implementation: New function implementation
+                backup: Whether to create backup
+
+            Returns:
+                Dictionary with replacement result
+            """
+            try:
+                # Security check
+                if not self.path_validator.is_path_allowed(file_path):
+                    return {
+                        "status": "error",
+                        "error": f"Access denied: {file_path} is not in allowed paths",
+                    }
+
+                if not os.path.exists(file_path):
+                    return {"status": "error", "error": f"File not found: {file_path}"}
+
+                with open(file_path, "r", encoding="utf-8") as f:
+                    content = f.read()
+
+                # Parse the file to find the function
+                try:
+                    tree = ast.parse(content)
+                except SyntaxError as e:
+                    return {"status": "error", "error": f"File has syntax errors: {e}"}
+
+                # Find the function node
+                function_node = None
+                for node in ast.walk(tree):
+                    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
+                        if node.name == function_name:
+                            function_node = node
+                            break
+
+                if not function_node:
+                    return {
+                        "status": "error",
+                        "error": f"Function '{function_name}' not found in file",
+                    }
+
+                # Get line range of the function
+                lines = content.splitlines(keepends=True)
+                start_line = function_node.lineno - 1
+
+                # Find end of function (simplified - finds next def or class at same indent)
+                end_line = len(lines)
+                indent_level = len(lines[start_line]) - len(lines[start_line].lstrip())
+
+                for i in range(start_line + 1, len(lines)):
+                    line = lines[i]
+                    if line.strip() and not line.lstrip().startswith("#"):
+                        current_indent = len(line) - len(line.lstrip())
+                        if current_indent <= indent_level and line.strip():
+                            if line.lstrip().startswith(
+                                ("def ", "class ", "async def ")
+                            ):
+                                end_line = i
+                                break
+
+                # Create backup if requested
+                if backup:
+                    backup_path = f"{file_path}.bak"
+                    with open(backup_path, "w", encoding="utf-8") as f:
+                        f.write(content)
+
+                # Replace the function
+                new_lines = (
+                    lines[:start_line] + [new_implementation + "\n"] + lines[end_line:]
+                )
+                modified_content = "".join(new_lines)
+
+                # Validate new content (using mixin method)
+                validation = self._validate_python_syntax(modified_content)
+                if not validation["is_valid"]:
+                    return {
+                        "status": "error",
+                        "error": "Replacement would result in invalid syntax",
+                        "syntax_errors": validation.get("errors", []),
+                    }
+
+                # Write the modified content
+                with open(file_path, "w", encoding="utf-8") as f:
+                    f.write(modified_content)
+
+                # Generate diff
+                diff = "\n".join(
+                    difflib.unified_diff(
+                        content.splitlines(keepends=True),
+                        modified_content.splitlines(keepends=True),
+                        fromfile=file_path,
+                        tofile=file_path,
+                    )
+                )
+
+                return {
+                    "status": "success",
+                    "file_path": file_path,
+                    "function_replaced": function_name,
+                    "backup_path": backup_path if backup else None,
+                    "diff": diff,
+                }
+            except Exception as e:
+                return {"status": "error", "error": str(e)}
+
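A usage sketch for `replace_function`; the file and function are illustrative. The end of the old function is located heuristically (the next `def`/`class` at the same or lower indent), and the new implementation must carry the indentation appropriate for its position in the file or the post-replacement syntax check will reject it.

```python
new_impl = (
    "def greet(name: str) -> str:\n"
    '    """Return a friendly greeting."""\n'
    '    return f"Hello, {name}!"\n'
)
result = replace_function(
    file_path="src/app.py",          # illustrative
    function_name="greet",
    new_implementation=new_impl,
    backup=True,
)
print(result.get("backup_path"), bool(result.get("diff")))
```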
+        # Return the list of registered tools for tracking
+        return [
+            "read_file",
+            "write_python_file",
+            "edit_python_file",
+            "search_code",
+            "generate_diff",
+            "write_markdown_file",
+            "update_gaia_md",
+            "replace_function",
+        ]