code-puppy 0.0.29__py3-none-any.whl → 0.0.31__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in a supported public registry. It is provided for informational purposes only.
- code_puppy/agent.py +50 -16
- code_puppy/agent_prompts.py +6 -2
- code_puppy/command_line/file_path_completion.py +65 -0
- code_puppy/command_line/meta_command_handler.py +72 -0
- code_puppy/command_line/model_picker_completion.py +92 -0
- code_puppy/command_line/prompt_toolkit_completion.py +94 -131
- code_puppy/command_line/utils.py +36 -0
- code_puppy/main.py +43 -16
- code_puppy/session_memory.py +71 -0
- code_puppy/tools/__init__.py +11 -4
- code_puppy/tools/code_map.py +86 -0
- code_puppy/tools/command_runner.py +61 -197
- code_puppy/tools/common.py +3 -1
- code_puppy/tools/file_modifications.py +179 -329
- code_puppy/tools/file_operations.py +193 -353
- code_puppy/tools/web_search.py +11 -28
- {code_puppy-0.0.29.dist-info → code_puppy-0.0.31.dist-info}/METADATA +2 -1
- code_puppy-0.0.31.dist-info/RECORD +27 -0
- code_puppy-0.0.29.dist-info/RECORD +0 -21
- {code_puppy-0.0.29.data → code_puppy-0.0.31.data}/data/code_puppy/models.json +0 -0
- {code_puppy-0.0.29.dist-info → code_puppy-0.0.31.dist-info}/WHEEL +0 -0
- {code_puppy-0.0.29.dist-info → code_puppy-0.0.31.dist-info}/entry_points.txt +0 -0
- {code_puppy-0.0.29.dist-info → code_puppy-0.0.31.dist-info}/licenses/LICENSE +0 -0
code_puppy/tools/file_operations.py
CHANGED
@@ -4,361 +4,201 @@ import fnmatch
 from typing import List, Dict, Any
 from code_puppy.tools.common import console
 from pydantic_ai import RunContext
-from code_puppy.agent import code_generation_agent
 
-
-# Constants for file operations
-IGNORE_PATTERNS = [
-… (old lines 12-28 not shown)
-]
-… (old lines 30-104 not shown)
-"
-… (old lines 106-120 not shown)
-                    "type": "file",
-                    "size": size,
-                    "full_path": file_path,
-                    "depth": depth,
-                }
-                results.append(file_info)
-                file_list.append(file_info)
-            except (FileNotFoundError, PermissionError):
-                # Skip files we can't access
+def register_file_operations_tools(agent):
+    # Constants for file operations
+    IGNORE_PATTERNS = [
+        "**/node_modules/**",
+        "**/.git/**",
+        "**/__pycache__/**",
+        "**/.DS_Store",
+        "**/.env",
+        "**/.venv/**",
+        "**/venv/**",
+        "**/.idea/**",
+        "**/.vscode/**",
+        "**/dist/**",
+        "**/build/**",
+        "**/*.pyc",
+        "**/*.pyo",
+        "**/*.pyd",
+        "**/*.so",
+        "**/*.dll",
+        "**/*.exe",
+    ]
+    def should_ignore_path(path: str) -> bool:
+        for pattern in IGNORE_PATTERNS:
+            if fnmatch.fnmatch(path, pattern):
+                return True
+        return False
+
+    @agent.tool
+    def list_files(context: RunContext, directory: str = ".", recursive: bool = True) -> List[Dict[str, Any]]:
+        results = []
+        directory = os.path.abspath(directory)
+        console.print("\n[bold white on blue] DIRECTORY LISTING [/bold white on blue]")
+        console.print(f"\U0001F4C2 [bold cyan]{directory}[/bold cyan] [dim](recursive={recursive})[/dim]")
+        console.print("[dim]" + "-" * 60 + "[/dim]")
+        if not os.path.exists(directory):
+            console.print(f"[bold red]Error:[/bold red] Directory '{directory}' does not exist")
+            console.print("[dim]" + "-" * 60 + "[/dim]\n")
+            return [{"error": f"Directory '{directory}' does not exist"}]
+        if not os.path.isdir(directory):
+            console.print(f"[bold red]Error:[/bold red] '{directory}' is not a directory")
+            console.print("[dim]" + "-" * 60 + "[/dim]\n")
+            return [{"error": f"'{directory}' is not a directory"}]
+        folder_structure = {}
+        file_list = []
+        for root, dirs, files in os.walk(directory):
+            dirs[:] = [d for d in dirs if not should_ignore_path(os.path.join(root, d))]
+            rel_path = os.path.relpath(root, directory)
+            depth = 0 if rel_path == "." else rel_path.count(os.sep) + 1
+            if rel_path == ".":
+                rel_path = ""
+            if rel_path:
+                dir_path = os.path.join(directory, rel_path)
+                results.append({"path": rel_path, "type": "directory", "size": 0, "full_path": dir_path, "depth": depth})
+                folder_structure[rel_path] = {"path": rel_path, "depth": depth, "full_path": dir_path}
+            for file in files:
+                file_path = os.path.join(root, file)
+                if should_ignore_path(file_path):
+                    continue
+                rel_file_path = os.path.join(rel_path, file) if rel_path else file
+                try:
+                    size = os.path.getsize(file_path)
+                    file_info = {"path": rel_file_path, "type": "file", "size": size, "full_path": file_path, "depth": depth}
+                    results.append(file_info)
+                    file_list.append(file_info)
+                except (FileNotFoundError, PermissionError):
+                    continue
+            if not recursive:
+                break
+        def format_size(size_bytes):
+            if size_bytes < 1024:
+                return f"{size_bytes} B"
+            elif size_bytes < 1024*1024:
+                return f"{size_bytes/1024:.1f} KB"
+            elif size_bytes < 1024*1024*1024:
+                return f"{size_bytes/(1024*1024):.1f} MB"
+            else:
+                return f"{size_bytes/(1024*1024*1024):.1f} GB"
+        def get_file_icon(file_path):
+            ext = os.path.splitext(file_path)[1].lower()
+            if ext in [".py", ".pyw"]:
+                return "\U0001F40D"
+            elif ext in [".js", ".jsx", ".ts", ".tsx"]:
+                return "\U0001F4DC"
+            elif ext in [".html", ".htm", ".xml"]:
+                return "\U0001F310"
+            elif ext in [".css", ".scss", ".sass"]:
+                return "\U0001F3A8"
+            elif ext in [".md", ".markdown", ".rst"]:
+                return "\U0001F4DD"
+            elif ext in [".json", ".yaml", ".yml", ".toml"]:
+                return "\u2699\ufe0f"
+            elif ext in [".jpg", ".jpeg", ".png", ".gif", ".svg", ".webp"]:
+                return "\U0001F5BC\ufe0f"
+            elif ext in [".mp3", ".wav", ".ogg", ".flac"]:
+                return "\U0001F3B5"
+            elif ext in [".mp4", ".avi", ".mov", ".webm"]:
+                return "\U0001F3AC"
+            elif ext in [".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx"]:
+                return "\U0001F4C4"
+            elif ext in [".zip", ".tar", ".gz", ".rar", ".7z"]:
+                return "\U0001F4E6"
+            elif ext in [".exe", ".dll", ".so", ".dylib"]:
+                return "\u26A1"
+            else:
+                return "\U0001F4C4"
+        if results:
+            files = sorted([f for f in results if f["type"] == "file"], key=lambda x: x["path"])
+            console.print(f"\U0001F4C1 [bold blue]{os.path.basename(directory) or directory}[/bold blue]")
+            all_items = sorted(results, key=lambda x: x["path"])
+            parent_dirs_with_content = set()
+            for i, item in enumerate(all_items):
+                if item["type"] == "directory" and not item["path"]:
                     continue
-… (old lines 131-143 not shown)
-            return f"{size_bytes / (1024 * 1024 * 1024):.1f} GB"
-
-    # Helper function to get file icon based on extension
-    def get_file_icon(file_path):
-        ext = os.path.splitext(file_path)[1].lower()
-        if ext in [".py", ".pyw"]:
-            return "🐍"  # Python
-        elif ext in [".js", ".jsx", ".ts", ".tsx"]:
-            return "📜"  # JavaScript/TypeScript
-        elif ext in [".html", ".htm", ".xml"]:
-            return "🌐"  # HTML/XML
-        elif ext in [".css", ".scss", ".sass"]:
-            return "🎨"  # CSS
-        elif ext in [".md", ".markdown", ".rst"]:
-            return "📝"  # Markdown/docs
-        elif ext in [".json", ".yaml", ".yml", ".toml"]:
-            return "⚙️"  # Config files
-        elif ext in [".jpg", ".jpeg", ".png", ".gif", ".svg", ".webp"]:
-            return "🖼️"  # Images
-        elif ext in [".mp3", ".wav", ".ogg", ".flac"]:
-            return "🎵"  # Audio
-        elif ext in [".mp4", ".avi", ".mov", ".webm"]:
-            return "🎬"  # Video
-        elif ext in [".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx"]:
-            return "📄"  # Documents
-        elif ext in [".zip", ".tar", ".gz", ".rar", ".7z"]:
-            return "📦"  # Archives
-        elif ext in [".exe", ".dll", ".so", ".dylib"]:
-            return "⚡"  # Executables
-        else:
-            return "📄"  # Default file icon
-
-    # Display tree structure
-    if results:
-        # Sort directories and files
-
-        files = sorted(
-            [f for f in results if f["type"] == "file"], key=lambda x: x["path"]
-        )
-
-        # First show directory itself
-        console.print(
-            f"📁 [bold blue]{os.path.basename(directory) or directory}[/bold blue]"
-        )
-
-        # After gathering all results
-        # Combine both directories and files, then sort
-        all_items = sorted(results, key=lambda x: x["path"])
-
-        parent_dirs_with_content = set()
-
-        for i, item in enumerate(all_items):
-            # Skip root directory
-            if item["type"] == "directory" and not item["path"]:
-                continue
-
-            # Get parent directories to track which ones have content
-            if os.sep in item["path"]:
-                parent_path = os.path.dirname(item["path"])
-                parent_dirs_with_content.add(parent_path)
-
-            # Calculate depth from path
-            depth = item["path"].count(os.sep) + 1 if item["path"] else 0
-
-            # Calculate prefix for tree structure
-            prefix = ""
-            for d in range(depth):
-                if d == depth - 1:
-                    prefix += "└── "
+                if os.sep in item["path"]:
+                    parent_path = os.path.dirname(item["path"])
+                    parent_dirs_with_content.add(parent_path)
+                depth = item["path"].count(os.sep) + 1 if item["path"] else 0
+                prefix = ""
+                for d in range(depth):
+                    if d == depth - 1:
+                        prefix += "\u2514\u2500\u2500 "
+                    else:
+                        prefix += "    "
+                name = os.path.basename(item["path"]) or item["path"]
+                if item["type"] == "directory":
+                    console.print(f"{prefix}\U0001F4C1 [bold blue]{name}/[/bold blue]")
                 else:
-… (old lines 214-218 not shown)
-                if item["type"] == "directory"
-… (old lines 220-237 not shown)
-            )
-    console.print("[dim]" + "-" * 60 + "[/dim]\n")
-
-    return results
-
-
-@code_generation_agent.tool
-def create_file(
-    context: RunContext, file_path: str, content: str = ""
-) -> Dict[str, Any]:
-    console.log(f"✨ Creating new file [bold green]{file_path}[/bold green]")
-    """Create a new file with optional content.
-
-    Args:
-        file_path: Path where the file should be created
-        content: Optional content to write to the file
-
-    Returns:
-        A dictionary with the result of the operation
-    """
-    file_path = os.path.abspath(file_path)
-
-    # Check if file already exists
-    if os.path.exists(file_path):
-        return {
-            "error": f"File '{file_path}' already exists. Use modify_file to edit it."
-        }
-
-    # Create parent directories if they don't exist
-    directory = os.path.dirname(file_path)
-    if directory and not os.path.exists(directory):
+                    icon = get_file_icon(item["path"])
+                    size_str = format_size(item["size"])
+                    console.print(f"{prefix}{icon} [green]{name}[/green] [dim]({size_str})[/dim]")
+        else:
+            console.print("[yellow]Directory is empty[/yellow]")
+        dir_count = sum(1 for item in results if item["type"] == "directory")
+        file_count = sum(1 for item in results if item["type"] == "file")
+        total_size = sum(item["size"] for item in results if item["type"] == "file")
+        console.print("\n[bold cyan]Summary:[/bold cyan]")
+        console.print(f"\U0001F4C1 [blue]{dir_count} directories[/blue], \U0001F4C4 [green]{file_count} files[/green] [dim]({format_size(total_size)} total)[/dim]")
+        console.print("[dim]" + "-" * 60 + "[/dim]\n")
+        return results
+
+    @agent.tool
+    def create_file(context: RunContext, file_path: str, content: str = "") -> Dict[str, Any]:
+        file_path = os.path.abspath(file_path)
+        if os.path.exists(file_path):
+            return {"error": f"File '{file_path}' already exists. Use replace_in_file or write_to_file to edit it."}
+        directory = os.path.dirname(file_path)
+        if directory and not os.path.exists(directory):
+            try:
+                os.makedirs(directory)
+            except Exception as e:
+                return {"error": f"Error creating directory '{directory}': {str(e)}"}
         try:
-
+            with open(file_path, "w", encoding="utf-8") as f:
+                console.print("[yellow]Writing to file:[/yellow]")
+                console.print(content)
+                f.write(content)
+            return {"success": True, "path": file_path, "message": f"File created at '{file_path}'", "content_length": len(content)}
         except Exception as e:
-        return {"error": f"Error creating
-… (old lines 273-281 not shown)
-"
-
-
-"
-… (old lines 286-290 not shown)
-@
-def
-… (old lines 293-310 not shown)
-        with open(file_path, "r", encoding="utf-8") as f:
-            content = f.read()
-
-        # Get file extension
-        _, ext = os.path.splitext(file_path)
-
-        return {
-            "content": content,
-            "path": file_path,
-            "extension": ext.lstrip("."),
-            "total_lines": len(content.splitlines()),
-        }
-    except UnicodeDecodeError:
-        # For binary files, return an error
-        return {"error": f"Cannot read '{file_path}' as text - it may be a binary file"}
-    except Exception as e:
-        return {"error": f"Error reading file '{file_path}': {str(e)}"}
-
-
-@code_generation_agent.tool
-def grep(
-    context: RunContext, search_string: str, directory: str = "."
-) -> List[Dict[str, Any]]:
-    """Recursively search for a string in files starting from a given directory.
-
-    Args:
-        search_string: The string to search for.
-        directory: The directory to start the search from.
-
-    Returns:
-        A list of dictionaries containing file paths and line numbers where matches occur.
-    """
-    matches = []
-    max_matches = 200
-    directory = os.path.abspath(directory)
-
-    for root, dirs, files in os.walk(directory):
-        for file in files:
-            file_path = os.path.join(root, file)
-            if should_ignore_path(file_path):
-                continue
-
-            try:
-                with open(file_path, "r", encoding="utf-8") as f:
-                    for line_number, line in enumerate(f, start=1):
-                        if search_string in line:
-                            matches.append({"file_path": file_path, "line_number": line_number})
-                        if len(matches) >= max_matches:
-                            return matches
-            except (FileNotFoundError, PermissionError, UnicodeDecodeError):
-                # Skip files that can't be accessed or are not text files
-                continue
-
-    return matches
+            return {"error": f"Error creating file '{file_path}': {str(e)}"}
+
+    @agent.tool
+    def read_file(context: RunContext, file_path: str) -> Dict[str, Any]:
+        file_path = os.path.abspath(file_path)
+        if not os.path.exists(file_path):
+            return {"error": f"File '{file_path}' does not exist"}
+        if not os.path.isfile(file_path):
+            return {"error": f"'{file_path}' is not a file"}
+        try:
+            with open(file_path, "r", encoding="utf-8") as f:
+                content = f.read()
+            _, ext = os.path.splitext(file_path)
+            return {"content": content, "path": file_path, "extension": ext.lstrip("."), "total_lines": len(content.splitlines())}
+        except UnicodeDecodeError:
+            return {"error": f"Cannot read '{file_path}' as text - it may be a binary file"}
+        except Exception as e:
+            return {"error": f"Error reading file '{file_path}': {str(e)}"}
+
+    @agent.tool
+    def grep(context: RunContext, search_string: str, directory: str = ".") -> List[Dict[str, Any]]:
+        matches = []
+        max_matches = 200
+        directory = os.path.abspath(directory)
+        for root, dirs, files in os.walk(directory):
+            for file in files:
+                file_path = os.path.join(root, file)
+                if should_ignore_path(file_path):
+                    continue
+                try:
+                    with open(file_path, "r", encoding="utf-8") as f:
+                        for line_number, line in enumerate(f, start=1):
+                            if search_string in line:
+                                matches.append({"file_path": file_path, "line_number": line_number})
+                                if len(matches) >= max_matches:
+                                    return matches
+                except (FileNotFoundError, PermissionError, UnicodeDecodeError):
+                    continue
+        return matches
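The substantive change in this file is structural rather than behavioral: in 0.0.29 each tool was a module-level function decorated with `@code_generation_agent.tool`, which forced the tools module to import the agent (`from code_puppy.agent import code_generation_agent`); in 0.0.31 the same functions are defined inside `register_file_operations_tools(agent)` and attached with `@agent.tool`, so the agent is passed in rather than imported. A minimal sketch of how that registration pattern is typically wired together; `build_agent` and the model string are illustrative assumptions, and only the `register_*` functions and the `@agent.tool` decorator come from the diff:

    # Illustrative wiring for the register_* pattern in 0.0.31 (not taken from the package).
    from pydantic_ai import Agent

    from code_puppy.tools.file_operations import register_file_operations_tools
    from code_puppy.tools.web_search import register_web_search_tools


    def build_agent(model: str = "openai:gpt-4o") -> Agent:
        agent = Agent(model)
        # Each tools module attaches its functions via @agent.tool inside the
        # register_* call, so no tool module imports the agent at import time.
        register_file_operations_tools(agent)
        register_web_search_tools(agent)
        return agent

Note also that `should_ignore_path` still matches with `fnmatch`, where `*` already crosses path separators and `**` has no special meaning, so these patterns behave like loose globs rather than gitignore rules; that may be what the new `pathspec` dependency in the METADATA diff below is intended to address.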
code_puppy/tools/web_search.py
CHANGED
@@ -1,32 +1,15 @@
-from code_puppy.agent import code_generation_agent
 from typing import Dict
 import requests
 from pydantic_ai import RunContext
 
-
-@
-def grab_json_from_url(context: RunContext, url: str) -> Dict:
-… (old lines 9-16 not shown)
-    Raises:
-        ValueError: If response content type is not application/json.
-    """
-    response = requests.get(url)
-    response.raise_for_status()
-
-    if response.headers.get('Content-Type') != 'application/json':
-        raise ValueError(f"Response from {{url}} is not of type application/json")
-
-    json_data = response.json()
-
-    # Limit to 1000 lines if the response is large
-    if isinstance(json_data, list) and len(json_data) > 1000:
-        return json_data[:1000]
-
-    return json_data
+def register_web_search_tools(agent):
+    @agent.tool
+    def grab_json_from_url(context: RunContext, url: str) -> Dict:
+        response = requests.get(url)
+        response.raise_for_status()
+        if response.headers.get('Content-Type') != 'application/json':
+            raise ValueError(f"Response from {url} is not of type application/json")
+        json_data = response.json()
+        if isinstance(json_data, list) and len(json_data) > 1000:
+            return json_data[:1000]
+        return json_data
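Apart from the registration wrapper, the tool body is unchanged except for one small fix: the 0.0.29 error message used doubled braces (`{{url}}`), so the URL was never interpolated, while 0.0.31 uses `{url}`. For reference, the behavior a caller sees from the rewritten tool, restated as a standalone function (the name `grab_json` is illustrative):

    import requests


    def grab_json(url: str):
        """Standalone restatement of the grab_json_from_url body shown above."""
        response = requests.get(url)
        response.raise_for_status()
        # Exact-match check: a header such as 'application/json; charset=utf-8'
        # would also trigger the ValueError.
        if response.headers.get("Content-Type") != "application/json":
            raise ValueError(f"Response from {url} is not of type application/json")
        json_data = response.json()
        # Large list payloads are truncated to the first 1000 items.
        if isinstance(json_data, list) and len(json_data) > 1000:
            return json_data[:1000]
        return json_data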
{code_puppy-0.0.29.dist-info → code_puppy-0.0.31.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: code-puppy
-Version: 0.0.29
+Version: 0.0.31
 Summary: Code generation agent
 Author: Michael Pfaffenberger
 License: MIT
@@ -17,6 +17,7 @@ Requires-Dist: bs4>=0.0.2
 Requires-Dist: httpx-limiter>=0.3.0
 Requires-Dist: httpx>=0.24.1
 Requires-Dist: logfire>=0.7.1
+Requires-Dist: pathspec>=0.11.0
 Requires-Dist: prompt-toolkit>=3.0.38
 Requires-Dist: pydantic-ai>=0.1.0
 Requires-Dist: pydantic>=2.4.0
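The metadata changes are just the version bump and one new runtime dependency, `pathspec>=0.11.0`. The diff does not show where it is used, but pathspec is the usual choice for gitignore-style path matching, which would line up with the ignore-pattern handling in the tools above; a small, hypothetical sketch of that kind of use:

    # Hypothetical use of the new pathspec dependency; the diff only adds the
    # requirement and does not show this code.
    import pathspec

    ignore_lines = ["node_modules/", ".git/", "__pycache__/", "*.pyc"]
    spec = pathspec.PathSpec.from_lines("gitwildmatch", ignore_lines)

    print(spec.match_file("src/app.py"))           # False
    print(spec.match_file("node_modules/x/y.js"))  # True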