code-puppy 0.0.30__py3-none-any.whl → 0.0.32__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -4,361 +4,201 @@ import fnmatch
  from typing import List, Dict, Any
  from code_puppy.tools.common import console
  from pydantic_ai import RunContext
- from code_puppy.agent import code_generation_agent

-
- # Constants for file operations
- IGNORE_PATTERNS = [
- "**/node_modules/**",
- "**/.git/**",
- "**/__pycache__/**",
- "**/.DS_Store",
- "**/.env",
- "**/.venv/**",
- "**/venv/**",
- "**/.idea/**",
- "**/.vscode/**",
- "**/dist/**",
- "**/build/**",
- "**/*.pyc",
- "**/*.pyo",
- "**/*.pyd",
- "**/*.so",
- "**/*.dll",
- "**/*.exe",
- ]
-
-
- def should_ignore_path(path: str) -> bool:
- """Check if the path should be ignored based on patterns."""
- for pattern in IGNORE_PATTERNS:
- if fnmatch.fnmatch(path, pattern):
- return True
- return False
-
-
- @code_generation_agent.tool
- def list_files(
- context: RunContext, directory: str = ".", recursive: bool = True
- ) -> List[Dict[str, Any]]:
- """Recursively list all files in a directory, ignoring common patterns.
-
- Args:
- directory: The directory to list files from. Defaults to current directory.
- recursive: Whether to search recursively. Defaults to True.
-
- Returns:
- A list of dictionaries with file information including path, size, and type.
- """
- results = []
- directory = os.path.abspath(directory)
-
- # Display directory listing header
- console.print("\n[bold white on blue] DIRECTORY LISTING [/bold white on blue]")
- console.print(
- f"📂 [bold cyan]{directory}[/bold cyan] [dim](recursive={recursive})[/dim]"
- )
- console.print("[dim]" + "-" * 60 + "[/dim]")
-
- if not os.path.exists(directory):
- console.print(
- f"[bold red]Error:[/bold red] Directory '{directory}' does not exist"
- )
- console.print("[dim]" + "-" * 60 + "[/dim]\n")
- return [{"error": f"Directory '{directory}' does not exist"}]
-
- if not os.path.isdir(directory):
- console.print(f"[bold red]Error:[/bold red] '{directory}' is not a directory")
- console.print("[dim]" + "-" * 60 + "[/dim]\n")
- return [{"error": f"'{directory}' is not a directory"}]
-
- # Track folders and files at each level for tree display
- folder_structure = {}
- file_list = []
-
- for root, dirs, files in os.walk(directory):
- # Skip ignored directories
- dirs[:] = [d for d in dirs if not should_ignore_path(os.path.join(root, d))]
-
- rel_path = os.path.relpath(root, directory)
- depth = 0 if rel_path == "." else rel_path.count(os.sep) + 1
-
- if rel_path == ".":
- rel_path = ""
-
- # Add directory entry to results
- if rel_path:
- dir_path = os.path.join(directory, rel_path)
- results.append(
- {
- "path": rel_path,
- "type": "directory",
- "size": 0,
- "full_path": dir_path,
- "depth": depth,
- }
- )
-
- # Add to folder structure for display
- folder_structure[rel_path] = {
- "path": rel_path,
- "depth": depth,
- "full_path": dir_path,
- }
-
- # Add file entries
- for file in files:
- file_path = os.path.join(root, file)
- if should_ignore_path(file_path):
- continue
-
- rel_file_path = os.path.join(rel_path, file) if rel_path else file
-
- try:
- size = os.path.getsize(file_path)
- file_info = {
- "path": rel_file_path,
- "type": "file",
- "size": size,
- "full_path": file_path,
- "depth": depth,
- }
- results.append(file_info)
- file_list.append(file_info)
- except (FileNotFoundError, PermissionError):
- # Skip files we can't access
+ def register_file_operations_tools(agent):
+ # Constants for file operations
+ IGNORE_PATTERNS = [
+ "**/node_modules/**",
+ "**/.git/**",
+ "**/__pycache__/**",
+ "**/.DS_Store",
+ "**/.env",
+ "**/.venv/**",
+ "**/venv/**",
+ "**/.idea/**",
+ "**/.vscode/**",
+ "**/dist/**",
+ "**/build/**",
+ "**/*.pyc",
+ "**/*.pyo",
+ "**/*.pyd",
+ "**/*.so",
+ "**/*.dll",
+ "**/*.exe",
+ ]
+ def should_ignore_path(path: str) -> bool:
+ for pattern in IGNORE_PATTERNS:
+ if fnmatch.fnmatch(path, pattern):
+ return True
+ return False
+
+ @agent.tool
+ def list_files(context: RunContext, directory: str = ".", recursive: bool = True) -> List[Dict[str, Any]]:
+ results = []
+ directory = os.path.abspath(directory)
+ console.print("\n[bold white on blue] DIRECTORY LISTING [/bold white on blue]")
+ console.print(f"\U0001F4C2 [bold cyan]{directory}[/bold cyan] [dim](recursive={recursive})[/dim]")
+ console.print("[dim]" + "-" * 60 + "[/dim]")
+ if not os.path.exists(directory):
+ console.print(f"[bold red]Error:[/bold red] Directory '{directory}' does not exist")
+ console.print("[dim]" + "-" * 60 + "[/dim]\n")
+ return [{"error": f"Directory '{directory}' does not exist"}]
+ if not os.path.isdir(directory):
+ console.print(f"[bold red]Error:[/bold red] '{directory}' is not a directory")
+ console.print("[dim]" + "-" * 60 + "[/dim]\n")
+ return [{"error": f"'{directory}' is not a directory"}]
+ folder_structure = {}
+ file_list = []
+ for root, dirs, files in os.walk(directory):
+ dirs[:] = [d for d in dirs if not should_ignore_path(os.path.join(root, d))]
+ rel_path = os.path.relpath(root, directory)
+ depth = 0 if rel_path == "." else rel_path.count(os.sep) + 1
+ if rel_path == ".":
+ rel_path = ""
+ if rel_path:
+ dir_path = os.path.join(directory, rel_path)
+ results.append({"path": rel_path, "type": "directory", "size": 0, "full_path": dir_path, "depth": depth})
+ folder_structure[rel_path] = {"path": rel_path, "depth": depth, "full_path": dir_path}
+ for file in files:
+ file_path = os.path.join(root, file)
+ if should_ignore_path(file_path):
+ continue
+ rel_file_path = os.path.join(rel_path, file) if rel_path else file
+ try:
+ size = os.path.getsize(file_path)
+ file_info = {"path": rel_file_path, "type": "file", "size": size, "full_path": file_path, "depth": depth}
+ results.append(file_info)
+ file_list.append(file_info)
+ except (FileNotFoundError, PermissionError):
+ continue
+ if not recursive:
+ break
+ def format_size(size_bytes):
+ if size_bytes < 1024:
+ return f"{size_bytes} B"
+ elif size_bytes < 1024*1024:
+ return f"{size_bytes/1024:.1f} KB"
+ elif size_bytes < 1024*1024*1024:
+ return f"{size_bytes/(1024*1024):.1f} MB"
+ else:
+ return f"{size_bytes/(1024*1024*1024):.1f} GB"
+ def get_file_icon(file_path):
+ ext = os.path.splitext(file_path)[1].lower()
+ if ext in [".py", ".pyw"]:
+ return "\U0001F40D"
+ elif ext in [".js", ".jsx", ".ts", ".tsx"]:
+ return "\U0001F4DC"
+ elif ext in [".html", ".htm", ".xml"]:
+ return "\U0001F310"
+ elif ext in [".css", ".scss", ".sass"]:
+ return "\U0001F3A8"
+ elif ext in [".md", ".markdown", ".rst"]:
+ return "\U0001F4DD"
+ elif ext in [".json", ".yaml", ".yml", ".toml"]:
+ return "\u2699\ufe0f"
+ elif ext in [".jpg", ".jpeg", ".png", ".gif", ".svg", ".webp"]:
+ return "\U0001F5BC\ufe0f"
+ elif ext in [".mp3", ".wav", ".ogg", ".flac"]:
+ return "\U0001F3B5"
+ elif ext in [".mp4", ".avi", ".mov", ".webm"]:
+ return "\U0001F3AC"
+ elif ext in [".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx"]:
+ return "\U0001F4C4"
+ elif ext in [".zip", ".tar", ".gz", ".rar", ".7z"]:
+ return "\U0001F4E6"
+ elif ext in [".exe", ".dll", ".so", ".dylib"]:
+ return "\u26A1"
+ else:
+ return "\U0001F4C4"
+ if results:
+ files = sorted([f for f in results if f["type"] == "file"], key=lambda x: x["path"])
+ console.print(f"\U0001F4C1 [bold blue]{os.path.basename(directory) or directory}[/bold blue]")
+ all_items = sorted(results, key=lambda x: x["path"])
+ parent_dirs_with_content = set()
+ for i, item in enumerate(all_items):
+ if item["type"] == "directory" and not item["path"]:
  continue
-
- if not recursive:
- break
-
- # Helper function to format file size
- def format_size(size_bytes):
- if size_bytes < 1024:
- return f"{size_bytes} B"
- elif size_bytes < 1024 * 1024:
- return f"{size_bytes / 1024:.1f} KB"
- elif size_bytes < 1024 * 1024 * 1024:
- return f"{size_bytes / (1024 * 1024):.1f} MB"
- else:
- return f"{size_bytes / (1024 * 1024 * 1024):.1f} GB"
-
- # Helper function to get file icon based on extension
- def get_file_icon(file_path):
- ext = os.path.splitext(file_path)[1].lower()
- if ext in [".py", ".pyw"]:
- return "🐍" # Python
- elif ext in [".js", ".jsx", ".ts", ".tsx"]:
- return "📜" # JavaScript/TypeScript
- elif ext in [".html", ".htm", ".xml"]:
- return "🌐" # HTML/XML
- elif ext in [".css", ".scss", ".sass"]:
- return "🎨" # CSS
- elif ext in [".md", ".markdown", ".rst"]:
- return "📝" # Markdown/docs
- elif ext in [".json", ".yaml", ".yml", ".toml"]:
- return "⚙️" # Config files
- elif ext in [".jpg", ".jpeg", ".png", ".gif", ".svg", ".webp"]:
- return "🖼️" # Images
- elif ext in [".mp3", ".wav", ".ogg", ".flac"]:
- return "🎵" # Audio
- elif ext in [".mp4", ".avi", ".mov", ".webm"]:
- return "🎬" # Video
- elif ext in [".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx"]:
- return "📄" # Documents
- elif ext in [".zip", ".tar", ".gz", ".rar", ".7z"]:
- return "📦" # Archives
- elif ext in [".exe", ".dll", ".so", ".dylib"]:
- return "⚡" # Executables
- else:
- return "📄" # Default file icon
-
- # Display tree structure
- if results:
- # Sort directories and files
-
- files = sorted(
- [f for f in results if f["type"] == "file"], key=lambda x: x["path"]
- )
-
- # First show directory itself
- console.print(
- f"📁 [bold blue]{os.path.basename(directory) or directory}[/bold blue]"
- )
-
- # After gathering all results
- # Combine both directories and files, then sort
- all_items = sorted(results, key=lambda x: x["path"])
-
- parent_dirs_with_content = set()
-
- for i, item in enumerate(all_items):
- # Skip root directory
- if item["type"] == "directory" and not item["path"]:
- continue
-
- # Get parent directories to track which ones have content
- if os.sep in item["path"]:
- parent_path = os.path.dirname(item["path"])
- parent_dirs_with_content.add(parent_path)
-
- # Calculate depth from path
- depth = item["path"].count(os.sep) + 1 if item["path"] else 0
-
- # Calculate prefix for tree structure
- prefix = ""
- for d in range(depth):
- if d == depth - 1:
- prefix += "└── "
+ if os.sep in item["path"]:
+ parent_path = os.path.dirname(item["path"])
+ parent_dirs_with_content.add(parent_path)
+ depth = item["path"].count(os.sep) + 1 if item["path"] else 0
+ prefix = ""
+ for d in range(depth):
+ if d == depth - 1:
+ prefix += "\u2514\u2500\u2500 "
+ else:
+ prefix += " "
+ name = os.path.basename(item["path"]) or item["path"]
+ if item["type"] == "directory":
+ console.print(f"{prefix}\U0001F4C1 [bold blue]{name}/[/bold blue]")
  else:
- prefix += " "
-
- # Display item with appropriate icon and color
- name = os.path.basename(item["path"]) or item["path"]
-
- if item["type"] == "directory":
- console.print(f"{prefix}📁 [bold blue]{name}/[/bold blue]")
- else: # file
- icon = get_file_icon(item["path"])
- size_str = format_size(item["size"])
- console.print(
- f"{prefix}{icon} [green]{name}[/green] [dim]({size_str})[/dim]"
- )
- else:
- console.print("[yellow]Directory is empty[/yellow]")
-
- # Display summary
- dir_count = sum(1 for item in results if item["type"] == "directory")
- file_count = sum(1 for item in results if item["type"] == "file")
- total_size = sum(item["size"] for item in results if item["type"] == "file")
-
- console.print("\n[bold cyan]Summary:[/bold cyan]")
- console.print(
- f"📁 [blue]{dir_count} directories[/blue], 📄 [green]{file_count} files[/green] [dim]({format_size(total_size)} total)[/dim]"
- )
- console.print("[dim]" + "-" * 60 + "[/dim]\n")
-
- return results
-
-
- @code_generation_agent.tool
- def create_file(
- context: RunContext, file_path: str, content: str = ""
- ) -> Dict[str, Any]:
- console.log(f"✨ Creating new file [bold green]{file_path}[/bold green]")
- """Create a new file with optional content.
-
- Args:
- file_path: Path where the file should be created
- content: Optional content to write to the file
-
- Returns:
- A dictionary with the result of the operation
- """
- file_path = os.path.abspath(file_path)
-
- # Check if file already exists
- if os.path.exists(file_path):
- return {
- "error": f"File '{file_path}' already exists. Use modify_file to edit it."
- }
-
- # Create parent directories if they don't exist
- directory = os.path.dirname(file_path)
- if directory and not os.path.exists(directory):
+ icon = get_file_icon(item["path"])
+ size_str = format_size(item["size"])
+ console.print(f"{prefix}{icon} [green]{name}[/green] [dim]({size_str})[/dim]")
+ else:
+ console.print("[yellow]Directory is empty[/yellow]")
+ dir_count = sum(1 for item in results if item["type"] == "directory")
+ file_count = sum(1 for item in results if item["type"] == "file")
+ total_size = sum(item["size"] for item in results if item["type"] == "file")
+ console.print("\n[bold cyan]Summary:[/bold cyan]")
+ console.print(f"\U0001F4C1 [blue]{dir_count} directories[/blue], \U0001F4C4 [green]{file_count} files[/green] [dim]({format_size(total_size)} total)[/dim]")
+ console.print("[dim]" + "-" * 60 + "[/dim]\n")
+ return results
+
+ @agent.tool
+ def create_file(context: RunContext, file_path: str, content: str = "") -> Dict[str, Any]:
+ file_path = os.path.abspath(file_path)
+ if os.path.exists(file_path):
+ return {"error": f"File '{file_path}' already exists. Use replace_in_file or write_to_file to edit it."}
+ directory = os.path.dirname(file_path)
+ if directory and not os.path.exists(directory):
+ try:
+ os.makedirs(directory)
+ except Exception as e:
+ return {"error": f"Error creating directory '{directory}': {str(e)}"}
  try:
- os.makedirs(directory)
+ with open(file_path, "w", encoding="utf-8") as f:
+ console.print("[yellow]Writing to file:[/yellow]")
+ console.print(content)
+ f.write(content)
+ return {"success": True, "path": file_path, "message": f"File created at '{file_path}'", "content_length": len(content)}
  except Exception as e:
- return {"error": f"Error creating directory '{directory}': {str(e)}"}
-
- # Create the file
- try:
- with open(file_path, "w", encoding="utf-8") as f:
- console.print("[yellow]Writing to file:[/yellow]")
- console.print(content)
- f.write(content)
-
- return {
- "success": True,
- "path": file_path,
- "message": f"File created at '{file_path}'",
- "content_length": len(content),
- }
- except Exception as e:
- return {"error": f"Error creating file '{file_path}': {str(e)}"}
-
-
- @code_generation_agent.tool
- def read_file(context: RunContext, file_path: str) -> Dict[str, Any]:
- console.log(f"📄 Reading [bold cyan]{file_path}[/bold cyan]")
- """Read the contents of a file.
-
- Args:
- file_path: Path to the file to read
-
- Returns:
- A dictionary with the file contents and metadata.
- """
- file_path = os.path.abspath(file_path)
-
- if not os.path.exists(file_path):
- return {"error": f"File '{file_path}' does not exist"}
-
- if not os.path.isfile(file_path):
- return {"error": f"'{file_path}' is not a file"}
-
- try:
- with open(file_path, "r", encoding="utf-8") as f:
- content = f.read()
-
- # Get file extension
- _, ext = os.path.splitext(file_path)
-
- return {
- "content": content,
- "path": file_path,
- "extension": ext.lstrip("."),
- "total_lines": len(content.splitlines()),
- }
- except UnicodeDecodeError:
- # For binary files, return an error
- return {"error": f"Cannot read '{file_path}' as text - it may be a binary file"}
- except Exception as e:
- return {"error": f"Error reading file '{file_path}': {str(e)}"}
-
-
- @code_generation_agent.tool
- def grep(
- context: RunContext, search_string: str, directory: str = "."
- ) -> List[Dict[str, Any]]:
- """Recursively search for a string in files starting from a given directory.
-
- Args:
- search_string: The string to search for.
- directory: The directory to start the search from.
-
- Returns:
- A list of dictionaries containing file paths and line numbers where matches occur.
- """
- matches = []
- max_matches = 200
- directory = os.path.abspath(directory)
-
- for root, dirs, files in os.walk(directory):
- for file in files:
- file_path = os.path.join(root, file)
- if should_ignore_path(file_path):
- continue
-
- try:
- with open(file_path, "r", encoding="utf-8") as f:
- for line_number, line in enumerate(f, start=1):
- if search_string in line:
- matches.append({"file_path": file_path, "line_number": line_number})
- if len(matches) >= max_matches:
- return matches
- except (FileNotFoundError, PermissionError, UnicodeDecodeError):
- # Skip files that can't be accessed or are not text files
- continue
-
- return matches
+ return {"error": f"Error creating file '{file_path}': {str(e)}"}
+
+ @agent.tool
+ def read_file(context: RunContext, file_path: str) -> Dict[str, Any]:
+ file_path = os.path.abspath(file_path)
+ if not os.path.exists(file_path):
+ return {"error": f"File '{file_path}' does not exist"}
+ if not os.path.isfile(file_path):
+ return {"error": f"'{file_path}' is not a file"}
+ try:
+ with open(file_path, "r", encoding="utf-8") as f:
+ content = f.read()
+ _, ext = os.path.splitext(file_path)
+ return {"content": content, "path": file_path, "extension": ext.lstrip("."), "total_lines": len(content.splitlines())}
+ except UnicodeDecodeError:
+ return {"error": f"Cannot read '{file_path}' as text - it may be a binary file"}
+ except Exception as e:
+ return {"error": f"Error reading file '{file_path}': {str(e)}"}
+
+ @agent.tool
+ def grep(context: RunContext, search_string: str, directory: str = ".") -> List[Dict[str, Any]]:
+ matches = []
+ max_matches = 200
+ directory = os.path.abspath(directory)
+ for root, dirs, files in os.walk(directory):
+ for file in files:
+ file_path = os.path.join(root, file)
+ if should_ignore_path(file_path):
+ continue
+ try:
+ with open(file_path, "r", encoding="utf-8") as f:
+ for line_number, line in enumerate(f, start=1):
+ if search_string in line:
+ matches.append({"file_path": file_path, "line_number": line_number})
+ if len(matches) >= max_matches:
+ return matches
+ except (FileNotFoundError, PermissionError, UnicodeDecodeError):
+ continue
+ return matches
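
The hunk above is the heart of the 0.0.30 → 0.0.32 change: instead of importing a module-level `code_generation_agent` and decorating tools at import time, the file-operations module now exposes `register_file_operations_tools(agent)`, and the `@agent.tool` decorators run against whatever agent the caller passes in. A minimal usage sketch follows; the import path and model string are assumptions for illustration, since the diff does not show file names or the calling code.

    from pydantic_ai import Agent

    # Assumed module path -- the diff does not name the file being patched.
    from code_puppy.tools.file_operations import register_file_operations_tools

    agent = Agent("openai:gpt-4o")         # any pydantic_ai model identifier
    register_file_operations_tools(agent)  # attaches list_files, create_file, read_file and grep to this agent

One practical effect is that the tool module no longer needs to import `code_puppy.agent`, which removes a potential circular import and lets the same tool set be registered on more than one agent.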
@@ -1,32 +1,15 @@
- from code_puppy.agent import code_generation_agent
  from typing import Dict
  import requests
  from pydantic_ai import RunContext

-
- @code_generation_agent.tool
- def grab_json_from_url(context: RunContext, url: str) -> Dict:
- """Grab JSON from a URL if the response is of type application/json.
-
- Args:
- url: The URL to grab the JSON from.
-
- Returns:
- Parsed JSON data if successful.
-
- Raises:
- ValueError: If response content type is not application/json.
- """
- response = requests.get(url)
- response.raise_for_status()
-
- if response.headers.get('Content-Type') != 'application/json':
- raise ValueError(f"Response from {{url}} is not of type application/json")
-
- json_data = response.json()
-
- # Limit to 1000 lines if the response is large
- if isinstance(json_data, list) and len(json_data) > 1000:
- return json_data[:1000]
-
- return json_data
+ def register_web_search_tools(agent):
+ @agent.tool
+ def grab_json_from_url(context: RunContext, url: str) -> Dict:
+ response = requests.get(url)
+ response.raise_for_status()
+ if response.headers.get('Content-Type') != 'application/json':
+ raise ValueError(f"Response from {url} is not of type application/json")
+ json_data = response.json()
+ if isinstance(json_data, list) and len(json_data) > 1000:
+ return json_data[:1000]
+ return json_data
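
The web tool gets the same treatment via `register_web_search_tools(agent)`. The `{{url}}` escaping in the old f-string meant the error message printed a literal `{url}`; 0.0.32 interpolates the actual URL. Behaviour otherwise carries over from 0.0.30: the `Content-Type` header must compare equal to `application/json` exactly, so a value like `application/json; charset=utf-8` would still raise `ValueError`, and list responses are capped at their first 1000 items. A small sketch of that logic in isolation (the URL is illustrative):

    import requests

    resp = requests.get("https://api.example.com/items")  # illustrative URL
    resp.raise_for_status()
    # Exact string comparison, as in the tool: a charset suffix would fail this check.
    if resp.headers.get("Content-Type") != "application/json":
        raise ValueError("Response is not of type application/json")
    data = resp.json()
    if isinstance(data, list) and len(data) > 1000:
        data = data[:1000]  # cap very large list payloads, mirroring the tool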
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: code-puppy
- Version: 0.0.30
+ Version: 0.0.32
  Summary: Code generation agent
  Author: Michael Pfaffenberger
  License: MIT
@@ -17,6 +17,7 @@ Requires-Dist: bs4>=0.0.2
  Requires-Dist: httpx-limiter>=0.3.0
  Requires-Dist: httpx>=0.24.1
  Requires-Dist: logfire>=0.7.1
+ Requires-Dist: pathspec>=0.11.0
  Requires-Dist: prompt-toolkit>=3.0.38
  Requires-Dist: pydantic-ai>=0.1.0
  Requires-Dist: pydantic>=2.4.0
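
The only dependency change is the new `pathspec>=0.11.0` requirement. The diff shown here does not include the code that uses it, but pathspec is the usual third-party choice for gitignore-style matching, which the fnmatch-based `IGNORE_PATTERNS` list above only approximates. The sketch below shows the kind of usage the dependency enables; treat it as speculative rather than as code-puppy's actual implementation.

    import pathspec

    # .gitignore-style patterns; in practice these might be read from a project's .gitignore.
    spec = pathspec.PathSpec.from_lines("gitwildmatch", ["node_modules/", ".git/", "*.pyc"])

    print(spec.match_file("build/lib/module.pyc"))  # True  ("*.pyc" matches at any depth)
    print(spec.match_file("src/main.py"))           # False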