aloop-0.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. agent/__init__.py +0 -0
  2. agent/agent.py +182 -0
  3. agent/base.py +406 -0
  4. agent/context.py +126 -0
  5. agent/prompts/__init__.py +1 -0
  6. agent/todo.py +149 -0
  7. agent/tool_executor.py +54 -0
  8. agent/verification.py +135 -0
  9. aloop-0.1.1.dist-info/METADATA +252 -0
  10. aloop-0.1.1.dist-info/RECORD +66 -0
  11. aloop-0.1.1.dist-info/WHEEL +5 -0
  12. aloop-0.1.1.dist-info/entry_points.txt +2 -0
  13. aloop-0.1.1.dist-info/licenses/LICENSE +21 -0
  14. aloop-0.1.1.dist-info/top_level.txt +9 -0
  15. cli.py +19 -0
  16. config.py +146 -0
  17. interactive.py +865 -0
  18. llm/__init__.py +51 -0
  19. llm/base.py +26 -0
  20. llm/compat.py +226 -0
  21. llm/content_utils.py +309 -0
  22. llm/litellm_adapter.py +450 -0
  23. llm/message_types.py +245 -0
  24. llm/model_manager.py +265 -0
  25. llm/retry.py +95 -0
  26. main.py +246 -0
  27. memory/__init__.py +20 -0
  28. memory/compressor.py +554 -0
  29. memory/manager.py +538 -0
  30. memory/serialization.py +82 -0
  31. memory/short_term.py +88 -0
  32. memory/store/__init__.py +6 -0
  33. memory/store/memory_store.py +100 -0
  34. memory/store/yaml_file_memory_store.py +414 -0
  35. memory/token_tracker.py +203 -0
  36. memory/types.py +51 -0
  37. tools/__init__.py +6 -0
  38. tools/advanced_file_ops.py +557 -0
  39. tools/base.py +51 -0
  40. tools/calculator.py +50 -0
  41. tools/code_navigator.py +975 -0
  42. tools/explore.py +254 -0
  43. tools/file_ops.py +150 -0
  44. tools/git_tools.py +791 -0
  45. tools/notify.py +69 -0
  46. tools/parallel_execute.py +420 -0
  47. tools/session_manager.py +205 -0
  48. tools/shell.py +147 -0
  49. tools/shell_background.py +470 -0
  50. tools/smart_edit.py +491 -0
  51. tools/todo.py +130 -0
  52. tools/web_fetch.py +673 -0
  53. tools/web_search.py +61 -0
  54. utils/__init__.py +15 -0
  55. utils/logger.py +105 -0
  56. utils/model_pricing.py +49 -0
  57. utils/runtime.py +75 -0
  58. utils/terminal_ui.py +422 -0
  59. utils/tui/__init__.py +39 -0
  60. utils/tui/command_registry.py +49 -0
  61. utils/tui/components.py +306 -0
  62. utils/tui/input_handler.py +393 -0
  63. utils/tui/model_ui.py +204 -0
  64. utils/tui/progress.py +292 -0
  65. utils/tui/status_bar.py +178 -0
  66. utils/tui/theme.py +165 -0
tools/advanced_file_ops.py ADDED
@@ -0,0 +1,557 @@
+"""Advanced file operation tools inspired by Claude Code."""
+
+import asyncio
+import contextlib
+import re
+import shutil
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+import aiofiles
+import aiofiles.os
+
+from tools.base import BaseTool
+
+
+class GlobTool(BaseTool):
+    """Fast file pattern matching tool."""
+
+    @property
+    def name(self) -> str:
+        return "glob_files"
+
+    @property
+    def description(self) -> str:
+        return """Fast file pattern matching tool.
+
+        Use this to find files by patterns like:
+        - "**/*.py" - all Python files recursively
+        - "src/**/*.js" - JavaScript files in src/
+        - "*.txt" - text files in current directory
+
+        Much faster than reading directories recursively.
+        Returns sorted list of matching file paths."""
+
+    @property
+    def parameters(self) -> Dict[str, Any]:
+        return {
+            "pattern": {
+                "type": "string",
+                "description": "Glob pattern to match files (e.g., '**/*.py', 'src/**/*.js')",
+            },
+            "path": {
+                "type": "string",
+                "description": "Base directory to search in (default: current directory)",
+            },
+        }
+
+    async def execute(self, pattern: str, path: str = ".") -> str:
+        """Find files matching glob pattern."""
+        try:
+            base_path = Path(path)
+            if not await aiofiles.os.path.exists(str(base_path)):
+                return f"Error: Path does not exist: {path}"
+
+            matches = await asyncio.to_thread(lambda: sorted(base_path.glob(pattern)))
+            if not matches:
+                return f"No files found matching pattern: {pattern} in {path}"
+
+            # Limit to 100 results to avoid overwhelming output
+            if len(matches) > 100:
+                result_lines = [str(m) for m in matches[:100]]
+                result_lines.append(f"\n... and {len(matches) - 100} more files")
+                return "\n".join(result_lines)
+
+            return "\n".join(str(m) for m in matches)
+        except Exception as e:
+            return f"Error executing glob: {str(e)}"
+
+
+class GrepTool(BaseTool):
+    """Search file contents using regex patterns."""
+
+    def __init__(self):
+        """Initialize GrepTool, checking for ripgrep availability."""
+        self._rg_path = shutil.which("rg")
+        self._has_ripgrep = self._rg_path is not None
+
+    @property
+    def name(self) -> str:
+        return "grep_content"
+
+    @property
+    def description(self) -> str:
+        return """Search file contents using regex patterns.
+
+        Output modes:
+        - files_only: Just list files containing matches (default)
+        - with_context: Show matching lines with line numbers
+        - count: Count matches per file
+
+        Options:
+        - file_pattern: Glob pattern to filter files (e.g., '**/*.py', 'src/**/*.js')
+        - exclude_patterns: Glob patterns to exclude (e.g., ['**/*.pyc', 'node_modules/**'])
+        - context_lines: Show N lines before and after matches (with_context mode)
+        - multiline: Enable multiline pattern matching
+
+        Examples:
+        - Find functions: pattern="def\\s+\\w+", file_pattern="**/*.py"
+        - Search imports: pattern="^import\\s+", file_pattern="src/**/*.py"
+        - Find TODOs: pattern="TODO|FIXME", exclude_patterns=["tests/**"]
+        - Count prints: pattern="print\\(", mode="count"
+        - With context: pattern="ERROR", mode="with_context", context_lines=2"""
+
+    @property
+    def parameters(self) -> Dict[str, Any]:
+        return {
+            "pattern": {"type": "string", "description": "Regex pattern to search for"},
+            "path": {
+                "type": "string",
+                "description": "Directory to search in (default: current directory)",
+            },
+            "mode": {
+                "type": "string",
+                "description": "Output mode: files_only, with_context, or count (default: files_only)",
+            },
+            "case_sensitive": {
+                "type": "boolean",
+                "description": "Whether search is case sensitive (default: true)",
+            },
+            "file_pattern": {
+                "type": "string",
+                "description": "Optional glob pattern to filter files before content search (e.g., '**/*.py', 'src/**/*.js')",
+            },
+            "exclude_patterns": {
+                "type": "array",
+                "items": {"type": "string"},
+                "description": "Optional list of glob patterns to exclude (e.g., ['**/*.pyc', 'node_modules/**'])",
+            },
+            "max_matches_per_file": {
+                "type": "integer",
+                "description": "Maximum matches to show per file in with_context mode (default: 5)",
+            },
+            "context_lines": {
+                "type": "integer",
+                "description": "Number of lines to show before and after each match (default: 0)",
+            },
+            "multiline": {
+                "type": "boolean",
+                "description": "Enable multiline pattern matching (default: false)",
+            },
+            "max_count": {
+                "type": "integer",
+                "description": "Maximum total number of results to return (default: 50)",
+            },
+        }
+
+    async def execute(
+        self,
+        pattern: str,
+        path: str = ".",
+        mode: str = "files_only",
+        case_sensitive: bool = True,
+        file_pattern: str = None,
+        exclude_patterns: list = None,
+        max_matches_per_file: int = 5,
+        context_lines: int = 0,
+        multiline: bool = False,
+        max_count: int = 50,
+        **kwargs,
+    ) -> str:
+        """Search for pattern in files with optional file filtering."""
+        base_path = Path(path)
+        if not await aiofiles.os.path.exists(str(base_path)):
+            return f"Error: Path does not exist: {path}"
+
+        # Use ripgrep if available
+        if self._has_ripgrep:
+            return await self._execute_ripgrep(
+                pattern=pattern,
+                path=path,
+                mode=mode,
+                case_sensitive=case_sensitive,
+                file_pattern=file_pattern,
+                exclude_patterns=exclude_patterns,
+                max_matches_per_file=max_matches_per_file,
+                context_lines=context_lines,
+                multiline=multiline,
+                max_count=max_count,
+            )
+        else:
+            return await self._execute_python_fallback(
+                pattern=pattern,
+                path=path,
+                mode=mode,
+                case_sensitive=case_sensitive,
+                file_pattern=file_pattern,
+                exclude_patterns=exclude_patterns,
+                max_matches_per_file=max_matches_per_file,
+                max_count=max_count,
+            )
+
+    async def _execute_ripgrep(
+        self,
+        pattern: str,
+        path: str,
+        mode: str,
+        case_sensitive: bool,
+        file_pattern: Optional[str],
+        exclude_patterns: Optional[List[str]],
+        max_matches_per_file: int,
+        context_lines: int,
+        multiline: bool,
+        max_count: int,
+    ) -> str:
+        """Execute search using ripgrep."""
+        cmd = [self._rg_path]
+
+        # Output mode
+        if mode == "files_only":
+            cmd.append("-l")  # --files-with-matches
+        elif mode == "count":
+            cmd.append("-c")  # --count
+        else:  # with_context
+            cmd.append("-n")  # --line-number
+            if context_lines > 0:
+                cmd.extend(["-C", str(context_lines)])
+
+        # Case sensitivity
+        if not case_sensitive:
+            cmd.append("-i")
+
+        # Multiline mode
+        if multiline:
+            cmd.append("-U")  # --multiline
+
+        # File type filtering via glob
+        if file_pattern:
+            cmd.extend(["-g", file_pattern])
+
+        # Exclude patterns
+        default_excludes = [
+            ".git/",
+            "node_modules/",
+            "__pycache__/",
+            "*.pyc",
+            ".venv/",
+            "venv/",
+            "target/",
+            "build/",
+        ]
+        excludes = exclude_patterns if exclude_patterns is not None else default_excludes
+        for exclude in excludes:
+            cmd.extend(["-g", f"!{exclude}"])
+
+        # Max results per file (only for with_context mode)
+        if mode == "with_context":
+            cmd.extend(["-m", str(max_matches_per_file)])
+
+        # Include hidden files but exclude .git
+        cmd.append("--hidden")
+
+        # Pattern and path
+        cmd.extend(["--", pattern, path])
+
+        try:
+            process = await asyncio.create_subprocess_exec(
+                *cmd,
+                stdout=asyncio.subprocess.PIPE,
+                stderr=asyncio.subprocess.PIPE,
+            )
+            try:
+                stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=30)
+            except asyncio.TimeoutError:
+                process.kill()
+                await process.communicate()
+                return "Error: Search timed out after 30 seconds"
+
+            output = stdout.decode(errors="ignore")
+            error_output = stderr.decode(errors="ignore")
+            if not output and process.returncode == 1:
+                return f"No matches found for pattern '{pattern}'"
+            elif process.returncode not in (0, 1):
+                if error_output:
+                    return f"Error executing ripgrep: {error_output.strip()}"
+                return f"No matches found for pattern '{pattern}'"
+
+            # Limit output size
+            lines = output.strip().split("\n") if output.strip() else []
+            if len(lines) > max_count:
+                lines = lines[:max_count]
+                lines.append(f"\n... (truncated, showing first {max_count} results)")
+
+            output = "\n".join(lines)
+
+            # Check output size
+            estimated_tokens = len(output) // self.CHARS_PER_TOKEN
+            if estimated_tokens > self.MAX_TOKENS:
+                max_chars = self.MAX_TOKENS * self.CHARS_PER_TOKEN
+                output = output[:max_chars]
+                output += f"\n... (output truncated to ~{self.MAX_TOKENS} tokens)"
+
+            return output
+
+        except Exception as e:
+            return f"Error executing ripgrep: {str(e)}"
+
+    async def _execute_python_fallback(
+        self,
+        pattern: str,
+        path: str,
+        mode: str,
+        case_sensitive: bool,
+        file_pattern: Optional[str],
+        exclude_patterns: Optional[List[str]],
+        max_matches_per_file: int,
+        max_count: int,
+    ) -> str:
+        """Execute search using Python regex (fallback when ripgrep not available)."""
+        try:
+            flags = 0 if case_sensitive else re.IGNORECASE
+            regex = re.compile(pattern, flags)
+        except re.error as e:
+            return f"Error: Invalid regex pattern: {str(e)}"
+
+        try:
+            base_path = Path(path)
+
+            # Default exclusions if not specified
+            default_excludes = [
+                "*.pyc",
+                "*.so",
+                "*.dylib",
+                "*.dll",
+                "*.exe",
+                "*.bin",
+                "*.jpg",
+                "*.png",
+                "*.gif",
+                "*.pdf",
+                "*.zip",
+                "*.tar",
+                "*.gz",
+            ]
+
+            # Determine files to search
+            if file_pattern:
+                try:
+                    files_to_search = await asyncio.to_thread(
+                        lambda: [f for f in base_path.glob(file_pattern) if f.is_file()]
+                    )
+                except Exception as e:
+                    return f"Error with file_pattern '{file_pattern}': {str(e)}"
+            else:
+                files_to_search = await asyncio.to_thread(
+                    lambda: [f for f in base_path.rglob("*") if f.is_file()]
+                )
+
+            # Filter out excluded patterns
+            excludes = exclude_patterns if exclude_patterns is not None else default_excludes
+
+            # Pre-compute set of excluded files
+            excluded_files = set()
+            for exclude_pattern in excludes:
+                with contextlib.suppress(Exception):
+                    glob_matches = await asyncio.to_thread(
+                        lambda exclude_pattern=exclude_pattern: list(
+                            base_path.glob(exclude_pattern)
+                        )
+                    )
+                    rglob_matches = await asyncio.to_thread(
+                        lambda exclude_pattern=exclude_pattern: list(
+                            base_path.rglob(exclude_pattern)
+                        )
+                    )
+                    excluded_files.update(glob_matches)
+                    excluded_files.update(rglob_matches)
+
+            # Also exclude common directories
+            exclude_dirs = {
+                ".git",
+                "node_modules",
+                "__pycache__",
+                ".venv",
+                "venv",
+                "target",
+                "build",
+            }
+
+            # Filter files
+            filtered_files = []
+            for file_path in files_to_search:
+                if file_path in excluded_files:
+                    continue
+                # Check for excluded directories
+                skip = False
+                for part in file_path.parts:
+                    if part in exclude_dirs:
+                        skip = True
+                        break
+                if not skip:
+                    filtered_files.append(file_path)
+
+            results = []
+            files_searched = 0
+
+            for file_path in filtered_files:
+                files_searched += 1
+
+                try:
+                    async with aiofiles.open(file_path, encoding="utf-8") as f:
+                        content = await f.read()
+                    matches = list(regex.finditer(content))
+
+                    if not matches:
+                        continue
+
+                    if mode == "files_only":
+                        results.append(str(file_path))
+                    elif mode == "count":
+                        results.append(f"{file_path}: {len(matches)} matches")
+                    elif mode == "with_context":
+                        lines = content.splitlines()
+                        for match in matches[:max_matches_per_file]:
+                            line_no = content[: match.start()].count("\n") + 1
+                            if line_no <= len(lines):
+                                results.append(f"{file_path}:{line_no}: {lines[line_no-1].strip()}")
+                except (UnicodeDecodeError, PermissionError):
+                    continue
+
+                if len(results) >= max_count:
+                    break
+
+            if not results:
+                return (
+                    f"No matches found for pattern '{pattern}' in {files_searched} files searched"
+                )
+
+            output = "\n".join(results)
+
+            # Check output size
+            estimated_tokens = len(output) // self.CHARS_PER_TOKEN
+            if estimated_tokens > self.MAX_TOKENS:
+                return (
+                    f"Error: Grep output (~{estimated_tokens} tokens) exceeds "
+                    f"maximum allowed ({self.MAX_TOKENS}). Please use more specific "
+                    f"file_pattern or pattern to narrow results."
+                )

+            return output
+        except Exception as e:
+            return f"Error executing grep: {str(e)}"
+
+
+class EditTool(BaseTool):
+    """Edit files surgically without reading entire contents."""
+
+    @property
+    def name(self) -> str:
+        return "edit_file"
+
+    @property
+    def description(self) -> str:
+        return """Edit files surgically without reading entire contents.
+
+        Operations:
+        - replace: Find and replace text exactly (old_text and new_text required)
+        - append: Add to end of file (text required)
+        - insert_at_line: Insert text at specific line number (line_number and text required)
+
+        More efficient than reading full file, modifying, and writing back.
+
+        IMPORTANT: Use this for small, targeted edits to save tokens."""
+
+    @property
+    def parameters(self) -> Dict[str, Any]:
+        return {
+            "file_path": {"type": "string", "description": "Path to the file to edit"},
+            "operation": {
+                "type": "string",
+                "description": "Operation to perform: replace, append, or insert_at_line",
+            },
+            "old_text": {
+                "type": "string",
+                "description": "Text to find and replace (for replace operation)",
+            },
+            "new_text": {
+                "type": "string",
+                "description": "New text to insert (for replace operation)",
+            },
+            "text": {
+                "type": "string",
+                "description": "Text to add (for append or insert_at_line operations)",
+            },
+            "line_number": {
+                "type": "integer",
+                "description": "Line number to insert at (for insert_at_line operation, 1-indexed)",
+            },
+        }
+
+    async def execute(
+        self,
+        file_path: str,
+        operation: str,
+        old_text: str = "",
+        new_text: str = "",
+        text: str = "",
+        line_number: int = 0,
+        **kwargs,
+    ) -> str:
+        """Perform surgical file edit."""
+        try:
+            path = Path(file_path)
+
+            if not await aiofiles.os.path.exists(str(path)):
+                return f"Error: File does not exist: {file_path}"
+
+            if operation == "replace":
+                if not old_text:
+                    return "Error: old_text parameter is required for replace operation"
+
+                async with aiofiles.open(file_path, encoding="utf-8") as f:
+                    content = await f.read()
+
+                if old_text not in content:
+                    return f"Error: Text not found in {file_path}"
+
+                # Replace only the first occurrence
+                content = content.replace(old_text, new_text, 1)
+                async with aiofiles.open(file_path, "w", encoding="utf-8") as f:
+                    await f.write(content)
+                return f"Successfully replaced text in {file_path}"
+
+            elif operation == "append":
+                if not text:
+                    return "Error: text parameter is required for append operation"
+
+                async with aiofiles.open(file_path, "a", encoding="utf-8") as f:
+                    await f.write(text)
+                return f"Successfully appended to {file_path}"
+
+            elif operation == "insert_at_line":
+                if not text:
+                    return "Error: text parameter is required for insert_at_line operation"
+                if line_number <= 0:
+                    return "Error: line_number must be positive (1-indexed)"
+
+                async with aiofiles.open(file_path, encoding="utf-8") as f:
+                    content = await f.read()
+                lines = content.splitlines(keepends=True)
+
+                # Insert at the specified line (1-indexed)
+                if line_number > len(lines) + 1:
+                    return f"Error: line_number {line_number} is beyond file length {len(lines)}"
+
+                # Ensure text ends with newline if inserting in middle
+                insert_text = text if text.endswith("\n") else text + "\n"
+                lines.insert(line_number - 1, insert_text)
+
+                async with aiofiles.open(file_path, "w", encoding="utf-8") as f:
+                    await f.write("".join(lines))
+                return f"Successfully inserted text at line {line_number} in {file_path}"
+
+            else:
+                return f"Error: Unknown operation '{operation}'. Supported: replace, append, insert_at_line"
+
+        except Exception as e:
+            return f"Error executing edit: {str(e)}"
tools/base.py ADDED
@@ -0,0 +1,51 @@
+"""Base tool interface for all agent tools."""
+
+from abc import ABC, abstractmethod
+from typing import Any, Dict
+
+
+class BaseTool(ABC):
+    """Abstract base class for all tools."""
+
+    # Token limits for tool output size checking
+    MAX_TOKENS = 25000
+    CHARS_PER_TOKEN = 4  # Conservative estimate
+
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        """Unique tool identifier."""
+        pass
+
+    @property
+    @abstractmethod
+    def description(self) -> str:
+        """Human-readable description for the LLM."""
+        pass
+
+    @property
+    @abstractmethod
+    def parameters(self) -> Dict[str, Any]:
+        """JSON Schema for tool parameters."""
+        pass
+
+    @abstractmethod
+    async def execute(self, **kwargs) -> str:
+        """Execute the tool and return result as string."""
+        raise NotImplementedError
+
+    def to_anthropic_schema(self) -> Dict[str, Any]:
+        """Convert to Anthropic tool schema format."""
+        params = self.parameters
+        # Parameters without a 'default' value are required
+        required = [key for key, value in params.items() if "default" not in value]
+
+        return {
+            "name": self.name,
+            "description": self.description,
+            "input_schema": {
+                "type": "object",
+                "properties": params,
+                "required": required,
+            },
+        }
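
For orientation, a minimal sketch of a concrete tool built on this interface follows. It is illustrative only; an "echo" tool like this does not ship in the wheel.

    # echo_tool_sketch.py - illustrative only
    from typing import Any, Dict

    from tools.base import BaseTool


    class EchoTool(BaseTool):
        @property
        def name(self) -> str:
            return "echo"

        @property
        def description(self) -> str:
            return "Echo the given text back to the caller."

        @property
        def parameters(self) -> Dict[str, Any]:
            return {"text": {"type": "string", "description": "Text to echo back"}}

        async def execute(self, text: str, **kwargs) -> str:
            return text

Because the "text" property dict carries no "default" key, to_anthropic_schema() lists it under "required", yielding {"name": "echo", "description": "...", "input_schema": {"type": "object", "properties": {...}, "required": ["text"]}}.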
tools/calculator.py ADDED
@@ -0,0 +1,50 @@
+"""Calculator tool for executing Python code."""
+
+import io
+import sys
+from typing import Any, Dict
+
+from .base import BaseTool
+
+
+class CalculatorTool(BaseTool):
+    """Execute Python code for calculations and data manipulation."""
+
+    @property
+    def name(self) -> str:
+        return "calculate"
+
+    @property
+    def description(self) -> str:
+        return "Execute Python code for calculations. Use for math, data manipulation, etc. Use print() to output results."
+
+    @property
+    def parameters(self) -> Dict[str, Any]:
+        return {
+            "code": {
+                "type": "string",
+                "description": "Python code to execute (use print() for output)",
+            }
+        }
+
+    async def execute(self, code: str) -> str:
+        """Execute Python code and return output."""
+        # Capture stdout
+        old_stdout = sys.stdout
+        sys.stdout = buffer = io.StringIO()
+
+        try:
+            # Safe execution with limited scope
+            exec_globals = {
+                "__builtins__": __builtins__,
+                "math": __import__("math"),
+                "datetime": __import__("datetime"),
+                "json": __import__("json"),
+            }
+            exec(code, exec_globals)
+            output = buffer.getvalue()
+            return output if output else "Code executed successfully (no output)"
+        except Exception as e:
+            return f"Error executing code: {str(e)}"
+        finally:
+            sys.stdout = old_stdout
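
A minimal sketch of calling this tool directly (illustrative, not part of the package): the tool captures whatever the supplied code prints to stdout and returns it as a string.

    # calculator_sketch.py - illustrative only
    import asyncio

    from tools.calculator import CalculatorTool

    # Prints the captured output, e.g. "1024".
    print(asyncio.run(CalculatorTool().execute("print(2 ** 10)")))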