skydeckai-code 0.1.23-py3-none-any.whl → 0.1.25-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
aidd/tools/__init__.py CHANGED
@@ -5,6 +5,7 @@ from .code_execution import (
      handle_execute_code,
      handle_execute_shell_script,
  )
+ from .code_tools import search_code_tool, handle_search_code
  from .directory_tools import (
      create_directory_tool,
      directory_tree_tool,
@@ -14,9 +15,11 @@ from .directory_tools import (
      list_directory_tool,
  )
  from .file_tools import (
+     copy_file_tool,
      delete_file_tool,
      edit_file_tool,
      get_file_info_tool,
+     handle_copy_file,
      handle_delete_file,
      handle_edit_file,
      handle_get_file_info,
@@ -36,6 +39,7 @@ from .get_available_windows_tool import get_available_windows_tool, handle_get_available_windows
  from .git_tools import (
      git_add_tool,
      git_checkout_tool,
+     git_clone_tool,
      git_commit_tool,
      git_create_branch_tool,
      git_diff_staged_tool,
@@ -48,6 +52,7 @@ from .git_tools import (
      git_status_tool,
      handle_git_add,
      handle_git_checkout,
+     handle_git_clone,
      handle_git_commit,
      handle_git_create_branch,
      handle_git_diff,
@@ -60,6 +65,7 @@ from .git_tools import (
      handle_git_status,
  )
  from .image_tools import read_image_file_tool, handle_read_image_file
+ from .other_tools import batch_tools_tool, handle_batch_tools, think_tool, handle_think
  from .path_tools import (
      get_allowed_directory_tool,
      handle_get_allowed_directory,
@@ -71,6 +77,7 @@ from .screenshot_tool import (
      handle_capture_screenshot,
  )
  from .system_tools import get_system_info_tool, handle_get_system_info
+ from .web_tools import web_fetch_tool, handle_web_fetch

  # Export all tools definitions
  TOOL_DEFINITIONS = [
@@ -83,6 +90,7 @@ TOOL_DEFINITIONS = [
      read_file_tool(),
      read_multiple_files_tool(),
      move_file_tool(),
+     copy_file_tool(),
      search_files_tool(),
      delete_file_tool(),
      get_file_info_tool(),
@@ -90,6 +98,9 @@ TOOL_DEFINITIONS = [
      execute_code_tool(),
      execute_shell_script_tool(),
      codebase_mapper_tool(),
+     search_code_tool(),
+     batch_tools_tool(),
+     think_tool(),
      # Git tools
      git_init_tool(),
      git_status_tool(),
@@ -102,6 +113,7 @@ TOOL_DEFINITIONS = [
      git_log_tool(),
      git_create_branch_tool(),
      git_checkout_tool(),
+     git_clone_tool(),
      git_show_tool(),
      get_system_info_tool(),
      # Screenshot tools
@@ -111,6 +123,8 @@ TOOL_DEFINITIONS = [
      get_available_windows_tool(),
      # Image tools
      read_image_file_tool(),
+     # Web tools
+     web_fetch_tool(),
  ]

  # Export all handlers
@@ -124,13 +138,17 @@ TOOL_HANDLERS = {
      "edit_file": handle_edit_file,
      "read_multiple_files": handle_read_multiple_files,
      "move_file": handle_move_file,
+     "copy_file": handle_copy_file,
      "search_files": handle_search_files,
+     "search_code": handle_search_code,
      "delete_file": handle_delete_file,
      "get_file_info": handle_get_file_info,
      "directory_tree": handle_directory_tree,
      "execute_code": handle_execute_code,
      "execute_shell_script": handle_execute_shell_script,
      "codebase_mapper": handle_codebase_mapper,
+     "batch_tools": handle_batch_tools,
+     "think": handle_think,
      # Git handlers
      "git_init": handle_git_init,
      "git_status": handle_git_status,
@@ -143,6 +161,7 @@ TOOL_HANDLERS = {
      "git_log": handle_git_log,
      "git_create_branch": handle_git_create_branch,
      "git_checkout": handle_git_checkout,
+     "git_clone": handle_git_clone,
      "git_show": handle_git_show,
      "get_system_info": handle_get_system_info,
      # Screenshot handlers
@@ -152,4 +171,6 @@ TOOL_HANDLERS = {
      "get_available_windows": handle_get_available_windows,
      # Image handlers
      "read_image_file": handle_read_image_file,
+     # Web handlers
+     "web_fetch": handle_web_fetch,
  }
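
The entries added above slot into the existing TOOL_DEFINITIONS / TOOL_HANDLERS registry exported by aidd/tools/__init__.py. A minimal dispatch sketch of how such a registry is typically consumed (the dispatch_tool wrapper below is illustrative and not part of the package; it assumes handlers are async callables that take an arguments dict, as handle_search_code below does):

from aidd.tools import TOOL_HANDLERS

async def dispatch_tool(name: str, arguments: dict):
    # New names such as "copy_file", "search_code", "batch_tools", "think",
    # "git_clone" and "web_fetch" resolve exactly like the pre-existing entries.
    handler = TOOL_HANDLERS.get(name)
    if handler is None:
        raise ValueError(f"Unknown tool: {name}")
    # Each handler returns a list of mcp.types content items.
    return await handler(arguments)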
aidd/tools/code_tools.py ADDED
@@ -0,0 +1,332 @@
+ import os
+ import re
+ import fnmatch
+ import subprocess
+ from datetime import datetime
+ from typing import List, Dict, Any, Optional, Union, Tuple
+
+ from mcp.types import TextContent
+ from .state import state
+
+
+ def search_code_tool():
+     return {
+         "name": "search_code",
+         "description": "Fast content search tool using regular expressions. "
+                        "WHEN TO USE: When you need to search for specific patterns within file contents across a codebase. "
+                        "Useful for finding function definitions, variable usages, import statements, or any text pattern "
+                        "in source code files. "
+                        "WHEN NOT TO USE: When you need to find files by name (use search_files instead), when you need "
+                        "semantic code understanding (use codebase_mapper instead), or when analyzing individual file "
+                        "structure. "
+                        "RETURNS: Lines of code matching the specified pattern, grouped by file with line numbers. "
+                        "Results are sorted by file modification time with newest files first. Respects file filtering "
+                        "and ignores binary files. Search is restricted to the allowed directory.",
+         "inputSchema": {
+             "type": "object",
+             "properties": {
+                 "pattern": {
+                     "type": "string",
+                     "description": "Regular expression pattern to search for in file contents. Supports full regex syntax. "
+                                    "Examples: 'function\\s+\\w+' to find function declarations, 'import\\s+.*from' to find "
+                                    "import statements, 'console\\.log.*Error' to find error logs."
+                 },
+                 "include": {
+                     "type": "string",
+                     "description": "File pattern to include in the search. Supports glob patterns including wildcards and braces. "
+                                    "Examples: '*.js' for all JavaScript files, '*.{ts,tsx}' for TypeScript files, "
+                                    "'src/**/*.py' for Python files in the src directory and subdirectories.",
+                     "default": "*"
+                 },
+                 "exclude": {
+                     "type": "string",
+                     "description": "File pattern to exclude from the search. Supports glob patterns including wildcards and braces. "
+                                    "Examples: 'node_modules/**' to exclude node_modules directory, '*.min.js' to exclude minified JS.",
+                     "default": ""
+                 },
+                 "max_results": {
+                     "type": "integer",
+                     "description": "Maximum number of matching results to return. Use to limit output size for common patterns. "
+                                    "Default is 100, which is sufficient for most searches while preventing excessive output.",
+                     "default": 100
+                 },
+                 "case_sensitive": {
+                     "type": "boolean",
+                     "description": "Whether to perform a case-sensitive search. When true, 'Error' will not match 'error'. "
+                                    "Default is false, which makes searches case-insensitive.",
+                     "default": False
+                 },
+                 "path": {
+                     "type": "string",
+                     "description": "Base directory to search from. This is the starting point for the search. "
+                                    "Examples: '.' for current directory, 'src' to search only within src directory. "
+                                    "Default is the root of the allowed directory.",
+                     "default": "."
+                 }
+             },
+             "required": ["pattern"]
+         }
+     }
+
+
+ async def handle_search_code(arguments: dict) -> List[TextContent]:
+     """Handle searching for patterns in code files."""
+     pattern = arguments.get("pattern")
+     include = arguments.get("include", "*")
+     exclude = arguments.get("exclude", "")
+     max_results = arguments.get("max_results", 100)
+     case_sensitive = arguments.get("case_sensitive", False)
+     path = arguments.get("path", ".")
+
+     if not pattern:
+         raise ValueError("Pattern must be provided")
+
+     # Determine full path for search start
+     if os.path.isabs(path):
+         full_path = os.path.abspath(path)
+     else:
+         full_path = os.path.abspath(os.path.join(state.allowed_directory, path))
+
+     # Security check
+     if not full_path.startswith(state.allowed_directory):
+         raise ValueError(f"Access denied: Path ({full_path}) must be within allowed directory")
+
+     if not os.path.exists(full_path):
+         raise ValueError(f"Path does not exist: {path}")
+     if not os.path.isdir(full_path):
+         raise ValueError(f"Path is not a directory: {path}")
+
+     try:
+         # Use ripgrep if available for faster results
+         try:
+             return await _search_with_ripgrep(
+                 pattern, include, exclude, max_results, case_sensitive, full_path
+             )
+         except (subprocess.SubprocessError, FileNotFoundError):
+             # Fallback to Python implementation if ripgrep not available
+             return await _search_with_python(
+                 pattern, include, exclude, max_results, case_sensitive, full_path
+             )
+     except Exception as e:
+         raise ValueError(f"Error searching code: {str(e)}")
+
+
+ async def _search_with_ripgrep(
+     pattern: str,
+     include: str,
+     exclude: str,
+     max_results: int,
+     case_sensitive: bool,
+     full_path: str
+ ) -> List[TextContent]:
+     """Search using ripgrep for better performance."""
+     cmd = ["rg", "--line-number"]
+
+     # Add case sensitivity flag
+     if not case_sensitive:
+         cmd.append("--ignore-case")
+
+     # Add include patterns if provided
+     if include and include != "*":
+         # Convert glob pattern to ripgrep glob
+         cmd.extend(["--glob", include])
+
+     # Add exclude patterns if provided
+     if exclude:
+         # Convert glob pattern to ripgrep glob
+         cmd.extend(["--glob", f"!{exclude}"])
+
+     # Add max results
+     cmd.extend(["--max-count", str(max_results)])
+
+     # Add pattern and path
+     cmd.extend([pattern, full_path])
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             check=True
+         )
+
+         output = result.stdout.strip()
+         if not output:
+             return [TextContent(
+                 type="text",
+                 text="No matches found."
+             )]
+
+         # Process output to add file modification times
+         files_with_matches = {}
+         current_file = None
+
+         for line in output.split('\n'):
+             if not line.strip():
+                 continue
+
+             # ripgrep output format: file:line:content
+             parts = line.split(':', 2)
+             if len(parts) >= 3:
+                 file_path, line_num, content = parts[0], parts[1], parts[2]
+
+                 # Get relative path for display
+                 rel_path = os.path.relpath(file_path, state.allowed_directory)
+
+                 if rel_path not in files_with_matches:
+                     # Get file modification time
+                     mod_time = os.path.getmtime(file_path)
+                     files_with_matches[rel_path] = {
+                         "mod_time": mod_time,
+                         "matches": []
+                     }
+
+                 files_with_matches[rel_path]["matches"].append(f"{line_num}: {content}")
+
+         # Sort files by modification time (newest first)
+         sorted_files = sorted(
+             files_with_matches.items(),
+             key=lambda x: x[1]["mod_time"],
+             reverse=True
+         )
+
+         # Format output
+         formatted_output = []
+         for file_path, data in sorted_files:
+             formatted_output.append(f"\n{file_path} (modified: {datetime.fromtimestamp(data['mod_time']).strftime('%Y-%m-%d %H:%M:%S')})")
+             formatted_output.extend(data["matches"])
+
+         return [TextContent(
+             type="text",
+             text="\n".join(formatted_output)
+         )]
+
+     except subprocess.CalledProcessError as e:
+         if e.returncode == 1 and not e.stderr:
+             # ripgrep returns 1 when no matches are found
+             return [TextContent(
+                 type="text",
+                 text="No matches found."
+             )]
+         raise
+
+
+ async def _search_with_python(
+     pattern: str,
+     include: str,
+     exclude: str,
+     max_results: int,
+     case_sensitive: bool,
+     full_path: str
+ ) -> List[TextContent]:
+     """Fallback search implementation using Python's regex and file operations."""
+     # Compile the regex pattern
+     try:
+         if case_sensitive:
+             regex = re.compile(pattern)
+         else:
+             regex = re.compile(pattern, re.IGNORECASE)
+     except re.error as e:
+         raise ValueError(f"Invalid regular expression: {str(e)}")
+
+     # Convert glob patterns to regex patterns for matching
+     include_pattern = fnmatch.translate(include)
+     include_regex = re.compile(include_pattern)
+
+     exclude_regex = None
+     if exclude:
+         exclude_pattern = fnmatch.translate(exclude)
+         exclude_regex = re.compile(exclude_pattern)
+
+     # Dictionary to store files with matches and their modification times
+     files_with_matches = {}
+     match_count = 0
+
+     # Walk the directory tree
+     for root, _, files in os.walk(full_path):
+         if match_count >= max_results:
+             break
+
+         for filename in files:
+             if match_count >= max_results:
+                 break
+
+             file_path = os.path.join(root, filename)
+
+             # Get path relative to the search root for pattern matching
+             rel_path = os.path.relpath(file_path, full_path)
+
+             # Check if file matches include pattern
+             if not include_regex.match(filename) and not include_regex.match(rel_path):
+                 continue
+
+             # Check if file matches exclude pattern
+             if exclude_regex and (exclude_regex.match(filename) or exclude_regex.match(rel_path)):
+                 continue
+
+             # Get file modification time
+             try:
+                 mod_time = os.path.getmtime(file_path)
+             except (OSError, IOError):
+                 continue
+
+             # Skip binary files
+             try:
+                 with open(file_path, 'r', encoding='utf-8') as f:
+                     try:
+                         # Try to read the first few bytes to check if it's a text file
+                         f.read(4096)
+                         # Rewind to beginning of file
+                         f.seek(0)
+                     except UnicodeDecodeError:
+                         # Skip binary files
+                         continue
+
+                     # Get relative path for display
+                     display_path = os.path.relpath(file_path, state.allowed_directory)
+
+                     # Initialize entry for this file
+                     if display_path not in files_with_matches:
+                         files_with_matches[display_path] = {
+                             "mod_time": mod_time,
+                             "matches": []
+                         }
+
+                     # Search for pattern in each line
+                     for line_num, line in enumerate(f, 1):
+                         if regex.search(line):
+                             files_with_matches[display_path]["matches"].append(f"{line_num}: {line.rstrip()}")
+                             match_count += 1
+
+                             if match_count >= max_results:
+                                 break
+
+             except (OSError, IOError):
+                 # Skip files that can't be read
+                 continue
+
+     # No matches found
+     if not files_with_matches:
+         return [TextContent(
+             type="text",
+             text="No matches found."
+         )]
+
+     # Sort files by modification time (newest first)
+     sorted_files = sorted(
+         files_with_matches.items(),
+         key=lambda x: x[1]["mod_time"],
+         reverse=True
+     )
+
+     # Format output
+     formatted_output = []
+     for file_path, data in sorted_files:
+         if data["matches"]:  # Only include files that actually have matches
+             formatted_output.append(f"\n{file_path} (modified: {datetime.fromtimestamp(data['mod_time']).strftime('%Y-%m-%d %H:%M:%S')})")
+             formatted_output.extend(data["matches"])
+
+     return [TextContent(
+         type="text",
+         text="\n".join(formatted_output)
+     )]
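
A minimal usage sketch for the new search_code tool, assuming the file above lands at aidd/tools/code_tools.py (as the new __init__.py import suggests) and that state.allowed_directory has already been configured; the pattern and filter values are illustrative, and omitted arguments fall back to the schema defaults shown above:

import asyncio

from aidd.tools.code_tools import handle_search_code

async def main():
    # Search Python sources for function definitions, skipping a vendored tree.
    results = await handle_search_code({
        "pattern": r"def \w+\(",
        "include": "*.py",
        "exclude": "node_modules/**",
        "max_results": 50,
    })
    # The handler returns a list of TextContent items; the text groups matches
    # by file, with the most recently modified files listed first.
    print(results[0].text)

asyncio.run(main())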