skydeckai-code 0.1.37__py3-none-any.whl → 0.1.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: skydeckai-code
- Version: 0.1.37
+ Version: 0.1.39
  Summary: This MCP server provides a comprehensive set of tools for AI-driven Development workflows including file operations, code analysis, multi-language execution, web content fetching with HTML-to-markdown conversion, multi-engine web search, code content searching, and system information retrieval.
  Project-URL: Homepage, https://github.com/skydeckai/skydeckai-code
  Project-URL: Repository, https://github.com/skydeckai/skydeckai-code
@@ -16,6 +16,8 @@ Requires-Dist: mcp>=1.6.0
  Requires-Dist: mss>=10.0.0
  Requires-Dist: pillow>=11.1.0
  Requires-Dist: psutil>=7.0.0
+ Requires-Dist: pygetwindow>=0.0.9; sys_platform == 'win32'
+ Requires-Dist: pyobjc-framework-quartz>=11.0; sys_platform == 'darwin'
  Requires-Dist: requests>=2.32.3
  Requires-Dist: tree-sitter-c-sharp>=0.23.1
  Requires-Dist: tree-sitter-cpp>=0.23.4
@@ -29,12 +31,10 @@ Requires-Dist: tree-sitter-ruby>=0.23.1
  Requires-Dist: tree-sitter-rust==0.23.2
  Requires-Dist: tree-sitter-typescript>=0.23.2
  Requires-Dist: tree-sitter>=0.24.0
- Provides-Extra: macos
- Requires-Dist: pyobjc-framework-quartz>=11.0; extra == 'macos'
- Provides-Extra: windows
- Requires-Dist: pygetwindow>=0.0.9; extra == 'windows'
  Description-Content-Type: text/markdown

+ [![MseeP.ai Security Assessment Badge](https://mseep.net/pr/skydeckai-code-badge.png)](https://mseep.ai/app/skydeckai-code)
+
  # SkyDeckAI Code

  An MCP server that provides a comprehensive set of tools for AI-driven development workflows. Features include file system operations, code analysis using tree-sitter for multiple programming languages, code execution, web content fetching with HTML-to-markdown conversion, multi-engine web search, code content searching, and system information retrieval. Designed to enhance AI's capability to assist in software development tasks by providing direct access to both local and remote resources.
@@ -5,9 +5,9 @@ src/aidd/server.py,sha256=kPRyWeWkMCZjabelC65XTmzZG7yw8htMJKSfnUcKnb0,1575
  src/aidd/tools/__init__.py,sha256=j1uL7WVH3xS8JTGnpSIhaz7qkc3gcxuDyvZIYP9UztI,3731
  src/aidd/tools/base.py,sha256=wHSAaGGYWM8ECmoYd7KEcmjsZRWesNQFf3zMjCKGMcc,380
  src/aidd/tools/code_analysis.py,sha256=fDpm2o_If5PsngXzHN2-ezSkPVT0ZxivLuzmHrOAmVU,33188
- src/aidd/tools/code_execution.py,sha256=HRLUR1-q1PiCXKZV5QmTknDJKsfvPvFSWZbTpYFcv7I,13703
- src/aidd/tools/code_tools.py,sha256=DQ6N34Wbz5DwUPzt6RG7jk9HF2SsWFCFn99mencHK1c,14263
- src/aidd/tools/directory_tools.py,sha256=Hxzge_ziYw_FsjYb5yF0R0dHEdvuWRsg7WsdYDG0AUg,12971
+ src/aidd/tools/code_execution.py,sha256=7HKstQ-LTjGEUn87LhowOJbd4Pq_zG0xkO-K0JJ-EFs,15513
+ src/aidd/tools/code_tools.py,sha256=rJx_CMq0mB7aBJ6YcNB_6geFnjHU4OaGcXyuu909xhM,16010
+ src/aidd/tools/directory_tools.py,sha256=GMG4-9iO5RfTkbhlWaW40GPKa1qujMPTN32pwxjUU4E,18052
  src/aidd/tools/file_tools.py,sha256=GYzP6WxGbV1V42FlWNSuGIyiCtRp09kwF6lOZrFtq_U,43112
  src/aidd/tools/get_active_apps_tool.py,sha256=BjLF7iXSDgyAmm_gfFgAul2Gn3iX-CNVYHM7Sh4jTAI,19427
  src/aidd/tools/get_available_windows_tool.py,sha256=OVIYhItTn9u_DftOr3vPCT-R0DOFvMEEJXA6tD6gqWQ,15952
@@ -16,10 +16,10 @@ src/aidd/tools/other_tools.py,sha256=iG3Sd2FP0M0pRv5esPBAUMvlwxTyAMDUdS77IqA_f5s
  src/aidd/tools/path_tools.py,sha256=RGoOhqP69eHJzM8tEgn_5-GRaR0gp25fd0XZIJ_RnQE,4045
  src/aidd/tools/screenshot_tool.py,sha256=NMO5B4UG8qfMEOMRd2YoOjtwz_oQ2y1UAGU22jV1yGU,46337
  src/aidd/tools/state.py,sha256=RWSw0Jfsui8FqC0xsI7Ik07tAg35hRwLHa5xGBVbiI4,1493
- src/aidd/tools/system_tools.py,sha256=H4_qveKC2HA7SIbi-j4vxA0W4jYh2wfu9A6ni5wkZyA,7249
+ src/aidd/tools/system_tools.py,sha256=XgdIgKeqePZx5pj59zH7Jhs2Abn55XUf0tvKbKMVtPo,7400
  src/aidd/tools/web_tools.py,sha256=gdsj2DEVYb_oYChItK5I1ugt2w25U7IAa5kEw9q6MVg,35534
- skydeckai_code-0.1.37.dist-info/METADATA,sha256=1YGhMWLG0ftxoyHwRaO64oi5mlSpRsXW5wh0quhcHdA,31860
- skydeckai_code-0.1.37.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- skydeckai_code-0.1.37.dist-info/entry_points.txt,sha256=ZkU1spOhLEnz5MpUn4dDihVcE0DMUC6ejzbsF-eNth4,88
- skydeckai_code-0.1.37.dist-info/licenses/LICENSE,sha256=uHse04vmI6ZjW7TblegFl30X-sDyyF0-QvH8ItPca3c,10865
- skydeckai_code-0.1.37.dist-info/RECORD,,
+ skydeckai_code-0.1.39.dist-info/METADATA,sha256=AwWLNrB3UMqXpsqiWYw6-ZSN8h6LRiZr_2PNt0yqfd4,31952
+ skydeckai_code-0.1.39.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ skydeckai_code-0.1.39.dist-info/entry_points.txt,sha256=ZkU1spOhLEnz5MpUn4dDihVcE0DMUC6ejzbsF-eNth4,88
+ skydeckai_code-0.1.39.dist-info/licenses/LICENSE,sha256=uHse04vmI6ZjW7TblegFl30X-sDyyF0-QvH8ItPca3c,10865
+ skydeckai_code-0.1.39.dist-info/RECORD,,
@@ -2,49 +2,28 @@ import os
2
2
  import stat
3
3
  import subprocess
4
4
  from typing import Any, Dict, List
5
-
6
5
  import mcp.types as types
6
+ from pathlib import Path
7
7
 
8
8
  from .state import state
9
9
 
10
10
  # Language configurations
11
11
  LANGUAGE_CONFIGS = {
12
- 'python': {
13
- 'file_extension': '.py',
14
- 'command': ['python3'],
15
- 'comment_prefix': '#'
16
- },
17
- 'javascript': {
18
- 'file_extension': '.js',
19
- 'command': ['node'],
20
- 'comment_prefix': '//'
21
- },
22
- 'ruby': {
23
- 'file_extension': '.rb',
24
- 'command': ['ruby'],
25
- 'comment_prefix': '#'
12
+ "python": {"file_extension": ".py", "command": ["python3"], "comment_prefix": "#"},
13
+ "javascript": {"file_extension": ".js", "command": ["node"], "comment_prefix": "//"},
14
+ "ruby": {"file_extension": ".rb", "command": ["ruby"], "comment_prefix": "#"},
15
+ "php": {"file_extension": ".php", "command": ["php"], "comment_prefix": "//"},
16
+ "go": {"file_extension": ".go", "command": ["go", "run"], "comment_prefix": "//", "wrapper_start": "package main\nfunc main() {", "wrapper_end": "}"},
17
+ "rust": {
18
+ "file_extension": ".rs",
19
+ "command": ["rustc", "-o"], # Special handling needed
20
+ "comment_prefix": "//",
21
+ "wrapper_start": "fn main() {",
22
+ "wrapper_end": "}",
26
23
  },
27
- 'php': {
28
- 'file_extension': '.php',
29
- 'command': ['php'],
30
- 'comment_prefix': '//'
31
- },
32
- 'go': {
33
- 'file_extension': '.go',
34
- 'command': ['go', 'run'],
35
- 'comment_prefix': '//',
36
- 'wrapper_start': 'package main\nfunc main() {',
37
- 'wrapper_end': '}'
38
- },
39
- 'rust': {
40
- 'file_extension': '.rs',
41
- 'command': ['rustc', '-o'], # Special handling needed
42
- 'comment_prefix': '//',
43
- 'wrapper_start': 'fn main() {',
44
- 'wrapper_end': '}'
45
- }
46
24
  }
47
25
 
26
+
48
27
  def execute_code_tool() -> Dict[str, Any]:
49
28
  return {
50
29
  "name": "execute_code",
@@ -70,29 +49,31 @@ def execute_code_tool() -> Dict[str, Any]:
70
49
  "language": {
71
50
  "type": "string",
72
51
  "enum": list(LANGUAGE_CONFIGS.keys()),
73
- "description": "Programming language to use. Must be one of the supported languages: " + ", ".join(LANGUAGE_CONFIGS.keys()) + ". " +
74
- "Each language requires the appropriate runtime to be installed on the user's machine. The code will be executed using: python3 for " +
75
- "Python, node for JavaScript, ruby for Ruby, php for PHP, go run for Go, and rustc for Rust."
52
+ "description": "Programming language to use. Must be one of the supported languages: "
53
+ + ", ".join(LANGUAGE_CONFIGS.keys())
54
+ + ". "
55
+ + "Each language requires the appropriate runtime to be installed on the user's machine. The code will be executed using: python3 for "
56
+ + "Python, node for JavaScript, ruby for Ruby, php for PHP, go run for Go, and rustc for Rust.",
76
57
  },
77
58
  "code": {
78
59
  "type": "string",
79
- "description": "Code to execute on the user's local machine in the current working directory. The code will be saved to a " +
80
- "temporary file and executed within the allowed workspace. For Go and Rust, main function wrappers will be added automatically if " +
81
- "not present. For PHP, <?php will be prepended if not present."
60
+ "description": "Code to execute on the user's local machine in the current working directory. The code will be saved to a "
61
+ + "temporary file and executed within the allowed workspace. For Go and Rust, main function wrappers will be added automatically if "
62
+ + "not present. For PHP, <?php will be prepended if not present.",
82
63
  },
83
64
  "timeout": {
84
65
  "type": "integer",
85
- "description": "Maximum execution time in seconds. The execution will be terminated if it exceeds this time limit, returning a " +
86
- "timeout message. Must be between 1 and 30 seconds.",
66
+ "description": "Maximum execution time in seconds. The execution will be terminated if it exceeds this time limit, returning a " + "timeout message. Must be between 1 and 30 seconds.",
87
67
  "default": 5,
88
68
  "minimum": 1,
89
- "maximum": 30
90
- }
69
+ "maximum": 30,
70
+ },
91
71
  },
92
- "required": ["language", "code"]
93
- }
72
+ "required": ["language", "code"],
73
+ },
94
74
  }
95
75
 
76
+
96
77
  def execute_shell_script_tool() -> Dict[str, Any]:
97
78
  return {
98
79
  "name": "execute_shell_script",
@@ -122,47 +103,46 @@ def execute_shell_script_tool() -> Dict[str, Any]:
122
103
  "script": {
123
104
  "type": "string",
124
105
  "description": "Shell script to execute on the user's local machine. Can include any valid shell commands or scripts that would "
125
- "run in a standard shell environment. The script is executed using /bin/sh for maximum compatibility across systems."
106
+ "run in a standard shell environment. The script is executed using /bin/sh for maximum compatibility across systems.",
126
107
  },
127
108
  "timeout": {
128
109
  "type": "integer",
129
- "description": "Maximum execution time in seconds. The execution will be terminated if it exceeds this time limit. "
130
- "Default is 300 seconds (5 minutes), with a maximum allowed value of 600 seconds (10 minutes).",
110
+ "description": "Maximum execution time in seconds. The execution will be terminated if it exceeds this time limit. Default is 300 seconds (5 minutes), with a maximum allowed value of 600 seconds (10 minutes).",
131
111
  "default": 300,
132
- "maximum": 600
133
- }
112
+ "maximum": 600,
113
+ },
134
114
  },
135
- "required": ["script"]
136
- }
115
+ "required": ["script"],
116
+ },
137
117
  }
138
118
 
119
+
139
120
  def is_command_available(command: str) -> bool:
140
121
  """Check if a command is available in the system."""
141
122
  try:
142
- subprocess.run(['which', command],
143
- stdout=subprocess.PIPE,
144
- stderr=subprocess.PIPE,
145
- check=True)
123
+ subprocess.run(["which", command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True)
146
124
  return True
147
125
  except subprocess.CalledProcessError:
148
126
  return False
149
127
 
128
+
150
129
  def prepare_code(code: str, language: str) -> str:
151
130
  """Prepare code for execution based on language requirements."""
152
131
  config = LANGUAGE_CONFIGS[language]
153
132
 
154
- if language == 'go':
155
- if 'package main' not in code and 'func main()' not in code:
133
+ if language == "go":
134
+ if "package main" not in code and "func main()" not in code:
156
135
  return f"{config['wrapper_start']}\n{code}\n{config['wrapper_end']}"
157
- elif language == 'rust':
158
- if 'fn main()' not in code:
136
+ elif language == "rust":
137
+ if "fn main()" not in code:
159
138
  return f"{config['wrapper_start']}\n{code}\n{config['wrapper_end']}"
160
- elif language == 'php':
161
- if '<?php' not in code:
139
+ elif language == "php":
140
+ if "<?php" not in code:
162
141
  return f"<?php\n{code}"
163
142
 
164
143
  return code
165
144
 
145
+
166
146
  async def execute_code_in_temp_file(language: str, code: str, timeout: int) -> tuple[str, str, int]:
167
147
  """Execute code in a temporary file and return stdout, stderr, and return code."""
168
148
  config = LANGUAGE_CONFIGS[language]
@@ -173,27 +153,24 @@ async def execute_code_in_temp_file(language: str, code: str, timeout: int) -> t
173
153
  os.chdir(state.allowed_directory)
174
154
 
175
155
  # Write code to temp file
176
- with open(temp_file, 'w') as f:
156
+ with open(temp_file, "w") as f:
177
157
  # Prepare and write code
178
158
  prepared_code = prepare_code(code, language)
179
159
  f.write(prepared_code)
180
160
  f.flush()
181
161
 
182
162
  # Prepare command
183
- if language == 'rust':
163
+ if language == "rust":
184
164
  # Special handling for Rust
185
- output_path = 'temp_script.exe'
186
- compile_cmd = ['rustc', temp_file, '-o', output_path]
165
+ output_path = "temp_script.exe"
166
+ compile_cmd = ["rustc", temp_file, "-o", output_path]
187
167
  try:
188
- subprocess.run(compile_cmd,
189
- check=True,
190
- capture_output=True,
191
- timeout=timeout)
168
+ subprocess.run(compile_cmd, check=True, capture_output=True, timeout=timeout)
192
169
  cmd = [output_path]
193
170
  except subprocess.CalledProcessError as e:
194
- return '', e.stderr.decode(), e.returncode
171
+ return "", e.stderr.decode(), e.returncode
195
172
  else:
196
- cmd = config['command'] + [temp_file]
173
+ cmd = config["command"] + [temp_file]
197
174
 
198
175
  # Execute code
199
176
  try:
@@ -205,18 +182,19 @@ async def execute_code_in_temp_file(language: str, code: str, timeout: int) -> t
205
182
  )
206
183
  return result.stdout, result.stderr, result.returncode
207
184
  except subprocess.TimeoutExpired:
208
- return '', f'Execution timed out after {timeout} seconds', 124
185
+ return "", f"Execution timed out after {timeout} seconds", 124
209
186
 
210
187
  finally:
211
188
  # Cleanup
212
189
  # Note: We stay in the allowed directory as all operations should happen there
213
190
  try:
214
191
  os.unlink(temp_file)
215
- if language == 'rust' and os.path.exists(output_path):
192
+ if language == "rust" and os.path.exists(output_path):
216
193
  os.unlink(output_path)
217
194
  except Exception:
218
195
  pass
219
196
 
197
+
220
198
  async def handle_execute_code(arguments: dict) -> List[types.TextContent]:
221
199
  """Handle code execution in various programming languages."""
222
200
  language = arguments.get("language")
@@ -230,12 +208,9 @@ async def handle_execute_code(arguments: dict) -> List[types.TextContent]:
230
208
  raise ValueError(f"Unsupported language: {language}")
231
209
 
232
210
  # Check if required command is available
233
- command = LANGUAGE_CONFIGS[language]['command'][0]
211
+ command = LANGUAGE_CONFIGS[language]["command"][0]
234
212
  if not is_command_available(command):
235
- return [types.TextContent(
236
- type="text",
237
- text=f"Error: {command} is not installed on the system"
238
- )]
213
+ return [types.TextContent(type="text", text=f"Error: {command} is not installed on the system")]
239
214
 
240
215
  try:
241
216
  stdout, stderr, returncode = await execute_code_in_temp_file(language, code, timeout)
@@ -250,16 +225,11 @@ async def handle_execute_code(arguments: dict) -> List[types.TextContent]:
250
225
  if returncode != 0:
251
226
  result.append(f"\nProcess exited with code {returncode}")
252
227
 
253
- return [types.TextContent(
254
- type="text",
255
- text="\n\n".join(result)
256
- )]
228
+ return [types.TextContent(type="text", text="\n\n".join(result))]
257
229
 
258
230
  except Exception as e:
259
- return [types.TextContent(
260
- type="text",
261
- text=f"Error executing code:\n{str(e)}"
262
- )]
231
+ return [types.TextContent(type="text", text=f"Error executing code:\n{str(e)}")]
232
+
263
233
 
264
234
  async def execute_shell_script_in_temp_file(script: str, timeout: int) -> tuple[str, str, int]:
265
235
  """Execute a shell script in a temporary file and return stdout, stderr, and return code."""
@@ -270,7 +240,7 @@ async def execute_shell_script_in_temp_file(script: str, timeout: int) -> tuple[
270
240
  os.chdir(state.allowed_directory)
271
241
 
272
242
  # Write script to temp file
273
- with open(temp_file, 'w') as f:
243
+ with open(temp_file, "w") as f:
274
244
  f.write("#!/bin/sh\n") # Use sh for maximum compatibility
275
245
  f.write(script)
276
246
  f.flush()
@@ -280,15 +250,19 @@ async def execute_shell_script_in_temp_file(script: str, timeout: int) -> tuple[
280
250
 
281
251
  # Execute script
282
252
  try:
253
+ path_env = get_comprehensive_shell_paths()
254
+ env = os.environ.copy()
255
+ env["PATH"] = path_env
283
256
  result = subprocess.run(
284
257
  ["/bin/sh", temp_file], # Use sh explicitly for consistent behavior
285
258
  capture_output=True,
286
259
  timeout=timeout,
287
260
  text=True,
261
+ env=env,
288
262
  )
289
263
  return result.stdout, result.stderr, result.returncode
290
264
  except subprocess.TimeoutExpired:
291
- return '', f'Execution timed out after {timeout} seconds', 124
265
+ return "", f"Execution timed out after {timeout} seconds", 124
292
266
 
293
267
  finally:
294
268
  # Cleanup
@@ -297,6 +271,7 @@ async def execute_shell_script_in_temp_file(script: str, timeout: int) -> tuple[
297
271
  except Exception:
298
272
  pass
299
273
 
274
+
300
275
  async def handle_execute_shell_script(arguments: dict) -> List[types.TextContent]:
301
276
  """Handle shell script execution."""
302
277
  script = arguments.get("script")
@@ -314,13 +289,75 @@ async def handle_execute_shell_script(arguments: dict) -> List[types.TextContent
314
289
  if returncode != 0:
315
290
  result.append(f"\nScript exited with code {returncode}")
316
291
 
317
- return [types.TextContent(
318
- type="text",
319
- text="\n\n".join(result)
320
- )]
292
+ return [types.TextContent(type="text", text="\n\n".join(result))]
321
293
 
322
294
  except Exception as e:
323
- return [types.TextContent(
324
- type="text",
325
- text=f"Error executing shell script:\n{str(e)}"
326
- )]
295
+ return [types.TextContent(type="text", text=f"Error executing shell script:\n{str(e)}")]
296
+
297
+
298
+ def get_comprehensive_shell_paths():
299
+ """
300
+ Get PATH from shells using login mode to capture initialization files
301
+ """
302
+ shells_file = Path("/etc/shells")
303
+
304
+ if not shells_file.exists():
305
+ return os.environ.get("PATH", "")
306
+
307
+ # Read available shells
308
+ try:
309
+ with open(shells_file, "r") as f:
310
+ shells = [line.strip() for line in f if line.strip() and not line.startswith("#")]
311
+ except IOError:
312
+ return os.environ.get("PATH", "")
313
+
314
+ all_paths = []
315
+
316
+ # Get PATH from each shell as login shell
317
+ for shell in shells:
318
+ if not os.path.exists(shell):
319
+ continue
320
+
321
+ # Different approaches for different shells
322
+ commands_to_try = [
323
+ [shell, "--login", "-i", "-c", "echo $PATH"],
324
+ [shell, "-l", "-i", "-c", "echo $PATH"],
325
+ [shell, "--login", "-c", "echo $PATH"],
326
+ [shell, "-l", "-c", "echo $PATH"],
327
+ [shell, "-c", "echo $PATH"],
328
+ ]
329
+
330
+ # Try each command until one works
331
+ for cmd in commands_to_try:
332
+ try:
333
+ result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
334
+ if result.returncode == 0 and result.stdout.strip():
335
+ shell_path = result.stdout.strip()
336
+ if shell_path and shell_path != "$PATH":
337
+ all_paths.extend(shell_path.split(":"))
338
+ break
339
+ except (subprocess.SubprocessError, subprocess.TimeoutExpired):
340
+ continue
341
+
342
+ # Add current PATH
343
+ current_path = os.environ.get("PATH", "")
344
+ if current_path:
345
+ all_paths.extend(current_path.split(":"))
346
+
347
+ # Remove duplicates while preserving order
348
+ return deduplicate_paths(all_paths)
349
+
350
+
351
+ def deduplicate_paths(all_paths):
352
+ """
353
+ Remove duplicates while preserving order
354
+ """
355
+ seen = set()
356
+ merged_path = []
357
+ for path in all_paths:
358
+ path = path.strip()
359
+ if path and path not in seen and os.path.exists(path):
360
+ seen.add(path)
361
+ merged_path.append(path)
362
+
363
+ return ":".join(merged_path)
@@ -5,7 +5,8 @@ import subprocess
5
5
  import json
6
6
  from datetime import datetime
7
7
  from typing import List, Dict, Any, Optional, Union, Tuple
8
-
8
+ import platform
9
+ import stat
9
10
  from mcp.types import TextContent
10
11
  from .state import state
11
12
 
@@ -65,7 +66,14 @@ def search_code_tool():
65
66
  "Examples: '.' for current directory, 'src' to search only within src directory. "
66
67
  "Default is the root of the allowed directory.",
67
68
  "default": "."
68
- }
69
+ },
70
+ "include_hidden": {
71
+ "type": "boolean",
72
+ "description": "Whether to include hidden files. When true, also include the hidden files like"
73
+ ".env, .config on Unix/Posix/Linux, or files with hidden attribute on Windows"
74
+ "Default is false, which exclude the hidden files from search",
75
+ "default": False,
76
+ },
69
77
  },
70
78
  "required": ["patterns"]
71
79
  }
@@ -80,6 +88,7 @@ async def handle_search_code(arguments: dict) -> List[TextContent]:
80
88
  max_results = arguments.get("max_results", 100)
81
89
  case_sensitive = arguments.get("case_sensitive", False)
82
90
  path = arguments.get("path", ".")
91
+ include_hidden = arguments.get("include_hidden", False)
83
92
 
84
93
  if not patterns:
85
94
  raise ValueError("At least one pattern must be provided")
@@ -109,12 +118,12 @@ async def handle_search_code(arguments: dict) -> List[TextContent]:
109
118
  # Use ripgrep if available for faster results
110
119
  try:
111
120
  result = await _search_with_ripgrep(
112
- pattern, include, exclude, max_results, case_sensitive, full_path
121
+ pattern, include, exclude, max_results, case_sensitive, full_path, include_hidden
113
122
  )
114
123
  except (subprocess.SubprocessError, FileNotFoundError):
115
124
  # Fallback to Python implementation if ripgrep not available
116
125
  result = await _search_with_python(
117
- pattern, include, exclude, max_results, case_sensitive, full_path
126
+ pattern, include, exclude, max_results, case_sensitive, full_path, include_hidden
118
127
  )
119
128
 
120
129
  # Add pattern header for multiple patterns
@@ -139,7 +148,8 @@ async def _search_with_ripgrep(
139
148
  exclude: str,
140
149
  max_results: int,
141
150
  case_sensitive: bool,
142
- full_path: str
151
+ full_path: str,
152
+ include_hidden: bool
143
153
  ) -> List[TextContent]:
144
154
  """Search using ripgrep for better performance."""
145
155
  cmd = ["rg", "--line-number"]
@@ -161,6 +171,10 @@ async def _search_with_ripgrep(
161
171
  # Add max results
162
172
  cmd.extend(["--max-count", str(max_results)])
163
173
 
174
+ # Add hidden files
175
+ if include_hidden:
176
+ cmd.append("--hidden")
177
+
164
178
  # Add pattern and path
165
179
  cmd.extend([pattern, full_path])
166
180
 
@@ -247,7 +261,8 @@ async def _search_with_python(
247
261
  exclude: str,
248
262
  max_results: int,
249
263
  case_sensitive: bool,
250
- full_path: str
264
+ full_path: str,
265
+ include_hidden: bool
251
266
  ) -> List[TextContent]:
252
267
  """Fallback search implementation using Python's regex and file operations."""
253
268
  # Compile the regex pattern
@@ -273,10 +288,17 @@ async def _search_with_python(
273
288
  match_count = 0
274
289
 
275
290
  # Walk the directory tree
276
- for root, _, files in os.walk(full_path):
291
+ for root, dirs, files in os.walk(full_path):
277
292
  if match_count >= max_results:
278
293
  break
279
294
 
295
+ if not include_hidden:
296
+ # Remove hidden directories from dirs list to prevent os.walk from entering them
297
+ dirs[:] = [d for d in dirs if not is_hidden(os.path.join(root, d))]
298
+
299
+ # Filter out hidden files
300
+ files = [f for f in files if not is_hidden(os.path.join(root, f))]
301
+
280
302
  for filename in files:
281
303
  if match_count >= max_results:
282
304
  break
@@ -371,3 +393,27 @@ async def _search_with_python(
371
393
  type="text",
372
394
  text="\n".join(formatted_output)
373
395
  )]
396
+
397
+
398
+ def is_hidden_windows(filepath: str) -> bool:
399
+ """Check if file/folder is hidden on Windows"""
400
+ try:
401
+ attrs = os.stat(filepath).st_file_attributes
402
+ return attrs & stat.FILE_ATTRIBUTE_HIDDEN
403
+ except (AttributeError, OSError):
404
+ return False
405
+
406
+
407
+ def is_hidden_unix(name: str) -> bool:
408
+ """Check if file/folder is hidden on Unix-like systems (Linux/macOS)"""
409
+ return name.startswith('.')
410
+
411
+
412
+ def is_hidden(filepath: str) -> bool:
413
+ """Cross-platform hidden file/folder detection"""
414
+ name = os.path.basename(filepath)
415
+
416
+ if platform.system() == 'Windows':
417
+ return is_hidden_windows(filepath) or name.startswith('.')
418
+ else:
419
+ return is_hidden_unix(name)
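
The include_hidden parameter added above is threaded through both search backends: ripgrep receives the --hidden flag, while the Python fallback filters entries with the new is_hidden() helpers. A short sketch of the ripgrep side, for illustration only (build_rg_cmd is a hypothetical helper, not part of the package):

```python
def build_rg_cmd(pattern: str, path: str, include_hidden: bool = False) -> list[str]:
    """Assemble a ripgrep command; rg skips hidden files and directories unless --hidden is given."""
    cmd = ["rg", "--line-number"]
    if include_hidden:
        cmd.append("--hidden")
    cmd.extend([pattern, path])
    return cmd

print(build_rg_cmd("TODO", "src"))                       # ['rg', '--line-number', 'TODO', 'src']
print(build_rg_cmd("TODO", "src", include_hidden=True))  # ['rg', '--line-number', '--hidden', 'TODO', 'src']
```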
@@ -2,9 +2,9 @@ import json
2
2
  import os
3
3
  import subprocess
4
4
  from datetime import datetime
5
-
5
+ import asyncio
6
6
  from mcp.types import TextContent
7
-
7
+ from pathlib import Path
8
8
  from .state import state
9
9
 
10
10
 
@@ -12,12 +12,12 @@ def list_directory_tool():
12
12
  return {
13
13
  "name": "list_directory",
14
14
  "description": "Get a detailed listing of files and directories in the specified path, including type, size, and modification "
15
- "date. WHEN TO USE: When you need to explore the contents of a directory, understand what files are available, check file sizes or "
16
- "modification dates, or locate specific files by name. WHEN NOT TO USE: When you need to read the contents of files (use read_file "
17
- "instead), when you need a recursive listing of all subdirectories (use directory_tree instead), or when searching for files by name pattern "
18
- "(use search_files instead). RETURNS: Text with each line containing file type ([DIR]/[FILE]), name, size (in B/KB/MB), and "
19
- "modification date. Only works within the allowed directory. Example: Enter 'src' to list contents of the src directory, or '.' for "
20
- "current directory.",
15
+ "date. WHEN TO USE: When you need to explore the contents of a directory, understand what files are available, check file sizes or "
16
+ "modification dates, or locate specific files by name. WHEN NOT TO USE: When you need to read the contents of files (use read_file "
17
+ "instead), when you need a recursive listing of all subdirectories (use directory_tree instead), or when searching for files by name pattern "
18
+ "(use search_files instead). RETURNS: Text with each line containing file type ([DIR]/[FILE]), name, size (in B/KB/MB), and "
19
+ "modification date. Only works within the allowed directory. Example: Enter 'src' to list contents of the src directory, or '.' for "
20
+ "current directory.",
21
21
  "inputSchema": {
22
22
  "type": "object",
23
23
  "properties": {
@@ -26,10 +26,11 @@ def list_directory_tool():
26
26
  "description": "Path of the directory to list. Examples: '.' for current directory, 'src' for src directory, 'docs/images' for a nested directory. The path must be within the allowed workspace.",
27
27
  }
28
28
  },
29
- "required": ["path"]
29
+ "required": ["path"],
30
30
  },
31
31
  }
32
32
 
33
+
33
34
  async def handle_list_directory(arguments: dict):
34
35
  from mcp.types import TextContent
35
36
 
@@ -77,29 +78,31 @@ async def handle_list_directory(arguments: dict):
77
78
  except PermissionError:
78
79
  raise ValueError(f"Permission denied accessing: {full_path}")
79
80
 
81
+
80
82
  def create_directory_tool():
81
83
  return {
82
84
  "name": "create_directory",
83
85
  "description": "Create a new directory or ensure a directory exists. "
84
- "Can create multiple nested directories in one operation. "
85
- "WHEN TO USE: When you need to set up project structure, organize files, create output directories before saving files, or establish a directory hierarchy. "
86
- "WHEN NOT TO USE: When you only want to check if a directory exists (use get_file_info instead), or when trying to create directories outside the allowed workspace. "
87
- "RETURNS: Text message confirming either that the directory was successfully created or that it already exists. "
88
- "The operation succeeds silently if the directory already exists. "
89
- "Only works within the allowed directory. "
90
- "Example: Enter 'src/components' to create nested directories.",
86
+ "Can create multiple nested directories in one operation. "
87
+ "WHEN TO USE: When you need to set up project structure, organize files, create output directories before saving files, or establish a directory hierarchy. "
88
+ "WHEN NOT TO USE: When you only want to check if a directory exists (use get_file_info instead), or when trying to create directories outside the allowed workspace. "
89
+ "RETURNS: Text message confirming either that the directory was successfully created or that it already exists. "
90
+ "The operation succeeds silently if the directory already exists. "
91
+ "Only works within the allowed directory. "
92
+ "Example: Enter 'src/components' to create nested directories.",
91
93
  "inputSchema": {
92
94
  "type": "object",
93
95
  "properties": {
94
96
  "path": {
95
97
  "type": "string",
96
- "description": "Path of the directory to create. Can include nested directories which will all be created. Examples: 'logs' for a simple directory, 'src/components/buttons' for nested directories. Both absolute and relative paths are supported, but must be within the allowed workspace."
98
+ "description": "Path of the directory to create. Can include nested directories which will all be created. Examples: 'logs' for a simple directory, 'src/components/buttons' for nested directories. Both absolute and relative paths are supported, but must be within the allowed workspace.",
97
99
  }
98
100
  },
99
- "required": ["path"]
101
+ "required": ["path"],
100
102
  },
101
103
  }
102
104
 
105
+
103
106
  async def handle_create_directory(arguments: dict):
104
107
  """Handle creating a new directory."""
105
108
  from mcp.types import TextContent
@@ -117,9 +120,7 @@ async def handle_create_directory(arguments: dict):
117
120
 
118
121
  # Security check: ensure path is within allowed directory
119
122
  if not full_path.startswith(state.allowed_directory):
120
- raise ValueError(
121
- f"Access denied: Path ({full_path}) must be within allowed directory ({state.allowed_directory})"
122
- )
123
+ raise ValueError(f"Access denied: Path ({full_path}) must be within allowed directory ({state.allowed_directory})")
123
124
 
124
125
  already_exists = os.path.exists(full_path)
125
126
 
@@ -129,72 +130,48 @@ async def handle_create_directory(arguments: dict):
129
130
 
130
131
  if already_exists:
131
132
  return [TextContent(type="text", text=f"Directory already exists: {path}")]
132
- return [TextContent(
133
- type="text",
134
- text=f"Successfully created directory: {path}"
135
- )]
133
+ return [TextContent(type="text", text=f"Successfully created directory: {path}")]
136
134
  except PermissionError:
137
135
  raise ValueError(f"Permission denied creating directory: {path}")
138
136
  except Exception as e:
139
137
  raise ValueError(f"Error creating directory: {str(e)}")
140
138
 
139
+
141
140
  def directory_tree_tool():
142
141
  return {
143
142
  "name": "directory_tree",
144
143
  "description": "Get a recursive tree view of files and directories in the specified path as a JSON structure. "
145
- "WHEN TO USE: When you need to understand the complete structure of a directory tree, visualize the hierarchy of files and directories, or get a comprehensive overview of a project's organization. "
146
- "Particularly useful for large projects where you need to see nested relationships. "
147
- "WHEN NOT TO USE: When you only need a flat list of files in a single directory (use directory_listing instead), or when you're only interested in specific file types (use search_files instead). "
148
- "RETURNS: JSON structure where each entry includes 'name', 'type' (file/directory), and 'children' for directories. "
149
- "Files have no children array, while directories always have a children array (which may be empty). "
150
- "The output is formatted with 2-space indentation for readability. For Git repositories, shows tracked files only. "
151
- "Only works within the allowed directory. "
152
- "Example: Enter '.' for current directory, or 'src' for a specific directory.",
144
+ "WHEN TO USE: When you need to understand the complete structure of a directory tree, visualize the hierarchy of files and directories, or get a comprehensive overview of a project's organization. "
145
+ "Particularly useful for large projects where you need to see nested relationships. "
146
+ "WHEN NOT TO USE: When you only need a flat list of files in a single directory (use directory_listing instead), or when you're only interested in specific file types (use search_files instead). "
147
+ "RETURNS: JSON structure where each entry includes 'name', 'type' (file/directory), and 'children' for directories. "
148
+ "Files have no children array, while directories always have a children array (which may be empty). "
149
+ "The output is formatted with 2-space indentation for readability. For Git repositories, shows tracked files only. "
150
+ "Only works within the allowed directory and only for non-hidden files, or files that are not inside hidden directory. "
151
+ "If you want to show the hidden files also, use commands like execute_shell_script. "
152
+ "Example: Enter '.' for current directory, or 'src' for a specific directory.",
153
153
  "inputSchema": {
154
154
  "type": "object",
155
155
  "properties": {
156
156
  "path": {
157
157
  "type": "string",
158
- "description": "Root directory to analyze. This is the starting point for the recursive tree generation. Examples: '.' for current directory, 'src' for the src directory. Both absolute and relative paths are supported, but must be within the allowed workspace."
159
- }
158
+ "description": "Root directory to analyze. This is the starting point for the recursive tree generation. Examples: '.' for current directory, 'src' for the src directory. Both absolute and relative paths are supported, but must be within the allowed workspace.",
159
+ },
160
+ "max_depth": {
161
+ "type": "integer",
162
+ "description": "Max depth for traversing in case of big and deeply nested directory",
163
+ "default": 3,
164
+ },
160
165
  },
161
- "required": ["path"]
166
+ "required": ["path"],
162
167
  },
163
168
  }
164
169
 
165
- async def build_directory_tree(dir_path: str) -> dict:
166
- """Build directory tree as a JSON structure."""
167
- try:
168
- entries = list(os.scandir(dir_path))
169
- # Sort entries by name
170
- entries.sort(key=lambda e: e.name.lower())
171
-
172
- result = {
173
- "name": os.path.basename(dir_path) or dir_path,
174
- "type": "directory",
175
- "children": []
176
- }
177
-
178
- for entry in entries:
179
- if entry.is_dir():
180
- # Recursively process subdirectories
181
- child_tree = await build_directory_tree(entry.path)
182
- result["children"].append(child_tree)
183
- else:
184
- result["children"].append({
185
- "name": entry.name,
186
- "type": "file"
187
- })
188
-
189
- return result
190
- except PermissionError:
191
- raise ValueError(f"Access denied: {dir_path}")
192
- except Exception as e:
193
- raise ValueError(f"Error processing directory {dir_path}: {str(e)}")
194
170
 
195
171
  async def handle_directory_tree(arguments: dict):
196
172
  """Handle building a directory tree."""
197
173
  path = arguments.get("path", ".")
174
+ max_depth = arguments.get("max_depth", 3)
198
175
 
199
176
  # Validate and get full path
200
177
  full_path = os.path.abspath(os.path.join(state.allowed_directory, path))
@@ -205,85 +182,253 @@ async def handle_directory_tree(arguments: dict):
205
182
  if not os.path.isdir(full_path):
206
183
  raise ValueError(f"Path is not a directory: {full_path}")
207
184
 
208
- # Try git ls-files first
185
+ """
186
+ Idea: for git repo directory, use git ls-files to list all the files
187
+ So that we can avoid some gigantic directories like node_modules, build, dist
188
+ Else just use normal listing
189
+ 1. Try git ls-files for this directory
190
+ 2. If failed, identify git repo by rg and sed -> find -> python,
191
+ git ls-files then add to the visited
192
+ 3. List the remaining that is not in visited using rg -> find -> python
193
+ """
194
+ root = {"name": full_path, "type": "directory", "children": []}
195
+ dir_cache = {"": root}
209
196
  try:
210
- # Get list of all files tracked by git
211
- result = subprocess.run(
212
- ['git', 'ls-files'],
213
- cwd=full_path,
214
- capture_output=True,
215
- text=True,
216
- check=True,
217
- )
218
-
219
- # If git command was successful
220
- files = [f for f in result.stdout.split('\n') if f.strip()]
221
- files.sort()
222
-
223
- # Build tree from git files
224
- directory_map = {}
225
- root_name = os.path.basename(full_path) or full_path
226
-
227
- # First pass: collect all directories and files
228
- for file in files:
229
- parts = file.split(os.sep)
230
- # Add all intermediate directories
231
- for i in range(len(parts)):
232
- parent = os.sep.join(parts[:i])
233
- os.sep.join(parts[:i+1])
234
- if i < len(parts) - 1: # It's a directory
235
- directory_map.setdefault(parent, {"dirs": set(), "files": set()})["dirs"].add(parts[i])
236
- else: # It's a file
237
- directory_map.setdefault(parent, {"dirs": set(), "files": set()})["files"].add(parts[i])
238
-
239
- async def build_git_tree(current_path: str) -> dict:
240
- dir_name = current_path.split(os.sep)[-1] if current_path else ''
241
- result = {
242
- "name": dir_name or root_name,
243
- "type": "directory",
244
- "children": [],
245
- }
246
-
247
- if current_path not in directory_map:
248
- return result
249
-
250
- entry = directory_map[current_path]
251
-
252
- # Add directories first
253
- for dir_name in sorted(entry["dirs"]):
254
- child_path = os.path.join(current_path, dir_name) if current_path else dir_name
255
- child_tree = await build_git_tree(child_path)
256
- result["children"].append(child_tree)
257
-
258
- # Then add files
259
- for file_name in sorted(entry["files"]):
260
- result["children"].append({
261
- "name": file_name,
262
- "type": "file",
263
- })
264
-
265
- return result
266
-
267
- # Build the tree structure starting from root
268
- tree = await build_git_tree('')
269
- return [TextContent(type="text", text=json.dumps(tree, indent=2))]
270
-
271
- except (subprocess.CalledProcessError, FileNotFoundError):
272
- # Git not available or not a git repository, use fallback implementation
197
+ paths = await git_ls(Path(full_path))
198
+ build_tree_from_paths(root, dir_cache, paths, max_depth)
199
+ json_tree = json.dumps(root, indent=2)
200
+ return [TextContent(type="text", text=json_tree)]
201
+ except Exception:
273
202
  pass
274
- except Exception as e:
275
- # Log the error but continue with fallback
276
- print(f"Error using git ls-files: {e}")
203
+
204
+ # build the tree for git repo
205
+ try:
206
+ git_repos = await find_git_repo_async(full_path)
207
+ except Exception:
208
+ git_repos = find_git_repos_python(Path(full_path))
209
+ for git_repo in git_repos:
210
+ absolute_git_repo = Path(full_path) / git_repo
211
+ paths = []
212
+ try:
213
+ paths = await git_ls(absolute_git_repo)
214
+ except Exception:
215
+ try:
216
+ paths = await scan_path_async([], absolute_git_repo)
217
+ except Exception:
218
+ paths = scan_path([], absolute_git_repo)
219
+ finally:
220
+ paths = [git_repo / path for path in paths]
221
+ build_tree_from_paths(root, dir_cache, paths, max_depth)
222
+
223
+ # for non-git directory, do normal scan
224
+ non_git_scans = []
225
+ try:
226
+ non_git_scans = await scan_path_async(git_repos, Path(full_path))
227
+ except Exception:
228
+ non_git_scans = scan_path(git_repos, Path(full_path))
229
+ finally:
230
+ build_tree_from_paths(root, dir_cache, non_git_scans, max_depth)
231
+ json_tree = json.dumps(root, indent=2)
232
+ return [TextContent(type="text", text=json_tree)]
233
+
234
+
235
+ async def find_git_repo_async(cwd: str) -> list[Path]:
236
+ # ripgrep first then find
237
+ try:
238
+ cmd = r"rg --files --glob '**/.git/HEAD' --hidden | sed 's|/\.git/HEAD$|/.git|'"
239
+ proc = await asyncio.create_subprocess_shell(
240
+ cmd,
241
+ cwd=cwd,
242
+ stdout=subprocess.PIPE,
243
+ stderr=subprocess.PIPE,
244
+ )
245
+ stdout, stderr = await proc.communicate()
246
+
247
+ if proc.returncode not in [0, 1]: # 0 = success, 1 = some files not found (normal)
248
+ stderr_text = stderr.decode().strip()
249
+ if stderr_text: # If there's stderr content, it's likely a real error
250
+ raise Exception(f"Find command error: {stderr_text}")
251
+
252
+ git_dirs = stdout.decode().strip().splitlines()
253
+ repo_paths: list[Path] = []
254
+ for git_dir in git_dirs:
255
+ if git_dir: # Skip empty lines
256
+ # Convert to Path object and get parent (removes .git)
257
+ repo_relative_path = Path(git_dir).parent
258
+ repo_paths.append(repo_relative_path)
259
+ return repo_paths
260
+
261
+ except Exception:
277
262
  pass
278
263
 
279
- # Fallback to regular directory traversal
264
+ cmd = r"find . -name .git -type d ! -path '*/\.*/*'"
265
+ proc = await asyncio.create_subprocess_shell(
266
+ cmd,
267
+ cwd=cwd,
268
+ stdout=subprocess.PIPE,
269
+ stderr=subprocess.PIPE,
270
+ )
271
+ stdout, stderr = await proc.communicate()
272
+
273
+ if proc.returncode not in [0, 1]: # 0 = success, 1 = some files not found (normal)
274
+ stderr_text = stderr.decode().strip()
275
+ if stderr_text: # If there's stderr content, it's likely a real error
276
+ raise Exception(f"Find command error: {stderr_text}")
277
+
278
+ git_dirs = stdout.decode().strip().splitlines()
279
+ repo_paths: list[Path] = []
280
+
281
+ for git_dir in git_dirs:
282
+ if git_dir: # Skip empty lines
283
+ # Convert to Path object and get parent (removes .git)
284
+ repo_relative_path = Path(git_dir).parent
285
+ repo_paths.append(repo_relative_path)
286
+ return repo_paths
287
+
288
+
289
+ def find_git_repos_python(start_path: Path) -> list[Path]:
290
+ r"""
291
+ Python fallback for: find . -name .git -type d ! -path '*/\.*/*'
292
+
293
+ Finds all .git directories, excluding those inside hidden directories.
294
+
295
+ Args:
296
+ start_path: Starting directory (defaults to current directory)
297
+
298
+ Returns:
299
+ List of Path objects pointing to .git directories
300
+ """
301
+ git_dirs = []
302
+ start_str = str(start_path)
303
+
304
+ for root, dirs, _ in os.walk(start_str, followlinks=False):
305
+ # Remove hidden directories from traversal
306
+ dirs[:] = [d for d in dirs if not d.startswith(".")]
307
+
308
+ # Check if current directory contains .git
309
+ if ".git" in dirs:
310
+ # Calculate relative path
311
+ rel_root = os.path.relpath(root, start_str)
312
+ if rel_root == ".":
313
+ git_path = ".git"
314
+ else:
315
+ git_path = rel_root + "/.git"
316
+
317
+ git_dirs.append(Path(git_path))
318
+
319
+ # Remove .git from further traversal (we don't need to go inside it)
320
+ dirs.remove(".git")
321
+
322
+ return git_dirs
323
+
324
+
325
+ async def git_ls(git_cwd: Path) -> list[Path]:
326
+ cmd = r"git ls-files"
327
+ proc = await asyncio.create_subprocess_shell(cmd, cwd=git_cwd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
328
+ stdout, stderr = await proc.communicate()
329
+ if proc.returncode != 0:
330
+ stderr_text = stderr.decode().strip()
331
+ raise Exception(f"Command error with status {proc.returncode}: {stderr_text}")
332
+
333
+ paths = stdout.decode().strip().splitlines()
334
+ paths = [Path(path) for path in paths if path]
335
+ return paths
336
+
337
+
338
+ def build_tree_from_paths(root: dict, dir_cache: dict, paths: list[Path], max_depth: int):
339
+ paths = [path for path in paths if len(path.parts) <= max_depth]
340
+
341
+ for path in paths:
342
+ parts = path.parts
343
+ current_path = ""
344
+ current = root
345
+ n = len(parts)
346
+
347
+ for i, part in enumerate(parts):
348
+ if i == n - 1:
349
+ current["children"].append({"name": part, "type": "file"})
350
+ else:
351
+ current_path = str(Path(current_path) / part) if current_path else part
352
+
353
+ if current_path not in dir_cache:
354
+ new_dir = {"name": part, "type": "directory", "children": []}
355
+ current["children"].append(new_dir)
356
+ dir_cache[current_path] = new_dir
357
+
358
+ current = dir_cache[current_path]
359
+
360
+
361
+ def scan_path(ignore_paths: list[Path], cwd: Path) -> list[Path]:
362
+ # ignore_paths relative to cwd
363
+ ignore_absolute = {(cwd / ignore_path).resolve() for ignore_path in ignore_paths}
364
+ files: list[Path] = []
365
+ for root, dirs, filenames in os.walk(cwd):
366
+ root_path = Path(root)
367
+
368
+ # Remove hidden directories from dirs list (modifies os.walk behavior)
369
+ dirs[:] = [d for d in dirs if not d.startswith(".")]
370
+
371
+ # Remove ignored directories from dirs list
372
+ dirs[:] = [d for d in dirs if (root_path / d).resolve() not in ignore_absolute]
373
+
374
+ # Add non-hidden files
375
+ for filename in filenames:
376
+ if not filename.startswith("."):
377
+ file_path = root_path / filename
378
+ # Return path relative to cwd
379
+ files.append(file_path.relative_to(cwd))
380
+
381
+ return files
382
+
383
+
384
+ async def scan_path_async(ignore_paths: list[Path], cwd: Path) -> list[Path]:
385
+ # try ripgrep first, then find
280
386
  try:
281
- # Build the directory tree structure
282
- tree = await build_directory_tree(full_path)
387
+ rgignore = " ".join(f"--glob '!{path}/**'" for path in ignore_paths)
388
+ rgcmd = rf"rg --files {rgignore} ."
389
+
390
+ proc = await asyncio.create_subprocess_shell(
391
+ rgcmd,
392
+ cwd=cwd,
393
+ stdout=asyncio.subprocess.PIPE,
394
+ stderr=asyncio.subprocess.PIPE,
395
+ )
283
396
 
284
- # Convert to JSON with pretty printing
285
- json_tree = json.dumps(tree, indent=2)
397
+ stdout, stderr = await proc.communicate()
286
398
 
287
- return [TextContent(type="text", text=json_tree)]
288
- except Exception as e:
289
- raise ValueError(f"Error building directory tree: {str(e)}")
399
+ if proc.returncode in [0, 1]:
400
+ paths = []
401
+ for line in stdout.decode().strip().splitlines():
402
+ if line:
403
+ paths.append(Path(line))
404
+ return paths
405
+ except Exception:
406
+ pass
407
+
408
+ ignore_paths += [Path("backend")]
409
+
410
+ findignore = " ".join(f"-path './{path}' -prune -o" for path in ignore_paths)
411
+ findcmd = f"find . {findignore} -type f ! -path '*/.*/*' ! -name '.*' -print"
412
+
413
+ proc = await asyncio.create_subprocess_shell(
414
+ findcmd,
415
+ cwd=cwd,
416
+ stdout=subprocess.PIPE,
417
+ stderr=subprocess.PIPE,
418
+ )
419
+ stdout, stderr = await proc.communicate()
420
+
421
+ if proc.returncode not in [0, 1]: # 0 = success, 1 = some files not found (normal)
422
+ stderr_text = stderr.decode().strip()
423
+ if stderr_text: # If there's stderr content, it's likely a real error
424
+ raise Exception(f"Find command error: {stderr_text}")
425
+
426
+ paths = []
427
+ for line in stdout.decode().strip().splitlines():
428
+ if line:
429
+ if line.startswith("./"):
430
+ line = line[2:]
431
+ if line:
432
+ paths.append(Path(line))
433
+
434
+ return paths
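
The rewritten directory_tree handler above first gathers relative file paths (via git ls-files, ripgrep, find, or an os.walk fallback) and then folds them into a nested JSON structure with build_tree_from_paths(), capped at max_depth. A self-contained sketch of that folding step, for illustration only (build_tree is a simplified stand-in, not the package function):

```python
import json
from pathlib import Path

def build_tree(paths: list[Path], root_name: str = ".") -> dict:
    """Fold relative file paths into a {name, type, children} tree."""
    root = {"name": root_name, "type": "directory", "children": []}
    dirs: dict[tuple, dict] = {(): root}
    for path in sorted(paths):
        parts = path.parts
        for i, part in enumerate(parts):
            if i == len(parts) - 1:
                dirs[parts[:i]]["children"].append({"name": part, "type": "file"})
            elif parts[: i + 1] not in dirs:
                node = {"name": part, "type": "directory", "children": []}
                dirs[parts[:i]]["children"].append(node)
                dirs[parts[: i + 1]] = node
    return root

print(json.dumps(build_tree([Path("src/main.py"), Path("src/utils/io.py"), Path("README.md")]), indent=2))
```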
@@ -112,6 +112,9 @@ def get_system_details() -> Dict[str, Any]:
  """Gather detailed system information."""

  is_mac = platform.system() == "Darwin"
+ disk_total = psutil.disk_usage('/').total
+ disk_free = psutil.disk_usage('/').free
+ disk_used_percentage = (disk_total - disk_free) / disk_total * 100

  # System and OS Information
  system_info = {
@@ -142,7 +145,7 @@ def get_system_details() -> Dict[str, Any]:
  "disk": {
  "total": get_size(psutil.disk_usage('/').total),
  "free": get_size(psutil.disk_usage('/').free),
- "used_percentage": f"{psutil.disk_usage('/').percent}%"
+ "used_percentage": f"{disk_used_percentage}%"
  }
  }
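
The system_tools.py change above reports disk usage as (total - free) / total rather than psutil's percent field. The two figures can legitimately differ: on Unix, psutil (like df) computes percent as used / (used + available-to-user), and many filesystems reserve a few percent of blocks for root that are excluded from that denominator. A quick way to compare both readings (illustration only):

```python
import psutil

du = psutil.disk_usage("/")
print(f"psutil percent:        {du.percent:.1f}%")
print(f"(total - free)/total:  {(du.total - du.free) / du.total * 100:.1f}%")
```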