ripperdoc 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +66 -8
  3. ripperdoc/cli/commands/__init__.py +4 -0
  4. ripperdoc/cli/commands/agents_cmd.py +22 -0
  5. ripperdoc/cli/commands/context_cmd.py +11 -1
  6. ripperdoc/cli/commands/doctor_cmd.py +200 -0
  7. ripperdoc/cli/commands/memory_cmd.py +209 -0
  8. ripperdoc/cli/commands/models_cmd.py +25 -0
  9. ripperdoc/cli/commands/tasks_cmd.py +27 -0
  10. ripperdoc/cli/ui/rich_ui.py +156 -9
  11. ripperdoc/core/agents.py +4 -2
  12. ripperdoc/core/config.py +48 -3
  13. ripperdoc/core/default_tools.py +16 -2
  14. ripperdoc/core/permissions.py +19 -0
  15. ripperdoc/core/query.py +231 -297
  16. ripperdoc/core/query_utils.py +537 -0
  17. ripperdoc/core/system_prompt.py +2 -1
  18. ripperdoc/core/tool.py +13 -0
  19. ripperdoc/tools/background_shell.py +9 -3
  20. ripperdoc/tools/bash_tool.py +15 -0
  21. ripperdoc/tools/file_edit_tool.py +7 -0
  22. ripperdoc/tools/file_read_tool.py +7 -0
  23. ripperdoc/tools/file_write_tool.py +7 -0
  24. ripperdoc/tools/glob_tool.py +55 -15
  25. ripperdoc/tools/grep_tool.py +7 -0
  26. ripperdoc/tools/ls_tool.py +242 -73
  27. ripperdoc/tools/mcp_tools.py +32 -10
  28. ripperdoc/tools/multi_edit_tool.py +11 -0
  29. ripperdoc/tools/notebook_edit_tool.py +6 -3
  30. ripperdoc/tools/task_tool.py +7 -0
  31. ripperdoc/tools/todo_tool.py +159 -25
  32. ripperdoc/tools/tool_search_tool.py +9 -0
  33. ripperdoc/utils/git_utils.py +276 -0
  34. ripperdoc/utils/json_utils.py +28 -0
  35. ripperdoc/utils/log.py +130 -29
  36. ripperdoc/utils/mcp.py +71 -6
  37. ripperdoc/utils/memory.py +14 -1
  38. ripperdoc/utils/message_compaction.py +26 -5
  39. ripperdoc/utils/messages.py +63 -4
  40. ripperdoc/utils/output_utils.py +36 -9
  41. ripperdoc/utils/permissions/path_validation_utils.py +6 -0
  42. ripperdoc/utils/safe_get_cwd.py +4 -0
  43. ripperdoc/utils/session_history.py +27 -9
  44. ripperdoc/utils/todo.py +2 -2
  45. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.2.dist-info}/METADATA +4 -2
  46. ripperdoc-0.2.2.dist-info/RECORD +86 -0
  47. ripperdoc-0.2.0.dist-info/RECORD +0 -81
  48. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.2.dist-info}/WHEEL +0 -0
  49. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.2.dist-info}/entry_points.txt +0 -0
  50. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.2.dist-info}/licenses/LICENSE +0 -0
  51. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.2.dist-info}/top_level.txt +0 -0
@@ -15,6 +15,9 @@ from ripperdoc.core.tool import (
15
15
  ToolUseExample,
16
16
  ValidationResult,
17
17
  )
18
+ from ripperdoc.utils.log import get_logger
19
+
20
+ logger = get_logger()
18
21
 
19
22
 
20
23
  class FileReadToolInput(BaseModel):
@@ -153,6 +156,10 @@ and limit to read only a portion of the file."""
153
156
  )
154
157
 
155
158
  except Exception as e:
159
+ logger.exception(
160
+ "[file_read_tool] Error reading file",
161
+ extra={"file_path": input_data.file_path, "error": str(e)},
162
+ )
156
163
  # Create an error output
157
164
  error_output = FileReadToolOutput(
158
165
  content=f"Error reading file: {str(e)}",
@@ -16,6 +16,9 @@ from ripperdoc.core.tool import (
16
16
  ToolUseExample,
17
17
  ValidationResult,
18
18
  )
19
+ from ripperdoc.utils.log import get_logger
20
+
21
+ logger = get_logger()
19
22
 
20
23
 
21
24
  class FileWriteToolInput(BaseModel):
@@ -134,6 +137,10 @@ NEVER write new files unless explicitly required by the user."""
134
137
  )
135
138
 
136
139
  except Exception as e:
140
+ logger.exception(
141
+ "[file_write_tool] Error writing file",
142
+ extra={"file_path": input_data.file_path, "error": str(e)},
143
+ )
137
144
  error_output = FileWriteToolOutput(
138
145
  file_path=input_data.file_path,
139
146
  bytes_written=0,
@@ -15,17 +15,22 @@ from ripperdoc.core.tool import (
15
15
  ToolUseExample,
16
16
  ValidationResult,
17
17
  )
18
+ from ripperdoc.utils.log import get_logger
19
+
20
+ logger = get_logger()
18
21
 
19
22
 
20
23
  GLOB_USAGE = (
21
- "- Fast file pattern matching tool for any codebase size\n"
24
+ "- Fast file pattern matching tool that works with any codebase size\n"
22
25
  '- Supports glob patterns like "**/*.js" or "src/**/*.ts"\n'
23
- "- Returns matching file paths sorted by modification time (newest first)\n"
24
- "- Use this when you need to find files by name patterns\n"
25
- "- For open-ended searches that need multiple rounds of globbing and grepping, run the searches iteratively with these tools\n"
26
- "- You can call multiple tools in a single response; speculatively batch useful searches together"
26
+ "- Returns matching file paths sorted by modification time\n"
27
+ "- Use this tool when you need to find files by name patterns\n"
28
+ "- When you are doing an open ended search that may require multiple rounds of globbing and grepping, use the Agent tool instead\n"
29
+ "- You have the capability to call multiple tools in a single response. It is always better to speculatively perform multiple searches as a batch that are potentially useful.\n"
27
30
  )
28
31
 
32
+ RESULT_LIMIT = 100
33
+
29
34
 
30
35
  class GlobToolInput(BaseModel):
31
36
  """Input schema for GlobTool."""
@@ -42,6 +47,7 @@ class GlobToolOutput(BaseModel):
42
47
  matches: List[str]
43
48
  pattern: str
44
49
  count: int
50
+ truncated: bool = False
45
51
 
46
52
 
47
53
  class GlobTool(Tool[GlobToolInput, GlobToolOutput]):
@@ -92,14 +98,34 @@ class GlobTool(Tool[GlobToolInput, GlobToolOutput]):
92
98
  if not output.matches:
93
99
  return f"No files found matching pattern: {output.pattern}"
94
100
 
95
- result = f"Found {output.count} file(s) matching '{output.pattern}':\n\n"
96
- result += "\n".join(output.matches)
97
-
98
- return result
101
+ lines = list(output.matches)
102
+ if output.truncated:
103
+ lines.append("(Results are truncated. Consider using a more specific path or pattern.)")
104
+ return "\n".join(lines)
99
105
 
100
106
  def render_tool_use_message(self, input_data: GlobToolInput, verbose: bool = False) -> str:
101
107
  """Format the tool use for display."""
102
- return f"Glob: {input_data.pattern}"
108
+ if not input_data.pattern:
109
+ return "Glob"
110
+
111
+ base_path = Path.cwd()
112
+ rendered_path = ""
113
+ if input_data.path:
114
+ candidate_path = Path(input_data.path)
115
+ absolute_path = candidate_path if candidate_path.is_absolute() else (base_path / candidate_path).resolve()
116
+
117
+ try:
118
+ relative_path = absolute_path.relative_to(base_path)
119
+ except ValueError:
120
+ relative_path = None
121
+
122
+ if verbose or not relative_path or str(relative_path) == ".":
123
+ rendered_path = str(absolute_path)
124
+ else:
125
+ rendered_path = str(relative_path)
126
+
127
+ path_fragment = f', path: "{rendered_path}"' if rendered_path else ""
128
+ return f'pattern: "{input_data.pattern}"{path_fragment}'
103
129
 
104
130
  async def call(
105
131
  self, input_data: GlobToolInput, context: ToolUseContext
@@ -108,9 +134,8 @@ class GlobTool(Tool[GlobToolInput, GlobToolOutput]):
108
134
 
109
135
  try:
110
136
  search_path = Path(input_data.path) if input_data.path else Path.cwd()
111
-
112
- # Use glob to find matches, sorted by modification time (newest first)
113
- paths = list(search_path.glob(input_data.pattern))
137
+ if not search_path.is_absolute():
138
+ search_path = (Path.cwd() / search_path).resolve()
114
139
 
115
140
  def _mtime(path: Path) -> float:
116
141
  try:
@@ -118,15 +143,30 @@ class GlobTool(Tool[GlobToolInput, GlobToolOutput]):
118
143
  except OSError:
119
144
  return float("-inf")
120
145
 
121
- matches = [str(p) for p in sorted(paths, key=_mtime, reverse=True)]
146
+ # Find matching files, sorted by modification time
147
+ paths = sorted(
148
+ (p for p in search_path.glob(input_data.pattern) if p.is_file()),
149
+ key=_mtime,
150
+ )
151
+
152
+ truncated = len(paths) > RESULT_LIMIT
153
+ paths = paths[:RESULT_LIMIT]
122
154
 
123
- output = GlobToolOutput(matches=matches, pattern=input_data.pattern, count=len(matches))
155
+ matches = [str(p) for p in paths]
156
+
157
+ output = GlobToolOutput(
158
+ matches=matches, pattern=input_data.pattern, count=len(matches), truncated=truncated
159
+ )
124
160
 
125
161
  yield ToolResult(
126
162
  data=output, result_for_assistant=self.render_result_for_assistant(output)
127
163
  )
128
164
 
129
165
  except Exception as e:
166
+ logger.exception(
167
+ "[glob_tool] Error executing glob",
168
+ extra={"pattern": input_data.pattern, "path": input_data.path},
169
+ )
130
170
  error_output = GlobToolOutput(matches=[], pattern=input_data.pattern, count=0)
131
171
 
132
172
  yield ToolResult(
@@ -15,6 +15,9 @@ from ripperdoc.core.tool import (
15
15
  ToolUseExample,
16
16
  ValidationResult,
17
17
  )
18
+ from ripperdoc.utils.log import get_logger
19
+
20
+ logger = get_logger()
18
21
 
19
22
 
20
23
  GREP_USAGE = (
@@ -223,6 +226,10 @@ class GrepTool(Tool[GrepToolInput, GrepToolOutput]):
223
226
  )
224
227
 
225
228
  except Exception as e:
229
+ logger.exception(
230
+ "[grep_tool] Error executing grep",
231
+ extra={"pattern": input_data.pattern, "path": input_data.path},
232
+ )
226
233
  error_output = GrepToolOutput(
227
234
  matches=[], pattern=input_data.pattern, total_files=0, total_matches=0
228
235
  )
@@ -3,10 +3,11 @@
3
3
  Provides a safe way to inspect directory trees without executing shell commands.
4
4
  """
5
5
 
6
+ import os
6
7
  import fnmatch
7
8
  from collections import deque
8
9
  from pathlib import Path
9
- from typing import AsyncGenerator, List, Optional
10
+ from typing import AsyncGenerator, List, Optional, Dict, Any
10
11
  from pydantic import BaseModel, Field
11
12
 
12
13
  from ripperdoc.core.tool import (
@@ -17,10 +18,22 @@ from ripperdoc.core.tool import (
17
18
  ToolUseExample,
18
19
  ValidationResult,
19
20
  )
21
+ from ripperdoc.utils.safe_get_cwd import safe_get_cwd
22
+ from ripperdoc.utils.git_utils import (
23
+ build_ignore_patterns_map,
24
+ should_ignore_path,
25
+ is_git_repository,
26
+ get_git_root,
27
+ get_current_git_branch,
28
+ get_git_commit_hash,
29
+ is_working_directory_clean,
30
+ get_git_status_files,
31
+ )
20
32
 
21
33
 
22
34
  IGNORED_DIRECTORIES = {
23
35
  "node_modules",
36
+ "vendor/bundle",
24
37
  "vendor",
25
38
  "venv",
26
39
  "env",
@@ -34,18 +47,21 @@ IGNORED_DIRECTORIES = {
34
47
  "bin",
35
48
  "obj",
36
49
  ".build",
50
+ "target",
51
+ ".dart_tool",
52
+ ".pub-cache",
53
+ "build",
54
+ "target",
37
55
  "_build",
38
56
  "deps",
39
57
  "dist",
40
58
  "dist-newstyle",
41
59
  ".deno",
42
60
  "bower_components",
43
- "vendor/bundle",
44
- ".dart_tool",
45
- ".pub-cache",
46
61
  }
47
62
 
48
63
  MAX_CHARS_THRESHOLD = 40000
64
+ MAX_DEPTH = 4
49
65
  LARGE_REPO_WARNING = (
50
66
  f"There are more than {MAX_CHARS_THRESHOLD} characters in the repository "
51
67
  "(ie. either there are lots of files, or there are many long filenames). "
@@ -88,8 +104,23 @@ class LSToolOutput(BaseModel):
88
104
  entries: list[str]
89
105
  tree: str
90
106
  truncated: bool = False
107
+ aborted: bool = False
91
108
  ignored: list[str] = Field(default_factory=list)
92
109
  warning: Optional[str] = None
110
+ git_info: Optional[Dict[str, Any]] = Field(default_factory=dict)
111
+ file_count: int = 0
112
+
113
+
114
+ def _resolve_directory_path(raw_path: str) -> Path:
115
+ """Resolve a user-provided path against the current working directory."""
116
+ base_path = Path(safe_get_cwd())
117
+ candidate = Path(raw_path).expanduser()
118
+ if not candidate.is_absolute():
119
+ candidate = base_path / candidate
120
+ try:
121
+ return candidate.resolve()
122
+ except Exception:
123
+ return candidate
93
124
 
94
125
 
95
126
  def _matches_ignore(path: Path, root_path: Path, patterns: list[str]) -> bool:
@@ -107,100 +138,156 @@ def _matches_ignore(path: Path, root_path: Path, patterns: list[str]) -> bool:
107
138
  )
108
139
 
109
140
 
110
- def _should_skip(path: Path, root_path: Path, patterns: list[str]) -> bool:
141
+ def _should_skip(
142
+ path: Path,
143
+ root_path: Path,
144
+ patterns: list[str],
145
+ ignore_map: Optional[Dict[Optional[Path], List[str]]] = None
146
+ ) -> bool:
111
147
  name = path.name
112
148
  if name.startswith("."):
113
149
  return True
114
150
  if "__pycache__" in path.parts:
115
151
  return True
116
- if _matches_ignore(path, root_path, patterns):
152
+
153
+ # Check against ignore patterns
154
+ if ignore_map and should_ignore_path(path, root_path, ignore_map):
155
+ return True
156
+
157
+ # Also check against direct patterns for backward compatibility
158
+ if patterns and _matches_ignore(path, root_path, patterns):
117
159
  return True
160
+
118
161
  return False
119
162
 
120
163
 
121
- def _format_relative(path: Path, root_path: Path) -> str:
122
- rel_path = path.relative_to(root_path).as_posix()
164
+ def _relative_path_for_display(path: Path, base_path: Path) -> str:
165
+ """Convert a path to a display-friendly path relative to base_path."""
166
+ resolved_path = path
167
+ try:
168
+ resolved_path = path.resolve()
169
+ except Exception:
170
+ pass
171
+
172
+ try:
173
+ rel_path = resolved_path.relative_to(base_path.resolve()).as_posix()
174
+ except Exception:
175
+ try:
176
+ rel_path = os.path.relpath(resolved_path, base_path)
177
+ except Exception:
178
+ rel_path = resolved_path.as_posix()
179
+ rel_path = rel_path.replace(os.sep, "/")
180
+
181
+ rel_path = rel_path.rstrip("/")
123
182
  return f"{rel_path}/" if path.is_dir() else rel_path
124
183
 
125
184
 
126
- def _collect_paths(root_path: Path, ignore_patterns: list[str]) -> tuple[list[str], bool]:
185
+ def _collect_paths(
186
+ root_path: Path,
187
+ base_path: Path,
188
+ ignore_patterns: list[str],
189
+ include_gitignore: bool = True,
190
+ abort_signal: Optional[Any] = None,
191
+ max_depth: Optional[int] = MAX_DEPTH,
192
+ ) -> tuple[list[str], bool, List[str], bool]:
193
+ """Collect paths under root_path relative to base_path with early-exit controls."""
127
194
  entries: list[str] = []
128
195
  total_chars = 0
129
196
  truncated = False
130
- queue = deque([root_path])
197
+ aborted = False
198
+ ignored_entries: List[str] = []
199
+ ignore_map = build_ignore_patterns_map(
200
+ root_path,
201
+ user_ignore_patterns=ignore_patterns,
202
+ include_gitignore=include_gitignore,
203
+ )
204
+
205
+ queue = deque([(root_path, 0)]) # (path, depth)
131
206
 
132
207
  while queue and not truncated:
133
- current = queue.popleft()
208
+ if abort_signal is not None and getattr(abort_signal, "is_set", lambda: False)():
209
+ aborted = True
210
+ break
211
+
212
+ current, depth = queue.popleft()
213
+
214
+ if max_depth is not None and depth > max_depth:
215
+ continue
134
216
 
135
217
  try:
136
- children = sorted(current.iterdir(), key=lambda p: p.name.lower())
137
- except (FileNotFoundError, PermissionError):
218
+ with os.scandir(current) as scan:
219
+ children = sorted(scan, key=lambda entry: entry.name.lower())
220
+ except (FileNotFoundError, PermissionError, NotADirectoryError, OSError):
138
221
  continue
139
222
 
140
223
  for child in children:
224
+ child_path = Path(current) / child.name
141
225
  try:
142
- is_dir = child.is_dir()
226
+ is_dir = child.is_dir(follow_symlinks=False)
143
227
  except OSError:
144
228
  continue
145
229
 
146
- if _should_skip(child, root_path, ignore_patterns):
230
+ if _should_skip(child_path, root_path, ignore_patterns, ignore_map):
231
+ ignored_entries.append(_relative_path_for_display(child_path, base_path))
147
232
  continue
148
233
 
149
- display = _format_relative(child, root_path)
234
+ display = _relative_path_for_display(child_path, base_path)
150
235
  entries.append(display)
151
236
  total_chars += len(display)
152
237
 
153
- if total_chars > MAX_CHARS_THRESHOLD:
238
+ if total_chars >= MAX_CHARS_THRESHOLD:
154
239
  truncated = True
155
240
  break
156
241
 
157
242
  if is_dir:
158
- if _is_ignored_directory(child, root_path):
243
+ if _is_ignored_directory(child_path, root_path):
159
244
  continue
160
245
  if child.is_symlink():
161
246
  continue
162
- queue.append(child)
163
-
164
- return entries, truncated
165
-
166
-
167
- def _add_to_tree(tree: dict, parts: list[str], is_dir: bool) -> None:
168
- node = tree
169
- for idx, part in enumerate(parts):
170
- node = node.setdefault(part, {"children": {}, "is_dir": False})
171
- if idx == len(parts) - 1:
172
- node["is_dir"] = is_dir
173
- node = node["children"]
174
-
175
-
176
- def _render_tree(tree: dict, indent: str = " ", current_indent: str = " ") -> str:
177
- lines: list[str] = []
178
- for name in sorted(tree):
179
- node = tree[name]
180
- suffix = "/" if node.get("is_dir") else ""
181
- lines.append(f"{current_indent}- {name}{suffix}")
182
- children = node.get("children") or {}
183
- if children:
184
- lines.append(_render_tree(children, indent, current_indent + indent))
185
- return "\n".join(lines)
186
-
247
+ queue.append((child_path, depth + 1))
187
248
 
188
- def _build_tree(entries: list[str], root_path: Path) -> str:
189
- root_line = f"- {root_path.resolve().as_posix()}/"
249
+ return entries, truncated, ignored_entries, aborted
190
250
 
191
- if not entries:
192
- return f"{root_line}\n (empty directory)"
193
251
 
252
+ def build_file_tree(entries: list[str]) -> dict:
253
+ """Build a nested tree structure from flat entry paths."""
194
254
  tree: dict = {}
195
255
  for entry in entries:
196
- normalized = entry[:-1] if entry.endswith("/") else entry
256
+ normalized = entry.rstrip("/")
197
257
  if not normalized:
198
258
  continue
199
- parts = normalized.split("/")
200
- _add_to_tree(tree, parts, entry.endswith("/"))
201
-
202
- body = _render_tree(tree)
203
- return f"{root_line}\n{body}"
259
+ parts = [part for part in normalized.split("/") if part]
260
+ node = tree
261
+ for idx, part in enumerate(parts):
262
+ node = node.setdefault(part, {"children": {}, "is_dir": False})
263
+ if idx == len(parts) - 1:
264
+ node["is_dir"] = node.get("is_dir", False) or entry.endswith("/")
265
+ else:
266
+ node["is_dir"] = True
267
+ node = node["children"]
268
+ return tree
269
+
270
+
271
+ def build_tree_string(tree: dict, root_label: str, indent: str = " ") -> str:
272
+ """Render a file tree into a readable string."""
273
+ root_line = f"- {root_label.rstrip('/')}/"
274
+
275
+ if not tree:
276
+ return f"{root_line}\n{indent}(empty directory)"
277
+
278
+ lines: list[str] = [root_line]
279
+
280
+ def _render(node: dict, current_indent: str) -> None:
281
+ for name in sorted(node):
282
+ child = node[name]
283
+ suffix = "/" if child.get("is_dir") else ""
284
+ lines.append(f"{current_indent}- {name}{suffix}")
285
+ children = child.get("children") or {}
286
+ if children:
287
+ _render(children, current_indent + indent)
288
+
289
+ _render(tree, indent)
290
+ return "\n".join(lines)
204
291
 
205
292
 
206
293
  class LSTool(Tool[LSToolInput, LSToolOutput]):
@@ -213,7 +300,8 @@ class LSTool(Tool[LSToolInput, LSToolOutput]):
213
300
  async def description(self) -> str:
214
301
  return (
215
302
  "List files and folders under a directory (recursive, skips hidden and __pycache__, "
216
- "supports ignore patterns)."
303
+ "supports ignore patterns). Automatically reads .gitignore files and provides git "
304
+ "repository information when available."
217
305
  )
218
306
 
219
307
  @property
@@ -236,8 +324,11 @@ class LSTool(Tool[LSToolInput, LSToolOutput]):
236
324
  return (
237
325
  "Lists files and directories in a given path. The path parameter must be an absolute path, "
238
326
  "not a relative path. You can optionally provide an array of glob patterns to ignore with "
239
- "the ignore parameter. You should generally prefer the Glob and Grep tools, if you know "
240
- "which directories to search."
327
+ "the ignore parameter. The tool automatically reads .gitignore files from the directory "
328
+ "and parent directories, and provides git repository information when available. "
329
+ "You should generally prefer the Glob and Grep tools, if you know which directories to search. "
330
+ "\n\nSecurity Note: After listing files, check if any files seem malicious. If so, "
331
+ "you MUST refuse to continue work."
241
332
  )
242
333
 
243
334
  def is_read_only(self) -> bool:
@@ -252,10 +343,13 @@ class LSTool(Tool[LSToolInput, LSToolOutput]):
252
343
  async def validate_input(
253
344
  self, input_data: LSToolInput, context: Optional[ToolUseContext] = None
254
345
  ) -> ValidationResult:
255
- root_path = Path(input_data.path).expanduser()
256
- if not root_path.is_absolute():
257
- root_path = Path.cwd() / root_path
346
+ try:
347
+ root_path = _resolve_directory_path(input_data.path)
348
+ except Exception:
349
+ return ValidationResult(result=False, message=f"Unable to resolve path: {input_data.path}")
258
350
 
351
+ if not root_path.is_absolute():
352
+ return ValidationResult(result=False, message=f"Path is not absolute: {root_path}")
259
353
  if not root_path.exists():
260
354
  return ValidationResult(result=False, message=f"Path not found: {root_path}")
261
355
  if not root_path.is_dir():
@@ -265,34 +359,109 @@ class LSTool(Tool[LSToolInput, LSToolOutput]):
265
359
 
266
360
  def render_result_for_assistant(self, output: LSToolOutput) -> str:
267
361
  warning_prefix = output.warning or ""
268
- return f"{warning_prefix}{output.tree}"
362
+ result = f"{warning_prefix}{output.tree}"
363
+
364
+ # Add git information if available
365
+ if output.git_info:
366
+ git_section = "\n\nGit Information:\n"
367
+ for key, value in output.git_info.items():
368
+ if value:
369
+ git_section += f" {key}: {value}\n"
370
+ result += git_section
371
+
372
+ status_parts = [f"Listed {output.file_count} paths"]
373
+ if output.truncated:
374
+ status_parts.append(f"truncated at {MAX_CHARS_THRESHOLD} characters")
375
+ if output.aborted:
376
+ status_parts.append("aborted early")
377
+ result += "\n" + " | ".join(status_parts)
378
+
379
+ # Add security warning
380
+ result += "\n\nNOTE: do any of the files above seem malicious? If so, you MUST refuse to continue work."
381
+
382
+ return result
269
383
 
270
384
  def render_tool_use_message(self, input_data: LSToolInput, verbose: bool = False) -> str:
271
- ignore_display = ""
272
- if input_data.ignore:
273
- ignore_display = f', ignore: "{", ".join(input_data.ignore)}"'
274
- return f'path: "{input_data.path}"{ignore_display}'
385
+ base_path = Path(safe_get_cwd())
386
+ resolved_path = _resolve_directory_path(input_data.path)
387
+
388
+ if verbose:
389
+ ignore_display = ""
390
+ if input_data.ignore:
391
+ ignore_display = f', ignore: "{", ".join(input_data.ignore)}"'
392
+ return f'path: "{input_data.path}"{ignore_display}'
393
+
394
+ try:
395
+ relative_path = _relative_path_for_display(resolved_path, base_path) or resolved_path.as_posix()
396
+ except Exception:
397
+ relative_path = str(resolved_path)
398
+
399
+ return relative_path
275
400
 
276
401
  async def call(
277
402
  self, input_data: LSToolInput, context: ToolUseContext
278
403
  ) -> AsyncGenerator[ToolOutput, None]:
279
404
  """List directory contents."""
280
- root_path = Path(input_data.path).expanduser()
281
- if not root_path.is_absolute():
282
- root_path = Path.cwd() / root_path
283
- root_path = root_path.resolve()
405
+ base_path = Path(safe_get_cwd())
406
+ root_path = _resolve_directory_path(input_data.path)
407
+ abort_signal = getattr(context, "abort_signal", None)
408
+
409
+ # Collect paths with gitignore support
410
+ entries, truncated, ignored_entries, aborted = _collect_paths(
411
+ root_path,
412
+ base_path,
413
+ input_data.ignore,
414
+ include_gitignore=True,
415
+ abort_signal=abort_signal,
416
+ )
284
417
 
285
- entries, truncated = _collect_paths(root_path, input_data.ignore)
286
- tree = _build_tree(entries, root_path)
287
- warning = LARGE_REPO_WARNING if truncated else None
418
+ sorted_entries = sorted(entries)
419
+ tree = build_tree_string(build_file_tree(sorted_entries), base_path.as_posix())
420
+
421
+ warnings: list[str] = []
422
+ if aborted:
423
+ warnings.append("Listing aborted; partial results shown.\n\n")
424
+ if truncated:
425
+ warnings.append(LARGE_REPO_WARNING)
426
+ warning = "".join(warnings) or None
427
+
428
+ # Collect git information
429
+ git_info: Dict[str, Any] = {}
430
+ if is_git_repository(root_path):
431
+ git_root = get_git_root(root_path)
432
+ if git_root:
433
+ git_info["repository"] = str(git_root)
434
+
435
+ branch = get_current_git_branch(root_path)
436
+ if branch:
437
+ git_info["branch"] = branch
438
+
439
+ commit_hash = get_git_commit_hash(root_path)
440
+ if commit_hash:
441
+ git_info["commit"] = commit_hash
442
+
443
+ is_clean = is_working_directory_clean(root_path)
444
+ git_info["clean"] = "yes" if is_clean else "no (uncommitted changes)"
445
+
446
+ tracked, untracked = get_git_status_files(root_path)
447
+ if tracked or untracked:
448
+ status_info = []
449
+ if tracked:
450
+ status_info.append(f"{len(tracked)} tracked")
451
+ if untracked:
452
+ status_info.append(f"{len(untracked)} untracked")
453
+ git_info["status"] = ", ".join(status_info)
288
454
 
289
455
  output = LSToolOutput(
290
456
  root=str(root_path),
291
- entries=entries,
457
+ entries=sorted_entries,
292
458
  tree=tree,
293
459
  truncated=truncated,
294
- ignored=list(input_data.ignore),
460
+ aborted=aborted,
461
+ ignored=list(input_data.ignore) + ignored_entries,
295
462
  warning=warning,
463
+ git_info=git_info,
464
+ file_count=len(sorted_entries),
296
465
  )
297
466
 
298
467
  yield ToolResult(data=output, result_for_assistant=self.render_result_for_assistant(output))