onetool-mcp 1.0.0b1 (py3-none-any.whl)

This diff shows the contents of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package exactly as it appears in the public registry.
Files changed (132)
  1. bench/__init__.py +5 -0
  2. bench/cli.py +69 -0
  3. bench/harness/__init__.py +66 -0
  4. bench/harness/client.py +692 -0
  5. bench/harness/config.py +397 -0
  6. bench/harness/csv_writer.py +109 -0
  7. bench/harness/evaluate.py +512 -0
  8. bench/harness/metrics.py +283 -0
  9. bench/harness/runner.py +899 -0
  10. bench/py.typed +0 -0
  11. bench/reporter.py +629 -0
  12. bench/run.py +487 -0
  13. bench/secrets.py +101 -0
  14. bench/utils.py +16 -0
  15. onetool/__init__.py +4 -0
  16. onetool/cli.py +391 -0
  17. onetool/py.typed +0 -0
  18. onetool_mcp-1.0.0b1.dist-info/METADATA +163 -0
  19. onetool_mcp-1.0.0b1.dist-info/RECORD +132 -0
  20. onetool_mcp-1.0.0b1.dist-info/WHEEL +4 -0
  21. onetool_mcp-1.0.0b1.dist-info/entry_points.txt +3 -0
  22. onetool_mcp-1.0.0b1.dist-info/licenses/LICENSE.txt +687 -0
  23. onetool_mcp-1.0.0b1.dist-info/licenses/NOTICE.txt +64 -0
  24. ot/__init__.py +37 -0
  25. ot/__main__.py +6 -0
  26. ot/_cli.py +107 -0
  27. ot/_tui.py +53 -0
  28. ot/config/__init__.py +46 -0
  29. ot/config/defaults/bench.yaml +4 -0
  30. ot/config/defaults/diagram-templates/api-flow.mmd +33 -0
  31. ot/config/defaults/diagram-templates/c4-context.puml +30 -0
  32. ot/config/defaults/diagram-templates/class-diagram.mmd +87 -0
  33. ot/config/defaults/diagram-templates/feature-mindmap.mmd +70 -0
  34. ot/config/defaults/diagram-templates/microservices.d2 +81 -0
  35. ot/config/defaults/diagram-templates/project-gantt.mmd +37 -0
  36. ot/config/defaults/diagram-templates/state-machine.mmd +42 -0
  37. ot/config/defaults/onetool.yaml +25 -0
  38. ot/config/defaults/prompts.yaml +97 -0
  39. ot/config/defaults/servers.yaml +7 -0
  40. ot/config/defaults/snippets.yaml +4 -0
  41. ot/config/defaults/tool_templates/__init__.py +7 -0
  42. ot/config/defaults/tool_templates/extension.py +52 -0
  43. ot/config/defaults/tool_templates/isolated.py +61 -0
  44. ot/config/dynamic.py +121 -0
  45. ot/config/global_templates/__init__.py +2 -0
  46. ot/config/global_templates/bench-secrets-template.yaml +6 -0
  47. ot/config/global_templates/bench.yaml +9 -0
  48. ot/config/global_templates/onetool.yaml +27 -0
  49. ot/config/global_templates/secrets-template.yaml +44 -0
  50. ot/config/global_templates/servers.yaml +18 -0
  51. ot/config/global_templates/snippets.yaml +235 -0
  52. ot/config/loader.py +1087 -0
  53. ot/config/mcp.py +145 -0
  54. ot/config/secrets.py +190 -0
  55. ot/config/tool_config.py +125 -0
  56. ot/decorators.py +116 -0
  57. ot/executor/__init__.py +35 -0
  58. ot/executor/base.py +16 -0
  59. ot/executor/fence_processor.py +83 -0
  60. ot/executor/linter.py +142 -0
  61. ot/executor/pack_proxy.py +260 -0
  62. ot/executor/param_resolver.py +140 -0
  63. ot/executor/pep723.py +288 -0
  64. ot/executor/result_store.py +369 -0
  65. ot/executor/runner.py +496 -0
  66. ot/executor/simple.py +163 -0
  67. ot/executor/tool_loader.py +396 -0
  68. ot/executor/validator.py +398 -0
  69. ot/executor/worker_pool.py +388 -0
  70. ot/executor/worker_proxy.py +189 -0
  71. ot/http_client.py +145 -0
  72. ot/logging/__init__.py +37 -0
  73. ot/logging/config.py +315 -0
  74. ot/logging/entry.py +213 -0
  75. ot/logging/format.py +188 -0
  76. ot/logging/span.py +349 -0
  77. ot/meta.py +1555 -0
  78. ot/paths.py +453 -0
  79. ot/prompts.py +218 -0
  80. ot/proxy/__init__.py +21 -0
  81. ot/proxy/manager.py +396 -0
  82. ot/py.typed +0 -0
  83. ot/registry/__init__.py +189 -0
  84. ot/registry/models.py +57 -0
  85. ot/registry/parser.py +269 -0
  86. ot/registry/registry.py +413 -0
  87. ot/server.py +315 -0
  88. ot/shortcuts/__init__.py +15 -0
  89. ot/shortcuts/aliases.py +87 -0
  90. ot/shortcuts/snippets.py +258 -0
  91. ot/stats/__init__.py +35 -0
  92. ot/stats/html.py +250 -0
  93. ot/stats/jsonl_writer.py +283 -0
  94. ot/stats/reader.py +354 -0
  95. ot/stats/timing.py +57 -0
  96. ot/support.py +63 -0
  97. ot/tools.py +114 -0
  98. ot/utils/__init__.py +81 -0
  99. ot/utils/batch.py +161 -0
  100. ot/utils/cache.py +120 -0
  101. ot/utils/deps.py +403 -0
  102. ot/utils/exceptions.py +23 -0
  103. ot/utils/factory.py +179 -0
  104. ot/utils/format.py +65 -0
  105. ot/utils/http.py +202 -0
  106. ot/utils/platform.py +45 -0
  107. ot/utils/sanitize.py +130 -0
  108. ot/utils/truncate.py +69 -0
  109. ot_tools/__init__.py +4 -0
  110. ot_tools/_convert/__init__.py +12 -0
  111. ot_tools/_convert/excel.py +279 -0
  112. ot_tools/_convert/pdf.py +254 -0
  113. ot_tools/_convert/powerpoint.py +268 -0
  114. ot_tools/_convert/utils.py +358 -0
  115. ot_tools/_convert/word.py +283 -0
  116. ot_tools/brave_search.py +604 -0
  117. ot_tools/code_search.py +736 -0
  118. ot_tools/context7.py +495 -0
  119. ot_tools/convert.py +614 -0
  120. ot_tools/db.py +415 -0
  121. ot_tools/diagram.py +1604 -0
  122. ot_tools/diagram.yaml +167 -0
  123. ot_tools/excel.py +1372 -0
  124. ot_tools/file.py +1348 -0
  125. ot_tools/firecrawl.py +732 -0
  126. ot_tools/grounding_search.py +646 -0
  127. ot_tools/package.py +604 -0
  128. ot_tools/py.typed +0 -0
  129. ot_tools/ripgrep.py +544 -0
  130. ot_tools/scaffold.py +471 -0
  131. ot_tools/transform.py +213 -0
  132. ot_tools/web_fetch.py +384 -0
ot_tools/file.py ADDED
@@ -0,0 +1,1348 @@
"""Secure file operations for OneTool.

Provides file reading, writing, editing, and management with configurable
security boundaries. All paths are validated against allowed directories.

Configuration via onetool.yaml:
    tools:
      file:
        allowed_dirs: ["."]         # Allowed directories (empty = cwd only)
        exclude_patterns: [".git"]  # Patterns to exclude
        max_file_size: 10000000     # Max file size (10MB)
        backup_on_write: true       # Create .bak before writes

Attribution: Inspired by fast-filesystem-mcp (Apache 2.0)
https://github.com/efforthye/fast-filesystem-mcp
"""

from __future__ import annotations

# Note: This module defines a function named `list` which shadows the builtin.
# Use typing.List for type annotations to avoid mypy confusion.
from typing import List  # noqa: UP035

pack = "file"

__all__ = [
    "copy",
    "delete",
    "edit",
    "info",
    "list",
    "move",
    "read",
    "search",
    "tree",
    "write",
]

import fnmatch
import os
import shutil
import stat
import tempfile
from datetime import UTC, datetime
from pathlib import Path
from typing import Any

from pydantic import BaseModel, Field

from ot.config import get_tool_config
from ot.logging import LogSpan
from ot.paths import resolve_cwd_path

class Config(BaseModel):
    """Pack configuration - discovered by registry."""

    allowed_dirs: list[str] = Field(
        default_factory=list,
        description="Allowed directories for file operations (empty = cwd only)",
    )
    exclude_patterns: list[str] = Field(
        default_factory=lambda: [".git", "node_modules", "__pycache__", ".venv", "venv"],
        description="Path patterns to exclude from operations",
    )
    max_file_size: int = Field(
        default=10000000,
        ge=1000,
        le=100000000,
        description="Maximum file size in bytes (default 10MB)",
    )
    max_list_entries: int = Field(
        default=1000,
        ge=10,
        le=10000,
        description="Maximum entries to return in list/tree operations",
    )
    backup_on_write: bool = Field(
        default=True,
        description="Create .bak backup before overwriting files",
    )
    use_trash: bool = Field(
        default=True,
        description="Move files to trash instead of permanent deletion",
    )
    relative_paths: bool = Field(
        default=True,
        description="Output relative paths instead of absolute paths",
    )

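# Illustrative sketch (not executed here): the effective defaults declared by
# the Field(...) definitions above, as seen when the pack runs without any
# tools.file overrides in onetool.yaml:
#
#     cfg = Config()
#     cfg.allowed_dirs       # []  -> only the project cwd is allowed
#     cfg.exclude_patterns   # [".git", "node_modules", "__pycache__", ".venv", "venv"]
#     cfg.max_file_size      # 10000000 bytes (10 MB)
#     cfg.backup_on_write    # True
#     cfg.use_trash          # True
#     cfg.relative_paths     # True
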
# Optional send2trash for safe deletion
try:
    import send2trash

    HAS_SEND2TRASH = True
except ImportError:
    HAS_SEND2TRASH = False

# Pre-computed set of text characters for binary detection (P2 fix)
# Includes common control chars (bell, backspace, tab, newline, formfeed, carriage return, escape)
# plus all printable ASCII and extended ASCII
_TEXT_CHARS = frozenset({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)))


# ============================================================================
# Path Validation
# ============================================================================

def _get_file_config() -> Config:
    """Get file tool configuration."""
    return get_tool_config("file", Config)


def _expand_path(path: str) -> Path:
    """Resolve a file path relative to project directory.

    Uses SDK resolve_cwd_path() for consistent path resolution.

    Path resolution follows project conventions:
    - Relative paths: resolved relative to project directory (OT_CWD)
    - Absolute paths: used as-is
    - ~ paths: expanded to home directory
    - Prefixed paths (CWD/, GLOBAL/, OT_DIR/): resolved to respective dirs

    Note: ${VAR} patterns are NOT expanded. Use ~/path instead of ${HOME}/path.

    Args:
        path: Path string (can contain ~ or prefixes)

    Returns:
        Resolved absolute Path (not yet validated for existence)
    """
    return resolve_cwd_path(path)


def _is_excluded(path: Path, exclude_patterns: List[str]) -> bool:  # noqa: UP006
    """Check if path matches any exclude pattern.

    Args:
        path: Resolved path to check
        exclude_patterns: List of fnmatch patterns

    Returns:
        True if path matches any exclude pattern
    """
    path_str = str(path)
    path_parts = path.parts

    for pattern in exclude_patterns:
        # Check full path
        if fnmatch.fnmatch(path_str, f"*{pattern}*"):
            return True
        # Check each part of the path
        for part in path_parts:
            if fnmatch.fnmatch(part, pattern):
                return True

    return False

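# Illustrative sketch of how _is_excluded above treats typical paths with the
# default exclude_patterns (values follow from the fnmatch logic, not from a
# captured run):
#
#     _is_excluded(Path("/repo/.git/config"), [".git"])   # True  (".git" appears in the path)
#     _is_excluded(Path("/repo/src/app.py"), [".git"])    # False
#     _is_excluded(Path("/repo/.gitignore"), [".git"])    # True  (substring of the full path)
#
# Because each pattern is also matched as "*{pattern}*" against the whole path,
# a pattern like ".git" excludes anything whose path merely contains ".git".
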
def _validate_path(
    path: str, *, must_exist: bool = True
) -> tuple[Path | None, str | None]:
    """Validate and resolve a path against security constraints.

    Args:
        path: User-provided path string
        must_exist: If True, path must exist

    Returns:
        Tuple of (user_path, error_message)
        If error, user_path is None
        user_path preserves symlinks for type detection; security is validated
        against the fully resolved path.
    """
    cfg = _get_file_config()

    # Build user path (preserve symlinks for type detection)
    try:
        cwd = resolve_cwd_path(".")
        p = Path(path).expanduser()
        user_path = p if p.is_absolute() else cwd / p
        # Resolve for security validation (follows symlinks)
        real_path = user_path.resolve()
    except (OSError, ValueError) as e:
        return None, f"Invalid path: {e}"

    # Check if path exists when required
    if must_exist and not user_path.exists():
        return None, f"Path not found: {path}"

    # Check against allowed directories
    allowed_dirs: List[Path] = []  # noqa: UP006

    if cfg.allowed_dirs:
        for allowed in cfg.allowed_dirs:
            # Use SDK path resolution for allowed_dirs too
            allowed_dirs.append(resolve_cwd_path(allowed))
    else:
        # Default: only cwd and subdirectories
        allowed_dirs = [cwd]

    # Verify path is under an allowed directory
    is_allowed = False
    for allowed in allowed_dirs:
        try:
            real_path.relative_to(allowed)
            is_allowed = True
            break
        except ValueError:
            continue

    if not is_allowed:
        return None, "Access denied: path outside allowed directories"

    # Check exclude patterns
    if _is_excluded(real_path, cfg.exclude_patterns):
        return None, "Access denied: path matches exclude pattern"

    return user_path, None

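# The allowed-directory check in _validate_path above reduces to
# Path.relative_to(): a resolved path is accepted only if it sits under one of
# the allowed roots. A minimal sketch, assuming the project cwd resolves to
# /work/project:
#
#     Path("/work/project/src/a.py").relative_to("/work/project")   # ok -> allowed
#     Path("/etc/passwd").relative_to("/work/project")              # ValueError -> "Access denied"
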
def _check_file_size(path: Path) -> str | None:
    """Check if file exceeds max size limit.

    Args:
        path: Path to check

    Returns:
        Error message if too large, None if OK
    """
    cfg = _get_file_config()
    try:
        size = path.stat().st_size
        if size > cfg.max_file_size:
            max_mb = cfg.max_file_size / 1_000_000
            size_mb = size / 1_000_000
            return f"File too large: {size_mb:.1f}MB (max: {max_mb:.1f}MB)"
    except OSError as e:
        return f"Cannot check file size: {e}"
    return None


def _is_binary(data: bytes, sample_size: int = 8192) -> bool:
    """Detect if data appears to be binary.

    Args:
        data: Bytes to check
        sample_size: Number of bytes to sample

    Returns:
        True if data appears binary
    """
    sample = data[:sample_size]
    # Check for null bytes (common in binary files)
    if b"\x00" in sample:
        return True
    # Check ratio of non-printable characters using pre-computed set
    non_text = sum(1 for byte in sample if byte not in _TEXT_CHARS)
    return non_text / len(sample) > 0.3 if sample else False

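# A small sketch of the _is_binary heuristic above (expected results follow
# from the null-byte check and the 30% non-text threshold, not a captured run):
#
#     _is_binary(b"hello world\n")             # False - plain ASCII text
#     _is_binary(b"\x89PNG\r\n\x1a\n\x00...")  # True  - contains a null byte
#     _is_binary(bytes(range(256)))            # True  - null byte plus many control bytes
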
def _decode_content(data: bytes, encoding: str) -> str | None:
    """Decode bytes to string with encoding fallback.

    Args:
        data: Raw bytes to decode
        encoding: Primary encoding to try

    Returns:
        Decoded string, or None if decoding failed
    """
    try:
        return data.decode(encoding)
    except UnicodeDecodeError:
        pass

    # Fallback: try charset detection
    try:
        import charset_normalizer

        detected = charset_normalizer.from_bytes(data).best()
        if detected:
            return str(detected)
    except ImportError:
        pass

    return None


def _format_size(size_bytes: int) -> str:
    """Format bytes as human-readable size.

    Args:
        size_bytes: Size in bytes

    Returns:
        Human-readable string like "1.23 MB"
    """
    if size_bytes < 1024:
        return f"{size_bytes} B"
    elif size_bytes < 1024 * 1024:
        return f"{size_bytes / 1024:.2f} KB"
    elif size_bytes < 1024 * 1024 * 1024:
        return f"{size_bytes / (1024 * 1024):.2f} MB"
    else:
        return f"{size_bytes / (1024 * 1024 * 1024):.2f} GB"


def _create_backup(path: Path) -> str | None:
    """Create a backup of a file.

    Args:
        path: File to backup

    Returns:
        Error message if backup failed, None if OK
    """
    if not path.exists():
        return None

    cfg = _get_file_config()
    if not cfg.backup_on_write:
        return None

    backup_path = path.with_suffix(path.suffix + ".bak")
    try:
        shutil.copy2(path, backup_path)
    except OSError as e:
        return f"Backup failed: {e}"

    return None


# ============================================================================
# Read Operations
# ============================================================================

def read(
    *,
    path: str,
    offset: int = 1,
    limit: int | None = None,
    encoding: str = "utf-8",
) -> str:
    """Read file content with optional offset and limit.

    Reads text files line by line. For large files, use offset/limit
    for pagination. Binary files return a warning message.

    Args:
        path: Path to file (relative to cwd or absolute)
        offset: Line number to start from (1-indexed, default: 1)
        limit: Maximum lines to return (default: all remaining)
        encoding: Text encoding (default: utf-8)

    Returns:
        File content with line numbers, or error message

    Example:
        file.read(path="src/main.py")
        file.read(path="src/main.py", offset=100, limit=50)
        file.read(path="config.json", encoding="utf-8")
    """
    with LogSpan(span="file.read", path=path, offset=offset, limit=limit) as s:
        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        if not resolved.is_file():
            s.add(error="not_a_file")
            return f"Error: Not a file: {path}"

        # Check file size
        size_error = _check_file_size(resolved)
        if size_error:
            s.add(error="file_too_large")
            return f"Error: {size_error}"

        try:
            # Read file once in binary mode for both detection and content
            raw_data = resolved.read_bytes()

            # Check for binary content
            if _is_binary(raw_data):
                s.add(error="binary_file")
                return f"Error: Binary file detected ({len(raw_data)} bytes). Use appropriate tools for binary files."

            # Decode content
            content = _decode_content(raw_data, encoding)
            if content is None:
                s.add(error="encoding_error")
                return f"Error: Could not decode file as {encoding}. Try specifying correct encoding."

            # Process lines with pagination
            lines = content.splitlines()
            total_lines = len(lines)

            # Convert 1-indexed offset to 0-indexed start position
            start_idx = max(0, offset - 1)

            if start_idx >= total_lines:
                s.add(resultLen=0, totalLines=total_lines)
                return f"(empty - line {offset} > total lines {total_lines})"

            # Apply pagination
            end_idx = start_idx + limit if limit else total_lines
            output_lines = [
                f"{i + 1:6d}\t{line}"
                for i, line in enumerate(lines)
                if start_idx <= i < end_idx
            ]
            lines_collected = len(output_lines)

            result = "\n".join(output_lines)

            # Add pagination info if truncated
            remaining = total_lines - (start_idx + lines_collected)
            if remaining > 0:
                next_line = start_idx + lines_collected + 1  # 1-indexed
                result += f"\n\n... ({remaining} more lines, use offset={next_line} to continue)"

            s.add(
                resultLen=len(result),
                totalLines=total_lines,
                linesReturned=lines_collected,
            )
            return result

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

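# The pagination arithmetic in read() above, by example. For a 500-line file,
# read(path=..., offset=101, limit=50) computes (values derived from the code,
# not captured output):
#
#     start_idx = 100                  # offset is 1-indexed
#     end_idx   = 150                  # start_idx + limit
#     # returns lines 101-150, each prefixed with its 1-indexed line number,
#     # then: "... (350 more lines, use offset=151 to continue)"
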
def info(*, path: str, follow_symlinks: bool = True) -> dict[str, Any] | str:
    """Get file or directory metadata.

    Returns size, timestamps, permissions, and type information.
    Symlinks are followed by default; use follow_symlinks=False to report
    the symlink's own metadata instead of the target's.

    Args:
        path: Path to file or directory
        follow_symlinks: If True, follow symlinks to get target info (default: True)

    Returns:
        Dict with path, type, size, permissions, timestamps (or error string)

    Example:
        file.info(path="src/main.py")
        file.info(path="./docs")
        file.info(path="link", follow_symlinks=False)  # Get symlink metadata
    """
    with LogSpan(span="file.info", path=path) as s:
        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        cfg = _get_file_config()

        try:
            # Determine type (check symlink first - is_file/is_dir follow symlinks)
            is_symlink = resolved.is_symlink()
            if is_symlink and not follow_symlinks:
                # Report symlink itself
                file_type = "symlink"
                st = resolved.lstat()
            elif is_symlink and follow_symlinks:
                # Follow symlink - report target type but note it's a symlink
                st = resolved.stat()
                if resolved.is_file():
                    file_type = "file"
                elif resolved.is_dir():
                    file_type = "directory"
                else:
                    file_type = "other"
            elif resolved.is_file():
                file_type = "file"
                st = resolved.stat()
            elif resolved.is_dir():
                file_type = "directory"
                st = resolved.stat()
            else:
                file_type = "other"
                st = resolved.stat()

            # Format timestamps
            created = datetime.fromtimestamp(st.st_ctime, tz=UTC).isoformat()
            modified = datetime.fromtimestamp(st.st_mtime, tz=UTC).isoformat()
            accessed = datetime.fromtimestamp(st.st_atime, tz=UTC).isoformat()

            # Format permissions
            mode = stat.filemode(st.st_mode)

            # Use relative path if configured
            cwd = resolve_cwd_path(".")
            if cfg.relative_paths:
                try:
                    display_path = str(resolved.relative_to(cwd))
                except ValueError:
                    # Path is outside cwd, use absolute
                    display_path = str(resolved)
            else:
                display_path = str(resolved)

            info_data: dict[str, Any] = {
                "path": display_path,
                "type": file_type,
                "size": st.st_size,
                "size_readable": _format_size(st.st_size),
                "permissions": mode,
                "created": created,
                "modified": modified,
                "accessed": accessed,
            }

            # Add symlink target if applicable
            if resolved.is_symlink():
                info_data["target"] = str(resolved.readlink())

            s.add(found=True, type=file_type)
            return info_data

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

def list(
    *,
    path: str = ".",
    pattern: str | None = None,
    recursive: bool = False,
    include_hidden: bool = False,
    sort_by: str = "name",
    reverse: bool = False,
    follow_symlinks: bool = False,
) -> str:
    """List directory contents.

    Lists files and directories with optional filtering and sorting.
    Symlinks are shown as type 'l' by default; use follow_symlinks=True
    to show them as their target type ('d' or 'f').

    Args:
        path: Directory path (default: current directory)
        pattern: Glob pattern to filter (e.g., "*.py", "**/*.md")
        recursive: If True, list recursively (default: False)
        include_hidden: If True, include hidden files (default: False)
        sort_by: Sort field - "name", "type", "size", "modified" (default: "name")
        reverse: If True, reverse sort order (default: False)
        follow_symlinks: If True, show symlinks as their target type (default: False)

    Returns:
        List of entries with type indicator and size

    Example:
        file.list()
        file.list(path="src", pattern="*.py")
        file.list(path=".", recursive=True, pattern="*.md")
        file.list(path=".", sort_by="size", reverse=True)
    """
    with LogSpan(span="file.list", path=path, pattern=pattern, recursive=recursive) as s:
        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        if not resolved.is_dir():
            s.add(error="not_a_directory")
            return f"Error: Not a directory: {path}"

        cfg = _get_file_config()

        try:
            # Collect entries with metadata: (type, rel_path, size, mtime)
            entries: List[tuple[str, str, int, float]] = []  # noqa: UP006

            if pattern:
                matches = (
                    resolved.rglob(pattern) if recursive else resolved.glob(pattern)
                )
            else:
                matches = resolved.rglob("*") if recursive else resolved.iterdir()

            for entry in matches:
                # Skip hidden files unless requested
                if not include_hidden and entry.name.startswith("."):
                    continue

                # Check against exclude patterns
                if _is_excluded(entry, cfg.exclude_patterns):
                    continue

                # Get relative path from listing root
                try:
                    rel_path = entry.relative_to(resolved)
                except ValueError:
                    rel_path = entry

                # Type indicator and metadata
                try:
                    st = entry.stat()
                    size = st.st_size
                    mtime = st.st_mtime
                except OSError:
                    size = 0
                    mtime = 0

                # Check symlink first - is_dir() follows symlinks
                if entry.is_symlink() and not follow_symlinks:
                    type_ind = "l"
                elif entry.is_dir():
                    type_ind = "d"
                else:
                    type_ind = "f"

                entries.append((type_ind, str(rel_path), size, mtime))

                # Enforce limit before sorting (will re-limit after sort)
                if len(entries) >= cfg.max_list_entries * 2:
                    break

            # Sort based on sort_by parameter
            if sort_by == "type":
                entries.sort(key=lambda x: (x[0], x[1].lower()), reverse=reverse)
            elif sort_by == "size":
                entries.sort(key=lambda x: (x[2], x[1].lower()), reverse=reverse)
            elif sort_by == "modified":
                entries.sort(key=lambda x: (x[3], x[1].lower()), reverse=reverse)
            else:  # name (default) - dirs first, then alphabetically
                entries.sort(
                    key=lambda x: (0 if x[0] == "d" else 1, x[1].lower()),
                    reverse=reverse,
                )

            # Limit after sorting
            truncated = len(entries) > cfg.max_list_entries
            entries = entries[: cfg.max_list_entries]

            # Format output with size for files
            lines = []
            for type_ind, rel, size, _ in entries:
                if type_ind == "f":
                    size_str = _format_size(size)
                    lines.append(f"{type_ind} {rel} ({size_str})")
                else:
                    lines.append(f"{type_ind} {rel}")

            if truncated:
                lines.append(f"\n... (truncated at {cfg.max_list_entries} entries)")

            s.add(fileCount=len(entries))
            return "\n".join(lines) if lines else "(empty directory)"

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

def tree(
    *,
    path: str = ".",
    max_depth: int = 3,
    include_hidden: bool = False,
) -> str:
    """Display directory tree structure.

    Shows an ASCII tree visualization of the directory structure.

    Args:
        path: Root directory (default: current directory)
        max_depth: Maximum depth to display (default: 3)
        include_hidden: Include hidden files (default: False)

    Returns:
        ASCII tree visualization

    Example:
        file.tree()
        file.tree(path="src", max_depth=2)
        file.tree(path=".", include_hidden=True)
    """
    with LogSpan(span="file.tree", path=path, maxDepth=max_depth) as s:
        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        if not resolved.is_dir():
            s.add(error="not_a_directory")
            return f"Error: Not a directory: {path}"

        cfg = _get_file_config()
        node_count = 0
        max_nodes = cfg.max_list_entries

        def build_tree(dir_path: Path, prefix: str, depth: int) -> List[str]:  # noqa: UP006
            nonlocal node_count

            if depth > max_depth or node_count >= max_nodes:
                return []

            lines = []
            try:
                # Filter first, then sort (P4 fix - more efficient)
                filtered = [
                    entry
                    for entry in dir_path.iterdir()
                    if (include_hidden or not entry.name.startswith("."))
                    and not _is_excluded(entry, cfg.exclude_patterns)
                ]
                filtered.sort(key=lambda x: (not x.is_dir(), x.name.lower()))
            except PermissionError:
                return [f"{prefix}[permission denied]"]

            for i, entry in enumerate(filtered):
                if node_count >= max_nodes:
                    lines.append(f"{prefix}... (truncated)")
                    break

                node_count += 1
                is_last = i == len(filtered) - 1
                connector = "\u2514\u2500\u2500 " if is_last else "\u251c\u2500\u2500 "
                name = entry.name + ("/" if entry.is_dir() else "")

                lines.append(f"{prefix}{connector}{name}")

                if entry.is_dir() and depth < max_depth:
                    extension = "    " if is_last else "\u2502   "
                    lines.extend(build_tree(entry, prefix + extension, depth + 1))

            return lines

        result_lines = [str(resolved.name) + "/"]
        result_lines.extend(build_tree(resolved, "", 1))

        s.add(nodeCount=node_count)
        return "\n".join(result_lines)

def search(
    *,
    path: str = ".",
    pattern: str | None = None,
    glob: str | None = None,
    file_pattern: str | None = None,
    case_sensitive: bool = False,
    include_hidden: bool = False,
    max_results: int = 100,
) -> str:
    """Search for files by name pattern or glob.

    Recursively searches for files matching the given pattern.
    Supports filename matching with `pattern` or full path globs with `glob`.

    Args:
        path: Root directory to search (default: current directory)
        pattern: Filename pattern (e.g., "*test*", "config"). Matches filename only.
        glob: Full path glob pattern (e.g., "src/**/*.py", "**/*.{yaml,yml}").
            Supports ** for recursive matching and brace expansion.
        file_pattern: Filter by file extension (e.g., "*.py"). Used with pattern.
        case_sensitive: If True, pattern matching is case-sensitive (default: False)
        include_hidden: If True, include hidden files (default: False)
        max_results: Maximum number of results to return (default: 100)

    Returns:
        List of matching files with path and size

    Example:
        file.search(pattern="*test*")
        file.search(pattern="config", file_pattern="*.yaml")
        file.search(glob="src/**/*.py")
        file.search(glob="tests/**/test_*.py")
    """
    with LogSpan(
        span="file.search", path=path, pattern=pattern, glob=glob, filePattern=file_pattern
    ) as s:
        # Validate: need either pattern or glob
        if not pattern and not glob:
            s.add(error="missing_pattern")
            return "Error: Either 'pattern' or 'glob' parameter is required"

        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        if not resolved.is_dir():
            s.add(error="not_a_directory")
            return f"Error: Not a directory: {path}"

        cfg = _get_file_config()
        results: List[tuple[str, int]] = []  # noqa: UP006

        try:
            if glob:
                # Full path glob mode using pathlib.glob
                for entry in resolved.glob(glob):
                    if len(results) >= max_results:
                        break

                    # Skip directories
                    if entry.is_dir():
                        continue

                    # Skip hidden files
                    if not include_hidden and entry.name.startswith("."):
                        continue

                    # Skip excluded patterns
                    if _is_excluded(entry, cfg.exclude_patterns):
                        continue

                    # Get relative path and size
                    try:
                        entry_rel = entry.relative_to(resolved)
                        entry_size = entry.stat().st_size
                    except (ValueError, OSError):
                        continue

                    results.append((str(entry_rel), entry_size))
            else:
                # Filename pattern mode (existing behavior)
                assert pattern is not None  # mypy: validated above
                search_pattern = pattern if case_sensitive else pattern.lower()

                for entry in resolved.rglob("*"):
                    if len(results) >= max_results:
                        break

                    # Skip directories
                    if entry.is_dir():
                        continue

                    # Skip hidden files
                    if not include_hidden and entry.name.startswith("."):
                        continue

                    # Skip excluded patterns
                    if _is_excluded(entry, cfg.exclude_patterns):
                        continue

                    # Apply file pattern filter if specified
                    if file_pattern and not fnmatch.fnmatch(entry.name, file_pattern):
                        continue

                    # Match against search pattern (glob or substring)
                    name_to_match = entry.name if case_sensitive else entry.name.lower()
                    pattern_core = search_pattern.replace("*", "")
                    if (
                        not fnmatch.fnmatch(name_to_match, search_pattern)
                        and pattern_core not in name_to_match
                    ):
                        continue

                    # Get relative path and size
                    try:
                        entry_rel = entry.relative_to(resolved)
                        entry_size = entry.stat().st_size
                    except (ValueError, OSError):
                        continue

                    results.append((str(entry_rel), entry_size))

            # Sort by path
            results.sort(key=lambda x: x[0].lower())

            # Format output
            if not results:
                s.add(resultCount=0)
                search_term = glob if glob else pattern
                return f"No files matching '{search_term}' found in {path}"

            lines = []
            for rel_path, size in results:
                size_str = _format_size(size)
                lines.append(f"{rel_path} ({size_str})")

            if len(results) >= max_results:
                lines.append(f"\n... (limited to {max_results} results)")

            s.add(resultCount=len(results))
            return "\n".join(lines)

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

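# How the two search modes above differ, sketched on a typical layout
# (illustrative; hidden and excluded entries are filtered in both modes):
#
#     file.search(pattern="test")        # filename match: any file whose name
#                                        # contains "test", anywhere under path
#     file.search(glob="src/**/*.py")    # path match: .py files under src/,
#                                        # at any depth, relative to path
#
# Pattern mode walks the whole tree with rglob("*"); glob mode lets the glob
# expression itself decide which directories are visited.
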
# ============================================================================
# Write Operations
# ============================================================================


def write(
    *,
    path: str,
    content: str,
    append: bool = False,
    create_dirs: bool = False,
    encoding: str = "utf-8",
    dry_run: bool = False,
) -> str:
    """Write content to a file.

    Creates the file if it doesn't exist. Optionally creates parent
    directories and can append to existing files.

    Args:
        path: Path to file
        content: Content to write
        append: If True, append to file (default: overwrite)
        create_dirs: If True, create parent directories (default: False)
        encoding: Character encoding (default: utf-8)
        dry_run: If True, show what would happen without writing (default: False)

    Returns:
        Success message with bytes written, or error message

    Example:
        file.write(path="output.txt", content="Hello, World!")
        file.write(path="log.txt", content="New entry\\n", append=True)
        file.write(path="new/dir/file.txt", content="data", create_dirs=True)
    """
    with LogSpan(span="file.write", path=path, append=append, contentLen=len(content)) as s:
        # For new files, validate parent directory
        resolved, error = _validate_path(path, must_exist=False)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        # Check parent directory
        parent = resolved.parent
        if not parent.exists():
            if create_dirs:
                try:
                    parent.mkdir(parents=True, exist_ok=True)
                except OSError as e:
                    s.add(error=f"mkdir_failed: {e}")
                    return f"Error: Could not create directory: {e}"
            else:
                s.add(error="parent_not_found")
                return f"Error: Parent directory does not exist: {parent}. Use create_dirs=True to create it."

        # Encode once for byte count
        try:
            content_bytes = content.encode(encoding)
        except UnicodeEncodeError as e:
            s.add(error="encoding_error")
            return f"Error: Could not encode content with {encoding}: {e}"
        bytes_to_write = len(content_bytes)

        # Dry run mode - show what would happen
        if dry_run:
            action = "append" if append else "write"
            exists = "existing" if resolved.exists() else "new"
            s.add(dry_run=True, bytesToWrite=bytes_to_write)
            return f"Dry run: Would {action} {bytes_to_write} bytes to {exists} file {path}"

        # Create backup if file exists
        if resolved.exists():
            backup_error = _create_backup(resolved)
            if backup_error:
                s.add(error=f"backup_failed: {backup_error}")
                return f"Error: {backup_error}"

        try:
            # Use atomic write for non-append operations
            if append:
                with resolved.open("ab") as f:
                    f.write(content_bytes)
            else:
                # Write to temp file then rename (atomic)
                fd, temp_path = tempfile.mkstemp(
                    dir=str(parent),
                    prefix=".tmp_",
                    suffix=resolved.suffix,
                )
                try:
                    with os.fdopen(fd, "wb") as f:
                        f.write(content_bytes)
                    # Preserve permissions if file exists
                    if resolved.exists():
                        shutil.copymode(str(resolved), temp_path)
                    Path(temp_path).replace(resolved)
                except Exception:
                    # Clean up temp file on error
                    temp = Path(temp_path)
                    if temp.exists():
                        temp.unlink()
                    raise

            action = "appended" if append else "wrote"
            s.add(written=True, bytesWritten=bytes_to_write)
            return f"OK: {action} {bytes_to_write} bytes to {path}"

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

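# The atomic-write pattern used by write() above (and by edit() below), in
# isolation; target and payload are placeholder names. The temp file is created
# in the destination's own directory so the final rename stays on one
# filesystem (a sketch, not a second implementation):
#
#     fd, tmp = tempfile.mkstemp(dir=str(target.parent), prefix=".tmp_")
#     with os.fdopen(fd, "wb") as f:
#         f.write(payload)
#     Path(tmp).replace(target)    # atomic replace of the destination
#
# Readers therefore see either the old content or the new content, never a
# partially written file.
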
def edit(
    *,
    path: str,
    old_text: str,
    new_text: str,
    occurrence: int = 1,
    encoding: str = "utf-8",
    dry_run: bool = False,
) -> str:
    """Edit a file by replacing text.

    Performs exact string replacement. With the default occurrence=1 the
    old_text must match exactly once; if it appears multiple times, pass
    occurrence=0 to replace all matches or a specific occurrence number
    to target one.

    Args:
        path: Path to file
        old_text: Exact text to find and replace
        new_text: Text to replace with
        occurrence: Which occurrence to replace (1=first, 0=all, default: 1)
        encoding: Character encoding (default: utf-8)
        dry_run: If True, show what would happen without editing (default: False)

    Returns:
        Success message showing replacement count, or error message

    Example:
        file.edit(path="config.py", old_text="DEBUG = False", new_text="DEBUG = True")
        file.edit(path="main.py", old_text="TODO", new_text="DONE", occurrence=0)
    """
    with LogSpan(span="file.edit", path=path, oldLen=len(old_text), newLen=len(new_text)) as s:
        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        if not resolved.is_file():
            s.add(error="not_a_file")
            return f"Error: Not a file: {path}"

        if not old_text:
            s.add(error="empty_old_text")
            return "Error: old_text cannot be empty"

        try:
            content = resolved.read_text(encoding=encoding)
        except UnicodeDecodeError as e:
            s.add(error="encoding_error")
            return f"Error: Could not read file with encoding {encoding}: {e}"
        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

        # Count occurrences
        count = content.count(old_text)

        if count == 0:
            s.add(error="not_found")
            return "Error: Text not found in file"

        if count > 1 and occurrence == 1:
            s.add(error="ambiguous")
            return f"Error: Found {count} occurrences. Use occurrence=0 to replace all, or specify which (1-{count})."

        if occurrence > count:
            s.add(error="occurrence_out_of_range")
            return f"Error: Requested occurrence {occurrence} but only found {count}"

        # Calculate how many replacements would be made
        replace_count = count if occurrence == 0 else 1

        # Dry run mode - show what would happen
        if dry_run:
            s.add(dry_run=True, occurrences=count, wouldReplace=replace_count)
            return f"Dry run: Would replace {replace_count} occurrence(s) of text in {path}"

        # Create backup
        backup_error = _create_backup(resolved)
        if backup_error:
            s.add(error=f"backup_failed: {backup_error}")
            return f"Error: {backup_error}"

        # Perform replacement
        if occurrence == 0:
            # Replace all
            new_content = content.replace(old_text, new_text)
            replaced_count = count
        else:
            # Replace specific occurrence
            parts = content.split(old_text)
            if occurrence <= len(parts) - 1:
                # Join with old_text except at the target position
                new_parts = []
                for i, part in enumerate(parts):
                    new_parts.append(part)
                    if i < len(parts) - 1:
                        if i == occurrence - 1:
                            new_parts.append(new_text)
                        else:
                            new_parts.append(old_text)
                new_content = "".join(new_parts)
                replaced_count = 1
            else:
                s.add(error="occurrence_out_of_range")
                return f"Error: Could not find occurrence {occurrence}"

        try:
            # Write using atomic operation
            fd, temp_path = tempfile.mkstemp(
                dir=str(resolved.parent),
                prefix=".tmp_",
                suffix=resolved.suffix,
            )
            try:
                with os.fdopen(fd, "w", encoding=encoding) as f:
                    f.write(new_content)
                shutil.copymode(str(resolved), temp_path)
                Path(temp_path).replace(resolved)
            except Exception:
                temp = Path(temp_path)
                if temp.exists():
                    temp.unlink()
                raise

            s.add(edited=True, replacements=replaced_count)
            return f"OK: Replaced {replaced_count} occurrence(s) in {path}"

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

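# The occurrence-targeting logic in edit() above, traced on a small string
# (derived from the split/rejoin code, not captured output):
#
#     content  = "a TODO b TODO c"
#     old, new = "TODO", "DONE"
#     # occurrence=0 -> "a DONE b DONE c"   (replace all)
#     # occurrence=2 -> "a TODO b DONE c"   (split on old_text, rejoin, swapping
#     #                                      only the piece at position occurrence-1)
#     # occurrence=1 -> error, because the text appears more than once
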
# ============================================================================
# File Management
# ============================================================================


def delete(
    *, path: str, backup: bool = True, recursive: bool = False, dry_run: bool = False
) -> str:
    """Delete a file or directory.

    By default creates a backup before deletion. If send2trash is
    available and use_trash is enabled, moves to trash instead.

    Args:
        path: Path to file or directory
        backup: If True, create backup before delete (default: True)
        recursive: If True, delete non-empty directories (default: False)
        dry_run: If True, show what would happen without deleting (default: False)

    Returns:
        Success message or error message

    Example:
        file.delete(path="temp.txt")
        file.delete(path="old_file.py", backup=False)
        file.delete(path="old_dir/", recursive=True)
    """
    with LogSpan(span="file.delete", path=path) as s:
        resolved, error = _validate_path(path, must_exist=True)
        if error:
            s.add(error=error)
            return f"Error: {error}"
        assert resolved is not None  # mypy: error check above ensures this

        cfg = _get_file_config()

        # Dry run mode - show what would happen
        if dry_run:
            if resolved.is_file():
                file_type = "file"
            elif resolved.is_symlink():
                file_type = "symlink"
            elif resolved.is_dir():
                file_type = "directory"
                if not recursive and any(resolved.iterdir()):
                    s.add(dry_run=True, error="would_fail")
                    return f"Dry run: Would fail - directory not empty: {path}. Use recursive=True."
            else:
                file_type = "unknown"
            method = "trash" if cfg.use_trash and HAS_SEND2TRASH else "delete"
            s.add(dry_run=True, fileType=file_type, method=method)
            return f"Dry run: Would {method} {file_type}: {path}"

        try:
            # Create backup if requested and it's a file
            if backup and resolved.is_file():
                backup_error = _create_backup(resolved)
                if backup_error:
                    s.add(error=f"backup_failed: {backup_error}")
                    return f"Error: {backup_error}"

            # Use send2trash if available and configured
            if cfg.use_trash and HAS_SEND2TRASH:
                send2trash.send2trash(str(resolved))
                s.add(deleted=True, method="trash")
                return f"OK: Moved to trash: {path}"

            # Standard deletion
            if resolved.is_file() or resolved.is_symlink():
                resolved.unlink()
            elif resolved.is_dir():
                if recursive:
                    shutil.rmtree(resolved)
                elif any(resolved.iterdir()):
                    s.add(error="directory_not_empty")
                    return f"Error: Directory not empty: {path}. Use recursive=True to delete contents."
                else:
                    resolved.rmdir()
            else:
                s.add(error="unknown_type")
                return f"Error: Cannot delete: {path}"

            s.add(deleted=True, method="unlink")
            return f"OK: Deleted: {path}"

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

def copy(*, source: str, dest: str, follow_symlinks: bool = True) -> str:
    """Copy a file or directory.

    For files, copies content and metadata. For directories, copies
    the entire tree recursively. By default, symlinks are followed
    (copied as their target content) for security. Use follow_symlinks=False
    to copy symlinks as links.

    Args:
        source: Source path
        dest: Destination path
        follow_symlinks: If True, copy symlink targets; if False, copy as links (default: True)

    Returns:
        Success message or error message

    Example:
        file.copy(source="config.yaml", dest="config.backup.yaml")
        file.copy(source="src/", dest="src_backup/")
        file.copy(source="src/", dest="backup/", follow_symlinks=False)  # Preserve symlinks
    """
    with LogSpan(span="file.copy", source=source, dest=dest) as s:
        src_resolved, error = _validate_path(source, must_exist=True)
        if error:
            s.add(error=f"source: {error}")
            return f"Error: {error}"
        assert src_resolved is not None  # mypy: error check above ensures this

        dest_resolved, error = _validate_path(dest, must_exist=False)
        if error:
            s.add(error=f"dest: {error}")
            return f"Error: {error}"
        assert dest_resolved is not None  # mypy: error check above ensures this

        try:
            if src_resolved.is_file() or (src_resolved.is_symlink() and follow_symlinks):
                # Copy file with metadata (follows symlinks by default)
                shutil.copy2(src_resolved, dest_resolved, follow_symlinks=follow_symlinks)
                s.add(copied=True, type="file")
                return f"OK: Copied file: {source} -> {dest}"
            elif src_resolved.is_symlink() and not follow_symlinks:
                # Copy symlink as a link
                link_target = src_resolved.readlink()
                dest_resolved.symlink_to(link_target)
                s.add(copied=True, type="symlink")
                return f"OK: Copied symlink: {source} -> {dest}"
            elif src_resolved.is_dir():
                # Copy directory tree
                if dest_resolved.exists():
                    s.add(error="dest_exists")
                    return f"Error: Destination already exists: {dest}"
                # symlinks=True preserves symlinks as links, False follows them
                shutil.copytree(src_resolved, dest_resolved, symlinks=not follow_symlinks)
                s.add(copied=True, type="directory")
                return f"OK: Copied directory: {source} -> {dest}"
            else:
                s.add(error="unknown_type")
                return f"Error: Cannot copy: {source}"

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"

def move(*, source: str, dest: str) -> str:
    """Move or rename a file or directory.

    Moves source to destination. Can be used for renaming files
    within the same directory.

    Args:
        source: Source path
        dest: Destination path

    Returns:
        Success message or error message

    Example:
        file.move(source="old_name.py", dest="new_name.py")
        file.move(source="file.txt", dest="archive/file.txt")
    """
    with LogSpan(span="file.move", source=source, dest=dest) as s:
        src_resolved, error = _validate_path(source, must_exist=True)
        if error:
            s.add(error=f"source: {error}")
            return f"Error: {error}"
        assert src_resolved is not None  # mypy: error check above ensures this

        dest_resolved, error = _validate_path(dest, must_exist=False)
        if error:
            s.add(error=f"dest: {error}")
            return f"Error: {error}"
        assert dest_resolved is not None  # mypy: error check above ensures this

        # Check destination parent exists
        if not dest_resolved.parent.exists():
            s.add(error="dest_parent_not_found")
            return (
                f"Error: Destination directory does not exist: {dest_resolved.parent}"
            )

        # Determine type before move (source won't exist after)
        src_type = "file" if src_resolved.is_file() else "directory"

        try:
            shutil.move(str(src_resolved), str(dest_resolved))
            s.add(moved=True, type=src_type)
            return f"OK: Moved: {source} -> {dest}"

        except OSError as e:
            s.add(error=str(e))
            return f"Error: {e}"
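

# End-to-end sketch of the pack with the default Config (allowed_dirs empty,
# so only the project cwd is reachable; backups and trash enabled). Return
# strings are illustrative of the formats above, not captured output:
#
#     file.write(path="notes/todo.md", content="- ship 1.0.0b1\n", create_dirs=True)
#     # -> "OK: wrote 15 bytes to notes/todo.md"
#     file.edit(path="notes/todo.md", old_text="ship", new_text="shipped")
#     file.read(path="notes/todo.md")        # numbered lines, paginated if large
#     file.read(path="/etc/passwd")
#     # -> "Error: Access denied: path outside allowed directories"
#     file.delete(path="notes/todo.md")      # goes to the trash when send2trash is installed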