janito 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. janito/__main__.py +37 -30
  2. janito/agents/__init__.py +8 -2
  3. janito/agents/agent.py +10 -3
  4. janito/agents/claudeai.py +13 -23
  5. janito/agents/openai.py +5 -1
  6. janito/change/analysis/analyze.py +8 -7
  7. janito/change/analysis/prompts.py +4 -12
  8. janito/change/analysis/view/terminal.py +21 -11
  9. janito/change/applier/text.py +7 -5
  10. janito/change/core.py +22 -29
  11. janito/change/parser.py +0 -2
  12. janito/change/prompts.py +16 -21
  13. janito/change/validator.py +27 -9
  14. janito/change/viewer/content.py +1 -1
  15. janito/change/viewer/panels.py +93 -115
  16. janito/change/viewer/styling.py +15 -4
  17. janito/cli/commands.py +63 -20
  18. janito/common.py +44 -18
  19. janito/config.py +44 -44
  20. janito/prompt.py +36 -0
  21. janito/qa.py +5 -14
  22. janito/search_replace/README.md +63 -17
  23. janito/search_replace/__init__.py +2 -1
  24. janito/search_replace/core.py +15 -14
  25. janito/search_replace/logger.py +35 -0
  26. janito/search_replace/searcher.py +160 -48
  27. janito/search_replace/strategy_result.py +10 -0
  28. janito/shell/__init__.py +15 -16
  29. janito/shell/commands.py +38 -97
  30. janito/shell/processor.py +7 -27
  31. janito/shell/prompt.py +48 -0
  32. janito/shell/registry.py +60 -0
  33. janito/workspace/__init__.py +4 -5
  34. janito/workspace/analysis.py +2 -2
  35. janito/workspace/show.py +141 -0
  36. janito/workspace/stats.py +43 -0
  37. janito/workspace/types.py +98 -0
  38. janito/workspace/workset.py +108 -0
  39. janito/workspace/workspace.py +114 -0
  40. janito-0.7.0.dist-info/METADATA +167 -0
  41. {janito-0.6.0.dist-info → janito-0.7.0.dist-info}/RECORD +44 -43
  42. janito/change/viewer/pager.py +0 -56
  43. janito/cli/handlers/ask.py +0 -22
  44. janito/cli/handlers/demo.py +0 -22
  45. janito/cli/handlers/request.py +0 -24
  46. janito/cli/handlers/scan.py +0 -9
  47. janito/prompts.py +0 -2
  48. janito/shell/handlers.py +0 -122
  49. janito/workspace/manager.py +0 -48
  50. janito/workspace/scan.py +0 -232
  51. janito-0.6.0.dist-info/METADATA +0 -185
  52. {janito-0.6.0.dist-info → janito-0.7.0.dist-info}/WHEEL +0 -0
  53. {janito-0.6.0.dist-info → janito-0.7.0.dist-info}/entry_points.txt +0 -0
  54. {janito-0.6.0.dist-info → janito-0.7.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,60 @@
+ """Command registry and validation system for Janito shell."""
+ from dataclasses import dataclass
+ from typing import Optional, Callable, Dict, Any
+ from pathlib import Path
+ from prompt_toolkit.completion import PathCompleter
+
+ @dataclass
+ class Command:
+     """Command definition with handler and metadata."""
+     name: str
+     description: str
+     usage: Optional[str]
+     handler: Callable[[str], None]
+     completer: Optional[Any] = None
+
+ class CommandRegistry:
+     """Centralized command registry with validation."""
+     def __init__(self):
+         """Initialize registry."""
+         if not hasattr(self, '_commands'):
+             self._commands = {}
+
+
+     def register(self, command: Command) -> None:
+         """Register a command with validation."""
+         if command.name in self._commands:
+             raise ValueError(f"Command '{command.name}' already registered")
+         if not callable(command.handler):
+             raise ValueError(f"Handler for command '{command.name}' must be callable")
+         self._commands[command.name] = command
+
+     def register_alias(self, alias: str, command_name: str) -> None:
+         """Register an alias for an existing command."""
+         if alias in self._commands:
+             raise ValueError(f"Alias '{alias}' already registered")
+         if command := self.get_command(command_name):
+             self._commands[alias] = command
+         else:
+             raise ValueError(f"Command '{command_name}' not found")
+
+     def get_command(self, name: str) -> Optional[Command]:
+         """Get a command by name."""
+         return self._commands.get(name)
+
+     def get_commands(self) -> Dict[str, Command]:
+         """Get all registered commands."""
+         return self._commands.copy()
+
+     def validate_command(self, command: Command) -> None:
+         """Validate command properties."""
+         if not command.name:
+             raise ValueError("Command name cannot be empty")
+         if not command.description:
+             raise ValueError(f"Command '{command.name}' must have a description")
+         if not callable(command.handler):
+             raise ValueError(f"Command '{command.name}' handler must be callable")
+
+ def get_path_completer(only_directories: bool = False) -> PathCompleter:
+     """Get a configured path completer."""
+     return PathCompleter(only_directories=only_directories)
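
Based on the file list above, this hunk appears to add janito/shell/registry.py. The following is a minimal usage sketch of the Command/CommandRegistry API defined in the hunk; the "help" command and its handler are illustrative and not taken from the package.

    # Sketch only: exercises the Command/CommandRegistry API shown above.
    from janito.shell.registry import Command, CommandRegistry

    registry = CommandRegistry()

    def handle_help(args: str) -> None:
        """Print every registered command with its description (example handler)."""
        for name, cmd in sorted(registry.get_commands().items()):
            print(f"{name:10} {cmd.description}")

    help_cmd = Command(
        name="help",
        description="Show available commands",
        usage="help",
        handler=handle_help,
        completer=None,
    )
    registry.validate_command(help_cmd)   # raises ValueError on bad metadata
    registry.register(help_cmd)
    registry.register_alias("?", "help")  # "?" now resolves to the same Command

    registry.get_command("?").handler("")  # prints the command table

Aliases are stored in the same dictionary as commands, so get_commands() returns both names pointing at the same Command object.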
@@ -1,7 +1,6 @@
- from .manager import WorkspaceManager
- from .scan import preview_scan, collect_files_content, is_dir_empty
+ from .workset import Workset
 
- # Create singleton instance
- workspace = WorkspaceManager.get_instance()
+ # Create and export singleton instance
+ workset = Workset()
 
- __all__ = ['workspace', 'preview_scan', 'collect_files_content', 'is_dir_empty']
+ __all__ = ['workset']
@@ -104,11 +104,11 @@ def analyze_workspace_content(content: str) -> None:
 
      content = Group(*content_sections)
 
-     # Display workspace analysis in panel
+     # Display workset analysis in panel
      console.print("\n")
      console.print(Panel(
          content,
-         title="[bold blue]Workspace Analysis[/bold blue]",
+         title="[bold blue]Workset Analysis[/bold blue]",
          title_align="center"
      ))
 
@@ -0,0 +1,141 @@
+ from rich.traceback import install
+ install(show_locals=False)
+
+ from pathlib import Path
+ from typing import List, Set
+ from rich.columns import Columns
+ from rich.console import Console, Group
+ from rich.panel import Panel
+ from rich.rule import Rule
+ from rich.text import Text
+ from janito.config import config
+ from .types import FileInfo, ScanPath
+ from .stats import collect_file_stats, _format_size
+
+
+ def show_workset_analysis(
+     files: List[FileInfo],
+     scan_paths: List[ScanPath],
+     cache_blocks: List[List[FileInfo]] = None
+ ) -> None:
+     """Display analysis of workspace content and configuration."""
+
+     console = Console()
+     content_sections = []
+
+     # Get statistics
+     dir_counts, file_types = collect_file_stats(files)
+
+     # Calculate path stats using relative paths
+     paths_stats = []
+     total_files = 0
+     total_size = 0
+
+
+     # Process all paths uniformly
+     for scan_path in sorted(scan_paths, key=lambda p: p.path):
+
+         path = scan_path.path
+         is_recursive = scan_path.is_recursive
+         path_str = str(path)
+
+         # Calculate stats based on scan type
+         if is_recursive:
+             path_files = sum(count for d, [count, _] in dir_counts.items()
+                              if Path(d) == path or Path(d).is_relative_to(path))
+             path_size = sum(size for d, [_, size] in dir_counts.items()
+                             if Path(d) == path or Path(d).is_relative_to(path))
+         else:
+             path_files = dir_counts.get(path_str, [0, 0])[0]
+             path_size = dir_counts.get(path_str, [0, 0])[1]
+
+         total_files += path_files
+         total_size += path_size
+
+         paths_stats.append(
+             f"[bold cyan]{path}[/bold cyan]"
+             f"[yellow]{'/**' if is_recursive else '/'}[/yellow] "
+             f"[[green]{path_files}[/green] "
+             f"{'total ' if is_recursive else ''}file(s), "
+             f"[blue]{_format_size(path_size)}[/blue]]"
+         )
+
+     # Build sections - Show paths first
+     if paths_stats:
+         content_sections.extend([
+             "[bold yellow]📌 Included Paths[/bold yellow]",
+             Rule(style="yellow"),
+         ])
+
+         # All paths are now handled in the main loop
+
+         content_sections.append(
+             Text(" | ").join(Text.from_markup(path) for path in paths_stats)
+         )
+
+         # Add total summary if there are multiple paths
+         if len(paths_stats) > 1:
+             content_sections.extend([
+                 "",  # Empty line for spacing
+                 f"[bold yellow]Total:[/bold yellow] [green]{total_files}[/green] files, "
+                 f"[blue]{_format_size(total_size)}[/blue]"
+             ])
+         content_sections.append("\n")
+
+     # Then show directory structure if verbose
+     if config.verbose:
+         dir_stats = [
+             f"📁 {directory}/ [{count} file(s), {_format_size(size)}]"
+             for directory, (count, size) in sorted(dir_counts.items())
+         ]
+         content_sections.extend([
+             "[bold magenta]📂 Directory Structure[/bold magenta]",
+             Rule(style="magenta"),
+             Columns(dir_stats, equal=True, expand=True),
+             "\n"
+         ])
+
+     type_stats = [
+         f"[bold cyan].{ext.lstrip('.')}[/bold cyan] [[green]{count}[/green] file(s)]"
+         if ext != 'no_ext'
+         else f"[bold cyan]no ext[/bold cyan] [[green]{count}[/green] file(s)]"
+         for ext, count in sorted(file_types.items())
+     ]
+     content_sections.extend([
+         "[bold cyan]📑 File Types[/bold cyan]",
+         Rule(style="cyan"),
+         Text(" | ").join(Text.from_markup(stat) for stat in type_stats)
+     ])
+
+     # Finally show cache blocks if in debug mode
+     if config.debug and cache_blocks:
+         blocks = cache_blocks
+         if any(blocks):
+             content_sections.extend([
+                 "\n",
+                 "[bold blue]🕒 Cache Blocks[/bold blue]",
+                 Rule(style="blue"),
+             ])
+
+             block_names = ["Last 5 minutes", "Last hour", "Last 24 hours", "Older"]
+             for name, block in zip(block_names, blocks):
+                 if block:  # Only show non-empty blocks
+                     content_sections.extend([
+                         f"\n[bold]{name}[/bold] ({len(block)} files):",
+                         Columns([
+                             Text.assemble(
+                                 f"{f.name} - ",
+                                 (f"{f.content.splitlines()[0][:50]}...", "dim")
+                             )
+                             for f in block[:5]  # Show first 5 files only
+                         ], padding=(0, 2)),
+                         "" if block == blocks[-1] else Rule(style="dim")
+                     ])
+
+     # Display analysis
+     console.print("\n")
+     console.print(Panel(
+         Group(*content_sections),
+         title="[bold blue]Workset Analysis[/bold blue]",
+         title_align="center"
+     ))
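
This hunk appears to correspond to the new janito/workspace/show.py. A small illustrative call is sketched below, assuming the FileInfo/ScanPath/ScanType definitions from janito/workspace/types.py further down in this diff and a default janito.config; the file names, contents, and timestamps are invented for the example.

    # Sketch only: drives show_workset_analysis() with hand-built data.
    from pathlib import Path
    from janito.workspace.show import show_workset_analysis
    from janito.workspace.types import FileInfo, ScanPath, ScanType

    files = [
        FileInfo(name="src/app.py", content="print('hello')\n", seconds_ago=120),
        FileInfo(name="README.md", content="# Demo\n", seconds_ago=7200),
    ]
    scan_paths = [
        ScanPath(Path("."), ScanType.PLAIN),
        ScanPath(Path("src"), ScanType.RECURSIVE),
    ]

    # cache_blocks mirrors Workset.get_cache_blocks(): 5 min / 1 h / 24 h / older
    cache_blocks = [[files[0]], [files[1]], [], []]

    show_workset_analysis(files=files, scan_paths=scan_paths, cache_blocks=cache_blocks)

The cache-block section is only rendered when config.debug is set, and the directory breakdown only when config.verbose is set, per the conditionals above.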
@@ -0,0 +1,43 @@
+ from collections import defaultdict
+ from pathlib import Path
+ from typing import List, Dict, Tuple
+ from .types import FileInfo
+
+ def collect_file_stats(files: List[FileInfo]) -> Tuple[Dict[str, List[int]], Dict[str, int]]:
+     """Collect directory and file type statistics from files.
+
+     Args:
+         files: List of FileInfo objects to analyze
+
+     Returns:
+         Tuple containing:
+         - Dictionary of directory stats [count, size]
+         - Dictionary of file type counts
+     """
+     dir_counts = defaultdict(lambda: [0, 0])  # [count, size]
+     file_types = defaultdict(int)
+
+     for file_info in files:
+         path = Path(file_info.name)
+         dir_path = str(path.parent)
+         file_size = len(file_info.content.encode('utf-8'))
+
+         # Update directory stats
+         dir_counts[dir_path][0] += 1
+         dir_counts[dir_path][1] += file_size
+
+         # Update file type stats
+         file_types[path.suffix.lower() or 'no_ext'] += 1
+
+     return dir_counts, file_types
+
+ def _format_size(size_bytes: int) -> str:
+     """Format size in bytes to human readable format."""
+     size = size_bytes
+     for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
+         if size < 1024:
+             break
+         size //= 1024
+     return f"{size} {unit}"
+
+ # Remove _group_files_by_time function as it's now handled by Workset
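
For these statistics helpers (apparently janito/workspace/stats.py), a quick sketch of the expected shapes, using FileInfo from the types module shown below; the sample files are invented.

    # Sketch only: shows the return shapes of collect_file_stats() and _format_size().
    from janito.workspace.stats import collect_file_stats, _format_size
    from janito.workspace.types import FileInfo

    files = [
        FileInfo(name="pkg/mod.py", content="x = 1\n"),
        FileInfo(name="pkg/data.json", content="{}"),
        FileInfo(name="Makefile", content="all:\n"),
    ]

    dir_counts, file_types = collect_file_stats(files)
    # dir_counts -> {'pkg': [2, <bytes>], '.': [1, <bytes>]}   (count, size per directory)
    # file_types -> {'.py': 1, '.json': 1, 'no_ext': 1}

    for directory, (count, size) in sorted(dir_counts.items()):
        print(f"{directory}/ {count} file(s), {_format_size(size)}")

    print(_format_size(2048))  # "2 KB" (integer division, so sizes are truncated)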
@@ -0,0 +1,98 @@
+ from dataclasses import dataclass, field
+ from pathlib import Path
+ from typing import List, Dict, Set, Tuple
+ from sys import maxsize
+ from janito.config import config
+ from enum import auto, Enum
+
+ @dataclass
+ class FileInfo:
+     """Represents a file's basic information"""
+     name: str  # Relative path from workspace root
+     content: str
+     seconds_ago: int = 0  # Seconds since last modification
+
+ class ScanType(Enum):
+     """Type of path scanning"""
+     PLAIN = auto()
+     RECURSIVE = auto()
+
+ @dataclass
+ class ScanPath:
+     """Represents a path to be scanned"""
+     path: Path
+     scan_type: ScanType = ScanType.PLAIN
+
+     @property
+     def is_recursive(self) -> bool:
+         return self.scan_type == ScanType.RECURSIVE
+
+ @dataclass
+ class WorksetContent:
+     """Represents workset content and statistics."""
+     files: List[FileInfo] = field(default_factory=list)
+     scanned_paths: Set[Path] = field(default_factory=set)
+     dir_counts: Dict[str, int] = field(default_factory=dict)
+     dir_sizes: Dict[str, int] = field(default_factory=dict)
+     file_types: Dict[str, int] = field(default_factory=dict)
+     scan_completed: bool = False
+     analyzed: bool = False
+
+     def clear(self) -> None:
+         """Reset all content"""
+         self.files = []
+         self.scanned_paths = set()
+         self.dir_counts = {}
+         self.dir_sizes = {}
+         self.file_types = {}
+         self.scan_completed = False
+         self.analyzed = False
+
+     def add_file(self, file_info: FileInfo) -> None:
+         """Add a file to the content and update statistics"""
+         self.files.append(file_info)
+
+         # Update file type stats
+         suffix = Path(file_info.name).suffix.lower() or 'no_ext'
+         self.file_types[suffix] = self.file_types.get(suffix, 0) + 1
+
+         # Update directory stats
+         dir_path = str(Path(file_info.name).parent)
+         self.dir_counts[dir_path] = self.dir_counts.get(dir_path, 0) + 1
+         self.dir_sizes[dir_path] = self.dir_sizes.get(dir_path, 0) + len(file_info.content.encode('utf-8'))
+
+     def get_file_info(self, time_ranges: List[int] = None) -> Tuple[List[FileInfo], List[FileInfo], List[FileInfo], List[FileInfo]]:
+         """Get file information grouped into 4 blocks based on modification time ranges."""
+         if not time_ranges:
+             time_ranges = [300, 3600, 86400, maxsize]  # 5min, 1h, 24h, rest
+         else:
+             time_ranges = [int(x) for x in time_ranges[:3]] + [maxsize]
+             if len(time_ranges) < 4:
+                 time_ranges.extend([maxsize] * (4 - len(time_ranges)))
+         time_ranges.sort()
+
+         blocks = [[] for _ in range(4)]
+
+         def get_range_index(seconds: int) -> int:
+             for i, threshold in enumerate(time_ranges):
+                 if seconds <= threshold:
+                     return i
+             return len(time_ranges) - 1
+
+         # Sort and group files by modification time
+         sorted_files = sorted(self.files, key=lambda f: f.seconds_ago)
+         for file_info in sorted_files:
+             block_idx = get_range_index(file_info.seconds_ago)
+             blocks[block_idx].append(file_info)
+
+         return tuple(blocks)
+
+     @property
+     def content_size(self) -> int:
+         """Get total content size in bytes"""
+         return sum(len(f.content.encode('utf-8')) for f in self.files)
+
+     @property
+     def file_count(self) -> int:
+         """Get total number of files"""
+         return len(self.files)
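
A minimal sketch of the WorksetContent bookkeeping defined in this hunk (presumably janito/workspace/types.py); the file data and timestamps are invented for the example.

    # Sketch only: feeds WorksetContent and inspects the derived statistics.
    from janito.workspace.types import FileInfo, WorksetContent

    content = WorksetContent()
    content.add_file(FileInfo(name="src/a.py", content="pass\n", seconds_ago=60))
    content.add_file(FileInfo(name="src/b.py", content="pass\n", seconds_ago=1800))
    content.add_file(FileInfo(name="docs/index.md", content="# Docs\n", seconds_ago=90000))

    print(content.file_count)     # 3
    print(content.content_size)   # total bytes of all file contents
    print(content.dir_counts)     # {'src': 2, 'docs': 1}
    print(content.file_types)     # {'.py': 2, '.md': 1}

    # Default ranges: <=5 min, <=1 h, <=24 h, older
    recent, hour, day, older = content.get_file_info()
    print([f.name for f in recent])  # ['src/a.py']
    print([f.name for f in hour])    # ['src/b.py']
    print([f.name for f in day])     # []  (90000 s is beyond 24 h)
    print([f.name for f in older])   # ['docs/index.md']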
@@ -0,0 +1,108 @@
+ from pathlib import Path
+ from typing import List, Set, Tuple
+ from .show import show_workset_analysis
+ from rich.console import Console
+ from janito.config import config
+ from .types import WorksetContent, FileInfo, ScanPath, ScanType
+ from .workspace import Workspace
+
+ class PathNotRelativeError(Exception):
+     """Raised when a path is not relative."""
+     pass
+
+ class Workset:
+     _instance = None
+
+     def __new__(cls):
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+             cls._instance._init()
+         return cls._instance
+
+     def _init(self):
+         self._scan_paths: List[ScanPath] = []
+         self._content = WorksetContent()
+         self._workspace = Workspace()
+         if not config.skip_work:
+             self._scan_paths.append(ScanPath(Path("."), ScanType.PLAIN))
+
+     def add_scan_path(self, path: Path, scan_type: ScanType) -> None:
+         """Add a path with specific scan type."""
+         if path.is_absolute():
+             raise PathNotRelativeError(f"Path must be relative: {path}")
+         self._scan_paths.append(ScanPath(path, scan_type))
+
+         if config.debug:
+             Console(stderr=True).print(
+                 f"[cyan]Debug: Added {scan_type.name.lower()} scan path: {path}[/cyan]"
+             )
+
+     def refresh(self) -> None:
+         """Refresh content by scanning configured paths"""
+         self.clear()
+         paths = self.get_scan_paths()
+
+         if config.debug:
+             Console(stderr=True).print(f"[cyan]Debug: Refreshing workset with paths: {paths}[/cyan]")
+
+         self._workspace.scan_files(paths, self.get_recursive_paths())
+         self._content = self._workspace.content
+
+     def get_scan_paths(self) -> List[Path]:
+         """Get effective scan paths based on configuration"""
+         paths = set()
+         paths.update(p.path for p in self._scan_paths)
+         return sorted(paths)
+
+     def get_recursive_paths(self) -> Set[Path]:
+         """Get paths that should be scanned recursively"""
+         return {p.path for p in self._scan_paths if p.is_recursive}
+
+     def is_path_recursive(self, path: Path) -> bool:
+         """Check if a path is configured for recursive scanning"""
+         return any(scan_path.is_recursive and scan_path.path == path
+                    for scan_path in self._scan_paths)
+
+     @property
+     def paths(self) -> Set[Path]:
+         return {p.path for p in self._scan_paths}
+
+     @property
+     def recursive_paths(self) -> Set[Path]:
+         return self.get_recursive_paths()
+
+     def clear(self) -> None:
+         """Clear workspace settings while maintaining current directory in scan paths"""
+         self._content = WorksetContent()
+
+     def show(self) -> None:
+         """Display analysis of current workset content."""
+         show_workset_analysis(
+             files=self._content.files,
+             scan_paths=self._scan_paths,
+             cache_blocks=self.get_cache_blocks()
+         )
+
+     def get_cache_blocks(self) -> Tuple[List[FileInfo], List[FileInfo], List[FileInfo], List[FileInfo]]:
+         """Get files grouped into time-based cache blocks.
+
+         Returns:
+             Tuple of 4 lists containing FileInfo objects:
+             - Last 5 minutes
+             - Last hour
+             - Last 24 hours
+             - Older files
+         """
+         time_ranges = [300, 3600, 86400]  # 5min, 1h, 24h
+         blocks: List[List[FileInfo]] = [[] for _ in range(4)]
+
+         for file_info in sorted(self._content.files, key=lambda f: f.seconds_ago):
+             # Will return 3 if file is older than all thresholds
+             block_idx = next((i for i, threshold in enumerate(time_ranges)
+                               if file_info.seconds_ago <= threshold), 3)
+             blocks[block_idx].append(file_info)
+
+         return tuple(blocks)
+
+ # Create and export singleton instance at module level
+ workset = Workset()
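
Assuming the janito.workspace exports shown earlier in this diff, usage of the Workset singleton (apparently janito/workspace/workset.py) might look like the sketch below; the scan paths are examples. Note that "." is registered by default unless config.skip_work is set.

    # Sketch only: configures and refreshes the module-level Workset singleton.
    from pathlib import Path
    from janito.workspace import workset
    from janito.workspace.types import ScanType

    # Relative paths only; absolute paths raise PathNotRelativeError.
    workset.add_scan_path(Path("src"), ScanType.RECURSIVE)
    workset.add_scan_path(Path("docs"), ScanType.PLAIN)

    workset.refresh()   # scans the workspace via the Workspace singleton
    workset.show()      # renders the "Workset Analysis" panel from show.py

    print(workset.paths)            # configured scan paths (includes "." unless skip_work)
    print(workset.recursive_paths)  # {Path('src')}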
@@ -0,0 +1,114 @@
+ from pathlib import Path
+ from typing import List, Set, Dict, Optional, Tuple
+ import time
+ from rich.console import Console
+ from janito.config import config
+ from .types import WorksetContent, FileInfo, ScanPath  # Add ScanPath import
+
+ class PathNotRelativeError(Exception):
+     """Raised when a path is not relative."""
+     pass
+
+ class Workspace:
+     """Handles workspace scanning and content management."""
+
+     _instance = None
+
+     def __new__(cls):
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+             cls._instance._initialized = False
+         return cls._instance
+
+     def __init__(self):
+         if not self._initialized:
+             self._content = WorksetContent()
+             self._initialized = True
+
+     def scan_files(self, paths: List[Path], recursive_paths: Set[Path]) -> None:
+         """Scan files from given paths and update content.
+
+         Args:
+             paths: List of paths to scan
+             recursive_paths: Set of paths to scan recursively
+         """
+         for path in paths:
+             if path.is_absolute():
+                 raise PathNotRelativeError(f"Path must be relative: {path}")
+
+         scan_time = time.time()
+
+         if config.debug:
+             console = Console(stderr=True)
+             console.print(f"\n[cyan]Debug: Starting scan of {len(paths)} paths[/cyan]")
+
+         processed_files: Set[Path] = set()
+         for path in paths:
+             abs_path = config.workspace_dir / path
+             if not (config.skip_work and path == Path(".")):
+                 self._scan_path(abs_path, processed_files, scan_time, recursive_paths)
+
+         self._content.scan_completed = True
+         self._content.analyzed = False
+         self._content.scanned_paths = set(paths)
+
+     def _scan_path(self, path: Path, processed_files: Set[Path], scan_time: float,
+                    recursive_paths: Set[Path]) -> None:
+         """Scan a single path and process its contents."""
+         if path in processed_files:
+             return
+
+         # Convert recursive_paths to absolute for comparison
+         abs_recursive_paths = {config.workspace_dir / p for p in recursive_paths}
+
+         path = path.resolve()
+         processed_files.add(path)
+
+         if path.is_dir():
+             try:
+                 for item in path.iterdir():
+                     if item.name.startswith(('.', '__pycache__')):
+                         continue
+                     if path in abs_recursive_paths:
+                         self._scan_path(item, processed_files, scan_time, recursive_paths)
+                     elif item.is_file():
+                         self._scan_path(item, processed_files, scan_time, recursive_paths)
+             except PermissionError:
+                 if config.debug:
+                     Console(stderr=True).print(f"[red]Debug: Permission denied: {path}[/red]")
+         elif path.is_file():
+             self._process_file(path, scan_time)
+
+     def _process_file(self, path: Path, scan_time: float) -> None:
+         """Process a single file and add it to the content."""
+         try:
+             if path.suffix.lower() in {'.py', '.md', '.txt', '.json', '.yaml', '.yml', '.toml'} or not path.suffix:
+                 content = path.read_text(encoding='utf-8')
+                 rel_path = path.relative_to(config.workspace_dir)
+                 seconds_ago = int(scan_time - path.stat().st_mtime)
+
+                 file_info = FileInfo(
+                     name=str(rel_path),
+                     content=content,
+                     seconds_ago=seconds_ago
+                 )
+                 self._content.add_file(file_info)
+
+                 if config.debug:
+                     Console(stderr=True).print(f"[cyan]Debug: Added file: {rel_path}[/cyan]")
+         except (UnicodeDecodeError, PermissionError) as e:
+             if config.debug:
+                 Console(stderr=True).print(f"[red]Debug: Error reading file {path}: {str(e)}[/red]")
+
+     def get_file_info(self, time_ranges: Optional[List[int]] = None) -> Tuple[List[FileInfo], List[FileInfo], List[FileInfo], List[FileInfo]]:
+         """Get file information grouped by modification time."""
+         return self._content.get_file_info(time_ranges)
+
+     def clear(self) -> None:
+         """Clear all workspace content and settings."""
+         self._content = WorksetContent()
+
+     @property
+     def content(self) -> WorksetContent:
+         """Get the workspace content."""
+         return self._content
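
Finally, a sketch of driving the lower-level Workspace scanner (apparently janito/workspace/workspace.py) directly, which is normally done through Workset.refresh(); the paths are illustrative and config.workspace_dir is assumed to point at the project root.

    # Sketch only: scans two relative paths and reads back the grouped results.
    from pathlib import Path
    from janito.workspace.workspace import Workspace

    workspace = Workspace()  # singleton: repeated construction returns the same instance

    # "docs" is scanned recursively, "." only at the top level.
    workspace.scan_files(paths=[Path("."), Path("docs")], recursive_paths={Path("docs")})

    content = workspace.content
    print(f"{content.file_count} files, {content.content_size} bytes")

    recent, last_hour, last_day, older = workspace.get_file_info()
    print(f"{len(recent)} file(s) modified in the last 5 minutes")

    workspace.clear()  # drops the scanned content, keeps the singleton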