janito 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- janito/__init__.py +0 -47
- janito/__main__.py +105 -17
- janito/agents/__init__.py +9 -9
- janito/agents/agent.py +10 -3
- janito/agents/claudeai.py +15 -34
- janito/agents/openai.py +5 -1
- janito/change/__init__.py +29 -16
- janito/change/__main__.py +0 -0
- janito/{analysis → change/analysis}/__init__.py +5 -15
- janito/change/analysis/__main__.py +7 -0
- janito/change/analysis/analyze.py +62 -0
- janito/change/analysis/formatting.py +78 -0
- janito/change/analysis/options.py +81 -0
- janito/{analysis → change/analysis}/prompts.py +33 -18
- janito/change/analysis/view/__init__.py +9 -0
- janito/change/analysis/view/terminal.py +181 -0
- janito/change/applier/__init__.py +5 -0
- janito/change/applier/file.py +58 -0
- janito/change/applier/main.py +156 -0
- janito/change/applier/text.py +247 -0
- janito/change/applier/workspace_dir.py +58 -0
- janito/change/core.py +124 -0
- janito/{changehistory.py → change/history.py} +12 -14
- janito/change/operations.py +7 -0
- janito/change/parser.py +287 -0
- janito/change/play.py +54 -0
- janito/change/preview.py +82 -0
- janito/change/prompts.py +121 -0
- janito/change/test.py +0 -0
- janito/change/validator.py +269 -0
- janito/{changeviewer → change/viewer}/__init__.py +3 -4
- janito/change/viewer/content.py +66 -0
- janito/{changeviewer → change/viewer}/diff.py +19 -4
- janito/change/viewer/panels.py +533 -0
- janito/change/viewer/styling.py +114 -0
- janito/{changeviewer → change/viewer}/themes.py +3 -5
- janito/clear_statement_parser/clear_statement_format.txt +328 -0
- janito/clear_statement_parser/examples.txt +326 -0
- janito/clear_statement_parser/models.py +104 -0
- janito/clear_statement_parser/parser.py +496 -0
- janito/cli/base.py +30 -0
- janito/cli/commands.py +75 -40
- janito/cli/functions.py +19 -194
- janito/cli/history.py +61 -0
- janito/common.py +65 -8
- janito/config.py +70 -5
- janito/demo/__init__.py +4 -0
- janito/demo/data.py +13 -0
- janito/demo/mock_data.py +20 -0
- janito/demo/operations.py +45 -0
- janito/demo/runner.py +59 -0
- janito/demo/scenarios.py +32 -0
- janito/prompt.py +36 -0
- janito/qa.py +6 -14
- janito/search_replace/README.md +192 -0
- janito/search_replace/__init__.py +7 -0
- janito/search_replace/__main__.py +21 -0
- janito/search_replace/core.py +120 -0
- janito/search_replace/logger.py +35 -0
- janito/search_replace/parser.py +52 -0
- janito/search_replace/play.py +61 -0
- janito/search_replace/replacer.py +36 -0
- janito/search_replace/searcher.py +411 -0
- janito/search_replace/strategy_result.py +10 -0
- janito/shell/__init__.py +38 -0
- janito/shell/bus.py +31 -0
- janito/shell/commands.py +136 -0
- janito/shell/history.py +20 -0
- janito/shell/processor.py +32 -0
- janito/shell/prompt.py +48 -0
- janito/shell/registry.py +60 -0
- janito/tui/__init__.py +21 -0
- janito/tui/base.py +22 -0
- janito/tui/flows/__init__.py +5 -0
- janito/tui/flows/changes.py +65 -0
- janito/tui/flows/content.py +128 -0
- janito/tui/flows/selection.py +117 -0
- janito/tui/screens/__init__.py +3 -0
- janito/tui/screens/app.py +1 -0
- janito/workspace/__init__.py +6 -0
- janito/workspace/analysis.py +121 -0
- janito/workspace/show.py +141 -0
- janito/workspace/stats.py +43 -0
- janito/workspace/types.py +98 -0
- janito/workspace/workset.py +108 -0
- janito/workspace/workspace.py +114 -0
- janito-0.7.0.dist-info/METADATA +167 -0
- janito-0.7.0.dist-info/RECORD +96 -0
- {janito-0.5.0.dist-info → janito-0.7.0.dist-info}/WHEEL +1 -1
- janito/_contextparser.py +0 -113
- janito/analysis/display.py +0 -149
- janito/analysis/options.py +0 -112
- janito/change/applier.py +0 -269
- janito/change/content.py +0 -62
- janito/change/indentation.py +0 -33
- janito/change/position.py +0 -169
- janito/changeviewer/panels.py +0 -268
- janito/changeviewer/styling.py +0 -59
- janito/console/__init__.py +0 -3
- janito/console/commands.py +0 -112
- janito/console/core.py +0 -62
- janito/console/display.py +0 -157
- janito/fileparser.py +0 -334
- janito/prompts.py +0 -81
- janito/scan.py +0 -176
- janito/tests/test_fileparser.py +0 -26
- janito-0.5.0.dist-info/METADATA +0 -146
- janito-0.5.0.dist-info/RECORD +0 -45
- {janito-0.5.0.dist-info → janito-0.7.0.dist-info}/entry_points.txt +0 -0
- {janito-0.5.0.dist-info → janito-0.7.0.dist-info}/licenses/LICENSE +0 -0
janito/workspace/analysis.py
ADDED
@@ -0,0 +1,121 @@
+from collections import defaultdict
+from pathlib import Path
+from typing import Dict, List
+
+from rich.columns import Columns
+from rich.console import Console, Group
+from rich.panel import Panel
+from rich.rule import Rule
+from janito.config import config
+
+def analyze_workspace_content(content: str) -> None:
+    """Show statistics about the scanned content"""
+    if not content:
+        return
+
+    # Collect include paths
+    paths = []
+    if config.include:
+        for path in config.include:
+            is_recursive = path in config.recursive
+            path_str = str(path.relative_to(config.workspace_dir))
+            paths.append(f"{path_str}/*" if is_recursive else f"{path_str}/")
+    else:
+        # Use workspace_dir as fallback when no include paths specified
+        paths.append("./")
+
+    console = Console()
+
+    dir_counts: Dict[str, int] = defaultdict(int)
+    dir_sizes: Dict[str, int] = defaultdict(int)
+    file_types: Dict[str, int] = defaultdict(int)
+    current_path = None
+    current_content = []
+
+    for line in content.split('\n'):
+        if line.startswith('<path>'):
+            path = Path(line.replace('<path>', '').replace('</path>', '').strip())
+            current_path = str(path.parent)
+            dir_counts[current_path] += 1
+            file_types[path.suffix.lower() or 'no_ext'] += 1
+        elif line.startswith('<content>'):
+            current_content = []
+        elif line.startswith('</content>'):
+            content_size = sum(len(line.encode('utf-8')) for line in current_content)
+            if current_path:
+                dir_sizes[current_path] += content_size
+            current_content = []
+        elif current_content is not None:
+            current_content.append(line)
+
+    console = Console()
+
+    # Directory statistics
+    dir_stats = [
+        f"📁 {directory}/ [{count} file(s), {_format_size(size)}]"
+        for directory, (count, size) in (
+            (d, (dir_counts[d], dir_sizes[d]))
+            for d in sorted(dir_counts.keys())
+        )
+    ]
+
+    # File type statistics
+    type_stats = [
+        f"📄 .{ext.lstrip('.')} [{count} file(s)]" if ext != 'no_ext' else f"📄 {ext} [{count} file(s)]"
+        for ext, count in sorted(file_types.items())
+    ]
+
+    # Create grouped content with styled separators
+    content_sections = []
+
+    if paths:
+        # Group paths with their stats
+        path_stats = []
+        for path in sorted(set(paths)):
+            base_path = Path(path.rstrip("/*"))
+            total_files = sum(1 for d, count in dir_counts.items()
+                              if Path(d).is_relative_to(base_path))
+            total_size = sum(size for d, size in dir_sizes.items()
+                             if Path(d).is_relative_to(base_path))
+            path_stats.append(f"{path} [{total_files} file(s), {_format_size(total_size)}]")
+
+        content_sections.extend([
+            "[bold yellow]📌 Included Paths[/bold yellow]",
+            Rule(style="yellow"),
+            Columns(path_stats, equal=True, expand=True),
+            "\n"
+        ])
+
+    # Add directory structure section only in verbose mode
+    if config.verbose:
+        content_sections.extend([
+            "[bold magenta]📂 Directory Structure[/bold magenta]",
+            Rule(style="magenta"),
+            Columns(dir_stats, equal=True, expand=True),
+            "\n"
+        ])
+
+    # Always show file types section
+    content_sections.extend([
+        "[bold cyan]📑 File Types[/bold cyan]",
+        Rule(style="cyan"),
+        Columns(type_stats, equal=True, expand=True)
+    ])
+
+    content = Group(*content_sections)
+
+    # Display workset analysis in panel
+    console.print("\n")
+    console.print(Panel(
+        content,
+        title="[bold blue]Workset Analysis[/bold blue]",
+        title_align="center"
+    ))
+
+def _format_size(size_bytes: int) -> str:
+    """Format size in bytes to human readable format"""
+    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
+        if size_bytes < 1024.0:
+            break
+        size_bytes /= 1024.0
+    return f"{size_bytes:.1f} {unit}"
janito/workspace/show.py
ADDED
@@ -0,0 +1,141 @@
+from rich.traceback import install
+install(show_locals=False)
+
+from pathlib import Path
+from typing import List, Set
+from rich.columns import Columns
+from rich.console import Console, Group
+from rich.panel import Panel
+from rich.rule import Rule
+from rich.text import Text
+from janito.config import config
+from .types import FileInfo, ScanPath
+from .stats import collect_file_stats, _format_size
+
+
+def show_workset_analysis(
+    files: List[FileInfo],
+    scan_paths: List[ScanPath],
+    cache_blocks: List[List[FileInfo]] = None
+) -> None:
+    """Display analysis of workspace content and configuration."""
+
+    console = Console()
+    content_sections = []
+
+    # Get statistics
+    dir_counts, file_types = collect_file_stats(files)
+
+    # Calculate path stats using relative paths
+    paths_stats = []
+    total_files = 0
+    total_size = 0
+
+
+    # Process all paths uniformly
+    for scan_path in sorted(scan_paths, key=lambda p: p.path):
+
+        path = scan_path.path
+        is_recursive = scan_path.is_recursive
+        path_str = str(path)
+
+        # Calculate stats based on scan type
+        if is_recursive:
+            path_files = sum(count for d, [count, _] in dir_counts.items()
+                             if Path(d) == path or Path(d).is_relative_to(path))
+            path_size = sum(size for d, [_, size] in dir_counts.items()
+                            if Path(d) == path or Path(d).is_relative_to(path))
+        else:
+            path_files = dir_counts.get(path_str, [0, 0])[0]
+            path_size = dir_counts.get(path_str, [0, 0])[1]
+
+        total_files += path_files
+        total_size += path_size
+
+        paths_stats.append(
+            f"[bold cyan]{path}[/bold cyan]"
+            f"[yellow]{'/**' if is_recursive else '/'}[/yellow] "
+            f"[[green]{path_files}[/green] "
+            f"{'total ' if is_recursive else ''}file(s), "
+            f"[blue]{_format_size(path_size)}[/blue]]"
+        )
+
+    # Build sections - Show paths first
+    if paths_stats or current_dir_stats:
+        content_sections.extend([
+            "[bold yellow]📌 Included Paths[/bold yellow]",
+            Rule(style="yellow"),
+        ])
+
+        # All paths are now handled in the main loop
+
+        content_sections.append(
+            Text(" | ").join(Text.from_markup(path) for path in paths_stats)
+        )
+
+        # Add total summary if there are multiple paths
+        if len(paths_stats) > 1:
+            content_sections.extend([
+                "",  # Empty line for spacing
+                f"[bold yellow]Total:[/bold yellow] [green]{total_files}[/green] files, "
+                f"[blue]{_format_size(total_size)}[/blue]"
+            ])
+        content_sections.append("\n")
+
+    # Then show directory structure if verbose
+    if config.verbose:
+        dir_stats = [
+            f"📁 {directory}/ [{count} file(s), {_format_size(size)}]"
+            for directory, (count, size) in sorted(dir_counts.items())
+        ]
+        content_sections.extend([
+            "[bold magenta]📂 Directory Structure[/bold magenta]",
+            Rule(style="magenta"),
+            Columns(dir_stats, equal=True, expand=True),
+            "\n"
+        ])
+
+    type_stats = [
+        f"[bold cyan].{ext.lstrip('.')}[/bold cyan] [[green]{count}[/green] file(s)]"
+        if ext != 'no_ext'
+        else f"[bold cyan]no ext[/bold cyan] [[green]{count}[/green] file(s)]"
+        for ext, count in sorted(file_types.items())
+    ]
+    content_sections.extend([
+        "[bold cyan]📑 File Types[/bold cyan]",
+        Rule(style="cyan"),
+        Text(" | ").join(Text.from_markup(stat) for stat in type_stats)
+    ])
+
+    # Finally show cache blocks if in debug mode
+    if config.debug and cache_blocks:
+        blocks = cache_blocks
+        if any(blocks):
+            content_sections.extend([
+                "\n",
+                "[bold blue]🕒 Cache Blocks[/bold blue]",
+                Rule(style="blue"),
+            ])
+
+            block_names = ["Last 5 minutes", "Last hour", "Last 24 hours", "Older"]
+            for name, block in zip(block_names, blocks):
+                if block:  # Only show non-empty blocks
+                    content_sections.extend([
+                        f"\n[bold]{name}[/bold] ({len(block)} files):",
+                        Columns([
+                            Text.assemble(
+                                f"{f.name} - ",
+                                (f"{f.content.splitlines()[0][:50]}...", "dim")
+                            )
+                            for f in block[:5]  # Show first 5 files only
+                        ], padding=(0, 2)),
+                        "" if block == blocks[-1] else Rule(style="dim")
+                    ])
+
+    # Display analysis
+    console.print("\n")
+    console.print(Panel(
+        Group(*content_sections),
+        title="[bold blue]Workset Analysis[/bold blue]",
+        title_align="center"
+    ))
janito/workspace/stats.py
ADDED
@@ -0,0 +1,43 @@
+from collections import defaultdict
+from pathlib import Path
+from typing import List, Dict, Tuple
+from .types import FileInfo
+
+def collect_file_stats(files: List[FileInfo]) -> Tuple[Dict[str, List[int]], Dict[str, int]]:
+    """Collect directory and file type statistics from files.
+
+    Args:
+        files: List of FileInfo objects to analyze
+
+    Returns:
+        Tuple containing:
+        - Dictionary of directory stats [count, size]
+        - Dictionary of file type counts
+    """
+    dir_counts = defaultdict(lambda: [0, 0])  # [count, size]
+    file_types = defaultdict(int)
+
+    for file_info in files:
+        path = Path(file_info.name)
+        dir_path = str(path.parent)
+        file_size = len(file_info.content.encode('utf-8'))
+
+        # Update directory stats
+        dir_counts[dir_path][0] += 1
+        dir_counts[dir_path][1] += file_size
+
+        # Update file type stats
+        file_types[path.suffix.lower() or 'no_ext'] += 1
+
+    return dir_counts, file_types
+
+def _format_size(size_bytes: int) -> str:
+    """Format size in bytes to human readable format."""
+    size = size_bytes
+    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
+        if size < 1024:
+            break
+        size //= 1024
+    return f"{size} {unit}"
+
+# Remove _group_files_by_time function as it's now handled by Workset
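
Note: the helpers in stats.py are pure functions over FileInfo records, so they can be exercised in isolation. A minimal sketch, not part of the package, assuming only the module layout shown in this diff:

    from janito.workspace.types import FileInfo
    from janito.workspace.stats import collect_file_stats, _format_size

    # Two synthetic in-memory files, purely illustrative.
    files = [
        FileInfo(name="src/app.py", content="print('hi')\n"),
        FileInfo(name="docs/readme.md", content="# Readme\n"),
    ]

    dir_counts, file_types = collect_file_stats(files)
    for directory, (count, size) in sorted(dir_counts.items()):
        print(f"{directory}/ [{count} file(s), {_format_size(size)}]")
    # file_types here maps '.py' -> 1 and '.md' -> 1
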
janito/workspace/types.py
ADDED
@@ -0,0 +1,98 @@
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import List, Dict, Set, Tuple
+from sys import maxsize
+from janito.config import config
+from enum import auto, Enum
+
+@dataclass
+class FileInfo:
+    """Represents a file's basic information"""
+    name: str  # Relative path from workspace root
+    content: str
+    seconds_ago: int = 0  # Seconds since last modification
+
+class ScanType(Enum):
+    """Type of path scanning"""
+    PLAIN = auto()
+    RECURSIVE = auto()
+
+@dataclass
+class ScanPath:
+    """Represents a path to be scanned"""
+    path: Path
+    scan_type: ScanType = ScanType.PLAIN
+
+    @property
+    def is_recursive(self) -> bool:
+        return self.scan_type == ScanType.RECURSIVE
+
+@dataclass
+class WorksetContent:
+    """Represents workset content and statistics."""
+    files: List[FileInfo] = field(default_factory=list)
+    scanned_paths: Set[Path] = field(default_factory=set)
+    dir_counts: Dict[str, int] = field(default_factory=dict)
+    dir_sizes: Dict[str, int] = field(default_factory=dict)
+    file_types: Dict[str, int] = field(default_factory=dict)
+    scan_completed: bool = False
+    analyzed: bool = False
+
+    def clear(self) -> None:
+        """Reset all content"""
+        self.files = []
+        self.scanned_paths = set()
+        self.dir_counts = {}
+        self.dir_sizes = {}
+        self.file_types = {}
+        self.scan_completed = False
+        self.analyzed = False
+
+    def add_file(self, file_info: FileInfo) -> None:
+        """Add a file to the content and update statistics"""
+        self.files.append(file_info)
+
+        # Update file type stats
+        suffix = Path(file_info.name).suffix.lower() or 'no_ext'
+        self.file_types[suffix] = self.file_types.get(suffix, 0) + 1
+
+        # Update directory stats
+        dir_path = str(Path(file_info.name).parent)
+        self.dir_counts[dir_path] = self.dir_counts.get(dir_path, 0) + 1
+        self.dir_sizes[dir_path] = self.dir_sizes.get(dir_path, 0) + len(file_info.content.encode('utf-8'))
+
+    def get_file_info(self, time_ranges: List[int] = None) -> Tuple[List[FileInfo], List[FileInfo], List[FileInfo], List[FileInfo]]:
+        """Get file information grouped into 4 blocks based on modification time ranges."""
+        if not time_ranges:
+            time_ranges = [300, 3600, 86400, maxsize]  # 5min, 1h, 24h, rest
+        else:
+            time_ranges = [int(x) for x in time_ranges[:3]] + [maxsize]
+            if len(time_ranges) < 4:
+                time_ranges.extend([maxsize] * (4 - len(time_ranges)))
+            time_ranges.sort()
+
+        blocks = [[] for _ in range(4)]
+
+        def get_range_index(seconds: int) -> int:
+            for i, threshold in enumerate(time_ranges):
+                if seconds <= threshold:
+                    return i
+            return len(time_ranges) - 1
+
+        # Sort and group files by modification time
+        sorted_files = sorted(self.files, key=lambda f: f.seconds_ago)
+        for file_info in sorted_files:
+            block_idx = get_range_index(file_info.seconds_ago)
+            blocks[block_idx].append(file_info)
+
+        return tuple(blocks)
+
+    @property
+    def content_size(self) -> int:
+        """Get total content size in bytes"""
+        return sum(len(f.content.encode('utf-8')) for f in self.files)
+
+    @property
+    def file_count(self) -> int:
+        """Get total number of files"""
+        return len(self.files)
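
Note: WorksetContent.get_file_info buckets files into four blocks by modification age, the same four ranges show.py labels as cache blocks (last 5 minutes, last hour, last 24 hours, older). A small sketch of that bucketing, using synthetic seconds_ago values for illustration only:

    from janito.workspace.types import FileInfo, WorksetContent

    content = WorksetContent()
    content.add_file(FileInfo(name="a.py", content="x = 1\n", seconds_ago=60))      # under 5 minutes
    content.add_file(FileInfo(name="b.py", content="y = 2\n", seconds_ago=7200))    # under 24 hours
    content.add_file(FileInfo(name="c.py", content="z = 3\n", seconds_ago=200000))  # older

    last_5min, last_hour, last_day, older = content.get_file_info()
    assert [f.name for f in last_5min] == ["a.py"]
    assert [f.name for f in last_day] == ["b.py"]
    assert [f.name for f in older] == ["c.py"]
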
janito/workspace/workset.py
ADDED
@@ -0,0 +1,108 @@
+from pathlib import Path
+from typing import List, Set, Tuple
+from .show import show_workset_analysis
+from rich.console import Console
+from janito.config import config
+from .types import WorksetContent, FileInfo, ScanPath, ScanType
+from .workspace import Workspace
+
+class PathNotRelativeError(Exception):
+    """Raised when a path is not relative."""
+    pass
+
+class Workset:
+    _instance = None
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+            cls._instance._init()
+        return cls._instance
+
+    def _init(self):
+        self._scan_paths: List[ScanPath] = []
+        self._content = WorksetContent()
+        self._workspace = Workspace()
+        if not config.skip_work:
+            self._scan_paths.append(ScanPath(Path("."), ScanType.PLAIN))
+
+    def add_scan_path(self, path: Path, scan_type: ScanType) -> None:
+        """Add a path with specific scan type."""
+        if path.is_absolute():
+            raise PathNotRelativeError(f"Path must be relative: {path}")
+        self._scan_paths.append(ScanPath(path, scan_type))
+
+        if config.debug:
+            Console(stderr=True).print(
+                f"[cyan]Debug: Added {scan_type.name.lower()} scan path: {path}[/cyan]"
+            )
+
+    def refresh(self) -> None:
+        """Refresh content by scanning configured paths"""
+        self.clear()
+        paths = self.get_scan_paths()
+
+        if config.debug:
+            Console(stderr=True).print(f"[cyan]Debug: Refreshing workset with paths: {paths}[/cyan]")
+
+        self._workspace.scan_files(paths, self.get_recursive_paths())
+        self._content = self._workspace.content
+
+    def get_scan_paths(self) -> List[Path]:
+        """Get effective scan paths based on configuration"""
+        paths = set()
+        paths.update(p.path for p in self._scan_paths)
+        return sorted(paths)
+
+    def get_recursive_paths(self) -> Set[Path]:
+        """Get paths that should be scanned recursively"""
+        return {p.path for p in self._scan_paths if p.is_recursive}
+
+    def is_path_recursive(self, path: Path) -> bool:
+        """Check if a path is configured for recursive scanning"""
+        return any(scan_path.is_recursive and scan_path.path == path
+                   for scan_path in self._scan_paths)
+
+    @property
+    def paths(self) -> Set[Path]:
+        return {p.path for p in self._scan_paths}
+
+    @property
+    def recursive_paths(self) -> Set[Path]:
+        return self.get_recursive_paths()
+
+    def clear(self) -> None:
+        """Clear workspace settings while maintaining current directory in scan paths"""
+        self._content = WorksetContent()
+
+    def show(self) -> None:
+        """Display analysis of current workset content."""
+        show_workset_analysis(
+            files=self._content.files,
+            scan_paths=self._scan_paths,
+            cache_blocks=self.get_cache_blocks()
+        )
+
+    def get_cache_blocks(self) -> Tuple[List[FileInfo], List[FileInfo], List[FileInfo], List[FileInfo]]:
+        """Get files grouped into time-based cache blocks.
+
+        Returns:
+            Tuple of 4 lists containing FileInfo objects:
+            - Last 5 minutes
+            - Last hour
+            - Last 24 hours
+            - Older files
+        """
+        time_ranges = [300, 3600, 86400]  # 5min, 1h, 24h
+        blocks: List[List[FileInfo]] = [[] for _ in range(4)]
+
+        for file_info in sorted(self._content.files, key=lambda f: f.seconds_ago):
+            # Will return 3 if file is older than all thresholds
+            block_idx = next((i for i, threshold in enumerate(time_ranges)
+                              if file_info.seconds_ago <= threshold), 3)
+            blocks[block_idx].append(file_info)
+
+        return tuple(blocks)
+
+# Create and export singleton instance at module level
+workset = Workset()
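
Note: workset.py exposes a module-level singleton, workset, which wraps the Workspace scanner defined in workspace.py below. A hedged usage sketch based only on the API visible in this hunk; how the janito CLI actually wires this up is not shown here:

    from pathlib import Path
    from janito.workspace.workset import workset
    from janito.workspace.types import ScanType

    # Paths must be relative to the workspace root; absolute paths raise
    # PathNotRelativeError.
    workset.add_scan_path(Path("janito"), ScanType.RECURSIVE)

    workset.refresh()   # scans the configured paths via the Workspace singleton
    workset.show()      # renders the "Workset Analysis" panel built in show.py

    recent, last_hour, last_day, older = workset.get_cache_blocks()
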
janito/workspace/workspace.py
ADDED
@@ -0,0 +1,114 @@
+from pathlib import Path
+from typing import List, Set, Dict, Optional, Tuple
+import time
+from rich.console import Console
+from janito.config import config
+from .types import WorksetContent, FileInfo, ScanPath  # Add ScanPath import
+
+class PathNotRelativeError(Exception):
+    """Raised when a path is not relative."""
+    pass
+
+class Workspace:
+    """Handles workspace scanning and content management."""
+
+    _instance = None
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+            cls._instance._initialized = False
+        return cls._instance
+
+    def __init__(self):
+        if not self._initialized:
+            self._content = WorksetContent()
+            self._initialized = True
+
+    def scan_files(self, paths: List[Path], recursive_paths: Set[Path]) -> None:
+        """Scan files from given paths and update content.
+
+        Args:
+            paths: List of paths to scan
+            recursive_paths: Set of paths to scan recursively
+        """
+        for path in paths:
+            if path.is_absolute():
+                raise PathNotRelativeError(f"Path must be relative: {path}")
+
+        scan_time = time.time()
+
+        if config.debug:
+            console = Console(stderr=True)
+            console.print(f"\n[cyan]Debug: Starting scan of {len(paths)} paths[/cyan]")
+
+        processed_files: Set[Path] = set()
+        for path in paths:
+            abs_path = config.workspace_dir / path
+            if not (config.skip_work and path == Path(".")):
+                self._scan_path(abs_path, processed_files, scan_time, recursive_paths)
+
+        self._content.scan_completed = True
+        self._content.analyzed = False
+        self._content.scanned_paths = set(paths)
+
+    def _scan_path(self, path: Path, processed_files: Set[Path], scan_time: float,
+                   recursive_paths: Set[Path]) -> None:
+        """Scan a single path and process its contents."""
+        if path in processed_files:
+            return
+
+        # Convert recursive_paths to absolute for comparison
+        abs_recursive_paths = {config.workspace_dir / p for p in recursive_paths}
+
+        path = path.resolve()
+        processed_files.add(path)
+
+        if path.is_dir():
+            try:
+                for item in path.iterdir():
+                    if item.name.startswith(('.', '__pycache__')):
+                        continue
+                    if path in abs_recursive_paths:
+                        self._scan_path(item, processed_files, scan_time, recursive_paths)
+                    elif item.is_file():
+                        self._scan_path(item, processed_files, scan_time, recursive_paths)
+            except PermissionError:
+                if config.debug:
+                    Console(stderr=True).print(f"[red]Debug: Permission denied: {path}[/red]")
+        elif path.is_file():
+            self._process_file(path, scan_time)
+
+    def _process_file(self, path: Path, scan_time: float) -> None:
+        """Process a single file and add it to the content."""
+        try:
+            if path.suffix.lower() in {'.py', '.md', '.txt', '.json', '.yaml', '.yml', '.toml'} or not path.suffix:
+                content = path.read_text(encoding='utf-8')
+                rel_path = path.relative_to(config.workspace_dir)
+                seconds_ago = int(scan_time - path.stat().st_mtime)
+
+                file_info = FileInfo(
+                    name=str(rel_path),
+                    content=content,
+                    seconds_ago=seconds_ago
+                )
+                self._content.add_file(file_info)
+
+                if config.debug:
+                    Console(stderr=True).print(f"[cyan]Debug: Added file: {rel_path}[/cyan]")
+        except (UnicodeDecodeError, PermissionError) as e:
+            if config.debug:
+                Console(stderr=True).print(f"[red]Debug: Error reading file {path}: {str(e)}[/red]")
+
+    def get_file_info(self, time_ranges: Optional[List[int]] = None) -> Tuple[List[FileInfo], List[FileInfo], List[FileInfo], List[FileInfo]]:
+        """Get file information grouped by modification time."""
+        return self._content.get_file_info(time_ranges)
+
+    def clear(self) -> None:
+        """Clear all workspace content and settings."""
+        self._content = WorksetContent()
+
+    @property
+    def content(self) -> WorksetContent:
+        """Get the workspace content."""
+        return self._content