janito 0.12.0__py3-none-any.whl → 0.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. janito/__init__.py +1 -1
  2. janito/cli/agent/__init__.py +7 -0
  3. janito/cli/agent/conversation.py +149 -0
  4. janito/cli/agent/initialization.py +172 -0
  5. janito/cli/agent/query.py +108 -0
  6. janito/cli/agent.py +7 -282
  7. janito/cli/app.py +105 -9
  8. janito/cli/commands/__init__.py +12 -0
  9. janito/cli/commands/config.py +242 -0
  10. janito/cli/commands/history.py +119 -0
  11. janito/cli/commands/profile.py +72 -0
  12. janito/cli/commands/validation.py +24 -0
  13. janito/cli/commands/workspace.py +31 -0
  14. janito/cli/commands.py +9 -326
  15. janito/config.py +37 -0
  16. janito/data/instructions_template.txt +9 -5
  17. janito/tools/__init__.py +8 -2
  18. janito/tools/bash/bash.py +3 -1
  19. janito/tools/bash/unix_persistent_bash.py +183 -181
  20. janito/tools/bash/win_persistent_bash.py +4 -2
  21. janito/tools/fetch_webpage/__init__.py +22 -33
  22. janito/tools/fetch_webpage/core.py +182 -155
  23. janito/tools/rich_console.py +46 -9
  24. janito/tools/search_text.py +225 -238
  25. janito/tools/str_replace_editor/handlers/str_replace.py +3 -1
  26. janito/tools/str_replace_editor/handlers/view.py +14 -8
  27. janito/tools/think.py +37 -0
  28. janito/tools/usage_tracker.py +1 -0
  29. janito-0.14.0.dist-info/METADATA +396 -0
  30. janito-0.14.0.dist-info/RECORD +53 -0
  31. janito/test_file.py +0 -4
  32. janito/tools/fetch_webpage/chunking.py +0 -76
  33. janito/tools/fetch_webpage/extractors.py +0 -276
  34. janito/tools/fetch_webpage/news.py +0 -137
  35. janito/tools/fetch_webpage/utils.py +0 -108
  36. janito-0.12.0.dist-info/METADATA +0 -203
  37. janito-0.12.0.dist-info/RECORD +0 -47
  38. {janito-0.12.0.dist-info → janito-0.14.0.dist-info}/WHEEL +0 -0
  39. {janito-0.12.0.dist-info → janito-0.14.0.dist-info}/entry_points.txt +0 -0
  40. {janito-0.12.0.dist-info → janito-0.14.0.dist-info}/licenses/LICENSE +0 -0
janito/tools/search_text.py CHANGED
@@ -1,239 +1,226 @@
-import os
-import fnmatch
-import re
-from typing import List, Tuple
-from janito.tools.rich_console import print_info, print_success, print_error, print_warning
-from janito.tools.usage_tracker import track_usage
-
-
-@track_usage('search_operations')
-def search_text(text_pattern: str, file_pattern: str = "*", root_dir: str = ".", recursive: bool = True) -> Tuple[str, bool]:
-    """
-    Search for text patterns within files matching a filename pattern.
-    Files in .gitignore are always ignored.
-
-    Args:
-        text_pattern: Text pattern to search for within files
-        file_pattern: Pattern to match file names against (default: "*")
-                      Multiple patterns can be specified using semicolons or spaces as separators
-                      Examples: "*.py *.toml *.sh *.md test*"
-        root_dir: Root directory to start search from (default: current directory)
-        recursive: Whether to search recursively in subdirectories (default: True)
-
-    Returns:
-        A tuple containing (message, is_error)
-    """
-    # Simplified initial message
-    print_info(f"Searching for '{text_pattern}' in '{file_pattern}'", "Text Search")
-    try:
-        # Convert to absolute path if relative
-        abs_root = os.path.abspath(root_dir)
-
-        if not os.path.isdir(abs_root):
-            error_msg = f"Error: Directory '{root_dir}' does not exist"
-            print_error(error_msg, "Directory Error")
-            return error_msg, True
-
-        # Compile the regex pattern for better performance
-        try:
-            regex = re.compile(text_pattern)
-        except re.error as e:
-            error_msg = f"Error: Invalid regex pattern '{text_pattern}': {str(e)}"
-            print_error(error_msg, "Regex Error")
-            return error_msg, True
-
-        matching_files = []
-        match_count = 0
-        results = []
-
-        # Get gitignore patterns
-        ignored_patterns = _get_gitignore_patterns(abs_root)
-
-        # Use os.walk for recursive behavior
-        if recursive:
-            for dirpath, dirnames, filenames in os.walk(abs_root):
-                # Skip ignored directories
-                dirnames[:] = [d for d in dirnames if not _is_ignored(os.path.join(dirpath, d), ignored_patterns, abs_root)]
-
-                # Handle multiple patterns separated by semicolons or spaces
-                patterns = []
-                if ';' in file_pattern:
-                    patterns = file_pattern.split(';')
-                elif ' ' in file_pattern:
-                    patterns = file_pattern.split()
-                else:
-                    patterns = [file_pattern]
-
-                for pattern in patterns:
-                    for filename in fnmatch.filter(filenames, pattern):
-                        file_path = os.path.join(dirpath, filename)
-
-                        # Skip ignored files
-                        if _is_ignored(file_path, ignored_patterns, abs_root):
-                            continue
-
-                        # Skip if already processed this file
-                        if file_path in matching_files:
-                            continue
-
-                        file_matches = _search_file(file_path, regex, abs_root)
-                        if file_matches:
-                            matching_files.append(file_path)
-                            match_count += len(file_matches)
-                            results.append(f"\n{os.path.relpath(file_path, abs_root)} ({len(file_matches)} matches):")
-                            results.extend(file_matches)
-        else:
-            # Non-recursive mode - only search in the specified directory
-            # Handle multiple patterns separated by semicolons or spaces
-            patterns = []
-            if ';' in file_pattern:
-                patterns = file_pattern.split(';')
-            elif ' ' in file_pattern:
-                patterns = file_pattern.split()
-            else:
-                patterns = [file_pattern]
-
-            for pattern in patterns:
-                for filename in fnmatch.filter(os.listdir(abs_root), pattern):
-                    file_path = os.path.join(abs_root, filename)
-
-                    # Skip ignored files
-                    if _is_ignored(file_path, ignored_patterns, abs_root):
-                        continue
-
-                    # Skip if already processed this file
-                    if file_path in matching_files:
-                        continue
-
-                    if os.path.isfile(file_path):
-                        file_matches = _search_file(file_path, regex, abs_root)
-                        if file_matches:
-                            matching_files.append(file_path)
-                            match_count += len(file_matches)
-                            results.append(f"\n{os.path.relpath(file_path, abs_root)} ({len(file_matches)} matches):")
-                            results.extend(file_matches)
-
-        if matching_files:
-            # Only print the count summary, not the full results
-            summary = f"{match_count} matches in {len(matching_files)} files"
-            print_success(summary, "Search Results")
-
-            # Still return the full results for programmatic use
-            result_text = "\n".join(results)
-            result_msg = f"Searching for '{text_pattern}' in files matching '{file_pattern}':{result_text}\n{summary}"
-            return result_msg, False
-        else:
-            result_msg = f"No matches found for '{text_pattern}' in files matching '{file_pattern}'"
-            print_warning("No matches found.")
-            return result_msg, False
-
-    except Exception as e:
-        error_msg = f"Error searching text: {str(e)}"
-        print_error(error_msg, "Search Error")
-        return error_msg, True
-
-
-def _search_file(file_path: str, pattern: re.Pattern, root_dir: str) -> List[str]:
-    """
-    Search for regex pattern in a file and return matching lines with line numbers.
-
-    Args:
-        file_path: Path to the file to search
-        pattern: Compiled regex pattern to search for
-        root_dir: Root directory (for path display)
-
-    Returns:
-        List of formatted matches with line numbers and content
-    """
-    matches = []
-    try:
-        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
-            for i, line in enumerate(f, 1):
-                if pattern.search(line):
-                    # Truncate long lines for display
-                    display_line = line.strip()
-                    if len(display_line) > 100:
-                        display_line = display_line[:97] + "..."
-                    matches.append(f"  Line {i}: {display_line}")
-    except (UnicodeDecodeError, IOError):
-        # Skip binary files or files with encoding issues
-        pass
-    return matches
-
-
-def _get_gitignore_patterns(root_dir: str) -> List[str]:
-    """
-    Get patterns from .gitignore files.
-
-    Args:
-        root_dir: Root directory to start from
-
-    Returns:
-        List of gitignore patterns
-    """
-    patterns = []
-
-    # Check for .gitignore in the root directory
-    gitignore_path = os.path.join(root_dir, '.gitignore')
-    if os.path.isfile(gitignore_path):
-        try:
-            with open(gitignore_path, 'r', encoding='utf-8') as f:
-                for line in f:
-                    line = line.strip()
-                    # Skip empty lines and comments
-                    if line and not line.startswith('#'):
-                        patterns.append(line)
-        except Exception:
-            pass
-
-    # Add common patterns that are always ignored
-    common_patterns = [
-        '.git/', '.venv/', 'venv/', '__pycache__/', '*.pyc',
-        '*.pyo', '*.pyd', '.DS_Store', '*.so', '*.egg-info/'
-    ]
-    patterns.extend(common_patterns)
-
-    return patterns
-
-
-def _is_ignored(path: str, patterns: List[str], root_dir: str) -> bool:
-    """
-    Check if a path should be ignored based on gitignore patterns.
-
-    Args:
-        path: Path to check
-        patterns: List of gitignore patterns
-        root_dir: Root directory for relative paths
-
-    Returns:
-        True if the path should be ignored, False otherwise
-    """
-    # Get the relative path from the root directory
-    rel_path = os.path.relpath(path, root_dir)
-
-    # Convert to forward slashes for consistency with gitignore patterns
-    rel_path = rel_path.replace(os.sep, '/')
-
-    # Add trailing slash for directories
-    if os.path.isdir(path) and not rel_path.endswith('/'):
-        rel_path += '/'
-
-    for pattern in patterns:
-        # Handle negation patterns (those starting with !)
-        if pattern.startswith('!'):
-            continue  # Skip negation patterns for simplicity
-
-        # Handle directory-specific patterns (those ending with /)
-        if pattern.endswith('/'):
-            if os.path.isdir(path) and fnmatch.fnmatch(rel_path, pattern + '*'):
-                return True
-
-        # Handle file patterns
-        if fnmatch.fnmatch(rel_path, pattern):
-            return True
-
-        # Handle patterns without wildcards as path prefixes
-        if '*' not in pattern and '?' not in pattern and rel_path.startswith(pattern):
-            return True
-
+import os
+import fnmatch
+import re
+import glob
+from typing import List, Tuple
+from janito.tools.rich_console import print_info, print_success, print_error, print_warning
+from janito.tools.usage_tracker import track_usage
+
+
+@track_usage('search_operations')
+def search_text(text_pattern: str, file_pattern: str = "*", root_dir: str = ".", recursive: bool = True) -> Tuple[str, bool]:
+    """
+    Search for text patterns within files matching a filename pattern.
+    Files in .gitignore are always ignored.
+
+    Args:
+        text_pattern: Text pattern to search for within files
+        file_pattern: Pattern to match file paths against (e.g., "*.py", "*/tools/*.py")
+                      Multiple patterns can be specified using semicolons or spaces as separators
+        root_dir: Root directory to start search from (default: current directory)
+        recursive: Whether to search recursively in subdirectories (default: True)
+
+    Returns:
+        A tuple containing (message, is_error)
+    """
+    # Simplified initial message
+    print_info(f"Searching for '{text_pattern}' in '{file_pattern}'", "Text Search")
+    try:
+        # Convert to absolute path if relative
+        abs_root = os.path.abspath(root_dir)
+
+        if not os.path.isdir(abs_root):
+            error_msg = f"Error: Directory '{root_dir}' does not exist"
+            print_error(error_msg, "Directory Error")
+            return error_msg, True
+
+        # Compile the regex pattern for better performance
+        try:
+            regex = re.compile(text_pattern)
+        except re.error:
+            # Simplified error message without the specific regex error details
+            error_msg = f"Error: Invalid regex pattern '{text_pattern}'"
+            print_error(error_msg, "Search Error")
+            return error_msg, True
+
+        matching_files = []
+        match_count = 0
+        results = []
+
+        # Get gitignore patterns
+        ignored_patterns = _get_gitignore_patterns(abs_root)
+
+        # Handle multiple patterns separated by semicolons or spaces
+        patterns = []
+        if ';' in file_pattern:
+            patterns = file_pattern.split(';')
+        elif ' ' in file_pattern and not (os.path.sep in file_pattern or '/' in file_pattern):
+            # Only split by space if the pattern doesn't appear to be a path
+            patterns = file_pattern.split()
+        else:
+            patterns = [file_pattern]
+
+        # Process each pattern
+        for pattern in patterns:
+            # Construct the glob pattern with the root directory
+            glob_pattern = os.path.join(abs_root, pattern) if not pattern.startswith(os.path.sep) else pattern
+
+            # Use recursive glob if needed
+            if recursive:
+                # Use ** pattern for recursive search if not already in the pattern
+                if '**' not in glob_pattern:
+                    # Check if the pattern already has a directory component
+                    if os.path.sep in pattern or '/' in pattern:
+                        # Pattern already has directory component, keep as is
+                        pass
+                    else:
+                        # Add ** to search in all subdirectories
+                        glob_pattern = os.path.join(abs_root, '**', pattern)
+
+                # Use recursive=True for Python 3.5+ glob
+                glob_files = glob.glob(glob_pattern, recursive=True)
+            else:
+                # Non-recursive mode - only search in the specified directory
+                glob_files = glob.glob(glob_pattern)
+
+            # Process matching files
+            for file_path in glob_files:
+                # Skip directories and already processed files
+                if not os.path.isfile(file_path) or file_path in matching_files:
+                    continue
+
+                # Skip ignored files
+                if _is_ignored(file_path, ignored_patterns, abs_root):
+                    continue
+
+                file_matches = _search_file(file_path, regex, abs_root)
+                if file_matches:
+                    matching_files.append(file_path)
+                    match_count += len(file_matches)
+                    results.append(f"\n{os.path.relpath(file_path, abs_root)} ({len(file_matches)} matches):")
+                    results.extend(file_matches)
+
+        if matching_files:
+            # Only print the count summary, not the full results
+            summary = f"{match_count} matches in {len(matching_files)} files"
+            print_success(summary, "Search Results")
+
+            # Still return the full results for programmatic use
+            result_text = "\n".join(results)
+            result_msg = f"Searching for '{text_pattern}' in files matching '{file_pattern}':{result_text}\n{summary}"
+            return result_msg, False
+        else:
+            result_msg = f"No matches found for '{text_pattern}' in files matching '{file_pattern}'"
+            print_warning("No matches found.")
+            return result_msg, False
+
+    except Exception as e:
+        error_msg = f"Error searching text: {str(e)}"
+        print_error(error_msg, "Search Error")
+        return error_msg, True
+
+
+def _search_file(file_path: str, pattern: re.Pattern, root_dir: str) -> List[str]:
+    """
+    Search for regex pattern in a file and return matching lines with line numbers.
+
+    Args:
+        file_path: Path to the file to search
+        pattern: Compiled regex pattern to search for
+        root_dir: Root directory (for path display)
+
+    Returns:
+        List of formatted matches with line numbers and content
+    """
+    matches = []
+    try:
+        with open(file_path, 'r', encoding='utf-8', errors='replace') as f:
+            for i, line in enumerate(f, 1):
+                if pattern.search(line):
+                    # Truncate long lines for display
+                    display_line = line.strip()
+                    if len(display_line) > 100:
+                        display_line = display_line[:97] + "..."
+                    matches.append(f"  Line {i}: {display_line}")
+    except (UnicodeDecodeError, IOError):
+        # Skip binary files or files with encoding issues
+        pass
+    return matches
+
+
+def _get_gitignore_patterns(root_dir: str) -> List[str]:
+    """
+    Get patterns from .gitignore files.
+
+    Args:
+        root_dir: Root directory to start from
+
+    Returns:
+        List of gitignore patterns
+    """
+    patterns = []
+
+    # Check for .gitignore in the root directory
+    gitignore_path = os.path.join(root_dir, '.gitignore')
+    if os.path.isfile(gitignore_path):
+        try:
+            with open(gitignore_path, 'r', encoding='utf-8') as f:
+                for line in f:
+                    line = line.strip()
+                    # Skip empty lines and comments
+                    if line and not line.startswith('#'):
+                        patterns.append(line)
+        except Exception:
+            pass
+
+    # Add common patterns that are always ignored
+    common_patterns = [
+        '.git/', '.venv/', 'venv/', '__pycache__/', '*.pyc',
+        '*.pyo', '*.pyd', '.DS_Store', '*.so', '*.egg-info/'
+    ]
+    patterns.extend(common_patterns)
+
+    return patterns
+
+
+def _is_ignored(path: str, patterns: List[str], root_dir: str) -> bool:
+    """
+    Check if a path should be ignored based on gitignore patterns.
+
+    Args:
+        path: Path to check
+        patterns: List of gitignore patterns
+        root_dir: Root directory for relative paths
+
+    Returns:
+        True if the path should be ignored, False otherwise
+    """
+    # Get the relative path from the root directory
+    rel_path = os.path.relpath(path, root_dir)
+
+    # Convert to forward slashes for consistency with gitignore patterns
+    rel_path = rel_path.replace(os.sep, '/')
+
+    # Add trailing slash for directories
+    if os.path.isdir(path) and not rel_path.endswith('/'):
+        rel_path += '/'
+
+    for pattern in patterns:
+        # Handle negation patterns (those starting with !)
+        if pattern.startswith('!'):
+            continue  # Skip negation patterns for simplicity
+
+        # Handle directory-specific patterns (those ending with /)
+        if pattern.endswith('/'):
+            if os.path.isdir(path) and fnmatch.fnmatch(rel_path, pattern + '*'):
+                return True
+
+        # Handle file patterns
+        if fnmatch.fnmatch(rel_path, pattern):
+            return True
+
+        # Handle patterns without wildcards as path prefixes
+        if '*' not in pattern and '?' not in pattern and rel_path.startswith(pattern):
+            return True
+
+    return False
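The rewrite above drops the os.walk/fnmatch traversal in favour of glob-based matching: a bare pattern such as "*.py" is expanded to <root>/**/*.py and resolved with glob.glob(..., recursive=True). A minimal, standalone sketch of that expansion follows; the function name is illustrative and this is not janito's actual helper:

```python
import glob
import os

def expand_and_match(root_dir: str, pattern: str, recursive: bool = True) -> list:
    """Expand a filename pattern the way the rewritten search_text does:
    bare patterns get a '**' component so glob descends into subdirectories."""
    abs_root = os.path.abspath(root_dir)
    glob_pattern = os.path.join(abs_root, pattern)
    if recursive and '**' not in glob_pattern and '/' not in pattern and os.path.sep not in pattern:
        # No directory component in the pattern: search every level below the root
        glob_pattern = os.path.join(abs_root, '**', pattern)
    # recursive=True lets '**' match any number of directory levels
    return [p for p in glob.glob(glob_pattern, recursive=recursive) if os.path.isfile(p)]

# Example: every Python file under the current directory, at any depth
print(expand_and_match(".", "*.py"))
```

One behavioural consequence visible in the diff: the old version pruned ignored directories during the walk (by filtering dirnames), while the new version lets glob enumerate everything and filters individual paths afterwards with _is_ignored.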
janito/tools/str_replace_editor/handlers/str_replace.py CHANGED
@@ -60,7 +60,9 @@ def handle_str_replace(args: Dict[str, Any]) -> Tuple[str, bool]:
 
     # Check if old_str exists in the content (must match EXACTLY)
     if old_str not in content:
-        print_error("No exact match found for replacement. Please check your text and ensure whitespaces match exactly.", "Error")
+        # Only print error if not in trust mode
+        if not get_config().trust_mode:
+            print_error("No exact match", "?")
         return ("Error: No exact match found for replacement. Please check your text and ensure whitespaces match exactly.", True)
 
     # Count occurrences to check for multiple matches
janito/tools/str_replace_editor/handlers/view.py CHANGED
@@ -55,8 +55,8 @@ def handle_view(args: Dict[str, Any]) -> Tuple[str, bool]:
     file_path = pathlib.Path(path)
 
     if not file_path.exists():
-        print_error(f"File or directory {path} does not exist", "Error")
-        return (f"File or directory {path} does not exist", True)
+        print_error(f" (not found)", "Error")
+        return (f" (not found)", True)
 
     # If the path is a directory, list non-hidden files and directories up to 2 levels deep
     if file_path.is_dir():
@@ -94,9 +94,12 @@ def handle_view(args: Dict[str, Any]) -> Tuple[str, bool]:
             # Directory listings should not be truncated
             file_dir_count = len(result)
            output = "\n".join(result)
-            console.print(f"Found ", style="default", end="")
-            console.print(f"{file_dir_count}", style="cyan", end="")
-            console.print(" files and directories")
+
+            # Only print count if not in trust mode
+            if not get_config().trust_mode:
+                console.print(f"Found ", style="default", end="")
+                console.print(f"{file_dir_count}", style="cyan", end="")
+                console.print(" files and directories")
             return (output, False)
         except Exception as e:
             return (f"Error listing directory {path}: {str(e)}", True)
@@ -144,9 +147,12 @@ def handle_view(args: Dict[str, Any]) -> Tuple[str, bool]:
             return (truncated_content + "\n<response clipped>", False)
 
         content_to_print = "".join(numbered_content)
-        console.print("(", style="default", end="")
-        console.print(f"{len(numbered_content)}", style="cyan", end="")
-        console.print(")")
+
+        # Only print line count if not in trust mode
+        if not get_config().trust_mode:
+            console.print("(", style="default", end="")
+            console.print(f"{len(numbered_content)}", style="cyan", end="")
+            console.print(")")
         # Return the content as a string without any Rich objects
         return (content_to_print, False)
     except Exception as e:
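Both editor handlers now gate their informational console output behind get_config().trust_mode. The config module itself is not part of this diff, so the snippet below is only a hedged sketch of the pattern; the Config class, field name, and maybe_print helper are assumptions, not janito's API:

```python
# Hypothetical sketch of a trust-mode gate on console feedback.
from dataclasses import dataclass

@dataclass
class Config:
    trust_mode: bool = False  # assumed flag; janito's real Config is not shown here

_config = Config()

def get_config() -> Config:
    return _config

def maybe_print(message: str) -> None:
    # Informational output is suppressed entirely when trust mode is on
    if not get_config().trust_mode:
        print(message)

get_config().trust_mode = True
maybe_print("Found 12 files and directories")  # prints nothing in trust mode
```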
janito/tools/think.py ADDED
@@ -0,0 +1,37 @@
+"""
+Tool for thinking about something without obtaining new information or changing the database.
+"""
+from typing import Tuple
+import logging
+from janito.tools.usage_tracker import track_usage
+from janito.tools.rich_console import print_info
+
+# Set up logging
+logger = logging.getLogger(__name__)
+
+@track_usage('thoughts')
+def think(
+    thought: str,
+) -> Tuple[str, bool]:
+    """
+    Use the tool to think about something. It will not obtain new information or change the database,
+    but just append the thought to the log. Use it when complex reasoning or some cache memory is needed.
+
+    Args:
+        thought: A thought to think about.
+
+    Returns:
+        A tuple containing (message, is_error)
+    """
+    try:
+        # Log the thought
+        logger.info(f"Thought: {thought}")
+
+        # Print a confirmation message
+        print_info(f"Thought recorded: {thought[:50]}{'...' if len(thought) > 50 else ''}", "Thinking")
+
+        return (f"Thought recorded: {thought}", False)
+    except Exception as e:
+        error_msg = f"Error recording thought: {str(e)}"
+        logger.error(error_msg)
+        return (error_msg, True)
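The new think tool simply logs the thought and returns a (message, is_error) tuple. A short usage sketch, assuming the module path shown in the diff:

```python
from janito.tools.think import think

message, is_error = think("The failing test only breaks when the cache is cold; check warm-up order.")
print(is_error)  # False on success
print(message)   # "Thought recorded: The failing test only breaks when the cache is cold; ..."
```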
janito/tools/usage_tracker.py CHANGED
@@ -33,6 +33,7 @@ class ToolUsageTracker:
         self.search_operations = 0
         self.file_views = 0
         self.partial_file_views = 0
+        self.thoughts = 0  # Track the number of thoughts recorded
 
     def increment(self, counter_name: str, value: int = 1):
         """Increment a specific counter by the given value."""