tunacode-cli 0.0.56__py3-none-any.whl → 0.0.57__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tunacode-cli might be problematic. Click here for more details.

Files changed (45)
  1. tunacode/cli/commands/implementations/plan.py +8 -8
  2. tunacode/cli/commands/registry.py +2 -2
  3. tunacode/cli/repl.py +214 -407
  4. tunacode/cli/repl_components/command_parser.py +37 -4
  5. tunacode/cli/repl_components/error_recovery.py +79 -1
  6. tunacode/cli/repl_components/output_display.py +14 -11
  7. tunacode/cli/repl_components/tool_executor.py +7 -4
  8. tunacode/configuration/defaults.py +8 -0
  9. tunacode/constants.py +8 -2
  10. tunacode/core/agents/agent_components/agent_config.py +128 -65
  11. tunacode/core/agents/agent_components/node_processor.py +6 -2
  12. tunacode/core/code_index.py +83 -29
  13. tunacode/core/state.py +1 -1
  14. tunacode/core/token_usage/usage_tracker.py +2 -2
  15. tunacode/core/tool_handler.py +3 -3
  16. tunacode/prompts/system.md +117 -490
  17. tunacode/services/mcp.py +29 -7
  18. tunacode/tools/base.py +110 -0
  19. tunacode/tools/bash.py +96 -1
  20. tunacode/tools/exit_plan_mode.py +114 -32
  21. tunacode/tools/glob.py +366 -33
  22. tunacode/tools/grep.py +226 -77
  23. tunacode/tools/grep_components/result_formatter.py +98 -4
  24. tunacode/tools/list_dir.py +132 -2
  25. tunacode/tools/present_plan.py +111 -31
  26. tunacode/tools/read_file.py +91 -0
  27. tunacode/tools/run_command.py +99 -0
  28. tunacode/tools/schema_assembler.py +167 -0
  29. tunacode/tools/todo.py +108 -1
  30. tunacode/tools/update_file.py +94 -0
  31. tunacode/tools/write_file.py +86 -0
  32. tunacode/types.py +10 -9
  33. tunacode/ui/input.py +1 -0
  34. tunacode/ui/keybindings.py +1 -0
  35. tunacode/ui/panels.py +49 -27
  36. tunacode/ui/prompt_manager.py +13 -7
  37. tunacode/utils/json_utils.py +206 -0
  38. tunacode/utils/ripgrep.py +332 -9
  39. {tunacode_cli-0.0.56.dist-info → tunacode_cli-0.0.57.dist-info}/METADATA +5 -1
  40. {tunacode_cli-0.0.56.dist-info → tunacode_cli-0.0.57.dist-info}/RECORD +44 -43
  41. tunacode/tools/read_file_async_poc.py +0 -196
  42. {tunacode_cli-0.0.56.dist-info → tunacode_cli-0.0.57.dist-info}/WHEEL +0 -0
  43. {tunacode_cli-0.0.56.dist-info → tunacode_cli-0.0.57.dist-info}/entry_points.txt +0 -0
  44. {tunacode_cli-0.0.56.dist-info → tunacode_cli-0.0.57.dist-info}/licenses/LICENSE +0 -0
  45. {tunacode_cli-0.0.56.dist-info → tunacode_cli-0.0.57.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,206 @@
1
+ """
2
+ Module: tunacode.utils.json_utils
3
+
4
+ JSON parsing utilities with enhanced error handling and concatenated object support.
5
+ """
6
+
7
+ import json
8
+ import logging
9
+ from typing import Any, Dict, List, Optional, Union
10
+
11
+ from tunacode.constants import READ_ONLY_TOOLS
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class ConcatenatedJSONError(Exception):
    """Raised when concatenated JSON objects are detected but cannot be safely handled."""

    def __init__(self, message: str, objects_found: int, tool_name: Optional[str] = None):
        # Initialize the base Exception first so str(err) is the message,
        # then attach the diagnostic context for callers to inspect.
        super().__init__(message)
        self.message = message
        self.objects_found = objects_found
        self.tool_name = tool_name
24
+
25
+
26
def split_concatenated_json(json_string: str, strict_mode: bool = True) -> List[Dict[str, Any]]:
    """
    Split concatenated JSON objects like {"a": 1}{"b": 2} into separate objects.

    Scans the input character by character, tracking top-level brace depth
    while ignoring braces that appear inside string literals, and parses
    each complete {...} span independently.

    Args:
        json_string: String containing potentially concatenated JSON objects
        strict_mode: If True, invalid fragments are skipped quietly (debug log).
            If False, parse errors are logged as warnings; the fragment is
            still skipped in both modes.

    Returns:
        List of parsed JSON objects (non-dict values are ignored with a warning)

    Raises:
        json.JSONDecodeError: If no valid JSON objects can be extracted
    """
    objects: List[Dict[str, Any]] = []
    brace_depth = 0
    start_pos = 0
    in_string = False
    escape_next = False

    for i, char in enumerate(json_string):
        if in_string:
            # Escape sequences only have meaning inside string literals.
            # (Previously a backslash OUTSIDE a string also set the escape
            # flag and could swallow a following brace; in valid JSON a bare
            # backslash never occurs outside a string, so this tracking now
            # happens only while inside one.)
            if escape_next:
                escape_next = False
            elif char == "\\":
                escape_next = True
            elif char == '"':
                in_string = False
            continue

        if char == '"':
            in_string = True
            continue

        if char == "{":
            if brace_depth == 0:
                start_pos = i
            brace_depth += 1
        elif char == "}":
            brace_depth -= 1
            if brace_depth == 0:
                fragment = json_string[start_pos : i + 1].strip()
                try:
                    parsed = json.loads(fragment)
                except json.JSONDecodeError as e:
                    if strict_mode:
                        logger.debug(f"Invalid JSON fragment skipped: {fragment[:100]}...")
                    else:
                        logger.warning(f"JSON parse error: {e}")
                    continue
                if isinstance(parsed, dict):
                    objects.append(parsed)
                else:
                    logger.warning(f"Non-dict JSON object ignored: {type(parsed)}")

    if not objects:
        raise json.JSONDecodeError("No valid JSON objects found", json_string, 0)

    return objects
89
+
90
+
91
def validate_tool_args_safety(
    objects: List[Dict[str, Any]], tool_name: Optional[str] = None
) -> bool:
    """
    Validate whether it's safe to execute multiple JSON objects for a given tool.

    Args:
        objects: List of JSON objects to validate
        tool_name: Name of the tool (if known)

    Returns:
        bool: True if safe to execute, False otherwise

    Raises:
        ConcatenatedJSONError: If multiple objects detected for unsafe tool
    """
    object_count = len(objects)

    # Zero or one object is always safe.
    if object_count <= 1:
        return True

    # Unknown tool: refuse multi-object execution but don't raise.
    if not tool_name:
        logger.warning(f"Multiple JSON objects detected ({len(objects)}) with unknown tool")
        return False

    # Read-only tools can safely run multiple times.
    if tool_name in READ_ONLY_TOOLS:
        logger.info(f"Multiple JSON objects for read-only tool {tool_name} - allowing execution")
        return True

    # Write/execute tools: multiple objects likely indicate a model error.
    logger.warning(
        f"Multiple JSON objects detected for tool {tool_name} "
        f"({len(objects)} objects). This may indicate a model error."
    )
    raise ConcatenatedJSONError(
        f"Multiple JSON objects not safe for tool {tool_name}",
        objects_found=object_count,
        tool_name=tool_name,
    )
129
+
130
+
131
def safe_json_parse(
    json_string: str, tool_name: Optional[str] = None, allow_concatenated: bool = False
) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
    """
    Safely parse JSON with optional concatenated object support.

    Args:
        json_string: JSON string to parse
        tool_name: Name of the tool (for safety validation)
        allow_concatenated: Whether to attempt splitting concatenated objects

    Returns:
        Single dict if one object, or list of dicts if multiple objects

    Raises:
        json.JSONDecodeError: If parsing fails
        ConcatenatedJSONError: If concatenated objects are unsafe
    """
    try:
        parsed = json.loads(json_string)
    except json.JSONDecodeError as exc:
        # "Extra data" is how json.loads reports trailing content after a
        # complete value — the signature of concatenated objects.
        if not allow_concatenated or "Extra data" not in str(exc):
            raise
    else:
        if not isinstance(parsed, dict):
            raise json.JSONDecodeError(f"Expected dict, got {type(parsed)}", json_string, 0)
        return parsed

    logger.info("Attempting to split concatenated JSON objects")

    objects = split_concatenated_json(json_string)

    if not validate_tool_args_safety(objects, tool_name):
        # Not safe to run them all — fall back to the first object only.
        logger.warning(f"Using first of {len(objects)} JSON objects only")
        return objects[0]

    return objects[0] if len(objects) == 1 else objects
176
+
177
+
178
def merge_json_objects(objects: List[Dict[str, Any]], strategy: str = "first") -> Dict[str, Any]:
    """
    Merge multiple JSON objects using different strategies.

    Args:
        objects: List of JSON objects to merge
        strategy: Merge strategy ("first", "last", "combine")

    Returns:
        Single merged JSON object

    Raises:
        ValueError: If the strategy is not recognized
    """
    # Trivial cases: nothing to merge.
    if not objects:
        return {}
    if len(objects) == 1:
        return objects[0]

    if strategy == "first":
        return objects[0]
    if strategy == "last":
        return objects[-1]
    if strategy == "combine":
        # Fold all objects together; later values override earlier ones.
        merged: Dict[str, Any] = {}
        for obj in objects:
            merged.update(obj)
        return merged

    raise ValueError(f"Unknown merge strategy: {strategy}")
tunacode/utils/ripgrep.py CHANGED
@@ -1,17 +1,340 @@
1
+ """Ripgrep binary management and execution utilities."""
2
+
3
+ import functools
4
+ import logging
5
+ import os
6
+ import platform
7
+ import shutil
1
8
  import subprocess
2
- from typing import List
9
+ from pathlib import Path
10
+ from typing import List, Optional, Tuple
3
11
 
12
+ logger = logging.getLogger(__name__)
4
13
 
5
- def ripgrep(pattern: str, directory: str = ".") -> List[str]:
6
- """Return a list of file paths matching a pattern using ripgrep."""
14
+
15
+ @functools.lru_cache(maxsize=1)
16
+ def get_platform_identifier() -> Tuple[str, str]:
17
+ """Get the current platform identifier.
18
+
19
+ Returns:
20
+ Tuple of (platform_key, system_name)
21
+ """
22
+ system = platform.system().lower()
23
+ machine = platform.machine().lower()
24
+
25
+ if system == "linux":
26
+ if machine in ["x86_64", "amd64"]:
27
+ return "x64-linux", system
28
+ elif machine in ["aarch64", "arm64"]:
29
+ return "arm64-linux", system
30
+ elif system == "darwin":
31
+ if machine in ["x86_64", "amd64"]:
32
+ return "x64-darwin", system
33
+ elif machine in ["arm64", "aarch64"]:
34
+ return "arm64-darwin", system
35
+ elif system == "windows":
36
+ if machine in ["x86_64", "amd64"]:
37
+ return "x64-win32", system
38
+
39
+ raise ValueError(f"Unsupported platform: {system} {machine}")
40
+
41
+
42
@functools.lru_cache(maxsize=1)
def get_ripgrep_binary_path() -> Optional[Path]:
    """Resolve the path to the ripgrep binary.

    Resolution order:
    1. Environment variable override (TUNACODE_RIPGREP_PATH)
    2. System ripgrep (if newer or equal version)
    3. Bundled ripgrep binary
    4. None (fallback to Python-based search)

    NOTE: the result is cached for the process lifetime, so later changes to
    the environment variable are not picked up.

    Returns:
        Path to ripgrep binary or None if not available
    """
    # 1. Explicit override via environment variable.
    override = os.environ.get("TUNACODE_RIPGREP_PATH")
    if override:
        candidate = Path(override)
        if candidate.exists() and candidate.is_file():
            logger.debug(f"Using ripgrep from environment variable: {candidate}")
            return candidate
        logger.warning(f"Invalid TUNACODE_RIPGREP_PATH: {override}")

    # 2. System-installed ripgrep, accepted only if its version is recent enough.
    which_result = shutil.which("rg")
    if which_result:
        system_binary = Path(which_result)
        if _check_ripgrep_version(system_binary):
            logger.debug(f"Using system ripgrep: {system_binary}")
            return system_binary

    # 3. Bundled binary shipped under the repository's vendor directory.
    try:
        platform_key, _ = get_platform_identifier()
        binary_name = "rg.exe" if platform_key == "x64-win32" else "rg"

        # Vendor directory is located relative to this source file.
        vendor_dir = (
            Path(__file__).parent.parent.parent.parent / "vendor" / "ripgrep" / platform_key
        )
        bundled = vendor_dir / binary_name

        if bundled.exists():
            logger.debug(f"Using bundled ripgrep: {bundled}")
            return bundled
    except Exception as e:
        logger.debug(f"Could not find bundled ripgrep: {e}")

    # 4. Nothing found; callers fall back to the pure-Python search.
    logger.debug("No ripgrep binary found, will use Python fallback")
    return None
92
+
93
+
94
def _check_ripgrep_version(rg_path: Path, min_version: str = "13.0.0") -> bool:
    """Check if ripgrep version meets minimum requirement.

    Args:
        rg_path: Path to ripgrep binary
        min_version: Minimum required version

    Returns:
        True if version is sufficient, False otherwise (including when the
        binary cannot be executed or its output cannot be parsed)
    """

    def as_tuple(version: str) -> tuple:
        # "14.1.1" -> (14, 1, 1); tuple comparison then orders versions.
        return tuple(int(part) for part in version.split("."))

    try:
        proc = subprocess.run(
            [str(rg_path), "--version"],
            capture_output=True,
            text=True,
            timeout=1,
        )
        if proc.returncode == 0:
            # First stdout line looks like "ripgrep 14.1.1" — take the last token.
            reported = proc.stdout.split("\n")[0].split()[-1]
            return as_tuple(reported) >= as_tuple(min_version)
    except Exception as e:
        # Any failure (missing binary, timeout, unparsable version) counts as "too old".
        logger.debug(f"Could not check ripgrep version: {e}")

    return False
125
+
126
+
127
class RipgrepExecutor:
    """Wrapper for executing ripgrep commands with error handling.

    Uses the resolved ripgrep binary when available and transparently falls
    back to a pure-Python implementation when the binary is missing or fails.
    """

    def __init__(self, binary_path: Optional[Path] = None):
        """Initialize the executor.

        Args:
            binary_path: Optional path to ripgrep binary. When omitted, the
                binary is resolved via get_ripgrep_binary_path().
        """
        self.binary_path = binary_path or get_ripgrep_binary_path()
        # No binary at all -> every call routes to the Python fallbacks.
        self._use_python_fallback = self.binary_path is None

        if self._use_python_fallback:
            logger.info("Ripgrep binary not available, using Python fallback")

    def search(
        self,
        pattern: str,
        path: str = ".",
        *,
        timeout: int = 10,
        max_matches: Optional[int] = None,
        file_pattern: Optional[str] = None,
        case_insensitive: bool = False,
        multiline: bool = False,
        context_before: int = 0,
        context_after: int = 0,
        **kwargs,
    ) -> List[str]:
        """Execute a ripgrep search.

        Args:
            pattern: Search pattern (regex)
            path: Directory or file to search
            timeout: Maximum execution time in seconds
            max_matches: Maximum number of matches to return
            file_pattern: Glob pattern for files to include
            case_insensitive: Case-insensitive search
            multiline: Enable multiline mode
            context_before: Lines of context before match
            context_after: Lines of context after match
            **kwargs: Additional ripgrep arguments (currently ignored)

        Returns:
            List of matching lines or file paths
        """
        if self._use_python_fallback:
            # Fallback supports only a subset of options (pattern, path,
            # file glob, case folding); context/multiline flags are dropped.
            return self._python_fallback_search(
                pattern, path, file_pattern=file_pattern, case_insensitive=case_insensitive
            )

        try:
            cmd = [str(self.binary_path)]

            # Translate keyword options into ripgrep CLI flags.
            if case_insensitive:
                cmd.append("-i")
            if multiline:
                cmd.extend(["-U", "--multiline-dotall"])
            if context_before > 0:
                cmd.extend(["-B", str(context_before)])
            if context_after > 0:
                cmd.extend(["-A", str(context_after)])
            if max_matches:
                cmd.extend(["-m", str(max_matches)])
            if file_pattern:
                cmd.extend(["-g", file_pattern])

            # Pattern and search root come last.
            cmd.extend([pattern, path])

            logger.debug(f"Executing ripgrep: {' '.join(cmd)}")

            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=timeout,
            )

            # Exit code 0 = matches found, 1 = no matches; anything else is an error.
            if result.returncode in [0, 1]:
                return [line.strip() for line in result.stdout.splitlines() if line.strip()]
            else:
                logger.warning(f"Ripgrep error: {result.stderr}")
                return []

        except subprocess.TimeoutExpired:
            logger.warning(f"Ripgrep search timed out after {timeout} seconds")
            return []
        except Exception as e:
            logger.error(f"Ripgrep execution failed: {e}")
            # Fix: forward case_insensitive so the fallback honors the
            # requested search semantics (it was previously dropped here,
            # silently making failed-binary searches case-sensitive).
            return self._python_fallback_search(
                pattern, path, file_pattern=file_pattern, case_insensitive=case_insensitive
            )

    def list_files(self, pattern: str, directory: str = ".") -> List[str]:
        """List files matching a glob pattern using ripgrep.

        Args:
            pattern: Glob pattern for files
            directory: Directory to search

        Returns:
            List of file paths
        """
        if self._use_python_fallback:
            return self._python_fallback_list_files(pattern, directory)

        try:
            result = subprocess.run(
                [str(self.binary_path), "--files", "-g", pattern, directory],
                capture_output=True,
                text=True,
                timeout=5,
            )
            return [line.strip() for line in result.stdout.splitlines() if line.strip()]
        except Exception:
            return self._python_fallback_list_files(pattern, directory)

    def _python_fallback_search(
        self,
        pattern: str,
        path: str,
        file_pattern: Optional[str] = None,
        case_insensitive: bool = False,
    ) -> List[str]:
        """Python-based fallback search implementation.

        Returns matches formatted as "<file>:<line_number>:<line>".

        NOTE(review): unlike ripgrep's recursive -g matching, a plain
        file_pattern glob here is non-recursive — confirm callers pass
        "**/..." patterns when they need recursion.
        """
        import re
        from pathlib import Path

        results = []
        path_obj = Path(path)

        # Compile regex pattern once, up front.
        flags = re.IGNORECASE if case_insensitive else 0
        try:
            regex = re.compile(pattern, flags)
        except re.error:
            logger.error(f"Invalid regex pattern: {pattern}")
            return []

        # A file path searches just that file; a directory is globbed.
        if path_obj.is_file():
            files = [path_obj]
        else:
            glob_pattern = file_pattern or "**/*"
            files = list(path_obj.glob(glob_pattern))

        for file_path in files:
            if not file_path.is_file():
                continue

            try:
                with file_path.open("r", encoding="utf-8", errors="ignore") as f:
                    for line_num, line in enumerate(f, 1):
                        if regex.search(line):
                            results.append(f"{file_path}:{line_num}:{line.strip()}")
            except Exception:  # nosec B112 - continue on file read errors is appropriate
                continue

        return results

    def _python_fallback_list_files(self, pattern: str, directory: str) -> List[str]:
        """Python-based fallback for listing files matching a glob pattern."""
        from pathlib import Path

        try:
            base_path = Path(directory)
            return [str(p) for p in base_path.glob(pattern) if p.is_file()]
        except Exception:
            return []
296
+
297
+
298
# Maintain backward compatibility
def ripgrep(pattern: str, directory: str = ".") -> List[str]:
    """Return a list of file paths matching a pattern using ripgrep.

    This function maintains backward compatibility with the original implementation.
    """
    # Delegate to a throwaway executor; it resolves the binary (or the
    # Python fallback) on construction.
    return RipgrepExecutor().list_files(pattern, directory)
306
+
307
+
308
# Performance metrics collection
class RipgrepMetrics:
    """Collect performance metrics for ripgrep operations."""

    def __init__(self):
        # All counters start at zero; updated only via record_search().
        self.search_count = 0
        self.total_search_time = 0.0
        self.fallback_count = 0

    def record_search(self, duration: float, used_fallback: bool = False):
        """Record a search operation."""
        self.search_count += 1
        self.total_search_time += duration
        if used_fallback:
            self.fallback_count += 1

    @property
    def average_search_time(self) -> float:
        """Get average search time."""
        return self.total_search_time / self.search_count if self.search_count else 0.0

    @property
    def fallback_rate(self) -> float:
        """Get fallback usage rate."""
        return self.fallback_count / self.search_count if self.search_count else 0.0


# Global metrics instance
metrics = RipgrepMetrics()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: tunacode-cli
3
- Version: 0.0.56
3
+ Version: 0.0.57
4
4
  Summary: Your agentic CLI developer.
5
5
  Author-email: larock22 <noreply@github.com>
6
6
  License: MIT
@@ -29,6 +29,7 @@ Requires-Dist: rich==14.0.0
29
29
  Requires-Dist: tiktoken>=0.5.2
30
30
  Provides-Extra: dev
31
31
  Requires-Dist: build; extra == "dev"
32
+ Requires-Dist: twine; extra == "dev"
32
33
  Requires-Dist: ruff; extra == "dev"
33
34
  Requires-Dist: pytest; extra == "dev"
34
35
  Requires-Dist: pytest-cov; extra == "dev"
@@ -40,6 +41,9 @@ Requires-Dist: unimport>=1.0.0; extra == "dev"
40
41
  Requires-Dist: autoflake>=2.0.0; extra == "dev"
41
42
  Requires-Dist: dead>=1.5.0; extra == "dev"
42
43
  Requires-Dist: hatch>=1.6.0; extra == "dev"
44
+ Requires-Dist: defusedxml; extra == "dev"
45
+ Requires-Dist: mypy; extra == "dev"
46
+ Requires-Dist: bandit; extra == "dev"
43
47
  Dynamic: license-file
44
48
 
45
49
  # TunaCode CLI