tunacode-cli 0.0.48__py3-none-any.whl → 0.0.49__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tunacode-cli has been flagged as possibly problematic by the registry's automated checks.
Files changed (45)
  1. api/auth.py +13 -0
  2. api/users.py +8 -0
  3. tunacode/__init__.py +4 -0
  4. tunacode/cli/main.py +4 -0
  5. tunacode/cli/repl.py +39 -6
  6. tunacode/configuration/defaults.py +0 -1
  7. tunacode/constants.py +7 -1
  8. tunacode/core/agents/main.py +268 -245
  9. tunacode/core/agents/utils.py +54 -6
  10. tunacode/core/logging/__init__.py +29 -0
  11. tunacode/core/logging/config.py +28 -0
  12. tunacode/core/logging/formatters.py +48 -0
  13. tunacode/core/logging/handlers.py +83 -0
  14. tunacode/core/logging/logger.py +8 -0
  15. tunacode/core/recursive/__init__.py +18 -0
  16. tunacode/core/recursive/aggregator.py +467 -0
  17. tunacode/core/recursive/budget.py +414 -0
  18. tunacode/core/recursive/decomposer.py +398 -0
  19. tunacode/core/recursive/executor.py +470 -0
  20. tunacode/core/recursive/hierarchy.py +488 -0
  21. tunacode/core/state.py +45 -0
  22. tunacode/exceptions.py +23 -0
  23. tunacode/tools/base.py +7 -1
  24. tunacode/types.py +1 -1
  25. tunacode/ui/completers.py +2 -2
  26. tunacode/ui/console.py +30 -9
  27. tunacode/ui/input.py +2 -1
  28. tunacode/ui/keybindings.py +58 -1
  29. tunacode/ui/logging_compat.py +44 -0
  30. tunacode/ui/output.py +7 -6
  31. tunacode/ui/panels.py +30 -5
  32. tunacode/ui/recursive_progress.py +380 -0
  33. tunacode/utils/retry.py +163 -0
  34. tunacode/utils/security.py +3 -2
  35. tunacode/utils/token_counter.py +1 -2
  36. {tunacode_cli-0.0.48.dist-info → tunacode_cli-0.0.49.dist-info}/METADATA +2 -2
  37. {tunacode_cli-0.0.48.dist-info → tunacode_cli-0.0.49.dist-info}/RECORD +41 -29
  38. {tunacode_cli-0.0.48.dist-info → tunacode_cli-0.0.49.dist-info}/top_level.txt +1 -0
  39. tunacode/core/agents/dspy_integration.py +0 -223
  40. tunacode/core/agents/dspy_tunacode.py +0 -458
  41. tunacode/prompts/dspy_task_planning.md +0 -45
  42. tunacode/prompts/dspy_tool_selection.md +0 -58
  43. {tunacode_cli-0.0.48.dist-info → tunacode_cli-0.0.49.dist-info}/WHEEL +0 -0
  44. {tunacode_cli-0.0.48.dist-info → tunacode_cli-0.0.49.dist-info}/entry_points.txt +0 -0
  45. {tunacode_cli-0.0.48.dist-info → tunacode_cli-0.0.49.dist-info}/licenses/LICENSE +0 -0
@@ -1,13 +1,20 @@
 import asyncio
 import importlib
 import json
+import logging
 import os
 import re
 from collections.abc import Iterator
 from datetime import datetime, timezone
 from typing import Any

-from tunacode.constants import READ_ONLY_TOOLS
+from tunacode.constants import (
+    JSON_PARSE_BASE_DELAY,
+    JSON_PARSE_MAX_DELAY,
+    JSON_PARSE_MAX_RETRIES,
+    READ_ONLY_TOOLS,
+)
+from tunacode.exceptions import ToolBatchingJSONError
 from tunacode.types import (
     ErrorMessage,
     StateManager,
@@ -16,6 +23,9 @@ from tunacode.types import (
     ToolName,
 )
 from tunacode.ui import console as ui
+from tunacode.utils.retry import retry_json_parse_async
+
+logger = logging.getLogger(__name__)


 # Lazy import for Agent and Tool
@@ -167,11 +177,28 @@ async def parse_json_tool_calls(
         if brace_count == 0 and start_pos != -1:
             potential_json = text[start_pos : i + 1]
             try:
-                parsed = json.loads(potential_json)
+                # Use retry logic for JSON parsing
+                parsed = await retry_json_parse_async(
+                    potential_json,
+                    max_retries=JSON_PARSE_MAX_RETRIES,
+                    base_delay=JSON_PARSE_BASE_DELAY,
+                    max_delay=JSON_PARSE_MAX_DELAY,
+                )
                 if isinstance(parsed, dict) and "tool" in parsed and "args" in parsed:
                     potential_jsons.append((parsed["tool"], parsed["args"]))
-            except json.JSONDecodeError:
-                pass
+            except json.JSONDecodeError as e:
+                # After all retries failed
+                logger.error(f"JSON parsing failed after {JSON_PARSE_MAX_RETRIES} retries: {e}")
+                if state_manager.session.show_thoughts:
+                    await ui.error(
+                        f"Failed to parse tool JSON after {JSON_PARSE_MAX_RETRIES} retries"
+                    )
+                # Raise custom exception for better error handling
+                raise ToolBatchingJSONError(
+                    json_content=potential_json,
+                    retry_count=JSON_PARSE_MAX_RETRIES,
+                    original_error=e,
+                ) from e
             start_pos = -1

     matches = potential_jsons
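The retry parameters above come from tunacode/constants.py and the helper itself from the new tunacode/utils/retry.py (+163 lines), neither of which is expanded in this hunk. As a rough shape only — the default values and internals below are assumptions, not the shipped implementation — an async JSON-parse retry with capped exponential backoff could look like:

import asyncio
import json
from typing import Any


async def retry_json_parse_async(
    content: str,
    max_retries: int = 3,      # assumed default; the real value is JSON_PARSE_MAX_RETRIES
    base_delay: float = 0.1,   # assumed default (JSON_PARSE_BASE_DELAY)
    max_delay: float = 5.0,    # assumed default (JSON_PARSE_MAX_DELAY)
) -> Any:
    """Parse JSON, retrying with exponential backoff before giving up."""
    last_error = None
    for attempt in range(max_retries + 1):
        try:
            return json.loads(content)
        except json.JSONDecodeError as e:
            last_error = e
            if attempt < max_retries:
                # Back off: base_delay, 2*base_delay, 4*base_delay, ... capped at max_delay
                await asyncio.sleep(min(base_delay * (2 ** attempt), max_delay))
    raise last_error

The callers catch json.JSONDecodeError only after the helper has exhausted its retries, so a sketch like this must re-raise the final decode error rather than swallow it.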
@@ -220,7 +247,13 @@ async def extract_and_execute_tool_calls(

     for match in code_matches:
         try:
-            tool_data = json.loads(match)
+            # Use retry logic for JSON parsing in code blocks
+            tool_data = await retry_json_parse_async(
+                match,
+                max_retries=JSON_PARSE_MAX_RETRIES,
+                base_delay=JSON_PARSE_BASE_DELAY,
+                max_delay=JSON_PARSE_MAX_DELAY,
+            )
             if "tool" in tool_data and "args" in tool_data:

                 class MockToolCall:
@@ -240,7 +273,22 @@ async def extract_and_execute_tool_calls(
                 if state_manager.session.show_thoughts:
                     await ui.muted(f"FALLBACK: Executed {tool_data['tool']} from code block")

-        except (json.JSONDecodeError, KeyError, Exception) as e:
+        except json.JSONDecodeError as e:
+            # After all retries failed
+            logger.error(
+                f"Code block JSON parsing failed after {JSON_PARSE_MAX_RETRIES} retries: {e}"
+            )
+            if state_manager.session.show_thoughts:
+                await ui.error(
+                    f"Failed to parse code block tool JSON after {JSON_PARSE_MAX_RETRIES} retries"
+                )
+            # Raise custom exception for better error handling
+            raise ToolBatchingJSONError(
+                json_content=match,
+                retry_count=JSON_PARSE_MAX_RETRIES,
+                original_error=e,
+            ) from e
+        except (KeyError, Exception) as e:
             if state_manager.session.show_thoughts:
                 await ui.error(f"Error parsing code block tool call: {e!s}")

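Both fallback paths now raise ToolBatchingJSONError with json_content, retry_count, and original_error keyword arguments; the class itself is added in tunacode/exceptions.py (+23 lines), which this diff view does not expand. A plausible shape, with the base class and message format guessed rather than taken from the source:

class ToolBatchingJSONError(Exception):  # the real base class in tunacode/exceptions.py may differ
    """Tool-call JSON could not be parsed even after the configured retries."""

    def __init__(self, json_content: str, retry_count: int, original_error: Exception):
        self.json_content = json_content
        self.retry_count = retry_count
        self.original_error = original_error
        super().__init__(
            f"Failed to parse tool JSON after {retry_count} retries: {original_error}"
        )

Keeping the offending payload and the original decode error on the exception lets callers report the failure without re-parsing the text.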
@@ -0,0 +1,29 @@
+import logging
+
+# Custom log level: THOUGHT
+THOUGHT = 25
+logging.addLevelName(THOUGHT, "THOUGHT")
+
+
+def thought(self, message, *args, **kwargs):
+    if self.isEnabledFor(THOUGHT):
+        self._log(THOUGHT, message, args, **kwargs)
+
+
+logging.Logger.thought = thought
+
+
+# RichHandler for UI output (stub, real implementation in handlers.py)
+class RichHandler(logging.Handler):
+    def emit(self, record):
+        # Actual implementation in handlers.py
+        pass
+
+
+def setup_logging(config_path=None):
+    """
+    Set up logging configuration from YAML file.
+    """
+    from .config import LogConfig
+
+    LogConfig.load(config_path)
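This __init__ module registers a THOUGHT level at 25 (between INFO at 20 and WARNING at 30) and monkey-patches a thought() method onto logging.Logger, so the level is available on any logger once the package has been imported. A small usage sketch; the basicConfig setup and logger name are illustrative, not how TunaCode itself wires things up:

import logging

import tunacode.core.logging  # importing registers THOUGHT and patches Logger.thought()

logging.basicConfig(level=logging.INFO)      # INFO (20) < THOUGHT (25), so thoughts are emitted
logger = logging.getLogger("tunacode.demo")  # illustrative logger name
logger.thought("Planning the next tool call...")
# -> THOUGHT:tunacode.demo:Planning the next tool call...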
@@ -0,0 +1,28 @@
+import logging
+import logging.config
+import os
+
+import yaml
+
+DEFAULT_CONFIG_PATH = os.path.join(
+    os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "config", "logging.yaml"
+)
+
+
+class LogConfig:
+    @staticmethod
+    def load(config_path=None):
+        """
+        Load logging configuration from YAML file and apply it.
+        """
+        path = config_path or DEFAULT_CONFIG_PATH
+        if not os.path.exists(path):
+            raise FileNotFoundError(f"Logging config file not found: {path}")
+        with open(path, "r") as f:
+            config = yaml.safe_load(f)
+        logging_config = config.get("logging", config)
+        try:
+            logging.config.dictConfig(logging_config)
+        except Exception as e:
+            print(f"Failed to configure logging: {e}")
+            logging.basicConfig(level=logging.INFO)
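LogConfig.load() reads a YAML file, unwraps an optional top-level logging: key, hands the result to logging.config.dictConfig, and falls back to basicConfig(level=INFO) if dictConfig fails (a missing file raises FileNotFoundError instead of falling back). The bundled config/logging.yaml is not part of this diff, so as an illustration only, the parsed dictionary it needs to produce follows the standard dictConfig schema, roughly:

import logging.config

# Illustrative only -- the shipped logging.yaml presumably references the custom
# formatters and handlers added in formatters.py and handlers.py below.
logging.config.dictConfig(
    {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "detailed": {
                "format": "[%(asctime)s] [%(levelname)s] [%(name)s:%(lineno)d] - %(message)s"
            }
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "detailed"}
        },
        "root": {"level": "INFO", "handlers": ["console"]},
    }
)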
@@ -0,0 +1,48 @@
+import logging
+
+
+class SimpleFormatter(logging.Formatter):
+    """
+    Simple formatter for UI output.
+    """
+
+    def __init__(self):
+        super().__init__("[%(levelname)s] %(message)s")
+
+
+class DetailedFormatter(logging.Formatter):
+    """
+    Detailed formatter for backend text logs.
+    """
+
+    def __init__(self):
+        super().__init__("[%(asctime)s] [%(levelname)s] [%(name)s:%(lineno)d] - %(message)s")
+
+
+try:
+    from pythonjsonlogger import jsonlogger
+
+    class JSONFormatter(jsonlogger.JsonFormatter):
+        """
+        JSON formatter for structured logs.
+        """
+
+        def __init__(self):
+            super().__init__("%(asctime)s %(name)s %(levelname)s %(message)s")
+except ImportError:
+    import json
+
+    class JSONFormatter(logging.Formatter):
+        """
+        Fallback JSON formatter if pythonjsonlogger is not installed.
+        """
+
+        def format(self, record):
+            log_entry = {
+                "timestamp": self.formatTime(record),
+                "level": record.levelname,
+                "name": record.name,
+                "line": record.lineno,
+                "message": record.getMessage(),
+            }
+            return json.dumps(log_entry)
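SimpleFormatter targets console/UI output, DetailedFormatter plain-text backend logs, and JSONFormatter structured logs, with a stdlib-only fallback when python-json-logger is not installed. They are presumably selected through the YAML config, but they also work standalone; a quick check of the fallback formatter with an illustrative logger name:

import logging

from tunacode.core.logging.formatters import JSONFormatter

handler = logging.StreamHandler()
handler.setFormatter(JSONFormatter())

logger = logging.getLogger("demo")  # illustrative name
logger.addHandler(handler)
logger.warning("disk almost full")
# fallback output: {"timestamp": "...", "level": "WARNING", "name": "demo", "line": ..., "message": "disk almost full"}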
@@ -0,0 +1,83 @@
+import json
+import logging
+
+from rich.console import Console
+from rich.text import Text
+
+# Global context for streaming state
+_streaming_context = {"just_finished": False}
+
+
+class RichHandler(logging.Handler):
+    """
+    Handler that outputs logs to the console using rich formatting.
+    """
+
+    level_icons = {
+        "INFO": "",
+        "WARNING": "⚠️",
+        "ERROR": "❌",
+        "CRITICAL": "🚨",
+        "THOUGHT": "🤔",
+        "DEBUG": "",
+    }
+
+    def __init__(self, level=logging.NOTSET):
+        super().__init__(level)
+        self.console = Console()
+
+    def emit(self, record):
+        try:
+            icon = self.level_icons.get(record.levelname, "")
+            timestamp = self.formatTime(record)
+            msg = self.format(record)
+            if icon:
+                output = f"[{timestamp}] {icon} {msg}"
+            else:
+                output = f"[{timestamp}] {msg}"
+
+            # Check if we just finished streaming to avoid extra newlines
+            just_finished_streaming = _streaming_context.get("just_finished", False)
+            if just_finished_streaming:
+                _streaming_context["just_finished"] = False  # Reset after use
+                # Don't add extra newline when transitioning from streaming
+                self.console.print(Text(output), end="\n")
+            else:
+                self.console.print(Text(output))
+        except Exception:
+            self.handleError(record)
+
+    def formatTime(self, record, datefmt=None):
+        from datetime import datetime
+
+        ct = datetime.fromtimestamp(record.created)
+        if datefmt:
+            return ct.strftime(datefmt)
+        return ct.strftime("%Y-%m-%d %H:%M:%S")
+
+
+class StructuredFileHandler(logging.FileHandler):
+    """
+    Handler that outputs logs as structured JSON lines.
+    """
+
+    def emit(self, record):
+        try:
+            log_entry = {
+                "timestamp": self.formatTime(record),
+                "level": record.levelname,
+                "name": record.name,
+                "line": record.lineno,
+                "message": record.getMessage(),
+                "extra_data": getattr(record, "extra", {}),
+            }
+            self.stream.write(json.dumps(log_entry) + "\n")
+            self.flush()
+        except Exception:
+            self.handleError(record)
+
+    def formatTime(self, record, datefmt=None):
+        from datetime import datetime, timezone
+
+        ct = datetime.fromtimestamp(record.created, tz=timezone.utc)
+        return ct.isoformat()
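RichHandler prepends a timestamp and a per-level icon to whatever the attached formatter produces, and consults the module-level _streaming_context flag so it does not add an extra newline right after streamed output; StructuredFileHandler writes one JSON object per line with UTC ISO-8601 timestamps. Both can also be wired up by hand, outside the YAML config — a minimal sketch with an illustrative log path and logger name:

import logging

from tunacode.core.logging.formatters import SimpleFormatter
from tunacode.core.logging.handlers import RichHandler, StructuredFileHandler

logger = logging.getLogger("tunacode")        # illustrative name
logger.setLevel(logging.DEBUG)

console = RichHandler()
console.setFormatter(SimpleFormatter())       # RichHandler adds its own "[timestamp] icon" prefix
logger.addHandler(console)

json_log = StructuredFileHandler("tunacode.log")  # illustrative path
logger.addHandler(json_log)                   # appends one JSON object per line

logger.error("tool execution failed")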
@@ -0,0 +1,8 @@
+import logging
+
+
+def get_logger(name=None):
+    """
+    Get a logger instance with the given name.
+    """
+    return logging.getLogger(name)
@@ -0,0 +1,18 @@
+"""Module: tunacode.core.recursive
+
+Recursive task execution system for complex task decomposition and execution.
+"""
+
+from .aggregator import ResultAggregator
+from .budget import BudgetManager
+from .decomposer import TaskDecomposer
+from .executor import RecursiveTaskExecutor
+from .hierarchy import TaskHierarchy
+
+__all__ = [
+    "RecursiveTaskExecutor",
+    "TaskDecomposer",
+    "TaskHierarchy",
+    "BudgetManager",
+    "ResultAggregator",
+]