ripperdoc-0.2.3-py3-none-any.whl → ripperdoc-0.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76) hide show
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/__main__.py +0 -5
  3. ripperdoc/cli/cli.py +37 -16
  4. ripperdoc/cli/commands/__init__.py +2 -0
  5. ripperdoc/cli/commands/agents_cmd.py +12 -9
  6. ripperdoc/cli/commands/compact_cmd.py +7 -3
  7. ripperdoc/cli/commands/context_cmd.py +35 -15
  8. ripperdoc/cli/commands/doctor_cmd.py +27 -14
  9. ripperdoc/cli/commands/exit_cmd.py +1 -1
  10. ripperdoc/cli/commands/mcp_cmd.py +13 -8
  11. ripperdoc/cli/commands/memory_cmd.py +5 -5
  12. ripperdoc/cli/commands/models_cmd.py +47 -16
  13. ripperdoc/cli/commands/permissions_cmd.py +302 -0
  14. ripperdoc/cli/commands/resume_cmd.py +1 -2
  15. ripperdoc/cli/commands/tasks_cmd.py +24 -13
  16. ripperdoc/cli/ui/rich_ui.py +523 -396
  17. ripperdoc/cli/ui/tool_renderers.py +298 -0
  18. ripperdoc/core/agents.py +172 -4
  19. ripperdoc/core/config.py +130 -6
  20. ripperdoc/core/default_tools.py +13 -2
  21. ripperdoc/core/permissions.py +20 -14
  22. ripperdoc/core/providers/__init__.py +31 -15
  23. ripperdoc/core/providers/anthropic.py +122 -8
  24. ripperdoc/core/providers/base.py +93 -15
  25. ripperdoc/core/providers/gemini.py +539 -96
  26. ripperdoc/core/providers/openai.py +371 -26
  27. ripperdoc/core/query.py +301 -62
  28. ripperdoc/core/query_utils.py +51 -7
  29. ripperdoc/core/skills.py +295 -0
  30. ripperdoc/core/system_prompt.py +79 -67
  31. ripperdoc/core/tool.py +15 -6
  32. ripperdoc/sdk/client.py +14 -1
  33. ripperdoc/tools/ask_user_question_tool.py +431 -0
  34. ripperdoc/tools/background_shell.py +82 -26
  35. ripperdoc/tools/bash_tool.py +356 -209
  36. ripperdoc/tools/dynamic_mcp_tool.py +428 -0
  37. ripperdoc/tools/enter_plan_mode_tool.py +226 -0
  38. ripperdoc/tools/exit_plan_mode_tool.py +153 -0
  39. ripperdoc/tools/file_edit_tool.py +53 -10
  40. ripperdoc/tools/file_read_tool.py +17 -7
  41. ripperdoc/tools/file_write_tool.py +49 -13
  42. ripperdoc/tools/glob_tool.py +10 -9
  43. ripperdoc/tools/grep_tool.py +182 -51
  44. ripperdoc/tools/ls_tool.py +6 -6
  45. ripperdoc/tools/mcp_tools.py +172 -413
  46. ripperdoc/tools/multi_edit_tool.py +49 -9
  47. ripperdoc/tools/notebook_edit_tool.py +57 -13
  48. ripperdoc/tools/skill_tool.py +205 -0
  49. ripperdoc/tools/task_tool.py +91 -9
  50. ripperdoc/tools/todo_tool.py +12 -12
  51. ripperdoc/tools/tool_search_tool.py +5 -6
  52. ripperdoc/utils/coerce.py +34 -0
  53. ripperdoc/utils/context_length_errors.py +252 -0
  54. ripperdoc/utils/file_watch.py +5 -4
  55. ripperdoc/utils/json_utils.py +4 -4
  56. ripperdoc/utils/log.py +3 -3
  57. ripperdoc/utils/mcp.py +82 -22
  58. ripperdoc/utils/memory.py +9 -6
  59. ripperdoc/utils/message_compaction.py +19 -16
  60. ripperdoc/utils/messages.py +73 -8
  61. ripperdoc/utils/path_ignore.py +677 -0
  62. ripperdoc/utils/permissions/__init__.py +7 -1
  63. ripperdoc/utils/permissions/path_validation_utils.py +5 -3
  64. ripperdoc/utils/permissions/shell_command_validation.py +496 -18
  65. ripperdoc/utils/prompt.py +1 -1
  66. ripperdoc/utils/safe_get_cwd.py +5 -2
  67. ripperdoc/utils/session_history.py +38 -19
  68. ripperdoc/utils/todo.py +6 -2
  69. ripperdoc/utils/token_estimation.py +34 -0
  70. {ripperdoc-0.2.3.dist-info → ripperdoc-0.2.5.dist-info}/METADATA +14 -1
  71. ripperdoc-0.2.5.dist-info/RECORD +107 -0
  72. ripperdoc-0.2.3.dist-info/RECORD +0 -95
  73. {ripperdoc-0.2.3.dist-info → ripperdoc-0.2.5.dist-info}/WHEEL +0 -0
  74. {ripperdoc-0.2.3.dist-info → ripperdoc-0.2.5.dist-info}/entry_points.txt +0 -0
  75. {ripperdoc-0.2.3.dist-info → ripperdoc-0.2.5.dist-info}/licenses/LICENSE +0 -0
  76. {ripperdoc-0.2.3.dist-info → ripperdoc-0.2.5.dist-info}/top_level.txt +0 -0
ripperdoc/core/config.py CHANGED
@@ -110,6 +110,9 @@ class ModelProfile(BaseModel):
110
110
  # Tool handling for OpenAI-compatible providers. "native" uses tool_calls, "text" flattens tool
111
111
  # interactions into plain text to support providers that reject tool roles.
112
112
  openai_tool_mode: Literal["native", "text"] = "native"
113
+ # Optional override for thinking protocol handling (e.g., "deepseek", "openrouter",
114
+ # "qwen", "gemini_openai", "openai_reasoning"). When unset, provider heuristics are used.
115
+ thinking_mode: Optional[str] = None
113
116
  # Pricing (USD per 1M tokens). Leave as 0 to skip cost calculation.
114
117
  input_cost_per_million_tokens: float = 0.0
115
118
  output_cost_per_million_tokens: float = 0.0
@@ -140,6 +143,10 @@ class GlobalConfig(BaseModel):
140
143
  auto_compact_enabled: bool = True
141
144
  context_token_limit: Optional[int] = None
142
145
 
146
+ # User-level permission rules (applied globally)
147
+ user_allow_rules: list[str] = Field(default_factory=list)
148
+ user_deny_rules: list[str] = Field(default_factory=list)
149
+
143
150
  # Onboarding
144
151
  has_completed_onboarding: bool = False
145
152
  last_onboarding_version: Optional[str] = None
@@ -151,12 +158,18 @@ class GlobalConfig(BaseModel):
151
158
  class ProjectConfig(BaseModel):
152
159
  """Project-specific configuration stored in .ripperdoc/config.json"""
153
160
 
154
- # Tool permissions
161
+ # Tool permissions (project level - checked into git)
155
162
  allowed_tools: list[str] = Field(default_factory=list)
156
163
  bash_allow_rules: list[str] = Field(default_factory=list)
157
164
  bash_deny_rules: list[str] = Field(default_factory=list)
158
165
  working_directories: list[str] = Field(default_factory=list)
159
166
 
167
+ # Path ignore patterns (gitignore-style)
168
+ ignore_patterns: list[str] = Field(
169
+ default_factory=list,
170
+ description="Gitignore-style patterns for paths to ignore in file operations"
171
+ )
172
+
160
173
  # Context
161
174
  context: Dict[str, str] = Field(default_factory=dict)
162
175
  context_files: list[str] = Field(default_factory=list)
@@ -177,6 +190,14 @@ class ProjectConfig(BaseModel):
177
190
  last_session_id: Optional[str] = None
178
191
 
179
192
 
193
+ class ProjectLocalConfig(BaseModel):
194
+ """Project-local configuration stored in .ripperdoc/config.local.json (not checked into git)"""
195
+
196
+ # Local permission rules (project-specific but not shared)
197
+ local_allow_rules: list[str] = Field(default_factory=list)
198
+ local_deny_rules: list[str] = Field(default_factory=list)
199
+
200
+
180
201
  class ConfigManager:
181
202
  """Manages global and project-specific configuration."""
182
203
 
@@ -185,6 +206,7 @@ class ConfigManager:
185
206
  self.current_project_path: Optional[Path] = None
186
207
  self._global_config: Optional[GlobalConfig] = None
187
208
  self._project_config: Optional[ProjectConfig] = None
209
+ self._project_local_config: Optional[ProjectLocalConfig] = None
188
210
 
189
211
  def get_global_config(self) -> GlobalConfig:
190
212
  """Load and return global configuration."""
@@ -200,8 +222,12 @@ class ConfigManager:
200
222
  "profile_count": len(self._global_config.model_profiles),
201
223
  },
202
224
  )
203
- except Exception as e:
204
- logger.exception("Error loading global config", extra={"error": str(e)})
225
+ except (json.JSONDecodeError, OSError, IOError, UnicodeDecodeError, ValueError, TypeError) as e:
226
+ logger.warning(
227
+ "Error loading global config: %s: %s",
228
+ type(e).__name__, e,
229
+ extra={"error": str(e)},
230
+ )
205
231
  self._global_config = GlobalConfig()
206
232
  else:
207
233
  self._global_config = GlobalConfig()
@@ -250,9 +276,10 @@ class ConfigManager:
250
276
  "allowed_tools": len(self._project_config.allowed_tools),
251
277
  },
252
278
  )
253
- except Exception as e:
254
- logger.exception(
255
- "Error loading project config",
279
+ except (json.JSONDecodeError, OSError, IOError, UnicodeDecodeError, ValueError, TypeError) as e:
280
+ logger.warning(
281
+ "Error loading project config: %s: %s",
282
+ type(e).__name__, e,
256
283
  extra={"error": str(e), "path": str(config_path)},
257
284
  )
258
285
  self._project_config = ProjectConfig()
@@ -293,6 +320,91 @@ class ConfigManager:
293
320
  },
294
321
  )
295
322
 
323
+ def get_project_local_config(self, project_path: Optional[Path] = None) -> ProjectLocalConfig:
324
+ """Load and return project-local configuration (not checked into git)."""
325
+ if project_path is not None:
326
+ if self.current_project_path != project_path:
327
+ self._project_local_config = None
328
+ self.current_project_path = project_path
329
+
330
+ if self.current_project_path is None:
331
+ return ProjectLocalConfig()
332
+
333
+ config_path = self.current_project_path / ".ripperdoc" / "config.local.json"
334
+
335
+ if self._project_local_config is None:
336
+ if config_path.exists():
337
+ try:
338
+ data = json.loads(config_path.read_text())
339
+ self._project_local_config = ProjectLocalConfig(**data)
340
+ logger.debug(
341
+ "[config] Loaded project-local config",
342
+ extra={
343
+ "path": str(config_path),
344
+ "project_path": str(self.current_project_path),
345
+ },
346
+ )
347
+ except (json.JSONDecodeError, OSError, IOError, UnicodeDecodeError, ValueError, TypeError) as e:
348
+ logger.warning(
349
+ "Error loading project-local config: %s: %s",
350
+ type(e).__name__, e,
351
+ extra={"error": str(e), "path": str(config_path)},
352
+ )
353
+ self._project_local_config = ProjectLocalConfig()
354
+ else:
355
+ self._project_local_config = ProjectLocalConfig()
356
+
357
+ return self._project_local_config
358
+
359
+ def save_project_local_config(
360
+ self, config: ProjectLocalConfig, project_path: Optional[Path] = None
361
+ ) -> None:
362
+ """Save project-local configuration."""
363
+ if project_path is not None:
364
+ self.current_project_path = project_path
365
+
366
+ if self.current_project_path is None:
367
+ return
368
+
369
+ config_dir = self.current_project_path / ".ripperdoc"
370
+ config_dir.mkdir(exist_ok=True)
371
+
372
+ config_path = config_dir / "config.local.json"
373
+ self._project_local_config = config
374
+ config_path.write_text(config.model_dump_json(indent=2))
375
+
376
+ # Ensure config.local.json is in .gitignore
377
+ self._ensure_gitignore_entry("config.local.json")
378
+
379
+ logger.debug(
380
+ "[config] Saved project-local config",
381
+ extra={
382
+ "path": str(config_path),
383
+ "project_path": str(self.current_project_path),
384
+ },
385
+ )
386
+
387
+ def _ensure_gitignore_entry(self, entry: str) -> bool:
388
+ """Ensure an entry exists in .ripperdoc/.gitignore. Returns True if added."""
389
+ if self.current_project_path is None:
390
+ return False
391
+
392
+ gitignore_path = self.current_project_path / ".ripperdoc" / ".gitignore"
393
+ try:
394
+ text = ""
395
+ if gitignore_path.exists():
396
+ text = gitignore_path.read_text(encoding="utf-8", errors="ignore")
397
+ existing_lines = text.splitlines()
398
+ if entry in existing_lines:
399
+ return False
400
+ with gitignore_path.open("a", encoding="utf-8") as f:
401
+ if text and not text.endswith("\n"):
402
+ f.write("\n")
403
+ f.write(f"{entry}\n")
404
+ return True
405
+ except (OSError, IOError):
406
+ return False
407
+
296
408
  def get_api_key(self, provider: ProviderType) -> Optional[str]:
297
409
  """Get API key for a provider."""
298
410
  # First check environment variables
@@ -433,3 +545,15 @@ def set_model_pointer(pointer: str, profile_name: str) -> GlobalConfig:
433
545
  def get_current_model_profile(pointer: str = "main") -> Optional[ModelProfile]:
434
546
  """Convenience wrapper to fetch the active profile for a pointer."""
435
547
  return config_manager.get_current_model_profile(pointer)
548
+
549
+
550
+ def get_project_local_config(project_path: Optional[Path] = None) -> ProjectLocalConfig:
551
+ """Get project-local configuration (not checked into git)."""
552
+ return config_manager.get_project_local_config(project_path)
553
+
554
+
555
+ def save_project_local_config(
556
+ config: ProjectLocalConfig, project_path: Optional[Path] = None
557
+ ) -> None:
558
+ """Save project-local configuration."""
559
+ config_manager.save_project_local_config(config, project_path)
@@ -17,7 +17,11 @@ from ripperdoc.tools.file_write_tool import FileWriteTool
17
17
  from ripperdoc.tools.glob_tool import GlobTool
18
18
  from ripperdoc.tools.ls_tool import LSTool
19
19
  from ripperdoc.tools.grep_tool import GrepTool
20
+ from ripperdoc.tools.skill_tool import SkillTool
20
21
  from ripperdoc.tools.todo_tool import TodoReadTool, TodoWriteTool
22
+ from ripperdoc.tools.ask_user_question_tool import AskUserQuestionTool
23
+ from ripperdoc.tools.enter_plan_mode_tool import EnterPlanModeTool
24
+ from ripperdoc.tools.exit_plan_mode_tool import ExitPlanModeTool
21
25
  from ripperdoc.tools.task_tool import TaskTool
22
26
  from ripperdoc.tools.tool_search_tool import ToolSearchTool
23
27
  from ripperdoc.tools.mcp_tools import (
@@ -45,8 +49,12 @@ def get_default_tools() -> List[Tool[Any, Any]]:
45
49
  GlobTool(),
46
50
  LSTool(),
47
51
  GrepTool(),
52
+ SkillTool(),
48
53
  TodoReadTool(),
49
54
  TodoWriteTool(),
55
+ AskUserQuestionTool(),
56
+ EnterPlanModeTool(),
57
+ ExitPlanModeTool(),
50
58
  ToolSearchTool(),
51
59
  ListMcpServersTool(),
52
60
  ListMcpResourcesTool(),
@@ -60,9 +68,12 @@ def get_default_tools() -> List[Tool[Any, Any]]:
60
68
  if isinstance(tool, Tool):
61
69
  base_tools.append(tool)
62
70
  dynamic_tools.append(tool)
63
- except Exception:
71
+ except (ImportError, ModuleNotFoundError, OSError, RuntimeError, ConnectionError, ValueError, TypeError) as exc:
64
72
  # If MCP runtime is not available, continue with base tools only.
65
- logger.exception("[default_tools] Failed to load dynamic MCP tools")
73
+ logger.warning(
74
+ "[default_tools] Failed to load dynamic MCP tools: %s: %s",
75
+ type(exc).__name__, exc,
76
+ )
66
77
 
67
78
  task_tool = TaskTool(lambda: base_tools)
68
79
  all_tools = base_tools + [task_tool]
@@ -48,19 +48,19 @@ def permission_key(tool: Tool[Any, Any], parsed_input: Any) -> str:
48
48
  if hasattr(parsed_input, "file_path"):
49
49
  try:
50
50
  return f"{tool.name}::path::{Path(getattr(parsed_input, 'file_path')).resolve()}"
51
- except Exception:
52
- logger.exception(
51
+ except (OSError, RuntimeError) as exc:
52
+ logger.warning(
53
53
  "[permissions] Failed to resolve file_path for permission key",
54
- extra={"tool": getattr(tool, "name", None)},
54
+ extra={"tool": getattr(tool, "name", None), "error": str(exc)},
55
55
  )
56
56
  return f"{tool.name}::path::{getattr(parsed_input, 'file_path')}"
57
57
  if hasattr(parsed_input, "path"):
58
58
  try:
59
59
  return f"{tool.name}::path::{Path(getattr(parsed_input, 'path')).resolve()}"
60
- except Exception:
61
- logger.exception(
60
+ except (OSError, RuntimeError) as exc:
61
+ logger.warning(
62
62
  "[permissions] Failed to resolve path for permission key",
63
- extra={"tool": getattr(tool, "name", None)},
63
+ extra={"tool": getattr(tool, "name", None), "error": str(exc)},
64
64
  )
65
65
  return f"{tool.name}::path::{getattr(parsed_input, 'path')}"
66
66
  return tool.name
@@ -126,14 +126,15 @@ def make_permission_checker(
126
126
  try:
127
127
  if hasattr(tool, "needs_permissions") and not tool.needs_permissions(parsed_input):
128
128
  return PermissionResult(result=True)
129
- except Exception:
130
- logger.exception(
129
+ except (TypeError, AttributeError, ValueError) as exc:
130
+ # Tool implementation error - log and deny for safety
131
+ logger.warning(
131
132
  "[permissions] Tool needs_permissions check failed",
132
- extra={"tool": getattr(tool, "name", None)},
133
+ extra={"tool": getattr(tool, "name", None), "error": str(exc), "error_type": type(exc).__name__},
133
134
  )
134
135
  return PermissionResult(
135
136
  result=False,
136
- message="Permission check failed for this tool invocation.",
137
+ message=f"Permission check failed: {type(exc).__name__}: {exc}",
137
138
  )
138
139
 
139
140
  allowed_tools = set(config.allowed_tools or [])
@@ -167,14 +168,15 @@ def make_permission_checker(
167
168
  # Allow tools to return a plain dict shaped like PermissionDecision.
168
169
  if isinstance(decision, dict) and "behavior" in decision:
169
170
  decision = PermissionDecision(**decision)
170
- except Exception:
171
- logger.exception(
171
+ except (TypeError, AttributeError, ValueError, KeyError) as exc:
172
+ # Tool implementation error - fall back to asking user
173
+ logger.warning(
172
174
  "[permissions] Tool check_permissions failed",
173
- extra={"tool": getattr(tool, "name", None)},
175
+ extra={"tool": getattr(tool, "name", None), "error": str(exc), "error_type": type(exc).__name__},
174
176
  )
175
177
  decision = PermissionDecision(
176
178
  behavior="ask",
177
- message="Error checking permissions for this tool.",
179
+ message=f"Error checking permissions: {type(exc).__name__}",
178
180
  rule_suggestions=None,
179
181
  )
180
182
 
@@ -219,6 +221,10 @@ def make_permission_checker(
219
221
  ]
220
222
 
221
223
  answer = (await _prompt_user(prompt, options=options)).strip().lower()
224
+ logger.debug(
225
+ "[permissions] User answer for permission prompt",
226
+ extra={"answer": answer, "tool": getattr(tool, "name", None)},
227
+ )
222
228
  rule_suggestions = _rule_strings(decision.rule_suggestions) or [
223
229
  permission_key(tool, parsed_input)
224
230
  ]
@@ -1,31 +1,47 @@
1
- """Provider client registry."""
1
+ """Provider client registry with optional dependencies."""
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- from typing import Optional
5
+ import importlib
6
+ from typing import Optional, TYPE_CHECKING, Type, cast
6
7
 
7
8
  from ripperdoc.core.config import ProviderType
8
- from ripperdoc.core.providers.anthropic import AnthropicClient
9
9
  from ripperdoc.core.providers.base import ProviderClient
10
- from ripperdoc.core.providers.gemini import GeminiClient
11
- from ripperdoc.core.providers.openai import OpenAIClient
10
+ from ripperdoc.utils.log import get_logger
11
+
12
+ if TYPE_CHECKING: # pragma: no cover - type checking only
13
+ from ripperdoc.core.providers.anthropic import AnthropicClient # noqa: F401
14
+ from ripperdoc.core.providers.gemini import GeminiClient # noqa: F401
15
+ from ripperdoc.core.providers.openai import OpenAIClient # noqa: F401
16
+
17
+ logger = get_logger()
18
+
19
+
20
+ def _load_client(module: str, cls: str, extra: str) -> Type[ProviderClient]:
21
+ """Dynamically import a provider client, pointing users to the right extra."""
22
+ try:
23
+ mod = importlib.import_module(f"ripperdoc.core.providers.{module}")
24
+ client_cls = cast(Type[ProviderClient], getattr(mod, cls, None))
25
+ if client_cls is None:
26
+ raise ImportError(f"{cls} not found in {module}")
27
+ return client_cls
28
+ except ImportError as exc:
29
+ raise RuntimeError(
30
+ f"{cls} requires optional dependency group '{extra}'. "
31
+ f"Install with `pip install ripperdoc[{extra}]`."
32
+ ) from exc
12
33
 
13
34
 
14
35
  def get_provider_client(provider: ProviderType) -> Optional[ProviderClient]:
15
36
  """Return a provider client for the given protocol."""
16
37
  if provider == ProviderType.ANTHROPIC:
17
- return AnthropicClient()
38
+ return _load_client("anthropic", "AnthropicClient", "anthropic")()
18
39
  if provider == ProviderType.OPENAI_COMPATIBLE:
19
- return OpenAIClient()
40
+ return _load_client("openai", "OpenAIClient", "openai")()
20
41
  if provider == ProviderType.GEMINI:
21
- return GeminiClient()
42
+ return _load_client("gemini", "GeminiClient", "gemini")()
43
+ logger.warning("[providers] Unsupported provider", extra={"provider": provider})
22
44
  return None
23
45
 
24
46
 
25
- __all__ = [
26
- "ProviderClient",
27
- "AnthropicClient",
28
- "GeminiClient",
29
- "OpenAIClient",
30
- "get_provider_client",
31
- ]
47
+ __all__ = ["ProviderClient", "get_provider_client"]
@@ -2,9 +2,11 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
+ import asyncio
5
6
  import time
6
7
  from typing import Any, Awaitable, Callable, Dict, List, Optional
7
8
 
9
+ import anthropic
8
10
  from anthropic import AsyncAnthropic
9
11
 
10
12
  from ripperdoc.core.config import ModelProfile
@@ -13,6 +15,7 @@ from ripperdoc.core.providers.base import (
13
15
  ProviderClient,
14
16
  ProviderResponse,
15
17
  call_with_timeout_and_retries,
18
+ iter_with_timeout,
16
19
  sanitize_tool_history,
17
20
  )
18
21
  from ripperdoc.core.query_utils import (
@@ -28,6 +31,38 @@ from ripperdoc.utils.session_usage import record_usage
28
31
  logger = get_logger()
29
32
 
30
33
 
34
+ def _classify_anthropic_error(exc: Exception) -> tuple[str, str]:
35
+ """Classify an Anthropic exception into error code and user-friendly message."""
36
+ exc_type = type(exc).__name__
37
+ exc_msg = str(exc)
38
+
39
+ if isinstance(exc, anthropic.AuthenticationError):
40
+ return "authentication_error", f"Authentication failed: {exc_msg}"
41
+ if isinstance(exc, anthropic.PermissionDeniedError):
42
+ if "balance" in exc_msg.lower() or "insufficient" in exc_msg.lower():
43
+ return "insufficient_balance", f"Insufficient balance: {exc_msg}"
44
+ return "permission_denied", f"Permission denied: {exc_msg}"
45
+ if isinstance(exc, anthropic.NotFoundError):
46
+ return "model_not_found", f"Model not found: {exc_msg}"
47
+ if isinstance(exc, anthropic.BadRequestError):
48
+ if "context" in exc_msg.lower() or "token" in exc_msg.lower():
49
+ return "context_length_exceeded", f"Context length exceeded: {exc_msg}"
50
+ if "content" in exc_msg.lower() and "policy" in exc_msg.lower():
51
+ return "content_policy_violation", f"Content policy violation: {exc_msg}"
52
+ return "bad_request", f"Invalid request: {exc_msg}"
53
+ if isinstance(exc, anthropic.RateLimitError):
54
+ return "rate_limit", f"Rate limit exceeded: {exc_msg}"
55
+ if isinstance(exc, anthropic.APIConnectionError):
56
+ return "connection_error", f"Connection error: {exc_msg}"
57
+ if isinstance(exc, anthropic.APIStatusError):
58
+ status = getattr(exc, "status_code", "unknown")
59
+ return "api_error", f"API error ({status}): {exc_msg}"
60
+ if isinstance(exc, asyncio.TimeoutError):
61
+ return "timeout", f"Request timed out: {exc_msg}"
62
+
63
+ return "unknown_error", f"Unexpected error ({exc_type}): {exc_msg}"
64
+
65
+
31
66
  class AnthropicClient(ProviderClient):
32
67
  """Anthropic client with streaming and non-streaming support."""
33
68
 
@@ -51,10 +86,64 @@ class AnthropicClient(ProviderClient):
51
86
  progress_callback: Optional[ProgressCallback],
52
87
  request_timeout: Optional[float],
53
88
  max_retries: int,
89
+ max_thinking_tokens: int,
54
90
  ) -> ProviderResponse:
55
91
  start_time = time.time()
92
+
93
+ try:
94
+ return await self._call_impl(
95
+ model_profile=model_profile,
96
+ system_prompt=system_prompt,
97
+ normalized_messages=normalized_messages,
98
+ tools=tools,
99
+ tool_mode=tool_mode,
100
+ stream=stream,
101
+ progress_callback=progress_callback,
102
+ request_timeout=request_timeout,
103
+ max_retries=max_retries,
104
+ max_thinking_tokens=max_thinking_tokens,
105
+ start_time=start_time,
106
+ )
107
+ except asyncio.CancelledError:
108
+ raise # Don't suppress task cancellation
109
+ except Exception as exc:
110
+ duration_ms = (time.time() - start_time) * 1000
111
+ error_code, error_message = _classify_anthropic_error(exc)
112
+ logger.error(
113
+ "[anthropic_client] API call failed",
114
+ extra={
115
+ "model": model_profile.model,
116
+ "error_code": error_code,
117
+ "error_message": error_message,
118
+ "duration_ms": round(duration_ms, 2),
119
+ },
120
+ )
121
+ return ProviderResponse.create_error(
122
+ error_code=error_code,
123
+ error_message=error_message,
124
+ duration_ms=duration_ms,
125
+ )
126
+
127
+ async def _call_impl(
128
+ self,
129
+ *,
130
+ model_profile: ModelProfile,
131
+ system_prompt: str,
132
+ normalized_messages: Any,
133
+ tools: List[Tool[Any, Any]],
134
+ tool_mode: str,
135
+ stream: bool,
136
+ progress_callback: Optional[ProgressCallback],
137
+ request_timeout: Optional[float],
138
+ max_retries: int,
139
+ max_thinking_tokens: int,
140
+ start_time: float,
141
+ ) -> ProviderResponse:
142
+ """Internal implementation of call, may raise exceptions."""
56
143
  tool_schemas = await build_anthropic_tool_schemas(tools)
57
144
  collected_text: List[str] = []
145
+ reasoning_parts: List[str] = []
146
+ response_metadata: Dict[str, Any] = {}
58
147
 
59
148
  anthropic_kwargs = {"base_url": model_profile.api_base}
60
149
  if model_profile.api_key:
@@ -65,31 +154,47 @@ class AnthropicClient(ProviderClient):
65
154
 
66
155
  normalized_messages = sanitize_tool_history(list(normalized_messages))
67
156
 
157
+ thinking_payload: Optional[Dict[str, Any]] = None
158
+ if max_thinking_tokens > 0:
159
+ thinking_payload = {"type": "enabled", "budget_tokens": max_thinking_tokens}
160
+
68
161
  async with await self._client(anthropic_kwargs) as client:
69
162
 
70
163
  async def _stream_request() -> Any:
71
- async with client.messages.stream(
164
+ stream_cm = client.messages.stream(
72
165
  model=model_profile.model,
73
166
  max_tokens=model_profile.max_tokens,
74
167
  system=system_prompt,
75
168
  messages=normalized_messages, # type: ignore[arg-type]
76
169
  tools=tool_schemas if tool_schemas else None, # type: ignore
77
170
  temperature=model_profile.temperature,
78
- ) as stream_resp:
79
- async for text in stream_resp.text_stream:
171
+ thinking=thinking_payload, # type: ignore[arg-type]
172
+ )
173
+ stream_resp = (
174
+ await asyncio.wait_for(stream_cm.__aenter__(), timeout=request_timeout)
175
+ if request_timeout and request_timeout > 0
176
+ else await stream_cm.__aenter__()
177
+ )
178
+ try:
179
+ async for text in iter_with_timeout(stream_resp.text_stream, request_timeout):
80
180
  if text:
81
181
  collected_text.append(text)
82
182
  if progress_callback:
83
183
  try:
84
184
  await progress_callback(text)
85
- except Exception:
86
- logger.exception("[anthropic_client] Stream callback failed")
185
+ except (RuntimeError, ValueError, TypeError, OSError) as cb_exc:
186
+ logger.warning(
187
+ "[anthropic_client] Stream callback failed: %s: %s",
188
+ type(cb_exc).__name__, cb_exc,
189
+ )
87
190
  getter = getattr(stream_resp, "get_final_response", None) or getattr(
88
191
  stream_resp, "get_final_message", None
89
192
  )
90
193
  if getter:
91
194
  return await getter()
92
195
  return None
196
+ finally:
197
+ await stream_cm.__aexit__(None, None, None)
93
198
 
94
199
  async def _non_stream_request() -> Any:
95
200
  return await client.messages.create(
@@ -99,11 +204,13 @@ class AnthropicClient(ProviderClient):
99
204
  messages=normalized_messages, # type: ignore[arg-type]
100
205
  tools=tool_schemas if tool_schemas else None, # type: ignore
101
206
  temperature=model_profile.temperature,
207
+ thinking=thinking_payload, # type: ignore[arg-type]
102
208
  )
103
209
 
210
+ timeout_for_call = None if stream else request_timeout
104
211
  response = await call_with_timeout_and_retries(
105
212
  _stream_request if stream else _non_stream_request,
106
- request_timeout,
213
+ timeout_for_call,
107
214
  max_retries,
108
215
  )
109
216
 
@@ -115,8 +222,14 @@ class AnthropicClient(ProviderClient):
115
222
  )
116
223
 
117
224
  content_blocks = content_blocks_from_anthropic_response(response, tool_mode)
118
- if stream and collected_text and tool_mode == "text":
119
- content_blocks = [{"type": "text", "text": "".join(collected_text)}]
225
+ for blk in content_blocks:
226
+ if blk.get("type") == "thinking":
227
+ thinking_text = blk.get("thinking") or blk.get("text") or ""
228
+ if thinking_text:
229
+ reasoning_parts.append(str(thinking_text))
230
+ if reasoning_parts:
231
+ response_metadata["reasoning_content"] = "\n".join(reasoning_parts)
232
+ # Streaming progress is handled via text_stream; final content retains thinking blocks.
120
233
 
121
234
  logger.info(
122
235
  "[anthropic_client] Response received",
@@ -133,4 +246,5 @@ class AnthropicClient(ProviderClient):
133
246
  usage_tokens=usage_tokens,
134
247
  cost_usd=cost_usd,
135
248
  duration_ms=duration_ms,
249
+ metadata=response_metadata,
136
250
  )