ripperdoc 0.2.10__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +164 -57
  3. ripperdoc/cli/commands/__init__.py +4 -0
  4. ripperdoc/cli/commands/agents_cmd.py +3 -7
  5. ripperdoc/cli/commands/doctor_cmd.py +29 -0
  6. ripperdoc/cli/commands/memory_cmd.py +2 -1
  7. ripperdoc/cli/commands/models_cmd.py +61 -5
  8. ripperdoc/cli/commands/resume_cmd.py +1 -0
  9. ripperdoc/cli/commands/skills_cmd.py +103 -0
  10. ripperdoc/cli/commands/stats_cmd.py +4 -4
  11. ripperdoc/cli/commands/status_cmd.py +10 -0
  12. ripperdoc/cli/commands/tasks_cmd.py +6 -3
  13. ripperdoc/cli/commands/themes_cmd.py +139 -0
  14. ripperdoc/cli/ui/file_mention_completer.py +63 -13
  15. ripperdoc/cli/ui/helpers.py +6 -3
  16. ripperdoc/cli/ui/interrupt_listener.py +233 -0
  17. ripperdoc/cli/ui/message_display.py +7 -0
  18. ripperdoc/cli/ui/panels.py +13 -8
  19. ripperdoc/cli/ui/rich_ui.py +513 -84
  20. ripperdoc/cli/ui/spinner.py +68 -5
  21. ripperdoc/cli/ui/tool_renderers.py +10 -9
  22. ripperdoc/cli/ui/wizard.py +18 -11
  23. ripperdoc/core/agents.py +4 -0
  24. ripperdoc/core/config.py +235 -0
  25. ripperdoc/core/default_tools.py +1 -0
  26. ripperdoc/core/hooks/llm_callback.py +0 -1
  27. ripperdoc/core/hooks/manager.py +6 -0
  28. ripperdoc/core/permissions.py +123 -39
  29. ripperdoc/core/providers/openai.py +55 -9
  30. ripperdoc/core/query.py +349 -108
  31. ripperdoc/core/query_utils.py +17 -14
  32. ripperdoc/core/skills.py +1 -0
  33. ripperdoc/core/theme.py +298 -0
  34. ripperdoc/core/tool.py +8 -3
  35. ripperdoc/protocol/__init__.py +14 -0
  36. ripperdoc/protocol/models.py +300 -0
  37. ripperdoc/protocol/stdio.py +1453 -0
  38. ripperdoc/tools/background_shell.py +49 -5
  39. ripperdoc/tools/bash_tool.py +75 -9
  40. ripperdoc/tools/file_edit_tool.py +98 -29
  41. ripperdoc/tools/file_read_tool.py +139 -8
  42. ripperdoc/tools/file_write_tool.py +46 -3
  43. ripperdoc/tools/grep_tool.py +98 -8
  44. ripperdoc/tools/lsp_tool.py +9 -15
  45. ripperdoc/tools/multi_edit_tool.py +26 -3
  46. ripperdoc/tools/skill_tool.py +52 -1
  47. ripperdoc/tools/task_tool.py +33 -8
  48. ripperdoc/utils/file_watch.py +12 -6
  49. ripperdoc/utils/image_utils.py +125 -0
  50. ripperdoc/utils/log.py +30 -3
  51. ripperdoc/utils/lsp.py +9 -3
  52. ripperdoc/utils/mcp.py +80 -18
  53. ripperdoc/utils/message_formatting.py +2 -2
  54. ripperdoc/utils/messages.py +177 -32
  55. ripperdoc/utils/pending_messages.py +50 -0
  56. ripperdoc/utils/permissions/shell_command_validation.py +3 -3
  57. ripperdoc/utils/permissions/tool_permission_utils.py +9 -3
  58. ripperdoc/utils/platform.py +198 -0
  59. ripperdoc/utils/session_heatmap.py +1 -3
  60. ripperdoc/utils/session_history.py +2 -2
  61. ripperdoc/utils/session_stats.py +1 -0
  62. ripperdoc/utils/shell_utils.py +8 -5
  63. ripperdoc/utils/todo.py +0 -6
  64. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.1.dist-info}/METADATA +49 -17
  65. ripperdoc-0.3.1.dist-info/RECORD +136 -0
  66. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.1.dist-info}/WHEEL +1 -1
  67. ripperdoc/cli/ui/interrupt_handler.py +0 -174
  68. ripperdoc/sdk/__init__.py +0 -9
  69. ripperdoc/sdk/client.py +0 -408
  70. ripperdoc-0.2.10.dist-info/RECORD +0 -129
  71. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.1.dist-info}/entry_points.txt +0 -0
  72. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.1.dist-info}/licenses/LICENSE +0 -0
  73. {ripperdoc-0.2.10.dist-info → ripperdoc-0.3.1.dist-info}/top_level.txt +0 -0
ripperdoc/cli/ui/spinner.py CHANGED
@@ -1,4 +1,6 @@
 from contextlib import contextmanager
+import shutil
+import sys
 from typing import Any, Generator, Literal, Optional
 
 from rich.console import Console
@@ -6,31 +8,78 @@ from rich.live import Live
 from rich.text import Text
 from rich.spinner import Spinner as RichSpinner
 
+from ripperdoc.core.theme import theme_color
+
+# ANSI escape sequences for terminal control
+_CLEAR_LINE = "\r\033[K"  # Move to start of line and clear to end
+
 
 class Spinner:
     """Lightweight spinner wrapper that plays nicely with other console output."""
 
+    # Reserve space for spinner animation (e.g., "⠧ ") and safety margin
+    _SPINNER_MARGIN = 6
+
     def __init__(self, console: Console, text: str = "Thinking...", spinner: str = "dots"):
         self.console = console
         self.text = text
         self.spinner = spinner
-        self._style = "cyan"
+        self._style = theme_color("spinner")
         self._live: Optional[Live] = None
-        # Blue spinner for clearer visual separation in the terminal (icon + text)
+        # Spinner color from theme for visual separation in the terminal
         self._renderable: RichSpinner = RichSpinner(
-            spinner, text=Text(self.text, style=self._style), style=self._style
+            spinner,
+            text=Text(self._fit_to_terminal(self.text), style=self._style),
+            style=self._style,
         )
 
+    def _get_terminal_width(self) -> int:
+        """Get current terminal width, with fallback."""
+        try:
+            return shutil.get_terminal_size().columns
+        except Exception:
+            return 80  # Reasonable default
+
+    def _fit_to_terminal(self, text: str) -> str:
+        """Truncate text to fit within terminal width, preventing line wrap issues.
+
+        This ensures spinner text never causes terminal wrapping, which would
+        leave artifacts when the spinner refreshes or stops.
+        """
+        max_width = self._get_terminal_width() - self._SPINNER_MARGIN
+        if max_width < 20:
+            max_width = 20  # Minimum usable width
+
+        if len(text) <= max_width:
+            return text
+
+        # Smart truncation: keep the structure intact
+        # Find the last complete parenthetical group if possible
+        truncated = text[: max_width - 1] + "…"
+        return truncated
+
+    def _clear_line(self) -> None:
+        """Clear the current terminal line to prevent artifacts."""
+        if self.console.is_terminal:
+            try:
+                sys.stdout.write(_CLEAR_LINE)
+                sys.stdout.flush()
+            except Exception:
+                pass  # Ignore errors in non-TTY environments
+
     def start(self) -> None:
         """Start the spinner if not already running."""
         if self._live is not None:
             return
-        self._renderable.text = Text(self.text, style=self._style)
+        # Clear any residual content on current line before starting
+        self._clear_line()
+        self._renderable.text = Text(self._fit_to_terminal(self.text), style=self._style)
         self._live = Live(
             self._renderable,
             console=self.console,
             transient=True,  # Remove spinner line when stopped to avoid layout glitches
             refresh_per_second=12,
+            vertical_overflow="ellipsis",  # Prevent multi-line overflow issues
         )
         self._live.start()
 
@@ -40,7 +89,7 @@ class Spinner:
             return
         if text is not None:
             self.text = text
-        self._renderable.text = Text(self.text, style=self._style)
+        self._renderable.text = Text(self._fit_to_terminal(self.text), style=self._style)
         # Live.refresh() redraws the current renderable
         self._live.refresh()
 
@@ -50,6 +99,8 @@ class Spinner:
             return
         try:
            self._live.stop()
+            # Clear line to ensure no artifacts remain from long spinner text
+            self._clear_line()
         finally:
             self._live = None
 
@@ -82,4 +133,16 @@ class Spinner:
             yield
         finally:
             if was_running:
+                # Ensure all output is flushed and cursor is on a clean line
+                # before restarting the spinner
+                try:
+                    # Flush console buffer
+                    self.console.file.flush()
+                    # Clear any partial line content to prevent spinner
+                    # from appearing on the same line as previous output
+                    if self.console.is_terminal:
+                        sys.stdout.write(_CLEAR_LINE)
+                        sys.stdout.flush()
+                except Exception:
+                    pass
                 self.start()
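The spinner changes above boil down to a width-clamping rule: terminal width minus a 6-column spinner margin, never below 20 columns, with a trailing ellipsis when the text would wrap. A minimal sketch of just that rule, with the width passed in explicitly rather than read via shutil.get_terminal_size(); the standalone function name and demo values are illustrative, not part of the package:

    def fit_to_terminal(text: str, width: int, margin: int = 6) -> str:
        # Same rule as Spinner._fit_to_terminal: clamp to a 20-column minimum,
        # then truncate with a trailing ellipsis if the text would wrap.
        max_width = max(width - margin, 20)
        return text if len(text) <= max_width else text[: max_width - 1] + "…"

    assert fit_to_terminal("Thinking...", width=80) == "Thinking..."
    assert len(fit_to_terminal("x" * 200, width=80)) == 74  # 80 - 6, ellipsis included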
ripperdoc/cli/ui/tool_renderers.py CHANGED
@@ -43,7 +43,7 @@ class TodoResultRenderer(ToolResultRenderer):
         if lines:
             self.console.print(f" ⎿ [dim]{escape(lines[0])}[/]")
             for line in lines[1:]:
-                self.console.print(f" {line}", markup=False)
+                self.console.print(f" {line}", markup=False)
         else:
             self.console.print(" ⎿ [dim]Todo update[/]")
 
@@ -107,7 +107,7 @@ class GlobResultRenderer(ToolResultRenderer):
         if self.verbose:
             for line in files[:30]:
                 if line.strip():
-                    self.console.print(f" {line}", markup=False)
+                    self.console.print(f" {line}", markup=False)
             if file_count > 30:
                 self.console.print(f"[dim]... ({file_count - 30} more)[/]")
 
@@ -125,7 +125,7 @@ class GrepResultRenderer(ToolResultRenderer):
         if self.verbose:
             for line in matches[:30]:
                 if line.strip():
-                    self.console.print(f" {line}", markup=False)
+                    self.console.print(f" {line}", markup=False)
             if match_count > 30:
                 self.console.print(f"[dim]... ({match_count - 30} more)[/]")
 
@@ -142,7 +142,7 @@ class LSResultRenderer(ToolResultRenderer):
         if self.verbose:
             preview = tree_lines[:40]
             for line in preview:
-                self.console.print(f" {line}", markup=False)
+                self.console.print(f" {line}", markup=False)
             if len(tree_lines) > len(preview):
                 self.console.print(f"[dim]... ({len(tree_lines) - len(preview)} more)[/]")
 
@@ -193,7 +193,8 @@ class BashResultRenderer(ToolResultRenderer):
             preview = stdout_lines if self.verbose else stdout_lines[:5]
             self.console.print(f" ⎿ {preview[0]}", markup=False)
             for line in preview[1:]:
-                self.console.print(f" {line}", markup=False)
+                # Use consistent 4-space indent to match the ⎿ prefix width
+                self.console.print(f"    {line}", markup=False)
             if not self.verbose and len(stdout_lines) > len(preview):
                 self.console.print(f"[dim]... ({len(stdout_lines) - len(preview)} more lines)[/]")
         else:
@@ -229,28 +230,28 @@ class BashResultRenderer(ToolResultRenderer):
             preview = stdout_lines if self.verbose else stdout_lines[:5]
             self.console.print("[dim]stdout:[/]")
             for line in preview:
-                self.console.print(f" {line}", markup=False)
+                self.console.print(f" {line}", markup=False)
             if not self.verbose and len(stdout_lines) > len(preview):
                 self.console.print(
                     f"[dim]... ({len(stdout_lines) - len(preview)} more stdout lines)[/]"
                 )
         else:
             self.console.print("[dim]stdout:[/]")
-            self.console.print(" [dim](no stdout)[/]")
+            self.console.print(" [dim](no stdout)[/]")
 
         # Render stderr
         if stderr_lines:
             preview = stderr_lines if self.verbose else stderr_lines[:5]
             self.console.print("[dim]stderr:[/]")
             for line in preview:
-                self.console.print(f" {line}", markup=False)
+                self.console.print(f" {line}", markup=False)
             if not self.verbose and len(stderr_lines) > len(preview):
                 self.console.print(
                     f"[dim]... ({len(stderr_lines) - len(preview)} more stderr lines)[/]"
                 )
         else:
             self.console.print("[dim]stderr:[/]")
-            self.console.print(" [dim](no stderr)[/]")
+            self.console.print(" [dim](no stderr)[/]")
 
 
 class ToolResultRendererRegistry:
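The renderer edits above are whitespace-only: continuation lines now use a consistent 4-space indent so they sit under the " ⎿ " result marker. A rough sketch of the intended output shape, using a hypothetical helper rather than the real renderer classes:

    from rich.console import Console

    console = Console()

    def print_result(lines: list[str]) -> None:
        # First line carries the " ⎿ " marker; continuation lines get a fixed
        # 4-space indent so they align under it, per the comment in the diff.
        if not lines:
            return
        console.print(f" ⎿ {lines[0]}", markup=False)
        for line in lines[1:]:
            console.print(f"    {line}", markup=False)

    print_result(["3 matches", "src/app.py:12", "src/app.py:48"])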
ripperdoc/cli/ui/wizard.py CHANGED
@@ -2,6 +2,7 @@
 Interactive onboarding wizard for Ripperdoc.
 """
 
+import os
 from typing import List, Optional, Tuple
 
 import click
@@ -46,6 +47,17 @@ def check_onboarding() -> bool:
     if config.has_completed_onboarding:
         return True
 
+    # Check whether valid RIPPERDOC_* environment variables are configured.
+    # If RIPPERDOC_BASE_URL is set, onboarding can be skipped;
+    # nothing is written to the config file, everything is handled in memory.
+    if os.getenv("RIPPERDOC_BASE_URL"):
+        # Mark onboarding as completed in memory, but do not persist it to the config file,
+        # so the next launch still works as long as the environment variable is present.
+        config.has_completed_onboarding = True
+        config.last_onboarding_version = get_version()
+        save_global_config(config)
+        return True
+
     console.print("[bold cyan]Welcome to Ripperdoc![/bold cyan]\n")
     console.print("Let's set up your AI model configuration.\n")
 
@@ -135,7 +147,7 @@ def get_model_name_with_suggestions(
     api_base_override: Optional[str],
 ) -> Tuple[str, Optional[str]]:
     """Get model name with provider-specific suggestions and default API base.
-
+
     Returns:
         Tuple of (model_name, api_base)
     """
@@ -152,7 +164,7 @@ def get_model_name_with_suggestions(
     if suggestions:
         console.print("\n[dim]Available models for this provider:[/dim]")
         for i, model_name in enumerate(suggestions[:5]):  # Show top 5
-            console.print(f" [dim]{i+1}. {model_name}[/dim]")
+            console.print(f" [dim]{i + 1}. {model_name}[/dim]")
         console.print("")
 
     # Prompt for model name
@@ -162,16 +174,12 @@ def get_model_name_with_suggestions(
         model = click.prompt("Model name", default=default_model)
         # Prompt for API base if still not set
         if api_base is None:
-            api_base_input = click.prompt(
-                "API base URL (optional)", default="", show_default=False
-            )
+            api_base_input = click.prompt("API base URL (optional)", default="", show_default=False)
             api_base = api_base_input or None
     elif provider.protocol == ProviderType.GEMINI:
         model = click.prompt("Model name", default=default_model)
         if api_base is None:
-            api_base_input = click.prompt(
-                "API base URL (optional)", default="", show_default=False
-            )
+            api_base_input = click.prompt("API base URL (optional)", default="", show_default=False)
             api_base = api_base_input or None
     else:
         model = click.prompt("Model name", default=default_model)
@@ -191,9 +199,7 @@ def get_context_window() -> Optional[int]:
     try:
         context_window = int(context_window_input.strip())
     except ValueError:
-        console.print(
-            "[yellow]Invalid context window, using auto-detected defaults.[/yellow]"
-        )
+        console.print("[yellow]Invalid context window, using auto-detected defaults.[/yellow]")
     return context_window
 
 
@@ -201,6 +207,7 @@ def get_version() -> str:
     """Get current version of Ripperdoc."""
     try:
         from ripperdoc import __version__
+
         return __version__
     except ImportError:
         return "unknown"
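With the new guard, onboarding is skipped whenever RIPPERDOC_BASE_URL is present in the environment. A hedged sketch of exercising that path from Python; the import path follows this file's location in the wheel, and the endpoint, key, and model values are placeholders:

    import os

    # Configure Ripperdoc purely through environment variables.
    os.environ["RIPPERDOC_BASE_URL"] = "https://llm.example.internal/v1"  # placeholder endpoint
    os.environ["RIPPERDOC_API_KEY"] = "sk-placeholder"
    os.environ["RIPPERDOC_MODEL"] = "gpt-4o-mini"

    from ripperdoc.cli.ui.wizard import check_onboarding

    # Returns True without prompting when RIPPERDOC_BASE_URL is set
    # (note that this path still calls save_global_config, per the hunk above).
    assert check_onboarding() is True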
ripperdoc/core/agents.py CHANGED
@@ -24,8 +24,10 @@ from ripperdoc.tools.glob_tool import GlobTool
 from ripperdoc.tools.grep_tool import GrepTool
 from ripperdoc.tools.kill_bash_tool import KillBashTool
 from ripperdoc.tools.ls_tool import LSTool
+from ripperdoc.tools.lsp_tool import LspTool
 from ripperdoc.tools.multi_edit_tool import MultiEditTool
 from ripperdoc.tools.notebook_edit_tool import NotebookEditTool
+from ripperdoc.tools.skill_tool import SkillTool
 from ripperdoc.tools.todo_tool import TodoReadTool, TodoWriteTool
 from ripperdoc.tools.tool_search_tool import ToolSearchTool
 from ripperdoc.tools.mcp_tools import (
@@ -66,6 +68,8 @@ TOOL_SEARCH_TOOL_NAME = _safe_tool_name(ToolSearchTool, "ToolSearch")
 MCP_LIST_SERVERS_TOOL_NAME = _safe_tool_name(ListMcpServersTool, "ListMcpServers")
 MCP_LIST_RESOURCES_TOOL_NAME = _safe_tool_name(ListMcpResourcesTool, "ListMcpResources")
 MCP_READ_RESOURCE_TOOL_NAME = _safe_tool_name(ReadMcpResourceTool, "ReadMcpResource")
+LSP_TOOL_NAME = _safe_tool_name(LspTool, "LSP")
+SKILL_TOOL_NAME = _safe_tool_name(SkillTool, "Skill")
 TASK_TOOL_NAME = "Task"
 
 
ripperdoc/core/config.py CHANGED
@@ -94,6 +94,69 @@ def api_base_env_candidates(provider: ProviderType) -> list[str]:
     ]
 
 
+# Known vision-enabled model patterns for auto-detection
+VISION_ENABLED_MODELS = {
+    # Anthropic Claude models
+    "claude-haiku-4-5-20251001",
+    "claude-sonnet-4-5-20250929",
+    "claude-opus-4-5-20251101",
+    "claude-haiku-4-5",
+    "claude-sonnet-4-5",
+    "claude-opus-4-5",
+    "claude-3-5-sonnet",
+    "claude-3-5-sonnet-20241022",
+    "claude-3-5-sonnet-20240620",
+    "claude-3-5-haiku",
+    "claude-3-5-haiku-20241022",
+    "claude-3-opus",
+    "claude-3-opus-20240229",
+    "claude-3-sonnet",
+    "claude-3-sonnet-20240229",
+    "claude-3-haiku",
+    "claude-3-haiku-20240307",
+    # OpenAI models
+    "gpt-4o",
+    "gpt-4o-2024-08-06",
+    "gpt-4o-mini",
+    "gpt-4o-mini-2024-07-18",
+    "gpt-4-turbo",
+    "gpt-4-turbo-2024-04-09",
+    "gpt-4",
+    "gpt-4-0314",
+    "gpt-4-vision-preview",
+    "chatgpt-4o-latest",
+    # Google Gemini models
+    "gemini-3-pro-preview",
+    "gemini-3-flash-preview",
+    "gemini-2.5-pro",
+    "gemini-2.5-flash-lite",
+    "gemini-2.5-flash",
+    "gemini-2.0-flash-exp",
+    "gemini-2.0-flash-thinking-exp",
+    "gemini-exp-1206",
+    "gemini-pro-vision",
+    "gemini-1.5-pro",
+    "gemini-1.5-pro-001",
+    "gemini-1.5-flash",
+    "gemini-1.5-flash-001",
+    # Alibaba Qwen models (vision)
+    "qwen-vl-max",
+    "qwen-vl-plus",
+    "qwen-vl-plus-latest",
+    "qwen2-vl-72b-instruct",
+    "qwen-vl-chat",
+    "qwen-vl-7b-chat",
+    # DeepSeek models (some support vision)
+    "deepseek-vl",
+    "deepseek-vl-chat",
+    # Other vision models
+    "glm-4v",
+    "glm-4v-plus",
+    "minivision-3b",
+    "internvl2",
+}
+
+
 class ModelProfile(BaseModel):
     """Configuration for a specific AI model."""
 
@@ -113,11 +176,31 @@ class ModelProfile(BaseModel):
     # Optional override for thinking protocol handling (e.g., "deepseek", "openrouter",
     # "qwen", "gemini_openai", "openai"). When unset, provider heuristics are used.
     thinking_mode: Optional[str] = None
+    # Vision support flag. None = auto-detect based on model name, True/False = override.
+    supports_vision: Optional[bool] = None
     # Pricing (USD per 1M tokens). Leave as 0 to skip cost calculation.
     input_cost_per_million_tokens: float = 0.0
     output_cost_per_million_tokens: float = 0.0
 
 
+def model_supports_vision(model_profile: ModelProfile) -> bool:
+    """Detect whether a model supports vision/image input.
+
+    Args:
+        model_profile: The model profile to check
+
+    Returns:
+        True if the model supports vision capabilities, False otherwise
+    """
+    # If explicitly configured, use the config value
+    if model_profile.supports_vision is not None:
+        return model_profile.supports_vision
+
+    # Auto-detect based on model name
+    model_name = model_profile.model.lower()
+    return any(pattern in model_name for pattern in VISION_ENABLED_MODELS)
+
+
 class ModelPointers(BaseModel):
     """Pointers to different model profiles for different purposes."""
 
@@ -141,6 +224,8 @@ class GlobalConfig(BaseModel):
     show_full_thinking: bool = Field(default=False)
     auto_compact_enabled: bool = True
     context_token_limit: Optional[int] = None
+    # Default thinking tokens budget when thinking mode is enabled (0 = disabled by default)
+    default_thinking_tokens: int = Field(default=10240)
 
     # User-level permission rules (applied globally)
     user_allow_rules: list[str] = Field(default_factory=list)
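The model_supports_vision helper added above checks the explicit supports_vision flag first and only falls back to substring matching against VISION_ENABLED_MODELS when the flag is left as None. A short usage sketch; the import path follows this file, the provider string is assumed to coerce to the ProviderType enum, and ModelProfile may require fields not visible in this diff:

    from ripperdoc.core.config import ModelProfile, model_supports_vision

    # Explicit flag wins over name-based detection.
    private = ModelProfile(provider="openai_compatible", model="my-private-model", supports_vision=True)
    assert model_supports_vision(private) is True

    # With supports_vision left as None, the lowercased model name is matched against
    # VISION_ENABLED_MODELS by substring: "gpt-4o" matches "gpt-4o-2024-08-06".
    gpt = ModelProfile(provider="openai_compatible", model="gpt-4o-2024-08-06")
    assert model_supports_vision(gpt) is True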
@@ -591,3 +676,153 @@ def save_project_local_config(
 ) -> None:
     """Save project-local configuration."""
     config_manager.save_project_local_config(config, project_path)
+
+
+# ==============================================================================
+# Global RIPPERDOC_* environment variable support
+# ==============================================================================
+
+# Environment variable name constants
+RIPPERDOC_BASE_URL = "RIPPERDOC_BASE_URL"
+RIPPERDOC_AUTH_TOKEN = "RIPPERDOC_AUTH_TOKEN"
+RIPPERDOC_MODEL = "RIPPERDOC_MODEL"
+RIPPERDOC_SMALL_FAST_MODEL = "RIPPERDOC_SMALL_FAST_MODEL"
+RIPPERDOC_API_KEY = "RIPPERDOC_API_KEY"
+RIPPERDOC_PROTOCOL = "RIPPERDOC_PROTOCOL"
+
+
+def _infer_protocol_from_url_and_model(base_url: str, model_name: str = "") -> ProviderType:
+    """Infer the protocol type from the base URL and model name.
+
+    Args:
+        base_url: The API base URL
+        model_name: The model name
+
+    Returns:
+        The inferred ProviderType
+    """
+    base_lower = base_url.lower()
+    model_lower = model_name.lower()
+
+    # Explicit domain detection
+    if "anthropic.com" in base_lower:
+        return ProviderType.ANTHROPIC
+    if "generativelanguage.googleapis.com" in base_lower or "gemini" in model_lower:
+        return ProviderType.GEMINI
+
+    # URL path detection: check whether the path carries a protocol marker
+    if "/anthropic" in base_lower or base_lower.endswith("/anthropic"):
+        return ProviderType.ANTHROPIC
+    if "/v1/" in base_lower or "/v1" in base_lower:
+        # Most /v1/ paths are OpenAI-compatible endpoints
+        return ProviderType.OPENAI_COMPATIBLE
+
+    # Model name prefix detection
+    if model_lower.startswith("claude-"):
+        return ProviderType.ANTHROPIC
+    if model_lower.startswith("gemini-"):
+        return ProviderType.GEMINI
+
+    # Default to the OpenAI-compatible protocol
+    return ProviderType.OPENAI_COMPATIBLE
+
+
+def _get_ripperdoc_env_overrides() -> Dict[str, Any]:
+    """Collect the values of all RIPPERDOC_* environment variables.
+
+    Returns:
+        A dict containing every environment variable that is set
+    """
+    overrides: Dict[str, Any] = {}
+    if base_url := os.getenv(RIPPERDOC_BASE_URL):
+        overrides["base_url"] = base_url
+    if api_key := os.getenv(RIPPERDOC_API_KEY):
+        overrides["api_key"] = api_key
+    if auth_token := os.getenv(RIPPERDOC_AUTH_TOKEN):
+        overrides["auth_token"] = auth_token
+    if model := os.getenv(RIPPERDOC_MODEL):
+        overrides["model"] = model
+    if small_fast_model := os.getenv(RIPPERDOC_SMALL_FAST_MODEL):
+        overrides["small_fast_model"] = small_fast_model
+    if protocol_str := os.getenv(RIPPERDOC_PROTOCOL):
+        try:
+            overrides["protocol"] = ProviderType(protocol_str.lower())
+        except ValueError:
+            logger.warning(
+                "[config] Invalid RIPPERDOC_PROTOCOL value: %s (must be anthropic, openai_compatible, or gemini)",
+                protocol_str,
+            )
+    return overrides
+
+
+def has_ripperdoc_env_overrides() -> bool:
+    """Check whether any RIPPERDOC_* environment variable is set."""
+    return bool(_get_ripperdoc_env_overrides())
+
+
+def get_effective_model_profile(pointer: str = "main") -> Optional[ModelProfile]:
+    """Get the model configuration with RIPPERDOC_* environment overrides applied.
+
+    When the RIPPERDOC_BASE_URL environment variable is set, the ModelProfile is built
+    entirely in memory, without reading or writing the config file. This is the new entry
+    point for obtaining the model configuration, replacing get_current_model_profile().
+
+    Args:
+        pointer: The model pointer name ("main" or "quick")
+
+    Returns:
+        The ModelProfile with environment overrides applied, or None if unavailable
+    """
+    env_overrides = _get_ripperdoc_env_overrides()
+    base_url = env_overrides.get("base_url")
+
+    # If RIPPERDOC_BASE_URL is set, build the profile entirely in memory
+    if base_url:
+        # Determine the model name
+        if pointer == "quick":
+            model_name = env_overrides.get("small_fast_model") or env_overrides.get("model")
+        else:
+            model_name = env_overrides.get("model")
+
+        if not model_name:
+            model_name = "claude-sonnet-4-5-20250929"
+
+        # Determine the protocol type
+        protocol = env_overrides.get("protocol")
+        if not protocol:
+            protocol = _infer_protocol_from_url_and_model(base_url, model_name)
+
+        # Create a new profile in memory; nothing is written to the config file
+        return ModelProfile(
+            provider=protocol,
+            model=model_name,
+            api_base=base_url,
+            api_key=env_overrides.get("api_key"),
+            auth_token=env_overrides.get("auth_token"),
+        )
+
+    # RIPPERDOC_BASE_URL is not set; return the profile from the config file
+    return get_current_model_profile(pointer)
+
+
+def get_ripperdoc_env_status() -> Dict[str, str]:
+    """Get RIPPERDOC_* environment variable status information for diagnostic display.
+
+    Returns:
+        A dict keyed by environment variable name, with formatted display strings as values
+    """
+    status: Dict[str, str] = {}
+    if base_url := os.getenv(RIPPERDOC_BASE_URL):
+        status["BASE_URL"] = base_url
+    if protocol := os.getenv(RIPPERDOC_PROTOCOL):
+        status["PROTOCOL"] = protocol
+    if model := os.getenv(RIPPERDOC_MODEL):
+        status["MODEL"] = model
+    if small_fast_model := os.getenv(RIPPERDOC_SMALL_FAST_MODEL):
+        status["SMALL_FAST_MODEL"] = small_fast_model
+    if api_key := os.getenv(RIPPERDOC_API_KEY):
+        masked = api_key[:4] + "…" if len(api_key) > 4 else "set"
+        status["API_KEY"] = f"{masked} (${RIPPERDOC_API_KEY})"
+    if auth_token := os.getenv(RIPPERDOC_AUTH_TOKEN):
+        masked = auth_token[:4] + "…" if len(auth_token) > 4 else "set"
+        status["AUTH_TOKEN"] = f"{masked} (${RIPPERDOC_AUTH_TOKEN})"
+    return status
ripperdoc/core/default_tools.py CHANGED
@@ -93,6 +93,7 @@ def filter_tools_by_names(
 
     # If Task is requested, recreate it with the filtered base tools
     if has_task:
+
         def _filtered_base_provider() -> List[Tool[Any, Any]]:
             return [t for t in filtered if getattr(t, "name", None) != "Task"]
 
ripperdoc/core/hooks/llm_callback.py CHANGED
@@ -56,4 +56,3 @@
             return f"Prompt hook evaluation failed: {exc}"
 
     return _callback
-
ripperdoc/core/hooks/manager.py CHANGED
@@ -510,10 +510,14 @@ class HookManager:
         stop_sequence: Optional[str] = None,
     ) -> HookResult:
         """Run Stop hooks asynchronously."""
+        logger.debug("[hook_manager] run_stop_async ENTER")
         hooks = self._get_hooks(HookEvent.STOP)
+        logger.debug(f"[hook_manager] run_stop_async: got {len(hooks)} hooks")
         if not hooks:
+            logger.debug("[hook_manager] run_stop_async: no hooks, returning empty HookResult")
             return HookResult([])
 
+        logger.debug("[hook_manager] run_stop_async: creating StopInput")
         input_data = StopInput(
             stop_hook_active=stop_hook_active,
             reason=reason,
@@ -524,7 +528,9 @@ class HookManager:
             permission_mode=self.permission_mode,
         )
 
+        logger.debug("[hook_manager] run_stop_async: calling executor.execute_hooks_async")
         outputs = await self.executor.execute_hooks_async(hooks, input_data)
+        logger.debug("[hook_manager] run_stop_async: execute_hooks_async returned")
         return HookResult(outputs)
 
     # --- Subagent Stop ---