ripperdoc 0.2.8__py3-none-any.whl → 0.2.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +257 -123
  3. ripperdoc/cli/commands/__init__.py +2 -1
  4. ripperdoc/cli/commands/agents_cmd.py +138 -8
  5. ripperdoc/cli/commands/clear_cmd.py +9 -4
  6. ripperdoc/cli/commands/config_cmd.py +1 -1
  7. ripperdoc/cli/commands/context_cmd.py +3 -2
  8. ripperdoc/cli/commands/doctor_cmd.py +18 -4
  9. ripperdoc/cli/commands/exit_cmd.py +1 -0
  10. ripperdoc/cli/commands/hooks_cmd.py +27 -53
  11. ripperdoc/cli/commands/models_cmd.py +27 -10
  12. ripperdoc/cli/commands/permissions_cmd.py +27 -9
  13. ripperdoc/cli/commands/resume_cmd.py +9 -3
  14. ripperdoc/cli/commands/stats_cmd.py +244 -0
  15. ripperdoc/cli/commands/status_cmd.py +4 -4
  16. ripperdoc/cli/commands/tasks_cmd.py +8 -4
  17. ripperdoc/cli/ui/file_mention_completer.py +2 -1
  18. ripperdoc/cli/ui/interrupt_handler.py +2 -3
  19. ripperdoc/cli/ui/message_display.py +4 -2
  20. ripperdoc/cli/ui/panels.py +1 -0
  21. ripperdoc/cli/ui/provider_options.py +247 -0
  22. ripperdoc/cli/ui/rich_ui.py +403 -81
  23. ripperdoc/cli/ui/spinner.py +54 -18
  24. ripperdoc/cli/ui/thinking_spinner.py +1 -2
  25. ripperdoc/cli/ui/tool_renderers.py +8 -2
  26. ripperdoc/cli/ui/wizard.py +213 -0
  27. ripperdoc/core/agents.py +19 -6
  28. ripperdoc/core/config.py +51 -17
  29. ripperdoc/core/custom_commands.py +7 -6
  30. ripperdoc/core/default_tools.py +101 -12
  31. ripperdoc/core/hooks/config.py +1 -3
  32. ripperdoc/core/hooks/events.py +27 -28
  33. ripperdoc/core/hooks/executor.py +4 -6
  34. ripperdoc/core/hooks/integration.py +12 -21
  35. ripperdoc/core/hooks/llm_callback.py +59 -0
  36. ripperdoc/core/hooks/manager.py +40 -15
  37. ripperdoc/core/permissions.py +118 -12
  38. ripperdoc/core/providers/anthropic.py +109 -36
  39. ripperdoc/core/providers/gemini.py +70 -5
  40. ripperdoc/core/providers/openai.py +89 -24
  41. ripperdoc/core/query.py +273 -68
  42. ripperdoc/core/query_utils.py +2 -0
  43. ripperdoc/core/skills.py +9 -3
  44. ripperdoc/core/system_prompt.py +4 -2
  45. ripperdoc/core/tool.py +17 -8
  46. ripperdoc/sdk/client.py +79 -4
  47. ripperdoc/tools/ask_user_question_tool.py +5 -3
  48. ripperdoc/tools/background_shell.py +307 -135
  49. ripperdoc/tools/bash_output_tool.py +1 -1
  50. ripperdoc/tools/bash_tool.py +63 -24
  51. ripperdoc/tools/dynamic_mcp_tool.py +29 -8
  52. ripperdoc/tools/enter_plan_mode_tool.py +1 -1
  53. ripperdoc/tools/exit_plan_mode_tool.py +1 -1
  54. ripperdoc/tools/file_edit_tool.py +167 -54
  55. ripperdoc/tools/file_read_tool.py +28 -4
  56. ripperdoc/tools/file_write_tool.py +13 -10
  57. ripperdoc/tools/glob_tool.py +3 -2
  58. ripperdoc/tools/grep_tool.py +3 -2
  59. ripperdoc/tools/kill_bash_tool.py +1 -1
  60. ripperdoc/tools/ls_tool.py +1 -1
  61. ripperdoc/tools/lsp_tool.py +615 -0
  62. ripperdoc/tools/mcp_tools.py +13 -10
  63. ripperdoc/tools/multi_edit_tool.py +8 -7
  64. ripperdoc/tools/notebook_edit_tool.py +7 -4
  65. ripperdoc/tools/skill_tool.py +1 -1
  66. ripperdoc/tools/task_tool.py +519 -69
  67. ripperdoc/tools/todo_tool.py +2 -2
  68. ripperdoc/tools/tool_search_tool.py +3 -2
  69. ripperdoc/utils/conversation_compaction.py +9 -5
  70. ripperdoc/utils/file_watch.py +214 -5
  71. ripperdoc/utils/json_utils.py +2 -1
  72. ripperdoc/utils/lsp.py +806 -0
  73. ripperdoc/utils/mcp.py +11 -3
  74. ripperdoc/utils/memory.py +4 -2
  75. ripperdoc/utils/message_compaction.py +21 -7
  76. ripperdoc/utils/message_formatting.py +14 -7
  77. ripperdoc/utils/messages.py +126 -67
  78. ripperdoc/utils/path_ignore.py +35 -8
  79. ripperdoc/utils/permissions/path_validation_utils.py +2 -1
  80. ripperdoc/utils/permissions/shell_command_validation.py +427 -91
  81. ripperdoc/utils/permissions/tool_permission_utils.py +174 -15
  82. ripperdoc/utils/safe_get_cwd.py +2 -1
  83. ripperdoc/utils/session_heatmap.py +244 -0
  84. ripperdoc/utils/session_history.py +13 -6
  85. ripperdoc/utils/session_stats.py +293 -0
  86. ripperdoc/utils/todo.py +2 -1
  87. ripperdoc/utils/token_estimation.py +6 -1
  88. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.10.dist-info}/METADATA +8 -2
  89. ripperdoc-0.2.10.dist-info/RECORD +129 -0
  90. ripperdoc-0.2.8.dist-info/RECORD +0 -121
  91. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.10.dist-info}/WHEEL +0 -0
  92. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.10.dist-info}/entry_points.txt +0 -0
  93. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.10.dist-info}/licenses/LICENSE +0 -0
  94. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.10.dist-info}/top_level.txt +0 -0
ripperdoc/cli/ui/spinner.py CHANGED
@@ -1,43 +1,57 @@
-from typing import Any, Literal, Optional
+from contextlib import contextmanager
+from typing import Any, Generator, Literal, Optional
+
 from rich.console import Console
-from rich.markup import escape
-from rich.status import Status
+from rich.live import Live
+from rich.text import Text
+from rich.spinner import Spinner as RichSpinner
 
 
 class Spinner:
-    """Lightweight spinner wrapper for Rich status."""
+    """Lightweight spinner wrapper that plays nicely with other console output."""
 
     def __init__(self, console: Console, text: str = "Thinking...", spinner: str = "dots"):
         self.console = console
         self.text = text
         self.spinner = spinner
-        self._status: Optional[Status] = None
+        self._style = "cyan"
+        self._live: Optional[Live] = None
+        # Blue spinner for clearer visual separation in the terminal (icon + text)
+        self._renderable: RichSpinner = RichSpinner(
+            spinner, text=Text(self.text, style=self._style), style=self._style
+        )
 
     def start(self) -> None:
         """Start the spinner if not already running."""
-
-        if self._status is not None:
+        if self._live is not None:
             return
-        self._status = self.console.status(
-            f"[cyan]{escape(self.text)}[/cyan]", spinner=self.spinner
+        self._renderable.text = Text(self.text, style=self._style)
+        self._live = Live(
+            self._renderable,
+            console=self.console,
+            transient=True,  # Remove spinner line when stopped to avoid layout glitches
+            refresh_per_second=12,
         )
-        self._status.__enter__()
+        self._live.start()
 
     def update(self, text: Optional[str] = None) -> None:
         """Update spinner text."""
-
-        if self._status is None:
+        if self._live is None:
             return
-        new_text = text if text is not None else self.text
-        self._status.update(f"[cyan]{escape(new_text)}[/cyan]")
+        if text is not None:
+            self.text = text
+        self._renderable.text = Text(self.text, style=self._style)
+        # Live.refresh() redraws the current renderable
+        self._live.refresh()
 
     def stop(self) -> None:
         """Stop the spinner if running."""
-
-        if self._status is None:
+        if self._live is None:
             return
-        self._status.__exit__(None, None, None)
-        self._status = None
+        try:
+            self._live.stop()
+        finally:
+            self._live = None
 
     def __enter__(self) -> "Spinner":
         self.start()
@@ -47,3 +61,25 @@ class Spinner:
         self.stop()
         # Do not suppress exceptions
         return False
+
+    @property
+    def is_running(self) -> bool:
+        """Check if spinner is currently running."""
+        return self._live is not None
+
+    @contextmanager
+    def paused(self) -> Generator[None, None, None]:
+        """Context manager to temporarily pause the spinner for clean output.
+
+        Usage:
+            with spinner.paused():
+                console.print("Some output")
+        """
+        was_running = self.is_running
+        if was_running:
+            self.stop()
+        try:
+            yield
+        finally:
+            if was_running:
+                self.start()
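
For orientation, a minimal usage sketch of the reworked spinner API (the module path comes from the file list above; the surrounding CLI wiring is assumed, so treat this as illustrative rather than how ripperdoc itself drives it):

from rich.console import Console

from ripperdoc.cli.ui.spinner import Spinner

console = Console()
spinner = Spinner(console, text="Indexing project...")
spinner.start()

# paused() tears down the transient Live spinner line, runs the block,
# then restarts the spinner only if it was running before.
with spinner.paused():
    console.print("[green]Wrote 3 files[/green]")

spinner.update("Still indexing...")
spinner.stop()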
ripperdoc/cli/ui/thinking_spinner.py CHANGED
@@ -22,7 +22,6 @@ THINKING_WORDS: list[str] = [
     "Cerebrating",
     "Channelling",
     "Churning",
-    "Clauding",
     "Coalescing",
     "Cogitating",
     "Computing",
@@ -114,7 +113,7 @@ class ThinkingSpinner(Spinner):
 
     def _format_text(self, suffix: Optional[str] = None) -> str:
         elapsed = int(time.monotonic() - self.start_time)
-        base = f" {self.thinking_word}… (esc to interrupt · {elapsed}s"
+        base = f" {self.thinking_word}… (esc to interrupt · {elapsed}s"
         if self.out_tokens > 0:
             base += f" · ↓ {self.out_tokens} tokens"
         else:
ripperdoc/cli/ui/tool_renderers.py CHANGED
@@ -155,7 +155,10 @@ class BashResultRenderer(ToolResultRenderer):
     """Render Bash tool results."""
 
     def __init__(
-        self, console: Console, verbose: bool = False, parse_fallback: Optional[BashOutputParser] = None
+        self,
+        console: Console,
+        verbose: bool = False,
+        parse_fallback: Optional[BashOutputParser] = None,
     ):
         super().__init__(console, verbose)
         self._parse_fallback = parse_fallback
@@ -254,7 +257,10 @@ class ToolResultRendererRegistry:
     """Registry that selects the appropriate renderer for a tool result."""
 
     def __init__(
-        self, console: Console, verbose: bool = False, parse_bash_fallback: Optional[BashOutputParser] = None
+        self,
+        console: Console,
+        verbose: bool = False,
+        parse_bash_fallback: Optional[BashOutputParser] = None,
     ):
         self.console = console
         self.verbose = verbose
ripperdoc/cli/ui/wizard.py ADDED
@@ -0,0 +1,213 @@
+"""
+Interactive onboarding wizard for Ripperdoc.
+"""
+
+from typing import List, Optional, Tuple
+
+import click
+from rich.console import Console
+
+from ripperdoc.cli.ui.provider_options import (
+    KNOWN_PROVIDERS,
+    ProviderOption,
+    default_model_for_protocol,
+)
+from ripperdoc.core.config import (
+    GlobalConfig,
+    ModelProfile,
+    ProviderType,
+    get_global_config,
+    save_global_config,
+)
+from ripperdoc.utils.prompt import prompt_secret
+
+
+console = Console()
+
+
+def resolve_provider_choice(raw_choice: str, provider_keys: List[str]) -> Optional[str]:
+    """Normalize user input into a provider key."""
+    normalized = raw_choice.strip().lower()
+    if normalized in provider_keys:
+        return normalized
+    try:
+        idx = int(normalized)
+        if 1 <= idx <= len(provider_keys):
+            return provider_keys[idx - 1]
+    except ValueError:
+        return None
+    return None
+
+
+def check_onboarding() -> bool:
+    """Check if onboarding is complete and run if needed."""
+    config = get_global_config()
+
+    if config.has_completed_onboarding:
+        return True
+
+    console.print("[bold cyan]Welcome to Ripperdoc![/bold cyan]\n")
+    console.print("Let's set up your AI model configuration.\n")
+
+    return run_onboarding_wizard(config)
+
+
+def run_onboarding_wizard(config: GlobalConfig) -> bool:
+    """Run interactive onboarding wizard."""
+    provider_keys = KNOWN_PROVIDERS.keys() + ["custom"]
+    default_choice_key = KNOWN_PROVIDERS.default_choice.key
+
+    # Display provider options vertically
+    console.print("[bold]Available providers:[/bold]")
+    for i, provider_key in enumerate(provider_keys, 1):
+        marker = "[cyan]→[/cyan]" if provider_key == default_choice_key else " "
+        console.print(f"  {marker} {i}. {provider_key}")
+    console.print("")
+
+    # Prompt for provider choice with validation
+    provider_choice: Optional[str] = None
+    while provider_choice is None:
+        raw_choice = click.prompt(
+            "Choose your model provider",
+            default=default_choice_key,
+        )
+        provider_choice = resolve_provider_choice(raw_choice, provider_keys)
+        if provider_choice is None:
+            console.print(
+                f"[red]Invalid choice. Please enter a provider name or number (1-{len(provider_keys)}).[/red]"
+            )
+
+    api_base_override: Optional[str] = None
+    if provider_choice == "custom":
+        protocol_input = click.prompt(
+            "Protocol family (for API compatibility)",
+            type=click.Choice([p.value for p in ProviderType]),
+            default=ProviderType.OPENAI_COMPATIBLE.value,
+        )
+        protocol = ProviderType(protocol_input)
+        api_base_override = click.prompt("API Base URL")
+        provider_option = ProviderOption(
+            key="custom",
+            protocol=protocol,
+            default_model=default_model_for_protocol(protocol),
+            model_suggestions=(),
+        )
+    else:
+        provider_option = KNOWN_PROVIDERS.get(provider_choice) or ProviderOption(
+            key=provider_choice,
+            protocol=ProviderType.OPENAI_COMPATIBLE,
+            default_model=default_model_for_protocol(ProviderType.OPENAI_COMPATIBLE),
+            model_suggestions=(),
+        )
+
+    api_key = ""
+    while not api_key:
+        api_key = prompt_secret("Enter your API key").strip()
+        if not api_key:
+            console.print("[red]API key is required.[/red]")
+
+    # Get model name with provider-specific suggestions
+    model, api_base = get_model_name_with_suggestions(provider_option, api_base_override)
+
+    # Get context window
+    context_window = get_context_window()
+
+    # Create model profile
+    config.model_profiles["default"] = ModelProfile(
+        provider=provider_option.protocol,
+        model=model,
+        api_key=api_key,
+        api_base=api_base,
+        context_window=context_window,
+    )
+
+    config.has_completed_onboarding = True
+    config.last_onboarding_version = get_version()
+
+    save_global_config(config)
+
+    console.print("\n[green]✓ Configuration saved![/green]\n")
+    return True
+
+
+def get_model_name_with_suggestions(
+    provider: ProviderOption,
+    api_base_override: Optional[str],
+) -> Tuple[str, Optional[str]]:
+    """Get model name with provider-specific suggestions and default API base.
+
+    Returns:
+        Tuple of (model_name, api_base)
+    """
+    # Set default API base based on provider choice
+    api_base = api_base_override
+    if api_base is None and provider.default_api_base:
+        api_base = provider.default_api_base
+        console.print(f"[dim]Using default API base: {api_base}[/dim]")
+
+    default_model = provider.default_model or default_model_for_protocol(provider.protocol)
+    suggestions = list(provider.model_suggestions)
+
+    # Show suggestions if available
+    if suggestions:
+        console.print("\n[dim]Available models for this provider:[/dim]")
+        for i, model_name in enumerate(suggestions[:5]):  # Show top 5
+            console.print(f"  [dim]{i+1}. {model_name}[/dim]")
+        console.print("")
+
+    # Prompt for model name
+    if provider.protocol == ProviderType.ANTHROPIC:
+        model = click.prompt("Model name", default=default_model)
+    elif provider.protocol == ProviderType.OPENAI_COMPATIBLE:
+        model = click.prompt("Model name", default=default_model)
+        # Prompt for API base if still not set
+        if api_base is None:
+            api_base_input = click.prompt(
+                "API base URL (optional)", default="", show_default=False
+            )
+            api_base = api_base_input or None
+    elif provider.protocol == ProviderType.GEMINI:
+        model = click.prompt("Model name", default=default_model)
+        if api_base is None:
+            api_base_input = click.prompt(
+                "API base URL (optional)", default="", show_default=False
+            )
+            api_base = api_base_input or None
+    else:
+        model = click.prompt("Model name", default=default_model)
+
+    return model, api_base
+
+
+def get_context_window() -> Optional[int]:
+    """Get context window size from user."""
+    context_window_input = click.prompt(
+        "Context window in tokens (optional, press Enter to skip)",
+        default="",
+        show_default=False,
+    )
+    context_window = None
+    if context_window_input.strip():
+        try:
+            context_window = int(context_window_input.strip())
+        except ValueError:
+            console.print(
+                "[yellow]Invalid context window, using auto-detected defaults.[/yellow]"
+            )
+    return context_window
+
+
+def get_version() -> str:
+    """Get current version of Ripperdoc."""
+    try:
+        from ripperdoc import __version__
+        return __version__
+    except ImportError:
+        return "unknown"
+
+
+if __name__ == "__main__":
+    # For testing
+    config = get_global_config()
+    config.has_completed_onboarding = False
+    run_onboarding_wizard(config)
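
A quick sketch of how the new resolve_provider_choice helper normalizes wizard input (the key list below is invented for illustration; in the wizard it is built from KNOWN_PROVIDERS plus "custom"):

from ripperdoc.cli.ui.wizard import resolve_provider_choice

keys = ["anthropic", "openai", "gemini", "custom"]  # hypothetical key list
assert resolve_provider_choice("2", keys) == "openai"          # 1-based numeric selection
assert resolve_provider_choice("  Custom ", keys) == "custom"  # names are trimmed and lower-cased
assert resolve_provider_choice("0", keys) is None              # out-of-range numbers are rejected
assert resolve_provider_choice("nope", keys) is None           # unknown names are rejected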
ripperdoc/core/agents.py CHANGED
@@ -10,6 +10,7 @@ from typing import Any, Dict, Iterable, List, Optional, Tuple
 
 import yaml
 
+from ripperdoc.utils.coerce import parse_boolish
 from ripperdoc.utils.log import get_logger
 from ripperdoc.tools.ask_user_question_tool import AskUserQuestionTool
 from ripperdoc.tools.bash_output_tool import BashOutputTool
@@ -91,6 +92,7 @@ class AgentDefinition:
     model: Optional[str] = None
     color: Optional[str] = None
     filename: Optional[str] = None
+    fork_context: bool = False
 
 
 @dataclass
@@ -234,7 +236,7 @@ def _built_in_agents() -> List[AgentDefinition]:
             system_prompt=EXPLORE_AGENT_PROMPT,
             location=AgentLocation.BUILT_IN,
             color="green",
-            model="task",
+            model="main",
         ),
         AgentDefinition(
             agent_type="plan",
@@ -278,10 +280,15 @@ def _split_frontmatter(raw_text: str) -> Tuple[Dict[str, Any], str]:
             body = "\n".join(lines[idx + 1 :])
             try:
                 frontmatter = yaml.safe_load(frontmatter_text) or {}
-            except (yaml.YAMLError, ValueError, TypeError) as exc:  # pragma: no cover - defensive
+            except (
+                yaml.YAMLError,
+                ValueError,
+                TypeError,
+            ) as exc:  # pragma: no cover - defensive
                 logger.warning(
                     "Invalid frontmatter in agent file: %s: %s",
-                    type(exc).__name__, exc,
+                    type(exc).__name__,
+                    exc,
                     extra={"error": str(exc)},
                 )
                 return {"__error__": f"Invalid frontmatter: {exc}"}, body
@@ -312,14 +319,16 @@ def _parse_agent_file(
     except (OSError, IOError, UnicodeDecodeError) as exc:
         logger.warning(
             "Failed to read agent file: %s: %s",
-            type(exc).__name__, exc,
+            type(exc).__name__,
+            exc,
             extra={"error": str(exc), "path": str(path)},
         )
         return None, f"Failed to read agent file {path}: {exc}"
 
     frontmatter, body = _split_frontmatter(text)
-    if "__error__" in frontmatter:
-        return None, str(frontmatter["__error__"])
+    error = frontmatter.get("__error__")
+    if error is not None:
+        return None, str(error)
 
     agent_name = frontmatter.get("name")
     description = frontmatter.get("description")
@@ -333,6 +342,7 @@ def _parse_agent_file(
     color_value = frontmatter.get("color")
     model = model_value if isinstance(model_value, str) else None
     color = color_value if isinstance(color_value, str) else None
+    fork_context = parse_boolish(frontmatter.get("fork_context") or frontmatter.get("fork-context"))
 
     agent = AgentDefinition(
         agent_type=agent_name.strip(),
@@ -343,6 +353,7 @@
         model=model,
         color=color,
         filename=path.stem,
+        fork_context=fork_context,
     )
     return agent, None
 
@@ -398,6 +409,8 @@ def summarize_agent(agent: AgentDefinition) -> str:
     tool_label = "all tools" if "*" in agent.tools else ", ".join(agent.tools)
     location = getattr(agent.location, "value", agent.location)
     details = [f"tools: {tool_label}"]
+    if agent.fork_context:
+        details.append("context: forked")
     if agent.model:
         details.append(f"model: {agent.model}")
     return f"- {agent.agent_type} ({location}): {agent.when_to_use} [{'; '.join(details)}]"
ripperdoc/core/config.py CHANGED
@@ -7,8 +7,8 @@ including API keys, model settings, and user preferences.
 import json
 import os
 from pathlib import Path
-from typing import Dict, Optional, Literal
-from pydantic import BaseModel, Field
+from typing import Any, Dict, Optional, Literal
+from pydantic import BaseModel, Field, model_validator
 from enum import Enum
 
 from ripperdoc.utils.log import get_logger
@@ -111,7 +111,7 @@ class ModelProfile(BaseModel):
     # interactions into plain text to support providers that reject tool roles.
     openai_tool_mode: Literal["native", "text"] = "native"
     # Optional override for thinking protocol handling (e.g., "deepseek", "openrouter",
-    # "qwen", "gemini_openai", "openai_reasoning"). When unset, provider heuristics are used.
+    # "qwen", "gemini_openai", "openai"). When unset, provider heuristics are used.
     thinking_mode: Optional[str] = None
     # Pricing (USD per 1M tokens). Leave as 0 to skip cost calculation.
     input_cost_per_million_tokens: float = 0.0
@@ -122,15 +122,13 @@ class ModelPointers(BaseModel):
     """Pointers to different model profiles for different purposes."""
 
     main: str = "default"
-    task: str = "default"
-    reasoning: str = "default"
     quick: str = "default"
 
 
 class GlobalConfig(BaseModel):
     """Global configuration stored in ~/.ripperdoc.json"""
 
-    model_config = {"protected_namespaces": ()}
+    model_config = {"protected_namespaces": (), "populate_by_name": True}
 
     # Model configuration
     model_profiles: Dict[str, ModelProfile] = Field(default_factory=dict)
@@ -139,7 +137,8 @@ class GlobalConfig(BaseModel):
     # User preferences
     theme: str = "dark"
     verbose: bool = False
-    safe_mode: bool = True
+    yolo_mode: bool = Field(default=False)
+    show_full_thinking: bool = Field(default=False)
     auto_compact_enabled: bool = True
     context_token_limit: Optional[int] = None
 
@@ -154,6 +153,18 @@ class GlobalConfig(BaseModel):
     # Statistics
     num_startups: int = 0
 
+    @model_validator(mode="before")
+    @classmethod
+    def _migrate_safe_mode(cls, data: Any) -> Any:
+        """Translate legacy safe_mode to the new yolo_mode flag."""
+        if isinstance(data, dict) and "safe_mode" in data and "yolo_mode" not in data:
+            data = dict(data)
+            try:
+                data["yolo_mode"] = not bool(data.pop("safe_mode"))
+            except Exception:
+                data["yolo_mode"] = False
+        return data
+
 
 class ProjectConfig(BaseModel):
     """Project-specific configuration stored in .ripperdoc/config.json"""
@@ -167,7 +178,7 @@ class ProjectConfig(BaseModel):
     # Path ignore patterns (gitignore-style)
     ignore_patterns: list[str] = Field(
         default_factory=list,
-        description="Gitignore-style patterns for paths to ignore in file operations"
+        description="Gitignore-style patterns for paths to ignore in file operations",
     )
 
     # Context
@@ -179,7 +190,6 @@
 
     # Project settings
     dont_crawl_directory: bool = False
-    enable_architect_tool: bool = False
 
     # Trust
     has_trust_dialog_accepted: bool = False
@@ -222,10 +232,18 @@ class ConfigManager:
                     "profile_count": len(self._global_config.model_profiles),
                 },
            )
-        except (json.JSONDecodeError, OSError, IOError, UnicodeDecodeError, ValueError, TypeError) as e:
+        except (
+            json.JSONDecodeError,
+            OSError,
+            IOError,
+            UnicodeDecodeError,
+            ValueError,
+            TypeError,
+        ) as e:
            logger.warning(
                "Error loading global config: %s: %s",
-                type(e).__name__, e,
+                type(e).__name__,
+                e,
                extra={"error": str(e)},
            )
            self._global_config = GlobalConfig()
@@ -276,10 +294,18 @@ class ConfigManager:
                    "allowed_tools": len(self._project_config.allowed_tools),
                },
            )
-        except (json.JSONDecodeError, OSError, IOError, UnicodeDecodeError, ValueError, TypeError) as e:
+        except (
+            json.JSONDecodeError,
+            OSError,
+            IOError,
+            UnicodeDecodeError,
+            ValueError,
+            TypeError,
+        ) as e:
            logger.warning(
                "Error loading project config: %s: %s",
-                type(e).__name__, e,
+                type(e).__name__,
+                e,
                extra={"error": str(e), "path": str(config_path)},
            )
            self._project_config = ProjectConfig()
@@ -344,10 +370,18 @@ class ConfigManager:
                    "project_path": str(self.current_project_path),
                },
            )
-        except (json.JSONDecodeError, OSError, IOError, UnicodeDecodeError, ValueError, TypeError) as e:
+        except (
+            json.JSONDecodeError,
+            OSError,
+            IOError,
+            UnicodeDecodeError,
+            ValueError,
+            TypeError,
+        ) as e:
            logger.warning(
                "Error loading project-local config: %s: %s",
-                type(e).__name__, e,
+                type(e).__name__,
+                e,
                extra={"error": str(e), "path": str(config_path)},
            )
            self._project_local_config = ProjectLocalConfig()
@@ -480,7 +514,7 @@ class ConfigManager:
        return config
 
    def set_model_pointer(self, pointer: str, profile_name: str) -> GlobalConfig:
-        """Point a logical model slot (e.g., main/task) to a profile name."""
+        """Point a logical model slot (e.g., main/quick) to a profile name."""
        if pointer not in ModelPointers.model_fields:
            raise ValueError(f"Unknown model pointer '{pointer}'.")
 
@@ -538,7 +572,7 @@ def delete_model_profile(name: str) -> GlobalConfig:
 
 
 def set_model_pointer(pointer: str, profile_name: str) -> GlobalConfig:
-    """Update a model pointer (e.g., main/task) to target a profile."""
+    """Update a model pointer (e.g., main/quick) to target a profile."""
    return config_manager.set_model_pointer(pointer, profile_name)
 
 
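The retired safe_mode flag is folded into the new yolo_mode field by the before-validator shown above; here is a standalone pydantic v2 sketch of just that migration path (field and validator names copied from the diff, everything else about the real GlobalConfig omitted):

from typing import Any

from pydantic import BaseModel, Field, model_validator

class ConfigSketch(BaseModel):
    yolo_mode: bool = Field(default=False)

    @model_validator(mode="before")
    @classmethod
    def _migrate_safe_mode(cls, data: Any) -> Any:
        # Legacy configs stored safe_mode; invert it into yolo_mode once.
        if isinstance(data, dict) and "safe_mode" in data and "yolo_mode" not in data:
            data = dict(data)
            data["yolo_mode"] = not bool(data.pop("safe_mode"))
        return data

print(ConfigSketch.model_validate({"safe_mode": True}))   # yolo_mode=False
print(ConfigSketch.model_validate({"safe_mode": False}))  # yolo_mode=True
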
ripperdoc/core/custom_commands.py CHANGED
@@ -14,7 +14,6 @@ Features:
 
 from __future__ import annotations
 
-import asyncio
 import re
 import subprocess
 from dataclasses import dataclass, field
@@ -24,7 +23,6 @@ from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple
 
 import yaml
 
-from ripperdoc.utils.coerce import parse_boolish, parse_optional_int
 from ripperdoc.utils.log import get_logger
 
 logger = get_logger()
@@ -96,13 +94,14 @@ def _split_frontmatter(raw_text: str) -> Tuple[Dict[str, Any], str]:
     for idx in range(1, len(lines)):
         if lines[idx].strip() == "---":
             frontmatter_text = "\n".join(lines[1:idx])
-            body = "\n".join(lines[idx + 1:])
+            body = "\n".join(lines[idx + 1 :])
             try:
                 frontmatter = yaml.safe_load(frontmatter_text) or {}
             except (yaml.YAMLError, ValueError, TypeError) as exc:
                 logger.warning(
                     "[custom_commands] Invalid frontmatter: %s: %s",
-                    type(exc).__name__, exc,
+                    type(exc).__name__,
+                    exc,
                 )
                 return {"__error__": f"Invalid frontmatter: {exc}"}, body
     return frontmatter, body
@@ -163,7 +162,8 @@ def _load_command_file(
     except (OSError, IOError, UnicodeDecodeError) as exc:
         logger.warning(
             "[custom_commands] Failed to read command file: %s: %s",
-            type(exc).__name__, exc,
+            type(exc).__name__,
+            exc,
             extra={"path": str(path)},
         )
         return None, CustomCommandLoadError(path=path, reason=f"Failed to read file: {exc}")
@@ -239,7 +239,8 @@ def _load_commands_from_dir(
     except OSError as exc:
         logger.warning(
             "[custom_commands] Failed to scan command directory: %s: %s",
-            type(exc).__name__, exc,
+            type(exc).__name__,
+            exc,
             extra={"path": str(commands_dir)},
         )