ripperdoc 0.2.2__py3-none-any.whl → 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +9 -2
  3. ripperdoc/cli/commands/agents_cmd.py +8 -4
  4. ripperdoc/cli/commands/context_cmd.py +3 -3
  5. ripperdoc/cli/commands/cost_cmd.py +5 -0
  6. ripperdoc/cli/commands/doctor_cmd.py +12 -4
  7. ripperdoc/cli/commands/memory_cmd.py +6 -13
  8. ripperdoc/cli/commands/models_cmd.py +36 -6
  9. ripperdoc/cli/commands/resume_cmd.py +4 -2
  10. ripperdoc/cli/commands/status_cmd.py +1 -1
  11. ripperdoc/cli/ui/rich_ui.py +135 -2
  12. ripperdoc/cli/ui/thinking_spinner.py +128 -0
  13. ripperdoc/core/agents.py +174 -6
  14. ripperdoc/core/config.py +9 -1
  15. ripperdoc/core/default_tools.py +6 -0
  16. ripperdoc/core/providers/__init__.py +47 -0
  17. ripperdoc/core/providers/anthropic.py +147 -0
  18. ripperdoc/core/providers/base.py +236 -0
  19. ripperdoc/core/providers/gemini.py +496 -0
  20. ripperdoc/core/providers/openai.py +253 -0
  21. ripperdoc/core/query.py +337 -141
  22. ripperdoc/core/query_utils.py +65 -24
  23. ripperdoc/core/system_prompt.py +67 -61
  24. ripperdoc/core/tool.py +12 -3
  25. ripperdoc/sdk/client.py +12 -1
  26. ripperdoc/tools/ask_user_question_tool.py +433 -0
  27. ripperdoc/tools/background_shell.py +104 -18
  28. ripperdoc/tools/bash_tool.py +33 -13
  29. ripperdoc/tools/enter_plan_mode_tool.py +223 -0
  30. ripperdoc/tools/exit_plan_mode_tool.py +150 -0
  31. ripperdoc/tools/file_edit_tool.py +13 -0
  32. ripperdoc/tools/file_read_tool.py +16 -0
  33. ripperdoc/tools/file_write_tool.py +13 -0
  34. ripperdoc/tools/glob_tool.py +5 -1
  35. ripperdoc/tools/ls_tool.py +14 -10
  36. ripperdoc/tools/mcp_tools.py +113 -4
  37. ripperdoc/tools/multi_edit_tool.py +12 -0
  38. ripperdoc/tools/notebook_edit_tool.py +12 -0
  39. ripperdoc/tools/task_tool.py +88 -5
  40. ripperdoc/tools/todo_tool.py +1 -3
  41. ripperdoc/tools/tool_search_tool.py +8 -4
  42. ripperdoc/utils/file_watch.py +134 -0
  43. ripperdoc/utils/git_utils.py +36 -38
  44. ripperdoc/utils/json_utils.py +1 -2
  45. ripperdoc/utils/log.py +3 -4
  46. ripperdoc/utils/mcp.py +49 -10
  47. ripperdoc/utils/memory.py +1 -3
  48. ripperdoc/utils/message_compaction.py +5 -11
  49. ripperdoc/utils/messages.py +9 -13
  50. ripperdoc/utils/output_utils.py +1 -3
  51. ripperdoc/utils/prompt.py +17 -0
  52. ripperdoc/utils/session_usage.py +7 -0
  53. ripperdoc/utils/shell_utils.py +159 -0
  54. ripperdoc/utils/token_estimation.py +33 -0
  55. {ripperdoc-0.2.2.dist-info → ripperdoc-0.2.4.dist-info}/METADATA +3 -1
  56. ripperdoc-0.2.4.dist-info/RECORD +99 -0
  57. ripperdoc-0.2.2.dist-info/RECORD +0 -86
  58. {ripperdoc-0.2.2.dist-info → ripperdoc-0.2.4.dist-info}/WHEEL +0 -0
  59. {ripperdoc-0.2.2.dist-info → ripperdoc-0.2.4.dist-info}/entry_points.txt +0 -0
  60. {ripperdoc-0.2.2.dist-info → ripperdoc-0.2.4.dist-info}/licenses/LICENSE +0 -0
  61. {ripperdoc-0.2.2.dist-info → ripperdoc-0.2.4.dist-info}/top_level.txt +0 -0
ripperdoc/__init__.py CHANGED
@@ -1,3 +1,3 @@
  """Ripperdoc - AI-powered coding agent."""

- __version__ = "0.2.2"
+ __version__ = "0.2.4"
ripperdoc/cli/cli.py CHANGED
@@ -31,6 +31,7 @@ from ripperdoc.utils.mcp import (
  )
  from ripperdoc.tools.mcp_tools import load_dynamic_mcp_tools_async, merge_tools_with_dynamic
  from ripperdoc.utils.log import enable_session_file_logging, get_logger
+ from ripperdoc.utils.prompt import prompt_secret

  from rich.console import Console
  from rich.markdown import Markdown
@@ -147,7 +148,9 @@ async def run_query(
  console.print("\n[yellow]Interrupted by user[/yellow]")
  except Exception as e:
  console.print(f"[red]Error: {escape(str(e))}[/red]")
- logger.exception("[cli] Unhandled error while running prompt", extra={"session_id": session_id})
+ logger.exception(
+     "[cli] Unhandled error while running prompt", extra={"session_id": session_id}
+ )
  if verbose:
  import traceback

@@ -197,7 +200,11 @@ def check_onboarding() -> bool:
  )
  api_base = click.prompt("API Base URL")

- api_key = click.prompt("Enter your API key", hide_input=True)
+ api_key = ""
+ while not api_key:
+     api_key = prompt_secret("Enter your API key").strip()
+     if not api_key:
+         console.print("[red]API key is required.[/red]")

  provider = ProviderType(provider_choice)

ripperdoc/cli/commands/agents_cmd.py CHANGED
@@ -31,14 +31,17 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  def print_agents_usage() -> None:
  console.print("[bold]/agents[/bold] — list configured agents")
  console.print(
- "[bold]/agents create <name> [location] [model][/bold] — create agent (location: user|project, default user)"
+ "[bold]/agents create <name> [location] [model][/bold] — "
+ "create agent (location: user|project, default user)"
  )
  console.print("[bold]/agents edit <name> [location][/bold] — edit an existing agent")
  console.print(
- "[bold]/agents delete <name> [location][/bold] — delete agent (location: user|project, default user)"
+ "[bold]/agents delete <name> [location][/bold] — "
+ "delete agent (location: user|project, default user)"
  )
  console.print(
- f"[dim]Agent files live in ~/.ripperdoc/{AGENT_DIR_NAME} or ./.ripperdoc/{AGENT_DIR_NAME}[/dim]"
+ f"[dim]Agent files live in ~/.ripperdoc/{AGENT_DIR_NAME} "
+ f"or ./.ripperdoc/{AGENT_DIR_NAME}[/dim]"
  )
  console.print(
  "[dim]Model can be a profile name or pointer (task/main/etc). Defaults to 'task'.[/dim]"
@@ -92,7 +95,8 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  and model_input not in pointer_map
  ):
  console.print(
- "[yellow]Model not found in profiles or pointers; will fall back to main if unavailable.[/yellow]"
+ "[yellow]Model not found in profiles or pointers; "
+ "will fall back to main if unavailable.[/yellow]"
  )

  try:
ripperdoc/cli/commands/context_cmd.py CHANGED
@@ -9,11 +9,11 @@ from ripperdoc.core.query import QueryContext
  from ripperdoc.core.system_prompt import build_system_prompt
  from ripperdoc.utils.memory import build_memory_instructions
  from ripperdoc.utils.message_compaction import (
- estimate_tokens_from_text,
  get_remaining_context_tokens,
  resolve_auto_compact_enabled,
  summarize_context_usage,
  )
+ from ripperdoc.utils.token_estimation import estimate_tokens
  from ripperdoc.utils.mcp import (
  estimate_mcp_tokens,
  format_mcp_instructions,
@@ -60,7 +60,7 @@ def _handle(ui: Any, _: str) -> bool:
  mcp_instructions=mcp_instructions,
  )
  memory_instructions = build_memory_instructions()
- memory_tokens = estimate_tokens_from_text(memory_instructions) if memory_instructions else 0
+ memory_tokens = estimate_tokens(memory_instructions) if memory_instructions else 0
  mcp_tokens = estimate_mcp_tokens(servers) if mcp_instructions else 0

  breakdown = summarize_context_usage(
@@ -98,7 +98,7 @@ def _handle(ui: Any, _: str) -> bool:
  display = f"{display} ({server})"
  try:
  schema = tool.input_schema.model_json_schema()
- token_est = estimate_tokens_from_text(json.dumps(schema, sort_keys=True))
+ token_est = estimate_tokens(json.dumps(schema, sort_keys=True))
  except Exception:
  token_est = 0
  lines.append(f" └ {display}: {format_tokens(token_est)} tokens")
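Both /context call sites above now use `estimate_tokens` from the new `ripperdoc/utils/token_estimation.py` (+33 lines) in place of `estimate_tokens_from_text` from `message_compaction`. The new module's body is not part of this excerpt; purely to illustrate the kind of helper being swapped in, a rough sketch under the common characters-per-token assumption might look like this:

# Hypothetical sketch only: the packaged estimate_tokens is not shown in this diff.
# A ~4-characters-per-token heuristic is a typical stand-in for rough context-budget
# displays like the /context breakdown above.
def estimate_tokens(text: str) -> int:
    """Return a rough token estimate for display purposes."""
    if not text:
        return 0
    return max(1, len(text) // 4)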
ripperdoc/cli/commands/cost_cmd.py CHANGED
@@ -32,6 +32,7 @@ def _handle(ui: Any, _: str) -> bool:
  total_cache_read = usage.total_cache_read_tokens
  total_cache_creation = usage.total_cache_creation_tokens
  total_tokens = total_input + total_output + total_cache_read + total_cache_creation
+ total_cost = usage.total_cost_usd

  ui.console.print("\n[bold]Session token usage[/bold]")
  ui.console.print(
@@ -44,6 +45,8 @@ def _handle(ui: Any, _: str) -> bool:
  f"{_fmt_tokens(total_cache_creation)} write"
  )
  ui.console.print(f" Requests: {usage.total_requests}")
+ if total_cost:
+     ui.console.print(f" Cost: ${total_cost:.4f}")
  if usage.total_duration_ms:
  ui.console.print(f" API time: {_format_duration(usage.total_duration_ms)}")

@@ -62,6 +65,8 @@ def _handle(ui: Any, _: str) -> bool:
  if stats.duration_ms:
  line += f", {_format_duration(stats.duration_ms)} total"
  line += ")"
+ if stats.cost_usd:
+     line += f", ${stats.cost_usd:.4f}"
  ui.console.print(line)

  return True
ripperdoc/cli/commands/doctor_cmd.py CHANGED
@@ -40,6 +40,7 @@ def _status_row(label: str, status: str, detail: str = "") -> Tuple[str, str, st
  def _api_key_status(provider: ProviderType, profile_key: Optional[str]) -> Tuple[str, str]:
  """Check API key presence and source."""
  import os
+
  for env_var in api_key_env_candidates(provider):
  if os.environ.get(env_var):
  masked = os.environ[env_var]
@@ -59,7 +60,9 @@ def _model_status(project_path: Path) -> List[Tuple[str, str, str]]:
  rows: List[Tuple[str, str, str]] = []

  if not profile:
- rows.append(_status_row("Model profile", "error", "No profile configured for pointer 'main'"))
+ rows.append(
+     _status_row("Model profile", "error", "No profile configured for pointer 'main'")
+ )
  return rows

  if pointer not in config.model_profiles:
@@ -147,10 +150,14 @@ def _project_status(project_path: Path) -> Tuple[str, str, str]:
  config = get_project_config(project_path)
  # Access a field to ensure model parsing does not throw.
  _ = len(config.allowed_tools)
- return _status_row("Project config", "ok", f".ripperdoc/config.json loaded for {project_path}")
+ return _status_row(
+     "Project config", "ok", f".ripperdoc/config.json loaded for {project_path}"
+ )
  except Exception as exc: # pragma: no cover - defensive
  logger.exception("[doctor] Failed to load project config", exc_info=exc)
- return _status_row("Project config", "warn", f"Could not read .ripperdoc/config.json: {exc}")
+ return _status_row(
+     "Project config", "warn", f"Could not read .ripperdoc/config.json: {exc}"
+ )


  def _render_table(console: Any, rows: List[Tuple[str, str, str]]) -> None:
@@ -185,7 +192,8 @@ def _handle(ui: Any, _: str) -> bool:
  ui.console.print(f" • {escape(err)}")

  ui.console.print(
- "\n[dim]If a check is failing, run `ripperdoc` without flags to rerun onboarding or update ~/.ripperdoc.json[/dim]"
+ "\n[dim]If a check is failing, run `ripperdoc` without flags "
+ "to rerun onboarding or update ~/.ripperdoc.json[/dim]"
  )
  return True

ripperdoc/cli/commands/memory_cmd.py CHANGED
@@ -103,7 +103,8 @@ def _open_in_editor(path: Path, console: Any) -> bool:
  editor_cmd = _determine_editor_command()
  if not editor_cmd:
  console.print(
- f"[yellow]No editor configured. Set $EDITOR or $VISUAL, or manually edit: {escape(str(path))}[/yellow]"
+ f"[yellow]No editor configured. Set $EDITOR or $VISUAL, "
+ f"or manually edit: {escape(str(path))}[/yellow]"
  )
  return False

@@ -152,9 +153,7 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  "global": "user",
  }
  if scope not in scope_aliases:
- ui.console.print(
- "[red]Unknown scope. Use one of: project, local, user.[/red]"
- )
+ ui.console.print("[red]Unknown scope. Use one of: project, local, user.[/red]")
  return True

  resolved_scope = scope_aliases[scope]
@@ -175,9 +174,7 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:

  _open_in_editor(target_path, ui.console)

- messages: List[str] = [
- f"{heading}: {escape(_shorten_path(target_path, project_path))}"
- ]
+ messages: List[str] = [f"{heading}: {escape(_shorten_path(target_path, project_path))}"]
  if created:
  messages.append("Created new memory file.")
  if gitignore_added:
@@ -189,12 +186,8 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  return True

  _render_memory_table(ui.console, project_path)
- ui.console.print(
- "[dim]Usage: /memory project | /memory local | /memory user[/dim]"
- )
- ui.console.print(
- "[dim]Project and user memories feed directly into the system prompt.[/dim]"
- )
+ ui.console.print("[dim]Usage: /memory project | /memory local | /memory user[/dim]")
+ ui.console.print("[dim]Project and user memories feed directly into the system prompt.[/dim]")
  return True

ripperdoc/cli/commands/models_cmd.py CHANGED
@@ -1,6 +1,4 @@
- from typing import Any
- from getpass import getpass
- from typing import Optional
+ from typing import Any, Optional

  from rich.markup import escape

@@ -14,6 +12,7 @@ from ripperdoc.core.config import (
  set_model_pointer,
  )
  from ripperdoc.utils.log import get_logger
+ from ripperdoc.utils.prompt import prompt_secret

  from .base import SlashCommand

@@ -110,9 +109,18 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  console.print("[red]Model name is required.[/red]")
  return True

- api_key_input = getpass("API key (leave blank to keep unset): ").strip()
+ api_key_input = prompt_secret("API key (leave blank to keep unset)").strip()
  api_key = api_key_input or (existing_profile.api_key if existing_profile else None)

+ auth_token = existing_profile.auth_token if existing_profile else None
+ if provider == ProviderType.ANTHROPIC:
+     auth_token_input = prompt_secret(
+         "Auth token (Anthropic only, leave blank to keep unset)"
+     ).strip()
+     auth_token = auth_token_input or auth_token
+ else:
+     auth_token = None
+
  api_base_default = existing_profile.api_base if existing_profile else ""
  api_base = (
  console.input(
@@ -163,6 +171,7 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  max_tokens=max_tokens,
  temperature=temperature,
  context_window=context_window,
+ auth_token=auth_token,
  )

  try:
@@ -213,8 +222,8 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  )

  api_key_label = "[set]" if existing_profile.api_key else "[not set]"
- api_key_prompt = f"API key {api_key_label} (Enter=keep, '-'=clear): "
- api_key_input = getpass(api_key_prompt).strip()
+ api_key_prompt = f"API key {api_key_label} (Enter=keep, '-'=clear)"
+ api_key_input = prompt_secret(api_key_prompt).strip()
  if api_key_input == "-":
  api_key = None
  elif api_key_input:
@@ -222,6 +231,21 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  else:
  api_key = existing_profile.api_key

+ auth_token = existing_profile.auth_token
+ if (
+     provider == ProviderType.ANTHROPIC
+     or existing_profile.provider == ProviderType.ANTHROPIC
+ ):
+     auth_label = "[set]" if auth_token else "[not set]"
+     auth_prompt = f"Auth token (Anthropic only) {auth_label} (Enter=keep, '-'=clear)"
+     auth_token_input = prompt_secret(auth_prompt).strip()
+     if auth_token_input == "-":
+         auth_token = None
+     elif auth_token_input:
+         auth_token = auth_token_input
+     else:
+         auth_token = None
+
  api_base = (
  console.input(f"API base (optional) [{existing_profile.api_base or ''}]: ").strip()
  or existing_profile.api_base
@@ -255,6 +279,7 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  max_tokens=max_tokens,
  temperature=temperature,
  context_window=context_window,
+ auth_token=auth_token,
  )

  try:
@@ -335,6 +360,11 @@ def _handle(ui: Any, trimmed_arg: str) -> bool:
  markup=False,
  )
  console.print(f" api_key: {'***' if profile.api_key else 'Not set'}", markup=False)
+ if profile.provider == ProviderType.ANTHROPIC:
+     console.print(
+         f" auth_token: {'***' if getattr(profile, 'auth_token', None) else 'Not set'}",
+         markup=False,
+     )
  if profile.openai_tool_mode:
  console.print(f" openai_tool_mode: {profile.openai_tool_mode}", markup=False)
  pointer_labels = ", ".join(f"{p}->{v or '-'}" for p, v in pointer_map.items())
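Here and in the onboarding flow in `cli.py`, `getpass` is replaced by `prompt_secret` from the new `ripperdoc/utils/prompt.py` (+17 lines, not included in this excerpt). A minimal sketch of what such a wrapper might look like, assuming it simply hides input and treats an aborted prompt as blank (the body below is an assumption; only the name and its call sites come from the diff):

# Hypothetical sketch only: ripperdoc/utils/prompt.py itself is not shown in this diff.
from getpass import getpass


def prompt_secret(label: str) -> str:
    """Prompt for a secret value without echoing it to the terminal."""
    try:
        return getpass(f"{label}: ")
    except (EOFError, KeyboardInterrupt):
        # Treat an aborted prompt as "left blank" so callers keep existing values.
        return ""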
ripperdoc/cli/commands/resume_cmd.py CHANGED
@@ -30,7 +30,8 @@ def _choose_session(ui: Any, arg: str) -> Optional[SessionSummary]:
  if 0 <= idx < len(sessions):
  return sessions[idx]
  ui.console.print(
- f"[red]Invalid session index {escape(str(idx))}. Choose 0-{len(sessions) - 1}.[/red]"
+ f"[red]Invalid session index {escape(str(idx))}. "
+ f"Choose 0-{len(sessions) - 1}.[/red]"
  )
  else:
  # Treat arg as session id if it matches.
@@ -60,7 +61,8 @@ def _choose_session(ui: Any, arg: str) -> Optional[SessionSummary]:
  idx = int(choice_text)
  if idx < 0 or idx >= len(sessions):
  ui.console.print(
- f"[red]Invalid session index {escape(str(idx))}. Choose 0-{len(sessions) - 1}.[/red]"
+ f"[red]Invalid session index {escape(str(idx))}. "
+ f"Choose 0-{len(sessions) - 1}.[/red]"
  )
  return None
  return sessions[idx]
ripperdoc/cli/commands/status_cmd.py CHANGED
@@ -34,7 +34,7 @@ def _auth_token_display(profile: Optional[ModelProfile]) -> Tuple[str, Optional[
  env_var = next((name for name in env_candidates if os.environ.get(name)), None)
  if env_var:
  return (f"{env_var} (env)", env_var)
- if profile.api_key:
+ if profile.api_key or getattr(profile, "auth_token", None):
  return ("Configured in profile", None)
  return ("Missing", None)

ripperdoc/cli/ui/rich_ui.py CHANGED
@@ -34,6 +34,7 @@ from ripperdoc.cli.commands import (
  from ripperdoc.cli.ui.helpers import get_profile_for_pointer
  from ripperdoc.core.permissions import make_permission_checker
  from ripperdoc.cli.ui.spinner import Spinner
+ from ripperdoc.cli.ui.thinking_spinner import ThinkingSpinner
  from ripperdoc.cli.ui.context_display import context_usage_lines
  from ripperdoc.utils.message_compaction import (
  compact_messages,
@@ -43,6 +44,7 @@ from ripperdoc.utils.message_compaction import (
  get_remaining_context_tokens,
  resolve_auto_compact_enabled,
  )
+ from ripperdoc.utils.token_estimation import estimate_tokens
  from ripperdoc.utils.mcp import (
  format_mcp_instructions,
  load_mcp_servers_async,
@@ -64,6 +66,95 @@ from ripperdoc.utils.log import enable_session_file_logging, get_logger
  # Type alias for conversation messages
  ConversationMessage = Union[UserMessage, AssistantMessage, ProgressMessage]

+ THINKING_WORDS: list[str] = [
+     "Accomplishing",
+     "Actioning",
+     "Actualizing",
+     "Baking",
+     "Booping",
+     "Brewing",
+     "Calculating",
+     "Cerebrating",
+     "Channelling",
+     "Churning",
+     "Clauding",
+     "Coalescing",
+     "Cogitating",
+     "Computing",
+     "Combobulating",
+     "Concocting",
+     "Conjuring",
+     "Considering",
+     "Contemplating",
+     "Cooking",
+     "Crafting",
+     "Creating",
+     "Crunching",
+     "Deciphering",
+     "Deliberating",
+     "Determining",
+     "Discombobulating",
+     "Divining",
+     "Doing",
+     "Effecting",
+     "Elucidating",
+     "Enchanting",
+     "Envisioning",
+     "Finagling",
+     "Flibbertigibbeting",
+     "Forging",
+     "Forming",
+     "Frolicking",
+     "Generating",
+     "Germinating",
+     "Hatching",
+     "Herding",
+     "Honking",
+     "Ideating",
+     "Imagining",
+     "Incubating",
+     "Inferring",
+     "Manifesting",
+     "Marinating",
+     "Meandering",
+     "Moseying",
+     "Mulling",
+     "Mustering",
+     "Musing",
+     "Noodling",
+     "Percolating",
+     "Perusing",
+     "Philosophising",
+     "Pontificating",
+     "Pondering",
+     "Processing",
+     "Puttering",
+     "Puzzling",
+     "Reticulating",
+     "Ruminating",
+     "Scheming",
+     "Schlepping",
+     "Shimmying",
+     "Simmering",
+     "Smooshing",
+     "Spelunking",
+     "Spinning",
+     "Stewing",
+     "Sussing",
+     "Synthesizing",
+     "Thinking",
+     "Tinkering",
+     "Transmuting",
+     "Unfurling",
+     "Unravelling",
+     "Vibing",
+     "Wandering",
+     "Whirring",
+     "Wibbling",
+     "Wizarding",
+     "Working",
+     "Wrangling",
+ ]

  console = Console()
  logger = get_logger()
@@ -357,6 +448,13 @@ class RichUI:
  else:
  success = getattr(tool_data, "success", None)
  failed = failed or (success is False)
+ failed = failed or bool(self._get_tool_field(tool_data, "is_error"))
+
+ warning_text = None
+ token_estimate = None
+ if tool_data is not None:
+     warning_text = self._get_tool_field(tool_data, "warning")
+     token_estimate = self._get_tool_field(tool_data, "token_estimate")

  if failed:
  if content:
@@ -365,6 +463,17 @@ class RichUI:
  self.console.print(f" ⎿ [red]{escape(sender)} failed[/red]")
  return

+ if warning_text:
+     self.console.print(f" ⎿ [yellow]{escape(str(warning_text))}[/yellow]")
+     if token_estimate:
+         self.console.print(
+             f" [dim]Estimated tokens: {escape(str(token_estimate))}[/dim]"
+         )
+ elif token_estimate and self.verbose:
+     self.console.print(
+         f" ⎿ [dim]Estimated tokens: {escape(str(token_estimate))}[/dim]"
+     )
+
  if not content:
  self.console.print(" ⎿ [dim]Tool completed[/]")
  return
@@ -739,7 +848,23 @@ class RichUI:
  },
  )

- spinner = Spinner(console, "Thinking...", spinner="dots")
+ prompt_tokens_est = estimate_conversation_tokens(messages, protocol=protocol)
+ spinner = ThinkingSpinner(console, prompt_tokens_est)
+
+ # Define pause/resume callbacks for tools that need user interaction
+ def pause_ui() -> None:
+     if spinner:
+         spinner.stop()
+
+ def resume_ui() -> None:
+     if spinner:
+         spinner.start()
+         spinner.update("Thinking...")
+
+ # Set the UI callbacks on the query context
+ self.query_context.pause_ui = pause_ui
+ self.query_context.resume_ui = resume_ui
+
  # Wrap permission checker to pause the spinner while waiting for user input.
  base_permission_checker = self._permission_checker

@@ -759,6 +884,7 @@ class RichUI:
  # Track tool uses by ID so results align even when multiple tools fire.
  tool_registry: Dict[str, Dict[str, Any]] = {}
  last_tool_name = None
+ output_token_est = 0

  try:
  spinner.start()
@@ -853,7 +979,14 @@ class RichUI:
  )
  elif message.content.startswith("Subagent"):
  self.display_message("Subagent", message.content, is_tool=True)
- spinner.update(f"Working... {message.content}")
+ if message.tool_use_id == "stream":
+     delta_tokens = estimate_tokens(message.content)
+     output_token_est += delta_tokens
+     spinner.update_tokens(output_token_est)
+ else:
+     spinner.update_tokens(
+         output_token_est, suffix=f"Working... {message.content}"
+     )

  # Add message to history
  self._log_message(message)
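The `pause_ui`/`resume_ui` callbacks attached to the query context above give interactive tools (such as the new `ask_user_question_tool`) a way to stop the spinner before prompting the user and restart it afterwards. A hypothetical sketch of that pattern, where only the two callback attributes come from this diff and the helper name and prompt call are illustrative:

# Hypothetical usage sketch; only pause_ui/resume_ui on the query context appear in the diff.
def prompt_user_with_spinner_paused(query_context, question: str) -> str:
    if getattr(query_context, "pause_ui", None):
        query_context.pause_ui()      # stop the ThinkingSpinner so the prompt is readable
    try:
        return input(f"{question} ")  # blocking user prompt while the spinner is stopped
    finally:
        if getattr(query_context, "resume_ui", None):
            query_context.resume_ui() # restart the spinner once input is captured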
ripperdoc/cli/ui/thinking_spinner.py ADDED
@@ -0,0 +1,128 @@
+ """Specialized spinner that shows token progress with playful verbs."""
+
+ from __future__ import annotations
+
+ import random
+ import time
+ from typing import Optional
+
+ from rich.console import Console
+
+ from ripperdoc.cli.ui.spinner import Spinner
+
+
+ THINKING_WORDS: list[str] = [
+     "Accomplishing",
+     "Actioning",
+     "Actualizing",
+     "Baking",
+     "Booping",
+     "Brewing",
+     "Calculating",
+     "Cerebrating",
+     "Channelling",
+     "Churning",
+     "Clauding",
+     "Coalescing",
+     "Cogitating",
+     "Computing",
+     "Combobulating",
+     "Concocting",
+     "Conjuring",
+     "Considering",
+     "Contemplating",
+     "Cooking",
+     "Crafting",
+     "Creating",
+     "Crunching",
+     "Deciphering",
+     "Deliberating",
+     "Determining",
+     "Discombobulating",
+     "Divining",
+     "Doing",
+     "Effecting",
+     "Elucidating",
+     "Enchanting",
+     "Envisioning",
+     "Finagling",
+     "Flibbertigibbeting",
+     "Forging",
+     "Forming",
+     "Frolicking",
+     "Generating",
+     "Germinating",
+     "Hatching",
+     "Herding",
+     "Honking",
+     "Ideating",
+     "Imagining",
+     "Incubating",
+     "Inferring",
+     "Manifesting",
+     "Marinating",
+     "Meandering",
+     "Moseying",
+     "Mulling",
+     "Mustering",
+     "Musing",
+     "Noodling",
+     "Percolating",
+     "Perusing",
+     "Philosophising",
+     "Pontificating",
+     "Pondering",
+     "Processing",
+     "Puttering",
+     "Puzzling",
+     "Reticulating",
+     "Ruminating",
+     "Scheming",
+     "Schlepping",
+     "Shimmying",
+     "Simmering",
+     "Smooshing",
+     "Spelunking",
+     "Spinning",
+     "Stewing",
+     "Sussing",
+     "Synthesizing",
+     "Thinking",
+     "Tinkering",
+     "Transmuting",
+     "Unfurling",
+     "Unravelling",
+     "Vibing",
+     "Wandering",
+     "Whirring",
+     "Wibbling",
+     "Wizarding",
+     "Working",
+     "Wrangling",
+ ]
+
+
+ class ThinkingSpinner(Spinner):
+     """Spinner that shows elapsed time and token progress."""
+
+     def __init__(self, console: Console, prompt_tokens: int) -> None:
+         self.prompt_tokens = prompt_tokens
+         self.start_time = time.monotonic()
+         self.out_tokens = 0
+         self.thinking_word = random.choice(THINKING_WORDS)
+         super().__init__(console, self._format_text(), spinner="dots")
+
+     def _format_text(self, suffix: Optional[str] = None) -> str:
+         elapsed = int(time.monotonic() - self.start_time)
+         base = f"✽ {self.thinking_word}… (esc to interrupt · {elapsed}s"
+         if self.out_tokens > 0:
+             base += f" · ↓ {self.out_tokens} tokens"
+         else:
+             base += f" · ↑ {self.prompt_tokens} tokens"
+         if suffix:
+             base += f" · {suffix}"
+         return base + ")"
+
+     def update_tokens(self, out_tokens: int, suffix: Optional[str] = None) -> None:
+         self.out_tokens = max(0, out_tokens)
+         self.update(self._format_text(suffix))
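A short usage sketch for the class above, mirroring how `rich_ui.py` drives it (the console and token counts here are illustrative):

# Usage sketch based on the ThinkingSpinner API shown above; start/stop/update come from the Spinner base class.
from rich.console import Console

from ripperdoc.cli.ui.thinking_spinner import ThinkingSpinner

console = Console()
spinner = ThinkingSpinner(console, prompt_tokens=1200)
spinner.start()              # e.g. "✽ Pondering… (esc to interrupt · 0s · ↑ 1200 tokens)"
spinner.update_tokens(250)   # counter flips to "↓ 250 tokens" once output streams in
spinner.update_tokens(900, suffix="Working... Subagent finished")
spinner.stop()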