ripperdoc 0.2.0__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +74 -9
  3. ripperdoc/cli/commands/__init__.py +4 -0
  4. ripperdoc/cli/commands/agents_cmd.py +30 -4
  5. ripperdoc/cli/commands/context_cmd.py +11 -1
  6. ripperdoc/cli/commands/cost_cmd.py +5 -0
  7. ripperdoc/cli/commands/doctor_cmd.py +208 -0
  8. ripperdoc/cli/commands/memory_cmd.py +202 -0
  9. ripperdoc/cli/commands/models_cmd.py +61 -6
  10. ripperdoc/cli/commands/resume_cmd.py +4 -2
  11. ripperdoc/cli/commands/status_cmd.py +1 -1
  12. ripperdoc/cli/commands/tasks_cmd.py +27 -0
  13. ripperdoc/cli/ui/rich_ui.py +258 -11
  14. ripperdoc/cli/ui/thinking_spinner.py +128 -0
  15. ripperdoc/core/agents.py +14 -4
  16. ripperdoc/core/config.py +56 -3
  17. ripperdoc/core/default_tools.py +16 -2
  18. ripperdoc/core/permissions.py +19 -0
  19. ripperdoc/core/providers/__init__.py +31 -0
  20. ripperdoc/core/providers/anthropic.py +136 -0
  21. ripperdoc/core/providers/base.py +187 -0
  22. ripperdoc/core/providers/gemini.py +172 -0
  23. ripperdoc/core/providers/openai.py +142 -0
  24. ripperdoc/core/query.py +510 -386
  25. ripperdoc/core/query_utils.py +578 -0
  26. ripperdoc/core/system_prompt.py +2 -1
  27. ripperdoc/core/tool.py +16 -1
  28. ripperdoc/sdk/client.py +12 -1
  29. ripperdoc/tools/background_shell.py +63 -21
  30. ripperdoc/tools/bash_tool.py +48 -13
  31. ripperdoc/tools/file_edit_tool.py +20 -0
  32. ripperdoc/tools/file_read_tool.py +23 -0
  33. ripperdoc/tools/file_write_tool.py +20 -0
  34. ripperdoc/tools/glob_tool.py +59 -15
  35. ripperdoc/tools/grep_tool.py +7 -0
  36. ripperdoc/tools/ls_tool.py +246 -73
  37. ripperdoc/tools/mcp_tools.py +32 -10
  38. ripperdoc/tools/multi_edit_tool.py +23 -0
  39. ripperdoc/tools/notebook_edit_tool.py +18 -3
  40. ripperdoc/tools/task_tool.py +7 -0
  41. ripperdoc/tools/todo_tool.py +157 -25
  42. ripperdoc/tools/tool_search_tool.py +17 -4
  43. ripperdoc/utils/file_watch.py +134 -0
  44. ripperdoc/utils/git_utils.py +274 -0
  45. ripperdoc/utils/json_utils.py +27 -0
  46. ripperdoc/utils/log.py +129 -29
  47. ripperdoc/utils/mcp.py +71 -6
  48. ripperdoc/utils/memory.py +12 -1
  49. ripperdoc/utils/message_compaction.py +22 -5
  50. ripperdoc/utils/messages.py +72 -17
  51. ripperdoc/utils/output_utils.py +34 -9
  52. ripperdoc/utils/permissions/path_validation_utils.py +6 -0
  53. ripperdoc/utils/prompt.py +17 -0
  54. ripperdoc/utils/safe_get_cwd.py +4 -0
  55. ripperdoc/utils/session_history.py +27 -9
  56. ripperdoc/utils/session_usage.py +7 -0
  57. ripperdoc/utils/shell_utils.py +159 -0
  58. ripperdoc/utils/todo.py +2 -2
  59. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.3.dist-info}/METADATA +4 -2
  60. ripperdoc-0.2.3.dist-info/RECORD +95 -0
  61. ripperdoc-0.2.0.dist-info/RECORD +0 -81
  62. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.3.dist-info}/WHEEL +0 -0
  63. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.3.dist-info}/entry_points.txt +0 -0
  64. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.3.dist-info}/licenses/LICENSE +0 -0
  65. {ripperdoc-0.2.0.dist-info → ripperdoc-0.2.3.dist-info}/top_level.txt +0 -0
ripperdoc/cli/ui/rich_ui.py CHANGED
@@ -34,10 +34,12 @@ from ripperdoc.cli.commands import (
  from ripperdoc.cli.ui.helpers import get_profile_for_pointer
  from ripperdoc.core.permissions import make_permission_checker
  from ripperdoc.cli.ui.spinner import Spinner
+ from ripperdoc.cli.ui.thinking_spinner import ThinkingSpinner
  from ripperdoc.cli.ui.context_display import context_usage_lines
  from ripperdoc.utils.message_compaction import (
      compact_messages,
      estimate_conversation_tokens,
+     estimate_tokens_from_text,
      estimate_used_tokens,
      get_context_usage_status,
      get_remaining_context_tokens,
@@ -59,12 +61,103 @@ from ripperdoc.utils.messages import (
      create_user_message,
      create_assistant_message,
  )
+ from ripperdoc.utils.log import enable_session_file_logging, get_logger

  # Type alias for conversation messages
  ConversationMessage = Union[UserMessage, AssistantMessage, ProgressMessage]

+ THINKING_WORDS: list[str] = [
+     "Accomplishing",
+     "Actioning",
+     "Actualizing",
+     "Baking",
+     "Booping",
+     "Brewing",
+     "Calculating",
+     "Cerebrating",
+     "Channelling",
+     "Churning",
+     "Clauding",
+     "Coalescing",
+     "Cogitating",
+     "Computing",
+     "Combobulating",
+     "Concocting",
+     "Conjuring",
+     "Considering",
+     "Contemplating",
+     "Cooking",
+     "Crafting",
+     "Creating",
+     "Crunching",
+     "Deciphering",
+     "Deliberating",
+     "Determining",
+     "Discombobulating",
+     "Divining",
+     "Doing",
+     "Effecting",
+     "Elucidating",
+     "Enchanting",
+     "Envisioning",
+     "Finagling",
+     "Flibbertigibbeting",
+     "Forging",
+     "Forming",
+     "Frolicking",
+     "Generating",
+     "Germinating",
+     "Hatching",
+     "Herding",
+     "Honking",
+     "Ideating",
+     "Imagining",
+     "Incubating",
+     "Inferring",
+     "Manifesting",
+     "Marinating",
+     "Meandering",
+     "Moseying",
+     "Mulling",
+     "Mustering",
+     "Musing",
+     "Noodling",
+     "Percolating",
+     "Perusing",
+     "Philosophising",
+     "Pontificating",
+     "Pondering",
+     "Processing",
+     "Puttering",
+     "Puzzling",
+     "Reticulating",
+     "Ruminating",
+     "Scheming",
+     "Schlepping",
+     "Shimmying",
+     "Simmering",
+     "Smooshing",
+     "Spelunking",
+     "Spinning",
+     "Stewing",
+     "Sussing",
+     "Synthesizing",
+     "Thinking",
+     "Tinkering",
+     "Transmuting",
+     "Unfurling",
+     "Unravelling",
+     "Vibing",
+     "Wandering",
+     "Whirring",
+     "Wibbling",
+     "Wizarding",
+     "Working",
+     "Wrangling",
+ ]

  console = Console()
+ logger = get_logger()

  # Keep a small window of recent messages alongside the summary after /compact so
  # the model retains immediate context.
@@ -110,7 +203,13 @@ def create_status_bar() -> Text:
  class RichUI:
      """Rich-based UI for Ripperdoc."""

-     def __init__(self, safe_mode: bool = False, verbose: bool = False):
+     def __init__(
+         self,
+         safe_mode: bool = False,
+         verbose: bool = False,
+         session_id: Optional[str] = None,
+         log_file_path: Optional[Path] = None,
+     ):
          self.console = console
          self.safe_mode = safe_mode
          self.verbose = verbose
@@ -124,7 +223,22 @@ class RichUI:
          self._prompt_session: Optional[PromptSession] = None
          self.project_path = Path.cwd()
          # Track a stable session identifier for the current UI run.
-         self.session_id = str(uuid.uuid4())
+         self.session_id = session_id or str(uuid.uuid4())
+         if log_file_path:
+             self.log_file_path = log_file_path
+             logger.attach_file_handler(self.log_file_path)
+         else:
+             self.log_file_path = enable_session_file_logging(self.project_path, self.session_id)
+         logger.info(
+             "[ui] Initialized Rich UI session",
+             extra={
+                 "session_id": self.session_id,
+                 "project_path": str(self.project_path),
+                 "log_file": str(self.log_file_path),
+                 "safe_mode": self.safe_mode,
+                 "verbose": self.verbose,
+             },
+         )
          self._session_history = SessionHistory(self.project_path, self.session_id)
          self._permission_checker = (
              make_permission_checker(self.project_path, safe_mode) if safe_mode else None
@@ -138,6 +252,15 @@ class RichUI:
      def _set_session(self, session_id: str) -> None:
          """Switch to a different session id and reset logging."""
          self.session_id = session_id
+         self.log_file_path = enable_session_file_logging(self.project_path, self.session_id)
+         logger.info(
+             "[ui] Switched session",
+             extra={
+                 "session_id": self.session_id,
+                 "project_path": str(self.project_path),
+                 "log_file": str(self.log_file_path),
+             },
+         )
          self._session_history = SessionHistory(self.project_path, session_id)

      def _log_message(self, message: Any) -> None:
@@ -146,7 +269,10 @@ class RichUI:
              self._session_history.append(message)
          except Exception:
              # Logging failures should never interrupt the UI flow
-             return
+             logger.exception(
+                 "[ui] Failed to append message to session history",
+                 extra={"session_id": self.session_id},
+             )

      def _append_prompt_history(self, text: str) -> None:
          """Append text to the interactive prompt history."""
@@ -156,7 +282,10 @@ class RichUI:
          try:
              session.history.append_string(text)
          except Exception:
-             return
+             logger.exception(
+                 "[ui] Failed to append prompt history",
+                 extra={"session_id": self.session_id},
+             )

      def replay_conversation(self, messages: List[Dict[str, Any]]) -> None:
          """Render a conversation history in the console and seed prompt history."""
@@ -200,6 +329,7 @@ class RichUI:
          tool_type: Optional[str] = None,
          tool_args: Optional[dict] = None,
          tool_data: Any = None,
+         tool_error: bool = False,
      ) -> None:
          """Display a message in the conversation."""
          if not is_tool:
@@ -211,7 +341,7 @@ class RichUI:
              return

          if tool_type == "result":
-             self._print_tool_result(sender, content, tool_data)
+             self._print_tool_result(sender, content, tool_data, tool_error)
              return

          self._print_generic_tool(sender, content)
@@ -307,8 +437,25 @@ class RichUI:

          self.console.print(f"[dim cyan]{escape(tool_display)}[/]")

-     def _print_tool_result(self, sender: str, content: str, tool_data: Any) -> None:
+     def _print_tool_result(
+         self, sender: str, content: str, tool_data: Any, tool_error: bool = False
+     ) -> None:
          """Render a tool result summary."""
+         failed = tool_error
+         if tool_data is not None:
+             if isinstance(tool_data, dict):
+                 failed = failed or (tool_data.get("success") is False)
+             else:
+                 success = getattr(tool_data, "success", None)
+                 failed = failed or (success is False)
+
+         if failed:
+             if content:
+                 self.console.print(f" ⎿ [red]{escape(content)}[/red]")
+             else:
+                 self.console.print(f" ⎿ [red]{escape(sender)} failed[/red]")
+             return
+
          if not content:
              self.console.print(" ⎿ [dim]Tool completed[/]")
              return
@@ -576,6 +723,20 @@ class RichUI:
              self.query_context = QueryContext(
                  tools=self.get_default_tools(), safe_mode=self.safe_mode, verbose=self.verbose
              )
+         else:
+             # Clear any prior abort so new queries aren't immediately interrupted.
+             abort_controller = getattr(self.query_context, "abort_controller", None)
+             if abort_controller is not None:
+                 abort_controller.clear()
+
+         logger.info(
+             "[ui] Starting query processing",
+             extra={
+                 "session_id": self.session_id,
+                 "prompt_length": len(user_input),
+                 "prompt_preview": user_input[:200],
+             },
+         )

          try:
              context: Dict[str, str] = {}
@@ -585,6 +746,15 @@ class RichUI:
              self.query_context.tools = merge_tools_with_dynamic(
                  self.query_context.tools, dynamic_tools
              )
+             logger.debug(
+                 "[ui] Prepared tools and MCP servers",
+                 extra={
+                     "session_id": self.session_id,
+                     "tool_count": len(self.query_context.tools),
+                     "mcp_servers": len(servers),
+                     "dynamic_tools": len(dynamic_tools),
+                 },
+             )
              mcp_instructions = format_mcp_instructions(servers)
              base_system_prompt = build_system_prompt(
                  self.query_context.tools,
@@ -617,6 +787,16 @@ class RichUI:
              usage_status = get_context_usage_status(
                  used_tokens, max_context_tokens, auto_compact_enabled
              )
+             logger.debug(
+                 "[ui] Context usage snapshot",
+                 extra={
+                     "session_id": self.session_id,
+                     "used_tokens": used_tokens,
+                     "max_context_tokens": max_context_tokens,
+                     "percent_used": round(usage_status.percent_used, 2),
+                     "auto_compact_enabled": auto_compact_enabled,
+                 },
+             )

              if usage_status.is_above_warning:
                  console.print(
@@ -639,8 +819,19 @@ class RichUI:
                      f"[yellow]Auto-compacted conversation (saved ~{compaction.tokens_saved} tokens). "
                      f"Estimated usage: {compaction.tokens_after}/{max_context_tokens} tokens.[/yellow]"
                  )
+                 logger.info(
+                     "[ui] Auto-compacted conversation",
+                     extra={
+                         "session_id": self.session_id,
+                         "tokens_before": compaction.tokens_before,
+                         "tokens_after": compaction.tokens_after,
+                         "tokens_saved": compaction.tokens_saved,
+                         "cleared_tool_ids": list(compaction.cleared_tool_ids),
+                     },
+                 )

-             spinner = Spinner(console, "Thinking...", spinner="dots")
+             prompt_tokens_est = estimate_conversation_tokens(messages, protocol=protocol)
+             spinner = ThinkingSpinner(console, prompt_tokens_est)
              # Wrap permission checker to pause the spinner while waiting for user input.
              base_permission_checker = self._permission_checker

@@ -660,6 +851,7 @@ class RichUI:
              # Track tool uses by ID so results align even when multiple tools fire.
              tool_registry: Dict[str, Dict[str, Any]] = {}
              last_tool_name = None
+             output_token_est = 0

              try:
                  spinner.start()
@@ -715,6 +907,7 @@ class RichUI:
                          ):
                              tool_name = "Tool"
                              tool_data = getattr(message, "tool_use_result", None)
+                             is_error = bool(getattr(block, "is_error", False))

                              tool_use_id = getattr(block, "tool_use_id", None)
                              entry = tool_registry.get(tool_use_id) if tool_use_id else None
@@ -738,6 +931,7 @@ class RichUI:
                                  is_tool=True,
                                  tool_type="result",
                                  tool_data=tool_data,
+                                 tool_error=is_error,
                              )

                  elif message.type == "progress" and isinstance(message, ProgressMessage):
@@ -752,22 +946,43 @@ class RichUI:
                          )
                      elif message.content.startswith("Subagent"):
                          self.display_message("Subagent", message.content, is_tool=True)
-                     spinner.update(f"Working... {message.content}")
+                     if message.tool_use_id == "stream":
+                         delta_tokens = estimate_tokens_from_text(message.content)
+                         output_token_est += delta_tokens
+                         spinner.update_tokens(output_token_est)
+                     else:
+                         spinner.update_tokens(
+                             output_token_est, suffix=f"Working... {message.content}"
+                         )

                      # Add message to history
                      self._log_message(message)
                      messages.append(message)  # type: ignore[arg-type]
              except Exception as e:
+                 logger.exception(
+                     "[ui] Unhandled error while processing streamed query response",
+                     extra={"session_id": self.session_id},
+                 )
                  self.display_message("System", f"Error: {str(e)}", is_tool=True)
              finally:
                  # Ensure spinner stops even on exceptions
                  try:
                      spinner.stop()
                  except Exception:
-                     pass
+                     logger.exception(
+                         "[ui] Failed to stop spinner", extra={"session_id": self.session_id}
+                     )

              # Update conversation history
              self.conversation_messages = messages
+             logger.info(
+                 "[ui] Query processing completed",
+                 extra={
+                     "session_id": self.session_id,
+                     "conversation_messages": len(self.conversation_messages),
+                     "project_path": str(self.project_path),
+                 },
+             )
          finally:
              await shutdown_mcp_runtime()
              await shutdown_mcp_runtime()
@@ -838,6 +1053,10 @@ class RichUI:
          console.print("[dim]Tip: type '/' then press Tab to see available commands.[/dim]\n")

          session = self.get_prompt_session()
+         logger.info(
+             "[ui] Starting interactive loop",
+             extra={"session_id": self.session_id, "log_file": str(self.log_file_path)},
+         )

          while not self._should_exit:
              try:
@@ -854,6 +1073,10 @@ class RichUI:

                  # Handle slash commands locally
                  if user_input.startswith("/"):
+                     logger.debug(
+                         "[ui] Received slash command",
+                         extra={"session_id": self.session_id, "command": user_input},
+                     )
                      handled = self.handle_slash_command(user_input)
                      if self._should_exit:
                          break
@@ -862,6 +1085,14 @@ class RichUI:
                          continue

                  # Process the query
+                 logger.info(
+                     "[ui] Processing interactive prompt",
+                     extra={
+                         "session_id": self.session_id,
+                         "prompt_length": len(user_input),
+                         "prompt_preview": user_input[:200],
+                     },
+                 )
                  asyncio.run(self.process_query(user_input))

                  console.print()  # Add spacing between interactions
@@ -874,6 +1105,9 @@ class RichUI:
                  break
              except Exception as e:
                  console.print(f"[red]Error: {escape(str(e))}[/]")
+                 logger.exception(
+                     "[ui] Error in interactive loop", extra={"session_id": self.session_id}
+                 )
                  if self.verbose:
                      import traceback

@@ -903,6 +1137,9 @@ class RichUI:
              )
          except Exception as e:
              console.print(f"[red]Error during compaction: {escape(str(e))}[/red]")
+             logger.exception(
+                 "[ui] Error during manual compaction", extra={"session_id": self.session_id}
+             )
              return
          finally:
              spinner.stop()
@@ -1009,7 +1246,12 @@ def check_onboarding_rich() -> bool:
      return check_onboarding()


- def main_rich(safe_mode: bool = False, verbose: bool = False) -> None:
+ def main_rich(
+     safe_mode: bool = False,
+     verbose: bool = False,
+     session_id: Optional[str] = None,
+     log_file_path: Optional[Path] = None,
+ ) -> None:
      """Main entry point for Rich interface."""

      # Ensure onboarding is complete
@@ -1017,7 +1259,12 @@ def main_rich(safe_mode: bool = False, verbose: bool = False) -> None:
          sys.exit(1)

      # Run the Rich UI
-     ui = RichUI(safe_mode=safe_mode, verbose=verbose)
+     ui = RichUI(
+         safe_mode=safe_mode,
+         verbose=verbose,
+         session_id=session_id,
+         log_file_path=log_file_path,
+     )
      ui.run()


ripperdoc/cli/ui/thinking_spinner.py ADDED
@@ -0,0 +1,128 @@
+ """Specialized spinner that shows token progress with playful verbs."""
+
+ from __future__ import annotations
+
+ import random
+ import time
+ from typing import Optional
+
+ from rich.console import Console
+
+ from ripperdoc.cli.ui.spinner import Spinner
+
+
+ THINKING_WORDS: list[str] = [
+     "Accomplishing",
+     "Actioning",
+     "Actualizing",
+     "Baking",
+     "Booping",
+     "Brewing",
+     "Calculating",
+     "Cerebrating",
+     "Channelling",
+     "Churning",
+     "Clauding",
+     "Coalescing",
+     "Cogitating",
+     "Computing",
+     "Combobulating",
+     "Concocting",
+     "Conjuring",
+     "Considering",
+     "Contemplating",
+     "Cooking",
+     "Crafting",
+     "Creating",
+     "Crunching",
+     "Deciphering",
+     "Deliberating",
+     "Determining",
+     "Discombobulating",
+     "Divining",
+     "Doing",
+     "Effecting",
+     "Elucidating",
+     "Enchanting",
+     "Envisioning",
+     "Finagling",
+     "Flibbertigibbeting",
+     "Forging",
+     "Forming",
+     "Frolicking",
+     "Generating",
+     "Germinating",
+     "Hatching",
+     "Herding",
+     "Honking",
+     "Ideating",
+     "Imagining",
+     "Incubating",
+     "Inferring",
+     "Manifesting",
+     "Marinating",
+     "Meandering",
+     "Moseying",
+     "Mulling",
+     "Mustering",
+     "Musing",
+     "Noodling",
+     "Percolating",
+     "Perusing",
+     "Philosophising",
+     "Pontificating",
+     "Pondering",
+     "Processing",
+     "Puttering",
+     "Puzzling",
+     "Reticulating",
+     "Ruminating",
+     "Scheming",
+     "Schlepping",
+     "Shimmying",
+     "Simmering",
+     "Smooshing",
+     "Spelunking",
+     "Spinning",
+     "Stewing",
+     "Sussing",
+     "Synthesizing",
+     "Thinking",
+     "Tinkering",
+     "Transmuting",
+     "Unfurling",
+     "Unravelling",
+     "Vibing",
+     "Wandering",
+     "Whirring",
+     "Wibbling",
+     "Wizarding",
+     "Working",
+     "Wrangling",
+ ]
+
+
+ class ThinkingSpinner(Spinner):
+     """Spinner that shows elapsed time and token progress."""
+
+     def __init__(self, console: Console, prompt_tokens: int) -> None:
+         self.prompt_tokens = prompt_tokens
+         self.start_time = time.monotonic()
+         self.out_tokens = 0
+         self.thinking_word = random.choice(THINKING_WORDS)
+         super().__init__(console, self._format_text(), spinner="dots")
+
+     def _format_text(self, suffix: Optional[str] = None) -> str:
+         elapsed = int(time.monotonic() - self.start_time)
+         base = f"✽ {self.thinking_word}… (esc to interrupt · {elapsed}s"
+         if self.out_tokens > 0:
+             base += f" · ↓ {self.out_tokens} tokens"
+         else:
+             base += f" · ↑ {self.prompt_tokens} tokens"
+         if suffix:
+             base += f" · {suffix}"
+         return base + ")"
+
+     def update_tokens(self, out_tokens: int, suffix: Optional[str] = None) -> None:
+         self.out_tokens = max(0, out_tokens)
+         self.update(self._format_text(suffix))
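The new ThinkingSpinner is driven from rich_ui.py as the hunks above show: it is constructed with an estimated prompt token count and then fed a running output-token estimate while the response streams. A minimal usage sketch follows (illustrative only; it assumes the base Spinner class provides the start/stop/update methods the diff relies on, and the chunk loop with its crude length-based token count stands in for the real streaming loop and estimate_tokens_from_text):

    # Illustrative sketch only -- mirrors how rich_ui.py 0.2.3 drives ThinkingSpinner.
    from rich.console import Console
    from ripperdoc.cli.ui.thinking_spinner import ThinkingSpinner

    console = Console()
    spinner = ThinkingSpinner(console, prompt_tokens=1200)  # rough prompt-size estimate
    spinner.start()
    try:
        output_tokens = 0
        for chunk in ("partial ", "streamed ", "text"):  # stand-in for streamed deltas
            output_tokens += max(1, len(chunk) // 4)  # crude token estimate, illustrative only
            spinner.update_tokens(output_tokens)
        spinner.update_tokens(output_tokens, suffix="Working... finalizing")
    finally:
        spinner.stop()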
ripperdoc/core/agents.py CHANGED
@@ -50,9 +50,19 @@ class AgentLoadResult:
      failed_files: List[Tuple[Path, str]]


- GENERAL_AGENT_PROMPT = """You are a general-purpose subagent for Ripperdoc. Work autonomously on the task provided by the parent agent. Use the allowed tools to research, edit files, and run commands as needed. When you finish, provide a concise report describing what you changed, what you investigated, and any follow-ups the parent agent should share with the user."""
+ GENERAL_AGENT_PROMPT = (
+     "You are a general-purpose subagent for Ripperdoc. Work autonomously on the task "
+     "provided by the parent agent. Use the allowed tools to research, edit files, and "
+     "run commands as needed. When you finish, provide a concise report describing what "
+     "you changed, what you investigated, and any follow-ups the parent agent should "
+     "share with the user."
+ )

- CODE_REVIEW_AGENT_PROMPT = """You are a code review subagent. Inspect the code and summarize risks, bugs, missing tests, security concerns, and regressions. Do not make code changes. Provide clear, actionable feedback that the parent agent can relay to the user."""
+ CODE_REVIEW_AGENT_PROMPT = (
+     "You are a code review subagent. Inspect the code and summarize risks, bugs, "
+     "missing tests, security concerns, and regressions. Do not make code changes. "
+     "Provide clear, actionable feedback that the parent agent can relay to the user."
+ )


  def _built_in_agents() -> List[AgentDefinition]:
@@ -109,7 +119,7 @@ def _split_frontmatter(raw_text: str) -> Tuple[Dict[str, Any], str]:
          try:
              frontmatter = yaml.safe_load(frontmatter_text) or {}
          except Exception as exc:  # pragma: no cover - defensive
-             logger.error(f"Invalid frontmatter in agent file: {exc}")
+             logger.exception("Invalid frontmatter in agent file", extra={"error": str(exc)})
              return {"__error__": f"Invalid frontmatter: {exc}"}, body
          return frontmatter, body
      return {}, raw_text
@@ -136,7 +146,7 @@ def _parse_agent_file(
      try:
          text = path.read_text(encoding="utf-8")
      except Exception as exc:
-         logger.error(f"Failed to read agent file {path}: {exc}")
+         logger.exception("Failed to read agent file", extra={"error": str(exc), "path": str(path)})
          return None, f"Failed to read agent file {path}: {exc}"

      frontmatter, body = _split_frontmatter(text)
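Across both files this release also replaces interpolated logger.error(f"...") calls with logger.exception(...) plus structured "extra" fields, so tracebacks and machine-readable context are recorded together. A rough sketch of that pattern using only the standard library (ripperdoc's own get_logger()/enable_session_file_logging() wrappers in ripperdoc/utils/log.py are not shown in this diff, and the read_agent_file helper below is hypothetical):

    # Illustrative sketch of the structured-logging pattern, stdlib logging only.
    import logging
    from pathlib import Path
    from typing import Optional

    logger = logging.getLogger("ripperdoc")  # assumed logger name, for illustration

    def read_agent_file(path: Path) -> Optional[str]:  # hypothetical helper
        try:
            return path.read_text(encoding="utf-8")
        except Exception as exc:
            # logger.exception records the traceback; "extra" attaches structured fields
            # that a custom formatter or handler can surface alongside the message.
            logger.exception(
                "Failed to read agent file",
                extra={"error": str(exc), "path": str(path)},
            )
            return None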