ripperdoc 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. ripperdoc/__init__.py +1 -1
  2. ripperdoc/cli/cli.py +28 -115
  3. ripperdoc/cli/commands/__init__.py +0 -1
  4. ripperdoc/cli/commands/agents_cmd.py +6 -3
  5. ripperdoc/cli/commands/clear_cmd.py +1 -4
  6. ripperdoc/cli/commands/config_cmd.py +1 -1
  7. ripperdoc/cli/commands/context_cmd.py +3 -2
  8. ripperdoc/cli/commands/doctor_cmd.py +18 -4
  9. ripperdoc/cli/commands/hooks_cmd.py +27 -53
  10. ripperdoc/cli/commands/models_cmd.py +26 -9
  11. ripperdoc/cli/commands/permissions_cmd.py +27 -9
  12. ripperdoc/cli/commands/resume_cmd.py +5 -3
  13. ripperdoc/cli/commands/status_cmd.py +4 -4
  14. ripperdoc/cli/commands/tasks_cmd.py +8 -4
  15. ripperdoc/cli/ui/file_mention_completer.py +2 -1
  16. ripperdoc/cli/ui/interrupt_handler.py +2 -3
  17. ripperdoc/cli/ui/message_display.py +4 -2
  18. ripperdoc/cli/ui/provider_options.py +247 -0
  19. ripperdoc/cli/ui/rich_ui.py +110 -59
  20. ripperdoc/cli/ui/spinner.py +25 -1
  21. ripperdoc/cli/ui/tool_renderers.py +8 -2
  22. ripperdoc/cli/ui/wizard.py +215 -0
  23. ripperdoc/core/agents.py +9 -3
  24. ripperdoc/core/config.py +49 -12
  25. ripperdoc/core/custom_commands.py +7 -6
  26. ripperdoc/core/default_tools.py +11 -2
  27. ripperdoc/core/hooks/config.py +1 -3
  28. ripperdoc/core/hooks/events.py +23 -28
  29. ripperdoc/core/hooks/executor.py +4 -6
  30. ripperdoc/core/hooks/integration.py +12 -21
  31. ripperdoc/core/hooks/manager.py +40 -15
  32. ripperdoc/core/permissions.py +40 -8
  33. ripperdoc/core/providers/anthropic.py +109 -36
  34. ripperdoc/core/providers/gemini.py +70 -5
  35. ripperdoc/core/providers/openai.py +60 -5
  36. ripperdoc/core/query.py +82 -38
  37. ripperdoc/core/query_utils.py +2 -0
  38. ripperdoc/core/skills.py +9 -3
  39. ripperdoc/core/system_prompt.py +4 -2
  40. ripperdoc/core/tool.py +9 -5
  41. ripperdoc/sdk/client.py +2 -2
  42. ripperdoc/tools/ask_user_question_tool.py +5 -3
  43. ripperdoc/tools/background_shell.py +2 -1
  44. ripperdoc/tools/bash_output_tool.py +1 -1
  45. ripperdoc/tools/bash_tool.py +26 -16
  46. ripperdoc/tools/dynamic_mcp_tool.py +29 -8
  47. ripperdoc/tools/enter_plan_mode_tool.py +1 -1
  48. ripperdoc/tools/exit_plan_mode_tool.py +1 -1
  49. ripperdoc/tools/file_edit_tool.py +8 -4
  50. ripperdoc/tools/file_read_tool.py +8 -4
  51. ripperdoc/tools/file_write_tool.py +9 -5
  52. ripperdoc/tools/glob_tool.py +3 -2
  53. ripperdoc/tools/grep_tool.py +3 -2
  54. ripperdoc/tools/kill_bash_tool.py +1 -1
  55. ripperdoc/tools/ls_tool.py +1 -1
  56. ripperdoc/tools/mcp_tools.py +13 -10
  57. ripperdoc/tools/multi_edit_tool.py +8 -7
  58. ripperdoc/tools/notebook_edit_tool.py +7 -4
  59. ripperdoc/tools/skill_tool.py +1 -1
  60. ripperdoc/tools/task_tool.py +5 -4
  61. ripperdoc/tools/todo_tool.py +2 -2
  62. ripperdoc/tools/tool_search_tool.py +3 -2
  63. ripperdoc/utils/conversation_compaction.py +8 -4
  64. ripperdoc/utils/file_watch.py +8 -2
  65. ripperdoc/utils/json_utils.py +2 -1
  66. ripperdoc/utils/mcp.py +11 -3
  67. ripperdoc/utils/memory.py +4 -2
  68. ripperdoc/utils/message_compaction.py +21 -7
  69. ripperdoc/utils/message_formatting.py +11 -7
  70. ripperdoc/utils/messages.py +105 -66
  71. ripperdoc/utils/path_ignore.py +35 -8
  72. ripperdoc/utils/permissions/path_validation_utils.py +2 -1
  73. ripperdoc/utils/permissions/shell_command_validation.py +427 -91
  74. ripperdoc/utils/safe_get_cwd.py +2 -1
  75. ripperdoc/utils/session_history.py +13 -6
  76. ripperdoc/utils/todo.py +2 -1
  77. ripperdoc/utils/token_estimation.py +6 -1
  78. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.9.dist-info}/METADATA +1 -1
  79. ripperdoc-0.2.9.dist-info/RECORD +123 -0
  80. ripperdoc-0.2.8.dist-info/RECORD +0 -121
  81. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.9.dist-info}/WHEEL +0 -0
  82. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.9.dist-info}/entry_points.txt +0 -0
  83. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.9.dist-info}/licenses/LICENSE +0 -0
  84. {ripperdoc-0.2.8.dist-info → ripperdoc-0.2.9.dist-info}/top_level.txt +0 -0
ripperdoc/cli/ui/rich_ui.py
@@ -96,15 +96,16 @@ class RichUI:
 
     def __init__(
         self,
-        safe_mode: bool = False,
+        yolo_mode: bool = False,
         verbose: bool = False,
+        show_full_thinking: Optional[bool] = None,
         session_id: Optional[str] = None,
         log_file_path: Optional[Path] = None,
     ):
         self._loop = asyncio.new_event_loop()
         asyncio.set_event_loop(self._loop)
         self.console = console
-        self.safe_mode = safe_mode
+        self.yolo_mode = yolo_mode
         self.verbose = verbose
         self.conversation_messages: List[ConversationMessage] = []
         self._saved_conversation: Optional[List[ConversationMessage]] = None
@@ -128,16 +129,17 @@ class RichUI:
                 "session_id": self.session_id,
                 "project_path": str(self.project_path),
                 "log_file": str(self.log_file_path),
-                "safe_mode": self.safe_mode,
+                "yolo_mode": self.yolo_mode,
                 "verbose": self.verbose,
             },
         )
         self._session_history = SessionHistory(self.project_path, self.session_id)
         self._permission_checker = (
-            make_permission_checker(self.project_path, safe_mode) if safe_mode else None
+            None if yolo_mode else make_permission_checker(self.project_path, yolo_mode=False)
         )
         # Build ignore filter for file completion
         from ripperdoc.utils.path_ignore import get_project_ignore_patterns
+
         project_patterns = get_project_ignore_patterns()
         self._ignore_filter = build_ignore_filter(
             self.project_path,
@@ -146,8 +148,17 @@ class RichUI:
             include_gitignore=True,
         )
 
+        # Get global config for display preferences
+        config = get_global_config()
+        if show_full_thinking is None:
+            self.show_full_thinking = config.show_full_thinking
+        else:
+            self.show_full_thinking = show_full_thinking
+
         # Initialize component handlers
-        self._message_display = MessageDisplay(self.console, self.verbose)
+        self._message_display = MessageDisplay(
+            self.console, self.verbose, self.show_full_thinking
+        )
         self._interrupt_handler = InterruptHandler()
         self._interrupt_handler.set_abort_callback(self._trigger_abort)
 
@@ -157,7 +168,8 @@ class RichUI:
         except (OSError, RuntimeError, ConnectionError) as exc:
             logger.warning(
                 "[ui] Failed to initialize MCP runtime at startup: %s: %s",
-                type(exc).__name__, exc,
+                type(exc).__name__,
+                exc,
                 extra={"session_id": self.session_id},
             )
 
@@ -215,7 +227,8 @@ class RichUI:
             # Logging failures should never interrupt the UI flow
             logger.warning(
                 "[ui] Failed to append message to session history: %s: %s",
-                type(exc).__name__, exc,
+                type(exc).__name__,
+                exc,
                 extra={"session_id": self.session_id},
             )
 
@@ -229,7 +242,8 @@ class RichUI:
         except (AttributeError, TypeError, ValueError) as exc:
             logger.warning(
                 "[ui] Failed to append prompt history: %s: %s",
-                type(exc).__name__, exc,
+                type(exc).__name__,
+                exc,
                 extra={"session_id": self.session_id},
            )
 
@@ -476,29 +490,36 @@ class RichUI:
         self,
         message: AssistantMessage,
         tool_registry: Dict[str, Dict[str, Any]],
+        spinner: Optional[ThinkingSpinner] = None,
     ) -> Optional[str]:
         """Handle an assistant message from the query stream.
 
         Returns:
             The last tool name if a tool_use block was processed, None otherwise.
         """
+        # Factory to create pause context - spinner.paused() if spinner exists, else no-op
+        from contextlib import nullcontext
+
+        pause = lambda: spinner.paused() if spinner else nullcontext()  # noqa: E731
+
         meta = getattr(getattr(message, "message", None), "metadata", {}) or {}
         reasoning_payload = (
-            meta.get("reasoning_content")
-            or meta.get("reasoning")
-            or meta.get("reasoning_details")
+            meta.get("reasoning_content") or meta.get("reasoning") or meta.get("reasoning_details")
        )
         if reasoning_payload:
-            self._print_reasoning(reasoning_payload)
+            with pause():
+                self._print_reasoning(reasoning_payload)
 
         last_tool_name: Optional[str] = None
 
         if isinstance(message.message.content, str):
-            self.display_message("Ripperdoc", message.message.content)
+            with pause():
+                self.display_message("Ripperdoc", message.message.content)
         elif isinstance(message.message.content, list):
             for block in message.message.content:
                 if hasattr(block, "type") and block.type == "text" and block.text:
-                    self.display_message("Ripperdoc", block.text)
+                    with pause():
+                        self.display_message("Ripperdoc", block.text)
                 elif hasattr(block, "type") and block.type == "tool_use":
                     tool_name = getattr(block, "name", "unknown tool")
                     tool_args = getattr(block, "input", {})
@@ -512,9 +533,10 @@ class RichUI:
                     }
 
                     if tool_name == "Task":
-                        self.display_message(
-                            tool_name, "", is_tool=True, tool_type="call", tool_args=tool_args
-                        )
+                        with pause():
+                            self.display_message(
+                                tool_name, "", is_tool=True, tool_type="call", tool_args=tool_args
+                            )
                         if tool_use_id:
                             tool_registry[tool_use_id]["printed"] = True
 
@@ -527,11 +549,17 @@ class RichUI:
         message: UserMessage,
         tool_registry: Dict[str, Dict[str, Any]],
         last_tool_name: Optional[str],
+        spinner: Optional[ThinkingSpinner] = None,
     ) -> None:
         """Handle a user message containing tool results."""
         if not isinstance(message.message.content, list):
             return
 
+        # Factory to create pause context - spinner.paused() if spinner exists, else no-op
+        from contextlib import nullcontext
+
+        pause = lambda: spinner.paused() if spinner else nullcontext()  # noqa: E731
+
         for block in message.message.content:
             if not (hasattr(block, "type") and block.type == "tool_result" and block.text):
                 continue
@@ -545,25 +573,27 @@ class RichUI:
             if entry:
                 tool_name = entry.get("name", tool_name)
                 if not entry.get("printed"):
-                    self.display_message(
-                        tool_name,
-                        "",
-                        is_tool=True,
-                        tool_type="call",
-                        tool_args=entry.get("args", {}),
-                    )
+                    with pause():
+                        self.display_message(
+                            tool_name,
+                            "",
+                            is_tool=True,
+                            tool_type="call",
+                            tool_args=entry.get("args", {}),
+                        )
                     entry["printed"] = True
             elif last_tool_name:
                 tool_name = last_tool_name
 
-            self.display_message(
-                tool_name,
-                block.text,
-                is_tool=True,
-                tool_type="result",
-                tool_data=tool_data,
-                tool_error=is_error,
-            )
+            with pause():
+                self.display_message(
+                    tool_name,
+                    block.text,
+                    is_tool=True,
+                    tool_type="result",
+                    tool_data=tool_data,
+                    tool_error=is_error,
+                )
 
     def _handle_progress_message(
         self,
@@ -577,14 +607,17 @@ class RichUI:
             Updated output token estimate.
         """
         if self.verbose:
-            self.display_message("System", f"Progress: {message.content}", is_tool=True)
+            with spinner.paused():
+                self.display_message("System", f"Progress: {message.content}", is_tool=True)
         elif message.content and isinstance(message.content, str):
             if message.content.startswith("Subagent: "):
-                self.display_message(
-                    "Subagent", message.content[len("Subagent: ") :], is_tool=True
-                )
+                with spinner.paused():
+                    self.display_message(
+                        "Subagent", message.content[len("Subagent: ") :], is_tool=True
+                    )
             elif message.content.startswith("Subagent"):
-                self.display_message("Subagent", message.content, is_tool=True)
+                with spinner.paused():
+                    self.display_message("Subagent", message.content, is_tool=True)
 
         if message.tool_use_id == "stream":
             delta_tokens = estimate_tokens(message.content)
@@ -600,7 +633,7 @@ class RichUI:
         # Initialize or reset query context
         if not self.query_context:
             self.query_context = QueryContext(
-                tools=self.get_default_tools(), safe_mode=self.safe_mode, verbose=self.verbose
+                tools=self.get_default_tools(), yolo_mode=self.yolo_mode, verbose=self.verbose
            )
         else:
             abort_controller = getattr(self.query_context, "abort_controller", None)
@@ -684,7 +717,8 @@ class RichUI:
             except (RuntimeError, ValueError, OSError) as exc:
                 logger.debug(
                     "[ui] Failed to restart spinner after permission check: %s: %s",
-                    type(exc).__name__, exc,
+                    type(exc).__name__,
+                    exc,
                 )
 
         # Process query stream
@@ -702,12 +736,14 @@ class RichUI:
                 permission_checker,  # type: ignore[arg-type]
             ):
                 if message.type == "assistant" and isinstance(message, AssistantMessage):
-                    result = self._handle_assistant_message(message, tool_registry)
+                    result = self._handle_assistant_message(message, tool_registry, spinner)
                     if result:
                         last_tool_name = result
 
                 elif message.type == "user" and isinstance(message, UserMessage):
-                    self._handle_tool_result_message(message, tool_registry, last_tool_name)
+                    self._handle_tool_result_message(
+                        message, tool_registry, last_tool_name, spinner
+                    )
 
                 elif message.type == "progress" and isinstance(message, ProgressMessage):
                     output_token_est = self._handle_progress_message(
@@ -723,7 +759,8 @@ class RichUI:
         except (OSError, ConnectionError, RuntimeError, ValueError, KeyError, TypeError) as e:
             logger.warning(
                 "[ui] Error while processing streamed query response: %s: %s",
-                type(e).__name__, e,
+                type(e).__name__,
+                e,
                 extra={"session_id": self.session_id},
            )
             self.display_message("System", f"Error: {str(e)}", is_tool=True)
@@ -733,7 +770,8 @@ class RichUI:
             except (RuntimeError, ValueError, OSError) as exc:
                 logger.warning(
                     "[ui] Failed to stop spinner: %s: %s",
-                    type(exc).__name__, exc,
+                    type(exc).__name__,
+                    exc,
                     extra={"session_id": self.session_id},
                )
 
@@ -753,7 +791,8 @@ class RichUI:
        except (OSError, ConnectionError, RuntimeError, ValueError, KeyError, TypeError) as exc:
             logger.warning(
                 "[ui] Error during query processing: %s: %s",
-                type(exc).__name__, exc,
+                type(exc).__name__,
+                exc,
                 extra={"session_id": self.session_id},
            )
             self.display_message("System", f"Error: {str(exc)}", is_tool=True)
@@ -823,14 +862,10 @@ class RichUI:
         custom_cmd = get_custom_command(command_name, self.project_path)
         if custom_cmd is not None:
             # Expand the custom command content
-            expanded_content = expand_command_content(
-                custom_cmd, trimmed_arg, self.project_path
-            )
+            expanded_content = expand_command_content(custom_cmd, trimmed_arg, self.project_path)
 
             # Show a hint that this is from a custom command
-            self.console.print(
-                f"[dim]Running custom command: /{command_name}[/dim]"
-            )
+            self.console.print(f"[dim]Running custom command: /{command_name}[/dim]")
             if custom_cmd.argument_hint and trimmed_arg:
                 self.console.print(f"[dim]Arguments: {trimmed_arg}[/dim]")
 
@@ -921,7 +956,9 @@ class RichUI:
         # Display status
         console.print(create_status_bar())
         console.print()
-        console.print("[dim]Tip: type '/' then press Tab to see available commands. Type '@' to mention files. Press ESC to interrupt a running query.[/dim]\n")
+        console.print(
+            "[dim]Tip: type '/' then press Tab to see available commands. Type '@' to mention files. Press ESC to interrupt a running query.[/dim]\n"
+        )
 
         session = self.get_prompt_session()
         logger.info(
@@ -972,7 +1009,9 @@ class RichUI:
                 interrupted = self._run_async_with_esc_interrupt(self.process_query(user_input))
 
                 if interrupted:
-                    console.print("\n[red]■ Conversation interrupted[/red] · [dim]Tell the model what to do differently.[/dim]")
+                    console.print(
+                        "\n[red]■ Conversation interrupted[/red] · [dim]Tell the model what to do differently.[/dim]"
+                    )
                     logger.info(
                         "[ui] Query interrupted by ESC key",
                         extra={"session_id": self.session_id},
@@ -991,11 +1030,19 @@ class RichUI:
             except EOFError:
                 console.print("\n[yellow]Goodbye![/yellow]")
                 break
-            except (OSError, ConnectionError, RuntimeError, ValueError, KeyError, TypeError) as e:
+            except (
+                OSError,
+                ConnectionError,
+                RuntimeError,
+                ValueError,
+                KeyError,
+                TypeError,
+            ) as e:
                 console.print(f"[red]Error: {escape(str(e))}[/]")
                 logger.warning(
                     "[ui] Error in interactive loop: %s: %s",
-                    type(e).__name__, e,
+                    type(e).__name__,
+                    e,
                     extra={"session_id": self.session_id},
                )
                 if self.verbose:
@@ -1029,7 +1076,8 @@ class RichUI:
            # pragma: no cover - defensive shutdown
            logger.warning(
                "[ui] Failed to shut down MCP runtime cleanly: %s: %s",
-               type(exc).__name__, exc,
+               type(exc).__name__,
+               exc,
                extra={"session_id": self.session_id},
            )
        finally:
@@ -1082,6 +1130,7 @@ class RichUI:
            )
        except Exception as exc:
            import traceback
+
            self.console.print(f"[red]Error during compaction: {escape(str(exc))}[/red]")
            self.console.print(f"[dim red]{traceback.format_exc()}[/dim red]")
            return
@@ -1110,15 +1159,16 @@ def check_onboarding_rich() -> bool:
    if config.has_completed_onboarding:
        return True

-   # Use simple console onboarding
-   from ripperdoc.cli.cli import check_onboarding
+   # Use the wizard onboarding
+   from ripperdoc.cli.ui.wizard import check_onboarding

    return check_onboarding()


def main_rich(
-   safe_mode: bool = False,
+   yolo_mode: bool = False,
    verbose: bool = False,
+   show_full_thinking: Optional[bool] = None,
    session_id: Optional[str] = None,
    log_file_path: Optional[Path] = None,
) -> None:
@@ -1130,8 +1180,9 @@ def main_rich(
 
    # Run the Rich UI
    ui = RichUI(
-       safe_mode=safe_mode,
+       yolo_mode=yolo_mode,
        verbose=verbose,
+       show_full_thinking=show_full_thinking,
        session_id=session_id,
        log_file_path=log_file_path,
    )
ripperdoc/cli/ui/spinner.py
@@ -1,4 +1,6 @@
-from typing import Any, Literal, Optional
+from contextlib import contextmanager
+from typing import Any, Generator, Literal, Optional
+
 from rich.console import Console
 from rich.markup import escape
 from rich.status import Status
@@ -47,3 +49,25 @@ class Spinner:
         self.stop()
         # Do not suppress exceptions
         return False
+
+    @property
+    def is_running(self) -> bool:
+        """Check if spinner is currently running."""
+        return self._status is not None
+
+    @contextmanager
+    def paused(self) -> Generator[None, None, None]:
+        """Context manager to temporarily pause the spinner for clean output.
+
+        Usage:
+            with spinner.paused():
+                console.print("Some output")
+        """
+        was_running = self.is_running
+        if was_running:
+            self.stop()
+        try:
+            yield
+        finally:
+            if was_running:
+                self.start()
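For reference, a hedged usage sketch of the new paused() helper, assuming spinner is an already-started Spinner from this module and console is the shared rich Console:

# Hypothetical call site: the spinner is live while a query streams.
with spinner.paused():            # stop() runs because is_running is True
    console.print("tool result")  # prints without spinner artifacts
# on exit, start() runs again because the spinner was running on entry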
ripperdoc/cli/ui/tool_renderers.py
@@ -155,7 +155,10 @@ class BashResultRenderer(ToolResultRenderer):
     """Render Bash tool results."""
 
     def __init__(
-        self, console: Console, verbose: bool = False, parse_fallback: Optional[BashOutputParser] = None
+        self,
+        console: Console,
+        verbose: bool = False,
+        parse_fallback: Optional[BashOutputParser] = None,
     ):
         super().__init__(console, verbose)
         self._parse_fallback = parse_fallback
@@ -254,7 +257,10 @@ class ToolResultRendererRegistry:
     """Registry that selects the appropriate renderer for a tool result."""
 
     def __init__(
-        self, console: Console, verbose: bool = False, parse_bash_fallback: Optional[BashOutputParser] = None
+        self,
+        console: Console,
+        verbose: bool = False,
+        parse_bash_fallback: Optional[BashOutputParser] = None,
     ):
         self.console = console
         self.verbose = verbose
ripperdoc/cli/ui/wizard.py (new file)
@@ -0,0 +1,215 @@
+"""
+Interactive onboarding wizard for Ripperdoc.
+"""
+
+from typing import List, Optional, Tuple
+
+import click
+from rich.console import Console
+
+from ripperdoc.cli.ui.provider_options import (
+    KNOWN_PROVIDERS,
+    ProviderOption,
+    default_model_for_protocol,
+)
+from ripperdoc.core.config import (
+    GlobalConfig,
+    ModelProfile,
+    ProviderType,
+    get_global_config,
+    save_global_config,
+)
+from ripperdoc.utils.prompt import prompt_secret
+
+
+console = Console()
+
+
+def resolve_provider_choice(raw_choice: str, provider_keys: List[str]) -> Optional[str]:
+    """Normalize user input into a provider key."""
+    normalized = raw_choice.strip().lower()
+    if normalized in provider_keys:
+        return normalized
+    try:
+        idx = int(normalized)
+        if 1 <= idx <= len(provider_keys):
+            return provider_keys[idx - 1]
+    except ValueError:
+        return None
+    return None
+
+
+def check_onboarding() -> bool:
+    """Check if onboarding is complete and run if needed."""
+    config = get_global_config()
+
+    if config.has_completed_onboarding:
+        return True
+
+    console.print("[bold cyan]Welcome to Ripperdoc![/bold cyan]\n")
+    console.print("Let's set up your AI model configuration.\n")
+
+    return run_onboarding_wizard(config)
+
+
+def run_onboarding_wizard(config: GlobalConfig) -> bool:
+    """Run interactive onboarding wizard."""
+    provider_keys = KNOWN_PROVIDERS.keys() + ["custom"]
+    default_choice_key = KNOWN_PROVIDERS.default_choice.key
+
+    # Display provider options vertically
+    console.print("[bold]Available providers:[/bold]")
+    for i, provider_key in enumerate(provider_keys, 1):
+        marker = "[cyan]→[/cyan]" if provider_key == default_choice_key else " "
+        console.print(f" {marker} {i}. {provider_key}")
+    console.print("")
+
+    # Prompt for provider choice with validation
+    provider_choice: Optional[str] = None
+    while provider_choice is None:
+        raw_choice = click.prompt(
+            "Choose your model provider",
+            default=default_choice_key,
+        )
+        provider_choice = resolve_provider_choice(raw_choice, provider_keys)
+        if provider_choice is None:
+            console.print(
+                f"[red]Invalid choice. Please enter a provider name or number (1-{len(provider_keys)}).[/red]"
+            )
+
+    api_base_override: Optional[str] = None
+    if provider_choice == "custom":
+        protocol_input = click.prompt(
+            "Protocol family (for API compatibility)",
+            type=click.Choice([p.value for p in ProviderType]),
+            default=ProviderType.OPENAI_COMPATIBLE.value,
+        )
+        protocol = ProviderType(protocol_input)
+        api_base_override = click.prompt("API Base URL")
+        provider_option = ProviderOption(
+            key="custom",
+            protocol=protocol,
+            default_model=default_model_for_protocol(protocol),
+            model_suggestions=(),
+        )
+    else:
+        provider_option = KNOWN_PROVIDERS.get(provider_choice)
+        if provider_option is None:
+            provider_option = ProviderOption(
+                key=provider_choice,
+                protocol=ProviderType.OPENAI_COMPATIBLE,
+                default_model=default_model_for_protocol(ProviderType.OPENAI_COMPATIBLE),
+                model_suggestions=(),
+            )
+
+    api_key = ""
+    while not api_key:
+        api_key = prompt_secret("Enter your API key").strip()
+        if not api_key:
+            console.print("[red]API key is required.[/red]")
+
+    # Get model name with provider-specific suggestions
+    model, api_base = get_model_name_with_suggestions(provider_option, api_base_override)
+
+    # Get context window
+    context_window = get_context_window()
+
+    # Create model profile
+    config.model_profiles["default"] = ModelProfile(
+        provider=provider_option.protocol,
+        model=model,
+        api_key=api_key,
+        api_base=api_base,
+        context_window=context_window,
+    )
+
+    config.has_completed_onboarding = True
+    config.last_onboarding_version = get_version()
+
+    save_global_config(config)
+
+    console.print("\n[green]✓ Configuration saved![/green]\n")
+    return True
+
+
+def get_model_name_with_suggestions(
+    provider: ProviderOption,
+    api_base_override: Optional[str],
+) -> Tuple[str, Optional[str]]:
+    """Get model name with provider-specific suggestions and default API base.
+
+    Returns:
+        Tuple of (model_name, api_base)
+    """
+    # Set default API base based on provider choice
+    api_base = api_base_override
+    if api_base is None and provider.default_api_base:
+        api_base = provider.default_api_base
+        console.print(f"[dim]Using default API base: {api_base}[/dim]")
+
+    default_model = provider.default_model or default_model_for_protocol(provider.protocol)
+    suggestions = list(provider.model_suggestions)
+
+    # Show suggestions if available
+    if suggestions:
+        console.print("\n[dim]Available models for this provider:[/dim]")
+        for i, model_name in enumerate(suggestions[:5]):  # Show top 5
+            console.print(f" [dim]{i+1}. {model_name}[/dim]")
+        console.print("")
+
+    # Prompt for model name
+    if provider.protocol == ProviderType.ANTHROPIC:
+        model = click.prompt("Model name", default=default_model)
+    elif provider.protocol == ProviderType.OPENAI_COMPATIBLE:
+        model = click.prompt("Model name", default=default_model)
+        # Prompt for API base if still not set
+        if api_base is None:
+            api_base_input = click.prompt(
+                "API base URL (optional)", default="", show_default=False
+            )
+            api_base = api_base_input or None
+    elif provider.protocol == ProviderType.GEMINI:
+        model = click.prompt("Model name", default=default_model)
+        if api_base is None:
+            api_base_input = click.prompt(
+                "API base URL (optional)", default="", show_default=False
+            )
+            api_base = api_base_input or None
+    else:
+        model = click.prompt("Model name", default=default_model)
+
+    return model, api_base
+
+
+def get_context_window() -> Optional[int]:
+    """Get context window size from user."""
+    context_window_input = click.prompt(
+        "Context window in tokens (optional, press Enter to skip)",
+        default="",
+        show_default=False,
+    )
+    context_window = None
+    if context_window_input.strip():
+        try:
+            context_window = int(context_window_input.strip())
+        except ValueError:
+            console.print(
+                "[yellow]Invalid context window, using auto-detected defaults.[/yellow]"
+            )
+    return context_window
+
+
+def get_version() -> str:
+    """Get current version of Ripperdoc."""
+    try:
+        from ripperdoc import __version__
+        return __version__
+    except ImportError:
+        return "unknown"
+
+
+if __name__ == "__main__":
+    # For testing
+    config = get_global_config()
+    config.has_completed_onboarding = False
+    run_onboarding_wizard(config)
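Note that resolve_provider_choice accepts either a provider key or a 1-based menu index. A quick illustration against a hypothetical key list (the real keys come from KNOWN_PROVIDERS):

keys = ["anthropic", "openai", "custom"]        # illustrative keys only

resolve_provider_choice("OpenAI ", keys)        # -> "openai"  (trimmed, lowercased)
resolve_provider_choice("3", keys)              # -> "custom"  (1-based index)
resolve_provider_choice("7", keys)              # -> None      (index out of range)
resolve_provider_choice("not-a-provider", keys) # -> None      (unrecognized key)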