klaude-code 2.9.1__py3-none-any.whl → 2.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. klaude_code/app/runtime.py +5 -1
  2. klaude_code/cli/cost_cmd.py +4 -4
  3. klaude_code/cli/list_model.py +1 -2
  4. klaude_code/cli/main.py +10 -0
  5. klaude_code/config/assets/builtin_config.yaml +15 -14
  6. klaude_code/const.py +4 -3
  7. klaude_code/core/agent_profile.py +23 -0
  8. klaude_code/core/bash_mode.py +276 -0
  9. klaude_code/core/executor.py +40 -7
  10. klaude_code/core/manager/llm_clients.py +1 -0
  11. klaude_code/core/manager/llm_clients_builder.py +2 -2
  12. klaude_code/core/memory.py +140 -0
  13. klaude_code/core/prompts/prompt-sub-agent-web.md +2 -2
  14. klaude_code/core/reminders.py +17 -89
  15. klaude_code/core/tool/offload.py +4 -4
  16. klaude_code/core/tool/web/web_fetch_tool.md +2 -1
  17. klaude_code/core/tool/web/web_fetch_tool.py +1 -1
  18. klaude_code/core/turn.py +9 -4
  19. klaude_code/protocol/events.py +17 -0
  20. klaude_code/protocol/op.py +12 -0
  21. klaude_code/protocol/op_handler.py +5 -0
  22. klaude_code/session/templates/mermaid_viewer.html +85 -0
  23. klaude_code/tui/command/resume_cmd.py +1 -1
  24. klaude_code/tui/commands.py +15 -0
  25. klaude_code/tui/components/command_output.py +4 -5
  26. klaude_code/tui/components/developer.py +1 -3
  27. klaude_code/tui/components/metadata.py +28 -25
  28. klaude_code/tui/components/rich/code_panel.py +31 -16
  29. klaude_code/tui/components/rich/markdown.py +56 -124
  30. klaude_code/tui/components/rich/theme.py +22 -12
  31. klaude_code/tui/components/thinking.py +0 -35
  32. klaude_code/tui/components/tools.py +4 -2
  33. klaude_code/tui/components/user_input.py +49 -59
  34. klaude_code/tui/components/welcome.py +47 -2
  35. klaude_code/tui/display.py +14 -6
  36. klaude_code/tui/input/completers.py +8 -0
  37. klaude_code/tui/input/key_bindings.py +37 -1
  38. klaude_code/tui/input/prompt_toolkit.py +57 -31
  39. klaude_code/tui/machine.py +108 -28
  40. klaude_code/tui/renderer.py +117 -19
  41. klaude_code/tui/runner.py +22 -0
  42. klaude_code/tui/terminal/notifier.py +11 -12
  43. klaude_code/tui/terminal/selector.py +1 -1
  44. klaude_code/ui/terminal/title.py +4 -2
  45. {klaude_code-2.9.1.dist-info → klaude_code-2.10.1.dist-info}/METADATA +1 -1
  46. {klaude_code-2.9.1.dist-info → klaude_code-2.10.1.dist-info}/RECORD +48 -47
  47. klaude_code/tui/components/assistant.py +0 -2
  48. {klaude_code-2.9.1.dist-info → klaude_code-2.10.1.dist-info}/WHEEL +0 -0
  49. {klaude_code-2.9.1.dist-info → klaude_code-2.10.1.dist-info}/entry_points.txt +0 -0
klaude_code/tui/machine.py CHANGED
@@ -9,20 +9,23 @@ from klaude_code.const import (
  STATUS_COMPACTING_TEXT,
  STATUS_COMPOSING_TEXT,
  STATUS_DEFAULT_TEXT,
+ STATUS_RUNNING_TEXT,
  STATUS_SHOW_BUFFER_LENGTH,
  STATUS_THINKING_TEXT,
  )
  from klaude_code.protocol import events, model, tools
  from klaude_code.tui.commands import (
  AppendAssistant,
+ AppendBashCommandOutput,
  AppendThinking,
  EmitOsc94Error,
  EmitTmuxSignal,
  EndAssistantStream,
  EndThinkingStream,
  PrintBlankLine,
- PrintRuleLine,
  RenderAssistantImage,
+ RenderBashCommandEnd,
+ RenderBashCommandStart,
  RenderCommand,
  RenderCommandOutput,
  RenderCompactionSummary,
@@ -47,7 +50,6 @@ from klaude_code.tui.commands import (
  )
  from klaude_code.tui.components.rich import status as r_status
  from klaude_code.tui.components.rich.theme import ThemeKey
- from klaude_code.tui.components.thinking import extract_last_bold_header, normalize_thinking_content
  from klaude_code.tui.components.tools import get_task_active_form, get_tool_active_form, is_sub_agent_tool

  # Tools that complete quickly and don't benefit from streaming activity display.
@@ -245,7 +247,7 @@ class SpinnerStatusState:

  if base_status:
  # Default "Thinking ..." uses normal style; custom headers use bold italic
- is_default_reasoning = base_status == STATUS_THINKING_TEXT
+ is_default_reasoning = base_status in {STATUS_THINKING_TEXT, STATUS_RUNNING_TEXT}
  status_style = ThemeKey.STATUS_TEXT if is_default_reasoning else ThemeKey.STATUS_TEXT_BOLD_ITALIC
  if activity_text:
  result = Text()
@@ -290,7 +292,6 @@ class _SessionState:
  assistant_stream_active: bool = False
  thinking_stream_active: bool = False
  assistant_char_count: int = 0
- thinking_tail: str = ""
  task_active: bool = False

  @property
@@ -301,15 +302,6 @@ class _SessionState:
  def should_show_sub_agent_thinking_header(self) -> bool:
  return bool(self.sub_agent_state and self.sub_agent_state.sub_agent_type == tools.IMAGE_GEN)

- @property
- def should_extract_reasoning_header(self) -> bool:
- """Gemini and GPT-5 models use markdown bold headers in thinking."""
- return False # Temporarily disabled for all models
- if self.model_id is None:
- return False
- model_lower = self.model_id.lower()
- return "gemini" in model_lower or "gpt-5" in model_lower
-
  def should_skip_tool_activity(self, tool_name: str) -> bool:
  """Check if tool activity should be skipped for non-streaming models."""
  if self.model_id is None:
@@ -332,6 +324,11 @@ class DisplayStateMachine:
  self._primary_session_id: str | None = None
  self._spinner = SpinnerStatusState()

+ def _reset_sessions(self) -> None:
+ self._sessions = {}
+ self._primary_session_id = None
+ self._spinner.reset()
+
  def _session(self, session_id: str) -> _SessionState:
  existing = self._sessions.get(session_id)
  if existing is not None:
@@ -364,7 +361,9 @@ class DisplayStateMachine:
  return self._spinner_update_commands()

  def begin_replay(self) -> list[RenderCommand]:
- self._spinner.reset()
+ # Replay is a full rebuild of the terminal view; clear session state so primary-session
+ # routing is recalculated from the replayed TaskStartEvent.
+ self._reset_sessions()
  return [SpinnerStop(), PrintBlankLine()]

  def end_replay(self) -> list[RenderCommand]:
@@ -383,6 +382,13 @@ class DisplayStateMachine:

  match event:
  case events.WelcomeEvent() as e:
+ # WelcomeEvent marks (or reaffirms) the current interactive session.
+ # If the session id changes (e.g., /clear creates a new session), clear
+ # routing state so subsequent streamed events are not dropped.
+ if self._primary_session_id is not None and self._primary_session_id != e.session_id:
+ self._reset_sessions()
+ s = self._session(e.session_id)
+ self._primary_session_id = e.session_id
  cmds.append(RenderWelcome(e))
  return cmds

@@ -392,12 +398,48 @@
  cmds.append(RenderUserMessage(e))
  return cmds

+ case events.BashCommandStartEvent() as e:
+ if s.is_sub_agent:
+ return []
+ if not is_replay:
+ self._spinner.set_reasoning_status(STATUS_RUNNING_TEXT)
+ cmds.append(TaskClockStart())
+ cmds.append(SpinnerStart())
+ cmds.extend(self._spinner_update_commands())
+
+ cmds.append(RenderBashCommandStart(e))
+ return cmds
+
+ case events.BashCommandOutputDeltaEvent() as e:
+ if s.is_sub_agent:
+ return []
+ cmds.append(AppendBashCommandOutput(e))
+ return cmds
+
+ case events.BashCommandEndEvent() as e:
+ if s.is_sub_agent:
+ return []
+ cmds.append(RenderBashCommandEnd(e))
+
+ if not is_replay:
+ self._spinner.set_reasoning_status(None)
+ cmds.append(TaskClockClear())
+ cmds.append(SpinnerStop())
+ cmds.extend(self._spinner_update_commands())
+
+ return cmds
+
  case events.TaskStartEvent() as e:
  s.sub_agent_state = e.sub_agent_state
  s.model_id = e.model_id
  s.task_active = True
  if not s.is_sub_agent:
- self._set_primary_if_needed(e.session_id)
+ # Keep primary session tracking in sync even if the session id changes
+ # during the process lifetime (e.g., /clear).
+ if is_replay:
+ self._set_primary_if_needed(e.session_id)
+ else:
+ self._primary_session_id = e.session_id
  if not is_replay:
  cmds.append(TaskClockStart())

@@ -453,9 +495,8 @@ class DisplayStateMachine:
  if not self._is_primary(e.session_id):
  return []
  s.thinking_stream_active = True
- s.thinking_tail = ""
  # Ensure the status reflects that reasoning has started even
- # before we receive any deltas (or a bold header).
+ # before we receive any deltas.
  if not is_replay:
  self._spinner.set_reasoning_status(STATUS_THINKING_TEXT)
  cmds.append(StartThinkingStream(session_id=e.session_id))
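The BashCommandStart/OutputDelta/End cases added to the dispatcher above all follow the same shape as the rest of DisplayStateMachine: translate one event into a list of render commands, and only touch the spinner/clock when not replaying. A minimal, self-contained illustration of that pattern; the event and command names here are hypothetical stand-ins, not the klaude_code classes:

```python
# Illustrative only: a stripped-down event -> render-command translation,
# mirroring the match-based dispatch used above.
from dataclasses import dataclass


@dataclass
class BashStart:
    command: str


@dataclass
class BashDelta:
    content: str


@dataclass
class BashEnd:
    exit_code: int


def translate(event: object, *, is_replay: bool = False) -> list[str]:
    cmds: list[str] = []
    match event:
        case BashStart(command=command):
            if not is_replay:
                cmds += ["clock:start", "spinner:start(Running)"]
            cmds.append(f"render:bash-start {command}")
        case BashDelta(content=content):
            cmds.append(f"append:bash-output {content!r}")
        case BashEnd(exit_code=code):
            cmds.append(f"render:bash-end exit={code}")
            if not is_replay:
                cmds += ["clock:clear", "spinner:stop"]
    return cmds


print(translate(BashStart(command="ls -la")))
```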
@@ -473,16 +514,6 @@ class DisplayStateMachine:
  if not self._is_primary(e.session_id):
  return []
  cmds.append(AppendThinking(session_id=e.session_id, content=e.content))
-
- # Update reasoning status for spinner (based on bounded tail).
- # Only extract headers for models that use markdown bold headers in thinking.
- if not is_replay and s.should_extract_reasoning_header:
- s.thinking_tail = (s.thinking_tail + e.content)[-8192:]
- header = extract_last_bold_header(normalize_thinking_content(s.thinking_tail))
- if header:
- self._spinner.set_reasoning_status(header)
- cmds.extend(self._spinner_update_commands())
-
  return cmds

  case events.ThinkingEndEvent() as e:
@@ -563,6 +594,31 @@ class DisplayStateMachine:
  return []
  if not self._is_primary(e.session_id):
  return []
+
+ # Some providers/models may not emit fine-grained AssistantText* deltas.
+ # In that case, ResponseCompleteEvent.content is the only assistant text we get.
+ # Render it as a single assistant stream to avoid dropping the entire message.
+ content = e.content
+ if content.strip():
+ # If we saw no streamed assistant text for this response, render from the final snapshot.
+ if s.assistant_char_count == 0:
+ if not s.assistant_stream_active:
+ s.assistant_stream_active = True
+ cmds.append(StartAssistantStream(session_id=e.session_id))
+ cmds.append(AppendAssistant(session_id=e.session_id, content=content))
+ s.assistant_char_count += len(content)
+
+ # Ensure any active assistant stream is finalized.
+ if s.assistant_stream_active:
+ s.assistant_stream_active = False
+ cmds.append(EndAssistantStream(session_id=e.session_id))
+ else:
+ # If there is an active stream but the final snapshot has no text,
+ # still finalize to flush any pending markdown rendering.
+ if s.assistant_stream_active:
+ s.assistant_stream_active = False
+ cmds.append(EndAssistantStream(session_id=e.session_id))
+
  if not is_replay:
  self._spinner.set_composing(False)
  cmds.append(SpinnerStart())
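The ResponseCompleteEvent handling above is a snapshot fallback: render from the final content only when no deltas were seen for the response, and always finalize an open stream so buffered markdown is flushed. The same guard, reduced to a standalone sketch with hypothetical names (not the klaude_code API):

```python
# Illustrative sketch of the "final snapshot" fallback used above.
# `emit` stands in for appending render commands.
def finish_response(snapshot: str, streamed_chars: int, stream_open: bool, emit) -> bool:
    """Return True if the assistant stream ends up finalized."""
    if snapshot.strip() and streamed_chars == 0:
        # Nothing was streamed: render the whole message from the snapshot.
        if not stream_open:
            emit("start-assistant-stream")
            stream_open = True
        emit(f"append-assistant:{snapshot}")
    if stream_open:
        emit("end-assistant-stream")  # always flush pending markdown
        return True
    return False


commands: list[str] = []
finish_response("Hello from a non-streaming provider.", 0, False, commands.append)
print(commands)
```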
@@ -632,6 +688,8 @@ class DisplayStateMachine:
  cmds.append(EndThinkingStream(e.session_id))
  cmds.append(EndAssistantStream(e.session_id))
  cmds.append(RenderTaskMetadata(e))
+ if is_replay:
+ cmds.append(PrintBlankLine())
  return cmds

  case events.TodoChangeEvent() as e:
@@ -660,11 +718,33 @@ class DisplayStateMachine:
  case events.TaskFinishEvent() as e:
  s.task_active = False
  cmds.append(RenderTaskFinish(e))
+
+ # Defensive: finalize any open streams so buffered markdown is flushed.
+ if s.thinking_stream_active:
+ s.thinking_stream_active = False
+ cmds.append(EndThinkingStream(session_id=e.session_id))
+ if s.assistant_stream_active:
+ s.assistant_stream_active = False
+ cmds.append(EndAssistantStream(session_id=e.session_id))
+
+ # Rare providers / edge cases may complete a turn without emitting any
+ # assistant deltas (or without the display consuming them). In that case,
+ # fall back to rendering the final task result to avoid a "blank" turn.
+ if (
+ not is_replay
+ and not s.is_sub_agent
+ and not e.has_structured_output
+ and s.assistant_char_count == 0
+ and e.task_result.strip()
+ ):
+ cmds.append(StartAssistantStream(session_id=e.session_id))
+ cmds.append(AppendAssistant(session_id=e.session_id, content=e.task_result))
+ cmds.append(EndAssistantStream(session_id=e.session_id))
+
  if not s.is_sub_agent and not is_replay:
  cmds.append(TaskClockClear())
  self._spinner.reset()
  cmds.append(SpinnerStop())
- cmds.append(PrintRuleLine())
  cmds.append(EmitTmuxSignal())
  return cmds

klaude_code/tui/renderer.py CHANGED
@@ -18,6 +18,7 @@ from rich.text import Text

  from klaude_code.const import (
  MARKDOWN_LEFT_MARGIN,
+ MARKDOWN_RIGHT_MARGIN,
  MARKDOWN_STREAM_LIVE_REPAINT_ENABLED,
  STATUS_DEFAULT_TEXT,
  STREAM_MAX_HEIGHT_SHRINK_RESET_LINES,
@@ -25,6 +26,7 @@ from klaude_code.const import (
  from klaude_code.protocol import events, model, tools
  from klaude_code.tui.commands import (
  AppendAssistant,
+ AppendBashCommandOutput,
  AppendThinking,
  EmitOsc94Error,
  EmitTmuxSignal,
@@ -33,6 +35,8 @@ from klaude_code.tui.commands import (
  PrintBlankLine,
  PrintRuleLine,
  RenderAssistantImage,
+ RenderBashCommandEnd,
+ RenderBashCommandStart,
  RenderCommand,
  RenderCommandOutput,
  RenderCompactionSummary,
@@ -56,7 +60,6 @@ from klaude_code.tui.commands import (
  TaskClockClear,
  TaskClockStart,
  )
- from klaude_code.tui.components import assistant as c_assistant
  from klaude_code.tui.components import command_output as c_command_output
  from klaude_code.tui.components import developer as c_developer
  from klaude_code.tui.components import errors as c_errors
@@ -165,11 +168,29 @@ class TUICommandRenderer:
  self._assistant_stream = _StreamState()
  self._thinking_stream = _StreamState()

+ # Replay mode reuses the same event/state machine but does not need streaming UI.
+ # When enabled, we avoid bottom Live rendering and defer markdown rendering until
+ # the corresponding stream End event.
+ self._replay_mode: bool = False
+
+ self._bash_stream_active: bool = False
+ self._bash_last_char_was_newline: bool = True
+
  self._sessions: dict[str, _SessionStatus] = {}
  self._current_sub_agent_color: Style | None = None
  self._sub_agent_color_index = 0
  self._sub_agent_thinking_buffers: dict[str, str] = {}

+ def set_replay_mode(self, enabled: bool) -> None:
+ """Enable or disable replay rendering mode.
+
+ Replay mode is optimized for speed and stability:
+ - Avoid Rich Live / bottom status rendering.
+ - Defer markdown stream rendering until End events.
+ """
+
+ self._replay_mode = enabled
+
  # ---------------------------------------------------------------------
  # Session helpers
  # ---------------------------------------------------------------------
@@ -304,7 +325,9 @@ class TUICommandRenderer:

  def _bottom_renderable(self) -> RenderableType:
  stream_part: RenderableType = Group()
- gap_part: RenderableType = Group()
+ # Keep a visible separation between the bottom status line (spinner)
+ # and the main terminal output.
+ gap_part: RenderableType = Text(" ") if (self._spinner_visible and self._bash_stream_active) else Group()

  if MARKDOWN_STREAM_LIVE_REPAINT_ENABLED:
  stream = self._stream_renderable
@@ -326,7 +349,11 @@ class TUICommandRenderer:
  if pad_lines:
  stream = Padding(stream, (0, 0, pad_lines, 0))
  stream_part = stream
- gap_part = Text("")
+ gap_part = (
+ Text(" ")
+ if (self._spinner_visible and (self._bash_stream_active or self._stream_renderable))
+ else Group()
+ )

  status_part: RenderableType = SingleLine(self._status_spinner) if self._spinner_visible else Group()
  return Group(stream_part, gap_part, status_part)
@@ -361,17 +388,19 @@ class TUICommandRenderer:
  mark=c_thinking.THINKING_MESSAGE_MARK,
  mark_style=ThemeKey.THINKING,
  left_margin=MARKDOWN_LEFT_MARGIN,
+ right_margin=MARKDOWN_RIGHT_MARGIN,
  markdown_class=ThinkingMarkdown,
  )

  def _new_assistant_mdstream(self) -> MarkdownStream:
+ live_sink = None if self._replay_mode else self.set_stream_renderable
  return MarkdownStream(
  mdargs={"code_theme": self.themes.code_theme},
  theme=self.themes.markdown_theme,
  console=self.console,
- live_sink=self.set_stream_renderable,
- mark=c_assistant.ASSISTANT_MESSAGE_MARK,
+ live_sink=live_sink,
  left_margin=MARKDOWN_LEFT_MARGIN,
+ right_margin=MARKDOWN_RIGHT_MARGIN,
  image_callback=self.display_image,
  )

@@ -460,6 +489,66 @@ class TUICommandRenderer:
  self.print(c_command_output.render_command_output(e))
  self.print()

+ def display_bash_command_start(self, e: events.BashCommandStartEvent) -> None:
+ # The user input line already shows `!cmd`; bash output is streamed as it arrives.
+ # We keep minimal rendering here to avoid adding noise.
+ self._bash_stream_active = True
+ self._bash_last_char_was_newline = True
+ if self._spinner_visible:
+ self._refresh_bottom_live()
+
+ def display_bash_command_delta(self, e: events.BashCommandOutputDeltaEvent) -> None:
+ if not self._bash_stream_active:
+ self._bash_stream_active = True
+ if self._spinner_visible:
+ self._refresh_bottom_live()
+
+ content = e.content
+ if content == "":
+ return
+
+ # Rich Live refreshes periodically (even when the renderable doesn't change).
+ # If we print bash output without a trailing newline while Live is active,
+ # the next refresh can overwrite the partial line.
+ #
+ # To keep streamed bash output stable, temporarily stop the bottom Live
+ # during the print, and only resume it once the output is back at a
+ # line boundary (i.e. chunk ends with "\n").
+ if self._bottom_live is not None:
+ with contextlib.suppress(Exception):
+ self._bottom_live.stop()
+ self._bottom_live = None
+
+ try:
+ # Do not use Renderer.print() here because it forces overflow="ellipsis",
+ # which would truncate long command output lines.
+ self.console.print(Text(content, style=ThemeKey.TOOL_RESULT), end="", overflow="ignore")
+ self._bash_last_char_was_newline = content.endswith("\n")
+ finally:
+ # Resume the bottom Live only when we're not in the middle of a line,
+ # otherwise periodic refresh can clobber the partial line.
+ if self._bash_last_char_was_newline and self._spinner_visible:
+ self._ensure_bottom_live_started()
+ self._refresh_bottom_live()
+
+ def display_bash_command_end(self, e: events.BashCommandEndEvent) -> None:
+ # Stop the bottom Live before finalizing bash output to prevent a refresh
+ # from interfering with the final line(s) written to stdout.
+ if self._bottom_live is not None:
+ with contextlib.suppress(Exception):
+ self._bottom_live.stop()
+ self._bottom_live = None
+
+ # Leave a blank line before the next prompt:
+ # - If the command output already ended with a newline, print one more "\n".
+ # - Otherwise, print "\n\n" to end the line and add one empty line.
+ if self._bash_stream_active:
+ sep = "\n" if self._bash_last_char_was_newline else "\n\n"
+ self.console.print(Text(sep), end="", overflow="ignore")
+
+ self._bash_stream_active = False
+ self._bash_last_char_was_newline = True
+
  def display_welcome(self, event: events.WelcomeEvent) -> None:
  self.print(c_welcome.render_welcome(event))

@@ -507,7 +596,6 @@ class TUICommandRenderer:
  if self.is_sub_agent_session(event.session_id):
  return
  self.print(c_metadata.render_task_metadata(event))
- self.print()

  def display_task_finish(self, event: events.TaskFinishEvent) -> None:
  if self.is_sub_agent_session(event.session_id):
@@ -633,6 +721,12 @@ class TUICommandRenderer:
  self.display_developer_message(event)
  case RenderCommandOutput(event=event):
  self.display_command_output(event)
+ case RenderBashCommandStart(event=event):
+ self.display_bash_command_start(event)
+ case AppendBashCommandOutput(event=event):
+ self.display_bash_command_delta(event)
+ case RenderBashCommandEnd(event=event):
+ self.display_bash_command_end(event)
  case RenderTurnStart(event=event):
  self.display_turn_start(event)
  case StartThinkingStream(session_id=session_id):
@@ -645,11 +739,12 @@ class TUICommandRenderer:
  if session_id in self._sub_agent_thinking_buffers:
  self._sub_agent_thinking_buffers[session_id] += content
  elif self._thinking_stream.is_active:
- first_delta = self._thinking_stream.buffer == ""
  self._thinking_stream.append(content)
- if first_delta:
- self._thinking_stream.render(transform=c_thinking.normalize_thinking_content)
- self._flush_thinking()
+ if not self._replay_mode:
+ first_delta = self._thinking_stream.buffer == ""
+ if first_delta:
+ self._thinking_stream.render(transform=c_thinking.normalize_thinking_content)
+ self._flush_thinking()
  case EndThinkingStream(session_id=session_id):
  if self.is_sub_agent_session(session_id):
  buf = self._sub_agent_thinking_buffers.pop(session_id, "")
@@ -657,22 +752,25 @@ class TUICommandRenderer:
  with self.session_print_context(session_id):
  self._render_sub_agent_thinking(buf)
  else:
+ had_content = bool(self._thinking_stream.buffer.strip())
  finalized = self._thinking_stream.finalize(transform=c_thinking.normalize_thinking_content)
- if finalized:
+ if finalized and had_content:
  self.print()
- case StartAssistantStream():
+ case StartAssistantStream(session_id=_):
  if not self._assistant_stream.is_active:
  self._assistant_stream.start(self._new_assistant_mdstream())
- case AppendAssistant(content=content):
+ case AppendAssistant(session_id=_, content=content):
  if self._assistant_stream.is_active:
- first_delta = self._assistant_stream.buffer == ""
  self._assistant_stream.append(content)
- if first_delta:
- self._assistant_stream.render()
- self._flush_assistant()
- case EndAssistantStream():
+ if not self._replay_mode:
+ first_delta = self._assistant_stream.buffer == ""
+ if first_delta:
+ self._assistant_stream.render()
+ self._flush_assistant()
+ case EndAssistantStream(session_id=_):
+ had_content = bool(self._assistant_stream.buffer.strip())
  finalized = self._assistant_stream.finalize()
- if finalized:
+ if finalized and had_content:
  self.print()
  case RenderThinkingHeader(session_id=session_id, header=header):
  with self.session_print_context(session_id):
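The display_bash_command_delta hunk above works around a real property of Rich's Live display: Live repaints its region periodically, so printing a partial line (no trailing newline) while it is running can get clobbered by the next refresh. A standalone sketch of the same stop → print → resume-at-line-boundary pattern using only the public rich API; the klaude_code helpers (_ensure_bottom_live_started, _refresh_bottom_live) are approximated here by recreating a Live instance:

```python
# Standalone sketch of the stop-print-resume pattern above, using plain rich.
import time

from rich.console import Console
from rich.live import Live
from rich.text import Text

console = Console()
status = Text("⠋ Running…")
live: Live | None = Live(status, console=console, refresh_per_second=8)
live.start()

for chunk in ["partial line, no newline yet", " …more…", " done\n"]:
    if live is not None:
        live.stop()  # a periodic refresh would clobber the partial line
        live = None
    console.print(Text(chunk), end="", overflow="ignore")
    if chunk.endswith("\n"):  # resume the bottom status only at a line boundary
        live = Live(status, console=console, refresh_per_second=8)
        live.start()
    time.sleep(0.2)

if live is not None:
    live.stop()
```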
klaude_code/tui/runner.py CHANGED
@@ -65,11 +65,27 @@ async def submit_user_input_payload(

  submission_id = uuid4().hex

+ # Normalize a leading full-width exclamation mark for consistent UI/history.
+ # (Bash mode is triggered only when the first character is `!`.)
+ text = user_input.text
+ if text.startswith("！"):
+ text = "!" + text[1:]
+ user_input = UserInputPayload(text=text, images=user_input.images)
+
  # Render the raw user input in the TUI even when it resolves to an event-only command.
  await executor.context.emit_event(
  events.UserMessageEvent(content=user_input.text, session_id=sid, images=user_input.images)
  )

+ # Bash mode: run a user-entered command without invoking the agent.
+ if user_input.text.startswith("!"):
+ command = user_input.text[1:].lstrip(" \t")
+ if command == "":
+ # Enter should be ignored in the input layer for this case; keep a guard here.
+ return None
+ bash_op = op.RunBashOperation(id=submission_id, session_id=sid, command=command)
+ return await executor.submit(bash_op)
+
  cmd_result = await dispatch_command(user_input, agent, submission_id=submission_id)
  operations: list[op.Operation] = list(cmd_result.operations or [])
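Taken together, the hunk above (a) normalizes a leading full-width '！' to ASCII '!', (b) ignores bare '!' input, and (c) submits a RunBashOperation instead of dispatching to the agent. A standalone sketch of just that routing decision; route_input and its return shape are hypothetical, while the real code goes through executor.submit:

```python
# Standalone sketch of the "!" routing above (hypothetical helper).
def route_input(text: str) -> tuple[str, str] | None:
    """Return ("bash", command) or ("agent", text); None if there is nothing to run."""
    if text.startswith("！"):  # normalize full-width '！' to ASCII '!'
        text = "!" + text[1:]
    if text.startswith("!"):
        command = text[1:].lstrip(" \t")
        if not command:  # bare "!" - nothing to execute
            return None
        return ("bash", command)
    return ("agent", text)


assert route_input("!ls -la") == ("bash", "ls -la")
assert route_input("！git status") == ("bash", "git status")
assert route_input("explain this diff") == ("agent", "explain this diff")
```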
 
@@ -304,6 +320,9 @@ async def run_interactive(init_config: AppInitConfig, session_id: str | None = N
  if is_interactive:
  with _double_ctrl_c_to_exit_while_running():
  await components.executor.wait_for(wait_id)
+ # Ensure all trailing events (e.g. final deltas / spinner stop) are rendered
+ # before handing control back to prompt_toolkit.
+ await components.event_queue.join()
  continue

  async def _on_esc_interrupt() -> None:
@@ -313,6 +332,9 @@ async def run_interactive(init_config: AppInitConfig, session_id: str | None = N
  try:
  with _double_ctrl_c_to_exit_while_running():
  await components.executor.wait_for(wait_id)
+ # Ensure all trailing events (e.g. final deltas / spinner stop) are rendered
+ # before handing control back to prompt_toolkit.
+ await components.event_queue.join()
  finally:
  stop_event.set()
  with contextlib.suppress(Exception):
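Both hunks above rely on the asyncio.Queue contract: join() returns only after the consumer has called task_done() for every queued item, so by the time control goes back to prompt_toolkit all trailing display events have been rendered. A minimal sketch of that contract, assuming components.event_queue is a standard asyncio.Queue and with the consumer standing in for the TUI display loop:

```python
# Minimal sketch of the queue-drain contract relied on above.
import asyncio
import contextlib


async def consumer(queue: asyncio.Queue[str]) -> None:
    while True:
        event = await queue.get()
        print("render:", event)  # stand-in for the TUI renderer
        queue.task_done()        # join() unblocks only after this


async def main() -> None:
    queue: asyncio.Queue[str] = asyncio.Queue()
    task = asyncio.create_task(consumer(queue))
    for event in ["assistant-delta", "assistant-end", "spinner-stop"]:
        queue.put_nowait(event)
    await queue.join()           # all trailing events have been rendered
    task.cancel()                # safe: the queue is drained
    with contextlib.suppress(asyncio.CancelledError):
        await task


asyncio.run(main())
```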
klaude_code/tui/terminal/notifier.py CHANGED
@@ -64,9 +64,9 @@ class TerminalNotifier:
  return False

  output = resolve_stream(self.config.stream)
- if not self._supports_osc9(output):
+ if not self._supports_notification(output):
  log_debug(
- "Terminal notifier skipped: OSC 9 unsupported or not a TTY",
+ "Terminal notifier skipped: not a TTY",
  debug_type=DebugType.TERMINAL,
  )
  return False
@@ -74,27 +74,26 @@ class TerminalNotifier:
  payload = self._render_payload(notification)
  return self._emit(payload, output)

- def _render_payload(self, notification: Notification) -> str:
- title = _compact(notification.title)
- body = _compact(notification.body) if notification.body else None
- if body:
- return f"{title} - {body}"
- return title
+ def _render_payload(self, notification: Notification) -> tuple[str, str]:
+ """Return (title, body) for OSC 777 notification."""
+ body = _compact(notification.body) if notification.body else _compact(notification.title)
+ return ("klaude", body)

- def _emit(self, payload: str, output: TextIO) -> bool:
+ def _emit(self, payload: tuple[str, str], output: TextIO) -> bool:
  terminator = BEL if self.config.use_bel else ST
- seq = f"\033]9;{payload}{terminator}"
+ title, body = payload
+ seq = f"\033]777;notify;{title};{body}{terminator}"
  try:
  output.write(seq)
  output.flush()
- log_debug("Terminal notifier sent OSC 9 payload", debug_type=DebugType.TERMINAL)
+ log_debug("Terminal notifier sent OSC 777 payload", debug_type=DebugType.TERMINAL)
  return True
  except Exception as exc:
  log_debug(f"Terminal notifier send failed: {exc}", debug_type=DebugType.TERMINAL)
  return False

  @staticmethod
- def _supports_osc9(stream: TextIO) -> bool:
+ def _supports_notification(stream: TextIO) -> bool:
  if sys.platform == "win32":
  return False
  if not getattr(stream, "isatty", lambda: False)():
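The notifier now emits OSC 777 "notify" (the rxvt-unicode notification extension, also honored by terminals such as WezTerm) instead of OSC 9, and carries an explicit title/body pair. A minimal sketch of the sequence written to a plain TTY, outside the TerminalNotifier plumbing:

```python
# Minimal sketch of the OSC 777 notification sequence used above.
import sys

BEL = "\a"      # 0x07 terminator
ST = "\033\\"   # ESC \ string terminator


def notify(title: str, body: str, *, use_bel: bool = True) -> None:
    terminator = BEL if use_bel else ST
    sys.stdout.write(f"\033]777;notify;{title};{body}{terminator}")
    sys.stdout.flush()


if sys.stdout.isatty():
    notify("klaude", "Task finished")
```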
klaude_code/tui/terminal/selector.py CHANGED
@@ -111,7 +111,7 @@ def build_model_select_items(models: list[Any]) -> list[SelectItem[str]]:
  meta_str = " · ".join(meta_parts) if meta_parts else ""
  title: list[tuple[str, str]] = [
  ("class:meta", f"{model_idx:>{num_width}}. "),
- ("class:msg bold", first_line_prefix),
+ ("class:msg", first_line_prefix),
  ("class:msg dim", " → "),
  # Keep provider/model_id styling attribute-based (dim/bold) so that
  # the selector's highlight color can still override uniformly.
klaude_code/ui/terminal/title.py CHANGED
@@ -26,6 +26,8 @@ def update_terminal_title(model_name: str | None = None) -> None:
  """Update terminal title with folder name and optional model name."""
  folder_name = os.path.basename(os.getcwd())
  if model_name:
- set_terminal_title(f"{folder_name}: klaude {model_name}")
+ # Strip provider suffix (e.g., opus@openrouter -> opus)
+ model_alias = model_name.split("@")[0]
+ set_terminal_title(f"klaude [{model_alias}] · {folder_name}")
  else:
- set_terminal_title(f"{folder_name}: klaude")
+ set_terminal_title(f"klaude · {folder_name}")
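For reference, terminal titles are conventionally set with the OSC 0/2 escape; the sketch below shows what the new format produces, assuming set_terminal_title wraps that sequence (the implementation here is illustrative, not klaude_code's):

```python
# Sketch of the underlying title escape (OSC 0 sets icon name + window title).
import os
import sys


def set_title(title: str) -> None:
    sys.stdout.write(f"\033]0;{title}\a")
    sys.stdout.flush()


model_name = "opus@openrouter"          # example value
model_alias = model_name.split("@")[0]  # -> "opus"
set_title(f"klaude [{model_alias}] · {os.path.basename(os.getcwd())}")
```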
{klaude_code-2.9.1.dist-info → klaude_code-2.10.1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: klaude-code
- Version: 2.9.1
+ Version: 2.10.1
  Summary: Minimal code agent CLI
  Requires-Dist: anthropic>=0.66.0
  Requires-Dist: chardet>=5.2.0