aipa-cli 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. aipa_cli-0.1.0/.gitignore +6 -0
  2. aipa_cli-0.1.0/CHANGELOG.md +38 -0
  3. aipa_cli-0.1.0/LICENSE +21 -0
  4. aipa_cli-0.1.0/PKG-INFO +69 -0
  5. aipa_cli-0.1.0/README.md +44 -0
  6. aipa_cli-0.1.0/pyproject.toml +60 -0
  7. aipa_cli-0.1.0/src/aipriceaction_terminal/__init__.py +3 -0
  8. aipa_cli-0.1.0/src/aipriceaction_terminal/__main__.py +3 -0
  9. aipa_cli-0.1.0/src/aipriceaction_terminal/actions.py +73 -0
  10. aipa_cli-0.1.0/src/aipriceaction_terminal/agents/__init__.py +20 -0
  11. aipa_cli-0.1.0/src/aipriceaction_terminal/agents/agent.py +175 -0
  12. aipa_cli-0.1.0/src/aipriceaction_terminal/agents/callbacks.py +202 -0
  13. aipa_cli-0.1.0/src/aipriceaction_terminal/agents/config.py +35 -0
  14. aipa_cli-0.1.0/src/aipriceaction_terminal/agents/personas.py +175 -0
  15. aipa_cli-0.1.0/src/aipriceaction_terminal/agents/tools.py +152 -0
  16. aipa_cli-0.1.0/src/aipriceaction_terminal/app.py +97 -0
  17. aipa_cli-0.1.0/src/aipriceaction_terminal/bindings.py +25 -0
  18. aipa_cli-0.1.0/src/aipriceaction_terminal/chat.py +345 -0
  19. aipa_cli-0.1.0/src/aipriceaction_terminal/cli.py +54 -0
  20. aipa_cli-0.1.0/src/aipriceaction_terminal/cli_commands.py +51 -0
  21. aipa_cli-0.1.0/src/aipriceaction_terminal/settings_tab.py +76 -0
  22. aipa_cli-0.1.0/src/aipriceaction_terminal/theme.py +29 -0
  23. aipa_cli-0.1.0/src/aipriceaction_terminal/ticker_data.py +33 -0
  24. aipa_cli-0.1.0/src/aipriceaction_terminal/user_settings.py +27 -0
  25. aipa_cli-0.1.0/src/aipriceaction_terminal/utils.py +29 -0
  26. aipa_cli-0.1.0/src/aipriceaction_terminal/widgets/__init__.py +6 -0
  27. aipa_cli-0.1.0/src/aipriceaction_terminal/widgets/chat_input.py +179 -0
  28. aipa_cli-0.1.0/src/aipriceaction_terminal/widgets/ticker_select.py +168 -0
  29. aipa_cli-0.1.0/src/aipriceaction_terminal/workflows.py +172 -0
  30. aipa_cli-0.1.0/tests/conftest.py +117 -0
  31. aipa_cli-0.1.0/tests/openrouter_responses.py +230 -0
  32. aipa_cli-0.1.0/tests/test_app.py +154 -0
  33. aipa_cli-0.1.0/tests/test_chat.py +280 -0
  34. aipa_cli-0.1.0/tests/test_integration.py +115 -0
  35. aipa_cli-0.1.0/tests/test_thinking.py +243 -0
  36. aipa_cli-0.1.0/tests/test_tool_call_streaming.py +390 -0
  37. aipa_cli-0.1.0/tests/test_utils.py +76 -0
  38. aipa_cli-0.1.0/tests/test_workflows.py +207 -0
@@ -0,0 +1,6 @@
1
+ __pycache__/
2
+ *.py[cod]
3
+ *.egg-info/
4
+ dist/
5
+ .venv/
6
+ .pytest_cache/
@@ -0,0 +1,38 @@
1
+ # Changelog
2
+
3
+ All notable changes to this project will be documented in this file.
4
+
5
+ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
6
+ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
+
8
+ ## [0.1.0] - 2025-05-09
9
+
10
+ ### Added
11
+ - Add `aipriceaction-terminal` Textual TUI with chat, workflows, and ticker tabs
12
+ - Add TickerSelect autocomplete widget using textual-autocomplete
13
+ - Add `/exit` command, `/analyze` optional interval arg, and auto-focus chat input
14
+ - Add arrow up/down history navigation in chat input
15
+ - Add ChatInput widget with slash command autocomplete and history
16
+ - Add `/export` command to save AIContextBuilder output to markdown
17
+ - Add agents module for AI-powered chat with streaming and tab-switch fix
18
+ - Add thinking token detection with collapsible summary in chat
19
+ - Add OpenRouterChatOpenAI for reasoning token passthrough and stream thinking visibly
20
+ - Add collapsible thinking display with modal viewer in Chat tab
21
+ - Add CLI subcommands (`analyze`, `get-ohlcv-data`, `deep-research`) to `aipa` binary
22
+ - Add 42 pytest tests for `aipriceaction-terminal`
23
+ - Add integration tests with real LangChain message types
24
+ - Persist user settings to `~/.aipriceaction/settings.json`
25
+ - Show all thinking history with timestamps in ThinkingModal
26
+
27
+ ### Changed
28
+ - Reduce default ohlcv limit from 30 to 5 bars
29
+ - Replace mock-based tests with real LangChain message types and add integration tests
30
+ - Update data policy to fetch via tools
31
+ - Extract key bindings and action handlers from `app.py` into separate modules
32
+
33
+ ### Fixed
34
+ - Improve TickerSelect autocomplete dropdown size and ticker symbol ranking
35
+ - Select autocomplete on Enter instead of submitting when dropdown is open
36
+ - Compact ticker list output and buffer streaming tool calls
37
+ - Escape key closes thinking modal
38
+ - Fix ohlcv docstring default
aipa_cli-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Quan Hua
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,69 @@
1
+ Metadata-Version: 2.4
2
+ Name: aipa-cli
3
+ Version: 0.1.0
4
+ Summary: Terminal TUI for AI-powered ticker analysis
5
+ Project-URL: Homepage, https://github.com/quanhua92/aipriceaction
6
+ Project-URL: Repository, https://github.com/quanhua92/aipriceaction
7
+ Author-email: Quan Hua <quanhua92@gmail.com>
8
+ License-Expression: MIT
9
+ License-File: LICENSE
10
+ Classifier: Development Status :: 3 - Alpha
11
+ Classifier: Environment :: Console :: Curses
12
+ Classifier: Intended Audience :: Financial and Insurance Industry
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.13
16
+ Classifier: Topic :: Office/Business :: Financial
17
+ Requires-Python: >=3.13
18
+ Requires-Dist: aipriceaction
19
+ Requires-Dist: langchain-core
20
+ Requires-Dist: langchain-openai
21
+ Requires-Dist: langgraph
22
+ Requires-Dist: textual-autocomplete>=4.0.6
23
+ Requires-Dist: textual>=3.0.0
24
+ Description-Content-Type: text/markdown
25
+
26
+ # AIPA Terminal
27
+
28
+ **Live site:** [aipriceaction.com](https://aipriceaction.com) | **GitHub:** [aipriceaction](https://github.com/quanhua92/aipriceaction) | **Frontend:** [aipriceaction-web](https://github.com/quanhua92/aipriceaction-web) | **Docker image:** [`quanhua92/aipriceaction:latest`](https://hub.docker.com/r/quanhua92/aipriceaction) | **Python SDK:** [`aipriceaction` on PyPI](https://pypi.org/project/aipriceaction/) | **AIPA Terminal:** [`aipa-cli` on PyPI](https://pypi.org/project/aipa-cli/)
29
+
30
+ Textual-based terminal interface for AI-powered ticker analysis. Features streaming chat with thinking/reasoning display, autocomplete, slash commands, and workflow tabs.
31
+
32
+ ## Install
33
+
34
+ ```bash
35
+ # Run directly (no install)
36
+ uvx aipa-cli
37
+
38
+ # Or install as a standalone tool
39
+ uv tool install aipa-cli
40
+ ```
41
+
42
+ ## Requirements
43
+
44
+ - Python 3.13+
45
+ - An OpenAI-compatible API key (`OPENAI_API_KEY`)
46
+ - Optional: set `OPENAI_BASE_URL` for custom providers like OpenRouter
47
+
48
+ ## Usage
49
+
50
+ ```
51
+ aipa # Launch the TUI
52
+ aipa analyze # Run ticker analysis from CLI
53
+ aipa get-ohlcv-data # Fetch OHLCV data from CLI
54
+ aipa deep-research # Run deep research from CLI
55
+ ```
56
+
57
+ ### TUI
58
+
59
+ The interface has three tabs:
60
+
61
+ - **Chat** — AI-powered chat with streaming responses, thinking/reasoning display, slash commands (`/analyze`, `/export`, `/clear`, `/exit`), and arrow-key history navigation
62
+ - **Workflows** — Structured analysis forms for ticker analysis and deep research
63
+ - **Tickers** — Browse and search available tickers
64
+
65
+ Press `Ctrl+O` in the Chat tab to view thinking/reasoning history.
66
+
67
+ ## License
68
+
69
+ MIT
@@ -0,0 +1,44 @@
1
+ # AIPA Terminal
2
+
3
+ **Live site:** [aipriceaction.com](https://aipriceaction.com) | **GitHub:** [aipriceaction](https://github.com/quanhua92/aipriceaction) | **Frontend:** [aipriceaction-web](https://github.com/quanhua92/aipriceaction-web) | **Docker image:** [`quanhua92/aipriceaction:latest`](https://hub.docker.com/r/quanhua92/aipriceaction) | **Python SDK:** [`aipriceaction` on PyPI](https://pypi.org/project/aipriceaction/) | **AIPA Terminal:** [`aipa-cli` on PyPI](https://pypi.org/project/aipa-cli/)
4
+
5
+ Textual-based terminal interface for AI-powered ticker analysis. Features streaming chat with thinking/reasoning display, autocomplete, slash commands, and workflow tabs.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ # Run directly (no install)
11
+ uvx aipa-cli
12
+
13
+ # Or install as a standalone tool
14
+ uv tool install aipa-cli
15
+ ```
16
+
17
+ ## Requirements
18
+
19
+ - Python 3.13+
20
+ - An OpenAI-compatible API key (`OPENAI_API_KEY`)
21
+ - Optional: set `OPENAI_BASE_URL` for custom providers like OpenRouter
22
+
23
+ ## Usage
24
+
25
+ ```
26
+ aipa # Launch the TUI
27
+ aipa analyze # Run ticker analysis from CLI
28
+ aipa get-ohlcv-data # Fetch OHLCV data from CLI
29
+ aipa deep-research # Run deep research from CLI
30
+ ```
31
+
32
+ ### TUI
33
+
34
+ The interface has three tabs:
35
+
36
+ - **Chat** — AI-powered chat with streaming responses, thinking/reasoning display, slash commands (`/analyze`, `/export`, `/clear`, `/exit`), and arrow-key history navigation
37
+ - **Workflows** — Structured analysis forms for ticker analysis and deep research
38
+ - **Tickers** — Browse and search available tickers
39
+
40
+ Press `Ctrl+O` in the Chat tab to view thinking/reasoning history.
41
+
42
+ ## License
43
+
44
+ MIT
@@ -0,0 +1,60 @@
1
+ [project]
2
+ name = "aipa-cli"
3
+ dynamic = ["version"]
4
+ description = "Terminal TUI for AI-powered ticker analysis"
5
+ authors = [
6
+ {name = "Quan Hua", email = "quanhua92@gmail.com"},
7
+ ]
8
+ license = "MIT"
9
+ readme = "README.md"
10
+ requires-python = ">=3.13"
11
+ dependencies = [
12
+ "aipriceaction",
13
+ "textual>=3.0.0",
14
+ "textual-autocomplete>=4.0.6",
15
+ "langchain-core",
16
+ "langchain-openai",
17
+ "langgraph",
18
+ ]
19
+ classifiers = [
20
+ "Development Status :: 3 - Alpha",
21
+ "Environment :: Console :: Curses",
22
+ "Intended Audience :: Financial and Insurance Industry",
23
+ "License :: OSI Approved :: MIT License",
24
+ "Programming Language :: Python :: 3",
25
+ "Programming Language :: Python :: 3.13",
26
+ "Topic :: Office/Business :: Financial",
27
+ ]
28
+
29
+ [project.urls]
30
+ Homepage = "https://github.com/quanhua92/aipriceaction"
31
+ Repository = "https://github.com/quanhua92/aipriceaction"
32
+
33
+ [project.scripts]
34
+ aipa = "aipriceaction_terminal.app:main"
35
+
36
+ [build-system]
37
+ requires = ["hatchling"]
38
+ build-backend = "hatchling.build"
39
+
40
+ [tool.hatch.version]
41
+ path = "src/aipriceaction_terminal/__init__.py"
42
+
43
+ [tool.hatch.build.targets.wheel]
44
+ packages = ["src/aipriceaction_terminal"]
45
+
46
+ [tool.hatch.build.targets.sdist]
47
+ include = ["src/aipriceaction_terminal", "tests", "pyproject.toml", "README.md", "LICENSE", "CHANGELOG.md"]
48
+
49
+ [dependency-groups]
50
+ dev = [
51
+ "pytest>=8.0",
52
+ "pytest-asyncio>=0.24",
53
+ ]
54
+
55
+ [tool.pytest.ini_options]
56
+ testpaths = ["tests"]
57
+ asyncio_mode = "auto"
58
+
59
+ [tool.uv.sources]
60
+ aipriceaction = { workspace = true }
@@ -0,0 +1,3 @@
1
"""AIPriceAction Terminal - TUI chat interface for ticker analysis."""

# Single source of truth for the package version; hatchling reads it via
# [tool.hatch.version] path = "src/aipriceaction_terminal/__init__.py".
__version__ = "0.1.0"
@@ -0,0 +1,3 @@
1
# Entry point for ``python -m aipriceaction_terminal``: delegate straight
# to the CLI runner (the installed ``aipa`` script uses app:main instead).
from .cli import run

run()
@@ -0,0 +1,73 @@
1
+ """Action handlers for AIPriceActionApp."""
2
+
3
+ import time
4
+
5
+ from textual.widgets import TabbedContent, Input, Select, Button, TextArea
6
+
7
+
8
class AppActions:
    """Mixin providing all action_* methods for AIPriceActionApp.

    Intended to be mixed into a Textual ``App`` subclass: it relies on
    ``self.query_one``, ``self.screen_stack``, ``self.set_focus``,
    ``self.focused``, ``self.exit`` and ``self.notify`` from the host app.
    """

    # time.monotonic() timestamp of the last ctrl+q press; used by
    # action_confirm_quit to implement "press twice within 2s to quit".
    _quit_requested_at: float = 0.0

    def action_switch_tab(self, tab_id: str) -> None:
        """Activate the top-level tab with the given id."""
        tabs = self.query_one(TabbedContent)
        tabs.active = tab_id

    def action_focus_none(self) -> None:
        """Blur any focused widget, or dismiss a modal if one is showing."""
        # Local import, presumably to avoid an import cycle with chat.py
        # (which imports app-level modules) -- TODO confirm.
        from .chat import ThinkingModal

        if isinstance(self.screen_stack[-1], ThinkingModal):
            self.pop_screen()
            return
        self.set_focus(None)

    def action_focus_first_input(self) -> None:
        """Focus the first Input or Select in the active tab (respects nested tabs)."""
        # Let Enter pass through to widgets that handle it themselves.
        if isinstance(self.focused, (Input, Select, Button, TextArea)):
            return

        tabs = self.query_one(TabbedContent)
        # FIX: DOMQuery.first() raises NoMatches rather than returning None
        # (and an empty ``tabs.active`` makes "TabPane#" an invalid selector),
        # so the previous ``if active_pane is None`` guard was dead code and
        # the action could crash. Guard with try/except instead.
        try:
            active_pane = tabs.query(f"TabPane#{tabs.active}").first()
        except Exception:
            return

        # Find the innermost active TabbedContent within this pane.
        container = active_pane
        try:
            nested = active_pane.query_one("TabbedContent")
            inner_pane = nested.query(f"TabPane#{nested.active}").first()
            if inner_pane:
                container = inner_pane
        except Exception:
            pass

        # Prefer an Input; fall back to a Select; give up quietly otherwise.
        try:
            container.query(Input).first().focus()
            return
        except Exception:
            pass
        try:
            container.query(Select).first().focus()
        except Exception:
            pass

    def action_confirm_quit(self) -> None:
        """Quit on second press within 2 seconds, otherwise show warning."""
        now = time.monotonic()
        if now - self._quit_requested_at < 2.0:
            self.exit()
        else:
            self._quit_requested_at = now
            self.notify("Press ctrl+q again to quit", severity="warning")

    def action_show_help(self) -> None:
        """Show a toast listing the global keyboard shortcuts."""
        self.app.notify(
            "1-6: Switch tabs | ctrl+q: Quit | "
            "esc: Back | enter: Focus input | "
            "Chat: /help for commands",
            title="Keyboard Shortcuts",
        )
@@ -0,0 +1,20 @@
1
+ """Agents module for AI-powered chat in the terminal TUI."""
2
+
3
+ from .agent import AgentSession
4
+ from .callbacks import StreamCallbackHandler, StreamEvent, StreamEventType
5
+ from .config import AgentConfig
6
+ from .personas import Persona, PersonaRegistry, get_default_persona
7
+ from .tools import ToolRegistry, get_default_tools
8
+
9
+ __all__ = [
10
+ "AgentSession",
11
+ "AgentConfig",
12
+ "Persona",
13
+ "PersonaRegistry",
14
+ "get_default_persona",
15
+ "ToolRegistry",
16
+ "get_default_tools",
17
+ "StreamCallbackHandler",
18
+ "StreamEvent",
19
+ "StreamEventType",
20
+ ]
@@ -0,0 +1,175 @@
1
+ """Agent session: wraps LangChain create_agent with streaming and retry."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import asyncio
6
+ from collections.abc import AsyncIterator
7
+ from typing import TYPE_CHECKING, Any
8
+
9
+ from langchain.agents import create_agent
10
+ from langchain_core.messages import AIMessageChunk
11
+ from langchain_openai import ChatOpenAI
12
+ from langgraph.checkpoint.memory import MemorySaver
13
+
14
+ from .callbacks import StreamCallbackHandler, StreamEvent, StreamEventType
15
+ from .config import AgentConfig, TRANSIENT_ERROR_KEYWORDS
16
+
17
+ if TYPE_CHECKING:
18
+ from .personas import Persona
19
+ from .tools import ToolRegistry
20
+
21
+
22
class OpenRouterChatOpenAI(ChatOpenAI):
    """ChatOpenAI variant that keeps OpenRouter reasoning tokens.

    OpenRouter reasoning models (e.g. nvidia/nemotron-3-nano-omni-reasoning)
    stream a ``reasoning`` string inside each delta. LangChain's stock
    ``_convert_delta_to_message_chunk`` drops that field, so this subclass
    post-processes each generation chunk and copies the reasoning text into
    ``AIMessageChunk.additional_kwargs["reasoning_content"]``.
    """

    def _convert_chunk_to_generation_chunk(
        self,
        chunk: dict[str, Any],
        default_chunk_class: type,
        base_generation_info: dict[str, Any] | None,
    ) -> Any:
        """Delegate to the parent, then graft reasoning onto the message chunk."""
        generation = super()._convert_chunk_to_generation_chunk(
            chunk, default_chunk_class, base_generation_info
        )
        if generation is None:
            return None

        # Pull the reasoning text out of the raw delta before it is lost.
        choice_list = chunk.get("choices", [])
        delta = choice_list[0].get("delta") if choice_list else None
        if not isinstance(delta, dict):
            return generation

        reasoning_text = delta.get("reasoning")
        if reasoning_text and isinstance(generation.message, AIMessageChunk):
            generation.message.additional_kwargs["reasoning_content"] = reasoning_text

        return generation
55
+
56
+
57
class AgentSession:
    """Manages a single agent session with memory, streaming, and retry.

    Wraps a LangChain ``create_agent`` graph with an in-memory checkpointer
    (``MemorySaver``) so conversation history persists across ``stream``
    calls. All calls share one thread id, so history is per-session.
    """

    def __init__(
        self,
        config: AgentConfig,
        persona: Persona | None = None,
        tools: ToolRegistry | None = None,
    ) -> None:
        """Create a session.

        Args:
            config: Connection/model settings plus retry tuning.
            persona: System-prompt persona; defaults per ``config.lang``.
            tools: Tool registry; defaults per ``config.lang``.
        """
        # Local imports, presumably to avoid a circular import with the
        # package __init__ which re-exports AgentSession -- TODO confirm.
        from .personas import get_default_persona
        from .tools import get_default_tools

        self.config = config
        self.persona = persona or get_default_persona(config.lang)
        self.tools = tools or get_default_tools(config.lang)
        self._checkpointer = MemorySaver()
        # Single fixed thread id: the whole session is one conversation.
        self._thread_id = "terminal-default"
        self._agent = self._build_agent()

    def _build_agent(self) -> object:
        """Build the LangChain agent from current config/persona/tools."""
        # extra_body asks OpenRouter to enable reasoning output; the
        # OpenRouterChatOpenAI subclass surfaces those tokens in
        # additional_kwargs["reasoning_content"].
        llm = OpenRouterChatOpenAI(
            api_key=self.config.api_key,
            base_url=self.config.base_url,
            model=self.config.model,
            extra_body={"reasoning": {"enabled": True}},
        )
        system_prompt = self.persona.build_system_prompt(self.config.lang)
        lc_tools = self.tools.get_tools()

        return create_agent(
            llm,
            lc_tools,
            checkpointer=self._checkpointer,
            system_prompt=system_prompt,
        )

    async def stream(
        self,
        message: str,
        *,
        callback: object | None = None,
    ) -> AsyncIterator[StreamEvent]:
        """Stream an agent response as StreamEvents.

        Built-in retry with exponential backoff on transient errors.

        Args:
            message: User message to send to the agent.
            callback: Optional callable invoked with each event before it is
                yielded; it is awaited, so it is assumed to be async.

        Yields:
            StreamEvent items. The stream always ends with a DONE event;
            on failure an ERROR event precedes the final DONE.
        """
        handler = StreamCallbackHandler(
            show_tool_calls=True,
            show_tool_results=False,
        )

        last_error: Exception | None = None
        for attempt in range(self.config.max_retries):
            try:
                input_dict = {"messages": [{"role": "user", "content": message}]}
                config = {"configurable": {"thread_id": self._thread_id}}

                # stream_mode="messages" yields raw LangGraph events that the
                # handler translates into zero or more StreamEvents each.
                async for lc_event in self._agent.astream(
                    input_dict,
                    config=config,
                    stream_mode="messages",
                ):
                    for stream_event in handler.process_agent_event(lc_event):
                        if callback:
                            await callback(stream_event)
                        yield stream_event

                yield StreamEvent(type=StreamEventType.DONE)
                return

            except Exception as e:
                last_error = e
                err_str = str(e).lower()
                # Transient failures are detected by keyword substring match
                # against the lowercased error message.
                is_transient = any(kw in err_str for kw in TRANSIENT_ERROR_KEYWORDS)

                if is_transient and attempt < self.config.max_retries - 1:
                    # Exponential backoff, capped at max_retry_delay.
                    delay = self.config.base_retry_delay * (2 ** attempt)
                    delay = min(delay, self.config.max_retry_delay)
                    yield StreamEvent(
                        type=StreamEventType.ERROR,
                        content=f"Retry {attempt + 1}/{self.config.max_retries}: {type(e).__name__}",
                    )
                    await asyncio.sleep(delay)
                else:
                    break

        # All retries exhausted
        yield StreamEvent(
            type=StreamEventType.ERROR,
            content=f"Error: {last_error}",
        )
        yield StreamEvent(type=StreamEventType.DONE)

    async def run(self, message: str) -> str:
        """Convenience wrapper: collect tokens and return the final answer."""
        parts: list[str] = []
        async for event in self.stream(message):
            if event.type == StreamEventType.TOKEN:
                parts.append(event.content)

        # Return the longest part (final answer after potential re-generation)
        return max(parts, key=len) if parts else ""

    def switch_persona(self, persona: Persona) -> None:
        """Switch to a different persona, clearing conversation history."""
        self.persona = persona
        self._checkpointer = MemorySaver()
        self._agent = self._build_agent()

    def clear_history(self) -> None:
        """Clear conversation history (start fresh session)."""
        self._checkpointer = MemorySaver()
        self._agent = self._build_agent()

    def rebuild(self) -> None:
        """Rebuild agent after config change (language/model)."""
        # A fresh checkpointer is created too, so rebuilding also drops history.
        self._checkpointer = MemorySaver()
        self._agent = self._build_agent()