appkit_assistant-0.8.0-py3-none-any.whl → appkit_assistant-0.10.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- appkit_assistant/backend/models.py +93 -2
- appkit_assistant/backend/processors/openai_responses_processor.py +16 -11
- appkit_assistant/backend/repositories.py +228 -1
- appkit_assistant/backend/system_prompt_cache.py +161 -0
- appkit_assistant/components/__init__.py +2 -4
- appkit_assistant/components/mcp_server_dialogs.py +7 -2
- appkit_assistant/components/message.py +3 -3
- appkit_assistant/components/system_prompt_editor.py +78 -0
- appkit_assistant/components/thread.py +8 -16
- appkit_assistant/components/threadlist.py +42 -29
- appkit_assistant/components/tools_modal.py +1 -1
- appkit_assistant/configuration.py +1 -0
- appkit_assistant/state/system_prompt_state.py +179 -0
- appkit_assistant/state/thread_list_state.py +271 -0
- appkit_assistant/state/thread_state.py +525 -608
- {appkit_assistant-0.8.0.dist-info → appkit_assistant-0.10.0.dist-info}/METADATA +2 -2
- appkit_assistant-0.10.0.dist-info/RECORD +31 -0
- appkit_assistant-0.8.0.dist-info/RECORD +0 -27
- {appkit_assistant-0.8.0.dist-info → appkit_assistant-0.10.0.dist-info}/WHEEL +0 -0
--- appkit_assistant/state/thread_state.py (0.8.0)
+++ appkit_assistant/state/thread_state.py (0.10.0)
@@ -1,11 +1,19 @@
-
-
+"""Thread state management for the assistant.
+
+This module contains ThreadState which manages the current active thread:
+- Creating new threads (not persisted until first response)
+- Loading threads from database when selected from list
+- Processing messages and handling responses
+- Persisting thread data to database
+- Notifying ThreadListState when a new thread is created
+
+See thread_list_state.py for ThreadListState which manages the thread list sidebar.
+"""
+
 import logging
-import re
 import uuid
 from collections.abc import AsyncGenerator
 from enum import StrEnum
-from re import Match
 from typing import Any
 
 import reflex as rx
@@ -23,75 +31,12 @@ from appkit_assistant.backend.models import (
     ThreadModel,
     ThreadStatus,
 )
-from appkit_assistant.backend.repositories import MCPServerRepository
+from appkit_assistant.backend.repositories import MCPServerRepository, ThreadRepository
+from appkit_assistant.state.thread_list_state import ThreadListState
+from appkit_user.authentication.states import UserSession
 
 logger = logging.getLogger(__name__)
 
-MERMAID_BLOCK_PATTERN = re.compile(
-    r"```mermaid\s*\r?\n(.*?)```", re.IGNORECASE | re.DOTALL
-)
-BRACKET_PAIRS: dict[str, str] = {
-    "[": "]",
-    "(": ")",
-    "{": "}",
-    "<": ">",
-}
-
-
-def _escape_mermaid_label_newlines(block: str) -> str:
-    """Convert literal newlines inside node labels to escaped sequences.
-
-    Ensures Mermaid labels that previously used ``\n`` survive JSON roundtrips
-    where sequences were converted into raw newlines.
-    """
-
-    if "\n" not in block:
-        return block
-
-    result: list[str] = []
-    stack: list[str] = []
-    for char in block:
-        if stack:
-            if char == "\r":
-                continue
-            if char == "\n":
-                result.append("\\n")
-                continue
-            if char == stack[-1]:
-                stack.pop()
-                result.append(char)
-                continue
-            if char in BRACKET_PAIRS:
-                stack.append(BRACKET_PAIRS[char])
-                result.append(char)
-                continue
-            result.append(char)
-            continue
-
-        if char in BRACKET_PAIRS:
-            stack.append(BRACKET_PAIRS[char])
-            result.append(char)
-
-    return "".join(result)
-
-
-def _rehydrate_mermaid_text(text: str) -> str:
-    """Restore Mermaid code blocks by escaping label newlines when needed."""
-
-    if "```mermaid" not in text.lower():
-        return text
-
-    def _replace(match: Match[str]) -> str:
-        code_block = match.group(1)
-        repaired = _escape_mermaid_label_newlines(code_block)
-        return f"```mermaid\n{repaired}```"
-
-    try:
-        return MERMAID_BLOCK_PATTERN.sub(_replace, text)
-    except Exception as exc:  # pragma: no cover - defensive
-        logger.debug("Failed to rehydrate mermaid text: %s", exc)
-        return text
-
 
 class ThinkingType(StrEnum):
     REASONING = "reasoning"
@@ -116,22 +61,33 @@ class Thinking(BaseModel):
 
 
 class ThreadState(rx.State):
+    """State for managing the current active thread.
+
+    Responsibilities:
+    - Managing the current thread data and messages
+    - Creating new empty threads
+    - Loading threads from database when selected
+    - Processing messages and streaming responses
+    - Persisting thread data to database (incrementally)
+    - Notifying ThreadListState when new threads are created
+    """
+
     _thread: ThreadModel = ThreadModel(thread_id=str(uuid.uuid4()), prompt="")
     ai_models: list[AIModel] = []
     selected_model: str = ""
     processing: bool = False
     messages: list[Message] = []
     prompt: str = ""
-    suggestions: list[Suggestion] = [
+    suggestions: list[Suggestion] = []
 
     # Chunk processing state
-    current_chunks: list[Chunk] = []
     thinking_items: list[Thinking] = []  # Consolidated reasoning and tool calls
     image_chunks: list[Chunk] = []
     show_thinking: bool = False
     thinking_expanded: bool = False
     current_activity: str = ""
     current_reasoning_session: str = ""  # Track current reasoning session
+    current_tool_session: str = ""  # Track current tool session when tool_id missing
 
     # MCP Server tool support state
     selected_mcp_servers: list[MCPServer] = []
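
A note on the `_thread` field kept above: in Reflex, underscore-prefixed attributes on a state class are backend-only vars, held on the server and never serialized to the client, which is why the full `ThreadModel` can live there while only the lightweight display fields are synced. A minimal sketch of the distinction (illustrative names, not from the package):

```python
import reflex as rx


class SketchState(rx.State):
    # Public var: synced to the browser and bindable in components.
    selected_model: str = ""

    # Backend var (leading underscore): server-side only, never sent to
    # the client -- a good home for a full domain model like ThreadModel.
    _counter: int = 0

    @rx.event
    def bump(self) -> None:
        self._counter += 1
        self.selected_model = f"model-{self._counter}"
```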
@@ -143,34 +99,18 @@
     # Thread list integration
     with_thread_list: bool = False
 
-
-
-
-        self.ai_models = model_manager.get_all_models()
-        self.selected_model = model_manager.get_default_model()
+    # Internal state
+    _initialized: bool = False
+    _current_user_id: str = ""
 
-
-
-
-            prompt="",
-            messages=[],
-            state=ThreadStatus.NEW,
-            ai_model=self.selected_model,
-            active=True,
-        )
-        self.messages = []
-        logger.debug("Initialized thread state: %s", self._thread)
+    # -------------------------------------------------------------------------
+    # Computed properties
+    # -------------------------------------------------------------------------
 
-
-
-
-        self.
-        self.selected_model = thread.ai_model
-        logger.debug("Set current thread: %s", thread.thread_id)
-
-    def set_prompt(self, prompt: str) -> None:
-        """Set the current prompt."""
-        self.prompt = prompt
+    @rx.var
+    def get_selected_model(self) -> str:
+        """Get the currently selected model ID."""
+        return self.selected_model
 
     @rx.var
     def has_ai_models(self) -> bool:
@@ -180,15 +120,15 @@
     @rx.var
     def has_suggestions(self) -> bool:
         """Check if there are any suggestions."""
-        return
+        return len(self.suggestions) > 0
 
     @rx.var
-    def
-    """
-        return self.
+    def has_thinking_content(self) -> bool:
+        """Check if there are any thinking items to display."""
+        return len(self.thinking_items) > 0
 
     @rx.var
-    def
+    def selected_model_supports_tools(self) -> bool:
         """Check if the currently selected model supports tools."""
         if not self.selected_model:
             return False
@@ -196,7 +136,7 @@
         return model.supports_tools if model else False
 
     @rx.var
-    def
+    def get_unique_reasoning_sessions(self) -> list[str]:
         """Get unique reasoning session IDs."""
         return [
             item.id
@@ -205,7 +145,7 @@
         ]
 
     @rx.var
-    def
+    def get_unique_tool_calls(self) -> list[str]:
         """Get unique tool call IDs."""
         return [
             item.id
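
The renamed properties above (`has_suggestions`, `has_thinking_content`, `selected_model_supports_tools`, `get_unique_*`) are all `@rx.var` computed vars: values derived from state that re-evaluate when their inputs change and can be referenced directly in the UI. A minimal sketch with hypothetical names:

```python
import reflex as rx


class ListState(rx.State):
    items: list[str] = []

    @rx.var
    def has_items(self) -> bool:
        """Recomputed whenever `items` changes."""
        return len(self.items) > 0


def badge() -> rx.Component:
    # Computed vars are used in components just like plain state vars.
    return rx.cond(ListState.has_items, rx.text("has items"), rx.text("empty"))
```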
@@ -214,21 +154,174 @@
         ]
 
     @rx.var
-    def
+    def get_last_assistant_message_text(self) -> str:
         """Get the text of the last assistant message in the conversation."""
-        for
-            if
-                return
+        for message in reversed(self.messages):
+            if message.type == MessageType.ASSISTANT:
+                return message.text
         return ""
 
-
-
-
-
+    # -------------------------------------------------------------------------
+    # Initialization and thread management
+    # -------------------------------------------------------------------------
+
+    @rx.event
+    def initialize(self) -> None:
+        """Initialize the state with models and a new empty thread.
+
+        Only initializes once per user session. Resets when user changes.
+        """
+        # If already initialized, skip
+        if self._initialized:
+            logger.debug("Thread state already initialized")
+            return
+
+        model_manager = ModelManager()
+        self.ai_models = model_manager.get_all_models()
+        self.selected_model = model_manager.get_default_model()
+
+        self._thread = ThreadModel(
+            thread_id=str(uuid.uuid4()),
+            title="Neuer Chat",
+            prompt="",
+            messages=[],
+            state=ThreadStatus.NEW,
+            ai_model=self.selected_model,
+            active=True,
+        )
+        self.messages = []
+        self.thinking_items = []
+        self.image_chunks = []
+        self.prompt = ""
+        self.show_thinking = False
+        self._initialized = True
+        logger.debug("Initialized thread state: %s", self._thread.thread_id)
+
+    @rx.event
+    def new_thread(self) -> None:
+        """Create a new empty thread (not persisted, not in list yet).
+
+        Called when user clicks "New Chat" or when active thread is deleted.
+        If current thread is already empty/new with no messages, does nothing.
+        """
+        # Ensure state is initialized first
+        if not self._initialized:
+            self.initialize()
+
+        # Don't create new if current thread is already empty
+        if self._thread.state == ThreadStatus.NEW and not self.messages:
+            logger.debug("Thread already empty, skipping new_thread")
+            return
+
+        self._thread = ThreadModel(
+            thread_id=str(uuid.uuid4()),
+            title="Neuer Chat",
+            prompt="",
+            messages=[],
+            state=ThreadStatus.NEW,
+            ai_model=self.selected_model or ModelManager().get_default_model(),
+            active=True,
+        )
+        self.messages = []
+        self.thinking_items = []
+        self.image_chunks = []
+        self.prompt = ""
+        self.show_thinking = False
+        logger.debug("Created new empty thread: %s", self._thread.thread_id)
+
+    @rx.event
+    def set_thread(self, thread: ThreadModel) -> None:
+        """Set the current thread model (internal use)."""
+        self._thread = thread
+        self.messages = thread.messages
+        self.selected_model = thread.ai_model
+        self.thinking_items = []
+        self.prompt = ""
+        logger.debug("Set current thread: %s", thread.thread_id)
+
+    @rx.event(background=True)
+    async def load_thread(self, thread_id: str) -> AsyncGenerator[Any, Any]:
+        """Load and select a thread by ID from database.
+
+        Called when user clicks on a thread in the sidebar.
+        Loads full thread data and updates both ThreadState and ThreadListState.
+
+        Args:
+            thread_id: The ID of the thread to load.
+        """
+        async with self:
+            user_session: UserSession = await self.get_state(UserSession)
+            is_authenticated = await user_session.is_authenticated
+            user_id = user_session.user.user_id if user_session.user else None
+
+            # Set loading state in ThreadListState
+            threadlist_state: ThreadListState = await self.get_state(ThreadListState)
+            threadlist_state.loading_thread_id = thread_id
+        yield
+
+        if not is_authenticated or not user_id:
+            async with self:
+                threadlist_state: ThreadListState = await self.get_state(
+                    ThreadListState
+                )
+                threadlist_state.loading_thread_id = ""
+            return
+
+        try:
+            full_thread = await ThreadRepository.get_thread_by_id(thread_id, user_id)
+
+            if not full_thread:
+                logger.warning("Thread %s not found in database", thread_id)
+                async with self:
+                    threadlist_state: ThreadListState = await self.get_state(
+                        ThreadListState
+                    )
+                    threadlist_state.loading_thread_id = ""
+                return
+
+            # Mark all messages as done (loaded from DB)
+            for msg in full_thread.messages:
+                msg.done = True
+
+            async with self:
+                # Update self with loaded thread
+                self._thread = full_thread
+                self.messages = full_thread.messages
+                self.selected_model = full_thread.ai_model
+                self.thinking_items = []
+                self.prompt = ""
+
+                # Update active state in ThreadListState
+                threadlist_state: ThreadListState = await self.get_state(
+                    ThreadListState
+                )
+                threadlist_state.threads = [
+                    ThreadModel(
+                        **{**t.model_dump(), "active": t.thread_id == thread_id}
+                    )
+                    for t in threadlist_state.threads
+                ]
+                threadlist_state.active_thread_id = thread_id
+                threadlist_state.loading_thread_id = ""
+
+            logger.debug("Loaded thread: %s", thread_id)
+
+        except Exception as e:
+            logger.error("Error loading thread %s: %s", thread_id, e)
+            async with self:
+                threadlist_state: ThreadListState = await self.get_state(
+                    ThreadListState
+                )
+                threadlist_state.loading_thread_id = ""
+
+    # -------------------------------------------------------------------------
+    # Prompt and model management
+    # -------------------------------------------------------------------------
 
     @rx.event
-    def
-
+    def set_prompt(self, prompt: str) -> None:
+        """Set the current prompt."""
+        self.prompt = prompt
 
     @rx.event
     def set_suggestions(self, suggestions: list[Suggestion]) -> None:
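
`load_thread` above uses Reflex's background-event pattern: `@rx.event(background=True)` lets the handler run long I/O without blocking other events, state may only be mutated inside `async with self` blocks, and `yield` pushes the intermediate state (here, the loading indicator) to the client before the slow work starts. A stripped-down sketch of the same shape, with hypothetical names and `asyncio.sleep` standing in for the repository call:

```python
import asyncio

import reflex as rx


class LoaderState(rx.State):
    loading: bool = False
    result: str = ""

    @rx.event(background=True)
    async def load(self):
        # Background handlers must take the state lock to mutate state.
        async with self:
            self.loading = True
        yield  # flush the "loading" flag to the client before the slow call

        data = await asyncio.sleep(1.0, result="loaded")  # stand-in for DB I/O

        async with self:
            self.result = data
            self.loading = False
```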
@@ -236,30 +329,70 @@
         self.suggestions = suggestions
 
     @rx.event
-    def
-    """Set
+    def set_selected_model(self, model_id: str) -> None:
+        """Set the selected model."""
+        self.selected_model = model_id
+        self._thread.ai_model = model_id
 
-
-
+    @rx.event
+    def set_with_thread_list(self, with_thread_list: bool) -> None:
+        """Set whether thread list integration is enabled."""
+        self.with_thread_list = with_thread_list
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # -------------------------------------------------------------------------
+    # UI state management
+    # -------------------------------------------------------------------------
+
+    @rx.event
+    def toggle_thinking_expanded(self) -> None:
+        """Toggle the expanded state of the thinking section."""
+        self.thinking_expanded = not self.thinking_expanded
+
+    # -------------------------------------------------------------------------
+    # MCP Server tool support
+    # -------------------------------------------------------------------------
+
+    @rx.event
+    async def load_mcp_servers(self) -> None:
+        """Load available MCP servers from the database."""
+        self.available_mcp_servers = await MCPServerRepository.get_all()
+
+    @rx.event
+    def toogle_tools_modal(self, show: bool) -> None:
+        """Set the visibility of the tools modal."""
+        self.show_tools_modal = show
+
+    @rx.event
+    def toggle_mcp_server_selection(self, server_id: int, selected: bool) -> None:
+        """Toggle MCP server selection in the modal."""
+        self.server_selection_state[server_id] = selected
+        if selected and server_id not in self.temp_selected_mcp_servers:
+            self.temp_selected_mcp_servers.append(server_id)
+        elif not selected and server_id in self.temp_selected_mcp_servers:
+            self.temp_selected_mcp_servers.remove(server_id)
+
+    @rx.event
+    def apply_mcp_server_selection(self) -> None:
+        """Apply the temporary MCP server selection."""
+        self.selected_mcp_servers = [
+            server
+            for server in self.available_mcp_servers
+            if server.id in self.temp_selected_mcp_servers
+        ]
+        self.show_tools_modal = False
+
+    @rx.event
+    def is_mcp_server_selected(self, server_id: int) -> bool:
+        """Check if an MCP server is selected."""
+        return server_id in self.temp_selected_mcp_servers
+
+    # -------------------------------------------------------------------------
+    # Clear/reset
+    # -------------------------------------------------------------------------
 
     @rx.event
     def clear(self) -> None:
+        """Clear the current thread messages (keeps thread ID)."""
         self._thread.messages = []
         self._thread.state = ThreadStatus.NEW
         self._thread.ai_model = ModelManager().get_default_model()
@@ -268,33 +401,87 @@
         self.prompt = ""
         self.messages = []
         self.selected_mcp_servers = []
-        self.
-        self.thinking_items = []  # Clear thinking items only on explicit clear
+        self.thinking_items = []
         self.image_chunks = []
         self.show_thinking = False
 
+    # -------------------------------------------------------------------------
+    # Message processing
+    # -------------------------------------------------------------------------
+
     @rx.event(background=True)
-    async def
-
+    async def submit_message(self) -> AsyncGenerator[Any, Any]:
+        """Submit a message and process the response."""
+        await self._process_message()
 
+        yield rx.call_script("""
+            const textarea = document.getElementById('composer-area');
+            if (textarea) {
+                textarea.value = '';
+                textarea.style.height = 'auto';
+                textarea.style.height = textarea.scrollHeight + 'px';
+            }
+        """)
+
+    async def _process_message(self) -> None:
+        """Process the current message and stream the response."""
+        logger.debug("Processing message: %s", self.prompt)
+
+        start = await self._begin_message_processing()
+        if not start:
+            return
+        current_prompt, selected_model, mcp_servers, is_new_thread = start
+
+        processor = ModelManager().get_processor_for_model(selected_model)
+        if not processor:
+            await self._stop_processing_with_error(
+                f"Keinen Adapter für das Modell gefunden: {selected_model}"
+            )
+            return
+
+        first_response_received = False
+        try:
+            async for chunk in processor.process(
+                self.messages,
+                selected_model,
+                mcp_servers=mcp_servers,
+            ):
+                first_response_received = await self._handle_stream_chunk(
+                    chunk=chunk,
+                    current_prompt=current_prompt,
+                    is_new_thread=is_new_thread,
+                    first_response_received=first_response_received,
+                )
+
+            await self._finalize_successful_response()
+
+        except Exception as ex:
+            await self._handle_process_error(
+                ex=ex,
+                current_prompt=current_prompt,
+                is_new_thread=is_new_thread,
+                first_response_received=first_response_received,
+            )
+
+        finally:
+            await self._finalize_processing()
+
+    async def _begin_message_processing(
+        self,
+    ) -> tuple[str, str, list[MCPServer], bool] | None:
+        """Prepare state for sending a message. Returns None if no-op."""
         async with self:
-
-            if self.processing:
-                return
+            current_prompt = self.prompt.strip()
+            if self.processing or not current_prompt:
+                return None
 
             self.processing = True
            self._clear_chunks()
-            # Clear thinking items for new user question
            self.thinking_items = []
 
-            current_prompt = self.prompt.strip()
-            if not current_prompt:
-                self.processing = False
-                return
-
            self.prompt = ""
 
-
+            is_new_thread = self._thread.state == ThreadStatus.NEW
             self.messages.extend(
                 [
                     Message(text=current_prompt, type=MessageType.HUMAN),
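
`_process_message` above is decomposed around a prepare step: `_begin_message_processing` does all the guarded state mutation in one lock acquisition and returns either a tuple of everything the streaming loop needs or `None` to signal a no-op (already processing, empty prompt). A small sketch of that contract, with hypothetical names:

```python
from typing import NamedTuple


class PreparedSend(NamedTuple):
    """Stands in for the (prompt, model, servers, is_new) tuple."""

    prompt: str
    model: str


def begin(raw_prompt: str, busy: bool) -> PreparedSend | None:
    """Return the data the send loop needs, or None if sending is a no-op."""
    prompt = raw_prompt.strip()
    if busy or not prompt:
        return None
    return PreparedSend(prompt=prompt, model="default-model")


if (start := begin("  hello  ", busy=False)) is not None:
    prompt, model = start
    assert (prompt, model) == ("hello", "default-model")
```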
@@ -302,117 +489,159 @@
                 ]
             )
 
-
-        if not
+            selected_model = self.get_selected_model
+            if not selected_model:
                 self._add_error_message("Kein Chat-Modell ausgewählt")
                 self.processing = False
-                return
+                return None
 
-
-
-        if not processor:
-            async with self:
-                self._add_error_message(
-                    f"Keinen Adapter für das Modell gefunden: {self.get_ai_model}"
-                )
-                self.processing = False
-            return
+            mcp_servers = self.selected_mcp_servers
+            return current_prompt, selected_model, mcp_servers, is_new_thread
 
-
-
-
-
-
-
-
-
-
+    async def _stop_processing_with_error(self, error_msg: str) -> None:
+        """Stop processing and show an error message."""
+        async with self:
+            self._add_error_message(error_msg)
+            self.processing = False
+
+    async def _handle_stream_chunk(
+        self,
+        *,
+        chunk: Chunk,
+        current_prompt: str,
+        is_new_thread: bool,
+        first_response_received: bool,
+    ) -> bool:
+        """Handle one streamed chunk. Returns updated first_response_received."""
+        async with self:
+            self._handle_chunk(chunk)
 
-
-
+            should_create_thread = (
+                not first_response_received
+                and chunk.type == ChunkType.TEXT
+                and is_new_thread
+                and self.with_thread_list
+            )
+            if not should_create_thread:
+                return first_response_received
 
-
-
-
+            self._thread.state = ThreadStatus.ACTIVE
+            if self._thread.title in {"", "Neuer Chat"}:
+                self._thread.title = current_prompt[:100]
+            await self._notify_thread_created()
+            return True
 
-
-
-
-
-
-
-
-
+    async def _finalize_successful_response(self) -> None:
+        """Finalize state after a successful full response."""
+        async with self:
+            self.show_thinking = False
+            self._thread.messages = self.messages
+            self._thread.ai_model = self.selected_model
+
+            if self.with_thread_list:
+                await self._save_thread_to_db()
+
+    async def _handle_process_error(
+        self,
+        *,
+        ex: Exception,
+        current_prompt: str,
+        is_new_thread: bool,
+        first_response_received: bool,
+    ) -> None:
+        """Handle failures during streaming and persist error state."""
+        async with self:
+            self._thread.state = ThreadStatus.ERROR
 
-
-
-
+            if self.messages and self.messages[-1].type == MessageType.ASSISTANT:
+                self.messages.pop()
+            self.messages.append(Message(text=str(ex), type=MessageType.ERROR))
 
-
-
-
+            if is_new_thread and self.with_thread_list and not first_response_received:
+                if self._thread.title in {"", "Neuer Chat"}:
+                    self._thread.title = current_prompt[:100]
+                await self._notify_thread_created()
 
-
-
+            self._thread.messages = self.messages
+            if self.with_thread_list:
+                await self._save_thread_to_db()
 
-
-
-
-
-
+    async def _finalize_processing(self) -> None:
+        """Mark processing done and close out the last message."""
+        async with self:
+            if self.messages:
+                self.messages[-1].done = True
+            self.processing = False
 
-
-
-
-        logger.debug("Thread already persisted: %s", self._thread.thread_id)
-        return
+    # -------------------------------------------------------------------------
+    # Thread persistence (internal)
+    # -------------------------------------------------------------------------
 
-
-
-        self._thread.title = prompt.strip() if prompt.strip() else "Neuer Chat"
+    async def _notify_thread_created(self) -> None:
+        """Notify ThreadListState that a new thread was created.
 
-
-
-        threadlist_state.threads.insert(0, self._thread)
+        Called after the first successful response chunk.
+        Adds the thread to ThreadListState without a full reload.
 
-
-
+        Note: Called from within an async with self block, so don't create a new one.
+        """
+        threadlist_state: ThreadListState = await self.get_state(ThreadListState)
+        await threadlist_state.add_thread(self._thread)
 
-
-
-        await threadlist_state.save_threads()
+    async def _save_thread_to_db(self) -> None:
+        """Persist current thread to database.
 
-
+        Called incrementally after each successful response.
+        """
+        user_session: UserSession = await self.get_state(UserSession)
+        user_id = user_session.user.user_id if user_session.user else None
 
-
-
-
-
-
-
-        yield ThreadState.process_message
+        if user_id:
+            try:
+                await ThreadRepository.save_thread(self._thread, user_id)
+                logger.debug("Saved thread to DB: %s", self._thread.thread_id)
+            except Exception as e:
+                logger.error("Error saving thread %s: %s", self._thread.thread_id, e)
 
-
-
-
-            textarea.value = '';
-            textarea.style.height = 'auto';
-            textarea.style.height = textarea.scrollHeight + 'px';
-        }
-        """)
+    # -------------------------------------------------------------------------
+    # Chunk handling (internal)
+    # -------------------------------------------------------------------------
 
     def _clear_chunks(self) -> None:
         """Clear all chunk categorization lists except thinking_items for display."""
-        self.current_chunks = []
-        # Don't clear thinking_items to preserve thinking display for previous messages
-        # self.thinking_items = []
         self.image_chunks = []
         self.current_reasoning_session = ""  # Reset reasoning session for new message
+        self.current_tool_session = ""  # Reset tool session for new message
+
+    def _get_or_create_tool_session(self, chunk: Chunk) -> str:
+        """Get tool session ID from metadata or derive one.
+
+        If the model doesn't include tool_id in chunk metadata, we track the latest
+        tool session so TOOL_RESULT can be associated with the preceding TOOL_CALL.
+        """
+        tool_id = chunk.chunk_metadata.get("tool_id")
+        if tool_id:
+            self.current_tool_session = tool_id
+            return tool_id
+
+        if chunk.type == ChunkType.TOOL_CALL:
+            tool_count = sum(
+                1 for i in self.thinking_items if i.type == ThinkingType.TOOL_CALL
+            )
+            self.current_tool_session = f"tool_{tool_count}"
+            return self.current_tool_session
+
+        if self.current_tool_session:
+            return self.current_tool_session
+
+        tool_count = sum(
+            1 for i in self.thinking_items if i.type == ThinkingType.TOOL_CALL
+        )
+        self.current_tool_session = f"tool_{tool_count}"
+        return self.current_tool_session
 
     def _handle_chunk(self, chunk: Chunk) -> None:
         """Handle incoming chunk based on its type."""
-        self.current_chunks.append(chunk)
-
         if chunk.type == ChunkType.TEXT:
             self.messages[-1].text += chunk.text
         elif chunk.type in (ChunkType.THINKING, ChunkType.THINKING_RESULT):
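
`_get_or_create_tool_session` above gives tool chunks a stable ID even when the model omits `tool_id` from chunk metadata: an explicit ID wins, a bare TOOL_CALL derives `tool_<n>` from the number of tool items seen so far, and anything else reattaches to the most recent session. The same logic in isolation (simplified: a counter instead of scanning `thinking_items`):

```python
from dataclasses import dataclass


@dataclass
class ToolSessionTracker:
    current: str = ""
    calls_seen: int = 0

    def resolve(self, tool_id: str | None, is_tool_call: bool) -> str:
        if tool_id:  # explicit ID from metadata always wins
            self.current = tool_id
            return self.current
        if is_tool_call:  # a new call without an ID starts a derived session
            self.current = f"tool_{self.calls_seen}"
            self.calls_seen += 1
            return self.current
        # e.g. a TOOL_RESULT with no ID joins the preceding call's session
        return self.current or "tool_0"


t = ToolSessionTracker()
assert t.resolve(None, is_tool_call=True) == "tool_0"
assert t.resolve(None, is_tool_call=False) == "tool_0"  # result pairs with call
assert t.resolve("call_abc", is_tool_call=False) == "call_abc"
```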
@@ -434,19 +663,47 @@
         else:
             logger.warning("Unhandled chunk type: %s - %s", chunk.type, chunk.text)
 
+    def _get_or_create_thinking_item(
+        self, item_id: str, thinking_type: ThinkingType, **kwargs
+    ) -> Thinking:
+        """Get existing thinking item or create new one."""
+        for item in self.thinking_items:
+            if item.type == thinking_type and item.id == item_id:
+                return item
+
+        new_item = Thinking(type=thinking_type, id=item_id, **kwargs)
+        self.thinking_items = [*self.thinking_items, new_item]
+        return new_item
+
     def _handle_reasoning_chunk(self, chunk: Chunk) -> None:
         """Handle reasoning chunks by consolidating them into thinking items."""
         if chunk.type == ChunkType.THINKING:
             self.show_thinking = True
-            logger.debug("Thinking: %s", chunk.text)
 
         reasoning_session = self._get_or_create_reasoning_session(chunk)
-        existing_item = self._find_existing_reasoning_item(reasoning_session)
 
-
-
-
-
+        # Determine status and text
+        status = ThinkingStatus.IN_PROGRESS
+        text = ""
+        if chunk.type == ChunkType.THINKING:
+            text = chunk.text
+        elif chunk.type == ChunkType.THINKING_RESULT:
+            status = ThinkingStatus.COMPLETED
+
+        item = self._get_or_create_thinking_item(
+            reasoning_session, ThinkingType.REASONING, text=text, status=status
+        )
+
+        # Update existing item
+        if chunk.type == ChunkType.THINKING:
+            if item.text and item.text != text:  # Append if not new
+                item.text += f"\n{chunk.text}"
+        elif chunk.type == ChunkType.THINKING_RESULT:
+            item.status = ThinkingStatus.COMPLETED
+            if chunk.text:
+                item.text += f" {chunk.text}"
+
+        self.thinking_items = self.thinking_items.copy()
 
     def _get_or_create_reasoning_session(self, chunk: Chunk) -> str:
         """Get reasoning session ID from metadata or create new one."""
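
`_get_or_create_thinking_item` above collapses what used to be three helpers per item kind (find, update, create) into one lookup that returns a mutable item either way; callers then apply chunk-specific updates to the returned object. The shape of that refactor, reduced to plain Python:

```python
from dataclasses import dataclass


@dataclass
class Item:
    kind: str
    id: str
    text: str = ""


class Consolidator:
    def __init__(self) -> None:
        self.items: list[Item] = []

    def get_or_create(self, kind: str, item_id: str, **defaults) -> Item:
        for item in self.items:
            if item.kind == kind and item.id == item_id:
                return item  # caller mutates the existing item in place
        new_item = Item(kind=kind, id=item_id, **defaults)
        self.items = [*self.items, new_item]  # reassign rather than append
        return new_item


c = Consolidator()
first = c.get_or_create("reasoning", "s1", text="step 1")
first.text += "\nstep 2"  # update path: no separate helper needed
assert c.get_or_create("reasoning", "s1") is first
```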
@@ -473,402 +730,62 @@
 
         return self.current_reasoning_session
 
-    def _find_existing_reasoning_item(self, reasoning_session: str) -> Thinking | None:
-        """Find existing reasoning item by session ID."""
-        for item in self.thinking_items:
-            if item.type == ThinkingType.REASONING and item.id == reasoning_session:
-                return item
-        return None
-
-    def _update_existing_reasoning_item(
-        self, existing_item: Thinking, chunk: Chunk
-    ) -> None:
-        """Update existing reasoning item with new chunk data."""
-        if chunk.type == ChunkType.THINKING:
-            if existing_item.text:
-                existing_item.text += f"\n{chunk.text}"
-            else:
-                existing_item.text = chunk.text
-        elif chunk.type == ChunkType.THINKING_RESULT:
-            existing_item.status = ThinkingStatus.COMPLETED
-            if chunk.text:
-                existing_item.text += f" {chunk.text}"
-        # Trigger Reflex reactivity by reassigning the list
-        self.thinking_items = self.thinking_items.copy()
-
-    def _create_new_reasoning_item(self, reasoning_session: str, chunk: Chunk) -> None:
-        """Create new reasoning item."""
-        status = (
-            ThinkingStatus.COMPLETED
-            if chunk.type == ChunkType.THINKING_RESULT
-            else ThinkingStatus.IN_PROGRESS
-        )
-        new_item = Thinking(
-            type=ThinkingType.REASONING,
-            id=reasoning_session,
-            text=chunk.text,
-            status=status,
-        )
-        self.thinking_items = [*self.thinking_items, new_item]
-
     def _handle_tool_chunk(self, chunk: Chunk) -> None:
         """Handle tool chunks by consolidating them into thinking items."""
-        tool_id =
-        if not tool_id:
-            # Generate a tool ID if not provided
-            tool_count = len(
-                [i for i in self.thinking_items if i.type == ThinkingType.TOOL_CALL]
-            )
-            tool_id = f"tool_{tool_count}"
-
-        # Find existing tool item or create new one
-        existing_item = self._find_existing_tool_item(tool_id)
-
-        if existing_item:
-            self._update_existing_tool_item(existing_item, chunk)
-        else:
-            self._create_new_tool_item(tool_id, chunk)
-
-        logger.debug("Tool event: %s - %s", chunk.type, chunk.text)
-
-    def _find_existing_tool_item(self, tool_id: str) -> Thinking | None:
-        """Find existing tool item by ID."""
-        for item in self.thinking_items:
-            if item.type == ThinkingType.TOOL_CALL and item.id == tool_id:
-                return item
-        return None
+        tool_id = self._get_or_create_tool_session(chunk)
 
-
-        """Update existing tool item with new chunk data."""
-        if chunk.type == ChunkType.TOOL_CALL:
-            # Store parameters separately from text
-            existing_item.parameters = chunk.chunk_metadata.get(
-                "parameters", chunk.text
-            )
-            existing_item.text = chunk.chunk_metadata.get("description", "")
-            # Only set tool_name if it's not already present
-            if not existing_item.tool_name:
-                existing_item.tool_name = chunk.chunk_metadata.get(
-                    "tool_name", "Unknown"
-                )
-            existing_item.status = ThinkingStatus.IN_PROGRESS
-        elif chunk.type == ChunkType.TOOL_RESULT:
-            self._handle_tool_result(existing_item, chunk)
-        elif chunk.type == ChunkType.ACTION:
-            existing_item.text += f"\n---\nAktion: {chunk.text}"
-        # Trigger Reflex reactivity by reassigning the list
-        self.thinking_items = self.thinking_items.copy()
-
-    def _handle_tool_result(self, existing_item: Thinking, chunk: Chunk) -> None:
-        """Handle tool result chunk."""
-        # Check if this is an error result
-        is_error = (
-            "error" in chunk.text.lower()
-            or "failed" in chunk.text.lower()
-            or chunk.chunk_metadata.get("error")
-        )
-        existing_item.status = (
-            ThinkingStatus.ERROR if is_error else ThinkingStatus.COMPLETED
-        )
-        # Store result separately from text
-        existing_item.result = chunk.text
-        if is_error:
-            existing_item.error = chunk.text
-
-    def _create_new_tool_item(self, tool_id: str, chunk: Chunk) -> None:
-        """Create new tool item."""
+        # Determine initial properties
         tool_name = chunk.chunk_metadata.get("tool_name", "Unknown")
         status = ThinkingStatus.IN_PROGRESS
         text = ""
         parameters = None
         result = None
+        error = None
 
         if chunk.type == ChunkType.TOOL_CALL:
-            # Store parameters separately from text
             parameters = chunk.chunk_metadata.get("parameters", chunk.text)
             text = chunk.chunk_metadata.get("description", "")
         elif chunk.type == ChunkType.TOOL_RESULT:
-            is_error =
+            is_error = (
+                "error" in chunk.text.lower()
+                or "failed" in chunk.text.lower()
+                or chunk.chunk_metadata.get("error")
+            )
             status = ThinkingStatus.ERROR if is_error else ThinkingStatus.COMPLETED
             result = chunk.text
+            if is_error:
+                error = chunk.text
         else:
             text = chunk.text
 
-
-
-
+        item = self._get_or_create_thinking_item(
+            tool_id,
+            ThinkingType.TOOL_CALL,
             text=text,
             status=status,
             tool_name=tool_name,
             parameters=parameters,
             result=result,
-            error=
+            error=error,
         )
-
+
+        # Update existing item
+        if chunk.type == ChunkType.TOOL_CALL:
+            item.parameters = parameters
+            item.text = text
+            if not item.tool_name or item.tool_name == "Unknown":
+                item.tool_name = tool_name
+            item.status = ThinkingStatus.IN_PROGRESS
+        elif chunk.type == ChunkType.TOOL_RESULT:
+            item.status = status
+            item.result = result
+            item.error = error
+        elif chunk.type == ChunkType.ACTION:
+            item.text += f"\n---\nAktion: {chunk.text}"
+
+        self.thinking_items = self.thinking_items.copy()
 
     def _add_error_message(self, error_msg: str) -> None:
         """Add an error message to the conversation."""
         logger.error(error_msg)
         self.messages.append(Message(text=error_msg, type=MessageType.ERROR))
-
-    async def _update_thread_list(self) -> None:
-        """Update the thread list with current messages."""
-        threadlist_state: ThreadListState = await self.get_state(ThreadListState)
-        if self._thread.title in {"", "Neuer Chat"}:
-            self._thread.title = (
-                self.messages[0].text if self.messages else "Neuer Chat"
-            )
-
-        self._thread.messages = self.messages
-        self._thread.ai_model = self.selected_model
-        await threadlist_state.update_thread(self._thread)
-
-    def toggle_thinking_expanded(self) -> None:
-        """Toggle the expanded state of the thinking section."""
-        self.thinking_expanded = not self.thinking_expanded
-
-    # MCP Server tool support event handlers
-    @rx.event
-    async def load_available_mcp_servers(self) -> None:
-        """Load available MCP servers from the database."""
-        self.available_mcp_servers = await MCPServerRepository.get_all()
-
-    @rx.event
-    def open_tools_modal(self) -> None:
-        """Open the tools modal."""
-        self.temp_selected_mcp_servers = [
-            server.id for server in self.selected_mcp_servers if server.id
-        ]
-        self.server_selection_state = {
-            server.id: server.id in self.temp_selected_mcp_servers
-            for server in self.available_mcp_servers
-            if server.id
-        }
-        self.show_tools_modal = True
-
-    @rx.event
-    def set_show_tools_modal(self, show: bool) -> None:
-        """Set the visibility of the tools modal."""
-        self.show_tools_modal = show
-
-    @rx.event
-    def toggle_mcp_server_selection(self, server_id: int, selected: bool) -> None:
-        """Toggle MCP server selection in the modal."""
-        self.server_selection_state[server_id] = selected
-        if selected and server_id not in self.temp_selected_mcp_servers:
-            self.temp_selected_mcp_servers.append(server_id)
-        elif not selected and server_id in self.temp_selected_mcp_servers:
-            self.temp_selected_mcp_servers.remove(server_id)
-
-    @rx.event
-    def apply_mcp_server_selection(self) -> None:
-        """Apply the temporary MCP server selection."""
-        self.selected_mcp_servers = [
-            server
-            for server in self.available_mcp_servers
-            if server.id in self.temp_selected_mcp_servers
-        ]
-        self.show_tools_modal = False
-
-    def is_mcp_server_selected(self, server_id: int) -> bool:
-        """Check if an MCP server is selected."""
-        return server_id in self.temp_selected_mcp_servers
-
-    def set_selected_model(self, model_id: str) -> None:
-        """Set the selected model."""
-        self.selected_model = model_id
-        self._thread.ai_model = model_id
-
-
-class ThreadListState(rx.State):
-    """State for the thread list component."""
-
-    thread_store: str = rx.LocalStorage("{}", name="asui-threads", sync=True)
-    threads: list[ThreadModel] = []
-    active_thread_id: str = ""
-    autosave: bool = False
-
-    @rx.var
-    def has_threads(self) -> bool:
-        """Check if there are any threads."""
-        return len(self.threads) > 0
-
-    async def initialize(
-        self, autosave: bool = False, auto_create_default: bool = False
-    ) -> None:
-        """Initialize the thread list state.
-
-        Args:
-            autosave: Enable auto-saving threads to local storage.
-            auto_create_default: If True, create and select a default thread
-                when no threads exist (e.g., on first load or after clearing).
-        """
-        self.autosave = autosave
-        await self.load_threads()
-
-        # Auto-create default thread if enabled and no threads exist
-        if auto_create_default and not self.has_threads:
-            await self.create_thread()
-
-        logger.debug("Initialized thread list state")
-
-    async def load_threads(self) -> None:
-        """Load threads from browser local storage."""
-        try:
-            thread_data = json.loads(self.thread_store)
-            if thread_data and "threads" in thread_data:
-                processed_threads: list[ThreadModel] = []
-                needs_upgrade = False
-                for thread in thread_data["threads"]:
-                    thread_payload = dict(thread)
-                    messages_payload: list[dict[str, Any]] = []
-                    for message in thread_payload.get("messages", []):
-                        msg_data = dict(message)
-                        encoded = msg_data.pop("text_b64", None)
-                        if encoded is not None:
-                            try:
-                                msg_data["text"] = base64.b64decode(encoded).decode(
-                                    "utf-8"
-                                )
-                            except Exception as exc:
-                                logger.warning(
-                                    "Failed to decode stored message: %s", exc
-                                )
-                                msg_data["text"] = _rehydrate_mermaid_text(
-                                    msg_data.get("text", "")
-                                )
-                                needs_upgrade = True
-                        else:
-                            msg_data["text"] = _rehydrate_mermaid_text(
-                                msg_data.get("text", "")
-                            )
-                            needs_upgrade = True
-                        messages_payload.append(msg_data)
-                    thread_payload["messages"] = messages_payload
-                    processed_threads.append(ThreadModel(**thread_payload))
-
-                self.threads = processed_threads
-                self.active_thread_id = thread_data.get("active_thread_id", "")
-                if self.active_thread_id:
-                    await self.select_thread(self.active_thread_id)
-                if needs_upgrade:
-                    await self.save_threads()
-        except Exception as e:
-            logger.error("Error loading threads from local storage: %s", e)
-            self.threads = []
-            self.active_thread_id = ""
-
-    async def save_threads(self) -> None:
-        """Save threads to browser local storage."""
-        try:
-            thread_list = []
-            for thread in self.threads:
-                thread_dict = thread.dict()
-                encoded_messages: list[dict[str, Any]] = []
-                for message in thread.messages:
-                    msg_dict = message.dict()
-                    text_value = msg_dict.get("text", "")
-                    if isinstance(text_value, str):
-                        try:
-                            msg_dict["text_b64"] = base64.b64encode(
-                                text_value.encode("utf-8")
-                            ).decode("ascii")
-                        except Exception as exc:
-                            logger.warning("Failed to encode message text: %s", exc)
-                            msg_dict["text_b64"] = None
-                    else:
-                        msg_dict["text_b64"] = None
-                    encoded_messages.append(msg_dict)
-                thread_dict["messages"] = encoded_messages
-                thread_list.append(thread_dict)
-
-            thread_data = {
-                "threads": thread_list,
-                "active_thread_id": self.active_thread_id,
-            }
-            self.thread_store = json.dumps(thread_data)
-            logger.debug("Saved threads to local storage")
-        except Exception as e:
-            logger.error("Error saving threads to local storage: %s", e)
-
-    async def reset_thread_store(self) -> None:
-        self.thread_store = "{}"
-
-    async def get_thread(self, thread_id: str) -> ThreadModel | None:
-        """Get a thread by its ID."""
-        for thread in self.threads:
-            if thread.thread_id == thread_id:
-                return thread
-        return None
-
-    async def create_thread(self) -> None:
-        """Create a new thread."""
-        new_thread = ThreadModel(
-            thread_id=str(uuid.uuid4()),
-            title="Neuer Chat",
-            prompt="",
-            messages=[],
-            state=ThreadStatus.NEW,
-            ai_model=ModelManager().get_default_model(),
-            active=True,
-        )
-        self.threads.insert(0, new_thread)
-        await self.select_thread(new_thread.thread_id)
-
-        logger.debug("Created new thread: %s", new_thread)
-
-    async def update_thread(self, thread: ThreadModel) -> None:
-        """Update a thread."""
-        existing_thread = await self.get_thread(thread.thread_id)
-        if existing_thread:
-            existing_thread.title = thread.title
-            existing_thread.messages = thread.messages
-            existing_thread.state = thread.state
-            existing_thread.active = thread.active
-            existing_thread.ai_model = thread.ai_model
-
-        if self.autosave:
-            await self.save_threads()
-        logger.debug("Updated thread: %s", thread.thread_id)
-
-    async def delete_thread(self, thread_id: str) -> AsyncGenerator[Any, Any]:
-        """Delete a thread."""
-        thread = await self.get_thread(thread_id)
-        if not thread:
-            yield rx.toast.error(
-                "Chat nicht gefunden.", position="top-right", close_button=True
-            )
-            logger.warning("Thread with ID %s not found.", thread_id)
-            return
-
-        was_active = thread_id == self.active_thread_id
-        self.threads.remove(thread)
-        await self.save_threads()
-        yield rx.toast.info(
-            f"Chat '{thread.title}' erfolgreich gelöscht.",
-            position="top-right",
-            close_button=True,
-        )
-
-        # If the deleted thread was active, clear ThreadState and show empty view
-        if was_active:
-            thread_state: ThreadState = await self.get_state(ThreadState)
-            thread_state.initialize()
-            self.active_thread_id = ""
-            # If other threads remain but we deleted the active one,
-            # the empty state is now displayed
-            # User can select from existing threads or create new one
-
-    async def select_thread(self, thread_id: str) -> None:
-        """Select a thread."""
-        for thread in self.threads:
-            thread.active = thread.thread_id == thread_id
-        self.active_thread_id = thread_id
-        active_thread = await self.get_thread(thread_id)
-
-        if active_thread:
-            thread_state: ThreadState = await self.get_state(ThreadState)
-            thread_state.set_thread(active_thread)
-            thread_state.messages = active_thread.messages
-            thread_state.selected_model = active_thread.ai_model
-            thread_state.with_thread_list = True