chuk-ai-session-manager 0.1.1__py3-none-any.whl → 0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chuk_ai_session_manager/__init__.py +336 -34
- chuk_ai_session_manager/api/__init__.py +1 -0
- chuk_ai_session_manager/api/simple_api.py +376 -0
- chuk_ai_session_manager/infinite_conversation.py +7 -4
- chuk_ai_session_manager/models/session.py +27 -18
- chuk_ai_session_manager/session_aware_tool_processor.py +6 -4
- chuk_ai_session_manager/session_prompt_builder.py +6 -4
- chuk_ai_session_manager/session_storage.py +176 -0
- chuk_ai_session_manager/utils/status_display_utils.py +474 -0
- {chuk_ai_session_manager-0.1.1.dist-info → chuk_ai_session_manager-0.2.dist-info}/METADATA +9 -8
- chuk_ai_session_manager-0.2.dist-info/RECORD +23 -0
- chuk_ai_session_manager/storage/__init__.py +0 -44
- chuk_ai_session_manager/storage/base.py +0 -50
- chuk_ai_session_manager/storage/providers/file.py +0 -348
- chuk_ai_session_manager/storage/providers/memory.py +0 -96
- chuk_ai_session_manager/storage/providers/redis.py +0 -295
- chuk_ai_session_manager-0.1.1.dist-info/RECORD +0 -24
- /chuk_ai_session_manager/{storage/providers → utils}/__init__.py +0 -0
- {chuk_ai_session_manager-0.1.1.dist-info → chuk_ai_session_manager-0.2.dist-info}/WHEEL +0 -0
- {chuk_ai_session_manager-0.1.1.dist-info → chuk_ai_session_manager-0.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
# chuk_ai_session_manager/api/simple_api.py
|
|
2
|
+
"""
|
|
3
|
+
Super simple developer API for session management with any LLM.
|
|
4
|
+
|
|
5
|
+
Usage:
|
|
6
|
+
from chuk_ai_session_manager.simple_api import SessionManager
|
|
7
|
+
|
|
8
|
+
# Start a conversation
|
|
9
|
+
sm = SessionManager()
|
|
10
|
+
|
|
11
|
+
# Track interactions easily
|
|
12
|
+
await sm.user_says("Hello!")
|
|
13
|
+
response = await sm.ai_responds("Hi there! How can I help?")
|
|
14
|
+
|
|
15
|
+
# Get conversation history
|
|
16
|
+
history = await sm.get_conversation()
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
import asyncio
|
|
21
|
+
import logging
|
|
22
|
+
from typing import Any, Dict, List, Optional, Union, Callable
|
|
23
|
+
from datetime import datetime
|
|
24
|
+
|
|
25
|
+
from chuk_ai_session_manager.models.session import Session
|
|
26
|
+
from chuk_ai_session_manager.models.session_event import SessionEvent
|
|
27
|
+
from chuk_ai_session_manager.models.event_source import EventSource
|
|
28
|
+
from chuk_ai_session_manager.models.event_type import EventType
|
|
29
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
30
|
+
|
|
31
|
+
logger = logging.getLogger(__name__)
|
|
32
|
+
|
|
33
|
+
class SessionManager:
    """
    Super simple API for session management with any LLM.

    This class provides a dead-simple interface for tracking conversations
    while hiding all the complexity of the underlying session management.
    """

    def __init__(
        self,
        session_id: Optional[str] = None,
        auto_save: bool = True,
        store: Optional[Any] = None
    ):
        """
        Initialize a session manager.

        Args:
            session_id: Use existing session or create new one
            auto_save: Automatically save after each operation
            store: Custom storage backend (defaults to CHUK Sessions).
                NOTE(review): this argument is currently never used — the
                backend is always obtained via get_backend(). Kept for
                interface compatibility; confirm intended wiring.
        """
        self.auto_save = auto_save
        self._session: Optional[Session] = None
        self._session_id = session_id
        # A missing session_id means this conversation is brand new and a
        # Session object must be created (rather than loaded) on first use.
        self._is_new_session = session_id is None

        # Generate an ID eagerly so .session_id is always available even
        # before the first awaited operation.
        if not self._session_id:
            import uuid
            self._session_id = str(uuid.uuid4())

    @staticmethod
    def _store() -> ChukSessionsStore:
        """Build a store over the configured backend (shared helper)."""
        return ChukSessionsStore(get_backend())

    async def _record_event(self, session: Session, event: SessionEvent) -> None:
        """Append *event* to *session*, persisting immediately when auto_save is on."""
        if self.auto_save:
            await session.add_event_and_save(event)
        else:
            await session.add_event(event)

    async def _ensure_session(self) -> Session:
        """Ensure we have a session, creating or loading one if needed.

        Raises:
            ValueError: if an explicit session_id was given but no such
                session exists in storage.
        """
        if self._session is None:
            if self._is_new_session:
                # Brand-new conversation: create and adopt the generated id.
                self._session = await Session.create()
                self._session_id = self._session.id
            else:
                # Caller supplied an id: it must already exist in storage.
                self._session = await self._store().get(self._session_id)
                if self._session is None:
                    raise ValueError(f"Session {self._session_id} not found")

        return self._session

    @property
    def session_id(self) -> str:
        """Get the current session ID."""
        if self._session:
            return self._session.id
        if self._session_id:
            return self._session_id
        # Defensive fallback; __init__ normally guarantees an id exists.
        import uuid
        self._session_id = str(uuid.uuid4())
        return self._session_id

    async def user_says(
        self,
        message: str,
        metadata: Optional[Dict[str, Any]] = None,
        model: str = "gpt-4o-mini"
    ) -> str:
        """
        Track a user message.

        Args:
            message: What the user said
            metadata: Optional metadata to attach
            model: Model used for token counting. Defaults to the value
                that was previously hard-coded, so existing callers see
                identical behavior.

        Returns:
            The session ID for this conversation
        """
        session = await self._ensure_session()

        event = await SessionEvent.create_with_tokens(
            message=message,
            prompt=message,
            model=model,
            source=EventSource.USER,
            type=EventType.MESSAGE
        )

        # Add metadata if provided
        if metadata:
            for key, value in metadata.items():
                await event.set_metadata(key, value)

        await self._record_event(session, event)
        return session.id

    async def ai_responds(
        self,
        response: str,
        model: str = "unknown",
        provider: str = "unknown",
        metadata: Optional[Dict[str, Any]] = None
    ) -> str:
        """
        Track an AI response.

        Args:
            response: The AI's response
            model: Model name (e.g., "gpt-4o-mini")
            provider: Provider name (e.g., "openai")
            metadata: Optional metadata to attach

        Returns:
            The session ID for this conversation
        """
        session = await self._ensure_session()

        # Caller-supplied metadata wins over the defaults on key collision.
        full_metadata = {
            "model": model,
            "provider": provider,
            "timestamp": datetime.now().isoformat(),
            **(metadata or {})
        }

        event = await SessionEvent.create_with_tokens(
            message=response,
            prompt="",  # No prompt side for an AI response
            completion=response,
            model=model,
            source=EventSource.LLM,
            type=EventType.MESSAGE
        )

        for key, value in full_metadata.items():
            await event.set_metadata(key, value)

        await self._record_event(session, event)
        return session.id

    async def tool_called(
        self,
        tool_name: str,
        arguments: Dict[str, Any],
        result: Any,
        error: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> str:
        """
        Track a tool call.

        Args:
            tool_name: Name of the tool that was called
            arguments: Arguments passed to the tool
            result: Result from the tool
            error: Error message if tool failed
            metadata: Optional metadata to attach

        Returns:
            The session ID for this conversation
        """
        session = await self._ensure_session()

        tool_message = {
            "tool": tool_name,
            "arguments": arguments,
            "result": result,
            "error": error,
            "success": error is None
        }

        event = SessionEvent(
            message=tool_message,
            source=EventSource.SYSTEM,
            type=EventType.TOOL_CALL
        )

        # Add metadata if provided
        if metadata:
            for key, value in metadata.items():
                await event.set_metadata(key, value)

        await self._record_event(session, event)
        return session.id

    async def get_conversation(self, include_metadata: bool = False) -> List[Dict[str, Any]]:
        """
        Get the conversation history in a simple format.

        Args:
            include_metadata: Whether to include event metadata

        Returns:
            List of conversation turns as dicts
        """
        session = await self._ensure_session()

        conversation = []
        for event in session.events:
            # Only MESSAGE events form the user/assistant transcript;
            # tool calls and system events are excluded here.
            if event.type == EventType.MESSAGE:
                turn = {
                    "role": "user" if event.source == EventSource.USER else "assistant",
                    "content": event.message,
                    "timestamp": event.timestamp.isoformat()
                }

                if include_metadata and event.metadata:
                    turn["metadata"] = event.metadata

                conversation.append(turn)

        return conversation

    async def get_tools_used(self) -> List[Dict[str, Any]]:
        """
        Get all tools that were used in this conversation.

        Returns:
            List of tool usage information
        """
        session = await self._ensure_session()

        tools = []
        for event in session.events:
            # Guard on dict shape: tool events written by tool_called()
            # store a dict payload, but be tolerant of anything else.
            if event.type == EventType.TOOL_CALL and isinstance(event.message, dict):
                tools.append({
                    "tool": event.message.get("tool", "unknown"),
                    "arguments": event.message.get("arguments", {}),
                    "result": event.message.get("result"),
                    "success": event.message.get("success", True),
                    "error": event.message.get("error"),
                    "timestamp": event.timestamp.isoformat()
                })

        return tools

    async def get_stats(self) -> Dict[str, Any]:
        """
        Get conversation statistics.

        Returns:
            Dictionary with conversation stats
        """
        session = await self._ensure_session()

        user_messages = sum(1 for e in session.events
                            if e.type == EventType.MESSAGE and e.source == EventSource.USER)
        ai_messages = sum(1 for e in session.events
                          if e.type == EventType.MESSAGE and e.source == EventSource.LLM)
        tool_calls = sum(1 for e in session.events if e.type == EventType.TOOL_CALL)

        return {
            "session_id": session.id,
            "total_events": len(session.events),
            "user_messages": user_messages,
            "ai_messages": ai_messages,
            "tool_calls": tool_calls,
            "created_at": session.metadata.created_at.isoformat(),
            "last_update": session.last_update_time.isoformat(),
            "total_tokens": session.total_tokens,
            "estimated_cost": session.total_cost
        }

    async def save(self) -> None:
        """Manually save the session (if auto_save is False)."""
        if self._session:
            await self._store().save(self._session)

    async def clear(self) -> None:
        """Clear the current conversation and start fresh."""
        self._session = await Session.create()
        self._session_id = self._session.id
|
|
319
|
+
|
|
320
|
+
# Convenience functions for even simpler usage
|
|
321
|
+
async def quick_conversation(
    user_message: str,
    ai_response: str,
    model: str = "unknown",
    provider: str = "unknown"
) -> str:
    """Track a single user/AI exchange in a brand-new session.

    Convenience wrapper around SessionManager for one-shot tracking.
    Returns the session ID so the conversation can be continued later.
    """
    manager = SessionManager()
    await manager.user_says(user_message)
    return await manager.ai_responds(ai_response, model=model, provider=provider)
|
|
336
|
+
|
|
337
|
+
async def track_llm_call(
    user_input: str,
    llm_function: Callable[[str], str],
    model: str = "unknown",
    provider: str = "unknown",
    session_manager: Optional[SessionManager] = None
) -> tuple[str, str]:
    """Invoke an LLM callable and record both sides of the exchange.

    Args:
        user_input: The user's input
        llm_function: Function that takes user input and returns AI response
        model: Model name
        provider: Provider name
        session_manager: Existing session manager (creates new if None)

    Returns:
        Tuple of (ai_response, session_id)
    """
    manager = SessionManager() if session_manager is None else session_manager

    # Record the user's side first so event ordering matches the dialogue.
    await manager.user_says(user_input)

    # Support both async and plain callables transparently.
    if asyncio.iscoroutinefunction(llm_function):
        ai_response = await llm_function(user_input)
    else:
        ai_response = llm_function(user_input)

    session_id = await manager.ai_responds(
        ai_response,
        model=model,
        provider=provider
    )

    return ai_response, session_id
|
|
@@ -14,7 +14,7 @@ from chuk_ai_session_manager.models.session import Session
|
|
|
14
14
|
from chuk_ai_session_manager.models.session_event import SessionEvent
|
|
15
15
|
from chuk_ai_session_manager.models.event_type import EventType
|
|
16
16
|
from chuk_ai_session_manager.models.event_source import EventSource
|
|
17
|
-
from chuk_ai_session_manager.
|
|
17
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
18
18
|
|
|
19
19
|
# Type for LLM function callbacks
|
|
20
20
|
LLMCallbackAsync = Callable[[List[Dict[str, str]], str], Any]
|
|
@@ -83,7 +83,8 @@ class InfiniteConversationManager:
|
|
|
83
83
|
The current session ID (may be a new one if threshold was exceeded)
|
|
84
84
|
"""
|
|
85
85
|
# Get the store
|
|
86
|
-
|
|
86
|
+
backend = get_backend()
|
|
87
|
+
store = ChukSessionsStore(backend)
|
|
87
88
|
|
|
88
89
|
# Get the current session
|
|
89
90
|
session = await store.get(session_id)
|
|
@@ -221,7 +222,8 @@ class InfiniteConversationManager:
|
|
|
221
222
|
A list of messages suitable for an LLM call
|
|
222
223
|
"""
|
|
223
224
|
# Get the store
|
|
224
|
-
|
|
225
|
+
backend = get_backend()
|
|
226
|
+
store = ChukSessionsStore(backend)
|
|
225
227
|
|
|
226
228
|
# Get the current session
|
|
227
229
|
session = await store.get(session_id)
|
|
@@ -273,7 +275,8 @@ class InfiniteConversationManager:
|
|
|
273
275
|
*reverse* (closest parent first). Tests expect root-first order,
|
|
274
276
|
so we reverse it and then append the current session.
|
|
275
277
|
"""
|
|
276
|
-
|
|
278
|
+
backend = get_backend()
|
|
279
|
+
store = ChukSessionsStore(backend)
|
|
277
280
|
session = await store.get(session_id)
|
|
278
281
|
if not session:
|
|
279
282
|
raise ValueError(f"Session {session_id} not found")
|
|
@@ -54,8 +54,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
54
54
|
"""
|
|
55
55
|
if self.parent_id:
|
|
56
56
|
# Import here to avoid circular import
|
|
57
|
-
from chuk_ai_session_manager.
|
|
58
|
-
|
|
57
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
58
|
+
backend = get_backend()
|
|
59
|
+
store = ChukSessionsStore(backend)
|
|
59
60
|
parent = await store.get(self.parent_id)
|
|
60
61
|
if parent and self.id not in parent.child_ids:
|
|
61
62
|
parent.child_ids.append(self.id)
|
|
@@ -91,8 +92,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
91
92
|
if child_id not in self.child_ids:
|
|
92
93
|
self.child_ids.append(child_id)
|
|
93
94
|
# Save the updated session
|
|
94
|
-
from chuk_ai_session_manager.
|
|
95
|
-
|
|
95
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
96
|
+
backend = get_backend()
|
|
97
|
+
store = ChukSessionsStore(backend)
|
|
96
98
|
await store.save(self)
|
|
97
99
|
|
|
98
100
|
async def remove_child(self, child_id: str) -> None:
|
|
@@ -100,8 +102,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
100
102
|
if child_id in self.child_ids:
|
|
101
103
|
self.child_ids.remove(child_id)
|
|
102
104
|
# Save the updated session
|
|
103
|
-
from chuk_ai_session_manager.
|
|
104
|
-
|
|
105
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
106
|
+
backend = get_backend()
|
|
107
|
+
store = ChukSessionsStore(backend)
|
|
105
108
|
await store.save(self)
|
|
106
109
|
|
|
107
110
|
async def ancestors(self) -> List[Session]:
|
|
@@ -110,8 +113,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
110
113
|
current = self.parent_id
|
|
111
114
|
|
|
112
115
|
# Import here to avoid circular import
|
|
113
|
-
from chuk_ai_session_manager.
|
|
114
|
-
|
|
116
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
117
|
+
backend = get_backend()
|
|
118
|
+
store = ChukSessionsStore(backend)
|
|
115
119
|
|
|
116
120
|
while current:
|
|
117
121
|
parent = await store.get(current)
|
|
@@ -127,8 +131,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
127
131
|
stack = list(self.child_ids)
|
|
128
132
|
|
|
129
133
|
# Import here to avoid circular import
|
|
130
|
-
from chuk_ai_session_manager.
|
|
131
|
-
|
|
134
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
135
|
+
backend = get_backend()
|
|
136
|
+
store = ChukSessionsStore(backend)
|
|
132
137
|
|
|
133
138
|
while stack:
|
|
134
139
|
cid = stack.pop()
|
|
@@ -163,8 +168,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
163
168
|
await self.add_event(event)
|
|
164
169
|
|
|
165
170
|
# Save the session
|
|
166
|
-
from chuk_ai_session_manager.
|
|
167
|
-
|
|
171
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
172
|
+
backend = get_backend()
|
|
173
|
+
store = ChukSessionsStore(backend)
|
|
168
174
|
await store.save(self)
|
|
169
175
|
|
|
170
176
|
async def get_token_usage_by_source(self) -> Dict[str, TokenSummary]:
|
|
@@ -249,8 +255,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
249
255
|
self.state[key] = value
|
|
250
256
|
|
|
251
257
|
# Auto-save if needed (could be added as an option)
|
|
252
|
-
# from chuk_ai_session_manager.
|
|
253
|
-
#
|
|
258
|
+
# from chuk_ai_session_manager.chuk_sessions_storage import get_backend, ChukSessionsStore
|
|
259
|
+
# backend = get_backend()
|
|
260
|
+
# store = ChukSessionsStore(backend)
|
|
254
261
|
# await store.save(self)
|
|
255
262
|
|
|
256
263
|
async def get_state(self, key: str, default: Any = None) -> Any:
|
|
@@ -289,8 +296,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
289
296
|
del self.state[key]
|
|
290
297
|
|
|
291
298
|
# Auto-save if needed (could be added as an option)
|
|
292
|
-
# from chuk_ai_session_manager.
|
|
293
|
-
#
|
|
299
|
+
# from chuk_ai_session_manager.chuk_sessions_storage import get_backend, ChukSessionsStore
|
|
300
|
+
# backend = get_backend()
|
|
301
|
+
# store = ChukSessionsStore(backend)
|
|
294
302
|
# await store.save(self)
|
|
295
303
|
|
|
296
304
|
@classmethod
|
|
@@ -309,8 +317,9 @@ class Session(BaseModel, Generic[MessageT]):
|
|
|
309
317
|
await session.async_init()
|
|
310
318
|
|
|
311
319
|
# Save the new session
|
|
312
|
-
from chuk_ai_session_manager.
|
|
313
|
-
|
|
320
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
321
|
+
backend = get_backend()
|
|
322
|
+
store = ChukSessionsStore(backend)
|
|
314
323
|
await store.save(session)
|
|
315
324
|
|
|
316
325
|
return session
|
|
@@ -23,7 +23,7 @@ from chuk_tool_processor.models.tool_result import ToolResult
|
|
|
23
23
|
from chuk_ai_session_manager.models.event_source import EventSource
|
|
24
24
|
from chuk_ai_session_manager.models.event_type import EventType
|
|
25
25
|
from chuk_ai_session_manager.models.session_event import SessionEvent
|
|
26
|
-
from chuk_ai_session_manager.
|
|
26
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
27
27
|
|
|
28
28
|
logger = logging.getLogger(__name__)
|
|
29
29
|
|
|
@@ -52,7 +52,8 @@ class SessionAwareToolProcessor:
|
|
|
52
52
|
|
|
53
53
|
@classmethod
|
|
54
54
|
async def create(cls, session_id: str, **kwargs):
|
|
55
|
-
|
|
55
|
+
backend = get_backend()
|
|
56
|
+
store = ChukSessionsStore(backend)
|
|
56
57
|
if not await store.get(session_id):
|
|
57
58
|
raise ValueError(f"Session {session_id} not found")
|
|
58
59
|
return cls(session_id=session_id, **kwargs)
|
|
@@ -114,7 +115,8 @@ class SessionAwareToolProcessor:
|
|
|
114
115
|
|
|
115
116
|
# ─────────────────────────── public API ────────────────────────────
|
|
116
117
|
async def process_llm_message(self, llm_msg: Dict[str, Any], _) -> List[ToolResult]:
|
|
117
|
-
|
|
118
|
+
backend = get_backend()
|
|
119
|
+
store = ChukSessionsStore(backend)
|
|
118
120
|
session = await store.get(self.session_id)
|
|
119
121
|
if not session:
|
|
120
122
|
raise ValueError(f"Session {self.session_id} not found")
|
|
@@ -175,4 +177,4 @@ class SessionAwareToolProcessor:
|
|
|
175
177
|
)
|
|
176
178
|
out.append(err_res)
|
|
177
179
|
|
|
178
|
-
return out
|
|
180
|
+
return out
|
|
@@ -18,7 +18,7 @@ from chuk_ai_session_manager.models.session import Session
|
|
|
18
18
|
from chuk_ai_session_manager.models.event_type import EventType
|
|
19
19
|
from chuk_ai_session_manager.models.event_source import EventSource
|
|
20
20
|
from chuk_ai_session_manager.models.token_usage import TokenUsage
|
|
21
|
-
from chuk_ai_session_manager.
|
|
21
|
+
from chuk_ai_session_manager.session_storage import get_backend, ChukSessionsStore
|
|
22
22
|
|
|
23
23
|
logger = logging.getLogger(__name__)
|
|
24
24
|
|
|
@@ -391,7 +391,9 @@ async def _build_hierarchical_prompt(
|
|
|
391
391
|
|
|
392
392
|
# If parent context is enabled and session has a parent
|
|
393
393
|
if include_parent_context and session.parent_id:
|
|
394
|
-
|
|
394
|
+
# Get the storage backend and create store
|
|
395
|
+
backend = get_backend()
|
|
396
|
+
store = ChukSessionsStore(backend)
|
|
395
397
|
parent = await store.get(session.parent_id)
|
|
396
398
|
|
|
397
399
|
if parent:
|
|
@@ -466,9 +468,9 @@ async def truncate_prompt_to_token_limit(
|
|
|
466
468
|
if remaining > max_tokens:
|
|
467
469
|
# remove any tool messages we just added
|
|
468
470
|
kept = [m for m in kept if m["role"] != "tool"]
|
|
469
|
-
# but guarantee at least one tool message (the first) if it
|
|
471
|
+
# but guarantee at least one tool message (the first) if it'll fit
|
|
470
472
|
first_tool = next((m for m in prompt if m["role"] == "tool"), None)
|
|
471
473
|
if first_tool:
|
|
472
474
|
kept.append(first_tool)
|
|
473
475
|
|
|
474
|
-
return kept
|
|
476
|
+
return kept
|