chuk-ai-session-manager 0.2-py3-none-any.whl → 0.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chuk_ai_session_manager/__init__.py +56 -344
- chuk_ai_session_manager/api/simple_api.py +329 -198
- chuk_ai_session_manager-0.3.dist-info/METADATA +391 -0
- {chuk_ai_session_manager-0.2.dist-info → chuk_ai_session_manager-0.3.dist-info}/RECORD +6 -6
- chuk_ai_session_manager-0.2.dist-info/METADATA +0 -502
- {chuk_ai_session_manager-0.2.dist-info → chuk_ai_session_manager-0.3.dist-info}/WHEEL +0 -0
- {chuk_ai_session_manager-0.2.dist-info → chuk_ai_session_manager-0.3.dist-info}/top_level.txt +0 -0
@@ -1,19 +1,19 @@
 # chuk_ai_session_manager/api/simple_api.py
 """
-
+Unified SessionManager with built-in infinite context support.
 
 Usage:
-    from chuk_ai_session_manager
+    from chuk_ai_session_manager import SessionManager
 
-    #
+    # Regular session
     sm = SessionManager()
 
-    #
-
-    response = await sm.ai_responds("Hi there! How can I help?")
+    # Infinite context session
+    sm = SessionManager(infinite_context=True)
 
-    #
-
+    # Everything else is identical
+    await sm.user_says("Hello!")
+    await sm.ai_responds("Hi there!", model="gpt-4")
 """
 
 from __future__ import annotations
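The rewritten module docstring in the hunk above is the quickest summary of the 0.3 surface. Expanded into a self-contained sketch (the `asyncio.run` wrapper, the argument values, and the printed fields are illustrative assumptions; class, method, and parameter names are taken from the diff):

```python
import asyncio

from chuk_ai_session_manager import SessionManager  # import path shown in the 0.3 docstring


async def demo() -> None:
    # Regular session: track a user turn and the AI's reply.
    sm = SessionManager()
    await sm.user_says("Hello!")
    session_id = await sm.ai_responds("Hi there!", model="gpt-4", provider="openai")
    print("regular session:", session_id)

    # Infinite-context session: same calls, segmentation handled internally.
    inf = SessionManager(infinite_context=True, token_threshold=4000, max_turns_per_segment=20)
    await inf.user_says("What changed in 0.3?")
    await inf.ai_responds("SessionManager now segments long conversations.", model="gpt-4")
    print("infinite context enabled:", inf.is_infinite)


asyncio.run(demo())
```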
@@ -32,134 +32,223 @@ logger = logging.getLogger(__name__)
 
 class SessionManager:
     """
-
+    Unified session manager with built-in infinite context support.
 
-
-
+    Automatically handles session segmentation, summarization, and context
+    preservation when infinite_context=True is enabled.
     """
 
     def __init__(
         self,
         session_id: Optional[str] = None,
-
-
+        infinite_context: bool = False,
+        token_threshold: int = 4000,
+        max_turns_per_segment: int = 20
     ):
         """
         Initialize a session manager.
 
         Args:
             session_id: Use existing session or create new one
-
-
+            infinite_context: Enable automatic infinite context handling
+            token_threshold: Token limit before creating new session (infinite mode)
+            max_turns_per_segment: Turn limit before creating new session (infinite mode)
         """
-        self.auto_save = auto_save
         self._session: Optional[Session] = None
         self._session_id = session_id
-        self.
+        self._is_new = session_id is None
+
+        # Infinite context settings
+        self._infinite_context = infinite_context
+        self._token_threshold = token_threshold
+        self._max_turns_per_segment = max_turns_per_segment
 
-        #
-
+        # Infinite context state
+        self._session_chain: List[str] = []
+        self._full_conversation: List[Dict[str, Any]] = []
+        self._total_segments = 1
+
+    @property
+    def session_id(self) -> str:
+        """Get the current session ID."""
+        if self._session:
+            return self._session.id
+        elif self._session_id:
+            return self._session_id
+        else:
             import uuid
             self._session_id = str(uuid.uuid4())
+            return self._session_id
+
+    @property
+    def is_infinite(self) -> bool:
+        """Check if infinite context is enabled."""
+        return self._infinite_context
 
     async def _ensure_session(self) -> Session:
         """Ensure we have a session, creating one if needed."""
         if self._session is None:
-
-
+            backend = get_backend()
+            store = ChukSessionsStore(backend)
+
+            if self._is_new:
                 self._session = await Session.create()
                 self._session_id = self._session.id
+
+                # Always save new sessions immediately
+                await store.save(self._session)
+
+                # Initialize session chain for infinite context
+                if self._infinite_context:
+                    self._session_chain = [self._session_id]
             else:
-                # Try to load existing session
-                backend = get_backend()
-                store = ChukSessionsStore(backend)
                 self._session = await store.get(self._session_id)
                 if self._session is None:
                     raise ValueError(f"Session {self._session_id} not found")
-
         return self._session
 
-
-
-
-
-
-
-
+    async def _should_create_new_segment(self) -> bool:
+        """Check if we should create a new session segment."""
+        if not self._infinite_context:
+            return False
+
+        session = await self._ensure_session()
+
+        # Check token threshold
+        if session.total_tokens >= self._token_threshold:
+            return True
+
+        # Check turn threshold
+        message_events = [e for e in session.events if e.type == EventType.MESSAGE]
+        if len(message_events) >= self._max_turns_per_segment:
+            return True
+
+        return False
+
+    async def _create_summary(self) -> str:
+        """Create a summary of the current session."""
+        session = await self._ensure_session()
+        message_events = [e for e in session.events if e.type == EventType.MESSAGE]
+
+        # Simple summary generation
+        user_messages = [e for e in message_events if e.source == EventSource.USER]
+
+        topics = []
+        for event in user_messages:
+            content = str(event.message)
+            if "?" in content:
+                question = content.split("?")[0].strip()
+                if len(question) > 10:
+                    topics.append(question[:50])
+
+        if topics:
+            summary = f"User discussed: {'; '.join(topics[:3])}"
+            if len(topics) > 3:
+                summary += f" and {len(topics) - 3} other topics"
         else:
-
-
-
-            return self._session_id
+            summary = f"Conversation with {len(user_messages)} user messages and {len(message_events) - len(user_messages)} responses"
+
+        return summary
 
-    async def
-
-
-
-
+    async def _create_new_segment(self) -> str:
+        """Create a new session segment with summary."""
+        # Create summary of current session
+        summary = await self._create_summary()
+
+        # Add summary to current session
+        summary_event = SessionEvent(
+            message=summary,
+            source=EventSource.SYSTEM,
+            type=EventType.SUMMARY
+        )
+        current_session = await self._ensure_session()
+        await current_session.add_event_and_save(summary_event)
+
+        # Create new session with current as parent
+        new_session = await Session.create(parent_id=self._session_id)
+
+        # Update our state
+        old_session_id = self._session_id
+        self._session_id = new_session.id
+        self._session = new_session
+        self._session_chain.append(self._session_id)
+        self._total_segments += 1
+
+        logger.info(f"Created new session segment: {old_session_id} -> {self._session_id}")
+        return self._session_id
+
+    async def user_says(self, message: str, **metadata) -> str:
         """
         Track a user message.
 
         Args:
             message: What the user said
-            metadata: Optional metadata to attach
+            **metadata: Optional metadata to attach
 
         Returns:
-            The session ID
+            The current session ID (may change in infinite mode)
         """
+        # Check for segmentation before adding message
+        if await self._should_create_new_segment():
+            await self._create_new_segment()
+
         session = await self._ensure_session()
 
+        # Create and add the event
         event = await SessionEvent.create_with_tokens(
             message=message,
             prompt=message,
-            model="gpt-4o-mini",
+            model="gpt-4o-mini",
             source=EventSource.USER,
             type=EventType.MESSAGE
         )
 
-        # Add metadata
-
-
-
+        # Add metadata
+        for key, value in metadata.items():
+            await event.set_metadata(key, value)
+
+        await session.add_event_and_save(event)
 
-
-
-
-
+        # Track in full conversation for infinite context
+        if self._infinite_context:
+            self._full_conversation.append({
+                "role": "user",
+                "content": message,
+                "timestamp": event.timestamp.isoformat(),
+                "session_id": self._session_id
+            })
 
-        return
+        return self._session_id
 
     async def ai_responds(
         self,
         response: str,
         model: str = "unknown",
         provider: str = "unknown",
-        metadata
+        **metadata
     ) -> str:
         """
         Track an AI response.
 
         Args:
             response: The AI's response
-            model: Model name
-            provider: Provider name
-            metadata: Optional metadata
+            model: Model name
+            provider: Provider name
+            **metadata: Optional metadata
 
         Returns:
-            The session ID
+            The current session ID (may change in infinite mode)
         """
-
+        # Check for segmentation before adding message
+        if await self._should_create_new_segment():
+            await self._create_new_segment()
 
-
-            "model": model,
-            "provider": provider,
-            "timestamp": datetime.now().isoformat(),
-            **(metadata or {})
-        }
+        session = await self._ensure_session()
 
+        # Create and add the event
         event = await SessionEvent.create_with_tokens(
             message=response,
-            prompt="",
+            prompt="",
             completion=response,
             model=model,
             source=EventSource.LLM,
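The hunk above adds the segmentation machinery: `_should_create_new_segment` checks `token_threshold` and `max_turns_per_segment`, and `_create_new_segment` writes a SUMMARY event before continuing in a child session. A hedged sketch of how that behaves from the caller's side, assuming a storage backend is already configured for the process; the deliberately tiny thresholds and the loop content are invented for illustration:

```python
import asyncio

from chuk_ai_session_manager import SessionManager


async def demo_segmentation() -> None:
    # Tiny thresholds so a short loop crosses the turn limit and forces new segments.
    sm = SessionManager(infinite_context=True, token_threshold=200, max_turns_per_segment=4)

    for i in range(6):
        await sm.user_says(f"Question {i}: what should go into segment {i}?")
        await sm.ai_responds(f"Answer {i}.", model="gpt-4o-mini", provider="openai")

    # Every threshold crossing appends a new session ID to the chain.
    print("segments:", await sm.get_session_chain())

    # Stats aggregate across all segments when infinite_context=True.
    print("stats:", await sm.get_stats())


asyncio.run(demo_segmentation())
```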
@@ -167,37 +256,40 @@ class SessionManager:
         )
 
         # Add metadata
+        full_metadata = {
+            "model": model,
+            "provider": provider,
+            "timestamp": datetime.now().isoformat(),
+            **metadata
+        }
+
         for key, value in full_metadata.items():
             await event.set_metadata(key, value)
 
-
-
-
-
+        await session.add_event_and_save(event)
+
+        # Track in full conversation for infinite context
+        if self._infinite_context:
+            self._full_conversation.append({
+                "role": "assistant",
+                "content": response,
+                "timestamp": event.timestamp.isoformat(),
+                "session_id": self._session_id,
+                "model": model,
+                "provider": provider
+            })
 
-        return
+        return self._session_id
 
-    async def
+    async def tool_used(
         self,
         tool_name: str,
         arguments: Dict[str, Any],
         result: Any,
         error: Optional[str] = None,
-        metadata
+        **metadata
     ) -> str:
-        """
-        Track a tool call.
-
-        Args:
-            tool_name: Name of the tool that was called
-            arguments: Arguments passed to the tool
-            result: Result from the tool
-            error: Error message if tool failed
-            metadata: Optional metadata to attach
-
-        Returns:
-            The session ID for this conversation
-        """
+        """Track a tool call."""
         session = await self._ensure_session()
 
         tool_message = {
@@ -214,163 +306,202 @@ class SessionManager:
             type=EventType.TOOL_CALL
         )
 
-
-
-        for key, value in metadata.items():
-            await event.set_metadata(key, value)
-
-        if self.auto_save:
-            await session.add_event_and_save(event)
-        else:
-            await session.add_event(event)
+        for key, value in metadata.items():
+            await event.set_metadata(key, value)
 
-
+        await session.add_event_and_save(event)
+        return self._session_id
 
-    async def get_conversation(self,
+    async def get_conversation(self, include_all_segments: bool = None) -> List[Dict[str, Any]]:
         """
-        Get
+        Get conversation history.
 
         Args:
-
+            include_all_segments: Include all segments (defaults to infinite_context setting)
 
         Returns:
-            List of conversation turns
+            List of conversation turns
         """
-
+        if include_all_segments is None:
+            include_all_segments = self._infinite_context
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if self._infinite_context and include_all_segments:
+            # Return full conversation across all segments
+            return self._full_conversation.copy()
+        else:
+            # Return current session only
+            session = await self._ensure_session()
+            conversation = []
+            for event in session.events:
+                if event.type == EventType.MESSAGE:
+                    turn = {
+                        "role": "user" if event.source == EventSource.USER else "assistant",
+                        "content": event.message,
+                        "timestamp": event.timestamp.isoformat()
+                    }
+                    conversation.append(turn)
+
+            return conversation
 
-    async def
-        """
-
-
-
-
-        """
-        session = await self._ensure_session()
-
-        tools = []
-        for event in session.events:
-            if event.type == EventType.TOOL_CALL and isinstance(event.message, dict):
-                tools.append({
-                    "tool": event.message.get("tool", "unknown"),
-                    "arguments": event.message.get("arguments", {}),
-                    "result": event.message.get("result"),
-                    "success": event.message.get("success", True),
-                    "error": event.message.get("error"),
-                    "timestamp": event.timestamp.isoformat()
-                })
-
-        return tools
+    async def get_session_chain(self) -> List[str]:
+        """Get the chain of session IDs (infinite context only)."""
+        if self._infinite_context:
+            return self._session_chain.copy()
+        else:
+            return [self._session_id]
 
-    async def get_stats(self) -> Dict[str, Any]:
+    async def get_stats(self, include_all_segments: bool = None) -> Dict[str, Any]:
         """
         Get conversation statistics.
 
+        Args:
+            include_all_segments: Include all segments (defaults to infinite_context setting)
+
         Returns:
             Dictionary with conversation stats
         """
+        if include_all_segments is None:
+            include_all_segments = self._infinite_context
+
         session = await self._ensure_session()
 
-
-
-
-
-
-
-
-
-
-
-            "ai_messages": ai_messages,
-            "tool_calls": tool_calls,
-            "created_at": session.metadata.created_at.isoformat(),
-            "last_update": session.last_update_time.isoformat(),
-            "total_tokens": session.total_tokens,
-            "estimated_cost": session.total_cost
-        }
-
-    async def save(self) -> None:
-        """Manually save the session (if auto_save is False)."""
-        if self._session:
+        if self._infinite_context and include_all_segments:
+            # Calculate stats across all segments
+            user_messages = len([t for t in self._full_conversation if t["role"] == "user"])
+            ai_messages = len([t for t in self._full_conversation if t["role"] == "assistant"])
+
+            # Get token/cost stats by loading all sessions in chain
+            total_tokens = 0
+            total_cost = 0.0
+            total_events = 0
+
             backend = get_backend()
             store = ChukSessionsStore(backend)
-
-
-
-
-
-
+
+            for session_id in self._session_chain:
+                try:
+                    sess = await store.get(session_id)
+                    if sess:
+                        total_tokens += sess.total_tokens
+                        total_cost += sess.total_cost
+                        total_events += len(sess.events)
+                except Exception:
+                    # Skip if can't load session
+                    pass
+
+            return {
+                "session_id": self._session_id,
+                "session_segments": self._total_segments,
+                "session_chain": self._session_chain,
+                "total_events": total_events,
+                "user_messages": user_messages,
+                "ai_messages": ai_messages,
+                "tool_calls": 0,  # TODO: Track tools in full conversation
+                "total_tokens": total_tokens,
+                "estimated_cost": total_cost,
+                "created_at": session.metadata.created_at.isoformat(),
+                "last_update": session.last_update_time.isoformat(),
+                "infinite_context": True
+            }
+        else:
+            # Current session stats only
+            user_messages = sum(1 for e in session.events
+                                if e.type == EventType.MESSAGE and e.source == EventSource.USER)
+            ai_messages = sum(1 for e in session.events
+                              if e.type == EventType.MESSAGE and e.source == EventSource.LLM)
+            tool_calls = sum(1 for e in session.events if e.type == EventType.TOOL_CALL)
+
+            return {
+                "session_id": session.id,
+                "session_segments": 1,
+                "total_events": len(session.events),
+                "user_messages": user_messages,
+                "ai_messages": ai_messages,
+                "tool_calls": tool_calls,
+                "total_tokens": session.total_tokens,
+                "estimated_cost": session.total_cost,
+                "created_at": session.metadata.created_at.isoformat(),
+                "last_update": session.last_update_time.isoformat(),
+                "infinite_context": self._infinite_context
+            }
 
 
-# Convenience functions
-async def
+# Convenience functions remain the same but simpler
+async def track_conversation(
     user_message: str,
     ai_response: str,
     model: str = "unknown",
-    provider: str = "unknown"
+    provider: str = "unknown",
+    infinite_context: bool = False,
+    token_threshold: int = 4000
 ) -> str:
-    """
-
-
-
-
-    sm = SessionManager()
+    """Quick way to track a single conversation turn."""
+    sm = SessionManager(
+        infinite_context=infinite_context,
+        token_threshold=token_threshold
+    )
     await sm.user_says(user_message)
     session_id = await sm.ai_responds(ai_response, model=model, provider=provider)
     return session_id
 
 async def track_llm_call(
     user_input: str,
-    llm_function: Callable[[str], str],
+    llm_function: Callable[[str], Union[str, Any]],
     model: str = "unknown",
     provider: str = "unknown",
-    session_manager: Optional[SessionManager] = None
+    session_manager: Optional[SessionManager] = None,
+    infinite_context: bool = False,
+    token_threshold: int = 4000
 ) -> tuple[str, str]:
-    """
-    Track an LLM call automatically.
-
-    Args:
-        user_input: The user's input
-        llm_function: Function that takes user input and returns AI response
-        model: Model name
-        provider: Provider name
-        session_manager: Existing session manager (creates new if None)
-
-    Returns:
-        Tuple of (ai_response, session_id)
-    """
+    """Track an LLM call automatically."""
     if session_manager is None:
-        session_manager = SessionManager(
+        session_manager = SessionManager(
+            infinite_context=infinite_context,
+            token_threshold=token_threshold
+        )
 
-    # Track user input
     await session_manager.user_says(user_input)
 
-    # Call the LLM
     if asyncio.iscoroutinefunction(llm_function):
         ai_response = await llm_function(user_input)
     else:
         ai_response = llm_function(user_input)
 
-    #
+    # Handle different response formats
+    if isinstance(ai_response, dict) and "choices" in ai_response:
+        response_text = ai_response["choices"][0]["message"]["content"]
+    elif hasattr(ai_response, "content"):
+        response_text = ai_response.content
+    else:
+        response_text = str(ai_response)
+
     session_id = await session_manager.ai_responds(
-
-        model=model,
-        provider=provider
+        response_text, model=model, provider=provider
     )
 
-    return
+    return response_text, session_id
+
+async def quick_conversation(
+    user_message: str,
+    ai_response: str,
+    infinite_context: bool = False
+) -> Dict[str, Any]:
+    """Quickest way to track a conversation and get basic stats."""
+    session_id = await track_conversation(
+        user_message, ai_response, infinite_context=infinite_context
+    )
+    sm = SessionManager(session_id, infinite_context=infinite_context)
+    return await sm.get_stats()
+
+async def track_infinite_conversation(
+    user_message: str,
+    ai_response: str,
+    model: str = "unknown",
+    provider: str = "unknown",
+    token_threshold: int = 4000
+) -> str:
+    """Track a conversation with infinite context support."""
+    return await track_conversation(
+        user_message, ai_response, model=model, provider=provider,
+        infinite_context=True, token_threshold=token_threshold
+    )