chuk-ai-session-manager 0.3__tar.gz → 0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/PKG-INFO +1 -38
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/README.md +0 -37
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/pyproject.toml +21 -5
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/__init__.py +12 -10
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/token_usage.py +13 -2
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/sample_tools.py +1 -1
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_prompt_builder.py +70 -62
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/PKG-INFO +1 -38
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/SOURCES.txt +7 -7
- chuk_ai_session_manager-0.4/tests/test_basic_functionality.py +341 -0
- chuk_ai_session_manager-0.4/tests/test_exceptions.py +436 -0
- chuk_ai_session_manager-0.4/tests/test_infinite_conversation.py +446 -0
- chuk_ai_session_manager-0.4/tests/test_models.py +457 -0
- chuk_ai_session_manager-0.4/tests/test_prompt_builder.py +668 -0
- chuk_ai_session_manager-0.4/tests/test_simple.py +569 -0
- chuk_ai_session_manager-0.4/tests/test_simple_api.py +862 -0
- chuk_ai_session_manager-0.4/tests/test_storage.py +445 -0
- chuk_ai_session_manager-0.4/tests/test_tools.py +548 -0
- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/utils/__init__.py +0 -0
- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/utils/status_display_utils.py +0 -474
- chuk_ai_session_manager-0.3/tests/test_basic_functionality.py +0 -126
- chuk_ai_session_manager-0.3/tests/test_chuk_session_storage.py +0 -249
- chuk_ai_session_manager-0.3/tests/test_infinite_conversation.py +0 -143
- chuk_ai_session_manager-0.3/tests/test_infinite_conversation_advanced.py +0 -308
- chuk_ai_session_manager-0.3/tests/test_session.py +0 -285
- chuk_ai_session_manager-0.3/tests/test_session_aware_tool_processor.py +0 -117
- chuk_ai_session_manager-0.3/tests/test_session_prompt_builder.py +0 -57
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/setup.cfg +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/api/__init__.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/api/simple_api.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/exceptions.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/infinite_conversation.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/__init__.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/event_source.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/event_type.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session_event.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session_metadata.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session_run.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_aware_tool_processor.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_storage.py +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/dependency_links.txt +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/requires.txt +0 -0
- {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/top_level.txt +0 -0
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: chuk-ai-session-manager
-Version: 0.3
+Version: 0.4
 Summary: Session manager for AI applications
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown

@@ -311,36 +311,6 @@ pip install chuk-ai-session-manager
 | **Production Ready** | Requires additional work | Built for production |
 | **Learning Curve** | Steep | 5 minutes to productivity |
 
-## 🚀 Migration Guides
-
-### From LangChain Memory
-```python
-# Old LangChain way
-from langchain.memory import ConversationBufferMemory
-memory = ConversationBufferMemory()
-memory.save_context({"input": "Hi"}, {"output": "Hello"})
-
-# New CHUK way (much simpler!)
-from chuk_ai_session_manager import track_conversation
-await track_conversation("Hi", "Hello")
-```
-
-### From Manual Session Management
-```python
-# Old manual way
-conversations = {}
-def save_conversation(user_id, message, response):
-    if user_id not in conversations:
-        conversations[user_id] = []
-    conversations[user_id].append({"user": message, "ai": response})
-
-# New CHUK way
-from chuk_ai_session_manager import SessionManager
-sm = SessionManager(session_id=user_id)
-await sm.user_says(message)
-await sm.ai_responds(response)
-```
-
 ## 📖 More Examples
 
 Check out the `/examples` directory for complete working examples:

@@ -362,16 +332,9 @@ Check out the `/examples` directory for complete working examples:
 - ✅ Complete conversation analytics and observability
 - ✅ Framework-agnostic solution that works with any LLM library
 
-**Consider alternatives if you:**
-- ❌ Only need basic in-memory conversation history
-- ❌ Are locked into a specific framework (LangChain, etc.)
-- ❌ Don't need cost tracking or analytics
-- ❌ Are building simple, stateless AI applications
-
 ## 🤝 Community & Support
 
 - 📖 **Documentation**: [Full docs with tutorials](link-to-docs)
-- 💬 **Discord**: Join our community for help and discussions
 - 🐛 **Issues**: Report bugs on GitHub
 - 💡 **Feature Requests**: Suggest new features
 - 📧 **Support**: enterprise@chuk.dev for production support
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/README.md
RENAMED

@@ -289,36 +289,6 @@ pip install chuk-ai-session-manager
 | **Production Ready** | Requires additional work | Built for production |
 | **Learning Curve** | Steep | 5 minutes to productivity |
 
-## 🚀 Migration Guides
-
-### From LangChain Memory
-```python
-# Old LangChain way
-from langchain.memory import ConversationBufferMemory
-memory = ConversationBufferMemory()
-memory.save_context({"input": "Hi"}, {"output": "Hello"})
-
-# New CHUK way (much simpler!)
-from chuk_ai_session_manager import track_conversation
-await track_conversation("Hi", "Hello")
-```
-
-### From Manual Session Management
-```python
-# Old manual way
-conversations = {}
-def save_conversation(user_id, message, response):
-    if user_id not in conversations:
-        conversations[user_id] = []
-    conversations[user_id].append({"user": message, "ai": response})
-
-# New CHUK way
-from chuk_ai_session_manager import SessionManager
-sm = SessionManager(session_id=user_id)
-await sm.user_says(message)
-await sm.ai_responds(response)
-```
-
 ## 📖 More Examples
 
 Check out the `/examples` directory for complete working examples:

@@ -340,16 +310,9 @@ Check out the `/examples` directory for complete working examples:
 - ✅ Complete conversation analytics and observability
 - ✅ Framework-agnostic solution that works with any LLM library
 
-**Consider alternatives if you:**
-- ❌ Only need basic in-memory conversation history
-- ❌ Are locked into a specific framework (LangChain, etc.)
-- ❌ Don't need cost tracking or analytics
-- ❌ Are building simple, stateless AI applications
-
 ## 🤝 Community & Support
 
 - 📖 **Documentation**: [Full docs with tutorials](link-to-docs)
-- 💬 **Discord**: Join our community for help and discussions
 - 🐛 **Issues**: Report bugs on GitHub
 - 💡 **Feature Requests**: Suggest new features
 - 📧 **Support**: enterprise@chuk.dev for production support
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/pyproject.toml
RENAMED

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "chuk-ai-session-manager"
-version = "0.3"
+version = "0.4"
 description = "Session manager for AI applications"
 readme = "README.md"
 requires-python = ">=3.11"

@@ -36,10 +36,26 @@ include = ["chuk_ai_session_manager*"]
 # pytest settings so it finds your src/ layout automatically
 [tool.pytest.ini_options]
 testpaths = ["tests"]
-
-
-
-
+asyncio_mode = "auto"
+python_files = ["test_*.py"]
+python_classes = ["Test*"]
+python_functions = ["test_*"]
+addopts = [
+    "--strict-markers",
+    "--strict-config",
+    "-ra",
+    "--tb=short",
+]
+markers = [
+    "unit: Unit tests",
+    "integration: Integration tests",
+    "slow: Slow tests",
+    "asyncio: Async tests",
+]
+filterwarnings = [
+    "ignore::DeprecationWarning",
+    "ignore::PendingDeprecationWarning",
+]
 
 [tool.black]
 line-length = 88
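The new `[tool.pytest.ini_options]` block turns on pytest-asyncio's auto mode and declares the markers the rewritten test suite relies on. A minimal sketch of a test that would run under this configuration, assuming pytest-asyncio is installed; the module and test names are illustrative, not files shipped in the package:

```python
# tests/test_example.py - illustrative sketch, not a file shipped in the package
import pytest

from chuk_ai_session_manager import track_conversation


@pytest.mark.unit  # declared in `markers` above, so --strict-markers accepts it
async def test_track_conversation_roundtrip():
    # asyncio_mode = "auto" lets pytest-asyncio collect and await this coroutine
    # without an explicit @pytest.mark.asyncio decorator.
    await track_conversation("Hi", "Hello")
```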
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/__init__.py
RENAMED

@@ -15,7 +15,18 @@ Quick Start:
     await sm.ai_responds("It's sunny and 72°F", model="gpt-4")
 """
 
-#
+# Import core models first (these have no circular dependencies)
+from chuk_ai_session_manager.models.event_source import EventSource
+from chuk_ai_session_manager.models.event_type import EventType
+
+# Import storage setup (this should work now with the fixed session_storage.py)
+from chuk_ai_session_manager.session_storage import setup_chuk_sessions_storage
+
+# Import other models (these might depend on storage being set up)
+from chuk_ai_session_manager.models.session import Session
+from chuk_ai_session_manager.models.session_event import SessionEvent
+
+# Import the simple API (this should work now that storage is fixed)
 from chuk_ai_session_manager.api.simple_api import (
     SessionManager,
     track_conversation,

@@ -24,15 +35,6 @@ from chuk_ai_session_manager.api.simple_api import (
     track_infinite_conversation
 )
 
-# Core models for advanced users
-from chuk_ai_session_manager.models.session import Session
-from chuk_ai_session_manager.models.session_event import SessionEvent
-from chuk_ai_session_manager.models.event_source import EventSource
-from chuk_ai_session_manager.models.event_type import EventType
-
-# Storage backend setup
-from chuk_ai_session_manager.session_storage import setup_chuk_sessions_storage
-
 __version__ = "0.1.0"
 
 # Main exports - keep it simple
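The reordered `__init__` imports load the core models and the storage setup before the simple API, as the new comments explain, while the exported names stay the same. A short sketch of the public surface these imports expose; the usage pattern is taken from the package README, the rest is illustrative:

```python
from chuk_ai_session_manager import (
    SessionManager,
    track_conversation,
    Session,
    SessionEvent,
    EventSource,
    EventType,
    setup_chuk_sessions_storage,
)


async def demo(user_id: str) -> None:
    # Pattern from the package README; user_id is a caller-supplied identifier.
    sm = SessionManager(session_id=user_id)
    await sm.user_says("What's the weather like in Paris?")
    await sm.ai_responds("It's sunny and 72°F", model="gpt-4")
```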
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/token_usage.py
RENAMED

@@ -175,7 +175,7 @@ class TokenUsage(BaseModel):
     )
 
     @staticmethod
-    def _count_tokens_sync(text: Optional[str], model: str = "gpt-3.5-turbo") -> int:
+    def _count_tokens_sync(text: Optional[Union[str, Any]], model: str = "gpt-3.5-turbo") -> int:
         """
         Synchronous implementation of count_tokens.
 
@@ -188,6 +188,17 @@ class TokenUsage(BaseModel):
         """
         if text is None:
             return 0
+
+        # Convert to string if not already a string
+        if not isinstance(text, str):
+            try:
+                text = str(text)
+            except Exception:
+                return 0
+
+        # Empty string has 0 tokens
+        if not text:
+            return 0
 
         if TIKTOKEN_AVAILABLE:
             try:
@@ -206,7 +217,7 @@ class TokenUsage(BaseModel):
             return int(len(text) / 4)
 
     @staticmethod
-    async def count_tokens(text: Optional[str], model: str = "gpt-3.5-turbo") -> int:
+    async def count_tokens(text: Optional[Union[str, Any]], model: str = "gpt-3.5-turbo") -> int:
         """
         Async version of count_tokens.
 
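The added guard stringifies non-string payloads before counting instead of raising. A rough sketch of the resulting behaviour; exact counts depend on whether `tiktoken` is available, otherwise the `len(text) / 4` fallback shown above applies:

```python
import asyncio

from chuk_ai_session_manager.models.token_usage import TokenUsage


async def main() -> None:
    print(await TokenUsage.count_tokens(None))               # 0: None short-circuits
    print(await TokenUsage.count_tokens(""))                 # 0: empty string
    print(await TokenUsage.count_tokens("Hello, world!"))    # tiktoken count, or len/4 fallback
    print(await TokenUsage.count_tokens({"content": "hi"}))  # dict is str()-ified before counting


asyncio.run(main())
```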
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_prompt_builder.py
RENAMED

@@ -37,7 +37,8 @@ async def build_prompt_from_session(
     max_tokens: Optional[int] = None,
     model: str = "gpt-3.5-turbo",
     include_parent_context: bool = False,
-    current_query: Optional[str] = None
+    current_query: Optional[str] = None,
+    max_history: int = 5  # Add this parameter for conversation strategy
 ) -> List[Dict[str, str]]:
     """
     Build a prompt for the next LLM call from a Session asynchronously.

@@ -49,6 +50,7 @@
         model: Model to use for token counting
         include_parent_context: Whether to include context from parent sessions
         current_query: Current user query for relevance-based context selection
+        max_history: Maximum number of messages to include for conversation strategy
 
     Returns:
         A list of message dictionaries suitable for LLM API calls

@@ -72,7 +74,7 @@
     elif strategy == PromptStrategy.TOOL_FOCUSED:
         return await _build_tool_focused_prompt(session)
     elif strategy == PromptStrategy.CONVERSATION:
-        return await _build_conversation_prompt(session, max_history
+        return await _build_conversation_prompt(session, max_history)
     elif strategy == PromptStrategy.HIERARCHICAL:
         return await _build_hierarchical_prompt(session, include_parent_context)
     else:
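With `max_history` added to the signature and threaded into the conversation strategy, a caller can bound the history explicitly. A hedged sketch of a call site; `PromptStrategy` is referenced by the dispatch above and assumed importable from the same module, and `strategy` is assumed to be accepted as a keyword argument:

```python
from chuk_ai_session_manager.session_prompt_builder import (
    PromptStrategy,
    build_prompt_from_session,
)


async def next_prompt(session):
    # `session` is assumed to be an existing Session instance.
    return await build_prompt_from_session(
        session,
        strategy=PromptStrategy.CONVERSATION,
        max_history=5,          # new parameter: cap on messages kept for this strategy
        model="gpt-3.5-turbo",  # used for token counting
    )
```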
@@ -112,7 +114,7 @@ async def _build_minimal_prompt(session: Session) -> List[Dict[str, str]]:
 
     if assistant_msg is None:
         # Only the user message exists so far
-        return [{"role": "user", "content": first_user.message}] if first_user else []
+        return [{"role": "user", "content": _extract_content(first_user.message)}] if first_user else []
 
     # Children of that assistant
     children = [

@@ -126,11 +128,7 @@ async def _build_minimal_prompt(session: Session) -> List[Dict[str, str]]:
     # Assemble prompt
     prompt: List[Dict[str, str]] = []
     if first_user:
-
-        user_content = first_user.message
-        if isinstance(user_content, dict) and "content" in user_content:
-            user_content = user_content["content"]
-        prompt.append({"role": "user", "content": user_content})
+        prompt.append({"role": "user", "content": _extract_content(first_user.message)})
 
     # ALWAYS add the assistant marker - but strip its free text
     prompt.append({"role": "assistant", "content": None})
@@ -166,6 +164,24 @@ async def _build_minimal_prompt(session: Session) -> List[Dict[str, str]]:
     return prompt
 
 
+def _extract_content(message: Any) -> str:
+    """
+    Extract content string from a message that could be a string or dict.
+
+    Args:
+        message: The message content (string, dict, or other)
+
+    Returns:
+        The extracted content as a string
+    """
+    if isinstance(message, str):
+        return message
+    elif isinstance(message, dict) and "content" in message:
+        return message["content"]
+    else:
+        return str(message)
+
+
 async def _build_task_focused_prompt(session: Session) -> List[Dict[str, str]]:
     """
     Build a task-focused prompt.
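The new `_extract_content` helper centralises the `isinstance` checks that the later hunks delete from each call site. Roughly, it behaves like this:

```python
_extract_content("hello")                     # -> "hello"
_extract_content({"content": "hi", "x": 1})   # -> "hi"
_extract_content({"note": "no content key"})  # -> "{'note': 'no content key'}" via str()
_extract_content(42)                          # -> "42"
```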
@@ -201,17 +217,11 @@ async def _build_task_focused_prompt(session: Session) -> List[Dict[str, str]]:
     prompt = []
 
     # Always include the first user message (the main task)
-
-    if isinstance(first_content, dict) and "content" in first_content:
-        first_content = first_content["content"]
-    prompt.append({"role": "user", "content": first_content})
+    prompt.append({"role": "user", "content": _extract_content(first_user.message)})
 
     # Include the latest user message if different from the first
     if latest_user and latest_user.id != first_user.id:
-
-        if isinstance(latest_content, dict) and "content" in latest_content:
-            latest_content = latest_content["content"]
-        prompt.append({"role": "user", "content": latest_content})
+        prompt.append({"role": "user", "content": _extract_content(latest_user.message)})
 
     # Include assistant response placeholder
     if assistant_msg:

@@ -274,10 +284,7 @@ async def _build_tool_focused_prompt(session: Session) -> List[Dict[str, str]]:
     prompt = []
 
     # Include user message
-
-    if isinstance(user_content, dict) and "content" in user_content:
-        user_content = user_content["content"]
-    prompt.append({"role": "user", "content": user_content})
+    prompt.append({"role": "user", "content": _extract_content(latest_user.message)})
 
     # Include assistant placeholder
     if assistant_msg:
@@ -334,17 +341,17 @@ async def _build_conversation_prompt(
 
     # Build the conversation history
     prompt = []
-    for msg in recent_messages:
+    for i, msg in enumerate(recent_messages):
         role = "user" if msg.source == EventSource.USER else "assistant"
-        content = msg.message
+        content = _extract_content(msg.message)
 
-        #
-        if
-
-
-
-
-        content
+        # For the last assistant message, set content to None and add tool calls
+        if (role == "assistant" and
+            msg == recent_messages[-1] and
+            msg.source != EventSource.USER):
+
+            # Add the message first with None content
+            prompt.append({"role": role, "content": None})
 
             # Add tool call results for this assistant message
             tool_calls = [

@@ -352,9 +359,6 @@
                 if e.type == EventType.TOOL_CALL and e.metadata.get("parent_event_id") == msg.id
             ]
 
-            # Add the message first, then tools
-            prompt.append({"role": role, "content": content})
-
             # Add tool results
             for tc in tool_calls:
                 if isinstance(tc.message, dict):

@@ -366,11 +370,9 @@
                         "name": tool_name,
                         "content": json.dumps(tool_result, default=str),
                     })
-
-            #
-
-
-            prompt.append({"role": role, "content": content})
+        else:
+            # Regular message
+            prompt.append({"role": role, "content": content})
 
     return prompt
 
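After this change the final assistant turn is emitted with `content: None` followed by one entry per recorded tool call, while earlier turns keep their text. The resulting message list looks roughly like the sketch below; the values are illustrative, and the role key on the tool entry is an assumption since the opening of that `prompt.append` call sits outside the hunk:

```python
[
    {"role": "user", "content": "What's the weather in Paris?"},
    {"role": "assistant", "content": None},  # last assistant turn: free text stripped
    {                                        # one entry per child TOOL_CALL event
        "role": "tool",                      # assumed; role key is outside this hunk
        "name": "get_weather",
        "content": "{\"temp_c\": 22, \"conditions\": \"sunny\"}",
    },
]
```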
@@ -391,32 +393,38 @@ async def _build_hierarchical_prompt(
 
     # If parent context is enabled and session has a parent
     if include_parent_context and session.parent_id:
-
-
-
-
-
-        if parent:
-            # Find the most recent summary in parent
-            summary_event = next(
-                (e for e in reversed(parent.events)
-                 if e.type == EventType.SUMMARY),
-                None
-            )
+        try:
+            # Get the storage backend and create store
+            backend = get_backend()
+            store = ChukSessionsStore(backend)
+            parent = await store.get(session.parent_id)
 
-            if
-            #
-
-
-
-
-
-
-
-
-
-
+            if parent:
+                # Find the most recent summary in parent
+                summary_event = next(
+                    (e for e in reversed(parent.events)
+                     if e.type == EventType.SUMMARY),
+                    None
+                )
+
+                if summary_event:
+                    # Extract summary content
+                    summary_content = summary_event.message
+                    if isinstance(summary_content, dict) and "note" in summary_content:
+                        summary_content = summary_content["note"]
+                    elif isinstance(summary_content, dict) and "content" in summary_content:
+                        summary_content = summary_content["content"]
+                    else:
+                        summary_content = str(summary_content)
+
+                    # Add parent context at the beginning
+                    prompt.insert(0, {
+                        "role": "system",
+                        "content": f"Context from previous conversation: {summary_content}"
+                    })
+        except Exception as e:
+            # If we can't load parent context, just continue with minimal prompt
+            logger.warning(f"Could not load parent context for session {session.parent_id}: {e}")
 
     return prompt
 
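The parent lookup now goes through the package's storage layer and degrades to a logged warning when the parent session cannot be loaded. A sketch of the same load-by-id pattern, assuming `get_backend` and `ChukSessionsStore` are importable from `chuk_ai_session_manager.session_storage` as the code above suggests:

```python
from chuk_ai_session_manager.session_storage import ChukSessionsStore, get_backend


async def load_session(session_id: str):
    backend = get_backend()
    store = ChukSessionsStore(backend)
    # Assumption: returns None for an unknown id, which is why the builder
    # guards with `if parent:` before reading parent.events.
    return await store.get(session_id)
```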
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: chuk-ai-session-manager
-Version: 0.3
+Version: 0.4
 Summary: Session manager for AI applications
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown

@@ -311,36 +311,6 @@ pip install chuk-ai-session-manager
 | **Production Ready** | Requires additional work | Built for production |
 | **Learning Curve** | Steep | 5 minutes to productivity |
 
-## 🚀 Migration Guides
-
-### From LangChain Memory
-```python
-# Old LangChain way
-from langchain.memory import ConversationBufferMemory
-memory = ConversationBufferMemory()
-memory.save_context({"input": "Hi"}, {"output": "Hello"})
-
-# New CHUK way (much simpler!)
-from chuk_ai_session_manager import track_conversation
-await track_conversation("Hi", "Hello")
-```
-
-### From Manual Session Management
-```python
-# Old manual way
-conversations = {}
-def save_conversation(user_id, message, response):
-    if user_id not in conversations:
-        conversations[user_id] = []
-    conversations[user_id].append({"user": message, "ai": response})
-
-# New CHUK way
-from chuk_ai_session_manager import SessionManager
-sm = SessionManager(session_id=user_id)
-await sm.user_says(message)
-await sm.ai_responds(response)
-```
-
 ## 📖 More Examples
 
 Check out the `/examples` directory for complete working examples:

@@ -362,16 +332,9 @@ Check out the `/examples` directory for complete working examples:
 - ✅ Complete conversation analytics and observability
 - ✅ Framework-agnostic solution that works with any LLM library
 
-**Consider alternatives if you:**
-- ❌ Only need basic in-memory conversation history
-- ❌ Are locked into a specific framework (LangChain, etc.)
-- ❌ Don't need cost tracking or analytics
-- ❌ Are building simple, stateless AI applications
-
 ## 🤝 Community & Support
 
 - 📖 **Documentation**: [Full docs with tutorials](link-to-docs)
-- 💬 **Discord**: Join our community for help and discussions
 - 🐛 **Issues**: Report bugs on GitHub
 - 💡 **Feature Requests**: Suggest new features
 - 📧 **Support**: enterprise@chuk.dev for production support
{chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/SOURCES.txt
RENAMED

@@ -22,12 +22,12 @@ src/chuk_ai_session_manager/models/session_event.py
 src/chuk_ai_session_manager/models/session_metadata.py
 src/chuk_ai_session_manager/models/session_run.py
 src/chuk_ai_session_manager/models/token_usage.py
-src/chuk_ai_session_manager/utils/__init__.py
-src/chuk_ai_session_manager/utils/status_display_utils.py
 tests/test_basic_functionality.py
-tests/test_chuk_session_storage.py
+tests/test_exceptions.py
 tests/test_infinite_conversation.py
-tests/test_infinite_conversation_advanced.py
-tests/test_session.py
-tests/test_session_aware_tool_processor.py
-tests/test_session_prompt_builder.py
+tests/test_models.py
+tests/test_prompt_builder.py
+tests/test_simple.py
+tests/test_simple_api.py
+tests/test_storage.py
+tests/test_tools.py