chuk-ai-session-manager 0.3.tar.gz → 0.4.tar.gz

This diff shows the changes between two publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
Files changed (44)
  1. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/PKG-INFO +1 -38
  2. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/README.md +0 -37
  3. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/pyproject.toml +21 -5
  4. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/__init__.py +12 -10
  5. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/token_usage.py +13 -2
  6. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/sample_tools.py +1 -1
  7. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_prompt_builder.py +70 -62
  8. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/PKG-INFO +1 -38
  9. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/SOURCES.txt +7 -7
  10. chuk_ai_session_manager-0.4/tests/test_basic_functionality.py +341 -0
  11. chuk_ai_session_manager-0.4/tests/test_exceptions.py +436 -0
  12. chuk_ai_session_manager-0.4/tests/test_infinite_conversation.py +446 -0
  13. chuk_ai_session_manager-0.4/tests/test_models.py +457 -0
  14. chuk_ai_session_manager-0.4/tests/test_prompt_builder.py +668 -0
  15. chuk_ai_session_manager-0.4/tests/test_simple.py +569 -0
  16. chuk_ai_session_manager-0.4/tests/test_simple_api.py +862 -0
  17. chuk_ai_session_manager-0.4/tests/test_storage.py +445 -0
  18. chuk_ai_session_manager-0.4/tests/test_tools.py +548 -0
  19. chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/utils/__init__.py +0 -0
  20. chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/utils/status_display_utils.py +0 -474
  21. chuk_ai_session_manager-0.3/tests/test_basic_functionality.py +0 -126
  22. chuk_ai_session_manager-0.3/tests/test_chuk_session_storage.py +0 -249
  23. chuk_ai_session_manager-0.3/tests/test_infinite_conversation.py +0 -143
  24. chuk_ai_session_manager-0.3/tests/test_infinite_conversation_advanced.py +0 -308
  25. chuk_ai_session_manager-0.3/tests/test_session.py +0 -285
  26. chuk_ai_session_manager-0.3/tests/test_session_aware_tool_processor.py +0 -117
  27. chuk_ai_session_manager-0.3/tests/test_session_prompt_builder.py +0 -57
  28. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/setup.cfg +0 -0
  29. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/api/__init__.py +0 -0
  30. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/api/simple_api.py +0 -0
  31. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/exceptions.py +0 -0
  32. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/infinite_conversation.py +0 -0
  33. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/__init__.py +0 -0
  34. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/event_source.py +0 -0
  35. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/event_type.py +0 -0
  36. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session.py +0 -0
  37. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session_event.py +0 -0
  38. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session_metadata.py +0 -0
  39. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/models/session_run.py +0 -0
  40. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_aware_tool_processor.py +0 -0
  41. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager/session_storage.py +0 -0
  42. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/dependency_links.txt +0 -0
  43. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/requires.txt +0 -0
  44. {chuk_ai_session_manager-0.3 → chuk_ai_session_manager-0.4}/src/chuk_ai_session_manager.egg-info/top_level.txt +0 -0
--- chuk_ai_session_manager-0.3/PKG-INFO
+++ chuk_ai_session_manager-0.4/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: chuk-ai-session-manager
-Version: 0.3
+Version: 0.4
 Summary: Session manager for AI applications
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown
@@ -311,36 +311,6 @@ pip install chuk-ai-session-manager
 | **Production Ready** | Requires additional work | Built for production |
 | **Learning Curve** | Steep | 5 minutes to productivity |
 
-## 🚀 Migration Guides
-
-### From LangChain Memory
-```python
-# Old LangChain way
-from langchain.memory import ConversationBufferMemory
-memory = ConversationBufferMemory()
-memory.save_context({"input": "Hi"}, {"output": "Hello"})
-
-# New CHUK way (much simpler!)
-from chuk_ai_session_manager import track_conversation
-await track_conversation("Hi", "Hello")
-```
-
-### From Manual Session Management
-```python
-# Old manual way
-conversations = {}
-def save_conversation(user_id, message, response):
-    if user_id not in conversations:
-        conversations[user_id] = []
-    conversations[user_id].append({"user": message, "ai": response})
-
-# New CHUK way
-from chuk_ai_session_manager import SessionManager
-sm = SessionManager(session_id=user_id)
-await sm.user_says(message)
-await sm.ai_responds(response)
-```
-
 ## 📖 More Examples
 
 Check out the `/examples` directory for complete working examples:
@@ -362,16 +332,9 @@ Check out the `/examples` directory for complete working examples:
 - ✅ Complete conversation analytics and observability
 - ✅ Framework-agnostic solution that works with any LLM library
 
-**Consider alternatives if you:**
-- ❌ Only need basic in-memory conversation history
-- ❌ Are locked into a specific framework (LangChain, etc.)
-- ❌ Don't need cost tracking or analytics
-- ❌ Are building simple, stateless AI applications
-
 ## 🤝 Community & Support
 
 - 📖 **Documentation**: [Full docs with tutorials](link-to-docs)
-- 💬 **Discord**: Join our community for help and discussions
 - 🐛 **Issues**: Report bugs on GitHub
 - 💡 **Feature Requests**: Suggest new features
 - 📧 **Support**: enterprise@chuk.dev for production support
--- chuk_ai_session_manager-0.3/README.md
+++ chuk_ai_session_manager-0.4/README.md
@@ -289,36 +289,6 @@ pip install chuk-ai-session-manager
 | **Production Ready** | Requires additional work | Built for production |
 | **Learning Curve** | Steep | 5 minutes to productivity |
 
-## 🚀 Migration Guides
-
-### From LangChain Memory
-```python
-# Old LangChain way
-from langchain.memory import ConversationBufferMemory
-memory = ConversationBufferMemory()
-memory.save_context({"input": "Hi"}, {"output": "Hello"})
-
-# New CHUK way (much simpler!)
-from chuk_ai_session_manager import track_conversation
-await track_conversation("Hi", "Hello")
-```
-
-### From Manual Session Management
-```python
-# Old manual way
-conversations = {}
-def save_conversation(user_id, message, response):
-    if user_id not in conversations:
-        conversations[user_id] = []
-    conversations[user_id].append({"user": message, "ai": response})
-
-# New CHUK way
-from chuk_ai_session_manager import SessionManager
-sm = SessionManager(session_id=user_id)
-await sm.user_says(message)
-await sm.ai_responds(response)
-```
-
 ## 📖 More Examples
 
 Check out the `/examples` directory for complete working examples:
@@ -340,16 +310,9 @@ Check out the `/examples` directory for complete working examples:
 - ✅ Complete conversation analytics and observability
 - ✅ Framework-agnostic solution that works with any LLM library
 
-**Consider alternatives if you:**
-- ❌ Only need basic in-memory conversation history
-- ❌ Are locked into a specific framework (LangChain, etc.)
-- ❌ Don't need cost tracking or analytics
-- ❌ Are building simple, stateless AI applications
-
 ## 🤝 Community & Support
 
 - 📖 **Documentation**: [Full docs with tutorials](link-to-docs)
-- 💬 **Discord**: Join our community for help and discussions
 - 🐛 **Issues**: Report bugs on GitHub
 - 💡 **Feature Requests**: Suggest new features
 - 📧 **Support**: enterprise@chuk.dev for production support
--- chuk_ai_session_manager-0.3/pyproject.toml
+++ chuk_ai_session_manager-0.4/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "chuk-ai-session-manager"
-version = "0.3"
+version = "0.4"
 description = "Session manager for AI applications"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -36,10 +36,26 @@ include = ["chuk_ai_session_manager*"]
 # pytest settings so it finds your src/ layout automatically
 [tool.pytest.ini_options]
 testpaths = ["tests"]
-pythonpath = ["src"]
-addopts = "-v"
-asyncio_default_fixture_loop_scope = "function"
-asyncio_mode = "strict"
+asyncio_mode = "auto"
+python_files = ["test_*.py"]
+python_classes = ["Test*"]
+python_functions = ["test_*"]
+addopts = [
+    "--strict-markers",
+    "--strict-config",
+    "-ra",
+    "--tb=short",
+]
+markers = [
+    "unit: Unit tests",
+    "integration: Integration tests",
+    "slow: Slow tests",
+    "asyncio: Async tests",
+]
+filterwarnings = [
+    "ignore::DeprecationWarning",
+    "ignore::PendingDeprecationWarning",
+]
 
 [tool.black]
 line-length = 88
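The new `[tool.pytest.ini_options]` block drops the old `pythonpath`/strict-asyncio settings in favour of `asyncio_mode = "auto"`, strict marker checking, and an explicit marker list. A minimal sketch of a test module that would run under this configuration; the test names and bodies below are illustrative, not taken from the package's actual test suite:

```python
# test_config_sketch.py — hypothetical tests exercising the 0.4 pytest settings.
import pytest


@pytest.mark.unit
def test_declared_marker_is_accepted():
    # "--strict-markers" rejects any marker not listed in pyproject.toml,
    # so only unit/integration/slow/asyncio may be used here.
    assert True


@pytest.mark.slow
async def test_async_without_decorator():
    # With asyncio_mode = "auto", pytest-asyncio collects and awaits this
    # coroutine without an explicit @pytest.mark.asyncio decorator.
    assert 1 + 1 == 2
```

Running `pytest -m unit` would then select just the unit-marked subset, and the two `filterwarnings` entries silence deprecation noise during the run.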
--- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/__init__.py
+++ chuk_ai_session_manager-0.4/src/chuk_ai_session_manager/__init__.py
@@ -15,7 +15,18 @@ Quick Start:
     await sm.ai_responds("It's sunny and 72°F", model="gpt-4")
 """
 
-# Simple API exports - the main things developers need
+# Import core models first (these have no circular dependencies)
+from chuk_ai_session_manager.models.event_source import EventSource
+from chuk_ai_session_manager.models.event_type import EventType
+
+# Import storage setup (this should work now with the fixed session_storage.py)
+from chuk_ai_session_manager.session_storage import setup_chuk_sessions_storage
+
+# Import other models (these might depend on storage being set up)
+from chuk_ai_session_manager.models.session import Session
+from chuk_ai_session_manager.models.session_event import SessionEvent
+
+# Import the simple API (this should work now that storage is fixed)
 from chuk_ai_session_manager.api.simple_api import (
     SessionManager,
     track_conversation,
@@ -24,15 +35,6 @@ from chuk_ai_session_manager.api.simple_api import (
     track_infinite_conversation
 )
 
-# Core models for advanced users
-from chuk_ai_session_manager.models.session import Session
-from chuk_ai_session_manager.models.session_event import SessionEvent
-from chuk_ai_session_manager.models.event_source import EventSource
-from chuk_ai_session_manager.models.event_type import EventType
-
-# Storage backend setup
-from chuk_ai_session_manager.session_storage import setup_chuk_sessions_storage
-
 __version__ = "0.1.0"
 
 # Main exports - keep it simple
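The reordering only changes how the package bootstraps its imports (core models first, then storage setup, then the simple API); the exported names are unchanged. A hedged sketch of how those top-level exports are meant to be used, following the Quick Start in the module docstring and the README examples; the message strings and session id are placeholders:

```python
# Hypothetical usage of the re-exported simple API.
import asyncio

from chuk_ai_session_manager import SessionManager, track_conversation


async def main() -> None:
    # One-shot helper: record a user/assistant exchange.
    await track_conversation("Hi", "Hello")

    # Explicit session management, as in the docstring's Quick Start.
    sm = SessionManager(session_id="demo-session")
    await sm.user_says("What's the weather like?")
    await sm.ai_responds("It's sunny and 72°F", model="gpt-4")


asyncio.run(main())
```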
--- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/models/token_usage.py
+++ chuk_ai_session_manager-0.4/src/chuk_ai_session_manager/models/token_usage.py
@@ -175,7 +175,7 @@ class TokenUsage(BaseModel):
     )
 
     @staticmethod
-    def _count_tokens_sync(text: Optional[str], model: str = "gpt-3.5-turbo") -> int:
+    def _count_tokens_sync(text: Optional[Union[str, Any]], model: str = "gpt-3.5-turbo") -> int:
         """
         Synchronous implementation of count_tokens.
 
@@ -188,6 +188,17 @@ class TokenUsage(BaseModel):
         """
         if text is None:
             return 0
+
+        # Convert to string if not already a string
+        if not isinstance(text, str):
+            try:
+                text = str(text)
+            except Exception:
+                return 0
+
+        # Empty string has 0 tokens
+        if not text:
+            return 0
 
         if TIKTOKEN_AVAILABLE:
             try:
@@ -206,7 +217,7 @@ class TokenUsage(BaseModel):
         return int(len(text) / 4)
 
     @staticmethod
-    async def count_tokens(text: Optional[str], model: str = "gpt-3.5-turbo") -> int:
+    async def count_tokens(text: Optional[Union[str, Any]], model: str = "gpt-3.5-turbo") -> int:
         """
         Async version of count_tokens.
 
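The widened `Optional[Union[str, Any]]` signature means callers can now pass non-string payloads: anything that is not already a string is coerced with `str()` (returning 0 if even that fails), empty strings count as zero, and when tiktoken is unavailable the estimate falls back to the rough `len(text) / 4` heuristic shown above. A small hedged sketch of calling the static helper; the example inputs are illustrative:

```python
# Hypothetical call sites for TokenUsage.count_tokens after the 0.4 change.
import asyncio

from chuk_ai_session_manager.models.token_usage import TokenUsage


async def main() -> None:
    # Plain string: counted with tiktoken when installed, otherwise ~len/4.
    print(await TokenUsage.count_tokens("It's sunny and 72°F", model="gpt-4"))

    # Non-string input is now coerced via str() instead of raising.
    print(await TokenUsage.count_tokens({"role": "user", "content": "Hi"}))

    # None (and empty strings) count as zero tokens.
    print(await TokenUsage.count_tokens(None))


asyncio.run(main())
```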
 
--- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/sample_tools.py
+++ chuk_ai_session_manager-0.4/src/chuk_ai_session_manager/sample_tools.py
@@ -1,4 +1,4 @@
-# sample_tools.py
+# chuk_ai_session_manager/sample_tools.py
 """
 Sample tools for chuk session manager demos - corrected version following registry example
 """
--- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager/session_prompt_builder.py
+++ chuk_ai_session_manager-0.4/src/chuk_ai_session_manager/session_prompt_builder.py
@@ -37,7 +37,8 @@ async def build_prompt_from_session(
     max_tokens: Optional[int] = None,
     model: str = "gpt-3.5-turbo",
     include_parent_context: bool = False,
-    current_query: Optional[str] = None
+    current_query: Optional[str] = None,
+    max_history: int = 5  # Add this parameter for conversation strategy
 ) -> List[Dict[str, str]]:
     """
     Build a prompt for the next LLM call from a Session asynchronously.
@@ -49,6 +50,7 @@
         model: Model to use for token counting
         include_parent_context: Whether to include context from parent sessions
         current_query: Current user query for relevance-based context selection
+        max_history: Maximum number of messages to include for conversation strategy
 
     Returns:
         A list of message dictionaries suitable for LLM API calls
@@ -72,7 +74,7 @@
     elif strategy == PromptStrategy.TOOL_FOCUSED:
         return await _build_tool_focused_prompt(session)
     elif strategy == PromptStrategy.CONVERSATION:
-        return await _build_conversation_prompt(session, max_history=5)
+        return await _build_conversation_prompt(session, max_history)
     elif strategy == PromptStrategy.HIERARCHICAL:
         return await _build_hierarchical_prompt(session, include_parent_context)
     else:
@@ -112,7 +114,7 @@ async def _build_minimal_prompt(session: Session) -> List[Dict[str, str]]:
 
     if assistant_msg is None:
         # Only the user message exists so far
-        return [{"role": "user", "content": first_user.message}] if first_user else []
+        return [{"role": "user", "content": _extract_content(first_user.message)}] if first_user else []
 
     # Children of that assistant
     children = [
@@ -126,11 +128,7 @@ async def _build_minimal_prompt(session: Session) -> List[Dict[str, str]]:
     # Assemble prompt
     prompt: List[Dict[str, str]] = []
     if first_user:
-        # Handle both string messages and dict messages
-        user_content = first_user.message
-        if isinstance(user_content, dict) and "content" in user_content:
-            user_content = user_content["content"]
-        prompt.append({"role": "user", "content": user_content})
+        prompt.append({"role": "user", "content": _extract_content(first_user.message)})
 
     # ALWAYS add the assistant marker - but strip its free text
     prompt.append({"role": "assistant", "content": None})
@@ -166,6 +164,24 @@ async def _build_minimal_prompt(session: Session) -> List[Dict[str, str]]:
     return prompt
 
 
+def _extract_content(message: Any) -> str:
+    """
+    Extract content string from a message that could be a string or dict.
+
+    Args:
+        message: The message content (string, dict, or other)
+
+    Returns:
+        The extracted content as a string
+    """
+    if isinstance(message, str):
+        return message
+    elif isinstance(message, dict) and "content" in message:
+        return message["content"]
+    else:
+        return str(message)
+
+
 async def _build_task_focused_prompt(session: Session) -> List[Dict[str, str]]:
     """
     Build a task-focused prompt.
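The isinstance checks that were previously repeated in each builder are consolidated into the new `_extract_content` helper above, which normalizes whatever shape an event's `message` takes into a plain string. A quick illustration of the three branches; the inputs are made up, and the helper is private, so importing it directly is shown only for demonstration:

```python
# Demonstration of _extract_content's branches (private helper, illustration only).
from chuk_ai_session_manager.session_prompt_builder import _extract_content

print(_extract_content("plain text"))                       # "plain text"
print(_extract_content({"role": "user", "content": "Hi"}))  # "Hi"
print(_extract_content(42))                                 # "42" via the str() fallback
```

The remaining hunks below replace the old inline handling with calls to this helper.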
@@ -201,17 +217,11 @@ async def _build_task_focused_prompt(session: Session) -> List[Dict[str, str]]:
     prompt = []
 
     # Always include the first user message (the main task)
-    first_content = first_user.message
-    if isinstance(first_content, dict) and "content" in first_content:
-        first_content = first_content["content"]
-    prompt.append({"role": "user", "content": first_content})
+    prompt.append({"role": "user", "content": _extract_content(first_user.message)})
 
     # Include the latest user message if different from the first
     if latest_user and latest_user.id != first_user.id:
-        latest_content = latest_user.message
-        if isinstance(latest_content, dict) and "content" in latest_content:
-            latest_content = latest_content["content"]
-        prompt.append({"role": "user", "content": latest_content})
+        prompt.append({"role": "user", "content": _extract_content(latest_user.message)})
 
     # Include assistant response placeholder
     if assistant_msg:
@@ -274,10 +284,7 @@ async def _build_tool_focused_prompt(session: Session) -> List[Dict[str, str]]:
     prompt = []
 
     # Include user message
-    user_content = latest_user.message
-    if isinstance(user_content, dict) and "content" in user_content:
-        user_content = user_content["content"]
-    prompt.append({"role": "user", "content": user_content})
+    prompt.append({"role": "user", "content": _extract_content(latest_user.message)})
 
     # Include assistant placeholder
     if assistant_msg:
@@ -334,17 +341,17 @@ async def _build_conversation_prompt(
 
     # Build the conversation history
     prompt = []
-    for msg in recent_messages:
+    for i, msg in enumerate(recent_messages):
         role = "user" if msg.source == EventSource.USER else "assistant"
-        content = msg.message
+        content = _extract_content(msg.message)
 
-        # Handle different message formats
-        if isinstance(content, dict) and "content" in content:
-            content = content["content"]
-
-        # For the last assistant message, set content to None
-        if role == "assistant" and msg == recent_messages[-1] and msg.source != EventSource.USER:
-            content = None
+        # For the last assistant message, set content to None and add tool calls
+        if (role == "assistant" and
+            msg == recent_messages[-1] and
+            msg.source != EventSource.USER):
+
+            # Add the message first with None content
+            prompt.append({"role": role, "content": None})
 
             # Add tool call results for this assistant message
             tool_calls = [
@@ -352,9 +359,6 @@
                 if e.type == EventType.TOOL_CALL and e.metadata.get("parent_event_id") == msg.id
             ]
 
-            # Add the message first, then tools
-            prompt.append({"role": role, "content": content})
-
             # Add tool results
             for tc in tool_calls:
                 if isinstance(tc.message, dict):
@@ -366,11 +370,9 @@
                         "name": tool_name,
                         "content": json.dumps(tool_result, default=str),
                     })
-
-            # Skip adding this message again
-            continue
-
-        prompt.append({"role": role, "content": content})
+        else:
+            # Regular message
+            prompt.append({"role": role, "content": content})
 
     return prompt
 
@@ -391,32 +393,38 @@
 
     # If parent context is enabled and session has a parent
     if include_parent_context and session.parent_id:
-        # Get the storage backend and create store
-        backend = get_backend()
-        store = ChukSessionsStore(backend)
-        parent = await store.get(session.parent_id)
-
-        if parent:
-            # Find the most recent summary in parent
-            summary_event = next(
-                (e for e in reversed(parent.events)
-                 if e.type == EventType.SUMMARY),
-                None
-            )
+        try:
+            # Get the storage backend and create store
+            backend = get_backend()
+            store = ChukSessionsStore(backend)
+            parent = await store.get(session.parent_id)
 
-            if summary_event:
-                # Extract summary content
-                summary_content = summary_event.message
-                if isinstance(summary_content, dict) and "note" in summary_content:
-                    summary_content = summary_content["note"]
-                elif isinstance(summary_content, dict) and "content" in summary_content:
-                    summary_content = summary_content["content"]
-
-                # Add parent context at the beginning
-                prompt.insert(0, {
-                    "role": "system",
-                    "content": f"Context from previous conversation: {summary_content}"
-                })
+            if parent:
+                # Find the most recent summary in parent
+                summary_event = next(
+                    (e for e in reversed(parent.events)
+                     if e.type == EventType.SUMMARY),
+                    None
+                )
+
+                if summary_event:
+                    # Extract summary content
+                    summary_content = summary_event.message
+                    if isinstance(summary_content, dict) and "note" in summary_content:
+                        summary_content = summary_content["note"]
+                    elif isinstance(summary_content, dict) and "content" in summary_content:
+                        summary_content = summary_content["content"]
+                    else:
+                        summary_content = str(summary_content)
+
+                    # Add parent context at the beginning
+                    prompt.insert(0, {
+                        "role": "system",
+                        "content": f"Context from previous conversation: {summary_content}"
+                    })
+        except Exception as e:
+            # If we can't load parent context, just continue with minimal prompt
+            logger.warning(f"Could not load parent context for session {session.parent_id}: {e}")
 
     return prompt
 
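Taken together, these hunks expose `max_history` on the public entry point (instead of the previously hard-coded 5 for the conversation strategy) and make parent-context loading fail soft with a logged warning. A hedged sketch of a call site, assuming `PromptStrategy` is importable from the same module and that `session` is a loaded `Session` instance:

```python
# Hypothetical caller of the prompt builder after the 0.4 changes.
from chuk_ai_session_manager.session_prompt_builder import (
    PromptStrategy,
    build_prompt_from_session,
)


async def prepare_messages(session):
    # CONVERSATION now honours the caller-supplied max_history.
    conversation = await build_prompt_from_session(
        session,
        strategy=PromptStrategy.CONVERSATION,
        max_history=10,
    )

    # HIERARCHICAL can pull a summary from the parent session; if the storage
    # lookup fails, 0.4 logs a warning and returns the prompt without it.
    hierarchical = await build_prompt_from_session(
        session,
        strategy=PromptStrategy.HIERARCHICAL,
        include_parent_context=True,
    )
    return conversation, hierarchical
```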
 
--- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager.egg-info/PKG-INFO
+++ chuk_ai_session_manager-0.4/src/chuk_ai_session_manager.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: chuk-ai-session-manager
-Version: 0.3
+Version: 0.4
 Summary: Session manager for AI applications
 Requires-Python: >=3.11
 Description-Content-Type: text/markdown
@@ -311,36 +311,6 @@ pip install chuk-ai-session-manager
 | **Production Ready** | Requires additional work | Built for production |
 | **Learning Curve** | Steep | 5 minutes to productivity |
 
-## 🚀 Migration Guides
-
-### From LangChain Memory
-```python
-# Old LangChain way
-from langchain.memory import ConversationBufferMemory
-memory = ConversationBufferMemory()
-memory.save_context({"input": "Hi"}, {"output": "Hello"})
-
-# New CHUK way (much simpler!)
-from chuk_ai_session_manager import track_conversation
-await track_conversation("Hi", "Hello")
-```
-
-### From Manual Session Management
-```python
-# Old manual way
-conversations = {}
-def save_conversation(user_id, message, response):
-    if user_id not in conversations:
-        conversations[user_id] = []
-    conversations[user_id].append({"user": message, "ai": response})
-
-# New CHUK way
-from chuk_ai_session_manager import SessionManager
-sm = SessionManager(session_id=user_id)
-await sm.user_says(message)
-await sm.ai_responds(response)
-```
-
 ## 📖 More Examples
 
 Check out the `/examples` directory for complete working examples:
@@ -362,16 +332,9 @@ Check out the `/examples` directory for complete working examples:
 - ✅ Complete conversation analytics and observability
 - ✅ Framework-agnostic solution that works with any LLM library
 
-**Consider alternatives if you:**
-- ❌ Only need basic in-memory conversation history
-- ❌ Are locked into a specific framework (LangChain, etc.)
-- ❌ Don't need cost tracking or analytics
-- ❌ Are building simple, stateless AI applications
-
 ## 🤝 Community & Support
 
 - 📖 **Documentation**: [Full docs with tutorials](link-to-docs)
-- 💬 **Discord**: Join our community for help and discussions
 - 🐛 **Issues**: Report bugs on GitHub
 - 💡 **Feature Requests**: Suggest new features
 - 📧 **Support**: enterprise@chuk.dev for production support
--- chuk_ai_session_manager-0.3/src/chuk_ai_session_manager.egg-info/SOURCES.txt
+++ chuk_ai_session_manager-0.4/src/chuk_ai_session_manager.egg-info/SOURCES.txt
@@ -22,12 +22,12 @@ src/chuk_ai_session_manager/models/session_event.py
 src/chuk_ai_session_manager/models/session_metadata.py
 src/chuk_ai_session_manager/models/session_run.py
 src/chuk_ai_session_manager/models/token_usage.py
-src/chuk_ai_session_manager/utils/__init__.py
-src/chuk_ai_session_manager/utils/status_display_utils.py
 tests/test_basic_functionality.py
-tests/test_chuk_session_storage.py
+tests/test_exceptions.py
 tests/test_infinite_conversation.py
-tests/test_infinite_conversation_advanced.py
-tests/test_session.py
-tests/test_session_aware_tool_processor.py
-tests/test_session_prompt_builder.py
+tests/test_models.py
+tests/test_prompt_builder.py
+tests/test_simple.py
+tests/test_simple_api.py
+tests/test_storage.py
+tests/test_tools.py