memorisdk 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.

Note: this release of memorisdk has been flagged as potentially problematic.

Files changed (63)
  1. memori/__init__.py +3 -3
  2. memori/agents/conscious_agent.py +289 -77
  3. memori/agents/memory_agent.py +19 -9
  4. memori/agents/retrieval_agent.py +138 -63
  5. memori/config/manager.py +7 -7
  6. memori/config/memory_manager.py +25 -25
  7. memori/config/settings.py +13 -6
  8. memori/core/conversation.py +15 -15
  9. memori/core/database.py +14 -13
  10. memori/core/memory.py +438 -123
  11. memori/core/providers.py +25 -25
  12. memori/database/__init__.py +11 -0
  13. memori/database/adapters/__init__.py +11 -0
  14. memori/database/adapters/mongodb_adapter.py +739 -0
  15. memori/database/adapters/mysql_adapter.py +8 -8
  16. memori/database/adapters/postgresql_adapter.py +6 -6
  17. memori/database/adapters/sqlite_adapter.py +6 -6
  18. memori/database/auto_creator.py +8 -9
  19. memori/database/connection_utils.py +5 -5
  20. memori/database/connectors/__init__.py +11 -0
  21. memori/database/connectors/base_connector.py +18 -19
  22. memori/database/connectors/mongodb_connector.py +527 -0
  23. memori/database/connectors/mysql_connector.py +13 -15
  24. memori/database/connectors/postgres_connector.py +12 -12
  25. memori/database/connectors/sqlite_connector.py +11 -11
  26. memori/database/models.py +2 -2
  27. memori/database/mongodb_manager.py +1402 -0
  28. memori/database/queries/base_queries.py +3 -4
  29. memori/database/queries/chat_queries.py +3 -5
  30. memori/database/queries/entity_queries.py +3 -5
  31. memori/database/queries/memory_queries.py +3 -5
  32. memori/database/query_translator.py +11 -11
  33. memori/database/schema_generators/__init__.py +11 -0
  34. memori/database/schema_generators/mongodb_schema_generator.py +666 -0
  35. memori/database/schema_generators/mysql_schema_generator.py +2 -4
  36. memori/database/search/__init__.py +11 -0
  37. memori/database/search/mongodb_search_adapter.py +653 -0
  38. memori/database/search/mysql_search_adapter.py +8 -8
  39. memori/database/search/sqlite_search_adapter.py +6 -6
  40. memori/database/search_service.py +218 -66
  41. memori/database/sqlalchemy_manager.py +72 -25
  42. memori/integrations/__init__.py +1 -1
  43. memori/integrations/anthropic_integration.py +1 -3
  44. memori/integrations/litellm_integration.py +23 -6
  45. memori/integrations/openai_integration.py +31 -3
  46. memori/tools/memory_tool.py +104 -13
  47. memori/utils/exceptions.py +58 -58
  48. memori/utils/helpers.py +11 -12
  49. memori/utils/input_validator.py +10 -12
  50. memori/utils/logging.py +4 -4
  51. memori/utils/pydantic_models.py +57 -57
  52. memori/utils/query_builder.py +20 -20
  53. memori/utils/security_audit.py +28 -28
  54. memori/utils/security_integration.py +9 -9
  55. memori/utils/transaction_manager.py +20 -19
  56. memori/utils/validators.py +6 -6
  57. {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/METADATA +36 -20
  58. memorisdk-2.1.0.dist-info/RECORD +71 -0
  59. memori/scripts/llm_text.py +0 -50
  60. memorisdk-2.0.0.dist-info/RECORD +0 -67
  61. {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/WHEEL +0 -0
  62. {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
  63. {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/top_level.txt +0 -0
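
The headline change in 2.1.0 is a MongoDB backend (new adapter, connector, manager, schema generator, and search modules) plus connection-string routing in memori/core/memory.py, shown in the diff below. As a minimal usage sketch only, assuming the standard `from memori import Memori` entry point and an illustrative local MongoDB URL (neither is taken from official docs), switching backends comes down to the database_connect string:

from memori import Memori

# Default SQL path: handled by the existing SQLAlchemy manager.
memori_sql = Memori(database_connect="sqlite:///memori.db", conscious_ingest=True)

# Per the diff, strings starting with mongodb:// or mongodb+srv:// are routed to the
# new MongoDBDatabaseManager; if pymongo is missing or the connection fails, the code
# falls back to sqlite:///memori_fallback.db instead of raising.
memori_mongo = Memori(database_connect="mongodb://localhost:27017/memori")
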
memori/core/memory.py CHANGED
@@ -6,7 +6,7 @@ import asyncio
  import time
  import uuid
  from datetime import datetime
- from typing import Any, Dict, List, Optional
+ from typing import Any

  from loguru import logger

@@ -22,7 +22,7 @@ except ImportError:
  from ..agents.conscious_agent import ConsciousAgent
  from ..config.memory_manager import MemoryManager
  from ..config.settings import LoggingSettings, LogLevel
- from ..database.sqlalchemy_manager import SQLAlchemyDatabaseManager as DatabaseManager
+ from ..database.sqlalchemy_manager import SQLAlchemyDatabaseManager
  from ..utils.exceptions import DatabaseError, MemoriError
  from ..utils.logging import LoggingManager
  from ..utils.pydantic_models import ConversationContext
@@ -41,30 +41,30 @@ class Memori:
  self,
  database_connect: str = "sqlite:///memori.db",
  template: str = "basic",
- mem_prompt: Optional[str] = None,
+ mem_prompt: str | None = None,
  conscious_ingest: bool = False,
  auto_ingest: bool = False,
- namespace: Optional[str] = None,
+ namespace: str | None = None,
  shared_memory: bool = False,
- memory_filters: Optional[Dict[str, Any]] = None,
- openai_api_key: Optional[str] = None,
- user_id: Optional[str] = None,
+ memory_filters: dict[str, Any] | None = None,
+ openai_api_key: str | None = None,
+ user_id: str | None = None,
  verbose: bool = False,
  # New provider configuration parameters
- api_key: Optional[str] = None,
- api_type: Optional[str] = None,
- base_url: Optional[str] = None,
- azure_endpoint: Optional[str] = None,
- azure_deployment: Optional[str] = None,
- api_version: Optional[str] = None,
- azure_ad_token: Optional[str] = None,
- organization: Optional[str] = None,
- project: Optional[str] = None,
- model: Optional[str] = None, # Allow custom model selection
- provider_config: Optional[Any] = None, # ProviderConfig when available
+ api_key: str | None = None,
+ api_type: str | None = None,
+ base_url: str | None = None,
+ azure_endpoint: str | None = None,
+ azure_deployment: str | None = None,
+ api_version: str | None = None,
+ azure_ad_token: str | None = None,
+ organization: str | None = None,
+ project: str | None = None,
+ model: str | None = None, # Allow custom model selection
+ provider_config: Any | None = None, # ProviderConfig when available
  schema_init: bool = True, # Initialize database schema and create tables
- database_prefix: Optional[str] = None, # Database name prefix
- database_suffix: Optional[str] = None, # Database name suffix
+ database_prefix: str | None = None, # Database name prefix
+ database_suffix: str | None = None, # Database name suffix
  ):
  """
  Initialize Memori memory system v1.0.
@@ -185,8 +185,10 @@ class Memori:
  # Setup logging based on verbose mode
  self._setup_logging()

- # Initialize database manager
- self.db_manager = DatabaseManager(database_connect, template, schema_init)
+ # Initialize database manager (detect MongoDB vs SQL)
+ self.db_manager = self._create_database_manager(
+ database_connect, template, schema_init
+ )

  # Initialize Pydantic-based agents
  self.memory_agent = None
@@ -319,6 +321,62 @@ class Memori:
  "Verbose logging enabled - only loguru logs will be displayed"
  )

+ def _create_database_manager(
+ self, database_connect: str, template: str, schema_init: bool
+ ):
+ """Create appropriate database manager based on connection string with fallback"""
+ try:
+ # Detect MongoDB connection strings
+ if self._is_mongodb_connection(database_connect):
+ logger.info(
+ "Detected MongoDB connection string - attempting MongoDB manager"
+ )
+ try:
+ from ..database.mongodb_manager import MongoDBDatabaseManager
+
+ # Test MongoDB connection before proceeding
+ manager = MongoDBDatabaseManager(
+ database_connect, template, schema_init
+ )
+ # Verify connection works
+ _ = manager._get_client()
+ logger.info("MongoDB manager initialized successfully")
+ return manager
+ except ImportError:
+ logger.error(
+ "MongoDB support requires pymongo. Install with: pip install pymongo"
+ )
+ logger.info("Falling back to SQLite for compatibility")
+ return self._create_fallback_sqlite_manager(template, schema_init)
+ except Exception as e:
+ logger.error(f"MongoDB connection failed: {e}")
+ logger.info("Falling back to SQLite for compatibility")
+ return self._create_fallback_sqlite_manager(template, schema_init)
+ else:
+ logger.info("Detected SQL connection string - using SQLAlchemy manager")
+ return SQLAlchemyDatabaseManager(
+ database_connect, template, schema_init
+ )
+
+ except Exception as e:
+ logger.error(f"Failed to create database manager: {e}")
+ logger.info("Creating fallback SQLite manager")
+ return self._create_fallback_sqlite_manager(template, schema_init)
+
+ def _create_fallback_sqlite_manager(self, template: str, schema_init: bool):
+ """Create fallback SQLite manager when other options fail"""
+ fallback_connect = "sqlite:///memori_fallback.db"
+ logger.warning(f"Using fallback SQLite database: {fallback_connect}")
+ return SQLAlchemyDatabaseManager(fallback_connect, template, schema_init)
+
+ def _is_mongodb_connection(self, database_connect: str) -> bool:
+ """Detect if connection string is for MongoDB"""
+ mongodb_prefixes = [
+ "mongodb://",
+ "mongodb+srv://",
+ ]
+ return any(database_connect.startswith(prefix) for prefix in mongodb_prefixes)
+
  def _setup_database(self):
  """Setup database tables based on template"""
  if not self.schema_init:
@@ -572,7 +630,7 @@ class Memori:
  )
  return False

- def enable(self, interceptors: Optional[List[str]] = None):
+ def enable(self, interceptors: list[str] | None = None):
  """
  Enable universal memory recording using LiteLLM's native callback system.

@@ -665,11 +723,11 @@ class Memori:

  # Memory system status and control methods

- def get_interceptor_status(self) -> Dict[str, Dict[str, Any]]:
+ def get_interceptor_status(self) -> dict[str, dict[str, Any]]:
  """Get status of memory recording system"""
  return self.memory_manager.get_status()

- def get_interceptor_health(self) -> Dict[str, Any]:
+ def get_interceptor_health(self) -> dict[str, Any]:
  """Get health check of interceptor system"""
  return self.memory_manager.get_health()

@@ -955,58 +1013,94 @@ class Memori:

  return params

- def _get_conscious_context(self) -> List[Dict[str, Any]]:
+ def _get_conscious_context(self) -> list[dict[str, Any]]:
  """
  Get conscious context from ALL short-term memory summaries.
  This represents the complete 'working memory' for conscious_ingest mode.
  Used only at program startup when conscious_ingest=True.
+ Database-agnostic version that works with both SQL and MongoDB.
  """
  try:
- from sqlalchemy import text
-
- with self.db_manager._get_connection() as conn:
- # Get ALL short-term memories (no limit) ordered by importance and recency
- # This gives the complete conscious context as single initial injection
- result = conn.execute(
- text(
- """
- SELECT memory_id, processed_data, importance_score,
- category_primary, summary, searchable_content,
- created_at, access_count
- FROM short_term_memory
- WHERE namespace = :namespace AND (expires_at IS NULL OR expires_at > :current_time)
- ORDER BY importance_score DESC, created_at DESC
- """
- ),
- {"namespace": self.namespace, "current_time": datetime.now()},
+ # Detect database type from the db_manager
+ db_type = getattr(self.db_manager, "database_type", "sql")
+
+ if db_type == "mongodb":
+ # Use MongoDB-specific method
+ memories = self.db_manager.get_short_term_memory(
+ namespace=self.namespace,
+ limit=1000, # Large limit to get all memories
+ include_expired=False,
  )

- memories = []
- for row in result:
- memories.append(
+ # Convert to consistent format
+ formatted_memories = []
+ for memory in memories:
+ formatted_memories.append(
  {
- "memory_id": row[0],
- "processed_data": row[1],
- "importance_score": row[2],
- "category_primary": row[3],
- "summary": row[4],
- "searchable_content": row[5],
- "created_at": row[6],
- "access_count": row[7],
+ "memory_id": memory.get("memory_id"),
+ "processed_data": memory.get("processed_data"),
+ "importance_score": memory.get("importance_score", 0),
+ "category_primary": memory.get("category_primary", ""),
+ "summary": memory.get("summary", ""),
+ "searchable_content": memory.get("searchable_content", ""),
+ "created_at": memory.get("created_at"),
+ "access_count": memory.get("access_count", 0),
  "memory_type": "short_term",
  }
  )

  logger.debug(
- f"Retrieved {len(memories)} conscious memories from short-term storage"
+ f"Retrieved {len(formatted_memories)} conscious memories from MongoDB short-term storage"
  )
- return memories
+ return formatted_memories
+
+ else:
+ # Use SQL method
+ from sqlalchemy import text
+
+ with self.db_manager._get_connection() as conn:
+ # Get ALL short-term memories (no limit) ordered by importance and recency
+ # This gives the complete conscious context as single initial injection
+ result = conn.execute(
+ text(
+ """
+ SELECT memory_id, processed_data, importance_score,
+ category_primary, summary, searchable_content,
+ created_at, access_count
+ FROM short_term_memory
+ WHERE namespace = :namespace AND (expires_at IS NULL OR expires_at > :current_time)
+ ORDER BY importance_score DESC, created_at DESC
+ """
+ ),
+ {"namespace": self.namespace, "current_time": datetime.now()},
+ )
+
+ memories = []
+ for row in result:
+ memories.append(
+ {
+ "memory_id": row[0],
+ "processed_data": row[1],
+ "importance_score": row[2],
+ "category_primary": row[3],
+ "summary": row[4],
+ "searchable_content": row[5],
+ "created_at": row[6],
+ "access_count": row[7],
+ "memory_type": "short_term",
+ }
+ )
+
+ logger.debug(
+ f"Retrieved {len(memories)} conscious memories from SQL short-term storage"
+ )
+ return memories

  except Exception as e:
  logger.error(f"Failed to get conscious context: {e}")
  return []

- def _get_auto_ingest_context(self, user_input: str) -> List[Dict[str, Any]]:
+ def _get_auto_ingest_context(self, user_input: str) -> list[dict[str, Any]]:
  """
  Get auto-ingest context using retrieval agent for intelligent search.
  Searches through entire database for relevant memories.
@@ -1036,15 +1130,38 @@ class Memori:
  self._in_context_retrieval = True

  logger.debug(
- f"Auto-ingest: Starting context retrieval for query: '{user_input[:50]}...'"
+ f"Auto-ingest: Starting context retrieval for query: '{user_input[:50]}...' in namespace: '{self.namespace}'"
  )

  # Always try direct database search first as it's more reliable
  logger.debug("Auto-ingest: Using direct database search (primary method)")
- results = self.db_manager.search_memories(
- query=user_input, namespace=self.namespace, limit=5
+ logger.debug(
+ f"Auto-ingest: Database manager type: {type(self.db_manager).__name__}"
  )

+ try:
+ results = self.db_manager.search_memories(
+ query=user_input, namespace=self.namespace, limit=5
+ )
+ logger.debug(
+ f"Auto-ingest: Database search returned {len(results) if results else 0} results"
+ )
+
+ if results:
+ for i, result in enumerate(
+ results[:3]
+ ): # Log first 3 results for debugging
+ logger.debug(
+ f"Auto-ingest: Result {i+1}: {type(result)} with keys: {list(result.keys()) if isinstance(result, dict) else 'N/A'}"
+ )
+ except Exception as db_search_e:
+ logger.error(f"Auto-ingest: Database search failed: {db_search_e}")
+ logger.debug(
+ f"Auto-ingest: Database search error details: {type(db_search_e).__name__}: {str(db_search_e)}",
+ exc_info=True,
+ )
+ results = []
+
  if results:
  logger.debug(
  f"Auto-ingest: Direct database search returned {len(results)} results"
@@ -1085,8 +1202,12 @@ class Memori:
  )

  except Exception as search_error:
- logger.warning(
- f"Auto-ingest: Search engine failed ({search_error})"
+ logger.error(
+ f"Auto-ingest: Search engine failed for query '{user_input[:50]}...': {search_error}"
+ )
+ logger.debug(
+ f"Auto-ingest: Search engine error details: {type(search_error).__name__}: {str(search_error)}",
+ exc_info=True,
  )
  else:
  logger.debug("Auto-ingest: No search engine available")
@@ -1095,22 +1216,39 @@ class Memori:
  logger.debug(
  "Auto-ingest: All search methods returned 0 results, using recent memories fallback"
  )
- fallback_results = self.db_manager.search_memories(
- query="", # Empty query to get recent memories
- namespace=self.namespace,
- limit=3,
+ logger.debug(
+ f"Auto-ingest: Attempting fallback search in namespace '{self.namespace}'"
  )

- if fallback_results:
+ try:
+ fallback_results = self.db_manager.search_memories(
+ query="", # Empty query to get recent memories
+ namespace=self.namespace,
+ limit=3,
+ )
  logger.debug(
- f"Auto-ingest: Fallback returned {len(fallback_results)} recent memories"
+ f"Auto-ingest: Fallback search returned {len(fallback_results) if fallback_results else 0} results"
+ )
+
+ if fallback_results:
+ logger.debug(
+ f"Auto-ingest: Fallback returned {len(fallback_results)} recent memories"
+ )
+ # Add search metadata to fallback results
+ for result in fallback_results:
+ if isinstance(result, dict):
+ result["retrieval_method"] = "recent_memories_fallback"
+ result["retrieval_query"] = user_input
+ return fallback_results
+ else:
+ logger.debug("Auto-ingest: Fallback search returned no results")
+
+ except Exception as fallback_e:
+ logger.error(f"Auto-ingest: Fallback search failed: {fallback_e}")
+ logger.debug(
+ f"Auto-ingest: Fallback error details: {type(fallback_e).__name__}: {str(fallback_e)}",
+ exc_info=True,
  )
- # Add search metadata to fallback results
- for result in fallback_results:
- if isinstance(result, dict):
- result["retrieval_method"] = "recent_memories_fallback"
- result["retrieval_query"] = user_input
- return fallback_results

  logger.debug(
  "Auto-ingest: All retrieval methods failed, returning empty context"
@@ -1165,7 +1303,7 @@ class Memori:
  except Exception as e:
  logger.error(f"Failed to record OpenAI conversation: {e}")

- def _extract_openai_user_input(self, messages: List[Dict]) -> str:
+ def _extract_openai_user_input(self, messages: list[dict]) -> str:
  """Extract user input from OpenAI messages with support for complex content types"""
  user_input = ""
  try:
@@ -1244,8 +1382,8 @@ class Memori:
  return ai_output

  def _extract_openai_metadata(
- self, kwargs: Dict, response, tokens_used: int
- ) -> Dict:
+ self, kwargs: dict, response, tokens_used: int
+ ) -> dict:
  """Extract comprehensive metadata from OpenAI request and response"""
  metadata = {
  "integration": "openai_auto",
@@ -1339,7 +1477,7 @@ class Memori:
  except Exception as e:
  logger.error(f"Failed to record Anthropic conversation: {e}")

- def _extract_anthropic_user_input(self, messages: List[Dict]) -> str:
+ def _extract_anthropic_user_input(self, messages: list[dict]) -> str:
  """Extract user input from Anthropic messages with support for complex content types"""
  user_input = ""
  try:
@@ -1439,8 +1577,8 @@ class Memori:
  return tokens_used

  def _extract_anthropic_metadata(
- self, kwargs: Dict, response, tokens_used: int
- ) -> Dict:
+ self, kwargs: dict, response, tokens_used: int
+ ) -> dict:
  """Extract comprehensive metadata from Anthropic request and response"""
  metadata = {
  "integration": "anthropic_auto",
@@ -1564,40 +1702,134 @@ class Memori:
  # in memori.integrations.litellm_integration

  def _process_memory_sync(
- self, chat_id: str, user_input: str, ai_output: str, model: str = "unknown"
+ self,
+ chat_id: str,
+ user_input: str,
+ ai_output: str,
+ model: str = "unknown",
+ retry_count: int = 0,
  ):
- """Synchronous memory processing fallback"""
+ """Synchronous memory processing fallback with retry logic"""
  if not self.memory_agent:
  logger.warning("Memory agent not available, skipping memory ingestion")
  return

+ max_retries = 2 # Maximum retry attempts
+
  try:
  # Run async processing in new event loop
  import threading

  def run_memory_processing():
- new_loop = asyncio.new_event_loop()
- asyncio.set_event_loop(new_loop)
+ """Run memory processing with improved event loop management"""
+ new_loop = None
  try:
+ # Check if we're already in an async context
+ try:
+ asyncio.get_running_loop()
+ logger.debug(
+ "Found existing event loop, creating new one for memory processing"
+ )
+ except RuntimeError:
+ # No running loop, safe to create new one
+ logger.debug("No existing event loop found, creating new one")
+
+ new_loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(new_loop)
+
+ logger.debug(
+ f"Starting memory processing for {chat_id} (attempt {retry_count + 1})"
+ )
+
+ # Add timeout to prevent hanging
  new_loop.run_until_complete(
- self._process_memory_async(
- chat_id, user_input, ai_output, model
+ asyncio.wait_for(
+ self._process_memory_async(
+ chat_id, user_input, ai_output, model
+ ),
+ timeout=60.0, # 60 second timeout
  )
  )
+ logger.debug(
+ f"Memory processing completed successfully for {chat_id}"
+ )
+
+ except asyncio.TimeoutError as e:
+ logger.error(
+ f"Memory processing timed out for {chat_id} (attempt {retry_count + 1}): {e}"
+ )
+ if retry_count < max_retries:
+ logger.info(
+ f"Retrying memory processing for {chat_id} ({retry_count + 1}/{max_retries})"
+ )
+ # Schedule retry
+ import time
+
+ time.sleep(2) # Wait 2 seconds before retry
+ self._process_memory_sync(
+ chat_id, user_input, ai_output, model, retry_count + 1
+ )
  except Exception as e:
- logger.error(f"Synchronous memory processing failed: {e}")
+ logger.error(
+ f"Memory processing failed for {chat_id} (attempt {retry_count + 1}): {e}"
+ )
+ import traceback
+
+ logger.error(f"Full error traceback: {traceback.format_exc()}")
+ if retry_count < max_retries:
+ logger.info(
+ f"Retrying memory processing for {chat_id} ({retry_count + 1}/{max_retries})"
+ )
+ # Schedule retry
+ import time
+
+ time.sleep(2) # Wait 2 seconds before retry
+ self._process_memory_sync(
+ chat_id, user_input, ai_output, model, retry_count + 1
+ )
  finally:
- new_loop.close()
+ if new_loop and not new_loop.is_closed():
+ # Clean up pending tasks
+ pending = asyncio.all_tasks(new_loop)
+ if pending:
+ logger.debug(f"Cancelling {len(pending)} pending tasks")
+ for task in pending:
+ task.cancel()
+ # Wait for cancellation to complete
+ new_loop.run_until_complete(
+ asyncio.gather(*pending, return_exceptions=True)
+ )
+
+ new_loop.close()
+ logger.debug(f"Event loop closed for {chat_id}")
+
+ # Reset event loop policy to prevent conflicts
+ try:
+ asyncio.set_event_loop(None)
+ except:
+ pass

  # Run in background thread to avoid blocking
  thread = threading.Thread(target=run_memory_processing, daemon=True)
  thread.start()
  logger.debug(
- f"Memory processing started in background thread for {chat_id}"
+ f"Memory processing started in background thread for {chat_id} (attempt {retry_count + 1})"
  )

  except Exception as e:
- logger.error(f"Failed to start synchronous memory processing: {e}")
+ logger.error(
+ f"Failed to start synchronous memory processing for {chat_id}: {e}"
+ )
+ if retry_count < max_retries:
+ logger.info(
+ f"Retrying memory processing startup for {chat_id} ({retry_count + 1}/{max_retries})"
+ )
+ import time
+
+ time.sleep(2)
+ self._process_memory_sync(
+ chat_id, user_input, ai_output, model, retry_count + 1
+ )

  def _parse_llm_response(self, response) -> tuple[str, str]:
  """Extract text and model from various LLM response formats."""
@@ -1641,7 +1873,7 @@ class Memori:
  user_input: str,
  ai_output=None,
  model: str = None,
- metadata: Optional[Dict[str, Any]] = None,
+ metadata: dict[str, Any] | None = None,
  ) -> str:
  """
  Record a conversation.
@@ -1658,6 +1890,11 @@ class Memori:
  if not self._enabled:
  raise MemoriError("Memori is not enabled. Call enable() first.")

+ # Debug logging for conversation recording
+ logger.info(
+ f"Recording conversation - Input: '{user_input[:100]}...' Model: {model}"
+ )
+
  # Parse response
  response_text, detected_model = self._parse_llm_response(ai_output)
  response_model = model or detected_model
@@ -1666,26 +1903,42 @@ class Memori:
  chat_id = str(uuid.uuid4())
  timestamp = datetime.now()

- # Store conversation
- self.db_manager.store_chat_history(
- chat_id=chat_id,
- user_input=user_input,
- ai_output=response_text,
- model=response_model,
- timestamp=timestamp,
- session_id=self._session_id,
- namespace=self.namespace,
- metadata=metadata or {},
- )
-
- # Always process into long-term memory when memory agent is available
- if self.memory_agent:
- self._schedule_memory_processing(
- chat_id, user_input, response_text, response_model
+ try:
+ # Store conversation
+ self.db_manager.store_chat_history(
+ chat_id=chat_id,
+ user_input=user_input,
+ ai_output=response_text,
+ model=response_model,
+ timestamp=timestamp,
+ session_id=self._session_id,
+ namespace=self.namespace,
+ metadata=metadata or {},
+ )
+ logger.debug(
+ f"Successfully stored chat history for conversation: {chat_id}"
  )

- logger.debug(f"Recorded conversation: {chat_id}")
- return chat_id
+ # Always process into long-term memory when memory agent is available
+ if self.memory_agent:
+ self._schedule_memory_processing(
+ chat_id, user_input, response_text, response_model
+ )
+ logger.debug(f"Scheduled memory processing for conversation: {chat_id}")
+ else:
+ logger.warning(
+ f"Memory agent not available, skipping memory processing for: {chat_id}"
+ )
+
+ logger.info(f"Recorded conversation successfully: {chat_id}")
+ return chat_id
+
+ except Exception as e:
+ logger.error(f"Failed to record conversation {chat_id}: {e}")
+ import traceback
+
+ logger.error(f"Recording error details: {traceback.format_exc()}")
+ raise

  def _schedule_memory_processing(
  self, chat_id: str, user_input: str, ai_output: str, model: str
@@ -1782,7 +2035,7 @@ class Memori:
  except Exception as e:
  logger.error(f"Memory ingestion failed for {chat_id}: {e}")

- async def _get_recent_memories_for_dedup(self) -> List:
+ async def _get_recent_memories_for_dedup(self) -> list:
  """Get recent memories for deduplication check"""
  try:
  from sqlalchemy import text
@@ -1828,7 +2081,7 @@ class Memori:
  logger.error(f"Failed to get recent memories for dedup: {e}")
  return []

- def retrieve_context(self, query: str, limit: int = 5) -> List[Dict[str, Any]]:
+ def retrieve_context(self, query: str, limit: int = 5) -> list[dict[str, Any]]:
  """
  Retrieve relevant context for a query with priority on essential facts

@@ -1885,7 +2138,7 @@ class Memori:
  logger.error(f"Context retrieval failed: {e}")
  return []

- def get_conversation_history(self, limit: int = 10) -> List[Dict[str, Any]]:
+ def get_conversation_history(self, limit: int = 10) -> list[dict[str, Any]]:
  """Get recent conversation history"""
  try:
  return self.db_manager.get_chat_history(
@@ -1897,7 +2150,7 @@ class Memori:
  logger.error(f"Failed to get conversation history: {e}")
  return []

- def clear_memory(self, memory_type: Optional[str] = None):
+ def clear_memory(self, memory_type: str | None = None):
  """
  Clear memory data

@@ -1912,7 +2165,7 @@ class Memori:
  except Exception as e:
  raise MemoriError(f"Failed to clear memory: {e}")

- def get_memory_stats(self) -> Dict[str, Any]:
+ def get_memory_stats(self) -> dict[str, Any]:
  """Get memory statistics"""
  try:
  return self.db_manager.get_memory_stats(self.namespace)
@@ -1930,7 +2183,7 @@ class Memori:
  """Get current session ID"""
  return self._session_id

- def get_integration_stats(self) -> List[Dict[str, Any]]:
+ def get_integration_stats(self) -> list[dict[str, Any]]:
  """Get statistics from the new interceptor system"""
  try:
  # Get system status first
@@ -2010,9 +2263,9 @@ class Memori:

  def update_user_context(
  self,
- current_projects: Optional[List[str]] = None,
- relevant_skills: Optional[List[str]] = None,
- user_preferences: Optional[List[str]] = None,
+ current_projects: list[str] | None = None,
+ relevant_skills: list[str] | None = None,
+ user_preferences: list[str] | None = None,
  ):
  """Update user context for better memory processing"""
  if current_projects is not None:
@@ -2026,7 +2279,7 @@ class Memori:

  def search_memories_by_category(
  self, category: str, limit: int = 10
- ) -> List[Dict[str, Any]]:
+ ) -> list[dict[str, Any]]:
  """Search memories by specific category"""
  try:
  return self.db_manager.search_memories(
@@ -2040,8 +2293,8 @@ class Memori:
  return []

  def get_entity_memories(
- self, entity_value: str, entity_type: Optional[str] = None, limit: int = 10
- ) -> List[Dict[str, Any]]:
+ self, entity_value: str, entity_type: str | None = None, limit: int = 10
+ ) -> list[dict[str, Any]]:
  """Get memories that contain a specific entity"""
  try:
  # This would use the entity index in the database
@@ -2112,6 +2365,68 @@ class Memori:
  except Exception as e:
  logger.error(f"Failed to stop background analysis: {e}")

+ def add(self, text: str, metadata: dict[str, Any] | None = None) -> str:
+ """
+ Add a memory or text to the system.
+
+ This is a unified method that works with both SQL and MongoDB backends.
+ For simple text memories, it will be processed and categorized automatically.
+
+ Args:
+ text: The text content to store as memory
+ metadata: Optional metadata to store with the memory
+
+ Returns:
+ str: Unique identifier for the stored memory/conversation
+ """
+ if not self._enabled:
+ self.enable()
+
+ # For simple text memories, we treat them as user inputs with AI acknowledgment
+ # This ensures they get processed through the normal memory pipeline
+ ai_response = "Memory recorded successfully"
+
+ return self.record_conversation(
+ user_input=text,
+ ai_output=ai_response,
+ metadata=metadata or {"type": "manual_memory", "source": "add_method"},
+ )
+
+ def search(self, query: str, limit: int = 5) -> list[dict[str, Any]]:
+ """
+ Search for memories/conversations based on a query.
+
+ This is a unified method that works with both SQL and MongoDB backends.
+
+ Args:
+ query: Search query string
+ limit: Maximum number of results to return
+
+ Returns:
+ List of matching memories with their content and metadata
+ """
+ if not self._enabled:
+ logger.warning("Memori is not enabled. Returning empty results.")
+ return []
+
+ try:
+ # Use the existing retrieve_context method for consistency
+ return self.retrieve_context(query, limit=limit)
+ except Exception as e:
+ logger.error(f"Search failed: {e}")
+ return []
+
+ def get_stats(self) -> dict[str, Any]:
+ """
+ Get memory statistics.
+
+ This is a unified method that works with both SQL and MongoDB backends.
+
+ Returns:
+ Dictionary containing memory statistics
+ """
+ return self.get_memory_stats()
+
  def cleanup(self):
  """Clean up all async tasks and resources"""
  try:
@@ -2347,7 +2662,7 @@ class Memori:
  logger.error(f"Failed to add memory to messages: {e}")
  return messages

- def get_essential_conversations(self, limit: int = 10) -> List[Dict[str, Any]]:
+ def get_essential_conversations(self, limit: int = 10) -> list[dict[str, Any]]:
  """Get essential conversations from short-term memory"""
  try:
  from sqlalchemy import text
@@ -2447,7 +2762,7 @@ class Memori:

  # Conversation management methods

- def get_conversation_stats(self) -> Dict[str, Any]:
+ def get_conversation_stats(self) -> dict[str, Any]:
  """Get conversation manager statistics"""
  return self.conversation_manager.get_session_stats()
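
For orientation, the new unified add / search / get_stats helpers introduced in the @@ -2112,6 +2365,68 @@ hunk above suggest a calling pattern like the following minimal sketch. The method names, parameters, and auto-enable behavior are taken from the diff; the connection string and example text are illustrative, not from official docs.

from memori import Memori

memori = Memori(database_connect="sqlite:///memori.db")
memori.enable()  # add() also calls enable() itself if the instance is not yet enabled

memory_id = memori.add("User prefers PostgreSQL for production deployments")
results = memori.search("database preferences", limit=5)  # wraps retrieve_context()
stats = memori.get_stats()                                 # alias for get_memory_stats()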