memorisdk 2.0.1__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of memorisdk might be problematic.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +59 -51
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +376 -105
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +527 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1402 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +17 -17
- memori/database/sqlalchemy_manager.py +10 -12
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +10 -9
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.0.dist-info}/METADATA +22 -12
- memorisdk-2.1.0.dist-info/RECORD +71 -0
- memorisdk-2.0.1.dist-info/RECORD +0 -66
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.0.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.0.dist-info}/top_level.txt +0 -0
memori/core/memory.py
CHANGED
@@ -6,7 +6,7 @@ import asyncio
 import time
 import uuid
 from datetime import datetime
-from typing import Any
+from typing import Any
 
 from loguru import logger
 
@@ -22,7 +22,7 @@ except ImportError:
 from ..agents.conscious_agent import ConsciouscAgent
 from ..config.memory_manager import MemoryManager
 from ..config.settings import LoggingSettings, LogLevel
-from ..database.sqlalchemy_manager import SQLAlchemyDatabaseManager
+from ..database.sqlalchemy_manager import SQLAlchemyDatabaseManager
 from ..utils.exceptions import DatabaseError, MemoriError
 from ..utils.logging import LoggingManager
 from ..utils.pydantic_models import ConversationContext
@@ -41,30 +41,30 @@ class Memori:
         self,
         database_connect: str = "sqlite:///memori.db",
         template: str = "basic",
-        mem_prompt:
+        mem_prompt: str | None = None,
         conscious_ingest: bool = False,
         auto_ingest: bool = False,
-        namespace:
+        namespace: str | None = None,
         shared_memory: bool = False,
-        memory_filters:
-        openai_api_key:
-        user_id:
+        memory_filters: dict[str, Any] | None = None,
+        openai_api_key: str | None = None,
+        user_id: str | None = None,
         verbose: bool = False,
         # New provider configuration parameters
-        api_key:
-        api_type:
-        base_url:
-        azure_endpoint:
-        azure_deployment:
-        api_version:
-        azure_ad_token:
-        organization:
-        project:
-        model:
-        provider_config:
+        api_key: str | None = None,
+        api_type: str | None = None,
+        base_url: str | None = None,
+        azure_endpoint: str | None = None,
+        azure_deployment: str | None = None,
+        api_version: str | None = None,
+        azure_ad_token: str | None = None,
+        organization: str | None = None,
+        project: str | None = None,
+        model: str | None = None,  # Allow custom model selection
+        provider_config: Any | None = None,  # ProviderConfig when available
         schema_init: bool = True,  # Initialize database schema and create tables
-        database_prefix:
-        database_suffix:
+        database_prefix: str | None = None,  # Database name prefix
+        database_suffix: str | None = None,  # Database name suffix
     ):
         """
         Initialize Memori memory system v1.0.
@@ -185,8 +185,10 @@ class Memori:
         # Setup logging based on verbose mode
         self._setup_logging()
 
-        # Initialize database manager
-        self.db_manager =
+        # Initialize database manager (detect MongoDB vs SQL)
+        self.db_manager = self._create_database_manager(
+            database_connect, template, schema_init
+        )
 
         # Initialize Pydantic-based agents
         self.memory_agent = None
@@ -319,6 +321,62 @@ class Memori:
                 "Verbose logging enabled - only loguru logs will be displayed"
             )
 
+    def _create_database_manager(
+        self, database_connect: str, template: str, schema_init: bool
+    ):
+        """Create appropriate database manager based on connection string with fallback"""
+        try:
+            # Detect MongoDB connection strings
+            if self._is_mongodb_connection(database_connect):
+                logger.info(
+                    "Detected MongoDB connection string - attempting MongoDB manager"
+                )
+                try:
+                    from ..database.mongodb_manager import MongoDBDatabaseManager
+
+                    # Test MongoDB connection before proceeding
+                    manager = MongoDBDatabaseManager(
+                        database_connect, template, schema_init
+                    )
+                    # Verify connection works
+                    _ = manager._get_client()
+                    logger.info("MongoDB manager initialized successfully")
+                    return manager
+                except ImportError:
+                    logger.error(
+                        "MongoDB support requires pymongo. Install with: pip install pymongo"
+                    )
+                    logger.info("Falling back to SQLite for compatibility")
+                    return self._create_fallback_sqlite_manager(template, schema_init)
+                except Exception as e:
+                    logger.error(f"MongoDB connection failed: {e}")
+                    logger.info("Falling back to SQLite for compatibility")
+                    return self._create_fallback_sqlite_manager(template, schema_init)
+            else:
+                logger.info("Detected SQL connection string - using SQLAlchemy manager")
+                return SQLAlchemyDatabaseManager(
+                    database_connect, template, schema_init
+                )
+
+        except Exception as e:
+            logger.error(f"Failed to create database manager: {e}")
+            logger.info("Creating fallback SQLite manager")
+            return self._create_fallback_sqlite_manager(template, schema_init)
+
+    def _create_fallback_sqlite_manager(self, template: str, schema_init: bool):
+        """Create fallback SQLite manager when other options fail"""
+        fallback_connect = "sqlite:///memori_fallback.db"
+        logger.warning(f"Using fallback SQLite database: {fallback_connect}")
+        return SQLAlchemyDatabaseManager(fallback_connect, template, schema_init)
+
+    def _is_mongodb_connection(self, database_connect: str) -> bool:
+        """Detect if connection string is for MongoDB"""
+        mongodb_prefixes = [
+            "mongodb://",
+            "mongodb+srv://",
+        ]
+        return any(database_connect.startswith(prefix) for prefix in mongodb_prefixes)
+
     def _setup_database(self):
         """Setup database tables based on template"""
         if not self.schema_init:
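
The block above is the headline change in 2.1.0: connection strings starting with mongodb:// or mongodb+srv:// are routed to the new MongoDBDatabaseManager (pymongo required), anything else stays on SQLAlchemyDatabaseManager, and failures fall back to a local sqlite:///memori_fallback.db. A minimal usage sketch, assuming Memori is importable from the top-level memori package (the diff itself only shows memori/core/memory.py) and using a placeholder MongoDB URL:

    # Hedged sketch: the Memori import path and the MongoDB URL below are assumptions.
    from memori import Memori

    # A mongodb:// or mongodb+srv:// URL selects the new MongoDB manager
    # (needs `pip install pymongo`); any other URL keeps the SQLAlchemy path.
    mongo_memory = Memori(
        database_connect="mongodb+srv://user:pass@cluster.example.net/memori"
    )

    # Unchanged default behavior for SQL backends:
    sqlite_memory = Memori(database_connect="sqlite:///memori.db")
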
@@ -572,7 +630,7 @@ class Memori:
             )
             return False
 
-    def enable(self, interceptors:
+    def enable(self, interceptors: list[str] | None = None):
         """
         Enable universal memory recording using LiteLLM's native callback system.
 
@@ -665,11 +723,11 @@
 
     # Memory system status and control methods
 
-    def get_interceptor_status(self) ->
+    def get_interceptor_status(self) -> dict[str, dict[str, Any]]:
         """Get status of memory recording system"""
         return self.memory_manager.get_status()
 
-    def get_interceptor_health(self) ->
+    def get_interceptor_health(self) -> dict[str, Any]:
         """Get health check of interceptor system"""
         return self.memory_manager.get_health()
 
@@ -955,58 +1013,94 @@ class Memori:
 
         return params
 
-    def _get_conscious_context(self) ->
+    def _get_conscious_context(self) -> list[dict[str, Any]]:
         """
         Get conscious context from ALL short-term memory summaries.
         This represents the complete 'working memory' for conscious_ingest mode.
         Used only at program startup when conscious_ingest=True.
+        Database-agnostic version that works with both SQL and MongoDB.
         """
         try:
-            from
+            # Detect database type from the db_manager
+            db_type = getattr(self.db_manager, "database_type", "sql")
 
-
-            #
-
-
-
-
-                    SELECT memory_id, processed_data, importance_score,
-                           category_primary, summary, searchable_content,
-                           created_at, access_count
-                    FROM short_term_memory
-                    WHERE namespace = :namespace AND (expires_at IS NULL OR expires_at > :current_time)
-                    ORDER BY importance_score DESC, created_at DESC
-                """
-                ),
-                {"namespace": self.namespace, "current_time": datetime.now()},
+            if db_type == "mongodb":
+                # Use MongoDB-specific method
+                memories = self.db_manager.get_short_term_memory(
+                    namespace=self.namespace,
+                    limit=1000,  # Large limit to get all memories
+                    include_expired=False,
                 )
 
-
-
-
+                # Convert to consistent format
+                formatted_memories = []
+                for memory in memories:
+                    formatted_memories.append(
                         {
-                        "memory_id":
-                        "processed_data":
-                        "importance_score":
-                        "category_primary":
-                        "summary":
-                        "searchable_content":
-                        "created_at":
-                        "access_count":
+                            "memory_id": memory.get("memory_id"),
+                            "processed_data": memory.get("processed_data"),
+                            "importance_score": memory.get("importance_score", 0),
+                            "category_primary": memory.get("category_primary", ""),
+                            "summary": memory.get("summary", ""),
+                            "searchable_content": memory.get("searchable_content", ""),
+                            "created_at": memory.get("created_at"),
+                            "access_count": memory.get("access_count", 0),
                             "memory_type": "short_term",
                         }
                     )
 
                 logger.debug(
-                    f"Retrieved {len(
+                    f"Retrieved {len(formatted_memories)} conscious memories from MongoDB short-term storage"
                 )
-            return
+                return formatted_memories
+
+            else:
+                # Use SQL method
+                from sqlalchemy import text
+
+                with self.db_manager._get_connection() as conn:
+                    # Get ALL short-term memories (no limit) ordered by importance and recency
+                    # This gives the complete conscious context as single initial injection
+                    result = conn.execute(
+                        text(
+                            """
+                            SELECT memory_id, processed_data, importance_score,
+                                   category_primary, summary, searchable_content,
+                                   created_at, access_count
+                            FROM short_term_memory
+                            WHERE namespace = :namespace AND (expires_at IS NULL OR expires_at > :current_time)
+                            ORDER BY importance_score DESC, created_at DESC
+                            """
+                        ),
+                        {"namespace": self.namespace, "current_time": datetime.now()},
+                    )
+
+                    memories = []
+                    for row in result:
+                        memories.append(
+                            {
+                                "memory_id": row[0],
+                                "processed_data": row[1],
+                                "importance_score": row[2],
+                                "category_primary": row[3],
+                                "summary": row[4],
+                                "searchable_content": row[5],
+                                "created_at": row[6],
+                                "access_count": row[7],
+                                "memory_type": "short_term",
+                            }
+                        )
+
+                    logger.debug(
+                        f"Retrieved {len(memories)} conscious memories from SQL short-term storage"
+                    )
+                    return memories
 
         except Exception as e:
             logger.error(f"Failed to get conscious context: {e}")
             return []
 
-    def _get_auto_ingest_context(self, user_input: str) ->
+    def _get_auto_ingest_context(self, user_input: str) -> list[dict[str, Any]]:
         """
         Get auto-ingest context using retrieval agent for intelligent search.
         Searches through entire database for relevant memories.
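
The rewritten _get_conscious_context above no longer assumes SQLAlchemy: it checks a database_type attribute on the active manager and takes either the MongoDB document path or the raw-SQL path. A standalone sketch of that dispatch pattern, with hypothetical stub managers (only the getattr(..., "database_type", "sql") check comes from the diff):

    from typing import Any

    class _SqlStub:            # hypothetical stand-in for SQLAlchemyDatabaseManager
        database_type = "sql"

    class _MongoStub:          # hypothetical stand-in for MongoDBDatabaseManager
        database_type = "mongodb"

    def pick_query_path(db_manager: Any) -> str:
        # Same detection as the diff: default to "sql" when the attribute is missing.
        db_type = getattr(db_manager, "database_type", "sql")
        return "mongodb-document-query" if db_type == "mongodb" else "sql-text-query"

    print(pick_query_path(_SqlStub()))    # sql-text-query
    print(pick_query_path(_MongoStub()))  # mongodb-document-query
    print(pick_query_path(object()))      # sql-text-query (safe default)
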
@@ -1209,7 +1303,7 @@ class Memori:
         except Exception as e:
             logger.error(f"Failed to record OpenAI conversation: {e}")
 
-    def _extract_openai_user_input(self, messages:
+    def _extract_openai_user_input(self, messages: list[dict]) -> str:
         """Extract user input from OpenAI messages with support for complex content types"""
         user_input = ""
         try:
@@ -1288,8 +1382,8 @@ class Memori:
         return ai_output
 
     def _extract_openai_metadata(
-        self, kwargs:
-    ) ->
+        self, kwargs: dict, response, tokens_used: int
+    ) -> dict:
         """Extract comprehensive metadata from OpenAI request and response"""
         metadata = {
             "integration": "openai_auto",
@@ -1383,7 +1477,7 @@ class Memori:
         except Exception as e:
             logger.error(f"Failed to record Anthropic conversation: {e}")
 
-    def _extract_anthropic_user_input(self, messages:
+    def _extract_anthropic_user_input(self, messages: list[dict]) -> str:
         """Extract user input from Anthropic messages with support for complex content types"""
         user_input = ""
         try:
@@ -1483,8 +1577,8 @@ class Memori:
         return tokens_used
 
     def _extract_anthropic_metadata(
-        self, kwargs:
-    ) ->
+        self, kwargs: dict, response, tokens_used: int
+    ) -> dict:
         """Extract comprehensive metadata from Anthropic request and response"""
         metadata = {
             "integration": "anthropic_auto",
@@ -1608,40 +1702,134 @@ class Memori:
     # in memori.integrations.litellm_integration
 
     def _process_memory_sync(
-        self,
+        self,
+        chat_id: str,
+        user_input: str,
+        ai_output: str,
+        model: str = "unknown",
+        retry_count: int = 0,
     ):
-        """Synchronous memory processing fallback"""
+        """Synchronous memory processing fallback with retry logic"""
         if not self.memory_agent:
             logger.warning("Memory agent not available, skipping memory ingestion")
             return
 
+        max_retries = 2  # Maximum retry attempts
+
         try:
             # Run async processing in new event loop
             import threading
 
             def run_memory_processing():
-
-
+                """Run memory processing with improved event loop management"""
+                new_loop = None
                 try:
+                    # Check if we're already in an async context
+                    try:
+                        asyncio.get_running_loop()
+                        logger.debug(
+                            "Found existing event loop, creating new one for memory processing"
+                        )
+                    except RuntimeError:
+                        # No running loop, safe to create new one
+                        logger.debug("No existing event loop found, creating new one")
+
+                    new_loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(new_loop)
+
+                    logger.debug(
+                        f"Starting memory processing for {chat_id} (attempt {retry_count + 1})"
+                    )
+
+                    # Add timeout to prevent hanging
                     new_loop.run_until_complete(
-
-
+                        asyncio.wait_for(
+                            self._process_memory_async(
+                                chat_id, user_input, ai_output, model
+                            ),
+                            timeout=60.0,  # 60 second timeout
                         )
                     )
+                    logger.debug(
+                        f"Memory processing completed successfully for {chat_id}"
+                    )
+
+                except asyncio.TimeoutError as e:
+                    logger.error(
+                        f"Memory processing timed out for {chat_id} (attempt {retry_count + 1}): {e}"
+                    )
+                    if retry_count < max_retries:
+                        logger.info(
+                            f"Retrying memory processing for {chat_id} ({retry_count + 1}/{max_retries})"
+                        )
+                        # Schedule retry
+                        import time
+
+                        time.sleep(2)  # Wait 2 seconds before retry
+                        self._process_memory_sync(
+                            chat_id, user_input, ai_output, model, retry_count + 1
+                        )
                 except Exception as e:
-                    logger.error(
+                    logger.error(
+                        f"Memory processing failed for {chat_id} (attempt {retry_count + 1}): {e}"
+                    )
+                    import traceback
+
+                    logger.error(f"Full error traceback: {traceback.format_exc()}")
+                    if retry_count < max_retries:
+                        logger.info(
+                            f"Retrying memory processing for {chat_id} ({retry_count + 1}/{max_retries})"
+                        )
+                        # Schedule retry
+                        import time
+
+                        time.sleep(2)  # Wait 2 seconds before retry
+                        self._process_memory_sync(
+                            chat_id, user_input, ai_output, model, retry_count + 1
+                        )
                 finally:
-                    new_loop.
+                    if new_loop and not new_loop.is_closed():
+                        # Clean up pending tasks
+                        pending = asyncio.all_tasks(new_loop)
+                        if pending:
+                            logger.debug(f"Cancelling {len(pending)} pending tasks")
+                            for task in pending:
+                                task.cancel()
+                            # Wait for cancellation to complete
+                            new_loop.run_until_complete(
+                                asyncio.gather(*pending, return_exceptions=True)
+                            )
+
+                        new_loop.close()
+                        logger.debug(f"Event loop closed for {chat_id}")
+
+                    # Reset event loop policy to prevent conflicts
+                    try:
+                        asyncio.set_event_loop(None)
+                    except:
+                        pass
 
             # Run in background thread to avoid blocking
             thread = threading.Thread(target=run_memory_processing, daemon=True)
             thread.start()
             logger.debug(
-                f"Memory processing started in background thread for {chat_id}"
+                f"Memory processing started in background thread for {chat_id} (attempt {retry_count + 1})"
             )
 
         except Exception as e:
-            logger.error(
+            logger.error(
+                f"Failed to start synchronous memory processing for {chat_id}: {e}"
+            )
+            if retry_count < max_retries:
+                logger.info(
+                    f"Retrying memory processing startup for {chat_id} ({retry_count + 1}/{max_retries})"
+                )
+                import time
+
+                time.sleep(2)
+                self._process_memory_sync(
+                    chat_id, user_input, ai_output, model, retry_count + 1
+                )
 
     def _parse_llm_response(self, response) -> tuple[str, str]:
         """Extract text and model from various LLM response formats."""
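
The reworked _process_memory_sync above runs the async memory pipeline on a dedicated event loop inside a daemon thread, bounded by a 60-second asyncio.wait_for timeout and at most two retries with a 2-second pause. A generic, self-contained sketch of that pattern (not the package's code; names are illustrative):

    import asyncio
    import threading
    import time

    def run_in_background(coro_factory, timeout: float = 60.0, max_retries: int = 2) -> None:
        """Run an async callable on its own event loop in a daemon thread, with timeout and retries."""

        def worker(attempt: int = 0) -> None:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                loop.run_until_complete(asyncio.wait_for(coro_factory(), timeout=timeout))
            except Exception:
                if attempt < max_retries:
                    time.sleep(2)  # brief pause before retrying, mirroring the diff's 2-second wait
                    worker(attempt + 1)
            finally:
                loop.close()
                asyncio.set_event_loop(None)

        threading.Thread(target=worker, daemon=True).start()

    async def _demo():
        await asyncio.sleep(0.1)

    run_in_background(_demo)
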
@@ -1685,7 +1873,7 @@ class Memori:
         user_input: str,
         ai_output=None,
         model: str = None,
-        metadata:
+        metadata: dict[str, Any] | None = None,
     ) -> str:
         """
         Record a conversation.
@@ -1702,6 +1890,11 @@ class Memori:
         if not self._enabled:
             raise MemoriError("Memori is not enabled. Call enable() first.")
 
+        # Debug logging for conversation recording
+        logger.info(
+            f"Recording conversation - Input: '{user_input[:100]}...' Model: {model}"
+        )
+
         # Parse response
         response_text, detected_model = self._parse_llm_response(ai_output)
         response_model = model or detected_model
@@ -1710,26 +1903,42 @@ class Memori:
         chat_id = str(uuid.uuid4())
         timestamp = datetime.now()
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            self._schedule_memory_processing(
-                chat_id, user_input, response_text, response_model
+        try:
+            # Store conversation
+            self.db_manager.store_chat_history(
+                chat_id=chat_id,
+                user_input=user_input,
+                ai_output=response_text,
+                model=response_model,
+                timestamp=timestamp,
+                session_id=self._session_id,
+                namespace=self.namespace,
+                metadata=metadata or {},
+            )
+            logger.debug(
+                f"Successfully stored chat history for conversation: {chat_id}"
             )
 
-
-
+            # Always process into long-term memory when memory agent is available
+            if self.memory_agent:
+                self._schedule_memory_processing(
+                    chat_id, user_input, response_text, response_model
+                )
+                logger.debug(f"Scheduled memory processing for conversation: {chat_id}")
+            else:
+                logger.warning(
+                    f"Memory agent not available, skipping memory processing for: {chat_id}"
+                )
+
+            logger.info(f"Recorded conversation successfully: {chat_id}")
+            return chat_id
+
+        except Exception as e:
+            logger.error(f"Failed to record conversation {chat_id}: {e}")
+            import traceback
+
+            logger.error(f"Recording error details: {traceback.format_exc()}")
+            raise
 
     def _schedule_memory_processing(
         self, chat_id: str, user_input: str, ai_output: str, model: str
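
With the change above, record_conversation stores the exchange through db_manager.store_chat_history, schedules memory processing only when a memory agent is available, and returns the generated chat_id. A hedged usage sketch (assumes a top-level Memori export; the model name and inputs are placeholders):

    from memori import Memori  # assumption: top-level export

    memory = Memori(database_connect="sqlite:///memori.db")
    memory.enable()

    chat_id = memory.record_conversation(
        user_input="My favorite database is Postgres.",
        ai_output="Noted: you prefer Postgres.",
        model="gpt-4o-mini",              # illustrative model name
        metadata={"source": "example"},
    )
    print(f"Stored conversation {chat_id}")
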
@@ -1826,7 +2035,7 @@ class Memori:
         except Exception as e:
             logger.error(f"Memory ingestion failed for {chat_id}: {e}")
 
-    async def _get_recent_memories_for_dedup(self) ->
+    async def _get_recent_memories_for_dedup(self) -> list:
         """Get recent memories for deduplication check"""
         try:
             from sqlalchemy import text
@@ -1872,7 +2081,7 @@ class Memori:
             logger.error(f"Failed to get recent memories for dedup: {e}")
             return []
 
-    def retrieve_context(self, query: str, limit: int = 5) ->
+    def retrieve_context(self, query: str, limit: int = 5) -> list[dict[str, Any]]:
         """
         Retrieve relevant context for a query with priority on essential facts
 
@@ -1929,7 +2138,7 @@ class Memori:
             logger.error(f"Context retrieval failed: {e}")
             return []
 
-    def get_conversation_history(self, limit: int = 10) ->
+    def get_conversation_history(self, limit: int = 10) -> list[dict[str, Any]]:
         """Get recent conversation history"""
         try:
             return self.db_manager.get_chat_history(
@@ -1941,7 +2150,7 @@ class Memori:
             logger.error(f"Failed to get conversation history: {e}")
             return []
 
-    def clear_memory(self, memory_type:
+    def clear_memory(self, memory_type: str | None = None):
         """
         Clear memory data
 
@@ -1956,7 +2165,7 @@ class Memori:
         except Exception as e:
             raise MemoriError(f"Failed to clear memory: {e}")
 
-    def get_memory_stats(self) ->
+    def get_memory_stats(self) -> dict[str, Any]:
         """Get memory statistics"""
         try:
             return self.db_manager.get_memory_stats(self.namespace)
@@ -1974,7 +2183,7 @@ class Memori:
         """Get current session ID"""
         return self._session_id
 
-    def get_integration_stats(self) ->
+    def get_integration_stats(self) -> list[dict[str, Any]]:
         """Get statistics from the new interceptor system"""
         try:
             # Get system status first
@@ -2054,9 +2263,9 @@ class Memori:
 
     def update_user_context(
         self,
-        current_projects:
-        relevant_skills:
-        user_preferences:
+        current_projects: list[str] | None = None,
+        relevant_skills: list[str] | None = None,
+        user_preferences: list[str] | None = None,
     ):
         """Update user context for better memory processing"""
         if current_projects is not None:
@@ -2070,7 +2279,7 @@ class Memori:
 
     def search_memories_by_category(
         self, category: str, limit: int = 10
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Search memories by specific category"""
         try:
             return self.db_manager.search_memories(
@@ -2084,8 +2293,8 @@ class Memori:
         return []
 
     def get_entity_memories(
-        self, entity_value: str, entity_type:
-    ) ->
+        self, entity_value: str, entity_type: str | None = None, limit: int = 10
+    ) -> list[dict[str, Any]]:
         """Get memories that contain a specific entity"""
         try:
             # This would use the entity index in the database
@@ -2156,6 +2365,68 @@ class Memori:
         except Exception as e:
             logger.error(f"Failed to stop background analysis: {e}")
 
+    def add(self, text: str, metadata: dict[str, Any] | None = None) -> str:
+        """
+        Add a memory or text to the system.
+
+        This is a unified method that works with both SQL and MongoDB backends.
+        For simple text memories, it will be processed and categorized automatically.
+
+        Args:
+            text: The text content to store as memory
+            metadata: Optional metadata to store with the memory
+
+        Returns:
+            str: Unique identifier for the stored memory/conversation
+        """
+        if not self._enabled:
+            self.enable()
+
+        # For simple text memories, we treat them as user inputs with AI acknowledgment
+        # This ensures they get processed through the normal memory pipeline
+        ai_response = "Memory recorded successfully"
+
+        return self.record_conversation(
+            user_input=text,
+            ai_output=ai_response,
+            metadata=metadata or {"type": "manual_memory", "source": "add_method"},
+        )
+
+    def search(self, query: str, limit: int = 5) -> list[dict[str, Any]]:
+        """
+        Search for memories/conversations based on a query.
+
+        This is a unified method that works with both SQL and MongoDB backends.
+
+        Args:
+            query: Search query string
+            limit: Maximum number of results to return
+
+        Returns:
+            List of matching memories with their content and metadata
+        """
+        if not self._enabled:
+            logger.warning("Memori is not enabled. Returning empty results.")
+            return []
+
+        try:
+            # Use the existing retrieve_context method for consistency
+            return self.retrieve_context(query, limit=limit)
+        except Exception as e:
+            logger.error(f"Search failed: {e}")
+            return []
+
+    def get_stats(self) -> dict[str, Any]:
+        """
+        Get memory statistics.
+
+        This is a unified method that works with both SQL and MongoDB backends.
+
+        Returns:
+            Dictionary containing memory statistics
+        """
+        return self.get_memory_stats()
+
     def cleanup(self):
         """Clean up all async tasks and resources"""
         try:
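
The new convenience methods above (add, search, get_stats) give a small, backend-agnostic surface over the existing recording and retrieval paths. A hedged usage sketch (again assuming a top-level Memori export; return shapes follow the docstrings and are not verified here):

    from memori import Memori  # assumption: top-level export

    memory = Memori()  # defaults to sqlite:///memori.db
    memory.enable()

    # add() routes plain text through record_conversation() with a canned acknowledgment.
    memory.add("User prefers concise answers.", metadata={"topic": "preferences"})

    # search() delegates to retrieve_context() and returns a list of memory dicts.
    for hit in memory.search("preferences", limit=3):
        print(hit.get("summary") or hit)

    # get_stats() is a thin alias for get_memory_stats().
    print(memory.get_stats())
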
@@ -2391,7 +2662,7 @@ class Memori:
             logger.error(f"Failed to add memory to messages: {e}")
             return messages
 
-    def get_essential_conversations(self, limit: int = 10) ->
+    def get_essential_conversations(self, limit: int = 10) -> list[dict[str, Any]]:
         """Get essential conversations from short-term memory"""
         try:
             from sqlalchemy import text
@@ -2491,7 +2762,7 @@ class Memori:
 
     # Conversation management methods
 
-    def get_conversation_stats(self) ->
+    def get_conversation_stats(self) -> dict[str, Any]:
         """Get conversation manager statistics"""
         return self.conversation_manager.get_session_stats()
 