memorisdk 2.0.1__py3-none-any.whl → 2.1.1__py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Note: this version of memorisdk has been flagged as a potentially problematic release.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +59 -51
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +376 -105
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +654 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1484 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +17 -17
- memori/database/sqlalchemy_manager.py +10 -12
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +10 -9
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/METADATA +23 -12
- memorisdk-2.1.1.dist-info/RECORD +71 -0
- memorisdk-2.0.1.dist-info/RECORD +0 -66
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/top_level.txt +0 -0
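Two themes dominate this release: new MongoDB support (the added adapter, connector, manager, schema generator, and search adapter files above) and a sweep of typing modernization, where typing-module aliases (Optional, List, Dict) are replaced with built-in generics and PEP 604 unions. That is why so many removed lines in the hunks below end at a bare annotation. A minimal illustrative sketch of the style change; the "before" signature is an assumption about the old typing style, since the removed annotations are truncated in the diff:

# Illustrative only: function names and the "before" annotations are hypothetical.
from typing import Any, Dict, List, Optional

def search_memories_before(
    query: str,
    category_filter: Optional[List[str]] = None,
) -> List[Dict[str, Any]]: ...

# Style used throughout 2.1.1: built-in generics plus PEP 604 unions.
from typing import Any

def search_memories_after(
    query: str,
    category_filter: list[str] | None = None,
) -> list[dict[str, Any]]: ...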
memori/database/search/mysql_search_adapter.py CHANGED

@@ -3,7 +3,7 @@ MySQL FULLTEXT search adapter for Memori
 Implements MySQL-specific search functionality
 """

-from typing import Any
+from typing import Any

 from loguru import logger

@@ -49,9 +49,9 @@ class MySQLSearchAdapter(BaseSearchAdapter):
         self,
         query: str,
         namespace: str = "default",
-        category_filter:
+        category_filter: list[str] | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute MySQL FULLTEXT search or fallback to LIKE search"""
         if self.fulltext_available and query and query.strip():
             try:
@@ -70,9 +70,9 @@ class MySQLSearchAdapter(BaseSearchAdapter):
         self,
         query: str,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute MySQL FULLTEXT search"""
         results = []
         translated_query = self.translate_search_query(query)
@@ -160,7 +160,7 @@ class MySQLSearchAdapter(BaseSearchAdapter):
         except:
             return 0.0

-    def create_search_indexes(self) ->
+    def create_search_indexes(self) -> list[str]:
         """Create MySQL FULLTEXT indexes"""
         commands = [
             # Create FULLTEXT index on short_term_memory
@@ -206,9 +206,9 @@ class MySQLSearchAdapter(BaseSearchAdapter):
         self,
         query: str,
         namespace: str = "default",
-        category_filter:
+        category_filter: list[str] | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute fallback LIKE-based search with MySQL syntax"""
         results = []

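For readers unfamiliar with the two code paths named in these MySQLSearchAdapter signatures, MySQL full-text search is a MATCH ... AGAINST query and the fallback is an ordinary LIKE filter. A rough sketch of the two query shapes; the table and column names are invented for illustration and are not taken from the package:

# Hypothetical sketch of the FULLTEXT / LIKE fallback pair.
def build_mysql_search_sql(use_fulltext: bool) -> str:
    if use_fulltext:
        # FULLTEXT path: relevance score comes from MATCH ... AGAINST.
        return (
            "SELECT memory_id, summary, "
            "MATCH(searchable_content) AGAINST(%s IN NATURAL LANGUAGE MODE) AS score "
            "FROM long_term_memory "
            "WHERE namespace = %s "
            "AND MATCH(searchable_content) AGAINST(%s IN NATURAL LANGUAGE MODE) "
            "ORDER BY score DESC LIMIT %s"
        )
    # Fallback path: plain substring match, no real ranking signal.
    return (
        "SELECT memory_id, summary, 0.0 AS score "
        "FROM long_term_memory "
        "WHERE namespace = %s AND searchable_content LIKE %s "
        "LIMIT %s"
    )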
memori/database/search/sqlite_search_adapter.py CHANGED

@@ -4,7 +4,7 @@ Maintains existing FTS5 functionality
 """

 import sqlite3
-from typing import Any
+from typing import Any

 from loguru import logger

@@ -34,9 +34,9 @@ class SQLiteSearchAdapter(BaseSearchAdapter):
         self,
         query: str,
         namespace: str = "default",
-        category_filter:
+        category_filter: list[str] | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute FTS5 search or fallback to LIKE search"""
         if self.fts5_available:
             try:
@@ -53,9 +53,9 @@ class SQLiteSearchAdapter(BaseSearchAdapter):
         self,
         query: str,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute FTS5 search (adapted from original implementation)"""
         # Build FTS query with category filter - handle empty queries
         if query and query.strip():
@@ -119,7 +119,7 @@ class SQLiteSearchAdapter(BaseSearchAdapter):

         return self.connector.execute_query(sql_query, params)

-    def create_search_indexes(self) ->
+    def create_search_indexes(self) -> list[str]:
         """Create FTS5 virtual table and triggers"""
         if not self.fts5_available:
             logger.warning("FTS5 not available, skipping FTS index creation")
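The SQLiteSearchAdapter keeps the same FTS5-or-LIKE structure using SQLite's FTS5 extension. A small self-contained sketch of that idea with only the standard sqlite3 module; the table and data here are made up for the demo and are not Memori's schema:

import sqlite3

def fts5_demo() -> list[tuple]:
    conn = sqlite3.connect(":memory:")
    try:
        # Probe FTS5 availability by attempting to create a virtual table.
        conn.execute("CREATE VIRTUAL TABLE notes_fts USING fts5(content)")
    except sqlite3.OperationalError:
        # Fallback path: plain table plus LIKE search.
        conn.execute("CREATE TABLE notes_fts (content TEXT)")
        conn.execute("INSERT INTO notes_fts VALUES ('memori stores conversation memory')")
        return conn.execute(
            "SELECT content FROM notes_fts WHERE content LIKE ?", ("%memory%",)
        ).fetchall()
    conn.execute("INSERT INTO notes_fts VALUES ('memori stores conversation memory')")
    # FTS5 path: MATCH query ranked by bm25().
    return conn.execute(
        "SELECT content FROM notes_fts WHERE notes_fts MATCH ? ORDER BY bm25(notes_fts)",
        ("memory",),
    ).fetchall()

print(fts5_demo())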
memori/database/search_service.py CHANGED

@@ -4,7 +4,7 @@ Provides cross-database full-text search capabilities
 """

 from datetime import datetime
-from typing import Any
+from typing import Any

 from loguru import logger
 from sqlalchemy import and_, desc, or_, text
@@ -24,10 +24,10 @@ class SearchService:
         self,
         query: str,
         namespace: str = "default",
-        category_filter:
+        category_filter: list[str] | None = None,
         limit: int = 10,
-        memory_types:
-    ) ->
+        memory_types: list[str] | None = None,
+    ) -> list[dict[str, Any]]:
         """
         Search memories across different database backends

@@ -151,11 +151,11 @@ class SearchService:
         self,
         query: str,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
         search_short_term: bool,
         search_long_term: bool,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Search using SQLite FTS5"""
         try:
             logger.debug(
@@ -259,11 +259,11 @@ class SearchService:
         self,
         query: str,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
         search_short_term: bool,
         search_long_term: bool,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Search using MySQL FULLTEXT"""
         results = []

@@ -356,11 +356,11 @@ class SearchService:
         self,
         query: str,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
         search_short_term: bool,
         search_long_term: bool,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Search using PostgreSQL tsvector"""
         results = []

@@ -461,11 +461,11 @@ class SearchService:
         self,
         query: str,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
         search_short_term: bool,
         search_long_term: bool,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Fallback LIKE-based search with improved flexibility"""
         logger.debug(
             f"Starting LIKE fallback search for query: '{query}' in namespace: '{namespace}'"
@@ -592,10 +592,10 @@ class SearchService:
     def _get_recent_memories(
         self,
         namespace: str,
-        category_filter:
+        category_filter: list[str] | None,
         limit: int,
-        memory_types:
-    ) ->
+        memory_types: list[str] | None,
+    ) -> list[dict[str, Any]]:
         """Get recent memories when no search query is provided"""
         results = []

@@ -667,8 +667,8 @@ class SearchService:
         return results

     def _rank_and_limit_results(
-        self, results:
-    ) ->
+        self, results: list[dict[str, Any]], limit: int
+    ) -> list[dict[str, Any]]:
         """Rank and limit search results"""
         # Calculate composite score
         for result in results:
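The _rank_and_limit_results hunk only retypes the signature; judging from the surrounding context lines, the body computes a composite score and trims the list to the requested limit. A minimal sketch of that idea, with the score fields and weights assumed for illustration rather than taken from the package:

from typing import Any

def rank_and_limit(results: list[dict[str, Any]], limit: int) -> list[dict[str, Any]]:
    # Assumed fields: a backend 'search_score' and a per-memory 'importance_score'.
    for result in results:
        result["composite_score"] = (
            0.7 * result.get("search_score", 0.0)
            + 0.3 * result.get("importance_score", 0.0)
        )
    # Highest composite score first, then cut to the limit.
    results.sort(key=lambda r: r["composite_score"], reverse=True)
    return results[:limit]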
memori/database/sqlalchemy_manager.py CHANGED

@@ -9,7 +9,7 @@ import ssl
 import uuid
 from datetime import datetime
 from pathlib import Path
-from typing import Any
+from typing import Any
 from urllib.parse import parse_qs, urlparse

 from loguru import logger
@@ -461,7 +461,7 @@ class SQLAlchemyDatabaseManager:
         session_id: str,
         namespace: str = "default",
         tokens_used: int = 0,
-        metadata:
+        metadata: dict[str, Any] | None = None,
     ):
         """Store chat history"""
         with self.SessionLocal() as session:
@@ -488,9 +488,9 @@ class SQLAlchemyDatabaseManager:
     def get_chat_history(
         self,
         namespace: str = "default",
-        session_id:
+        session_id: str | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Get chat history with optional session filtering"""
         with self.SessionLocal() as session:
             try:
@@ -581,9 +581,9 @@ class SQLAlchemyDatabaseManager:
         self,
         query: str,
         namespace: str = "default",
-        category_filter:
+        category_filter: list[str] | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Search memories using the cross-database search service"""
         search_service = None
         try:
@@ -630,7 +630,7 @@ class SQLAlchemyDatabaseManager:
             except Exception as session_e:
                 logger.warning(f"Error closing search service session: {session_e}")

-    def get_memory_stats(self, namespace: str = "default") ->
+    def get_memory_stats(self, namespace: str = "default") -> dict[str, Any]:
         """Get comprehensive memory statistics"""
         with self.SessionLocal() as session:
             try:
@@ -727,9 +727,7 @@ class SQLAlchemyDatabaseManager:
         except SQLAlchemyError as e:
             raise DatabaseError(f"Failed to get memory stats: {e}")

-    def clear_memory(
-        self, namespace: str = "default", memory_type: Optional[str] = None
-    ):
+    def clear_memory(self, namespace: str = "default", memory_type: str | None = None):
         """Clear memory data"""
         with self.SessionLocal() as session:
             try:
@@ -762,7 +760,7 @@ class SQLAlchemyDatabaseManager:
             session.rollback()
             raise DatabaseError(f"Failed to clear memory: {e}")

-    def execute_with_translation(self, query: str, parameters:
+    def execute_with_translation(self, query: str, parameters: dict[str, Any] = None):
         """
         Execute a query with automatic parameter translation for cross-database compatibility.

@@ -865,7 +863,7 @@ class SQLAlchemyDatabaseManager:
         if hasattr(self, "engine"):
             self.engine.dispose()

-    def get_database_info(self) ->
+    def get_database_info(self) -> dict[str, Any]:
         """Get database information and capabilities"""
         base_info = {
             "database_type": self.database_type,
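Most of these SQLAlchemyDatabaseManager hunks sit inside the same `with self.SessionLocal() as session:` block visible in the context lines. A small standalone sketch of that session pattern; the engine URL and table name are placeholders, not Memori's own:

from sqlalchemy import create_engine, text
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")  # placeholder URL
SessionLocal = sessionmaker(bind=engine)

def clear_namespace(namespace: str = "default") -> None:
    # Mirrors the manager's structure: context-managed session, commit on
    # success, rollback and re-raise a wrapped error on failure.
    with SessionLocal() as session:
        try:
            session.execute(
                text("DELETE FROM short_term_memory WHERE namespace = :ns"),
                {"ns": namespace},
            )
            session.commit()
        except SQLAlchemyError as e:
            session.rollback()
            raise RuntimeError(f"Failed to clear memory: {e}")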
memori/integrations/__init__.py CHANGED

memori/integrations/anthropic_integration.py CHANGED

@@ -14,8 +14,6 @@ Usage:
     response = client.messages.create(...)
 """

-from typing import Optional
-
 from loguru import logger


@@ -25,7 +23,7 @@ class MemoriAnthropic:
     without monkey-patching. Drop-in replacement for Anthropic client.
     """

-    def __init__(self, memori_instance, api_key:
+    def __init__(self, memori_instance, api_key: str | None = None, **kwargs):
         """
         Initialize MemoriAnthropic wrapper

memori/integrations/litellm_integration.py CHANGED

@@ -16,8 +16,6 @@ Usage:
     response = completion(model="gpt-4o", messages=[...])
 """

-from typing import Optional
-
 from loguru import logger

 try:
@@ -186,6 +184,18 @@ class LiteLLMCallbackManager:
         if hasattr(choice, "message") and hasattr(choice.message, "content"):
             ai_output = choice.message.content or ""

+        # Debug logging to help diagnose recording issues
+        if user_input:
+            logger.debug(
+                f"LiteLLM callback: Recording conversation '{user_input[:50]}...'"
+                if len(user_input) > 50
+                else f"LiteLLM callback: Recording conversation '{user_input}'"
+            )
+        else:
+            logger.warning(
+                f"LiteLLM callback: No user input found in messages: {[msg.get('role') for msg in messages]}"
+            )
+
         # Extract model
         model = kwargs.get("model", "litellm-unknown")

@@ -193,8 +203,8 @@ class LiteLLMCallbackManager:
         duration_ms = 0
         if start_time is not None and end_time is not None:
             try:
-                if isinstance(start_time,
-                    end_time,
+                if isinstance(start_time, int | float) and isinstance(
+                    end_time, int | float
                 ):
                     duration_ms = (end_time - start_time) * 1000
             except Exception:
@@ -233,11 +243,18 @@ class LiteLLMCallbackManager:
                     metadata=metadata,
                 )
                 logger.debug(
-                    f"LiteLLM callback:
+                    f"LiteLLM callback: Successfully recorded conversation for model {model}"
+                )
+            else:
+                logger.warning(
+                    f"LiteLLM callback: Skipping record - user_input='{bool(user_input)}' ai_output='{bool(ai_output)}'"
                 )

         except Exception as e:
             logger.error(f"LiteLLM callback failed: {e}")
+            import traceback
+
+            logger.error(f"LiteLLM callback error details: {traceback.format_exc()}")

     def _setup_context_injection(self):
         """Set up context injection by wrapping LiteLLM's completion function."""
@@ -325,7 +342,7 @@ class LiteLLMCallbackManager:
         return self._callback_registered


-def setup_litellm_callbacks(memori_instance) ->
+def setup_litellm_callbacks(memori_instance) -> LiteLLMCallbackManager | None:
     """
     Convenience function to set up LiteLLM callbacks for a Memori instance.

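One detail in the duration hunk above: `isinstance(start_time, int | float)` passes a PEP 604 union directly to isinstance, which is only valid on Python 3.10+ (on older interpreters the `int | float` expression itself raises TypeError). It behaves exactly like the tuple form:

# Python 3.10+: a union type can be passed straight to isinstance().
start_time, end_time = 1.0, 3.5

assert isinstance(start_time, int | float)  # union form, 3.10+
assert isinstance(end_time, (int, float))   # equivalent tuple-based spelling

duration_ms = (end_time - start_time) * 1000
print(duration_ms)  # 2500.0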
memori/integrations/openai_integration.py CHANGED

@@ -250,20 +250,28 @@ class OpenAIInterceptor:
         for message in messages:
             content = message.get("content", "")
             if isinstance(content, str):
-                # Check for internal agent processing patterns
+                # Check for specific internal agent processing patterns
+                # Made patterns more specific to avoid false positives
                 internal_patterns = [
                     "Process this conversation for enhanced memory storage:",
-                    "User query:",
                     "Enhanced memory processing:",
                     "Memory classification:",
                     "Search for relevant memories:",
                     "Analyze conversation for:",
                     "Extract entities from:",
                     "Categorize the following conversation:",
+                    # More specific patterns to avoid blocking legitimate conversations
+                    "INTERNAL_MEMORY_PROCESSING:",
+                    "AGENT_PROCESSING_MODE:",
+                    "MEMORY_AGENT_TASK:",
                 ]

+                # Only flag as internal if it matches specific patterns AND has no user role
                 for pattern in internal_patterns:
                     if pattern in content:
+                        # Double-check: if this is a user message, don't filter it
+                        if message.get("role") == "user":
+                            continue
                         return True

         return False
@@ -281,9 +289,29 @@ class OpenAIInterceptor:
             json_data = getattr(options, "json_data", None) or {}

             if "messages" in json_data:
+                # Check if this is an internal agent processing call
+                is_internal = cls._is_internal_agent_call(json_data)
+
+                # Debug logging to help diagnose recording issues
+                user_messages = [
+                    msg
+                    for msg in json_data.get("messages", [])
+                    if msg.get("role") == "user"
+                ]
+                if user_messages and not is_internal:
+                    user_content = user_messages[-1].get("content", "")[:50]
+                    logger.debug(
+                        f"Recording conversation: '{user_content}...' (internal_check={is_internal})"
+                    )
+                elif is_internal:
+                    logger.debug(
+                        "Skipping internal agent call (detected pattern match)"
+                    )
+
                 # Skip internal agent processing calls
-                if
+                if is_internal:
                     continue
+
                 # Chat completions
                 memori_instance._record_openai_conversation(json_data, response)
             elif "prompt" in json_data:
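Reduced to a standalone function, the tightened heuristic in these OpenAIInterceptor hunks works roughly as follows: a message is treated as an internal agent call only if it contains one of the marker strings and is not a user-role message, so legitimate user turns are never filtered out. The pattern list below is abbreviated from the diff:

INTERNAL_PATTERNS = [
    "Process this conversation for enhanced memory storage:",
    "Enhanced memory processing:",
    "INTERNAL_MEMORY_PROCESSING:",
    "AGENT_PROCESSING_MODE:",
    "MEMORY_AGENT_TASK:",
]

def is_internal_agent_call(messages: list[dict]) -> bool:
    # A message only counts as internal if it matches a marker pattern
    # AND is not a user-role message (user turns are never filtered).
    for message in messages:
        content = message.get("content", "")
        if not isinstance(content, str):
            continue
        for pattern in INTERNAL_PATTERNS:
            if pattern in content:
                if message.get("role") == "user":
                    continue
                return True
    return False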
memori/tools/memory_tool.py CHANGED

@@ -3,7 +3,8 @@ Memory Tool - A tool/function for manual integration with any LLM library
 """

 import json
-from
+from collections.abc import Callable
+from typing import Any

 from loguru import logger

@@ -31,7 +32,7 @@ class MemoryTool:
         self.tool_name = "memori_memory"
         self.description = "Access and manage AI conversation memory"

-    def get_tool_schema(self) ->
+    def get_tool_schema(self) -> dict[str, Any]:
         """
         Get the tool schema for function calling in LLMs

@@ -240,7 +241,7 @@ class MemoryTool:
             )
             return f"Error searching memories: {str(e)}"

-    def _format_dict_to_string(self, result_dict:
+    def _format_dict_to_string(self, result_dict: dict[str, Any]) -> str:
         """Helper method to format dictionary results to readable strings"""
         if result_dict.get("error"):
             return f"Error: {result_dict['error']}"
@@ -287,7 +288,7 @@ class MemoryTool:
         message = result_dict.get("message", "Memory search completed")
         return message

-    def _record_conversation(self, **kwargs) ->
+    def _record_conversation(self, **kwargs) -> dict[str, Any]:
         """Record a conversation"""
         try:
             user_input = kwargs.get("user_input", "")
@@ -316,7 +317,7 @@ class MemoryTool:
             logger.error(f"Failed to record conversation: {e}")
             return {"error": f"Failed to record conversation: {str(e)}"}

-    def _retrieve_context(self, **kwargs) ->
+    def _retrieve_context(self, **kwargs) -> dict[str, Any]:
         """Retrieve relevant context for a query"""
         try:
             query = kwargs.get("query", "")
@@ -352,7 +353,7 @@ class MemoryTool:
             logger.error(f"Failed to retrieve context: {e}")
             return {"error": f"Failed to retrieve context: {str(e)}"}

-    def _search_memories(self, **kwargs) ->
+    def _search_memories(self, **kwargs) -> dict[str, Any]:
         """Search memories by content"""
         try:
             query = kwargs.get("query", "")
@@ -377,7 +378,7 @@ class MemoryTool:
             logger.error(f"Failed to search memories: {e}")
             return {"error": f"Failed to search memories: {str(e)}"}

-    def _get_stats(self, **kwargs) ->
+    def _get_stats(self, **kwargs) -> dict[str, Any]:
         """Get memory and integration statistics"""
         # kwargs can be used for future filtering options
         _ = kwargs  # Mark as intentionally unused
@@ -398,7 +399,7 @@ class MemoryTool:
             logger.error(f"Failed to get stats: {e}")
             return {"error": f"Failed to get stats: {str(e)}"}

-    def _get_essential_conversations(self, **kwargs) ->
+    def _get_essential_conversations(self, **kwargs) -> dict[str, Any]:
         """Get essential conversations from short-term memory"""
         try:
             limit = kwargs.get("limit", 10)
@@ -434,7 +435,7 @@ class MemoryTool:
             logger.error(f"Failed to get essential conversations: {e}")
             return {"error": f"Failed to get essential conversations: {str(e)}"}

-    def _trigger_analysis(self, **kwargs) ->
+    def _trigger_analysis(self, **kwargs) -> dict[str, Any]:
         """Trigger conscious agent analysis"""
         # kwargs can be used for future analysis options
         _ = kwargs  # Mark as intentionally unused