memorisdk 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff shows the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two package versions.
This version of memorisdk has been marked as a potentially problematic release.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +138 -63
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +438 -123
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +527 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1402 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +218 -66
- memori/database/sqlalchemy_manager.py +72 -25
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +104 -13
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/METADATA +36 -20
- memorisdk-2.1.0.dist-info/RECORD +71 -0
- memori/scripts/llm_text.py +0 -50
- memorisdk-2.0.0.dist-info/RECORD +0 -67
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/top_level.txt +0 -0
memori/__init__.py
CHANGED
@@ -5,7 +5,7 @@ Professional-grade memory layer with comprehensive error handling, configuration
 management, and modular architecture for production AI systems.
 """
 
-__version__ = "2.0.0"
+__version__ = "2.1.0"
 __author__ = "Harshal More"
 __email__ = "harshalmore2468@gmail.com"
 
@@ -71,8 +71,8 @@ from .utils import ( # Pydantic models; Enhanced exceptions; Validators and hel
 )
 
 # Memory agents (dynamically imported to avoid import errors)
-MemoryAgent:
-MemorySearchEngine:
+MemoryAgent: Any | None = None
+MemorySearchEngine: Any | None = None
 _AGENTS_AVAILABLE = False
 
 try:
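The `__init__.py` change gives the lazily imported agent classes explicit `None` sentinels instead of bare annotations, so the names always exist even when the optional import fails. A minimal standalone sketch of that general idiom follows; the `optional_agents` module and `HeavyAgent` name are hypothetical placeholders (the package's actual `try:` body is truncated in this diff), and the `Any | None` spelling assumes Python 3.10+.

from typing import Any

# Sentinels: the names are always defined, even if the import below fails.
HeavyAgent: Any | None = None
_AGENTS_AVAILABLE = False

try:
    # Hypothetical optional dependency standing in for the real agent module.
    from optional_agents import HeavyAgent

    _AGENTS_AVAILABLE = True
except ImportError:
    # Callers can test _AGENTS_AVAILABLE (or "HeavyAgent is None") before use.
    pass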
memori/agents/conscious_agent.py
CHANGED
@@ -3,11 +3,12 @@ Conscious Agent for User Context Management
 
 This agent copies conscious-info labeled memories from long-term memory
 directly to short-term memory for immediate context availability.
+
+Supports both SQL and MongoDB database backends.
 """
 
 import json
 from datetime import datetime
-from typing import List
 
 from loguru import logger
 
@@ -23,6 +24,34 @@ class ConsciouscAgent:
     def __init__(self):
         """Initialize the conscious agent"""
         self.context_initialized = False
+        self._database_type = None  # Will be detected from db_manager
+
+    def _detect_database_type(self, db_manager):
+        """Detect database type from db_manager with fallback detection"""
+        if self._database_type is None:
+            # Try multiple detection methods
+            if hasattr(db_manager, "database_type"):
+                self._database_type = db_manager.database_type
+            elif hasattr(db_manager, "__class__"):
+                class_name = db_manager.__class__.__name__
+                if "MongoDB" in class_name:
+                    self._database_type = "mongodb"
+                elif "SQLAlchemy" in class_name:
+                    self._database_type = "sql"
+                else:
+                    # Fallback detection by checking for MongoDB-specific methods
+                    if hasattr(db_manager, "_get_collection"):
+                        self._database_type = "mongodb"
+                    else:
+                        self._database_type = "sql"
+            else:
+                # Ultimate fallback
+                self._database_type = "sql"
+
+            logger.debug(
+                f"ConsciouscAgent: Detected database type: {self._database_type}"
+            )
+        return self._database_type
 
     async def run_conscious_ingest(
         self, db_manager, namespace: str = "default"
@@ -34,13 +63,15 @@ class ConsciouscAgent:
         directly to short-term memory as permanent context
 
         Args:
-            db_manager: Database manager instance
+            db_manager: Database manager instance (SQL or MongoDB)
             namespace: Memory namespace
 
         Returns:
             True if memories were copied, False otherwise
         """
         try:
+            db_type = self._detect_database_type(db_manager)
+
            # Get all conscious-info labeled memories
            conscious_memories = await self._get_conscious_memories(
                db_manager, namespace
@@ -52,17 +83,25 @@ class ConsciouscAgent:
 
             # Copy each conscious-info memory directly to short-term memory
             copied_count = 0
-            for
+            for memory_data in conscious_memories:
                 success = await self._copy_memory_to_short_term(
-                    db_manager, namespace,
+                    db_manager, namespace, memory_data
                 )
                 if success:
                     copied_count += 1
 
             # Mark memories as processed
-
-
-
+            if db_type == "mongodb":
+                memory_ids = [
+                    mem.get("memory_id")
+                    for mem in conscious_memories
+                    if isinstance(mem, dict) and mem.get("memory_id")
+                ]
+            else:
+                memory_ids = [
+                    row[0] for row in conscious_memories
+                ]  # memory_id is first column for SQL
+
             await self._mark_memories_processed(db_manager, memory_ids, namespace)
 
             self.context_initialized = True
@@ -92,32 +131,45 @@ class ConsciouscAgent:
             True if memories were processed, False otherwise
         """
         try:
-
+            db_type = self._detect_database_type(db_manager)
 
-
-            #
-
-
-                    """SELECT memory_id, processed_data, summary, searchable_content,
-                       importance_score, created_at
-                       FROM long_term_memory
-                       WHERE namespace = :namespace AND classification = 'conscious-info'
-                       ORDER BY importance_score DESC, created_at DESC"""
-                ),
-                {"namespace": namespace},
+            if db_type == "mongodb":
+                # Use MongoDB-specific method to get ALL conscious memories
+                existing_conscious_memories = db_manager.get_conscious_memories(
+                    namespace=namespace
                 )
-
+            else:
+                # Use SQL method
+                from sqlalchemy import text
+
+                with db_manager._get_connection() as connection:
+                    # Get ALL conscious-info labeled memories from long-term memory
+                    cursor = connection.execute(
+                        text(
+                            """SELECT memory_id, processed_data, summary, searchable_content,
+                               importance_score, created_at
+                               FROM long_term_memory
+                               WHERE namespace = :namespace AND classification = 'conscious-info'
+                               ORDER BY importance_score DESC, created_at DESC"""
+                        ),
+                        {"namespace": namespace},
+                    )
+                    existing_conscious_memories = cursor.fetchall()
 
             if not existing_conscious_memories:
-                logger.
+                logger.info(
                     "ConsciouscAgent: No existing conscious-info memories found for initialization"
                 )
                 return False
 
+            logger.info(
+                f"ConsciouscAgent: Found {len(existing_conscious_memories)} conscious-info memories to initialize"
+            )
+
             copied_count = 0
-            for
+            for memory_data in existing_conscious_memories:
                 success = await self._copy_memory_to_short_term(
-                    db_manager, namespace,
+                    db_manager, namespace, memory_data
                 )
                 if success:
                     copied_count += 1
@@ -128,7 +180,7 @@ class ConsciouscAgent:
                 )
                 return True
             else:
-                logger.
+                logger.info(
                     "ConsciouscAgent: No new conscious memories to initialize (all were duplicates)"
                 )
                 return False
@@ -171,8 +223,24 @@ class ConsciouscAgent:
                     copied_count += 1
 
             # Mark new memories as processed
-
-
+            db_type = self._detect_database_type(db_manager)
+            if db_type == "mongodb":
+                memory_ids = [
+                    mem.get("memory_id")
+                    for mem in new_memories
+                    if isinstance(mem, dict) and mem.get("memory_id")
+                ]
+            else:
+                memory_ids = [
+                    row[0] for row in new_memories
+                ]  # memory_id is first column for SQL
+
+            if memory_ids:
+                await self._mark_memories_processed(db_manager, memory_ids, namespace)
+            else:
+                logger.warning(
+                    "ConsciouscAgent: No valid memory IDs found to mark as processed"
+                )
 
             logger.info(
                 f"ConsciouscAgent: Copied {copied_count} new conscious-info memories to short-term memory"
@@ -180,26 +248,41 @@ class ConsciouscAgent:
             return copied_count > 0
 
         except Exception as e:
-            logger.error(
+            logger.error(
+                f"ConsciouscAgent: Context update failed with exception: {type(e).__name__}: {e}"
+            )
+            import traceback
+
+            logger.error(
+                f"ConsciouscAgent: Full error traceback: {traceback.format_exc()}"
+            )
             return False
 
-    async def _get_conscious_memories(self, db_manager, namespace: str) ->
-        """Get all conscious-info labeled memories from long-term memory"""
+    async def _get_conscious_memories(self, db_manager, namespace: str) -> list:
+        """Get all conscious-info labeled memories from long-term memory (database-agnostic)"""
         try:
-
+            db_type = self._detect_database_type(db_manager)
 
-
-
-
-
-
-
-
-
-
-
-
-
+            if db_type == "mongodb":
+                # Use MongoDB-specific method
+                memories = db_manager.get_conscious_memories(namespace=namespace)
+                return memories
+            else:
+                # Use SQL method
+                from sqlalchemy import text
+
+                with db_manager._get_connection() as connection:
+                    cursor = connection.execute(
+                        text(
+                            """SELECT memory_id, processed_data, summary, searchable_content,
+                               importance_score, created_at
+                               FROM long_term_memory
+                               WHERE namespace = :namespace AND classification = 'conscious-info'
+                               ORDER BY importance_score DESC, created_at DESC"""
+                        ),
+                        {"namespace": namespace},
+                    )
+                    return cursor.fetchall()
 
         except Exception as e:
             logger.error(f"ConsciouscAgent: Failed to get conscious memories: {e}")
@@ -207,33 +290,70 @@ class ConsciouscAgent:
 
     async def _get_unprocessed_conscious_memories(
         self, db_manager, namespace: str
-    ) ->
-        """Get unprocessed conscious-info labeled memories from long-term memory"""
+    ) -> list:
+        """Get unprocessed conscious-info labeled memories from long-term memory (database-agnostic)"""
         try:
-
+            db_type = self._detect_database_type(db_manager)
 
-
-
-
-
-                       importance_score, created_at
-                       FROM long_term_memory
-                       WHERE namespace = :namespace AND classification = 'conscious-info'
-                       AND conscious_processed = :conscious_processed
-                       ORDER BY importance_score DESC, created_at DESC"""
-                ),
-                {"namespace": namespace, "conscious_processed": False},
+            if db_type == "mongodb":
+                # Use MongoDB-specific method
+                memories = db_manager.get_unprocessed_conscious_memories(
+                    namespace=namespace
                 )
-            return
+                return memories
+            else:
+                # Use SQL method
+                from sqlalchemy import text
+
+                with db_manager._get_connection() as connection:
+                    cursor = connection.execute(
+                        text(
+                            """SELECT memory_id, processed_data, summary, searchable_content,
+                               importance_score, created_at
+                               FROM long_term_memory
+                               WHERE namespace = :namespace AND classification = 'conscious-info'
+                               AND conscious_processed = :conscious_processed
+                               ORDER BY importance_score DESC, created_at DESC"""
+                        ),
+                        {"namespace": namespace, "conscious_processed": False},
+                    )
+                    return cursor.fetchall()
 
         except Exception as e:
             logger.error(f"ConsciouscAgent: Failed to get unprocessed memories: {e}")
             return []
 
     async def _copy_memory_to_short_term(
+        self, db_manager, namespace: str, memory_data
+    ) -> bool:
+        """Copy a conscious memory directly to short-term memory with duplicate filtering (database-agnostic)"""
+        try:
+            db_type = self._detect_database_type(db_manager)
+
+            if db_type == "mongodb":
+                return await self._copy_memory_to_short_term_mongodb(
+                    db_manager, namespace, memory_data
+                )
+            else:
+                return await self._copy_memory_to_short_term_sql(
+                    db_manager, namespace, memory_data
+                )
+
+        except Exception as e:
+            memory_id = (
+                memory_data.get("memory_id")
+                if isinstance(memory_data, dict)
+                else memory_data[0]
+            )
+            logger.error(
+                f"ConsciouscAgent: Failed to copy memory {memory_id} to short-term: {e}"
+            )
+            return False
+
+    async def _copy_memory_to_short_term_sql(
         self, db_manager, namespace: str, memory_row: tuple
     ) -> bool:
-        """Copy a conscious memory
+        """Copy a conscious memory to short-term memory (SQL version)"""
         try:
             (
                 memory_id,
@@ -313,32 +433,124 @@ class ConsciouscAgent:
 
         except Exception as e:
             logger.error(
-                f"ConsciouscAgent: Failed to copy memory {memory_row[0]} to short-term: {e}"
+                f"ConsciouscAgent: Failed to copy SQL memory {memory_row[0]} to short-term: {e}"
+            )
+            return False
+
+    async def _copy_memory_to_short_term_mongodb(
+        self, db_manager, namespace: str, memory_data: dict
+    ) -> bool:
+        """Copy a conscious memory to short-term memory (MongoDB version)"""
+        try:
+            memory_id = memory_data.get("memory_id")
+            processed_data = memory_data.get("processed_data", "{}")
+            summary = memory_data.get("summary", "")
+            searchable_content = memory_data.get("searchable_content", "")
+            importance_score = memory_data.get("importance_score", 0.5)
+
+            logger.debug(
+                f"ConsciouscAgent: Processing MongoDB memory {memory_id} for short-term promotion"
+            )
+            logger.debug(f"  Content: {searchable_content[:100]}...")
+            logger.debug(f"  Summary: {summary[:100]}...")
+
+            # Check if similar content already exists in short-term memory
+            existing_memories = db_manager.search_short_term_memory(
+                query=searchable_content or summary, namespace=namespace, limit=1
+            )
+
+            # Check for exact duplicates
+            for existing in existing_memories:
+                if (
+                    existing.get("searchable_content") == searchable_content
+                    or existing.get("summary") == summary
+                ):
+                    logger.debug(
+                        f"ConsciouscAgent: Skipping duplicate memory {memory_id} - similar content already exists in short-term memory"
+                    )
+                    return False
+
+            # Create short-term memory ID
+            short_term_id = f"conscious_{memory_id}_{int(datetime.now().timestamp())}"
+
+            # Store in short-term memory using MongoDB-specific method
+            db_manager.store_short_term_memory(
+                memory_id=short_term_id,
+                processed_data=(
+                    processed_data
+                    if isinstance(processed_data, str)
+                    else json.dumps(processed_data)
+                ),
+                importance_score=importance_score,
+                category_primary="conscious_context",
+                retention_type="permanent",
+                namespace=namespace,
+                expires_at=None,  # No expiration (permanent)
+                searchable_content=searchable_content,
+                summary=summary,
+                is_permanent_context=True,
+            )
+
+            # Verify the memory was actually stored by directly finding it by memory_id
+            # Use direct lookup instead of text search since memory_id is not in text search index
+            verification_result = db_manager.find_short_term_memory_by_id(
+                memory_id=short_term_id, namespace=namespace
+            )
+
+            if not verification_result:
+                logger.error(
+                    f"ConsciouscAgent: VERIFICATION FAILED - Memory {short_term_id} not found in short-term memory after storage"
+                )
+                return False
+
+            logger.info(
+                f"ConsciouscAgent: Successfully copied memory {memory_id} to short-term as {short_term_id} (MongoDB) ✓ VERIFIED"
+            )
+            return True
+
+        except Exception as e:
+            logger.error(
+                f"ConsciouscAgent: Failed to copy MongoDB memory {memory_data.get('memory_id')} to short-term: {e}"
+            )
+            import traceback
+
+            logger.error(
+                f"ConsciouscAgent: Full error traceback: {traceback.format_exc()}"
             )
             return False
 
     async def _mark_memories_processed(
-        self, db_manager, memory_ids:
+        self, db_manager, memory_ids: list[str], namespace: str
     ):
-        """Mark memories as processed for conscious context"""
+        """Mark memories as processed for conscious context (database-agnostic)"""
         try:
-
+            if not memory_ids:
+                return
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            db_type = self._detect_database_type(db_manager)
+
+            if db_type == "mongodb":
+                # Use MongoDB-specific method
+                db_manager.mark_conscious_memories_processed(memory_ids, namespace)
+            else:
+                # Use SQL method
+                from sqlalchemy import text
+
+                with db_manager._get_connection() as connection:
+                    for memory_id in memory_ids:
+                        connection.execute(
+                            text(
+                                """UPDATE long_term_memory
+                                   SET conscious_processed = :conscious_processed
+                                   WHERE memory_id = :memory_id AND namespace = :namespace"""
+                            ),
+                            {
+                                "memory_id": memory_id,
+                                "namespace": namespace,
+                                "conscious_processed": True,
+                            },
+                        )
+                    connection.commit()
 
         except Exception as e:
             logger.error(f"ConsciouscAgent: Failed to mark memories processed: {e}")
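The recurring piece of this file's change is the `_detect_database_type` heuristic, which the agent uses to route every storage and query call to either the SQLAlchemy path or the new MongoDB manager methods. A condensed, standalone sketch of the heuristic is shown below; the two stub manager classes are illustrative stand-ins, not Memori's real database managers.

# Condensed version of the detection logic added in this file; the stub
# classes below are illustrative, not the real Memori database managers.
def detect_database_type(db_manager) -> str:
    if hasattr(db_manager, "database_type"):  # explicit attribute wins
        return db_manager.database_type
    name = type(db_manager).__name__
    if "MongoDB" in name:  # e.g. a class whose name contains "MongoDB"
        return "mongodb"
    if "SQLAlchemy" in name:  # e.g. a class whose name contains "SQLAlchemy"
        return "sql"
    # Duck-typing fallback: assume only the Mongo manager exposes _get_collection
    return "mongodb" if hasattr(db_manager, "_get_collection") else "sql"


class MongoDBManagerStub:
    def _get_collection(self, name):
        return None


class SQLAlchemyManagerStub:
    database_type = "sql"


print(detect_database_type(MongoDBManagerStub()))    # -> "mongodb"
print(detect_database_type(SQLAlchemyManagerStub()))  # -> "sql"

The same split explains the paired list comprehensions in the diff: SQL results arrive as row tuples (so `row[0]` is the memory_id), while MongoDB results are dictionaries read with `mem.get("memory_id")`.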
memori/agents/memory_agent.py
CHANGED
@@ -7,7 +7,7 @@ enhanced classification and conscious context detection.
 
 import json
 from datetime import datetime
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Optional
 
 import openai
 from loguru import logger
@@ -30,8 +30,8 @@ class MemoryAgent:
 
     def __init__(
         self,
-        api_key:
-        model:
+        api_key: str | None = None,
+        model: str | None = None,
         provider_config: Optional["ProviderConfig"] = None,
     ):
         """
@@ -60,6 +60,16 @@ class MemoryAgent:
         # Determine if we're using a local/custom endpoint that might not support structured outputs
         self._supports_structured_outputs = self._detect_structured_output_support()
 
+        # Database type detection for unified processing
+        self._database_type = None
+
+    def _detect_database_type(self, db_manager):
+        """Detect database type from db_manager"""
+        if self._database_type is None:
+            self._database_type = getattr(db_manager, "database_type", "sql")
+            logger.debug(f"MemoryAgent: Detected database type: {self._database_type}")
+        return self._database_type
+
     SYSTEM_PROMPT = """You are an advanced Memory Processing Agent responsible for analyzing conversations and extracting structured information with intelligent classification and conscious context detection.
 
 Your primary functions:
@@ -136,8 +146,8 @@ Focus on extracting information that would genuinely help provide better context
         chat_id: str,
         user_input: str,
         ai_output: str,
-        context:
-        existing_memories:
+        context: ConversationContext | None = None,
+        existing_memories: list[str] | None = None,
     ) -> ProcessedLongTermMemory:
         """
         Async conversation processing with classification and conscious context detection
@@ -262,9 +272,9 @@ CONVERSATION CONTEXT:
     async def detect_duplicates(
         self,
         new_memory: ProcessedLongTermMemory,
-        existing_memories:
+        existing_memories: list[ProcessedLongTermMemory],
         similarity_threshold: float = 0.8,
-    ) ->
+    ) -> str | None:
         """
         Detect if new memory is a duplicate of existing memories
 
@@ -470,7 +480,7 @@ CONVERSATION CONTEXT:
 }"""
 
     def _create_memory_from_dict(
-        self, data:
+        self, data: dict[str, Any], chat_id: str
    ) -> ProcessedLongTermMemory:
         """
         Create ProcessedLongTermMemory from dictionary with proper validation and defaults
@@ -533,7 +543,7 @@ CONVERSATION CONTEXT:
         )
 
     def should_filter_memory(
-        self, memory: ProcessedLongTermMemory, filters:
+        self, memory: ProcessedLongTermMemory, filters: dict[str, Any] | None = None
     ) -> bool:
         """
         Determine if memory should be filtered out
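Two smaller patterns recur in this file's signature changes: database detection falls back to `getattr(db_manager, "database_type", "sql")`, and optional parameters are spelled with PEP 604 unions and builtin generics (`str | None`, `list[str] | None`) defaulting to `None` rather than a mutable value. The snippet below is a generic, hypothetical Python illustration of why `None` is the safer default for list parameters; it is not MemoryAgent code.

# Hypothetical functions showing why list parameters default to None.
def remember_shared(item: str, memories: list[str] = []) -> list[str]:
    memories.append(item)  # the single default list is reused across calls
    return memories


def remember_isolated(item: str, memories: list[str] | None = None) -> list[str]:
    if memories is None:  # fresh list for each call unless one is passed in
        memories = []
    memories.append(item)
    return memories


print(remember_shared("a"), remember_shared("b"))      # ['a', 'b'] ['a', 'b']
print(remember_isolated("a"), remember_isolated("b"))  # ['a'] ['b']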
|