memorisdk 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of memorisdk might be problematic.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +138 -63
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +438 -123
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +527 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1402 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +218 -66
- memori/database/sqlalchemy_manager.py +72 -25
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +104 -13
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/METADATA +36 -20
- memorisdk-2.1.0.dist-info/RECORD +71 -0
- memori/scripts/llm_text.py +0 -50
- memorisdk-2.0.0.dist-info/RECORD +0 -67
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/top_level.txt +0 -0
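The new files in the list above (mongodb_connector.py, mongodb_adapter.py, mongodb_manager.py, mongodb_schema_generator.py, mongodb_search_adapter.py) indicate that 2.1.0 adds MongoDB alongside the existing SQLite, MySQL, and PostgreSQL backends. A minimal usage sketch follows; it assumes the Memori constructor still accepts a database_connect connection string as in earlier releases and that a mongodb:// URI is routed to the new MongoDB manager — both are assumptions, since the constructor signature is not shown in this diff.

# Hypothetical usage sketch; parameter names are assumed, not taken from this diff.
from memori import Memori

memori = Memori(
    database_connect="mongodb://localhost:27017/memori",  # assumed: MongoDB URI handled by the new MongoDB manager
    conscious_ingest=True,
)
memori.enable()  # assumed: same enable() entry point as prior releases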
memori/database/sqlalchemy_manager.py
CHANGED

@@ -9,7 +9,7 @@ import ssl
 import uuid
 from datetime import datetime
 from pathlib import Path
-from typing import Any
+from typing import Any
 from urllib.parse import parse_qs, urlparse

 from loguru import logger

@@ -425,10 +425,31 @@ class SQLAlchemyDatabaseManager:
             logger.warning(f"PostgreSQL FTS setup failed: {e}")

     def _get_search_service(self) -> SearchService:
-        """Get search service instance with fresh session"""
-
-
-
+        """Get search service instance with fresh session and proper error handling"""
+        try:
+            if not self.SessionLocal:
+                logger.error("SessionLocal not available for search service")
+                return None
+
+            # Always create a new session to avoid stale connections
+            session = self.SessionLocal()
+            if not session:
+                logger.error("Failed to create database session")
+                return None
+
+            search_service = SearchService(session, self.database_type)
+            logger.debug(
+                f"Created new search service instance for database type: {self.database_type}"
+            )
+            return search_service
+
+        except Exception as e:
+            logger.error(f"Failed to create search service: {e}")
+            logger.debug(
+                f"Search service creation error: {type(e).__name__}: {str(e)}",
+                exc_info=True,
+            )
+            return None

     def store_chat_history(
         self,
@@ -440,7 +461,7 @@ class SQLAlchemyDatabaseManager:
         session_id: str,
         namespace: str = "default",
         tokens_used: int = 0,
-        metadata:
+        metadata: dict[str, Any] | None = None,
     ):
         """Store chat history"""
         with self.SessionLocal() as session:
@@ -467,9 +488,9 @@ class SQLAlchemyDatabaseManager:
     def get_chat_history(
         self,
         namespace: str = "default",
-        session_id:
+        session_id: str | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Get chat history with optional session filtering"""
         with self.SessionLocal() as session:
             try:
@@ -560,28 +581,56 @@ class SQLAlchemyDatabaseManager:
         self,
         query: str,
         namespace: str = "default",
-        category_filter:
+        category_filter: list[str] | None = None,
         limit: int = 10,
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Search memories using the cross-database search service"""
+        search_service = None
         try:
+            logger.debug(
+                f"Starting memory search for query '{query}' in namespace '{namespace}' with category_filter={category_filter}"
+            )
            search_service = self._get_search_service()
-
-
-
+
+            if not search_service:
+                logger.error("Failed to create search service instance")
+                return []
+
+            results = search_service.search_memories(
+                query, namespace, category_filter, limit
+            )
+            logger.debug(f"Search for '{query}' returned {len(results)} results")
+
+            # Validate results structure
+            if not isinstance(results, list):
+                logger.warning(
+                    f"Search service returned unexpected type: {type(results)}, converting to list"
                )
-
-
-
-            # Ensure session is properly closed
-            search_service.session.close()
+                results = list(results) if results else []
+
+            return results

         except Exception as e:
-            logger.error(
+            logger.error(
+                f"Memory search failed for query '{query}' in namespace '{namespace}': {e}"
+            )
+            logger.debug(
+                f"Search error details: {type(e).__name__}: {str(e)}", exc_info=True
+            )
             # Return empty list instead of raising exception to avoid breaking auto_ingest
             return []

-
+        finally:
+            # Ensure session is properly closed, even if an exception occurred
+            if search_service and hasattr(search_service, "session"):
+                try:
+                    if search_service.session:
+                        logger.debug("Closing search service session")
+                        search_service.session.close()
+                except Exception as session_e:
+                    logger.warning(f"Error closing search service session: {session_e}")
+
+    def get_memory_stats(self, namespace: str = "default") -> dict[str, Any]:
         """Get comprehensive memory statistics"""
         with self.SessionLocal() as session:
             try:
@@ -678,9 +727,7 @@ class SQLAlchemyDatabaseManager:
             except SQLAlchemyError as e:
                 raise DatabaseError(f"Failed to get memory stats: {e}")

-    def clear_memory(
-        self, namespace: str = "default", memory_type: Optional[str] = None
-    ):
+    def clear_memory(self, namespace: str = "default", memory_type: str | None = None):
         """Clear memory data"""
         with self.SessionLocal() as session:
             try:
@@ -713,7 +760,7 @@ class SQLAlchemyDatabaseManager:
                 session.rollback()
                 raise DatabaseError(f"Failed to clear memory: {e}")

-    def execute_with_translation(self, query: str, parameters:
+    def execute_with_translation(self, query: str, parameters: dict[str, Any] = None):
         """
         Execute a query with automatic parameter translation for cross-database compatibility.

@@ -816,7 +863,7 @@ class SQLAlchemyDatabaseManager:
         if hasattr(self, "engine"):
            self.engine.dispose()

-    def get_database_info(self) ->
+    def get_database_info(self) -> dict[str, Any]:
         """Get database information and capabilities"""
         base_info = {
             "database_type": self.database_type,
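The reworked search_memories above moves session cleanup out of the happy path and into a finally block, so the session opened by _get_search_service is closed even when the search raises, and errors are swallowed so auto-ingest keeps working. The following is a minimal standalone sketch of that pattern only; the SearchService class here is a stand-in, not the one from memori.database.search_service.

# Illustrative sketch of the try/except/finally session-handling pattern used above.
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")
SessionLocal = sessionmaker(bind=engine)


class SearchService:
    """Stand-in for the real cross-database search service."""

    def __init__(self, session):
        self.session = session

    def search_memories(self, query):
        # Placeholder query; the real service dispatches to per-database FTS adapters.
        return self.session.execute(text("SELECT :q AS query"), {"q": query}).mappings().all()


def search_memories(query):
    search_service = None
    try:
        search_service = SearchService(SessionLocal())  # fresh session per search
        results = search_service.search_memories(query)
        return list(results) if results else []
    except Exception:
        # Swallow errors so callers (e.g. auto-ingest paths) keep working.
        return []
    finally:
        # Close the session even if the search raised.
        if search_service and search_service.session:
            search_service.session.close()


print(search_memories("hello"))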
memori/integrations/__init__.py
CHANGED

memori/integrations/anthropic_integration.py
CHANGED

@@ -14,8 +14,6 @@ Usage:
     response = client.messages.create(...)
 """

-from typing import Optional
-
 from loguru import logger


@@ -25,7 +23,7 @@ class MemoriAnthropic:
     without monkey-patching. Drop-in replacement for Anthropic client.
     """

-    def __init__(self, memori_instance, api_key:
+    def __init__(self, memori_instance, api_key: str | None = None, **kwargs):
         """
         Initialize MemoriAnthropic wrapper

memori/integrations/litellm_integration.py
CHANGED

@@ -16,8 +16,6 @@ Usage:
     response = completion(model="gpt-4o", messages=[...])
 """

-from typing import Optional
-
 from loguru import logger

 try:
@@ -186,6 +184,18 @@ class LiteLLMCallbackManager:
             if hasattr(choice, "message") and hasattr(choice.message, "content"):
                 ai_output = choice.message.content or ""

+            # Debug logging to help diagnose recording issues
+            if user_input:
+                logger.debug(
+                    f"LiteLLM callback: Recording conversation '{user_input[:50]}...'"
+                    if len(user_input) > 50
+                    else f"LiteLLM callback: Recording conversation '{user_input}'"
+                )
+            else:
+                logger.warning(
+                    f"LiteLLM callback: No user input found in messages: {[msg.get('role') for msg in messages]}"
+                )
+
             # Extract model
             model = kwargs.get("model", "litellm-unknown")

@@ -193,8 +203,8 @@ class LiteLLMCallbackManager:
             duration_ms = 0
             if start_time is not None and end_time is not None:
                 try:
-                    if isinstance(start_time,
-                        end_time,
+                    if isinstance(start_time, int | float) and isinstance(
+                        end_time, int | float
                     ):
                         duration_ms = (end_time - start_time) * 1000
                 except Exception:
@@ -233,11 +243,18 @@ class LiteLLMCallbackManager:
                     metadata=metadata,
                 )
                 logger.debug(
-                    f"LiteLLM callback:
+                    f"LiteLLM callback: Successfully recorded conversation for model {model}"
+                )
+            else:
+                logger.warning(
+                    f"LiteLLM callback: Skipping record - user_input='{bool(user_input)}' ai_output='{bool(ai_output)}'"
                 )

         except Exception as e:
             logger.error(f"LiteLLM callback failed: {e}")
+            import traceback
+
+            logger.error(f"LiteLLM callback error details: {traceback.format_exc()}")

     def _setup_context_injection(self):
         """Set up context injection by wrapping LiteLLM's completion function."""
@@ -325,7 +342,7 @@ class LiteLLMCallbackManager:
         return self._callback_registered


-def setup_litellm_callbacks(memori_instance) ->
+def setup_litellm_callbacks(memori_instance) -> LiteLLMCallbackManager | None:
    """
    Convenience function to set up LiteLLM callbacks for a Memori instance.

memori/integrations/openai_integration.py
CHANGED

@@ -250,20 +250,28 @@ class OpenAIInterceptor:
         for message in messages:
             content = message.get("content", "")
             if isinstance(content, str):
-                # Check for internal agent processing patterns
+                # Check for specific internal agent processing patterns
+                # Made patterns more specific to avoid false positives
                 internal_patterns = [
                     "Process this conversation for enhanced memory storage:",
-                    "User query:",
                     "Enhanced memory processing:",
                     "Memory classification:",
                     "Search for relevant memories:",
                     "Analyze conversation for:",
                     "Extract entities from:",
                     "Categorize the following conversation:",
+                    # More specific patterns to avoid blocking legitimate conversations
+                    "INTERNAL_MEMORY_PROCESSING:",
+                    "AGENT_PROCESSING_MODE:",
+                    "MEMORY_AGENT_TASK:",
                 ]

+                # Only flag as internal if it matches specific patterns AND has no user role
                 for pattern in internal_patterns:
                     if pattern in content:
+                        # Double-check: if this is a user message, don't filter it
+                        if message.get("role") == "user":
+                            continue
                         return True

         return False
@@ -281,9 +289,29 @@ class OpenAIInterceptor:
             json_data = getattr(options, "json_data", None) or {}

             if "messages" in json_data:
+                # Check if this is an internal agent processing call
+                is_internal = cls._is_internal_agent_call(json_data)
+
+                # Debug logging to help diagnose recording issues
+                user_messages = [
+                    msg
+                    for msg in json_data.get("messages", [])
+                    if msg.get("role") == "user"
+                ]
+                if user_messages and not is_internal:
+                    user_content = user_messages[-1].get("content", "")[:50]
+                    logger.debug(
+                        f"Recording conversation: '{user_content}...' (internal_check={is_internal})"
+                    )
+                elif is_internal:
+                    logger.debug(
+                        "Skipping internal agent call (detected pattern match)"
+                    )
+
                 # Skip internal agent processing calls
-                if
+                if is_internal:
                     continue
+
                 # Chat completions
                 memori_instance._record_openai_conversation(json_data, response)
             elif "prompt" in json_data:
memori/tools/memory_tool.py
CHANGED

@@ -3,7 +3,8 @@ Memory Tool - A tool/function for manual integration with any LLM library
 """

 import json
-from
+from collections.abc import Callable
+from typing import Any

 from loguru import logger

@@ -31,7 +32,7 @@ class MemoryTool:
         self.tool_name = "memori_memory"
         self.description = "Access and manage AI conversation memory"

-    def get_tool_schema(self) ->
+    def get_tool_schema(self) -> dict[str, Any]:
         """
         Get the tool schema for function calling in LLMs

@@ -73,11 +74,24 @@ class MemoryTool:

             # Use retrieval agent for intelligent search
             try:
+                logger.debug(
+                    f"Attempting to import MemorySearchEngine for query: '{query}'"
+                )
                 from ..agents.retrieval_agent import MemorySearchEngine

+                logger.debug("Successfully imported MemorySearchEngine")
+
                 # Create search engine if not already initialized
                 if not hasattr(self, "_search_engine"):
-
+                    if (
+                        hasattr(self.memori, "provider_config")
+                        and self.memori.provider_config
+                    ):
+                        self._search_engine = MemorySearchEngine(
+                            provider_config=self.memori.provider_config
+                        )
+                    else:
+                        self._search_engine = MemorySearchEngine()

                 # Execute search using retrieval agent
                 results = self._search_engine.execute_search(
@@ -88,18 +102,62 @@ class MemoryTool:
                 )

                 if not results:
+                    logger.debug(
+                        f"Primary search returned no results for query: '{query}', trying fallback search"
+                    )
+                    # Try fallback direct database search
+                    try:
+                        fallback_results = self.memori.db_manager.search_memories(
+                            query=query, namespace=self.memori.namespace, limit=5
+                        )
+
+                        if fallback_results:
+                            logger.debug(
+                                f"Fallback search found {len(fallback_results)} results"
+                            )
+                            results = fallback_results
+                        else:
+                            logger.warning(
+                                f"Both primary and fallback search returned no results for query: '{query}'"
+                            )
+                            return f"No relevant memories found for query: '{query}'"
+
+                    except Exception as fallback_e:
+                        logger.error(
+                            f"Fallback search also failed for query '{query}': {fallback_e}"
+                        )
+                        return f"No relevant memories found for query: '{query}'"
+
+                # Ensure we have results to format
+                if not results:
+                    logger.warning(
+                        f"No results available for formatting for query: '{query}'"
+                    )
                     return f"No relevant memories found for query: '{query}'"

                 # Format results as a readable string
+                logger.debug(
+                    f"Starting to format {len(results)} results for query: '{query}'"
+                )
                 formatted_output = f"🔍 Memory Search Results for: '{query}'\n\n"

                 for i, result in enumerate(results, 1):
                     try:
+                        logger.debug(
+                            f"Formatting result {i}: type={type(result)}, keys={list(result.keys()) if isinstance(result, dict) else 'not-dict'}"
+                        )
+
                         # Try to parse processed data for better formatting
                         if "processed_data" in result:
                             import json

-
+                            if isinstance(result["processed_data"], dict):
+                                processed_data = result["processed_data"]
+                            elif isinstance(result["processed_data"], str):
+                                processed_data = json.loads(result["processed_data"])
+                            else:
+                                raise ValueError("Error, wrong 'processed_data' format")
+
                             summary = processed_data.get("summary", "")
                             category = processed_data.get("category", {}).get(
                                 "primary_category", ""
@@ -124,37 +182,66 @@ class MemoryTool:

                         formatted_output += "\n"

-                    except Exception:
+                    except Exception as format_e:
+                        logger.warning(f"Error formatting result {i}: {format_e}")
                         # Fallback formatting
                         content = result.get(
                             "searchable_content", "Memory content available"
                         )[:100]
                         formatted_output += f"{i}. {content}...\n\n"

+                logger.debug(
+                    f"Successfully formatted results, output length: {len(formatted_output)}"
+                )
                 return formatted_output.strip()

-            except ImportError:
+            except ImportError as import_e:
+                logger.warning(
+                    f"Failed to import MemorySearchEngine for query '{query}': {import_e}"
+                )
                 # Fallback to original search methods if retrieval agent is not available
+                logger.debug(
+                    f"Using ImportError fallback search methods for query: '{query}'"
+                )
+
                 # Try different search strategies based on query content
                 if any(word in query.lower() for word in ["name", "who am i", "about me"]):
+                    logger.debug(
+                        f"Trying essential conversations for personal query: '{query}'"
+                    )
                     # Personal information query - try essential conversations first
                     essential_result = self._get_essential_conversations()
                     if essential_result.get("count", 0) > 0:
+                        logger.debug(
+                            f"Essential conversations found {essential_result.get('count', 0)} results"
+                        )
                         return self._format_dict_to_string(essential_result)

                 # General search
+                logger.debug(f"Trying general search for query: '{query}'")
                 search_result = self._search_memories(query=query, limit=10)
+                logger.debug(
+                    f"General search returned results_count: {search_result.get('results_count', 0)}"
+                )
                 if search_result.get("results_count", 0) > 0:
                     return self._format_dict_to_string(search_result)

                 # Fallback to context retrieval
+                logger.debug(f"Trying context retrieval fallback for query: '{query}'")
                 context_result = self._retrieve_context(query=query, limit=5)
+                logger.debug(
+                    f"Context retrieval returned context_count: {context_result.get('context_count', 0)}"
+                )
                 return self._format_dict_to_string(context_result)

         except Exception as e:
+            logger.error(
+                f"Unexpected error in memory tool execute for query '{query}': {e}",
+                exc_info=True,
+            )
             return f"Error searching memories: {str(e)}"

-    def _format_dict_to_string(self, result_dict:
+    def _format_dict_to_string(self, result_dict: dict[str, Any]) -> str:
         """Helper method to format dictionary results to readable strings"""
         if result_dict.get("error"):
             return f"Error: {result_dict['error']}"
@@ -201,7 +288,7 @@ class MemoryTool:
         message = result_dict.get("message", "Memory search completed")
         return message

-    def _record_conversation(self, **kwargs) ->
+    def _record_conversation(self, **kwargs) -> dict[str, Any]:
         """Record a conversation"""
         try:
             user_input = kwargs.get("user_input", "")
@@ -230,7 +317,7 @@ class MemoryTool:
             logger.error(f"Failed to record conversation: {e}")
             return {"error": f"Failed to record conversation: {str(e)}"}

-    def _retrieve_context(self, **kwargs) ->
+    def _retrieve_context(self, **kwargs) -> dict[str, Any]:
         """Retrieve relevant context for a query"""
         try:
             query = kwargs.get("query", "")
@@ -266,7 +353,7 @@ class MemoryTool:
             logger.error(f"Failed to retrieve context: {e}")
             return {"error": f"Failed to retrieve context: {str(e)}"}

-    def _search_memories(self, **kwargs) ->
+    def _search_memories(self, **kwargs) -> dict[str, Any]:
         """Search memories by content"""
         try:
             query = kwargs.get("query", "")
@@ -291,8 +378,10 @@ class MemoryTool:
             logger.error(f"Failed to search memories: {e}")
             return {"error": f"Failed to search memories: {str(e)}"}

-    def _get_stats(self, **kwargs) ->
+    def _get_stats(self, **kwargs) -> dict[str, Any]:
         """Get memory and integration statistics"""
+        # kwargs can be used for future filtering options
+        _ = kwargs  # Mark as intentionally unused
         try:
             memory_stats = self.memori.get_memory_stats()
             integration_stats = self.memori.get_integration_stats()
@@ -310,7 +399,7 @@ class MemoryTool:
             logger.error(f"Failed to get stats: {e}")
             return {"error": f"Failed to get stats: {str(e)}"}

-    def _get_essential_conversations(self, **kwargs) ->
+    def _get_essential_conversations(self, **kwargs) -> dict[str, Any]:
         """Get essential conversations from short-term memory"""
         try:
             limit = kwargs.get("limit", 10)
@@ -346,8 +435,10 @@ class MemoryTool:
             logger.error(f"Failed to get essential conversations: {e}")
             return {"error": f"Failed to get essential conversations: {str(e)}"}

-    def _trigger_analysis(self, **kwargs) ->
+    def _trigger_analysis(self, **kwargs) -> dict[str, Any]:
         """Trigger conscious agent analysis"""
+        # kwargs can be used for future analysis options
+        _ = kwargs  # Mark as intentionally unused
         try:
             if hasattr(self.memori, "trigger_conscious_analysis"):
                 self.memori.trigger_conscious_analysis()