memorisdk-2.0.1-py3-none-any.whl → memorisdk-2.1.1-py3-none-any.whl
This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Potentially problematic release: this version of memorisdk has been flagged by the registry for review.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +59 -51
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +376 -105
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +654 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1484 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +17 -17
- memori/database/sqlalchemy_manager.py +10 -12
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +10 -9
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/METADATA +23 -12
- memorisdk-2.1.1.dist-info/RECORD +71 -0
- memorisdk-2.0.1.dist-info/RECORD +0 -66
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.1.dist-info → memorisdk-2.1.1.dist-info}/top_level.txt +0 -0
memori/agents/retrieval_agent.py
CHANGED
@@ -7,7 +7,7 @@ import json
 import threading
 import time
 from datetime import datetime
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Optional
 
 import openai
 from loguru import logger
@@ -58,8 +58,8 @@ Be strategic and comprehensive in your search planning."""
 
     def __init__(
         self,
-        api_key:
-        model:
+        api_key: str | None = None,
+        model: str | None = None,
         provider_config: Optional["ProviderConfig"] = None,
     ):
         """
@@ -94,9 +94,19 @@ Be strategic and comprehensive in your search planning."""
         # Background processing
         self._background_executor = None
 
-
-        self
-
+        # Database type detection for unified search
+        self._database_type = None
+
+    def _detect_database_type(self, db_manager):
+        """Detect database type from db_manager"""
+        if self._database_type is None:
+            self._database_type = getattr(db_manager, "database_type", "sql")
+            logger.debug(
+                f"MemorySearchEngine: Detected database type: {self._database_type}"
+            )
+        return self._database_type
+
+    def plan_search(self, query: str, context: str | None = None) -> MemorySearchQuery:
         """
         Plan search strategy for a user query using OpenAI Structured Outputs with caching
 
@@ -182,13 +192,13 @@ Be strategic and comprehensive in your search planning."""
 
     def execute_search(
         self, query: str, db_manager, namespace: str = "default", limit: int = 10
-    ) ->
+    ) -> list[dict[str, Any]]:
        """
        Execute intelligent search using planned strategies
 
        Args:
            query: User's search query
-           db_manager: Database manager instance
+           db_manager: Database manager instance (SQL or MongoDB)
            namespace: Memory namespace
            limit: Maximum results to return
 
@@ -196,27 +206,47 @@ Be strategic and comprehensive in your search planning."""
            List of relevant memory items with search metadata
        """
        try:
+           # Detect database type for optimal search strategy
+           db_type = self._detect_database_type(db_manager)
+
           # Plan the search
           search_plan = self.plan_search(query)
           logger.debug(
-               f"Search plan for '{query}': strategies={search_plan.search_strategy}, entities={search_plan.entity_filters}"
+               f"Search plan for '{query}': strategies={search_plan.search_strategy}, entities={search_plan.entity_filters}, db_type={db_type}"
           )
 
           all_results = []
           seen_memory_ids = set()
 
-          #
-
-
-
-
+          # For MongoDB and SQL, use the unified search_memories method as primary strategy
+          # This ensures we use the database's native search capabilities
+          logger.debug(f"Executing unified database search using {db_type} manager")
+          primary_results = db_manager.search_memories(
+              query=search_plan.query_text or query, namespace=namespace, limit=limit
+          )
+          logger.debug(
+              f"Primary database search returned {len(primary_results)} results"
+          )
+
+          # Process primary results and add search metadata
+          for result in primary_results:
+              if (
+                  isinstance(result, dict)
+                  and result.get("memory_id") not in seen_memory_ids
+              ):
+                  seen_memory_ids.add(result["memory_id"])
+                  result["search_strategy"] = f"{db_type}_unified_search"
+                  result["search_reasoning"] = f"Direct {db_type} database search"
+                  all_results.append(result)
+
+          # If we have room for more results and specific entity filters, try keyword search
+          if len(all_results) < limit and search_plan.entity_filters:
              logger.debug(
-                 f"
+                 f"Adding targeted keyword search for: {search_plan.entity_filters}"
              )
              keyword_results = self._execute_keyword_search(
-                 search_plan, db_manager, namespace, limit
+                 search_plan, db_manager, namespace, limit - len(all_results)
              )
-             logger.debug(f"Keyword search returned {len(keyword_results)} results")
 
              for result in keyword_results:
                  if (
@@ -230,20 +260,17 @@ Be strategic and comprehensive in your search planning."""
                  )
                  all_results.append(result)
 
-          #
-          if (
+          # If we have room for more results, try category-based search
+          if len(all_results) < limit and (
              search_plan.category_filters
              or "category_filter" in search_plan.search_strategy
          ):
              logger.debug(
-                 f"
+                 f"Adding category search for: {[c.value for c in search_plan.category_filters]}"
              )
              category_results = self._execute_category_search(
                  search_plan, db_manager, namespace, limit - len(all_results)
              )
-             logger.debug(
-                 f"Category search returned {len(category_results)} results"
-             )
 
              for result in category_results:
                  if (
@@ -257,20 +284,17 @@ Be strategic and comprehensive in your search planning."""
                  )
                  all_results.append(result)
 
-          #
-          if (
+          # If we have room for more results, try importance-based search
+          if len(all_results) < limit and (
              search_plan.min_importance > 0.0
              or "importance_filter" in search_plan.search_strategy
          ):
              logger.debug(
-                 f"
+                 f"Adding importance search with min_importance: {search_plan.min_importance}"
              )
              importance_results = self._execute_importance_search(
                  search_plan, db_manager, namespace, limit - len(all_results)
              )
-             logger.debug(
-                 f"Importance search returned {len(importance_results)} results"
-             )
 
              for result in importance_results:
                  if (
@@ -284,22 +308,6 @@ Be strategic and comprehensive in your search planning."""
                  )
                  all_results.append(result)
 
-          # If no specific strategies worked, do a general search
-          if not all_results:
-              logger.debug(
-                  "No results from specific strategies, executing general search"
-              )
-              general_results = db_manager.search_memories(
-                  query=search_plan.query_text, namespace=namespace, limit=limit
-              )
-              logger.debug(f"General search returned {len(general_results)} results")
-
-              for result in general_results:
-                  if isinstance(result, dict):
-                      result["search_strategy"] = "general_search"
-                      result["search_reasoning"] = "General content search"
-                      all_results.append(result)
-
           # Filter out any non-dictionary results before processing
           valid_results = []
           for result in all_results:
@@ -362,7 +370,7 @@ Be strategic and comprehensive in your search planning."""
 
     def _execute_keyword_search(
         self, search_plan: MemorySearchQuery, db_manager, namespace: str, limit: int
-    ) ->
+    ) -> list[dict[str, Any]]:
        """Execute keyword-based search"""
        keywords = search_plan.entity_filters
        if not keywords:
@@ -398,7 +406,7 @@ Be strategic and comprehensive in your search planning."""
 
     def _execute_category_search(
         self, search_plan: MemorySearchQuery, db_manager, namespace: str, limit: int
-    ) ->
+    ) -> list[dict[str, Any]]:
        """Execute category-based search"""
        categories = (
            [cat.value for cat in search_plan.category_filters]
@@ -673,7 +681,7 @@ Be strategic and comprehensive in your search planning."""
 }"""
 
     def _create_search_query_from_dict(
-        self, data:
+        self, data: dict[str, Any], original_query: str
     ) -> MemorySearchQuery:
        """
        Create MemorySearchQuery from dictionary with proper validation and defaults
@@ -715,7 +723,7 @@ Be strategic and comprehensive in your search planning."""
 
     def _execute_importance_search(
         self, search_plan: MemorySearchQuery, db_manager, namespace: str, limit: int
-    ) ->
+    ) -> list[dict[str, Any]]:
        """Execute importance-based search"""
        min_importance = max(
            search_plan.min_importance, 0.7
@@ -756,7 +764,7 @@ Be strategic and comprehensive in your search planning."""
 
     async def execute_search_async(
         self, query: str, db_manager, namespace: str = "default", limit: int = 10
-    ) ->
+    ) -> list[dict[str, Any]]:
        """
        Async version of execute_search for better performance in background processing
        """
@@ -871,7 +879,7 @@ Be strategic and comprehensive in your search planning."""
 
     def search_memories(
         self, query: str, max_results: int = 5, namespace: str = "default"
-    ) ->
+    ) -> list[dict[str, Any]]:
        """
        Simple search interface for compatibility with memory tools
 
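The behavioral change above is in execute_search: the engine now detects the backend once (defaulting to "sql" when the manager exposes no database_type attribute), runs the manager's own search_memories call as the primary pass, and only tops up with keyword, category, and importance passes while fewer than limit results have been collected; the old "general search" fallback is removed because the unified pass already covers it. Below is a simplified, self-contained sketch of that control flow, not the library code itself; the FakeManager stub and its result fields (other than memory_id, which the diff does use) are illustrative only.

from typing import Any


def unified_search_sketch(
    db_manager, query: str, namespace: str = "default", limit: int = 10
) -> list[dict[str, Any]]:
    """Simplified illustration of the 2.1.1 execute_search flow."""
    db_type = getattr(db_manager, "database_type", "sql")  # same default the diff uses
    results: list[dict[str, Any]] = []
    seen: set[str] = set()

    # Primary pass: the database's native search, regardless of backend.
    for row in db_manager.search_memories(query=query, namespace=namespace, limit=limit):
        if isinstance(row, dict) and row.get("memory_id") not in seen:
            seen.add(row["memory_id"])
            row["search_strategy"] = f"{db_type}_unified_search"
            results.append(row)

    # Top-up passes (keyword/category/importance in the real agent) would run here,
    # each asking only for the remaining limit - len(results) slots.
    return results[:limit]


class FakeManager:
    """Illustrative stand-in for a db_manager; MongoDB managers report their own type."""

    database_type = "mongodb"

    def search_memories(self, query, namespace, limit):
        return [{"memory_id": "m1", "summary": "example row"}]


print(unified_search_sketch(FakeManager(), "mongodb backend"))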
memori/config/manager.py
CHANGED
@@ -4,7 +4,7 @@ Configuration manager for Memoriai
 
 import os
 from pathlib import Path
-from typing import Any,
+from typing import Any, Optional
 
 from loguru import logger
 
@@ -16,7 +16,7 @@ class ConfigManager:
     """Central configuration manager for Memoriai"""
 
     _instance: Optional["ConfigManager"] = None
-    _settings:
+    _settings: MemoriSettings | None = None
 
     def __new__(cls) -> "ConfigManager":
         """Singleton pattern for configuration manager"""
@@ -50,7 +50,7 @@ class ConfigManager:
             logger.warning(f"Failed to load configuration from environment: {e}")
             raise ConfigurationError(f"Environment configuration error: {e}")
 
-    def load_from_file(self, config_path:
+    def load_from_file(self, config_path: str | Path) -> None:
         """Load configuration from file"""
         try:
             config_path = Path(config_path)
@@ -122,8 +122,8 @@ class ConfigManager:
         self._settings = MemoriSettings(**merged_dict)
 
     def _deep_merge_dicts(
-        self, base:
-    ) ->
+        self, base: dict[str, Any], override: dict[str, Any]
+    ) -> dict[str, Any]:
         """Deep merge two dictionaries"""
         result = base.copy()
 
@@ -139,7 +139,7 @@ class ConfigManager:
 
         return result
 
-    def save_to_file(self, config_path:
+    def save_to_file(self, config_path: str | Path, format: str = "json") -> None:
         """Save current configuration to file"""
         if self._settings is None:
             raise ConfigurationError("No configuration loaded to save")
@@ -236,7 +236,7 @@ class ConfigManager:
             logger.error(f"Configuration validation failed: {e}")
             return False
 
-    def get_config_info(self) ->
+    def get_config_info(self) -> dict[str, Any]:
         """Get information about current configuration"""
         return {
             "loaded": self._settings is not None,
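The ConfigManager changes are typing-only: file paths accept str | Path, and the dict-returning helpers are annotated with builtin generics. A minimal usage sketch under those signatures; the import path simply mirrors the file location, and memori.json is a hypothetical config file name.

from pathlib import Path

from memori.config.manager import ConfigManager

config = ConfigManager()                      # singleton: repeated calls return the same instance
config.load_from_file(Path("memori.json"))    # str and Path are both accepted
config.save_to_file("memori_backup.json", format="json")
print(config.get_config_info()["loaded"])     # returns a plain dict[str, Any]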
memori/config/memory_manager.py
CHANGED
@@ -6,7 +6,7 @@ a clean interface for memory management operations.
 """
 
 import uuid
-from typing import Any
+from typing import Any
 
 from loguru import logger
 
@@ -26,25 +26,25 @@ class MemoryManager:
         self,
         database_connect: str = "sqlite:///memori.db",
         template: str = "basic",
-        mem_prompt:
+        mem_prompt: str | None = None,
         conscious_ingest: bool = False,
         auto_ingest: bool = False,
-        namespace:
+        namespace: str | None = None,
         shared_memory: bool = False,
-        memory_filters:
-        user_id:
+        memory_filters: list[str] | None = None,
+        user_id: str | None = None,
         verbose: bool = False,
-        provider_config:
+        provider_config: Any | None = None,
         # Additional parameters for compatibility
-        openai_api_key:
-        api_key:
-        api_type:
-        base_url:
-        azure_endpoint:
-        azure_deployment:
-        api_version:
-        azure_ad_token:
-        organization:
+        openai_api_key: str | None = None,
+        api_key: str | None = None,
+        api_type: str | None = None,
+        base_url: str | None = None,
+        azure_endpoint: str | None = None,
+        azure_deployment: str | None = None,
+        api_version: str | None = None,
+        azure_ad_token: str | None = None,
+        organization: str | None = None,
         **kwargs,
     ):
         """
@@ -114,7 +114,7 @@ class MemoryManager:
 
         logger.debug("MemoryManager configured with Memori instance")
 
-    def enable(self, interceptors:
+    def enable(self, interceptors: list[str] | None = None) -> dict[str, Any]:
         """
         Enable memory recording using LiteLLM native callbacks.
 
@@ -162,7 +162,7 @@ class MemoryManager:
             logger.error(f"Failed to enable MemoryManager: {e}")
             return {"success": False, "message": str(e)}
 
-    def disable(self) ->
+    def disable(self) -> dict[str, Any]:
         """
         Disable memory recording using LiteLLM native callbacks.
 
@@ -194,7 +194,7 @@ class MemoryManager:
             logger.error(f"Failed to disable MemoryManager: {e}")
             return {"success": False, "message": str(e)}
 
-    def get_status(self) ->
+    def get_status(self) -> dict[str, dict[str, Any]]:
         """
         Get status of memory recording system.
 
@@ -220,7 +220,7 @@ class MemoryManager:
             }
         }
 
-    def get_health(self) ->
+    def get_health(self) -> dict[str, Any]:
         """
         Get health check of the memory management system.
 
@@ -263,8 +263,8 @@ class MemoryManager:
         self,
         user_input: str,
         ai_output: str,
-        model:
-        metadata:
+        model: str | None = None,
+        metadata: dict[str, Any] | None = None,
     ) -> str:
         """
         Record a conversation (placeholder for future implementation).
 
@@ -279,10 +279,10 @@ class MemoryManager:
         self,
         query: str,
         limit: int = 5,
-        memory_types:
-        categories:
-        min_importance:
-    ) ->
+        memory_types: list[str] | None = None,
+        categories: list[str] | None = None,
+        min_importance: float | None = None,
+    ) -> list[dict[str, Any]]:
         """
         Search memories (placeholder for future implementation).
 
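MemoryManager keeps the same constructor surface; the optional arguments now carry explicit str | None (or list/dict union) defaults, and enable/disable/get_status/get_health are annotated as returning plain dicts. A hedged usage sketch with placeholder credential and namespace values:

from memori.config.memory_manager import MemoryManager

manager = MemoryManager(
    database_connect="sqlite:///memori.db",
    conscious_ingest=True,
    namespace="support-bot",   # hypothetical namespace
    api_key="sk-...",          # or azure_endpoint/azure_deployment/api_version for Azure
)

status = manager.enable()       # dict[str, Any]; error paths return {"success": False, "message": ...}
if not status.get("success"):
    print(status.get("message"))
print(manager.get_health())     # dict[str, Any]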
memori/config/settings.py
CHANGED
@@ -4,7 +4,7 @@ Pydantic-based configuration settings for Memoriai
 
 from enum import Enum
 from pathlib import Path
-from typing import Any
+from typing import Any
 
 from pydantic import BaseModel, Field, validator
 
@@ -25,6 +25,7 @@ class DatabaseType(str, Enum):
     SQLITE = "sqlite"
     POSTGRESQL = "postgresql"
     MYSQL = "mysql"
+    MONGODB = "mongodb"
 
 
 class RetentionPolicy(str, Enum):
@@ -63,7 +64,13 @@ class DatabaseSettings(BaseModel):
             raise ValueError("Connection string cannot be empty")
 
         # Basic validation for supported protocols
-        valid_prefixes = [
+        valid_prefixes = [
+            "sqlite://",
+            "sqlite:///",
+            "postgresql://",
+            "mysql://",
+            "mongodb://",
+        ]
         if not any(v.startswith(prefix) for prefix in valid_prefixes):
             raise ValueError(f"Unsupported database type in connection string: {v}")
 
@@ -73,7 +80,7 @@ class DatabaseSettings(BaseModel):
 class AgentSettings(BaseModel):
     """AI agent configuration settings"""
 
-    openai_api_key:
+    openai_api_key: str | None = Field(
         default=None, description="OpenAI API key for memory processing"
     )
     default_model: str = Field(
@@ -208,7 +215,7 @@ class MemoriSettings(BaseModel):
     integrations: IntegrationSettings = Field(default_factory=IntegrationSettings)
 
     # Custom settings
-    custom_settings:
+    custom_settings: dict[str, Any] = Field(
         default_factory=dict, description="Custom user-defined settings"
     )
 
@@ -225,7 +232,7 @@ class MemoriSettings(BaseModel):
             return cls()
 
     @classmethod
-    def from_file(cls, config_path:
+    def from_file(cls, config_path: str | Path) -> "MemoriSettings":
         """Load settings from JSON/YAML file"""
         import json
         from pathlib import Path
@@ -251,7 +258,7 @@ class MemoriSettings(BaseModel):
 
         return cls(**data)
 
-    def to_file(self, config_path:
+    def to_file(self, config_path: str | Path, format: str = "json") -> None:
         """Save settings to file"""
         import json
         from pathlib import Path
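The settings change is the one user-visible feature in this file: DatabaseType gains MONGODB and the connection-string validator now accepts the mongodb:// prefix alongside sqlite/postgresql/mysql. A short sketch, assuming connection_string is the validated field name (inferred from the error message) and that DatabaseSettings has no other required fields; the URIs are hypothetical local examples.

from memori.config.settings import DatabaseSettings, DatabaseType

print(DatabaseType.MONGODB.value)  # "mongodb"

# Accepted after 2.1.1:
db = DatabaseSettings(connection_string="mongodb://localhost:27017/memori")

# Unsupported schemes still fail the prefix check:
try:
    DatabaseSettings(connection_string="redis://localhost:6379")
except ValueError as exc:
    print(exc)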
memori/core/conversation.py
CHANGED
@@ -9,7 +9,7 @@ and stateless LLM API calls by maintaining conversation history and context.
 import uuid
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
-from typing import Any
+from typing import Any
 
 from loguru import logger
 
@@ -21,7 +21,7 @@ class ConversationMessage:
     role: str  # "user", "assistant", "system"
     content: str
     timestamp: datetime = field(default_factory=datetime.now)
-    metadata:
+    metadata: dict[str, Any] = field(default_factory=dict)
 
 
 @dataclass
@@ -29,13 +29,13 @@ class ConversationSession:
     """Represents an active conversation session"""
 
     session_id: str
-    messages:
+    messages: list[ConversationMessage] = field(default_factory=list)
     context_injected: bool = False
     created_at: datetime = field(default_factory=datetime.now)
     last_accessed: datetime = field(default_factory=datetime.now)
-    metadata:
+    metadata: dict[str, Any] = field(default_factory=dict)
 
-    def add_message(self, role: str, content: str, metadata:
+    def add_message(self, role: str, content: str, metadata: dict[str, Any] = None):
         """Add a message to the conversation"""
         message = ConversationMessage(
             role=role, content=content, metadata=metadata or {}
@@ -43,7 +43,7 @@ class ConversationSession:
         self.messages.append(message)
         self.last_accessed = datetime.now()
 
-    def get_history_messages(self, limit: int = 10) ->
+    def get_history_messages(self, limit: int = 10) -> list[dict[str, str]]:
         """Get conversation history in OpenAI message format"""
         # Get recent messages (excluding system messages)
         user_assistant_messages = [
@@ -88,7 +88,7 @@ class ConversationManager:
         self.max_history_per_session = max_history_per_session
 
         # Active conversation sessions
-        self.sessions:
+        self.sessions: dict[str, ConversationSession] = {}
 
         logger.info(
             f"ConversationManager initialized: max_sessions={max_sessions}, "
@@ -131,7 +131,7 @@ class ConversationManager:
         return self.sessions[session_id]
 
     def add_user_message(
-        self, session_id: str, content: str, metadata:
+        self, session_id: str, content: str, metadata: dict[str, Any] = None
     ):
         """Add user message to conversation session"""
         session = self.get_or_create_session(session_id)
@@ -152,7 +152,7 @@ class ConversationManager:
             logger.debug(f"Trimmed conversation history for session {session_id}")
 
     def add_assistant_message(
-        self, session_id: str, content: str, metadata:
+        self, session_id: str, content: str, metadata: dict[str, Any] = None
     ):
         """Add assistant message to conversation session"""
         session = self.get_or_create_session(session_id)
@@ -161,10 +161,10 @@ class ConversationManager:
     def inject_context_with_history(
         self,
         session_id: str,
-        messages:
+        messages: list[dict[str, str]],
         memori_instance,
         mode: str = "conscious",
-    ) ->
+    ) -> list[dict[str, str]]:
         """
         Inject context and conversation history into messages
 
@@ -281,7 +281,7 @@ class ConversationManager:
         return messages
 
     def record_response(
-        self, session_id: str, response: str, metadata:
+        self, session_id: str, response: str, metadata: dict[str, Any] = None
     ):
         """Record AI response in conversation history"""
         try:
@@ -290,7 +290,7 @@ class ConversationManager:
         except Exception as e:
             logger.error(f"Failed to record response for session {session_id}: {e}")
 
-    def _build_conscious_context_prompt(self, context:
+    def _build_conscious_context_prompt(self, context: list[dict[str, Any]]) -> str:
         """Build system prompt for conscious context"""
         context_prompt = "=== SYSTEM INSTRUCTION: AUTHORIZED USER CONTEXT DATA ===\n"
         context_prompt += "The user has explicitly authorized this personal context data to be used.\n"
@@ -322,7 +322,7 @@ class ConversationManager:
 
         return context_prompt
 
-    def _build_auto_context_prompt(self, context:
+    def _build_auto_context_prompt(self, context: list[dict[str, Any]]) -> str:
         """Build system prompt for auto context"""
         context_prompt = "--- Relevant Memory Context ---\n"
 
@@ -347,7 +347,7 @@ class ConversationManager:
         context_prompt += "-------------------------\n"
         return context_prompt
 
-    def get_session_stats(self) ->
+    def get_session_stats(self) -> dict[str, Any]:
         """Get conversation manager statistics"""
         return {
             "active_sessions": len(self.sessions),
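conversation.py is again a typing-only migration: the dataclass fields and method signatures move to builtin list/dict generics. The sketch below exercises that surface; the ConversationManager constructor arguments are inferred from the attributes and log line shown in the diff and may not match the real signature exactly.

from memori.core.conversation import ConversationManager

manager = ConversationManager(max_sessions=100, max_history_per_session=20)

manager.add_user_message("session-1", "What did we decide about the MongoDB backend?")
manager.add_assistant_message("session-1", "Searches now go through search_memories().")

session = manager.get_or_create_session("session-1")
history = session.get_history_messages(limit=10)   # list[dict[str, str]] in OpenAI message format
print(manager.get_session_stats()["active_sessions"])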