memorisdk-2.0.0-py3-none-any.whl → memorisdk-2.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of memorisdk might be problematic.
- memori/__init__.py +3 -3
- memori/agents/conscious_agent.py +289 -77
- memori/agents/memory_agent.py +19 -9
- memori/agents/retrieval_agent.py +138 -63
- memori/config/manager.py +7 -7
- memori/config/memory_manager.py +25 -25
- memori/config/settings.py +13 -6
- memori/core/conversation.py +15 -15
- memori/core/database.py +14 -13
- memori/core/memory.py +438 -123
- memori/core/providers.py +25 -25
- memori/database/__init__.py +11 -0
- memori/database/adapters/__init__.py +11 -0
- memori/database/adapters/mongodb_adapter.py +739 -0
- memori/database/adapters/mysql_adapter.py +8 -8
- memori/database/adapters/postgresql_adapter.py +6 -6
- memori/database/adapters/sqlite_adapter.py +6 -6
- memori/database/auto_creator.py +8 -9
- memori/database/connection_utils.py +5 -5
- memori/database/connectors/__init__.py +11 -0
- memori/database/connectors/base_connector.py +18 -19
- memori/database/connectors/mongodb_connector.py +527 -0
- memori/database/connectors/mysql_connector.py +13 -15
- memori/database/connectors/postgres_connector.py +12 -12
- memori/database/connectors/sqlite_connector.py +11 -11
- memori/database/models.py +2 -2
- memori/database/mongodb_manager.py +1402 -0
- memori/database/queries/base_queries.py +3 -4
- memori/database/queries/chat_queries.py +3 -5
- memori/database/queries/entity_queries.py +3 -5
- memori/database/queries/memory_queries.py +3 -5
- memori/database/query_translator.py +11 -11
- memori/database/schema_generators/__init__.py +11 -0
- memori/database/schema_generators/mongodb_schema_generator.py +666 -0
- memori/database/schema_generators/mysql_schema_generator.py +2 -4
- memori/database/search/__init__.py +11 -0
- memori/database/search/mongodb_search_adapter.py +653 -0
- memori/database/search/mysql_search_adapter.py +8 -8
- memori/database/search/sqlite_search_adapter.py +6 -6
- memori/database/search_service.py +218 -66
- memori/database/sqlalchemy_manager.py +72 -25
- memori/integrations/__init__.py +1 -1
- memori/integrations/anthropic_integration.py +1 -3
- memori/integrations/litellm_integration.py +23 -6
- memori/integrations/openai_integration.py +31 -3
- memori/tools/memory_tool.py +104 -13
- memori/utils/exceptions.py +58 -58
- memori/utils/helpers.py +11 -12
- memori/utils/input_validator.py +10 -12
- memori/utils/logging.py +4 -4
- memori/utils/pydantic_models.py +57 -57
- memori/utils/query_builder.py +20 -20
- memori/utils/security_audit.py +28 -28
- memori/utils/security_integration.py +9 -9
- memori/utils/transaction_manager.py +20 -19
- memori/utils/validators.py +6 -6
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/METADATA +36 -20
- memorisdk-2.1.0.dist-info/RECORD +71 -0
- memori/scripts/llm_text.py +0 -50
- memorisdk-2.0.0.dist-info/RECORD +0 -67
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/WHEEL +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/licenses/LICENSE +0 -0
- {memorisdk-2.0.0.dist-info → memorisdk-2.1.0.dist-info}/top_level.txt +0 -0
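The headline changes in this release are a MongoDB backend (new adapter, connector, manager, schema generator, and search adapter modules, several thousand added lines) and a sweep of the codebase to PEP 604 type hints. The sketch below is a minimal, hypothetical usage example pieced together from the MemoryManager signature and the connection-string validation shown in the diffs further down; the import path, standalone construction, and the MongoDB URI are assumptions, not confirmed by this diff alone.

# Hypothetical sketch: pointing MemoryManager at a MongoDB backend in 2.1.0.
# Grounded only in the __init__ signature and the "mongodb://" prefix added to
# DatabaseSettings validation below; treat the exact entry point as an assumption.
from memori.config.memory_manager import MemoryManager

manager = MemoryManager(
    database_connect="mongodb://localhost:27017/memori",  # now passes prefix validation
    namespace="support-bot",
    conscious_ingest=True,
    verbose=True,
)

result = manager.enable()  # per the docstrings below, wires up LiteLLM native callbacks
print(result)              # failure paths return {"success": False, "message": ...} per the diff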
memori/agents/retrieval_agent.py
CHANGED

@@ -7,7 +7,7 @@ import json
 import threading
 import time
 from datetime import datetime
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Optional
 
 import openai
 from loguru import logger
@@ -58,8 +58,8 @@ Be strategic and comprehensive in your search planning."""
 
     def __init__(
         self,
-        api_key:
-        model:
+        api_key: str | None = None,
+        model: str | None = None,
         provider_config: Optional["ProviderConfig"] = None,
     ):
         """
@@ -94,9 +94,19 @@ Be strategic and comprehensive in your search planning."""
         # Background processing
         self._background_executor = None
 
-
-        self
-
+        # Database type detection for unified search
+        self._database_type = None
+
+    def _detect_database_type(self, db_manager):
+        """Detect database type from db_manager"""
+        if self._database_type is None:
+            self._database_type = getattr(db_manager, "database_type", "sql")
+            logger.debug(
+                f"MemorySearchEngine: Detected database type: {self._database_type}"
+            )
+        return self._database_type
+
+    def plan_search(self, query: str, context: str | None = None) -> MemorySearchQuery:
         """
         Plan search strategy for a user query using OpenAI Structured Outputs with caching
 
@@ -182,13 +192,13 @@ Be strategic and comprehensive in your search planning."""
 
     def execute_search(
         self, query: str, db_manager, namespace: str = "default", limit: int = 10
-    ) ->
+    ) -> list[dict[str, Any]]:
         """
         Execute intelligent search using planned strategies
 
         Args:
             query: User's search query
-            db_manager: Database manager instance
+            db_manager: Database manager instance (SQL or MongoDB)
             namespace: Memory namespace
             limit: Maximum results to return
 
@@ -196,27 +206,47 @@ Be strategic and comprehensive in your search planning."""
             List of relevant memory items with search metadata
         """
         try:
+            # Detect database type for optimal search strategy
+            db_type = self._detect_database_type(db_manager)
+
             # Plan the search
             search_plan = self.plan_search(query)
             logger.debug(
-                f"Search plan for '{query}': strategies={search_plan.search_strategy}, entities={search_plan.entity_filters}"
+                f"Search plan for '{query}': strategies={search_plan.search_strategy}, entities={search_plan.entity_filters}, db_type={db_type}"
             )
 
             all_results = []
             seen_memory_ids = set()
 
-            #
-
-
-
-
+            # For MongoDB and SQL, use the unified search_memories method as primary strategy
+            # This ensures we use the database's native search capabilities
+            logger.debug(f"Executing unified database search using {db_type} manager")
+            primary_results = db_manager.search_memories(
+                query=search_plan.query_text or query, namespace=namespace, limit=limit
+            )
+            logger.debug(
+                f"Primary database search returned {len(primary_results)} results"
+            )
+
+            # Process primary results and add search metadata
+            for result in primary_results:
+                if (
+                    isinstance(result, dict)
+                    and result.get("memory_id") not in seen_memory_ids
+                ):
+                    seen_memory_ids.add(result["memory_id"])
+                    result["search_strategy"] = f"{db_type}_unified_search"
+                    result["search_reasoning"] = f"Direct {db_type} database search"
+                    all_results.append(result)
+
+            # If we have room for more results and specific entity filters, try keyword search
+            if len(all_results) < limit and search_plan.entity_filters:
                 logger.debug(
-                    f"
+                    f"Adding targeted keyword search for: {search_plan.entity_filters}"
                 )
                 keyword_results = self._execute_keyword_search(
-                    search_plan, db_manager, namespace, limit
+                    search_plan, db_manager, namespace, limit - len(all_results)
                 )
-                logger.debug(f"Keyword search returned {len(keyword_results)} results")
 
                 for result in keyword_results:
                     if (
@@ -230,20 +260,17 @@ Be strategic and comprehensive in your search planning."""
                         )
                         all_results.append(result)
 
-            #
-            if (
+            # If we have room for more results, try category-based search
+            if len(all_results) < limit and (
                 search_plan.category_filters
                 or "category_filter" in search_plan.search_strategy
             ):
                 logger.debug(
-                    f"
+                    f"Adding category search for: {[c.value for c in search_plan.category_filters]}"
                 )
                 category_results = self._execute_category_search(
                     search_plan, db_manager, namespace, limit - len(all_results)
                 )
-                logger.debug(
-                    f"Category search returned {len(category_results)} results"
-                )
 
                 for result in category_results:
                     if (
@@ -257,20 +284,17 @@ Be strategic and comprehensive in your search planning."""
                         )
                         all_results.append(result)
 
-            #
-            if (
+            # If we have room for more results, try importance-based search
+            if len(all_results) < limit and (
                 search_plan.min_importance > 0.0
                 or "importance_filter" in search_plan.search_strategy
             ):
                 logger.debug(
-                    f"
+                    f"Adding importance search with min_importance: {search_plan.min_importance}"
                 )
                 importance_results = self._execute_importance_search(
                     search_plan, db_manager, namespace, limit - len(all_results)
                 )
-                logger.debug(
-                    f"Importance search returned {len(importance_results)} results"
-                )
 
                 for result in importance_results:
                     if (
@@ -284,22 +308,6 @@ Be strategic and comprehensive in your search planning."""
                         )
                         all_results.append(result)
 
-            # If no specific strategies worked, do a general search
-            if not all_results:
-                logger.debug(
-                    "No results from specific strategies, executing general search"
-                )
-                general_results = db_manager.search_memories(
-                    query=search_plan.query_text, namespace=namespace, limit=limit
-                )
-                logger.debug(f"General search returned {len(general_results)} results")
-
-                for result in general_results:
-                    if isinstance(result, dict):
-                        result["search_strategy"] = "general_search"
-                        result["search_reasoning"] = "General content search"
-                        all_results.append(result)
-
             # Filter out any non-dictionary results before processing
             valid_results = []
             for result in all_results:
@@ -362,7 +370,7 @@ Be strategic and comprehensive in your search planning."""
 
     def _execute_keyword_search(
         self, search_plan: MemorySearchQuery, db_manager, namespace: str, limit: int
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute keyword-based search"""
         keywords = search_plan.entity_filters
         if not keywords:
@@ -398,7 +406,7 @@ Be strategic and comprehensive in your search planning."""
 
     def _execute_category_search(
         self, search_plan: MemorySearchQuery, db_manager, namespace: str, limit: int
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute category-based search"""
         categories = (
             [cat.value for cat in search_plan.category_filters]
@@ -411,32 +419,96 @@ Be strategic and comprehensive in your search planning."""
 
         # This would need to be implemented in the database manager
         # For now, get all memories and filter by category
+        logger.debug(
+            f"Searching memories by categories: {categories} in namespace: {namespace}"
+        )
         all_results = db_manager.search_memories(
             query="", namespace=namespace, limit=limit * 3
         )
 
+        logger.debug(
+            f"Retrieved {len(all_results)} total results for category filtering"
+        )
+
         filtered_results = []
-        for result in all_results:
+        for i, result in enumerate(all_results):
+            logger.debug(f"Processing result {i+1}/{len(all_results)}: {type(result)}")
+
             # Extract category from processed_data if it's stored as JSON
             try:
-
+                memory_category = None
+
+                # Check processed_data field first
+                if "processed_data" in result and result["processed_data"]:
                     processed_data = result["processed_data"]
+                    logger.debug(
+                        f"Found processed_data: {type(processed_data)} - {str(processed_data)[:100]}..."
+                    )
+
                     # Handle both dict and JSON string formats
                     if isinstance(processed_data, str):
-
-
-
+                        try:
+                            processed_data = json.loads(processed_data)
+                        except json.JSONDecodeError as je:
+                            logger.debug(f"JSON decode error for processed_data: {je}")
+                            continue
+
+                    if isinstance(processed_data, dict):
+                        # Try multiple possible category locations
+                        category_paths = [
+                            ["category", "primary_category"],
+                            ["category"],
+                            ["primary_category"],
+                            ["metadata", "category"],
+                            ["classification", "category"],
+                        ]
+
+                        for path in category_paths:
+                            temp_data = processed_data
+                            try:
+                                for key in path:
+                                    temp_data = temp_data.get(key, {})
+                                if isinstance(temp_data, str) and temp_data:
+                                    memory_category = temp_data
+                                    logger.debug(
+                                        f"Found category via path {path}: {memory_category}"
+                                    )
+                                    break
+                            except (AttributeError, TypeError):
+                                continue
+                    else:
+                        logger.debug(
+                            f"processed_data is not a dict after parsing: {type(processed_data)}"
+                        )
+                        continue
 
-
-
+                # Fallback: check direct category field
+                if not memory_category and "category" in result and result["category"]:
+                    memory_category = result["category"]
+                    logger.debug(f"Found category via direct field: {memory_category}")
+
+                # Check if the found category matches any of our target categories
+                if memory_category:
+                    logger.debug(
+                        f"Comparing memory category '{memory_category}' against target categories {categories}"
                     )
                     if memory_category in categories:
                         filtered_results.append(result)
-
-
-
+                        logger.debug(f"✓ Category match found: {memory_category}")
+                    else:
+                        logger.debug(
+                            f"✗ Category mismatch: {memory_category} not in {categories}"
                         )
+                else:
+                    logger.debug("No category found in result")
+
+            except Exception as e:
+                logger.debug(f"Error processing result {i+1}: {e}")
                 continue
 
+        logger.debug(
+            f"Category filtering complete: {len(filtered_results)} results match categories {categories}"
+        )
         return filtered_results[:limit]
 
     def _detect_structured_output_support(self) -> bool:
@@ -609,7 +681,7 @@ Be strategic and comprehensive in your search planning."""
         }"""
 
     def _create_search_query_from_dict(
-        self, data:
+        self, data: dict[str, Any], original_query: str
    ) -> MemorySearchQuery:
         """
         Create MemorySearchQuery from dictionary with proper validation and defaults
@@ -651,7 +723,7 @@ Be strategic and comprehensive in your search planning."""
 
     def _execute_importance_search(
         self, search_plan: MemorySearchQuery, db_manager, namespace: str, limit: int
-    ) ->
+    ) -> list[dict[str, Any]]:
         """Execute importance-based search"""
         min_importance = max(
             search_plan.min_importance, 0.7
@@ -692,7 +764,7 @@ Be strategic and comprehensive in your search planning."""
 
     async def execute_search_async(
         self, query: str, db_manager, namespace: str = "default", limit: int = 10
-    ) ->
+    ) -> list[dict[str, Any]]:
         """
         Async version of execute_search for better performance in background processing
         """
@@ -807,7 +879,7 @@ Be strategic and comprehensive in your search planning."""
 
     def search_memories(
         self, query: str, max_results: int = 5, namespace: str = "default"
-    ) ->
+    ) -> list[dict[str, Any]]:
         """
         Simple search interface for compatibility with memory tools
 
@@ -821,8 +893,11 @@ Be strategic and comprehensive in your search planning."""
         """
         # This is a compatibility method that uses the database manager directly
         # We'll need the database manager to be injected or passed
-        # For now, return empty list and log the issue
-        logger.warning(
+        # For now, return empty list and log the issue with parameters
+        logger.warning(
+            f"search_memories called without database manager: query='{query}', "
+            f"max_results={max_results}, namespace='{namespace}'"
+        )
         return []
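To make the retrieval change above easier to follow: 2.1.0 drops the old "general search only when nothing else matched" fallback and instead always runs the database manager's native search_memories() first, detecting the backend via a database_type attribute, then tops up with keyword, category, and importance searches only while there is room under the limit. The snippet below is a condensed, self-contained sketch of that primary path; StubManager is a hypothetical stand-in for a real SQL or MongoDB manager.

from typing import Any


class StubManager:
    """Hypothetical stand-in for a memorisdk database manager (SQL or MongoDB)."""

    database_type = "mongodb"  # SQLAlchemy-backed managers fall back to "sql" via getattr()

    def search_memories(self, query: str, namespace: str, limit: int) -> list[dict[str, Any]]:
        # A real manager would hit its full-text / MongoDB text indexes here.
        return [{"memory_id": "m1", "summary": "User prefers Python for backend work"}]


def primary_search(db_manager, query: str, namespace: str = "default", limit: int = 10) -> list[dict[str, Any]]:
    db_type = getattr(db_manager, "database_type", "sql")  # same detection as _detect_database_type()
    results, seen = [], set()
    for row in db_manager.search_memories(query=query, namespace=namespace, limit=limit):
        if isinstance(row, dict) and row.get("memory_id") not in seen:
            seen.add(row["memory_id"])
            row["search_strategy"] = f"{db_type}_unified_search"
            row["search_reasoning"] = f"Direct {db_type} database search"
            results.append(row)
    return results


print(primary_search(StubManager(), "python preferences"))

Deduplicating on memory_id is what lets the later keyword, category, and importance passes append results without double-counting rows already returned by the primary search.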
memori/config/manager.py
CHANGED

@@ -4,7 +4,7 @@ Configuration manager for Memoriai
 
 import os
 from pathlib import Path
-from typing import Any,
+from typing import Any, Optional
 
 from loguru import logger
 
@@ -16,7 +16,7 @@ class ConfigManager:
     """Central configuration manager for Memoriai"""
 
     _instance: Optional["ConfigManager"] = None
-    _settings:
+    _settings: MemoriSettings | None = None
 
     def __new__(cls) -> "ConfigManager":
         """Singleton pattern for configuration manager"""
@@ -50,7 +50,7 @@ class ConfigManager:
             logger.warning(f"Failed to load configuration from environment: {e}")
             raise ConfigurationError(f"Environment configuration error: {e}")
 
-    def load_from_file(self, config_path:
+    def load_from_file(self, config_path: str | Path) -> None:
         """Load configuration from file"""
         try:
             config_path = Path(config_path)
@@ -122,8 +122,8 @@ class ConfigManager:
         self._settings = MemoriSettings(**merged_dict)
 
     def _deep_merge_dicts(
-        self, base:
-    ) ->
+        self, base: dict[str, Any], override: dict[str, Any]
+    ) -> dict[str, Any]:
         """Deep merge two dictionaries"""
         result = base.copy()
 
@@ -139,7 +139,7 @@ class ConfigManager:
 
         return result
 
-    def save_to_file(self, config_path:
+    def save_to_file(self, config_path: str | Path, format: str = "json") -> None:
         """Save current configuration to file"""
         if self._settings is None:
             raise ConfigurationError("No configuration loaded to save")
@@ -236,7 +236,7 @@ class ConfigManager:
             logger.error(f"Configuration validation failed: {e}")
             return False
 
-    def get_config_info(self) ->
+    def get_config_info(self) -> dict[str, Any]:
         """Get information about current configuration"""
         return {
             "loaded": self._settings is not None,
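The config/manager.py changes are type-hint modernization only, but _deep_merge_dicts is the one piece of logic worth illustrating: file-based and environment configuration are merged recursively, with the override dict winning on conflicts. The sketch below assumes the usual recursive implementation; only the signature and the base.copy() starting point are visible in this diff.

from typing import Any


def deep_merge_dicts(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
    """Recursive merge matching the _deep_merge_dicts signature; override wins on conflicts."""
    result = base.copy()
    for key, value in override.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = deep_merge_dicts(result[key], value)  # recurse into nested sections
        else:
            result[key] = value  # scalars and new keys come straight from the override
    return result


merged = deep_merge_dicts(
    {"database": {"connection_string": "sqlite:///memori.db", "pool_size": 5}},
    {"database": {"connection_string": "mongodb://localhost:27017/memori"}},
)
print(merged["database"])  # pool_size preserved, connection string overridden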
memori/config/memory_manager.py
CHANGED

@@ -6,7 +6,7 @@ a clean interface for memory management operations.
 """
 
 import uuid
-from typing import Any
+from typing import Any
 
 from loguru import logger
 
@@ -26,25 +26,25 @@ class MemoryManager:
         self,
         database_connect: str = "sqlite:///memori.db",
         template: str = "basic",
-        mem_prompt:
+        mem_prompt: str | None = None,
         conscious_ingest: bool = False,
         auto_ingest: bool = False,
-        namespace:
+        namespace: str | None = None,
         shared_memory: bool = False,
-        memory_filters:
-        user_id:
+        memory_filters: list[str] | None = None,
+        user_id: str | None = None,
         verbose: bool = False,
-        provider_config:
+        provider_config: Any | None = None,
         # Additional parameters for compatibility
-        openai_api_key:
-        api_key:
-        api_type:
-        base_url:
-        azure_endpoint:
-        azure_deployment:
-        api_version:
-        azure_ad_token:
-        organization:
+        openai_api_key: str | None = None,
+        api_key: str | None = None,
+        api_type: str | None = None,
+        base_url: str | None = None,
+        azure_endpoint: str | None = None,
+        azure_deployment: str | None = None,
+        api_version: str | None = None,
+        azure_ad_token: str | None = None,
+        organization: str | None = None,
         **kwargs,
     ):
         """
@@ -114,7 +114,7 @@ class MemoryManager:
 
         logger.debug("MemoryManager configured with Memori instance")
 
-    def enable(self, interceptors:
+    def enable(self, interceptors: list[str] | None = None) -> dict[str, Any]:
         """
         Enable memory recording using LiteLLM native callbacks.
 
@@ -162,7 +162,7 @@ class MemoryManager:
             logger.error(f"Failed to enable MemoryManager: {e}")
             return {"success": False, "message": str(e)}
 
-    def disable(self) ->
+    def disable(self) -> dict[str, Any]:
         """
         Disable memory recording using LiteLLM native callbacks.
 
@@ -194,7 +194,7 @@ class MemoryManager:
             logger.error(f"Failed to disable MemoryManager: {e}")
             return {"success": False, "message": str(e)}
 
-    def get_status(self) ->
+    def get_status(self) -> dict[str, dict[str, Any]]:
         """
         Get status of memory recording system.
 
@@ -220,7 +220,7 @@ class MemoryManager:
             }
         }
 
-    def get_health(self) ->
+    def get_health(self) -> dict[str, Any]:
         """
         Get health check of the memory management system.
 
@@ -263,8 +263,8 @@ class MemoryManager:
         self,
         user_input: str,
         ai_output: str,
-        model:
-        metadata:
+        model: str | None = None,
+        metadata: dict[str, Any] | None = None,
     ) -> str:
         """
         Record a conversation (placeholder for future implementation).
@@ -279,10 +279,10 @@ class MemoryManager:
         self,
         query: str,
         limit: int = 5,
-        memory_types:
-        categories:
-        min_importance:
-    ) ->
+        memory_types: list[str] | None = None,
+        categories: list[str] | None = None,
+        min_importance: float | None = None,
+    ) -> list[dict[str, Any]]:
         """
         Search memories (placeholder for future implementation).
 
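Every edit in memory_manager.py (and most of manager.py and settings.py) is the same mechanical change: Optional[X] and the old typing generics replaced with PEP 604/585 syntax. A quick illustration of the equivalence, for readers maintaining code against both versions; note that evaluating X | None in annotations at import time requires Python 3.10+, and whether memorisdk also raises its minimum Python is not visible in this diff.

from typing import Optional


def old_style(user_id: Optional[str] = None) -> Optional[str]:
    return user_id


def new_style(user_id: str | None = None) -> str | None:  # needs Python 3.10+ at import time
    return user_id


assert old_style.__defaults__ == new_style.__defaults__ == (None,)
assert old_style("u1") == new_style("u1") == "u1"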
memori/config/settings.py
CHANGED

@@ -4,7 +4,7 @@ Pydantic-based configuration settings for Memoriai
 
 from enum import Enum
 from pathlib import Path
-from typing import Any
+from typing import Any
 
 from pydantic import BaseModel, Field, validator
 
@@ -25,6 +25,7 @@ class DatabaseType(str, Enum):
     SQLITE = "sqlite"
     POSTGRESQL = "postgresql"
     MYSQL = "mysql"
+    MONGODB = "mongodb"
 
 
 class RetentionPolicy(str, Enum):
@@ -63,7 +64,13 @@ class DatabaseSettings(BaseModel):
             raise ValueError("Connection string cannot be empty")
 
         # Basic validation for supported protocols
-        valid_prefixes = [
+        valid_prefixes = [
+            "sqlite://",
+            "sqlite:///",
+            "postgresql://",
+            "mysql://",
+            "mongodb://",
+        ]
         if not any(v.startswith(prefix) for prefix in valid_prefixes):
             raise ValueError(f"Unsupported database type in connection string: {v}")
 
@@ -73,7 +80,7 @@
 class AgentSettings(BaseModel):
     """AI agent configuration settings"""
 
-    openai_api_key:
+    openai_api_key: str | None = Field(
         default=None, description="OpenAI API key for memory processing"
     )
     default_model: str = Field(
@@ -208,7 +215,7 @@ class MemoriSettings(BaseModel):
     integrations: IntegrationSettings = Field(default_factory=IntegrationSettings)
 
     # Custom settings
-    custom_settings:
+    custom_settings: dict[str, Any] = Field(
         default_factory=dict, description="Custom user-defined settings"
     )
 
@@ -225,7 +232,7 @@ class MemoriSettings(BaseModel):
         return cls()
 
     @classmethod
-    def from_file(cls, config_path:
+    def from_file(cls, config_path: str | Path) -> "MemoriSettings":
         """Load settings from JSON/YAML file"""
         import json
         from pathlib import Path
@@ -251,7 +258,7 @@
 
         return cls(**data)
 
-    def to_file(self, config_path:
+    def to_file(self, config_path: str | Path, format: str = "json") -> None:
         """Save settings to file"""
         import json
         from pathlib import Path
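Putting the settings changes together: DatabaseType gains a MONGODB member and the connection-string validator's prefix list now includes "mongodb://". The sketch below replays just that prefix check as a plain function; the surrounding pydantic @validator wiring is assumed, since only the prefix list and the startswith test appear in the diff.

# The same five prefixes that DatabaseSettings accepts in 2.1.0.
VALID_PREFIXES = [
    "sqlite://",
    "sqlite:///",
    "postgresql://",
    "mysql://",
    "mongodb://",
]


def validate_connection_string(v: str) -> str:
    """Plain-function version of the check; the pydantic @validator wiring is assumed."""
    if not v:
        raise ValueError("Connection string cannot be empty")
    if not any(v.startswith(prefix) for prefix in VALID_PREFIXES):
        raise ValueError(f"Unsupported database type in connection string: {v}")
    return v


print(validate_connection_string("mongodb://localhost:27017/memori"))  # newly accepted in 2.1.0

Anything not matching one of the five prefixes still raises the same "Unsupported database type" error as in 2.0.0.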