MemoryOS 2.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memoryos-2.0.3.dist-info/METADATA +418 -0
- memoryos-2.0.3.dist-info/RECORD +315 -0
- memoryos-2.0.3.dist-info/WHEEL +4 -0
- memoryos-2.0.3.dist-info/entry_points.txt +3 -0
- memoryos-2.0.3.dist-info/licenses/LICENSE +201 -0
- memos/__init__.py +20 -0
- memos/api/client.py +571 -0
- memos/api/config.py +1018 -0
- memos/api/context/dependencies.py +50 -0
- memos/api/exceptions.py +53 -0
- memos/api/handlers/__init__.py +62 -0
- memos/api/handlers/add_handler.py +158 -0
- memos/api/handlers/base_handler.py +194 -0
- memos/api/handlers/chat_handler.py +1401 -0
- memos/api/handlers/component_init.py +388 -0
- memos/api/handlers/config_builders.py +190 -0
- memos/api/handlers/feedback_handler.py +93 -0
- memos/api/handlers/formatters_handler.py +237 -0
- memos/api/handlers/memory_handler.py +316 -0
- memos/api/handlers/scheduler_handler.py +497 -0
- memos/api/handlers/search_handler.py +222 -0
- memos/api/handlers/suggestion_handler.py +117 -0
- memos/api/mcp_serve.py +614 -0
- memos/api/middleware/request_context.py +101 -0
- memos/api/product_api.py +38 -0
- memos/api/product_models.py +1206 -0
- memos/api/routers/__init__.py +1 -0
- memos/api/routers/product_router.py +477 -0
- memos/api/routers/server_router.py +394 -0
- memos/api/server_api.py +44 -0
- memos/api/start_api.py +433 -0
- memos/chunkers/__init__.py +4 -0
- memos/chunkers/base.py +24 -0
- memos/chunkers/charactertext_chunker.py +41 -0
- memos/chunkers/factory.py +24 -0
- memos/chunkers/markdown_chunker.py +62 -0
- memos/chunkers/sentence_chunker.py +54 -0
- memos/chunkers/simple_chunker.py +50 -0
- memos/cli.py +113 -0
- memos/configs/__init__.py +0 -0
- memos/configs/base.py +82 -0
- memos/configs/chunker.py +59 -0
- memos/configs/embedder.py +88 -0
- memos/configs/graph_db.py +236 -0
- memos/configs/internet_retriever.py +100 -0
- memos/configs/llm.py +151 -0
- memos/configs/mem_agent.py +54 -0
- memos/configs/mem_chat.py +81 -0
- memos/configs/mem_cube.py +105 -0
- memos/configs/mem_os.py +83 -0
- memos/configs/mem_reader.py +91 -0
- memos/configs/mem_scheduler.py +385 -0
- memos/configs/mem_user.py +70 -0
- memos/configs/memory.py +324 -0
- memos/configs/parser.py +38 -0
- memos/configs/reranker.py +18 -0
- memos/configs/utils.py +8 -0
- memos/configs/vec_db.py +80 -0
- memos/context/context.py +355 -0
- memos/dependency.py +52 -0
- memos/deprecation.py +262 -0
- memos/embedders/__init__.py +0 -0
- memos/embedders/ark.py +95 -0
- memos/embedders/base.py +106 -0
- memos/embedders/factory.py +29 -0
- memos/embedders/ollama.py +77 -0
- memos/embedders/sentence_transformer.py +49 -0
- memos/embedders/universal_api.py +51 -0
- memos/exceptions.py +30 -0
- memos/graph_dbs/__init__.py +0 -0
- memos/graph_dbs/base.py +274 -0
- memos/graph_dbs/factory.py +27 -0
- memos/graph_dbs/item.py +46 -0
- memos/graph_dbs/nebular.py +1794 -0
- memos/graph_dbs/neo4j.py +1942 -0
- memos/graph_dbs/neo4j_community.py +1058 -0
- memos/graph_dbs/polardb.py +5446 -0
- memos/hello_world.py +97 -0
- memos/llms/__init__.py +0 -0
- memos/llms/base.py +25 -0
- memos/llms/deepseek.py +13 -0
- memos/llms/factory.py +38 -0
- memos/llms/hf.py +443 -0
- memos/llms/hf_singleton.py +114 -0
- memos/llms/ollama.py +135 -0
- memos/llms/openai.py +222 -0
- memos/llms/openai_new.py +198 -0
- memos/llms/qwen.py +13 -0
- memos/llms/utils.py +14 -0
- memos/llms/vllm.py +218 -0
- memos/log.py +237 -0
- memos/mem_agent/base.py +19 -0
- memos/mem_agent/deepsearch_agent.py +391 -0
- memos/mem_agent/factory.py +36 -0
- memos/mem_chat/__init__.py +0 -0
- memos/mem_chat/base.py +30 -0
- memos/mem_chat/factory.py +21 -0
- memos/mem_chat/simple.py +200 -0
- memos/mem_cube/__init__.py +0 -0
- memos/mem_cube/base.py +30 -0
- memos/mem_cube/general.py +240 -0
- memos/mem_cube/navie.py +172 -0
- memos/mem_cube/utils.py +169 -0
- memos/mem_feedback/base.py +15 -0
- memos/mem_feedback/feedback.py +1192 -0
- memos/mem_feedback/simple_feedback.py +40 -0
- memos/mem_feedback/utils.py +230 -0
- memos/mem_os/client.py +5 -0
- memos/mem_os/core.py +1203 -0
- memos/mem_os/main.py +582 -0
- memos/mem_os/product.py +1608 -0
- memos/mem_os/product_server.py +455 -0
- memos/mem_os/utils/default_config.py +359 -0
- memos/mem_os/utils/format_utils.py +1403 -0
- memos/mem_os/utils/reference_utils.py +162 -0
- memos/mem_reader/__init__.py +0 -0
- memos/mem_reader/base.py +47 -0
- memos/mem_reader/factory.py +53 -0
- memos/mem_reader/memory.py +298 -0
- memos/mem_reader/multi_modal_struct.py +965 -0
- memos/mem_reader/read_multi_modal/__init__.py +43 -0
- memos/mem_reader/read_multi_modal/assistant_parser.py +311 -0
- memos/mem_reader/read_multi_modal/base.py +273 -0
- memos/mem_reader/read_multi_modal/file_content_parser.py +826 -0
- memos/mem_reader/read_multi_modal/image_parser.py +359 -0
- memos/mem_reader/read_multi_modal/multi_modal_parser.py +252 -0
- memos/mem_reader/read_multi_modal/string_parser.py +139 -0
- memos/mem_reader/read_multi_modal/system_parser.py +327 -0
- memos/mem_reader/read_multi_modal/text_content_parser.py +131 -0
- memos/mem_reader/read_multi_modal/tool_parser.py +210 -0
- memos/mem_reader/read_multi_modal/user_parser.py +218 -0
- memos/mem_reader/read_multi_modal/utils.py +358 -0
- memos/mem_reader/simple_struct.py +912 -0
- memos/mem_reader/strategy_struct.py +163 -0
- memos/mem_reader/utils.py +157 -0
- memos/mem_scheduler/__init__.py +0 -0
- memos/mem_scheduler/analyzer/__init__.py +0 -0
- memos/mem_scheduler/analyzer/api_analyzer.py +714 -0
- memos/mem_scheduler/analyzer/eval_analyzer.py +219 -0
- memos/mem_scheduler/analyzer/mos_for_test_scheduler.py +571 -0
- memos/mem_scheduler/analyzer/scheduler_for_eval.py +280 -0
- memos/mem_scheduler/base_scheduler.py +1319 -0
- memos/mem_scheduler/general_modules/__init__.py +0 -0
- memos/mem_scheduler/general_modules/api_misc.py +137 -0
- memos/mem_scheduler/general_modules/base.py +80 -0
- memos/mem_scheduler/general_modules/init_components_for_scheduler.py +425 -0
- memos/mem_scheduler/general_modules/misc.py +313 -0
- memos/mem_scheduler/general_modules/scheduler_logger.py +389 -0
- memos/mem_scheduler/general_modules/task_threads.py +315 -0
- memos/mem_scheduler/general_scheduler.py +1495 -0
- memos/mem_scheduler/memory_manage_modules/__init__.py +5 -0
- memos/mem_scheduler/memory_manage_modules/memory_filter.py +306 -0
- memos/mem_scheduler/memory_manage_modules/retriever.py +547 -0
- memos/mem_scheduler/monitors/__init__.py +0 -0
- memos/mem_scheduler/monitors/dispatcher_monitor.py +366 -0
- memos/mem_scheduler/monitors/general_monitor.py +394 -0
- memos/mem_scheduler/monitors/task_schedule_monitor.py +254 -0
- memos/mem_scheduler/optimized_scheduler.py +410 -0
- memos/mem_scheduler/orm_modules/__init__.py +0 -0
- memos/mem_scheduler/orm_modules/api_redis_model.py +518 -0
- memos/mem_scheduler/orm_modules/base_model.py +729 -0
- memos/mem_scheduler/orm_modules/monitor_models.py +261 -0
- memos/mem_scheduler/orm_modules/redis_model.py +699 -0
- memos/mem_scheduler/scheduler_factory.py +23 -0
- memos/mem_scheduler/schemas/__init__.py +0 -0
- memos/mem_scheduler/schemas/analyzer_schemas.py +52 -0
- memos/mem_scheduler/schemas/api_schemas.py +233 -0
- memos/mem_scheduler/schemas/general_schemas.py +55 -0
- memos/mem_scheduler/schemas/message_schemas.py +173 -0
- memos/mem_scheduler/schemas/monitor_schemas.py +406 -0
- memos/mem_scheduler/schemas/task_schemas.py +132 -0
- memos/mem_scheduler/task_schedule_modules/__init__.py +0 -0
- memos/mem_scheduler/task_schedule_modules/dispatcher.py +740 -0
- memos/mem_scheduler/task_schedule_modules/local_queue.py +247 -0
- memos/mem_scheduler/task_schedule_modules/orchestrator.py +74 -0
- memos/mem_scheduler/task_schedule_modules/redis_queue.py +1385 -0
- memos/mem_scheduler/task_schedule_modules/task_queue.py +162 -0
- memos/mem_scheduler/utils/__init__.py +0 -0
- memos/mem_scheduler/utils/api_utils.py +77 -0
- memos/mem_scheduler/utils/config_utils.py +100 -0
- memos/mem_scheduler/utils/db_utils.py +50 -0
- memos/mem_scheduler/utils/filter_utils.py +176 -0
- memos/mem_scheduler/utils/metrics.py +125 -0
- memos/mem_scheduler/utils/misc_utils.py +290 -0
- memos/mem_scheduler/utils/monitor_event_utils.py +67 -0
- memos/mem_scheduler/utils/status_tracker.py +229 -0
- memos/mem_scheduler/webservice_modules/__init__.py +0 -0
- memos/mem_scheduler/webservice_modules/rabbitmq_service.py +485 -0
- memos/mem_scheduler/webservice_modules/redis_service.py +380 -0
- memos/mem_user/factory.py +94 -0
- memos/mem_user/mysql_persistent_user_manager.py +271 -0
- memos/mem_user/mysql_user_manager.py +502 -0
- memos/mem_user/persistent_factory.py +98 -0
- memos/mem_user/persistent_user_manager.py +260 -0
- memos/mem_user/redis_persistent_user_manager.py +225 -0
- memos/mem_user/user_manager.py +488 -0
- memos/memories/__init__.py +0 -0
- memos/memories/activation/__init__.py +0 -0
- memos/memories/activation/base.py +42 -0
- memos/memories/activation/item.py +56 -0
- memos/memories/activation/kv.py +292 -0
- memos/memories/activation/vllmkv.py +219 -0
- memos/memories/base.py +19 -0
- memos/memories/factory.py +42 -0
- memos/memories/parametric/__init__.py +0 -0
- memos/memories/parametric/base.py +19 -0
- memos/memories/parametric/item.py +11 -0
- memos/memories/parametric/lora.py +41 -0
- memos/memories/textual/__init__.py +0 -0
- memos/memories/textual/base.py +92 -0
- memos/memories/textual/general.py +236 -0
- memos/memories/textual/item.py +304 -0
- memos/memories/textual/naive.py +187 -0
- memos/memories/textual/prefer_text_memory/__init__.py +0 -0
- memos/memories/textual/prefer_text_memory/adder.py +504 -0
- memos/memories/textual/prefer_text_memory/config.py +106 -0
- memos/memories/textual/prefer_text_memory/extractor.py +221 -0
- memos/memories/textual/prefer_text_memory/factory.py +85 -0
- memos/memories/textual/prefer_text_memory/retrievers.py +177 -0
- memos/memories/textual/prefer_text_memory/spliter.py +132 -0
- memos/memories/textual/prefer_text_memory/utils.py +93 -0
- memos/memories/textual/preference.py +344 -0
- memos/memories/textual/simple_preference.py +161 -0
- memos/memories/textual/simple_tree.py +69 -0
- memos/memories/textual/tree.py +459 -0
- memos/memories/textual/tree_text_memory/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/organize/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/organize/handler.py +184 -0
- memos/memories/textual/tree_text_memory/organize/manager.py +518 -0
- memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py +238 -0
- memos/memories/textual/tree_text_memory/organize/reorganizer.py +622 -0
- memos/memories/textual/tree_text_memory/retrieve/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/retrieve/advanced_searcher.py +364 -0
- memos/memories/textual/tree_text_memory/retrieve/bm25_util.py +186 -0
- memos/memories/textual/tree_text_memory/retrieve/bochasearch.py +419 -0
- memos/memories/textual/tree_text_memory/retrieve/internet_retriever.py +270 -0
- memos/memories/textual/tree_text_memory/retrieve/internet_retriever_factory.py +102 -0
- memos/memories/textual/tree_text_memory/retrieve/reasoner.py +61 -0
- memos/memories/textual/tree_text_memory/retrieve/recall.py +497 -0
- memos/memories/textual/tree_text_memory/retrieve/reranker.py +111 -0
- memos/memories/textual/tree_text_memory/retrieve/retrieval_mid_structs.py +16 -0
- memos/memories/textual/tree_text_memory/retrieve/retrieve_utils.py +472 -0
- memos/memories/textual/tree_text_memory/retrieve/searcher.py +848 -0
- memos/memories/textual/tree_text_memory/retrieve/task_goal_parser.py +135 -0
- memos/memories/textual/tree_text_memory/retrieve/utils.py +54 -0
- memos/memories/textual/tree_text_memory/retrieve/xinyusearch.py +387 -0
- memos/memos_tools/dinding_report_bot.py +453 -0
- memos/memos_tools/lockfree_dict.py +120 -0
- memos/memos_tools/notification_service.py +44 -0
- memos/memos_tools/notification_utils.py +142 -0
- memos/memos_tools/singleton.py +174 -0
- memos/memos_tools/thread_safe_dict.py +310 -0
- memos/memos_tools/thread_safe_dict_segment.py +382 -0
- memos/multi_mem_cube/__init__.py +0 -0
- memos/multi_mem_cube/composite_cube.py +86 -0
- memos/multi_mem_cube/single_cube.py +874 -0
- memos/multi_mem_cube/views.py +54 -0
- memos/parsers/__init__.py +0 -0
- memos/parsers/base.py +15 -0
- memos/parsers/factory.py +21 -0
- memos/parsers/markitdown.py +28 -0
- memos/reranker/__init__.py +4 -0
- memos/reranker/base.py +25 -0
- memos/reranker/concat.py +103 -0
- memos/reranker/cosine_local.py +102 -0
- memos/reranker/factory.py +72 -0
- memos/reranker/http_bge.py +324 -0
- memos/reranker/http_bge_strategy.py +327 -0
- memos/reranker/noop.py +19 -0
- memos/reranker/strategies/__init__.py +4 -0
- memos/reranker/strategies/base.py +61 -0
- memos/reranker/strategies/concat_background.py +94 -0
- memos/reranker/strategies/concat_docsource.py +110 -0
- memos/reranker/strategies/dialogue_common.py +109 -0
- memos/reranker/strategies/factory.py +31 -0
- memos/reranker/strategies/single_turn.py +107 -0
- memos/reranker/strategies/singleturn_outmem.py +98 -0
- memos/settings.py +10 -0
- memos/templates/__init__.py +0 -0
- memos/templates/advanced_search_prompts.py +211 -0
- memos/templates/cloud_service_prompt.py +107 -0
- memos/templates/instruction_completion.py +66 -0
- memos/templates/mem_agent_prompts.py +85 -0
- memos/templates/mem_feedback_prompts.py +822 -0
- memos/templates/mem_reader_prompts.py +1096 -0
- memos/templates/mem_reader_strategy_prompts.py +238 -0
- memos/templates/mem_scheduler_prompts.py +626 -0
- memos/templates/mem_search_prompts.py +93 -0
- memos/templates/mos_prompts.py +403 -0
- memos/templates/prefer_complete_prompt.py +735 -0
- memos/templates/tool_mem_prompts.py +139 -0
- memos/templates/tree_reorganize_prompts.py +230 -0
- memos/types/__init__.py +34 -0
- memos/types/general_types.py +151 -0
- memos/types/openai_chat_completion_types/__init__.py +15 -0
- memos/types/openai_chat_completion_types/chat_completion_assistant_message_param.py +56 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_image_param.py +27 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_input_audio_param.py +23 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_param.py +43 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_refusal_param.py +16 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_text_param.py +16 -0
- memos/types/openai_chat_completion_types/chat_completion_message_custom_tool_call_param.py +27 -0
- memos/types/openai_chat_completion_types/chat_completion_message_function_tool_call_param.py +32 -0
- memos/types/openai_chat_completion_types/chat_completion_message_param.py +18 -0
- memos/types/openai_chat_completion_types/chat_completion_message_tool_call_union_param.py +15 -0
- memos/types/openai_chat_completion_types/chat_completion_system_message_param.py +36 -0
- memos/types/openai_chat_completion_types/chat_completion_tool_message_param.py +30 -0
- memos/types/openai_chat_completion_types/chat_completion_user_message_param.py +34 -0
- memos/utils.py +123 -0
- memos/vec_dbs/__init__.py +0 -0
- memos/vec_dbs/base.py +117 -0
- memos/vec_dbs/factory.py +23 -0
- memos/vec_dbs/item.py +50 -0
- memos/vec_dbs/milvus.py +654 -0
- memos/vec_dbs/qdrant.py +355 -0
|
@@ -0,0 +1,699 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
|
|
4
|
+
from typing import Any, TypeVar
|
|
5
|
+
|
|
6
|
+
from sqlalchemy.engine import Engine
|
|
7
|
+
from sqlalchemy.orm import declarative_base
|
|
8
|
+
|
|
9
|
+
from memos.log import get_logger
|
|
10
|
+
from memos.mem_scheduler.orm_modules.base_model import BaseDBManager
|
|
11
|
+
from memos.mem_scheduler.schemas.monitor_schemas import MemoryMonitorManager
|
|
12
|
+
from memos.mem_scheduler.utils.db_utils import get_utc_now
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
T = TypeVar("T") # The model type (MemoryMonitorManager, QueryMonitorManager, etc.)
|
|
16
|
+
ORM = TypeVar("ORM") # The ORM model type
|
|
17
|
+
|
|
18
|
+
logger = get_logger(__name__)
|
|
19
|
+
|
|
20
|
+
Base = declarative_base()
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class SimpleListManager:
|
|
24
|
+
"""Simple wrapper class for list[str] to work with RedisDBManager"""
|
|
25
|
+
|
|
26
|
+
def __init__(self, items: list[str] | None = None):
|
|
27
|
+
self.items = items or []
|
|
28
|
+
|
|
29
|
+
def to_json(self) -> str:
|
|
30
|
+
"""Serialize to JSON string"""
|
|
31
|
+
return json.dumps({"items": self.items})
|
|
32
|
+
|
|
33
|
+
@classmethod
|
|
34
|
+
def from_json(cls, json_str: str) -> "SimpleListManager":
|
|
35
|
+
"""Deserialize from JSON string"""
|
|
36
|
+
data = json.loads(json_str)
|
|
37
|
+
return cls(items=data.get("items", []))
|
|
38
|
+
|
|
39
|
+
def add_item(self, item: str):
|
|
40
|
+
"""Add an item to the list"""
|
|
41
|
+
self.items.append(item)
|
|
42
|
+
|
|
43
|
+
def __len__(self):
|
|
44
|
+
return len(self.items)
|
|
45
|
+
|
|
46
|
+
def __str__(self):
|
|
47
|
+
return f"SimpleListManager(items={self.items})"
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class RedisLockableORM:
    """Redis-based implementation of LockableORM interface

    Stores the serialized payload, lock marker and version counter under
    per-user/per-cube Redis keys, mirroring the SQLAlchemy LockableORM
    surface while using Redis as the backing store.
    """

    def __init__(self, redis_client, user_id: str, mem_cube_id: str):
        self.redis_client = redis_client
        self.user_id = user_id
        self.mem_cube_id = mem_cube_id
        self.serialized_data = None
        self.lock_acquired = False
        self.lock_expiry = None
        self.version_control = "0"

    def _get_key_prefix(self) -> str:
        """Common Redis key prefix identifying this user/cube pair."""
        return f"lockable_orm:{self.user_id}:{self.mem_cube_id}"

    def _get_data_key(self) -> str:
        """Redis key under which the serialized payload lives."""
        return f"{self._get_key_prefix()}:data"

    def _get_lock_key(self) -> str:
        """Redis key under which lock metadata lives."""
        return f"{self._get_key_prefix()}:lock"

    def _get_version_key(self) -> str:
        """Redis key under which the version counter lives."""
        return f"{self._get_key_prefix()}:version"

    def save(self):
        """Persist serialized data and the version counter to Redis."""
        try:
            if self.serialized_data:
                self.redis_client.set(self._get_data_key(), self.serialized_data)

            # Lock state is intentionally NOT written here; acquire_lock /
            # release_locks manage the lock key atomically on their own.
            self.redis_client.set(self._get_version_key(), self.version_control)

            logger.debug(f"Saved RedisLockableORM to Redis: {self._get_key_prefix()}")
        except Exception as e:
            logger.error(f"Failed to save RedisLockableORM to Redis: {e}")
            raise

    def load(self):
        """Populate this instance from Redis.

        Returns True when serialized data was found, False otherwise
        (including on any Redis error, which is logged and swallowed).
        """
        try:
            raw = self.redis_client.get(self._get_data_key())
            if raw:
                # Tolerate both decode_responses=True clients and raw bytes.
                self.serialized_data = raw.decode() if isinstance(raw, bytes) else raw
            else:
                self.serialized_data = None

            # Lock state is owned by acquire_lock/release_locks; reset the
            # local view instead of reading the lock key.
            self.lock_acquired = False
            self.lock_expiry = None

            stored_version = self.redis_client.get(self._get_version_key())
            if stored_version:
                self.version_control = (
                    stored_version.decode()
                    if isinstance(stored_version, bytes)
                    else stored_version
                )
            else:
                self.version_control = "0"

            logger.debug(f"Loaded RedisLockableORM from Redis: {self._get_key_prefix()}")
            return self.serialized_data is not None

        except Exception as e:
            logger.error(f"Failed to load RedisLockableORM from Redis: {e}")
            return False

    def delete(self):
        """Remove the data, lock and version keys for this instance."""
        try:
            self.redis_client.delete(
                self._get_data_key(), self._get_lock_key(), self._get_version_key()
            )
            logger.debug(f"Deleted RedisLockableORM from Redis: {self._get_key_prefix()}")
        except Exception as e:
            logger.error(f"Failed to delete RedisLockableORM from Redis: {e}")
            raise
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
class RedisDBManager(BaseDBManager):
|
|
143
|
+
"""Redis-based database manager for any serializable object
|
|
144
|
+
|
|
145
|
+
This class handles persistence, synchronization, and locking
|
|
146
|
+
for any object that implements to_json/from_json methods using Redis as the backend storage.
|
|
147
|
+
"""
|
|
148
|
+
|
|
149
|
+
def __init__(
    self,
    engine: Engine | None = None,
    user_id: str | None = None,
    mem_cube_id: str | None = None,
    obj: Any | None = None,
    lock_timeout: int = 10,
    redis_client=None,
    redis_config: dict | None = None,
):
    """Initialize the Redis database manager.

    Args:
        engine: SQLAlchemy engine (unused for Redis; accepted only for
            interface compatibility with the SQL-backed managers)
        user_id: Unique identifier for the user
        mem_cube_id: Unique identifier for the memory cube
        obj: Optional managed object (must provide to_json/from_json)
        lock_timeout: Lock acquisition timeout in seconds
        redis_client: Pre-built Redis client instance (optional)
        redis_config: Redis connection settings dictionary (optional)
    """
    # The connection comes either from the caller or is built lazily
    # from environment/config.
    self.redis_client = redis_client
    self.redis_config = redis_config or {}
    if self.redis_client is None:
        self._init_redis_client()

    # Plain attribute setup; the SQLAlchemy-specific parent initialization
    # is deliberately skipped because Redis needs no session machinery.
    self.user_id = user_id
    self.mem_cube_id = mem_cube_id
    self.obj = obj
    # Remember the concrete type so deserialization can round-trip it.
    self.obj_type = None if obj is None else type(obj)
    self.lock_timeout = lock_timeout
    self.engine = engine  # kept only for interface compatibility
    self.SessionLocal = None  # sessions are a SQL concept, unused here
    self.last_version_control = None

    logger.info(
        f"RedisDBManager initialized for user_id: {user_id}, mem_cube_id: {mem_cube_id}"
    )
    logger.info(f"Redis client: {type(self.redis_client).__name__}")

    # Probe the connection; failures are only logged so that mock clients
    # used in tests do not break construction.
    try:
        self.redis_client.ping()
        logger.info("Redis connection successful")
    except Exception as e:
        logger.warning(f"Redis ping failed: {e}")
|
|
199
|
+
|
|
200
|
+
def _init_redis_client(self):
    """Initialize Redis client from config or environment.

    Resolution order: environment variables, then the explicit config
    dict, then localhost defaults. Raises if the redis package is missing
    or the resulting client cannot be reached.
    """
    try:
        import redis

        # Preferred source: environment-driven configuration.
        if not self.redis_client:
            self.redis_client = self.load_redis_engine_from_env()

        # Next: the config dict supplied to the constructor.
        if not self.redis_client and self.redis_config:
            connection_args = {
                "host": self.redis_config.get("host", "localhost"),
                "port": self.redis_config.get("port", 6379),
                "db": self.redis_config.get("db", 0),
                "decode_responses": True,
            }
            if self.redis_config.get("password"):
                connection_args["password"] = self.redis_config["password"]

            self.redis_client = redis.Redis(**connection_args)

        # Last resort: localhost defaults.
        if not self.redis_client:
            logger.warning("No Redis configuration found, using localhost defaults")
            self.redis_client = redis.Redis(
                host="localhost", port=6379, db=0, decode_responses=True
            )

        # Verify the connection is actually usable before continuing.
        if not self.redis_client.ping():
            raise ConnectionError("Redis ping failed")

        logger.info("Redis client initialized successfully")

    except ImportError:
        logger.error("Redis package not installed. Install with: pip install redis")
        raise
    except Exception as e:
        logger.error(f"Failed to initialize Redis client: {e}")
        raise
|
|
242
|
+
|
|
243
|
+
@property
def orm_class(self) -> type[RedisLockableORM]:
    """Return the Redis-based ORM class used to persist managed objects."""
    return RedisLockableORM
|
|
247
|
+
|
|
248
|
+
@property
def obj_class(self) -> type:
    """Concrete class of the managed object.

    Falls back to MemoryMonitorManager when no object was supplied at
    construction time (obj_type is None).
    """
    if self.obj_type is None:
        return MemoryMonitorManager
    return self.obj_type
|
|
252
|
+
|
|
253
|
+
def merge_items(
    self,
    orm_instance: RedisLockableORM,
    obj_instance: Any,
    size_limit: int,
):
    """Merge persisted state from Redis into the in-memory object.

    Dispatches to a type-specific merge strategy: MemoryMonitorManager-like
    objects (anything with ``memories``) merge on memory mapping keys,
    SimpleListManager-like objects (anything with ``items``) merge on list
    membership, and everything else is returned unchanged.

    Args:
        orm_instance: Redis ORM instance loaded from the store
        obj_instance: Current object instance (to_json/from_json capable)
        size_limit: Maximum number of items to keep after merging
    """
    logger.debug(f"Starting merge_items with size_limit={size_limit}")

    try:
        if not orm_instance.serialized_data:
            logger.warning("No serialized data in Redis ORM instance to merge")
            return obj_instance

        # Rebuild the persisted object with the most specific type we know.
        loader = self.obj_type if self.obj_type is not None else MemoryMonitorManager
        db_obj = loader.from_json(orm_instance.serialized_data)

        # Pick the merge strategy from the runtime type of the live object.
        obj_type = type(obj_instance)
        if obj_type.__name__ == "MemoryMonitorManager" or hasattr(obj_instance, "memories"):
            return self._merge_memory_monitor_items(obj_instance, db_obj, size_limit)
        if obj_type.__name__ == "SimpleListManager" or hasattr(obj_instance, "items"):
            return self._merge_list_items(obj_instance, db_obj, size_limit)

        # Unknown types get no merge; the live instance wins wholesale.
        logger.info(
            f"No specific merge logic for object type {obj_type.__name__}, returning current instance"
        )
        return obj_instance

    except Exception as e:
        # Best-effort: a broken payload must not lose the live object.
        logger.error(f"Failed to deserialize database instance: {e}", exc_info=True)
        logger.warning("Skipping merge due to deserialization error, using current object only")
        return obj_instance
|
|
301
|
+
|
|
302
|
+
def _merge_memory_monitor_items(self, obj_instance, db_obj, size_limit: int):
    """Merge MemoryMonitorManager items, live object taking precedence."""
    # Snapshot of the mapping keys already known to the live object.
    known_keys = obj_instance.memories_mapping_dict

    # Pull in only the persisted memories the live object doesn't have yet.
    for persisted in db_obj.memories:
        if persisted.tree_memory_item_mapping_key not in known_keys:
            obj_instance.memories.append(persisted)

    # Enforce the cap, preferring the most frequently recorded memories.
    if size_limit and len(obj_instance.memories) > size_limit:
        obj_instance.memories.sort(key=lambda m: m.recording_count, reverse=True)
        obj_instance.memories = obj_instance.memories[:size_limit]
        logger.info(
            f"Applied size limit {size_limit}, kept {len(obj_instance.memories)} memories"
        )

    logger.info(f"Merged {len(obj_instance.memories)} memory items")
    return obj_instance
|
|
323
|
+
|
|
324
|
+
def _merge_list_items(self, obj_instance, db_obj, size_limit: int):
    """Merge SimpleListManager-like items, deduplicated and order-preserving."""
    combined = []
    already_present = set()

    # Current object's items first (higher priority), then database
    # leftovers; duplicates are dropped while preserving first-seen order.
    for source in (obj_instance.items, db_obj.items):
        for entry in source:
            if entry not in already_present:
                combined.append(entry)
                already_present.add(entry)

    # Trim to the cap, keeping the highest-priority (earliest) entries.
    if size_limit is not None and size_limit > 0 and len(combined) > size_limit:
        combined = combined[:size_limit]
        logger.debug(f"Applied size limit of {size_limit}, kept {len(combined)} items")

    # Write the merged view back onto the live object.
    obj_instance.items = combined

    logger.info(f"Merged {len(combined)} list items (size_limit: {size_limit})")
    return obj_instance
|
|
351
|
+
|
|
352
|
+
def _get_redis_orm_instance(self) -> RedisLockableORM:
    """Build a Redis ORM instance bound to this manager's user/cube pair."""
    return RedisLockableORM(
        redis_client=self.redis_client,
        user_id=self.user_id,
        mem_cube_id=self.mem_cube_id,
    )
|
|
358
|
+
|
|
359
|
+
def _get_key_prefix(self) -> str:
    """Generate Redis key prefix for this ORM instance.

    Mirrors RedisLockableORM's key scheme so manager-level lock keys and
    ORM-level data keys land under the same namespace.
    """
    return f"lockable_orm:{self.user_id}:{self.mem_cube_id}"
|
|
362
|
+
|
|
363
|
+
def acquire_lock(self, block: bool = True, **kwargs) -> bool:
    """Acquire a distributed lock using Redis with atomic operations

    Args:
        block: Whether to block until lock is acquired
        **kwargs: Additional filter criteria (ignored for Redis)

    Returns:
        True if lock was acquired, False otherwise
    """
    try:
        lock_key = f"{self._get_key_prefix()}:lock"
        now = get_utc_now()
        # The value identifies the holder; SET NX+EX makes the
        # test-and-set atomic and guarantees eventual expiry.
        lock_value = f"{self.user_id}:{self.mem_cube_id}:{now.timestamp()}"

        while True:
            acquired = self.redis_client.set(
                lock_key,
                lock_value,
                nx=True,  # only succeed if the key is absent
                ex=self.lock_timeout,  # auto-expire so a crash can't wedge the lock
            )
            if acquired:
                logger.info(f"Redis lock acquired for {self.user_id}/{self.mem_cube_id}")
                return True

            if not block:
                logger.warning(
                    f"Redis lock is held for {self.user_id}/{self.mem_cube_id}, cannot acquire"
                )
                return False

            # Lock is busy: poll again shortly.
            logger.info(
                f"Waiting for Redis lock to be released for {self.user_id}/{self.mem_cube_id}"
            )
            time.sleep(0.1)

    except Exception as e:
        logger.error(f"Failed to acquire Redis lock for {self.user_id}/{self.mem_cube_id}: {e}")
        return False
|
|
409
|
+
|
|
410
|
+
def release_locks(self, user_id: str, mem_cube_id: str, **kwargs):
    """Release Redis locks for the specified user and memory cube

    Args:
        user_id: User identifier
        mem_cube_id: Memory cube identifier
        **kwargs: Additional filter criteria (ignored for Redis)
    """
    try:
        lock_key = f"lockable_orm:{user_id}:{mem_cube_id}:lock"

        # DEL returns the number of keys removed; 0 means there was
        # nothing to release.
        deleted = self.redis_client.delete(lock_key)

        if deleted:
            logger.info(f"Redis lock released for {user_id}/{mem_cube_id}")
        else:
            logger.warning(f"No Redis lock found to release for {user_id}/{mem_cube_id}")

    except Exception as e:
        logger.error(f"Failed to release Redis lock for {user_id}/{mem_cube_id}: {e}")
|
|
431
|
+
|
|
432
|
+
def sync_with_orm(self, size_limit: int | None = None) -> None:
    """Synchronize data between Redis and the business object

    Merges the Redis copy into the in-memory object (on first sync, or
    whenever the stored version tag differs from the one last seen),
    then writes the result back with an incremented version tag.

    Args:
        size_limit: Optional maximum number of items to keep after synchronization
    """
    logger.info(
        f"Starting Redis sync_with_orm for {self.user_id}/{self.mem_cube_id} with size_limit={size_limit}"
    )

    # Track whether we actually hold the lock: the previous version
    # released unconditionally in `finally`, which could delete a lock
    # held by another process when acquisition had failed.
    lock_acquired = False
    try:
        lock_acquired = self.acquire_lock(block=True)
        if not lock_acquired:
            logger.error("Failed to acquire Redis lock for synchronization")
            return

        # Get existing data from Redis
        orm_instance = self._get_redis_orm_instance()
        exists = orm_instance.load()

        # If no existing record, create a new one from the current object
        if not exists:
            if self.obj is None:
                logger.warning("No object to synchronize and no existing Redis record")
                return

            orm_instance.serialized_data = self.obj.to_json()
            orm_instance.version_control = "0"
            orm_instance.save()

            logger.info("No existing Redis record found. Created a new one.")
            self.last_version_control = "0"
            return

        # Check version control and merge data
        if self.obj is not None:
            current_redis_tag = orm_instance.version_control
            new_tag = self._increment_version_control(current_redis_tag)

            # Merge on the first sync (to pull Redis data in) or whenever
            # the stored tag differs from the one we last observed.
            if self.last_version_control is None:
                logger.info("First Redis sync, merging data from Redis")
                need_merge = True
            elif current_redis_tag == self.last_version_control:
                logger.info(
                    f"Redis version control unchanged ({current_redis_tag}), directly update"
                )
                need_merge = False
            else:
                logger.info(
                    f"Redis version control changed from {self.last_version_control} to {current_redis_tag}, merging data"
                )
                need_merge = True

            if need_merge:
                # A failed merge is non-fatal: fall back to the current
                # object's data so the write-back below still happens.
                try:
                    self.merge_items(
                        orm_instance=orm_instance, obj_instance=self.obj, size_limit=size_limit
                    )
                except Exception as merge_error:
                    logger.error(
                        f"Error during Redis merge_items: {merge_error}", exc_info=True
                    )
                    logger.warning("Continuing with current object data without merge")

            # Write merged data back to Redis with the bumped version tag
            orm_instance.serialized_data = self.obj.to_json()
            orm_instance.version_control = new_tag
            orm_instance.save()

            logger.info(f"Updated Redis serialized_data for {self.user_id}/{self.mem_cube_id}")
            self.last_version_control = orm_instance.version_control
        else:
            logger.warning("No current object to merge with Redis data")

        logger.info(f"Redis synchronization completed for {self.user_id}/{self.mem_cube_id}")

    except Exception as e:
        logger.error(
            f"Error during Redis synchronization for {self.user_id}/{self.mem_cube_id}: {e}",
            exc_info=True,
        )
    finally:
        # Only release a lock this call actually acquired
        if lock_acquired:
            self.release_locks(user_id=self.user_id, mem_cube_id=self.mem_cube_id)
|
|
523
|
+
|
|
524
|
+
def save_to_db(self, obj_instance: Any) -> None:
    """Save the current state of the business object to Redis

    Creates a new record at version "0" when none exists, otherwise
    overwrites the existing record with an incremented version tag.

    Args:
        obj_instance: The object instance to save (must have to_json method)
    """
    # Track whether we actually hold the lock: the previous version
    # released unconditionally in `finally`, which could delete a lock
    # held by another process when acquisition had failed.
    lock_acquired = False
    try:
        lock_acquired = self.acquire_lock(block=True)
        if not lock_acquired:
            logger.error("Failed to acquire Redis lock for saving")
            return

        # Get or create Redis ORM instance
        orm_instance = self._get_redis_orm_instance()
        exists = orm_instance.load()

        # New records start at version "0"; existing ones get a bumped tag.
        if exists:
            new_version = self._increment_version_control(orm_instance.version_control)
        else:
            new_version = "0"

        orm_instance.serialized_data = obj_instance.to_json()
        orm_instance.version_control = new_version
        orm_instance.save()

        if exists:
            logger.info(
                f"Updated existing Redis record for {self.user_id}/{self.mem_cube_id} with version {new_version}"
            )
        else:
            logger.info(f"Created new Redis record for {self.user_id}/{self.mem_cube_id}")
        self.last_version_control = new_version

    except Exception as e:
        logger.error(f"Error saving to Redis for {self.user_id}/{self.mem_cube_id}: {e}")
    finally:
        # Only release a lock this call actually acquired
        if lock_acquired:
            self.release_locks(user_id=self.user_id, mem_cube_id=self.mem_cube_id)
|
|
568
|
+
|
|
569
|
+
def load_from_db(self, acquire_lock: bool = False) -> Any | None:
    """Load the business object from Redis

    Args:
        acquire_lock: Whether to acquire a lock during the load operation

    Returns:
        The deserialized object instance, or None if not found
    """
    # Track whether we actually hold the lock: the previous version
    # released in `finally` whenever acquire_lock was requested, even if
    # acquisition failed — potentially dropping another holder's lock.
    lock_acquired = False
    try:
        if acquire_lock:
            lock_acquired = self.acquire_lock(block=True)
            if not lock_acquired:
                logger.error("Failed to acquire Redis lock for loading")
                return None

        # Load from Redis
        orm_instance = self._get_redis_orm_instance()
        exists = orm_instance.load()

        if not exists or not orm_instance.serialized_data:
            logger.info(f"No Redis record found for {self.user_id}/{self.mem_cube_id}")
            return None

        # Deserialize with the configured type, falling back to
        # MemoryMonitorManager when no explicit type was provided.
        if self.obj_type is not None:
            db_instance = self.obj_type.from_json(orm_instance.serialized_data)
        else:
            db_instance = MemoryMonitorManager.from_json(orm_instance.serialized_data)
        self.last_version_control = orm_instance.version_control

        logger.info(
            f"Successfully loaded object from Redis for {self.user_id}/{self.mem_cube_id} with version {orm_instance.version_control}"
        )
        return db_instance

    except Exception as e:
        logger.error(f"Error loading from Redis for {self.user_id}/{self.mem_cube_id}: {e}")
        return None
    finally:
        # Only release a lock this call actually acquired
        if lock_acquired:
            self.release_locks(user_id=self.user_id, mem_cube_id=self.mem_cube_id)
|
|
611
|
+
|
|
612
|
+
def close(self):
    """Close the Redis manager and clean up resources"""
    try:
        # Drop any lock this manager may still hold (must happen while
        # the Redis connection is still open).
        if self.user_id and self.mem_cube_id:
            self.release_locks(user_id=self.user_id, mem_cube_id=self.mem_cube_id)
            logger.info(f"Released Redis locks for {self.user_id}/{self.mem_cube_id}")

        # Shut down the Redis connection if one is attached
        if self.redis_client:
            self.redis_client.close()
            logger.info("Redis connection closed")

        # Delegate any remaining cleanup to the parent class
        super().close()

    except Exception as e:
        logger.error(f"Error during Redis close operation: {e}")
|
|
630
|
+
|
|
631
|
+
@classmethod
def from_env(
    cls,
    user_id: str,
    mem_cube_id: str,
    obj: Any | None = None,
    lock_timeout: int = 10,
    env_file_path: str | None = None,
) -> "RedisDBManager":
    """Create RedisDBManager from environment variables

    Args:
        user_id: User identifier
        mem_cube_id: Memory cube identifier
        obj: Optional MemoryMonitorManager instance
        lock_timeout: Lock timeout in seconds
        env_file_path: Optional path to .env file

    Returns:
        RedisDBManager instance
    """
    try:
        # Build the Redis client from the environment, then hand it to
        # the regular constructor; any failure is logged and re-raised.
        return cls(
            user_id=user_id,
            mem_cube_id=mem_cube_id,
            obj=obj,
            lock_timeout=lock_timeout,
            redis_client=cls.load_redis_engine_from_env(env_file_path),
        )
    except Exception as e:
        logger.error(f"Failed to create RedisDBManager from environment: {e}")
        raise
|
|
664
|
+
|
|
665
|
+
def list_keys(self, pattern: str | None = None) -> list[str]:
|
|
666
|
+
"""List all Redis keys for this manager's data
|
|
667
|
+
|
|
668
|
+
Args:
|
|
669
|
+
pattern: Optional pattern to filter keys
|
|
670
|
+
|
|
671
|
+
Returns:
|
|
672
|
+
List of Redis keys
|
|
673
|
+
"""
|
|
674
|
+
try:
|
|
675
|
+
if pattern is None:
|
|
676
|
+
pattern = f"lockable_orm:{self.user_id}:{self.mem_cube_id}:*"
|
|
677
|
+
|
|
678
|
+
keys = self.redis_client.keys(pattern)
|
|
679
|
+
return [key.decode() if isinstance(key, bytes) else key for key in keys]
|
|
680
|
+
|
|
681
|
+
except Exception as e:
|
|
682
|
+
logger.error(f"Error listing Redis keys: {e}")
|
|
683
|
+
return []
|
|
684
|
+
|
|
685
|
+
def health_check(self) -> dict[str, bool]:
    """Check the health of Redis connection

    Returns:
        Dictionary with health status
    """
    # MySQL never applies to this Redis-backed manager, so it is always False.
    status = {"redis": False, "mysql": False}
    try:
        status["redis"] = self.redis_client.ping()
    except Exception as e:
        logger.error(f"Redis health check failed: {e}")
    return status
|