MemoryOS 0.0.1__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Potentially problematic release: this version of MemoryOS might be problematic.
- memoryos-0.1.13.dist-info/METADATA +288 -0
- memoryos-0.1.13.dist-info/RECORD +122 -0
- memos/__init__.py +20 -1
- memos/api/start_api.py +420 -0
- memos/chunkers/__init__.py +4 -0
- memos/chunkers/base.py +24 -0
- memos/chunkers/factory.py +22 -0
- memos/chunkers/sentence_chunker.py +35 -0
- memos/configs/__init__.py +0 -0
- memos/configs/base.py +82 -0
- memos/configs/chunker.py +45 -0
- memos/configs/embedder.py +53 -0
- memos/configs/graph_db.py +45 -0
- memos/configs/internet_retriever.py +81 -0
- memos/configs/llm.py +71 -0
- memos/configs/mem_chat.py +81 -0
- memos/configs/mem_cube.py +89 -0
- memos/configs/mem_os.py +74 -0
- memos/configs/mem_reader.py +53 -0
- memos/configs/mem_scheduler.py +78 -0
- memos/configs/memory.py +195 -0
- memos/configs/parser.py +38 -0
- memos/configs/utils.py +8 -0
- memos/configs/vec_db.py +64 -0
- memos/deprecation.py +262 -0
- memos/embedders/__init__.py +0 -0
- memos/embedders/base.py +15 -0
- memos/embedders/factory.py +23 -0
- memos/embedders/ollama.py +74 -0
- memos/embedders/sentence_transformer.py +40 -0
- memos/exceptions.py +30 -0
- memos/graph_dbs/__init__.py +0 -0
- memos/graph_dbs/base.py +215 -0
- memos/graph_dbs/factory.py +21 -0
- memos/graph_dbs/neo4j.py +827 -0
- memos/hello_world.py +97 -0
- memos/llms/__init__.py +0 -0
- memos/llms/base.py +16 -0
- memos/llms/factory.py +25 -0
- memos/llms/hf.py +231 -0
- memos/llms/ollama.py +82 -0
- memos/llms/openai.py +34 -0
- memos/llms/utils.py +14 -0
- memos/log.py +78 -0
- memos/mem_chat/__init__.py +0 -0
- memos/mem_chat/base.py +30 -0
- memos/mem_chat/factory.py +21 -0
- memos/mem_chat/simple.py +200 -0
- memos/mem_cube/__init__.py +0 -0
- memos/mem_cube/base.py +29 -0
- memos/mem_cube/general.py +146 -0
- memos/mem_cube/utils.py +24 -0
- memos/mem_os/client.py +5 -0
- memos/mem_os/core.py +819 -0
- memos/mem_os/main.py +503 -0
- memos/mem_os/product.py +89 -0
- memos/mem_reader/__init__.py +0 -0
- memos/mem_reader/base.py +27 -0
- memos/mem_reader/factory.py +21 -0
- memos/mem_reader/memory.py +298 -0
- memos/mem_reader/simple_struct.py +241 -0
- memos/mem_scheduler/__init__.py +0 -0
- memos/mem_scheduler/base_scheduler.py +164 -0
- memos/mem_scheduler/general_scheduler.py +305 -0
- memos/mem_scheduler/modules/__init__.py +0 -0
- memos/mem_scheduler/modules/base.py +74 -0
- memos/mem_scheduler/modules/dispatcher.py +103 -0
- memos/mem_scheduler/modules/monitor.py +82 -0
- memos/mem_scheduler/modules/redis_service.py +146 -0
- memos/mem_scheduler/modules/retriever.py +41 -0
- memos/mem_scheduler/modules/schemas.py +146 -0
- memos/mem_scheduler/scheduler_factory.py +21 -0
- memos/mem_scheduler/utils.py +26 -0
- memos/mem_user/user_manager.py +488 -0
- memos/memories/__init__.py +0 -0
- memos/memories/activation/__init__.py +0 -0
- memos/memories/activation/base.py +42 -0
- memos/memories/activation/item.py +25 -0
- memos/memories/activation/kv.py +232 -0
- memos/memories/base.py +19 -0
- memos/memories/factory.py +34 -0
- memos/memories/parametric/__init__.py +0 -0
- memos/memories/parametric/base.py +19 -0
- memos/memories/parametric/item.py +11 -0
- memos/memories/parametric/lora.py +41 -0
- memos/memories/textual/__init__.py +0 -0
- memos/memories/textual/base.py +89 -0
- memos/memories/textual/general.py +286 -0
- memos/memories/textual/item.py +167 -0
- memos/memories/textual/naive.py +185 -0
- memos/memories/textual/tree.py +321 -0
- memos/memories/textual/tree_text_memory/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/organize/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/organize/manager.py +305 -0
- memos/memories/textual/tree_text_memory/retrieve/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/retrieve/internet_retriever.py +263 -0
- memos/memories/textual/tree_text_memory/retrieve/internet_retriever_factory.py +89 -0
- memos/memories/textual/tree_text_memory/retrieve/reasoner.py +61 -0
- memos/memories/textual/tree_text_memory/retrieve/recall.py +158 -0
- memos/memories/textual/tree_text_memory/retrieve/reranker.py +111 -0
- memos/memories/textual/tree_text_memory/retrieve/retrieval_mid_structs.py +13 -0
- memos/memories/textual/tree_text_memory/retrieve/searcher.py +208 -0
- memos/memories/textual/tree_text_memory/retrieve/task_goal_parser.py +68 -0
- memos/memories/textual/tree_text_memory/retrieve/utils.py +48 -0
- memos/memories/textual/tree_text_memory/retrieve/xinyusearch.py +335 -0
- memos/parsers/__init__.py +0 -0
- memos/parsers/base.py +15 -0
- memos/parsers/factory.py +19 -0
- memos/parsers/markitdown.py +22 -0
- memos/settings.py +8 -0
- memos/templates/__init__.py +0 -0
- memos/templates/mem_reader_prompts.py +98 -0
- memos/templates/mem_scheduler_prompts.py +65 -0
- memos/templates/mos_prompts.py +63 -0
- memos/types.py +55 -0
- memos/vec_dbs/__init__.py +0 -0
- memos/vec_dbs/base.py +105 -0
- memos/vec_dbs/factory.py +21 -0
- memos/vec_dbs/item.py +43 -0
- memos/vec_dbs/qdrant.py +292 -0
- memoryos-0.0.1.dist-info/METADATA +0 -53
- memoryos-0.0.1.dist-info/RECORD +0 -5
- {memoryos-0.0.1.dist-info → memoryos-0.1.13.dist-info}/LICENSE +0 -0
- {memoryos-0.0.1.dist-info → memoryos-0.1.13.dist-info}/WHEEL +0 -0
memos/mem_scheduler/modules/monitor.py

```diff
@@ -0,0 +1,82 @@
+import json
+
+from typing import Any
+
+from memos.log import get_logger
+from memos.mem_cube.general import GeneralMemCube
+from memos.mem_scheduler.modules.base import BaseSchedulerModule
+from memos.mem_scheduler.utils import extract_json_dict
+from memos.memories.textual.tree import TreeTextMemory
+
+
+logger = get_logger(__name__)
+
+
+class SchedulerMonitor(BaseSchedulerModule):
+    def __init__(self, chat_llm, activation_mem_size=5):
+        super().__init__()
+        self.statistics = {}
+        self.intent_history: list[str] = []
+        self.activation_mem_size = activation_mem_size
+        self.activation_memory_freq_list = [
+            {"memory": None, "count": 0} for _ in range(self.activation_mem_size)
+        ]
+
+        self._chat_llm = chat_llm
+
+    def update_stats(self, mem_cube):
+        self.statistics["activation_mem_size"] = self.activation_mem_size
+        mem_cube_info = self.get_mem_cube_info(mem_cube)
+        self.statistics.update(mem_cube_info)
+
+    def get_mem_cube_info(self, mem_cube: GeneralMemCube):
+        mem_cube_info = {}
+
+        text_mem = mem_cube.text_mem
+        if isinstance(text_mem, TreeTextMemory):
+            memory_size_dict = text_mem.memory_manager.memory_size
+            mem_cube_info["text_mem"] = memory_size_dict
+        else:
+            logger.error("Not Implemented")
+
+        return mem_cube_info
+
+    def detect_intent(
+        self,
+        q_list: list[str],
+        text_working_memory: list[str],
+        prompt_name="intent_recognizing",
+    ) -> dict[str, Any]:
+        """
+        Detect the intent of the user input.
+        """
+        prompt = self.build_prompt(
+            template_name=prompt_name,
+            q_list=q_list,
+            working_memory_list=text_working_memory,
+        )
+        response = self._chat_llm.generate([{"role": "user", "content": prompt}])
+        response = extract_json_dict(response)
+        return response
+
+    def update_freq(
+        self,
+        answer: str,
+        activation_memory_freq_list: list[dict],
+        prompt_name="freq_detecting",
+    ) -> list[dict]:
+        """
+        Use LLM to detect which memories in activation_memory_freq_list appear in the answer,
+        increment their count by 1, and return the updated list.
+        """
+        prompt = self.build_prompt(
+            template_name=prompt_name,
+            answer=answer,
+            activation_memory_freq_list=activation_memory_freq_list,
+        )
+        response = self._chat_llm.generate([{"role": "user", "content": prompt}])
+        try:
+            result = json.loads(response)
+        except Exception:
+            result = activation_memory_freq_list
+        return result
```
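A minimal usage sketch (not part of the diff above) showing how `SchedulerMonitor.detect_intent` flows through `build_prompt`, the chat LLM, and `extract_json_dict`. The stub LLM, its JSON keys, and the sample inputs are assumptions for illustration; the sketch also assumes `BaseSchedulerModule.build_prompt` can render the `intent_recognizing` template without extra setup.

```python
from memos.mem_scheduler.modules.monitor import SchedulerMonitor


class StubLLM:
    """Hypothetical stand-in for a configured chat LLM (e.g. the Ollama or OpenAI backends)."""

    def generate(self, messages):
        # Pretend the model recognized a retrieval intent and answered with JSON.
        return '{"trigger_retrieval": true, "missing_evidence": ["travel plans"]}'


monitor = SchedulerMonitor(chat_llm=StubLLM(), activation_mem_size=5)
intent = monitor.detect_intent(
    q_list=["What did I say about my travel plans?"],
    text_working_memory=["User mentioned a trip to Kyoto in June."],
)
print(intent)  # dict parsed by extract_json_dict from the LLM response
```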
memos/mem_scheduler/modules/redis_service.py

```diff
@@ -0,0 +1,146 @@
+import asyncio
+import threading
+
+from collections.abc import Callable
+
+import redis
+
+from redis import Redis
+
+from memos.log import get_logger
+from memos.mem_scheduler.modules.base import BaseSchedulerModule
+
+
+logger = get_logger(__name__)
+
+
+class RedisSchedulerModule(BaseSchedulerModule):
+    def __init__(self):
+        """
+        intent_detector: Object used for intent recognition (such as the above IntentDetector)
+        scheduler: The actual scheduling module/interface object
+        trigger_intents: The types of intents that need to be triggered (list)
+        """
+        super().__init__()
+
+        # settings for redis
+        self.redis_host: str = None
+        self.redis_port: int = None
+        self.redis_db: int = None
+        self._redis_conn = None
+        self.query_list_capacity = 1000
+
+        self._redis_listener_running = False
+        self._redis_listener_thread: threading.Thread | None = None
+        self._redis_listener_loop: asyncio.AbstractEventLoop | None = None
+
+    @property
+    def redis(self) -> Redis:
+        return self._redis_conn
+
+    @redis.setter
+    def redis(self, value: Redis) -> None:
+        self._redis_conn = value
+
+    def initialize_redis(
+        self, redis_host: str = "localhost", redis_port: int = 6379, redis_db: int = 0
+    ):
+        self.redis_host = redis_host
+        self.redis_port = redis_port
+        self.redis_db = redis_db
+
+        try:
+            logger.debug(f"Connecting to Redis at {redis_host}:{redis_port}/{redis_db}")
+            self._redis_conn = Redis(
+                host=self.redis_host, port=self.redis_port, db=self.redis_db, decode_responses=True
+            )
+            # test conn
+            if not self._redis_conn.ping():
+                logger.error("Redis connection failed")
+        except redis.ConnectionError as e:
+            self._redis_conn = None
+            logger.error(f"Redis connection error: {e}")
+        self._redis_conn.xtrim("user:queries:stream", self.query_list_capacity)
+        return self._redis_conn
+
+    async def add_message_stream(self, message: dict):
+        logger.debug(f"add_message_stream: {message}")
+        return self._redis_conn.xadd("user:queries:stream", message)
+
+    async def consume_message_stream(self, message: dict):
+        logger.debug(f"consume_message_stream: {message}")
+
+    def _run_listener_async(self, handler: Callable):
+        """Run the async listener in a separate thread"""
+        self._redis_listener_loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self._redis_listener_loop)
+
+        async def listener_wrapper():
+            try:
+                await self._listen_query_stream(handler)
+            except Exception as e:
+                logger.error(f"Listener thread error: {e}")
+            finally:
+                self._redis_listener_running = False
+
+        self._redis_listener_loop.run_until_complete(listener_wrapper())
+
+    async def _listen_query_stream(self, handler=None, last_id: str = "$", block_time: int = 2000):
+        """Internal async stream listener"""
+        self._redis_listener_running = True
+        while self._redis_listener_running:
+            try:
+                # Blocking read for new messages
+                messages = self.redis.xread(
+                    {"user:queries:stream": last_id}, count=1, block=block_time
+                )
+
+                if messages:
+                    for _, stream_messages in messages:
+                        for message_id, message_data in stream_messages:
+                            try:
+                                await handler(message_data)
+                                last_id = message_id
+                            except Exception as e:
+                                logger.error(f"Error processing message {message_id}: {e}")
+
+            except redis.ConnectionError as e:
+                logger.error(f"Redis connection error: {e}")
+                await asyncio.sleep(5)  # Wait before reconnecting
+                self._redis_conn = None  # Force reconnection
+            except Exception as e:
+                logger.error(f"Unexpected error: {e}")
+                await asyncio.sleep(1)
+
+    def start_listening(self, handler: Callable | None = None):
+        """Start the Redis stream listener in a background thread"""
+        if self._redis_listener_thread and self._redis_listener_thread.is_alive():
+            logger.warning("Listener is already running")
+            return
+
+        if handler is None:
+            handler = self.consume_message_stream
+
+        self._redis_listener_thread = threading.Thread(
+            target=self._run_listener_async,
+            args=(handler,),
+            daemon=True,
+            name="RedisListenerThread",
+        )
+        self._redis_listener_thread.start()
+        logger.info("Started Redis stream listener thread")
+
+    def close(self):
+        """Close Redis connection"""
+        if self._redis_conn is not None:
+            self._redis_conn.close()
+            self._redis_conn = None
+
+    def stop_listening(self):
+        """Stop the listener thread gracefully"""
+        self._redis_listener_running = False
+        if self._redis_listener_thread and self._redis_listener_thread.is_alive():
+            self._redis_listener_thread.join(timeout=5.0)
+            if self._redis_listener_thread.is_alive():
+                logger.warning("Listener thread did not stop gracefully")
+        logger.info("Redis stream listener stopped")
```
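A hedged usage sketch (not part of the diff) for `RedisSchedulerModule`, assuming a Redis server is reachable on localhost:6379; the handler and the message payload are illustrative.

```python
import asyncio

from memos.mem_scheduler.modules.redis_service import RedisSchedulerModule


async def handle(message_data: dict):
    # Illustrative handler; consume_message_stream is used when none is supplied.
    print("stream message:", message_data)


module = RedisSchedulerModule()
module.initialize_redis(redis_host="localhost", redis_port=6379, redis_db=0)
module.start_listening(handler=handle)  # background thread reading "user:queries:stream"
asyncio.run(module.add_message_stream({"query": "hello"}))  # XADD onto the stream
module.stop_listening()
module.close()
```

Because `_listen_query_stream` starts reading from `last_id="$"`, only entries added after the listener's first blocking `xread` are delivered, so a message sent immediately after `start_listening` can race the listener thread.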
memos/mem_scheduler/modules/retriever.py

```diff
@@ -0,0 +1,41 @@
+from memos.log import get_logger
+from memos.mem_scheduler.modules.base import BaseSchedulerModule
+
+
+logger = get_logger(__name__)
+
+
+class SchedulerRetriever(BaseSchedulerModule):
+    def __init__(self, chat_llm, context_window_size=5):
+        """
+        monitor: Object used to acquire monitoring information
+        mem_cube: Object/interface for querying the underlying database
+        context_window_size: Size of the context window for conversation history
+        """
+        super().__init__()
+
+        self.monitors = {}
+        self.context_window_size = context_window_size
+
+        self._chat_llm = chat_llm
+        self._current_mem_cube = None
+
+    @property
+    def memory_texts(self) -> list[str]:
+        """The memory cube associated with this MemChat."""
+        return self._memory_text_list
+
+    @memory_texts.setter
+    def memory_texts(self, value: list[str]) -> None:
+        """The memory cube associated with this MemChat."""
+        self._memory_text_list = value
+
+    def fetch_context(self):
+        """
+        Extract the context window from the current conversation
+        conversation_history: a list (in chronological order)
+        """
+        return self._memory_text_list[-self.context_window_size :]
+
+    def retrieve(self, query: str, memory_texts: list[str], top_k: int = 5) -> list[str]:
+        return None
```
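A small sketch (not part of the diff) of the parts of `SchedulerRetriever` that already work. Note that `__init__` never initializes `_memory_text_list`, so `memory_texts` must be assigned before `fetch_context` is called, and `retrieve()` is still a stub returning `None`; the values below are illustrative and assume `BaseSchedulerModule()` needs no extra arguments.

```python
from memos.mem_scheduler.modules.retriever import SchedulerRetriever

retriever = SchedulerRetriever(chat_llm=None, context_window_size=2)
retriever.memory_texts = ["turn 1", "turn 2", "turn 3"]

print(retriever.fetch_context())  # ['turn 2', 'turn 3'] -- the last context_window_size entries
print(retriever.retrieve("query", retriever.memory_texts))  # None (not implemented yet)
```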
memos/mem_scheduler/modules/schemas.py

```diff
@@ -0,0 +1,146 @@
+from datetime import datetime
+from pathlib import Path
+from typing import ClassVar, TypeVar
+from uuid import uuid4
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypedDict
+
+from memos.mem_cube.general import GeneralMemCube
+
+
+FILE_PATH = Path(__file__).absolute()
+BASE_DIR = FILE_PATH.parent.parent.parent.parent.parent
+
+QUERY_LABEL = "query"
+ANSWER_LABEL = "answer"
+
+TreeTextMemory_SEARCH_METHOD = "tree_text_memory_search"
+TextMemory_SEARCH_METHOD = "text_memory_search"
+DEFAULT_ACTIVATION_MEM_SIZE = 5
+DEFAULT_ACT_MEM_DUMP_PATH = f"{BASE_DIR}/outputs/mem_scheduler/mem_cube_scheduler_test.kv_cache"
+DEFAULT_THREAD__POOL_MAX_WORKERS = 5
+DEFAULT_CONSUME_INTERVAL_SECONDS = 3
+NOT_INITIALIZED = -1
+BaseModelType = TypeVar("T", bound="BaseModel")
+
+
+class DictConversionMixin:
+    def to_dict(self) -> dict:
+        """Convert the instance to a dictionary."""
+        return {
+            **self.dict(),
+            "timestamp": self.timestamp.isoformat() if hasattr(self, "timestamp") else None,
+        }
+
+    @classmethod
+    def from_dict(cls: type[BaseModelType], data: dict) -> BaseModelType:
+        """Create an instance from a dictionary."""
+        if "timestamp" in data:
+            data["timestamp"] = datetime.fromisoformat(data["timestamp"])
+        return cls(**data)
+
+    class Config:
+        json_encoders: ClassVar[dict[type, object]] = {datetime: lambda v: v.isoformat()}
+
+
+class ScheduleMessageItem(BaseModel, DictConversionMixin):
+    item_id: str = Field(description="uuid", default_factory=lambda: str(uuid4()))
+    user_id: str = Field(..., description="user id")
+    mem_cube_id: str = Field(..., description="memcube id")
+    label: str = Field(..., description="Label of the schedule message")
+    mem_cube: GeneralMemCube | str = Field(..., description="memcube for schedule")
+    content: str = Field(..., description="Content of the schedule message")
+    timestamp: datetime = Field(
+        default_factory=datetime.now, description="submit time for schedule_messages"
+    )
+
+    class Config:
+        arbitrary_types_allowed = True
+        json_encoders: ClassVar[dict[type, object]] = {
+            datetime: lambda v: v.isoformat(),
+            GeneralMemCube: lambda v: f"<GeneralMemCube:{id(v)}>",
+        }
+
+    def to_dict(self) -> dict:
+        """Convert model to dictionary suitable for Redis Stream"""
+        return {
+            "item_id": self.item_id,
+            "user_id": self.user_id,
+            "cube_id": self.mem_cube_id,
+            "message_id": self.message_id,
+            "label": self.label,
+            "cube": "Not Applicable",  # Custom cube serialization
+            "content": self.content,
+            "timestamp": self.timestamp.isoformat(),
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict) -> "ScheduleMessageItem":
+        """Create model from Redis Stream dictionary"""
+        return cls(
+            item_id=data.get("item_id", str(uuid4())),
+            user_id=data["user_id"],
+            cube_id=data["cube_id"],
+            message_id=data.get("message_id", str(uuid4())),
+            label=data["label"],
+            cube="Not Applicable",  # Custom cube deserialization
+            content=data["content"],
+            timestamp=datetime.fromisoformat(data["timestamp"]),
+        )
+
+
+class MemorySizes(TypedDict):
+    long_term_memory_size: int
+    user_memory_size: int
+    working_memory_size: int
+    transformed_act_memory_size: int
+
+
+class MemoryCapacities(TypedDict):
+    long_term_memory_capacity: int
+    user_memory_capacity: int
+    working_memory_capacity: int
+    transformed_act_memory_capacity: int
+
+
+DEFAULT_MEMORY_SIZES = {
+    "long_term_memory_size": NOT_INITIALIZED,
+    "user_memory_size": NOT_INITIALIZED,
+    "working_memory_size": NOT_INITIALIZED,
+    "transformed_act_memory_size": NOT_INITIALIZED,
+    "parameter_memory_size": NOT_INITIALIZED,
+}
+
+DEFAULT_MEMORY_CAPACITIES = {
+    "long_term_memory_capacity": 10000,
+    "user_memory_capacity": 10000,
+    "working_memory_capacity": 20,
+    "transformed_act_memory_capacity": NOT_INITIALIZED,
+    "parameter_memory_capacity": NOT_INITIALIZED,
+}
+
+
+class ScheduleLogForWebItem(BaseModel, DictConversionMixin):
+    item_id: str = Field(
+        description="Unique identifier for the log entry", default_factory=lambda: str(uuid4())
+    )
+    user_id: str = Field(..., description="Identifier for the user associated with the log")
+    mem_cube_id: str = Field(
+        ..., description="Identifier for the memcube associated with this log entry"
+    )
+    label: str = Field(..., description="Label categorizing the type of log")
+    log_title: str = Field(..., description="Title or brief summary of the log content")
+    log_content: str = Field(..., description="Detailed content of the log entry")
+    current_memory_sizes: MemorySizes = Field(
+        default_factory=lambda: dict(DEFAULT_MEMORY_SIZES),
+        description="Current utilization of memory partitions",
+    )
+    memory_capacities: MemoryCapacities = Field(
+        default_factory=lambda: dict(DEFAULT_MEMORY_CAPACITIES),
+        description="Maximum capacities of memory partitions",
+    )
+    timestamp: datetime = Field(
+        default_factory=datetime.now,
+        description="Timestamp indicating when the log entry was created",
+    )
```
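A sketch (not part of the diff) of building a `ScheduleLogForWebItem` and serializing it through `DictConversionMixin`; it assumes the Pydantic v1-style `.dict()` call used by the mixin is available, and the field values are illustrative. Note that `ScheduleMessageItem.to_dict` references `self.message_id` and `from_dict` passes `cube_id`/`cube`/`message_id`, none of which are declared fields, so that pair does not round-trip as written.

```python
from memos.mem_scheduler.modules.schemas import QUERY_LABEL, ScheduleLogForWebItem

log_item = ScheduleLogForWebItem(
    user_id="user-1",
    mem_cube_id="cube-1",
    label=QUERY_LABEL,
    log_title="Working memory updated",
    log_content="Promoted two candidate memories into working memory.",
)

payload = log_item.to_dict()  # mixin output: model fields plus an ISO-formatted "timestamp"
print(payload["current_memory_sizes"])  # DEFAULT_MEMORY_SIZES, i.e. all NOT_INITIALIZED (-1)
```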
memos/mem_scheduler/scheduler_factory.py

```diff
@@ -0,0 +1,21 @@
+from typing import Any, ClassVar
+
+from memos.configs.mem_scheduler import SchedulerConfigFactory
+from memos.mem_scheduler.base_scheduler import BaseScheduler
+from memos.mem_scheduler.general_scheduler import GeneralScheduler
+
+
+class SchedulerFactory(BaseScheduler):
+    """Factory class for creating scheduler instances."""
+
+    backend_to_class: ClassVar[dict[str, Any]] = {
+        "general_scheduler": GeneralScheduler,
+    }
+
+    @classmethod
+    def from_config(cls, config_factory: SchedulerConfigFactory) -> GeneralScheduler:
+        backend = config_factory.backend
+        if backend not in cls.backend_to_class:
+            raise ValueError(f"Invalid backend: {backend}")
+        mem_scheduler_class = cls.backend_to_class[backend]
+        return mem_scheduler_class(config_factory.config)
```
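A sketch (not part of the diff) of the factory path. The exact shape of `SchedulerConfigFactory` lives in memos/configs/mem_scheduler.py, which is not shown here, so the constructor arguments below (`backend` plus a nested `config`) are assumptions based only on how `from_config` reads `config_factory.backend` and `config_factory.config`.

```python
from memos.configs.mem_scheduler import SchedulerConfigFactory
from memos.mem_scheduler.scheduler_factory import SchedulerFactory

# Assumed fields: from_config only needs .backend and .config on the factory object.
config_factory = SchedulerConfigFactory(backend="general_scheduler", config={})
scheduler = SchedulerFactory.from_config(config_factory)  # -> GeneralScheduler instance

# Any backend missing from backend_to_class raises ValueError("Invalid backend: ...").
```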
memos/mem_scheduler/utils.py

```diff
@@ -0,0 +1,26 @@
+import json
+
+from pathlib import Path
+
+import yaml
+
+
+def extract_json_dict(text: str):
+    text = text.strip()
+    patterns_to_remove = ["json'''", "latex'''", "'''"]
+    for pattern in patterns_to_remove:
+        text = text.replace(pattern, "")
+    res = json.loads(text)
+    return res
+
+
+def parse_yaml(yaml_file):
+    yaml_path = Path(yaml_file)
+    yaml_path = Path(yaml_file)
+    if not yaml_path.is_file():
+        raise FileNotFoundError(f"No such file: {yaml_file}")
+
+    with yaml_path.open("r", encoding="utf-8") as fr:
+        data = yaml.safe_load(fr)
+
+    return data
```
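A sketch (not part of the diff) of the two helpers. Note that `extract_json_dict` only strips the quote-based wrappers listed in `patterns_to_remove`, so a Markdown backtick-fenced LLM response would still fail `json.loads`; the YAML path below is illustrative.

```python
from memos.mem_scheduler.utils import extract_json_dict, parse_yaml

raw = "json'''{\"trigger_retrieval\": true}'''"
print(extract_json_dict(raw))  # {'trigger_retrieval': True} -- quote-style wrappers removed

# Responses wrapped in Markdown backtick fences are NOT handled and raise json.JSONDecodeError.

config = parse_yaml("configs/mem_scheduler.yaml")  # illustrative path; raises FileNotFoundError if missing
```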