MemoryOS 0.0.1__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of MemoryOS might be problematic; see the registry advisory for more details.

Files changed (119)
  1. memoryos-0.1.12.dist-info/METADATA +257 -0
  2. memoryos-0.1.12.dist-info/RECORD +117 -0
  3. memos/__init__.py +20 -1
  4. memos/api/start_api.py +420 -0
  5. memos/chunkers/__init__.py +4 -0
  6. memos/chunkers/base.py +24 -0
  7. memos/chunkers/factory.py +22 -0
  8. memos/chunkers/sentence_chunker.py +35 -0
  9. memos/configs/__init__.py +0 -0
  10. memos/configs/base.py +82 -0
  11. memos/configs/chunker.py +45 -0
  12. memos/configs/embedder.py +53 -0
  13. memos/configs/graph_db.py +45 -0
  14. memos/configs/llm.py +71 -0
  15. memos/configs/mem_chat.py +81 -0
  16. memos/configs/mem_cube.py +89 -0
  17. memos/configs/mem_os.py +70 -0
  18. memos/configs/mem_reader.py +53 -0
  19. memos/configs/mem_scheduler.py +78 -0
  20. memos/configs/memory.py +190 -0
  21. memos/configs/parser.py +38 -0
  22. memos/configs/utils.py +8 -0
  23. memos/configs/vec_db.py +64 -0
  24. memos/deprecation.py +262 -0
  25. memos/embedders/__init__.py +0 -0
  26. memos/embedders/base.py +15 -0
  27. memos/embedders/factory.py +23 -0
  28. memos/embedders/ollama.py +74 -0
  29. memos/embedders/sentence_transformer.py +40 -0
  30. memos/exceptions.py +30 -0
  31. memos/graph_dbs/__init__.py +0 -0
  32. memos/graph_dbs/base.py +215 -0
  33. memos/graph_dbs/factory.py +21 -0
  34. memos/graph_dbs/neo4j.py +827 -0
  35. memos/hello_world.py +97 -0
  36. memos/llms/__init__.py +0 -0
  37. memos/llms/base.py +16 -0
  38. memos/llms/factory.py +25 -0
  39. memos/llms/hf.py +231 -0
  40. memos/llms/ollama.py +82 -0
  41. memos/llms/openai.py +34 -0
  42. memos/llms/utils.py +14 -0
  43. memos/log.py +78 -0
  44. memos/mem_chat/__init__.py +0 -0
  45. memos/mem_chat/base.py +30 -0
  46. memos/mem_chat/factory.py +21 -0
  47. memos/mem_chat/simple.py +200 -0
  48. memos/mem_cube/__init__.py +0 -0
  49. memos/mem_cube/base.py +29 -0
  50. memos/mem_cube/general.py +146 -0
  51. memos/mem_cube/utils.py +24 -0
  52. memos/mem_os/client.py +5 -0
  53. memos/mem_os/core.py +819 -0
  54. memos/mem_os/main.py +12 -0
  55. memos/mem_os/product.py +89 -0
  56. memos/mem_reader/__init__.py +0 -0
  57. memos/mem_reader/base.py +27 -0
  58. memos/mem_reader/factory.py +21 -0
  59. memos/mem_reader/memory.py +298 -0
  60. memos/mem_reader/simple_struct.py +241 -0
  61. memos/mem_scheduler/__init__.py +0 -0
  62. memos/mem_scheduler/base_scheduler.py +164 -0
  63. memos/mem_scheduler/general_scheduler.py +305 -0
  64. memos/mem_scheduler/modules/__init__.py +0 -0
  65. memos/mem_scheduler/modules/base.py +74 -0
  66. memos/mem_scheduler/modules/dispatcher.py +103 -0
  67. memos/mem_scheduler/modules/monitor.py +82 -0
  68. memos/mem_scheduler/modules/redis_service.py +146 -0
  69. memos/mem_scheduler/modules/retriever.py +41 -0
  70. memos/mem_scheduler/modules/schemas.py +146 -0
  71. memos/mem_scheduler/scheduler_factory.py +21 -0
  72. memos/mem_scheduler/utils.py +26 -0
  73. memos/mem_user/user_manager.py +478 -0
  74. memos/memories/__init__.py +0 -0
  75. memos/memories/activation/__init__.py +0 -0
  76. memos/memories/activation/base.py +42 -0
  77. memos/memories/activation/item.py +25 -0
  78. memos/memories/activation/kv.py +232 -0
  79. memos/memories/base.py +19 -0
  80. memos/memories/factory.py +34 -0
  81. memos/memories/parametric/__init__.py +0 -0
  82. memos/memories/parametric/base.py +19 -0
  83. memos/memories/parametric/item.py +11 -0
  84. memos/memories/parametric/lora.py +41 -0
  85. memos/memories/textual/__init__.py +0 -0
  86. memos/memories/textual/base.py +89 -0
  87. memos/memories/textual/general.py +286 -0
  88. memos/memories/textual/item.py +167 -0
  89. memos/memories/textual/naive.py +185 -0
  90. memos/memories/textual/tree.py +289 -0
  91. memos/memories/textual/tree_text_memory/__init__.py +0 -0
  92. memos/memories/textual/tree_text_memory/organize/__init__.py +0 -0
  93. memos/memories/textual/tree_text_memory/organize/manager.py +305 -0
  94. memos/memories/textual/tree_text_memory/retrieve/__init__.py +0 -0
  95. memos/memories/textual/tree_text_memory/retrieve/reasoner.py +64 -0
  96. memos/memories/textual/tree_text_memory/retrieve/recall.py +158 -0
  97. memos/memories/textual/tree_text_memory/retrieve/reranker.py +111 -0
  98. memos/memories/textual/tree_text_memory/retrieve/retrieval_mid_structs.py +13 -0
  99. memos/memories/textual/tree_text_memory/retrieve/searcher.py +166 -0
  100. memos/memories/textual/tree_text_memory/retrieve/task_goal_parser.py +68 -0
  101. memos/memories/textual/tree_text_memory/retrieve/utils.py +48 -0
  102. memos/parsers/__init__.py +0 -0
  103. memos/parsers/base.py +15 -0
  104. memos/parsers/factory.py +19 -0
  105. memos/parsers/markitdown.py +22 -0
  106. memos/settings.py +8 -0
  107. memos/templates/__init__.py +0 -0
  108. memos/templates/mem_reader_prompts.py +98 -0
  109. memos/templates/mem_scheduler_prompts.py +65 -0
  110. memos/types.py +55 -0
  111. memos/vec_dbs/__init__.py +0 -0
  112. memos/vec_dbs/base.py +105 -0
  113. memos/vec_dbs/factory.py +21 -0
  114. memos/vec_dbs/item.py +43 -0
  115. memos/vec_dbs/qdrant.py +292 -0
  116. memoryos-0.0.1.dist-info/METADATA +0 -53
  117. memoryos-0.0.1.dist-info/RECORD +0 -5
  118. {memoryos-0.0.1.dist-info → memoryos-0.1.12.dist-info}/LICENSE +0 -0
  119. {memoryos-0.0.1.dist-info → memoryos-0.1.12.dist-info}/WHEEL +0 -0
@@ -0,0 +1,146 @@
1
+ import asyncio
2
+ import threading
3
+
4
+ from collections.abc import Callable
5
+
6
+ import redis
7
+
8
+ from redis import Redis
9
+
10
+ from memos.log import get_logger
11
+ from memos.mem_scheduler.modules.base import BaseSchedulerModule
12
+
13
+
14
+ logger = get_logger(__name__)
15
+
16
+
17
class RedisSchedulerModule(BaseSchedulerModule):
    """Scheduler module that exchanges messages through a Redis stream.

    Producers append entries to the "user:queries:stream" stream via
    ``add_message_stream``; ``start_listening`` spawns a daemon thread running
    a private asyncio event loop that reads new entries and forwards each one
    to a handler coroutine.
    """

    def __init__(self):
        """Initialize connection settings and listener bookkeeping.

        The Redis connection is not opened here; call ``initialize_redis()``
        (or assign the ``redis`` property) before using the stream methods.
        """
        super().__init__()

        # settings for redis (populated by initialize_redis)
        self.redis_host: str | None = None
        self.redis_port: int | None = None
        self.redis_db: int | None = None
        self._redis_conn: Redis | None = None
        # Upper bound kept on the stream length via XTRIM.
        self.query_list_capacity = 1000

        # Background listener state.
        self._redis_listener_running = False
        self._redis_listener_thread: threading.Thread | None = None
        self._redis_listener_loop: asyncio.AbstractEventLoop | None = None

    @property
    def redis(self) -> Redis:
        """Current Redis connection (None when not connected)."""
        return self._redis_conn

    @redis.setter
    def redis(self, value: Redis) -> None:
        self._redis_conn = value

    def initialize_redis(
        self, redis_host: str = "localhost", redis_port: int = 6379, redis_db: int = 0
    ):
        """Open a Redis connection and trim the query stream to capacity.

        Returns:
            The live connection, or None when the connection attempt failed.
        """
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_db = redis_db

        try:
            logger.debug(f"Connecting to Redis at {redis_host}:{redis_port}/{redis_db}")
            self._redis_conn = Redis(
                host=self.redis_host, port=self.redis_port, db=self.redis_db, decode_responses=True
            )
            # test conn
            if not self._redis_conn.ping():
                logger.error("Redis connection failed")
        except redis.ConnectionError as e:
            self._redis_conn = None
            logger.error(f"Redis connection error: {e}")

        # Bugfix: only trim when connected. Previously XTRIM ran
        # unconditionally and raised AttributeError on `None` right after a
        # failed connection attempt.
        if self._redis_conn is not None:
            self._redis_conn.xtrim("user:queries:stream", self.query_list_capacity)
        return self._redis_conn

    async def add_message_stream(self, message: dict):
        """Append ``message`` to the query stream; returns the new entry id."""
        logger.debug(f"add_message_stream: {message}")
        return self._redis_conn.xadd("user:queries:stream", message)

    async def consume_message_stream(self, message: dict):
        """Default message handler: only logs the received message."""
        logger.debug(f"consume_message_stream: {message}")

    def _run_listener_async(self, handler: Callable):
        """Run the async listener in a separate thread with its own event loop."""
        self._redis_listener_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._redis_listener_loop)

        async def listener_wrapper():
            try:
                await self._listen_query_stream(handler)
            except Exception as e:
                logger.error(f"Listener thread error: {e}")
            finally:
                self._redis_listener_running = False

        self._redis_listener_loop.run_until_complete(listener_wrapper())

    async def _listen_query_stream(self, handler=None, last_id: str = "$", block_time: int = 2000):
        """Internal async stream listener.

        Args:
            handler: Coroutine invoked once per message; falls back to
                ``consume_message_stream`` when None.
            last_id: Stream id to read after ("$" means only new entries).
            block_time: XREAD blocking timeout in milliseconds.
        """
        # Robustness: without this guard a None handler raised TypeError on
        # every message, looping through the generic except branch forever.
        if handler is None:
            handler = self.consume_message_stream

        self._redis_listener_running = True
        while self._redis_listener_running:
            try:
                # Blocking read for new messages
                messages = self.redis.xread(
                    {"user:queries:stream": last_id}, count=1, block=block_time
                )

                if messages:
                    for _, stream_messages in messages:
                        for message_id, message_data in stream_messages:
                            try:
                                await handler(message_data)
                                last_id = message_id
                            except Exception as e:
                                logger.error(f"Error processing message {message_id}: {e}")

            except redis.ConnectionError as e:
                logger.error(f"Redis connection error: {e}")
                await asyncio.sleep(5)  # Wait before reconnecting
                # Bugfix: actually attempt a reconnect. The original set
                # self._redis_conn = None here, which made every subsequent
                # xread fail with AttributeError instead of reconnecting.
                if self.redis_host is not None:
                    self.initialize_redis(self.redis_host, self.redis_port, self.redis_db)
            except Exception as e:
                logger.error(f"Unexpected error: {e}")
                await asyncio.sleep(1)

    def start_listening(self, handler: Callable | None = None):
        """Start the Redis stream listener in a background daemon thread."""
        if self._redis_listener_thread and self._redis_listener_thread.is_alive():
            logger.warning("Listener is already running")
            return

        if handler is None:
            handler = self.consume_message_stream

        self._redis_listener_thread = threading.Thread(
            target=self._run_listener_async,
            args=(handler,),
            daemon=True,
            name="RedisListenerThread",
        )
        self._redis_listener_thread.start()
        logger.info("Started Redis stream listener thread")

    def close(self):
        """Close the Redis connection (listener thread is left untouched)."""
        if self._redis_conn is not None:
            self._redis_conn.close()
            self._redis_conn = None

    def stop_listening(self):
        """Stop the listener thread gracefully (waits up to 5 seconds)."""
        self._redis_listener_running = False
        if self._redis_listener_thread and self._redis_listener_thread.is_alive():
            self._redis_listener_thread.join(timeout=5.0)
            if self._redis_listener_thread.is_alive():
                logger.warning("Listener thread did not stop gracefully")
        logger.info("Redis stream listener stopped")
@@ -0,0 +1,41 @@
1
+ from memos.log import get_logger
2
+ from memos.mem_scheduler.modules.base import BaseSchedulerModule
3
+
4
+
5
+ logger = get_logger(__name__)
6
+
7
+
8
class SchedulerRetriever(BaseSchedulerModule):
    """Holds a list of memory texts and exposes a context window over them."""

    def __init__(self, chat_llm, context_window_size=5):
        """
        Args:
            chat_llm: LLM client used for retrieval (stored; not used yet).
            context_window_size: Size of the context window for conversation history.
        """
        super().__init__()

        self.monitors = {}
        self.context_window_size = context_window_size

        self._chat_llm = chat_llm
        self._current_mem_cube = None
        # Bugfix: the backing list for the memory_texts property was never
        # initialized, so reading the property (or calling fetch_context)
        # before the first assignment raised AttributeError.
        self._memory_text_list: list[str] = []

    @property
    def memory_texts(self) -> list[str]:
        """Memory texts currently held by this retriever."""
        return self._memory_text_list

    @memory_texts.setter
    def memory_texts(self, value: list[str]) -> None:
        self._memory_text_list = value

    def fetch_context(self):
        """
        Return the most recent ``context_window_size`` memory texts
        (the list is kept in chronological order).
        """
        return self._memory_text_list[-self.context_window_size :]

    def retrieve(self, query: str, memory_texts: list[str], top_k: int = 5) -> list[str]:
        # Placeholder: retrieval is not implemented yet; callers currently
        # receive None regardless of the arguments.
        return None
@@ -0,0 +1,146 @@
1
+ from datetime import datetime
2
+ from pathlib import Path
3
+ from typing import ClassVar, TypeVar
4
+ from uuid import uuid4
5
+
6
+ from pydantic import BaseModel, Field
7
+ from typing_extensions import TypedDict
8
+
9
+ from memos.mem_cube.general import GeneralMemCube
10
+
11
+
12
# Absolute path of this module and the repository root (five levels up).
FILE_PATH = Path(__file__).absolute()
BASE_DIR = FILE_PATH.parent.parent.parent.parent.parent

# Labels tagging the two kinds of scheduler messages.
QUERY_LABEL = "query"
ANSWER_LABEL = "answer"

# Identifiers for the two text-memory search backends.
TreeTextMemory_SEARCH_METHOD = "tree_text_memory_search"
TextMemory_SEARCH_METHOD = "text_memory_search"
DEFAULT_ACTIVATION_MEM_SIZE = 5
DEFAULT_ACT_MEM_DUMP_PATH = f"{BASE_DIR}/outputs/mem_scheduler/mem_cube_scheduler_test.kv_cache"
# NOTE(review): the double underscore in THREAD__POOL is a typo, but the name
# is kept unchanged for backward compatibility with existing importers.
DEFAULT_THREAD__POOL_MAX_WORKERS = 5
DEFAULT_CONSUME_INTERVAL_SECONDS = 3
# Sentinel meaning "size/capacity not measured or configured yet".
NOT_INITIALIZED = -1
# Bugfix: a TypeVar's runtime name must match the variable it is assigned to;
# it was previously declared as TypeVar("T").
BaseModelType = TypeVar("BaseModelType", bound="BaseModel")
26
+
27
+
28
class DictConversionMixin:
    """Adds dict round-tripping (with ISO-8601 timestamps) to pydantic models."""

    def to_dict(self) -> dict:
        """Convert the instance to a dictionary.

        The ``timestamp`` attribute, when present, is serialized to an
        ISO-8601 string; otherwise the key is emitted with value None.
        """
        return {
            **self.dict(),
            "timestamp": self.timestamp.isoformat() if hasattr(self, "timestamp") else None,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "BaseModelType":
        """Create an instance from a dictionary produced by ``to_dict``.

        Bugfixes versus the original implementation:
        - the caller's dict is no longer mutated in place;
        - a None (or already-parsed) timestamp no longer crashes
          ``datetime.fromisoformat``.
        """
        payload = dict(data)
        ts = payload.get("timestamp")
        if isinstance(ts, str):
            payload["timestamp"] = datetime.fromisoformat(ts)
        return cls(**payload)

    class Config:
        json_encoders: ClassVar[dict[type, object]] = {datetime: lambda v: v.isoformat()}
45
+
46
+
47
class ScheduleMessageItem(BaseModel, DictConversionMixin):
    """A single message submitted to the scheduler, serializable to a Redis stream."""

    item_id: str = Field(description="uuid", default_factory=lambda: str(uuid4()))
    user_id: str = Field(..., description="user id")
    mem_cube_id: str = Field(..., description="memcube id")
    label: str = Field(..., description="Label of the schedule message")
    mem_cube: GeneralMemCube | str = Field(..., description="memcube for schedule")
    content: str = Field(..., description="Content of the schedule message")
    timestamp: datetime = Field(
        default_factory=datetime.now, description="submit time for schedule_messages"
    )

    class Config:
        arbitrary_types_allowed = True
        json_encoders: ClassVar[dict[type, object]] = {
            datetime: lambda v: v.isoformat(),
            GeneralMemCube: lambda v: f"<GeneralMemCube:{id(v)}>",
        }

    def to_dict(self) -> dict:
        """Convert model to dictionary suitable for Redis Stream.

        Bugfix: the original also emitted ``self.message_id``, an attribute
        that does not exist on this model and raised AttributeError.
        """
        return {
            "item_id": self.item_id,
            "user_id": self.user_id,
            "cube_id": self.mem_cube_id,
            "label": self.label,
            "cube": "Not Applicable",  # Custom cube serialization
            "content": self.content,
            "timestamp": self.timestamp.isoformat(),
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ScheduleMessageItem":
        """Create model from Redis Stream dictionary.

        Bugfix: the original passed ``cube_id``/``cube`` keyword arguments,
        which do not match the ``mem_cube_id``/``mem_cube`` field names, so
        validation always failed on the missing required fields. Both the
        stream key names and the field names are now accepted.
        """
        return cls(
            item_id=data.get("item_id", str(uuid4())),
            user_id=data["user_id"],
            mem_cube_id=data.get("cube_id", data.get("mem_cube_id")),
            label=data["label"],
            mem_cube=data.get("cube", "Not Applicable"),  # Custom cube deserialization
            content=data["content"],
            timestamp=datetime.fromisoformat(data["timestamp"]),
        )
91
+
92
+
93
class MemorySizes(TypedDict):
    """Current entry counts for each memory partition.

    NOTE(review): DEFAULT_MEMORY_SIZES also carries a "parameter_memory_size"
    key that is not declared here — confirm whether it should be added.
    """

    long_term_memory_size: int
    user_memory_size: int
    working_memory_size: int
    transformed_act_memory_size: int
98
+
99
+
100
class MemoryCapacities(TypedDict):
    """Maximum entry counts allowed for each memory partition.

    NOTE(review): DEFAULT_MEMORY_CAPACITIES also carries a
    "parameter_memory_capacity" key that is not declared here — confirm
    whether it should be added.
    """

    long_term_memory_capacity: int
    user_memory_capacity: int
    working_memory_capacity: int
    transformed_act_memory_capacity: int
105
+
106
+
107
# Initial partition sizes: everything starts unmeasured (NOT_INITIALIZED, -1).
# NOTE(review): contains "parameter_memory_size", which the MemorySizes
# TypedDict does not declare — confirm which definition is authoritative.
DEFAULT_MEMORY_SIZES = {
    "long_term_memory_size": NOT_INITIALIZED,
    "user_memory_size": NOT_INITIALIZED,
    "working_memory_size": NOT_INITIALIZED,
    "transformed_act_memory_size": NOT_INITIALIZED,
    "parameter_memory_size": NOT_INITIALIZED,
}

# Default capacity limits per partition; NOT_INITIALIZED (-1) marks
# capacities with no configured bound yet.
DEFAULT_MEMORY_CAPACITIES = {
    "long_term_memory_capacity": 10000,
    "user_memory_capacity": 10000,
    "working_memory_capacity": 20,
    "transformed_act_memory_capacity": NOT_INITIALIZED,
    "parameter_memory_capacity": NOT_INITIALIZED,
}
122
+
123
+
124
class ScheduleLogForWebItem(BaseModel, DictConversionMixin):
    """A scheduler log entry shaped for display in the web UI.

    Carries the log text plus a snapshot of current memory-partition sizes
    and their configured capacities at the time the entry was created.
    """

    item_id: str = Field(
        description="Unique identifier for the log entry", default_factory=lambda: str(uuid4())
    )
    user_id: str = Field(..., description="Identifier for the user associated with the log")
    mem_cube_id: str = Field(
        ..., description="Identifier for the memcube associated with this log entry"
    )
    label: str = Field(..., description="Label categorizing the type of log")
    log_title: str = Field(..., description="Title or brief summary of the log content")
    log_content: str = Field(..., description="Detailed content of the log entry")
    current_memory_sizes: MemorySizes = Field(
        # Copy the module-level default so instances never share one dict.
        default_factory=lambda: dict(DEFAULT_MEMORY_SIZES),
        description="Current utilization of memory partitions",
    )
    memory_capacities: MemoryCapacities = Field(
        default_factory=lambda: dict(DEFAULT_MEMORY_CAPACITIES),
        description="Maximum capacities of memory partitions",
    )
    timestamp: datetime = Field(
        default_factory=datetime.now,
        description="Timestamp indicating when the log entry was created",
    )
@@ -0,0 +1,21 @@
1
+ from typing import Any, ClassVar
2
+
3
+ from memos.configs.mem_scheduler import SchedulerConfigFactory
4
+ from memos.mem_scheduler.base_scheduler import BaseScheduler
5
+ from memos.mem_scheduler.general_scheduler import GeneralScheduler
6
+
7
+
8
class SchedulerFactory(BaseScheduler):
    """Factory class for creating scheduler instances."""

    # Registry mapping a backend name to its scheduler implementation.
    backend_to_class: ClassVar[dict[str, Any]] = {
        "general_scheduler": GeneralScheduler,
    }

    @classmethod
    def from_config(cls, config_factory: SchedulerConfigFactory) -> GeneralScheduler:
        """Build the scheduler registered under the factory config's backend."""
        chosen_backend = config_factory.backend
        if chosen_backend not in cls.backend_to_class:
            raise ValueError(f"Invalid backend: {chosen_backend}")
        scheduler_cls = cls.backend_to_class[chosen_backend]
        return scheduler_cls(config_factory.config)
@@ -0,0 +1,26 @@
1
+ import json
2
+
3
+ from pathlib import Path
4
+
5
+ import yaml
6
+
7
+
8
def extract_json_dict(text: str):
    """Parse a JSON value from (possibly fenced) LLM output.

    Strips standard markdown code fences (```json, ```latex, ```) as well as
    the malformed triple-apostrophe variants the original handled, then
    parses the remainder with ``json.loads``.

    Args:
        text: Raw model output expected to contain a JSON document.

    Returns:
        The parsed JSON value (typically a dict).

    Raises:
        json.JSONDecodeError: If the cleaned text is not valid JSON.
    """
    text = text.strip()
    # Longer patterns first so "```json" is removed before the bare "```".
    patterns_to_remove = ["```json", "```latex", "```", "json'''", "latex'''", "'''"]
    for pattern in patterns_to_remove:
        text = text.replace(pattern, "")
    res = json.loads(text.strip())
    return res
15
+
16
+
17
def parse_yaml(yaml_file):
    """Load and parse a YAML file.

    Args:
        yaml_file: Path (str or Path) to the YAML file.

    Returns:
        The parsed document (typically a dict).

    Raises:
        FileNotFoundError: If the path does not point to a regular file.
    """
    # Bugfix: the original assigned yaml_path twice on consecutive lines.
    yaml_path = Path(yaml_file)
    if not yaml_path.is_file():
        raise FileNotFoundError(f"No such file: {yaml_file}")

    with yaml_path.open("r", encoding="utf-8") as fr:
        data = yaml.safe_load(fr)

    return data