MemoryOS 0.2.0-py3-none-any.whl → 0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of MemoryOS might be problematic.

Files changed (114)
  1. {memoryos-0.2.0.dist-info → memoryos-0.2.2.dist-info}/METADATA +67 -26
  2. memoryos-0.2.2.dist-info/RECORD +169 -0
  3. memoryos-0.2.2.dist-info/entry_points.txt +3 -0
  4. memos/__init__.py +1 -1
  5. memos/api/config.py +562 -0
  6. memos/api/context/context.py +147 -0
  7. memos/api/context/dependencies.py +90 -0
  8. memos/api/exceptions.py +28 -0
  9. memos/api/mcp_serve.py +502 -0
  10. memos/api/product_api.py +35 -0
  11. memos/api/product_models.py +163 -0
  12. memos/api/routers/__init__.py +1 -0
  13. memos/api/routers/product_router.py +386 -0
  14. memos/chunkers/sentence_chunker.py +8 -2
  15. memos/cli.py +113 -0
  16. memos/configs/embedder.py +27 -0
  17. memos/configs/graph_db.py +132 -3
  18. memos/configs/internet_retriever.py +6 -0
  19. memos/configs/llm.py +47 -0
  20. memos/configs/mem_cube.py +1 -1
  21. memos/configs/mem_os.py +5 -0
  22. memos/configs/mem_reader.py +9 -0
  23. memos/configs/mem_scheduler.py +107 -7
  24. memos/configs/mem_user.py +58 -0
  25. memos/configs/memory.py +5 -4
  26. memos/dependency.py +52 -0
  27. memos/embedders/ark.py +92 -0
  28. memos/embedders/factory.py +4 -0
  29. memos/embedders/sentence_transformer.py +8 -2
  30. memos/embedders/universal_api.py +32 -0
  31. memos/graph_dbs/base.py +11 -3
  32. memos/graph_dbs/factory.py +4 -0
  33. memos/graph_dbs/nebular.py +1364 -0
  34. memos/graph_dbs/neo4j.py +333 -124
  35. memos/graph_dbs/neo4j_community.py +300 -0
  36. memos/llms/base.py +9 -0
  37. memos/llms/deepseek.py +54 -0
  38. memos/llms/factory.py +10 -1
  39. memos/llms/hf.py +170 -13
  40. memos/llms/hf_singleton.py +114 -0
  41. memos/llms/ollama.py +4 -0
  42. memos/llms/openai.py +67 -1
  43. memos/llms/qwen.py +63 -0
  44. memos/llms/vllm.py +153 -0
  45. memos/log.py +1 -1
  46. memos/mem_cube/general.py +77 -16
  47. memos/mem_cube/utils.py +109 -0
  48. memos/mem_os/core.py +251 -51
  49. memos/mem_os/main.py +94 -12
  50. memos/mem_os/product.py +1220 -43
  51. memos/mem_os/utils/default_config.py +352 -0
  52. memos/mem_os/utils/format_utils.py +1401 -0
  53. memos/mem_reader/simple_struct.py +18 -10
  54. memos/mem_scheduler/base_scheduler.py +441 -40
  55. memos/mem_scheduler/general_scheduler.py +249 -248
  56. memos/mem_scheduler/modules/base.py +14 -5
  57. memos/mem_scheduler/modules/dispatcher.py +67 -4
  58. memos/mem_scheduler/modules/misc.py +104 -0
  59. memos/mem_scheduler/modules/monitor.py +240 -50
  60. memos/mem_scheduler/modules/rabbitmq_service.py +319 -0
  61. memos/mem_scheduler/modules/redis_service.py +32 -22
  62. memos/mem_scheduler/modules/retriever.py +167 -23
  63. memos/mem_scheduler/modules/scheduler_logger.py +255 -0
  64. memos/mem_scheduler/mos_for_test_scheduler.py +140 -0
  65. memos/mem_scheduler/schemas/__init__.py +0 -0
  66. memos/mem_scheduler/schemas/general_schemas.py +43 -0
  67. memos/mem_scheduler/{modules/schemas.py → schemas/message_schemas.py} +63 -61
  68. memos/mem_scheduler/schemas/monitor_schemas.py +329 -0
  69. memos/mem_scheduler/utils/__init__.py +0 -0
  70. memos/mem_scheduler/utils/filter_utils.py +176 -0
  71. memos/mem_scheduler/utils/misc_utils.py +61 -0
  72. memos/mem_user/factory.py +94 -0
  73. memos/mem_user/mysql_persistent_user_manager.py +271 -0
  74. memos/mem_user/mysql_user_manager.py +500 -0
  75. memos/mem_user/persistent_factory.py +96 -0
  76. memos/mem_user/persistent_user_manager.py +260 -0
  77. memos/mem_user/user_manager.py +4 -4
  78. memos/memories/activation/item.py +29 -0
  79. memos/memories/activation/kv.py +10 -3
  80. memos/memories/activation/vllmkv.py +219 -0
  81. memos/memories/factory.py +2 -0
  82. memos/memories/textual/base.py +1 -1
  83. memos/memories/textual/general.py +43 -97
  84. memos/memories/textual/item.py +5 -33
  85. memos/memories/textual/tree.py +22 -12
  86. memos/memories/textual/tree_text_memory/organize/conflict.py +9 -5
  87. memos/memories/textual/tree_text_memory/organize/manager.py +26 -18
  88. memos/memories/textual/tree_text_memory/organize/redundancy.py +25 -44
  89. memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py +50 -48
  90. memos/memories/textual/tree_text_memory/organize/reorganizer.py +81 -56
  91. memos/memories/textual/tree_text_memory/retrieve/internet_retriever.py +6 -3
  92. memos/memories/textual/tree_text_memory/retrieve/internet_retriever_factory.py +2 -0
  93. memos/memories/textual/tree_text_memory/retrieve/recall.py +0 -1
  94. memos/memories/textual/tree_text_memory/retrieve/reranker.py +2 -2
  95. memos/memories/textual/tree_text_memory/retrieve/retrieval_mid_structs.py +2 -0
  96. memos/memories/textual/tree_text_memory/retrieve/searcher.py +52 -28
  97. memos/memories/textual/tree_text_memory/retrieve/task_goal_parser.py +42 -15
  98. memos/memories/textual/tree_text_memory/retrieve/utils.py +11 -7
  99. memos/memories/textual/tree_text_memory/retrieve/xinyusearch.py +62 -58
  100. memos/memos_tools/dinding_report_bot.py +422 -0
  101. memos/memos_tools/notification_service.py +44 -0
  102. memos/memos_tools/notification_utils.py +96 -0
  103. memos/parsers/markitdown.py +8 -2
  104. memos/settings.py +3 -1
  105. memos/templates/mem_reader_prompts.py +66 -23
  106. memos/templates/mem_scheduler_prompts.py +126 -43
  107. memos/templates/mos_prompts.py +87 -0
  108. memos/templates/tree_reorganize_prompts.py +85 -30
  109. memos/vec_dbs/base.py +12 -0
  110. memos/vec_dbs/qdrant.py +46 -20
  111. memoryos-0.2.0.dist-info/RECORD +0 -128
  112. memos/mem_scheduler/utils.py +0 -26
  113. {memoryos-0.2.0.dist-info → memoryos-0.2.2.dist-info}/LICENSE +0 -0
  114. {memoryos-0.2.0.dist-info → memoryos-0.2.2.dist-info}/WHEEL +0 -0
memos/configs/embedder.py CHANGED
@@ -18,6 +18,18 @@ class OllamaEmbedderConfig(BaseEmbedderConfig):
     api_base: str = Field(default="http://localhost:11434", description="Base URL for Ollama API")
 
 
+class ArkEmbedderConfig(BaseEmbedderConfig):
+    api_key: str = Field(..., description="Ark API key")
+    api_base: str = Field(
+        default="https://ark.cn-beijing.volces.com/api/v3/", description="Base URL for Ark API"
+    )
+    chunk_size: int = Field(default=1, description="Chunk size for Ark API")
+    multi_modal: bool = Field(
+        default=False,
+        description="Whether to use multi-modal embedding (text + image) with Ark",
+    )
+
+
 class SenTranEmbedderConfig(BaseEmbedderConfig):
     """Configuration class for Sentence Transformer embeddings."""
 
@@ -27,6 +39,19 @@ class SenTranEmbedderConfig(BaseEmbedderConfig):
     )
 
 
+class UniversalAPIEmbedderConfig(BaseEmbedderConfig):
+    """
+    Configuration class for universal API embedding providers, e.g.,
+    OpenAI, etc.
+    """
+
+    provider: str = Field(..., description="Provider name, e.g., 'openai'")
+    api_key: str = Field(..., description="API key for the embedding provider")
+    base_url: str | None = Field(
+        default=None, description="Optional base URL for custom or proxied endpoint"
+    )
+
+
 class EmbedderConfigFactory(BaseConfig):
     """Factory class for creating embedder configurations."""
 
@@ -36,6 +61,8 @@ class EmbedderConfigFactory(BaseConfig):
     backend_to_class: ClassVar[dict[str, Any]] = {
         "ollama": OllamaEmbedderConfig,
         "sentence_transformer": SenTranEmbedderConfig,
+        "ark": ArkEmbedderConfig,
+        "universal_api": UniversalAPIEmbedderConfig,
     }
 
     @field_validator("backend")
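For orientation, the two new backends plug into EmbedderConfigFactory the same way the existing ones do. The sketch below is illustrative only: it assumes the factory takes the usual backend/config pair used by the other factories in this release, the API keys are placeholders, and any required fields inherited from BaseEmbedderConfig (for example a model name) would still need to be supplied.

from memos.configs.embedder import EmbedderConfigFactory

# Illustrative sketch; "ARK_API_KEY" and "OPENAI_API_KEY" are placeholders.
ark_embedder_cfg = EmbedderConfigFactory(
    backend="ark",
    config={
        "api_key": "ARK_API_KEY",    # required by ArkEmbedderConfig
        "chunk_size": 4,             # defaults to 1
        "multi_modal": False,        # text-only embedding
    },
)

universal_embedder_cfg = EmbedderConfigFactory(
    backend="universal_api",
    config={
        "provider": "openai",        # provider name, e.g. 'openai'
        "api_key": "OPENAI_API_KEY",
        "base_url": None,            # optional custom or proxied endpoint
    },
)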
memos/configs/graph_db.py CHANGED
@@ -3,25 +3,152 @@ from typing import Any, ClassVar
 from pydantic import BaseModel, Field, field_validator, model_validator
 
 from memos.configs.base import BaseConfig
+from memos.configs.vec_db import VectorDBConfigFactory
 
 
 class BaseGraphDBConfig(BaseConfig):
     """Base class for all graph database configurations."""
 
-    uri: str
+    uri: str | list
     user: str
     password: str
 
 
 class Neo4jGraphDBConfig(BaseGraphDBConfig):
-    """Neo4j-specific configuration."""
+    """
+    Neo4j-specific configuration.
+
+    This config supports:
+    1) Physical isolation (multi-db) — each user gets a dedicated Neo4j database.
+    2) Logical isolation (single-db) — all users share one or more databases, but each node is tagged with `user_name`.
+
+    How to use:
+    - If `use_multi_db=True`, then `db_name` should usually be the same as `user_name`.
+      Each user gets a separate database for physical isolation.
+      Example: db_name = "alice", user_name = None or "alice".
+
+    - If `use_multi_db=False`, then `db_name` is your shared database (e.g., "neo4j" or "shared_db").
+      You must provide `user_name` to logically isolate each user's data.
+      All nodes and queries must respect this tag.
+
+    Example configs:
+    ---
+    # Physical isolation:
+    db_name = "alice"
+    use_multi_db = True
+    user_name = None
+
+    # Logical isolation:
+    db_name = "shared_db_student_group"
+    use_multi_db = False
+    user_name = "alice"
+    """
 
     db_name: str = Field(..., description="The name of the target Neo4j database")
     auto_create: bool = Field(
-        default=False, description="Whether to create the DB if it doesn't exist"
+        default=False,
+        description="If True, automatically create the target db_name in multi-db mode if it does not exist.",
+    )
+
+    use_multi_db: bool = Field(
+        default=True,
+        description=(
+            "If True: use Neo4j's multi-database feature for physical isolation; "
+            "each user typically gets a separate database. "
+            "If False: use a single shared database with logical isolation by user_name."
+        ),
+    )
+
+    user_name: str | None = Field(
+        default=None,
+        description=(
+            "Logical user or tenant ID for data isolation. "
+            "Required if use_multi_db is False. "
+            "All nodes must be tagged with this and all queries must filter by this."
+        ),
     )
+
     embedding_dimension: int = Field(default=768, description="Dimension of vector embedding")
 
+    @model_validator(mode="after")
+    def validate_config(self):
+        """Validate logical constraints to avoid misconfiguration."""
+        if not self.use_multi_db and not self.user_name:
+            raise ValueError(
+                "In single-database mode (use_multi_db=False), `user_name` must be provided for logical isolation."
+            )
+        return self
+
+
+class Neo4jCommunityGraphDBConfig(Neo4jGraphDBConfig):
+    """
+    Community edition config for Neo4j.
+
+    Notes:
+    - Must set `use_multi_db = False`
+    - Must provide `user_name` for logical isolation
+    - Embedding vector DB config is required
+    """
+
+    vec_config: VectorDBConfigFactory = Field(
+        ..., description="Vector DB config for embedding search"
+    )
+
+    @model_validator(mode="after")
+    def validate_community(self):
+        if self.use_multi_db:
+            raise ValueError("Neo4j Community Edition does not support use_multi_db=True.")
+        if not self.user_name:
+            raise ValueError("Neo4j Community config requires user_name for logical isolation.")
+        return self
+
+
+class NebulaGraphDBConfig(BaseGraphDBConfig):
+    """
+    NebulaGraph-specific configuration.
+
+    Key concepts:
+    - `space`: Equivalent to a database or namespace. All tag/edge/schema live within a space.
+    - `user_name`: Used for logical tenant isolation if needed.
+    - `auto_create`: Whether to automatically create the target space if it does not exist.
+
+    Example:
+    ---
+    hosts = ["127.0.0.1:9669"]
+    user = "root"
+    password = "nebula"
+    space = "shared_graph"
+    user_name = "alice"
+    """
+
+    space: str = Field(
+        ..., description="The name of the target NebulaGraph space (like a database)"
+    )
+    user_name: str | None = Field(
+        default=None,
+        description="Logical user or tenant ID for data isolation (optional, used in metadata tagging)",
+    )
+    auto_create: bool = Field(
+        default=False,
+        description="Whether to auto-create the space if it does not exist",
+    )
+    use_multi_db: bool = Field(
+        default=True,
+        description=(
+            "If True: use Neo4j's multi-database feature for physical isolation; "
+            "each user typically gets a separate database. "
+            "If False: use a single shared database with logical isolation by user_name."
+        ),
+    )
+    embedding_dimension: int = Field(default=3072, description="Dimension of vector embedding")
+
+    @model_validator(mode="after")
+    def validate_config(self):
+        """Validate config."""
+        if not self.space:
+            raise ValueError("`space` must be provided")
+        return self
+
 
 class GraphDBConfigFactory(BaseModel):
@@ -29,6 +156,8 @@ class GraphDBConfigFactory(BaseModel):
 
     backend_to_class: ClassVar[dict[str, Any]] = {
         "neo4j": Neo4jGraphDBConfig,
+        "neo4j-community": Neo4jCommunityGraphDBConfig,
+        "nebular": NebulaGraphDBConfig,
     }
 
     @field_validator("backend")
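The docstring above describes two isolation modes; the sketch below shows how they would map onto GraphDBConfigFactory. It is illustrative only: URIs and credentials are placeholders, and the factory is assumed to accept the same backend/config shape as the other factories in this diff.

from memos.configs.graph_db import GraphDBConfigFactory

# Physical isolation: one Neo4j database per user (multi-db mode).
physical_cfg = GraphDBConfigFactory(
    backend="neo4j",
    config={
        "uri": "bolt://localhost:7687",
        "user": "neo4j",
        "password": "placeholder-password",
        "db_name": "alice",        # per-user database
        "use_multi_db": True,
        "auto_create": True,
    },
)

# Logical isolation: a shared database where every node is tagged with user_name.
logical_cfg = GraphDBConfigFactory(
    backend="neo4j",
    config={
        "uri": "bolt://localhost:7687",
        "user": "neo4j",
        "password": "placeholder-password",
        "db_name": "shared_db_student_group",
        "use_multi_db": False,
        "user_name": "alice",      # required when use_multi_db=False
    },
)

The model_validator added in this release rejects the second form if user_name is omitted, which is exactly the misconfiguration the docstring warns about.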
memos/configs/internet_retriever.py CHANGED
@@ -6,6 +6,7 @@ from pydantic import Field, field_validator, model_validator
 
 from memos.configs.base import BaseConfig
 from memos.exceptions import ConfigurationError
+from memos.mem_reader.factory import MemReaderConfigFactory
 
 
 class BaseInternetRetrieverConfig(BaseConfig):
@@ -47,6 +48,11 @@ class XinyuSearchConfig(BaseInternetRetrieverConfig):
     num_per_request: int = Field(
         default=10, description="Number of results per API request (not used for Xinyu)"
     )
+    reader: MemReaderConfigFactory = Field(
+        ...,
+        default_factory=MemReaderConfigFactory,
+        description="Reader configuration",
+    )
 
 
 class InternetRetrieverConfigFactory(BaseConfig):
memos/configs/llm.py CHANGED
@@ -27,6 +27,40 @@ class OpenAILLMConfig(BaseLLMConfig):
     extra_body: Any = Field(default=None, description="extra body")
 
 
+class QwenLLMConfig(BaseLLMConfig):
+    api_key: str = Field(..., description="API key for DashScope (Qwen)")
+    api_base: str = Field(
+        default="https://dashscope-intl.aliyuncs.com/compatible-mode/v1",
+        description="Base URL for Qwen OpenAI-compatible API",
+    )
+    extra_body: Any = Field(default=None, description="extra body")
+    model_name_or_path: str = Field(..., description="Model name for Qwen, e.g., 'qwen-plus'")
+
+
+class DeepSeekLLMConfig(BaseLLMConfig):
+    api_key: str = Field(..., description="API key for DeepSeek")
+    api_base: str = Field(
+        default="https://api.deepseek.com",
+        description="Base URL for DeepSeek OpenAI-compatible API",
+    )
+    extra_body: Any = Field(default=None, description="Extra options for API")
+    model_name_or_path: str = Field(
+        ..., description="Model name: 'deepseek-chat' or 'deepseek-reasoner'"
+    )
+
+
+class AzureLLMConfig(BaseLLMConfig):
+    base_url: str = Field(
+        default="https://api.openai.azure.com/",
+        description="Base URL for Azure OpenAI API",
+    )
+    api_version: str = Field(
+        default="2024-03-01-preview",
+        description="API version for Azure OpenAI",
+    )
+    api_key: str = Field(..., description="API key for Azure OpenAI")
+
+
 class OllamaLLMConfig(BaseLLMConfig):
     api_base: str = Field(
         default="http://localhost:11434",
@@ -45,6 +79,14 @@ class HFLLMConfig(BaseLLMConfig):
     )
 
 
+class VLLMLLMConfig(BaseLLMConfig):
+    api_key: str = Field(default="", description="API key for vLLM (optional for local server)")
+    api_base: str = Field(
+        default="http://localhost:8088/v1",
+        description="Base URL for vLLM API",
+    )
+
+
 class LLMConfigFactory(BaseConfig):
     """Factory class for creating LLM configurations."""
 
@@ -54,7 +96,12 @@ class LLMConfigFactory(BaseConfig):
     backend_to_class: ClassVar[dict[str, Any]] = {
         "openai": OpenAILLMConfig,
         "ollama": OllamaLLMConfig,
+        "azure": AzureLLMConfig,
         "huggingface": HFLLMConfig,
+        "vllm": VLLMLLMConfig,
+        "huggingface_singleton": HFLLMConfig,  # Add singleton support
+        "qwen": QwenLLMConfig,
+        "deepseek": DeepSeekLLMConfig,
     }
 
     @field_validator("backend")
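Selecting one of the new providers would presumably mirror the existing openai and ollama cases. A minimal sketch (the API key and model names are placeholders, and any generation settings defined on BaseLLMConfig but not shown in this diff are omitted):

from memos.configs.llm import LLMConfigFactory

# DeepSeek through its OpenAI-compatible endpoint (placeholder key).
deepseek_cfg = LLMConfigFactory(
    backend="deepseek",
    config={
        "api_key": "DEEPSEEK_API_KEY",
        "model_name_or_path": "deepseek-chat",  # or "deepseek-reasoner"
    },
)

# Local vLLM server; the empty api_key default is intended for local deployments.
vllm_cfg = LLMConfigFactory(
    backend="vllm",
    config={
        "api_base": "http://localhost:8088/v1",
        "model_name_or_path": "placeholder-model-name",  # assumed BaseLLMConfig field
    },
)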
memos/configs/mem_cube.py CHANGED
@@ -70,7 +70,7 @@ class GeneralMemCubeConfig(BaseMemCubeConfig):
     @classmethod
     def validate_act_mem(cls, act_mem: MemoryConfigFactory) -> MemoryConfigFactory:
         """Validate the act_mem field."""
-        allowed_backends = ["kv_cache", "uninitialized"]
+        allowed_backends = ["kv_cache", "vllm_kv_cache", "uninitialized"]
         if act_mem.backend not in allowed_backends:
             raise ConfigurationError(
                 f"GeneralMemCubeConfig requires act_mem backend to be one of {allowed_backends}, got '{act_mem.backend}'"
memos/configs/mem_os.py CHANGED
@@ -8,6 +8,7 @@ from memos.configs.base import BaseConfig
 from memos.configs.llm import LLMConfigFactory
 from memos.configs.mem_reader import MemReaderConfigFactory
 from memos.configs.mem_scheduler import SchedulerConfigFactory
+from memos.configs.mem_user import UserManagerConfigFactory
 
 
 class MOSConfig(BaseConfig):
@@ -33,6 +34,10 @@
         default=None,
         description="Memory scheduler configuration for managing memory operations",
     )
+    user_manager: UserManagerConfigFactory = Field(
+        default_factory=lambda: UserManagerConfigFactory(backend="sqlite", config={}),
+        description="User manager configuration for database operations",
+    )
     max_turns_window: int = Field(
         default=15,
         description="Maximum number of turns to keep in the conversation history",
memos/configs/mem_reader.py CHANGED
@@ -15,6 +15,15 @@ class BaseMemReaderConfig(BaseConfig):
     created_at: datetime = Field(
         default_factory=datetime.now, description="Creation timestamp for the MemReader"
     )
+
+    @field_validator("created_at", mode="before")
+    @classmethod
+    def parse_datetime(cls, value):
+        """Parse datetime from string if needed."""
+        if isinstance(value, str):
+            return datetime.fromisoformat(value.replace("Z", "+00:00"))
+        return value
+
     llm: LLMConfigFactory = Field(..., description="LLM configuration for the MemReader")
     embedder: EmbedderConfigFactory = Field(
         ..., description="Embedder configuration for the MemReader"
memos/configs/mem_scheduler.py CHANGED
@@ -1,11 +1,15 @@
+import os
+
+from pathlib import Path
 from typing import Any, ClassVar
 
 from pydantic import ConfigDict, Field, field_validator, model_validator
 
 from memos.configs.base import BaseConfig
-from memos.mem_scheduler.modules.schemas import (
+from memos.mem_scheduler.modules.misc import DictConversionMixin
+from memos.mem_scheduler.schemas.general_schemas import (
+    BASE_DIR,
     DEFAULT_ACT_MEM_DUMP_PATH,
-    DEFAULT_ACTIVATION_MEM_SIZE,
     DEFAULT_CONSUME_INTERVAL_SECONDS,
     DEFAULT_THREAD__POOL_MAX_WORKERS,
 )
@@ -17,6 +21,7 @@ class BaseSchedulerConfig(BaseConfig):
     top_k: int = Field(
         default=10, description="Number of top candidates to consider in initial retrieval"
     )
+    # TODO: The 'top_n' field is deprecated and will be removed in future versions.
     top_n: int = Field(default=5, description="Number of final results to return after processing")
     enable_parallel_dispatch: bool = Field(
         default=True, description="Whether to enable parallel message processing using thread pool"
@@ -33,6 +38,10 @@
         le=60,
         description=f"Interval for consuming messages from queue in seconds (default: {DEFAULT_CONSUME_INTERVAL_SECONDS})",
     )
+    auth_config_path: str | None = Field(
+        default=None,
+        description="Path to the authentication configuration file containing private credentials",
+    )
 
 
 class GeneralSchedulerConfig(BaseSchedulerConfig):
@@ -40,16 +49,15 @@ class GeneralSchedulerConfig(BaseSchedulerConfig):
         default=300, description="Interval in seconds for updating activation memory"
     )
     context_window_size: int | None = Field(
-        default=5, description="Size of the context window for conversation history"
-    )
-    activation_mem_size: int | None = Field(
-        default=DEFAULT_ACTIVATION_MEM_SIZE,  # Assuming DEFAULT_ACTIVATION_MEM_SIZE is 1000
-        description="Maximum size of the activation memory",
+        default=10, description="Size of the context window for conversation history"
     )
     act_mem_dump_path: str | None = Field(
         default=DEFAULT_ACT_MEM_DUMP_PATH,  # Replace with DEFAULT_ACT_MEM_DUMP_PATH
         description="File path for dumping activation memory",
     )
+    enable_act_memory_update: bool = Field(
+        default=False, description="Whether to enable automatic activation memory updates"
+    )
 
 
 class SchedulerConfigFactory(BaseConfig):
@@ -76,3 +84,95 @@
         config_class = self.backend_to_class[self.backend]
         self.config = config_class(**self.config)
         return self
+
+
+# ************************* Auth *************************
+class RabbitMQConfig(
+    BaseConfig,
+):
+    host_name: str = Field(default="", description="Endpoint for RabbitMQ instance access")
+    user_name: str = Field(default="", description="Static username for RabbitMQ instance")
+    password: str = Field(default="", description="Password for the static username")
+    virtual_host: str = Field(default="", description="Vhost name for RabbitMQ instance")
+    erase_on_connect: bool = Field(
+        default=True, description="Whether to clear connection state or buffers upon connecting"
+    )
+    port: int = Field(
+        default=5672,
+        description="Port number for RabbitMQ instance access",
+        ge=1,  # Port must be >= 1
+        le=65535,  # Port must be <= 65535
+    )
+
+
+class GraphDBAuthConfig(BaseConfig):
+    uri: str = Field(
+        default="bolt://localhost:7687",
+        description="URI for graph database access (e.g., bolt://host:port)",
+    )
+    user: str = Field(default="neo4j", description="Username for graph database authentication")
+    password: str = Field(
+        default="",
+        description="Password for graph database authentication",
+        min_length=8,  # recommended minimum password length
+    )
+    db_name: str = Field(default="neo4j", description="Database name to connect to")
+    auto_create: bool = Field(
+        default=True, description="Whether to automatically create the database if it doesn't exist"
+    )
+
+
+class OpenAIConfig(BaseConfig):
+    api_key: str = Field(default="", description="API key for OpenAI service")
+    base_url: str = Field(default="", description="Base URL for API endpoint")
+    default_model: str = Field(default="", description="Default model to use")
+
+
+class AuthConfig(BaseConfig, DictConversionMixin):
+    rabbitmq: RabbitMQConfig
+    openai: OpenAIConfig
+    graph_db: GraphDBAuthConfig
+    default_config_path: ClassVar[str] = (
+        f"{BASE_DIR}/examples/data/config/mem_scheduler/scheduler_auth.yaml"
+    )
+
+    @classmethod
+    def from_local_yaml(cls, config_path: str | None = None) -> "AuthConfig":
+        """
+        Load configuration from YAML file
+
+        Args:
+            config_path: Path to YAML configuration file
+
+        Returns:
+            AuthConfig instance
+
+        Raises:
+            FileNotFoundError: If config file doesn't exist
+            ValueError: If YAML parsing or validation fails
+        """
+
+        if config_path is None:
+            config_path = cls.default_config_path
+
+        # Check file exists
+        if not Path(config_path).exists():
+            raise FileNotFoundError(f"Config file not found: {config_path}")
+
+        return cls.from_yaml_file(yaml_path=config_path)
+
+    def set_openai_config_to_environment(self):
+        # Set environment variables
+        os.environ["OPENAI_API_KEY"] = self.openai.api_key
+        os.environ["OPENAI_BASE_URL"] = self.openai.base_url
+        os.environ["MODEL"] = self.openai.default_model
+
+    @classmethod
+    def default_config_exists(cls) -> bool:
+        """
+        Check if the default configuration file exists.
+
+        Returns:
+            bool: True if the default config file exists, False otherwise
+        """
+        return Path(cls.default_config_path).exists()
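Taken together, the new AuthConfig keeps RabbitMQ, OpenAI, and graph-database credentials out of the main scheduler config and loads them from a local YAML file. A usage sketch (the explicit path is a placeholder, and from_yaml_file is assumed to be inherited from BaseConfig):

from memos.configs.mem_scheduler import AuthConfig

# Load credentials from the default location if it exists, otherwise from an
# explicit path (placeholder below), then export the OpenAI settings as
# environment variables for downstream modules.
if AuthConfig.default_config_exists():
    auth = AuthConfig.from_local_yaml()
else:
    auth = AuthConfig.from_local_yaml(config_path="/path/to/scheduler_auth.yaml")

auth.set_openai_config_to_environment()
print(auth.rabbitmq.host_name, auth.graph_db.uri)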
memos/configs/mem_user.py ADDED
@@ -0,0 +1,58 @@
+from typing import Any, ClassVar
+
+from pydantic import BaseModel, Field, field_validator, model_validator
+
+from memos.configs.base import BaseConfig
+
+
+class BaseUserManagerConfig(BaseConfig):
+    """Base configuration class for user managers."""
+
+    user_id: str = Field(default="root", description="Default user ID for initialization")
+
+
+class SQLiteUserManagerConfig(BaseUserManagerConfig):
+    """SQLite user manager configuration."""
+
+    db_path: str | None = Field(
+        default=None,
+        description="Path to SQLite database file. If None, uses default path in MEMOS_DIR",
+    )
+
+
+class MySQLUserManagerConfig(BaseUserManagerConfig):
+    """MySQL user manager configuration."""
+
+    host: str = Field(default="localhost", description="MySQL server host")
+    port: int = Field(default=3306, description="MySQL server port")
+    username: str = Field(default="root", description="MySQL username")
+    password: str = Field(default="", description="MySQL password")
+    database: str = Field(default="memos_users", description="MySQL database name")
+    charset: str = Field(default="utf8mb4", description="MySQL charset")
+
+
+class UserManagerConfigFactory(BaseModel):
+    """Factory for user manager configurations."""
+
+    backend: str = Field(default="sqlite", description="Backend for user manager")
+    config: dict[str, Any] = Field(
+        default_factory=dict, description="Configuration for the user manager backend"
+    )
+
+    backend_to_class: ClassVar[dict[str, Any]] = {
+        "sqlite": SQLiteUserManagerConfig,
+        "mysql": MySQLUserManagerConfig,
+    }
+
+    @field_validator("backend")
+    @classmethod
+    def validate_backend(cls, backend: str) -> str:
+        if backend not in cls.backend_to_class:
+            raise ValueError(f"Unsupported user manager backend: {backend}")
+        return backend
+
+    @model_validator(mode="after")
+    def instantiate_config(self):
+        config_class = self.backend_to_class[self.backend]
+        self.config = config_class(**self.config)
+        return self
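This factory follows the same backend/config pattern as the rest of the release, so a MySQL-backed user store is configured like this (a sketch; the host and credentials are placeholders):

from memos.configs.mem_user import UserManagerConfigFactory

mysql_users = UserManagerConfigFactory(
    backend="mysql",
    config={
        "host": "db.internal.example",      # placeholder host
        "port": 3306,
        "username": "memos",
        "password": "placeholder-password",
        "database": "memos_users",
        "charset": "utf8mb4",
    },
)
# After the model_validator runs, mysql_users.config is a MySQLUserManagerConfig instance.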
memos/configs/memory.py CHANGED
@@ -52,9 +52,9 @@ class KVCacheMemoryConfig(BaseActMemoryConfig):
     @classmethod
     def validate_extractor_llm(cls, extractor_llm: LLMConfigFactory) -> LLMConfigFactory:
         """Validate the extractor_llm field."""
-        if extractor_llm.backend != "huggingface":
+        if extractor_llm.backend not in ["huggingface", "huggingface_singleton", "vllm"]:
             raise ConfigurationError(
-                f"KVCacheMemoryConfig requires extractor_llm backend to be 'huggingface', got '{extractor_llm.backend}'"
+                f"KVCacheMemoryConfig requires extractor_llm backend to be 'huggingface' or 'huggingface_singleton', got '{extractor_llm.backend}'"
             )
         return extractor_llm
 
@@ -84,9 +84,9 @@ class LoRAMemoryConfig(BaseParaMemoryConfig):
     @classmethod
     def validate_extractor_llm(cls, extractor_llm: LLMConfigFactory) -> LLMConfigFactory:
         """Validate the extractor_llm field."""
-        if extractor_llm.backend not in ["huggingface"]:
+        if extractor_llm.backend not in ["huggingface", "huggingface_singleton"]:
             raise ConfigurationError(
-                f"LoRAMemoryConfig requires extractor_llm backend to be 'huggingface', got '{extractor_llm.backend}'"
+                f"LoRAMemoryConfig requires extractor_llm backend to be 'huggingface' or 'huggingface_singleton', got '{extractor_llm.backend}'"
             )
         return extractor_llm
 
@@ -181,6 +181,7 @@ class MemoryConfigFactory(BaseConfig):
         "general_text": GeneralTextMemoryConfig,
         "tree_text": TreeTextMemoryConfig,
         "kv_cache": KVCacheMemoryConfig,
+        "vllm_kv_cache": KVCacheMemoryConfig,  # Use same config as kv_cache
         "lora": LoRAMemoryConfig,
         "uninitialized": UninitializedMemoryConfig,
     }
memos/dependency.py ADDED
@@ -0,0 +1,52 @@
+"""
+This utility provides tools for managing dependencies in MemOS.
+"""
+
+import functools
+import importlib
+
+
+def require_python_package(
+    import_name: str, install_command: str | None = None, install_link: str | None = None
+):
+    """Check if a package is available and provide installation hints on import failure.
+
+    Args:
+        import_name (str): The top-level importable module name a package provides.
+        install_command (str, optional): Installation command.
+        install_link (str, optional): URL link to installation guide.
+
+    Returns:
+        Callable: A decorator function that wraps the target function with package availability check.
+
+    Raises:
+        ImportError: When the specified package is not available, with installation
+            instructions included in the error message.
+
+    Example:
+        >>> @require_python_package(
+        ...     import_name='faiss',
+        ...     install_command='pip install faiss-cpu',
+        ...     install_link='https://github.com/facebookresearch/faiss/blob/main/INSTALL.md'
+        ... )
+        ... def create_faiss_index():
+        ...     from faiss import IndexFlatL2  # Actual import in function
+        ...     return IndexFlatL2(128)
+    """
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            try:
+                importlib.import_module(import_name)
+            except ImportError:
+                error_msg = f"Missing required module - '{import_name}'\n"
+                error_msg += f"💡 Install command: {install_command}\n" if install_command else ""
+                error_msg += f"💡 Install guide: {install_link}\n" if install_link else ""
+
+                raise ImportError(error_msg) from None
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
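Beyond the docstring example, note that the import check runs when the wrapped function is called, not at decoration time, so modules using this decorator still import cleanly when an optional dependency is absent. A small sketch with a hypothetical optional package:

from memos.dependency import require_python_package


@require_python_package(
    import_name="some_optional_pkg",                 # hypothetical optional dependency
    install_command="pip install some-optional-pkg",
)
def feature_needing_pkg():
    import some_optional_pkg
    return some_optional_pkg.__name__


try:
    feature_needing_pkg()
except ImportError as err:
    print(err)  # message includes the install command hint assembled above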