MemoryOS 1.0.0-py3-none-any.whl → 1.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of MemoryOS might be problematic.

Files changed (94)
  1. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/METADATA +8 -2
  2. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/RECORD +92 -69
  3. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/WHEEL +1 -1
  4. memos/__init__.py +1 -1
  5. memos/api/client.py +109 -0
  6. memos/api/config.py +35 -8
  7. memos/api/context/dependencies.py +15 -66
  8. memos/api/middleware/request_context.py +63 -0
  9. memos/api/product_api.py +5 -2
  10. memos/api/product_models.py +107 -16
  11. memos/api/routers/product_router.py +62 -19
  12. memos/api/start_api.py +13 -0
  13. memos/configs/graph_db.py +4 -0
  14. memos/configs/mem_scheduler.py +38 -3
  15. memos/configs/memory.py +13 -0
  16. memos/configs/reranker.py +18 -0
  17. memos/context/context.py +255 -0
  18. memos/embedders/factory.py +2 -0
  19. memos/graph_dbs/base.py +4 -2
  20. memos/graph_dbs/nebular.py +368 -223
  21. memos/graph_dbs/neo4j.py +49 -13
  22. memos/graph_dbs/neo4j_community.py +13 -3
  23. memos/llms/factory.py +2 -0
  24. memos/llms/openai.py +74 -2
  25. memos/llms/vllm.py +2 -0
  26. memos/log.py +128 -4
  27. memos/mem_cube/general.py +3 -1
  28. memos/mem_os/core.py +89 -23
  29. memos/mem_os/main.py +3 -6
  30. memos/mem_os/product.py +418 -154
  31. memos/mem_os/utils/reference_utils.py +20 -0
  32. memos/mem_reader/factory.py +2 -0
  33. memos/mem_reader/simple_struct.py +204 -82
  34. memos/mem_scheduler/analyzer/__init__.py +0 -0
  35. memos/mem_scheduler/analyzer/mos_for_test_scheduler.py +569 -0
  36. memos/mem_scheduler/analyzer/scheduler_for_eval.py +280 -0
  37. memos/mem_scheduler/base_scheduler.py +126 -56
  38. memos/mem_scheduler/general_modules/dispatcher.py +2 -2
  39. memos/mem_scheduler/general_modules/misc.py +99 -1
  40. memos/mem_scheduler/general_modules/scheduler_logger.py +17 -11
  41. memos/mem_scheduler/general_scheduler.py +40 -88
  42. memos/mem_scheduler/memory_manage_modules/__init__.py +5 -0
  43. memos/mem_scheduler/memory_manage_modules/memory_filter.py +308 -0
  44. memos/mem_scheduler/{general_modules → memory_manage_modules}/retriever.py +34 -7
  45. memos/mem_scheduler/monitors/dispatcher_monitor.py +9 -8
  46. memos/mem_scheduler/monitors/general_monitor.py +119 -39
  47. memos/mem_scheduler/optimized_scheduler.py +124 -0
  48. memos/mem_scheduler/orm_modules/__init__.py +0 -0
  49. memos/mem_scheduler/orm_modules/base_model.py +635 -0
  50. memos/mem_scheduler/orm_modules/monitor_models.py +261 -0
  51. memos/mem_scheduler/scheduler_factory.py +2 -0
  52. memos/mem_scheduler/schemas/monitor_schemas.py +96 -29
  53. memos/mem_scheduler/utils/config_utils.py +100 -0
  54. memos/mem_scheduler/utils/db_utils.py +33 -0
  55. memos/mem_scheduler/utils/filter_utils.py +1 -1
  56. memos/mem_scheduler/webservice_modules/__init__.py +0 -0
  57. memos/mem_user/mysql_user_manager.py +4 -2
  58. memos/memories/activation/kv.py +2 -1
  59. memos/memories/textual/item.py +96 -17
  60. memos/memories/textual/naive.py +1 -1
  61. memos/memories/textual/tree.py +57 -3
  62. memos/memories/textual/tree_text_memory/organize/handler.py +4 -2
  63. memos/memories/textual/tree_text_memory/organize/manager.py +28 -14
  64. memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py +1 -2
  65. memos/memories/textual/tree_text_memory/organize/reorganizer.py +75 -23
  66. memos/memories/textual/tree_text_memory/retrieve/bochasearch.py +10 -6
  67. memos/memories/textual/tree_text_memory/retrieve/internet_retriever.py +6 -2
  68. memos/memories/textual/tree_text_memory/retrieve/internet_retriever_factory.py +2 -0
  69. memos/memories/textual/tree_text_memory/retrieve/recall.py +119 -21
  70. memos/memories/textual/tree_text_memory/retrieve/searcher.py +172 -44
  71. memos/memories/textual/tree_text_memory/retrieve/utils.py +6 -4
  72. memos/memories/textual/tree_text_memory/retrieve/xinyusearch.py +5 -4
  73. memos/memos_tools/notification_utils.py +46 -0
  74. memos/memos_tools/singleton.py +174 -0
  75. memos/memos_tools/thread_safe_dict.py +22 -0
  76. memos/memos_tools/thread_safe_dict_segment.py +382 -0
  77. memos/parsers/factory.py +2 -0
  78. memos/reranker/__init__.py +4 -0
  79. memos/reranker/base.py +24 -0
  80. memos/reranker/concat.py +59 -0
  81. memos/reranker/cosine_local.py +96 -0
  82. memos/reranker/factory.py +48 -0
  83. memos/reranker/http_bge.py +312 -0
  84. memos/reranker/noop.py +16 -0
  85. memos/templates/mem_reader_prompts.py +289 -40
  86. memos/templates/mem_scheduler_prompts.py +242 -0
  87. memos/templates/mos_prompts.py +133 -60
  88. memos/types.py +4 -1
  89. memos/api/context/context.py +0 -147
  90. memos/mem_scheduler/mos_for_test_scheduler.py +0 -146
  91. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/entry_points.txt +0 -0
  92. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info/licenses}/LICENSE +0 -0
  93. /memos/mem_scheduler/{general_modules → webservice_modules}/rabbitmq_service.py +0 -0
  94. /memos/mem_scheduler/{general_modules → webservice_modules}/redis_service.py +0 -0
--- a/memos/memories/textual/item.py
+++ b/memos/memories/textual/item.py
@@ -1,13 +1,48 @@
 """Defines memory item types for textual memory."""
 
+import json
 import uuid
 
 from datetime import datetime
-from typing import Literal
+from typing import Any, Literal
 
 from pydantic import BaseModel, ConfigDict, Field, field_validator
 
 
+ALLOWED_ROLES = {"user", "assistant", "system"}
+
+
+class SourceMessage(BaseModel):
+    """
+    Purpose: **memory provenance / traceability**.
+
+    Capture the minimal, reproducible origin context of a memory item so it can be
+    audited, traced, rolled back, or de-duplicated later.
+
+    Fields & conventions:
+    - type: Source kind (e.g., "chat", "doc", "web", "file", "system", ...).
+      If not provided, upstream logic may infer it:
+      presence of `role` ⇒ "chat"; otherwise ⇒ "doc".
+    - role: Conversation role ("user" | "assistant" | "system") when the
+      source is a chat turn.
+    - content: Minimal reproducible snippet from the source. If omitted,
+      upstream may fall back to `doc_path` / `url` / `message_id`.
+    - chat_time / message_id / doc_path: Locators for precisely pointing back
+      to the original record (timestamp, message id, document path).
+    - Extra fields: Allowed (`model_config.extra="allow"`) to carry arbitrary
+      provenance attributes (e.g., url, page, offset, span, local_confidence).
+    """
+
+    type: str | None = "chat"
+    role: Literal["user", "assistant", "system"] | None = None
+    chat_time: str | None = None
+    message_id: str | None = None
+    content: str | None = None
+    doc_path: str | None = None
+
+    model_config = ConfigDict(extra="allow")
+
+
 class TextualMemoryMetadata(BaseModel):
     """Metadata for a memory item.
 
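A minimal usage sketch of the new SourceMessage model as defined in the hunk above. The variable names and the extra attributes `url` and `page` are illustrative, not part of the package; they simply show what `extra="allow"` permits.

# Hypothetical usage of SourceMessage (names and extra keys are illustrative).
chat_src = SourceMessage(role="user", chat_time="2024-01-01T09:30:00", content="I moved to Berlin.")
doc_src = SourceMessage(type="doc", doc_path="notes/relocation.md", url="https://example.com", page=3)

print(chat_src.type)         # "chat" (the declared default)
print(doc_src.model_dump())  # extra keys ("url", "page") are preserved by extra="allow"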
@@ -33,7 +68,7 @@ class TextualMemoryMetadata(BaseModel):
         default=None,
         description="A numeric score (float between 0 and 100) indicating how certain you are about the accuracy or reliability of the memory.",
     )
-    source: Literal["conversation", "retrieved", "web", "file"] | None = Field(
+    source: Literal["conversation", "retrieved", "web", "file", "system"] | None = Field(
         default=None, description="The origin of the memory"
     )
     tags: list[str] | None = Field(
@@ -62,7 +97,7 @@ class TreeNodeTextualMemoryMetadata(TextualMemoryMetadata):
     memory_type: Literal["WorkingMemory", "LongTermMemory", "UserMemory", "OuterMemory"] = Field(
         default="WorkingMemory", description="Memory lifecycle type."
     )
-    sources: list[str] | None = Field(
+    sources: list[SourceMessage] | None = Field(
         default=None, description="Multiple origins of the memory (e.g., URLs, notes)."
     )
     embedding: list[float] | None = Field(
@@ -74,8 +109,8 @@ class TreeNodeTextualMemoryMetadata(TextualMemoryMetadata):
         description="The timestamp of the first creation to the memory. Useful "
         "for tracking memory initialization. Format: ISO 8601.",
     )
-    usage: list[str] | None = Field(
-        default=[],
+    usage: list[str] = Field(
+        default_factory=list,
         description="Usage history of this node",
     )
     background: str | None = Field(
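With `default_factory=list`, `usage` is always a list (never None) and each instance starts with its own fresh list. A tiny illustration, assuming the metadata model is constructible with its defaults as shown; the variable names are hypothetical.

# Illustration only: usage defaults to a fresh empty list per instance and is no
# longer Optional, so callers can append without a None check.
meta_a = TreeNodeTextualMemoryMetadata()
meta_b = TreeNodeTextualMemoryMetadata()
meta_a.usage.append("recalled@2025-01-01T10:00:00")
print(meta_b.usage)   # []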
@@ -83,12 +118,40 @@ class TreeNodeTextualMemoryMetadata(TextualMemoryMetadata):
         description="background of this node",
     )
 
-    @field_validator("sources")
+    @field_validator("sources", mode="before")
     @classmethod
-    def validate_sources(cls, v):
-        if v is not None and not isinstance(v, list):
-            raise ValueError("Sources must be a list of strings.")
-        return v
+    def coerce_sources(cls, v):
+        if v is None:
+            return v
+        if not isinstance(v, list):
+            raise TypeError("sources must be a list")
+        out = []
+        for item in v:
+            if isinstance(item, SourceMessage):
+                out.append(item)
+
+            elif isinstance(item, dict):
+                d = dict(item)
+                if d.get("type") is None:
+                    d["type"] = "chat" if d.get("role") in ALLOWED_ROLES else "doc"
+                out.append(SourceMessage(**d))
+
+            elif isinstance(item, str):
+                try:
+                    parsed = json.loads(item)
+                except Exception:
+                    parsed = None
+
+                if isinstance(parsed, dict):
+                    if parsed.get("type") is None:
+                        parsed["type"] = "chat" if parsed.get("role") in ALLOWED_ROLES else "doc"
+                    out.append(SourceMessage(**parsed))
+                else:
+                    out.append(SourceMessage(type="doc", content=item))
+
+            else:
+                out.append(SourceMessage(type="doc", content=str(item)))
+        return out
 
     def __str__(self) -> str:
         """Pretty string representation of the metadata."""
@@ -114,19 +177,17 @@ class TextualMemoryItem(BaseModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()))
     memory: str
     metadata: (
-        TextualMemoryMetadata
+        SearchedTreeNodeTextualMemoryMetadata
         | TreeNodeTextualMemoryMetadata
-        | SearchedTreeNodeTextualMemoryMetadata
+        | TextualMemoryMetadata
     ) = Field(default_factory=TextualMemoryMetadata)
 
     model_config = ConfigDict(extra="forbid")
 
+    @field_validator("id")
     @classmethod
-    def validate_id(cls, v):
-        try:
-            uuid.UUID(v)
-        except ValueError as e:
-            raise ValueError("Invalid UUID format") from e
+    def _validate_id(cls, v: str) -> str:
+        uuid.UUID(v)
         return v
 
     @classmethod
@@ -136,6 +197,24 @@ class TextualMemoryItem(BaseModel):
     def to_dict(self) -> dict:
         return self.model_dump(exclude_none=True)
 
+    @field_validator("metadata", mode="before")
+    @classmethod
+    def _coerce_metadata(cls, v: Any):
+        if isinstance(
+            v,
+            SearchedTreeNodeTextualMemoryMetadata
+            | TreeNodeTextualMemoryMetadata
+            | TextualMemoryMetadata,
+        ):
+            return v
+        if isinstance(v, dict):
+            if v.get("relativity") is not None:
+                return SearchedTreeNodeTextualMemoryMetadata(**v)
+            if any(k in v for k in ("sources", "memory_type", "embedding", "background", "usage")):
+                return TreeNodeTextualMemoryMetadata(**v)
+            return TextualMemoryMetadata(**v)
+        return v
+
     def __str__(self) -> str:
         """Pretty string representation of the memory item."""
         return f"<ID: {self.id} | Memory: {self.memory} | Metadata: {self.metadata!s}>"
--- a/memos/memories/textual/naive.py
+++ b/memos/memories/textual/naive.py
@@ -115,7 +115,7 @@ class NaiveTextMemory(BaseTextMemory):
                 self.memories[i] = memory_dict
                 break
 
-    def search(self, query: str, top_k: int) -> list[TextualMemoryItem]:
+    def search(self, query: str, top_k: int, **kwargs) -> list[TextualMemoryItem]:
         """Search for memories based on a query."""
         sims = [
             (memory, len(set(query.split()) & set(memory["memory"].split())))
--- a/memos/memories/textual/tree.py
+++ b/memos/memories/textual/tree.py
@@ -2,12 +2,14 @@ import json
 import os
 import shutil
 import tempfile
+import time
 
 from datetime import datetime
 from pathlib import Path
 from typing import Any
 
 from memos.configs.memory import TreeTextMemoryConfig
+from memos.configs.reranker import RerankerConfigFactory
 from memos.embedders.factory import EmbedderFactory, OllamaEmbedder
 from memos.graph_dbs.factory import GraphStoreFactory, Neo4jGraphDB
 from memos.llms.factory import AzureLLM, LLMFactory, OllamaLLM, OpenAILLM
@@ -19,6 +21,7 @@ from memos.memories.textual.tree_text_memory.retrieve.internet_retriever_factory
     InternetRetrieverFactory,
 )
 from memos.memories.textual.tree_text_memory.retrieve.searcher import Searcher
+from memos.reranker.factory import RerankerFactory
 from memos.types import MessageList
 
 
@@ -30,21 +33,59 @@ class TreeTextMemory(BaseTextMemory):
 
     def __init__(self, config: TreeTextMemoryConfig):
         """Initialize memory with the given configuration."""
+        time_start = time.time()
         self.config: TreeTextMemoryConfig = config
         self.extractor_llm: OpenAILLM | OllamaLLM | AzureLLM = LLMFactory.from_config(
             config.extractor_llm
         )
+        logger.info(f"time init: extractor_llm time is: {time.time() - time_start}")
+
+        time_start_ex = time.time()
         self.dispatcher_llm: OpenAILLM | OllamaLLM | AzureLLM = LLMFactory.from_config(
             config.dispatcher_llm
         )
+        logger.info(f"time init: dispatcher_llm time is: {time.time() - time_start_ex}")
+
+        time_start_em = time.time()
         self.embedder: OllamaEmbedder = EmbedderFactory.from_config(config.embedder)
+        logger.info(f"time init: embedder time is: {time.time() - time_start_em}")
+
+        time_start_gs = time.time()
         self.graph_store: Neo4jGraphDB = GraphStoreFactory.from_config(config.graph_db)
+        logger.info(f"time init: graph_store time is: {time.time() - time_start_gs}")
+
+        time_start_rr = time.time()
+        if config.reranker is None:
+            default_cfg = RerankerConfigFactory.model_validate(
+                {
+                    "backend": "cosine_local",
+                    "config": {
+                        "level_weights": {"topic": 1.0, "concept": 1.0, "fact": 1.0},
+                        "level_field": "background",
+                    },
+                }
+            )
+            self.reranker = RerankerFactory.from_config(default_cfg)
+        else:
+            self.reranker = RerankerFactory.from_config(config.reranker)
+        logger.info(f"time init: reranker time is: {time.time() - time_start_rr}")
         self.is_reorganize = config.reorganize
 
+        time_start_mm = time.time()
         self.memory_manager: MemoryManager = MemoryManager(
-            self.graph_store, self.embedder, self.extractor_llm, is_reorganize=self.is_reorganize
+            self.graph_store,
+            self.embedder,
+            self.extractor_llm,
+            memory_size=config.memory_size
+            or {
+                "WorkingMemory": 20,
+                "LongTermMemory": 1500,
+                "UserMemory": 480,
+            },
+            is_reorganize=self.is_reorganize,
         )
-
+        logger.info(f"time init: memory_manager time is: {time.time() - time_start_mm}")
+        time_start_ir = time.time()
         # Create internet retriever if configured
         self.internet_retriever = None
         if config.internet_retriever is not None:
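The stage-by-stage `time.time()` bookkeeping above could equally be expressed with a small context manager. This is only a sketch of the pattern; `log_stage` is a hypothetical helper and is not part of MemoryOS.

import time
from contextlib import contextmanager

@contextmanager
def log_stage(name: str):
    """Log how long one init stage takes (sketch; not part of the package)."""
    start = time.time()
    try:
        yield
    finally:
        logger.info(f"time init: {name} time is: {time.time() - start}")

# e.g. inside __init__:
# with log_stage("embedder"):
#     self.embedder = EmbedderFactory.from_config(config.embedder)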
@@ -56,6 +97,7 @@ class TreeTextMemory(BaseTextMemory):
             )
         else:
             logger.info("No internet retriever configured")
+        logger.info(f"time init: internet_retriever time is: {time.time() - time_start_ir}")
 
     def add(self, memories: list[TextualMemoryItem | dict[str, Any]]) -> list[str]:
         """Add memories.
@@ -96,6 +138,8 @@ class TreeTextMemory(BaseTextMemory):
         mode: str = "fast",
         memory_type: str = "All",
         manual_close_internet: bool = False,
+        moscube: bool = False,
+        search_filter: dict | None = None,
     ) -> list[TextualMemoryItem]:
         """Search for memories based on a query.
         User query -> TaskGoalParser -> MemoryPathResolver ->
@@ -110,6 +154,12 @@
             memory_type (str): Type restriction for search.
                 ['All', 'WorkingMemory', 'LongTermMemory', 'UserMemory']
             manual_close_internet (bool): If True, the internet retriever will be closed by this search, it high priority than config.
+            moscube (bool): whether you use moscube to answer questions
+            search_filter (dict, optional): Optional metadata filters for search results.
+                - Keys correspond to memory metadata fields (e.g., "user_id", "session_id").
+                - Values are exact-match conditions.
+                  Example: {"user_id": "123", "session_id": "abc"}
+                If None, no additional filtering is applied.
         Returns:
             list[TextualMemoryItem]: List of matching memories.
         """
@@ -121,16 +171,20 @@
                 self.dispatcher_llm,
                 self.graph_store,
                 self.embedder,
+                self.reranker,
                 internet_retriever=None,
+                moscube=moscube,
             )
         else:
             searcher = Searcher(
                 self.dispatcher_llm,
                 self.graph_store,
                 self.embedder,
+                self.reranker,
                 internet_retriever=self.internet_retriever,
+                moscube=moscube,
             )
-        return searcher.search(query, top_k, info, mode, memory_type)
+        return searcher.search(query, top_k, info, mode, memory_type, search_filter)
 
     def get_relevant_subgraph(
         self, query: str, top_k: int = 5, depth: int = 2, center_status: str = "activated"
--- a/memos/memories/textual/tree_text_memory/organize/handler.py
+++ b/memos/memories/textual/tree_text_memory/organize/handler.py
@@ -1,5 +1,6 @@
 import json
 import re
+
 from datetime import datetime
 
 from dateutil import parser
@@ -14,6 +15,7 @@ from memos.templates.tree_reorganize_prompts import (
     MEMORY_RELATION_RESOLVER_PROMPT,
 )
 
+
 logger = get_logger(__name__)
 
 
@@ -50,12 +52,12 @@ class NodeHandler:
         ]
         result = self.llm.generate(prompt).strip()
         if result == "contradictory":
-            logger.warning(
+            logger.info(
                 f'detected "{memory.memory}" <==CONFLICT==> "{embedding_candidate.memory}"'
             )
             detected_relationships.append([memory, embedding_candidate, "contradictory"])
         elif result == "redundant":
-            logger.warning(
+            logger.info(
                 f'detected "{memory.memory}" <==REDUNDANT==> "{embedding_candidate.memory}"'
             )
             detected_relationships.append([memory, embedding_candidate, "redundant"])
--- a/memos/memories/textual/tree_text_memory/organize/manager.py
+++ b/memos/memories/textual/tree_text_memory/organize/manager.py
@@ -1,8 +1,10 @@
+import traceback
 import uuid
 
-from concurrent.futures import ThreadPoolExecutor, as_completed
+from concurrent.futures import as_completed
 from datetime import datetime
 
+from memos.context.context import ContextThreadPoolExecutor
 from memos.embedders.factory import OllamaEmbedder
 from memos.graph_dbs.neo4j import Neo4jGraphDB
 from memos.llms.factory import AzureLLM, OllamaLLM, OpenAILLM
@@ -55,24 +57,35 @@ class MemoryManager:
         """
         added_ids: list[str] = []
 
-        with ThreadPoolExecutor(max_workers=8) as executor:
+        with ContextThreadPoolExecutor(max_workers=8) as executor:
             futures = {executor.submit(self._process_memory, m): m for m in memories}
-            for future in as_completed(futures):
+            for future in as_completed(futures, timeout=60):
                 try:
                     ids = future.result()
                     added_ids.extend(ids)
                 except Exception as e:
                     logger.exception("Memory processing error: ", exc_info=e)
 
-        self.graph_store.remove_oldest_memory(
-            memory_type="WorkingMemory", keep_latest=self.memory_size["WorkingMemory"]
-        )
-        self.graph_store.remove_oldest_memory(
-            memory_type="LongTermMemory", keep_latest=self.memory_size["LongTermMemory"]
-        )
-        self.graph_store.remove_oldest_memory(
-            memory_type="UserMemory", keep_latest=self.memory_size["UserMemory"]
-        )
+        try:
+            self.graph_store.remove_oldest_memory(
+                memory_type="WorkingMemory", keep_latest=self.memory_size["WorkingMemory"]
+            )
+        except Exception:
+            logger.warning(f"Remove WorkingMemory error: {traceback.format_exc()}")
+
+        try:
+            self.graph_store.remove_oldest_memory(
+                memory_type="LongTermMemory", keep_latest=self.memory_size["LongTermMemory"]
+            )
+        except Exception:
+            logger.warning(f"Remove LongTermMemory error: {traceback.format_exc()}")
+
+        try:
+            self.graph_store.remove_oldest_memory(
+                memory_type="UserMemory", keep_latest=self.memory_size["UserMemory"]
+            )
+        except Exception:
+            logger.warning(f"Remove UserMemory error: {traceback.format_exc()}")
 
         self._refresh_memory_size()
         return added_ids
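The three nearly identical try/except blocks above could be folded into a loop over the configured memory types; this is a sketch of the equivalent cleanup pattern, not the shipped implementation.

# Equivalent cleanup loop (sketch): trim each memory type independently so one
# failing graph call does not prevent the others from being trimmed.
for memory_type in ("WorkingMemory", "LongTermMemory", "UserMemory"):
    try:
        self.graph_store.remove_oldest_memory(
            memory_type=memory_type, keep_latest=self.memory_size[memory_type]
        )
    except Exception:
        logger.warning(f"Remove {memory_type} error: {traceback.format_exc()}")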
@@ -82,12 +95,12 @@
         Replace WorkingMemory
         """
         working_memory_top_k = memories[: self.memory_size["WorkingMemory"]]
-        with ThreadPoolExecutor(max_workers=8) as executor:
+        with ContextThreadPoolExecutor(max_workers=8) as executor:
             futures = [
                 executor.submit(self._add_memory_to_db, memory, "WorkingMemory")
                 for memory in working_memory_top_k
             ]
-            for future in as_completed(futures):
+            for future in as_completed(futures, timeout=60):
                 try:
                     future.result()
                 except Exception as e:
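`ContextThreadPoolExecutor` comes from the new `memos/context/context.py` module, whose body is not shown in this diff. A common way to build such an executor is to copy the caller's `contextvars` context into every submitted task; the sketch below illustrates that technique under that assumption and is not the package's actual implementation (note the different class name).

import contextvars
from concurrent.futures import ThreadPoolExecutor

class ContextPropagatingExecutor(ThreadPoolExecutor):
    """Sketch: run each task inside a copy of the submitter's contextvars context."""

    def submit(self, fn, /, *args, **kwargs):
        ctx = contextvars.copy_context()
        return super().submit(ctx.run, fn, *args, **kwargs)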
@@ -102,6 +115,7 @@
         """
         Return the cached memory type counts.
         """
+        self._refresh_memory_size()
         return self.current_memory_size
 
     def _refresh_memory_size(self) -> None:
--- a/memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py
+++ b/memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py
@@ -46,7 +46,7 @@ class RelationAndReasoningDetector:
             "sequence_links": [],
             "aggregate_nodes": [],
         }
-
+        """
         nearest = self.graph_store.get_neighbors_by_tag(
             tags=node.metadata.tags,
             exclude_ids=exclude_ids,
@@ -55,7 +55,6 @@
         )
         nearest = [GraphDBNode(**cand_data) for cand_data in nearest]
 
-        """
         # 1) Pairwise relations (including CAUSE/CONDITION/CONFLICT)
         pairwise = self._detect_pairwise_causal_condition_relations(node, nearest)
         results["relations"].extend(pairwise["relations"])