MemoryOS 1.0.0__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of MemoryOS might be problematic. Click here for more details.

Files changed (94)
  1. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/METADATA +8 -2
  2. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/RECORD +92 -69
  3. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/WHEEL +1 -1
  4. memos/__init__.py +1 -1
  5. memos/api/client.py +109 -0
  6. memos/api/config.py +35 -8
  7. memos/api/context/dependencies.py +15 -66
  8. memos/api/middleware/request_context.py +63 -0
  9. memos/api/product_api.py +5 -2
  10. memos/api/product_models.py +107 -16
  11. memos/api/routers/product_router.py +62 -19
  12. memos/api/start_api.py +13 -0
  13. memos/configs/graph_db.py +4 -0
  14. memos/configs/mem_scheduler.py +38 -3
  15. memos/configs/memory.py +13 -0
  16. memos/configs/reranker.py +18 -0
  17. memos/context/context.py +255 -0
  18. memos/embedders/factory.py +2 -0
  19. memos/graph_dbs/base.py +4 -2
  20. memos/graph_dbs/nebular.py +368 -223
  21. memos/graph_dbs/neo4j.py +49 -13
  22. memos/graph_dbs/neo4j_community.py +13 -3
  23. memos/llms/factory.py +2 -0
  24. memos/llms/openai.py +74 -2
  25. memos/llms/vllm.py +2 -0
  26. memos/log.py +128 -4
  27. memos/mem_cube/general.py +3 -1
  28. memos/mem_os/core.py +89 -23
  29. memos/mem_os/main.py +3 -6
  30. memos/mem_os/product.py +418 -154
  31. memos/mem_os/utils/reference_utils.py +20 -0
  32. memos/mem_reader/factory.py +2 -0
  33. memos/mem_reader/simple_struct.py +204 -82
  34. memos/mem_scheduler/analyzer/__init__.py +0 -0
  35. memos/mem_scheduler/analyzer/mos_for_test_scheduler.py +569 -0
  36. memos/mem_scheduler/analyzer/scheduler_for_eval.py +280 -0
  37. memos/mem_scheduler/base_scheduler.py +126 -56
  38. memos/mem_scheduler/general_modules/dispatcher.py +2 -2
  39. memos/mem_scheduler/general_modules/misc.py +99 -1
  40. memos/mem_scheduler/general_modules/scheduler_logger.py +17 -11
  41. memos/mem_scheduler/general_scheduler.py +40 -88
  42. memos/mem_scheduler/memory_manage_modules/__init__.py +5 -0
  43. memos/mem_scheduler/memory_manage_modules/memory_filter.py +308 -0
  44. memos/mem_scheduler/{general_modules → memory_manage_modules}/retriever.py +34 -7
  45. memos/mem_scheduler/monitors/dispatcher_monitor.py +9 -8
  46. memos/mem_scheduler/monitors/general_monitor.py +119 -39
  47. memos/mem_scheduler/optimized_scheduler.py +124 -0
  48. memos/mem_scheduler/orm_modules/__init__.py +0 -0
  49. memos/mem_scheduler/orm_modules/base_model.py +635 -0
  50. memos/mem_scheduler/orm_modules/monitor_models.py +261 -0
  51. memos/mem_scheduler/scheduler_factory.py +2 -0
  52. memos/mem_scheduler/schemas/monitor_schemas.py +96 -29
  53. memos/mem_scheduler/utils/config_utils.py +100 -0
  54. memos/mem_scheduler/utils/db_utils.py +33 -0
  55. memos/mem_scheduler/utils/filter_utils.py +1 -1
  56. memos/mem_scheduler/webservice_modules/__init__.py +0 -0
  57. memos/mem_user/mysql_user_manager.py +4 -2
  58. memos/memories/activation/kv.py +2 -1
  59. memos/memories/textual/item.py +96 -17
  60. memos/memories/textual/naive.py +1 -1
  61. memos/memories/textual/tree.py +57 -3
  62. memos/memories/textual/tree_text_memory/organize/handler.py +4 -2
  63. memos/memories/textual/tree_text_memory/organize/manager.py +28 -14
  64. memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py +1 -2
  65. memos/memories/textual/tree_text_memory/organize/reorganizer.py +75 -23
  66. memos/memories/textual/tree_text_memory/retrieve/bochasearch.py +10 -6
  67. memos/memories/textual/tree_text_memory/retrieve/internet_retriever.py +6 -2
  68. memos/memories/textual/tree_text_memory/retrieve/internet_retriever_factory.py +2 -0
  69. memos/memories/textual/tree_text_memory/retrieve/recall.py +119 -21
  70. memos/memories/textual/tree_text_memory/retrieve/searcher.py +172 -44
  71. memos/memories/textual/tree_text_memory/retrieve/utils.py +6 -4
  72. memos/memories/textual/tree_text_memory/retrieve/xinyusearch.py +5 -4
  73. memos/memos_tools/notification_utils.py +46 -0
  74. memos/memos_tools/singleton.py +174 -0
  75. memos/memos_tools/thread_safe_dict.py +22 -0
  76. memos/memos_tools/thread_safe_dict_segment.py +382 -0
  77. memos/parsers/factory.py +2 -0
  78. memos/reranker/__init__.py +4 -0
  79. memos/reranker/base.py +24 -0
  80. memos/reranker/concat.py +59 -0
  81. memos/reranker/cosine_local.py +96 -0
  82. memos/reranker/factory.py +48 -0
  83. memos/reranker/http_bge.py +312 -0
  84. memos/reranker/noop.py +16 -0
  85. memos/templates/mem_reader_prompts.py +289 -40
  86. memos/templates/mem_scheduler_prompts.py +242 -0
  87. memos/templates/mos_prompts.py +133 -60
  88. memos/types.py +4 -1
  89. memos/api/context/context.py +0 -147
  90. memos/mem_scheduler/mos_for_test_scheduler.py +0 -146
  91. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info}/entry_points.txt +0 -0
  92. {memoryos-1.0.0.dist-info → memoryos-1.1.1.dist-info/licenses}/LICENSE +0 -0
  93. /memos/mem_scheduler/{general_modules → webservice_modules}/rabbitmq_service.py +0 -0
  94. /memos/mem_scheduler/{general_modules → webservice_modules}/redis_service.py +0 -0
@@ -1,9 +1,10 @@
1
1
  import json
2
+ import os
2
3
 
3
4
  from contextlib import suppress
4
5
  from datetime import datetime
5
6
  from queue import Empty, Full, Queue
6
- from typing import TYPE_CHECKING, TypeVar
7
+ from typing import TYPE_CHECKING, Any, Generic, TypeVar
7
8
 
8
9
  from pydantic import field_serializer
9
10
 
@@ -16,6 +17,75 @@ T = TypeVar("T")
16
17
  BaseModelType = TypeVar("T", bound="BaseModel")
17
18
 
18
19
 
20
+ class EnvConfigMixin(Generic[T]):
21
+ """Abstract base class for environment variable configuration."""
22
+
23
+ ENV_PREFIX = "MEMSCHEDULER_"
24
+
25
+ @classmethod
26
+ def get_env_prefix(cls) -> str:
27
+ """Automatically generates environment variable prefix from class name.
28
+
29
+ Converts the class name to uppercase and appends an underscore.
30
+ If the class name ends with 'Config', that suffix is removed first.
31
+
32
+ Examples:
33
+ RabbitMQConfig -> "RABBITMQ_"
34
+ OpenAIConfig -> "OPENAI_"
35
+ GraphDBAuthConfig -> "GRAPH_DB_AUTH_"
36
+ """
37
+ class_name = cls.__name__
38
+ # Remove 'Config' suffix if present
39
+ if class_name.endswith("Config"):
40
+ class_name = class_name[:-6]
41
+ # Convert to uppercase and add trailing underscore
42
+
43
+ return f"{cls.ENV_PREFIX}{class_name.upper()}_"
44
+
45
+ @classmethod
46
+ def from_env(cls: type[T]) -> T:
47
+ """Creates a config instance from environment variables.
48
+
49
+ Reads all environment variables with the class-specific prefix and maps them
50
+ to corresponding configuration fields (converting to the appropriate types).
51
+
52
+ Returns:
53
+ An instance of the config class populated from environment variables.
54
+
55
+ Raises:
56
+ ValueError: If required environment variables are missing.
57
+ """
58
+ prefix = cls.get_env_prefix()
59
+ field_values = {}
60
+
61
+ for field_name, field_info in cls.model_fields.items():
62
+ env_var = f"{prefix}{field_name.upper()}"
63
+ field_type = field_info.annotation
64
+
65
+ if field_info.is_required() and env_var not in os.environ:
66
+ raise ValueError(f"Required environment variable {env_var} is missing")
67
+
68
+ if env_var in os.environ:
69
+ raw_value = os.environ[env_var]
70
+ field_values[field_name] = cls._parse_env_value(raw_value, field_type)
71
+ elif field_info.default is not None:
72
+ field_values[field_name] = field_info.default
73
+ else:
74
+ raise ValueError()
75
+ return cls(**field_values)
76
+
77
+ @classmethod
78
+ def _parse_env_value(cls, value: str, target_type: type) -> Any:
79
+ """Converts environment variable string to appropriate type."""
80
+ if target_type is bool:
81
+ return value.lower() in ("true", "1", "t", "y", "yes")
82
+ if target_type is int:
83
+ return int(value)
84
+ if target_type is float:
85
+ return float(value)
86
+ return value
87
+
88
+
19
89
  class DictConversionMixin:
20
90
  """
21
91
  Provides conversion functionality between Pydantic models and dictionaries,
@@ -44,6 +114,26 @@ class DictConversionMixin:
44
114
  dump_data["timestamp"] = self.serialize_datetime(self.timestamp, None)
45
115
  return dump_data
46
116
 
117
+ def to_json(self, **kwargs) -> str:
118
+ """
119
+ Convert model instance to a JSON string.
120
+ - Accepts the same kwargs as json.dumps (e.g., indent, ensure_ascii)
121
+ - Default settings make JSON human-readable and UTF-8 safe
122
+ """
123
+ return json.dumps(self.to_dict(), ensure_ascii=False, default=lambda o: str(o), **kwargs)
124
+
125
+ @classmethod
126
+ def from_json(cls: type[BaseModelType], json_str: str) -> BaseModelType:
127
+ """
128
+ Create model instance from a JSON string.
129
+ - Parses JSON into a dictionary and delegates to from_dict
130
+ """
131
+ try:
132
+ data = json.loads(json_str)
133
+ except json.JSONDecodeError as e:
134
+ raise ValueError(f"Invalid JSON string: {e}") from e
135
+ return cls.from_dict(data)
136
+
47
137
  @classmethod
48
138
  def from_dict(cls: type[BaseModelType], data: dict) -> BaseModelType:
49
139
  """
@@ -102,3 +192,11 @@ class AutoDroppingQueue(Queue[T]):
102
192
  def get_queue_content_without_pop(self) -> list[T]:
103
193
  """Return a copy of the queue's contents without modifying it."""
104
194
  return list(self.queue)
195
+
196
+ def clear(self) -> None:
197
+ """Remove all items from the queue.
198
+
199
+ This operation is thread-safe.
200
+ """
201
+ with self.mutex:
202
+ self.queue.clear()
@@ -69,7 +69,7 @@ class SchedulerLoggerModule(BaseSchedulerModule):
69
69
  and mem_cube_id in self.monitor.activation_memory_monitors[user_id]
70
70
  ):
71
71
  activation_monitor = self.monitor.activation_memory_monitors[user_id][mem_cube_id]
72
- transformed_act_memory_size = len(activation_monitor.memories)
72
+ transformed_act_memory_size = len(activation_monitor.obj.memories)
73
73
  logger.info(
74
74
  f'activation_memory_monitors currently has "{transformed_act_memory_size}" transformed memory size'
75
75
  )
@@ -98,6 +98,7 @@ class SchedulerLoggerModule(BaseSchedulerModule):
98
98
  )
99
99
  return log_message
100
100
 
101
+ # TODO: 日志打出来数量不对
101
102
  @log_exceptions(logger=logger)
102
103
  def log_working_memory_replacement(
103
104
  self,
@@ -125,6 +126,7 @@ class SchedulerLoggerModule(BaseSchedulerModule):
125
126
  added_memories = list(new_set - original_set) # Present in new but not original
126
127
 
127
128
  # recording messages
129
+ log_messages = []
128
130
  for memory in added_memories:
129
131
  normalized_mem = transform_name_to_key(name=memory)
130
132
  if normalized_mem not in memory_type_map:
@@ -145,11 +147,13 @@ class SchedulerLoggerModule(BaseSchedulerModule):
145
147
  mem_cube_id=mem_cube_id,
146
148
  mem_cube=mem_cube,
147
149
  )
148
- log_func_callback([log_message])
149
- logger.info(
150
- f"{len(added_memories)} {LONG_TERM_MEMORY_TYPE} memorie(s) "
151
- f"transformed to {WORKING_MEMORY_TYPE} memories."
152
- )
150
+ log_messages.append(log_message)
151
+
152
+ logger.info(
153
+ f"{len(added_memories)} {LONG_TERM_MEMORY_TYPE} memorie(s) "
154
+ f"transformed to {WORKING_MEMORY_TYPE} memories."
155
+ )
156
+ log_func_callback(log_messages)
153
157
 
154
158
  @log_exceptions(logger=logger)
155
159
  def log_activation_memory_update(
@@ -170,6 +174,7 @@ class SchedulerLoggerModule(BaseSchedulerModule):
170
174
  added_memories = list(new_set - original_set) # Present in new but not original
171
175
 
172
176
  # recording messages
177
+ log_messages = []
173
178
  for mem in added_memories:
174
179
  log_message_a = self.create_autofilled_log_item(
175
180
  log_content=mem,
@@ -194,12 +199,13 @@ class SchedulerLoggerModule(BaseSchedulerModule):
194
199
  mem_cube_id=mem_cube_id,
195
200
  mem_cube=mem_cube,
196
201
  )
197
- logger.info(
198
- f"{len(added_memories)} {ACTIVATION_MEMORY_TYPE} memorie(s) "
199
- f"transformed to {PARAMETER_MEMORY_TYPE} memories."
200
- )
201
202
 
202
- log_func_callback([log_message_a, log_message_b])
203
+ log_messages.extend([log_message_a, log_message_b])
204
+ logger.info(
205
+ f"{len(added_memories)} {ACTIVATION_MEMORY_TYPE} memorie(s) "
206
+ f"transformed to {PARAMETER_MEMORY_TYPE} memories."
207
+ )
208
+ log_func_callback(log_messages)
203
209
 
204
210
  @log_exceptions(logger=logger)
205
211
  def log_adding_memory(
@@ -27,6 +27,8 @@ class GeneralScheduler(BaseScheduler):
27
27
  """Initialize the scheduler with the given configuration."""
28
28
  super().__init__(config)
29
29
 
30
+ self.query_key_words_limit = self.config.get("query_key_words_limit", 20)
31
+
30
32
  # register handlers
31
33
  handlers = {
32
34
  QUERY_LABEL: self._query_message_consumer,
@@ -35,78 +37,6 @@ class GeneralScheduler(BaseScheduler):
35
37
  }
36
38
  self.dispatcher.register_handlers(handlers)
37
39
 
38
- # for evaluation
39
- def search_for_eval(
40
- self, query: str, user_id: UserID | str, top_k: int, scheduler_flag: bool = True
41
- ) -> (list[str], bool):
42
- self.monitor.register_query_monitor_if_not_exists(
43
- user_id=user_id, mem_cube_id=self.current_mem_cube_id
44
- )
45
-
46
- query_keywords = self.monitor.extract_query_keywords(query=query)
47
- logger.info(f'Extract keywords "{query_keywords}" from query "{query}"')
48
-
49
- item = QueryMonitorItem(
50
- query_text=query,
51
- keywords=query_keywords,
52
- max_keywords=DEFAULT_MAX_QUERY_KEY_WORDS,
53
- )
54
- query_monitor = self.monitor.query_monitors[user_id][self.current_mem_cube_id]
55
- query_monitor.put(item=item)
56
- logger.debug(f"Queries in monitor are {query_monitor.get_queries_with_timesort()}.")
57
-
58
- queries = [query]
59
-
60
- # recall
61
- mem_cube = self.current_mem_cube
62
- text_mem_base = mem_cube.text_mem
63
-
64
- cur_working_memory: list[TextualMemoryItem] = text_mem_base.get_working_memory()
65
- text_working_memory: list[str] = [w_m.memory for w_m in cur_working_memory]
66
- intent_result = self.monitor.detect_intent(
67
- q_list=queries, text_working_memory=text_working_memory
68
- )
69
-
70
- if not scheduler_flag:
71
- return text_working_memory, intent_result["trigger_retrieval"]
72
- else:
73
- if intent_result["trigger_retrieval"]:
74
- missing_evidences = intent_result["missing_evidences"]
75
- num_evidence = len(missing_evidences)
76
- k_per_evidence = max(1, top_k // max(1, num_evidence))
77
- new_candidates = []
78
- for item in missing_evidences:
79
- logger.info(f"missing_evidences: {item}")
80
- results: list[TextualMemoryItem] = self.retriever.search(
81
- query=item,
82
- mem_cube=mem_cube,
83
- top_k=k_per_evidence,
84
- method=self.search_method,
85
- )
86
- logger.info(
87
- f"search results for {missing_evidences}: {[one.memory for one in results]}"
88
- )
89
- new_candidates.extend(results)
90
- print(
91
- f"missing_evidences: {missing_evidences} and get {len(new_candidates)} new candidate memories."
92
- )
93
- else:
94
- new_candidates = []
95
- print(f"intent_result: {intent_result}. not triggered")
96
-
97
- # rerank
98
- new_order_working_memory = self.replace_working_memory(
99
- user_id=user_id,
100
- mem_cube_id=self.current_mem_cube_id,
101
- mem_cube=self.current_mem_cube,
102
- original_memory=cur_working_memory,
103
- new_memory=new_candidates,
104
- )
105
- new_order_working_memory = new_order_working_memory[:top_k]
106
- logger.info(f"size of new_order_working_memory: {len(new_order_working_memory)}")
107
-
108
- return [m.memory for m in new_order_working_memory], intent_result["trigger_retrieval"]
109
-
110
40
  def _query_message_consumer(self, messages: list[ScheduleMessageItem]) -> None:
111
41
  """
112
42
  Process and handle query trigger messages from the queue.
@@ -140,7 +70,9 @@ class GeneralScheduler(BaseScheduler):
140
70
 
141
71
  query = msg.content
142
72
  query_keywords = self.monitor.extract_query_keywords(query=query)
143
- logger.info(f'Extract keywords "{query_keywords}" from query "{query}"')
73
+ logger.info(
74
+ f'Extracted keywords "{query_keywords}" from query "{query}" for user_id={user_id}'
75
+ )
144
76
 
145
77
  if len(query_keywords) == 0:
146
78
  stripped_query = query.strip()
@@ -155,21 +87,26 @@ class GeneralScheduler(BaseScheduler):
155
87
  )
156
88
  words = stripped_query # Default to character count
157
89
 
158
- query_keywords = list(set(words[:20]))
90
+ query_keywords = list(set(words[: self.query_key_words_limit]))
159
91
  logger.error(
160
- f"Keyword extraction failed for query. Using fallback keywords: {query_keywords[:10]}... (truncated)"
92
+ f"Keyword extraction failed for query '{query}' (user_id={user_id}). Using fallback keywords: {query_keywords[:10]}... (truncated)",
93
+ exc_info=True,
161
94
  )
162
95
 
163
96
  item = QueryMonitorItem(
97
+ user_id=user_id,
98
+ mem_cube_id=mem_cube_id,
164
99
  query_text=query,
165
100
  keywords=query_keywords,
166
101
  max_keywords=DEFAULT_MAX_QUERY_KEY_WORDS,
167
102
  )
168
103
 
169
- self.monitor.query_monitors[user_id][mem_cube_id].put(item=item)
104
+ query_db_manager = self.monitor.query_monitors[user_id][mem_cube_id]
105
+ query_db_manager.obj.put(item=item)
106
+ # Sync with database after adding new item
107
+ query_db_manager.sync_with_orm()
170
108
  logger.debug(
171
- f"Queries in monitor are "
172
- f"{self.monitor.query_monitors[user_id][mem_cube_id].get_queries_with_timesort()}."
109
+ f"Queries in monitor for user_id={user_id}, mem_cube_id={mem_cube_id}: {query_db_manager.obj.get_queries_with_timesort()}"
173
110
  )
174
111
 
175
112
  queries = [msg.content for msg in messages]
@@ -183,7 +120,7 @@ class GeneralScheduler(BaseScheduler):
183
120
  top_k=self.top_k,
184
121
  )
185
122
  logger.info(
186
- f"Processed {queries} and get {len(new_candidates)} new candidate memories."
123
+ f"Processed {len(queries)} queries {queries} and retrieved {len(new_candidates)} new candidate memories for user_id={user_id}"
187
124
  )
188
125
 
189
126
  # rerank
@@ -194,7 +131,9 @@ class GeneralScheduler(BaseScheduler):
194
131
  original_memory=cur_working_memory,
195
132
  new_memory=new_candidates,
196
133
  )
197
- logger.info(f"size of new_order_working_memory: {len(new_order_working_memory)}")
134
+ logger.info(
135
+ f"Final working memory size: {len(new_order_working_memory)} memories for user_id={user_id}"
136
+ )
198
137
 
199
138
  # update activation memories
200
139
  logger.info(
@@ -293,10 +232,17 @@ class GeneralScheduler(BaseScheduler):
293
232
 
294
233
  text_mem_base = mem_cube.text_mem
295
234
  if not isinstance(text_mem_base, TreeTextMemory):
296
- logger.error("Not implemented!", exc_info=True)
235
+ logger.error(
236
+ f"Not implemented! Expected TreeTextMemory but got {type(text_mem_base).__name__} "
237
+ f"for mem_cube_id={mem_cube_id}, user_id={user_id}. "
238
+ f"text_mem_base value: {text_mem_base}",
239
+ exc_info=True,
240
+ )
297
241
  return
298
242
 
299
- logger.info(f"Processing {len(queries)} queries.")
243
+ logger.info(
244
+ f"Processing {len(queries)} queries for user_id={user_id}, mem_cube_id={mem_cube_id}"
245
+ )
300
246
 
301
247
  cur_working_memory: list[TextualMemoryItem] = text_mem_base.get_working_memory()
302
248
  text_working_memory: list[str] = [w_m.memory for w_m in cur_working_memory]
@@ -312,16 +258,20 @@ class GeneralScheduler(BaseScheduler):
312
258
  time_trigger_flag = True
313
259
 
314
260
  if (not intent_result["trigger_retrieval"]) and (not time_trigger_flag):
315
- logger.info(f"Query schedule not triggered. Intent_result: {intent_result}")
261
+ logger.info(
262
+ f"Query schedule not triggered for user_id={user_id}, mem_cube_id={mem_cube_id}. Intent_result: {intent_result}"
263
+ )
316
264
  return
317
265
  elif (not intent_result["trigger_retrieval"]) and time_trigger_flag:
318
- logger.info("Query schedule is forced to trigger due to time ticker")
266
+ logger.info(
267
+ f"Query schedule forced to trigger due to time ticker for user_id={user_id}, mem_cube_id={mem_cube_id}"
268
+ )
319
269
  intent_result["trigger_retrieval"] = True
320
270
  intent_result["missing_evidences"] = queries
321
271
  else:
322
272
  logger.info(
323
- f'Query schedule triggered for user "{user_id}" and mem_cube "{mem_cube_id}".'
324
- f" Missing evidences: {intent_result['missing_evidences']}"
273
+ f"Query schedule triggered for user_id={user_id}, mem_cube_id={mem_cube_id}. "
274
+ f"Missing evidences: {intent_result['missing_evidences']}"
325
275
  )
326
276
 
327
277
  missing_evidences = intent_result["missing_evidences"]
@@ -329,7 +279,9 @@ class GeneralScheduler(BaseScheduler):
329
279
  k_per_evidence = max(1, top_k // max(1, num_evidence))
330
280
  new_candidates = []
331
281
  for item in missing_evidences:
332
- logger.info(f"missing_evidences: {item}")
282
+ logger.info(
283
+ f"Searching for missing evidence: '{item}' with top_k={k_per_evidence} for user_id={user_id}"
284
+ )
333
285
  info = {
334
286
  "user_id": user_id,
335
287
  "session_id": "",
@@ -343,7 +295,7 @@ class GeneralScheduler(BaseScheduler):
343
295
  info=info,
344
296
  )
345
297
  logger.info(
346
- f"search results for {missing_evidences}: {[one.memory for one in results]}"
298
+ f"Search results for missing evidence '{item}': {[one.memory for one in results]}"
347
299
  )
348
300
  new_candidates.extend(results)
349
301
  return cur_working_memory, new_candidates
@@ -0,0 +1,5 @@
1
+ from .memory_filter import MemoryFilter
2
+ from .retriever import SchedulerRetriever
3
+
4
+
5
+ __all__ = ["MemoryFilter", "SchedulerRetriever"]