MemoryOS 2.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- memoryos-2.0.3.dist-info/METADATA +418 -0
- memoryos-2.0.3.dist-info/RECORD +315 -0
- memoryos-2.0.3.dist-info/WHEEL +4 -0
- memoryos-2.0.3.dist-info/entry_points.txt +3 -0
- memoryos-2.0.3.dist-info/licenses/LICENSE +201 -0
- memos/__init__.py +20 -0
- memos/api/client.py +571 -0
- memos/api/config.py +1018 -0
- memos/api/context/dependencies.py +50 -0
- memos/api/exceptions.py +53 -0
- memos/api/handlers/__init__.py +62 -0
- memos/api/handlers/add_handler.py +158 -0
- memos/api/handlers/base_handler.py +194 -0
- memos/api/handlers/chat_handler.py +1401 -0
- memos/api/handlers/component_init.py +388 -0
- memos/api/handlers/config_builders.py +190 -0
- memos/api/handlers/feedback_handler.py +93 -0
- memos/api/handlers/formatters_handler.py +237 -0
- memos/api/handlers/memory_handler.py +316 -0
- memos/api/handlers/scheduler_handler.py +497 -0
- memos/api/handlers/search_handler.py +222 -0
- memos/api/handlers/suggestion_handler.py +117 -0
- memos/api/mcp_serve.py +614 -0
- memos/api/middleware/request_context.py +101 -0
- memos/api/product_api.py +38 -0
- memos/api/product_models.py +1206 -0
- memos/api/routers/__init__.py +1 -0
- memos/api/routers/product_router.py +477 -0
- memos/api/routers/server_router.py +394 -0
- memos/api/server_api.py +44 -0
- memos/api/start_api.py +433 -0
- memos/chunkers/__init__.py +4 -0
- memos/chunkers/base.py +24 -0
- memos/chunkers/charactertext_chunker.py +41 -0
- memos/chunkers/factory.py +24 -0
- memos/chunkers/markdown_chunker.py +62 -0
- memos/chunkers/sentence_chunker.py +54 -0
- memos/chunkers/simple_chunker.py +50 -0
- memos/cli.py +113 -0
- memos/configs/__init__.py +0 -0
- memos/configs/base.py +82 -0
- memos/configs/chunker.py +59 -0
- memos/configs/embedder.py +88 -0
- memos/configs/graph_db.py +236 -0
- memos/configs/internet_retriever.py +100 -0
- memos/configs/llm.py +151 -0
- memos/configs/mem_agent.py +54 -0
- memos/configs/mem_chat.py +81 -0
- memos/configs/mem_cube.py +105 -0
- memos/configs/mem_os.py +83 -0
- memos/configs/mem_reader.py +91 -0
- memos/configs/mem_scheduler.py +385 -0
- memos/configs/mem_user.py +70 -0
- memos/configs/memory.py +324 -0
- memos/configs/parser.py +38 -0
- memos/configs/reranker.py +18 -0
- memos/configs/utils.py +8 -0
- memos/configs/vec_db.py +80 -0
- memos/context/context.py +355 -0
- memos/dependency.py +52 -0
- memos/deprecation.py +262 -0
- memos/embedders/__init__.py +0 -0
- memos/embedders/ark.py +95 -0
- memos/embedders/base.py +106 -0
- memos/embedders/factory.py +29 -0
- memos/embedders/ollama.py +77 -0
- memos/embedders/sentence_transformer.py +49 -0
- memos/embedders/universal_api.py +51 -0
- memos/exceptions.py +30 -0
- memos/graph_dbs/__init__.py +0 -0
- memos/graph_dbs/base.py +274 -0
- memos/graph_dbs/factory.py +27 -0
- memos/graph_dbs/item.py +46 -0
- memos/graph_dbs/nebular.py +1794 -0
- memos/graph_dbs/neo4j.py +1942 -0
- memos/graph_dbs/neo4j_community.py +1058 -0
- memos/graph_dbs/polardb.py +5446 -0
- memos/hello_world.py +97 -0
- memos/llms/__init__.py +0 -0
- memos/llms/base.py +25 -0
- memos/llms/deepseek.py +13 -0
- memos/llms/factory.py +38 -0
- memos/llms/hf.py +443 -0
- memos/llms/hf_singleton.py +114 -0
- memos/llms/ollama.py +135 -0
- memos/llms/openai.py +222 -0
- memos/llms/openai_new.py +198 -0
- memos/llms/qwen.py +13 -0
- memos/llms/utils.py +14 -0
- memos/llms/vllm.py +218 -0
- memos/log.py +237 -0
- memos/mem_agent/base.py +19 -0
- memos/mem_agent/deepsearch_agent.py +391 -0
- memos/mem_agent/factory.py +36 -0
- memos/mem_chat/__init__.py +0 -0
- memos/mem_chat/base.py +30 -0
- memos/mem_chat/factory.py +21 -0
- memos/mem_chat/simple.py +200 -0
- memos/mem_cube/__init__.py +0 -0
- memos/mem_cube/base.py +30 -0
- memos/mem_cube/general.py +240 -0
- memos/mem_cube/navie.py +172 -0
- memos/mem_cube/utils.py +169 -0
- memos/mem_feedback/base.py +15 -0
- memos/mem_feedback/feedback.py +1192 -0
- memos/mem_feedback/simple_feedback.py +40 -0
- memos/mem_feedback/utils.py +230 -0
- memos/mem_os/client.py +5 -0
- memos/mem_os/core.py +1203 -0
- memos/mem_os/main.py +582 -0
- memos/mem_os/product.py +1608 -0
- memos/mem_os/product_server.py +455 -0
- memos/mem_os/utils/default_config.py +359 -0
- memos/mem_os/utils/format_utils.py +1403 -0
- memos/mem_os/utils/reference_utils.py +162 -0
- memos/mem_reader/__init__.py +0 -0
- memos/mem_reader/base.py +47 -0
- memos/mem_reader/factory.py +53 -0
- memos/mem_reader/memory.py +298 -0
- memos/mem_reader/multi_modal_struct.py +965 -0
- memos/mem_reader/read_multi_modal/__init__.py +43 -0
- memos/mem_reader/read_multi_modal/assistant_parser.py +311 -0
- memos/mem_reader/read_multi_modal/base.py +273 -0
- memos/mem_reader/read_multi_modal/file_content_parser.py +826 -0
- memos/mem_reader/read_multi_modal/image_parser.py +359 -0
- memos/mem_reader/read_multi_modal/multi_modal_parser.py +252 -0
- memos/mem_reader/read_multi_modal/string_parser.py +139 -0
- memos/mem_reader/read_multi_modal/system_parser.py +327 -0
- memos/mem_reader/read_multi_modal/text_content_parser.py +131 -0
- memos/mem_reader/read_multi_modal/tool_parser.py +210 -0
- memos/mem_reader/read_multi_modal/user_parser.py +218 -0
- memos/mem_reader/read_multi_modal/utils.py +358 -0
- memos/mem_reader/simple_struct.py +912 -0
- memos/mem_reader/strategy_struct.py +163 -0
- memos/mem_reader/utils.py +157 -0
- memos/mem_scheduler/__init__.py +0 -0
- memos/mem_scheduler/analyzer/__init__.py +0 -0
- memos/mem_scheduler/analyzer/api_analyzer.py +714 -0
- memos/mem_scheduler/analyzer/eval_analyzer.py +219 -0
- memos/mem_scheduler/analyzer/mos_for_test_scheduler.py +571 -0
- memos/mem_scheduler/analyzer/scheduler_for_eval.py +280 -0
- memos/mem_scheduler/base_scheduler.py +1319 -0
- memos/mem_scheduler/general_modules/__init__.py +0 -0
- memos/mem_scheduler/general_modules/api_misc.py +137 -0
- memos/mem_scheduler/general_modules/base.py +80 -0
- memos/mem_scheduler/general_modules/init_components_for_scheduler.py +425 -0
- memos/mem_scheduler/general_modules/misc.py +313 -0
- memos/mem_scheduler/general_modules/scheduler_logger.py +389 -0
- memos/mem_scheduler/general_modules/task_threads.py +315 -0
- memos/mem_scheduler/general_scheduler.py +1495 -0
- memos/mem_scheduler/memory_manage_modules/__init__.py +5 -0
- memos/mem_scheduler/memory_manage_modules/memory_filter.py +306 -0
- memos/mem_scheduler/memory_manage_modules/retriever.py +547 -0
- memos/mem_scheduler/monitors/__init__.py +0 -0
- memos/mem_scheduler/monitors/dispatcher_monitor.py +366 -0
- memos/mem_scheduler/monitors/general_monitor.py +394 -0
- memos/mem_scheduler/monitors/task_schedule_monitor.py +254 -0
- memos/mem_scheduler/optimized_scheduler.py +410 -0
- memos/mem_scheduler/orm_modules/__init__.py +0 -0
- memos/mem_scheduler/orm_modules/api_redis_model.py +518 -0
- memos/mem_scheduler/orm_modules/base_model.py +729 -0
- memos/mem_scheduler/orm_modules/monitor_models.py +261 -0
- memos/mem_scheduler/orm_modules/redis_model.py +699 -0
- memos/mem_scheduler/scheduler_factory.py +23 -0
- memos/mem_scheduler/schemas/__init__.py +0 -0
- memos/mem_scheduler/schemas/analyzer_schemas.py +52 -0
- memos/mem_scheduler/schemas/api_schemas.py +233 -0
- memos/mem_scheduler/schemas/general_schemas.py +55 -0
- memos/mem_scheduler/schemas/message_schemas.py +173 -0
- memos/mem_scheduler/schemas/monitor_schemas.py +406 -0
- memos/mem_scheduler/schemas/task_schemas.py +132 -0
- memos/mem_scheduler/task_schedule_modules/__init__.py +0 -0
- memos/mem_scheduler/task_schedule_modules/dispatcher.py +740 -0
- memos/mem_scheduler/task_schedule_modules/local_queue.py +247 -0
- memos/mem_scheduler/task_schedule_modules/orchestrator.py +74 -0
- memos/mem_scheduler/task_schedule_modules/redis_queue.py +1385 -0
- memos/mem_scheduler/task_schedule_modules/task_queue.py +162 -0
- memos/mem_scheduler/utils/__init__.py +0 -0
- memos/mem_scheduler/utils/api_utils.py +77 -0
- memos/mem_scheduler/utils/config_utils.py +100 -0
- memos/mem_scheduler/utils/db_utils.py +50 -0
- memos/mem_scheduler/utils/filter_utils.py +176 -0
- memos/mem_scheduler/utils/metrics.py +125 -0
- memos/mem_scheduler/utils/misc_utils.py +290 -0
- memos/mem_scheduler/utils/monitor_event_utils.py +67 -0
- memos/mem_scheduler/utils/status_tracker.py +229 -0
- memos/mem_scheduler/webservice_modules/__init__.py +0 -0
- memos/mem_scheduler/webservice_modules/rabbitmq_service.py +485 -0
- memos/mem_scheduler/webservice_modules/redis_service.py +380 -0
- memos/mem_user/factory.py +94 -0
- memos/mem_user/mysql_persistent_user_manager.py +271 -0
- memos/mem_user/mysql_user_manager.py +502 -0
- memos/mem_user/persistent_factory.py +98 -0
- memos/mem_user/persistent_user_manager.py +260 -0
- memos/mem_user/redis_persistent_user_manager.py +225 -0
- memos/mem_user/user_manager.py +488 -0
- memos/memories/__init__.py +0 -0
- memos/memories/activation/__init__.py +0 -0
- memos/memories/activation/base.py +42 -0
- memos/memories/activation/item.py +56 -0
- memos/memories/activation/kv.py +292 -0
- memos/memories/activation/vllmkv.py +219 -0
- memos/memories/base.py +19 -0
- memos/memories/factory.py +42 -0
- memos/memories/parametric/__init__.py +0 -0
- memos/memories/parametric/base.py +19 -0
- memos/memories/parametric/item.py +11 -0
- memos/memories/parametric/lora.py +41 -0
- memos/memories/textual/__init__.py +0 -0
- memos/memories/textual/base.py +92 -0
- memos/memories/textual/general.py +236 -0
- memos/memories/textual/item.py +304 -0
- memos/memories/textual/naive.py +187 -0
- memos/memories/textual/prefer_text_memory/__init__.py +0 -0
- memos/memories/textual/prefer_text_memory/adder.py +504 -0
- memos/memories/textual/prefer_text_memory/config.py +106 -0
- memos/memories/textual/prefer_text_memory/extractor.py +221 -0
- memos/memories/textual/prefer_text_memory/factory.py +85 -0
- memos/memories/textual/prefer_text_memory/retrievers.py +177 -0
- memos/memories/textual/prefer_text_memory/spliter.py +132 -0
- memos/memories/textual/prefer_text_memory/utils.py +93 -0
- memos/memories/textual/preference.py +344 -0
- memos/memories/textual/simple_preference.py +161 -0
- memos/memories/textual/simple_tree.py +69 -0
- memos/memories/textual/tree.py +459 -0
- memos/memories/textual/tree_text_memory/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/organize/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/organize/handler.py +184 -0
- memos/memories/textual/tree_text_memory/organize/manager.py +518 -0
- memos/memories/textual/tree_text_memory/organize/relation_reason_detector.py +238 -0
- memos/memories/textual/tree_text_memory/organize/reorganizer.py +622 -0
- memos/memories/textual/tree_text_memory/retrieve/__init__.py +0 -0
- memos/memories/textual/tree_text_memory/retrieve/advanced_searcher.py +364 -0
- memos/memories/textual/tree_text_memory/retrieve/bm25_util.py +186 -0
- memos/memories/textual/tree_text_memory/retrieve/bochasearch.py +419 -0
- memos/memories/textual/tree_text_memory/retrieve/internet_retriever.py +270 -0
- memos/memories/textual/tree_text_memory/retrieve/internet_retriever_factory.py +102 -0
- memos/memories/textual/tree_text_memory/retrieve/reasoner.py +61 -0
- memos/memories/textual/tree_text_memory/retrieve/recall.py +497 -0
- memos/memories/textual/tree_text_memory/retrieve/reranker.py +111 -0
- memos/memories/textual/tree_text_memory/retrieve/retrieval_mid_structs.py +16 -0
- memos/memories/textual/tree_text_memory/retrieve/retrieve_utils.py +472 -0
- memos/memories/textual/tree_text_memory/retrieve/searcher.py +848 -0
- memos/memories/textual/tree_text_memory/retrieve/task_goal_parser.py +135 -0
- memos/memories/textual/tree_text_memory/retrieve/utils.py +54 -0
- memos/memories/textual/tree_text_memory/retrieve/xinyusearch.py +387 -0
- memos/memos_tools/dinding_report_bot.py +453 -0
- memos/memos_tools/lockfree_dict.py +120 -0
- memos/memos_tools/notification_service.py +44 -0
- memos/memos_tools/notification_utils.py +142 -0
- memos/memos_tools/singleton.py +174 -0
- memos/memos_tools/thread_safe_dict.py +310 -0
- memos/memos_tools/thread_safe_dict_segment.py +382 -0
- memos/multi_mem_cube/__init__.py +0 -0
- memos/multi_mem_cube/composite_cube.py +86 -0
- memos/multi_mem_cube/single_cube.py +874 -0
- memos/multi_mem_cube/views.py +54 -0
- memos/parsers/__init__.py +0 -0
- memos/parsers/base.py +15 -0
- memos/parsers/factory.py +21 -0
- memos/parsers/markitdown.py +28 -0
- memos/reranker/__init__.py +4 -0
- memos/reranker/base.py +25 -0
- memos/reranker/concat.py +103 -0
- memos/reranker/cosine_local.py +102 -0
- memos/reranker/factory.py +72 -0
- memos/reranker/http_bge.py +324 -0
- memos/reranker/http_bge_strategy.py +327 -0
- memos/reranker/noop.py +19 -0
- memos/reranker/strategies/__init__.py +4 -0
- memos/reranker/strategies/base.py +61 -0
- memos/reranker/strategies/concat_background.py +94 -0
- memos/reranker/strategies/concat_docsource.py +110 -0
- memos/reranker/strategies/dialogue_common.py +109 -0
- memos/reranker/strategies/factory.py +31 -0
- memos/reranker/strategies/single_turn.py +107 -0
- memos/reranker/strategies/singleturn_outmem.py +98 -0
- memos/settings.py +10 -0
- memos/templates/__init__.py +0 -0
- memos/templates/advanced_search_prompts.py +211 -0
- memos/templates/cloud_service_prompt.py +107 -0
- memos/templates/instruction_completion.py +66 -0
- memos/templates/mem_agent_prompts.py +85 -0
- memos/templates/mem_feedback_prompts.py +822 -0
- memos/templates/mem_reader_prompts.py +1096 -0
- memos/templates/mem_reader_strategy_prompts.py +238 -0
- memos/templates/mem_scheduler_prompts.py +626 -0
- memos/templates/mem_search_prompts.py +93 -0
- memos/templates/mos_prompts.py +403 -0
- memos/templates/prefer_complete_prompt.py +735 -0
- memos/templates/tool_mem_prompts.py +139 -0
- memos/templates/tree_reorganize_prompts.py +230 -0
- memos/types/__init__.py +34 -0
- memos/types/general_types.py +151 -0
- memos/types/openai_chat_completion_types/__init__.py +15 -0
- memos/types/openai_chat_completion_types/chat_completion_assistant_message_param.py +56 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_image_param.py +27 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_input_audio_param.py +23 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_param.py +43 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_refusal_param.py +16 -0
- memos/types/openai_chat_completion_types/chat_completion_content_part_text_param.py +16 -0
- memos/types/openai_chat_completion_types/chat_completion_message_custom_tool_call_param.py +27 -0
- memos/types/openai_chat_completion_types/chat_completion_message_function_tool_call_param.py +32 -0
- memos/types/openai_chat_completion_types/chat_completion_message_param.py +18 -0
- memos/types/openai_chat_completion_types/chat_completion_message_tool_call_union_param.py +15 -0
- memos/types/openai_chat_completion_types/chat_completion_system_message_param.py +36 -0
- memos/types/openai_chat_completion_types/chat_completion_tool_message_param.py +30 -0
- memos/types/openai_chat_completion_types/chat_completion_user_message_param.py +34 -0
- memos/utils.py +123 -0
- memos/vec_dbs/__init__.py +0 -0
- memos/vec_dbs/base.py +117 -0
- memos/vec_dbs/factory.py +23 -0
- memos/vec_dbs/item.py +50 -0
- memos/vec_dbs/milvus.py +654 -0
- memos/vec_dbs/qdrant.py +355 -0
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
import traceback
|
|
5
|
+
|
|
6
|
+
from collections import defaultdict
|
|
7
|
+
from functools import wraps
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
import yaml
|
|
11
|
+
|
|
12
|
+
from memos.log import get_logger
|
|
13
|
+
from memos.mem_scheduler.schemas.message_schemas import (
|
|
14
|
+
ScheduleMessageItem,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
logger = get_logger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _normalize_env_value(value: str | None) -> str:
|
|
22
|
+
"""Normalize environment variable values for comparison."""
|
|
23
|
+
return value.strip().lower() if isinstance(value, str) else ""
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def is_playground_env() -> bool:
    """True when the ENV_NAME variable marks a Playground deployment."""
    # Any ENV_NAME beginning with "playground" (case-insensitive) counts.
    return _normalize_env_value(os.getenv("ENV_NAME")).startswith("playground")
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def is_cloud_env() -> bool:
    """
    Decide whether the scheduler should treat the runtime as a cloud environment.

    Rules:
        - Any Playground ENV_NAME is explicitly NOT cloud.
        - MEMSCHEDULER_RABBITMQ_EXCHANGE_NAME must be set to enable cloud behavior.
        - The default memos-fanout/fanout combination is treated as non-cloud.
    """
    if is_playground_env():
        return False

    name = _normalize_env_value(os.getenv("MEMSCHEDULER_RABBITMQ_EXCHANGE_NAME"))
    if not name:
        return False

    exchange_type = _normalize_env_value(os.getenv("MEMSCHEDULER_RABBITMQ_EXCHANGE_TYPE"))
    # The stock exchange configuration does not count as cloud; anything
    # custom does.
    is_default_combo = name == "memos-fanout" and exchange_type in ("", "fanout")
    return not is_default_combo
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def extract_json_obj(text: str):
    """
    Safely extract JSON from LLM response text with layered fallbacks.

    Args:
        text: Raw text response from an LLM that may contain JSON.

    Returns:
        Parsed JSON data (dict or list).

    Raises:
        ValueError: If no valid JSON can be extracted.
    """
    if not text:
        raise ValueError("Empty input text")

    # Strip common code-fence markers before any parsing attempt.
    cleaned = text.strip()
    for marker in ("json```", "```python", "```json", "latex```", "```latex", "```"):
        cleaned = cleaned.replace(marker, "")

    # Attempt 1: parse the cleaned text directly.
    try:
        return json.loads(cleaned.strip())
    except json.JSONDecodeError as e:
        logger.info(f"Failed to parse JSON from text: {cleaned}. Error: {e!s}", exc_info=True)

    # Attempt 2: pull out the first {...} or [...] span via regex.
    candidates = re.findall(r"\{[\s\S]*\}|\[[\s\S]*\]", cleaned)
    if candidates:
        try:
            return json.loads(candidates[0])
        except json.JSONDecodeError as e:
            logger.info(f"Failed to parse JSON from text: {cleaned}. Error: {e!s}", exc_info=True)

    # Attempt 3: repair a common LLM mistake — unquoted object keys.
    try:
        repaired = re.sub(r"([\{\s,])(\w+)(:)", r'\1"\2"\3', cleaned)
        return json.loads(repaired)
    except json.JSONDecodeError as e:
        logger.error(f"Failed to parse JSON from text: {repaired}. Error: {e!s}")
        logger.error("Full traceback:\n" + traceback.format_exc())
        raise ValueError(repaired) from e
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def extract_list_items(text: str, bullet_prefixes: tuple[str, ...] = ("- ",)) -> list[str]:
    """
    Extract bullet list items from LLM output where each item is on a single line
    starting with a given bullet prefix (default: "- ").

    This function is designed to be robust to common LLM formatting variations,
    following similar normalization practices as `extract_json_obj`.

    Behavior:
        - Strips common code-fence markers (```json, ```python, ``` etc.).
        - Collects all lines that start with any of the provided `bullet_prefixes`.
        - Tolerates the "• " bullet as a loose fallback.
        - Unescapes common sequences like "\\n" and "\\t" within items.
        - If no bullet lines are found, falls back to attempting to parse a JSON array
          (using `extract_json_obj`) and returns its string elements.

    Args:
        text: Raw text response from LLM.
        bullet_prefixes: Tuple of accepted bullet line prefixes.

    Returns:
        List of extracted items (strings). Returns an empty list if none can be parsed.
    """
    if not text:
        return []

    # Normalize the text similar to extract_json_obj.
    normalized = text.strip()
    for marker in ("json```", "```python", "```json", "latex```", "```latex", "```"):
        normalized = normalized.replace(marker, "")
    normalized = normalized.replace("\r\n", "\n")

    items: list[str] = []
    seen: set[str] = set()  # dedupe while keeping first-seen order

    for raw_line in normalized.splitlines():
        line = raw_line.strip()
        if not line:
            continue

        content = None
        for prefix in bullet_prefixes:
            if line.startswith(prefix):
                content = line[len(prefix):].strip()
                break
        # FIX: the docstring promised tolerating the "• " bullet, but the
        # previous implementation never checked for it.
        if content is None and line.startswith("• "):
            content = line[2:].strip()
        if content is None:
            continue

        # Unescape common literal escape sequences emitted by LLMs.
        content = content.replace("\\n", "\n").replace("\\t", "\t").replace("\\r", "\r")
        if content and content not in seen:
            items.append(content)
            seen.add(content)

    if items:
        return items

    # FIX: the docstring promised a JSON-array fallback, which was missing.
    # Best-effort only: any parse failure falls through to the error log.
    try:
        parsed = extract_json_obj(normalized)
        if isinstance(parsed, list):
            string_items = [item for item in parsed if isinstance(item, str)]
            if string_items:
                return string_items
    except Exception:
        pass

    logger.error(f"Fail to parse {text}")
    return []
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def extract_list_items_in_answer(
    text: str, bullet_prefixes: tuple[str, ...] = ("- ",)
) -> list[str]:
    """
    Extract list items specifically from content enclosed within `<answer>...</answer>` tags.

    - When one or more `<answer>...</answer>` blocks are present, concatenates their inner
      contents with newlines and parses using `extract_list_items`.
    - When no `<answer>` block is found, falls back to parsing the entire input with
      `extract_list_items`.
    - Case-insensitive matching of the `<answer>` tag.

    Args:
        text: Raw text that may contain `<answer>...</answer>` blocks.
        bullet_prefixes: Accepted bullet prefixes (default: strictly `"- "`).

    Returns:
        List of extracted items (strings), or an empty list when nothing is parseable.
    """
    if not text:
        return []

    try:
        normalized = text.strip().replace("\r\n", "\n")
        # Ordered, exact-case matching for <answer> blocks: answer -> Answer -> ANSWER.
        # FIX: the previous patterns used "[\\s\\S]" inside raw strings, which is a
        # character class matching only the literal characters '\', 's' and 'S' —
        # so tagged content was effectively never captured. "[\s\S]" matches any
        # character including newlines.
        matches: list[str] = []
        for tag in ("answer", "Answer", "ANSWER"):
            matches = re.findall(rf"<{tag}>([\s\S]*?)</{tag}>", normalized)
            if matches:
                break
        # Fallback: case-insensitive matching if none of the exact-case variants matched.
        if not matches:
            matches = re.findall(r"<answer>([\s\S]*?)</answer>", normalized, flags=re.IGNORECASE)

        if matches:
            combined = "\n".join(m.strip() for m in matches if m is not None)
            return extract_list_items(combined, bullet_prefixes=bullet_prefixes)

        # Fallback: parse the whole text if tags are absent.
        return extract_list_items(normalized, bullet_prefixes=bullet_prefixes)
    except Exception as e:
        logger.info(f"Failed to extract items within <answer> tags: {e!s}", exc_info=True)
        # Final fallback: attempt direct list extraction.
        try:
            return extract_list_items(text, bullet_prefixes=bullet_prefixes)
        except Exception:
            return []
|
217
|
+
|
|
218
|
+
|
|
219
|
+
def parse_yaml(yaml_file: str | Path):
    """Load a YAML file and return its parsed contents.

    Args:
        yaml_file: Path (or path string) of the YAML document.

    Raises:
        FileNotFoundError: If the path does not refer to an existing file.
    """
    path = Path(yaml_file)
    if not path.is_file():
        raise FileNotFoundError(f"No such file: {yaml_file}")

    with path.open("r", encoding="utf-8") as handle:
        return yaml.safe_load(handle)
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def log_exceptions(logger=logger):
    """
    Decorator factory that logs any exception raised by the wrapped function
    (including a stack trace) instead of propagating it.

    Note: when an exception occurs the wrapper returns None.

    Args:
        logger: Optional logger object (default: module-level logger).

    Example:
        @log_exceptions()
        def risky_function():
            raise ValueError("Oops!")

        @log_exceptions(logger=custom_logger)
        def another_risky_function():
            might_fail()
    """

    def _decorate(func):
        @wraps(func)
        def _guarded(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as exc:
                # Swallow the error by design; callers get None back.
                logger.error(f"Error in {func.__name__}: {exc}", stack_info=True)

        return _guarded

    return _decorate
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def group_messages_by_user_and_mem_cube(
    messages: list[ScheduleMessageItem],
) -> dict[str, dict[str, list[ScheduleMessageItem]]]:
    """
    Group messages into a nested dict, first by user_id, then by mem_cube_id.

    Args:
        messages: List of ScheduleMessageItem objects to be grouped.

    Returns:
        A nested plain dict of shape::

            {
                "user_id_1": {
                    "mem_cube_id_1": [msg1, msg2, ...],
                    "mem_cube_id_2": [msg3, ...],
                },
                ...
            }

        Each msg is the original ScheduleMessageItem object.
    """
    nested: dict[str, dict[str, list]] = {}
    for message in messages:
        per_user = nested.setdefault(message.user_id, {})
        per_user.setdefault(message.mem_cube_id, []).append(message)
    return nested
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
import socket
|
|
4
|
+
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from memos.log import get_logger
|
|
9
|
+
from memos.mem_scheduler.schemas.message_schemas import ScheduleMessageItem
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
logger = get_logger(__name__)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _iso_ts_now() -> str:
|
|
16
|
+
"""Return current UTC timestamp in ISO format with milliseconds."""
|
|
17
|
+
return datetime.now(timezone.utc).isoformat()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def to_iso(ts) -> str | None:
|
|
21
|
+
"""Convert datetime to ISO string; return None if not convertible."""
|
|
22
|
+
if ts is None:
|
|
23
|
+
return None
|
|
24
|
+
if isinstance(ts, datetime):
|
|
25
|
+
dt = ts
|
|
26
|
+
if dt.tzinfo is None:
|
|
27
|
+
dt = dt.replace(tzinfo=timezone.utc)
|
|
28
|
+
return dt.isoformat()
|
|
29
|
+
try:
|
|
30
|
+
return datetime.fromtimestamp(float(ts), tz=timezone.utc).isoformat()
|
|
31
|
+
except Exception:
|
|
32
|
+
return None
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def emit_monitor_event(event: str, msg: ScheduleMessageItem, extra: dict[str, Any] | None = None):
    """
    Emit a structured MONITOR_EVENT log line for SLS consumption.

    Fire-and-forget by design: any failure here is swallowed (logged at
    debug level) so the scheduler flow is never interrupted by monitoring.
    """
    try:
        # Key order is preserved deliberately so emitted log lines stay stable.
        payload: dict[str, Any] = dict(
            event=event,
            ts=_iso_ts_now(),
            label=getattr(msg, "label", None),
            user_id=getattr(msg, "user_id", None),
            mem_cube_id=getattr(msg, "mem_cube_id", None),
            item_id=getattr(msg, "item_id", None),
            task_id=getattr(msg, "task_id", "") or "",
            trace_id=getattr(msg, "trace_id", None),
            stream_key=getattr(msg, "stream_key", None),
            redis_message_id=getattr(msg, "redis_message_id", None),
            monitor_flag=None,
            host=socket.gethostname(),
            env=os.getenv("ENV") or os.getenv("ENVIRONMENT") or "",
        )

        # Surface the monitor flag when the message carries a dict payload.
        info = getattr(msg, "info", None)
        if isinstance(info, dict):
            payload["monitor_flag"] = info.get("monitor_flag")

        # Caller-supplied fields may override the defaults above.
        if extra:
            payload.update(extra)

        logger.info("MONITOR_EVENT " + json.dumps(payload, ensure_ascii=False))
    except Exception:
        logger.debug("Failed to emit MONITOR_EVENT", exc_info=True)
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
# src/memos/mem_scheduler/utils/status_tracker.py
|
|
2
|
+
import json
|
|
3
|
+
|
|
4
|
+
from datetime import datetime, timedelta, timezone
|
|
5
|
+
from typing import TYPE_CHECKING
|
|
6
|
+
|
|
7
|
+
from memos.dependency import require_python_package
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
import redis
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TaskStatusTracker:
|
|
15
|
+
    @require_python_package(import_name="redis", install_command="pip install redis")
    def __init__(self, redis_client: "redis.Redis | None"):
        """Create a tracker bound to *redis_client*.

        When redis_client is None, every tracker method silently no-ops.
        """
        self.redis = redis_client
|
18
|
+
|
|
19
|
+
    def _get_key(self, user_id: str) -> str | None:
        """Return the Redis hash key holding per-task metadata for *user_id*.

        Returns None (via the bare ``return``) when no Redis client is
        configured; callers also guard on ``self.redis`` before using the key.
        """
        if not self.redis:
            # Defensive no-op path; implicitly returns None.
            return

        return f"memos:task_meta:{user_id}"
|
24
|
+
|
|
25
|
+
    def _get_task_items_key(self, user_id: str, task_id: str) -> str:
        """Get Redis key for task_id → [item_id] mapping.

        Pure string formatting — unlike ``_get_key`` it needs no Redis guard.
        """
        return f"memos:task_items:{user_id}:{task_id}"
|
28
|
+
|
|
29
|
+
    def task_submitted(
        self,
        task_id: str,
        user_id: str,
        task_type: str,
        mem_cube_id: str,
        business_task_id: str | None = None,
    ):
        """
        Submit a new task for tracking.

        Writes a "waiting" metadata record into the per-user task hash and,
        when a business_task_id is given, registers this item under the
        business task's item set. Both keys get a 7-day TTL refreshed on
        every write. Silently no-ops when Redis is not configured.

        Args:
            task_id: Internal item_id (UUID)
            user_id: User identifier
            task_type: Type of task (label)
            mem_cube_id: Memory cube identifier
            business_task_id: Optional business-level task ID (one task_id can have multiple item_ids)
        """
        if not self.redis:
            return

        key = self._get_key(user_id)
        payload = {
            "status": "waiting",
            "task_type": task_type,
            "mem_cube_id": mem_cube_id,
            # Timezone-aware UTC timestamp of submission.
            "submitted_at": datetime.now(timezone.utc).isoformat(),
        }

        # Add business_task_id to payload if provided
        if business_task_id:
            payload["business_task_id"] = business_task_id
            # Add item_id to the task_id → [item_ids] set
            task_items_key = self._get_task_items_key(user_id, business_task_id)
            self.redis.sadd(task_items_key, task_id)
            self.redis.expire(task_items_key, timedelta(days=7))

        # NOTE(review): the two writes are not atomic — a crash between them
        # could leave the item set without metadata (or vice versa); confirm
        # whether that is acceptable for this tracker.
        self.redis.hset(key, task_id, json.dumps(payload))
        self.redis.expire(key, timedelta(days=7))
|
68
|
+
|
|
69
|
+
def task_started(self, task_id: str, user_id: str):
    """Mark a task as running; creates a record on the fly if none exists."""
    if not self.redis:
        return

    meta_key = self._get_key(user_id)
    raw = self.redis.hget(meta_key, task_id)
    # Fault tolerance: if the task record is missing, start from an empty one.
    record = json.loads(raw) if raw else {}
    record["status"] = "in_progress"
    record["started_at"] = datetime.now(timezone.utc).isoformat()
    self.redis.hset(meta_key, task_id, json.dumps(record))
    self.redis.expire(meta_key, timedelta(days=7))
|
|
87
|
+
|
|
88
|
+
def task_completed(self, task_id: str, user_id: str):
    """Mark an existing task as completed; unknown tasks are silently ignored."""
    if not self.redis:
        return

    meta_key = self._get_key(user_id)
    raw = self.redis.hget(meta_key, task_id)
    if not raw:
        return

    record = json.loads(raw)
    record["status"] = "completed"
    record["completed_at"] = datetime.now(timezone.utc).isoformat()
    # NOTE: Redis hashes cannot expire individual fields; per-entry TTL would
    # need a background cleanup job or timestamp checks on read. For now we
    # simply refresh the whole hash's 7-day TTL.
    self.redis.hset(meta_key, task_id, json.dumps(record))
    self.redis.expire(meta_key, timedelta(days=7))
|
|
104
|
+
|
|
105
|
+
def task_failed(self, task_id: str, user_id: str, error_message: str):
    """Mark a task as failed and record the error; creates a record if missing."""
    if not self.redis:
        return

    meta_key = self._get_key(user_id)
    raw = self.redis.hget(meta_key, task_id)
    # Missing records are tolerated: start from an empty dict.
    record = json.loads(raw) if raw else {}
    record["status"] = "failed"
    record["error"] = error_message
    record["failed_at"] = datetime.now(timezone.utc).isoformat()
    self.redis.hset(meta_key, task_id, json.dumps(record))
    self.redis.expire(meta_key, timedelta(days=7))
|
|
124
|
+
|
|
125
|
+
def get_task_status(self, task_id: str, user_id: str) -> dict | None:
|
|
126
|
+
if not self.redis:
|
|
127
|
+
return None
|
|
128
|
+
|
|
129
|
+
key = self._get_key(user_id)
|
|
130
|
+
data = self.redis.hget(key, task_id)
|
|
131
|
+
return json.loads(data) if data else None
|
|
132
|
+
|
|
133
|
+
def get_all_tasks_for_user(self, user_id: str) -> dict[str, dict]:
    """Return ``{task_id: metadata}`` for every tracked task of one user."""
    if not self.redis:
        return {}

    tasks: dict[str, dict] = {}
    for tid, raw in self.redis.hgetall(self._get_key(user_id)).items():
        tasks[tid] = json.loads(raw)
    return tasks
|
|
140
|
+
|
|
141
|
+
def get_task_status_by_business_id(self, business_task_id: str, user_id: str) -> dict | None:
|
|
142
|
+
"""
|
|
143
|
+
Get aggregated status for a business-level task_id.
|
|
144
|
+
|
|
145
|
+
Args:
|
|
146
|
+
business_task_id: Business-level task ID
|
|
147
|
+
user_id: User identifier
|
|
148
|
+
|
|
149
|
+
Returns:
|
|
150
|
+
Aggregated status dict with status determined by all item statuses:
|
|
151
|
+
- If any item is 'waiting' or 'in_progress' → 'in_progress'
|
|
152
|
+
- If all items are 'completed' → 'completed'
|
|
153
|
+
- If any item is 'failed' → 'failed'
|
|
154
|
+
Returns None if task_id not found.
|
|
155
|
+
"""
|
|
156
|
+
if not self.redis:
|
|
157
|
+
return None
|
|
158
|
+
|
|
159
|
+
# Get all item_ids for this task_id
|
|
160
|
+
task_items_key = self._get_task_items_key(user_id, business_task_id)
|
|
161
|
+
item_ids = self.redis.smembers(task_items_key)
|
|
162
|
+
|
|
163
|
+
if not item_ids:
|
|
164
|
+
return None
|
|
165
|
+
|
|
166
|
+
# Get statuses for all items
|
|
167
|
+
key = self._get_key(user_id)
|
|
168
|
+
item_statuses = []
|
|
169
|
+
errors = []
|
|
170
|
+
for item_id in item_ids:
|
|
171
|
+
item_data_json = self.redis.hget(key, item_id)
|
|
172
|
+
if item_data_json:
|
|
173
|
+
item_data = json.loads(item_data_json)
|
|
174
|
+
item_statuses.append(item_data["status"])
|
|
175
|
+
if item_data.get("status") == "failed" and "error" in item_data:
|
|
176
|
+
errors.append(item_data["error"])
|
|
177
|
+
|
|
178
|
+
if not item_statuses:
|
|
179
|
+
return None
|
|
180
|
+
|
|
181
|
+
# Aggregate status
|
|
182
|
+
if "failed" in item_statuses:
|
|
183
|
+
aggregated_status = "failed"
|
|
184
|
+
elif "in_progress" in item_statuses or "waiting" in item_statuses:
|
|
185
|
+
aggregated_status = "in_progress"
|
|
186
|
+
elif all(s == "completed" for s in item_statuses):
|
|
187
|
+
aggregated_status = "completed"
|
|
188
|
+
else:
|
|
189
|
+
# Fallback
|
|
190
|
+
aggregated_status = "unknown"
|
|
191
|
+
|
|
192
|
+
return {
|
|
193
|
+
"status": aggregated_status,
|
|
194
|
+
"business_task_id": business_task_id,
|
|
195
|
+
"item_count": len(item_ids),
|
|
196
|
+
"item_statuses": item_statuses,
|
|
197
|
+
"errors": errors,
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
def get_all_tasks_global(self) -> dict[str, dict[str, dict]]:
    """
    Retrieve all tasks for all users from Redis.

    Scans for every ``memos:task_meta:{user_id}`` hash and decodes its
    JSON entries. Assumes the client returns str keys/values
    (decode_responses=True) — TODO confirm against client construction.

    Returns:
        dict: {user_id: {task_id: task_data, ...}, ...}
    """
    if not self.redis:
        return {}

    all_users_tasks: dict[str, dict[str, dict]] = {}
    cursor: int | str = 0
    while True:
        cursor, keys = self.redis.scan(cursor=cursor, match="memos:task_meta:*", count=100)
        for key in keys:
            # Key format: memos:task_meta:{user_id}. Split at most twice so
            # user_ids that themselves contain ":" are not truncated
            # (the original `parts[2]` kept only the first ":"-segment).
            parts = key.split(":", 2)
            if len(parts) < 3:
                continue
            user_id = parts[2]

            tasks = self.redis.hgetall(key)
            if tasks:
                all_users_tasks[user_id] = {
                    tid: json.loads(t_data) for tid, t_data in tasks.items()
                }

        # SCAN signals completion with cursor 0 (int or "0" depending on client).
        if cursor == 0 or cursor == "0":
            break

    return all_users_tasks
|
|
File without changes
|