waldiez 0.4.7__py3-none-any.whl → 0.4.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of waldiez might be problematic. Click here for more details.
- waldiez/__init__.py +5 -5
- waldiez/_version.py +1 -1
- waldiez/cli.py +97 -102
- waldiez/exporter.py +61 -19
- waldiez/exporting/__init__.py +25 -6
- waldiez/exporting/agent/__init__.py +7 -3
- waldiez/exporting/agent/code_execution.py +114 -0
- waldiez/exporting/agent/exporter.py +354 -0
- waldiez/exporting/agent/extras/__init__.py +15 -0
- waldiez/exporting/agent/extras/captain_agent_extras.py +315 -0
- waldiez/exporting/agent/extras/group/target.py +178 -0
- waldiez/exporting/agent/extras/group_manager_agent_extas.py +500 -0
- waldiez/exporting/agent/extras/group_member_extras.py +181 -0
- waldiez/exporting/agent/extras/handoffs/__init__.py +19 -0
- waldiez/exporting/agent/extras/handoffs/after_work.py +78 -0
- waldiez/exporting/agent/extras/handoffs/available.py +74 -0
- waldiez/exporting/agent/extras/handoffs/condition.py +158 -0
- waldiez/exporting/agent/extras/handoffs/handoff.py +171 -0
- waldiez/exporting/agent/extras/handoffs/target.py +189 -0
- waldiez/exporting/agent/extras/rag/__init__.py +10 -0
- waldiez/exporting/agent/{utils/rag_user/chroma_utils.py → extras/rag/chroma_extras.py} +37 -24
- waldiez/exporting/agent/{utils/rag_user/mongo_utils.py → extras/rag/mongo_extras.py} +10 -10
- waldiez/exporting/agent/{utils/rag_user/pgvector_utils.py → extras/rag/pgvector_extras.py} +13 -13
- waldiez/exporting/agent/{utils/rag_user/qdrant_utils.py → extras/rag/qdrant_extras.py} +13 -13
- waldiez/exporting/agent/{utils/rag_user/vector_db.py → extras/rag/vector_db_extras.py} +59 -46
- waldiez/exporting/agent/extras/rag_user_proxy_agent_extras.py +245 -0
- waldiez/exporting/agent/extras/reasoning_agent_extras.py +88 -0
- waldiez/exporting/agent/factory.py +95 -0
- waldiez/exporting/agent/processor.py +150 -0
- waldiez/exporting/agent/system_message.py +36 -0
- waldiez/exporting/agent/termination.py +50 -0
- waldiez/exporting/chats/__init__.py +7 -3
- waldiez/exporting/chats/exporter.py +97 -0
- waldiez/exporting/chats/factory.py +65 -0
- waldiez/exporting/chats/processor.py +226 -0
- waldiez/exporting/chats/utils/__init__.py +6 -5
- waldiez/exporting/chats/utils/common.py +11 -45
- waldiez/exporting/chats/utils/group.py +55 -0
- waldiez/exporting/chats/utils/nested.py +37 -52
- waldiez/exporting/chats/utils/sequential.py +72 -61
- waldiez/exporting/chats/utils/{single_chat.py → single.py} +48 -50
- waldiez/exporting/core/__init__.py +196 -0
- waldiez/exporting/core/constants.py +17 -0
- waldiez/exporting/core/content.py +69 -0
- waldiez/exporting/core/context.py +244 -0
- waldiez/exporting/core/enums.py +89 -0
- waldiez/exporting/core/errors.py +19 -0
- waldiez/exporting/core/exporter.py +390 -0
- waldiez/exporting/core/exporters.py +67 -0
- waldiez/exporting/core/extras/__init__.py +39 -0
- waldiez/exporting/core/extras/agent_extras/__init__.py +27 -0
- waldiez/exporting/core/extras/agent_extras/captain_extras.py +57 -0
- waldiez/exporting/core/extras/agent_extras/group_manager_extras.py +102 -0
- waldiez/exporting/core/extras/agent_extras/rag_user_extras.py +53 -0
- waldiez/exporting/core/extras/agent_extras/reasoning_extras.py +68 -0
- waldiez/exporting/core/extras/agent_extras/standard_extras.py +263 -0
- waldiez/exporting/core/extras/base.py +241 -0
- waldiez/exporting/core/extras/chat_extras.py +118 -0
- waldiez/exporting/core/extras/flow_extras.py +70 -0
- waldiez/exporting/core/extras/model_extras.py +73 -0
- waldiez/exporting/core/extras/path_resolver.py +93 -0
- waldiez/exporting/core/extras/serializer.py +138 -0
- waldiez/exporting/core/extras/tool_extras.py +82 -0
- waldiez/exporting/core/protocols.py +259 -0
- waldiez/exporting/core/result.py +705 -0
- waldiez/exporting/core/types.py +329 -0
- waldiez/exporting/core/utils/__init__.py +11 -0
- waldiez/exporting/core/utils/comment.py +33 -0
- waldiez/exporting/core/utils/llm_config.py +117 -0
- waldiez/exporting/core/validation.py +96 -0
- waldiez/exporting/flow/__init__.py +6 -2
- waldiez/exporting/flow/execution_generator.py +193 -0
- waldiez/exporting/flow/exporter.py +107 -0
- waldiez/exporting/flow/factory.py +94 -0
- waldiez/exporting/flow/file_generator.py +214 -0
- waldiez/exporting/flow/merger.py +387 -0
- waldiez/exporting/flow/orchestrator.py +411 -0
- waldiez/exporting/flow/utils/__init__.py +9 -36
- waldiez/exporting/flow/utils/common.py +206 -0
- waldiez/exporting/flow/utils/importing.py +373 -0
- waldiez/exporting/flow/utils/linting.py +200 -0
- waldiez/exporting/flow/utils/{logging_utils.py → logging.py} +23 -9
- waldiez/exporting/models/__init__.py +3 -1
- waldiez/exporting/models/exporter.py +233 -0
- waldiez/exporting/models/factory.py +66 -0
- waldiez/exporting/models/processor.py +139 -0
- waldiez/exporting/tools/__init__.py +11 -0
- waldiez/exporting/tools/exporter.py +207 -0
- waldiez/exporting/tools/factory.py +57 -0
- waldiez/exporting/tools/processor.py +248 -0
- waldiez/exporting/tools/registration.py +133 -0
- waldiez/io/__init__.py +128 -0
- waldiez/io/_ws.py +199 -0
- waldiez/io/models/__init__.py +60 -0
- waldiez/io/models/base.py +66 -0
- waldiez/io/models/constants.py +78 -0
- waldiez/io/models/content/__init__.py +23 -0
- waldiez/io/models/content/audio.py +43 -0
- waldiez/io/models/content/base.py +45 -0
- waldiez/io/models/content/file.py +43 -0
- waldiez/io/models/content/image.py +96 -0
- waldiez/io/models/content/text.py +37 -0
- waldiez/io/models/content/video.py +43 -0
- waldiez/io/models/user_input.py +269 -0
- waldiez/io/models/user_response.py +215 -0
- waldiez/io/mqtt.py +681 -0
- waldiez/io/redis.py +782 -0
- waldiez/io/structured.py +439 -0
- waldiez/io/utils.py +184 -0
- waldiez/io/ws.py +298 -0
- waldiez/logger.py +481 -0
- waldiez/models/__init__.py +108 -51
- waldiez/models/agents/__init__.py +34 -70
- waldiez/models/agents/agent/__init__.py +10 -4
- waldiez/models/agents/agent/agent.py +466 -65
- waldiez/models/agents/agent/agent_data.py +119 -47
- waldiez/models/agents/agent/agent_type.py +13 -2
- waldiez/models/agents/agent/code_execution.py +12 -12
- waldiez/models/agents/agent/human_input_mode.py +8 -0
- waldiez/models/agents/agent/{linked_skill.py → linked_tool.py} +7 -7
- waldiez/models/agents/agent/nested_chat.py +35 -7
- waldiez/models/agents/agent/termination_message.py +30 -22
- waldiez/models/agents/{swarm_agent → agent}/update_system_message.py +22 -22
- waldiez/models/agents/agents.py +58 -63
- waldiez/models/agents/assistant/assistant.py +4 -4
- waldiez/models/agents/assistant/assistant_data.py +13 -1
- waldiez/models/agents/{captain_agent → captain}/captain_agent.py +5 -5
- waldiez/models/agents/{captain_agent → captain}/captain_agent_data.py +5 -5
- waldiez/models/agents/extra_requirements.py +11 -16
- waldiez/models/agents/group_manager/group_manager.py +103 -13
- waldiez/models/agents/group_manager/group_manager_data.py +36 -14
- waldiez/models/agents/group_manager/speakers.py +77 -24
- waldiez/models/agents/{rag_user → rag_user_proxy}/__init__.py +16 -16
- waldiez/models/agents/rag_user_proxy/rag_user_proxy.py +64 -0
- waldiez/models/agents/{rag_user/rag_user_data.py → rag_user_proxy/rag_user_proxy_data.py} +6 -5
- waldiez/models/agents/{rag_user → rag_user_proxy}/retrieve_config.py +182 -114
- waldiez/models/agents/{rag_user → rag_user_proxy}/vector_db_config.py +13 -13
- waldiez/models/agents/reasoning/reasoning_agent.py +6 -6
- waldiez/models/agents/reasoning/reasoning_agent_data.py +110 -63
- waldiez/models/agents/reasoning/reasoning_agent_reason_config.py +38 -10
- waldiez/models/agents/user_proxy/user_proxy.py +11 -7
- waldiez/models/agents/user_proxy/user_proxy_data.py +2 -2
- waldiez/models/chat/__init__.py +2 -1
- waldiez/models/chat/chat.py +166 -87
- waldiez/models/chat/chat_data.py +99 -136
- waldiez/models/chat/chat_message.py +33 -23
- waldiez/models/chat/chat_nested.py +31 -30
- waldiez/models/chat/chat_summary.py +10 -8
- waldiez/models/common/__init__.py +52 -2
- waldiez/models/common/ag2_version.py +1 -1
- waldiez/models/common/base.py +38 -7
- waldiez/models/common/dict_utils.py +42 -17
- waldiez/models/common/handoff.py +459 -0
- waldiez/models/common/id_generator.py +19 -0
- waldiez/models/common/method_utils.py +130 -68
- waldiez/{exporting/base/utils → models/common}/naming.py +38 -61
- waldiez/models/common/waldiez_version.py +37 -0
- waldiez/models/flow/__init__.py +9 -2
- waldiez/models/flow/connection.py +18 -0
- waldiez/models/flow/flow.py +311 -215
- waldiez/models/flow/flow_data.py +207 -40
- waldiez/models/flow/info.py +85 -0
- waldiez/models/flow/naming.py +131 -0
- waldiez/models/model/__init__.py +7 -1
- waldiez/models/model/extra_requirements.py +3 -12
- waldiez/models/model/model.py +76 -21
- waldiez/models/model/model_data.py +108 -20
- waldiez/models/tool/__init__.py +16 -0
- waldiez/models/tool/extra_requirements.py +36 -0
- waldiez/models/{skill/skill.py → tool/tool.py} +88 -88
- waldiez/models/tool/tool_data.py +51 -0
- waldiez/models/tool/tool_type.py +8 -0
- waldiez/models/waldiez.py +97 -80
- waldiez/runner.py +115 -61
- waldiez/running/__init__.py +13 -7
- waldiez/running/environment.py +49 -68
- waldiez/running/gen_seq_diagram.py +16 -14
- waldiez/running/post_run.py +119 -0
- waldiez/running/pre_run.py +149 -0
- waldiez/running/util.py +134 -0
- waldiez/utils/__init__.py +2 -4
- waldiez/utils/cli_extras/jupyter.py +5 -3
- waldiez/utils/cli_extras/runner.py +6 -4
- waldiez/utils/cli_extras/studio.py +6 -4
- waldiez/utils/conflict_checker.py +15 -9
- waldiez/utils/flaml_warnings.py +5 -5
- waldiez/utils/version.py +47 -0
- {waldiez-0.4.7.dist-info → waldiez-0.4.9.dist-info}/METADATA +235 -91
- waldiez-0.4.9.dist-info/RECORD +203 -0
- waldiez/exporting/agent/agent_exporter.py +0 -297
- waldiez/exporting/agent/utils/__init__.py +0 -23
- waldiez/exporting/agent/utils/captain_agent.py +0 -263
- waldiez/exporting/agent/utils/code_execution.py +0 -65
- waldiez/exporting/agent/utils/group_manager.py +0 -220
- waldiez/exporting/agent/utils/rag_user/__init__.py +0 -7
- waldiez/exporting/agent/utils/rag_user/rag_user.py +0 -209
- waldiez/exporting/agent/utils/reasoning.py +0 -36
- waldiez/exporting/agent/utils/swarm_agent.py +0 -469
- waldiez/exporting/agent/utils/teachability.py +0 -41
- waldiez/exporting/agent/utils/termination_message.py +0 -44
- waldiez/exporting/base/__init__.py +0 -25
- waldiez/exporting/base/agent_position.py +0 -75
- waldiez/exporting/base/base_exporter.py +0 -118
- waldiez/exporting/base/export_position.py +0 -48
- waldiez/exporting/base/import_position.py +0 -23
- waldiez/exporting/base/mixin.py +0 -137
- waldiez/exporting/base/utils/__init__.py +0 -18
- waldiez/exporting/base/utils/comments.py +0 -96
- waldiez/exporting/base/utils/path_check.py +0 -68
- waldiez/exporting/base/utils/to_string.py +0 -84
- waldiez/exporting/chats/chats_exporter.py +0 -240
- waldiez/exporting/chats/utils/swarm.py +0 -210
- waldiez/exporting/flow/flow_exporter.py +0 -528
- waldiez/exporting/flow/utils/agent_utils.py +0 -204
- waldiez/exporting/flow/utils/chat_utils.py +0 -71
- waldiez/exporting/flow/utils/def_main.py +0 -77
- waldiez/exporting/flow/utils/flow_content.py +0 -202
- waldiez/exporting/flow/utils/flow_names.py +0 -116
- waldiez/exporting/flow/utils/importing_utils.py +0 -227
- waldiez/exporting/models/models_exporter.py +0 -199
- waldiez/exporting/models/utils.py +0 -174
- waldiez/exporting/skills/__init__.py +0 -9
- waldiez/exporting/skills/skills_exporter.py +0 -176
- waldiez/exporting/skills/utils.py +0 -369
- waldiez/models/agents/agent/teachability.py +0 -70
- waldiez/models/agents/rag_user/rag_user.py +0 -60
- waldiez/models/agents/swarm_agent/__init__.py +0 -50
- waldiez/models/agents/swarm_agent/after_work.py +0 -179
- waldiez/models/agents/swarm_agent/on_condition.py +0 -105
- waldiez/models/agents/swarm_agent/on_condition_available.py +0 -142
- waldiez/models/agents/swarm_agent/on_condition_target.py +0 -40
- waldiez/models/agents/swarm_agent/swarm_agent.py +0 -107
- waldiez/models/agents/swarm_agent/swarm_agent_data.py +0 -124
- waldiez/models/flow/utils.py +0 -232
- waldiez/models/skill/__init__.py +0 -16
- waldiez/models/skill/extra_requirements.py +0 -36
- waldiez/models/skill/skill_data.py +0 -53
- waldiez/models/skill/skill_type.py +0 -8
- waldiez/running/running.py +0 -369
- waldiez/utils/pysqlite3_checker.py +0 -308
- waldiez/utils/rdps_checker.py +0 -122
- waldiez-0.4.7.dist-info/RECORD +0 -149
- /waldiez/models/agents/{captain_agent → captain}/__init__.py +0 -0
- /waldiez/models/agents/{captain_agent → captain}/captain_agent_lib_entry.py +0 -0
- {waldiez-0.4.7.dist-info → waldiez-0.4.9.dist-info}/WHEEL +0 -0
- {waldiez-0.4.7.dist-info → waldiez-0.4.9.dist-info}/entry_points.txt +0 -0
- {waldiez-0.4.7.dist-info → waldiez-0.4.9.dist-info}/licenses/LICENSE +0 -0
- {waldiez-0.4.7.dist-info → waldiez-0.4.9.dist-info}/licenses/NOTICE.md +0 -0
waldiez/io/redis.py
ADDED
|
@@ -0,0 +1,782 @@
|
|
|
1
|
+
# SPDX-License-Identifier: Apache-2.0.
|
|
2
|
+
# Copyright (c) 2024 - 2025 Waldiez and contributors.
|
|
3
|
+
|
|
4
|
+
# flake8: noqa: E501
|
|
5
|
+
# pylint: disable=too-many-try-statements,broad-exception-caught,line-too-long
|
|
6
|
+
|
|
7
|
+
"""A Redis I/O stream for handling print and input messages."""
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
import time
|
|
12
|
+
import traceback as tb
|
|
13
|
+
import uuid
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from types import TracebackType
|
|
16
|
+
from typing import (
|
|
17
|
+
TYPE_CHECKING,
|
|
18
|
+
Any,
|
|
19
|
+
Awaitable,
|
|
20
|
+
Callable,
|
|
21
|
+
Optional,
|
|
22
|
+
Type,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
try:
|
|
26
|
+
import redis
|
|
27
|
+
import redis.asyncio as a_redis
|
|
28
|
+
except ImportError as error: # pragma: no cover
|
|
29
|
+
raise ImportError(
|
|
30
|
+
"Redis client not installed. Please install redis-py with `pip install redis`."
|
|
31
|
+
) from error
|
|
32
|
+
from autogen.io import IOStream # type: ignore
|
|
33
|
+
from autogen.messages import BaseMessage # type: ignore
|
|
34
|
+
|
|
35
|
+
from .models import (
|
|
36
|
+
PrintMessage,
|
|
37
|
+
TextMediaContent,
|
|
38
|
+
UserInputData,
|
|
39
|
+
UserInputRequest,
|
|
40
|
+
UserResponse,
|
|
41
|
+
)
|
|
42
|
+
from .utils import gen_id, now
|
|
43
|
+
|
|
44
|
+
if TYPE_CHECKING:
|
|
45
|
+
Redis = redis.Redis[bytes]
|
|
46
|
+
AsyncRedis = a_redis.Redis[bytes]
|
|
47
|
+
else:
|
|
48
|
+
Redis = redis.Redis
|
|
49
|
+
AsyncRedis = a_redis.Redis
|
|
50
|
+
|
|
51
|
+
LOG = logging.getLogger(__name__)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class RedisIOStream(IOStream):
|
|
55
|
+
"""Redis I/O stream."""
|
|
56
|
+
|
|
57
|
+
redis: Redis
|
|
58
|
+
task_id: str
|
|
59
|
+
input_timeout: int
|
|
60
|
+
on_input_request: Optional[Callable[[str, str, str], None]]
|
|
61
|
+
on_input_received: Optional[Callable[[str, str], None]]
|
|
62
|
+
max_stream_size: int
|
|
63
|
+
output_stream: str
|
|
64
|
+
input_request_channel: str
|
|
65
|
+
input_response_channel: str
|
|
66
|
+
|
|
67
|
+
    def __init__(
        self,
        redis_url: str = "redis://localhost:6379/0",
        task_id: str | None = None,
        input_timeout: int = 120,
        max_stream_size: int = 1000,
        on_input_request: Optional[Callable[[str, str, str], None]] = None,
        on_input_response: Optional[Callable[[str, str], None]] = None,
        redis_connection_kwargs: dict[str, Any] | None = None,
        uploads_root: Path | str | None = None,
    ) -> None:
        """Initialize the Redis I/O stream.

        Parameters
        ----------
        redis_url : str, optional
            The Redis URL, by default "redis://localhost:6379/0".
        task_id : str, optional
            An ID to use for the input channels and the output stream.
            If not provided, a random UUID will be generated.
        input_timeout : int, optional
            The time to wait for user input in seconds,
            by default 120 (2 minutes).
        max_stream_size : int, optional
            The maximum number of entries per stream, by default 1000.
        on_input_request : Optional[Callable[[str, str, str], None]], optional
            Callback for input request, by default None.
            parameters: prompt, request_id, task_id
        on_input_response : Optional[Callable[[str, str], None]], optional
            Callback for input response, by default None.
            parameters: user_input, task_id
        redis_connection_kwargs : dict[str, Any] | None, optional
            Additional Redis connection kwargs, to be used with
            `redis.Redis.from_url`, by default None.
            See: https://redis-py.readthedocs.io/en/stable/connections.html#redis.Redis.from_url
        uploads_root : Path | str | None, optional
            The root directory for uploads, by default None.
            If provided, it is resolved to an absolute path and created
            if missing.
        """
        self.redis = Redis.from_url(redis_url, **redis_connection_kwargs or {})
        self.task_id = task_id or uuid.uuid4().hex
        self.input_timeout = input_timeout
        self.on_input_request = on_input_request
        self.on_input_response = on_input_response
        self.max_stream_size = max_stream_size
        # Per-task stream and pub/sub channel names derived from the task id.
        self.task_output_stream = f"task:{self.task_id}:output"
        self.input_request_channel = f"task:{self.task_id}:input_request"
        self.input_response_channel = f"task:{self.task_id}:input_response"
        # Shared stream aggregating output across all tasks.
        self.common_output_stream = "task-output"
        self.uploads_root = (
            Path(uploads_root).resolve() if uploads_root else None
        )
        if self.uploads_root and not self.uploads_root.exists():
            self.uploads_root.mkdir(parents=True, exist_ok=True)
|
|
120
|
+
|
|
121
|
+
def __enter__(self) -> "RedisIOStream":
|
|
122
|
+
"""Enable context manager usage."""
|
|
123
|
+
return self
|
|
124
|
+
|
|
125
|
+
    def __exit__(
        self,
        exc_type: Type[Exception] | None,
        exc_value: Exception | None,
        traceback: TracebackType | None,
    ) -> None:
        """Exit the context manager.

        Runs housekeeping (pruning processed-request records and trimming
        output streams) and then closes the Redis connection. Returns None,
        so exceptions raised in the `with` body are never suppressed.

        Parameters
        ----------
        exc_type : Type[Exception] | None
            The exception type.
        exc_value : Exception | None
            The exception value.
        traceback : TracebackType | None
            The traceback.
        """
        # cleanup
        # NOTE(review): these class-level helpers are defined elsewhere in
        # this module; assumed to be best-effort — verify.
        RedisIOStream.cleanup_processed_task_requests(
            self.redis, self.task_id, retention_period=86400
        )
        RedisIOStream.trim_task_output_streams(self.redis)
        RedisIOStream.cleanup_processed_requests(self.redis)
        # and close the connection
        self.close()
|
|
150
|
+
|
|
151
|
+
def close(self) -> None:
|
|
152
|
+
"""Close the Redis client."""
|
|
153
|
+
RedisIOStream.try_do(self.redis.close)
|
|
154
|
+
|
|
155
|
+
    def _print_to_task_output(self, payload: dict[str, Any]) -> None:
        """Append the payload to this task's Redis output stream.

        Parameters
        ----------
        payload : dict[str, Any]
            The message fields to add to the stream entry.
        """
        LOG.debug("Sending print message: %s", payload)
        RedisIOStream.try_do(
            self.redis.xadd,  # pyright: ignore
            self.task_output_stream,
            payload,
            # Bound the stream; approximate ('~') trimming is cheaper.
            maxlen=self.max_stream_size,
            approximate=True,
        )
|
|
173
|
+
|
|
174
|
+
    def _print_to_common_output(self, payload: dict[str, Any]) -> None:
        """Append the payload to the shared (all-tasks) output stream.

        Parameters
        ----------
        payload : dict[str, Any]
            The message fields to add to the stream entry.
        """
        LOG.debug("Sending print message: %s", payload)
        RedisIOStream.try_do(
            self.redis.xadd,  # pyright: ignore
            self.common_output_stream,
            payload,
            # Bound the stream; approximate ('~') trimming is cheaper.
            maxlen=self.max_stream_size,
            approximate=True,
        )
|
|
192
|
+
|
|
193
|
+
    def _print(self, payload: dict[str, Any]) -> None:
        """Stamp bookkeeping fields and write to both output streams.

        Parameters
        ----------
        payload : dict[str, Any]
            The message to print. Mutated in place: missing "id" and
            "timestamp" fields are filled in and "task_id" is overwritten.
        """
        if "id" not in payload:
            payload["id"] = gen_id()
        payload["task_id"] = self.task_id
        if "timestamp" not in payload:
            payload["timestamp"] = now()
        # Mirror every entry to the per-task and the shared stream.
        self._print_to_task_output(payload)
        self._print_to_common_output(payload)
|
|
208
|
+
|
|
209
|
+
def print(self, *args: Any, **kwargs: Any) -> None:
|
|
210
|
+
"""Print message to Redis stream.
|
|
211
|
+
|
|
212
|
+
Parameters
|
|
213
|
+
----------
|
|
214
|
+
args : Any
|
|
215
|
+
The message to print.
|
|
216
|
+
kwargs : Any
|
|
217
|
+
Additional keyword arguments.
|
|
218
|
+
"""
|
|
219
|
+
print_message = PrintMessage.create(*args, **kwargs)
|
|
220
|
+
payload = print_message.model_dump(mode="json")
|
|
221
|
+
self._print(payload)
|
|
222
|
+
|
|
223
|
+
    def input(
        self,
        prompt: str = "",
        *,
        password: bool = False,
        request_id: str | None = None,
    ) -> str:
        """Request input via Redis Pub/Sub and wait for response.

        Publishes an input request on the task's request channel (and the
        output streams), blocks until a matching response arrives or the
        timeout elapses, then echoes the received input back to the output
        streams.

        Parameters
        ----------
        prompt : str, optional
            The prompt message, by default "".
        password : bool, optional
            Whether input is masked, by default False.
        request_id : str, optional
            The request ID (for testing), by default None.

        Returns
        -------
        str
            The received user input, or empty string if timeout occurs.
        """
        request_id = request_id or gen_id()
        input_request = UserInputRequest(
            request_id=request_id,
            prompt=prompt,
            password=password,
        )
        payload = input_request.model_dump(mode="json")
        # Stream fields are flat strings, so re-encode the boolean.
        payload["password"] = str(password).lower()
        payload["task_id"] = self.task_id
        LOG.debug("Requesting input via Pub/Sub: %s", payload)
        # Announce the request on the output streams as well as the channel.
        self._print(payload)
        RedisIOStream.try_do(
            self.redis.publish,
            self.input_request_channel,
            json.dumps(payload),
        )
        if self.on_input_request:
            self.on_input_request(prompt, request_id, self.task_id)
        # Block (up to input_timeout) for the matching response.
        user_input = self._wait_for_input(request_id)
        if self.on_input_response:
            self.on_input_response(user_input, self.task_id)
        text_response = UserInputData(content=TextMediaContent(text=user_input))
        user_response = UserResponse(
            type="input_response",
            request_id=request_id,
            data=text_response,
        )
        payload = user_response.model_dump(mode="json")
        # no nested dicts :( — stream fields must be flat, so JSON-encode.
        payload["data"] = json.dumps(payload["data"])
        payload["task_id"] = self.task_id
        LOG.debug("Sending input response: %s", payload)
        self._print(payload)
        return user_input
|
|
280
|
+
|
|
281
|
+
    def send(self, message: BaseMessage) -> None:
        """Send a structured message to Redis.

        Parameters
        ----------
        message : BaseMessage
            The structured message to serialize and publish. If it cannot
            be dumped, an error payload is published instead.
        """
        try:
            message_dump = message.model_dump(mode="json")
        except Exception as e:  # pragma: no cover
            message_dump = {
                "type": "error",
                "error": str(e),
            }
        # Fall back to the class name when the dump carries no "type".
        message_type = message_dump.get("type", None)
        if not message_type:
            message_type = message.__class__.__name__
        self._print(
            {
                "data": json.dumps(message_dump),
                "type": message_type,
            }
        )
|
|
305
|
+
|
|
306
|
+
    def _wait_for_input(self, input_request_id: str) -> str:
        """Wait for user input.

        Polls the input-response pub/sub channel until a response whose
        request ID matches arrives, or `self.input_timeout` seconds pass.
        A per-task Redis lock plus a processed-request set guards against
        handling the same response twice (e.g. with competing consumers).

        Parameters
        ----------
        input_request_id : str
            The request ID.

        Returns
        -------
        str
            The user input, or "" on timeout.
        """
        lock_key = f"lock:{self.task_id}"
        start_time = time.time()

        pubsub = self.redis.pubsub()
        pubsub.subscribe(self.input_response_channel)
        try:
            while (time.time() - start_time) <= self.input_timeout:
                # Non-blocking poll; back off briefly when idle.
                message = pubsub.get_message(ignore_subscribe_messages=True)
                if not message:
                    time.sleep(0.1)
                    continue
                LOG.debug("Received message: %s", message)
                response = self.parse_pubsub_input(message)
                # Ignore malformed messages and responses to other requests.
                if not response or response.request_id != input_request_id:
                    continue

                if self._acquire_lock(lock_key):  # pragma: no branch
                    try:
                        # De-duplicate: another consumer may have won.
                        if self._is_request_processed(response.request_id):
                            continue

                        self._mark_request_processed(response.request_id)
                        return self._get_user_input(response)
                    finally:
                        self._release_lock(lock_key)
        except BaseException:  # pragma: no cover
            # Deliberately broad: fall through to the empty-string default.
            LOG.error("Error in _wait_for_input: %s", tb.format_exc())
        finally:
            pubsub.unsubscribe(self.input_response_channel)

        LOG.warning(
            "No input received for %ds on task %s, assuming empty string",
            self.input_timeout,
            self.task_id,
        )
        return ""
|
|
355
|
+
|
|
356
|
+
# pylint:disable=no-self-use
|
|
357
|
+
def _get_user_input(self, response: UserResponse) -> str:
|
|
358
|
+
"""Get user input from the response.
|
|
359
|
+
|
|
360
|
+
Parameters
|
|
361
|
+
----------
|
|
362
|
+
response : UserResponse
|
|
363
|
+
The user response.
|
|
364
|
+
|
|
365
|
+
Returns
|
|
366
|
+
-------
|
|
367
|
+
str
|
|
368
|
+
The user input.
|
|
369
|
+
"""
|
|
370
|
+
if not response.data:
|
|
371
|
+
return ""
|
|
372
|
+
if isinstance(
|
|
373
|
+
response.data, str
|
|
374
|
+
): # pragma: no cover should be structured
|
|
375
|
+
return response.data
|
|
376
|
+
return response.to_string(
|
|
377
|
+
uploads_root=self.uploads_root,
|
|
378
|
+
base_name=response.request_id,
|
|
379
|
+
)
|
|
380
|
+
|
|
381
|
+
def _acquire_lock(self, lock_key: str, lock_expiry: int = 10) -> bool:
|
|
382
|
+
"""Try to acquire a lock, returns True if acquired, False otherwise."""
|
|
383
|
+
try:
|
|
384
|
+
return (
|
|
385
|
+
self.redis.set(lock_key, "locked", ex=lock_expiry, nx=True)
|
|
386
|
+
is True
|
|
387
|
+
)
|
|
388
|
+
except redis.RedisError as e: # pragma: no cover
|
|
389
|
+
LOG.error("Redis error on acquire lock: %s", e)
|
|
390
|
+
return False
|
|
391
|
+
except BaseException as e: # pragma: no cover
|
|
392
|
+
LOG.error("Error on acquire lock: %s", e)
|
|
393
|
+
return False
|
|
394
|
+
|
|
395
|
+
def _release_lock(self, lock_key: str) -> None:
|
|
396
|
+
"""Release a lock."""
|
|
397
|
+
RedisIOStream.try_do(self.redis.delete, lock_key)
|
|
398
|
+
|
|
399
|
+
    def _is_request_processed(self, request_id: str) -> bool:
        """Check if a request is processed for this task.

        Thin wrapper over the static `is_request_processed`, bound to this
        stream's client and task ID.

        Parameters
        ----------
        request_id : str
            The request ID to check.

        Returns
        -------
        bool
            True when the request was already processed.
        """
        return RedisIOStream.is_request_processed(
            self.redis, task_id=self.task_id, request_id=request_id
        )
|
|
404
|
+
|
|
405
|
+
def _mark_request_processed(self, request_id: str) -> None:
|
|
406
|
+
"""Mark a request as processed for a task."""
|
|
407
|
+
RedisIOStream.try_do(
|
|
408
|
+
self.redis.zadd,
|
|
409
|
+
f"processed_requests:{self.task_id}",
|
|
410
|
+
{request_id: int(time.time() * 1_000_000)},
|
|
411
|
+
)
|
|
412
|
+
|
|
413
|
+
@staticmethod
|
|
414
|
+
def _extract_message_data(data: Any) -> Optional[dict[str, Any]]:
|
|
415
|
+
"""Extract and parse the message data field."""
|
|
416
|
+
message_data = data
|
|
417
|
+
|
|
418
|
+
# Handle string-encoded JSON
|
|
419
|
+
if isinstance(message_data, str):
|
|
420
|
+
try:
|
|
421
|
+
message_data = json.loads(message_data)
|
|
422
|
+
except json.JSONDecodeError:
|
|
423
|
+
LOG.error("Invalid JSON in message data: %s", message_data)
|
|
424
|
+
return None
|
|
425
|
+
|
|
426
|
+
# Validate data type
|
|
427
|
+
if not isinstance(message_data, dict): # pragma: no cover
|
|
428
|
+
LOG.error("Invalid message data format: %s", message_data)
|
|
429
|
+
return None
|
|
430
|
+
|
|
431
|
+
return message_data # pyright: ignore
|
|
432
|
+
|
|
433
|
+
@staticmethod
|
|
434
|
+
def _message_has_required_fields(message_data: dict[str, Any]) -> bool:
|
|
435
|
+
"""Check if message data contains required fields."""
|
|
436
|
+
if "request_id" not in message_data:
|
|
437
|
+
LOG.error("Missing 'request_id' in message data: %s", message_data)
|
|
438
|
+
return False
|
|
439
|
+
|
|
440
|
+
return True
|
|
441
|
+
|
|
442
|
+
@staticmethod
|
|
443
|
+
def _process_nested_data(
|
|
444
|
+
message_data: dict[str, Any],
|
|
445
|
+
) -> Optional[dict[str, Any]]:
|
|
446
|
+
"""Process nested JSON data if present."""
|
|
447
|
+
# Create a copy to avoid modifying the original
|
|
448
|
+
processed_data = message_data.copy()
|
|
449
|
+
|
|
450
|
+
# Handle nested JSON in 'data' field
|
|
451
|
+
if "data" in processed_data and isinstance(
|
|
452
|
+
processed_data["data"], str
|
|
453
|
+
): # pragma: no branch
|
|
454
|
+
try:
|
|
455
|
+
processed_data["data"] = json.loads(processed_data["data"])
|
|
456
|
+
except json.JSONDecodeError:
|
|
457
|
+
LOG.error(
|
|
458
|
+
"Invalid JSON in nested data field: %s", processed_data
|
|
459
|
+
)
|
|
460
|
+
return None
|
|
461
|
+
|
|
462
|
+
return processed_data
|
|
463
|
+
|
|
464
|
+
@staticmethod
|
|
465
|
+
def _create_user_response(
|
|
466
|
+
message_data: dict[str, Any],
|
|
467
|
+
) -> Optional["UserResponse"]:
|
|
468
|
+
"""Create UserResponse object from validated data."""
|
|
469
|
+
try:
|
|
470
|
+
return UserResponse.model_validate(message_data)
|
|
471
|
+
except Exception as e:
|
|
472
|
+
LOG.error(
|
|
473
|
+
"Error parsing user input response: %s - %s",
|
|
474
|
+
message_data,
|
|
475
|
+
str(e),
|
|
476
|
+
)
|
|
477
|
+
return None
|
|
478
|
+
|
|
479
|
+
def parse_pubsub_input(
|
|
480
|
+
self,
|
|
481
|
+
message: dict[str, Any] | None,
|
|
482
|
+
) -> UserResponse | None:
|
|
483
|
+
"""Extract request ID and user input from a message.
|
|
484
|
+
|
|
485
|
+
Parameters
|
|
486
|
+
----------
|
|
487
|
+
message : dict[str, Any]
|
|
488
|
+
The message to parse.
|
|
489
|
+
|
|
490
|
+
Returns
|
|
491
|
+
-------
|
|
492
|
+
UserResponse
|
|
493
|
+
The parsed user response.
|
|
494
|
+
"""
|
|
495
|
+
if not isinstance(message, dict) or "data" not in message:
|
|
496
|
+
LOG.error("Invalid message format or missing 'data': %s", message)
|
|
497
|
+
return None
|
|
498
|
+
message_data = self._extract_message_data(message["data"])
|
|
499
|
+
if message_data is None: # pragma: no cover
|
|
500
|
+
return None
|
|
501
|
+
|
|
502
|
+
if not self._message_has_required_fields(
|
|
503
|
+
message_data
|
|
504
|
+
): # pragma: no cover
|
|
505
|
+
return None
|
|
506
|
+
|
|
507
|
+
processed_data = self._process_nested_data(message_data)
|
|
508
|
+
if processed_data is None: # pragma: no cover
|
|
509
|
+
return None
|
|
510
|
+
|
|
511
|
+
return self._create_user_response(processed_data)
|
|
512
|
+
|
|
513
|
+
@staticmethod
|
|
514
|
+
def try_do(func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
|
|
515
|
+
"""Try to execute.
|
|
516
|
+
|
|
517
|
+
Just to avoid duplicate try/except blocks.
|
|
518
|
+
To only be used if no return value is expected.
|
|
519
|
+
And if we no't need to re-raise the exception.
|
|
520
|
+
Otherwise, we normally try/except at the call site.
|
|
521
|
+
|
|
522
|
+
Parameters
|
|
523
|
+
----------
|
|
524
|
+
func : Callable[..., Any]
|
|
525
|
+
The function to call.
|
|
526
|
+
args : Any
|
|
527
|
+
The function's positional arguments.
|
|
528
|
+
kwargs : Any
|
|
529
|
+
The function's keyword arguments.
|
|
530
|
+
"""
|
|
531
|
+
try:
|
|
532
|
+
func(*args, **kwargs)
|
|
533
|
+
except BaseException: # pragma: no cover
|
|
534
|
+
LOG.error("Error on try_do:")
|
|
535
|
+
LOG.error(tb.format_exc())
|
|
536
|
+
|
|
537
|
+
@staticmethod
|
|
538
|
+
async def a_try_do(
|
|
539
|
+
func: Callable[..., Awaitable[Any]],
|
|
540
|
+
*args: Any,
|
|
541
|
+
**kwargs: Any,
|
|
542
|
+
) -> None:
|
|
543
|
+
"""Async version of try_do.
|
|
544
|
+
|
|
545
|
+
Parameters
|
|
546
|
+
----------
|
|
547
|
+
func : Awaitable[Any]
|
|
548
|
+
The async function to call.
|
|
549
|
+
args : Any
|
|
550
|
+
The positional arguments.
|
|
551
|
+
kwargs : Any
|
|
552
|
+
The keyword arguments.
|
|
553
|
+
"""
|
|
554
|
+
try:
|
|
555
|
+
await func(*args, **kwargs)
|
|
556
|
+
except BaseException: # pragma: no cover
|
|
557
|
+
LOG.error("Error on a_try_do:")
|
|
558
|
+
LOG.error(tb.format_exc())
|
|
559
|
+
|
|
560
|
+
@staticmethod
|
|
561
|
+
def is_request_processed(
|
|
562
|
+
redis_client: Redis,
|
|
563
|
+
task_id: str,
|
|
564
|
+
request_id: str,
|
|
565
|
+
) -> bool:
|
|
566
|
+
"""Check if a request is processed for a task.
|
|
567
|
+
|
|
568
|
+
Parameters
|
|
569
|
+
----------
|
|
570
|
+
redis_client : Redis
|
|
571
|
+
The async Redis client to use.
|
|
572
|
+
task_id : str
|
|
573
|
+
The task ID.
|
|
574
|
+
request_id : str
|
|
575
|
+
The request ID.
|
|
576
|
+
|
|
577
|
+
Returns
|
|
578
|
+
-------
|
|
579
|
+
bool
|
|
580
|
+
True if the request is processed, False otherwise.
|
|
581
|
+
"""
|
|
582
|
+
try:
|
|
583
|
+
return (
|
|
584
|
+
redis_client.zscore(f"processed_requests:{task_id}", request_id)
|
|
585
|
+
is not None
|
|
586
|
+
)
|
|
587
|
+
except BaseException as e: # pragma: no cover
|
|
588
|
+
LOG.error("Error on check request processed: %s", e)
|
|
589
|
+
return False
|
|
590
|
+
|
|
591
|
+
@staticmethod
|
|
592
|
+
async def a_is_request_processed(
|
|
593
|
+
redis_client: AsyncRedis,
|
|
594
|
+
task_id: str,
|
|
595
|
+
request_id: str,
|
|
596
|
+
) -> bool:
|
|
597
|
+
"""Async version of is_request_processed.
|
|
598
|
+
|
|
599
|
+
Parameters
|
|
600
|
+
----------
|
|
601
|
+
redis_client : AsyncRedis
|
|
602
|
+
The async Redis client to use.
|
|
603
|
+
task_id : str
|
|
604
|
+
The task ID.
|
|
605
|
+
request_id : str
|
|
606
|
+
The request ID.
|
|
607
|
+
|
|
608
|
+
Returns
|
|
609
|
+
-------
|
|
610
|
+
bool
|
|
611
|
+
True if the request is processed, False otherwise.
|
|
612
|
+
"""
|
|
613
|
+
try:
|
|
614
|
+
return (
|
|
615
|
+
await redis_client.zscore(
|
|
616
|
+
f"processed_requests:{task_id}", request_id
|
|
617
|
+
)
|
|
618
|
+
is not None
|
|
619
|
+
)
|
|
620
|
+
except BaseException as e: # pragma: no cover
|
|
621
|
+
LOG.error("Error on check request processed: %s", e)
|
|
622
|
+
return False
|
|
623
|
+
|
|
624
|
+
# other static methods for cleanup
|
|
625
|
+
# to be used externally (like in periodic tasks) if needed
|
|
626
|
+
# or after task completion
|
|
627
|
+
@staticmethod
def cleanup_processed_task_requests(
    redis_client: Redis,
    task_id: str,
    retention_period: int = 86400,
) -> None:
    """Cleanup old processed request logs.

    Removes entries older than ``retention_period`` seconds from the
    task's sorted set of processed requests.

    Parameters
    ----------
    redis_client : Redis
        The Redis client.
    task_id : str
        The task ID.
    retention_period : int, optional
        The retention period in seconds, by default 86400.
    """
    cutoff = int(time.time()) - retention_period
    RedisIOStream.try_do(
        redis_client.zremrangebyscore,
        f"processed_requests:{task_id}",
        0,
        cutoff,
    )
+
@staticmethod
def cleanup_processed_requests(
    redis_client: Redis, retention_period: int = 86400
) -> None:
    """Cleanup stale processed requests.

    Scans all per-task processed-request sets and drops entries older
    than ``retention_period`` seconds.

    Parameters
    ----------
    redis_client : Redis
        The Redis client.
    retention_period : int, optional
        The retention period in seconds
    """
    cutoff = int(time.time()) - retention_period
    for entry in redis_client.scan_iter("processed_requests:*", count=100):
        RedisIOStream.try_do(
            redis_client.zremrangebyscore, entry, 0, cutoff
        )
+
@staticmethod
def trim_task_output_streams(
    redis_client: Redis, maxlen: int = 1000, approximate: bool = True
) -> None:
    """Trim task output streams to a max length.

    Parameters
    ----------
    redis_client : Redis
        The Redis client.
    maxlen : int
        The maximum number of entries per stream.
    approximate : bool
        Whether to use approximate trimming (more efficient).
    """
    # Walk all per-task output streams and cap each one.
    for stream_key in redis_client.scan_iter("task:*:output", count=100):
        RedisIOStream.try_do(
            redis_client.xtrim,  # pyright: ignore
            stream_key,
            maxlen=maxlen,
            approximate=approximate,
        )
+
@staticmethod
async def a_cleanup_processed_task_requests(
    redis_client: AsyncRedis, task_id: str, retention_period: int = 86400
) -> None:
    """Async version of cleanup task processed requests.

    Parameters
    ----------
    redis_client : AsyncRedis
        The Redis client.
    task_id : str
        The task ID.
    retention_period : int, optional
        The retention period in seconds, by default 86400.
    """
    cutoff = int(time.time()) - retention_period
    await RedisIOStream.a_try_do(
        redis_client.zremrangebyscore,
        f"processed_requests:{task_id}",
        0,
        cutoff,
    )
+
@staticmethod
async def a_cleanup_processed_requests(
    redis_client: AsyncRedis,
    retention_period: int = 86400,
) -> None:
    """Async version of cleanup stale processed requests.

    Parameters
    ----------
    redis_client : AsyncRedis
        The async Redis client.
    retention_period : int, optional
        The retention period in seconds, by default 86400.
    """
    cutoff = int(time.time()) - retention_period
    async for entry in redis_client.scan_iter(
        "processed_requests:*", count=100
    ):
        await RedisIOStream.a_try_do(
            redis_client.zremrangebyscore, entry, 0, cutoff
        )
+
@staticmethod
async def a_trim_task_output_streams(
    redis_client: AsyncRedis,
    maxlen: int = 1000,
    approximate: bool = True,
    scan_count: int = 100,
) -> None:
    """Trim task output Redis streams to a max length.

    Parameters
    ----------
    redis_client : AsyncRedis
        The Redis client.
    maxlen : int
        The maximum number of entries per stream.
    approximate : bool
        Whether to use approximate trimming (more efficient).
    scan_count : int
        The number of keys to scan per iteration.
    """
    total_trimmed = 0

    async for stream_key in redis_client.scan_iter(
        "task:*:output", count=scan_count
    ):  # pragma: no branch
        # Measure before/after so we can report how much was removed.
        length_before = await redis_client.xlen(stream_key)
        await RedisIOStream.a_try_do(
            redis_client.xtrim,  # pyright: ignore
            stream_key,
            maxlen=maxlen,
            approximate=approximate,
        )
        length_after = await redis_client.xlen(stream_key)
        removed = length_before - length_after
        if removed > 0:  # pragma: no branch
            total_trimmed += removed
            LOG.debug("Trimmed %d entries from %s", removed, stream_key)

    LOG.info("Total trimmed entries: %d", total_trimmed)
    # NOTE: Prometheus counters (e.g. total trimmed entries/streams)
    # could be emitted here to help fine-tune maxlen and scan_count.