jettask 0.2.19__py3-none-any.whl → 0.2.23__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jettask/__init__.py +12 -3
- jettask/cli.py +314 -228
- jettask/config/__init__.py +9 -1
- jettask/config/config.py +245 -0
- jettask/config/env_loader.py +381 -0
- jettask/config/lua_scripts.py +158 -0
- jettask/config/nacos_config.py +132 -5
- jettask/core/__init__.py +1 -1
- jettask/core/app.py +1573 -666
- jettask/core/app_importer.py +33 -16
- jettask/core/container.py +532 -0
- jettask/core/task.py +1 -4
- jettask/core/unified_manager_base.py +2 -2
- jettask/executor/__init__.py +38 -0
- jettask/executor/core.py +625 -0
- jettask/executor/executor.py +338 -0
- jettask/executor/orchestrator.py +290 -0
- jettask/executor/process_entry.py +638 -0
- jettask/executor/task_executor.py +317 -0
- jettask/messaging/__init__.py +68 -0
- jettask/messaging/event_pool.py +2188 -0
- jettask/messaging/reader.py +519 -0
- jettask/messaging/registry.py +266 -0
- jettask/messaging/scanner.py +369 -0
- jettask/messaging/sender.py +312 -0
- jettask/persistence/__init__.py +118 -0
- jettask/persistence/backlog_monitor.py +567 -0
- jettask/{backend/data_access.py → persistence/base.py} +58 -57
- jettask/persistence/consumer.py +315 -0
- jettask/{core → persistence}/db_manager.py +23 -22
- jettask/persistence/maintenance.py +81 -0
- jettask/persistence/message_consumer.py +259 -0
- jettask/{backend/namespace_data_access.py → persistence/namespace.py} +66 -98
- jettask/persistence/offline_recovery.py +196 -0
- jettask/persistence/queue_discovery.py +215 -0
- jettask/persistence/task_persistence.py +218 -0
- jettask/persistence/task_updater.py +583 -0
- jettask/scheduler/__init__.py +2 -2
- jettask/scheduler/loader.py +6 -5
- jettask/scheduler/run_scheduler.py +1 -1
- jettask/scheduler/scheduler.py +7 -7
- jettask/scheduler/{unified_scheduler_manager.py → scheduler_coordinator.py} +18 -13
- jettask/task/__init__.py +16 -0
- jettask/{router.py → task/router.py} +26 -8
- jettask/task/task_center/__init__.py +9 -0
- jettask/task/task_executor.py +318 -0
- jettask/task/task_registry.py +291 -0
- jettask/test_connection_monitor.py +73 -0
- jettask/utils/__init__.py +31 -1
- jettask/{monitor/run_backlog_collector.py → utils/backlog_collector.py} +1 -1
- jettask/utils/db_connector.py +1629 -0
- jettask/{db_init.py → utils/db_init.py} +1 -1
- jettask/utils/rate_limit/__init__.py +30 -0
- jettask/utils/rate_limit/concurrency_limiter.py +665 -0
- jettask/utils/rate_limit/config.py +145 -0
- jettask/utils/rate_limit/limiter.py +41 -0
- jettask/utils/rate_limit/manager.py +269 -0
- jettask/utils/rate_limit/qps_limiter.py +154 -0
- jettask/utils/rate_limit/task_limiter.py +384 -0
- jettask/utils/serializer.py +3 -0
- jettask/{monitor/stream_backlog_monitor.py → utils/stream_backlog.py} +14 -6
- jettask/utils/time_sync.py +173 -0
- jettask/webui/__init__.py +27 -0
- jettask/{api/v1 → webui/api}/alerts.py +1 -1
- jettask/{api/v1 → webui/api}/analytics.py +2 -2
- jettask/{api/v1 → webui/api}/namespaces.py +1 -1
- jettask/{api/v1 → webui/api}/overview.py +1 -1
- jettask/{api/v1 → webui/api}/queues.py +3 -3
- jettask/{api/v1 → webui/api}/scheduled.py +1 -1
- jettask/{api/v1 → webui/api}/settings.py +1 -1
- jettask/{api.py → webui/app.py} +253 -145
- jettask/webui/namespace_manager/__init__.py +10 -0
- jettask/{multi_namespace_consumer.py → webui/namespace_manager/multi.py} +69 -22
- jettask/{unified_consumer_manager.py → webui/namespace_manager/unified.py} +1 -1
- jettask/{run.py → webui/run.py} +2 -2
- jettask/{services → webui/services}/__init__.py +1 -3
- jettask/{services → webui/services}/overview_service.py +34 -16
- jettask/{services → webui/services}/queue_service.py +1 -1
- jettask/{backend → webui/services}/queue_stats_v2.py +1 -1
- jettask/{services → webui/services}/settings_service.py +1 -1
- jettask/worker/__init__.py +53 -0
- jettask/worker/lifecycle.py +1507 -0
- jettask/worker/manager.py +583 -0
- jettask/{core/offline_worker_recovery.py → worker/recovery.py} +268 -175
- {jettask-0.2.19.dist-info → jettask-0.2.23.dist-info}/METADATA +2 -71
- jettask-0.2.23.dist-info/RECORD +145 -0
- jettask/__main__.py +0 -140
- jettask/api/__init__.py +0 -103
- jettask/backend/__init__.py +0 -1
- jettask/backend/api/__init__.py +0 -3
- jettask/backend/api/v1/__init__.py +0 -17
- jettask/backend/api/v1/monitoring.py +0 -431
- jettask/backend/api/v1/namespaces.py +0 -504
- jettask/backend/api/v1/queues.py +0 -342
- jettask/backend/api/v1/tasks.py +0 -367
- jettask/backend/core/__init__.py +0 -3
- jettask/backend/core/cache.py +0 -221
- jettask/backend/core/database.py +0 -200
- jettask/backend/core/exceptions.py +0 -102
- jettask/backend/dependencies.py +0 -261
- jettask/backend/init_meta_db.py +0 -158
- jettask/backend/main.py +0 -1426
- jettask/backend/main_unified.py +0 -78
- jettask/backend/main_v2.py +0 -394
- jettask/backend/models/__init__.py +0 -3
- jettask/backend/models/requests.py +0 -236
- jettask/backend/models/responses.py +0 -230
- jettask/backend/namespace_api_old.py +0 -267
- jettask/backend/services/__init__.py +0 -3
- jettask/backend/start.py +0 -42
- jettask/backend/unified_api_router.py +0 -1541
- jettask/cleanup_deprecated_tables.sql +0 -16
- jettask/core/consumer_manager.py +0 -1695
- jettask/core/delay_scanner.py +0 -256
- jettask/core/event_pool.py +0 -1700
- jettask/core/heartbeat_process.py +0 -222
- jettask/core/task_batch.py +0 -153
- jettask/core/worker_scanner.py +0 -271
- jettask/executors/__init__.py +0 -5
- jettask/executors/asyncio.py +0 -876
- jettask/executors/base.py +0 -30
- jettask/executors/common.py +0 -148
- jettask/executors/multi_asyncio.py +0 -309
- jettask/gradio_app.py +0 -570
- jettask/integrated_gradio_app.py +0 -1088
- jettask/main.py +0 -0
- jettask/monitoring/__init__.py +0 -3
- jettask/pg_consumer.py +0 -1896
- jettask/run_monitor.py +0 -22
- jettask/run_webui.py +0 -148
- jettask/scheduler/multi_namespace_scheduler.py +0 -294
- jettask/scheduler/unified_manager.py +0 -450
- jettask/task_center_client.py +0 -150
- jettask/utils/serializer_optimized.py +0 -33
- jettask/webui_exceptions.py +0 -67
- jettask-0.2.19.dist-info/RECORD +0 -150
- /jettask/{constants.py → config/constants.py} +0 -0
- /jettask/{backend/config.py → config/task_center.py} +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/pg_consumer_v2.py +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/sql/add_execution_time_field.sql +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/sql/create_new_tables.sql +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/sql/create_tables_v3.sql +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/sql/migrate_to_new_structure.sql +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/sql/modify_time_fields.sql +0 -0
- /jettask/{pg_consumer → messaging/pg_consumer}/sql_utils.py +0 -0
- /jettask/{models.py → persistence/models.py} +0 -0
- /jettask/scheduler/{manager.py → task_crud.py} +0 -0
- /jettask/{schema.sql → schemas/schema.sql} +0 -0
- /jettask/{task_center.py → task/task_center/client.py} +0 -0
- /jettask/{monitoring → utils}/file_watcher.py +0 -0
- /jettask/{services/redis_monitor_service.py → utils/redis_monitor.py} +0 -0
- /jettask/{api/v1 → webui/api}/__init__.py +0 -0
- /jettask/{webui_config.py → webui/config.py} +0 -0
- /jettask/{webui_models → webui/models}/__init__.py +0 -0
- /jettask/{webui_models → webui/models}/namespace.py +0 -0
- /jettask/{services → webui/services}/alert_service.py +0 -0
- /jettask/{services → webui/services}/analytics_service.py +0 -0
- /jettask/{services → webui/services}/scheduled_task_service.py +0 -0
- /jettask/{services → webui/services}/task_service.py +0 -0
- /jettask/{webui_sql → webui/sql}/batch_upsert_functions.sql +0 -0
- /jettask/{webui_sql → webui/sql}/verify_database.sql +0 -0
- {jettask-0.2.19.dist-info → jettask-0.2.23.dist-info}/WHEEL +0 -0
- {jettask-0.2.19.dist-info → jettask-0.2.23.dist-info}/entry_points.txt +0 -0
- {jettask-0.2.19.dist-info → jettask-0.2.23.dist-info}/licenses/LICENSE +0 -0
- {jettask-0.2.19.dist-info → jettask-0.2.23.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,519 @@
|
|
1
|
+
"""
|
2
|
+
消息读取器 - 统一的消息读取和确认接口
|
3
|
+
从 EventPool 中提取的消息读取逻辑
|
4
|
+
"""
|
5
|
+
|
6
|
+
import logging
|
7
|
+
from typing import List, Dict, Optional, Tuple
|
8
|
+
from redis.asyncio import Redis as AsyncRedis
|
9
|
+
|
10
|
+
from ..utils.serializer import loads_str
|
11
|
+
|
12
|
+
logger = logging.getLogger('app')
|
13
|
+
|
14
|
+
|
15
|
+
class MessageReader:
    """
    Unified message reading interface for Redis Streams.

    Responsibilities:
    1. Read messages from Redis Streams (via consumer groups)
    2. Acknowledge messages (ACK)
    3. Support priority queues
    4. Support reading both pending (historical) and new messages
    5. Track read progress (offset)
    """

    def __init__(
        self,
        async_redis_client: "AsyncRedis",
        async_binary_redis_client: "AsyncRedis",
        redis_prefix: str = 'jettask'
    ):
        """
        Initialize the message reader.

        Args:
            async_redis_client: Async Redis client (text mode).
            async_binary_redis_client: Async Redis client (binary mode; Stream
                payloads carry raw msgpack bytes that must not be UTF-8 decoded).
            redis_prefix: Prefix applied to all Redis keys.
        """
        self.redis = async_redis_client
        self.binary_redis = async_binary_redis_client
        self.redis_prefix = redis_prefix

        logger.debug(f"MessageReader initialized with prefix: {redis_prefix}")

    def _get_prefixed_queue_name(self, queue: str) -> str:
        """Return the fully-qualified stream key for *queue*."""
        return f"{self.redis_prefix}:QUEUE:{queue}"

    def _decode_stream_message(self, message_id, message_fields) -> Tuple[str, object]:
        """
        Decode one raw stream entry into ``(message_id, data)``.

        The ``data`` field holds binary msgpack and is parsed with
        ``loads_str`` (falling back to the raw bytes on parse failure);
        every other field is decoded as a UTF-8 string. When an ``offset``
        field is present and the payload is a dict, it is attached to the
        payload as ``data['_offset']`` (int).

        Shared by :meth:`read_messages` and :meth:`claim_messages` so both
        paths decode entries identically.
        """
        if isinstance(message_id, bytes):
            message_id = message_id.decode('utf-8')

        decoded_fields = {}
        for key, value in message_fields.items():
            key_str = key.decode('utf-8') if isinstance(key, bytes) else key
            if key_str == 'data':
                # Keep raw bytes: the payload is msgpack, not UTF-8 text.
                decoded_fields[key_str] = value
            else:
                # Other fields (e.g. offset) are safe to decode as text.
                decoded_fields[key_str] = (
                    value.decode('utf-8') if isinstance(value, bytes) else value
                )

        if 'data' in decoded_fields:
            try:
                data = loads_str(decoded_fields['data'])
            except Exception as e:
                logger.warning(f"Failed to parse message data: {e}")
                data = decoded_fields['data']
        else:
            data = decoded_fields

        # Attach the stream offset as metadata, but only onto dict payloads.
        if 'offset' in decoded_fields and isinstance(data, dict):
            data['_offset'] = int(decoded_fields['offset'])

        return message_id, data

    async def create_consumer_group(
        self,
        queue: str,
        group_name: str,
        start_id: str = "0"
    ) -> bool:
        """
        Create a consumer group.

        Args:
            queue: Queue name (without prefix).
            group_name: Consumer group name.
            start_id: Starting message ID ("0" = from the earliest message,
                "$" = only new messages).

        Returns:
            bool: True if created; False if the group already existed.

        Raises:
            Exception: Any Redis error other than BUSYGROUP is re-raised.
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            await self.redis.xgroup_create(
                name=prefixed_queue,
                groupname=group_name,
                id=start_id,
                mkstream=True  # create the stream if it does not exist yet
            )
            logger.info(f"Created consumer group {group_name} for queue {queue}")
            return True
        except Exception as e:
            if "BUSYGROUP" in str(e):
                logger.debug(f"Consumer group {group_name} already exists for queue {queue}")
                return False
            logger.error(f"Error creating consumer group {group_name} for queue {queue}: {e}")
            raise

    async def read_messages(
        self,
        queue: str,
        group_name: str,
        consumer_name: str,
        count: int = 1,
        block: int = 1000,
        start_id: str = ">",
        *,
        _auto_create: bool = True
    ) -> List[Tuple[str, Dict]]:
        """
        Read messages from a queue (using a consumer group).

        Args:
            queue: Queue name (without prefix).
            group_name: Consumer group name.
            consumer_name: Consumer name.
            count: Maximum number of messages to read.
            block: Block timeout in milliseconds. NOTE: with redis-py,
                ``block=0`` blocks indefinitely; pass ``None`` to return
                immediately without blocking.
            start_id: Starting message ID
                - ">" reads only new messages (not yet delivered to this group)
                - "0-0" reads this consumer's pending messages (PEL)
            _auto_create: Internal flag; when True, a missing consumer group
                (NOGROUP) is created automatically and the read is retried
                exactly once.

        Returns:
            List[Tuple[str, Dict]]: [(message_id, message_data), ...]

        Examples:
            # Read new messages
            messages = await reader.read_messages(
                "orders", "order_processor", "worker1", count=10
            )

            # Read pending messages (PEL)
            pending = await reader.read_messages(
                "orders", "order_processor", "worker1", start_id="0-0"
            )
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            # xreadgroup returns: [(stream_name, [(id, {field: value})])]
            results = await self.binary_redis.xreadgroup(
                groupname=group_name,
                consumername=consumer_name,
                streams={prefixed_queue: start_id},
                count=count,
                block=block
            )

            if not results:
                return []

            messages = []
            for _stream_name, stream_messages in results:
                for message_id, message_fields in stream_messages:
                    messages.append(
                        self._decode_stream_message(message_id, message_fields)
                    )

            logger.debug(f"Read {len(messages)} messages from queue {queue}")
            return messages

        except Exception as e:
            if "NOGROUP" in str(e) and _auto_create:
                logger.warning(f"Consumer group {group_name} does not exist for queue {queue}")
                # Create the group, then retry exactly once; _auto_create=False
                # prevents unbounded recursion if the group still cannot be read.
                await self.create_consumer_group(queue, group_name, start_id="0")
                return await self.read_messages(
                    queue, group_name, consumer_name, count, block, start_id,
                    _auto_create=False
                )
            logger.error(f"Error reading messages from queue {queue}: {e}")
            raise

    async def read_from_multiple_queues(
        self,
        queues: List[str],
        group_name: str,
        consumer_name: str,
        count: int = 1,
        block: int = 1000,
        priority_order: bool = True
    ) -> List[Tuple[str, str, Dict]]:
        """
        Read messages from several queues (with optional priority semantics).

        Args:
            queues: Queue names ordered by priority (first = highest).
            group_name: Consumer group name.
            consumer_name: Consumer name.
            count: Total number of messages to read across queues.
            block: Block timeout in milliseconds (see ``read_messages``).
            priority_order: When True, stop after the first queue that
                yields any messages (lower-priority queues are skipped).

        Returns:
            List[Tuple[str, str, Dict]]: [(queue, message_id, message_data), ...]
        """
        all_messages = []
        messages_needed = count

        for queue in queues:
            if messages_needed <= 0:
                break

            messages = await self.read_messages(
                queue, group_name, consumer_name,
                count=messages_needed, block=block
            )

            # Tag each message with the queue it came from.
            for message_id, message_data in messages:
                all_messages.append((queue, message_id, message_data))

            messages_needed -= len(messages)

            # In priority mode, a non-empty higher-priority queue wins outright.
            if priority_order and messages:
                break

        return all_messages

    async def acknowledge_message(
        self,
        queue: str,
        group_name: str,
        message_id: str
    ) -> bool:
        """
        Acknowledge a single message (ACK).

        Args:
            queue: Queue name (without prefix).
            group_name: Consumer group name.
            message_id: Message ID.

        Returns:
            bool: True if the message was acknowledged; False on error or
            if the ID was not pending.
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            result = await self.binary_redis.xack(
                prefixed_queue,
                group_name,
                message_id
            )
            logger.debug(f"ACK message {message_id} in queue {queue}")
            return result > 0
        except Exception as e:
            logger.error(f"Error acknowledging message {message_id} in queue {queue}: {e}")
            return False

    async def acknowledge_messages(
        self,
        queue: str,
        group_name: str,
        message_ids: List[str]
    ) -> int:
        """
        Acknowledge a batch of messages.

        Args:
            queue: Queue name (without prefix).
            group_name: Consumer group name.
            message_ids: Message IDs to acknowledge.

        Returns:
            int: Number of messages successfully acknowledged (0 on error
            or empty input).
        """
        if not message_ids:
            return 0

        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            result = await self.binary_redis.xack(
                prefixed_queue,
                group_name,
                *message_ids
            )
            logger.debug(f"ACK {result} messages in queue {queue}")
            return result
        except Exception as e:
            logger.error(f"Error acknowledging {len(message_ids)} messages in queue {queue}: {e}")
            return 0

    async def get_pending_messages(
        self,
        queue: str,
        group_name: str,
        consumer_name: Optional[str] = None,
        count: int = 10
    ) -> List[Dict]:
        """
        Get pending messages (PEL - Pending Entries List).

        Args:
            queue: Queue name (without prefix).
            group_name: Consumer group name.
            consumer_name: Optional consumer name; when omitted, the whole
                group's PEL is inspected.
            count: Maximum number of entries to return.

        Returns:
            List[Dict]: Pending entry info dicts with keys ``message_id``,
            ``consumer``, ``time_since_delivered`` and ``times_delivered``.
            An empty list is returned on error.
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            # XPENDING over the full ID range; scope to one consumer if given.
            if consumer_name:
                result = await self.binary_redis.xpending_range(
                    prefixed_queue,
                    group_name,
                    min="-",
                    max="+",
                    count=count,
                    consumername=consumer_name
                )
            else:
                result = await self.binary_redis.xpending_range(
                    prefixed_queue,
                    group_name,
                    min="-",
                    max="+",
                    count=count
                )

            pending = []
            for item in result:
                pending.append({
                    'message_id': item['message_id'].decode('utf-8') if isinstance(item['message_id'], bytes) else item['message_id'],
                    'consumer': item['consumer'].decode('utf-8') if isinstance(item['consumer'], bytes) else item['consumer'],
                    'time_since_delivered': item['time_since_delivered'],
                    'times_delivered': item['times_delivered']
                })

            return pending

        except Exception as e:
            logger.error(f"Error getting pending messages from queue {queue}: {e}")
            return []

    async def claim_messages(
        self,
        queue: str,
        group_name: str,
        consumer_name: str,
        message_ids: List[str],
        min_idle_time: int = 60000
    ) -> List[Tuple[str, Dict]]:
        """
        Claim messages (transfer ownership from another consumer to this one).
        Used to take over timed-out pending messages.

        Args:
            queue: Queue name (without prefix).
            group_name: Consumer group name.
            consumer_name: The claiming consumer's name.
            message_ids: Message IDs to claim.
            min_idle_time: Minimum idle time in milliseconds; only messages
                idle longer than this are claimed.

        Returns:
            List[Tuple[str, Dict]]: Successfully claimed messages; empty on
            error.
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            results = await self.binary_redis.xclaim(
                prefixed_queue,
                group_name,
                consumer_name,
                min_idle_time,
                message_ids
            )

            # XCLAIM returns entries in the same shape as XREADGROUP items.
            messages = [
                self._decode_stream_message(message_id, message_fields)
                for message_id, message_fields in results
            ]

            logger.info(f"Claimed {len(messages)} messages from queue {queue}")
            return messages

        except Exception as e:
            logger.error(f"Error claiming messages from queue {queue}: {e}")
            return []

    async def update_read_offset(
        self,
        queue: str,
        group_name: str,
        offset: int
    ):
        """
        Update the read progress (offset) for a queue/task pair.

        The offset only ever moves forward: a Lua script compares the stored
        value and keeps the maximum, so concurrent readers cannot regress it.

        Args:
            queue: Queue name (without prefix; may include a priority suffix,
                e.g. "robust_bench2:8").
            group_name: Consumer group name
                (format: {prefix}:QUEUE:{base_queue}:{task_name}).
            offset: New offset value.
        """
        read_offsets_key = f"{self.redis_prefix}:READ_OFFSETS"

        # The task name is the last segment of the group name.
        task_name = group_name.split(':')[-1]

        # Hash field = queue name (incl. priority) + task name,
        # e.g. "robust_bench2:8:benchmark_task".
        field = f"{queue}:{task_name}"

        try:
            # Atomically keep the maximum offset.
            lua_script = """
            local hash_key = KEYS[1]
            local field = KEYS[2]
            local new_value = tonumber(ARGV[1])

            local current = redis.call('HGET', hash_key, field)
            if current == false or tonumber(current) < new_value then
                redis.call('HSET', hash_key, field, new_value)
                return 1
            else
                return 0
            end
            """

            await self.redis.eval(
                lua_script,
                2,
                read_offsets_key,
                field,
                str(offset)
            )

            logger.debug(f"Updated read offset for {field} to {offset}")

        except Exception as e:
            logger.error(f"Error updating read offset: {e}")

    async def get_queue_length(self, queue: str) -> int:
        """
        Get the length of a queue.

        Args:
            queue: Queue name (without prefix).

        Returns:
            int: Number of messages in the stream.
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)
        return await self.binary_redis.xlen(prefixed_queue)

    async def get_consumer_group_info(self, queue: str) -> List[Dict]:
        """
        Get consumer group info for a queue.

        Args:
            queue: Queue name (without prefix).

        Returns:
            List[Dict]: Consumer group info entries; empty list on error.
        """
        prefixed_queue = self._get_prefixed_queue_name(queue)

        try:
            groups = await self.binary_redis.xinfo_groups(prefixed_queue)
            return groups
        except Exception as e:
            logger.error(f"Error getting consumer group info for queue {queue}: {e}")
            return []
|