jettask 0.2.23__py3-none-any.whl → 0.2.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jettask/__init__.py +2 -0
- jettask/cli.py +12 -8
- jettask/config/lua_scripts.py +37 -0
- jettask/config/nacos_config.py +1 -1
- jettask/core/app.py +313 -340
- jettask/core/container.py +4 -4
- jettask/{persistence → core}/namespace.py +93 -27
- jettask/core/task.py +16 -9
- jettask/core/unified_manager_base.py +136 -26
- jettask/db/__init__.py +67 -0
- jettask/db/base.py +137 -0
- jettask/{utils/db_connector.py → db/connector.py} +130 -26
- jettask/db/models/__init__.py +16 -0
- jettask/db/models/scheduled_task.py +196 -0
- jettask/db/models/task.py +77 -0
- jettask/db/models/task_run.py +85 -0
- jettask/executor/__init__.py +0 -15
- jettask/executor/core.py +76 -31
- jettask/executor/process_entry.py +29 -114
- jettask/executor/task_executor.py +4 -0
- jettask/messaging/event_pool.py +928 -685
- jettask/messaging/scanner.py +30 -0
- jettask/persistence/__init__.py +28 -103
- jettask/persistence/buffer.py +170 -0
- jettask/persistence/consumer.py +330 -249
- jettask/persistence/manager.py +304 -0
- jettask/persistence/persistence.py +391 -0
- jettask/scheduler/__init__.py +15 -3
- jettask/scheduler/{task_crud.py → database.py} +61 -57
- jettask/scheduler/loader.py +2 -2
- jettask/scheduler/{scheduler_coordinator.py → manager.py} +23 -6
- jettask/scheduler/models.py +14 -10
- jettask/scheduler/schedule.py +166 -0
- jettask/scheduler/scheduler.py +12 -11
- jettask/schemas/__init__.py +50 -1
- jettask/schemas/backlog.py +43 -6
- jettask/schemas/namespace.py +70 -19
- jettask/schemas/queue.py +19 -3
- jettask/schemas/responses.py +493 -0
- jettask/task/__init__.py +0 -2
- jettask/task/router.py +3 -0
- jettask/test_connection_monitor.py +1 -1
- jettask/utils/__init__.py +7 -5
- jettask/utils/db_init.py +8 -4
- jettask/utils/namespace_dep.py +167 -0
- jettask/utils/queue_matcher.py +186 -0
- jettask/utils/rate_limit/concurrency_limiter.py +7 -1
- jettask/utils/stream_backlog.py +1 -1
- jettask/webui/__init__.py +0 -1
- jettask/webui/api/__init__.py +4 -4
- jettask/webui/api/alerts.py +806 -71
- jettask/webui/api/example_refactored.py +400 -0
- jettask/webui/api/namespaces.py +390 -45
- jettask/webui/api/overview.py +300 -54
- jettask/webui/api/queues.py +971 -267
- jettask/webui/api/scheduled.py +1249 -56
- jettask/webui/api/settings.py +129 -7
- jettask/webui/api/workers.py +442 -0
- jettask/webui/app.py +46 -2329
- jettask/webui/middleware/__init__.py +6 -0
- jettask/webui/middleware/namespace_middleware.py +135 -0
- jettask/webui/services/__init__.py +146 -0
- jettask/webui/services/heartbeat_service.py +251 -0
- jettask/webui/services/overview_service.py +60 -51
- jettask/webui/services/queue_monitor_service.py +426 -0
- jettask/webui/services/redis_monitor_service.py +87 -0
- jettask/webui/services/settings_service.py +174 -111
- jettask/webui/services/task_monitor_service.py +222 -0
- jettask/webui/services/timeline_pg_service.py +452 -0
- jettask/webui/services/timeline_service.py +189 -0
- jettask/webui/services/worker_monitor_service.py +467 -0
- jettask/webui/utils/__init__.py +11 -0
- jettask/webui/utils/time_utils.py +122 -0
- jettask/worker/lifecycle.py +8 -2
- {jettask-0.2.23.dist-info → jettask-0.2.24.dist-info}/METADATA +1 -1
- jettask-0.2.24.dist-info/RECORD +142 -0
- jettask/executor/executor.py +0 -338
- jettask/persistence/backlog_monitor.py +0 -567
- jettask/persistence/base.py +0 -2334
- jettask/persistence/db_manager.py +0 -516
- jettask/persistence/maintenance.py +0 -81
- jettask/persistence/message_consumer.py +0 -259
- jettask/persistence/models.py +0 -49
- jettask/persistence/offline_recovery.py +0 -196
- jettask/persistence/queue_discovery.py +0 -215
- jettask/persistence/task_persistence.py +0 -218
- jettask/persistence/task_updater.py +0 -583
- jettask/scheduler/add_execution_count.sql +0 -11
- jettask/scheduler/add_priority_field.sql +0 -26
- jettask/scheduler/add_scheduler_id.sql +0 -25
- jettask/scheduler/add_scheduler_id_index.sql +0 -10
- jettask/scheduler/make_scheduler_id_required.sql +0 -28
- jettask/scheduler/migrate_interval_seconds.sql +0 -9
- jettask/scheduler/performance_optimization.sql +0 -45
- jettask/scheduler/run_scheduler.py +0 -186
- jettask/scheduler/schema.sql +0 -84
- jettask/task/task_executor.py +0 -318
- jettask/webui/api/analytics.py +0 -323
- jettask/webui/config.py +0 -90
- jettask/webui/models/__init__.py +0 -3
- jettask/webui/models/namespace.py +0 -63
- jettask/webui/namespace_manager/__init__.py +0 -10
- jettask/webui/namespace_manager/multi.py +0 -593
- jettask/webui/namespace_manager/unified.py +0 -193
- jettask/webui/run.py +0 -46
- jettask-0.2.23.dist-info/RECORD +0 -145
- {jettask-0.2.23.dist-info → jettask-0.2.24.dist-info}/WHEEL +0 -0
- {jettask-0.2.23.dist-info → jettask-0.2.24.dist-info}/entry_points.txt +0 -0
- {jettask-0.2.23.dist-info → jettask-0.2.24.dist-info}/licenses/LICENSE +0 -0
- {jettask-0.2.23.dist-info → jettask-0.2.24.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,222 @@
|
|
1
|
+
"""
|
2
|
+
任务监控服务
|
3
|
+
|
4
|
+
提供任务相关的监控功能
|
5
|
+
"""
|
6
|
+
import logging
|
7
|
+
from typing import Optional, Dict, Any, List
|
8
|
+
from datetime import datetime
|
9
|
+
import redis.asyncio as aioredis
|
10
|
+
|
11
|
+
from .redis_monitor_service import RedisMonitorService
|
12
|
+
|
13
|
+
logger = logging.getLogger(__name__)
|
14
|
+
|
15
|
+
|
16
|
+
class TaskMonitorService:
    """Task monitoring service.

    Reads task (Redis Stream message) information through the shared
    RedisMonitorService connection: single-task lookup, stream statistics
    and queue task listings.
    """

    def __init__(self, redis_service: "RedisMonitorService"):
        """
        Initialize the task monitoring service.

        Args:
            redis_service: Redis monitoring base-service instance providing
                the client, key prefix and queue-name helpers.
        """
        self.redis_service = redis_service

    @property
    def redis(self) -> "aioredis.Redis":
        """The shared async Redis client."""
        return self.redis_service.redis

    @property
    def redis_prefix(self) -> str:
        """Redis key prefix for this namespace."""
        return self.redis_service.redis_prefix

    @staticmethod
    def _to_str(value):
        """Decode bytes returned by Redis to str; pass str through unchanged."""
        return value.decode('utf-8') if isinstance(value, bytes) else value

    async def get_task_info(self, stream_id: str, queue_name: str) -> Optional[Dict[str, Any]]:
        """
        Get detailed information for a single task.

        Args:
            stream_id: Stream entry ID.
            queue_name: Queue name.

        Returns:
            Task info dict, or None when the task does not exist or an
            error occurs (errors are logged, never raised).
        """
        try:
            prefixed_queue = self.redis_service.get_prefixed_queue_name(queue_name)

            # Fetch exactly this entry from the stream.
            messages = await self.redis.xrange(prefixed_queue, min=stream_id, max=stream_id, count=1)

            if not messages:
                logger.warning(f"Task not found in stream: {stream_id} in queue {queue_name}")
                return None

            msg_id, msg_data = messages[0]

            # Check whether the message is still pending in the consumer group.
            pending_entries = await self.redis.xpending_range(
                prefixed_queue,
                f"{self.redis_prefix}:GROUP:{queue_name}",
                min=msg_id,
                max=msg_id,
                count=1
            )

            is_pending = len(pending_entries) > 0
            # BUGFIX: the consumer may already be str depending on the
            # client's decode_responses setting; decode only when bytes
            # (consistent with get_queue_tasks).
            consumer_name = self._to_str(pending_entries[0]["consumer"]) if is_pending else None
            delivery_count = pending_entries[0]["times_delivered"] if is_pending else 0

            # Normalize the id so the timestamp parse works for bytes or str.
            sid = self._to_str(msg_id)
            try:
                timestamp_ms = int(sid.split('-')[0])
            except (ValueError, IndexError):
                # BUGFIX: guard malformed ids instead of raising
                # (same guard get_queue_tasks already has).
                timestamp_ms = 0

            task_info = {
                "stream_id": sid,
                "queue": queue_name,
                "data": msg_data,
                "is_pending": is_pending,
                "consumer": consumer_name,
                "delivery_count": delivery_count,
                "timestamp": timestamp_ms
            }

            logger.debug(f"Retrieved task info for {stream_id}: pending={is_pending}, consumer={consumer_name}")
            return task_info

        except Exception as e:
            logger.error(f"Error getting task info for {stream_id} in queue {queue_name}: {e}", exc_info=True)
            return None

    async def get_stream_info(self, queue_name: str) -> Optional[Dict[str, Any]]:
        """
        Get statistics for a queue's underlying Redis Stream.

        Args:
            queue_name: Queue name.

        Returns:
            Dict with length / first_entry / last_entry / groups, or None
            on error (errors are logged, never raised).
        """
        try:
            prefixed_queue = self.redis_service.get_prefixed_queue_name(queue_name)

            # XINFO STREAM returns length, boundary entries and group count.
            # NOTE(review): assumes the client decodes the reply keys to str
            # ("length", "first-entry", ...) — confirm decode_responses.
            info = await self.redis.xinfo_stream(prefixed_queue)

            stream_info = {
                "length": info.get("length", 0),
                "first_entry": info.get("first-entry"),
                "last_entry": info.get("last-entry"),
                "groups": info.get("groups", 0)
            }

            logger.debug(f"Retrieved stream info for queue {queue_name}: length={stream_info['length']}")
            return stream_info

        except Exception as e:
            logger.error(f"Error getting stream info for queue {queue_name}: {e}", exc_info=True)
            return None

    async def get_queue_tasks(
        self,
        queue_name: str,
        start: str = "-",
        end: str = "+",
        count: int = 100,
        reverse: bool = False
    ) -> List[Dict[str, Any]]:
        """
        List tasks in a queue.

        Args:
            queue_name: Queue name.
            start: Start ID (default "-" = smallest ID).
            end: End ID (default "+" = largest ID).
            count: Maximum number of tasks to return.
            reverse: Fetch newest-first when True.

        Returns:
            List of task dicts (empty on error; errors are logged).
        """
        try:
            prefixed_queue = self.redis_service.get_prefixed_queue_name(queue_name)

            # Choose scan direction.
            if reverse:
                messages = await self.redis.xrevrange(prefixed_queue, max=end, min=start, count=count)
            else:
                messages = await self.redis.xrange(prefixed_queue, min=start, max=end, count=count)

            if not messages:
                logger.debug(f"No tasks found in queue {queue_name}")
                return []

            # Batch-fetch pending info once instead of per message.
            group_name = f"{self.redis_prefix}:GROUP:{queue_name}"

            pending_map: Dict[str, Dict[str, Any]] = {}
            try:
                pending_entries = await self.redis.xpending_range(
                    prefixed_queue,
                    group_name,
                    min="-",
                    max="+",
                    count=10000  # enough pending entries for monitoring purposes
                )
                # Build {msg_id: {consumer, delivery_count}} with normalized
                # (str) ids so lookups below match regardless of whether the
                # client returns bytes or str.
                for entry in pending_entries:
                    pending_map[self._to_str(entry["message_id"])] = {
                        "consumer": self._to_str(entry["consumer"]),
                        "delivery_count": entry["times_delivered"]
                    }
            except Exception as e:
                logger.warning(f"Error getting pending info for queue {queue_name}: {e}")
                pending_map = {}

            tasks = []
            for msg_id, msg_data in messages:
                # BUGFIX: normalize the stream id before the pending lookup;
                # a bytes id would otherwise never match the pending entries.
                sid = self._to_str(msg_id)
                pending_info = pending_map.get(sid)
                is_pending = pending_info is not None

                # Parse the millisecond timestamp embedded in the stream id.
                try:
                    timestamp_ms = int(sid.split('-')[0])
                except (ValueError, IndexError):
                    timestamp_ms = 0

                # Decode the message payload (keys/values may be bytes).
                task_data = {}
                for key, value in msg_data.items():
                    task_data[self._to_str(key)] = self._to_str(value)

                tasks.append({
                    "stream_id": sid,
                    "queue": queue_name,
                    "data": task_data,
                    "is_pending": is_pending,
                    "consumer": pending_info["consumer"] if is_pending else None,
                    "delivery_count": pending_info["delivery_count"] if is_pending else 0,
                    "timestamp": timestamp_ms,
                    "timestamp_iso": datetime.fromtimestamp(timestamp_ms / 1000).isoformat() if timestamp_ms else None
                })

            logger.info(f"Retrieved {len(tasks)} tasks from queue {queue_name} (reverse={reverse})")
            return tasks

        except Exception as e:
            logger.error(f"Error getting queue tasks for {queue_name}: {e}", exc_info=True)
            return []
@@ -0,0 +1,452 @@
|
|
1
|
+
"""
|
2
|
+
PostgreSQL 时间轴服务
|
3
|
+
|
4
|
+
从 PostgreSQL 数据库获取任务时间分布数据
|
5
|
+
"""
|
6
|
+
import logging
|
7
|
+
from datetime import datetime, timedelta, timezone
|
8
|
+
from typing import Dict, List, Any, Optional
|
9
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
10
|
+
from sqlalchemy import text
|
11
|
+
from sqlalchemy.dialects import postgresql
|
12
|
+
|
13
|
+
logger = logging.getLogger(__name__)
|
14
|
+
|
15
|
+
|
16
|
+
class TimelinePgService:
    """PostgreSQL timeline service.

    Aggregates rows of the ``tasks`` table into fixed-width time buckets to
    drive timeline charts, for a single queue or several queues at once.
    """

    def __init__(self):
        # Stateless service; sessions are passed into each call.
        pass

    @staticmethod
    def parse_iso_datetime(time_str: str) -> datetime:
        """Parse an ISO-format timestamp string and return a UTC datetime."""
        if time_str.endswith('Z'):
            # A trailing 'Z' means UTC.
            dt = datetime.fromisoformat(time_str.replace('Z', '+00:00'))
        else:
            dt = datetime.fromisoformat(time_str)

        if dt.tzinfo is None:
            # Naive timestamps are assumed to already be UTC.
            dt = dt.replace(tzinfo=timezone.utc)
        elif dt.tzinfo != timezone.utc:
            # Any other timezone is converted to UTC.
            dt = dt.astimezone(timezone.utc)

        return dt

    @staticmethod
    def parse_interval(interval: str) -> int:
        """Parse an interval string ('5m', '1h', '90s', ...) into whole minutes.

        Sub-minute values collapse to 1 minute; unrecognized suffixes fall
        back to 5 minutes.
        """
        # BUGFIX: recognize the 'ms' suffix that calculate_auto_interval
        # itself emits ('500ms') before the generic 's' branch, which would
        # otherwise attempt int('500m') and raise ValueError.
        if interval.endswith('ms'):
            return 1
        if interval.endswith('m'):
            return int(interval[:-1])
        elif interval.endswith('h'):
            return int(interval[:-1]) * 60
        elif interval.endswith('s'):
            return int(interval[:-1]) // 60 if int(interval[:-1]) >= 60 else 1
        else:
            return 5  # default: 5 minutes

    @staticmethod
    def calculate_auto_interval(duration_seconds: float) -> tuple:
        """
        Pick a sensible bucket width for the given time span.

        Returns:
            tuple: (interval_seconds, interval_type, interval_str)
        """
        if duration_seconds <= 300:  # <= 5 minutes
            return 0.5, 'millisecond', '500ms'
        elif duration_seconds <= 900:  # <= 15 minutes
            return 1, 'second', '1s'
        elif duration_seconds <= 1800:  # <= 30 minutes
            return 2, 'second', '2s'
        elif duration_seconds <= 3600:  # <= 1 hour
            return 30, 'second', '30s'
        elif duration_seconds <= 10800:  # <= 3 hours
            return 300, 'minute', '5m'
        elif duration_seconds <= 21600:  # <= 6 hours
            return 600, 'minute', '10m'
        elif duration_seconds <= 43200:  # <= 12 hours
            return 1800, 'minute', '30m'
        elif duration_seconds <= 86400:  # <= 24 hours
            return 3600, 'hour', '1h'
        elif duration_seconds <= 172800:  # <= 2 days
            return 7200, 'hour', '2h'
        elif duration_seconds <= 604800:  # <= 7 days
            return 21600, 'hour', '6h'
        else:  # > 7 days
            return 86400, 'hour', '24h'

    @staticmethod
    def align_to_interval(dt: datetime, interval_seconds: float) -> datetime:
        """Floor *dt* down to the nearest multiple of *interval_seconds*."""
        if interval_seconds >= 3600:  # >= 1 hour
            # Align on hour multiples.
            dt = dt.replace(minute=0, second=0, microsecond=0)
            interval_hours = int(interval_seconds // 3600)
            aligned_hour = (dt.hour // interval_hours) * interval_hours
            return dt.replace(hour=aligned_hour)
        elif interval_seconds >= 60:  # >= 1 minute
            # Align on minute multiples (may cross the hour boundary,
            # hence the total-minutes arithmetic).
            dt = dt.replace(second=0, microsecond=0)
            interval_minutes = int(interval_seconds // 60)
            total_minutes = dt.hour * 60 + dt.minute
            aligned_total_minutes = (total_minutes // interval_minutes) * interval_minutes
            aligned_hour = aligned_total_minutes // 60
            aligned_minute = aligned_total_minutes % 60
            return dt.replace(hour=aligned_hour, minute=aligned_minute)
        elif interval_seconds >= 1:  # second granularity
            dt = dt.replace(microsecond=0)
            aligned_second = int(dt.second // interval_seconds) * int(interval_seconds)
            return dt.replace(second=aligned_second)
        else:  # millisecond granularity
            total_ms = dt.microsecond / 1000  # convert to milliseconds
            interval_ms = interval_seconds * 1000
            aligned_ms = int(total_ms // interval_ms) * interval_ms
            aligned_microsecond = int(aligned_ms * 1000)
            return dt.replace(microsecond=aligned_microsecond)

    async def get_single_queue_timeline(
        self,
        session: "AsyncSession",
        queue_name: str,
        start_time: Optional[str] = None,
        end_time: Optional[str] = None,
        interval: str = "5m"
    ) -> Dict[str, Any]:
        """
        Get timeline data for a single queue.

        Args:
            session: Database session.
            queue_name: Queue name.
            start_time: Start time (ISO string; default: one hour before end).
            end_time: End time (ISO string; default: now, UTC).
            interval: Bucket width (e.g. "5m", "1h").

        Returns:
            Timeline data dict; on query failure an empty timeline with an
            "error" field is returned instead of raising.
        """
        # Resolve the time window (defaults: the last hour, ending now).
        if not end_time:
            end_dt = datetime.now(timezone.utc)
        else:
            end_dt = self.parse_iso_datetime(end_time)

        if not start_time:
            start_dt = end_dt - timedelta(hours=1)
        else:
            start_dt = self.parse_iso_datetime(start_time)

        interval_minutes = self.parse_interval(interval)

        try:
            # Raw SQL because of the modulo-based time bucketing.
            # interval_minutes is always an int from parse_interval, so the
            # f-string interpolation cannot inject SQL.
            query = text(f"""
                SELECT
                    DATE_TRUNC('minute', created_at) -
                    INTERVAL '{interval_minutes} minutes' * (EXTRACT(MINUTE FROM created_at)::int % {interval_minutes}) as time_bucket,
                    COUNT(*) as count,
                    SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed_count,
                    SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed_count,
                    AVG(CASE WHEN status = 'completed' AND processing_time IS NOT NULL
                        THEN processing_time ELSE NULL END) as avg_processing_time
                FROM tasks
                WHERE queue_name = :queue_name
                    AND created_at >= :start_dt
                    AND created_at < :end_dt
                GROUP BY time_bucket
                ORDER BY time_bucket
            """)

            result = await session.execute(query, {
                'queue_name': queue_name,
                'start_dt': start_dt,
                'end_dt': end_dt
            })
            rows = result.mappings().all()

            # Materialize one point per returned bucket.
            timeline = []
            for row in rows:
                timeline.append({
                    "time": row['time_bucket'].isoformat(),
                    "count": row['count'],
                    "completed_count": row['completed_count'],
                    "failed_count": row['failed_count'],
                    "avg_processing_time": float(row['avg_processing_time']) if row['avg_processing_time'] else 0
                })

            # Fill gaps with zero buckets.
            filled_timeline = []
            # BUGFIX: start from the interval-aligned boundary (matching the
            # SQL bucketing) instead of the raw start_dt, otherwise the keys
            # below can never line up with the DB buckets. This mirrors what
            # get_multiple_queues_timeline already does.
            current_time = self.align_to_interval(start_dt, interval_minutes * 60)
            timeline_dict = {item['time']: item for item in timeline}
            # NOTE(review): DB buckets may be tz-naive while current_time is
            # UTC-aware, in which case the isoformat keys still differ —
            # confirm created_at is timestamptz.

            while current_time < end_dt:
                time_key = current_time.isoformat()
                if time_key in timeline_dict:
                    filled_timeline.append(timeline_dict[time_key])
                else:
                    filled_timeline.append({
                        "time": time_key,
                        "count": 0,
                        "completed_count": 0,
                        "failed_count": 0,
                        "avg_processing_time": 0
                    })
                current_time += timedelta(minutes=interval_minutes)

            return {
                "timeline": filled_timeline,
                "interval": interval,
                "start_time": start_dt.isoformat(),
                "end_time": end_dt.isoformat()
            }

        except Exception as e:
            logger.error(f"Error fetching timeline from PostgreSQL: {e}")
            return {
                "timeline": [],
                "interval": interval,
                "start_time": start_dt.isoformat(),
                "end_time": end_dt.isoformat(),
                "error": str(e)
            }

    def _build_time_bucket_query(
        self,
        interval_type: str,
        interval_seconds: float,
        interval_minutes: float
    ) -> Any:
        """Build the bucketed count query for the given granularity.

        Returns a SQLAlchemy TextClause. (Annotation fixed: the previous
        ``-> text`` annotated with the factory function, not a type.)
        The interpolated values are numbers computed by
        calculate_auto_interval, so they cannot inject SQL.
        """
        if interval_type == 'millisecond':
            return text(f"""
                SELECT
                    DATE_TRUNC('second', created_at) +
                    INTERVAL '{interval_seconds} seconds' * FLOOR(EXTRACT(MILLISECONDS FROM created_at) / ({interval_seconds} * 1000)) as time_bucket,
                    COUNT(*) as count
                FROM tasks
                WHERE queue_name = :queue_name
                    AND created_at >= :start_dt
                    AND created_at < :end_dt
                GROUP BY time_bucket
                ORDER BY time_bucket
            """)
        elif interval_type == 'second':
            return text(f"""
                SELECT
                    DATE_TRUNC('minute', created_at) +
                    INTERVAL '{interval_seconds} seconds' * FLOOR(EXTRACT(SECOND FROM created_at) / {interval_seconds}) as time_bucket,
                    COUNT(*) as count
                FROM tasks
                WHERE queue_name = :queue_name
                    AND created_at >= :start_dt
                    AND created_at < :end_dt
                GROUP BY time_bucket
                ORDER BY time_bucket
            """)
        elif interval_type == 'minute' and interval_minutes < 60:
            return text(f"""
                SELECT
                    DATE_TRUNC('hour', created_at) +
                    INTERVAL '{interval_minutes} minutes' * FLOOR(EXTRACT(MINUTE FROM created_at) / {interval_minutes}) as time_bucket,
                    COUNT(*) as count
                FROM tasks
                WHERE queue_name = :queue_name
                    AND created_at >= :start_dt
                    AND created_at < :end_dt
                GROUP BY time_bucket
                ORDER BY time_bucket
            """)
        elif interval_minutes == 60:
            return text("""
                SELECT
                    DATE_TRUNC('hour', created_at) as time_bucket,
                    COUNT(*) as count
                FROM tasks
                WHERE queue_name = :queue_name
                    AND created_at >= :start_dt
                    AND created_at < :end_dt
                GROUP BY time_bucket
                ORDER BY time_bucket
            """)
        else:
            # Multi-hour buckets.
            interval_hours = int(interval_minutes // 60)
            return text(f"""
                SELECT
                    DATE_TRUNC('day', created_at) +
                    INTERVAL '{interval_hours} hours' * FLOOR(EXTRACT(HOUR FROM created_at) / {interval_hours}) as time_bucket,
                    COUNT(*) as count
                FROM tasks
                WHERE queue_name = :queue_name
                    AND created_at >= :start_dt
                    AND created_at < :end_dt
                GROUP BY time_bucket
                ORDER BY time_bucket
            """)

    async def get_multiple_queues_timeline(
        self,
        session_factory,
        queues: str,
        start_time: Optional[str] = None,
        end_time: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Get timeline data for several queues at once.

        Args:
            session_factory: Async session factory (called per queue).
            queues: Comma-separated queue names (at most 10 are used).
            start_time: Start time (ISO string; default: one hour before end).
            end_time: End time (ISO string; default: now, UTC).

        Returns:
            Multi-queue timeline data; per-queue failures are reported in
            that queue's entry instead of failing the whole call.
        """
        # No queues selected: return an empty result with the resolved window.
        if not queues or queues.strip() == "":
            end_dt = datetime.now(timezone.utc) if not end_time else self.parse_iso_datetime(end_time)
            start_dt = (end_dt - timedelta(hours=1)) if not start_time else self.parse_iso_datetime(start_time)

            return {
                "queues": [],
                "start_time": start_dt.isoformat(),
                "end_time": end_dt.isoformat(),
                "interval": "5m",
                "message": "No queues selected"
            }

        queue_list = [q.strip() for q in queues.split(',') if q.strip()][:10]  # cap at 10 queues

        # Resolve the time window (defaults: the last hour, ending now).
        if not end_time:
            end_dt = datetime.now(timezone.utc)
        else:
            end_dt = self.parse_iso_datetime(end_time)

        if not start_time:
            start_dt = end_dt - timedelta(hours=1)
        else:
            start_dt = self.parse_iso_datetime(start_time)

        logger.info(f'start_dt={start_dt} end_dt={end_dt}')

        # Derive the bucket width automatically from the span length.
        duration = (end_dt - start_dt).total_seconds()
        interval_seconds, interval_type, interval = self.calculate_auto_interval(duration)
        interval_minutes = interval_seconds / 60

        logger.info(f"Time range: {duration}s, using interval: {interval} -> {interval_seconds} seconds, type: {interval_type}")

        result = []

        for queue_name in queue_list:
            try:
                async with session_factory() as session:
                    # Build and run the bucketed count query for this queue.
                    query = self._build_time_bucket_query(interval_type, interval_seconds, interval_minutes)

                    params = {
                        'queue_name': queue_name,
                        'start_dt': start_dt,
                        'end_dt': end_dt
                    }

                    result_obj = await session.execute(query, params)
                    rows = result_obj.mappings().all()
                    logger.info(f'rows={rows}')

                    # Materialize the returned buckets.
                    timeline = []
                    for row in rows:
                        timeline.append({
                            "time": row['time_bucket'].isoformat(),
                            "count": row['count']
                        })

                    # Convert to (datetime, count) pairs for gap filling.
                    timeline_data = []
                    for item in timeline:
                        dt = datetime.fromisoformat(item['time'])
                        timeline_data.append((dt, item['count']))

                    # Sort chronologically.
                    timeline_data.sort(key=lambda x: x[0])

                    # Generate the complete, gap-free time series.
                    filled_timeline = []
                    current_time = self.align_to_interval(start_dt, interval_seconds)
                    timeline_index = 0

                    while current_time < end_dt:
                        # Match DB buckets to grid points within half an
                        # interval to tolerate alignment differences.
                        tolerance = timedelta(seconds=interval_seconds/2)
                        found = False

                        # Scan forward from the current data position.
                        while timeline_index < len(timeline_data):
                            data_time, count = timeline_data[timeline_index]

                            # Absolute distance to the current grid point.
                            time_diff = abs((data_time - current_time).total_seconds())

                            if time_diff < interval_seconds / 2:
                                # Close enough: take this data point.
                                filled_timeline.append({
                                    "time": current_time.isoformat(),
                                    "count": count
                                })
                                found = True
                                timeline_index += 1
                                break
                            elif data_time > current_time + tolerance:
                                # Data is already past this grid point; stop.
                                break
                            else:
                                # Data point is too early; skip it.
                                timeline_index += 1

                        if not found:
                            # No matching data: fill with zero.
                            filled_timeline.append({
                                "time": current_time.isoformat(),
                                "count": 0
                            })

                        current_time += timedelta(seconds=interval_seconds)

                    result.append({
                        "queue": queue_name,
                        "timeline": {
                            "timeline": filled_timeline,
                            "interval": interval
                        }
                    })

            except Exception as e:
                logger.error(f"Error fetching timeline for queue {queue_name}: {e}")
                result.append({
                    "queue": queue_name,
                    "timeline": {
                        "timeline": [],
                        "interval": interval,
                        "error": str(e)
                    }
                })

        return {
            "queues": result,
            "start_time": start_dt.isoformat(),
            "end_time": end_dt.isoformat(),
            "interval": interval
        }