jettask 0.2.1__py3-none-any.whl → 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89) hide show
  1. jettask/constants.py +213 -0
  2. jettask/core/app.py +525 -205
  3. jettask/core/cli.py +193 -185
  4. jettask/core/consumer_manager.py +126 -34
  5. jettask/core/context.py +3 -0
  6. jettask/core/enums.py +137 -0
  7. jettask/core/event_pool.py +501 -168
  8. jettask/core/message.py +147 -0
  9. jettask/core/offline_worker_recovery.py +181 -114
  10. jettask/core/task.py +10 -174
  11. jettask/core/task_batch.py +153 -0
  12. jettask/core/unified_manager_base.py +243 -0
  13. jettask/core/worker_scanner.py +54 -54
  14. jettask/executors/asyncio.py +184 -64
  15. jettask/webui/backend/config.py +51 -0
  16. jettask/webui/backend/data_access.py +2083 -92
  17. jettask/webui/backend/data_api.py +3294 -0
  18. jettask/webui/backend/dependencies.py +261 -0
  19. jettask/webui/backend/init_meta_db.py +158 -0
  20. jettask/webui/backend/main.py +1358 -69
  21. jettask/webui/backend/main_unified.py +78 -0
  22. jettask/webui/backend/main_v2.py +394 -0
  23. jettask/webui/backend/namespace_api.py +295 -0
  24. jettask/webui/backend/namespace_api_old.py +294 -0
  25. jettask/webui/backend/namespace_data_access.py +611 -0
  26. jettask/webui/backend/queue_backlog_api.py +727 -0
  27. jettask/webui/backend/queue_stats_v2.py +521 -0
  28. jettask/webui/backend/redis_monitor_api.py +476 -0
  29. jettask/webui/backend/unified_api_router.py +1601 -0
  30. jettask/webui/db_init.py +204 -32
  31. jettask/webui/frontend/package-lock.json +492 -1
  32. jettask/webui/frontend/package.json +4 -1
  33. jettask/webui/frontend/src/App.css +105 -7
  34. jettask/webui/frontend/src/App.jsx +49 -20
  35. jettask/webui/frontend/src/components/NamespaceSelector.jsx +166 -0
  36. jettask/webui/frontend/src/components/QueueBacklogChart.jsx +298 -0
  37. jettask/webui/frontend/src/components/QueueBacklogTrend.jsx +638 -0
  38. jettask/webui/frontend/src/components/QueueDetailsTable.css +65 -0
  39. jettask/webui/frontend/src/components/QueueDetailsTable.jsx +487 -0
  40. jettask/webui/frontend/src/components/QueueDetailsTableV2.jsx +465 -0
  41. jettask/webui/frontend/src/components/ScheduledTaskFilter.jsx +423 -0
  42. jettask/webui/frontend/src/components/TaskFilter.jsx +425 -0
  43. jettask/webui/frontend/src/components/TimeRangeSelector.css +21 -0
  44. jettask/webui/frontend/src/components/TimeRangeSelector.jsx +160 -0
  45. jettask/webui/frontend/src/components/layout/AppLayout.css +95 -0
  46. jettask/webui/frontend/src/components/layout/AppLayout.jsx +49 -0
  47. jettask/webui/frontend/src/components/layout/Header.css +34 -10
  48. jettask/webui/frontend/src/components/layout/Header.jsx +31 -23
  49. jettask/webui/frontend/src/components/layout/SideMenu.css +137 -0
  50. jettask/webui/frontend/src/components/layout/SideMenu.jsx +209 -0
  51. jettask/webui/frontend/src/components/layout/TabsNav.css +244 -0
  52. jettask/webui/frontend/src/components/layout/TabsNav.jsx +206 -0
  53. jettask/webui/frontend/src/components/layout/UserInfo.css +197 -0
  54. jettask/webui/frontend/src/components/layout/UserInfo.jsx +197 -0
  55. jettask/webui/frontend/src/contexts/NamespaceContext.jsx +72 -0
  56. jettask/webui/frontend/src/contexts/TabsContext.backup.jsx +245 -0
  57. jettask/webui/frontend/src/main.jsx +1 -0
  58. jettask/webui/frontend/src/pages/Alerts.jsx +684 -0
  59. jettask/webui/frontend/src/pages/Dashboard.jsx +1330 -0
  60. jettask/webui/frontend/src/pages/QueueDetail.jsx +1109 -10
  61. jettask/webui/frontend/src/pages/QueueMonitor.jsx +236 -115
  62. jettask/webui/frontend/src/pages/Queues.jsx +5 -1
  63. jettask/webui/frontend/src/pages/ScheduledTasks.jsx +809 -0
  64. jettask/webui/frontend/src/pages/Settings.jsx +800 -0
  65. jettask/webui/frontend/src/services/api.js +7 -5
  66. jettask/webui/frontend/src/utils/suppressWarnings.js +22 -0
  67. jettask/webui/frontend/src/utils/userPreferences.js +154 -0
  68. jettask/webui/multi_namespace_consumer.py +543 -0
  69. jettask/webui/pg_consumer.py +983 -246
  70. jettask/webui/static/dist/assets/index-7129cfe1.css +1 -0
  71. jettask/webui/static/dist/assets/index-8d1935cc.js +774 -0
  72. jettask/webui/static/dist/index.html +2 -2
  73. jettask/webui/task_center.py +216 -0
  74. jettask/webui/task_center_client.py +150 -0
  75. jettask/webui/unified_consumer_manager.py +193 -0
  76. {jettask-0.2.1.dist-info → jettask-0.2.4.dist-info}/METADATA +1 -1
  77. jettask-0.2.4.dist-info/RECORD +134 -0
  78. jettask/webui/pg_consumer_slow.py +0 -1099
  79. jettask/webui/pg_consumer_test.py +0 -678
  80. jettask/webui/static/dist/assets/index-823408e8.css +0 -1
  81. jettask/webui/static/dist/assets/index-9968b0b8.js +0 -543
  82. jettask/webui/test_pg_consumer_recovery.py +0 -547
  83. jettask/webui/test_recovery_simple.py +0 -492
  84. jettask/webui/test_self_recovery.py +0 -467
  85. jettask-0.2.1.dist-info/RECORD +0 -91
  86. {jettask-0.2.1.dist-info → jettask-0.2.4.dist-info}/WHEEL +0 -0
  87. {jettask-0.2.1.dist-info → jettask-0.2.4.dist-info}/entry_points.txt +0 -0
  88. {jettask-0.2.1.dist-info → jettask-0.2.4.dist-info}/licenses/LICENSE +0 -0
  89. {jettask-0.2.1.dist-info → jettask-0.2.4.dist-info}/top_level.txt +0 -0
@@ -1,13 +1,15 @@
1
1
  """
2
2
  独立的数据访问模块,不依赖 integrated_gradio_app.py
3
3
  """
4
+ import os
4
5
  import asyncio
5
6
  import json
6
7
  import logging
8
+ import time
7
9
  from datetime import datetime, timedelta, timezone
8
10
  from typing import Dict, List, Optional, Tuple
9
11
  import redis.asyncio as redis
10
- from sqlalchemy import text
12
+ from sqlalchemy import text, bindparam
11
13
  from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
12
14
  from sqlalchemy.orm import sessionmaker
13
15
 
@@ -65,10 +67,12 @@ class JetTaskDataAccess:
65
67
  def __init__(self):
66
68
  self.redis_config = RedisConfig.from_env()
67
69
  self.pg_config = PostgreSQLConfig.from_env()
68
- self.redis_prefix = "jettask"
70
+ # Redis前缀可以从环境变量配置,默认为 "jettask"
71
+ self.redis_prefix = os.environ.get('JETTASK_REDIS_PREFIX', 'jettask')
69
72
  self.async_engine = None
70
73
  self.AsyncSessionLocal = None
71
74
  self._redis_pool = None
75
+ self._binary_redis_pool = None # 用于处理包含二进制数据的Stream
72
76
 
73
77
  async def initialize(self):
74
78
  """初始化数据库连接"""
@@ -92,14 +96,29 @@ class JetTaskDataAccess:
92
96
  expire_on_commit=False
93
97
  )
94
98
 
95
- # 初始化Redis连接池
99
+ # 初始化Redis连接池(用于普通操作)
96
100
  self._redis_pool = redis.ConnectionPool(
97
101
  host=self.redis_config.host,
98
102
  port=self.redis_config.port,
99
103
  db=self.redis_config.db,
100
104
  password=self.redis_config.password,
101
105
  encoding='utf-8',
102
- decode_responses=True
106
+ decode_responses=True,
107
+ socket_keepalive=True,
108
+ socket_connect_timeout=5,
109
+ retry_on_timeout=True
110
+ )
111
+
112
+ # 初始化二进制Redis连接池(用于Stream操作)
113
+ self._binary_redis_pool = redis.ConnectionPool(
114
+ host=self.redis_config.host,
115
+ port=self.redis_config.port,
116
+ db=self.redis_config.db,
117
+ password=self.redis_config.password,
118
+ decode_responses=False, # 不解码,因为Stream包含msgpack二进制数据
119
+ socket_keepalive=True,
120
+ socket_connect_timeout=5,
121
+ retry_on_timeout=True
103
122
  )
104
123
 
105
124
  logger.info("数据库连接初始化成功")
@@ -108,21 +127,32 @@ class JetTaskDataAccess:
108
127
  logger.error(f"数据库连接初始化失败: {e}")
109
128
  raise
110
129
 
130
+ def get_session(self):
131
+ """获取数据库会话(作为上下文管理器)"""
132
+ return self.AsyncSessionLocal()
133
+
111
134
  async def close(self):
112
135
  """关闭数据库连接"""
113
136
  if self.async_engine:
114
137
  await self.async_engine.dispose()
115
138
  if self._redis_pool:
116
139
  await self._redis_pool.disconnect()
140
+ if self._binary_redis_pool:
141
+ await self._binary_redis_pool.disconnect()
117
142
 
118
143
  async def get_redis_client(self):
119
144
  """获取Redis客户端"""
120
145
  return redis.Redis(connection_pool=self._redis_pool)
121
146
 
147
+ async def get_binary_redis_client(self):
148
+ """获取二进制Redis客户端(用于Stream操作)"""
149
+ return redis.Redis(connection_pool=self._binary_redis_pool)
150
+
122
151
  async def fetch_queues_data(self) -> List[Dict]:
123
152
  """获取队列数据(基于Redis Stream)"""
124
153
  try:
125
154
  redis_client = await self.get_redis_client()
155
+ binary_redis_client = await self.get_binary_redis_client() # 用于Stream操作
126
156
 
127
157
  # 获取所有Stream类型的队列 - JetTask使用 jettask:QUEUE:队列名 格式
128
158
  all_keys = await redis_client.keys(f"{self.redis_prefix}:QUEUE:*")
@@ -144,11 +174,25 @@ class JetTaskDataAccess:
144
174
  stream_key = f"{self.redis_prefix}:QUEUE:{queue_name}"
145
175
 
146
176
  try:
147
- # 获取Stream信息
148
- stream_info = await redis_client.xinfo_stream(stream_key)
177
+ # 使用二进制客户端获取Stream信息
178
+ stream_info = await binary_redis_client.xinfo_stream(stream_key)
179
+ # 直接提取需要的字段(字符串键)
180
+ stream_length = stream_info.get('length', 0)
149
181
 
150
182
  # 获取消费者组信息
151
- groups_info = await redis_client.xinfo_groups(stream_key)
183
+ groups_info = []
184
+ try:
185
+ groups_info_raw = await binary_redis_client.xinfo_groups(stream_key)
186
+ for group in groups_info_raw:
187
+ group_name = group.get('name', '')
188
+ if isinstance(group_name, bytes):
189
+ group_name = group_name.decode('utf-8')
190
+ groups_info.append({
191
+ 'name': group_name,
192
+ 'pending': group.get('pending', 0)
193
+ })
194
+ except:
195
+ pass
152
196
 
153
197
  pending_count = 0
154
198
  processing_count = 0
@@ -158,15 +202,19 @@ class JetTaskDataAccess:
158
202
  pending_count += group.get('pending', 0)
159
203
 
160
204
  # 获取消费者信息
161
- try:
162
- consumers = await redis_client.xinfo_consumers(stream_key, group['name'])
163
- for consumer in consumers:
164
- processing_count += consumer.get('pending', 0)
165
- except:
166
- pass
205
+ if group.get('name'):
206
+ try:
207
+ consumers = await binary_redis_client.xinfo_consumers(
208
+ stream_key,
209
+ group['name']
210
+ )
211
+ for consumer in consumers:
212
+ processing_count += consumer.get('pending', 0)
213
+ except:
214
+ pass
167
215
 
168
216
  # Stream的长度即为总消息数
169
- total_messages = stream_info.get('length', 0)
217
+ total_messages = stream_length if 'stream_length' in locals() else 0
170
218
 
171
219
  # 完成的消息数 = 总消息数 - 待处理 - 处理中
172
220
  completed_count = max(0, total_messages - pending_count - processing_count)
@@ -193,114 +241,1013 @@ class JetTaskDataAccess:
193
241
  })
194
242
 
195
243
  await redis_client.close()
244
+ await binary_redis_client.close()
196
245
  return sorted(queues_data, key=lambda x: x['队列名称'])
197
246
 
198
247
  except Exception as e:
199
248
  logger.error(f"获取队列数据失败: {e}")
200
249
  return []
201
250
 
202
- async def fetch_queue_timeline_data(self,
203
- queues: List[str],
204
- start_time: datetime,
205
- end_time: datetime) -> List[Dict]:
206
- """获取队列时间线数据 - 返回真实的任务触发时间"""
251
+ async def fetch_queue_details(self, start_time: datetime = None, end_time: datetime = None,
252
+ time_range_minutes: int = None, queues: List[str] = None) -> List[Dict]:
253
+ """获取队列详细信息,包含消费速度、在线workers等
254
+
255
+ Args:
256
+ start_time: 开始时间(优先使用)
257
+ end_time: 结束时间(优先使用)
258
+ time_range_minutes: 时间范围(分钟),仅在没有指定start_time/end_time时使用
259
+ queues: 要筛选的队列列表,如果为None则返回所有队列
260
+ """
261
+ # 确定时间范围
262
+ if start_time and end_time:
263
+ # 使用指定的时间范围
264
+ query_start_time = start_time
265
+ query_end_time = end_time
266
+ elif time_range_minutes:
267
+ # 向后兼容:使用最近N分钟
268
+ query_end_time = datetime.now(timezone.utc)
269
+ query_start_time = query_end_time - timedelta(minutes=time_range_minutes)
270
+ else:
271
+ # 默认最近15分钟
272
+ query_end_time = datetime.now(timezone.utc)
273
+ query_start_time = query_end_time - timedelta(minutes=15)
274
+
275
+ try:
276
+ redis_client = await self.get_redis_client()
277
+
278
+ # 获取所有队列名称
279
+ all_keys = await redis_client.keys(f"{self.redis_prefix}:QUEUE:*")
280
+ queue_details = []
281
+
282
+ for key in all_keys:
283
+ # 检查是否是Stream类型
284
+ key_type = await redis_client.type(key)
285
+ if key_type == 'stream':
286
+ # 解析队列名称
287
+ parts = key.split(':')
288
+ if len(parts) >= 3 and parts[0] == self.redis_prefix and parts[1] == 'QUEUE':
289
+ queue_name = ':'.join(parts[2:])
290
+
291
+ # 如果指定了队列筛选,检查当前队列是否在筛选列表中
292
+ if queues and queue_name not in queues:
293
+ continue
294
+
295
+ # 获取活跃的workers数量
296
+ active_workers = 0
297
+ try:
298
+ worker_keys = await redis_client.keys(f"{self.redis_prefix}:WORKER:*")
299
+ for worker_key in worker_keys:
300
+ worker_info = await redis_client.hgetall(worker_key)
301
+ if worker_info:
302
+ last_heartbeat = worker_info.get('last_heartbeat')
303
+ if last_heartbeat:
304
+ try:
305
+ heartbeat_time = float(last_heartbeat)
306
+ if time.time() - heartbeat_time < 60:
307
+ worker_queues = worker_info.get('queues', '')
308
+ if queue_name in worker_queues:
309
+ active_workers += 1
310
+ except:
311
+ pass
312
+ except:
313
+ pass
314
+
315
+ # 从PostgreSQL获取队列统计信息
316
+ total_messages = 0
317
+ visible_messages = 0
318
+ completed_count = 0
319
+ failed_count = 0
320
+ consumption_rate = 0
321
+ success_rate = 0
322
+
323
+ if self.AsyncSessionLocal:
324
+ try:
325
+ async with self.AsyncSessionLocal() as session:
326
+ # 获取指定时间范围的所有统计数据
327
+ query = text("""
328
+ SELECT
329
+ COUNT(*) as total,
330
+ COUNT(CASE WHEN status = 'pending' THEN 1 END) as pending_count,
331
+ COUNT(CASE WHEN status = 'success' THEN 1 END) as completed,
332
+ COUNT(CASE WHEN status = 'error' THEN 1 END) as failed
333
+ FROM tasks
334
+ WHERE queue = :queue_name
335
+ AND created_at >= :start_time
336
+ AND created_at <= :end_time
337
+ """)
338
+ result = await session.execute(query, {
339
+ 'queue_name': queue_name,
340
+ 'start_time': query_start_time,
341
+ 'end_time': query_end_time
342
+ })
343
+ row = result.first()
344
+ if row:
345
+ total_messages = row.total or 0
346
+ visible_messages = row.pending_count or 0
347
+ completed_count = row.completed or 0
348
+ failed_count = row.failed or 0
349
+
350
+ # 计算消费速度(任务/分钟)
351
+ time_diff_minutes = (query_end_time - query_start_time).total_seconds() / 60
352
+ if time_diff_minutes > 0:
353
+ consumption_rate = round(total_messages / time_diff_minutes, 2)
354
+
355
+ # 计算成功率
356
+ if total_messages > 0:
357
+ success_rate = round((completed_count / total_messages) * 100, 2)
358
+ except Exception as e:
359
+ logger.warning(f"获取队列 {queue_name} 统计信息失败: {e}")
360
+
361
+ # 队列状态
362
+ queue_status = 'active' if total_messages > 0 or active_workers > 0 else 'idle'
363
+
364
+ queue_details.append({
365
+ 'queue_name': queue_name,
366
+ 'message_count': total_messages, # 总消息数量(基于时间范围)
367
+ 'visible_messages': visible_messages, # 可见消息(基于时间范围,status='pending')
368
+ 'invisible_messages': 0, # 不可见消息(现在设为0,不从Redis获取)
369
+ 'completed': completed_count, # 成功数(基于时间范围,status='success')
370
+ 'failed': failed_count, # 失败数(基于时间范围,status='error')
371
+ 'consumption_rate': consumption_rate, # 消费速度(任务/分钟)
372
+ 'success_rate': success_rate, # 成功率(百分比)
373
+ 'active_workers': active_workers, # 在线workers
374
+ 'queue_status': queue_status # 队列状态
375
+ })
376
+
377
+ await redis_client.close()
378
+ return sorted(queue_details, key=lambda x: x['queue_name'])
379
+
380
+ except Exception as e:
381
+ logger.error(f"获取队列详细信息失败: {e}")
382
+ return []
383
+
384
+ async def get_latest_task_time(self, queue_name: str) -> Optional[datetime]:
385
+ """获取队列的最新任务时间"""
207
386
  try:
208
- await asyncio.sleep(1)
209
387
  if not self.AsyncSessionLocal:
210
388
  await self.initialize()
211
389
 
212
390
  async with self.AsyncSessionLocal() as session:
213
- # 构建SQL查询
214
- queue_names_str = "', '".join(queues)
391
+ query = text("""
392
+ SELECT MAX(created_at) as latest_time
393
+ FROM tasks
394
+ WHERE queue = :queue_name
395
+ """)
215
396
 
216
- # 计算时间跨度
217
- duration = (end_time - start_time).total_seconds()
397
+ result = await session.execute(query, {'queue_name': queue_name})
398
+ row = result.fetchone()
218
399
 
219
- # 根据时间跨度决定聚合粒度
220
- # 短时间范围:返回每个任务的真实时间
221
- # 长时间范围:按适当的时间窗口聚合
400
+ if row and row.latest_time:
401
+ return row.latest_time
402
+ return None
222
403
 
223
- if duration <= 3600: # 1小时以内,返回每个任务的真实时间
224
- print(f'一小时内')
225
- # 查询每个任务的真实创建时间
226
- query = text(f"""
404
+ except Exception as e:
405
+ logger.error(f"获取最新任务时间失败: {e}")
406
+ return None
407
+
408
+ async def fetch_task_details(self, task_id: str, consumer_group: Optional[str] = None) -> Optional[Dict]:
409
+ """获取单个任务的详细数据(包括task_data、result和error_message)
410
+
411
+ Args:
412
+ task_id: 任务ID (stream_id)
413
+ consumer_group: 消费者组名称(可选,用于精确定位)
414
+ """
415
+ try:
416
+ if not self.AsyncSessionLocal:
417
+ await self.initialize()
418
+
419
+ async with self.AsyncSessionLocal() as session:
420
+ # 根据是否提供consumer_group来调整查询
421
+ if consumer_group:
422
+ # 如果提供了consumer_group,精确查询特定消费组的执行结果
423
+ query = text("""
227
424
  SELECT
228
- created_at as time_point,
229
- queue_name,
230
- COUNT(*) OVER (
231
- PARTITION BY queue_name, created_at
232
- ) as task_count
233
- FROM tasks
234
- WHERE queue_name IN ('{queue_names_str}')
235
- AND created_at >= :start_time
236
- AND created_at <= :end_time
237
- ORDER BY created_at, queue_name
425
+ t.stream_id as id,
426
+ t.payload as task_data,
427
+ tr.consumer_group,
428
+ tr.result,
429
+ tr.error_message
430
+ FROM tasks t
431
+ LEFT JOIN task_runs tr ON t.stream_id = tr.stream_id
432
+ AND tr.consumer_group = :consumer_group
433
+ WHERE t.stream_id = :task_id
434
+ LIMIT 1
238
435
  """)
239
-
240
- elif duration <= 86400: # 1天以内,按分钟聚合
241
- query = text(f"""
436
+ params = {'task_id': task_id, 'consumer_group': consumer_group}
437
+ else:
438
+ # 如果没有提供consumer_group,返回第一个找到的结果(向后兼容)
439
+ query = text("""
242
440
  SELECT
243
- date_trunc('minute', created_at) as time_point,
244
- queue_name,
245
- COUNT(*) as task_count
246
- FROM tasks
247
- WHERE queue_name IN ('{queue_names_str}')
248
- AND created_at >= :start_time
249
- AND created_at <= :end_time
250
- GROUP BY time_point, queue_name
251
- ORDER BY time_point, queue_name
441
+ t.stream_id as id,
442
+ t.payload as task_data,
443
+ tr.consumer_group,
444
+ tr.result,
445
+ tr.error_message
446
+ FROM tasks t
447
+ LEFT JOIN task_runs tr ON t.stream_id = tr.stream_id
448
+ WHERE t.stream_id = :task_id
449
+ ORDER BY tr.updated_at DESC NULLS LAST
450
+ LIMIT 1
252
451
  """)
253
-
254
- elif duration <= 604800: # 7天以内,按小时聚合
255
- query = text(f"""
452
+ params = {'task_id': task_id}
453
+
454
+ result = await session.execute(query, params)
455
+ row = result.fetchone()
456
+
457
+ if row:
458
+ return {
459
+ 'id': row.id,
460
+ 'task_data': row.task_data,
461
+ 'consumer_group': row.consumer_group if hasattr(row, 'consumer_group') else None,
462
+ 'result': row.result,
463
+ 'error_message': row.error_message
464
+ }
465
+ return None
466
+
467
+ except Exception as e:
468
+ logger.error(f"获取任务详细数据失败: {e}")
469
+ return None
470
+
471
+ async def fetch_queue_flow_rates(self,
472
+ queue_name: str,
473
+ start_time: datetime,
474
+ end_time: datetime,
475
+ filters: List[Dict] = None) -> Tuple[List[Dict], str]:
476
+ """获取队列的三种流量速率:入队、开始执行、完成
477
+
478
+ Args:
479
+ queue_name: 队列名称
480
+ start_time: 开始时间
481
+ end_time: 结束时间
482
+ filters: 筛选条件列表,与fetch_tasks_with_filters的格式相同
483
+ """
484
+ try:
485
+ if not self.AsyncSessionLocal:
486
+ await self.initialize()
487
+ print(f'{filters=}')
488
+ async with self.AsyncSessionLocal() as session:
489
+ # 动态计算时间间隔,目标是生成约200个时间点
490
+ TARGET_POINTS = 200
491
+ duration = (end_time - start_time).total_seconds()
492
+
493
+ # 计算理想的间隔秒数
494
+ ideal_interval_seconds = duration / TARGET_POINTS
495
+ print(f'{duration=} {TARGET_POINTS=} {ideal_interval_seconds=}')
496
+ # 将间隔秒数规范化到合理的值(与fetch_queue_timeline_data保持一致)
497
+ if ideal_interval_seconds <= 1:
498
+ interval_seconds = 1
499
+ interval = '1 seconds'
500
+ granularity = 'second'
501
+ elif ideal_interval_seconds <= 5:
502
+ interval_seconds = 5
503
+ interval = '5 seconds'
504
+ granularity = 'second'
505
+ elif ideal_interval_seconds <= 10:
506
+ interval_seconds = 10
507
+ interval = '10 seconds'
508
+ granularity = 'second'
509
+ elif ideal_interval_seconds <= 30:
510
+ interval_seconds = 30
511
+ interval = '30 seconds'
512
+ granularity = 'second'
513
+ elif ideal_interval_seconds <= 60:
514
+ interval_seconds = 60
515
+ interval = '1 minute'
516
+ granularity = 'minute'
517
+ elif ideal_interval_seconds <= 120:
518
+ interval_seconds = 120
519
+ interval = '2 minutes'
520
+ granularity = 'minute'
521
+ elif ideal_interval_seconds <= 300:
522
+ interval_seconds = 300
523
+ interval = '5 minutes'
524
+ granularity = 'minute'
525
+ elif ideal_interval_seconds <= 600:
526
+ interval_seconds = 600
527
+ interval = '10 minutes'
528
+ granularity = 'minute'
529
+ elif ideal_interval_seconds <= 900:
530
+ interval_seconds = 900
531
+ interval = '15 minutes'
532
+ granularity = 'minute'
533
+ elif ideal_interval_seconds <= 1800:
534
+ interval_seconds = 1800
535
+ interval = '30 minutes'
536
+ granularity = 'minute'
537
+ elif ideal_interval_seconds <= 3600:
538
+ interval_seconds = 3600
539
+ interval = '1 hour'
540
+ granularity = 'hour'
541
+ elif ideal_interval_seconds <= 7200:
542
+ interval_seconds = 7200
543
+ interval = '2 hours'
544
+ granularity = 'hour'
545
+ elif ideal_interval_seconds <= 14400:
546
+ interval_seconds = 14400
547
+ interval = '4 hours'
548
+ granularity = 'hour'
549
+ elif ideal_interval_seconds <= 21600:
550
+ interval_seconds = 21600
551
+ interval = '6 hours'
552
+ granularity = 'hour'
553
+ elif ideal_interval_seconds <= 43200:
554
+ interval_seconds = 43200
555
+ interval = '12 hours'
556
+ granularity = 'hour'
557
+ else:
558
+ interval_seconds = 86400
559
+ interval = '1 day'
560
+ granularity = 'day'
561
+
562
+ # 重新计算实际点数
563
+ actual_points = int(duration / interval_seconds) + 1
564
+ logger.info(f"使用时间间隔: {interval_seconds}秒 ({interval}), 预计生成 {actual_points} 个时间点")
565
+
566
+ # 根据粒度确定 date_trunc 的单位
567
+ if granularity == 'second':
568
+ trunc_unit = 'second'
569
+ elif granularity == 'minute':
570
+ trunc_unit = 'minute'
571
+ elif granularity == 'hour':
572
+ trunc_unit = 'hour'
573
+ else: # day
574
+ trunc_unit = 'day'
575
+
576
+ # 构建筛选条件的WHERE子句
577
+ # 分别为tasks表和task_runs表构建条件
578
+ filter_conditions_enqueue = [] # 用于enqueued_rate(只有tasks表)
579
+ filter_conditions_complete = [] # 用于completed_rate和failed_count(有join)
580
+ filter_params = {}
581
+ has_status_filter = False
582
+ status_filter_value = None
583
+
584
+ if filters:
585
+ for idx, filter_item in enumerate(filters):
586
+ # 跳过被禁用的筛选条件
587
+ if filter_item.get('enabled') == False:
588
+ continue
589
+
590
+ field = filter_item.get('field')
591
+ operator = filter_item.get('operator')
592
+ value = filter_item.get('value')
593
+
594
+ if not field or not operator:
595
+ continue
596
+
597
+ # 检查是否有status筛选
598
+ if field == 'status' and operator == 'eq':
599
+ has_status_filter = True
600
+ status_filter_value = value
601
+
602
+ # 判断字段属于哪个表
603
+ # task_runs表独有的字段
604
+ task_runs_only_fields = ['task_name', 'consumer_group', 'worker_id', 'duration_ms',
605
+ 'retry_count', 'error_message', 'result', 'start_time',
606
+ 'end_time', 'consumer_name']
607
+ # tasks表和task_runs表都有的字段
608
+ both_tables_fields = ['status']
609
+ # tasks表独有的字段
610
+ tasks_only_fields = ['stream_id', 'queue', 'namespace', 'scheduled_task_id',
611
+ 'payload', 'priority', 'created_at', 'source', 'metadata']
612
+
613
+ param_name = f'filter_{idx}_value'
614
+
615
+ if field in task_runs_only_fields:
616
+ # 只在task_runs表中的字段,只能用于completed_rate和failed_count查询
617
+ # enqueued_rate查询不支持这些字段
618
+
619
+ # 特殊处理空值判断
620
+ if operator in ['is_null', 'is_not_null']:
621
+ if operator == 'is_null':
622
+ filter_conditions_complete.append(f"tr.{field} IS NULL")
623
+ else:
624
+ filter_conditions_complete.append(f"tr.{field} IS NOT NULL")
625
+ else:
626
+ op_map = {
627
+ 'eq': '=',
628
+ 'ne': '!=',
629
+ 'contains': 'LIKE',
630
+ 'starts_with': 'LIKE',
631
+ 'ends_with': 'LIKE'
632
+ }
633
+ sql_op = op_map.get(operator, '=')
634
+
635
+ if operator == 'contains':
636
+ filter_params[param_name] = f'%{value}%'
637
+ elif operator == 'starts_with':
638
+ filter_params[param_name] = f'{value}%'
639
+ elif operator == 'ends_with':
640
+ filter_params[param_name] = f'%{value}'
641
+ else:
642
+ filter_params[param_name] = value
643
+
644
+ filter_conditions_complete.append(f"tr.{field} {sql_op} :{param_name}")
645
+
646
+ elif field == 'status':
647
+ # status字段在两个表中都存在,需要特殊处理
648
+ # tasks表中没有status字段,task_runs表中有
649
+ # 对于enqueued_rate,不应用status筛选
650
+ # 对于completed_rate和failed_count,应用到tr.status
651
+
652
+ if operator in ['is_null', 'is_not_null']:
653
+ if operator == 'is_null':
654
+ filter_conditions_complete.append(f"tr.{field} IS NULL")
655
+ else:
656
+ filter_conditions_complete.append(f"tr.{field} IS NOT NULL")
657
+ else:
658
+ op_map = {
659
+ 'eq': '=',
660
+ 'ne': '!=',
661
+ 'contains': 'LIKE',
662
+ 'starts_with': 'LIKE',
663
+ 'ends_with': 'LIKE'
664
+ }
665
+ sql_op = op_map.get(operator, '=')
666
+
667
+ if operator == 'contains':
668
+ filter_params[param_name] = f'%{value}%'
669
+ elif operator == 'starts_with':
670
+ filter_params[param_name] = f'{value}%'
671
+ elif operator == 'ends_with':
672
+ filter_params[param_name] = f'%{value}'
673
+ else:
674
+ filter_params[param_name] = value
675
+
676
+ filter_conditions_complete.append(f"tr.{field} {sql_op} :{param_name}")
677
+
678
+ elif field == 'id':
679
+ # id字段特殊处理,对应tasks表的stream_id
680
+ if operator in ['is_null', 'is_not_null']:
681
+ if operator == 'is_null':
682
+ filter_conditions_enqueue.append(f"stream_id IS NULL")
683
+ filter_conditions_complete.append(f"t.stream_id IS NULL")
684
+ else:
685
+ filter_conditions_enqueue.append(f"stream_id IS NOT NULL")
686
+ filter_conditions_complete.append(f"t.stream_id IS NOT NULL")
687
+ else:
688
+ op_map = {
689
+ 'eq': '=',
690
+ 'ne': '!=',
691
+ 'contains': 'LIKE',
692
+ 'starts_with': 'LIKE',
693
+ 'ends_with': 'LIKE'
694
+ }
695
+ sql_op = op_map.get(operator, '=')
696
+
697
+ if operator == 'contains':
698
+ filter_params[param_name] = f'%{value}%'
699
+ elif operator == 'starts_with':
700
+ filter_params[param_name] = f'{value}%'
701
+ elif operator == 'ends_with':
702
+ filter_params[param_name] = f'%{value}'
703
+ else:
704
+ filter_params[param_name] = value
705
+
706
+ filter_conditions_enqueue.append(f"stream_id {sql_op} :{param_name}")
707
+ filter_conditions_complete.append(f"t.stream_id {sql_op} :{param_name}")
708
+
709
+ elif field == 'scheduled_task_id':
710
+ # scheduled_task_id字段特殊处理,数据库中是TEXT类型,需要转换
711
+ if operator in ['is_null', 'is_not_null']:
712
+ if operator == 'is_null':
713
+ filter_conditions_enqueue.append(f"scheduled_task_id IS NULL")
714
+ filter_conditions_complete.append(f"t.scheduled_task_id IS NULL")
715
+ else:
716
+ filter_conditions_enqueue.append(f"scheduled_task_id IS NOT NULL")
717
+ filter_conditions_complete.append(f"t.scheduled_task_id IS NOT NULL")
718
+ else:
719
+ op_map = {
720
+ 'eq': '=',
721
+ 'ne': '!=',
722
+ 'contains': 'LIKE',
723
+ 'starts_with': 'LIKE',
724
+ 'ends_with': 'LIKE'
725
+ }
726
+ sql_op = op_map.get(operator, '=')
727
+
728
+ # 将值转换为字符串
729
+ if operator == 'contains':
730
+ filter_params[param_name] = f'%{str(value)}%'
731
+ elif operator == 'starts_with':
732
+ filter_params[param_name] = f'{str(value)}%'
733
+ elif operator == 'ends_with':
734
+ filter_params[param_name] = f'%{str(value)}'
735
+ else:
736
+ filter_params[param_name] = str(value)
737
+
738
+ filter_conditions_enqueue.append(f"scheduled_task_id {sql_op} :{param_name}")
739
+ filter_conditions_complete.append(f"t.scheduled_task_id {sql_op} :{param_name}")
740
+
741
+ else:
742
+ # 其他字段默认属于tasks表
743
+ # 特殊处理空值判断
744
+ if operator in ['is_null', 'is_not_null']:
745
+ if operator == 'is_null':
746
+ filter_conditions_enqueue.append(f"{field} IS NULL")
747
+ filter_conditions_complete.append(f"t.{field} IS NULL")
748
+ else:
749
+ filter_conditions_enqueue.append(f"{field} IS NOT NULL")
750
+ filter_conditions_complete.append(f"t.{field} IS NOT NULL")
751
+ else:
752
+ # 处理其他操作符
753
+ op_map = {
754
+ 'eq': '=',
755
+ 'ne': '!=',
756
+ 'contains': 'LIKE',
757
+ 'starts_with': 'LIKE',
758
+ 'ends_with': 'LIKE'
759
+ }
760
+ sql_op = op_map.get(operator, '=')
761
+
762
+ if operator == 'contains':
763
+ filter_params[param_name] = f'%{value}%'
764
+ elif operator == 'starts_with':
765
+ filter_params[param_name] = f'{value}%'
766
+ elif operator == 'ends_with':
767
+ filter_params[param_name] = f'%{value}'
768
+ else:
769
+ filter_params[param_name] = value
770
+
771
+ filter_conditions_enqueue.append(f"{field} {sql_op} :{param_name}")
772
+ filter_conditions_complete.append(f"t.{field} {sql_op} :{param_name}")
773
+
774
+ # 构建额外的WHERE条件
775
+ extra_where_enqueue = ""
776
+ extra_where_complete = ""
777
+ if filter_conditions_enqueue:
778
+ extra_where_enqueue = " AND " + " AND ".join(filter_conditions_enqueue)
779
+ if filter_conditions_complete:
780
+ extra_where_complete = " AND " + " AND ".join(filter_conditions_complete)
781
+
782
+ # SQL查询:获取入队速率、完成速率和失败数
783
+ # 重要:时间桶对齐到固定边界(如整5秒、整分钟),确保聚合区间稳定
784
+ query = text(f"""
785
+ WITH time_series AS (
786
+ -- 生成对齐到固定边界的时间序列
787
+ -- 结束时间需要加一个间隔,确保包含所有在end_time之前的数据
788
+ SELECT generate_series(
789
+ to_timestamp(FLOOR(EXTRACT(epoch FROM CAST(:start_time AS timestamptz)) / {interval_seconds}) * {interval_seconds}),
790
+ to_timestamp(CEILING(EXTRACT(epoch FROM CAST(:end_time AS timestamptz)) / {interval_seconds}) * {interval_seconds} + {interval_seconds}),
791
+ CAST(:interval AS interval)
792
+ ) AS time_bucket
793
+ ),
794
+ enqueued_rate AS (
256
795
  SELECT
257
- date_trunc('hour', created_at) as time_point,
258
- queue_name,
259
- COUNT(*) as task_count
260
- FROM tasks
261
- WHERE queue_name IN ('{queue_names_str}')
262
- AND created_at >= :start_time
796
+ -- 对齐到固定的时间边界
797
+ to_timestamp(
798
+ FLOOR(EXTRACT(epoch FROM created_at) / {interval_seconds}) * {interval_seconds}
799
+ ) AS time_bucket,
800
+ COUNT(*) AS count
801
+ FROM tasks
802
+ WHERE (queue = :queue_name OR queue LIKE :queue_pattern)
803
+ AND created_at >= :start_time
263
804
  AND created_at <= :end_time
264
- GROUP BY time_point, queue_name
265
- ORDER BY time_point, queue_name
266
- """)
805
+ {extra_where_enqueue}
806
+ GROUP BY 1
807
+ ),
808
+ completed_rate AS (
809
+ SELECT
810
+ -- 对齐到固定的时间边界
811
+ to_timestamp(
812
+ FLOOR(EXTRACT(epoch FROM tr.end_time) / {interval_seconds}) * {interval_seconds}
813
+ ) AS time_bucket,
814
+ COUNT(*) AS count
815
+ FROM tasks t
816
+ JOIN task_runs tr ON t.stream_id = tr.stream_id
817
+ WHERE (t.queue = :queue_name OR t.queue LIKE :queue_pattern)
818
+ AND tr.end_time >= :start_time
819
+ AND tr.end_time <= :end_time
820
+ AND tr.status = 'success'
821
+ {extra_where_complete}
822
+ GROUP BY 1
823
+ ),
824
+ failed_count AS (
825
+ SELECT
826
+ -- 对齐到固定的时间边界
827
+ to_timestamp(
828
+ FLOOR(EXTRACT(epoch FROM tr.end_time) / {interval_seconds}) * {interval_seconds}
829
+ ) AS time_bucket,
830
+ COUNT(*) AS count
831
+ FROM tasks t
832
+ JOIN task_runs tr ON t.stream_id = tr.stream_id
833
+ WHERE (t.queue = :queue_name OR t.queue LIKE :queue_pattern)
834
+ AND tr.end_time >= :start_time
835
+ AND tr.end_time <= :end_time
836
+ AND tr.status IN ('failed', 'error')
837
+ {extra_where_complete}
838
+ GROUP BY 1
839
+ )
840
+ SELECT
841
+ ts.time_bucket,
842
+ COALESCE(e.count, 0) AS enqueued,
843
+ COALESCE(c.count, 0) AS completed,
844
+ COALESCE(f.count, 0) AS failed
845
+ FROM time_series ts
846
+ LEFT JOIN enqueued_rate e ON ts.time_bucket = e.time_bucket
847
+ LEFT JOIN completed_rate c ON ts.time_bucket = c.time_bucket
848
+ LEFT JOIN failed_count f ON ts.time_bucket = f.time_bucket
849
+ ORDER BY ts.time_bucket
850
+ """)
851
+
852
+ # 合并参数
853
+ params = {
854
+ 'queue_name': queue_name,
855
+ 'queue_pattern': f'{queue_name}:%', # 匹配所有优先级队列
856
+ 'start_time': start_time,
857
+ 'end_time': end_time,
858
+ 'interval': interval
859
+ }
860
+ params.update(filter_params)
861
+
862
+ logger.info(f"执行查询 - 队列: {queue_name}, 时间范围: {start_time} 到 {end_time}, 间隔: {interval}, 筛选条件: {len(filter_conditions_enqueue) + len(filter_conditions_complete)} 个")
863
+
864
+ result = await session.execute(query, params)
865
+
866
+ rows = result.fetchall()
867
+ logger.info(f"查询返回 {len(rows)} 行数据")
868
+
869
+ # 转换为前端需要的格式
870
+ data = []
871
+ total_enqueued = 0
872
+ total_completed = 0
873
+ total_failed = 0
874
+ end_index = len(rows) - 1
875
+
876
+ # 根据status筛选决定显示什么指标
877
+ if has_status_filter:
878
+ # 有status筛选时,需要特殊处理
879
+ for idx, row in enumerate(rows):
880
+ time_point = row.time_bucket.isoformat()
881
+
882
+ # 累计统计
883
+ total_enqueued += row.enqueued
884
+
885
+ # 添加入队速率数据点(蓝色)
886
+ data.append({
887
+ 'time': time_point,
888
+ 'value': row.enqueued or None if idx > 0 and end_index != idx else row.enqueued,
889
+ 'metric': '入队速率'
890
+ })
891
+
892
+ # 根据筛选的状态决定是否显示完成速率和失败数
893
+ if status_filter_value == 'success':
894
+ # 筛选成功任务时,显示完成速率,不显示失败数
895
+ total_completed += row.completed
896
+ data.append({
897
+ 'time': time_point,
898
+ 'value': row.completed or None if idx > 0 and end_index != idx else row.completed,
899
+ 'metric': '完成速率'
900
+ })
901
+ data.append({
902
+ 'time': time_point,
903
+ 'value': None,
904
+ 'metric': '失败数'
905
+ })
906
+ elif status_filter_value == 'error':
907
+ # 筛选失败任务时,不显示完成速率,显示失败数
908
+ total_failed += row.failed
909
+ data.append({
910
+ 'time': time_point,
911
+ 'value': None,
912
+ 'metric': '完成速率'
913
+ })
914
+ data.append({
915
+ 'time': time_point,
916
+ 'value': row.failed or None if idx > 0 and end_index != idx else row.failed,
917
+ 'metric': '失败数'
918
+ })
919
+ else:
920
+ # 其他状态(running, pending, rejected等),不显示完成速率和失败数
921
+ data.append({
922
+ 'time': time_point,
923
+ 'value': None,
924
+ 'metric': '完成速率'
925
+ })
926
+ data.append({
927
+ 'time': time_point,
928
+ 'value': None,
929
+ 'metric': '失败数'
930
+ })
931
+ else:
932
+ # 默认或其他状态筛选:显示标准指标
933
+ for idx, row in enumerate(rows):
934
+ time_point = row.time_bucket.isoformat()
935
+
936
+ # 累计统计
937
+ total_enqueued += row.enqueued
938
+ total_completed += row.completed
939
+ total_failed += row.failed
940
+
941
+ # 添加入队速率数据点(蓝色)
942
+ data.append({
943
+ 'time': time_point,
944
+ 'value': row.enqueued or None if idx > 0 and end_index != idx else row.enqueued,
945
+ 'metric': '入队速率'
946
+ })
947
+ # 添加完成速率数据点(绿色)
948
+ data.append({
949
+ 'time': time_point,
950
+ 'value': row.completed or None if idx > 0 and end_index != idx else row.completed,
951
+ 'metric': '完成速率'
952
+ })
953
+ # 添加失败数数据点(红色)
954
+ data.append({
955
+ 'time': time_point,
956
+ 'value': row.failed or None if idx > 0 and end_index != idx else row.failed,
957
+ 'metric': '失败数'
958
+ })
267
959
 
268
- else: # 超过7天,按天聚合
269
- query = text(f"""
960
+ # 调试日志:每10个点输出一次
961
+ if idx % 10 == 0 or idx == len(rows) - 1:
962
+ logger.debug(f"Row {idx}: time={time_point}, enqueued={row.enqueued}, completed={row.completed}, failed={row.failed}")
963
+
964
+ logger.info(f"数据汇总 - 总入队: {total_enqueued}, 总完成: {total_completed}, 总失败: {total_failed}")
965
+
966
+ return data, granularity
967
+
968
+ except Exception as e:
969
+ logger.error(f"获取队列流量速率失败: {e}")
970
+ import traceback
971
+ traceback.print_exc()
972
+ raise
973
+
974
+ async def fetch_queue_timeline_data(self,
975
+ queues: List[str],
976
+ start_time: datetime,
977
+ end_time: datetime,
978
+ filters: List[Dict] = None) -> List[Dict]:
979
+ """获取队列时间线数据 - 优化版本,使用generate_series生成完整时间序列
980
+
981
+ Args:
982
+ queues: 队列名称列表
983
+ start_time: 开始时间
984
+ end_time: 结束时间
985
+ filters: 筛选条件列表,与fetch_tasks_with_filters的格式相同
986
+ """
987
+ try:
988
+ if not self.AsyncSessionLocal:
989
+ await self.initialize()
990
+
991
+ async with self.AsyncSessionLocal() as session:
992
+ # 构建队列名称列表字符串
993
+ queue_names_str = "', '".join(queues)
994
+
995
+ # 动态计算时间间隔,目标是生成约200个时间点
996
+ TARGET_POINTS = 200
997
+ duration = (end_time - start_time).total_seconds()
998
+
999
+ # 计算理想的间隔秒数
1000
+ ideal_interval_seconds = duration / TARGET_POINTS
1001
+
1002
+ # 将间隔秒数规范化到合理的值
1003
+ if ideal_interval_seconds <= 1:
1004
+ interval_seconds = 1
1005
+ interval = '1 seconds'
1006
+ trunc_unit = 'second'
1007
+ elif ideal_interval_seconds <= 5:
1008
+ interval_seconds = 5
1009
+ interval = '5 seconds'
1010
+ trunc_unit = 'second'
1011
+ elif ideal_interval_seconds <= 10:
1012
+ interval_seconds = 10
1013
+ interval = '10 seconds'
1014
+ trunc_unit = 'second'
1015
+ elif ideal_interval_seconds <= 30:
1016
+ interval_seconds = 30
1017
+ interval = '30 seconds'
1018
+ trunc_unit = 'second'
1019
+ elif ideal_interval_seconds <= 60:
1020
+ interval_seconds = 60
1021
+ interval = '1 minute'
1022
+ trunc_unit = 'minute'
1023
+ elif ideal_interval_seconds <= 120:
1024
+ interval_seconds = 120
1025
+ interval = '2 minutes'
1026
+ trunc_unit = 'minute'
1027
+ elif ideal_interval_seconds <= 300:
1028
+ interval_seconds = 300
1029
+ interval = '5 minutes'
1030
+ trunc_unit = 'minute'
1031
+ elif ideal_interval_seconds <= 600:
1032
+ interval_seconds = 600
1033
+ interval = '10 minutes'
1034
+ trunc_unit = 'minute'
1035
+ elif ideal_interval_seconds <= 900:
1036
+ interval_seconds = 900
1037
+ interval = '15 minutes'
1038
+ trunc_unit = 'minute'
1039
+ elif ideal_interval_seconds <= 1800:
1040
+ interval_seconds = 1800
1041
+ interval = '30 minutes'
1042
+ trunc_unit = 'minute'
1043
+ elif ideal_interval_seconds <= 3600:
1044
+ interval_seconds = 3600
1045
+ interval = '1 hour'
1046
+ trunc_unit = 'hour'
1047
+ elif ideal_interval_seconds <= 7200:
1048
+ interval_seconds = 7200
1049
+ interval = '2 hours'
1050
+ trunc_unit = 'hour'
1051
+ elif ideal_interval_seconds <= 14400:
1052
+ interval_seconds = 14400
1053
+ interval = '4 hours'
1054
+ trunc_unit = 'hour'
1055
+ elif ideal_interval_seconds <= 21600:
1056
+ interval_seconds = 21600
1057
+ interval = '6 hours'
1058
+ trunc_unit = 'hour'
1059
+ elif ideal_interval_seconds <= 43200:
1060
+ interval_seconds = 43200
1061
+ interval = '12 hours'
1062
+ trunc_unit = 'hour'
1063
+ else:
1064
+ interval_seconds = 86400
1065
+ interval = '1 day'
1066
+ trunc_unit = 'day'
1067
+
1068
+ # 重新计算实际点数
1069
+ actual_points = int(duration / interval_seconds) + 1
1070
+ logger.info(f"使用时间间隔: {interval_seconds}秒 ({interval}), 预计生成 {actual_points} 个时间点")
1071
+
1072
+ # 构建筛选条件的WHERE子句
1073
+ filter_conditions = []
1074
+ filter_params = {}
1075
+
1076
+ if filters:
1077
+ for idx, filter_item in enumerate(filters):
1078
+ # 跳过被禁用的筛选条件
1079
+ if filter_item.get('enabled') == False:
1080
+ continue
1081
+
1082
+ field = filter_item.get('field')
1083
+ operator = filter_item.get('operator')
1084
+ value = filter_item.get('value')
1085
+
1086
+ if not field or not operator:
1087
+ continue
1088
+
1089
+ # 特殊处理空值判断
1090
+ if operator in ['is_null', 'is_not_null']:
1091
+ if operator == 'is_null':
1092
+ filter_conditions.append(f"{field} IS NULL")
1093
+ else:
1094
+ filter_conditions.append(f"{field} IS NOT NULL")
1095
+ # 处理IN和NOT IN操作符
1096
+ elif operator in ['in', 'not_in']:
1097
+ param_name = f'filter_{idx}_value'
1098
+ if isinstance(value, list):
1099
+ values_str = "', '".join(str(v) for v in value)
1100
+ if operator == 'in':
1101
+ filter_conditions.append(f"{field} IN ('{values_str}')")
1102
+ else:
1103
+ filter_conditions.append(f"{field} NOT IN ('{values_str}')")
1104
+ else:
1105
+ if operator == 'in':
1106
+ filter_conditions.append(f"{field} = :{param_name}")
1107
+ else:
1108
+ filter_conditions.append(f"{field} != :{param_name}")
1109
+ filter_params[param_name] = value
1110
+ # 处理包含操作符
1111
+ elif operator == 'contains':
1112
+ param_name = f'filter_{idx}_value'
1113
+ # 特殊处理JSON字段
1114
+ if field in ['task_data', 'result']:
1115
+ filter_conditions.append(f"{field}::text LIKE :{param_name}")
1116
+ else:
1117
+ filter_conditions.append(f"{field} LIKE :{param_name}")
1118
+ filter_params[param_name] = f'%{value}%'
1119
+ # 处理JSON相关操作符
1120
+ elif operator == 'json_key_exists':
1121
+ # 检查JSON中是否存在指定的键
1122
+ if field in ['task_data', 'result']:
1123
+ param_name = f'filter_{idx}_value'
1124
+ filter_conditions.append(f"{field} ? :{param_name}")
1125
+ filter_params[param_name] = value
1126
+ elif operator == 'json_path_value':
1127
+ # 使用JSON路径查询
1128
+ if field in ['task_data', 'result'] and '=' in value:
1129
+ import re
1130
+ path, val = value.split('=', 1)
1131
+ path = path.strip()
1132
+ val = val.strip()
1133
+ if path.startswith('$.'):
1134
+ path = path[2:]
1135
+ path_parts = path.split('.')
1136
+ # 验证路径安全性
1137
+ if all(re.match(r'^[a-zA-Z0-9_]+$', part) for part in path_parts):
1138
+ param_name = f'filter_{idx}_value'
1139
+ if len(path_parts) == 1:
1140
+ filter_conditions.append(f"{field}->>'{path_parts[0]}' = :{param_name}")
1141
+ else:
1142
+ path_str = '{' + ','.join(path_parts) + '}'
1143
+ filter_conditions.append(f"{field}#>>'{path_str}' = :{param_name}")
1144
+ filter_params[param_name] = val.strip('"').strip("'")
1145
+ elif operator == 'starts_with':
1146
+ param_name = f'filter_{idx}_value'
1147
+ filter_conditions.append(f"{field} LIKE :{param_name}")
1148
+ filter_params[param_name] = f'{value}%'
1149
+ elif operator == 'ends_with':
1150
+ param_name = f'filter_{idx}_value'
1151
+ filter_conditions.append(f"{field} LIKE :{param_name}")
1152
+ filter_params[param_name] = f'%{value}'
1153
+ # 处理标准比较操作符
1154
+ else:
1155
+ param_name = f'filter_{idx}_value'
1156
+ op_map = {
1157
+ 'eq': '=',
1158
+ 'ne': '!=',
1159
+ 'gt': '>',
1160
+ 'lt': '<',
1161
+ 'gte': '>=',
1162
+ 'lte': '<='
1163
+ }
1164
+ sql_op = op_map.get(operator, '=')
1165
+ filter_conditions.append(f"{field} {sql_op} :{param_name}")
1166
+ filter_params[param_name] = value
1167
+
1168
+ # 构建额外的WHERE条件
1169
+ extra_where = ""
1170
+ if filter_conditions:
1171
+ extra_where = " AND " + " AND ".join(filter_conditions)
1172
+
1173
+ # 优化的SQL查询 - 使用generate_series和CROSS JOIN生成完整的时间序列
1174
+ # 重要:时间桶对齐到固定边界,而不是基于start_time
1175
+ query = text(f"""
1176
+ WITH time_series AS (
1177
+ -- 生成对齐到固定边界的时间序列
1178
+ -- 先计算对齐后的起始和结束时间
1179
+ SELECT generate_series(
1180
+ to_timestamp(FLOOR(EXTRACT(epoch FROM CAST(:start_time AS timestamptz)) / {interval_seconds}) * {interval_seconds}),
1181
+ to_timestamp(CEILING(EXTRACT(epoch FROM CAST(:end_time AS timestamptz)) / {interval_seconds}) * {interval_seconds} + {interval_seconds}),
1182
+ CAST(:interval AS interval)
1183
+ ) AS time_bucket
1184
+ ),
1185
+ queue_list AS (
1186
+ SELECT UNNEST(ARRAY['{queue_names_str}']) AS queue_name
1187
+ ),
1188
+ queue_data AS (
270
1189
  SELECT
271
- date_trunc('day', created_at) as time_point,
272
- queue_name,
1190
+ -- 对齐到固定的时间边界
1191
+ -- 例如:5秒间隔会对齐到 00:00, 00:05, 00:10...
1192
+ to_timestamp(
1193
+ FLOOR(EXTRACT(epoch FROM created_at) / {interval_seconds}) * {interval_seconds}
1194
+ ) AS time_bucket,
1195
+ queue AS queue_name,
273
1196
  COUNT(*) as task_count
274
1197
  FROM tasks
275
- WHERE queue_name IN ('{queue_names_str}')
1198
+ WHERE queue IN ('{queue_names_str}')
276
1199
  AND created_at >= :start_time
277
1200
  AND created_at <= :end_time
278
- GROUP BY time_point, queue_name
279
- ORDER BY time_point, queue_name
280
- """)
1201
+ {extra_where}
1202
+ GROUP BY 1, 2
1203
+ )
1204
+ SELECT
1205
+ ts.time_bucket,
1206
+ ql.queue_name,
1207
+ COALESCE(qd.task_count, 0) as value
1208
+ FROM time_series ts
1209
+ CROSS JOIN queue_list ql
1210
+ LEFT JOIN queue_data qd
1211
+ ON ts.time_bucket = qd.time_bucket
1212
+ AND ql.queue_name = qd.queue_name
1213
+ ORDER BY ts.time_bucket, ql.queue_name
1214
+ """)
281
1215
 
282
- result = await session.execute(query, {
1216
+ # 合并参数
1217
+ query_params = {
283
1218
  'start_time': start_time,
284
- 'end_time': end_time
285
- })
286
-
287
- # 处理查询结果,返回真实的时间点
288
- timeline_data = []
289
- seen_points = set() # 用于去重(对于窗口函数的结果)
1219
+ 'end_time': end_time,
1220
+ 'interval': interval
1221
+ }
1222
+ query_params.update(filter_params)
290
1223
 
291
- for row in result:
292
- # 创建唯一键避免重复
293
- unique_key = f"{row.time_point.isoformat()}_{row.queue_name}"
294
- if unique_key not in seen_points:
295
- seen_points.add(unique_key)
296
- timeline_data.append({
297
- 'time': row.time_point.isoformat(),
298
- 'queue': row.queue_name,
299
- 'value': row.task_count
300
- })
1224
+ result = await session.execute(query, query_params)
301
1225
 
302
- logger.info(f"从DB查询到 {len(timeline_data)} 个真实数据点")
1226
+ # 直接转换结果为前端需要的格式
1227
+ timeline_data = []
1228
+ prev_time = None
1229
+ time_index = 0
1230
+ result_data = list(result)
1231
+ end_index = len(result_data) - 1
1232
+ for idx, row in enumerate(result_data):
1233
+ time_str = row.time_bucket.isoformat()
1234
+
1235
+ # 跟踪时间索引(用于决定是否将0值显示为None)
1236
+ if prev_time != time_str:
1237
+ time_index += 1
1238
+ prev_time = time_str
1239
+
1240
+ # 第一个时间点显示0,后续时间点如果是0则显示为None(用于图表美观)
1241
+ value = row.value
1242
+ if time_index > 1 and value == 0 and idx != end_index:
1243
+ value = None
1244
+ timeline_data.append({
1245
+ 'time': time_str,
1246
+ 'queue': row.queue_name,
1247
+ 'value': value
1248
+ })
303
1249
 
1250
+ logger.info(f"生成了 {len(timeline_data)} 个数据点")
304
1251
  return timeline_data
305
1252
 
306
1253
  except Exception as e:
@@ -339,4 +1286,1048 @@ class JetTaskDataAccess:
339
1286
 
340
1287
  except Exception as e:
341
1288
  logger.error(f"获取全局统计信息失败: {e}")
342
- return {}
1289
+ return {}
1290
+
1291
+ async def fetch_tasks_with_filters(self,
1292
+ queue_name: str,
1293
+ page: int = 1,
1294
+ page_size: int = 20,
1295
+ filters: List[Dict] = None,
1296
+ start_time: Optional[datetime] = None,
1297
+ end_time: Optional[datetime] = None) -> Dict:
1298
+ """获取带灵活筛选条件的任务列表
1299
+
1300
+ Args:
1301
+ queue_name: 队列名称
1302
+ page: 页码
1303
+ page_size: 每页大小
1304
+ filters: 筛选条件列表,每个条件包含:
1305
+ - field: 字段名 (task_id, status, worker_id, created_at, etc.)
1306
+ - operator: 操作符 (eq, ne, gt, lt, gte, lte, in, not_in, contains)
1307
+ - value: 比较值
1308
+ """
1309
+ try:
1310
+ if not self.AsyncSessionLocal:
1311
+ await self.initialize()
1312
+
1313
+ async with self.AsyncSessionLocal() as session:
1314
+ # 构建基础查询
1315
+ query_parts = []
1316
+ params = {'queue_name': queue_name}
1317
+
1318
+ # 基础条件
1319
+ query_parts.append("queue = :queue_name")
1320
+
1321
+ # 添加时间范围筛选
1322
+ if start_time:
1323
+ query_parts.append("created_at >= :start_time")
1324
+ params['start_time'] = start_time
1325
+ if end_time:
1326
+ query_parts.append("created_at <= :end_time")
1327
+ params['end_time'] = end_time
1328
+
1329
+ # 构建动态筛选条件
1330
+ if filters:
1331
+ for idx, filter_item in enumerate(filters):
1332
+ # 跳过被禁用的筛选条件
1333
+ if filter_item.get('enabled') == False:
1334
+ continue
1335
+
1336
+ field = filter_item.get('field')
1337
+ operator = filter_item.get('operator')
1338
+ value = filter_item.get('value')
1339
+
1340
+ if not field or not operator or value is None:
1341
+ continue
1342
+
1343
+ param_name = f"filter_{idx}"
1344
+
1345
+ # 根据操作符构建SQL条件
1346
+ if operator == 'eq':
1347
+ query_parts.append(f"{field} = :{param_name}")
1348
+ params[param_name] = value
1349
+ elif operator == 'ne':
1350
+ query_parts.append(f"{field} != :{param_name}")
1351
+ params[param_name] = value
1352
+ elif operator == 'gt':
1353
+ query_parts.append(f"{field} > :{param_name}")
1354
+ params[param_name] = value
1355
+ elif operator == 'lt':
1356
+ query_parts.append(f"{field} < :{param_name}")
1357
+ params[param_name] = value
1358
+ elif operator == 'gte':
1359
+ query_parts.append(f"{field} >= :{param_name}")
1360
+ params[param_name] = value
1361
+ elif operator == 'lte':
1362
+ query_parts.append(f"{field} <= :{param_name}")
1363
+ params[param_name] = value
1364
+ elif operator == 'in':
1365
+ # 处理IN操作符
1366
+ if isinstance(value, str):
1367
+ value = value.split(',')
1368
+ in_params = []
1369
+ for i, v in enumerate(value):
1370
+ in_param_name = f"{param_name}_{i}"
1371
+ in_params.append(f":{in_param_name}")
1372
+ params[in_param_name] = v.strip() if isinstance(v, str) else v
1373
+ query_parts.append(f"{field} IN ({','.join(in_params)})")
1374
+ elif operator == 'not_in':
1375
+ # 处理NOT IN操作符
1376
+ if isinstance(value, str):
1377
+ value = value.split(',')
1378
+ not_in_params = []
1379
+ for i, v in enumerate(value):
1380
+ not_in_param_name = f"{param_name}_{i}"
1381
+ not_in_params.append(f":{not_in_param_name}")
1382
+ params[not_in_param_name] = v.strip() if isinstance(v, str) else v
1383
+ query_parts.append(f"{field} NOT IN ({','.join(not_in_params)})")
1384
+ elif operator == 'contains':
1385
+ # 特殊处理JSON字段的搜索
1386
+ if field in ['task_data', 'result']:
1387
+ # 对JSON字段使用JSONB的文本搜索
1388
+ query_parts.append(f"{field}::text LIKE :{param_name}")
1389
+ params[param_name] = f"%{value}%"
1390
+ else:
1391
+ query_parts.append(f"{field} LIKE :{param_name}")
1392
+ params[param_name] = f"%{value}%"
1393
+ elif operator == 'json_key_exists':
1394
+ # 检查JSON中是否存在指定的键
1395
+ if field in ['task_data', 'result']:
1396
+ query_parts.append(f"{field} ? :{param_name}")
1397
+ params[param_name] = value
1398
+ elif operator == 'json_key_value':
1399
+ # 检查JSON中指定键的值
1400
+ if field in ['task_data', 'result'] and '=' in value:
1401
+ key, val = value.split('=', 1)
1402
+ key = key.strip()
1403
+ val = val.strip()
1404
+ # 注意:PostgreSQL的 ->> 操作符的键名不能使用参数绑定,必须直接嵌入SQL
1405
+ # 为了安全,对键名进行验证
1406
+ import re
1407
+ if not re.match(r'^[a-zA-Z0-9_]+$', key):
1408
+ continue # 跳过无效的键名
1409
+
1410
+ # 尝试解析值的类型
1411
+ if val.lower() in ['true', 'false']:
1412
+ # 布尔值
1413
+ query_parts.append(f"({field}->'{key}')::boolean = :{param_name}_val")
1414
+ params[f'{param_name}_val'] = val.lower() == 'true'
1415
+ elif val.isdigit() or (val.startswith('-') and val[1:].isdigit()):
1416
+ # 整数
1417
+ query_parts.append(f"({field}->'{key}')::text = :{param_name}_val")
1418
+ params[f'{param_name}_val'] = val
1419
+ else:
1420
+ # 字符串 - 使用 ->> 操作符获取文本值
1421
+ query_parts.append(f"{field}->>'{key}' = :{param_name}_val")
1422
+ params[f'{param_name}_val'] = val.strip('"').strip("'")
1423
+ elif operator == 'json_path_value':
1424
+ # 使用JSON路径查询
1425
+ if field in ['task_data', 'result'] and '=' in value:
1426
+ path, val = value.split('=', 1)
1427
+ path = path.strip()
1428
+ val = val.strip()
1429
+
1430
+ # 处理路径格式
1431
+ if path.startswith('$.'):
1432
+ path = path[2:] # 移除 $.
1433
+ path_parts = path.split('.')
1434
+
1435
+ # 验证路径部分的安全性
1436
+ import re
1437
+ if not all(re.match(r'^[a-zA-Z0-9_]+$', part) for part in path_parts):
1438
+ continue # 跳过无效的路径
1439
+
1440
+ # 构建JSONB路径查询
1441
+ if len(path_parts) == 1:
1442
+ # 单层路径,同json_key_value处理
1443
+ query_parts.append(f"{field}->>'{path_parts[0]}' = :{param_name}_val")
1444
+ else:
1445
+ # 多层路径,使用 #>> 操作符
1446
+ path_str = '{' + ','.join(path_parts) + '}'
1447
+ query_parts.append(f"{field}#>>'{path_str}' = :{param_name}_val")
1448
+
1449
+ # 处理值
1450
+ params[f'{param_name}_val'] = val.strip('"').strip("'")
1451
+ elif operator == 'starts_with':
1452
+ query_parts.append(f"{field} LIKE :{param_name}")
1453
+ params[param_name] = f"{value}%"
1454
+ elif operator == 'ends_with':
1455
+ query_parts.append(f"{field} LIKE :{param_name}")
1456
+ params[param_name] = f"%{value}"
1457
+ elif operator == 'is_null':
1458
+ query_parts.append(f"{field} IS NULL")
1459
+ elif operator == 'is_not_null':
1460
+ query_parts.append(f"{field} IS NOT NULL")
1461
+
1462
+ # 构建WHERE子句
1463
+ where_clause = " AND ".join(query_parts)
1464
+
1465
+ # 计算总数
1466
+ count_query = text(f"""
1467
+ SELECT COUNT(*) as total
1468
+ FROM tasks
1469
+ WHERE {where_clause}
1470
+ """)
1471
+
1472
+ count_result = await session.execute(count_query, params)
1473
+ total = count_result.scalar() or 0
1474
+
1475
+ # 获取分页数据(默认不包含task_data、result和error_message以提高性能)
1476
+ offset = (page - 1) * page_size
1477
+ data_query = text(f"""
1478
+ SELECT
1479
+ id,
1480
+ queue AS queue_name,
1481
+ task_name,
1482
+ status,
1483
+ worker_id,
1484
+ created_at,
1485
+ started_at,
1486
+ completed_at,
1487
+ retry_count,
1488
+ priority,
1489
+ max_retry,
1490
+ metadata,
1491
+ duration,
1492
+ EXTRACT(epoch FROM (
1493
+ CASE
1494
+ WHEN completed_at IS NOT NULL THEN completed_at - started_at
1495
+ WHEN started_at IS NOT NULL THEN NOW() - started_at
1496
+ ELSE NULL
1497
+ END
1498
+ )) as execution_time
1499
+ FROM tasks
1500
+ WHERE {where_clause}
1501
+ ORDER BY created_at DESC
1502
+ LIMIT :limit OFFSET :offset
1503
+ """)
1504
+
1505
+ params['limit'] = page_size
1506
+ params['offset'] = offset
1507
+
1508
+ result = await session.execute(data_query, params)
1509
+ rows = result.fetchall()
1510
+
1511
+ # 转换为字典格式
1512
+ tasks = []
1513
+ for row in rows:
1514
+ tasks.append({
1515
+ 'id': row.id,
1516
+ 'queue_name': row.queue_name,
1517
+ 'task_name': row.task_name,
1518
+ 'status': row.status,
1519
+ 'worker_id': row.worker_id,
1520
+ 'created_at': row.created_at.isoformat() if row.created_at else None,
1521
+ 'started_at': row.started_at.isoformat() if row.started_at else None,
1522
+ 'completed_at': row.completed_at.isoformat() if row.completed_at else None,
1523
+ 'execution_time': round(row.execution_time, 5) if row.execution_time else None,
1524
+ 'duration': round(row.duration, 5) if row.duration else None,
1525
+ 'retry_count': row.retry_count,
1526
+ 'priority': row.priority,
1527
+ 'max_retry': row.max_retry
1528
+ })
1529
+
1530
+ return {
1531
+ 'success': True,
1532
+ 'data': tasks,
1533
+ 'total': total,
1534
+ 'page': page,
1535
+ 'page_size': page_size
1536
+ }
1537
+
1538
+ except Exception as e:
1539
+ logger.error(f"获取任务列表失败: {e}")
1540
+ import traceback
1541
+ traceback.print_exc()
1542
+ return {
1543
+ 'success': False,
1544
+ 'data': [],
1545
+ 'total': 0,
1546
+ 'page': page,
1547
+ 'page_size': page_size,
1548
+ 'error': str(e)
1549
+ }
1550
+
1551
+ # ============= 定时任务相关方法 =============
1552
+
1553
+ async def get_scheduled_tasks_statistics(self, session, namespace):
1554
+ """获取定时任务统计数据"""
1555
+ try:
1556
+ from datetime import datetime, timezone, timedelta
1557
+
1558
+ # 获取今天的开始时间(UTC)
1559
+ today_start = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
1560
+
1561
+ # 查询统计数据
1562
+ # 今日执行次数:统计今天所有定时任务触发生成的tasks记录数
1563
+ # 成功率:统计今天成功完成的任务占总执行任务的百分比
1564
+ query = text("""
1565
+ WITH stats AS (
1566
+ SELECT
1567
+ COUNT(*) as total,
1568
+ COUNT(CASE WHEN enabled = true THEN 1 END) as active
1569
+ FROM scheduled_tasks
1570
+ WHERE namespace = :namespace
1571
+ ),
1572
+ today_tasks AS (
1573
+ SELECT
1574
+ COUNT(DISTINCT t.stream_id) as today_count,
1575
+ COUNT(DISTINCT CASE WHEN tr.status = 'success' THEN t.stream_id END) as success_count
1576
+ FROM tasks t
1577
+ LEFT JOIN task_runs tr ON t.stream_id = tr.stream_id
1578
+ WHERE t.created_at >= :today_start
1579
+ AND t.scheduled_task_id IS NOT NULL
1580
+ AND t.namespace = :namespace
1581
+ )
1582
+ SELECT
1583
+ stats.total,
1584
+ stats.active,
1585
+ COALESCE(today_tasks.today_count, 0) as today_executions,
1586
+ CASE
1587
+ WHEN today_tasks.today_count > 0
1588
+ THEN ROUND(today_tasks.success_count::numeric * 100.0 / today_tasks.today_count::numeric, 1)
1589
+ ELSE 0
1590
+ END as success_rate
1591
+ FROM stats, today_tasks
1592
+ """)
1593
+
1594
+ result = await session.execute(query, {
1595
+ 'today_start': today_start,
1596
+ 'namespace': namespace
1597
+ })
1598
+ row = result.first()
1599
+
1600
+ if row:
1601
+ return {
1602
+ 'total': row.total or 0,
1603
+ 'active': row.active or 0,
1604
+ 'todayExecutions': int(row.today_executions or 0),
1605
+ 'successRate': float(row.success_rate or 0)
1606
+ }
1607
+
1608
+ return {
1609
+ 'total': 0,
1610
+ 'active': 0,
1611
+ 'todayExecutions': 0,
1612
+ 'successRate': 0
1613
+ }
1614
+
1615
+ except Exception as e:
1616
+ logger.error(f"获取定时任务统计失败: {e}")
1617
+ raise
1618
+
1619
+ async def fetch_scheduled_tasks(self,
1620
+ session,
1621
+ page: int = 1,
1622
+ page_size: int = 20,
1623
+ search: Optional[str] = None,
1624
+ is_active: Optional[bool] = None,
1625
+ filters: Optional[List[Dict]] = None,
1626
+ time_range: Optional[str] = None,
1627
+ start_time: Optional[str] = None,
1628
+ end_time: Optional[str] = None) -> tuple:
1629
+ """获取定时任务列表"""
1630
+ try:
1631
+ # 构建查询条件
1632
+ where_conditions = []
1633
+ params = {}
1634
+
1635
+ if search:
1636
+ where_conditions.append("(task_name ILIKE :search OR description ILIKE :search)")
1637
+ params['search'] = f"%{search}%"
1638
+
1639
+ if is_active is not None:
1640
+ where_conditions.append("enabled = :is_active")
1641
+ params['is_active'] = is_active
1642
+
1643
+ # 处理时间范围筛选 - 针对下次执行时间
1644
+ if time_range or (start_time and end_time):
1645
+ from datetime import datetime, timedelta
1646
+ import dateutil.parser
1647
+ import pytz
1648
+
1649
+ if start_time and end_time:
1650
+ # 使用自定义时间范围
1651
+ params['start_time'] = dateutil.parser.parse(start_time)
1652
+ params['end_time'] = dateutil.parser.parse(end_time)
1653
+ else:
1654
+ # 根据预设时间范围计算
1655
+ # 使用UTC时间,因为数据库中的next_run_time是UTC时区
1656
+ now = datetime.now(pytz.UTC)
1657
+ time_ranges = {
1658
+ '1h': timedelta(hours=1),
1659
+ '6h': timedelta(hours=6),
1660
+ '24h': timedelta(hours=24),
1661
+ '7d': timedelta(days=7),
1662
+ '30d': timedelta(days=30)
1663
+ }
1664
+ delta = time_ranges.get(time_range, timedelta(hours=24))
1665
+ # 从现在开始到未来的时间范围
1666
+ params['start_time'] = now
1667
+ params['end_time'] = now + delta
1668
+
1669
+ # 筛选下次执行时间在指定范围内的任务
1670
+ where_conditions.append("next_run_time IS NOT NULL AND next_run_time BETWEEN :start_time AND :end_time")
1671
+
1672
+ # 处理高级筛选条件
1673
+ if filters:
1674
+ for idx, filter_item in enumerate(filters):
1675
+ if not filter_item.get('enabled', True):
1676
+ continue
1677
+
1678
+ field = filter_item.get('field')
1679
+ operator = filter_item.get('operator')
1680
+ value = filter_item.get('value')
1681
+
1682
+ # 映射字段名
1683
+ field_map = {
1684
+ 'id': 'id',
1685
+ 'scheduler_id': 'scheduler_id',
1686
+ 'name': 'task_name',
1687
+ 'queue_name': 'queue_name',
1688
+ 'schedule_type': 'task_type',
1689
+ 'is_active': 'enabled',
1690
+ 'description': 'description',
1691
+ 'last_run': 'last_run_time',
1692
+ 'next_run': 'next_run_time',
1693
+ 'created_at': 'created_at',
1694
+ 'task_data': 'task_kwargs', # 任务参数存储在task_kwargs字段
1695
+ 'tags': 'tags',
1696
+ 'metadata': 'metadata',
1697
+ }
1698
+
1699
+ db_field = field_map.get(field, field)
1700
+
1701
+ # 处理不同的操作符
1702
+ if operator == 'is_null':
1703
+ where_conditions.append(f"{db_field} IS NULL")
1704
+ elif operator == 'is_not_null':
1705
+ where_conditions.append(f"{db_field} IS NOT NULL")
1706
+ elif operator in ['eq', 'ne', 'gt', 'lt', 'gte', 'lte']:
1707
+ op_map = {
1708
+ 'eq': '=',
1709
+ 'ne': '!=',
1710
+ 'gt': '>',
1711
+ 'lt': '<',
1712
+ 'gte': '>=',
1713
+ 'lte': '<='
1714
+ }
1715
+ param_name = f'filter_{idx}_value'
1716
+ where_conditions.append(f"{db_field} {op_map[operator]} :{param_name}")
1717
+ params[param_name] = value
1718
+ elif operator == 'contains':
1719
+ param_name = f'filter_{idx}_value'
1720
+ # 对于JSON字段,需要转换为文本进行搜索
1721
+ if db_field in ['task_kwargs', 'tags', 'metadata']:
1722
+ where_conditions.append(f"{db_field}::text ILIKE :{param_name}")
1723
+ else:
1724
+ where_conditions.append(f"{db_field} ILIKE :{param_name}")
1725
+ params[param_name] = f'%{value}%'
1726
+ elif operator == 'starts_with':
1727
+ param_name = f'filter_{idx}_value'
1728
+ where_conditions.append(f"{db_field} ILIKE :{param_name}")
1729
+ params[param_name] = f'{value}%'
1730
+ elif operator == 'ends_with':
1731
+ param_name = f'filter_{idx}_value'
1732
+ where_conditions.append(f"{db_field} ILIKE :{param_name}")
1733
+ params[param_name] = f'%{value}'
1734
+ elif operator in ['in', 'not_in']:
1735
+ if isinstance(value, list):
1736
+ placeholders = []
1737
+ for i, v in enumerate(value):
1738
+ param_name = f'filter_{idx}_value_{i}'
1739
+ placeholders.append(f':{param_name}')
1740
+ params[param_name] = v
1741
+ op = 'IN' if operator == 'in' else 'NOT IN'
1742
+ where_conditions.append(f"{db_field} {op} ({','.join(placeholders)})")
1743
+ elif operator == 'json_key_exists' and db_field in ['task_kwargs', 'tags', 'metadata']:
1744
+ # JSON字段键存在检查
1745
+ param_name = f'filter_{idx}_value'
1746
+ where_conditions.append(f"{db_field}::jsonb ? :{param_name}")
1747
+ params[param_name] = value
1748
+ elif operator == 'json_path_value' and db_field in ['task_kwargs', 'tags', 'metadata']:
1749
+ # JSON路径值匹配 - 使用更简单的路径操作符
1750
+ if '=' in value:
1751
+ path, val = value.split('=', 1)
1752
+ path = path.strip()
1753
+ val = val.strip().strip('"').strip("'")
1754
+
1755
+ # 处理JSON路径
1756
+ if path.startswith('$.'):
1757
+ path = path[2:] # 移除 $.
1758
+
1759
+ # 特殊处理task_kwargs字段:
1760
+ # 前端显示的是task_data.kwargs.xxx,但数据库中task_kwargs直接存储的就是kwargs的内容
1761
+ # 所以需要移除kwargs.前缀
1762
+ if db_field == 'task_kwargs':
1763
+ if path.startswith('kwargs.'):
1764
+ path = path[7:] # 移除 'kwargs.' 前缀
1765
+ elif path.startswith('args.'):
1766
+ # args存储在task_args字段,这里不处理
1767
+ continue
1768
+
1769
+ # 分割路径
1770
+ path_parts = path.split('.')
1771
+ param_name = f'filter_{idx}_value'
1772
+
1773
+ if len(path_parts) == 1:
1774
+ # 单层路径:使用 ->> 操作符
1775
+ where_conditions.append(f"{db_field}::jsonb->>'{path_parts[0]}' = :{param_name}")
1776
+ else:
1777
+ # 多层路径:使用 #>> 操作符
1778
+ path_str = '{' + ','.join(path_parts) + '}'
1779
+ where_conditions.append(f"{db_field}::jsonb#>>'{path_str}' = :{param_name}")
1780
+
1781
+ params[param_name] = val
1782
+
1783
+ where_clause = " AND ".join(where_conditions) if where_conditions else "1=1"
1784
+
1785
+ # 计算总数
1786
+ count_query = text(f"""
1787
+ SELECT COUNT(*) as total
1788
+ FROM scheduled_tasks
1789
+ WHERE {where_clause}
1790
+ """)
1791
+ print(f'{count_query.text=}')
1792
+ count_result = await session.execute(count_query, params)
1793
+ total = count_result.scalar() or 0
1794
+
1795
+ # 获取分页数据
1796
+ offset = (page - 1) * page_size
1797
+ data_query = text(f"""
1798
+ SELECT
1799
+ id,
1800
+ scheduler_id,
1801
+ task_name as name,
1802
+ queue_name,
1803
+ task_type as schedule_type,
1804
+ task_args,
1805
+ task_kwargs,
1806
+ cron_expression,
1807
+ interval_seconds,
1808
+ enabled as is_active,
1809
+ description,
1810
+ tags,
1811
+ metadata,
1812
+ last_run_time as last_run,
1813
+ next_run_time as next_run,
1814
+ created_at,
1815
+ updated_at,
1816
+ COALESCE(execution_count, 0) as execution_count
1817
+ FROM scheduled_tasks
1818
+ WHERE {where_clause}
1819
+ ORDER BY created_at DESC, id ASC
1820
+ LIMIT :limit OFFSET :offset
1821
+ """)
1822
+ params['limit'] = page_size
1823
+ params['offset'] = offset
1824
+
1825
+ result = await session.execute(data_query, params)
1826
+ tasks = []
1827
+ for row in result:
1828
+ task = dict(row._mapping)
1829
+
1830
+ # 构建schedule_config字段
1831
+ if task['schedule_type'] == 'cron':
1832
+ task['schedule_config'] = {'cron_expression': task.get('cron_expression')}
1833
+ elif task['schedule_type'] == 'interval':
1834
+ task['schedule_config'] = {'seconds': float(task.get('interval_seconds', 0))}
1835
+ else:
1836
+ task['schedule_config'] = {}
1837
+
1838
+ # 合并task_args和task_kwargs为task_data
1839
+ task['task_data'] = {
1840
+ 'args': task.get('task_args', []),
1841
+ 'kwargs': task.get('task_kwargs', {})
1842
+ }
1843
+
1844
+ # 删除不需要的字段
1845
+ task.pop('task_args', None)
1846
+ task.pop('task_kwargs', None)
1847
+ task.pop('cron_expression', None)
1848
+ task.pop('interval_seconds', None)
1849
+ task.pop('scheduler_id', None)
1850
+
1851
+ # 转换时间字段为ISO格式字符串
1852
+ for field in ['last_run', 'next_run', 'created_at', 'updated_at']:
1853
+ if task.get(field):
1854
+ task[field] = task[field].isoformat()
1855
+
1856
+ tasks.append(task)
1857
+
1858
+ return tasks, total
1859
+
1860
+ except Exception as e:
1861
+ logger.error(f"获取定时任务列表失败: {e}")
1862
+ raise
1863
+
1864
+ async def create_scheduled_task(self, session, task_data: Dict) -> Dict:
1865
+ """创建定时任务"""
1866
+ try:
1867
+ # 生成scheduler_id
1868
+ scheduler_id = f"task_{datetime.now().strftime('%Y%m%d%H%M%S')}_{int(time.time() * 1000) % 100000}"
1869
+
1870
+ # 处理schedule_config
1871
+ cron_expression = None
1872
+ interval_seconds = None
1873
+ if task_data['schedule_type'] == 'cron':
1874
+ cron_expression = task_data['schedule_config'].get('cron_expression')
1875
+ elif task_data['schedule_type'] == 'interval':
1876
+ interval_seconds = task_data['schedule_config'].get('seconds', 60)
1877
+
1878
+ # 处理task_data -> task_args和task_kwargs
1879
+ task_args = task_data.get('task_data', {}).get('args', [])
1880
+ task_kwargs = task_data.get('task_data', {}).get('kwargs', {})
1881
+
1882
+ insert_query = text("""
1883
+ INSERT INTO scheduled_tasks (
1884
+ scheduler_id, task_name, queue_name, task_type,
1885
+ task_args, task_kwargs, cron_expression, interval_seconds,
1886
+ enabled, description
1887
+ ) VALUES (
1888
+ :scheduler_id, :task_name, :queue_name, :task_type,
1889
+ :task_args, :task_kwargs, :cron_expression, :interval_seconds,
1890
+ :enabled, :description
1891
+ )
1892
+ RETURNING *
1893
+ """)
1894
+
1895
+ params = {
1896
+ 'scheduler_id': scheduler_id,
1897
+ 'task_name': task_data['name'],
1898
+ 'queue_name': task_data['queue_name'],
1899
+ 'task_type': task_data['schedule_type'],
1900
+ 'task_args': json.dumps(task_args),
1901
+ 'task_kwargs': json.dumps(task_kwargs),
1902
+ 'cron_expression': cron_expression,
1903
+ 'interval_seconds': interval_seconds,
1904
+ 'enabled': task_data.get('is_active', True),
1905
+ 'description': task_data.get('description')
1906
+ }
1907
+
1908
+ result = await session.execute(insert_query, params)
1909
+ await session.commit()
1910
+
1911
+ created_task = dict(result.first()._mapping)
1912
+
1913
+ # 转换为前端格式
1914
+ created_task['name'] = created_task.pop('task_name', '')
1915
+ created_task['is_active'] = created_task.pop('enabled', True)
1916
+ created_task['schedule_type'] = created_task.pop('task_type', '')
1917
+
1918
+ # 构建schedule_config
1919
+ if created_task['schedule_type'] == 'cron':
1920
+ created_task['schedule_config'] = {'cron_expression': created_task.get('cron_expression')}
1921
+ elif created_task['schedule_type'] == 'interval':
1922
+ created_task['schedule_config'] = {'seconds': float(created_task.get('interval_seconds', 0))}
1923
+ else:
1924
+ created_task['schedule_config'] = {}
1925
+
1926
+ # 合并task_args和task_kwargs为task_data
1927
+ created_task['task_data'] = {
1928
+ 'args': created_task.get('task_args', []),
1929
+ 'kwargs': created_task.get('task_kwargs', {})
1930
+ }
1931
+
1932
+ # 删除不需要的字段
1933
+ created_task.pop('task_args', None)
1934
+ created_task.pop('task_kwargs', None)
1935
+ created_task.pop('cron_expression', None)
1936
+ created_task.pop('interval_seconds', None)
1937
+ created_task.pop('scheduler_id', None)
1938
+
1939
+ # 转换时间字段
1940
+ for field in ['last_run_time', 'next_run_time', 'created_at', 'updated_at']:
1941
+ if created_task.get(field):
1942
+ value = created_task.pop(field)
1943
+ # 重命名字段
1944
+ if field == 'last_run_time':
1945
+ created_task['last_run'] = value.isoformat()
1946
+ elif field == 'next_run_time':
1947
+ created_task['next_run'] = value.isoformat()
1948
+ else:
1949
+ created_task[field] = value.isoformat()
1950
+
1951
+ return created_task
1952
+
1953
+ except Exception as e:
1954
+ logger.error(f"创建定时任务失败: {e}")
1955
+ raise
1956
+
1957
+ async def update_scheduled_task(self, session, task_id: str, task_data: Dict) -> Dict:
1958
+ """更新定时任务"""
1959
+ try:
1960
+ # 处理schedule_config
1961
+ cron_expression = None
1962
+ interval_seconds = None
1963
+ if task_data['schedule_type'] == 'cron':
1964
+ cron_expression = task_data['schedule_config'].get('cron_expression')
1965
+ elif task_data['schedule_type'] == 'interval':
1966
+ interval_seconds = task_data['schedule_config'].get('seconds', 60)
1967
+
1968
+ # 处理task_data -> task_args和task_kwargs
1969
+ task_args = task_data.get('task_data', {}).get('args', [])
1970
+ task_kwargs = task_data.get('task_data', {}).get('kwargs', {})
1971
+
1972
+ update_query = text("""
1973
+ UPDATE scheduled_tasks SET
1974
+ task_name = :task_name,
1975
+ queue_name = :queue_name,
1976
+ task_type = :task_type,
1977
+ task_args = :task_args,
1978
+ task_kwargs = :task_kwargs,
1979
+ cron_expression = :cron_expression,
1980
+ interval_seconds = :interval_seconds,
1981
+ enabled = :enabled,
1982
+ description = :description,
1983
+ updated_at = CURRENT_TIMESTAMP
1984
+ WHERE id = :id
1985
+ RETURNING *
1986
+ """)
1987
+
1988
+ params = {
1989
+ 'id': task_id,
1990
+ 'task_name': task_data['name'],
1991
+ 'queue_name': task_data['queue_name'],
1992
+ 'task_type': task_data['schedule_type'],
1993
+ 'task_args': json.dumps(task_args),
1994
+ 'task_kwargs': json.dumps(task_kwargs),
1995
+ 'cron_expression': cron_expression,
1996
+ 'interval_seconds': interval_seconds,
1997
+ 'enabled': task_data.get('is_active', True),
1998
+ 'description': task_data.get('description')
1999
+ }
2000
+
2001
+ result = await session.execute(update_query, params)
2002
+ await session.commit()
2003
+
2004
+ if result.rowcount == 0:
2005
+ return {
2006
+ 'success': False,
2007
+ 'error': '任务不存在'
2008
+ }
2009
+
2010
+ updated_task = dict(result.first()._mapping)
2011
+ # 转换时间字段
2012
+ for field in ['last_run', 'next_run', 'created_at', 'updated_at']:
2013
+ if updated_task.get(field):
2014
+ updated_task[field] = updated_task[field].isoformat()
2015
+
2016
+ return {
2017
+ 'success': True,
2018
+ 'data': updated_task,
2019
+ 'message': '定时任务更新成功'
2020
+ }
2021
+
2022
+ except Exception as e:
2023
+ logger.error(f"更新定时任务失败: {e}")
2024
+ return {
2025
+ 'success': False,
2026
+ 'error': str(e)
2027
+ }
2028
+
2029
+ async def delete_scheduled_task(self, session, task_id: str) -> bool:
2030
+ """删除定时任务"""
2031
+ try:
2032
+ delete_query = text("""
2033
+ DELETE FROM scheduled_tasks
2034
+ WHERE id = :id
2035
+ """)
2036
+
2037
+ result = await session.execute(delete_query, {'id': task_id})
2038
+ await session.commit()
2039
+
2040
+ return result.rowcount > 0
2041
+
2042
+ except Exception as e:
2043
+ logger.error(f"删除定时任务失败: {e}")
2044
+ raise
2045
+
2046
    async def _sync_task_to_redis(self, task_id: str, enabled: bool):
        """Sync a task's enabled/disabled state into the scheduler's Redis keys.

        When *enabled*, the task row is re-read from the database and pushed
        back into the scheduler ZSET plus a short-lived detail key; when
        disabled, both keys are removed. Failures are only logged, never
        raised, so a Redis outage cannot break the primary DB operation.

        Args:
            task_id: Primary-key id of the scheduled task.
            enabled: Desired state; True re-registers the task, False removes it.
        """
        try:
            if not self._redis_pool:
                logger.debug("Redis not configured, skipping sync")
                return

            redis_client = await self.get_redis_client()

            # Key naming must match the scheduler's own convention:
            # the scheduler uses the prefix format {redis_prefix}:SCHEDULER.
            scheduler_prefix = f"{self.redis_prefix}:SCHEDULER"
            zset_key = f"{scheduler_prefix}:tasks"
            task_detail_key = f"{scheduler_prefix}:task:{task_id}"

            if enabled:
                # Re-enabling: reload the task from the DB so it can be
                # re-registered in Redis as a full ScheduledTask object.
                async with self.AsyncSessionLocal() as session:
                    query = text("""
                        SELECT * FROM scheduled_tasks
                        WHERE id = :id AND next_run_time IS NOT NULL
                    """)
                    result = await session.execute(query, {'id': task_id})
                    task_row = result.first()

                    if task_row and task_row.next_run_time:
                        # Imported lazily; module-level import would create a
                        # dependency on the scheduler package for all callers.
                        from jettask.scheduler.models import ScheduledTask, TaskType
                        from decimal import Decimal

                        # NUMERIC columns come back as Decimal; normalize to float.
                        interval_seconds = task_row.interval_seconds
                        if interval_seconds is not None and isinstance(interval_seconds, Decimal):
                            interval_seconds = float(interval_seconds)

                        # Build the ScheduledTask object. JSON columns may arrive
                        # either already decoded or as raw strings depending on
                        # the driver, hence the isinstance checks.
                        task = ScheduledTask(
                            id=task_row.id,
                            scheduler_id=task_row.scheduler_id,
                            task_name=task_row.task_name,
                            task_type=TaskType(task_row.task_type) if task_row.task_type else TaskType.INTERVAL,
                            queue_name=task_row.queue_name,
                            task_args=task_row.task_args if isinstance(task_row.task_args, list) else json.loads(task_row.task_args or '[]'),
                            task_kwargs=task_row.task_kwargs if isinstance(task_row.task_kwargs, dict) else json.loads(task_row.task_kwargs or '{}'),
                            cron_expression=task_row.cron_expression,
                            interval_seconds=interval_seconds,
                            next_run_time=task_row.next_run_time,
                            last_run_time=task_row.last_run_time,
                            enabled=task_row.enabled,
                            max_retries=task_row.max_retries or 3,
                            retry_delay=task_row.retry_delay or 60,
                            timeout=task_row.timeout or 300,
                            description=task_row.description,
                            tags=task_row.tags if isinstance(task_row.tags, list) else (json.loads(task_row.tags) if task_row.tags else []),
                            metadata=task_row.metadata if isinstance(task_row.metadata, dict) else (json.loads(task_row.metadata) if task_row.metadata else None),
                            created_at=task_row.created_at,
                            updated_at=task_row.updated_at
                        )

                        # Add to the ZSET (used for scheduling), scored by the
                        # next run timestamp.
                        score = task.next_run_time.timestamp()
                        await redis_client.zadd(zset_key, {str(task_id): score})

                        # Store the task detail (serialized via ScheduledTask's
                        # to_redis_value method).
                        await redis_client.setex(
                            task_detail_key,
                            300,  # expires after 5 minutes
                            task.to_redis_value()
                        )
                        logger.info(f"Task {task_id} re-enabled and synced to Redis")
            else:
                # Disabling: remove the task from both Redis structures.
                await redis_client.zrem(zset_key, str(task_id))
                await redis_client.delete(task_detail_key)
                logger.info(f"Task {task_id} disabled and removed from Redis")

            await redis_client.close()

        except Exception as e:
            # Redis sync failure must not affect the primary operation.
            logger.warning(f"Failed to sync task {task_id} to Redis: {e}")
2128
+
2129
+ async def toggle_scheduled_task(self, session, task_id: str) -> Dict:
2130
+ """切换定时任务状态"""
2131
+ try:
2132
+ # 先获取当前状态
2133
+ get_query = text("SELECT enabled FROM scheduled_tasks WHERE id = :id")
2134
+ result = await session.execute(get_query, {'id': task_id})
2135
+ row = result.first()
2136
+
2137
+ if not row:
2138
+ return None
2139
+ print(f'{row.enabled=}')
2140
+ # 切换状态
2141
+ new_status = not row.enabled
2142
+ update_query = text("""
2143
+ UPDATE scheduled_tasks
2144
+ SET enabled = :enabled, updated_at = CURRENT_TIMESTAMP
2145
+ WHERE id = :id
2146
+ RETURNING id, enabled
2147
+ """)
2148
+
2149
+ result = await session.execute(update_query, {
2150
+ 'id': task_id,
2151
+ 'enabled': new_status
2152
+ })
2153
+ await session.commit()
2154
+
2155
+ updated_task = dict(result.first()._mapping)
2156
+
2157
+ # 立即同步到 Redis
2158
+ await self._sync_task_to_redis(task_id, new_status)
2159
+
2160
+ return updated_task
2161
+
2162
+ except Exception as e:
2163
+ logger.error(f"切换定时任务状态失败: {e}")
2164
+ raise
2165
+
2166
+ async def get_scheduled_task_by_id(self, session, task_id: str) -> Optional[Dict]:
2167
+ """根据ID获取定时任务详情"""
2168
+ try:
2169
+ query = text("""
2170
+ SELECT
2171
+ id,
2172
+ scheduler_id,
2173
+ task_name,
2174
+ queue_name,
2175
+ task_type,
2176
+ interval_seconds,
2177
+ cron_expression,
2178
+ next_run_time,
2179
+ last_run_time,
2180
+ enabled,
2181
+ task_args,
2182
+ task_kwargs,
2183
+ description,
2184
+ max_retries,
2185
+ retry_delay,
2186
+ timeout,
2187
+ created_at,
2188
+ updated_at
2189
+ FROM scheduled_tasks
2190
+ WHERE id = :task_id
2191
+ LIMIT 1
2192
+ """)
2193
+
2194
+ result = await session.execute(query, {"task_id": int(task_id)})
2195
+ row = result.first()
2196
+
2197
+ if row:
2198
+ task = dict(row._mapping)
2199
+ # 处理JSON字段
2200
+ if task.get('task_args') and isinstance(task['task_args'], str):
2201
+ import json
2202
+ try:
2203
+ task['task_args'] = json.loads(task['task_args'])
2204
+ except:
2205
+ task['task_args'] = []
2206
+
2207
+ if task.get('task_kwargs') and isinstance(task['task_kwargs'], str):
2208
+ import json
2209
+ try:
2210
+ task['task_kwargs'] = json.loads(task['task_kwargs'])
2211
+ except:
2212
+ task['task_kwargs'] = {}
2213
+
2214
+ return task
2215
+ return None
2216
+
2217
+ except Exception as e:
2218
+ logger.error(f"获取定时任务详情失败: {e}")
2219
+ raise
2220
+
2221
+ async def fetch_task_execution_history(self,
2222
+ session,
2223
+ task_id: str,
2224
+ page: int = 1,
2225
+ page_size: int = 20) -> tuple:
2226
+ """获取定时任务执行历史"""
2227
+ try:
2228
+ # 计算总数
2229
+ count_query = text("""
2230
+ SELECT COUNT(*) as total
2231
+ FROM tasks
2232
+ WHERE scheduled_task_id = :task_id
2233
+ """)
2234
+ count_result = await session.execute(count_query, {'task_id': task_id})
2235
+ total = count_result.scalar() or 0
2236
+
2237
+ # 获取分页数据
2238
+ offset = (page - 1) * page_size
2239
+ data_query = text("""
2240
+ SELECT
2241
+ id,
2242
+ scheduled_task_id as task_id,
2243
+ status,
2244
+ created_at as scheduled_time,
2245
+ started_at,
2246
+ completed_at as finished_at,
2247
+ error_message,
2248
+ result as task_result,
2249
+ retry_count,
2250
+ execution_time,
2251
+ worker_id
2252
+ FROM tasks
2253
+ WHERE scheduled_task_id = :task_id
2254
+ ORDER BY created_at DESC
2255
+ LIMIT :limit OFFSET :offset
2256
+ """)
2257
+
2258
+ result = await session.execute(data_query, {
2259
+ 'task_id': task_id,
2260
+ 'limit': page_size,
2261
+ 'offset': offset
2262
+ })
2263
+
2264
+ history = []
2265
+ for row in result:
2266
+ record = dict(row._mapping)
2267
+ # 转换时间字段
2268
+ for field in ['scheduled_time', 'started_at', 'finished_at']:
2269
+ if record.get(field):
2270
+ record[field] = record[field].isoformat()
2271
+ # 计算执行时长(毫秒)
2272
+ if record.get('execution_time'):
2273
+ record['duration_ms'] = int(record['execution_time'] * 1000)
2274
+ history.append(record)
2275
+
2276
+ return history, total
2277
+
2278
+ except Exception as e:
2279
+ logger.error(f"获取任务执行历史失败: {e}")
2280
+ raise
2281
+
2282
+ async def fetch_task_execution_trend(self,
2283
+ session,
2284
+ task_id: str,
2285
+ time_range: str = '7d') -> list:
2286
+ """获取定时任务执行趋势"""
2287
+ try:
2288
+ # 根据时间范围计算开始时间
2289
+ now = datetime.now(timezone.utc)
2290
+ if time_range == '24h':
2291
+ start_time = now - timedelta(hours=24)
2292
+ interval = 'hour'
2293
+ elif time_range == '7d':
2294
+ start_time = now - timedelta(days=7)
2295
+ interval = 'day'
2296
+ elif time_range == '30d':
2297
+ start_time = now - timedelta(days=30)
2298
+ interval = 'day'
2299
+ else:
2300
+ start_time = now - timedelta(days=7)
2301
+ interval = 'day'
2302
+
2303
+ # 查询执行趋势(从tasks表)
2304
+ trend_query = text(f"""
2305
+ SELECT
2306
+ date_trunc(:interval, COALESCE(started_at, created_at)) as time,
2307
+ COUNT(*) as total,
2308
+ COUNT(CASE WHEN status = 'success' THEN 1 END) as success,
2309
+ COUNT(CASE WHEN status = 'error' THEN 1 END) as error
2310
+ FROM tasks
2311
+ WHERE scheduled_task_id = :task_id
2312
+ AND COALESCE(started_at, created_at) >= :start_time
2313
+ GROUP BY date_trunc(:interval, COALESCE(started_at, created_at))
2314
+ ORDER BY time ASC
2315
+ """)
2316
+
2317
+ result = await session.execute(trend_query, {
2318
+ 'task_id': task_id,
2319
+ 'start_time': start_time,
2320
+ 'interval': interval
2321
+ })
2322
+
2323
+ data = []
2324
+ for row in result:
2325
+ record = dict(row._mapping)
2326
+ record['time'] = record['time'].isoformat()
2327
+ data.append(record)
2328
+
2329
+ return data
2330
+
2331
+ except Exception as e:
2332
+ logger.error(f"获取任务执行趋势失败: {e}")
2333
+ raise