jettask 0.2.5__py3-none-any.whl → 0.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jettask/monitor/run_backlog_collector.py +96 -0
- jettask/monitor/stream_backlog_monitor.py +362 -0
- jettask/pg_consumer/pg_consumer_v2.py +403 -0
- jettask/pg_consumer/sql_utils.py +182 -0
- jettask/scheduler/__init__.py +17 -0
- jettask/scheduler/add_execution_count.sql +11 -0
- jettask/scheduler/add_priority_field.sql +26 -0
- jettask/scheduler/add_scheduler_id.sql +25 -0
- jettask/scheduler/add_scheduler_id_index.sql +10 -0
- jettask/scheduler/loader.py +249 -0
- jettask/scheduler/make_scheduler_id_required.sql +28 -0
- jettask/scheduler/manager.py +696 -0
- jettask/scheduler/migrate_interval_seconds.sql +9 -0
- jettask/scheduler/models.py +200 -0
- jettask/scheduler/multi_namespace_scheduler.py +294 -0
- jettask/scheduler/performance_optimization.sql +45 -0
- jettask/scheduler/run_scheduler.py +186 -0
- jettask/scheduler/scheduler.py +715 -0
- jettask/scheduler/schema.sql +84 -0
- jettask/scheduler/unified_manager.py +450 -0
- jettask/scheduler/unified_scheduler_manager.py +280 -0
- jettask/webui/backend/api/__init__.py +3 -0
- jettask/webui/backend/api/v1/__init__.py +17 -0
- jettask/webui/backend/api/v1/monitoring.py +431 -0
- jettask/webui/backend/api/v1/namespaces.py +504 -0
- jettask/webui/backend/api/v1/queues.py +342 -0
- jettask/webui/backend/api/v1/tasks.py +367 -0
- jettask/webui/backend/core/__init__.py +3 -0
- jettask/webui/backend/core/cache.py +221 -0
- jettask/webui/backend/core/database.py +200 -0
- jettask/webui/backend/core/exceptions.py +102 -0
- jettask/webui/backend/models/__init__.py +3 -0
- jettask/webui/backend/models/requests.py +236 -0
- jettask/webui/backend/models/responses.py +230 -0
- jettask/webui/backend/services/__init__.py +3 -0
- jettask/webui/frontend/index.html +13 -0
- jettask/webui/models/__init__.py +3 -0
- jettask/webui/models/namespace.py +63 -0
- jettask/webui/sql/batch_upsert_functions.sql +178 -0
- jettask/webui/sql/init_database.sql +640 -0
- {jettask-0.2.5.dist-info → jettask-0.2.7.dist-info}/METADATA +80 -10
- {jettask-0.2.5.dist-info → jettask-0.2.7.dist-info}/RECORD +46 -53
- jettask/webui/frontend/package-lock.json +0 -4833
- jettask/webui/frontend/package.json +0 -30
- jettask/webui/frontend/src/App.css +0 -109
- jettask/webui/frontend/src/App.jsx +0 -66
- jettask/webui/frontend/src/components/NamespaceSelector.jsx +0 -166
- jettask/webui/frontend/src/components/QueueBacklogChart.jsx +0 -298
- jettask/webui/frontend/src/components/QueueBacklogTrend.jsx +0 -638
- jettask/webui/frontend/src/components/QueueDetailsTable.css +0 -65
- jettask/webui/frontend/src/components/QueueDetailsTable.jsx +0 -487
- jettask/webui/frontend/src/components/QueueDetailsTableV2.jsx +0 -465
- jettask/webui/frontend/src/components/ScheduledTaskFilter.jsx +0 -423
- jettask/webui/frontend/src/components/TaskFilter.jsx +0 -425
- jettask/webui/frontend/src/components/TimeRangeSelector.css +0 -21
- jettask/webui/frontend/src/components/TimeRangeSelector.jsx +0 -160
- jettask/webui/frontend/src/components/charts/QueueChart.jsx +0 -111
- jettask/webui/frontend/src/components/charts/QueueTrendChart.jsx +0 -115
- jettask/webui/frontend/src/components/charts/WorkerChart.jsx +0 -40
- jettask/webui/frontend/src/components/common/StatsCard.jsx +0 -18
- jettask/webui/frontend/src/components/layout/AppLayout.css +0 -95
- jettask/webui/frontend/src/components/layout/AppLayout.jsx +0 -49
- jettask/webui/frontend/src/components/layout/Header.css +0 -106
- jettask/webui/frontend/src/components/layout/Header.jsx +0 -106
- jettask/webui/frontend/src/components/layout/SideMenu.css +0 -137
- jettask/webui/frontend/src/components/layout/SideMenu.jsx +0 -209
- jettask/webui/frontend/src/components/layout/TabsNav.css +0 -244
- jettask/webui/frontend/src/components/layout/TabsNav.jsx +0 -206
- jettask/webui/frontend/src/components/layout/UserInfo.css +0 -197
- jettask/webui/frontend/src/components/layout/UserInfo.jsx +0 -197
- jettask/webui/frontend/src/contexts/LoadingContext.jsx +0 -27
- jettask/webui/frontend/src/contexts/NamespaceContext.jsx +0 -72
- jettask/webui/frontend/src/contexts/TabsContext.backup.jsx +0 -245
- jettask/webui/frontend/src/index.css +0 -114
- jettask/webui/frontend/src/main.jsx +0 -20
- jettask/webui/frontend/src/pages/Alerts.jsx +0 -684
- jettask/webui/frontend/src/pages/Dashboard/index.css +0 -35
- jettask/webui/frontend/src/pages/Dashboard/index.jsx +0 -281
- jettask/webui/frontend/src/pages/Dashboard.jsx +0 -1330
- jettask/webui/frontend/src/pages/QueueDetail.jsx +0 -1117
- jettask/webui/frontend/src/pages/QueueMonitor.jsx +0 -527
- jettask/webui/frontend/src/pages/Queues.jsx +0 -12
- jettask/webui/frontend/src/pages/ScheduledTasks.jsx +0 -809
- jettask/webui/frontend/src/pages/Settings.jsx +0 -800
- jettask/webui/frontend/src/pages/Workers.jsx +0 -12
- jettask/webui/frontend/src/services/api.js +0 -114
- jettask/webui/frontend/src/services/queueTrend.js +0 -152
- jettask/webui/frontend/src/utils/suppressWarnings.js +0 -22
- jettask/webui/frontend/src/utils/userPreferences.js +0 -154
- {jettask-0.2.5.dist-info → jettask-0.2.7.dist-info}/WHEEL +0 -0
- {jettask-0.2.5.dist-info → jettask-0.2.7.dist-info}/entry_points.txt +0 -0
- {jettask-0.2.5.dist-info → jettask-0.2.7.dist-info}/licenses/LICENSE +0 -0
- {jettask-0.2.5.dist-info → jettask-0.2.7.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,696 @@
|
|
1
|
+
"""
|
2
|
+
定时任务管理器 - 负责数据库CRUD操作
|
3
|
+
"""
|
4
|
+
import asyncio
|
5
|
+
import asyncpg
|
6
|
+
from typing import Optional, List, Dict, Any
|
7
|
+
from datetime import datetime, timedelta
|
8
|
+
import json
|
9
|
+
|
10
|
+
from .models import ScheduledTask, TaskExecutionHistory, TaskType, TaskStatus
|
11
|
+
|
12
|
+
|
13
|
+
class ScheduledTaskManager:
    """Scheduled-task database manager: CRUD over PostgreSQL via asyncpg."""

    def __init__(self, app_or_db_url):
        """
        Initialize the manager.

        Args:
            app_or_db_url: either a Jettask application instance or a
                PostgreSQL connection URL string.
        """
        # Two initialization styles are supported: pass the app object,
        # or pass the database URL directly.
        if isinstance(app_or_db_url, str):
            self.db_url = app_or_db_url
        else:
            # Read pg_url off the app object.
            self.db_url = app_or_db_url.pg_url
        self.pool: Optional[asyncpg.Pool] = None

    async def connect(self) -> None:
        """Create the database connection pool (no-op if already connected)."""
        if not self.pool:
            self.pool = await asyncpg.create_pool(
                self.db_url,
                min_size=2,
                max_size=10,
                command_timeout=60
            )

    async def disconnect(self) -> None:
        """Close and drop the connection pool."""
        if self.pool:
            await self.pool.close()
            self.pool = None

    async def init_schema(self) -> None:
        """Initialize database tables from schema.sql (idempotent)."""
        import os
        schema_path = os.path.join(os.path.dirname(__file__), 'schema.sql')

        with open(schema_path, 'r') as f:
            schema_sql = f.read()

        async with self.pool.acquire() as conn:
            # Swallow "already exists" errors so repeated initialization is
            # harmless; anything else is a real failure and is re-raised.
            try:
                await conn.execute(schema_sql)
            except Exception as e:
                if "already exists" in str(e):
                    # Table or index already exists — expected on re-run.
                    pass
                else:
                    # Any other error is re-raised.
                    raise

    # ==================== Task CRUD operations ====================

    async def create_task(self, task: ScheduledTask) -> ScheduledTask:
        """Insert a scheduled task and return the stored row as an object."""
        sql = """
            INSERT INTO scheduled_tasks (
                scheduler_id, task_name, task_type, queue_name, namespace,
                task_args, task_kwargs, cron_expression, interval_seconds,
                next_run_time, enabled, max_retries, retry_delay, timeout,
                priority, description, tags, metadata
            ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)
            RETURNING *
        """

        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(
                sql,
                task.scheduler_id,
                task.task_name,
                task.task_type.value,
                task.queue_name,
                task.namespace,  # namespace column
                json.dumps(task.task_args),
                json.dumps(task.task_kwargs),
                task.cron_expression,
                task.interval_seconds,
                task.next_run_time,
                task.enabled,
                task.max_retries,
                task.retry_delay,
                task.timeout,
                task.priority,
                task.description,
                json.dumps(task.tags),
                json.dumps(task.metadata)
            )

            return self._row_to_task(row)

    async def get_task(self, task_id: int) -> Optional[ScheduledTask]:
        """Fetch a single task by primary key; None if not found."""
        sql = "SELECT * FROM scheduled_tasks WHERE id = $1"

        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(sql, task_id)
            if row:
                return self._row_to_task(row)
            return None

    async def get_task_by_scheduler_id(self, scheduler_id: str) -> Optional[ScheduledTask]:
        """Fetch a task by its scheduler_id; None if not found."""
        sql = "SELECT * FROM scheduled_tasks WHERE scheduler_id = $1"

        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(sql, scheduler_id)
            if row:
                return self._row_to_task(row)
            return None

    async def update_task(self, task: ScheduledTask) -> ScheduledTask:
        """Overwrite every mutable column of a task and return the new row."""
        sql = """
            UPDATE scheduled_tasks SET
                scheduler_id = $2,
                task_name = $3,
                task_type = $4,
                queue_name = $5,
                namespace = $6,
                task_args = $7,
                task_kwargs = $8,
                cron_expression = $9,
                interval_seconds = $10,
                next_run_time = $11,
                last_run_time = $12,
                enabled = $13,
                max_retries = $14,
                retry_delay = $15,
                timeout = $16,
                priority = $17,
                description = $18,
                tags = $19,
                metadata = $20
            WHERE id = $1
            RETURNING *
        """

        async with self.pool.acquire() as conn:
            row = await conn.fetchrow(
                sql,
                task.id,
                task.scheduler_id,
                task.task_name,
                task.task_type.value,
                task.queue_name,
                task.namespace,  # namespace column
                json.dumps(task.task_args),
                json.dumps(task.task_kwargs),
                task.cron_expression,
                task.interval_seconds,
                task.next_run_time,
                task.last_run_time,
                task.enabled,
                task.max_retries,
                task.retry_delay,
                task.timeout,
                task.priority,
                task.description,
                json.dumps(task.tags),
                json.dumps(task.metadata)
            )

            return self._row_to_task(row)

    async def delete_task(self, task_id: int) -> bool:
        """Delete a task; True if a row was actually removed."""
        sql = "DELETE FROM scheduled_tasks WHERE id = $1"

        async with self.pool.acquire() as conn:
            result = await conn.execute(sql, task_id)
            # asyncpg returns a status tag like "DELETE 1"; the last token
            # is the affected-row count.
            return result.split()[-1] != '0'

    async def list_tasks(
        self,
        enabled: Optional[bool] = None,
        task_type: Optional[TaskType] = None,
        queue_name: Optional[str] = None,
        limit: int = 100,
        offset: int = 0
    ) -> List[ScheduledTask]:
        """List tasks with optional filters, newest first, paginated."""
        conditions = []
        params = []
        param_count = 0

        # Build the WHERE clause dynamically; only the placeholder numbers
        # are interpolated into the SQL text — values go through params.
        if enabled is not None:
            param_count += 1
            conditions.append(f"enabled = ${param_count}")
            params.append(enabled)

        if task_type is not None:
            param_count += 1
            conditions.append(f"task_type = ${param_count}")
            params.append(task_type.value)

        if queue_name is not None:
            param_count += 1
            conditions.append(f"queue_name = ${param_count}")
            params.append(queue_name)

        where_clause = f"WHERE {' AND '.join(conditions)}" if conditions else ""

        param_count += 1
        limit_param = f"${param_count}"
        params.append(limit)

        param_count += 1
        offset_param = f"${param_count}"
        params.append(offset)

        sql = f"""
            SELECT * FROM scheduled_tasks
            {where_clause}
            ORDER BY created_at DESC
            LIMIT {limit_param} OFFSET {offset_param}
        """

        async with self.pool.acquire() as conn:
            rows = await conn.fetch(sql, *params)
            return [self._row_to_task(row) for row in rows]

    async def get_ready_tasks(
        self,
        batch_size: int = 100,
        lookahead_seconds: int = 60
    ) -> List[ScheduledTask]:
        """
        Fetch tasks that are due (or will be due shortly).

        Args:
            batch_size: maximum number of tasks to return.
            lookahead_seconds: how far into the future to look.
        """
        # NOTE(review): datetime.now() is naive while the column is compared
        # as-is — confirm server/client timezone assumptions match.
        cutoff_time = datetime.now() + timedelta(seconds=lookahead_seconds)

        sql = """
            SELECT * FROM scheduled_tasks
            WHERE enabled = true
            AND next_run_time <= $1
            AND next_run_time IS NOT NULL
            ORDER BY next_run_time
            LIMIT $2
        """

        async with self.pool.acquire() as conn:
            rows = await conn.fetch(sql, cutoff_time, batch_size)
            return [self._row_to_task(row) for row in rows]

    async def update_task_next_run(
        self,
        task_id: int,
        next_run_time: Optional[datetime],
        last_run_time: datetime
    ) -> None:
        """Update a task's next/last run timestamps."""
        sql = """
            UPDATE scheduled_tasks
            SET next_run_time = $2, last_run_time = $3
            WHERE id = $1
        """

        async with self.pool.acquire() as conn:
            await conn.execute(sql, task_id, next_run_time, last_run_time)

    async def disable_once_task(self, task_id: int) -> None:
        """Disable a one-shot task (touches only the necessary columns)."""
        sql = """
            UPDATE scheduled_tasks
            SET enabled = false, next_run_time = NULL
            WHERE id = $1
        """

        async with self.pool.acquire() as conn:
            await conn.execute(sql, task_id)

    async def batch_update_next_run_times(self, updates: List[tuple]) -> None:
        """
        Batch-update next/last run times and bump the execution counter.

        Args:
            updates: tuples of (task_id, next_run_time, last_run_time).
        """
        if not updates:
            return

        sql = """
            UPDATE scheduled_tasks
            SET next_run_time = u.next_run_time,
                last_run_time = u.last_run_time,
                execution_count = COALESCE(execution_count, 0) + 1
            FROM (VALUES ($1::int, $2::timestamptz, $3::timestamptz)) AS u(id, next_run_time, last_run_time)
            WHERE scheduled_tasks.id = u.id
        """

        async with self.pool.acquire() as conn:
            # executemany runs one statement per tuple within one round trip.
            await conn.executemany(sql, updates)

    async def batch_disable_once_tasks(self, task_ids: List[int]) -> None:
        """Disable a batch of one-shot tasks in a single statement."""
        if not task_ids:
            return

        sql = """
            UPDATE scheduled_tasks
            SET enabled = false, next_run_time = NULL
            WHERE id = ANY($1)
        """

        async with self.pool.acquire() as conn:
            await conn.execute(sql, task_ids)

    # ==================== Execution-history operations ====================

    async def record_execution(self, history: TaskExecutionHistory):
        """Record one task execution as a new row in the tasks table."""
        # A new row in `tasks` is linked back to the scheduled task via
        # scheduled_task_id.
        sql = """
            INSERT INTO tasks (
                queue_name, status, scheduled_task_id, scheduled_time,
                started_at, finished_at, duration_ms, worker_id,
                error_message, retry_count, result
            ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
            RETURNING id
        """

        # Look up the scheduled task to learn which queue it belongs to.
        # NOTE(review): this costs an extra query per call; callers doing
        # bulk writes should prefer batch_record_executions.
        task_info = await self.get_task(history.task_id)
        queue_name = task_info.queue_name if task_info else 'default'

        async with self.pool.acquire() as conn:
            task_id = await conn.fetchval(
                sql,
                queue_name,
                history.status.value,
                history.task_id,  # scheduled_task_id
                history.scheduled_time,
                history.started_at,
                history.finished_at,
                history.duration_ms,
                history.worker_id,
                history.error_message,
                history.retry_count,
                json.dumps(history.result) if history.result else None
            )
            return task_id

    async def get_task_history(
        self,
        task_id: int,
        limit: int = 100,
        status: Optional[TaskStatus] = None
    ) -> List[Dict[str, Any]]:
        """Read a task's execution history from the tasks table."""
        if status:
            sql = """
                SELECT id, queue_name, status, scheduled_task_id,
                       scheduled_time, started_at, finished_at,
                       duration_ms, worker_id, error_message,
                       retry_count, result, created_at
                FROM tasks
                WHERE scheduled_task_id = $1 AND status = $2
                ORDER BY created_at DESC
                LIMIT $3
            """
            params = [task_id, status.value, limit]
        else:
            sql = """
                SELECT id, queue_name, status, scheduled_task_id,
                       scheduled_time, started_at, finished_at,
                       duration_ms, worker_id, error_message,
                       retry_count, result, created_at
                FROM tasks
                WHERE scheduled_task_id = $1
                ORDER BY created_at DESC
                LIMIT $2
            """
            params = [task_id, limit]

        async with self.pool.acquire() as conn:
            rows = await conn.fetch(sql, *params)
            return [dict(row) for row in rows]

    async def cleanup_old_history(self, days: int = 30):
        """Delete execution-history rows older than `days`; returns count."""
        cutoff_date = datetime.now() - timedelta(days=days)

        # Only rows that came from a scheduled task are touched.
        sql = "DELETE FROM tasks WHERE scheduled_task_id IS NOT NULL AND created_at < $1"

        async with self.pool.acquire() as conn:
            result = await conn.execute(sql, cutoff_date)
            # Status tag is e.g. "DELETE 42"; last token is the row count.
            return int(result.split()[-1])

    # ==================== Batch operations ====================

    async def batch_record_executions(self, histories: List[TaskExecutionHistory]) -> None:
        """Batch-insert execution records into the tasks table."""
        if not histories:
            return

        # Resolve queue/task names for every referenced scheduled task in
        # one query instead of one lookup per history entry.
        task_ids = list(set(h.task_id for h in histories))
        task_info_map = {}

        if task_ids:
            async with self.pool.acquire() as conn:
                rows = await conn.fetch(
                    "SELECT id, queue_name, task_name FROM scheduled_tasks WHERE id = ANY($1)",
                    task_ids
                )
                task_info_map = {row['id']: {'queue_name': row['queue_name'], 'task_name': row['task_name']} for row in rows}

        sql = """
            INSERT INTO tasks (
                id, queue_name, task_name, status, scheduled_task_id, scheduled_time,
                started_at, finished_at, duration_ms, worker_id,
                error_message, retry_count, result
            ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
        """

        # Build the rows to insert.
        data = []
        for history in histories:
            task_info = task_info_map.get(history.task_id, {})
            queue_name = task_info.get('queue_name', 'default')
            task_name = task_info.get('task_name', 'scheduled_task')

            data.append((
                history.event_id,  # the tasks.id column takes the event_id
                queue_name,
                task_name,  # task_name
                history.status.value if isinstance(history.status, TaskStatus) else history.status,
                history.task_id,  # scheduled_task_id
                history.scheduled_time,
                history.started_at,
                history.finished_at,
                history.duration_ms,
                history.worker_id,
                history.error_message,
                history.retry_count,
                json.dumps(history.result) if history.result else None
            ))

        async with self.pool.acquire() as conn:
            # Bulk insert via executemany.
            await conn.executemany(sql, data)

    async def batch_update_tasks(self, tasks: List[ScheduledTask]) -> None:
        """Batch-update a list of tasks (subset of columns; see SQL)."""
        if not tasks:
            return

        # NOTE(review): unlike update_task, this statement does not touch
        # namespace/priority/tags — confirm that is intentional.
        sql = """
            UPDATE scheduled_tasks SET
                scheduler_id = $2,
                task_name = $3,
                task_type = $4,
                queue_name = $5,
                task_args = $6,
                task_kwargs = $7,
                cron_expression = $8,
                interval_seconds = $9,
                next_run_time = $10,
                last_run_time = $11,
                enabled = $12,
                max_retries = $13,
                retry_delay = $14,
                timeout = $15,
                description = $16,
                metadata = $17,
                updated_at = $18
            WHERE id = $1
        """

        # Build one parameter tuple per task.
        data = []
        now = datetime.now()
        for task in tasks:
            data.append((
                task.id,
                task.scheduler_id,
                task.task_name,
                task.task_type.value if isinstance(task.task_type, TaskType) else task.task_type,
                task.queue_name,
                json.dumps(task.task_args) if task.task_args else '[]',
                json.dumps(task.task_kwargs) if task.task_kwargs else '{}',
                task.cron_expression,
                task.interval_seconds,
                task.next_run_time,
                task.last_run_time,
                task.enabled,
                task.max_retries,
                task.retry_delay,
                task.timeout,
                task.description,
                json.dumps(task.metadata) if task.metadata else None,
                now
            ))

        async with self.pool.acquire() as conn:
            # Bulk update via executemany.
            await conn.executemany(sql, data)

    # ==================== Helpers ====================

    def _row_to_task(self, row) -> ScheduledTask:
        """Convert a database row into a ScheduledTask object."""
        from decimal import Decimal

        # interval_seconds is NUMERIC in the DB, so asyncpg may hand back a
        # Decimal; normalize to float.
        interval_seconds = row['interval_seconds']
        if interval_seconds is not None and isinstance(interval_seconds, Decimal):
            interval_seconds = float(interval_seconds)

        # NOTE(review): row.get(...) requires asyncpg's Record.get, which is
        # only available in recent asyncpg releases — confirm the pinned
        # dependency version supports it.
        return ScheduledTask(
            id=row['id'],
            scheduler_id=row['scheduler_id'],
            task_name=row['task_name'],
            task_type=TaskType(row['task_type']),
            queue_name=row['queue_name'],
            namespace=row.get('namespace', 'default'),  # namespace column
            task_args=row['task_args'] if isinstance(row['task_args'], list) else json.loads(row['task_args']),
            task_kwargs=row['task_kwargs'] if isinstance(row['task_kwargs'], dict) else json.loads(row['task_kwargs']),
            cron_expression=row['cron_expression'],
            interval_seconds=interval_seconds,
            next_run_time=row['next_run_time'],
            last_run_time=row['last_run_time'],
            enabled=row['enabled'],
            max_retries=row['max_retries'],
            retry_delay=row['retry_delay'],
            timeout=row['timeout'],
            priority=row.get('priority'),
            description=row['description'],
            tags=row['tags'] if isinstance(row['tags'], list) else (json.loads(row['tags']) if row['tags'] else []),
            metadata=row['metadata'] if isinstance(row['metadata'], dict) else (json.loads(row['metadata']) if row['metadata'] else None),
            created_at=row['created_at'],
            updated_at=row['updated_at']
        )

    def _row_to_history(self, row) -> Dict[str, Any]:
        """Convert a database row into a plain history dict."""
        return {
            'id': row['id'],
            'scheduled_task_id': row['scheduled_task_id'],
            'status': row['status'],
            'scheduled_time': row['scheduled_time'],
            'started_at': row['started_at'],
            'finished_at': row['finished_at'],
            'duration_ms': row['duration_ms'],
            'worker_id': row['worker_id'],
            'error_message': row['error_message'],
            'retry_count': row['retry_count'],
            'result': row['result'] if isinstance(row['result'], dict) else json.loads(row['result']) if row['result'] else None,
            'created_at': row['created_at']
        }

    async def create_or_get_task(self, task: ScheduledTask, skip_if_exists: bool = True) -> tuple[ScheduledTask, bool]:
        """
        Create a task, or return the already-existing one.

        Args:
            task: the task object to create.
            skip_if_exists: if the task already exists, True returns it and
                False raises ValueError.

        Returns:
            (task, created): the task and whether it was newly created.
        """
        # NOTE(review): this is check-then-create, so two concurrent callers
        # can race; the unique constraint on scheduler_id is the backstop.
        if task.scheduler_id:
            # Check whether it already exists first.
            existing = await self.get_task_by_scheduler_id(task.scheduler_id)
            if existing:
                if skip_if_exists:
                    return existing, False
                else:
                    raise ValueError(f"Task with scheduler_id '{task.scheduler_id}' already exists")

        # Create the new task.
        created_task = await self.create_task(task)
        return created_task, True

    async def batch_create_tasks(self, tasks: List[ScheduledTask], skip_existing: bool = True) -> List[ScheduledTask]:
        """
        Batch-create tasks (optimized path).

        Args:
            tasks: tasks to create.
            skip_existing: whether to skip tasks whose scheduler_id exists.

        Returns:
            The tasks that were actually created.
        """
        if not tasks:
            return []

        async with self.pool.acquire() as conn:
            # 1. Find scheduler_ids that already exist, in one query.
            scheduler_ids = [t.scheduler_id for t in tasks if t.scheduler_id]
            existing_ids = set()

            if scheduler_ids and skip_existing:
                rows = await conn.fetch(
                    "SELECT scheduler_id FROM scheduled_tasks WHERE scheduler_id = ANY($1)",
                    scheduler_ids
                )
                existing_ids = {row['scheduler_id'] for row in rows}

            # 2. Filter down to tasks that still need creating.
            tasks_to_create = []
            for task in tasks:
                if task.scheduler_id in existing_ids:
                    continue  # already present — skip
                tasks_to_create.append(task)

            if not tasks_to_create:
                return []

            # 3. Prepare the per-column data for bulk insert.
            # NOTE(review): unlike create_task, this insert omits the
            # namespace column — confirm the table default matches the
            # 'default' namespace assumed by _row_to_task.
            values = []
            for task in tasks_to_create:
                values.append((
                    task.scheduler_id,
                    task.task_name,
                    task.task_type.value,
                    task.queue_name,
                    json.dumps(task.task_args),
                    json.dumps(task.task_kwargs),
                    task.cron_expression,
                    task.interval_seconds,
                    task.next_run_time,
                    task.enabled,
                    task.max_retries,
                    task.retry_delay,
                    task.timeout,
                    task.priority,
                    task.description,
                    json.dumps(task.tags),
                    json.dumps(task.metadata)
                ))

            # 4. Bulk insert using UNNEST over per-column arrays; the column
            # order in UNNEST must stay aligned with the INSERT column list.
            created_rows = await conn.fetch(
                """
                INSERT INTO scheduled_tasks (
                    scheduler_id, task_name, task_type, queue_name,
                    task_args, task_kwargs, cron_expression, interval_seconds,
                    next_run_time, enabled, max_retries, retry_delay, timeout,
                    priority, description, tags, metadata
                )
                SELECT * FROM UNNEST(
                    $1::text[], $2::text[], $3::text[], $4::text[],
                    $5::jsonb[], $6::jsonb[], $7::text[], $8::numeric[],
                    $9::timestamptz[], $10::boolean[], $11::int[], $12::int[], $13::int[],
                    $14::int[], $15::text[], $16::jsonb[], $17::jsonb[]
                ) AS t(
                    scheduler_id, task_name, task_type, queue_name,
                    task_args, task_kwargs, cron_expression, interval_seconds,
                    next_run_time, enabled, max_retries, retry_delay, timeout,
                    priority, description, tags, metadata
                )
                ON CONFLICT (scheduler_id) DO NOTHING
                RETURNING *
                """,
                # Unpack the row tuples into column arrays.
                [v[0] for v in values],   # scheduler_id
                [v[1] for v in values],   # task_name
                [v[2] for v in values],   # task_type
                [v[3] for v in values],   # queue_name
                [v[4] for v in values],   # task_args
                [v[5] for v in values],   # task_kwargs
                [v[6] for v in values],   # cron_expression
                [v[7] for v in values],   # interval_seconds
                [v[8] for v in values],   # next_run_time
                [v[9] for v in values],   # enabled
                [v[10] for v in values],  # max_retries
                [v[11] for v in values],  # retry_delay
                [v[12] for v in values],  # timeout
                [v[13] for v in values],  # priority
                [v[14] for v in values],  # description
                [v[15] for v in values],  # tags
                [v[16] for v in values],  # metadata
            )

            # 5. Convert the returned rows back into task objects.
            created_tasks = [self._row_to_task(row) for row in created_rows]

            return created_tasks
@@ -0,0 +1,9 @@
|
|
1
|
+
-- Migration to change interval_seconds from INTEGER to NUMERIC
-- This allows storing decimal values like 0.1 seconds
-- NOTE(review): NUMERIC(10,2) caps precision at hundredths of a second and
-- values below 100 million seconds; confirm that fits all intended intervals.
-- This migration is not idempotent guarded — re-running it re-alters the
-- column (harmless, but noisy); wrap in a migration tool's version check.

-- Alter the column type (INTEGER -> NUMERIC is a safe widening cast)
ALTER TABLE scheduled_tasks
ALTER COLUMN interval_seconds TYPE NUMERIC(10,2);

-- Add a comment to document the change
COMMENT ON COLUMN scheduled_tasks.interval_seconds IS 'Interval in seconds for interval-type tasks (supports decimal values)';