jettask 0.2.18__py3-none-any.whl → 0.2.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. jettask/__init__.py +60 -2
  2. jettask/cli.py +314 -228
  3. jettask/config/__init__.py +9 -1
  4. jettask/config/config.py +245 -0
  5. jettask/config/env_loader.py +381 -0
  6. jettask/config/lua_scripts.py +158 -0
  7. jettask/config/nacos_config.py +132 -5
  8. jettask/core/__init__.py +1 -1
  9. jettask/core/app.py +1573 -666
  10. jettask/core/app_importer.py +33 -16
  11. jettask/core/container.py +532 -0
  12. jettask/core/task.py +1 -4
  13. jettask/core/unified_manager_base.py +2 -2
  14. jettask/executor/__init__.py +38 -0
  15. jettask/executor/core.py +625 -0
  16. jettask/executor/executor.py +338 -0
  17. jettask/executor/orchestrator.py +290 -0
  18. jettask/executor/process_entry.py +638 -0
  19. jettask/executor/task_executor.py +317 -0
  20. jettask/messaging/__init__.py +68 -0
  21. jettask/messaging/event_pool.py +2188 -0
  22. jettask/messaging/reader.py +519 -0
  23. jettask/messaging/registry.py +266 -0
  24. jettask/messaging/scanner.py +369 -0
  25. jettask/messaging/sender.py +312 -0
  26. jettask/persistence/__init__.py +118 -0
  27. jettask/persistence/backlog_monitor.py +567 -0
  28. jettask/{backend/data_access.py → persistence/base.py} +58 -57
  29. jettask/persistence/consumer.py +315 -0
  30. jettask/{core → persistence}/db_manager.py +23 -22
  31. jettask/persistence/maintenance.py +81 -0
  32. jettask/persistence/message_consumer.py +259 -0
  33. jettask/{backend/namespace_data_access.py → persistence/namespace.py} +66 -98
  34. jettask/persistence/offline_recovery.py +196 -0
  35. jettask/persistence/queue_discovery.py +215 -0
  36. jettask/persistence/task_persistence.py +218 -0
  37. jettask/persistence/task_updater.py +583 -0
  38. jettask/scheduler/__init__.py +2 -2
  39. jettask/scheduler/loader.py +6 -5
  40. jettask/scheduler/run_scheduler.py +1 -1
  41. jettask/scheduler/scheduler.py +7 -7
  42. jettask/scheduler/{unified_scheduler_manager.py → scheduler_coordinator.py} +18 -13
  43. jettask/task/__init__.py +16 -0
  44. jettask/{router.py → task/router.py} +26 -8
  45. jettask/task/task_center/__init__.py +9 -0
  46. jettask/task/task_executor.py +318 -0
  47. jettask/task/task_registry.py +291 -0
  48. jettask/test_connection_monitor.py +73 -0
  49. jettask/utils/__init__.py +31 -1
  50. jettask/{monitor/run_backlog_collector.py → utils/backlog_collector.py} +1 -1
  51. jettask/utils/db_connector.py +1629 -0
  52. jettask/{db_init.py → utils/db_init.py} +1 -1
  53. jettask/utils/rate_limit/__init__.py +30 -0
  54. jettask/utils/rate_limit/concurrency_limiter.py +665 -0
  55. jettask/utils/rate_limit/config.py +145 -0
  56. jettask/utils/rate_limit/limiter.py +41 -0
  57. jettask/utils/rate_limit/manager.py +269 -0
  58. jettask/utils/rate_limit/qps_limiter.py +154 -0
  59. jettask/utils/rate_limit/task_limiter.py +384 -0
  60. jettask/utils/serializer.py +3 -0
  61. jettask/{monitor/stream_backlog_monitor.py → utils/stream_backlog.py} +14 -6
  62. jettask/utils/time_sync.py +173 -0
  63. jettask/webui/__init__.py +27 -0
  64. jettask/{api/v1 → webui/api}/alerts.py +1 -1
  65. jettask/{api/v1 → webui/api}/analytics.py +2 -2
  66. jettask/{api/v1 → webui/api}/namespaces.py +1 -1
  67. jettask/{api/v1 → webui/api}/overview.py +1 -1
  68. jettask/{api/v1 → webui/api}/queues.py +3 -3
  69. jettask/{api/v1 → webui/api}/scheduled.py +1 -1
  70. jettask/{api/v1 → webui/api}/settings.py +1 -1
  71. jettask/{api.py → webui/app.py} +253 -145
  72. jettask/webui/namespace_manager/__init__.py +10 -0
  73. jettask/{multi_namespace_consumer.py → webui/namespace_manager/multi.py} +69 -22
  74. jettask/{unified_consumer_manager.py → webui/namespace_manager/unified.py} +1 -1
  75. jettask/{run.py → webui/run.py} +2 -2
  76. jettask/{services → webui/services}/__init__.py +1 -3
  77. jettask/{services → webui/services}/overview_service.py +34 -16
  78. jettask/{services → webui/services}/queue_service.py +1 -1
  79. jettask/{backend → webui/services}/queue_stats_v2.py +1 -1
  80. jettask/{services → webui/services}/settings_service.py +1 -1
  81. jettask/worker/__init__.py +53 -0
  82. jettask/worker/lifecycle.py +1507 -0
  83. jettask/worker/manager.py +583 -0
  84. jettask/{core/offline_worker_recovery.py → worker/recovery.py} +268 -175
  85. {jettask-0.2.18.dist-info → jettask-0.2.20.dist-info}/METADATA +2 -71
  86. jettask-0.2.20.dist-info/RECORD +145 -0
  87. jettask/__main__.py +0 -140
  88. jettask/api/__init__.py +0 -103
  89. jettask/backend/__init__.py +0 -1
  90. jettask/backend/api/__init__.py +0 -3
  91. jettask/backend/api/v1/__init__.py +0 -17
  92. jettask/backend/api/v1/monitoring.py +0 -431
  93. jettask/backend/api/v1/namespaces.py +0 -504
  94. jettask/backend/api/v1/queues.py +0 -342
  95. jettask/backend/api/v1/tasks.py +0 -367
  96. jettask/backend/core/__init__.py +0 -3
  97. jettask/backend/core/cache.py +0 -221
  98. jettask/backend/core/database.py +0 -200
  99. jettask/backend/core/exceptions.py +0 -102
  100. jettask/backend/dependencies.py +0 -261
  101. jettask/backend/init_meta_db.py +0 -158
  102. jettask/backend/main.py +0 -1426
  103. jettask/backend/main_unified.py +0 -78
  104. jettask/backend/main_v2.py +0 -394
  105. jettask/backend/models/__init__.py +0 -3
  106. jettask/backend/models/requests.py +0 -236
  107. jettask/backend/models/responses.py +0 -230
  108. jettask/backend/namespace_api_old.py +0 -267
  109. jettask/backend/services/__init__.py +0 -3
  110. jettask/backend/start.py +0 -42
  111. jettask/backend/unified_api_router.py +0 -1541
  112. jettask/cleanup_deprecated_tables.sql +0 -16
  113. jettask/core/consumer_manager.py +0 -1695
  114. jettask/core/delay_scanner.py +0 -256
  115. jettask/core/event_pool.py +0 -1700
  116. jettask/core/heartbeat_process.py +0 -222
  117. jettask/core/task_batch.py +0 -153
  118. jettask/core/worker_scanner.py +0 -271
  119. jettask/executors/__init__.py +0 -5
  120. jettask/executors/asyncio.py +0 -876
  121. jettask/executors/base.py +0 -30
  122. jettask/executors/common.py +0 -148
  123. jettask/executors/multi_asyncio.py +0 -309
  124. jettask/gradio_app.py +0 -570
  125. jettask/integrated_gradio_app.py +0 -1088
  126. jettask/main.py +0 -0
  127. jettask/monitoring/__init__.py +0 -3
  128. jettask/pg_consumer.py +0 -1896
  129. jettask/run_monitor.py +0 -22
  130. jettask/run_webui.py +0 -148
  131. jettask/scheduler/multi_namespace_scheduler.py +0 -294
  132. jettask/scheduler/unified_manager.py +0 -450
  133. jettask/task_center_client.py +0 -150
  134. jettask/utils/serializer_optimized.py +0 -33
  135. jettask/webui_exceptions.py +0 -67
  136. jettask-0.2.18.dist-info/RECORD +0 -150
  137. /jettask/{constants.py → config/constants.py} +0 -0
  138. /jettask/{backend/config.py → config/task_center.py} +0 -0
  139. /jettask/{pg_consumer → messaging/pg_consumer}/pg_consumer_v2.py +0 -0
  140. /jettask/{pg_consumer → messaging/pg_consumer}/sql/add_execution_time_field.sql +0 -0
  141. /jettask/{pg_consumer → messaging/pg_consumer}/sql/create_new_tables.sql +0 -0
  142. /jettask/{pg_consumer → messaging/pg_consumer}/sql/create_tables_v3.sql +0 -0
  143. /jettask/{pg_consumer → messaging/pg_consumer}/sql/migrate_to_new_structure.sql +0 -0
  144. /jettask/{pg_consumer → messaging/pg_consumer}/sql/modify_time_fields.sql +0 -0
  145. /jettask/{pg_consumer → messaging/pg_consumer}/sql_utils.py +0 -0
  146. /jettask/{models.py → persistence/models.py} +0 -0
  147. /jettask/scheduler/{manager.py → task_crud.py} +0 -0
  148. /jettask/{schema.sql → schemas/schema.sql} +0 -0
  149. /jettask/{task_center.py → task/task_center/client.py} +0 -0
  150. /jettask/{monitoring → utils}/file_watcher.py +0 -0
  151. /jettask/{services/redis_monitor_service.py → utils/redis_monitor.py} +0 -0
  152. /jettask/{api/v1 → webui/api}/__init__.py +0 -0
  153. /jettask/{webui_config.py → webui/config.py} +0 -0
  154. /jettask/{webui_models → webui/models}/__init__.py +0 -0
  155. /jettask/{webui_models → webui/models}/namespace.py +0 -0
  156. /jettask/{services → webui/services}/alert_service.py +0 -0
  157. /jettask/{services → webui/services}/analytics_service.py +0 -0
  158. /jettask/{services → webui/services}/scheduled_task_service.py +0 -0
  159. /jettask/{services → webui/services}/task_service.py +0 -0
  160. /jettask/{webui_sql → webui/sql}/batch_upsert_functions.sql +0 -0
  161. /jettask/{webui_sql → webui/sql}/verify_database.sql +0 -0
  162. {jettask-0.2.18.dist-info → jettask-0.2.20.dist-info}/WHEEL +0 -0
  163. {jettask-0.2.18.dist-info → jettask-0.2.20.dist-info}/entry_points.txt +0 -0
  164. {jettask-0.2.18.dist-info → jettask-0.2.20.dist-info}/licenses/LICENSE +0 -0
  165. {jettask-0.2.18.dist-info → jettask-0.2.20.dist-info}/top_level.txt +0 -0
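Most of this release is a re-packaging: flat top-level modules move into dedicated packages (config, executor, messaging, persistence, task, webui, worker). For code that imported the moved modules directly, the renames above imply import-path updates along these lines; a minimal sketch, assuming the moved files stay importable under their new paths (the file list shows paths only, not the symbols they export):

import jettask.task.router as router                  # 0.2.18: jettask.router
import jettask.persistence.db_manager as db_manager   # 0.2.18: jettask.core.db_manager
import jettask.utils.db_init as db_init                # 0.2.18: jettask.db_init
import jettask.webui.app as webui_app                  # 0.2.18: jettask.api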
jettask/backend/api/v1/queues.py
@@ -1,342 +0,0 @@
- """
- Queue management API v1
- """
- from typing import List, Optional
- from fastapi import APIRouter, Depends, HTTPException, Query
- from sqlalchemy.ext.asyncio import AsyncSession
- import redis.asyncio as redis
-
- from dependencies import (
-     get_validated_namespace, get_pg_connection, get_redis_client,
-     get_namespace_connection, validate_page_params, validate_time_range,
-     get_request_metrics, RequestMetrics
- )
- from models.requests import QueueListRequest, QueueMetricsRequest, QueueActionRequest
- from models.responses import QueueListResponse, QueueDetailResponse, MonitoringResponse, BaseResponse
- from core.cache import cache_result, CACHE_CONFIGS
- from core.exceptions import QueueNotFoundError, ValidationError
- from queue_stats_v2 import QueueStatsV2
- import logging
-
- logger = logging.getLogger(__name__)
- router = APIRouter()
-
-
- @router.get("", response_model=QueueListResponse)
- @cache_result(**CACHE_CONFIGS['queue_stats'])
- async def list_queues(
-     namespace: str = Depends(get_validated_namespace),
-     page: int = Query(1, ge=1, description="Page number"),
-     page_size: int = Query(20, ge=1, le=100, description="Page size"),
-     search: Optional[str] = Query(None, description="Search keyword"),
-     status: Optional[str] = Query(None, description="Status filter"),
-     include_stats: bool = Query(True, description="Include statistics"),
-     redis_client: redis.Redis = Depends(get_redis_client),
-     pg_session: AsyncSession = Depends(get_pg_connection),
-     connection = Depends(get_namespace_connection),
-     metrics: RequestMetrics = Depends(get_request_metrics)
- ):
-     """List queues."""
-     metrics.start(namespace, "GET /queues")
-
-     try:
-         # Create the queue statistics service
-         stats_service = QueueStatsV2(
-             redis_client=redis_client,
-             pg_session=pg_session,
-             redis_prefix=connection.redis_prefix
-         )
-
-         # Fetch grouped queue statistics
-         queue_stats = await stats_service.get_queue_stats_grouped()
-
-         # Extract base queue names (strip the priority suffix)
-         base_queues = {}
-         for stat in queue_stats:
-             base_name = get_base_queue_name(stat['queue_name'])
-             if base_name not in base_queues:
-                 base_queues[base_name] = {
-                     'name': base_name,
-                     'namespace': namespace,
-                     'priority': None,
-                     'pending_count': 0,
-                     'running_count': 0,
-                     'completed_count': 0,
-                     'failed_count': 0,
-                     'last_activity': None
-                 }
-
-             # Aggregate the statistics
-             queue_info = base_queues[base_name]
-             queue_info['pending_count'] += stat.get('unprocessed_tasks', 0)
-             queue_info['running_count'] += stat.get('pending_in_runs', 0)
-             queue_info['completed_count'] += stat.get('success_count', 0)
-             queue_info['failed_count'] += stat.get('error_count', 0)
-
-             # Update the last-activity timestamp
-             if stat.get('last_activity') and (
-                 not queue_info['last_activity'] or
-                 stat['last_activity'] > queue_info['last_activity']
-             ):
-                 queue_info['last_activity'] = stat['last_activity']
-
-         # Convert to the response format
-         queue_list = list(base_queues.values())
-
-         # Apply the search filter
-         if search:
-             queue_list = [q for q in queue_list if search.lower() in q['name'].lower()]
-
-         # Apply the status filter
-         if status:
-             if status == 'active':
-                 queue_list = [q for q in queue_list if q['pending_count'] > 0 or q['running_count'] > 0]
-             elif status == 'idle':
-                 queue_list = [q for q in queue_list if q['pending_count'] == 0 and q['running_count'] == 0]
-
-         # Paginate
-         total = len(queue_list)
-         start = (page - 1) * page_size
-         end = start + page_size
-         paginated_queues = queue_list[start:end]
-
-         return QueueListResponse.create(
-             data=paginated_queues,
-             total=total,
-             page=page,
-             page_size=page_size
-         )
-
-     except Exception as e:
-         logger.error(f"Failed to list queues: {e}")
-         raise HTTPException(status_code=500, detail=str(e))
-     finally:
-         metrics.finish()
-
-
- @router.get("/{queue_name}", response_model=QueueDetailResponse)
- @cache_result(**CACHE_CONFIGS['queue_stats'])
- async def get_queue_detail(
-     queue_name: str,
-     namespace: str = Depends(get_validated_namespace),
-     redis_client: redis.Redis = Depends(get_redis_client),
-     pg_session: AsyncSession = Depends(get_pg_connection),
-     connection = Depends(get_namespace_connection),
-     metrics: RequestMetrics = Depends(get_request_metrics)
- ):
-     """Get queue details."""
-     metrics.start(namespace, f"GET /queues/{queue_name}")
-
-     try:
-         # Create the queue statistics service
-         stats_service = QueueStatsV2(
-             redis_client=redis_client,
-             pg_session=pg_session,
-             redis_prefix=connection.redis_prefix
-         )
-
-         # Fetch detailed statistics for this queue
-         queue_stats = await stats_service.get_queue_stats_grouped()
-
-         # Keep only the requested queue (including its priority queues)
-         queue_data = []
-         for stat in queue_stats:
-             base_name = get_base_queue_name(stat['queue_name'])
-             if base_name == queue_name:
-                 queue_data.append(stat)
-
-         if not queue_data:
-             raise QueueNotFoundError(queue_name, namespace)
-
-         # Aggregate basic queue information
-         queue_info = {
-             'name': queue_name,
-             'namespace': namespace,
-             'priority': None,
-             'pending_count': sum(s.get('unprocessed_tasks', 0) for s in queue_data),
-             'running_count': sum(s.get('pending_in_runs', 0) for s in queue_data),
-             'completed_count': sum(s.get('success_count', 0) for s in queue_data),
-             'failed_count': sum(s.get('error_count', 0) for s in queue_data),
-             'last_activity': max((s.get('last_activity') for s in queue_data if s.get('last_activity')), default=None)
-         }
-
-         # Extract consumer-group information
-         consumer_groups = []
-         for stat in queue_data:
-             if stat.get('group_name'):
-                 consumer_groups.append({
-                     'name': stat['group_name'],
-                     'queue_name': stat['queue_name'],
-                     'pending_count': stat.get('pending_in_runs', 0),
-                     'processed_count': stat.get('processed_tasks', 0),
-                     'success_count': stat.get('success_count', 0),
-                     'error_count': stat.get('error_count', 0),
-                     'avg_execution_time': stat.get('avg_execution_time'),
-                     'recent_completed': stat.get('recent_completed', 0)
-                 })
-
-         # Build the response payload
-         queue_detail = {
-             'queue_info': queue_info,
-             'consumer_groups': consumer_groups,
-             'metrics': {
-                 'total_tasks': queue_info['pending_count'] + queue_info['running_count'] + queue_info['completed_count'] + queue_info['failed_count'],
-                 'success_rate': queue_info['completed_count'] / max(queue_info['completed_count'] + queue_info['failed_count'], 1),
-                 'active_consumer_groups': len([cg for cg in consumer_groups if cg['pending_count'] > 0])
-             },
-             'trends': {}  # Trend data can be added later
-         }
-
-         return QueueDetailResponse(data=queue_detail)
-
-     except QueueNotFoundError:
-         raise
-     except Exception as e:
-         logger.error(f"Failed to get queue details: {e}")
-         raise HTTPException(status_code=500, detail=str(e))
-     finally:
-         metrics.finish()
-
-
- @router.get("/{queue_name}/metrics", response_model=MonitoringResponse)
- @cache_result(**CACHE_CONFIGS['monitoring_data'])
- async def get_queue_metrics(
-     queue_name: str,
-     namespace: str = Depends(get_validated_namespace),
-     time_params: dict = Depends(validate_time_range),
-     metrics_types: str = Query("pending,processing,completed", description="Metric types, comma-separated"),
-     granularity: Optional[str] = Query(None, description="Data granularity"),
-     include_consumer_groups: bool = Query(False, description="Include consumer-group data"),
-     connection = Depends(get_namespace_connection),
-     request_metrics: RequestMetrics = Depends(get_request_metrics)
- ):
-     """Get queue monitoring metrics."""
-     request_metrics.start(namespace, f"GET /queues/{queue_name}/metrics")
-
-     try:
-         # Reuse the existing backlog-monitoring API
-         from ...queue_backlog_api import get_backlog_trend, BacklogTrendRequest
-
-         # Build the request parameters
-         backlog_request = BacklogTrendRequest(
-             namespace=namespace,
-             queues=[queue_name],
-             time_range=time_params.get('time_range'),
-             start_time=time_params.get('start_time'),
-             end_time=time_params.get('end_time'),
-             granularity=granularity,
-             include_groups=include_consumer_groups
-         )
-
-         # Call the backlog-trend API
-         backlog_response = await get_backlog_trend(backlog_request)
-
-         # Convert to the monitoring response format
-         monitoring_data = {
-             'series': [],
-             'granularity': backlog_response.granularity,
-             'time_range': backlog_response.time_range
-         }
-
-         # Group the data points by series
-         series_data = {}
-         for item in backlog_response.data:
-             series_name = item.get('group') or queue_name
-             if series_name not in series_data:
-                 series_data[series_name] = []
-
-             series_data[series_name].append({
-                 'timestamp': item['time'],
-                 'value': item['backlog'],
-                 'metadata': {
-                     'queue': item['queue'],
-                     'consumer_group': item.get('group'),
-                     'published': item.get('published'),
-                     'delivered': item.get('delivered')
-                 }
-             })
-
-         # Convert to time-series format
-         for series_name, data_points in series_data.items():
-             monitoring_data['series'].append({
-                 'name': series_name,
-                 'data_points': data_points,
-                 'unit': 'tasks'
-             })
-
-         return MonitoringResponse(data=monitoring_data)
-
-     except Exception as e:
-         logger.error(f"Failed to get queue metrics: {e}")
-         raise HTTPException(status_code=500, detail=str(e))
-     finally:
-         request_metrics.finish()
-
-
- @router.post("/{queue_name}/actions", response_model=BaseResponse)
- async def execute_queue_action(
-     queue_name: str,
-     action_request: QueueActionRequest,
-     namespace: str = Depends(get_validated_namespace),
-     redis_client: redis.Redis = Depends(get_redis_client),
-     metrics: RequestMetrics = Depends(get_request_metrics)
- ):
-     """Execute a queue action."""
-     metrics.start(namespace, f"POST /queues/{queue_name}/actions")
-
-     try:
-         action = action_request.action.lower()
-         parameters = action_request.parameters
-
-         if action == "trim":
-             # Trim the queue
-             max_length = parameters.get('max_length')
-             if not max_length or max_length < 0:
-                 raise ValidationError("max_length parameter is required and must be >= 0")
-
-             # A Redis Stream XTRIM still needs to be implemented here;
-             # return a mock response for now
-             return BaseResponse(
-                 message=f"Queue {queue_name} trimmed to {max_length} messages"
-             )
-
-         elif action == "clear":
-             # Clear the queue
-             # Clearing logic still needs to be implemented here
-             return BaseResponse(
-                 message=f"Queue {queue_name} cleared"
-             )
-
-         elif action == "pause":
-             # Pause the queue
-             # Pausing consumption still needs to be implemented here
-             return BaseResponse(
-                 message=f"Queue {queue_name} paused"
-             )
-
-         elif action == "resume":
-             # Resume the queue
-             # Resuming consumption still needs to be implemented here
-             return BaseResponse(
-                 message=f"Queue {queue_name} resumed"
-             )
-
-         else:
-             raise ValidationError(f"Unsupported action: {action}")
-
-     except ValidationError:
-         raise
-     except Exception as e:
-         logger.error(f"Failed to execute queue action: {e}")
-         raise HTTPException(status_code=500, detail=str(e))
-     finally:
-         metrics.finish()
-
-
- def get_base_queue_name(queue_name: str) -> str:
-     """Extract the base queue name (strip the priority suffix)."""
-     if ':' in queue_name:
-         parts = queue_name.rsplit(':', 1)
-         if parts[-1].isdigit():
-             return parts[0]
-     return queue_name
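Two notes on the removed queues.py above, offered as a hedged sketch rather than anything shipped in 0.2.20: get_base_queue_name strips only a trailing numeric priority segment, and the "trim" action never issued a real XTRIM (its own comments say a mock response is returned). A possible implementation of that missing call with redis.asyncio, using a placeholder stream key, could look like:

import redis.asyncio as redis

# Suffix behaviour of the removed helper, for illustration only:
#   get_base_queue_name("orders:5")    -> "orders"        (numeric suffix stripped)
#   get_base_queue_name("orders:high") -> "orders:high"   (non-numeric suffix kept)

async def trim_queue_stream(client: redis.Redis, stream_key: str, max_length: int) -> int:
    """Cap a Redis Stream at roughly max_length entries (the XTRIM the stub skipped)."""
    # approximate=True sends the "~" form of XTRIM, which is cheaper for Redis to apply.
    return await client.xtrim(stream_key, maxlen=max_length, approximate=True)

# Usage sketch; "jettask:QUEUE:orders" is a placeholder key, not taken from this diff:
#   removed = await trim_queue_stream(redis.Redis(), "jettask:QUEUE:orders", 10_000)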