django-cfg 1.5.1__py3-none-any.whl → 1.5.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of django-cfg might be problematic. Click here for more details.
- django_cfg/__init__.py +1 -1
- django_cfg/apps/dashboard/TRANSACTION_FIX.md +73 -0
- django_cfg/apps/dashboard/serializers/__init__.py +0 -12
- django_cfg/apps/dashboard/serializers/activity.py +1 -1
- django_cfg/apps/dashboard/services/__init__.py +0 -2
- django_cfg/apps/dashboard/services/charts_service.py +4 -3
- django_cfg/apps/dashboard/services/statistics_service.py +11 -2
- django_cfg/apps/dashboard/services/system_health_service.py +64 -106
- django_cfg/apps/dashboard/urls.py +0 -2
- django_cfg/apps/dashboard/views/__init__.py +0 -2
- django_cfg/apps/dashboard/views/commands_views.py +3 -6
- django_cfg/apps/dashboard/views/overview_views.py +14 -13
- django_cfg/apps/knowbase/apps.py +2 -2
- django_cfg/apps/maintenance/admin/api_key_admin.py +2 -3
- django_cfg/apps/newsletter/admin/newsletter_admin.py +12 -11
- django_cfg/apps/rq/__init__.py +9 -0
- django_cfg/apps/rq/apps.py +80 -0
- django_cfg/apps/rq/management/__init__.py +1 -0
- django_cfg/apps/rq/management/commands/__init__.py +1 -0
- django_cfg/apps/rq/management/commands/rqscheduler.py +31 -0
- django_cfg/apps/rq/management/commands/rqstats.py +33 -0
- django_cfg/apps/rq/management/commands/rqworker.py +31 -0
- django_cfg/apps/rq/management/commands/rqworker_pool.py +27 -0
- django_cfg/apps/rq/serializers/__init__.py +40 -0
- django_cfg/apps/rq/serializers/health.py +60 -0
- django_cfg/apps/rq/serializers/job.py +100 -0
- django_cfg/apps/rq/serializers/queue.py +80 -0
- django_cfg/apps/rq/serializers/schedule.py +178 -0
- django_cfg/apps/rq/serializers/testing.py +139 -0
- django_cfg/apps/rq/serializers/worker.py +58 -0
- django_cfg/apps/rq/services/__init__.py +25 -0
- django_cfg/apps/rq/services/config_helper.py +233 -0
- django_cfg/apps/rq/services/models/README.md +417 -0
- django_cfg/apps/rq/services/models/__init__.py +30 -0
- django_cfg/apps/rq/services/models/event.py +123 -0
- django_cfg/apps/rq/services/models/job.py +99 -0
- django_cfg/apps/rq/services/models/queue.py +92 -0
- django_cfg/apps/rq/services/models/worker.py +104 -0
- django_cfg/apps/rq/services/rq_converters.py +183 -0
- django_cfg/apps/rq/tasks/__init__.py +23 -0
- django_cfg/apps/rq/tasks/demo_tasks.py +284 -0
- django_cfg/apps/rq/urls.py +54 -0
- django_cfg/apps/rq/views/__init__.py +19 -0
- django_cfg/apps/rq/views/jobs.py +882 -0
- django_cfg/apps/rq/views/monitoring.py +248 -0
- django_cfg/apps/rq/views/queues.py +261 -0
- django_cfg/apps/rq/views/schedule.py +400 -0
- django_cfg/apps/rq/views/testing.py +761 -0
- django_cfg/apps/rq/views/workers.py +195 -0
- django_cfg/apps/urls.py +6 -7
- django_cfg/core/base/config_model.py +10 -26
- django_cfg/core/builders/apps_builder.py +4 -11
- django_cfg/core/generation/integration_generators/__init__.py +3 -6
- django_cfg/core/generation/integration_generators/django_rq.py +80 -0
- django_cfg/core/generation/orchestrator.py +9 -19
- django_cfg/core/integration/display/startup.py +6 -20
- django_cfg/mixins/__init__.py +2 -0
- django_cfg/mixins/superadmin_api.py +59 -0
- django_cfg/models/__init__.py +3 -3
- django_cfg/models/django/__init__.py +3 -3
- django_cfg/models/django/django_rq.py +621 -0
- django_cfg/models/django/revolution_legacy.py +1 -1
- django_cfg/modules/base.py +4 -6
- django_cfg/modules/django_admin/config/background_task_config.py +4 -4
- django_cfg/modules/django_admin/utils/html/composition.py +9 -2
- django_cfg/modules/django_unfold/navigation.py +1 -26
- django_cfg/pyproject.toml +4 -4
- django_cfg/registry/core.py +4 -7
- django_cfg/static/frontend/admin.zip +0 -0
- django_cfg/templates/admin/constance/includes/results_list.html +73 -0
- django_cfg/templates/admin/index.html +187 -62
- django_cfg/templatetags/django_cfg.py +61 -1
- {django_cfg-1.5.1.dist-info → django_cfg-1.5.2.dist-info}/METADATA +5 -6
- {django_cfg-1.5.1.dist-info → django_cfg-1.5.2.dist-info}/RECORD +77 -82
- django_cfg/apps/dashboard/permissions.py +0 -48
- django_cfg/apps/dashboard/serializers/django_q2.py +0 -50
- django_cfg/apps/dashboard/services/django_q2_service.py +0 -159
- django_cfg/apps/dashboard/views/django_q2_views.py +0 -79
- django_cfg/apps/tasks/__init__.py +0 -64
- django_cfg/apps/tasks/admin/__init__.py +0 -4
- django_cfg/apps/tasks/admin/config.py +0 -98
- django_cfg/apps/tasks/admin/task_log.py +0 -238
- django_cfg/apps/tasks/apps.py +0 -15
- django_cfg/apps/tasks/filters/__init__.py +0 -10
- django_cfg/apps/tasks/filters/task_log.py +0 -121
- django_cfg/apps/tasks/migrations/0001_initial.py +0 -196
- django_cfg/apps/tasks/migrations/0002_delete_tasklog.py +0 -16
- django_cfg/apps/tasks/migrations/__init__.py +0 -0
- django_cfg/apps/tasks/models/__init__.py +0 -4
- django_cfg/apps/tasks/models/task_log.py +0 -246
- django_cfg/apps/tasks/serializers/__init__.py +0 -28
- django_cfg/apps/tasks/serializers/task_log.py +0 -249
- django_cfg/apps/tasks/services/__init__.py +0 -10
- django_cfg/apps/tasks/services/client/__init__.py +0 -7
- django_cfg/apps/tasks/services/client/client.py +0 -234
- django_cfg/apps/tasks/services/config_helper.py +0 -63
- django_cfg/apps/tasks/services/sync.py +0 -204
- django_cfg/apps/tasks/urls.py +0 -16
- django_cfg/apps/tasks/views/__init__.py +0 -10
- django_cfg/apps/tasks/views/task_log.py +0 -41
- django_cfg/apps/tasks/views/task_log_base.py +0 -41
- django_cfg/apps/tasks/views/task_log_overview.py +0 -100
- django_cfg/apps/tasks/views/task_log_related.py +0 -41
- django_cfg/apps/tasks/views/task_log_stats.py +0 -91
- django_cfg/apps/tasks/views/task_log_timeline.py +0 -81
- django_cfg/core/generation/integration_generators/django_q2.py +0 -133
- django_cfg/core/generation/integration_generators/tasks.py +0 -88
- django_cfg/models/django/django_q2.py +0 -514
- django_cfg/models/tasks/__init__.py +0 -49
- django_cfg/models/tasks/backends.py +0 -122
- django_cfg/models/tasks/config.py +0 -209
- django_cfg/models/tasks/utils.py +0 -162
- django_cfg/modules/django_q2/README.md +0 -140
- django_cfg/modules/django_q2/__init__.py +0 -8
- django_cfg/modules/django_q2/apps.py +0 -107
- django_cfg/modules/django_q2/management/__init__.py +0 -0
- django_cfg/modules/django_q2/management/commands/__init__.py +0 -0
- django_cfg/modules/django_q2/management/commands/sync_django_q_schedules.py +0 -74
- {django_cfg-1.5.1.dist-info → django_cfg-1.5.2.dist-info}/WHEEL +0 -0
- {django_cfg-1.5.1.dist-info → django_cfg-1.5.2.dist-info}/entry_points.txt +0 -0
- {django_cfg-1.5.1.dist-info → django_cfg-1.5.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,882 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Django-RQ Job Management ViewSet.
|
|
3
|
+
|
|
4
|
+
Provides REST API endpoints for managing RQ jobs.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
|
|
9
|
+
from django_cfg.mixins import AdminAPIMixin
|
|
10
|
+
from django_cfg.modules.django_logging import get_logger
|
|
11
|
+
from drf_spectacular.utils import extend_schema, OpenApiParameter
|
|
12
|
+
from rest_framework import status, viewsets
|
|
13
|
+
from rest_framework.decorators import action
|
|
14
|
+
from rest_framework.response import Response
|
|
15
|
+
|
|
16
|
+
from ..serializers import JobListSerializer, JobDetailSerializer, JobActionResponseSerializer
|
|
17
|
+
from ..services import job_to_model
|
|
18
|
+
|
|
19
|
+
logger = get_logger("rq.jobs")
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class JobViewSet(AdminAPIMixin, viewsets.ViewSet):
|
|
23
|
+
"""
|
|
24
|
+
ViewSet for RQ job management.
|
|
25
|
+
|
|
26
|
+
Provides endpoints for:
|
|
27
|
+
- Listing all jobs
|
|
28
|
+
- Getting job details
|
|
29
|
+
- Canceling jobs
|
|
30
|
+
- Requeuing failed jobs
|
|
31
|
+
- Deleting jobs
|
|
32
|
+
|
|
33
|
+
Requires admin authentication (JWT, Session, or Basic Auth).
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
@extend_schema(
|
|
37
|
+
tags=["RQ Jobs"],
|
|
38
|
+
summary="List all jobs",
|
|
39
|
+
description="Returns all jobs across all registries (queued, started, finished, failed, deferred, scheduled).",
|
|
40
|
+
parameters=[
|
|
41
|
+
OpenApiParameter(
|
|
42
|
+
name="queue",
|
|
43
|
+
type=str,
|
|
44
|
+
location=OpenApiParameter.QUERY,
|
|
45
|
+
description="Filter by queue name",
|
|
46
|
+
required=False,
|
|
47
|
+
),
|
|
48
|
+
OpenApiParameter(
|
|
49
|
+
name="status",
|
|
50
|
+
type=str,
|
|
51
|
+
location=OpenApiParameter.QUERY,
|
|
52
|
+
description="Filter by status (queued, started, finished, failed, deferred, scheduled)",
|
|
53
|
+
required=False,
|
|
54
|
+
),
|
|
55
|
+
],
|
|
56
|
+
responses={
|
|
57
|
+
200: JobListSerializer(many=True),
|
|
58
|
+
},
|
|
59
|
+
)
|
|
60
|
+
def list(self, request):
|
|
61
|
+
"""List all jobs across all registries."""
|
|
62
|
+
try:
|
|
63
|
+
import django_rq
|
|
64
|
+
from django.conf import settings
|
|
65
|
+
from rq.job import Job
|
|
66
|
+
from rq.registry import (
|
|
67
|
+
FinishedJobRegistry,
|
|
68
|
+
FailedJobRegistry,
|
|
69
|
+
StartedJobRegistry,
|
|
70
|
+
DeferredJobRegistry,
|
|
71
|
+
ScheduledJobRegistry,
|
|
72
|
+
)
|
|
73
|
+
|
|
74
|
+
queue_filter = request.query_params.get('queue')
|
|
75
|
+
status_filter = request.query_params.get('status')
|
|
76
|
+
|
|
77
|
+
all_jobs = []
|
|
78
|
+
|
|
79
|
+
if hasattr(settings, 'RQ_QUEUES'):
|
|
80
|
+
for queue_name in settings.RQ_QUEUES.keys():
|
|
81
|
+
# Apply queue filter
|
|
82
|
+
if queue_filter and queue_filter != queue_name:
|
|
83
|
+
continue
|
|
84
|
+
|
|
85
|
+
try:
|
|
86
|
+
queue = django_rq.get_queue(queue_name)
|
|
87
|
+
|
|
88
|
+
# Get jobs from all registries
|
|
89
|
+
registries = {
|
|
90
|
+
'queued': {'jobs': queue.job_ids, 'status': 'queued'},
|
|
91
|
+
'started': {'registry': StartedJobRegistry(queue_name, connection=queue.connection), 'status': 'started'},
|
|
92
|
+
'finished': {'registry': FinishedJobRegistry(queue_name, connection=queue.connection), 'status': 'finished'},
|
|
93
|
+
'failed': {'registry': FailedJobRegistry(queue_name, connection=queue.connection), 'status': 'failed'},
|
|
94
|
+
'deferred': {'registry': DeferredJobRegistry(queue_name, connection=queue.connection), 'status': 'deferred'},
|
|
95
|
+
'scheduled': {'registry': ScheduledJobRegistry(queue_name, connection=queue.connection), 'status': 'scheduled'},
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
for reg_name, reg_data in registries.items():
|
|
99
|
+
# Apply status filter
|
|
100
|
+
if status_filter and status_filter != reg_data['status']:
|
|
101
|
+
continue
|
|
102
|
+
|
|
103
|
+
# Get job IDs
|
|
104
|
+
if 'registry' in reg_data:
|
|
105
|
+
job_ids = reg_data['registry'].get_job_ids()
|
|
106
|
+
else:
|
|
107
|
+
job_ids = reg_data['jobs']
|
|
108
|
+
|
|
109
|
+
# Fetch jobs (limit to 100 per registry to avoid overload)
|
|
110
|
+
for job_id in job_ids[:100]:
|
|
111
|
+
try:
|
|
112
|
+
job = Job.fetch(job_id, connection=queue.connection)
|
|
113
|
+
job_model = job_to_model(job, queue_name)
|
|
114
|
+
|
|
115
|
+
# Convert to dict for DRF serializer (minimal fields for list)
|
|
116
|
+
job_dict = {
|
|
117
|
+
"id": job_model.id,
|
|
118
|
+
"func_name": job_model.func_name,
|
|
119
|
+
"status": job_model.status,
|
|
120
|
+
"queue": queue_name,
|
|
121
|
+
"created_at": job_model.created_at,
|
|
122
|
+
"started_at": job_model.started_at,
|
|
123
|
+
"ended_at": job_model.ended_at,
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
serializer = JobListSerializer(data=job_dict)
|
|
127
|
+
serializer.is_valid(raise_exception=True)
|
|
128
|
+
all_jobs.append(serializer.data)
|
|
129
|
+
|
|
130
|
+
except Exception as e:
|
|
131
|
+
logger.debug(f"Failed to fetch job {job_id}: {e}")
|
|
132
|
+
continue
|
|
133
|
+
|
|
134
|
+
except Exception as e:
|
|
135
|
+
logger.debug(f"Failed to get jobs from queue {queue_name}: {e}")
|
|
136
|
+
continue
|
|
137
|
+
|
|
138
|
+
return Response(all_jobs)
|
|
139
|
+
|
|
140
|
+
except Exception as e:
|
|
141
|
+
import traceback
|
|
142
|
+
logger.error(f"Jobs list error: {e}", exc_info=True)
|
|
143
|
+
return Response(
|
|
144
|
+
{
|
|
145
|
+
"error": str(e),
|
|
146
|
+
"traceback": traceback.format_exc(),
|
|
147
|
+
},
|
|
148
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
149
|
+
)
|
|
150
|
+
|
|
151
|
+
@extend_schema(
|
|
152
|
+
tags=["RQ Jobs"],
|
|
153
|
+
summary="Get job details",
|
|
154
|
+
description="Returns detailed information about a specific job.",
|
|
155
|
+
responses={
|
|
156
|
+
200: JobDetailSerializer,
|
|
157
|
+
404: {"description": "Job not found"},
|
|
158
|
+
},
|
|
159
|
+
)
|
|
160
|
+
def retrieve(self, request, pk=None):
|
|
161
|
+
"""Get job details by ID."""
|
|
162
|
+
try:
|
|
163
|
+
from rq.job import Job
|
|
164
|
+
from django.conf import settings
|
|
165
|
+
import django_rq
|
|
166
|
+
|
|
167
|
+
# Try to find job in all queues
|
|
168
|
+
job = None
|
|
169
|
+
job_queue = None
|
|
170
|
+
|
|
171
|
+
if hasattr(settings, 'RQ_QUEUES'):
|
|
172
|
+
for queue_name in settings.RQ_QUEUES.keys():
|
|
173
|
+
try:
|
|
174
|
+
queue = django_rq.get_queue(queue_name)
|
|
175
|
+
job = Job.fetch(pk, connection=queue.connection)
|
|
176
|
+
job_queue = queue_name
|
|
177
|
+
break
|
|
178
|
+
except Exception:
|
|
179
|
+
continue
|
|
180
|
+
|
|
181
|
+
if not job:
|
|
182
|
+
return Response(
|
|
183
|
+
{"error": f"Job {pk} not found"},
|
|
184
|
+
status=status.HTTP_404_NOT_FOUND,
|
|
185
|
+
)
|
|
186
|
+
|
|
187
|
+
# Convert RQ Job to Pydantic model
|
|
188
|
+
job_model = job_to_model(job, job_queue)
|
|
189
|
+
|
|
190
|
+
# Convert Pydantic model to dict for DRF serializer
|
|
191
|
+
# DRF expects args/kwargs/meta as dicts/lists, not JSON strings
|
|
192
|
+
job_dict = {
|
|
193
|
+
"id": job_model.id,
|
|
194
|
+
"func_name": job_model.func_name,
|
|
195
|
+
"args": json.loads(job_model.args_json),
|
|
196
|
+
"kwargs": json.loads(job_model.kwargs_json),
|
|
197
|
+
"created_at": job_model.created_at,
|
|
198
|
+
"enqueued_at": job_model.enqueued_at,
|
|
199
|
+
"started_at": job_model.started_at,
|
|
200
|
+
"ended_at": job_model.ended_at,
|
|
201
|
+
"status": job_model.status,
|
|
202
|
+
"queue": job_model.queue,
|
|
203
|
+
"worker_name": job_model.worker_name,
|
|
204
|
+
"timeout": job_model.timeout,
|
|
205
|
+
"result_ttl": job_model.result_ttl,
|
|
206
|
+
"failure_ttl": job_model.failure_ttl,
|
|
207
|
+
"result": json.loads(job_model.result_json) if job_model.result_json else None,
|
|
208
|
+
"exc_info": job_model.exc_info,
|
|
209
|
+
"meta": json.loads(job_model.meta_json) if job_model.meta_json else {},
|
|
210
|
+
"dependency_ids": job_model.dependency_ids.split(",") if job_model.dependency_ids else [],
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
serializer = JobDetailSerializer(data=job_dict)
|
|
214
|
+
serializer.is_valid(raise_exception=True)
|
|
215
|
+
return Response(serializer.data)
|
|
216
|
+
|
|
217
|
+
except Exception as e:
|
|
218
|
+
logger.error(f"Job detail error: {e}", exc_info=True)
|
|
219
|
+
return Response(
|
|
220
|
+
{"error": "Internal server error"},
|
|
221
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
222
|
+
)
|
|
223
|
+
|
|
224
|
+
@extend_schema(
|
|
225
|
+
tags=["RQ Jobs"],
|
|
226
|
+
summary="Cancel job",
|
|
227
|
+
description="Cancels a job (if it's queued or started).",
|
|
228
|
+
responses={
|
|
229
|
+
200: JobActionResponseSerializer,
|
|
230
|
+
404: {"description": "Job not found"},
|
|
231
|
+
},
|
|
232
|
+
)
|
|
233
|
+
@action(detail=True, methods=["post"], url_path="cancel")
|
|
234
|
+
def cancel(self, request, pk=None):
|
|
235
|
+
"""Cancel job."""
|
|
236
|
+
try:
|
|
237
|
+
from rq.job import Job
|
|
238
|
+
from django.conf import settings
|
|
239
|
+
import django_rq
|
|
240
|
+
|
|
241
|
+
# Try to find job in all queues
|
|
242
|
+
job = None
|
|
243
|
+
|
|
244
|
+
if hasattr(settings, 'RQ_QUEUES'):
|
|
245
|
+
for queue_name in settings.RQ_QUEUES.keys():
|
|
246
|
+
try:
|
|
247
|
+
queue = django_rq.get_queue(queue_name)
|
|
248
|
+
job = Job.fetch(pk, connection=queue.connection)
|
|
249
|
+
break
|
|
250
|
+
except Exception:
|
|
251
|
+
continue
|
|
252
|
+
|
|
253
|
+
if not job:
|
|
254
|
+
return Response(
|
|
255
|
+
{"error": f"Job {pk} not found"},
|
|
256
|
+
status=status.HTTP_404_NOT_FOUND,
|
|
257
|
+
)
|
|
258
|
+
|
|
259
|
+
# Cancel job
|
|
260
|
+
job.cancel()
|
|
261
|
+
|
|
262
|
+
response_data = {
|
|
263
|
+
"success": True,
|
|
264
|
+
"message": f"Job {pk} canceled successfully",
|
|
265
|
+
"job_id": pk,
|
|
266
|
+
"action": "cancel",
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
serializer = JobActionResponseSerializer(data=response_data)
|
|
270
|
+
serializer.is_valid(raise_exception=True)
|
|
271
|
+
return Response(serializer.data)
|
|
272
|
+
|
|
273
|
+
except Exception as e:
|
|
274
|
+
logger.error(f"Job cancel error: {e}", exc_info=True)
|
|
275
|
+
return Response(
|
|
276
|
+
{"error": f"Failed to cancel job: {str(e)}"},
|
|
277
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
278
|
+
)
|
|
279
|
+
|
|
280
|
+
@extend_schema(
|
|
281
|
+
tags=["RQ Jobs"],
|
|
282
|
+
summary="Requeue job",
|
|
283
|
+
description="Requeues a failed job.",
|
|
284
|
+
responses={
|
|
285
|
+
200: JobActionResponseSerializer,
|
|
286
|
+
404: {"description": "Job not found"},
|
|
287
|
+
},
|
|
288
|
+
)
|
|
289
|
+
@action(detail=True, methods=["post"], url_path="requeue")
|
|
290
|
+
def requeue(self, request, pk=None):
|
|
291
|
+
"""Requeue failed job."""
|
|
292
|
+
try:
|
|
293
|
+
from rq.job import Job
|
|
294
|
+
from django.conf import settings
|
|
295
|
+
import django_rq
|
|
296
|
+
|
|
297
|
+
# Try to find job in all queues
|
|
298
|
+
job = None
|
|
299
|
+
queue = None
|
|
300
|
+
|
|
301
|
+
if hasattr(settings, 'RQ_QUEUES'):
|
|
302
|
+
for queue_name in settings.RQ_QUEUES.keys():
|
|
303
|
+
try:
|
|
304
|
+
queue = django_rq.get_queue(queue_name)
|
|
305
|
+
job = Job.fetch(pk, connection=queue.connection)
|
|
306
|
+
break
|
|
307
|
+
except Exception:
|
|
308
|
+
continue
|
|
309
|
+
|
|
310
|
+
if not job or not queue:
|
|
311
|
+
return Response(
|
|
312
|
+
{"error": f"Job {pk} not found"},
|
|
313
|
+
status=status.HTTP_404_NOT_FOUND,
|
|
314
|
+
)
|
|
315
|
+
|
|
316
|
+
# Requeue job
|
|
317
|
+
queue.failed_job_registry.requeue(pk)
|
|
318
|
+
|
|
319
|
+
response_data = {
|
|
320
|
+
"success": True,
|
|
321
|
+
"message": f"Job {pk} requeued successfully",
|
|
322
|
+
"job_id": pk,
|
|
323
|
+
"action": "requeue",
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
serializer = JobActionResponseSerializer(data=response_data)
|
|
327
|
+
serializer.is_valid(raise_exception=True)
|
|
328
|
+
return Response(serializer.data)
|
|
329
|
+
|
|
330
|
+
except Exception as e:
|
|
331
|
+
logger.error(f"Job requeue error: {e}", exc_info=True)
|
|
332
|
+
return Response(
|
|
333
|
+
{"error": f"Failed to requeue job: {str(e)}"},
|
|
334
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
335
|
+
)
|
|
336
|
+
|
|
337
|
+
@extend_schema(
|
|
338
|
+
tags=["RQ Jobs"],
|
|
339
|
+
summary="Delete job",
|
|
340
|
+
description="Deletes a job from the queue.",
|
|
341
|
+
responses={
|
|
342
|
+
200: JobActionResponseSerializer,
|
|
343
|
+
404: {"description": "Job not found"},
|
|
344
|
+
},
|
|
345
|
+
)
|
|
346
|
+
def destroy(self, request, pk=None):
|
|
347
|
+
"""Delete job."""
|
|
348
|
+
try:
|
|
349
|
+
from rq.job import Job
|
|
350
|
+
from django.conf import settings
|
|
351
|
+
import django_rq
|
|
352
|
+
|
|
353
|
+
# Try to find job in all queues
|
|
354
|
+
job = None
|
|
355
|
+
|
|
356
|
+
if hasattr(settings, 'RQ_QUEUES'):
|
|
357
|
+
for queue_name in settings.RQ_QUEUES.keys():
|
|
358
|
+
try:
|
|
359
|
+
queue = django_rq.get_queue(queue_name)
|
|
360
|
+
job = Job.fetch(pk, connection=queue.connection)
|
|
361
|
+
break
|
|
362
|
+
except Exception:
|
|
363
|
+
continue
|
|
364
|
+
|
|
365
|
+
if not job:
|
|
366
|
+
return Response(
|
|
367
|
+
{"error": f"Job {pk} not found"},
|
|
368
|
+
status=status.HTTP_404_NOT_FOUND,
|
|
369
|
+
)
|
|
370
|
+
|
|
371
|
+
# Delete job
|
|
372
|
+
job.delete()
|
|
373
|
+
|
|
374
|
+
response_data = {
|
|
375
|
+
"success": True,
|
|
376
|
+
"message": f"Job {pk} deleted successfully",
|
|
377
|
+
"job_id": pk,
|
|
378
|
+
"action": "delete",
|
|
379
|
+
}
|
|
380
|
+
|
|
381
|
+
serializer = JobActionResponseSerializer(data=response_data)
|
|
382
|
+
serializer.is_valid(raise_exception=True)
|
|
383
|
+
return Response(serializer.data)
|
|
384
|
+
|
|
385
|
+
except Exception as e:
|
|
386
|
+
logger.error(f"Job delete error: {e}", exc_info=True)
|
|
387
|
+
return Response(
|
|
388
|
+
{"error": "Internal server error"},
|
|
389
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
390
|
+
)
|
|
391
|
+
|
|
392
|
+
# Registry Management Endpoints
|
|
393
|
+
|
|
394
|
+
@extend_schema(
|
|
395
|
+
tags=["RQ Registries"],
|
|
396
|
+
summary="List failed jobs",
|
|
397
|
+
description="Returns list of all failed jobs from failed job registry.",
|
|
398
|
+
parameters=[
|
|
399
|
+
OpenApiParameter(
|
|
400
|
+
name="queue",
|
|
401
|
+
type=str,
|
|
402
|
+
location=OpenApiParameter.QUERY,
|
|
403
|
+
required=False,
|
|
404
|
+
description="Filter by queue name",
|
|
405
|
+
),
|
|
406
|
+
],
|
|
407
|
+
responses={
|
|
408
|
+
200: JobListSerializer(many=True),
|
|
409
|
+
},
|
|
410
|
+
)
|
|
411
|
+
@action(detail=False, methods=["get"], url_path="registries/failed")
|
|
412
|
+
def failed_jobs(self, request):
|
|
413
|
+
"""List all failed jobs."""
|
|
414
|
+
try:
|
|
415
|
+
import django_rq
|
|
416
|
+
from django.conf import settings
|
|
417
|
+
from rq.job import Job
|
|
418
|
+
|
|
419
|
+
queue_filter = request.query_params.get('queue')
|
|
420
|
+
queue_names = settings.RQ_QUEUES.keys() if hasattr(settings, 'RQ_QUEUES') else []
|
|
421
|
+
|
|
422
|
+
if queue_filter:
|
|
423
|
+
queue_names = [q for q in queue_names if q == queue_filter]
|
|
424
|
+
|
|
425
|
+
all_jobs = []
|
|
426
|
+
|
|
427
|
+
for queue_name in queue_names:
|
|
428
|
+
try:
|
|
429
|
+
queue = django_rq.get_queue(queue_name)
|
|
430
|
+
failed_registry = queue.failed_job_registry
|
|
431
|
+
|
|
432
|
+
# Get failed job IDs
|
|
433
|
+
job_ids = failed_registry.get_job_ids()
|
|
434
|
+
|
|
435
|
+
for job_id in job_ids:
|
|
436
|
+
try:
|
|
437
|
+
job = Job.fetch(job_id, connection=queue.connection)
|
|
438
|
+
|
|
439
|
+
# Convert RQ Job to Pydantic model
|
|
440
|
+
job_model = job_to_model(job, queue_name)
|
|
441
|
+
|
|
442
|
+
# Convert to dict for DRF serializer (JobListSerializer needs minimal fields)
|
|
443
|
+
job_data = {
|
|
444
|
+
"id": job_model.id,
|
|
445
|
+
"func_name": job_model.func_name,
|
|
446
|
+
"created_at": job_model.created_at,
|
|
447
|
+
"status": job_model.status,
|
|
448
|
+
"queue": job_model.queue,
|
|
449
|
+
"timeout": job_model.timeout,
|
|
450
|
+
}
|
|
451
|
+
all_jobs.append(job_data)
|
|
452
|
+
except Exception as e:
|
|
453
|
+
logger.debug(f"Failed to fetch job {job_id}: {e}")
|
|
454
|
+
|
|
455
|
+
except Exception as e:
|
|
456
|
+
logger.debug(f"Failed to get failed jobs for queue {queue_name}: {e}")
|
|
457
|
+
|
|
458
|
+
serializer = JobListSerializer(data=all_jobs, many=True)
|
|
459
|
+
serializer.is_valid(raise_exception=True)
|
|
460
|
+
return Response(serializer.data)
|
|
461
|
+
|
|
462
|
+
except Exception as e:
|
|
463
|
+
logger.error(f"Failed jobs list error: {e}", exc_info=True)
|
|
464
|
+
return Response(
|
|
465
|
+
{"error": "Internal server error"},
|
|
466
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
467
|
+
)
|
|
468
|
+
|
|
469
|
+
@extend_schema(
|
|
470
|
+
tags=["RQ Registries"],
|
|
471
|
+
summary="List finished jobs",
|
|
472
|
+
description="Returns list of all finished jobs from finished job registry.",
|
|
473
|
+
parameters=[
|
|
474
|
+
OpenApiParameter(
|
|
475
|
+
name="queue",
|
|
476
|
+
type=str,
|
|
477
|
+
location=OpenApiParameter.QUERY,
|
|
478
|
+
required=False,
|
|
479
|
+
description="Filter by queue name",
|
|
480
|
+
),
|
|
481
|
+
],
|
|
482
|
+
responses={
|
|
483
|
+
200: JobListSerializer(many=True),
|
|
484
|
+
},
|
|
485
|
+
)
|
|
486
|
+
@action(detail=False, methods=["get"], url_path="registries/finished")
|
|
487
|
+
def finished_jobs(self, request):
|
|
488
|
+
"""List all finished jobs."""
|
|
489
|
+
try:
|
|
490
|
+
import django_rq
|
|
491
|
+
from django.conf import settings
|
|
492
|
+
from rq.job import Job
|
|
493
|
+
|
|
494
|
+
queue_filter = request.query_params.get('queue')
|
|
495
|
+
queue_names = settings.RQ_QUEUES.keys() if hasattr(settings, 'RQ_QUEUES') else []
|
|
496
|
+
|
|
497
|
+
if queue_filter:
|
|
498
|
+
queue_names = [q for q in queue_names if q == queue_filter]
|
|
499
|
+
|
|
500
|
+
all_jobs = []
|
|
501
|
+
|
|
502
|
+
for queue_name in queue_names:
|
|
503
|
+
try:
|
|
504
|
+
queue = django_rq.get_queue(queue_name)
|
|
505
|
+
finished_registry = queue.finished_job_registry
|
|
506
|
+
|
|
507
|
+
# Get finished job IDs
|
|
508
|
+
job_ids = finished_registry.get_job_ids()
|
|
509
|
+
|
|
510
|
+
for job_id in job_ids:
|
|
511
|
+
try:
|
|
512
|
+
job = Job.fetch(job_id, connection=queue.connection)
|
|
513
|
+
|
|
514
|
+
# Convert RQ Job to Pydantic model
|
|
515
|
+
job_model = job_to_model(job, queue_name)
|
|
516
|
+
|
|
517
|
+
# Convert to dict for DRF serializer (JobListSerializer needs minimal fields)
|
|
518
|
+
job_data = {
|
|
519
|
+
"id": job_model.id,
|
|
520
|
+
"func_name": job_model.func_name,
|
|
521
|
+
"created_at": job_model.created_at,
|
|
522
|
+
"status": job_model.status,
|
|
523
|
+
"queue": job_model.queue,
|
|
524
|
+
"timeout": job_model.timeout,
|
|
525
|
+
}
|
|
526
|
+
all_jobs.append(job_data)
|
|
527
|
+
except Exception as e:
|
|
528
|
+
logger.debug(f"Failed to fetch job {job_id}: {e}")
|
|
529
|
+
|
|
530
|
+
except Exception as e:
|
|
531
|
+
logger.debug(f"Failed to get finished jobs for queue {queue_name}: {e}")
|
|
532
|
+
|
|
533
|
+
serializer = JobListSerializer(data=all_jobs, many=True)
|
|
534
|
+
serializer.is_valid(raise_exception=True)
|
|
535
|
+
return Response(serializer.data)
|
|
536
|
+
|
|
537
|
+
except Exception as e:
|
|
538
|
+
logger.error(f"Finished jobs list error: {e}", exc_info=True)
|
|
539
|
+
return Response(
|
|
540
|
+
{"error": "Internal server error"},
|
|
541
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
542
|
+
)
|
|
543
|
+
|
|
544
|
+
@extend_schema(
|
|
545
|
+
tags=["RQ Registries"],
|
|
546
|
+
summary="Requeue all failed jobs",
|
|
547
|
+
description="Requeues all failed jobs in the failed job registry.",
|
|
548
|
+
parameters=[
|
|
549
|
+
OpenApiParameter(
|
|
550
|
+
name="queue",
|
|
551
|
+
type=str,
|
|
552
|
+
location=OpenApiParameter.QUERY,
|
|
553
|
+
required=True,
|
|
554
|
+
description="Queue name",
|
|
555
|
+
),
|
|
556
|
+
],
|
|
557
|
+
responses={
|
|
558
|
+
200: JobActionResponseSerializer,
|
|
559
|
+
},
|
|
560
|
+
)
|
|
561
|
+
@action(detail=False, methods=["post"], url_path="registries/failed/requeue-all")
|
|
562
|
+
def requeue_all_failed(self, request):
|
|
563
|
+
"""Requeue all failed jobs."""
|
|
564
|
+
try:
|
|
565
|
+
import django_rq
|
|
566
|
+
|
|
567
|
+
queue_name = request.query_params.get('queue')
|
|
568
|
+
if not queue_name:
|
|
569
|
+
return Response(
|
|
570
|
+
{"error": "queue parameter is required"},
|
|
571
|
+
status=status.HTTP_400_BAD_REQUEST,
|
|
572
|
+
)
|
|
573
|
+
|
|
574
|
+
queue = django_rq.get_queue(queue_name)
|
|
575
|
+
failed_registry = queue.failed_job_registry
|
|
576
|
+
|
|
577
|
+
# Get all failed job IDs
|
|
578
|
+
job_ids = failed_registry.get_job_ids()
|
|
579
|
+
count = len(job_ids)
|
|
580
|
+
|
|
581
|
+
# Requeue all
|
|
582
|
+
for job_id in job_ids:
|
|
583
|
+
try:
|
|
584
|
+
failed_registry.requeue(job_id)
|
|
585
|
+
except Exception as e:
|
|
586
|
+
logger.debug(f"Failed to requeue job {job_id}: {e}")
|
|
587
|
+
|
|
588
|
+
response_data = {
|
|
589
|
+
"success": True,
|
|
590
|
+
"message": f"Requeued {count} failed jobs from queue '{queue_name}'",
|
|
591
|
+
"job_id": None,
|
|
592
|
+
"action": "requeue_all",
|
|
593
|
+
}
|
|
594
|
+
|
|
595
|
+
serializer = JobActionResponseSerializer(data=response_data)
|
|
596
|
+
serializer.is_valid(raise_exception=True)
|
|
597
|
+
return Response(serializer.data)
|
|
598
|
+
|
|
599
|
+
except Exception as e:
|
|
600
|
+
logger.error(f"Requeue all failed error: {e}", exc_info=True)
|
|
601
|
+
return Response(
|
|
602
|
+
{"error": f"Failed to requeue jobs: {str(e)}"},
|
|
603
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
604
|
+
)
|
|
605
|
+
|
|
606
|
+
@extend_schema(
|
|
607
|
+
tags=["RQ Registries"],
|
|
608
|
+
summary="Clear failed jobs registry",
|
|
609
|
+
description="Removes all jobs from the failed job registry.",
|
|
610
|
+
parameters=[
|
|
611
|
+
OpenApiParameter(
|
|
612
|
+
name="queue",
|
|
613
|
+
type=str,
|
|
614
|
+
location=OpenApiParameter.QUERY,
|
|
615
|
+
required=True,
|
|
616
|
+
description="Queue name",
|
|
617
|
+
),
|
|
618
|
+
],
|
|
619
|
+
responses={
|
|
620
|
+
200: JobActionResponseSerializer,
|
|
621
|
+
},
|
|
622
|
+
)
|
|
623
|
+
@action(detail=False, methods=["post"], url_path="registries/failed/clear")
|
|
624
|
+
def clear_failed_registry(self, request):
|
|
625
|
+
"""Clear failed jobs registry."""
|
|
626
|
+
try:
|
|
627
|
+
import django_rq
|
|
628
|
+
from rq.job import Job
|
|
629
|
+
|
|
630
|
+
queue_name = request.query_params.get('queue')
|
|
631
|
+
if not queue_name:
|
|
632
|
+
return Response(
|
|
633
|
+
{"error": "queue parameter is required"},
|
|
634
|
+
status=status.HTTP_400_BAD_REQUEST,
|
|
635
|
+
)
|
|
636
|
+
|
|
637
|
+
queue = django_rq.get_queue(queue_name)
|
|
638
|
+
failed_registry = queue.failed_job_registry
|
|
639
|
+
|
|
640
|
+
# Get all failed job IDs
|
|
641
|
+
job_ids = failed_registry.get_job_ids()
|
|
642
|
+
count = len(job_ids)
|
|
643
|
+
|
|
644
|
+
# Delete all failed jobs
|
|
645
|
+
for job_id in job_ids:
|
|
646
|
+
try:
|
|
647
|
+
job = Job.fetch(job_id, connection=queue.connection)
|
|
648
|
+
failed_registry.remove(job, delete_job=True)
|
|
649
|
+
except Exception as e:
|
|
650
|
+
logger.debug(f"Failed to delete job {job_id}: {e}")
|
|
651
|
+
|
|
652
|
+
response_data = {
|
|
653
|
+
"success": True,
|
|
654
|
+
"message": f"Cleared {count} failed jobs from queue '{queue_name}'",
|
|
655
|
+
"job_id": None,
|
|
656
|
+
"action": "clear_failed",
|
|
657
|
+
}
|
|
658
|
+
|
|
659
|
+
serializer = JobActionResponseSerializer(data=response_data)
|
|
660
|
+
serializer.is_valid(raise_exception=True)
|
|
661
|
+
return Response(serializer.data)
|
|
662
|
+
|
|
663
|
+
except Exception as e:
|
|
664
|
+
logger.error(f"Clear failed registry error: {e}", exc_info=True)
|
|
665
|
+
return Response(
|
|
666
|
+
{"error": f"Failed to clear registry: {str(e)}"},
|
|
667
|
+
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
668
|
+
)
|
|
669
|
+
|
|
670
|
+
@extend_schema(
    tags=["RQ Registries"],
    summary="Clear finished jobs registry",
    description="Removes all jobs from the finished job registry.",
    parameters=[
        OpenApiParameter(
            name="queue",
            type=str,
            location=OpenApiParameter.QUERY,
            required=True,
            description="Queue name",
        ),
    ],
    responses={
        200: JobActionResponseSerializer,
    },
)
@action(detail=False, methods=["post"], url_path="registries/finished/clear")
def clear_finished_registry(self, request):
    """Clear finished jobs registry."""
    try:
        import django_rq
        from rq.job import Job

        # The target queue is mandatory for this destructive action.
        queue_name = request.query_params.get('queue')
        if not queue_name:
            return Response(
                {"error": "queue parameter is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        rq_queue = django_rq.get_queue(queue_name)
        registry = rq_queue.finished_job_registry

        # Snapshot the registry first so the reported count matches
        # the set of jobs we attempt to delete.
        ids = registry.get_job_ids()
        count = len(ids)

        # Best-effort removal: a job that vanished mid-iteration
        # (e.g. expired TTL) is logged at debug level, not fatal.
        for job_id in ids:
            try:
                rq_job = Job.fetch(job_id, connection=rq_queue.connection)
                registry.remove(rq_job, delete_job=True)
            except Exception as e:
                logger.debug(f"Failed to delete job {job_id}: {e}")

        serializer = JobActionResponseSerializer(data={
            "success": True,
            "message": f"Cleared {count} finished jobs from queue '{queue_name}'",
            "job_id": None,
            "action": "clear_finished",
        })
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)

    except Exception as e:
        logger.error(f"Clear finished registry error: {e}", exc_info=True)
        return Response(
            {"error": f"Failed to clear registry: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
|
|
733
|
+
|
|
734
|
+
@extend_schema(
    tags=["RQ Registries"],
    summary="List deferred jobs",
    description="Returns list of all deferred jobs from deferred job registry.",
    parameters=[
        OpenApiParameter(
            name="queue",
            type=str,
            location=OpenApiParameter.QUERY,
            required=False,
            description="Filter by queue name",
        ),
    ],
    responses={
        200: JobListSerializer(many=True),
    },
)
@action(detail=False, methods=["get"], url_path="registries/deferred")
def deferred_jobs(self, request):
    """List all deferred jobs."""
    try:
        import django_rq
        from django.conf import settings
        from rq.job import Job

        # Start from every configured queue; narrow to one if the
        # caller supplied a ?queue= filter.
        queue_filter = request.query_params.get('queue')
        configured = settings.RQ_QUEUES.keys() if hasattr(settings, 'RQ_QUEUES') else []
        if queue_filter:
            configured = [q for q in configured if q == queue_filter]

        all_jobs = []
        for queue_name in configured:
            try:
                rq_queue = django_rq.get_queue(queue_name)
                # Deferred registry: jobs parked until their dependencies finish.
                for job_id in rq_queue.deferred_job_registry.get_job_ids():
                    try:
                        rq_job = Job.fetch(job_id, connection=rq_queue.connection)
                        # Normalize the RQ job through the Pydantic model,
                        # then flatten to the dict shape the DRF serializer expects.
                        job_model = job_to_model(rq_job, queue_name)
                        all_jobs.append({
                            "id": job_model.id,
                            "func_name": job_model.func_name,
                            "created_at": job_model.created_at,
                            "status": job_model.status,
                            "queue": job_model.queue,
                            "timeout": job_model.timeout,
                        })
                    except Exception as e:
                        logger.debug(f"Failed to fetch job {job_id}: {e}")
            except Exception as e:
                logger.debug(f"Failed to get deferred jobs for queue {queue_name}: {e}")

        serializer = JobListSerializer(data=all_jobs, many=True)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)

    except Exception as e:
        logger.error(f"Deferred jobs list error: {e}", exc_info=True)
        return Response(
            {"error": "Internal server error"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
|
|
808
|
+
|
|
809
|
+
@extend_schema(
    tags=["RQ Registries"],
    summary="List started jobs",
    description="Returns list of all currently running jobs from started job registry.",
    parameters=[
        OpenApiParameter(
            name="queue",
            type=str,
            location=OpenApiParameter.QUERY,
            required=False,
            description="Filter by queue name",
        ),
    ],
    responses={
        200: JobListSerializer(many=True),
    },
)
@action(detail=False, methods=["get"], url_path="registries/started")
def started_jobs(self, request):
    """List all started (running) jobs."""
    try:
        import django_rq
        from django.conf import settings
        from rq.job import Job

        # Queues to inspect: all configured ones, or just the
        # single queue named in the optional ?queue= parameter.
        queue_filter = request.query_params.get('queue')
        configured = settings.RQ_QUEUES.keys() if hasattr(settings, 'RQ_QUEUES') else []
        if queue_filter:
            configured = [q for q in configured if q == queue_filter]

        all_jobs = []
        for queue_name in configured:
            try:
                rq_queue = django_rq.get_queue(queue_name)
                # Started registry: jobs a worker has picked up and is executing now.
                for job_id in rq_queue.started_job_registry.get_job_ids():
                    try:
                        rq_job = Job.fetch(job_id, connection=rq_queue.connection)
                        # Route through the Pydantic model, then flatten to the
                        # dict shape consumed by the DRF serializer below.
                        job_model = job_to_model(rq_job, queue_name)
                        all_jobs.append({
                            "id": job_model.id,
                            "func_name": job_model.func_name,
                            "created_at": job_model.created_at,
                            "status": job_model.status,
                            "queue": job_model.queue,
                            "timeout": job_model.timeout,
                        })
                    except Exception as e:
                        logger.debug(f"Failed to fetch job {job_id}: {e}")
            except Exception as e:
                logger.debug(f"Failed to get started jobs for queue {queue_name}: {e}")

        serializer = JobListSerializer(data=all_jobs, many=True)
        serializer.is_valid(raise_exception=True)
        return Response(serializer.data)

    except Exception as e:
        logger.error(f"Started jobs list error: {e}", exc_info=True)
        return Response(
            {"error": "Internal server error"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
|