aegis-stack 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aegis-stack might be problematic. Click here for more details.
- aegis/__init__.py +5 -0
- aegis/__main__.py +374 -0
- aegis/core/CLAUDE.md +365 -0
- aegis/core/__init__.py +6 -0
- aegis/core/components.py +115 -0
- aegis/core/dependency_resolver.py +119 -0
- aegis/core/template_generator.py +163 -0
- aegis/templates/CLAUDE.md +306 -0
- aegis/templates/cookiecutter-aegis-project/cookiecutter.json +27 -0
- aegis/templates/cookiecutter-aegis-project/hooks/post_gen_project.py +172 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/.dockerignore +71 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/.env.example.j2 +70 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/.gitignore +127 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/Dockerfile +53 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/Makefile +211 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/README.md.j2 +196 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/__init__.py +5 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/__init__.py +6 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/health.py +321 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/load_test.py +638 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/cli/main.py +41 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/__init__.py +0 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/__init__.py +0 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/health.py +134 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/models.py.j2 +247 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/routing.py.j2 +14 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/api/tasks.py.j2 +596 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/hooks.py +133 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/main.py +16 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/middleware/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/middleware/cors.py +20 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/shutdown/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/shutdown/cleanup.py +14 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/startup/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/backend/startup/component_health.py.j2 +190 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/__init__.py +0 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/core/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/core/theme.py +46 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/frontend/main.py +687 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/scheduler/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/scheduler/main.py +138 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/CLAUDE.md +213 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/__init__.py +6 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/constants.py.j2 +30 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/pools.py +78 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/load_test.py +48 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/media.py +41 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/queues/system.py +36 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/registry.py +139 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/__init__.py +119 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/load_tasks.py +526 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/simple_system_tasks.py +32 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/components/worker/tasks/system_tasks.py +279 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/config.py.j2 +119 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/constants.py +60 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/db.py +67 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/core/log.py +85 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/entrypoints/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/entrypoints/webserver.py +40 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/entrypoints/{% if cookiecutter.include_scheduler == "yes" %}scheduler.py{% endif %} +21 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/integrations/__init__.py +0 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/integrations/main.py +61 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/py.typed +0 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/load_test.py +661 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/load_test_models.py +269 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/shared/__init__.py +15 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/shared/models.py +26 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/__init__.py +52 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/alerts.py +94 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/health.py.j2 +1105 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/models.py +169 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/app/services/system/ui.py +52 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docker-compose.yml.j2 +195 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/api.md +191 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/components/scheduler.md +414 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/development.md +215 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/health.md +240 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/javascripts/mermaid-config.js +62 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/docs/stylesheets/mermaid.css +95 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/mkdocs.yml.j2 +62 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/pyproject.toml.j2 +156 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/scripts/entrypoint.sh +87 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/scripts/entrypoint.sh.j2 +104 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/scripts/gen_docs.py +16 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/api/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/api/test_health_endpoints.py.j2 +239 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/components/test_scheduler.py +76 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/conftest.py.j2 +81 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/__init__.py +1 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_component_integration.py.j2 +376 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_health_logic.py.j2 +633 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_load_test_models.py +665 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_load_test_service.py +602 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_system_service.py +96 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/services/test_worker_health_registration.py.j2 +224 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/tests/test_core.py +50 -0
- aegis/templates/cookiecutter-aegis-project/{{cookiecutter.project_slug}}/uv.lock +1673 -0
- aegis_stack-0.1.0.dist-info/METADATA +114 -0
- aegis_stack-0.1.0.dist-info/RECORD +103 -0
- aegis_stack-0.1.0.dist-info/WHEEL +4 -0
- aegis_stack-0.1.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,596 @@
|
|
|
1
|
+
{%- if cookiecutter.include_worker == "yes" %}
|
|
2
|
+
"""
|
|
3
|
+
Background tasks API endpoints.
|
|
4
|
+
|
|
5
|
+
Provides endpoints for enqueueing and monitoring background tasks
|
|
6
|
+
using the arq worker infrastructure.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from datetime import datetime, timedelta
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
from fastapi import APIRouter, HTTPException
|
|
13
|
+
|
|
14
|
+
from app.components.backend.api.models import (
|
|
15
|
+
LoadTestRequest,
|
|
16
|
+
TaskListResponse,
|
|
17
|
+
TaskRequest,
|
|
18
|
+
TaskResponse,
|
|
19
|
+
TaskResultResponse,
|
|
20
|
+
TaskStatusResponse,
|
|
21
|
+
)
|
|
22
|
+
from app.components.worker.constants import LoadTestTypes
|
|
23
|
+
from app.components.worker.pools import get_queue_pool
|
|
24
|
+
from app.components.worker.tasks import get_task_by_name, list_available_tasks
|
|
25
|
+
from app.core.config import get_default_queue
|
|
26
|
+
from app.core.log import logger
|
|
27
|
+
|
|
28
|
+
router = APIRouter(prefix="/tasks", tags=["tasks"])
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@router.get("/", response_model=TaskListResponse)
async def list_tasks() -> TaskListResponse:
    """
    Get list of all available background tasks.

    Returns:
        List of available task names organized by functional queue
    """
    available_tasks = list_available_tasks()

    # Imported here to avoid circular imports between worker and config modules.
    from app.components.worker.tasks import get_queue_for_task
    from app.core.config import get_available_queues

    # Seed every configured queue with an empty list so the response always
    # reports all queues, even those with no tasks assigned.
    queues: dict[str, list[str]] = {
        queue_type: [] for queue_type in get_available_queues()
    }

    for task in available_tasks:
        queues[get_queue_for_task(task)].append(task)

    return TaskListResponse(
        available_tasks=available_tasks,
        total_count=len(available_tasks),
        queues=queues,
    )
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@router.post("/enqueue", response_model=TaskResponse)
async def enqueue_task(task_request: TaskRequest) -> TaskResponse:
    """
    Enqueue a background task for processing.

    Args:
        task_request: Task details including name, priority, and arguments

    Returns:
        Task enqueue confirmation with task ID

    Raises:
        HTTPException: If task name is invalid or enqueueing fails
    """
    logger.info(
        f"Enqueueing task: {task_request.task_name} (queue: {task_request.queue_type})"
    )

    # Validate task exists before touching the queue infrastructure.
    task_func = get_task_by_name(task_request.task_name)
    if not task_func:
        available_tasks = list_available_tasks()
        raise HTTPException(
            status_code=400,
            detail={
                "error": "invalid_task_name",
                "message": f"Task '{task_request.task_name}' not found",
                "available_tasks": available_tasks,
            },
        )

    # Validate queue type (imported here to avoid circular imports).
    from app.core.config import get_available_queues, is_valid_queue

    if not is_valid_queue(task_request.queue_type):
        available_queues = get_available_queues()
        raise HTTPException(
            status_code=400,
            detail={
                "error": "invalid_queue_type",
                "message": f"Queue type must be one of: {available_queues}",
            },
        )

    try:
        # Get appropriate queue pool and queue name
        pool, queue_name = await get_queue_pool(task_request.queue_type)
        try:
            # Enqueue the task with the specific queue name
            job = await pool.enqueue_job(
                task_request.task_name,
                *task_request.args,
                _queue_name=queue_name,
                _defer_by=task_request.delay_seconds,
                **task_request.kwargs,
            )
        finally:
            # Bug fix: release the Redis pool even when enqueue_job raises
            # (the original only closed it on the success path).
            await pool.aclose()  # Use aclose() instead of close()

        queued_at = datetime.now()
        estimated_start = None
        if task_request.delay_seconds:
            estimated_start = queued_at + timedelta(seconds=task_request.delay_seconds)

        if job is None:
            raise HTTPException(status_code=500, detail="Failed to enqueue task")

        logger.info(f"✅ Task enqueued: {job.job_id} ({task_request.task_name})")

        return TaskResponse(
            task_id=job.job_id,
            task_name=task_request.task_name,
            queue_type=task_request.queue_type,
            queued_at=queued_at,
            estimated_start=estimated_start,
            message=(
                f"Task '{task_request.task_name}' enqueued to "
                f"{task_request.queue_type} queue"
            ),
        )

    except HTTPException:
        # Bug fix: without this, the deliberate 500 raised above was caught by
        # the generic handler below and re-wrapped, losing its detail payload.
        raise
    except Exception as e:
        logger.error(f"Failed to enqueue task {task_request.task_name}: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": "enqueue_failed",
                "message": f"Failed to enqueue task: {str(e)}",
            },
        )
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
@router.get("/status/{task_id}", response_model=TaskStatusResponse)
async def get_task_status(task_id: str) -> TaskStatusResponse:
    """
    Get the status of a background task.

    Args:
        task_id: The task ID returned when enqueueing

    Returns:
        Task status information

    Raises:
        HTTPException: 404 if the task is unknown, 500 if the lookup fails
    """
    try:
        # Get Redis pool to check job status (use default queue for status checks)
        pool, _ = await get_queue_pool(get_default_queue())
        try:
            # arq stores pending jobs and finished results under these key patterns.
            job_key = f"arq:job:{task_id}"
            result_key = f"arq:result:{task_id}"

            job_exists = await pool.exists(job_key)
            result_exists = await pool.exists(result_key)

            if not job_exists and not result_exists:
                raise HTTPException(
                    status_code=404,
                    detail={
                        "error": "task_not_found",
                        "message": f"Task {task_id} not found",
                    },
                )

            # Determine status based on what exists in Redis
            if result_exists:
                # Task completed (successfully or failed); inspect the stored
                # result to distinguish the two.
                status = "complete"
                error = None
                result_data = await pool.get(result_key)
                if result_data:
                    try:
                        import pickle

                        # NOTE: pickle data here is arq's own result store, not
                        # external input.
                        result = pickle.loads(result_data)
                        if isinstance(result, dict) and result.get("error"):
                            status = "failed"
                            error = result.get("error")
                    except Exception:
                        # Undeserializable result data still counts as complete.
                        pass
            elif job_exists:
                # Job is queued or in progress; we can't easily distinguish
                # queued vs in_progress without more Redis inspection.
                status = "queued"
                error = None
            else:
                # Defensive: unreachable given the 404 check above.
                status = "unknown"
                error = None
        finally:
            # Bug fix: release the Redis pool on every path (the original
            # leaked it when a Redis call raised).
            await pool.aclose()

        return TaskStatusResponse(
            task_id=task_id,
            status=status,
            result_available=result_exists,
            error=error,
            enqueue_time=None,
            start_time=None,
            finish_time=None
        )

    except HTTPException:
        # Bug fix: without this, the 404 above was swallowed by the generic
        # handler below and reported as a 500.
        raise
    except Exception as e:
        logger.error(f"Failed to get task status for {task_id}: {e}")
        raise HTTPException(
            status_code=500, detail={"error": "status_check_failed", "message": str(e)}
        )
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
@router.get("/result/{task_id}", response_model=TaskResultResponse)
async def get_task_result(task_id: str) -> TaskResultResponse:
    """
    Get the result of a completed background task.

    Args:
        task_id: The task ID returned when enqueueing

    Returns:
        Task result data

    Raises:
        HTTPException: 404 if the task is unknown, 400 if it has not completed,
            500 if the result is missing or cannot be deserialized
    """
    try:
        # Get Redis pool to check job result (use default queue for result checks)
        pool, _ = await get_queue_pool(get_default_queue())
        try:
            # Check if result exists
            result_key = f"arq:result:{task_id}"
            result_exists = await pool.exists(result_key)

            if not result_exists:
                # No result yet: distinguish "unknown task" from "still running".
                job_key = f"arq:job:{task_id}"
                job_exists = await pool.exists(job_key)

                if not job_exists:
                    raise HTTPException(
                        status_code=404,
                        detail={
                            "error": "task_not_found",
                            "message": f"Task {task_id} not found",
                        },
                    )
                raise HTTPException(
                    status_code=400,
                    detail={
                        "error": "task_not_completed",
                        "message": f"Task {task_id} has not completed yet",
                        "current_status": "queued or in_progress",
                    },
                )

            # Get the result data
            result_data = await pool.get(result_key)
        finally:
            # Bug fix: the original leaked the pool when a Redis call raised;
            # always release it.
            await pool.aclose()

        if not result_data:
            raise HTTPException(
                status_code=500,
                detail={
                    "error": "result_data_missing",
                    "message": "Result data is missing",
                },
            )

        # Deserialize the result
        try:
            import pickle

            # NOTE: pickle data here is arq's own result store, not external input.
            result = pickle.loads(result_data)

            # Handle error results (failed tasks store exception objects)
            if isinstance(result, Exception):
                result_data = {
                    "error_type": type(result).__name__,
                    "error_message": str(result),
                    "task_failed": True,
                }
                task_status = "failed"
            else:
                # For successful results, ensure they're JSON-serializable
                try:
                    import json

                    json.dumps(result)  # Test if it's serializable
                    result_data = result
                    task_status = "completed"
                except (TypeError, ValueError):
                    # If not serializable, convert to string representation
                    result_data = {
                        "result_type": type(result).__name__,
                        "result_str": str(result),
                        "note": "Result was not JSON-serializable, converted to string",
                    }
                    task_status = "completed"

            return TaskResultResponse(
                task_id=task_id,
                status=task_status,
                result=result_data,
                enqueue_time=None,
                start_time=None,
                finish_time=None
            )

        except Exception as e:
            raise HTTPException(
                status_code=500,
                detail={
                    "error": "result_deserialization_failed",
                    "message": f"Failed to deserialize result: {str(e)}",
                },
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to get task result for {task_id}: {e}")
        raise HTTPException(
            status_code=500, detail={"error": "result_fetch_failed", "message": str(e)}
        )
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
@router.post("/load-test", response_model=TaskResponse)
async def start_load_test(load_test_config: LoadTestRequest) -> TaskResponse:
    """
    Start a comprehensive load test that measures queue throughput.

    This orchestrates many lightweight tasks to stress test the queue
    infrastructure and provide meaningful performance metrics including
    task-type-specific verification and analysis.

    Args:
        load_test_config: Load test configuration with parameters

    Returns:
        Task response with load test orchestrator job ID
    """
    from app.components.worker.constants import TaskNames
    from app.services.load_test import (
        LoadTestConfiguration,
        LoadTestService,
    )

    logger.info(
        f"🚀 Starting load test: {load_test_config.num_tasks} "
        f"{load_test_config.task_type} tasks"
    )

    # Reject unknown test types up front, echoing the full list of valid options.
    valid_types = [
        LoadTestTypes.CPU_INTENSIVE,
        LoadTestTypes.IO_SIMULATION,
        LoadTestTypes.MEMORY_OPERATIONS,
        LoadTestTypes.FAILURE_TESTING,
    ]
    if load_test_config.task_type not in valid_types:
        raise HTTPException(
            status_code=400,
            detail={
                "error": "invalid_task_type",
                "message": f"Invalid task type: {load_test_config.task_type}",
                "valid_types": valid_types,
            },
        )

    # Translate the API request into the service-layer configuration object.
    service_config = LoadTestConfiguration(
        num_tasks=load_test_config.num_tasks,
        task_type=load_test_config.task_type,
        batch_size=load_test_config.batch_size,
        delay_ms=load_test_config.delay_ms,
        target_queue=load_test_config.target_queue,
    )

    try:
        orchestrator_id = await LoadTestService.enqueue_load_test(service_config)
    except Exception as e:
        logger.error(f"Failed to enqueue load test: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": "load_test_failed",
                "message": f"Failed to start load test: {str(e)}",
            },
        )

    return TaskResponse(
        task_id=orchestrator_id,
        task_name=TaskNames.LOAD_TEST_ORCHESTRATOR,
        queue_type=load_test_config.target_queue,
        queued_at=datetime.now(),
        estimated_start=None,
        message=(
            f"Load test '{load_test_config.task_type}' enqueued: "
            f"{load_test_config.num_tasks} tasks to "
            f"{load_test_config.target_queue} queue"
        ),
    )
|
|
432
|
+
|
|
433
|
+
|
|
434
|
+
@router.post("/examples/load-test-small", response_model=TaskResponse)
async def enqueue_small_load_test() -> TaskResponse:
    """
    Example: Small load test with 50 CPU tasks.

    Good for testing basic queue functionality and getting quick results.
    """
    # Delegate to the generic load-test endpoint with a canned configuration.
    return await start_load_test(
        LoadTestRequest(
            num_tasks=50,
            task_type=LoadTestTypes.CPU_INTENSIVE,
            batch_size=10,
            delay_ms=0,
            target_queue=get_default_queue(),
        )
    )
|
|
450
|
+
|
|
451
|
+
|
|
452
|
+
@router.post("/examples/load-test-medium", response_model=TaskResponse)
async def enqueue_medium_load_test() -> TaskResponse:
    """
    Example: Medium load test with 200 I/O tasks.

    Tests concurrent async task handling with realistic batching.
    """
    # Delegate to the generic load-test endpoint with a canned configuration.
    return await start_load_test(
        LoadTestRequest(
            num_tasks=200,
            task_type=LoadTestTypes.IO_SIMULATION,
            batch_size=20,
            delay_ms=50,
            target_queue=get_default_queue(),
        )
    )
|
|
468
|
+
|
|
469
|
+
|
|
470
|
+
@router.post("/examples/load-test-large", response_model=TaskResponse)
async def enqueue_large_load_test() -> TaskResponse:
    """
    Example: Large load test with 1000 memory tasks.

    Stress tests queue capacity and worker performance under heavy load.
    """
    # Delegate to the generic load-test endpoint with a canned configuration.
    return await start_load_test(
        LoadTestRequest(
            num_tasks=1000,
            task_type=LoadTestTypes.MEMORY_OPERATIONS,
            batch_size=50,
            delay_ms=0,
            target_queue=get_default_queue(),
        )
    )
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
@router.get("/load-test-result/{task_id}")
async def get_load_test_result(
    task_id: str, target_queue: str | None = None
) -> dict[str, Any]:
    """
    Get enhanced load test results with analysis and verification.

    Returns comprehensive load test results including performance analysis,
    test type verification, and recommendations for optimization.

    Args:
        task_id: The load test orchestrator task ID
        target_queue: Queue where the test was run (defaults to configured
            load_test queue)

    Returns:
        Enhanced load test results with analysis
    """
    from app.services.load_test import LoadTestService

    try:
        result = await LoadTestService.get_load_test_result(task_id, target_queue)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to get load test result for {task_id}: {e}")
        raise HTTPException(
            status_code=500,
            detail={
                "error": "result_retrieval_failed",
                "message": f"Failed to retrieve load test results: {str(e)}",
            },
        )

    # An empty result means the orchestrator task is unknown (or results expired).
    if not result:
        raise HTTPException(
            status_code=404,
            detail={
                "error": "load_test_not_found",
                "message": f"No load test results found for task {task_id}",
                "task_id": task_id,
                "target_queue": target_queue,
            },
        )

    return result
|
|
535
|
+
|
|
536
|
+
|
|
537
|
+
@router.get("/load-test-types")
async def get_load_test_types() -> dict[str, Any]:
    """
    Get information about available load test types.

    Returns detailed information about each test type including
    expected metrics, performance characteristics, and usage guidance.

    Returns:
        Dictionary of test type information
    """
    from app.components.worker.constants import LoadTestTypes
    from app.services.load_test import LoadTestService

    # Collect per-type metadata from the service layer for every known type.
    test_types = {
        test_type: LoadTestService.get_test_type_info(test_type)
        for test_type in (
            LoadTestTypes.CPU_INTENSIVE,
            LoadTestTypes.IO_SIMULATION,
            LoadTestTypes.MEMORY_OPERATIONS,
            LoadTestTypes.FAILURE_TESTING,
        )
    }

    # Static usage examples mirroring the /load-test request schema.
    usage_examples = {
        "quick_cpu_test": {
            "description": "Quick CPU test with 50 tasks",
            "parameters": {
                "num_tasks": 50,
                "task_type": "cpu_intensive",
                "batch_size": 10,
                "target_queue": "load_test",
            },
        },
        "io_stress_test": {
            "description": "I/O stress test with concurrent operations",
            "parameters": {
                "num_tasks": 200,
                "task_type": "io_simulation",
                "batch_size": 20,
                "delay_ms": 50,
                "target_queue": "load_test",
            },
        },
        "memory_load_test": {
            "description": "Memory allocation test with GC pressure",
            "parameters": {
                "num_tasks": 500,
                "task_type": "memory_operations",
                "batch_size": 25,
                "target_queue": "media",
            },
        },
    }

    return {
        "available_test_types": test_types,
        "usage_examples": usage_examples,
    }
|
|
596
|
+
{%- endif %}
|