fairchild 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in their public registries.
- fairchild/__init__.py +11 -0
- fairchild/cli.py +386 -0
- fairchild/context.py +54 -0
- fairchild/db/__init__.py +0 -0
- fairchild/db/migrations.py +69 -0
- fairchild/fairchild.py +166 -0
- fairchild/future.py +78 -0
- fairchild/job.py +123 -0
- fairchild/record.py +22 -0
- fairchild/task.py +225 -0
- fairchild/templates/dashboard.html +1650 -0
- fairchild/templates/job.html +1245 -0
- fairchild/ui.py +560 -0
- fairchild/worker.py +495 -0
- fairchild-0.0.3.dist-info/METADATA +483 -0
- fairchild-0.0.3.dist-info/RECORD +20 -0
- fairchild-0.0.3.dist-info/entry_points.txt +2 -0
- fairchild-0.0.3.dist-info/licenses/LICENSE +21 -0
- fairchild-0.0.3.dist-info/top_level.txt +1 -0
- fairchild-0.0.1.dist-info/METADATA +0 -6
- fairchild-0.0.1.dist-info/RECORD +0 -5
- fairchild-0.0.1.dist-info/top_level.txt +0 -1
- main.py +0 -6
- {fairchild-0.0.1.dist-info → fairchild-0.0.3.dist-info}/WHEEL +0 -0
fairchild/ui.py
ADDED
@@ -0,0 +1,560 @@
from aiohttp import web
import json
from pathlib import Path

from fairchild.fairchild import Fairchild

# Load templates from files
_TEMPLATE_DIR = Path(__file__).parent / "templates"

# Type-safe app key for storing Fairchild instance
_fairchild_key: web.AppKey[Fairchild] = web.AppKey("fairchild", Fairchild)


def _load_template(name: str) -> str:
    """Load a template file."""
    return (_TEMPLATE_DIR / name).read_text()


def create_app(fairchild: Fairchild) -> web.Application:
    """Create the web UI application."""
    app = web.Application()
    app[_fairchild_key] = fairchild

    app.router.add_get("/", index)
    app.router.add_get("/jobs/{job_id}", job_page)
    app.router.add_get("/api/stats", api_stats)
    app.router.add_get("/api/jobs", api_jobs)
    app.router.add_post("/api/jobs", api_enqueue_job)
    app.router.add_get("/api/jobs/{job_id}", api_job_detail)
    app.router.add_get("/api/jobs/{job_id}/family", api_job_family)
    app.router.add_get("/api/queues", api_queues)
    app.router.add_get("/api/tasks", api_tasks)
    app.router.add_get("/api/timeseries", api_timeseries)
    app.router.add_get("/api/workers", api_workers)
    app.router.add_post("/api/workers/{worker_id}/pause", api_worker_pause)
    app.router.add_post("/api/workers/{worker_id}/resume", api_worker_resume)

    return app

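Note that this module only builds the aiohttp Application; it never starts a server itself (presumably the CLI in fairchild/cli.py wires that up). A minimal sketch of serving the dashboard, assuming an already-initialized `Fairchild` instance; the `run_ui` helper and the port are assumptions, not part of the package:

# Hypothetical launcher, not part of the package. Assumes `fc` is a
# Fairchild instance whose connection pool is already open.
from aiohttp import web

from fairchild.fairchild import Fairchild
from fairchild.ui import create_app

def run_ui(fc: Fairchild, port: int = 8080) -> None:
    # web.run_app blocks and serves the application until interrupted
    web.run_app(create_app(fc), port=port)
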
async def index(request: web.Request) -> web.Response:
    """Serve the main dashboard HTML."""
    html = _load_template("dashboard.html")
    return web.Response(text=html, content_type="text/html")


async def job_page(request: web.Request) -> web.Response:
    """Serve the job detail page."""
    job_id = request.match_info["job_id"]
    html = _load_template("job.html").replace("{{JOB_ID}}", job_id)
    return web.Response(text=html, content_type="text/html")


async def api_stats(request: web.Request) -> web.Response:
    """Get overall job statistics."""
    fairchild: Fairchild = request.app[_fairchild_key]

    query = """
        SELECT
            state,
            COUNT(*) as count
        FROM fairchild_jobs
        GROUP BY state
    """

    rows = await fairchild._pool.fetch(query)
    stats = {row["state"]: row["count"] for row in rows}

    return web.json_response(stats)


async def api_queues(request: web.Request) -> web.Response:
    """Get queue statistics."""
    fairchild: Fairchild = request.app[_fairchild_key]

    query = """
        SELECT
            queue,
            state,
            COUNT(*) as count
        FROM fairchild_jobs
        GROUP BY queue, state
        ORDER BY queue, state
    """

    rows = await fairchild._pool.fetch(query)

    queues = {}
    for row in rows:
        queue = row["queue"]
        if queue not in queues:
            queues[queue] = {}
        queues[queue][row["state"]] = row["count"]

    return web.json_response(queues)

async def api_tasks(request: web.Request) -> web.Response:
    """Get list of registered tasks with parameter info."""
    import inspect
    from fairchild.task import _task_registry

    tasks = []
    for name, task in sorted(_task_registry.items()):
        # Extract parameter info from function signature
        sig = inspect.signature(task.fn)
        params = []
        for param_name, param in sig.parameters.items():
            # Skip special injected parameters
            if param_name in ("job", "workflow"):
                continue

            param_info = {"name": param_name}

            # Add type annotation if present
            if param.annotation != inspect.Parameter.empty:
                try:
                    param_info["type"] = param.annotation.__name__
                except AttributeError:
                    param_info["type"] = str(param.annotation)

            # Add default value if present
            if param.default != inspect.Parameter.empty:
                param_info["default"] = param.default
                param_info["required"] = False
            else:
                param_info["required"] = True

            params.append(param_info)

        # Get docstring
        docstring = inspect.getdoc(task.fn)

        tasks.append(
            {
                "name": name,
                "queue": task.queue,
                "priority": task.priority,
                "max_attempts": task.max_attempts,
                "tags": task.tags,
                "params": params,
                "docstring": docstring,
            }
        )

    return web.json_response(tasks)

async def api_jobs(request: web.Request) -> web.Response:
    """Get jobs with filtering and pagination."""
    fairchild: Fairchild = request.app[_fairchild_key]

    # Parse query params
    state = request.query.get("state")
    queue = request.query.get("queue")
    limit = int(request.query.get("limit", 50))
    offset = int(request.query.get("offset", 0))

    # Build query
    conditions = []
    params = []
    param_idx = 1

    if state:
        conditions.append(f"state = ${param_idx}")
        params.append(state)
        param_idx += 1

    if queue:
        conditions.append(f"queue = ${param_idx}")
        params.append(queue)
        param_idx += 1

    where = f"WHERE {' AND '.join(conditions)}" if conditions else ""

    query = f"""
        SELECT
            id, task_name, queue, args,
            parent_id, deps,
            state, priority, scheduled_at,
            attempted_at, completed_at, attempt, max_attempts,
            recorded, errors, tags,
            inserted_at, updated_at
        FROM fairchild_jobs
        {where}
        ORDER BY inserted_at DESC
        LIMIT ${param_idx} OFFSET ${param_idx + 1}
    """
    params.extend([limit, offset])

    rows = await fairchild._pool.fetch(query, *params)

    jobs = []
    for row in rows:
        job = dict(row)
        job["id"] = str(job["id"])
        job["parent_id"] = str(job["parent_id"]) if job["parent_id"] else None
        job["scheduled_at"] = (
            job["scheduled_at"].isoformat() if job["scheduled_at"] else None
        )
        job["attempted_at"] = (
            job["attempted_at"].isoformat() if job["attempted_at"] else None
        )
        job["completed_at"] = (
            job["completed_at"].isoformat() if job["completed_at"] else None
        )
        job["inserted_at"] = (
            job["inserted_at"].isoformat() if job["inserted_at"] else None
        )
        job["updated_at"] = job["updated_at"].isoformat() if job["updated_at"] else None
        jobs.append(job)

    return web.json_response(jobs)

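For reference, a client-side sketch of the listing endpoint above, exercising its `state`/`queue` filters and `limit`/`offset` pagination. The base URL and filter values are assumptions, not part of the package:

# Illustrative client call, not from the package: fetch failed jobs on the
# "default" queue, 20 per page.
import asyncio

import aiohttp

async def list_failed_jobs() -> list:
    params = {"state": "failed", "queue": "default", "limit": "20", "offset": "0"}
    async with aiohttp.ClientSession() as session:
        async with session.get("http://localhost:8080/api/jobs", params=params) as resp:
            return await resp.json()

print(asyncio.run(list_failed_jobs()))
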
async def api_enqueue_job(request: web.Request) -> web.Response:
    """Enqueue a new job via JSON API.

    Request body:
    {
        "task": "module.task_name", # Required: registered task name
        "args": {"key": "value"}, # Optional: task arguments (default: {})
        "queue": "custom_queue", # Optional: override task's default queue
        "priority": 3, # Optional: 0-9, lower = higher priority
        "scheduled_at": "ISO8601" # Optional: schedule for later execution
    }

    Response:
    {
        "id": "uuid",
        "task": "module.task_name",
        "queue": "default",
        "state": "available",
        "scheduled_at": "ISO8601"
    }
    """
    from datetime import datetime
    from fairchild.task import get_task

    fairchild: Fairchild = request.app[_fairchild_key]

    try:
        body = await request.json()
    except json.JSONDecodeError:
        return web.json_response({"error": "Invalid JSON body"}, status=400)

    # Validate required fields
    task_name = body.get("task")
    if not task_name:
        return web.json_response({"error": "Missing required field: task"}, status=400)

    # Look up the task
    try:
        task = get_task(task_name)
    except ValueError:
        return web.json_response({"error": f"Unknown task: {task_name}"}, status=404)

    # Parse optional fields
    args = body.get("args", {})
    if not isinstance(args, dict):
        return web.json_response({"error": "args must be an object"}, status=400)

    priority = body.get("priority")
    if priority is not None:
        if not isinstance(priority, int) or not (0 <= priority <= 9):
            return web.json_response(
                {"error": "priority must be an integer 0-9"}, status=400
            )

    scheduled_at = None
    if "scheduled_at" in body:
        try:
            scheduled_at = datetime.fromisoformat(
                body["scheduled_at"].replace("Z", "+00:00")
            )
        except (ValueError, AttributeError):
            return web.json_response(
                {"error": "scheduled_at must be a valid ISO8601 datetime"}, status=400
            )

    # Enqueue the job
    try:
        job = await fairchild.enqueue(
            task=task,
            args=args,
            priority=priority,
            scheduled_at=scheduled_at,
        )
    except Exception as e:
        return web.json_response({"error": f"Failed to enqueue job: {e}"}, status=500)

    return web.json_response(
        {
            "id": str(job.id),
            "task": job.task_name,
            "queue": job.queue,
            "state": job.state.value,
            "scheduled_at": job.scheduled_at.isoformat() if job.scheduled_at else None,
        },
        status=201,
    )

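One detail worth noting: the docstring documents a "queue" override in the request body, but the handler never reads it and `fairchild.enqueue` is called without a queue argument, so in this version the task's default queue always applies. A client sketch matching the documented body; the task name, URL, and values are placeholders:

# Illustrative enqueue call, not from the package. "reports.build" is a
# hypothetical registered task name.
import asyncio

import aiohttp

async def enqueue_example() -> dict:
    payload = {
        "task": "reports.build",
        "args": {"report_id": 42},
        "priority": 3,
        "scheduled_at": "2030-01-01T12:00:00Z",
    }
    async with aiohttp.ClientSession() as session:
        async with session.post("http://localhost:8080/api/jobs", json=payload) as resp:
            return await resp.json()  # expect 201 with id/task/queue/state

print(asyncio.run(enqueue_example()))
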
async def api_job_detail(request: web.Request) -> web.Response:
    """Get details for a specific job."""
    fairchild: Fairchild = request.app[_fairchild_key]
    job_id = request.match_info["job_id"]

    query = """
        SELECT
            id, task_name, queue, args,
            parent_id, deps,
            state, priority, scheduled_at,
            attempted_at, completed_at, attempt, max_attempts,
            recorded, errors, tags,
            inserted_at, updated_at
        FROM fairchild_jobs
        WHERE id = $1
    """

    row = await fairchild._pool.fetchrow(query, job_id)

    if not row:
        return web.json_response({"error": "Job not found"}, status=404)

    job = dict(row)
    job["id"] = str(job["id"])
    job["parent_id"] = str(job["parent_id"]) if job["parent_id"] else None
    job["scheduled_at"] = (
        job["scheduled_at"].isoformat() if job["scheduled_at"] else None
    )
    job["attempted_at"] = (
        job["attempted_at"].isoformat() if job["attempted_at"] else None
    )
    job["completed_at"] = (
        job["completed_at"].isoformat() if job["completed_at"] else None
    )
    job["inserted_at"] = job["inserted_at"].isoformat() if job["inserted_at"] else None
    job["updated_at"] = job["updated_at"].isoformat() if job["updated_at"] else None

    return web.json_response(job)

async def api_job_family(request: web.Request) -> web.Response:
    """Get the family tree for a job (ancestors and descendants)."""
    from uuid import UUID

    fairchild: Fairchild = request.app[_fairchild_key]
    job_id_str = request.match_info["job_id"]

    try:
        job_id = UUID(job_id_str)
    except ValueError:
        return web.json_response({"error": "Invalid job ID"}, status=400)

    # Find the root job (traverse up to find the topmost parent)
    root_query = """
        WITH RECURSIVE ancestors AS (
            SELECT id, parent_id, 0 as depth
            FROM fairchild_jobs
            WHERE id = $1

            UNION ALL

            SELECT j.id, j.parent_id, a.depth + 1
            FROM fairchild_jobs j
            INNER JOIN ancestors a ON j.id = a.parent_id
        )
        SELECT id FROM ancestors
        WHERE parent_id IS NULL
        LIMIT 1
    """
    root_id = await fairchild._pool.fetchval(root_query, job_id)

    if not root_id:
        return web.json_response({"error": "Job not found"}, status=404)

    # Get all descendants from the root
    family_query = """
        WITH RECURSIVE family AS (
            SELECT id, task_name, parent_id, state, deps, recorded,
                   attempted_at, completed_at, attempt, max_attempts
            FROM fairchild_jobs
            WHERE id = $1

            UNION ALL

            SELECT j.id, j.task_name, j.parent_id, j.state, j.deps, j.recorded,
                   j.attempted_at, j.completed_at, j.attempt, j.max_attempts
            FROM fairchild_jobs j
            INNER JOIN family f ON j.parent_id = f.id
        )
        SELECT * FROM family
    """
    rows = await fairchild._pool.fetch(family_query, root_id)

    jobs = []
    for row in rows:
        job = dict(row)
        job["id"] = str(job["id"])
        job["parent_id"] = str(job["parent_id"]) if job["parent_id"] else None
        job["attempted_at"] = (
            job["attempted_at"].isoformat() if job["attempted_at"] else None
        )
        job["completed_at"] = (
            job["completed_at"].isoformat() if job["completed_at"] else None
        )
        jobs.append(job)

    return web.json_response({"root_id": str(root_id), "jobs": jobs})

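The family endpoint returns `root_id` plus a flat list keyed by `parent_id` rather than a nested structure, leaving nesting to the client. One way a consumer might rebuild the tree (a hypothetical helper, not part of the package):

# Rebuild a nested tree from the flat {"root_id": ..., "jobs": [...]} payload.
from collections import defaultdict

def build_tree(family: dict) -> dict:
    children = defaultdict(list)
    by_id = {}
    for job in family["jobs"]:
        # each node shares its children list with the defaultdict entry,
        # so later appends show up in the node automatically
        by_id[job["id"]] = {**job, "children": children[job["id"]]}
    for job in family["jobs"]:
        if job["parent_id"]:
            children[job["parent_id"]].append(by_id[job["id"]])
    return by_id[family["root_id"]]
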
async def api_timeseries(request: web.Request) -> web.Response:
    """Get job counts per minute for the last 60 minutes."""
    fairchild: Fairchild = request.app[_fairchild_key]

    # Get jobs created per minute
    query_inserted = """
        SELECT
            date_trunc('minute', inserted_at) as minute,
            COUNT(*) as count
        FROM fairchild_jobs
        WHERE inserted_at > now() - interval '60 minutes'
        GROUP BY minute
        ORDER BY minute
    """

    # Get jobs completed per minute
    query_completed = """
        SELECT
            date_trunc('minute', completed_at) as minute,
            COUNT(*) as count
        FROM fairchild_jobs
        WHERE completed_at > now() - interval '60 minutes'
            AND state = 'completed'
        GROUP BY minute
        ORDER BY minute
    """

    # Get jobs failed/discarded per minute
    query_failed = """
        SELECT
            date_trunc('minute', updated_at) as minute,
            COUNT(*) as count
        FROM fairchild_jobs
        WHERE updated_at > now() - interval '60 minutes'
            AND state IN ('failed', 'discarded')
        GROUP BY minute
        ORDER BY minute
    """

    inserted_rows = await fairchild._pool.fetch(query_inserted)
    completed_rows = await fairchild._pool.fetch(query_completed)
    failed_rows = await fairchild._pool.fetch(query_failed)

    # Build minute-by-minute data
    inserted = {row["minute"].isoformat(): row["count"] for row in inserted_rows}
    completed = {row["minute"].isoformat(): row["count"] for row in completed_rows}
    failed = {row["minute"].isoformat(): row["count"] for row in failed_rows}

    return web.json_response(
        {
            "inserted": inserted,
            "completed": completed,
            "failed": failed,
        }
    )

async def api_workers(request: web.Request) -> web.Response:
    """Get list of active workers."""
    fairchild: Fairchild = request.app[_fairchild_key]

    # Get workers that have heartbeated in the last 30 seconds (alive)
    # or are in stopped state (for recent history)
    query = """
        SELECT
            id, hostname, pid, queues, active_jobs, state,
            started_at, last_heartbeat_at, paused_at
        FROM fairchild_workers
        WHERE last_heartbeat_at > now() - interval '30 seconds'
            OR state = 'stopped'
        ORDER BY started_at DESC
    """

    rows = await fairchild._pool.fetch(query)

    workers = []
    for row in rows:
        worker = dict(row)
        worker["id"] = str(worker["id"])
        worker["started_at"] = (
            worker["started_at"].isoformat() if worker["started_at"] else None
        )
        worker["last_heartbeat_at"] = (
            worker["last_heartbeat_at"].isoformat()
            if worker["last_heartbeat_at"]
            else None
        )
        worker["paused_at"] = (
            worker["paused_at"].isoformat() if worker["paused_at"] else None
        )
        # Check if worker is actually alive (heartbeat within 15s)
        from datetime import datetime, timezone

        if worker["last_heartbeat_at"]:
            last_hb = datetime.fromisoformat(worker["last_heartbeat_at"])
            if last_hb.tzinfo is None:
                last_hb = last_hb.replace(tzinfo=timezone.utc)
            age = (datetime.now(timezone.utc) - last_hb).total_seconds()
            worker["alive"] = age < 15 and worker["state"] != "stopped"
        else:
            worker["alive"] = False
        workers.append(worker)

    return web.json_response(workers)

async def api_worker_pause(request: web.Request) -> web.Response:
    """Pause a worker."""
    fairchild: Fairchild = request.app[_fairchild_key]
    worker_id = request.match_info["worker_id"]

    query = """
        UPDATE fairchild_workers
        SET state = 'paused', paused_at = now()
        WHERE id = $1 AND state = 'running'
        RETURNING id
    """

    result = await fairchild._pool.fetchval(query, worker_id)

    if not result:
        return web.json_response(
            {"error": "Worker not found or not running"}, status=404
        )

    return web.json_response({"status": "paused", "id": worker_id})


async def api_worker_resume(request: web.Request) -> web.Response:
    """Resume a paused worker."""
    fairchild: Fairchild = request.app[_fairchild_key]
    worker_id = request.match_info["worker_id"]

    query = """
        UPDATE fairchild_workers
        SET state = 'running', paused_at = NULL
        WHERE id = $1 AND state = 'paused'
        RETURNING id
    """

    result = await fairchild._pool.fetchval(query, worker_id)

    if not result:
        return web.json_response(
            {"error": "Worker not found or not paused"}, status=404
        )

    return web.json_response({"status": "running", "id": worker_id})
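
Both handlers only flip the worker's row in `fairchild_workers`; the worker process itself presumably picks up the state change through its heartbeat loop in worker.py (not shown in this diff). An illustrative round-trip against a local server, with a placeholder worker id:

# Illustrative pause/resume calls, not from the package.
import asyncio

import aiohttp

async def pause_then_resume(worker_id: str) -> None:
    base = f"http://localhost:8080/api/workers/{worker_id}"
    async with aiohttp.ClientSession() as session:
        async with session.post(f"{base}/pause") as resp:
            print(resp.status, await resp.json())  # 200 {"status": "paused", ...} or 404
        async with session.post(f"{base}/resume") as resp:
            print(resp.status, await resp.json())  # 200 {"status": "running", ...} or 404

asyncio.run(pause_then_resume("00000000-0000-0000-0000-000000000000"))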