supython 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supython/__init__.py +8 -0
- supython/admin/__init__.py +3 -0
- supython/admin/api/__init__.py +24 -0
- supython/admin/api/auth.py +118 -0
- supython/admin/api/auth_templates.py +67 -0
- supython/admin/api/auth_users.py +225 -0
- supython/admin/api/db.py +174 -0
- supython/admin/api/functions.py +92 -0
- supython/admin/api/jobs.py +192 -0
- supython/admin/api/ops.py +224 -0
- supython/admin/api/realtime.py +281 -0
- supython/admin/api/service_auth.py +49 -0
- supython/admin/api/service_auth_templates.py +83 -0
- supython/admin/api/service_auth_users.py +346 -0
- supython/admin/api/service_db.py +214 -0
- supython/admin/api/service_functions.py +287 -0
- supython/admin/api/service_jobs.py +282 -0
- supython/admin/api/service_ops.py +213 -0
- supython/admin/api/service_realtime.py +30 -0
- supython/admin/api/service_storage.py +220 -0
- supython/admin/api/storage.py +117 -0
- supython/admin/api/system.py +37 -0
- supython/admin/audit.py +29 -0
- supython/admin/deps.py +22 -0
- supython/admin/errors.py +16 -0
- supython/admin/schemas.py +310 -0
- supython/admin/session.py +52 -0
- supython/admin/spa.py +38 -0
- supython/admin/static/assets/Alert-dluGVkos.js +49 -0
- supython/admin/static/assets/Audit-Njung3HI.js +2 -0
- supython/admin/static/assets/Backups-DzPlFgrm.js +2 -0
- supython/admin/static/assets/Buckets-ByacGkU1.js +2 -0
- supython/admin/static/assets/Channels-BoIuTtam.js +353 -0
- supython/admin/static/assets/ChevronRight-CtQH1EQ1.js +2 -0
- supython/admin/static/assets/CodeViewer-Bqy7-wvH.js +2 -0
- supython/admin/static/assets/Crons-B67vc39F.js +2 -0
- supython/admin/static/assets/DashboardView-CUTFVL6k.js +2 -0
- supython/admin/static/assets/DataTable-COAAWEft.js +747 -0
- supython/admin/static/assets/DescriptionsItem-P8JUDaBs.js +75 -0
- supython/admin/static/assets/DrawerContent-TpYTFgF1.js +139 -0
- supython/admin/static/assets/Empty-cr2r7e2u.js +25 -0
- supython/admin/static/assets/EmptyState-DeDck-OL.js +2 -0
- supython/admin/static/assets/Grid-hFkp9F4P.js +2 -0
- supython/admin/static/assets/Input-DppYTq9C.js +259 -0
- supython/admin/static/assets/Invoke-DW3Nveeh.js +2 -0
- supython/admin/static/assets/JsonField-DibyJgun.js +2 -0
- supython/admin/static/assets/LoginView-BjLyE3Ds.css +1 -0
- supython/admin/static/assets/LoginView-CoOjECT_.js +111 -0
- supython/admin/static/assets/Logs-D9WYrnIT.js +2 -0
- supython/admin/static/assets/Logs-DS1XPa0h.css +1 -0
- supython/admin/static/assets/Migrations-DOSC2ddQ.js +2 -0
- supython/admin/static/assets/ObjectBrowser-_5w8vOX8.js +2 -0
- supython/admin/static/assets/Queue-CywZs6vI.js +2 -0
- supython/admin/static/assets/RefreshTokens-Ccjr53jg.js +2 -0
- supython/admin/static/assets/RlsEditor-BSlH9vSc.js +2 -0
- supython/admin/static/assets/Routes-BiLXE49D.js +2 -0
- supython/admin/static/assets/Routes-C-ianIGD.css +1 -0
- supython/admin/static/assets/SchemaBrowser-DKy2_KQi.css +1 -0
- supython/admin/static/assets/SchemaBrowser-XFvFbtDB.js +2 -0
- supython/admin/static/assets/Select-DIzZyRZb.js +434 -0
- supython/admin/static/assets/Space-n5-XcguU.js +400 -0
- supython/admin/static/assets/SqlEditor-b8pTsILY.js +3 -0
- supython/admin/static/assets/SqlWorkspace-BUS7IntH.js +104 -0
- supython/admin/static/assets/TableData-CQIagLKn.js +2 -0
- supython/admin/static/assets/Tag-D1fOKpTH.js +72 -0
- supython/admin/static/assets/Templates-BS-ugkdq.js +2 -0
- supython/admin/static/assets/Thing-CEAniuMg.js +107 -0
- supython/admin/static/assets/Users-wzwajhlh.js +2 -0
- supython/admin/static/assets/_plugin-vue_export-helper-DGA9ry_j.js +1 -0
- supython/admin/static/assets/dist-VXIJLCYq.js +13 -0
- supython/admin/static/assets/format-length-CGCY1rMh.js +2 -0
- supython/admin/static/assets/get-Ca6unauB.js +2 -0
- supython/admin/static/assets/index-CeE6v959.js +951 -0
- supython/admin/static/assets/pinia-COXwfrOX.js +2 -0
- supython/admin/static/assets/resources-Bt6thQCD.js +44 -0
- supython/admin/static/assets/use-locale-mtgM0a3a.js +2 -0
- supython/admin/static/assets/use-merged-state-BvhkaHNX.js +2 -0
- supython/admin/static/assets/useConfirm-tMjvBFXR.js +2 -0
- supython/admin/static/assets/useResource-C_rJCY8C.js +2 -0
- supython/admin/static/assets/useTable-CnZc5zhi.js +363 -0
- supython/admin/static/assets/useTable-Dg0XlRlq.css +1 -0
- supython/admin/static/assets/useToast-DsZKx0IX.js +2 -0
- supython/admin/static/assets/utils-sbXoq7Ir.js +2 -0
- supython/admin/static/favicon.svg +1 -0
- supython/admin/static/icons.svg +24 -0
- supython/admin/static/index.html +24 -0
- supython/app.py +149 -0
- supython/auth/__init__.py +3 -0
- supython/auth/_email_job.py +11 -0
- supython/auth/providers/__init__.py +34 -0
- supython/auth/providers/github.py +22 -0
- supython/auth/providers/google.py +19 -0
- supython/auth/providers/oauth.py +56 -0
- supython/auth/providers/registry.py +16 -0
- supython/auth/ratelimit.py +39 -0
- supython/auth/router.py +282 -0
- supython/auth/schemas.py +79 -0
- supython/auth/service.py +587 -0
- supython/body_size.py +184 -0
- supython/cli.py +1653 -0
- supython/client/__init__.py +67 -0
- supython/client/_auth.py +249 -0
- supython/client/_client.py +145 -0
- supython/client/_config.py +92 -0
- supython/client/_functions.py +69 -0
- supython/client/_storage.py +255 -0
- supython/client/py.typed +0 -0
- supython/db.py +151 -0
- supython/db_admin.py +8 -0
- supython/functions/__init__.py +19 -0
- supython/functions/context.py +262 -0
- supython/functions/loader.py +307 -0
- supython/functions/router.py +228 -0
- supython/functions/schemas.py +50 -0
- supython/gen/__init__.py +5 -0
- supython/gen/_introspect.py +137 -0
- supython/gen/types_py.py +270 -0
- supython/gen/types_ts.py +365 -0
- supython/health.py +229 -0
- supython/hooks.py +117 -0
- supython/jobs/__init__.py +31 -0
- supython/jobs/backends.py +97 -0
- supython/jobs/context.py +58 -0
- supython/jobs/cron.py +152 -0
- supython/jobs/cron_inproc.py +118 -0
- supython/jobs/decorators.py +76 -0
- supython/jobs/registry.py +79 -0
- supython/jobs/router.py +136 -0
- supython/jobs/schemas.py +92 -0
- supython/jobs/service.py +311 -0
- supython/jobs/worker.py +219 -0
- supython/jwks.py +257 -0
- supython/keyset.py +279 -0
- supython/logging_config.py +291 -0
- supython/mail.py +33 -0
- supython/mailer.py +65 -0
- supython/migrate.py +81 -0
- supython/migrations/0001_extensions_and_roles.sql +46 -0
- supython/migrations/0002_auth_schema.sql +66 -0
- supython/migrations/0003_demo_todos.sql +42 -0
- supython/migrations/0004_auth_v0_2.sql +47 -0
- supython/migrations/0005_storage_schema.sql +117 -0
- supython/migrations/0006_realtime_schema.sql +206 -0
- supython/migrations/0007_jobs_schema.sql +254 -0
- supython/migrations/0008_jobs_last_error.sql +56 -0
- supython/migrations/0009_auth_rate_limits.sql +33 -0
- supython/migrations/0010_worker_heartbeat.sql +14 -0
- supython/migrations/0011_admin_schema.sql +45 -0
- supython/migrations/0012_auth_banned_until.sql +10 -0
- supython/migrations/0013_email_templates.sql +19 -0
- supython/migrations/0014_realtime_payload_warning.sql +96 -0
- supython/migrations/0015_backups_schema.sql +14 -0
- supython/passwords.py +15 -0
- supython/realtime/__init__.py +6 -0
- supython/realtime/broker.py +814 -0
- supython/realtime/protocol.py +234 -0
- supython/realtime/router.py +184 -0
- supython/realtime/schemas.py +207 -0
- supython/realtime/service.py +261 -0
- supython/realtime/topics.py +175 -0
- supython/realtime/websocket.py +586 -0
- supython/scaffold/__init__.py +5 -0
- supython/scaffold/init_project.py +133 -0
- supython/scaffold/templates/Caddyfile.tmpl +4 -0
- supython/scaffold/templates/README.md.tmpl +22 -0
- supython/scaffold/templates/docker-compose.prod.yml.tmpl +84 -0
- supython/scaffold/templates/docker-compose.yml.tmpl +41 -0
- supython/scaffold/templates/docker_postgres_Dockerfile.tmpl +9 -0
- supython/scaffold/templates/docker_postgres_postgresql.conf.tmpl +3 -0
- supython/scaffold/templates/env.example.tmpl +149 -0
- supython/scaffold/templates/functions_README.md.tmpl +21 -0
- supython/scaffold/templates/gitignore.tmpl +14 -0
- supython/scaffold/templates/migrations/.gitkeep +0 -0
- supython/secretset.py +347 -0
- supython/security_headers.py +78 -0
- supython/settings.py +198 -0
- supython/storage/__init__.py +5 -0
- supython/storage/backends.py +392 -0
- supython/storage/router.py +341 -0
- supython/storage/schemas.py +50 -0
- supython/storage/service.py +445 -0
- supython/storage/signing.py +119 -0
- supython/tokens.py +85 -0
- supython-0.5.0.dist-info/METADATA +714 -0
- supython-0.5.0.dist-info/RECORD +188 -0
- supython-0.5.0.dist-info/WHEEL +4 -0
- supython-0.5.0.dist-info/entry_points.txt +2 -0
- supython-0.5.0.dist-info/licenses/LICENSE +21 -0
supython/jobs/router.py
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
"""Jobs REST API — versioned under /jobs/v1."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Annotated
|
|
6
|
+
from uuid import UUID
|
|
7
|
+
|
|
8
|
+
from fastapi import APIRouter, Depends, Header, HTTPException
|
|
9
|
+
|
|
10
|
+
from .. import db
|
|
11
|
+
from ..tokens import decode_access_token
|
|
12
|
+
from .schemas import EnqueueRequest, EnqueueResult, JobResponse
|
|
13
|
+
from .service import JobError, cancel, enqueue, get_job, list_jobs, retry
|
|
14
|
+
|
|
15
|
+
router = APIRouter(prefix="/jobs/v1", tags=["jobs"])
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def _service_role_required(
    authorization: Annotated[str | None, Header()] = None,
) -> str:
    """FastAPI dependency: admit only callers bearing a service_role token.

    Returns the raw bearer token on success; raises 401 for a missing or
    undecodable token and 403 when the decoded claims lack role=service_role.
    """
    prefix = "Bearer "
    if not authorization or not authorization.startswith(prefix):
        raise HTTPException(status_code=401, detail="missing bearer token")
    token = authorization[len(prefix):]
    try:
        claims = decode_access_token(token)
    except Exception:
        # Deliberately opaque: do not leak why decoding failed.
        raise HTTPException(status_code=401, detail="invalid token") from None
    if claims.get("role") != "service_role":
        raise HTTPException(status_code=403, detail="service_role required")
    return token
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _to_response(record) -> JobResponse:
    """Project an internal job record onto the public JobResponse shape."""
    # JobResponse deliberately exposes a subset of the record (no lock or
    # backoff internals); copy exactly the public attribute set.
    public_fields = (
        "id",
        "name",
        "version",
        "status",
        "payload",
        "queue",
        "user_id",
        "attempts",
        "max_attempts",
        "run_at",
        "finished_at",
        "created_at",
        "last_error",
    )
    return JobResponse(**{field: getattr(record, field) for field in public_fields})
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@router.post("/enqueue", response_model=EnqueueResult)
async def enqueue_endpoint(
    payload: EnqueueRequest,
    _token: Annotated[str, Depends(_service_role_required)],
) -> EnqueueResult:
    """Enqueue a job (service_role only), mapping JobError to an HTTP error."""
    try:
        async with db.as_service_role() as conn:
            return await enqueue(
                conn,
                name=payload.name,
                payload=payload.payload,
                queue=payload.queue,
                idempotency_key=payload.idempotency_key,
                user_id=payload.user_id,
                max_attempts=payload.max_attempts,
                backoff=payload.backoff,
                backoff_base_s=payload.backoff_base_s,
                backoff_max_s=payload.backoff_max_s,
                run_at=payload.run_at,
                version=payload.version,
                role=payload.role,
                claims_from=payload.claims_from,
            )
    except JobError as exc:
        raise HTTPException(
            status_code=exc.status,
            detail={"code": exc.code, "message": exc.message},
        ) from exc
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
@router.get("/jobs", response_model=list[JobResponse])
async def list_jobs_endpoint(
    _token: Annotated[str, Depends(_service_role_required)],
    status: str | None = None,
    queue: str | None = None,
    name: str | None = None,
    user_id: UUID | None = None,
    limit: int = 50,
    offset: int = 0,
) -> list[JobResponse]:
    """List jobs, newest first, with optional filters (service_role only).

    Fix: the service-layer ``list_jobs`` accepts a ``user_id`` filter (and the
    ``JobFilter`` schema declares one) but this endpoint previously dropped it.
    Exposed here as an optional query parameter — backward compatible, since
    omitting it preserves the old behavior.
    """
    async with db.as_service_role() as conn:
        records = await list_jobs(
            conn,
            status=status,
            queue=queue,
            name=name,
            user_id=user_id,
            limit=limit,
            offset=offset,
        )
        return [_to_response(r) for r in records]
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@router.get("/jobs/{job_id}", response_model=JobResponse)
async def get_job_endpoint(
    job_id: UUID,
    _token: Annotated[str, Depends(_service_role_required)],
) -> JobResponse:
    """Fetch a single job by id (service_role only); 404 if it is absent."""
    async with db.as_service_role() as conn:
        record = await get_job(conn, job_id)
    if record is None:
        raise HTTPException(status_code=404, detail="job not found")
    return _to_response(record)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@router.post("/jobs/{job_id}/cancel", status_code=204)
async def cancel_job_endpoint(
    job_id: UUID,
    _token: Annotated[str, Depends(_service_role_required)],
) -> None:
    """Cancel a job (service_role only); JobError becomes an HTTP error."""
    try:
        async with db.as_service_role() as conn:
            await cancel(conn, job_id)
    except JobError as exc:
        detail = {"code": exc.code, "message": exc.message}
        raise HTTPException(status_code=exc.status, detail=detail) from exc
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
@router.post("/jobs/{job_id}/retry", status_code=204)
async def retry_job_endpoint(
    job_id: UUID,
    _token: Annotated[str, Depends(_service_role_required)],
) -> None:
    """Re-queue a terminal job (service_role only); JobError becomes HTTP."""
    try:
        async with db.as_service_role() as conn:
            await retry(conn, job_id)
    except JobError as exc:
        detail = {"code": exc.code, "message": exc.message}
        raise HTTPException(status_code=exc.status, detail=detail) from exc
|
supython/jobs/schemas.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
"""Pydantic v2 models for the jobs module."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from uuid import UUID
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class EnqueueRequest(BaseModel):
    """Request body for POST /jobs/v1/enqueue.

    Field names mirror the keyword arguments of the service-layer ``enqueue``
    function, which forwards them to the ``jobs.enqueue`` SQL function.
    """

    name: str
    payload: dict | None = None
    queue: str = "default"
    # Presumably dedupes: re-submitting the same key returns the existing
    # job (see EnqueueResult.is_new) — semantics live in SQL; confirm there.
    idempotency_key: str | None = None
    user_id: UUID | None = None
    max_attempts: int = 3
    # Strategies understood by the worker backoff computation:
    # "constant", "linear", anything else is treated as exponential.
    backoff: str = "exponential"
    backoff_base_s: float = 5.0
    backoff_max_s: float = 300.0
    # None means eligible immediately; a future timestamp delays the run.
    run_at: datetime | None = None
    version: int = 1
    role: str = "service_role"
    claims_from: str | None = None
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class JobRecord(BaseModel):
    """Full internal representation of a ``jobs.jobs`` row.

    Built from asyncpg rows by the service layer; includes worker-lock and
    backoff bookkeeping that the public JobResponse intentionally omits.
    """

    id: UUID
    name: str
    version: int = 1
    payload: dict | None = None
    queue: str = "default"
    idempotency_key: str | None = None
    user_id: UUID | None = None
    status: str = "queued"
    attempts: int = 0
    max_attempts: int = 3
    backoff: str = "exponential"
    backoff_base_s: float = 5.0
    backoff_max_s: float = 300.0
    run_at: datetime | None = None
    # Worker-claim bookkeeping: set while a worker holds the job.
    locked_at: datetime | None = None
    locked_by: str | None = None
    role: str = "service_role"
    claims_from: str | None = None
    finished_at: datetime | None = None
    created_at: datetime | None = None
    last_error: str | None = None
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class EnqueueResult(BaseModel):
    """Outcome of an enqueue call: the job row plus whether it was created.

    ``is_new`` comes straight from the ``jobs.enqueue`` SQL function; it is
    False when the call resolved to an already-existing job (presumably via
    the idempotency key — confirm against the SQL definition).
    """

    job: JobRecord
    is_new: bool
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class JobResponse(BaseModel):
    """Public job shape returned by the /jobs/v1 REST endpoints.

    A trimmed-down JobRecord: lock state (locked_at/locked_by), backoff
    parameters, idempotency key, and execution role are not exposed.
    """

    id: UUID
    name: str
    version: int = 1
    status: str
    payload: dict | None = None
    queue: str = "default"
    user_id: UUID | None = None
    attempts: int = 0
    max_attempts: int = 3
    run_at: datetime | None = None
    finished_at: datetime | None = None
    created_at: datetime | None = None
    last_error: str | None = None
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class JobFilter(BaseModel):
    """Filter and pagination parameters for listing jobs.

    Mirrors the keyword arguments of the service-layer ``list_jobs``; the
    limit is bounded to 1..500 and the offset to >= 0.
    """

    status: str | None = None
    queue: str | None = None
    name: str | None = None
    user_id: UUID | None = None
    limit: int = Field(default=50, ge=1, le=500)
    offset: int = Field(default=0, ge=0)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class CronDefinitionSchema(BaseModel):
    """A recurring schedule that enqueues ``job_name`` per ``cron_expr``.

    NOTE(review): cron_expr format (standard 5-field vs extended) is defined
    by the cron scheduler module, not here — confirm against jobs/cron.py.
    """

    name: str
    cron_expr: str
    job_name: str
    job_version: int = 1
    payload: dict | None = None
    queue: str = "default"
    enabled: bool = True
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
class BackendHealth(BaseModel):
    """Health-probe result for a jobs backend.

    ``detail`` carries a human-readable explanation when relevant
    (e.g. an error description for an unhealthy backend).
    """

    backend: str
    healthy: bool
    detail: str | None = None
|
supython/jobs/service.py
ADDED
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
"""Framework-agnostic async service functions for the jobs queue.
|
|
2
|
+
|
|
3
|
+
All functions take an asyncpg.Connection and raise JobError on failure.
|
|
4
|
+
No FastAPI imports here — this module is testable without HTTP.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
from datetime import UTC, datetime, timedelta
|
|
12
|
+
from uuid import UUID
|
|
13
|
+
|
|
14
|
+
import asyncpg
|
|
15
|
+
|
|
16
|
+
from .schemas import EnqueueResult, JobRecord
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class JobError(Exception):
    """Domain error for job operations, carrying an HTTP-ready status.

    The API layer translates this into an HTTPException with the same
    status code and a {"code", "message"} detail payload.
    """

    def __init__(self, code: str, message: str, status: int = 400) -> None:
        super().__init__(message)
        self.status = status    # suggested HTTP status for API layers
        self.message = message  # human-readable description
        self.code = code        # machine-readable error code
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _row_to_record(row: asyncpg.Record) -> JobRecord:
    """Convert a raw ``jobs.jobs`` row into a validated JobRecord model."""
    # asyncpg hands back jsonb columns as unparsed text unless a codec is
    # registered; registering a codec globally would touch every connection
    # in the pool, so we decode at the edge here instead.
    raw_payload = row["payload"]
    if isinstance(raw_payload, str):
        # An empty string would make json.loads raise; treat it as {}.
        raw_payload = json.loads(raw_payload) if raw_payload else {}
    return JobRecord(
        id=row["id"],
        name=row["name"],
        version=row["version"],
        payload=raw_payload,
        queue=row["queue"],
        idempotency_key=row["idempotency_key"],
        user_id=row["user_id"],
        status=row["status"],
        attempts=row["attempts"],
        max_attempts=row["max_attempts"],
        backoff=row["backoff"],
        backoff_base_s=row["backoff_base_s"],
        backoff_max_s=row["backoff_max_s"],
        run_at=row["run_at"],
        locked_at=row["locked_at"],
        locked_by=row["locked_by"],
        role=row["role"],
        claims_from=row["claims_from"],
        finished_at=row["finished_at"],
        created_at=row["created_at"],
        # .get(): tolerate rows that lack last_error (column added in a
        # later migration) instead of raising KeyError.
        last_error=row.get("last_error"),
    )
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
async def enqueue(
    conn: asyncpg.Connection,
    *,
    name: str,
    payload: dict | None = None,
    queue: str = "default",
    idempotency_key: str | None = None,
    user_id: UUID | None = None,
    max_attempts: int = 3,
    backoff: str = "exponential",
    backoff_base_s: float = 5.0,
    backoff_max_s: float = 300.0,
    run_at: datetime | None = None,
    version: int = 1,
    role: str = "service_role",
    claims_from: str | None = None,
) -> EnqueueResult:
    """Insert (or resolve an existing) job via the ``jobs.enqueue`` SQL function.

    All parameters are forwarded to the SQL function, which owns the actual
    insert/dedup logic. The payload is serialized to JSON here (``{}`` when
    None) because the parameter is bound as ``jsonb``.

    Raises:
        JobError: status 500 if the SQL function unexpectedly yields no row.
    """
    row = await conn.fetchrow(
        """
        select (job).*, is_new
        from jobs.enqueue(
            p_name := $1,
            p_payload := $2::jsonb,
            p_queue := $3,
            p_idempotency_key := $4,
            p_user_id := $5,
            p_max_attempts := $6,
            p_backoff := $7,
            p_backoff_base_s := $8,
            p_backoff_max_s := $9,
            p_run_at := $10,
            p_version := $11,
            p_role := $12,
            p_claims_from := $13
        )
        """,
        name,
        json.dumps(payload or {}),
        queue,
        idempotency_key,
        user_id,
        max_attempts,
        backoff,
        backoff_base_s,
        backoff_max_s,
        run_at,
        version,
        role,
        claims_from,
    )
    if row is None:
        raise JobError("enqueue_failed", "enqueue returned no rows", 500)

    # ``jobs.enqueue`` has named OUT columns (``job jobs.jobs``, ``is_new bool``).
    # ``(job).*`` expands the composite to the jobs.jobs column set so the row
    # we get back is already the full record — no second round-trip needed.
    return EnqueueResult(
        job=_row_to_record(row),
        is_new=row["is_new"],
    )
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
async def claim_next(
    conn: asyncpg.Connection,
    *,
    queue: str = "default",
    worker_id: str = "worker-0",
    visibility_timeout_ms: int = 300000,
    zombie_batch: int = 10,
) -> list[JobRecord]:
    """Claim runnable jobs for a worker via the ``jobs.claim_next`` SQL function.

    Args:
        queue: queue to pull jobs from.
        worker_id: identity recorded on the claimed jobs.
        visibility_timeout_ms: forwarded to SQL; presumably how long a claim
            stays valid before the job can be reclaimed — confirm in the
            migration that defines jobs.claim_next.
        zombie_batch: forwarded to SQL; presumably bounds how many stale
            (zombie) claims are recovered per call — TODO confirm.

    Returns:
        The claimed jobs as JobRecord models (possibly empty).
    """
    rows = await conn.fetch(
        """
        select *
        from jobs.claim_next(
            p_queue := $1,
            p_worker_id := $2,
            p_visibility_timeout_ms := $3,
            p_zombie_batch := $4
        )
        """,
        queue,
        worker_id,
        visibility_timeout_ms,
        zombie_batch,
    )
    return [_row_to_record(r) for r in rows]
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
async def mark_succeeded(conn: asyncpg.Connection, job_id: UUID) -> None:
    """Mark a job as succeeded and stamp finished_at.

    Silently does nothing if the id does not exist — callers are expected
    to pass an id they just claimed.
    """
    await conn.execute(
        """
        update jobs.jobs
        set status = 'succeeded', finished_at = now()
        where id = $1
        """,
        job_id,
    )
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
async def mark_failed_retry(
    conn: asyncpg.Connection,
    job_id: UUID,
    *,
    attempts: int,
    backoff: str,
    backoff_base_s: float,
    backoff_max_s: float,
    last_error: str | None = None,
) -> None:
    """Return a failed job to the queue with a backoff-delayed run_at.

    Computes the next eligible time from the job's backoff settings, clears
    the worker lock (locked_at/locked_by), records last_error, and flips the
    status back to 'queued'.

    Args:
        attempts: the attempt count used to scale the backoff delay.
        backoff: strategy name ("constant", "linear", else exponential).
    """
    next_run = _compute_backoff(attempts, backoff, backoff_base_s, backoff_max_s)
    await conn.execute(
        """
        update jobs.jobs
        set status = 'queued',
            run_at = $2,
            locked_at = null,
            locked_by = null,
            last_error = $3
        where id = $1
        """,
        job_id,
        next_run,
        last_error,
    )
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
async def mark_failed_final(
    conn: asyncpg.Connection,
    job_id: UUID,
    *,
    last_error: str | None = None,
) -> None:
    """Mark a job as permanently failed (no further retries) with last_error.

    Stamps finished_at; a no-op if the id does not exist.
    """
    await conn.execute(
        """
        update jobs.jobs
        set status = 'failed', finished_at = now(), last_error = $2
        where id = $1
        """,
        job_id,
        last_error,
    )
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
# Terminal statuses from which retry() may re-queue a job.
_RETRYABLE_STATUSES = ("failed", "cancelled")
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
async def retry(conn: asyncpg.Connection, job_id: UUID) -> None:
    """Re-queue a job that is in a retryable terminal state.

    Resets the schedule, lock, and error fields and sets status back to
    'queued'. Raises JobError 404 when the job does not exist and 409 when
    it exists but is not in one of _RETRYABLE_STATUSES.
    """
    status_tag = await conn.execute(
        """
        update jobs.jobs
        set status = 'queued',
            run_at = now(),
            locked_at = null,
            locked_by = null,
            last_error = null,
            finished_at = null
        where id = $1 and status = any($2::text[])
        """,
        job_id,
        list(_RETRYABLE_STATUSES),
    )
    if status_tag != "UPDATE 0":
        return
    # Nothing was updated: distinguish "missing" from "wrong state".
    found = await conn.fetchval(
        "select 1 from jobs.jobs where id = $1", job_id
    )
    if found is None:
        raise JobError("retry_failed", "job not found", 404)
    raise JobError(
        "retry_failed",
        f"job is not in a retryable state (must be one of {list(_RETRYABLE_STATUSES)})",
        409,
    )
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
async def cancel(conn: asyncpg.Connection, job_id: UUID) -> None:
    """Cancel a queued or running job, stamping finished_at.

    Raises JobError 404 when the job is missing or already in a terminal
    state (the update then touches zero rows).
    """
    status_tag = await conn.execute(
        """
        update jobs.jobs
        set status = 'cancelled', finished_at = now()
        where id = $1 and status in ('queued', 'running')
        """,
        job_id,
    )
    if status_tag != "UPDATE 0":
        return
    raise JobError("cancel_failed", "job not found or already terminal", 404)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
async def list_jobs(
    conn: asyncpg.Connection,
    *,
    status: str | None = None,
    queue: str | None = None,
    name: str | None = None,
    user_id: UUID | None = None,
    limit: int = 50,
    offset: int = 0,
) -> list[JobRecord]:
    """List jobs newest-first with optional equality filters and pagination.

    Column names are hard-coded (never user input) and values are always
    bound as parameters, so the dynamic WHERE clause is injection-safe.
    """
    optional_filters = (
        ("status", status),
        ("queue", queue),
        ("name", name),
        ("user_id", user_id),
    )
    conditions: list[str] = []
    params: list = []
    for column, value in optional_filters:
        if value is not None:
            params.append(value)
            conditions.append(f"{column} = ${len(params)}")

    where_sql = f"where {' and '.join(conditions)}" if conditions else ""
    limit_idx = len(params) + 1
    rows = await conn.fetch(
        f"""
        select * from jobs.jobs
        {where_sql}
        order by created_at desc
        limit ${limit_idx} offset ${limit_idx + 1}
        """,
        *params,
        limit,
        offset,
    )
    return [_row_to_record(r) for r in rows]
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
async def get_job(conn: asyncpg.Connection, job_id: UUID) -> JobRecord | None:
    """Fetch one job by id, or None when no such row exists."""
    row = await conn.fetchrow("select * from jobs.jobs where id = $1", job_id)
    if row is None:
        return None
    return _row_to_record(row)
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def _compute_backoff(
|
|
300
|
+
attempts: int,
|
|
301
|
+
backoff: str,
|
|
302
|
+
base_s: float,
|
|
303
|
+
max_s: float,
|
|
304
|
+
) -> datetime:
|
|
305
|
+
if backoff == "constant":
|
|
306
|
+
delay = base_s
|
|
307
|
+
elif backoff == "linear":
|
|
308
|
+
delay = base_s * attempts
|
|
309
|
+
else:
|
|
310
|
+
delay = min(base_s * (2 ** (attempts - 1)), max_s)
|
|
311
|
+
return datetime.now(UTC) + timedelta(seconds=delay)
|