supython 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (188) hide show
  1. supython/__init__.py +8 -0
  2. supython/admin/__init__.py +3 -0
  3. supython/admin/api/__init__.py +24 -0
  4. supython/admin/api/auth.py +118 -0
  5. supython/admin/api/auth_templates.py +67 -0
  6. supython/admin/api/auth_users.py +225 -0
  7. supython/admin/api/db.py +174 -0
  8. supython/admin/api/functions.py +92 -0
  9. supython/admin/api/jobs.py +192 -0
  10. supython/admin/api/ops.py +224 -0
  11. supython/admin/api/realtime.py +281 -0
  12. supython/admin/api/service_auth.py +49 -0
  13. supython/admin/api/service_auth_templates.py +83 -0
  14. supython/admin/api/service_auth_users.py +346 -0
  15. supython/admin/api/service_db.py +214 -0
  16. supython/admin/api/service_functions.py +287 -0
  17. supython/admin/api/service_jobs.py +282 -0
  18. supython/admin/api/service_ops.py +213 -0
  19. supython/admin/api/service_realtime.py +30 -0
  20. supython/admin/api/service_storage.py +220 -0
  21. supython/admin/api/storage.py +117 -0
  22. supython/admin/api/system.py +37 -0
  23. supython/admin/audit.py +29 -0
  24. supython/admin/deps.py +22 -0
  25. supython/admin/errors.py +16 -0
  26. supython/admin/schemas.py +310 -0
  27. supython/admin/session.py +52 -0
  28. supython/admin/spa.py +38 -0
  29. supython/admin/static/assets/Alert-dluGVkos.js +49 -0
  30. supython/admin/static/assets/Audit-Njung3HI.js +2 -0
  31. supython/admin/static/assets/Backups-DzPlFgrm.js +2 -0
  32. supython/admin/static/assets/Buckets-ByacGkU1.js +2 -0
  33. supython/admin/static/assets/Channels-BoIuTtam.js +353 -0
  34. supython/admin/static/assets/ChevronRight-CtQH1EQ1.js +2 -0
  35. supython/admin/static/assets/CodeViewer-Bqy7-wvH.js +2 -0
  36. supython/admin/static/assets/Crons-B67vc39F.js +2 -0
  37. supython/admin/static/assets/DashboardView-CUTFVL6k.js +2 -0
  38. supython/admin/static/assets/DataTable-COAAWEft.js +747 -0
  39. supython/admin/static/assets/DescriptionsItem-P8JUDaBs.js +75 -0
  40. supython/admin/static/assets/DrawerContent-TpYTFgF1.js +139 -0
  41. supython/admin/static/assets/Empty-cr2r7e2u.js +25 -0
  42. supython/admin/static/assets/EmptyState-DeDck-OL.js +2 -0
  43. supython/admin/static/assets/Grid-hFkp9F4P.js +2 -0
  44. supython/admin/static/assets/Input-DppYTq9C.js +259 -0
  45. supython/admin/static/assets/Invoke-DW3Nveeh.js +2 -0
  46. supython/admin/static/assets/JsonField-DibyJgun.js +2 -0
  47. supython/admin/static/assets/LoginView-BjLyE3Ds.css +1 -0
  48. supython/admin/static/assets/LoginView-CoOjECT_.js +111 -0
  49. supython/admin/static/assets/Logs-D9WYrnIT.js +2 -0
  50. supython/admin/static/assets/Logs-DS1XPa0h.css +1 -0
  51. supython/admin/static/assets/Migrations-DOSC2ddQ.js +2 -0
  52. supython/admin/static/assets/ObjectBrowser-_5w8vOX8.js +2 -0
  53. supython/admin/static/assets/Queue-CywZs6vI.js +2 -0
  54. supython/admin/static/assets/RefreshTokens-Ccjr53jg.js +2 -0
  55. supython/admin/static/assets/RlsEditor-BSlH9vSc.js +2 -0
  56. supython/admin/static/assets/Routes-BiLXE49D.js +2 -0
  57. supython/admin/static/assets/Routes-C-ianIGD.css +1 -0
  58. supython/admin/static/assets/SchemaBrowser-DKy2_KQi.css +1 -0
  59. supython/admin/static/assets/SchemaBrowser-XFvFbtDB.js +2 -0
  60. supython/admin/static/assets/Select-DIzZyRZb.js +434 -0
  61. supython/admin/static/assets/Space-n5-XcguU.js +400 -0
  62. supython/admin/static/assets/SqlEditor-b8pTsILY.js +3 -0
  63. supython/admin/static/assets/SqlWorkspace-BUS7IntH.js +104 -0
  64. supython/admin/static/assets/TableData-CQIagLKn.js +2 -0
  65. supython/admin/static/assets/Tag-D1fOKpTH.js +72 -0
  66. supython/admin/static/assets/Templates-BS-ugkdq.js +2 -0
  67. supython/admin/static/assets/Thing-CEAniuMg.js +107 -0
  68. supython/admin/static/assets/Users-wzwajhlh.js +2 -0
  69. supython/admin/static/assets/_plugin-vue_export-helper-DGA9ry_j.js +1 -0
  70. supython/admin/static/assets/dist-VXIJLCYq.js +13 -0
  71. supython/admin/static/assets/format-length-CGCY1rMh.js +2 -0
  72. supython/admin/static/assets/get-Ca6unauB.js +2 -0
  73. supython/admin/static/assets/index-CeE6v959.js +951 -0
  74. supython/admin/static/assets/pinia-COXwfrOX.js +2 -0
  75. supython/admin/static/assets/resources-Bt6thQCD.js +44 -0
  76. supython/admin/static/assets/use-locale-mtgM0a3a.js +2 -0
  77. supython/admin/static/assets/use-merged-state-BvhkaHNX.js +2 -0
  78. supython/admin/static/assets/useConfirm-tMjvBFXR.js +2 -0
  79. supython/admin/static/assets/useResource-C_rJCY8C.js +2 -0
  80. supython/admin/static/assets/useTable-CnZc5zhi.js +363 -0
  81. supython/admin/static/assets/useTable-Dg0XlRlq.css +1 -0
  82. supython/admin/static/assets/useToast-DsZKx0IX.js +2 -0
  83. supython/admin/static/assets/utils-sbXoq7Ir.js +2 -0
  84. supython/admin/static/favicon.svg +1 -0
  85. supython/admin/static/icons.svg +24 -0
  86. supython/admin/static/index.html +24 -0
  87. supython/app.py +149 -0
  88. supython/auth/__init__.py +3 -0
  89. supython/auth/_email_job.py +11 -0
  90. supython/auth/providers/__init__.py +34 -0
  91. supython/auth/providers/github.py +22 -0
  92. supython/auth/providers/google.py +19 -0
  93. supython/auth/providers/oauth.py +56 -0
  94. supython/auth/providers/registry.py +16 -0
  95. supython/auth/ratelimit.py +39 -0
  96. supython/auth/router.py +282 -0
  97. supython/auth/schemas.py +79 -0
  98. supython/auth/service.py +587 -0
  99. supython/body_size.py +184 -0
  100. supython/cli.py +1653 -0
  101. supython/client/__init__.py +67 -0
  102. supython/client/_auth.py +249 -0
  103. supython/client/_client.py +145 -0
  104. supython/client/_config.py +92 -0
  105. supython/client/_functions.py +69 -0
  106. supython/client/_storage.py +255 -0
  107. supython/client/py.typed +0 -0
  108. supython/db.py +151 -0
  109. supython/db_admin.py +8 -0
  110. supython/functions/__init__.py +19 -0
  111. supython/functions/context.py +262 -0
  112. supython/functions/loader.py +307 -0
  113. supython/functions/router.py +228 -0
  114. supython/functions/schemas.py +50 -0
  115. supython/gen/__init__.py +5 -0
  116. supython/gen/_introspect.py +137 -0
  117. supython/gen/types_py.py +270 -0
  118. supython/gen/types_ts.py +365 -0
  119. supython/health.py +229 -0
  120. supython/hooks.py +117 -0
  121. supython/jobs/__init__.py +31 -0
  122. supython/jobs/backends.py +97 -0
  123. supython/jobs/context.py +58 -0
  124. supython/jobs/cron.py +152 -0
  125. supython/jobs/cron_inproc.py +118 -0
  126. supython/jobs/decorators.py +76 -0
  127. supython/jobs/registry.py +79 -0
  128. supython/jobs/router.py +136 -0
  129. supython/jobs/schemas.py +92 -0
  130. supython/jobs/service.py +311 -0
  131. supython/jobs/worker.py +219 -0
  132. supython/jwks.py +257 -0
  133. supython/keyset.py +279 -0
  134. supython/logging_config.py +291 -0
  135. supython/mail.py +33 -0
  136. supython/mailer.py +65 -0
  137. supython/migrate.py +81 -0
  138. supython/migrations/0001_extensions_and_roles.sql +46 -0
  139. supython/migrations/0002_auth_schema.sql +66 -0
  140. supython/migrations/0003_demo_todos.sql +42 -0
  141. supython/migrations/0004_auth_v0_2.sql +47 -0
  142. supython/migrations/0005_storage_schema.sql +117 -0
  143. supython/migrations/0006_realtime_schema.sql +206 -0
  144. supython/migrations/0007_jobs_schema.sql +254 -0
  145. supython/migrations/0008_jobs_last_error.sql +56 -0
  146. supython/migrations/0009_auth_rate_limits.sql +33 -0
  147. supython/migrations/0010_worker_heartbeat.sql +14 -0
  148. supython/migrations/0011_admin_schema.sql +45 -0
  149. supython/migrations/0012_auth_banned_until.sql +10 -0
  150. supython/migrations/0013_email_templates.sql +19 -0
  151. supython/migrations/0014_realtime_payload_warning.sql +96 -0
  152. supython/migrations/0015_backups_schema.sql +14 -0
  153. supython/passwords.py +15 -0
  154. supython/realtime/__init__.py +6 -0
  155. supython/realtime/broker.py +814 -0
  156. supython/realtime/protocol.py +234 -0
  157. supython/realtime/router.py +184 -0
  158. supython/realtime/schemas.py +207 -0
  159. supython/realtime/service.py +261 -0
  160. supython/realtime/topics.py +175 -0
  161. supython/realtime/websocket.py +586 -0
  162. supython/scaffold/__init__.py +5 -0
  163. supython/scaffold/init_project.py +133 -0
  164. supython/scaffold/templates/Caddyfile.tmpl +4 -0
  165. supython/scaffold/templates/README.md.tmpl +22 -0
  166. supython/scaffold/templates/docker-compose.prod.yml.tmpl +84 -0
  167. supython/scaffold/templates/docker-compose.yml.tmpl +41 -0
  168. supython/scaffold/templates/docker_postgres_Dockerfile.tmpl +9 -0
  169. supython/scaffold/templates/docker_postgres_postgresql.conf.tmpl +3 -0
  170. supython/scaffold/templates/env.example.tmpl +149 -0
  171. supython/scaffold/templates/functions_README.md.tmpl +21 -0
  172. supython/scaffold/templates/gitignore.tmpl +14 -0
  173. supython/scaffold/templates/migrations/.gitkeep +0 -0
  174. supython/secretset.py +347 -0
  175. supython/security_headers.py +78 -0
  176. supython/settings.py +198 -0
  177. supython/storage/__init__.py +5 -0
  178. supython/storage/backends.py +392 -0
  179. supython/storage/router.py +341 -0
  180. supython/storage/schemas.py +50 -0
  181. supython/storage/service.py +445 -0
  182. supython/storage/signing.py +119 -0
  183. supython/tokens.py +85 -0
  184. supython-0.5.0.dist-info/METADATA +714 -0
  185. supython-0.5.0.dist-info/RECORD +188 -0
  186. supython-0.5.0.dist-info/WHEEL +4 -0
  187. supython-0.5.0.dist-info/entry_points.txt +2 -0
  188. supython-0.5.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,31 @@
"""Jobs module — durable job queue with cron scheduling."""

from .backends import get_backend
from .context import JobCtx
from .decorators import job
from .registry import get_registry
from .router import router
from .schemas import EnqueueResult, JobResponse
from .service import JobError, enqueue
from .worker import Worker


def get_worker() -> Worker:
    """Build a :class:`Worker` bound to the current application settings.

    The settings import is deferred to call time (function-local), matching
    how the rest of the package avoids settings work at import time.
    """
    from ..settings import get_settings

    settings = get_settings()
    return Worker(settings)


__all__ = [
    "Worker",
    "enqueue",
    "get_backend",
    "get_registry",
    "get_worker",
    "job",
    "JobCtx",
    "JobError",
    "JobResponse",
    "EnqueueResult",
    "router",
]
@@ -0,0 +1,97 @@
1
+ """Job backend protocol + default Postgres-queue implementation.
2
+
3
+ Collapsed from a two-file ``backends/`` package into a single module during the
4
+ v0.5 grooming pass: until a second backend actually lands (arq / dramatiq as
5
+ optional extras), the package added no structure the Protocol does not already
6
+ express. When an additional backend is introduced it should be promoted back
7
+ to a package.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ import logging
13
+ from typing import TYPE_CHECKING, Protocol, runtime_checkable
14
+ from uuid import UUID
15
+
16
+ from .. import db
17
+ from ..settings import Settings, get_settings
18
+ from .schemas import EnqueueResult, JobRecord
19
+ from .service import cancel as svc_cancel
20
+ from .service import enqueue as svc_enqueue
21
+ from .service import get_job as svc_get_job
22
+ from .service import list_jobs as svc_list_jobs
23
+
24
+ if TYPE_CHECKING: # pragma: no cover
25
+ from .worker import Worker
26
+
27
+ logger = logging.getLogger(__name__)
28
+
29
+
30
@runtime_checkable
class JobBackend(Protocol):
    """Structural interface a job backend must satisfy.

    ``@runtime_checkable`` permits ``isinstance(obj, JobBackend)`` checks
    (method presence only — signatures are not verified at runtime).
    ``PgQueueBackend`` below is the reference implementation; per the
    module docstring, additional backends would live beside it.
    """

    # ``enqueue``/``list_jobs`` accept backend-specific keyword arguments;
    # see the service-layer functions they delegate to for the full set.
    async def enqueue(self, **kwargs) -> EnqueueResult: ...
    async def run(self) -> None: ...
    async def cancel(self, job_id: UUID) -> None: ...
    async def retry(self, job_id: UUID) -> None: ...
    async def list_jobs(self, **kwargs) -> list[JobRecord]: ...
    async def get_job(self, job_id: UUID) -> JobRecord | None: ...
    # health_check returns a status dict rather than raising (see
    # PgQueueBackend.health_check for the shape).
    async def health_check(self) -> dict: ...
39
+
40
+
41
class PgQueueBackend:
    """Default backend — polls ``jobs.jobs`` with ``FOR UPDATE SKIP LOCKED``."""

    def __init__(self, settings: Settings) -> None:
        self._settings = settings
        self._worker: Worker | None = None

    async def enqueue(self, **kwargs) -> EnqueueResult:
        """Insert a job row via the service layer under the service role."""
        async with db.as_service_role() as conn:
            result = await svc_enqueue(conn, **kwargs)
        return result

    async def run(self) -> None:
        """Create a Worker for our settings and block in its polling loop."""
        from .worker import Worker

        worker = Worker(self._settings)
        self._worker = worker
        await worker.start()

    async def cancel(self, job_id: UUID) -> None:
        """Cancel one job by id via the service layer."""
        async with db.as_service_role() as conn:
            await svc_cancel(conn, job_id)

    async def retry(self, job_id: UUID) -> None:
        """Re-schedule a job using the backoff parameters stored on its record.

        A missing job id is a silent no-op.
        """
        from .service import mark_failed_retry

        async with db.as_service_role() as conn:
            record = await svc_get_job(conn, job_id)
            if record is not None:
                await mark_failed_retry(
                    conn,
                    job_id,
                    attempts=record.attempts,
                    backoff=record.backoff,
                    backoff_base_s=record.backoff_base_s,
                    backoff_max_s=record.backoff_max_s,
                )

    async def list_jobs(self, **kwargs) -> list[JobRecord]:
        """Forward a (possibly filtered) listing to the service layer."""
        async with db.as_service_role() as conn:
            rows = await svc_list_jobs(conn, **kwargs)
        return rows

    async def get_job(self, job_id: UUID) -> JobRecord | None:
        """Fetch a single job record, or ``None`` when absent."""
        async with db.as_service_role() as conn:
            record = await svc_get_job(conn, job_id)
        return record

    async def health_check(self) -> dict:
        """Probe the jobs table; failures become a dict instead of raising."""
        report = {"backend": "pg", "healthy": True}
        try:
            async with db.as_service_role() as conn:
                await conn.fetchval("select 1 from jobs.jobs limit 1")
        except Exception as exc:
            report = {"backend": "pg", "healthy": False, "detail": str(exc)}
        return report
93
+
94
+
95
def get_backend(settings: Settings | None = None) -> JobBackend:
    """Return the configured job backend (currently always ``PgQueueBackend``).

    Falls back to the global settings when *settings* is not provided.
    """
    if settings is None:
        settings = get_settings()
    return PgQueueBackend(settings)
@@ -0,0 +1,58 @@
1
+ """Job execution context — mirrors ``functions.context.Ctx`` for worker jobs.
2
+
3
+ ``HookCtx`` and ``build_hook_ctx`` used to live here; they moved to
4
+ ``supython.hooks`` so feature modules (auth, storage, ...) can fire hooks
5
+ without importing the jobs package. This file now only owns the job-side
6
+ context.
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ import logging
12
+ from collections.abc import Awaitable, Callable
13
+ from dataclasses import dataclass
14
+ from uuid import UUID
15
+
16
+ import asyncpg
17
+
18
+ from ..functions.context import PostgrestClient, StorageClient, _make_send_email
19
+ from ..mailer import EmailBackend, get_mailer
20
+ from ..settings import Settings, get_settings
21
+ from ..storage.backends import StorageBackend, get_backend
22
+
23
+
24
@dataclass
class JobCtx:
    """Context object handed to a job handler for one execution.

    Built by :func:`build_job_ctx`; mirrors the request-side
    ``functions.context.Ctx`` (see module docstring).
    """

    db: asyncpg.Connection  # connection the job's queries run on
    settings: Settings  # resolved application settings
    send_email: Callable[..., Awaitable[None]]  # wraps the mailer backend via _make_send_email
    storage: StorageClient  # storage client bound to the same connection
    postgrest: PostgrestClient  # built as PostgrestClient(url, None) — second arg None; presumably unauthenticated, confirm
    logger: logging.Logger  # namespaced "supython.jobs.<name>" by build_job_ctx
    job_id: UUID | None = None  # queue row id; defaults allow ad-hoc construction — NOTE(review): confirm
    attempt: int = 0  # attempt counter supplied by the caller
    name: str = ""  # registered job name; also feeds the logger namespace
35
+
36
+
37
def build_job_ctx(
    *,
    conn: asyncpg.Connection,
    job_id: UUID | None = None,
    attempt: int = 0,
    name: str = "",
    backend: StorageBackend | None = None,
    mailer: EmailBackend | None = None,
    settings: Settings | None = None,
) -> JobCtx:
    """Assemble a :class:`JobCtx` for a single job run.

    Any of *backend*, *mailer*, *settings* left as ``None`` falls back to
    the process-global default (``get_backend`` / ``get_mailer`` /
    ``get_settings``).
    """
    resolved = settings or get_settings()
    job_logger = logging.getLogger(f"supython.jobs.{name}")
    return JobCtx(
        db=conn,
        settings=resolved,
        send_email=_make_send_email(mailer or get_mailer()),
        storage=StorageClient(conn, backend or get_backend()),
        # Second PostgrestClient argument is None, as in the original.
        postgrest=PostgrestClient(resolved.postgrest_url, None),
        logger=job_logger,
        job_id=job_id,
        attempt=attempt,
        name=name,
    )
supython/jobs/cron.py ADDED
@@ -0,0 +1,152 @@
1
+ """Cron scheduling — ``pg_cron`` synchronisation only.
2
+
3
+ The in-process ``croniter`` fallback moved to :mod:`.cron_inproc` so that
4
+ the ``croniter`` import (an optional extra, see the v0.5 decision log) is
5
+ only touched when ``jobs_cron_backend == 'inproc'``. This module has no
6
+ optional-dep imports and is safe to load at app startup.
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ import contextlib
12
+ import json
13
+ import logging
14
+ from collections.abc import AsyncIterator
15
+
16
+ import asyncpg
17
+
18
+ from .registry import get_registry
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+
23
+ @contextlib.asynccontextmanager
24
+ async def _as_login_role(conn: asyncpg.Connection) -> AsyncIterator[None]:
25
+ """Temporarily step out of a NOLOGIN role for pg_cron scheduling.
26
+
27
+ pg_cron stamps ``current_user`` on the ``cron.job`` row at schedule
28
+ time and uses that role to initialise a background worker on every
29
+ tick. The caller of :func:`sync_pg_cron` is expected to be in
30
+ ``service_role``, which is ``NOLOGIN`` (see
31
+ ``migrations/0001_extensions_and_roles.sql``); every tick would then
32
+ FATAL with ``role "service_role" is not permitted to log in`` and
33
+ the schedule would silently never fire.
34
+
35
+ We hop back to the connection's pre-``service_role`` identity for
36
+ just the ``cron.schedule`` / ``cron.unschedule`` call. That identity
37
+ is whichever role opened the asyncpg connection (see
38
+ ``DATABASE_URL``) and is therefore guaranteed LOGIN-capable.
39
+
40
+ Uses ``SET LOCAL`` when the caller is inside a transaction so the
41
+ hop doesn't leak past a ``db.as_service_role()`` block into the
42
+ pooled connection.
43
+ """
44
+ prev_role = await conn.fetchval("select current_user")
45
+ in_txn = conn.is_in_transaction()
46
+ scope = "local " if in_txn else ""
47
+ await conn.execute(f"set {scope}role none")
48
+ try:
49
+ yield
50
+ finally:
51
+ await conn.execute(f'set {scope}role "{prev_role}"')
52
+
53
+
54
async def sync_pg_cron(conn: asyncpg.Connection) -> None:
    """Upsert ``pg_cron`` schedule rows from the registry and remove stale ones.

    The caller is expected to have entered ``db.as_service_role()`` (or
    the CLI equivalent) before invoking this — the ``jobs.cron_schedules``
    table is owned by ``service_role``. For the actual ``cron.schedule``
    / ``cron.unschedule`` calls we transiently hop back to a LOGIN role
    via :func:`_as_login_role` so pg_cron's background worker can
    initialise a session at tick time.

    When pg_cron is installed, errors from ``cron.schedule`` propagate
    to the caller.

    When pg_cron is NOT installed the metadata row is still upserted so
    the in-process scheduler (or a manual runner) can pick it up, and
    ``cron.schedule`` is skipped entirely.
    """
    registry = get_registry()
    has_pg_cron = await conn.fetchval(
        "select exists(select 1 from pg_extension where extname = 'pg_cron')"
    )

    for cron_defn in registry.iter_crons():
        # Metadata upsert happens unconditionally (even without pg_cron).
        # NOTE(review): the upsert also writes ``enabled`` back to true on
        # every sync — confirm a manually disabled schedule is meant to be
        # re-enabled here.
        await conn.execute(
            """
            insert into jobs.cron_schedules
                (name, cron_expr, job_name, job_version, payload, queue, enabled)
            values ($1, $2, $3, $4, $5::jsonb, $6, true)
            on conflict (name) do update set
                cron_expr = excluded.cron_expr,
                job_name = excluded.job_name,
                job_version = excluded.job_version,
                payload = excluded.payload,
                queue = excluded.queue,
                enabled = excluded.enabled
            """,
            cron_defn.name,
            cron_defn.cron_expr,
            cron_defn.job_name,
            cron_defn.job_version,
            json.dumps(cron_defn.payload),
            cron_defn.queue,
        )

        if not has_pg_cron:
            continue

        # Build the pg_cron command via ``format(..., %L, %L, %L)`` so
        # Postgres handles quoting/escaping of the payload JSON and text
        # args instead of interpolating them from Python. This closes
        # the pre-grooming bug where f-string interpolation produced
        # invalid JSON for any non-trivial payload.
        command = await conn.fetchval(
            """
            select format(
                'select jobs.enqueue(p_name := %L, p_payload := %L::jsonb, p_queue := %L)',
                $1::text, $2::text, $3::text
            )
            """,
            cron_defn.job_name,
            json.dumps(cron_defn.payload),
            cron_defn.queue,
        )

        # cron.schedule must run as a LOGIN-capable role (see helper docstring).
        async with _as_login_role(conn):
            await conn.execute(
                "select cron.schedule($1, $2, $3)",
                cron_defn.name,
                cron_defn.cron_expr,
                command,
            )

    # Second pass: drop schedules whose cron definition is no longer registered.
    registered_names = {c.name for c in registry.iter_crons()}
    existing = await conn.fetch("select name from jobs.cron_schedules")
    for row in existing:
        if row["name"] in registered_names:
            continue
        if has_pg_cron:
            async with _as_login_role(conn):
                # Stale metadata may no longer have a matching cron.job
                # row (e.g. manually unscheduled); tolerate that single
                # case. Everything else — permission denied, syntax
                # error, etc. — indicates a misconfiguration and is
                # surfaced to the caller.
                try:
                    await conn.execute(
                        "select cron.unschedule($1)", row["name"]
                    )
                except asyncpg.exceptions.RaiseError as exc:
                    if "could not find" not in str(exc):
                        raise
                    logger.debug(
                        "jobs.cron.unschedule_noop",
                        extra={"cron_name": row["name"]},
                    )
        # Metadata row is deleted whether or not pg_cron is installed.
        await conn.execute(
            "delete from jobs.cron_schedules where name = $1",
            row["name"],
        )
@@ -0,0 +1,118 @@
1
+ """Optional in-process cron scheduler (``croniter``-based).
2
+
3
+ Loaded only when ``jobs_cron_backend == 'inproc'``. ``croniter`` is an
4
+ optional extra — users opt in with ``pip install supython[cron-inproc]``
5
+ per the 2026-04-21 decision log row and §15.5 dependency budget.
6
+
7
+ Two bugs in the pre-grooming implementation were fixed here:
8
+
9
+ 1. **Firing condition** — ``croniter.get_next(datetime)`` returns the next
10
+ fire time strictly after its anchor, so the old ``next_fire <= now``
11
+ test was never true. We now track a per-schedule ``_last_fire`` anchor
12
+ and compare ``next_fire`` against the current ``now`` after the sleep.
13
+ 2. **Advisory lock lifetime** — ``pg_advisory_lock`` is session-scoped, so
14
+ acquiring on one pool connection and releasing on another leaks the
15
+ lock forever. Both now happen on the same connection for a single
16
+ cron tick.
17
+ """
18
+
19
+ from __future__ import annotations
20
+
21
+ import asyncio
22
+ import logging
23
+ from datetime import UTC, datetime
24
+ from typing import Any
25
+
26
+ from .. import db
27
+ from ..settings import Settings
28
+ from .registry import CronDefinition, get_registry
29
+ from .service import enqueue
30
+
31
+ logger = logging.getLogger(__name__)
32
+
33
+
34
+ def _require_croniter() -> Any:
35
+ try:
36
+ import croniter # type: ignore[import-not-found]
37
+
38
+ return croniter
39
+ except ImportError as exc: # pragma: no cover — depends on extras
40
+ raise ImportError(
41
+ "croniter is required for in-process cron scheduling. "
42
+ "Install with: pip install supython[cron-inproc]"
43
+ ) from exc
44
+
45
+
46
class InProcScheduler:
    """croniter-based fallback scheduler.

    One tick per minute; at each tick, every registered cron expression that
    has moved past its next scheduled fire time enqueues a single job.
    Advisory-lock + idempotency-key form a belt-and-braces guard against
    duplicate enqueues across replicas.
    """

    def __init__(self, settings: Settings) -> None:
        self._settings = settings
        self._running = False  # flipped by stop() to end the start() loop
        self._last_fire: dict[str, datetime] = {}  # per-schedule croniter anchor

    async def start(self) -> None:
        """Run the tick loop until :meth:`stop` is called.

        Blocks the calling coroutine; run it as its own asyncio task.
        Raises ImportError immediately if the croniter extra is missing.
        """
        croniter_mod = _require_croniter()
        self._running = True

        registry = get_registry()

        # Seed the ``_last_fire`` map so that the first tick does not
        # retroactively fire schedules that would otherwise look "overdue"
        # after a process restart.
        now = datetime.now(UTC)
        for cron_defn in registry.iter_crons():
            if cron_defn.name not in self._last_fire:
                self._last_fire[cron_defn.name] = now

        while self._running:
            now = datetime.now(UTC)
            for cron_defn in registry.iter_crons():
                try:
                    anchor = self._last_fire.get(cron_defn.name, now)
                    cron = croniter_mod.croniter(cron_defn.cron_expr, anchor)
                    next_fire = cron.get_next(datetime)
                    if next_fire <= now:
                        fired = await self._try_fire(cron_defn, next_fire)
                        if fired:
                            # Anchor advances only when this process won the
                            # advisory lock; losers retry next tick and the
                            # idempotency key guards against double-enqueue.
                            self._last_fire[cron_defn.name] = next_fire
                except Exception:
                    # One broken expression must not starve the others.
                    logger.exception(
                        "jobs.cron.tick_error",
                        extra={"cron_name": cron_defn.name},
                    )

            await asyncio.sleep(60)

    async def stop(self) -> None:
        """Request shutdown; takes effect after the current 60s sleep."""
        self._running = False

    async def _try_fire(
        self, cron_defn: CronDefinition, tick: datetime
    ) -> bool:
        """Acquire-enqueue-release on one connection; skip if another worker has it."""
        async with db.as_service_role() as conn:
            # pg advisory locks are session-scoped, so acquire and release
            # MUST happen on this same connection (see module docstring).
            locked = await conn.fetchval(
                "select pg_try_advisory_lock(hashtext($1))", cron_defn.name
            )
            if not locked:
                return False
            try:
                await enqueue(
                    conn,
                    name=cron_defn.job_name,
                    payload=cron_defn.payload,
                    queue=cron_defn.queue,
                    # Stable per (schedule, tick) across replicas —
                    # presumably deduplicated inside enqueue.
                    idempotency_key=f"{cron_defn.name}:{tick.isoformat()}",
                )
                return True
            finally:
                await conn.execute(
                    "select pg_advisory_unlock(hashtext($1))", cron_defn.name
                )
@@ -0,0 +1,76 @@
1
+ """Decorators for registering jobs and cron schedules."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Callable, Coroutine
6
+ from enum import StrEnum
7
+ from typing import Any
8
+
9
+ from .registry import CronDefinition, JobDefinition, get_registry
10
+
11
+ Handler = Callable[..., Coroutine[Any, Any, None]]
12
+
13
+
14
class Backoff(StrEnum):
    """Retry backoff strategy names accepted by :func:`job`.

    String-valued so a member can be stored and compared as plain text;
    the strategy itself is interpreted outside this module.
    """

    EXPONENTIAL = "exponential"
    LINEAR = "linear"
    CONSTANT = "constant"
18
+
19
+
20
def job(
    name: str,
    *,
    version: int = 1,
    max_attempts: int = 3,
    backoff: Backoff | str = Backoff.EXPONENTIAL,
    backoff_base_s: float = 5.0,
    backoff_max_s: float = 300.0,
    queue: str = "default",
    role: str = "service_role",
    claims_from: str | None = None,
    accepts_payload: bool = True,
) -> Callable[..., Any]:
    """Register the decorated coroutine as job ``name`` (v``version``).

    Registration happens at decoration (import) time via the process-global
    registry; the function itself is returned unwrapped.

    Args:
        name: Job name; together with ``version`` it forms the registry key.
        version: Handler version, allowing multiple versions of one job.
        max_attempts: Attempt budget recorded on the definition.
        backoff: Retry strategy — a :class:`Backoff` member or its string value.
        backoff_base_s: Base backoff delay in seconds.
        backoff_max_s: Backoff ceiling in seconds.
        queue: Queue name recorded on the definition.
        role: DB role recorded on the definition.
        claims_from: Optional claims source recorded on the definition.
        accepts_payload: Whether the handler takes a payload.

    Raises:
        ValueError: via the registry, if ``(name, version)`` is already
            registered.
    """

    def decorator(fn: Handler) -> Handler:
        get_registry().register_job(
            JobDefinition(
                name=name,
                version=version,
                handler=fn,
                max_attempts=max_attempts,
                # ``Backoff`` is a StrEnum — i.e. a ``str`` subclass — so the
                # previous ``backoff if isinstance(backoff, str) else
                # backoff.value`` always took the first branch (dead ``.value``
                # branch) and stored the enum member itself. ``str()``
                # normalises both enum members and plain strings to the plain
                # string value the definition expects.
                backoff=str(backoff),
                backoff_base_s=backoff_base_s,
                backoff_max_s=backoff_max_s,
                queue=queue,
                role=role,
                claims_from=claims_from,
                accepts_payload=accepts_payload,
            )
        )
        return fn

    return decorator
52
+
53
+
54
def cron(
    cron_expr: str,
    *,
    name: str,
    job_name: str | None = None,
    job_version: int = 1,
    payload: dict | None = None,
    queue: str = "default",
) -> Callable[..., Any]:
    """Register a cron schedule on the decorated function.

    Records a :class:`CronDefinition` in the process-global registry that
    enqueues ``job_name`` (defaulting to ``name``) on ``cron_expr``; the
    decorated function is returned unchanged.
    """

    def decorator(fn: Handler) -> Handler:
        defn = CronDefinition(
            name=name,
            cron_expr=cron_expr,
            job_name=job_name or name,
            job_version=job_version,
            payload=payload or {},
            queue=queue,
        )
        get_registry().register_cron(defn)
        return fn

    return decorator
@@ -0,0 +1,79 @@
1
+ """Job and cron definitions registry (process-global singleton)."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Callable, Coroutine
6
+ from dataclasses import dataclass, field
7
+ from typing import Any
8
+
9
+
10
@dataclass
class JobDefinition:
    """Static description of a registered job handler.

    Stored in :class:`Registry` keyed by ``(name, version)``.
    """

    name: str  # registry key, part 1
    # Async handler; repr=False keeps definition reprs readable.
    handler: Callable[..., Coroutine[Any, Any, None]] = field(repr=False)
    version: int = 1  # registry key, part 2
    max_attempts: int = 3  # attempt budget — enforced elsewhere, not here
    backoff: str = "exponential"  # strategy name (see decorators.Backoff)
    backoff_base_s: float = 5.0  # base retry delay, seconds
    backoff_max_s: float = 300.0  # retry delay ceiling, seconds
    queue: str = "default"
    role: str = "service_role"  # DB role recorded for the run — NOTE(review): applied by the worker, confirm
    claims_from: str | None = None
    accepts_payload: bool = True  # whether the handler takes a payload argument
23
+
24
+
25
@dataclass
class CronDefinition:
    """Static description of a cron schedule, keyed by unique ``name``."""

    name: str  # unique schedule name (registry key; also the cron_schedules row key)
    cron_expr: str  # cron expression string
    job_name: str  # job enqueued on each fire
    job_version: int = 1
    payload: dict = field(default_factory=dict)  # fresh dict per instance — no shared mutable default
    queue: str = "default"
33
+
34
+
35
class Registry:
    """In-memory store of job and cron definitions.

    Jobs are keyed by ``(name, version)`` so several versions of one job
    may coexist; cron schedules are keyed by their unique name.
    """

    def __init__(self) -> None:
        self._jobs: dict[tuple[str, int], JobDefinition] = {}
        self._crons: dict[str, CronDefinition] = {}

    def register_job(self, defn: JobDefinition) -> None:
        """Store *defn*; a duplicate ``(name, version)`` raises ValueError."""
        key = (defn.name, defn.version)
        if key in self._jobs:
            raise ValueError(f"job {defn.name!r} v{defn.version} already registered")
        self._jobs[key] = defn

    def register_cron(self, defn: CronDefinition) -> None:
        """Store *defn*; a duplicate name raises ValueError."""
        if defn.name in self._crons:
            raise ValueError(f"cron {defn.name!r} already registered")
        self._crons[defn.name] = defn

    def get(self, name: str, version: int) -> JobDefinition | None:
        """Exact ``(name, version)`` lookup; ``None`` when absent."""
        return self._jobs.get((name, version))

    def get_latest(self, name: str) -> JobDefinition | None:
        """Highest-versioned definition registered under *name*, or ``None``."""
        newest = max(
            (ver for job_name, ver in self._jobs if job_name == name),
            default=None,
        )
        return None if newest is None else self._jobs[(name, newest)]

    def iter_jobs(self):
        """Iterate all registered job definitions."""
        return iter(self._jobs.values())

    def iter_crons(self):
        """Iterate all registered cron definitions."""
        return iter(self._crons.values())


# Process-global singleton, created lazily by get_registry().
_registry: Registry | None = None


def get_registry() -> Registry:
    """Return the process-global :class:`Registry`, creating it on first use."""
    global _registry
    if _registry is None:
        _registry = Registry()
    return _registry


def reset_registry() -> None:
    """Discard the global registry; the next ``get_registry()`` starts fresh."""
    global _registry
    _registry = None