supython 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supython/__init__.py +24 -0
- supython/admin/__init__.py +3 -0
- supython/admin/api/__init__.py +24 -0
- supython/admin/api/auth.py +118 -0
- supython/admin/api/auth_templates.py +67 -0
- supython/admin/api/auth_users.py +225 -0
- supython/admin/api/db.py +174 -0
- supython/admin/api/functions.py +92 -0
- supython/admin/api/jobs.py +192 -0
- supython/admin/api/ops.py +224 -0
- supython/admin/api/realtime.py +281 -0
- supython/admin/api/service_auth.py +49 -0
- supython/admin/api/service_auth_templates.py +83 -0
- supython/admin/api/service_auth_users.py +346 -0
- supython/admin/api/service_db.py +214 -0
- supython/admin/api/service_functions.py +287 -0
- supython/admin/api/service_jobs.py +282 -0
- supython/admin/api/service_ops.py +213 -0
- supython/admin/api/service_realtime.py +30 -0
- supython/admin/api/service_storage.py +220 -0
- supython/admin/api/storage.py +117 -0
- supython/admin/api/system.py +37 -0
- supython/admin/audit.py +29 -0
- supython/admin/deps.py +22 -0
- supython/admin/errors.py +16 -0
- supython/admin/schemas.py +310 -0
- supython/admin/session.py +52 -0
- supython/admin/spa.py +38 -0
- supython/admin/static/assets/Alert-dluGVkos.js +49 -0
- supython/admin/static/assets/Audit-Njung3HI.js +2 -0
- supython/admin/static/assets/Backups-DzPlFgrm.js +2 -0
- supython/admin/static/assets/Buckets-ByacGkU1.js +2 -0
- supython/admin/static/assets/Channels-BoIuTtam.js +353 -0
- supython/admin/static/assets/ChevronRight-CtQH1EQ1.js +2 -0
- supython/admin/static/assets/CodeViewer-Bqy7-wvH.js +2 -0
- supython/admin/static/assets/Crons-B67vc39F.js +2 -0
- supython/admin/static/assets/DashboardView-CUTFVL6k.js +2 -0
- supython/admin/static/assets/DataTable-COAAWEft.js +747 -0
- supython/admin/static/assets/DescriptionsItem-P8JUDaBs.js +75 -0
- supython/admin/static/assets/DrawerContent-TpYTFgF1.js +139 -0
- supython/admin/static/assets/Empty-cr2r7e2u.js +25 -0
- supython/admin/static/assets/EmptyState-DeDck-OL.js +2 -0
- supython/admin/static/assets/Grid-hFkp9F4P.js +2 -0
- supython/admin/static/assets/Input-DppYTq9C.js +259 -0
- supython/admin/static/assets/Invoke-DW3Nveeh.js +2 -0
- supython/admin/static/assets/JsonField-DibyJgun.js +2 -0
- supython/admin/static/assets/LoginView-BjLyE3Ds.css +1 -0
- supython/admin/static/assets/LoginView-CoOjECT_.js +111 -0
- supython/admin/static/assets/Logs-D9WYrnIT.js +2 -0
- supython/admin/static/assets/Logs-DS1XPa0h.css +1 -0
- supython/admin/static/assets/Migrations-DOSC2ddQ.js +2 -0
- supython/admin/static/assets/ObjectBrowser-_5w8vOX8.js +2 -0
- supython/admin/static/assets/Queue-CywZs6vI.js +2 -0
- supython/admin/static/assets/RefreshTokens-Ccjr53jg.js +2 -0
- supython/admin/static/assets/RlsEditor-BSlH9vSc.js +2 -0
- supython/admin/static/assets/Routes-BiLXE49D.js +2 -0
- supython/admin/static/assets/Routes-C-ianIGD.css +1 -0
- supython/admin/static/assets/SchemaBrowser-DKy2_KQi.css +1 -0
- supython/admin/static/assets/SchemaBrowser-XFvFbtDB.js +2 -0
- supython/admin/static/assets/Select-DIzZyRZb.js +434 -0
- supython/admin/static/assets/Space-n5-XcguU.js +400 -0
- supython/admin/static/assets/SqlEditor-b8pTsILY.js +3 -0
- supython/admin/static/assets/SqlWorkspace-BUS7IntH.js +104 -0
- supython/admin/static/assets/TableData-CQIagLKn.js +2 -0
- supython/admin/static/assets/Tag-D1fOKpTH.js +72 -0
- supython/admin/static/assets/Templates-BS-ugkdq.js +2 -0
- supython/admin/static/assets/Thing-CEAniuMg.js +107 -0
- supython/admin/static/assets/Users-wzwajhlh.js +2 -0
- supython/admin/static/assets/_plugin-vue_export-helper-DGA9ry_j.js +1 -0
- supython/admin/static/assets/dist-VXIJLCYq.js +13 -0
- supython/admin/static/assets/format-length-CGCY1rMh.js +2 -0
- supython/admin/static/assets/get-Ca6unauB.js +2 -0
- supython/admin/static/assets/index-CeE6v959.js +951 -0
- supython/admin/static/assets/pinia-COXwfrOX.js +2 -0
- supython/admin/static/assets/resources-Bt6thQCD.js +44 -0
- supython/admin/static/assets/use-locale-mtgM0a3a.js +2 -0
- supython/admin/static/assets/use-merged-state-BvhkaHNX.js +2 -0
- supython/admin/static/assets/useConfirm-tMjvBFXR.js +2 -0
- supython/admin/static/assets/useResource-C_rJCY8C.js +2 -0
- supython/admin/static/assets/useTable-CnZc5zhi.js +363 -0
- supython/admin/static/assets/useTable-Dg0XlRlq.css +1 -0
- supython/admin/static/assets/useToast-DsZKx0IX.js +2 -0
- supython/admin/static/assets/utils-sbXoq7Ir.js +2 -0
- supython/admin/static/favicon.svg +1 -0
- supython/admin/static/icons.svg +24 -0
- supython/admin/static/index.html +24 -0
- supython/app.py +162 -0
- supython/auth/__init__.py +3 -0
- supython/auth/_email_job.py +11 -0
- supython/auth/providers/__init__.py +34 -0
- supython/auth/providers/github.py +22 -0
- supython/auth/providers/google.py +19 -0
- supython/auth/providers/oauth.py +56 -0
- supython/auth/providers/registry.py +16 -0
- supython/auth/ratelimit.py +39 -0
- supython/auth/router.py +282 -0
- supython/auth/schemas.py +79 -0
- supython/auth/service.py +587 -0
- supython/backups/__init__.py +24 -0
- supython/backups/_backup_job.py +170 -0
- supython/backups/schemas.py +18 -0
- supython/backups/service.py +217 -0
- supython/body_size.py +184 -0
- supython/cli.py +1663 -0
- supython/client/__init__.py +67 -0
- supython/client/_auth.py +249 -0
- supython/client/_client.py +145 -0
- supython/client/_config.py +92 -0
- supython/client/_functions.py +69 -0
- supython/client/_storage.py +255 -0
- supython/client/py.typed +0 -0
- supython/db.py +151 -0
- supython/db_admin.py +8 -0
- supython/extensions.py +36 -0
- supython/functions/__init__.py +19 -0
- supython/functions/context.py +262 -0
- supython/functions/loader.py +307 -0
- supython/functions/router.py +228 -0
- supython/functions/schemas.py +50 -0
- supython/gen/__init__.py +5 -0
- supython/gen/_introspect.py +137 -0
- supython/gen/types_py.py +270 -0
- supython/gen/types_ts.py +365 -0
- supython/health.py +229 -0
- supython/hooks.py +117 -0
- supython/jobs/__init__.py +31 -0
- supython/jobs/backends.py +97 -0
- supython/jobs/context.py +58 -0
- supython/jobs/cron.py +152 -0
- supython/jobs/cron_inproc.py +119 -0
- supython/jobs/decorators.py +76 -0
- supython/jobs/registry.py +79 -0
- supython/jobs/router.py +136 -0
- supython/jobs/schemas.py +92 -0
- supython/jobs/service.py +311 -0
- supython/jobs/worker.py +219 -0
- supython/jwks.py +257 -0
- supython/keyset.py +279 -0
- supython/logging_config.py +291 -0
- supython/mail.py +33 -0
- supython/mailer.py +65 -0
- supython/migrate.py +81 -0
- supython/migrations/0001_extensions_and_roles.sql +46 -0
- supython/migrations/0002_auth_schema.sql +66 -0
- supython/migrations/0003_demo_todos.sql +42 -0
- supython/migrations/0004_auth_v0_2.sql +47 -0
- supython/migrations/0005_storage_schema.sql +117 -0
- supython/migrations/0006_realtime_schema.sql +206 -0
- supython/migrations/0007_jobs_schema.sql +254 -0
- supython/migrations/0008_jobs_last_error.sql +56 -0
- supython/migrations/0009_auth_rate_limits.sql +33 -0
- supython/migrations/0010_worker_heartbeat.sql +14 -0
- supython/migrations/0011_admin_schema.sql +45 -0
- supython/migrations/0012_auth_banned_until.sql +10 -0
- supython/migrations/0013_email_templates.sql +19 -0
- supython/migrations/0014_realtime_payload_warning.sql +96 -0
- supython/migrations/0015_backups_schema.sql +14 -0
- supython/passwords.py +15 -0
- supython/realtime/__init__.py +6 -0
- supython/realtime/broker.py +814 -0
- supython/realtime/protocol.py +234 -0
- supython/realtime/router.py +184 -0
- supython/realtime/schemas.py +207 -0
- supython/realtime/service.py +261 -0
- supython/realtime/topics.py +175 -0
- supython/realtime/websocket.py +586 -0
- supython/scaffold/__init__.py +5 -0
- supython/scaffold/init_project.py +144 -0
- supython/scaffold/templates/Caddyfile.tmpl +4 -0
- supython/scaffold/templates/README.md.tmpl +22 -0
- supython/scaffold/templates/apps_hooks.py.tmpl +11 -0
- supython/scaffold/templates/apps_jobs.py.tmpl +8 -0
- supython/scaffold/templates/asgi.py.tmpl +14 -0
- supython/scaffold/templates/docker-compose.prod.yml.tmpl +84 -0
- supython/scaffold/templates/docker-compose.yml.tmpl +45 -0
- supython/scaffold/templates/docker_postgres_Dockerfile.tmpl +9 -0
- supython/scaffold/templates/docker_postgres_postgresql.conf.tmpl +3 -0
- supython/scaffold/templates/env.example.tmpl +168 -0
- supython/scaffold/templates/functions_README.md.tmpl +21 -0
- supython/scaffold/templates/gitignore.tmpl +14 -0
- supython/scaffold/templates/manage.py.tmpl +11 -0
- supython/scaffold/templates/migrations/.gitkeep +0 -0
- supython/scaffold/templates/package_init.py.tmpl +1 -0
- supython/scaffold/templates/settings.py.tmpl +31 -0
- supython/secretset.py +347 -0
- supython/security_headers.py +78 -0
- supython/settings.py +244 -0
- supython/settings_module.py +117 -0
- supython/storage/__init__.py +5 -0
- supython/storage/backends.py +392 -0
- supython/storage/router.py +341 -0
- supython/storage/schemas.py +50 -0
- supython/storage/service.py +445 -0
- supython/storage/signing.py +119 -0
- supython/tokens.py +85 -0
- supython-0.1.0.dist-info/METADATA +756 -0
- supython-0.1.0.dist-info/RECORD +200 -0
- supython-0.1.0.dist-info/WHEEL +4 -0
- supython-0.1.0.dist-info/entry_points.txt +2 -0
- supython-0.1.0.dist-info/licenses/LICENSE +21 -0
supython/cli.py
ADDED
|
@@ -0,0 +1,1663 @@
|
|
|
1
|
+
"""supython CLI."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import concurrent.futures
|
|
5
|
+
import os
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
import time
|
|
9
|
+
import uuid
|
|
10
|
+
from dataclasses import dataclass, field
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Literal
|
|
13
|
+
|
|
14
|
+
import asyncpg
|
|
15
|
+
import httpx
|
|
16
|
+
import typer
|
|
17
|
+
import uvicorn
|
|
18
|
+
|
|
19
|
+
from . import jwks, keyset, secretset, tokens
|
|
20
|
+
from . import migrate as migrate_mod
|
|
21
|
+
from .db_admin import rotate_role_password
|
|
22
|
+
from .gen import render_types_py, render_types_ts
|
|
23
|
+
from .logging_config import configure_logging
|
|
24
|
+
from .scaffold import scaffold
|
|
25
|
+
from .settings import Settings, get_settings
|
|
26
|
+
|
|
27
|
+
# Root CLI application. All feature areas below are mounted as sub-apps via
# `add_typer`; the mounting order here is the order they appear in `--help`.
app = typer.Typer(
    help="supython — lightweight Postgres-first BaaS for Python",
    no_args_is_help=True,
)

functions_cli = typer.Typer(help="Manage edge functions.")
app.add_typer(functions_cli, name="functions")

gen_cli = typer.Typer(help="Code generation (types, SDKs, ...).")
app.add_typer(gen_cli, name="gen")

realtime_cli = typer.Typer(help="Manage the realtime change-data-capture surface.")
app.add_typer(realtime_cli, name="realtime")

worker_cli = typer.Typer(help="Run and manage the background job worker.")
app.add_typer(worker_cli, name="worker")

jobs_cli = typer.Typer(help="Manage background jobs.")
app.add_typer(jobs_cli, name="jobs")

cron_cli = typer.Typer(help="Manage cron schedules.")
app.add_typer(cron_cli, name="cron")

# `keygen` is special: invoking it with no subcommand runs the legacy
# single-key flow (see `keygen_root`), hence invoke_without_command=True.
keygen_cli = typer.Typer(
    help="Manage JWT signing keys (generate, rotate, activate, prune).",
    invoke_without_command=True,
    no_args_is_help=False,
)
app.add_typer(keygen_cli, name="keygen")

test_cli = typer.Typer(
    help="Manage the integration test database (isolated from `supython up`).",
    no_args_is_help=True,
)
app.add_typer(test_cli, name="test")

secret_cli = typer.Typer(
    help="Manage symmetric secrets (storage signed URLs, OAuth state).",
    no_args_is_help=True,
)
app.add_typer(secret_cli, name="secret")

password_cli = typer.Typer(
    help="Rotate Postgres role passwords.",
    no_args_is_help=True,
)
app.add_typer(password_cli, name="password")

admin_cli = typer.Typer(
    help="Manage admin users (the operator surface for /admin/api/*).",
    no_args_is_help=True,
)
app.add_typer(admin_cli, name="admin")

# JWT algorithms the keygen commands accept.
_ASYMMETRIC_ALGS = {"RS256", "ES256"}
# JWK members that must never appear in a published (public) JWKS.
_PRIVATE_JWK_MEMBERS = frozenset({"d", "p", "q", "dp", "dq", "qi", "k"})
# Postgres roles / extensions the doctor checks look for.
_REQUIRED_ROLES = ("anon", "authenticated", "service_role", "authenticator")
_REQUIRED_EXTENSIONS = ("pgcrypto", "citext")
_RECOMMENDED_EXTENSIONS = ("pg_cron",)
_MIN_PG_VERSION = 14
# Expected pg_roles attributes per role, verified by the doctor checks.
_ROLE_ATTRIBUTES: dict[str, dict[str, bool]] = {
    "authenticator": {"rolcanlogin": True},
    "service_role": {"rolbypassrls": True},
    "anon": {"rolcanlogin": False},
    "authenticated": {"rolcanlogin": False},
}
# Schemas owned by the framework (as opposed to user application schemas).
_FRAMEWORK_SCHEMAS = ("auth", "storage", "realtime", "jobs", "supython")
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
@dataclass
|
|
97
|
+
class _DoctorReport:
|
|
98
|
+
ok: list[str] = field(default_factory=list)
|
|
99
|
+
warnings: list[str] = field(default_factory=list)
|
|
100
|
+
failures: list[str] = field(default_factory=list)
|
|
101
|
+
|
|
102
|
+
def merge(self, other: "_DoctorReport") -> None:
|
|
103
|
+
self.ok.extend(other.ok)
|
|
104
|
+
self.warnings.extend(other.warnings)
|
|
105
|
+
self.failures.extend(other.failures)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def _run_async(coro): # type: ignore[no-untyped-def]
|
|
109
|
+
"""Run a coroutine, working around an already-running event loop (e.g. pytest-asyncio)."""
|
|
110
|
+
try:
|
|
111
|
+
asyncio.get_running_loop()
|
|
112
|
+
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
|
|
113
|
+
return pool.submit(asyncio.run, coro).result()
|
|
114
|
+
except RuntimeError:
|
|
115
|
+
return asyncio.run(coro)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
@gen_cli.command("types")
def gen_types(
    lang: str = typer.Option("py", "--lang", help="Target language (`py` or `ts`)."),
    schema: list[str] = typer.Option(  # noqa: B008 (typer reads call at decoration time)
        ["public"],
        "--schema",
        help="Schema to introspect. Repeatable: --schema public --schema auth.",
    ),
    out: Path | None = typer.Option(  # noqa: B008
        None, "--out", help="Output file path. Defaults to types.<lang>."
    ),
) -> None:
    """Emit typed code mirroring the live Postgres schema.

    Connects using the configured database URL, introspects the requested
    schemas, and writes the rendered source to ``--out`` (or ``types.py`` /
    ``types.ts``). Exits 1 when the database is unreachable.
    """
    if lang == "py":
        out_path = out or Path("types.py")
        renderer = render_types_py
    elif lang == "ts":
        out_path = out or Path("types.ts")
        renderer = render_types_ts
    else:
        raise typer.BadParameter("--lang must be 'py' or 'ts'")

    try:
        src = _run_async(renderer(schemas=schema))
    except (OSError, asyncpg.PostgresError) as exc:
        typer.echo(f"error: could not connect to database: {exc}", err=True)
        raise typer.Exit(code=1) from exc
    # Create parent directories only when --out points somewhere nested.
    # (The previous `out_path.parent and ...` guard was dead: a Path is
    # always truthy, so only the string comparison ever decided anything.)
    if str(out_path.parent) not in ("", "."):
        out_path.parent.mkdir(parents=True, exist_ok=True)
    # Write UTF-8 explicitly: the write_text() default is the locale
    # encoding, which corrupts generated non-ASCII content on e.g. Windows.
    out_path.write_text(src, encoding="utf-8")
    typer.echo(f"wrote {out_path} ({len(src.splitlines())} lines) for schemas {schema}")
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
@realtime_cli.command("enable")
def realtime_enable(
    table: str = typer.Argument(
        ...,
        help="Fully qualified table name, e.g. `public.todos`.",
        metavar="SCHEMA.TABLE",
    ),
    owner_column: str = typer.Option(
        "user_id",
        "--owner-column",
        help=(
            "Column whose value identifies the row's owner. Used to gate "
            "DELETE events (the row is gone, so we fall back to matching "
            "old_record.<owner_column> against auth.uid())."
        ),
    ),
    no_owner_column: bool = typer.Option(
        False,
        "--no-owner-column",
        help=(
            "Disable the DELETE owner-column fallback. DELETE events on this "
            "table will only fan out to service_role listeners."
        ),
    ),
) -> None:
    """Opt a table into realtime change feeds (wraps `realtime.enable(...)`)."""
    # Deferred imports: keep realtime machinery out of unrelated CLI startup.
    from .realtime.schemas import EnabledTable, EnableTableRequest
    from .realtime.service import RealtimeError, enable_table

    # Validate the table name / owner-column up front; a ValueError here is a
    # usage error, so exit with code 2 rather than the generic 1.
    try:
        payload = EnableTableRequest(
            table=table,
            owner_column=None if no_owner_column else owner_column,
        )
    except ValueError as exc:
        typer.echo(f"error: invalid table name {table!r}: {exc}", err=True)
        raise typer.Exit(code=2) from exc

    s = get_settings()

    async def _run() -> EnabledTable:
        # One short-lived connection per invocation; always closed.
        conn = await asyncpg.connect(s.database_url)
        try:
            return await enable_table(conn, payload)
        finally:
            await conn.close()

    try:
        enabled = _run_async(_run())
    except RealtimeError as exc:
        # Domain error from the realtime service (e.g. table not found).
        typer.echo(f"error: {exc.code}: {exc.message}", err=True)
        raise typer.Exit(code=1) from exc
    except (OSError, asyncpg.PostgresError) as exc:
        typer.echo(f"error: could not connect to database: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    # Human-friendly summary of what got enabled.
    pk = ",".join(enabled.pk_columns) or "<none>"
    owner = enabled.owner_column or "<none>"
    typer.echo(
        f"enabled {enabled.schema_name}.{enabled.table_name} (pk={pk}, owner_column={owner})"
    )
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
# ---------------------------------------------------------------------------
|
|
215
|
+
# Worker
|
|
216
|
+
# ---------------------------------------------------------------------------
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
@worker_cli.command("run")
def worker_run(
    queue: str = typer.Option("default", "--queue", help="Queue to consume."),
    concurrency: int = typer.Option(5, "--concurrency", help="Max concurrent jobs."),
    batch_size: int = typer.Option(10, "--batch-size", help="Poll batch size."),
) -> None:
    """Start the background job worker (blocks until SIGTERM)."""
    # Deferred imports: worker machinery is only needed by this command.
    import signal

    from . import db
    from .extensions import load_extensions
    from .jobs.worker import Worker
    from .settings_module import UserSettings, load_user_settings

    s = get_settings()
    configure_logging(s.log_level, json_format=s.log_json)
    # CLI flags override the settings values before Worker is built from `s`.
    s.jobs_queue_default = queue
    s.jobs_concurrency = concurrency
    # NOTE(review): `batch_size` is accepted but never applied in this
    # function — nothing writes it into settings or passes it to Worker.
    # Confirm whether the worker reads a batch size elsewhere or the flag
    # is dead.
    user = load_user_settings(s.settings_module) if s.settings_module else UserSettings()
    # Load framework + user extensions so their job handlers register
    # before the worker starts polling.
    load_extensions([*s.extensions, *user.extensions])
    worker = Worker(s)

    # Own a dedicated event loop for the lifetime of the process.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    def _shutdown(signum, frame):
        # Schedule a graceful stop on the loop (handler runs on the main
        # thread, interleaved with the running loop).
        loop.create_task(worker.stop())

    signal.signal(signal.SIGINT, _shutdown)
    signal.signal(signal.SIGTERM, _shutdown)

    typer.echo(f"worker {worker.worker_id} starting (queue={queue})")
    try:
        loop.run_until_complete(db.init_pool())
        try:
            loop.run_until_complete(worker.start())
        except KeyboardInterrupt:
            # A Ctrl-C that escaped the signal handler: stop cleanly.
            loop.run_until_complete(worker.stop())
    finally:
        # Always close the pool and the loop, even if startup failed.
        try:
            loop.run_until_complete(db.close_pool())
        finally:
            loop.close()
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
# ---------------------------------------------------------------------------
|
|
265
|
+
# Jobs CLI
|
|
266
|
+
# ---------------------------------------------------------------------------
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
@jobs_cli.command("list")
def jobs_list(
    status: str | None = typer.Option(None, "--status", help="Filter by status."),
    queue: str | None = typer.Option(None, "--queue", help="Filter by queue."),
    limit: int = typer.Option(50, "--limit"),
) -> None:
    """List queued/running/finished jobs."""
    from .jobs.service import list_jobs

    settings = get_settings()

    async def _fetch():
        # Dedicated connection, elevated to service_role for the query.
        conn = await asyncpg.connect(settings.database_url)
        try:
            await conn.execute("set role service_role")
            return await list_jobs(conn, status=status, queue=queue, limit=limit)
        finally:
            await conn.close()

    jobs = _run_async(_fetch())
    if not jobs:
        typer.echo("no jobs found.")
        return
    for job in jobs:
        typer.echo(f" {job.id} {job.name:<30} {job.status:<12} attempts={job.attempts}")
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
@jobs_cli.command("show")
def jobs_show(
    job_id: str = typer.Argument(..., help="Job UUID."),
) -> None:
    """Show details for a single job."""
    from uuid import UUID

    from .jobs.service import get_job

    settings = get_settings()

    async def _fetch():
        # Dedicated connection, elevated to service_role for the lookup.
        conn = await asyncpg.connect(settings.database_url)
        try:
            await conn.execute("set role service_role")
            return await get_job(conn, UUID(job_id))
        finally:
            await conn.close()

    job = _run_async(_fetch())
    if job is None:
        typer.echo(f"job {job_id} not found.", err=True)
        raise typer.Exit(code=1)
    for line in (
        f" id: {job.id}",
        f" name: {job.name}",
        f" version: {job.version}",
        f" status: {job.status}",
        f" attempts: {job.attempts}/{job.max_attempts}",
        f" queue: {job.queue}",
        f" payload: {job.payload}",
        f" created_at: {job.created_at}",
    ):
        typer.echo(line)
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
@jobs_cli.command("cancel")
def jobs_cancel(
    job_id: str = typer.Argument(..., help="Job UUID."),
) -> None:
    """Cancel a queued or running job."""
    from uuid import UUID

    from .jobs.service import JobError, cancel

    settings = get_settings()

    async def _cancel() -> None:
        # Dedicated connection, elevated to service_role for the mutation.
        conn = await asyncpg.connect(settings.database_url)
        try:
            await conn.execute("set role service_role")
            await cancel(conn, UUID(job_id))
        finally:
            await conn.close()

    try:
        _run_async(_cancel())
    except JobError as exc:
        typer.echo(f"error: {exc.message}", err=True)
        raise typer.Exit(code=1) from exc
    typer.echo(f"cancelled {job_id}")
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
@jobs_cli.command("retry")
def jobs_retry(
    job_id: str = typer.Argument(..., help="Job UUID."),
) -> None:
    """Re-queue a failed job for retry."""
    from uuid import UUID

    from .jobs.service import JobError, retry as retry_job

    settings = get_settings()

    async def _requeue() -> None:
        # Dedicated connection, elevated to service_role for the mutation.
        conn = await asyncpg.connect(settings.database_url)
        try:
            await conn.execute("set role service_role")
            await retry_job(conn, UUID(job_id))
        finally:
            await conn.close()

    try:
        _run_async(_requeue())
    except JobError as exc:
        typer.echo(f"error: {exc.message}", err=True)
        raise typer.Exit(code=1) from exc
    typer.echo(f"re-queued {job_id}")
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
@jobs_cli.command("enqueue")
def jobs_enqueue(
    name: str = typer.Argument(..., help="Job name."),
    payload: str = typer.Option("{}", "--payload", help="JSON payload."),
    queue: str = typer.Option("default", "--queue"),
) -> None:
    """Enqueue a job manually."""
    import json as _json

    from .jobs.service import enqueue

    settings = get_settings()

    async def _submit():
        # Dedicated connection, elevated to service_role for the insert.
        # The payload is parsed here, matching the original side-effect
        # ordering (connect first, then parse).
        conn = await asyncpg.connect(settings.database_url)
        try:
            await conn.execute("set role service_role")
            return await enqueue(conn, name=name, payload=_json.loads(payload), queue=queue)
        finally:
            await conn.close()

    outcome = _run_async(_submit())
    typer.echo(f"enqueued {outcome.job.id} (is_new={outcome.is_new})")
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
# ---------------------------------------------------------------------------
|
|
410
|
+
# Cron CLI
|
|
411
|
+
# ---------------------------------------------------------------------------
|
|
412
|
+
|
|
413
|
+
|
|
414
|
+
@cron_cli.command("list")
def cron_list() -> None:
    """List registered cron schedules."""
    from .jobs.registry import get_registry

    schedules = list(get_registry().iter_crons())
    if not schedules:
        typer.echo("no crons registered.")
        return
    for schedule in schedules:
        typer.echo(f" {schedule.name:<30} {schedule.cron_expr:<20} → {schedule.job_name}")
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
@cron_cli.command("sync")
def cron_sync() -> None:
    """Sync registered crons with pg_cron."""
    from .jobs.cron import sync_pg_cron

    settings = get_settings()

    async def _sync() -> None:
        # Dedicated connection, elevated to service_role for the sync.
        conn = await asyncpg.connect(settings.database_url)
        try:
            await conn.execute("set role service_role")
            await sync_pg_cron(conn)
        finally:
            await conn.close()

    try:
        _run_async(_sync())
    except Exception as exc:  # CLI boundary: report anything as a clean error
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc
    typer.echo("cron schedules synced.")
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
@app.command()
def init(
    name: str = typer.Argument(..., help="Directory name of the new project."),
    here: bool = typer.Option(
        False,
        "--here",
        help="Scaffold into the current dir instead of ./<name>.",
    ),
    force: bool = typer.Option(False, "--force", help="Overwrite existing files."),
) -> None:
    """Scaffold a new supython project."""
    cwd = Path.cwd()
    target = cwd if here else cwd / name
    try:
        written = scaffold(name=name, target=target, force=force)
    except FileExistsError as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc
    for path in written:
        typer.echo(f" wrote {path.relative_to(cwd)}")
    typer.echo("")
    typer.echo("next steps:")
    typer.echo(f" cd {'.' if here else target.name}")
    typer.echo(" cp .env.example .env")
    typer.echo(" supython up && supython dev")
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
def _keygen_init_impl(
    *,
    alg: str,
    out_private: Path,
    out_jwks: Path,
    kid: str | None,
    force: bool,
) -> None:
    """Generate a single keypair + JWKS and print the env vars to configure."""
    try:
        # Validate the algorithm and write both files; all expected failure
        # modes surface as one of these exception types.
        signer = _generate_jwt_files(
            alg=_asymmetric_alg(alg),
            out_private=out_private,
            out_jwks=out_jwks,
            kid=kid,
            force=force,
        )
    except (FileExistsError, OSError, ValueError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    for line in (
        f"wrote private key: {out_private}",
        f"wrote JWKS: {out_jwks}",
        f"kid: {signer.kid}",
        "",
        "set these env vars:",
        f" JWT_ALG={signer.alg}",
        f" JWT_PRIVATE_KEY_PATH={out_private}",
        f" JWT_KID={signer.kid}",
        f" JWT_JWKS_PATH={out_jwks}",
    ):
        typer.echo(line)
|
|
506
|
+
|
|
507
|
+
|
|
508
|
+
@keygen_cli.callback()
def keygen_root(
    ctx: typer.Context,
    alg: str = typer.Option("RS256", "--alg", help="JWT algorithm: RS256 or ES256."),
    out_private: Path = typer.Option(  # noqa: B008
        Path("./.supython/jwt_private.pem"),
        "--out-private",
        help="Private key output path (init/back-compat only).",
    ),
    out_jwks: Path = typer.Option(  # noqa: B008
        Path("./.supython/jwks.json"),
        "--out-jwks",
        help="Public JWKS output path (init/back-compat only).",
    ),
    kid: str | None = typer.Option(None, "--kid", help="Key ID. Defaults to JWK thumbprint."),
    force: bool = typer.Option(False, "--force", help="Overwrite existing files."),
) -> None:
    """Generate a JWT signing keypair and public JWKS.

    Calling `supython keygen` with no subcommand runs the legacy single-key
    behavior (alias for `supython keygen init`). Use the `rotate`, `activate`,
    and `prune` subcommands for zero-downtime key rotation.
    """
    # A subcommand takes over when one was named; only the bare `keygen`
    # invocation falls through to the legacy single-key flow.
    if ctx.invoked_subcommand is None:
        _keygen_init_impl(
            alg=alg,
            out_private=out_private,
            out_jwks=out_jwks,
            kid=kid,
            force=force,
        )
|
|
540
|
+
|
|
541
|
+
|
|
542
|
+
@keygen_cli.command("init")
def keygen_init(
    alg: str = typer.Option("RS256", "--alg", help="JWT algorithm: RS256 or ES256."),
    out_private: Path = typer.Option(  # noqa: B008
        Path("./.supython/jwt_private.pem"),
        "--out-private",
        help="Private key output path.",
    ),
    out_jwks: Path = typer.Option(  # noqa: B008
        Path("./.supython/jwks.json"),
        "--out-jwks",
        help="Public JWKS output path.",
    ),
    kid: str | None = typer.Option(None, "--kid", help="Key ID. Defaults to JWK thumbprint."),
    force: bool = typer.Option(False, "--force", help="Overwrite existing files."),
) -> None:
    """Generate a single JWT signing keypair + public JWKS (legacy layout)."""
    # Thin wrapper: all the work lives in the shared impl, which the bare
    # `supython keygen` callback also delegates to.
    _keygen_init_impl(alg=alg, out_private=out_private, out_jwks=out_jwks, kid=kid, force=force)
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
@keygen_cli.command("rotate")
def keygen_rotate(
    alg: str = typer.Option("RS256", "--alg", help="JWT algorithm: RS256 or ES256."),
    no_reload: bool = typer.Option(
        False,
        "--no-reload",
        help="Skip the best-effort `docker compose kill -s SIGUSR2 postgrest`.",
    ),
) -> None:
    """Add a new kid in `verifying` status; do NOT change the active signer.

    On the first run after a legacy single-key setup, the existing private key
    is imported into the keyset directory + manifest.
    """
    try:
        # Validate the requested algorithm before touching any state.
        jwt_alg = _asymmetric_alg(alg)
        # One-time migration: pull a legacy single key into the keyset.
        # Returns None when there is nothing to import.
        legacy = keyset.import_legacy_single_key()
        if legacy is not None:
            typer.echo(f"imported legacy single key as kid={legacy.kid} (status=active)")
        # New key starts as `verifying` so verifiers learn it before any
        # tokens are signed with it; `activate` flips the signer later.
        entry = keyset.add_key(jwt_alg, status="verifying")
        # Drop the cached JWKS, then rewrite the published document so it
        # includes the new public key.
        jwks.clear_cache()
        s = get_settings()
        jwks_doc = jwks.write_current_jwks(s.jwt_jwks_path)
    except (FileExistsError, OSError, ValueError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    kids = ", ".join(key["kid"] for key in jwks_doc["keys"])
    typer.echo(f"added kid={entry.kid} ({entry.alg}, status=verifying)")
    typer.echo(f"wrote JWKS: {s.jwt_jwks_path} (kids={kids})")
    if not no_reload:
        # Best-effort nudge so PostgREST picks up the new JWKS promptly.
        _postgrest_reload()
    typer.echo(
        f"next: wait for PostgREST replicas to reload JWKS (>=30s), then "
        f"`supython keygen activate {entry.kid}`"
    )
|
|
604
|
+
|
|
605
|
+
|
|
606
|
+
@keygen_cli.command("activate")
def keygen_activate(
    kid: str = typer.Argument(..., help="Kid to make the active signing key."),
    no_reload: bool = typer.Option(
        False,
        "--no-reload",
        help="Skip the best-effort `docker compose kill -s SIGUSR2 postgrest`.",
    ),
) -> None:
    """Flip the active signing kid; previous active becomes `retired`."""
    try:
        keyset.activate(kid)
        # Invalidate the cached keyset before regenerating the public JWKS file.
        jwks.clear_cache()
        settings = get_settings()
        jwks_doc = jwks.write_current_jwks(settings.jwt_jwks_path)
    except (FileNotFoundError, KeyError, OSError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    kid_list = ", ".join(k["kid"] for k in jwks_doc["keys"])
    typer.echo(f"active kid is now {kid}")
    typer.echo(f"wrote JWKS: {settings.jwt_jwks_path} (kids={kid_list})")
    if not no_reload:
        _postgrest_reload()
    typer.echo(
        "supython service must be restarted (or `JWT_KID` set in the environment) "
        f"to begin signing under {kid}"
    )
|
|
634
|
+
|
|
635
|
+
|
|
636
|
+
@keygen_cli.command("prune")
def keygen_prune(
    force: bool = typer.Option(
        False,
        "--force",
        help="Drop all retired kids regardless of `JWT_ROTATION_GRACE_SECONDS`.",
    ),
    no_reload: bool = typer.Option(
        False,
        "--no-reload",
        help="Skip the best-effort `docker compose kill -s SIGUSR2 postgrest`.",
    ),
) -> None:
    """Drop retired kids whose grace window has elapsed."""
    try:
        removed = keyset.prune(force_all=force)
        jwks.clear_cache()
        settings = get_settings()
        jwks_doc = jwks.write_current_jwks(settings.jwt_jwks_path)
    except (OSError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    if not removed:
        typer.echo("no retired kids past the grace window; nothing pruned")
        return
    for pruned_kid in removed:
        typer.echo(f"pruned kid={pruned_kid}")
    # The JWKS may legitimately end up empty after a forced prune.
    kid_list = ", ".join(k["kid"] for k in jwks_doc["keys"]) or "(none)"
    typer.echo(f"wrote JWKS: {settings.jwt_jwks_path} (kids={kid_list})")
    if not no_reload:
        _postgrest_reload()
|
|
668
|
+
|
|
669
|
+
|
|
670
|
+
def _postgrest_reload() -> None:
    """Best-effort SIGUSR2 to PostgREST so it reloads JWKS. Non-fatal on failure."""
    # None means "let docker compose use its default file"; also try the prod
    # file explicitly when it exists in the working directory.
    files = [None]
    if Path("docker-compose.prod.yml").exists():
        files.append("docker-compose.prod.yml")

    for compose_file in files:
        cmd = ["docker", "compose"]
        if compose_file:
            cmd += ["-f", compose_file]
        # PostgREST re-reads its JWKS on SIGUSR2 without a restart.
        cmd += ["kill", "-s", "SIGUSR2", "postgrest"]
        try:
            result = subprocess.run(cmd, check=False, capture_output=True, text=True)
        except FileNotFoundError:
            # `docker` binary missing entirely — nothing more to try.
            typer.echo(
                "warn: docker not found; skipped PostgREST reload (verify with `supython doctor`)"
            )
            return
        if result.returncode == 0:
            typer.echo("sent SIGUSR2 to postgrest (JWKS hot-reload)")
            return

    # All compose-file variants failed; this is advisory only.
    typer.echo(
        "warn: PostgREST reload failed; verify with `supython doctor` "
        "(or pass --no-reload if PostgREST runs outside Docker)"
    )
|
|
696
|
+
|
|
697
|
+
|
|
698
|
+
# ---------------------------------------------------------------------------
|
|
699
|
+
# Secret CLI
|
|
700
|
+
# ---------------------------------------------------------------------------
|
|
701
|
+
|
|
702
|
+
|
|
703
|
+
def _secret_name(value: str) -> secretset.SecretName:
    """Translate the CLI shorthand ('storage' / 'oauth') to the manifest name."""
    aliases: dict[str, secretset.SecretName] = {
        "storage": "storage_signed_url",
        "oauth": "oauth_state",
    }
    if value in aliases:
        return aliases[value]
    raise typer.BadParameter("must be 'storage' or 'oauth'")
|
|
709
|
+
|
|
710
|
+
|
|
711
|
+
@secret_cli.command("status")
def secret_status() -> None:
    """Show symmetric secret manifest state."""
    try:
        manifest = secretset.load_manifest()
    except (OSError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    if manifest is None:
        typer.echo("no manifest found")
        return

    # One section per managed secret; the active kid is marked with "* ".
    for name in ("storage_signed_url", "oauth_state"):
        section = manifest.get(name)
        if section is None:
            typer.echo(f"{name}: (not in manifest)")
            continue
        active_kid = section.get("active")
        for entry in section.get("keys", []):
            marker = "* " if entry["kid"] == active_kid else "  "
            retired_note = ""
            if entry.get("retired_at"):
                retired_note = f" retired_at={entry.get('retired_at')}"
            typer.echo(
                f"{marker}{name} kid={entry['kid']} status={entry['status']}"
                f" created_at={entry.get('created_at')}{retired_note}"
            )
|
|
737
|
+
|
|
738
|
+
|
|
739
|
+
@secret_cli.command("rotate")
def secret_rotate(
    name: str = typer.Argument(..., help="Secret to rotate: storage | oauth"),
) -> None:
    """Add a new verifying secret."""
    secret_name = _secret_name(name)
    try:
        new_entry = secretset.rotate(secret_name)
        # Drop any in-process cache so the new secret is picked up immediately.
        secretset.clear_cache()
    except (OSError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    typer.echo(f"added kid={new_entry.kid} ({secret_name}, status=verifying)")
    typer.echo(f"next: `supython secret activate {name} {new_entry.kid}`")
|
|
754
|
+
|
|
755
|
+
|
|
756
|
+
@secret_cli.command("activate")
def secret_activate(
    name: str = typer.Argument(..., help="Secret to activate: storage | oauth"),
    kid: str = typer.Argument(..., help="Kid to promote to active."),
) -> None:
    """Promote verifying secret to active."""
    secret_name = _secret_name(name)
    try:
        secretset.activate(secret_name, kid)
        # Invalidate cached secret material so signing switches right away.
        secretset.clear_cache()
    except (FileNotFoundError, KeyError, OSError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    typer.echo(f"active kid is now {kid} for {secret_name}")
    typer.echo(f"next: restart supython replicas so new payloads are signed under {kid}")
|
|
772
|
+
|
|
773
|
+
|
|
774
|
+
@secret_cli.command("prune")
def secret_prune(
    name: str = typer.Argument(..., help="Secret to prune: storage | oauth"),
    force: bool = typer.Option(
        False,
        "--force",
        help="Drop all retired secrets regardless of grace window.",
    ),
) -> None:
    """Drop retired secrets past the grace window."""
    secret_name = _secret_name(name)
    try:
        removed = secretset.prune(secret_name, force_all=force)
        secretset.clear_cache()
    except (OSError, RuntimeError) as exc:
        typer.echo(f"error: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    if not removed:
        typer.echo("no retired secrets past the grace window; nothing pruned")
        return
    for pruned_kid in removed:
        typer.echo(f"pruned kid={pruned_kid}")
|
|
797
|
+
|
|
798
|
+
|
|
799
|
+
# ---------------------------------------------------------------------------
|
|
800
|
+
# Password rotate CLI
|
|
801
|
+
# ---------------------------------------------------------------------------
|
|
802
|
+
|
|
803
|
+
|
|
804
|
+
def _update_dotenv_key(key: str, value: str) -> bool:
    """Update a key in ``.env`` if the file exists. Returns whether modified.

    Only the first ``KEY=...`` line is rewritten; the file is left untouched
    (and ``False`` returned) when ``.env`` is absent or the key is not present.
    """
    env_path = Path(".env")
    if not env_path.exists():
        return False
    # Pin UTF-8 explicitly: Path.read_text/write_text otherwise use the
    # locale-dependent default encoding, which can corrupt non-ASCII values.
    lines = env_path.read_text(encoding="utf-8").splitlines()
    modified = False
    for i, line in enumerate(lines):
        if line.startswith(f"{key}="):
            lines[i] = f"{key}={value}"
            modified = True
            break
    if modified:
        # Re-join with a trailing newline, matching conventional .env layout.
        env_path.write_text("\n".join(lines) + "\n", encoding="utf-8")
    return modified
|
|
819
|
+
|
|
820
|
+
|
|
821
|
+
@password_cli.command("rotate")
def password_rotate(
    role: str = typer.Argument(..., help="Postgres role to rotate."),
    db_url: str | None = typer.Option(
        None, "--db-url", help="Privileged Postgres URL. Defaults to settings.database_url."
    ),
    generate: bool = typer.Option(True, help="Generate a strong random password."),
    password: str | None = typer.Option(None, help="Explicit new password (skips generation)."),
    no_confirm: bool = typer.Option(False, "--no-confirm", help="Skip confirmation prompt for CI."),
) -> None:
    """Rotate a Postgres role password.

    Generates (or accepts) a new password, applies it via
    ``rotate_role_password`` over a privileged connection, then best-effort
    updates matching ``.env`` entries and prints the new password once.
    """
    # Proper local import instead of the opaque `__import__("secrets")` call —
    # same module, but readable and visible to linters/type checkers.
    import secrets

    if password is None and generate:
        if not no_confirm:
            confirm = typer.confirm(f"Generate and set a new password for role '{role}'?")
            if not confirm:
                raise typer.Abort()
        password = secrets.token_urlsafe(48)

    if password is None:
        typer.echo("error: pass --password or use default --generate", err=True)
        raise typer.Exit(code=1)

    url = db_url or get_settings().database_url

    async def _run() -> None:
        # One short-lived privileged connection; always closed.
        conn = await asyncpg.connect(url)
        try:
            await rotate_role_password(conn, role, password)
        finally:
            await conn.close()

    try:
        _run_async(_run())
    except (OSError, asyncpg.PostgresError) as exc:
        typer.echo(f"error: could not rotate password: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    typer.echo(f"rotated password for role '{role}'")

    if role == "authenticator":
        updated = _update_dotenv_key("AUTHENTICATOR_PASSWORD", password)
        if updated:
            typer.echo("updated AUTHENTICATOR_PASSWORD in .env")
        else:
            typer.echo("warn: .env not found; update AUTHENTICATOR_PASSWORD manually")
        typer.echo(
            "reminder: update docker-compose.yml (or docker-compose.prod.yml) / "
            "PostgREST env to match"
        )

    # Detect if role matches the user in DATABASE_URL
    db_url_setting = get_settings().database_url
    if f"//{role}:" in db_url_setting or f"/{role}@" in db_url_setting:
        # NOTE(review): this writes the URL currently held in settings, i.e. the
        # one containing the OLD credentials — confirm whether DATABASE_URL
        # should instead be rebuilt with the freshly generated password.
        updated = _update_dotenv_key("DATABASE_URL", db_url_setting)
        if updated:
            typer.echo("updated DATABASE_URL in .env")
        typer.echo("reminder: restart supython replicas to pick up the new password")

    typer.echo("")
    typer.echo("new password (store in your secret manager; it will not be shown again):")
    typer.echo(password)
|
|
882
|
+
|
|
883
|
+
|
|
884
|
+
# ---------------------------------------------------------------------------
|
|
885
|
+
# Admin user CLI
|
|
886
|
+
# ---------------------------------------------------------------------------
|
|
887
|
+
|
|
888
|
+
|
|
889
|
+
@admin_cli.command("create-user")
def admin_create_user(
    email: str = typer.Argument(..., help="Admin email address."),
    password: str | None = typer.Option(
        None,
        "--password",
        help="Password (>=12 chars). Prompted securely if omitted.",
    ),
    is_root: bool = typer.Option(False, "--root", help="Mark this admin as root (is_root = true)."),
    db_url: str | None = typer.Option(
        None,
        "--db-url",
        help="Privileged Postgres URL. Defaults to settings.database_url.",
    ),
) -> None:
    """Create an admin user. Run once to bootstrap the first admin."""
    # Deferred imports keep CLI startup fast and scope these deps to this command.
    from email_validator import EmailNotValidError, validate_email

    from . import passwords

    # Normalize the address (syntax only — no DNS deliverability check).
    try:
        normalized = validate_email(email, check_deliverability=False).normalized
    except EmailNotValidError as exc:
        typer.echo(f"error: invalid email: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    # Prompt interactively (hidden, confirmed) when --password was not given;
    # the 12-char minimum applies to prompted passwords too.
    if password is None:
        password = typer.prompt("Password", hide_input=True, confirmation_prompt=True)
    if len(password) < 12:
        typer.echo("error: password must be at least 12 characters", err=True)
        raise typer.Exit(code=1)

    hashed = passwords.hash_password(password)
    url = db_url or get_settings().database_url

    async def _run() -> uuid.UUID:
        # Single parameterized insert over a short-lived privileged connection.
        conn = await asyncpg.connect(url)
        try:
            return await conn.fetchval(
                """
                insert into admin.admin_users (email, password_hash, is_root)
                values ($1, $2, $3)
                returning id
                """,
                normalized,
                hashed,
                is_root,
            )
        finally:
            await conn.close()

    try:
        admin_id = _run_async(_run())
    except asyncpg.UniqueViolationError as exc:
        # Duplicate email gets a dedicated, friendlier message than a raw DB error.
        typer.echo(f"error: admin with email {normalized!r} already exists", err=True)
        raise typer.Exit(code=1) from exc
    except (OSError, asyncpg.PostgresError) as exc:
        typer.echo(f"error: could not create admin user: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    suffix = " [root]" if is_root else ""
    typer.echo(f"created admin user id={admin_id} email={normalized}{suffix}")
|
|
951
|
+
|
|
952
|
+
|
|
953
|
+
@app.command()
def doctor() -> None:
    """Health check: Postgres, JWT keys, PostgREST."""
    configure_logging("INFO", json_format=False)
    try:
        settings = get_settings()
    except Exception as exc:
        typer.echo(f"error: settings failed to load: {exc}", err=True)
        raise typer.Exit(code=1) from exc

    report = _DoctorReport()
    report.merge(_check_database(settings.database_url))
    report.merge(_check_migration_drift(settings.database_url))
    jwt_report = _check_jwt(settings)
    report.merge(jwt_report)
    report.merge(_check_symmetric_secrets(settings))
    # The PostgREST token round-trip only makes sense with working JWT keys.
    if not jwt_report.failures:
        report.merge(_check_postgrest(settings))

    for message in report.ok:
        typer.echo(f"ok: {message}")
    for message in report.warnings:
        typer.echo(f"warn: {message}")
    for message in report.failures:
        typer.echo(f"fail: {message}", err=True)
    if report.failures:
        raise typer.Exit(code=1)
|
|
980
|
+
|
|
981
|
+
|
|
982
|
+
async def _check_database_async(database_url: str) -> _DoctorReport:
    """Probe Postgres: version, roles, extensions, wal_level, grants, schema owners."""
    report = _DoctorReport()
    try:
        conn = await asyncpg.connect(database_url, timeout=2.0)
    except (OSError, TimeoutError, asyncpg.PostgresError) as exc:
        # Every check below needs a connection; report one failure and stop.
        report.failures.append(f"Postgres unreachable at {database_url}: {exc}")
        return report

    try:
        # server_version_num is e.g. "150004" -> major version 15.
        version_num = await conn.fetchval("show server_version_num")
        try:
            major = int(version_num) // 10000
        except (TypeError, ValueError):
            report.failures.append(f"could not parse Postgres server_version_num: {version_num}")
        else:
            if major < _MIN_PG_VERSION:
                report.failures.append(
                    f"Postgres {major} is below minimum supported {_MIN_PG_VERSION}"
                )
            else:
                report.ok.append(f"Postgres reachable (server_version_num={version_num})")

        # All framework roles must exist; a missing one is a hard failure.
        roles = {
            row["rolname"]
            for row in await conn.fetch(
                """
                select rolname
                from pg_roles
                where rolname = any($1::text[])
                """,
                list(_REQUIRED_ROLES),
            )
        }
        missing_roles = sorted(set(_REQUIRED_ROLES) - roles)
        if missing_roles:
            report.failures.append(
                f"missing Postgres roles: {', '.join(missing_roles)} (run `supython up`)"
            )
        else:
            report.ok.append(f"required roles present: {', '.join(_REQUIRED_ROLES)}")

        # Extensions: required ones fail the check; recommended ones only warn.
        present_extensions = {
            row["extname"]
            for row in await conn.fetch(
                """
                select extname
                from pg_extension
                where extname = any($1::text[])
                """,
                list(_REQUIRED_EXTENSIONS) + list(_RECOMMENDED_EXTENSIONS),
            )
        }
        missing_required = sorted(set(_REQUIRED_EXTENSIONS) - present_extensions)
        if missing_required:
            report.failures.append(
                f"missing required Postgres extensions: {', '.join(missing_required)}"
            )
        else:
            report.ok.append(f"required extensions present: {', '.join(_REQUIRED_EXTENSIONS)}")

        missing_recommended = sorted(set(_RECOMMENDED_EXTENSIONS) - present_extensions)
        for extension in missing_recommended:
            report.warnings.append(
                f"recommended extension missing: {extension} "
                "(used by jobs cron + auth rate-limit prune)"
            )

        # Logical replication is needed by realtime; anything else is a warning.
        wal_level = await conn.fetchval("show wal_level")
        if wal_level != "logical":
            report.warnings.append(
                f"wal_level={wal_level!r}, but 'logical' is required for realtime (v0.8+)"
            )
        else:
            report.ok.append("wal_level=logical")

        # Per-role attribute expectations (e.g. rolcanlogin). Login-ability of
        # anon/authenticated only warns; any other mismatch is a failure.
        for role_name, expected_attrs in _ROLE_ATTRIBUTES.items():
            if role_name not in roles:
                continue
            for attr, expected_val in expected_attrs.items():
                # attr comes from the trusted _ROLE_ATTRIBUTES constant, so the
                # f-string column name is not an injection surface.
                actual = await conn.fetchval(
                    f"select {attr} from pg_roles where rolname = $1",
                    role_name,
                )
                if actual != expected_val:
                    label = "can login" if attr == "rolcanlogin" else attr
                    msg = f"role {role_name!r} {label}={actual}, expected {expected_val}"
                    if attr == "rolcanlogin" and role_name in ("anon", "authenticated"):
                        report.warnings.append(msg)
                    else:
                        report.failures.append(msg)
                else:
                    label = "can login" if attr == "rolcanlogin" else attr
                    report.ok.append(f"role {role_name!r} {label}={expected_val}")

        # PostgREST's authenticator must be a member of each request role so it
        # can SET ROLE into them per request.
        granted_rows = await conn.fetch(
            """
            select r1.rolname as roleid, r2.rolname as member
            from pg_auth_members m
            join pg_roles r1 on r1.oid = m.roleid
            join pg_roles r2 on r2.oid = m.member
            where r1.rolname = any($1::text[])
            """,
            ["anon", "authenticated", "service_role"],
        )
        granted_to_authenticator = {
            r["roleid"] for r in granted_rows if r["member"] == "authenticator"
        }
        for child in ("anon", "authenticated", "service_role"):
            if child not in granted_to_authenticator:
                report.failures.append(f"role {child!r} is not granted to 'authenticator'")
            else:
                report.ok.append(f"role {child!r} granted to 'authenticator'")

        # Framework schemas should be owned by service_role; mismatch only warns.
        schema_owners = {
            r["nspname"]: r["owner"]
            for r in await conn.fetch(
                """
                select nspname, rolname as owner
                from pg_namespace n
                join pg_roles r on r.oid = n.nspowner
                where nspname = any($1::text[])
                """,
                list(_FRAMEWORK_SCHEMAS),
            )
        }
        for schema in _FRAMEWORK_SCHEMAS:
            if schema in schema_owners and schema_owners[schema] != "service_role":
                report.warnings.append(
                    f"schema {schema!r} owned by {schema_owners[schema]!r}, expected 'service_role'"
                )
    finally:
        await conn.close()

    return report
|
|
1116
|
+
|
|
1117
|
+
|
|
1118
|
+
def _check_database(database_url: str) -> _DoctorReport:
    """Run the async DB checks, converting an unexpected crash into a failure line."""
    try:
        return _run_async(_check_database_async(database_url))
    except Exception as exc:
        fallback = _DoctorReport()
        fallback.failures.append(f"database check raised unexpectedly: {exc}")
        return fallback
|
|
1125
|
+
|
|
1126
|
+
|
|
1127
|
+
async def _check_migration_drift_async(database_url: str) -> _DoctorReport:
    """Compare migrations on disk against supython.migrations rows (warnings only)."""
    report = _DoctorReport()

    if not migrate_mod.DEFAULT_MIGRATIONS_DIR.exists():
        report.warnings.append(
            f"migrations directory not found at {migrate_mod.DEFAULT_MIGRATIONS_DIR}; "
            "skipping drift check"
        )
        return report

    # Filenames of all *.sql migrations shipped on disk.
    disk_files = {p.name for p in sorted(migrate_mod.DEFAULT_MIGRATIONS_DIR.glob("*.sql"))}

    try:
        conn = await asyncpg.connect(database_url, timeout=2.0)
    except (OSError, TimeoutError, asyncpg.PostgresError):
        # Connectivity is already diagnosed by the main DB check; just skip.
        report.warnings.append("cannot reach DB for migration drift check; skipping")
        return report

    try:
        has_table = await conn.fetchval(
            "select exists(select 1 from information_schema.tables "
            "where table_schema='supython' and table_name='migrations')"
        )
        if not has_table:
            # Fresh database that has never been migrated.
            report.warnings.append(
                "supython.migrations table not found; run `supython migrate` first"
            )
            return report

        db_rows = {r["name"] for r in await conn.fetch("select name from supython.migrations")}
    finally:
        # Closes the connection on every path, including the early returns above.
        await conn.close()

    # Drift in either direction is advisory, never a hard failure.
    unapplied = sorted(disk_files - db_rows)
    orphaned = sorted(db_rows - disk_files)

    if unapplied:
        report.warnings.append(f"{len(unapplied)} unapplied migration(s): {', '.join(unapplied)}")
    if orphaned:
        report.warnings.append(
            f"{len(orphaned)} migration(s) in DB but not on disk: {', '.join(orphaned)}"
        )
    if not unapplied and not orphaned:
        report.ok.append(f"all {len(disk_files)} migrations applied, no drift")

    return report
|
|
1173
|
+
|
|
1174
|
+
|
|
1175
|
+
def _check_migration_drift(database_url: str) -> _DoctorReport:
    """Run the async drift check, downgrading an unexpected crash to a warning."""
    try:
        return _run_async(_check_migration_drift_async(database_url))
    except Exception as exc:
        fallback = _DoctorReport()
        fallback.warnings.append(f"migration drift check raised unexpectedly: {exc}")
        return fallback
|
|
1182
|
+
|
|
1183
|
+
|
|
1184
|
+
def _check_jwt(s: Settings) -> _DoctorReport:
    """Validate JWT configuration: algorithm, key material, permissions, JWKS export."""
    report = _DoctorReport()
    if s.jwt_alg not in _ASYMMETRIC_ALGS:
        report.failures.append(f"JWT_ALG must be RS256 or ES256 for JWKS mode (got {s.jwt_alg})")

    if s.jwt_private_key is None and s.jwt_private_key_path is None:
        report.failures.append("set JWT_PRIVATE_KEY or JWT_PRIVATE_KEY_PATH")

    key_path = s.jwt_private_key_path
    if key_path is not None:
        if not key_path.exists():
            report.failures.append(f"private key file does not exist: {key_path}")
        elif os.name == "posix":
            # File-mode checks are only meaningful on POSIX filesystems.
            mode = key_path.stat().st_mode & 0o777
            if mode & 0o077:
                report.failures.append(
                    f"private key file must not be group/world-readable: "
                    f"{key_path} mode {mode:03o}"
                )

    # Only attempt to load keys when the configuration itself is sound.
    if not report.failures:
        try:
            signer = jwks.load_signing_key()
            jwks_doc = jwks.dump_jwks(jwks.load_verification_keyset())
            _assert_public_jwks(jwks_doc)
        except Exception as exc:
            report.failures.append(f"JWT key/JWKS check failed: {exc}")
        else:
            report.ok.append(f"JWT signing key loads ({signer.alg}, kid={signer.kid})")
            report.ok.append("JWKS export contains public key material only")

    return report
|
|
1215
|
+
|
|
1216
|
+
|
|
1217
|
+
def _check_postgrest(s: Settings) -> _DoctorReport:
    """Issue a token and verify PostgREST accepts it; unreachable is only a warning."""
    report = _DoctorReport()
    outcome = _check_postgrest_accepts_token(s.postgrest_url)
    if outcome == "unreachable":
        report.warnings.append(f"PostgREST not reachable at {s.postgrest_url}; skipped token check")
    elif outcome:
        # Any other truthy value is an error message from the token check.
        report.failures.append(outcome)
    else:
        report.ok.append("PostgREST accepted a freshly-issued token")
    return report
|
|
1227
|
+
|
|
1228
|
+
|
|
1229
|
+
def _check_symmetric_secrets(s: Settings) -> _DoctorReport:
    """Check the storage/oauth symmetric secrets: manifest state or legacy env fallback.

    Each secret is valid if either (a) a secretset manifest names an active kid
    whose secret file exists, is >=32 chars, and is not world-readable, or
    (b) a legacy environment variable of >=32 chars is set. Also warns when the
    rotation grace window is shorter than the signed-URL TTL.
    """
    report = _DoctorReport()
    # Hoisted out of the loop: the manifest is the same for both secrets, so
    # loading it once avoids redundant disk reads per iteration.
    manifest = secretset.load_manifest()
    for name, legacy_attr in (
        ("storage_signed_url", "storage_signed_url_secret"),
        ("oauth_state", "oauth_state_secret"),
    ):
        section = manifest.get(name) if manifest else None
        legacy = getattr(s, legacy_attr)

        if section is not None:
            active_kid = section.get("active")
            keys = {k["kid"]: k for k in section.get("keys", [])}

            if active_kid is None:
                report.failures.append(f"{name}: manifest exists but has no active kid")
                continue

            if active_kid not in keys:
                report.failures.append(f"{name}: active kid {active_kid!r} not found in keys list")
                continue

            secret_path = secretset.secrets_dir() / f"{name}.{active_kid}.secret"
            if not secret_path.exists():
                report.failures.append(f"{name}: active secret file missing: {secret_path}")
                continue

            value = secret_path.read_text().strip()
            if len(value) < 32:
                report.failures.append(f"{name}: active secret value is shorter than 32 characters")
                continue

            if os.name == "posix":
                # Permission check is advisory (warning), unlike the JWT key check.
                mode = secret_path.stat().st_mode & 0o777
                if mode & 0o077:
                    report.warnings.append(
                        f"{name}: secret file should not be group/world-readable: "
                        f"{secret_path} mode {mode:03o}"
                    )

            report.ok.append(f"{name}: active kid={active_kid} secret file readable")
        else:
            if not legacy or len(legacy) < 32:
                # Bug fix: the old `name.replace('_signed_url', '')` produced the
                # invalid command `supython secret rotate oauth_state`; map to the
                # CLI aliases that `_secret_name` actually accepts.
                cli_alias = "storage" if name == "storage_signed_url" else "oauth"
                report.failures.append(
                    f"{name}: no manifest and no valid legacy env var "
                    f"({legacy_attr}); run `supython secret rotate {cli_alias}` "
                    f"or set {legacy_attr.upper()}"
                )
            else:
                report.ok.append(f"{name}: using legacy env var ({legacy_attr})")

    if s.secret_rotation_grace_seconds < s.storage_signed_url_default_ttl:
        report.warnings.append(
            f"SECRET_ROTATION_GRACE_SECONDS ({s.secret_rotation_grace_seconds}) "
            f"is shorter than STORAGE_SIGNED_URL_DEFAULT_TTL ({s.storage_signed_url_default_ttl}); "
            f"signed URLs may expire before rotation grace window"
        )

    return report
|
|
1288
|
+
|
|
1289
|
+
|
|
1290
|
+
@functions_cli.command("list")
def functions_list() -> None:
    """List all discovered edge functions with their methods and auth mode."""
    from .functions.loader import FunctionRegistry

    settings = get_settings()
    # One-shot discovery; hot reload is only useful for the running service.
    registry = FunctionRegistry(Path(settings.functions_dir), hot_reload=False)
    registry.discover()
    discovered = registry.list()
    if not discovered:
        typer.echo("no functions found in " + settings.functions_dir)
        return
    for meta in discovered:
        typer.echo(f" {meta.name:<30} methods={','.join(meta.methods)} auth={meta.auth}")
|
|
1304
|
+
|
|
1305
|
+
|
|
1306
|
+
@app.command()
def dev(
    host: str = typer.Option("127.0.0.1", help="Bind host."),
    port: int = typer.Option(8000, help="Bind port."),
    reload: bool = typer.Option(True, help="Reload on code change."),
    slow_callback_warn_ms: int = typer.Option(
        100,
        "--slow-callback-warn-ms",
        help=(
            "Warn when an event-loop task blocks for longer than this many ms. "
            "Catches sync I/O (time.sleep, blocking requests) inside async "
            "functions. Set 0 to disable. Dev only — adds asyncio debug overhead."
        ),
    ),
) -> None:
    """Run the supython API service for local development."""
    settings = get_settings()
    configure_logging(settings.log_level, json_format=settings.log_json)
    # Communicated via environment so the (possibly reloaded) app process sees it.
    if slow_callback_warn_ms > 0:
        os.environ["SUPYTHON_SLOW_CALLBACK_MS"] = str(slow_callback_warn_ms)
    uvicorn.run("supython.app:app", host=host, port=port, reload=reload)
|
|
1327
|
+
|
|
1328
|
+
|
|
1329
|
+
@app.command()
def migrate() -> None:
    """Apply pending framework SQL migrations bundled with the supython package."""
    configure_logging("INFO", json_format=False)
    names = migrate_mod.run_sync()
    if names:
        # Report each migration file that was just applied.
        for migration in names:
            typer.echo(f"applied {migration}")
    else:
        typer.echo("nothing to apply — database is up to date.")
|
|
1339
|
+
|
|
1340
|
+
|
|
1341
|
+
def _resolve_compose_file(prod: bool = False) -> str | None:
|
|
1342
|
+
if prod:
|
|
1343
|
+
return "docker-compose.prod.yml"
|
|
1344
|
+
if Path("docker-compose.yml").exists():
|
|
1345
|
+
return None
|
|
1346
|
+
if Path("docker-compose.prod.yml").exists():
|
|
1347
|
+
return "docker-compose.prod.yml"
|
|
1348
|
+
return None
|
|
1349
|
+
|
|
1350
|
+
|
|
1351
|
+
@app.command()
def up(
    timeout: int = typer.Option(30, help="Seconds to wait for Postgres."),
    prod: bool = typer.Option(False, "--prod", help="Use docker-compose.prod.yml."),
    worker: bool = typer.Option(False, "--worker", help="Also start the worker (prod only)."),
) -> None:
    """Start Postgres + PostgREST and apply migrations.

    Brings the DB up first, runs migrations (which create the roles
    PostgREST needs), rotates the authenticator password, then starts PostgREST.
    """
    compose_file = _resolve_compose_file(prod)
    # prod mode may also be implied when only the prod compose file exists on disk.
    is_prod = compose_file == "docker-compose.prod.yml"
    configure_logging("INFO", json_format=False)
    # Start ONLY the db service first: migrations must run before PostgREST
    # comes up, because they create the roles PostgREST authenticates as.
    _compose_with(compose_file, "up", "-d", "db")
    typer.echo("waiting for Postgres ...")
    _wait_for_db(timeout)
    typer.echo("applying migrations ...")
    applied = migrate_mod.run_sync()
    for name in applied:
        typer.echo(f" applied {name}")
    s = get_settings()
    typer.echo("rotating authenticator password ...")
    # Idempotent: aligns the authenticator role password with the configured
    # value so PostgREST can log in after migrations created the role.
    asyncio.run(_rotate_authenticator_password(s.database_url, s.authenticator_password))
    # PostgREST validates JWTs against this JWKS file; make sure it exists
    # (generating a dev keypair if necessary) before starting the service.
    _ensure_jwks_for_postgrest(s)
    if is_prod:
        _compose_with(compose_file, "up", "-d", "postgrest", "supython")
        if worker:
            # The worker is behind a compose profile and is opt-in via --worker.
            _compose_with(compose_file, "--profile", "worker", "up", "-d", "worker")
        typer.echo("")
        typer.echo("ready.")
        typer.echo(" postgres localhost:54322 (bind 127.0.0.1 only)")
        typer.echo(" postgrest (internal only — routed via Caddy when --profile tls)")
        typer.echo(" supython http://localhost:8000")
        if worker:
            typer.echo(" worker running (profile=worker)")
    else:
        # Dev stack: only Postgres + PostgREST; `supython dev` runs the API.
        _compose_with(compose_file, "up", "-d", "postgrest")
        typer.echo("")
        typer.echo("ready.")
        typer.echo(" postgres localhost:54322 (user/db: supython)")
        typer.echo(f" postgrest {s.postgrest_url}")
        typer.echo(" next supython dev # start the auth/API service")
|
|
1394
|
+
|
|
1395
|
+
|
|
1396
|
+
@app.command()
def down(
    prod: bool = typer.Option(False, "--prod", help="Use docker-compose.prod.yml."),
) -> None:
    """Stop the docker-compose stack (keeps data)."""
    # `down` without -v leaves named volumes (and thus Postgres data) intact.
    _compose_with(_resolve_compose_file(prod), "down")
|
|
1402
|
+
|
|
1403
|
+
|
|
1404
|
+
@app.command()
def reset(
    prod: bool = typer.Option(False, "--prod", help="Use docker-compose.prod.yml."),
) -> None:
    """Stop the stack AND drop all data. Destructive."""
    # Require an interactive yes before nuking the data volume.
    if not typer.confirm("This will delete all Postgres data. Continue?"):
        raise typer.Abort()
    _compose_with(_resolve_compose_file(prod), "down", "-v")
|
|
1413
|
+
|
|
1414
|
+
|
|
1415
|
+
def _bootstrap_test_db(timeout: int) -> None:
    """Bring the test DB up, wait for healthcheck, then apply migrations.

    Idempotent: compose `up -d --wait` is a no-op when the container is
    already healthy, and the migration runner skips already-applied files.
    The named volume in `docker-compose.test.yml` makes the second call
    cheap — schema survives between pytest sessions.
    """
    _compose_with(_TEST_COMPOSE_FILE, "up", "-d", "--wait", "db")

    # Temporarily point DATABASE_URL at the test container so the migration
    # runner and settings resolve against it; restored in the finally below.
    prev = os.environ.get("DATABASE_URL")
    os.environ["DATABASE_URL"] = _TEST_DATABASE_URL
    get_settings.cache_clear()  # settings are cached; force a re-read of the env
    try:
        applied = migrate_mod.run_sync()
        for name in applied:
            typer.echo(f" applied {name}")
        if not applied:
            typer.echo(" (no pending migrations)")
        s = get_settings()
        # Keep the authenticator role's password in sync with settings.
        asyncio.run(_rotate_authenticator_password(s.database_url, s.authenticator_password))
    finally:
        # Restore the caller's environment exactly (distinguish absent vs. set),
        # and clear the settings cache again so later calls see the original env.
        if prev is None:
            os.environ.pop("DATABASE_URL", None)
        else:
            os.environ["DATABASE_URL"] = prev
        get_settings.cache_clear()
    _ = timeout  # `--wait` already enforces the healthcheck retries.
|
|
1443
|
+
|
|
1444
|
+
|
|
1445
|
+
@test_cli.command("up")
def test_up(
    timeout: int = typer.Option(60, help="Seconds to wait for Postgres health."),
) -> None:
    """Start the integration-test Postgres and apply pending migrations.

    The test stack runs on host port 54323 (vs 54322 for `supython up`) so
    it cannot collide with the dev DB. Data lives in a named volume, so
    the schema persists between pytest sessions; use `supython test reset`
    to throw it away.
    """
    configure_logging("INFO", json_format=False)
    _bootstrap_test_db(timeout)
    # Final status banner.
    for line in (
        "",
        "ready.",
        " postgres localhost:54323 (user/db: supython)",
        " next supython test run # run pytest",
    ):
        typer.echo(line)
|
|
1462
|
+
|
|
1463
|
+
|
|
1464
|
+
@test_cli.command("down")
def test_down() -> None:
    """Stop the test DB container. Keeps the volume (and migrations)."""
    # No -v: the named data volume survives, so the schema is reused next time.
    _compose_with(_TEST_COMPOSE_FILE, "down")
|
|
1468
|
+
|
|
1469
|
+
|
|
1470
|
+
@test_cli.command("reset")
def test_reset() -> None:
    """Stop the test DB container AND delete its data volume. Destructive."""
    # Require an interactive yes before deleting the volume.
    if not typer.confirm("This will delete the test DB volume. Continue?"):
        raise typer.Abort()
    _compose_with(_TEST_COMPOSE_FILE, "down", "-v")
|
|
1477
|
+
|
|
1478
|
+
|
|
1479
|
+
@test_cli.command(
    "run",
    context_settings={"allow_extra_args": True, "ignore_unknown_options": True},
)
def test_run(ctx: typer.Context) -> None:
    """Run pytest against the test DB. Extra args are forwarded to pytest.

    Ensures the test stack is up + migrated, then exec's pytest with
    DATABASE_URL pointing at the test container. Examples:

        supython test run # full suite
        supython test run tests/unit # unit-only (fast, no fixtures)
        supython test run -k auth_signup # forward -k to pytest
    """
    configure_logging("INFO", json_format=False)
    _bootstrap_test_db(timeout=60)

    # Child process gets the test DSN; our own environment stays untouched.
    child_env = {**os.environ, "DATABASE_URL": _TEST_DATABASE_URL}
    forwarded = list(ctx.args) or ["tests"]
    command = [sys.executable, "-m", "pytest", *forwarded]
    try:
        completed = subprocess.run(command, env=child_env, check=False)
    except FileNotFoundError as exc:
        typer.echo(f"error: pytest not found ({exc})", err=True)
        raise typer.Exit(code=1) from exc
    # Propagate pytest's exit status to the shell.
    raise typer.Exit(code=completed.returncode)
|
|
1506
|
+
|
|
1507
|
+
|
|
1508
|
+
@app.command()
def info() -> None:
    """Print resolved settings."""
    s = get_settings()
    # NOTE(review): database_url may embed credentials — confirm printing it
    # is intended for local/dev use only.
    typer.echo(f"database_url {s.database_url}")
    typer.echo(f"postgrest_url {s.postgrest_url}")
    typer.echo(f"jwt_alg {s.jwt_alg}")
    typer.echo(f"jwt_aud {s.jwt_aud}")
    typer.echo(f"access_token_ttl {s.access_token_ttl}s")
    typer.echo(f"refresh_token_ttl {s.refresh_token_ttl}s")
    typer.echo(f"cors_origins {s.cors_origins or '(none)'}")
|
|
1519
|
+
|
|
1520
|
+
|
|
1521
|
+
def _asymmetric_alg(alg: str) -> Literal["RS256", "ES256"]:
    """Uppercase *alg* and validate it is a supported asymmetric JWT algorithm."""
    candidate = alg.upper()
    if candidate in _ASYMMETRIC_ALGS:
        return candidate  # type: ignore[return-value]
    raise ValueError(f"JWT algorithm must be RS256 or ES256, got {alg!r}")
|
|
1526
|
+
|
|
1527
|
+
|
|
1528
|
+
def _assert_public_jwks(jwks_doc: dict) -> None:  # type: ignore[type-arg]
    """Raise ValueError if any key in *jwks_doc* carries private/symmetric JWK members."""
    for jwk in jwks_doc.get("keys", []):
        offending = sorted(_PRIVATE_JWK_MEMBERS.intersection(jwk))
        if not offending:
            continue
        raise ValueError(f"JWKS contains private/symmetric members: {', '.join(offending)}")
|
|
1533
|
+
|
|
1534
|
+
|
|
1535
|
+
def _check_postgrest_accepts_token(postgrest_url: str) -> str | None:
    """Issue a throwaway access token and probe PostgREST with it.

    Returns None on success, "unreachable" when PostgREST cannot be contacted
    (or answers 5xx), and a descriptive message for any other failure.
    """
    try:
        token, _ = tokens.issue_access_token(uuid.uuid4(), "doctor@supython.local")
        with httpx.Client(base_url=postgrest_url, timeout=2.0) as client:
            response = client.get("/", headers={"authorization": f"Bearer {token}"})
    except httpx.HTTPError:
        return "unreachable"
    except Exception as exc:
        return f"PostgREST token check failed: {exc}"

    status = response.status_code
    if status in (200, 300, 304):
        return None
    if status >= 500:
        return "unreachable"
    return f"PostgREST rejected a freshly-issued token: {status} {response.text}"
|
|
1550
|
+
|
|
1551
|
+
|
|
1552
|
+
def _generate_jwt_files(
    *,
    alg: Literal["RS256", "ES256"],
    out_private: Path,
    out_jwks: Path,
    kid: str | None,
    force: bool,
) -> jwks.SigningKey:
    """Generate a fresh keypair and write the private PEM plus public JWKS files.

    Refuses to overwrite existing files unless *force* is set, and refuses
    identical output paths. Returns the signing-key descriptor.
    """
    if out_private == out_jwks:
        raise ValueError("--out-private and --out-jwks must be different paths")
    if not force:
        # Fail fast before generating anything if either target already exists.
        for target in (out_private, out_jwks):
            if target.exists():
                raise FileExistsError(f"{target} exists; pass --force to overwrite")

    private_key = jwks.generate_private_key(alg)
    signer = jwks.signing_key_from_private_key(private_key, alg, kid)
    jwks.write_private_key_pem(out_private, jwks.private_key_to_pem(private_key), force=force)
    jwks.write_jwks_file(out_jwks, jwks.jwks_for_signing_key(signer))
    return signer
|
|
1572
|
+
|
|
1573
|
+
|
|
1574
|
+
def _ensure_jwks_for_postgrest(s: Settings) -> None:
    """Make sure a JWKS file exists at ``s.jwt_jwks_path`` for PostgREST.

    Three cases, in order:
    1. asymmetric alg with configured private key material -> (re)write the
       JWKS from the current keys;
    2. a JWKS file already on disk -> leave it untouched;
    3. nothing configured -> generate a fresh RS256 keypair and print the env
       vars the user must export before running `supython dev`.
    """
    if s.jwt_alg in _ASYMMETRIC_ALGS and (
        s.jwt_private_key is not None or s.jwt_private_key_path is not None
    ):
        jwks_doc = jwks.write_current_jwks(s.jwt_jwks_path)
        kids = ", ".join(key["kid"] for key in jwks_doc["keys"])
        typer.echo(f"wrote JWKS for PostgREST: {s.jwt_jwks_path} (kids={kids})")
        return

    if s.jwt_jwks_path.exists():
        typer.echo(f"using existing JWKS for PostgREST: {s.jwt_jwks_path}")
        return

    # No key material and no JWKS on disk: bootstrap a dev RS256 keypair.
    out_private = Path("./.supython/jwt_private.pem")
    signer = _generate_jwt_files(
        alg="RS256",
        out_private=out_private,
        out_jwks=s.jwt_jwks_path,
        kid=None,
        force=False,
    )
    typer.echo(f"generated RS256 keypair for PostgREST: {s.jwt_jwks_path}")
    typer.echo("add these env vars before running `supython dev`:")
    typer.echo(" JWT_ALG=RS256")
    typer.echo(f" JWT_PRIVATE_KEY_PATH={out_private}")
    typer.echo(f" JWT_KID={signer.kid}")
    typer.echo(f" JWT_JWKS_PATH={s.jwt_jwks_path}")
|
|
1601
|
+
|
|
1602
|
+
|
|
1603
|
+
# Compose file and DSN for the throwaway integration-test Postgres.
# It listens on host port 54323 (vs 54322 for the dev stack) so the two
# stacks cannot collide.
_TEST_COMPOSE_FILE = "docker-compose.test.yml"
_TEST_DATABASE_URL = "postgresql://supython:supython@localhost:54323/supython"
|
|
1605
|
+
|
|
1606
|
+
|
|
1607
|
+
def _compose(*args: str) -> None:
    """Run `docker compose <args...>` with no explicit -f compose file."""
    _compose_with(None, *args)
|
|
1609
|
+
|
|
1610
|
+
|
|
1611
|
+
def _compose_with(file: str | None, *args: str) -> None:
    """Run `docker compose [-f <file>] <args...>`, surfacing common errors."""
    # Build the full argv in one expression; -f is included only when a
    # compose file was resolved.
    command = ["docker", "compose", *(["-f", file] if file else []), *args]
    try:
        subprocess.run(command, check=True)
    except FileNotFoundError as exc:
        typer.echo(f"error: docker not found ({exc})", err=True)
        raise typer.Exit(code=1) from exc
    except subprocess.CalledProcessError as exc:
        # Propagate docker compose's own exit status.
        raise typer.Exit(code=exc.returncode) from exc
|
|
1624
|
+
|
|
1625
|
+
|
|
1626
|
+
def _wait_for_db(timeout_s: int) -> None:
    """Poll the configured database roughly once per second until it answers.

    Exits the CLI with status 1 (printing the last connection error) when
    *timeout_s* elapses without a successful ping.
    """
    settings = get_settings()
    deadline = time.time() + timeout_s
    failure: Exception | None = None
    while time.time() < deadline:
        try:
            asyncio.run(_ping(settings.database_url))
        except Exception as exc:
            failure = exc
            time.sleep(1)
        else:
            return
    typer.echo(f"timed out waiting for Postgres: {failure}", err=True)
    raise typer.Exit(code=1)
|
|
1639
|
+
|
|
1640
|
+
|
|
1641
|
+
async def _ping(url: str) -> None:
    """Connect to *url*, run `select 1`, and always close the connection."""
    connection = await asyncpg.connect(url)
    try:
        await connection.fetchval("select 1")
    finally:
        await connection.close()
|
|
1647
|
+
|
|
1648
|
+
|
|
1649
|
+
async def _rotate_authenticator_password(db_url: str, password: str) -> None:
    """Idempotent: set the authenticator role password to the configured value."""
    connection = await asyncpg.connect(db_url)
    try:
        await rotate_role_password(connection, "authenticator", password)
    finally:
        await connection.close()
|
|
1656
|
+
|
|
1657
|
+
|
|
1658
|
+
def main() -> None:
    """CLI entry point: run the Typer app and exit with its status code."""
    # `app()` may return None on success; coerce that to exit code 0.
    sys.exit(app() or 0)
|
|
1660
|
+
|
|
1661
|
+
|
|
1662
|
+
# Support direct execution (`python -m` / `python cli.py`) as well as the
# installed console-script entry point.
if __name__ == "__main__":
    main()
|