supython 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supython/__init__.py +24 -0
- supython/admin/__init__.py +3 -0
- supython/admin/api/__init__.py +24 -0
- supython/admin/api/auth.py +118 -0
- supython/admin/api/auth_templates.py +67 -0
- supython/admin/api/auth_users.py +225 -0
- supython/admin/api/db.py +174 -0
- supython/admin/api/functions.py +92 -0
- supython/admin/api/jobs.py +192 -0
- supython/admin/api/ops.py +224 -0
- supython/admin/api/realtime.py +281 -0
- supython/admin/api/service_auth.py +49 -0
- supython/admin/api/service_auth_templates.py +83 -0
- supython/admin/api/service_auth_users.py +346 -0
- supython/admin/api/service_db.py +214 -0
- supython/admin/api/service_functions.py +287 -0
- supython/admin/api/service_jobs.py +282 -0
- supython/admin/api/service_ops.py +213 -0
- supython/admin/api/service_realtime.py +30 -0
- supython/admin/api/service_storage.py +220 -0
- supython/admin/api/storage.py +117 -0
- supython/admin/api/system.py +37 -0
- supython/admin/audit.py +29 -0
- supython/admin/deps.py +22 -0
- supython/admin/errors.py +16 -0
- supython/admin/schemas.py +310 -0
- supython/admin/session.py +52 -0
- supython/admin/spa.py +38 -0
- supython/admin/static/assets/Alert-dluGVkos.js +49 -0
- supython/admin/static/assets/Audit-Njung3HI.js +2 -0
- supython/admin/static/assets/Backups-DzPlFgrm.js +2 -0
- supython/admin/static/assets/Buckets-ByacGkU1.js +2 -0
- supython/admin/static/assets/Channels-BoIuTtam.js +353 -0
- supython/admin/static/assets/ChevronRight-CtQH1EQ1.js +2 -0
- supython/admin/static/assets/CodeViewer-Bqy7-wvH.js +2 -0
- supython/admin/static/assets/Crons-B67vc39F.js +2 -0
- supython/admin/static/assets/DashboardView-CUTFVL6k.js +2 -0
- supython/admin/static/assets/DataTable-COAAWEft.js +747 -0
- supython/admin/static/assets/DescriptionsItem-P8JUDaBs.js +75 -0
- supython/admin/static/assets/DrawerContent-TpYTFgF1.js +139 -0
- supython/admin/static/assets/Empty-cr2r7e2u.js +25 -0
- supython/admin/static/assets/EmptyState-DeDck-OL.js +2 -0
- supython/admin/static/assets/Grid-hFkp9F4P.js +2 -0
- supython/admin/static/assets/Input-DppYTq9C.js +259 -0
- supython/admin/static/assets/Invoke-DW3Nveeh.js +2 -0
- supython/admin/static/assets/JsonField-DibyJgun.js +2 -0
- supython/admin/static/assets/LoginView-BjLyE3Ds.css +1 -0
- supython/admin/static/assets/LoginView-CoOjECT_.js +111 -0
- supython/admin/static/assets/Logs-D9WYrnIT.js +2 -0
- supython/admin/static/assets/Logs-DS1XPa0h.css +1 -0
- supython/admin/static/assets/Migrations-DOSC2ddQ.js +2 -0
- supython/admin/static/assets/ObjectBrowser-_5w8vOX8.js +2 -0
- supython/admin/static/assets/Queue-CywZs6vI.js +2 -0
- supython/admin/static/assets/RefreshTokens-Ccjr53jg.js +2 -0
- supython/admin/static/assets/RlsEditor-BSlH9vSc.js +2 -0
- supython/admin/static/assets/Routes-BiLXE49D.js +2 -0
- supython/admin/static/assets/Routes-C-ianIGD.css +1 -0
- supython/admin/static/assets/SchemaBrowser-DKy2_KQi.css +1 -0
- supython/admin/static/assets/SchemaBrowser-XFvFbtDB.js +2 -0
- supython/admin/static/assets/Select-DIzZyRZb.js +434 -0
- supython/admin/static/assets/Space-n5-XcguU.js +400 -0
- supython/admin/static/assets/SqlEditor-b8pTsILY.js +3 -0
- supython/admin/static/assets/SqlWorkspace-BUS7IntH.js +104 -0
- supython/admin/static/assets/TableData-CQIagLKn.js +2 -0
- supython/admin/static/assets/Tag-D1fOKpTH.js +72 -0
- supython/admin/static/assets/Templates-BS-ugkdq.js +2 -0
- supython/admin/static/assets/Thing-CEAniuMg.js +107 -0
- supython/admin/static/assets/Users-wzwajhlh.js +2 -0
- supython/admin/static/assets/_plugin-vue_export-helper-DGA9ry_j.js +1 -0
- supython/admin/static/assets/dist-VXIJLCYq.js +13 -0
- supython/admin/static/assets/format-length-CGCY1rMh.js +2 -0
- supython/admin/static/assets/get-Ca6unauB.js +2 -0
- supython/admin/static/assets/index-CeE6v959.js +951 -0
- supython/admin/static/assets/pinia-COXwfrOX.js +2 -0
- supython/admin/static/assets/resources-Bt6thQCD.js +44 -0
- supython/admin/static/assets/use-locale-mtgM0a3a.js +2 -0
- supython/admin/static/assets/use-merged-state-BvhkaHNX.js +2 -0
- supython/admin/static/assets/useConfirm-tMjvBFXR.js +2 -0
- supython/admin/static/assets/useResource-C_rJCY8C.js +2 -0
- supython/admin/static/assets/useTable-CnZc5zhi.js +363 -0
- supython/admin/static/assets/useTable-Dg0XlRlq.css +1 -0
- supython/admin/static/assets/useToast-DsZKx0IX.js +2 -0
- supython/admin/static/assets/utils-sbXoq7Ir.js +2 -0
- supython/admin/static/favicon.svg +1 -0
- supython/admin/static/icons.svg +24 -0
- supython/admin/static/index.html +24 -0
- supython/app.py +162 -0
- supython/auth/__init__.py +3 -0
- supython/auth/_email_job.py +11 -0
- supython/auth/providers/__init__.py +34 -0
- supython/auth/providers/github.py +22 -0
- supython/auth/providers/google.py +19 -0
- supython/auth/providers/oauth.py +56 -0
- supython/auth/providers/registry.py +16 -0
- supython/auth/ratelimit.py +39 -0
- supython/auth/router.py +282 -0
- supython/auth/schemas.py +79 -0
- supython/auth/service.py +587 -0
- supython/backups/__init__.py +24 -0
- supython/backups/_backup_job.py +170 -0
- supython/backups/schemas.py +18 -0
- supython/backups/service.py +217 -0
- supython/body_size.py +184 -0
- supython/cli.py +1663 -0
- supython/client/__init__.py +67 -0
- supython/client/_auth.py +249 -0
- supython/client/_client.py +145 -0
- supython/client/_config.py +92 -0
- supython/client/_functions.py +69 -0
- supython/client/_storage.py +255 -0
- supython/client/py.typed +0 -0
- supython/db.py +151 -0
- supython/db_admin.py +8 -0
- supython/extensions.py +36 -0
- supython/functions/__init__.py +19 -0
- supython/functions/context.py +262 -0
- supython/functions/loader.py +307 -0
- supython/functions/router.py +228 -0
- supython/functions/schemas.py +50 -0
- supython/gen/__init__.py +5 -0
- supython/gen/_introspect.py +137 -0
- supython/gen/types_py.py +270 -0
- supython/gen/types_ts.py +365 -0
- supython/health.py +229 -0
- supython/hooks.py +117 -0
- supython/jobs/__init__.py +31 -0
- supython/jobs/backends.py +97 -0
- supython/jobs/context.py +58 -0
- supython/jobs/cron.py +152 -0
- supython/jobs/cron_inproc.py +119 -0
- supython/jobs/decorators.py +76 -0
- supython/jobs/registry.py +79 -0
- supython/jobs/router.py +136 -0
- supython/jobs/schemas.py +92 -0
- supython/jobs/service.py +311 -0
- supython/jobs/worker.py +219 -0
- supython/jwks.py +257 -0
- supython/keyset.py +279 -0
- supython/logging_config.py +291 -0
- supython/mail.py +33 -0
- supython/mailer.py +65 -0
- supython/migrate.py +81 -0
- supython/migrations/0001_extensions_and_roles.sql +46 -0
- supython/migrations/0002_auth_schema.sql +66 -0
- supython/migrations/0003_demo_todos.sql +42 -0
- supython/migrations/0004_auth_v0_2.sql +47 -0
- supython/migrations/0005_storage_schema.sql +117 -0
- supython/migrations/0006_realtime_schema.sql +206 -0
- supython/migrations/0007_jobs_schema.sql +254 -0
- supython/migrations/0008_jobs_last_error.sql +56 -0
- supython/migrations/0009_auth_rate_limits.sql +33 -0
- supython/migrations/0010_worker_heartbeat.sql +14 -0
- supython/migrations/0011_admin_schema.sql +45 -0
- supython/migrations/0012_auth_banned_until.sql +10 -0
- supython/migrations/0013_email_templates.sql +19 -0
- supython/migrations/0014_realtime_payload_warning.sql +96 -0
- supython/migrations/0015_backups_schema.sql +14 -0
- supython/passwords.py +15 -0
- supython/realtime/__init__.py +6 -0
- supython/realtime/broker.py +814 -0
- supython/realtime/protocol.py +234 -0
- supython/realtime/router.py +184 -0
- supython/realtime/schemas.py +207 -0
- supython/realtime/service.py +261 -0
- supython/realtime/topics.py +175 -0
- supython/realtime/websocket.py +586 -0
- supython/scaffold/__init__.py +5 -0
- supython/scaffold/init_project.py +144 -0
- supython/scaffold/templates/Caddyfile.tmpl +4 -0
- supython/scaffold/templates/README.md.tmpl +22 -0
- supython/scaffold/templates/apps_hooks.py.tmpl +11 -0
- supython/scaffold/templates/apps_jobs.py.tmpl +8 -0
- supython/scaffold/templates/asgi.py.tmpl +14 -0
- supython/scaffold/templates/docker-compose.prod.yml.tmpl +84 -0
- supython/scaffold/templates/docker-compose.yml.tmpl +45 -0
- supython/scaffold/templates/docker_postgres_Dockerfile.tmpl +9 -0
- supython/scaffold/templates/docker_postgres_postgresql.conf.tmpl +3 -0
- supython/scaffold/templates/env.example.tmpl +168 -0
- supython/scaffold/templates/functions_README.md.tmpl +21 -0
- supython/scaffold/templates/gitignore.tmpl +14 -0
- supython/scaffold/templates/manage.py.tmpl +11 -0
- supython/scaffold/templates/migrations/.gitkeep +0 -0
- supython/scaffold/templates/package_init.py.tmpl +1 -0
- supython/scaffold/templates/settings.py.tmpl +31 -0
- supython/secretset.py +347 -0
- supython/security_headers.py +78 -0
- supython/settings.py +244 -0
- supython/settings_module.py +117 -0
- supython/storage/__init__.py +5 -0
- supython/storage/backends.py +392 -0
- supython/storage/router.py +341 -0
- supython/storage/schemas.py +50 -0
- supython/storage/service.py +445 -0
- supython/storage/signing.py +119 -0
- supython/tokens.py +85 -0
- supython-0.1.0.dist-info/METADATA +756 -0
- supython-0.1.0.dist-info/RECORD +200 -0
- supython-0.1.0.dist-info/WHEEL +4 -0
- supython-0.1.0.dist-info/entry_points.txt +2 -0
- supython-0.1.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
"""Job handler for admin database backups.
|
|
2
|
+
|
|
3
|
+
Registered with the jobs framework so backups inherit retries, backoff,
|
|
4
|
+
visibility timeout, and observability instead of running as one-shot
|
|
5
|
+
asyncio tasks tied to the FastAPI process lifetime.
|
|
6
|
+
|
|
7
|
+
Two execution modes (selected via ``settings.backup_via``):
|
|
8
|
+
|
|
9
|
+
* ``host`` — invoke ``pg_dump`` from the worker's PATH and use ``-f file``.
|
|
10
|
+
Suitable for production where ``postgresql-client`` is bundled
|
|
11
|
+
in the worker image.
|
|
12
|
+
* ``docker`` — ``docker exec`` into the running postgres container and stream
|
|
13
|
+
pg_dump's stdout into a host-side file. Suitable for the
|
|
14
|
+
bundled docker-compose dev setup; no host install required and
|
|
15
|
+
the pg_dump version always matches the server.
|
|
16
|
+
|
|
17
|
+
Payload shape:
|
|
18
|
+
{"backup_id": "<uuid>", "kind": "full" | "schema-only", "file_path": "<abs path>"}
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
from __future__ import annotations
|
|
22
|
+
|
|
23
|
+
import asyncio
|
|
24
|
+
import contextlib
|
|
25
|
+
import os
|
|
26
|
+
from uuid import UUID
|
|
27
|
+
|
|
28
|
+
from ..jobs.decorators import job
|
|
29
|
+
from ..settings import get_settings
|
|
30
|
+
from .service import _parse_db_url
|
|
31
|
+
|
|
32
|
+
JOB_NAME = "admin_backup_run"
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _build_args(*, kind: str, file_path: str) -> tuple[list[str], dict[str, str], bool]:
    """Build the pg_dump invocation for the configured execution mode.

    Returns ``(argv, env, stream_stdout)``. With ``stream_stdout`` True the
    caller is responsible for piping the child's stdout into ``file_path``
    (docker-exec mode, where a ``-f`` path would land inside the container);
    with False, pg_dump writes the file itself via ``-f``.
    """
    settings = get_settings()
    db = _parse_db_url(settings.database_url)
    env = os.environ.copy()
    schema_only = kind == "schema-only"

    if settings.backup_via != "docker":
        # Host mode: pg_dump from the worker's PATH, credentials via env.
        argv = [
            "pg_dump",
            "-h",
            db["host"],
            "-p",
            db["port"],
            "-U",
            db["user"],
            "-d",
            db["dbname"],
            "--no-owner",
            "--no-acl",
            "-f",
            file_path,
        ]
        if schema_only:
            argv.append("--schema-only")
        env["PGPASSWORD"] = db["password"]
        return argv, env, False

    container = settings.backup_docker_container
    if not container:
        raise RuntimeError(
            "BACKUP_DOCKER_CONTAINER is required when BACKUP_VIA=docker. "
            "Set it to the name of your postgres container (for scaffolded "
            "projects this is typically '<project>-db', e.g. 'igo-db'), or "
            "switch to BACKUP_VIA=host."
        )
    # Docker mode: exec into the postgres container; password travels via
    # the container's environment (-e), dump arrives on our stdout.
    argv = [
        "docker",
        "exec",
        "-i",
        "-e",
        f"PGPASSWORD={db['password']}",
        container,
        "pg_dump",
        "-U",
        db["user"],
        "-d",
        db["dbname"],
        "--no-owner",
        "--no-acl",
    ]
    if schema_only:
        argv.append("--schema-only")
    return argv, env, True
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
@job(
    JOB_NAME,
    max_attempts=2,
    backoff="exponential",
    backoff_base_s=30.0,
    backoff_max_s=300.0,
)
async def admin_backup_run(ctx, payload: dict) -> None:
    """Run one pg_dump backup and record the outcome in admin.backups.

    Payload keys (see module docstring): ``backup_id`` (UUID string),
    ``kind`` ("full" | "schema-only"), ``file_path`` (absolute target path).

    Flow: mark the row 'running', execute pg_dump (host or docker-exec
    mode, chosen by ``_build_args``), then mark 'completed' with size and
    path. On any failure the partial dump file is removed best-effort,
    the row is marked 'failed' with a truncated error message, and the
    exception is re-raised so the jobs framework counts the attempt.
    """
    backup_id = UUID(payload["backup_id"])
    kind: str = payload["kind"]
    file_path: str = payload["file_path"]

    timeout_s = get_settings().backup_timeout_s

    await ctx.db.execute(
        """
        update admin.backups
        set status = 'running', error_message = null
        where id = $1
        """,
        backup_id,
    )

    try:
        args, env, stream_stdout = _build_args(kind=kind, file_path=file_path)

        # ExitStack so the output file (opened only in docker/stream mode)
        # is closed before we stat it, however the subprocess exits.
        with contextlib.ExitStack() as stack:
            out_file = stack.enter_context(open(file_path, "wb")) if stream_stdout else None
            proc = await asyncio.create_subprocess_exec(
                *args,
                env=env,
                # stream mode: pg_dump's stdout goes straight into the file;
                # host mode: pg_dump writes via -f, so stdout is just drained.
                stdout=out_file if out_file is not None else asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
            )
            try:
                _stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=timeout_s)
            except TimeoutError as exc:
                # NOTE: asyncio.TimeoutError is an alias of builtins.TimeoutError
                # on Python 3.11+ (this file already requires 3.11 features).
                proc.kill()
                with contextlib.suppress(Exception):
                    await proc.wait()
                raise RuntimeError(f"pg_dump timed out after {timeout_s}s") from exc

        if proc.returncode != 0:
            # Truncate stderr so it fits the error_message column (see 2000
            # used again in the failure handler below).
            error_text = stderr.decode(errors="replace")[:2000] or "pg_dump failed"
            raise RuntimeError(error_text)

        file_size = os.path.getsize(file_path)
        await ctx.db.execute(
            """
            update admin.backups
            set status = 'completed',
                size = $2,
                file_path = $3,
                finished_at = now()
            where id = $1
            """,
            backup_id,
            file_size,
            file_path,
        )

    except Exception as exc:
        # Best-effort cleanup of a partial/empty dump; the row keeps the error.
        with contextlib.suppress(OSError):
            os.unlink(file_path)
        await ctx.db.execute(
            """
            update admin.backups
            set status = 'failed',
                error_message = $2,
                finished_at = now()
            where id = $1
            """,
            backup_id,
            str(exc)[:2000],
        )
        # Re-raise so the jobs framework records the failure and may retry.
        raise
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Pydantic v2 models for the backups module."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from uuid import UUID
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class BackupRecord(BaseModel):
    """One row of the admin.backups bookkeeping table.

    Mirrors the columns selected by the backups service queries; the
    nullable fields are filled in by the backup job on completion or
    failure.
    """

    id: UUID
    # "full" or "schema-only" (validated by the service layer).
    kind: str
    # Job lifecycle state; the service/job code writes 'running',
    # 'completed', and 'failed' — NOTE(review): the full value set comes
    # from the admin.backups schema; confirm against the migration.
    status: str
    # Dump size in bytes; set when the backup completes.
    size: int | None = None
    # Absolute path of the dump file; set when the backup completes.
    file_path: str | None = None
    # Truncated error text; set when the backup fails.
    error_message: str | None = None
    started_at: datetime
    finished_at: datetime | None = None
    created_at: datetime
|
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
"""Framework-agnostic async service functions for the backups module.
|
|
2
|
+
|
|
3
|
+
All functions take an asyncpg.Connection and raise BackupError on failure.
|
|
4
|
+
No FastAPI imports here — this module is testable without HTTP.
|
|
5
|
+
|
|
6
|
+
Backups execute via the jobs framework (see ``_backup_job.py``); this
|
|
7
|
+
module is responsible for the admin.backups bookkeeping and for
|
|
8
|
+
enqueueing the work. The jobs worker is the durable executor.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import hashlib
|
|
14
|
+
import hmac
|
|
15
|
+
import logging
|
|
16
|
+
import time
|
|
17
|
+
from datetime import UTC, datetime
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from urllib.parse import urlparse
|
|
20
|
+
from uuid import UUID
|
|
21
|
+
|
|
22
|
+
import asyncpg
|
|
23
|
+
|
|
24
|
+
from ..settings import get_settings
|
|
25
|
+
from .schemas import BackupRecord
|
|
26
|
+
|
|
27
|
+
logger = logging.getLogger(__name__)
|
|
28
|
+
|
|
29
|
+
_VALID_KINDS = ("full", "schema-only")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class BackupError(Exception):
    """Domain error raised by the backups service functions.

    Carries a machine-readable ``code``, a human-readable ``message``
    (also the exception text), and an HTTP-style ``status`` for the API
    layer to translate into a response.
    """

    def __init__(self, code: str, message: str, status: int = 400) -> None:
        super().__init__(message)
        self.message = message
        self.status = status
        self.code = code
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _get_backups_dir() -> Path:
    """Absolute, resolved path of the configured backups directory."""
    return Path(get_settings().backups_dir).resolve()
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _parse_db_url(db_url: str) -> dict[str, str]:
|
|
46
|
+
parsed = urlparse(db_url)
|
|
47
|
+
return {
|
|
48
|
+
"host": parsed.hostname or "localhost",
|
|
49
|
+
"port": str(parsed.port or 5432),
|
|
50
|
+
"user": parsed.username or "",
|
|
51
|
+
"password": parsed.password or "",
|
|
52
|
+
"dbname": (parsed.path or "/").lstrip("/") or "supython",
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _row_to_record(row: asyncpg.Record) -> BackupRecord:
    """Map one admin.backups row onto the BackupRecord schema."""
    # Columns that may be absent/null until the job finishes.
    nullable = {
        key: row.get(key)
        for key in ("size", "file_path", "error_message", "finished_at")
    }
    return BackupRecord(
        id=row["id"],
        kind=row["kind"],
        status=row["status"],
        started_at=row["started_at"],
        created_at=row["created_at"],
        **nullable,
    )
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
async def start_backup(conn: asyncpg.Connection, *, kind: str) -> BackupRecord:
    """Insert a backup row and enqueue a job to execute the dump.

    Returns the initial record immediately. The jobs worker picks up the
    queued job, runs pg_dump, and updates admin.backups status/size/file_path
    when done. If a worker dies mid-job the visibility timeout reclaims it.

    Args:
        conn: open asyncpg connection (also used to enqueue the job).
        kind: "full" or "schema-only".

    Raises:
        BackupError: 422 for an unknown ``kind``; 500 if the insert
            unexpectedly returns no row.
    """
    if kind not in _VALID_KINDS:
        raise BackupError(
            "invalid_kind",
            f"kind must be one of {_VALID_KINDS}",
            422,
        )

    row = await conn.fetchrow(
        """
        insert into admin.backups (kind)
        values ($1)
        returning id, kind, status, size, file_path, error_message,
                  started_at, finished_at, created_at
        """,
        kind,
    )
    if row is None:
        raise BackupError("insert_failed", "Failed to create backup row", 500)
    record = _row_to_record(row)

    backups_dir = _get_backups_dir()
    backups_dir.mkdir(parents=True, exist_ok=True)

    # File name embeds the row id + kind + UTC timestamp; schema-only dumps
    # get a distinguishing ".ddl.sql" suffix.
    timestamp = datetime.now(UTC).strftime("%Y%m%dT%H%M%SZ")
    suffix = ".ddl.sql" if kind == "schema-only" else ".sql"
    filename = f"backup_{record.id}_{kind}_{timestamp}{suffix}"
    file_path = str(backups_dir / filename)

    # Avoid an import cycle: jobs.service is imported lazily because
    # ``jobs/__init__`` re-exports the admin router which can transitively
    # touch this module.
    from ..jobs.service import enqueue

    from ._backup_job import JOB_NAME

    # The idempotency key is derived from the row id, so re-running this
    # enqueue for the same backup cannot create duplicate jobs.
    await enqueue(
        conn,
        name=JOB_NAME,
        payload={
            "backup_id": str(record.id),
            "kind": kind,
            "file_path": file_path,
        },
        idempotency_key=f"admin_backup:{record.id}",
    )

    return record
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
async def list_backups(
    conn: asyncpg.Connection,
    *,
    limit: int = 50,
    offset: int = 0,
) -> list[BackupRecord]:
    """Return one page of backup records, newest first."""
    query = """
        select id, kind, status, size, file_path, error_message,
               started_at, finished_at, created_at
        from admin.backups
        order by created_at desc
        limit $1 offset $2
        """
    rows = await conn.fetch(query, limit, offset)
    return [_row_to_record(row) for row in rows]
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
async def get_backup(conn: asyncpg.Connection, backup_id: UUID) -> BackupRecord | None:
    """Fetch a single backup record by id; None when it does not exist."""
    row = await conn.fetchrow(
        """
        select id, kind, status, size, file_path, error_message,
               started_at, finished_at, created_at
        from admin.backups
        where id = $1
        """,
        backup_id,
    )
    if row is None:
        return None
    return _row_to_record(row)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
async def count_backups(conn: asyncpg.Connection) -> int:
    """Total number of rows in admin.backups."""
    total = await conn.fetchval("select count(*) from admin.backups")
    return 0 if not total else total
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
_SIGNED_URL_TTL_S = 600 # 10 minutes
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def _signing_secret() -> str:
    """Return the HMAC secret for backup download tokens.

    When configured, the storage signed-URL secret is used with a
    ``backups:`` prefix so the derived key differs from the raw storage
    secret. When unconfigured, a fixed dev-only value is returned and a
    warning is logged (mirrors the storage module's dev-friendly posture).
    """
    configured = get_settings().storage_signed_url_secret
    if configured is not None:
        return f"backups:{configured}"
    logger.warning(
        "backups: STORAGE_SIGNED_URL_SECRET not set; "
        "download tokens are trivially forgeable in dev mode"
    )
    return "backups-dev-secret-not-for-production"
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def generate_download_token(backup_id: UUID) -> str:
    """Mint a time-limited HMAC token authorizing download of one backup.

    Token format: ``<backup_id>:<expiry unix ts>:<hex sha256 hmac>``.
    Valid for _SIGNED_URL_TTL_S seconds; checked by
    ``verify_download_token``.
    """
    expiry = int(time.time()) + _SIGNED_URL_TTL_S
    signed_part = f"{backup_id}:{expiry}"
    digest = hmac.new(
        _signing_secret().encode(), signed_part.encode(), hashlib.sha256
    ).hexdigest()
    return f"{signed_part}:{digest}"
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def verify_download_token(token: str) -> UUID | None:
    """Check a download token; return its backup_id when valid, else None.

    Rejects tokens that are malformed, expired, or carry a bad signature.
    Signatures are compared with ``hmac.compare_digest`` so the check is
    constant-time.
    """
    try:
        backup_id_str, expires_str, provided_sig = token.split(":")
    except ValueError:  # wrong number of ':'-separated fields
        return None
    try:
        expires_at = int(expires_str)
    except ValueError:
        return None
    if time.time() > expires_at:
        return None
    signed_part = f"{backup_id_str}:{expires_at}"
    good_sig = hmac.new(
        _signing_secret().encode(), signed_part.encode(), hashlib.sha256
    ).hexdigest()
    if not hmac.compare_digest(good_sig, provided_sig):
        return None
    try:
        return UUID(backup_id_str)
    except ValueError:
        return None
|
supython/body_size.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
"""Reject oversized request bodies before they reach the app.
|
|
2
|
+
|
|
3
|
+
The cap is the first line of defense for routes that accept JSON/form
|
|
4
|
+
payloads — auth, jobs control plane, realtime control, etc. Anything that
|
|
5
|
+
genuinely streams (storage uploads, functions) is exempted via path
|
|
6
|
+
prefix and governed by its own per-feature setting.
|
|
7
|
+
|
|
8
|
+
The motivation is concrete: argon2 hashes the entire submitted password,
|
|
9
|
+
so a multi-megabyte password DoS-es a worker. Bound the body, the worry
|
|
10
|
+
goes away.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import logging
|
|
14
|
+
from collections.abc import Awaitable, Callable
|
|
15
|
+
from typing import Any
|
|
16
|
+
|
|
17
|
+
from .settings import Settings, get_settings
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
ERR_BODY_TOO_LARGE = "body_too_large"
|
|
22
|
+
|
|
23
|
+
# Methods that may carry a body. We don't gate GET/HEAD/OPTIONS — even if
|
|
24
|
+
# a curious client attaches one, FastAPI ignores it for those methods.
|
|
25
|
+
_BODY_METHODS = frozenset({"POST", "PUT", "PATCH"})
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class _BodyTooLargeError(Exception):
|
|
29
|
+
"""Raised by the wrapped ASGI receive() once the cap is exceeded.
|
|
30
|
+
|
|
31
|
+
Propagates out of FastAPI's body-parsing code (``request.body()`` /
|
|
32
|
+
``request.json()``) and is caught by ``BodySizeLimitMiddleware``,
|
|
33
|
+
which converts it into a 413 response. Custom class so we don't
|
|
34
|
+
accidentally swallow legitimate exceptions from the inner app.
|
|
35
|
+
"""
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _path_matches(path: str, prefixes: tuple[str, ...]) -> bool:
|
|
39
|
+
return any(path == p or path.startswith(p + "/") for p in prefixes)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class BodySizeLimitMiddleware:
    """ASGI middleware that enforces ``security_max_body_bytes``.

    Two-layer defense:

    1. *Cheap path*: reject up-front when ``Content-Length`` declares a
       body larger than the cap. The inner app is never invoked.
    2. *Streaming path*: forward chunks to the inner app as they arrive
       while counting bytes. As soon as the cap is exceeded, the wrapped
       receive() raises :class:`_BodyTooLargeError`, which propagates out
       of the route handler and is caught here. The middleware then sends
       a 413 — provided the inner app hasn't already started a response.

    Streaming (rather than buffering) keeps memory bounded and lets
    routes that *do* legitimately stream (storage, functions, exempt by
    path prefix) operate without a copy.
    """

    def __init__(self, app: Any, settings: Settings | None = None) -> None:
        """Capture the inner app and pre-compute cap + exempt prefixes.

        ``settings`` is injectable for tests; otherwise the process-wide
        settings are used. The exempt list is a comma-separated string of
        path prefixes, split and stripped once here.
        """
        self.app = app
        self._settings = settings or get_settings()
        self._max_bytes = self._settings.security_max_body_bytes
        self._exempt = tuple(
            p.strip()
            for p in self._settings.security_body_limit_exempt_paths.split(",")
            if p.strip()
        )

    async def __call__(
        self,
        scope: dict[str, Any],
        receive: Callable[[], Awaitable[dict[str, Any]]],
        send: Callable[[dict[str, Any]], Awaitable[None]],
    ) -> None:
        """ASGI entry point: pass through or enforce the body cap."""
        # Non-HTTP scopes (websocket, lifespan) and a non-positive cap
        # (feature disabled) bypass enforcement entirely.
        if scope["type"] != "http" or self._max_bytes <= 0:
            await self.app(scope, receive, send)
            return

        # Only methods that may carry a body are gated (see _BODY_METHODS).
        method = scope.get("method", "").upper()
        if method not in _BODY_METHODS:
            await self.app(scope, receive, send)
            return

        # Streaming routes (storage, functions) are exempt by path prefix
        # and governed by their own per-feature limits.
        if _path_matches(scope.get("path", ""), self._exempt):
            await self.app(scope, receive, send)
            return

        # Cheap rejection via the declared Content-Length; a 413 has
        # already been sent when this returns False.
        if not await self._content_length_ok(scope, send):
            return

        await self._enforce_streaming(scope, receive, send)

    async def _content_length_ok(
        self,
        scope: dict[str, Any],
        send: Callable[[dict[str, Any]], Awaitable[None]],
    ) -> bool:
        """Check the declared Content-Length; send a 413 and return False
        when it is malformed or over the cap.

        Only the first content-length header is inspected (hence the
        ``break``). A missing header is fine — the streaming layer still
        counts actual bytes.
        """
        for name, value in scope.get("headers", []):
            if name == b"content-length":
                try:
                    declared = int(value)
                except ValueError:
                    await self._send_413(send, "Malformed Content-Length")
                    return False
                if declared > self._max_bytes:
                    await self._send_413(send)
                    return False
                break
        return True

    async def _enforce_streaming(
        self,
        scope: dict[str, Any],
        receive: Callable[[], Awaitable[dict[str, Any]]],
        send: Callable[[dict[str, Any]], Awaitable[None]],
    ) -> None:
        """Run the inner app with a byte-counting receive() wrapper.

        The wrapper raises :class:`_BodyTooLargeError` the moment the
        cumulative body size passes the cap; we convert that into a 413
        unless the inner app already started its response.
        """
        received = 0
        response_started = False

        async def _bounded_receive() -> dict[str, Any]:
            # Count only http.request chunks; other message types
            # (e.g. http.disconnect) pass through untouched.
            nonlocal received
            msg = await receive()
            if msg.get("type") == "http.request":
                received += len(msg.get("body", b""))
                if received > self._max_bytes:
                    raise _BodyTooLargeError()
            return msg

        async def _send_wrapper(msg: dict[str, Any]) -> None:
            # Track whether the response head left the building — once it
            # has, we can no longer substitute a 413.
            nonlocal response_started
            if msg["type"] == "http.response.start":
                response_started = True
            await send(msg)

        try:
            await self.app(scope, _bounded_receive, _send_wrapper)
        except _BodyTooLargeError:
            if not response_started:
                await self._send_413(send)
            else:
                # The app already committed to a response before we
                # noticed the overflow. We can't change the status — the
                # bytes have left the building — but we should make this
                # visible: the request was incomplete from our side.
                logger.warning(
                    "body-size: cap exceeded after response started "
                    "(path=%s); response delivered but body was truncated",
                    scope.get("path", ""),
                )

    async def _send_413(
        self,
        send: Callable[[dict[str, Any]], Awaitable[None]],
        message: str | None = None,
    ) -> None:
        """Emit a complete 413 JSON response directly as ASGI messages.

        Hand-built (no framework Response) because this can run before
        the inner app is ever invoked.
        """
        msg = message or (
            f"Request body exceeds maximum size of {self._max_bytes} bytes"
        )
        # NOTE(review): only double quotes are escaped when embedding the
        # message in JSON — sufficient for the fixed internal messages used
        # here, but don't pass arbitrary text in ``message``.
        body = (
            b'{"detail":{"code":"'
            + ERR_BODY_TOO_LARGE.encode("ascii")
            + b'","message":"'
            + msg.encode("utf-8").replace(b'"', b'\\"')
            + b'"}}'
        )
        await send(
            {
                "type": "http.response.start",
                "status": 413,
                # connection: close — the client may still be mid-upload;
                # closing avoids reading the rest of an oversized body.
                "headers": [
                    (b"content-type", b"application/json"),
                    (b"content-length", str(len(body)).encode("ascii")),
                    (b"connection", b"close"),
                ],
            }
        )
        await send(
            {
                "type": "http.response.body",
                "body": body,
                "more_body": False,
            }
        )
|