supython 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supython/__init__.py +8 -0
- supython/admin/__init__.py +3 -0
- supython/admin/api/__init__.py +24 -0
- supython/admin/api/auth.py +118 -0
- supython/admin/api/auth_templates.py +67 -0
- supython/admin/api/auth_users.py +225 -0
- supython/admin/api/db.py +174 -0
- supython/admin/api/functions.py +92 -0
- supython/admin/api/jobs.py +192 -0
- supython/admin/api/ops.py +224 -0
- supython/admin/api/realtime.py +281 -0
- supython/admin/api/service_auth.py +49 -0
- supython/admin/api/service_auth_templates.py +83 -0
- supython/admin/api/service_auth_users.py +346 -0
- supython/admin/api/service_db.py +214 -0
- supython/admin/api/service_functions.py +287 -0
- supython/admin/api/service_jobs.py +282 -0
- supython/admin/api/service_ops.py +213 -0
- supython/admin/api/service_realtime.py +30 -0
- supython/admin/api/service_storage.py +220 -0
- supython/admin/api/storage.py +117 -0
- supython/admin/api/system.py +37 -0
- supython/admin/audit.py +29 -0
- supython/admin/deps.py +22 -0
- supython/admin/errors.py +16 -0
- supython/admin/schemas.py +310 -0
- supython/admin/session.py +52 -0
- supython/admin/spa.py +38 -0
- supython/admin/static/assets/Alert-dluGVkos.js +49 -0
- supython/admin/static/assets/Audit-Njung3HI.js +2 -0
- supython/admin/static/assets/Backups-DzPlFgrm.js +2 -0
- supython/admin/static/assets/Buckets-ByacGkU1.js +2 -0
- supython/admin/static/assets/Channels-BoIuTtam.js +353 -0
- supython/admin/static/assets/ChevronRight-CtQH1EQ1.js +2 -0
- supython/admin/static/assets/CodeViewer-Bqy7-wvH.js +2 -0
- supython/admin/static/assets/Crons-B67vc39F.js +2 -0
- supython/admin/static/assets/DashboardView-CUTFVL6k.js +2 -0
- supython/admin/static/assets/DataTable-COAAWEft.js +747 -0
- supython/admin/static/assets/DescriptionsItem-P8JUDaBs.js +75 -0
- supython/admin/static/assets/DrawerContent-TpYTFgF1.js +139 -0
- supython/admin/static/assets/Empty-cr2r7e2u.js +25 -0
- supython/admin/static/assets/EmptyState-DeDck-OL.js +2 -0
- supython/admin/static/assets/Grid-hFkp9F4P.js +2 -0
- supython/admin/static/assets/Input-DppYTq9C.js +259 -0
- supython/admin/static/assets/Invoke-DW3Nveeh.js +2 -0
- supython/admin/static/assets/JsonField-DibyJgun.js +2 -0
- supython/admin/static/assets/LoginView-BjLyE3Ds.css +1 -0
- supython/admin/static/assets/LoginView-CoOjECT_.js +111 -0
- supython/admin/static/assets/Logs-D9WYrnIT.js +2 -0
- supython/admin/static/assets/Logs-DS1XPa0h.css +1 -0
- supython/admin/static/assets/Migrations-DOSC2ddQ.js +2 -0
- supython/admin/static/assets/ObjectBrowser-_5w8vOX8.js +2 -0
- supython/admin/static/assets/Queue-CywZs6vI.js +2 -0
- supython/admin/static/assets/RefreshTokens-Ccjr53jg.js +2 -0
- supython/admin/static/assets/RlsEditor-BSlH9vSc.js +2 -0
- supython/admin/static/assets/Routes-BiLXE49D.js +2 -0
- supython/admin/static/assets/Routes-C-ianIGD.css +1 -0
- supython/admin/static/assets/SchemaBrowser-DKy2_KQi.css +1 -0
- supython/admin/static/assets/SchemaBrowser-XFvFbtDB.js +2 -0
- supython/admin/static/assets/Select-DIzZyRZb.js +434 -0
- supython/admin/static/assets/Space-n5-XcguU.js +400 -0
- supython/admin/static/assets/SqlEditor-b8pTsILY.js +3 -0
- supython/admin/static/assets/SqlWorkspace-BUS7IntH.js +104 -0
- supython/admin/static/assets/TableData-CQIagLKn.js +2 -0
- supython/admin/static/assets/Tag-D1fOKpTH.js +72 -0
- supython/admin/static/assets/Templates-BS-ugkdq.js +2 -0
- supython/admin/static/assets/Thing-CEAniuMg.js +107 -0
- supython/admin/static/assets/Users-wzwajhlh.js +2 -0
- supython/admin/static/assets/_plugin-vue_export-helper-DGA9ry_j.js +1 -0
- supython/admin/static/assets/dist-VXIJLCYq.js +13 -0
- supython/admin/static/assets/format-length-CGCY1rMh.js +2 -0
- supython/admin/static/assets/get-Ca6unauB.js +2 -0
- supython/admin/static/assets/index-CeE6v959.js +951 -0
- supython/admin/static/assets/pinia-COXwfrOX.js +2 -0
- supython/admin/static/assets/resources-Bt6thQCD.js +44 -0
- supython/admin/static/assets/use-locale-mtgM0a3a.js +2 -0
- supython/admin/static/assets/use-merged-state-BvhkaHNX.js +2 -0
- supython/admin/static/assets/useConfirm-tMjvBFXR.js +2 -0
- supython/admin/static/assets/useResource-C_rJCY8C.js +2 -0
- supython/admin/static/assets/useTable-CnZc5zhi.js +363 -0
- supython/admin/static/assets/useTable-Dg0XlRlq.css +1 -0
- supython/admin/static/assets/useToast-DsZKx0IX.js +2 -0
- supython/admin/static/assets/utils-sbXoq7Ir.js +2 -0
- supython/admin/static/favicon.svg +1 -0
- supython/admin/static/icons.svg +24 -0
- supython/admin/static/index.html +24 -0
- supython/app.py +149 -0
- supython/auth/__init__.py +3 -0
- supython/auth/_email_job.py +11 -0
- supython/auth/providers/__init__.py +34 -0
- supython/auth/providers/github.py +22 -0
- supython/auth/providers/google.py +19 -0
- supython/auth/providers/oauth.py +56 -0
- supython/auth/providers/registry.py +16 -0
- supython/auth/ratelimit.py +39 -0
- supython/auth/router.py +282 -0
- supython/auth/schemas.py +79 -0
- supython/auth/service.py +587 -0
- supython/body_size.py +184 -0
- supython/cli.py +1653 -0
- supython/client/__init__.py +67 -0
- supython/client/_auth.py +249 -0
- supython/client/_client.py +145 -0
- supython/client/_config.py +92 -0
- supython/client/_functions.py +69 -0
- supython/client/_storage.py +255 -0
- supython/client/py.typed +0 -0
- supython/db.py +151 -0
- supython/db_admin.py +8 -0
- supython/functions/__init__.py +19 -0
- supython/functions/context.py +262 -0
- supython/functions/loader.py +307 -0
- supython/functions/router.py +228 -0
- supython/functions/schemas.py +50 -0
- supython/gen/__init__.py +5 -0
- supython/gen/_introspect.py +137 -0
- supython/gen/types_py.py +270 -0
- supython/gen/types_ts.py +365 -0
- supython/health.py +229 -0
- supython/hooks.py +117 -0
- supython/jobs/__init__.py +31 -0
- supython/jobs/backends.py +97 -0
- supython/jobs/context.py +58 -0
- supython/jobs/cron.py +152 -0
- supython/jobs/cron_inproc.py +118 -0
- supython/jobs/decorators.py +76 -0
- supython/jobs/registry.py +79 -0
- supython/jobs/router.py +136 -0
- supython/jobs/schemas.py +92 -0
- supython/jobs/service.py +311 -0
- supython/jobs/worker.py +219 -0
- supython/jwks.py +257 -0
- supython/keyset.py +279 -0
- supython/logging_config.py +291 -0
- supython/mail.py +33 -0
- supython/mailer.py +65 -0
- supython/migrate.py +81 -0
- supython/migrations/0001_extensions_and_roles.sql +46 -0
- supython/migrations/0002_auth_schema.sql +66 -0
- supython/migrations/0003_demo_todos.sql +42 -0
- supython/migrations/0004_auth_v0_2.sql +47 -0
- supython/migrations/0005_storage_schema.sql +117 -0
- supython/migrations/0006_realtime_schema.sql +206 -0
- supython/migrations/0007_jobs_schema.sql +254 -0
- supython/migrations/0008_jobs_last_error.sql +56 -0
- supython/migrations/0009_auth_rate_limits.sql +33 -0
- supython/migrations/0010_worker_heartbeat.sql +14 -0
- supython/migrations/0011_admin_schema.sql +45 -0
- supython/migrations/0012_auth_banned_until.sql +10 -0
- supython/migrations/0013_email_templates.sql +19 -0
- supython/migrations/0014_realtime_payload_warning.sql +96 -0
- supython/migrations/0015_backups_schema.sql +14 -0
- supython/passwords.py +15 -0
- supython/realtime/__init__.py +6 -0
- supython/realtime/broker.py +814 -0
- supython/realtime/protocol.py +234 -0
- supython/realtime/router.py +184 -0
- supython/realtime/schemas.py +207 -0
- supython/realtime/service.py +261 -0
- supython/realtime/topics.py +175 -0
- supython/realtime/websocket.py +586 -0
- supython/scaffold/__init__.py +5 -0
- supython/scaffold/init_project.py +133 -0
- supython/scaffold/templates/Caddyfile.tmpl +4 -0
- supython/scaffold/templates/README.md.tmpl +22 -0
- supython/scaffold/templates/docker-compose.prod.yml.tmpl +84 -0
- supython/scaffold/templates/docker-compose.yml.tmpl +41 -0
- supython/scaffold/templates/docker_postgres_Dockerfile.tmpl +9 -0
- supython/scaffold/templates/docker_postgres_postgresql.conf.tmpl +3 -0
- supython/scaffold/templates/env.example.tmpl +149 -0
- supython/scaffold/templates/functions_README.md.tmpl +21 -0
- supython/scaffold/templates/gitignore.tmpl +14 -0
- supython/scaffold/templates/migrations/.gitkeep +0 -0
- supython/secretset.py +347 -0
- supython/security_headers.py +78 -0
- supython/settings.py +198 -0
- supython/storage/__init__.py +5 -0
- supython/storage/backends.py +392 -0
- supython/storage/router.py +341 -0
- supython/storage/schemas.py +50 -0
- supython/storage/service.py +445 -0
- supython/storage/signing.py +119 -0
- supython/tokens.py +85 -0
- supython-0.5.0.dist-info/METADATA +714 -0
- supython-0.5.0.dist-info/RECORD +188 -0
- supython-0.5.0.dist-info/WHEEL +4 -0
- supython-0.5.0.dist-info/entry_points.txt +2 -0
- supython-0.5.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
import httpx
|
|
7
|
+
|
|
8
|
+
from ._auth import SupythonResponse
|
|
9
|
+
from ._config import _parse_error_detail
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass
class StorageError:
    """Error returned (not raised) by storage client calls.

    Carried in ``SupythonResponse.error`` so callers branch on the result
    instead of catching exceptions.
    """

    code: str  # machine-readable code, e.g. "network_error" for transport failures
    message: str  # human-readable description of what went wrong
    status: int  # HTTP status code; 0 when the request never reached the server
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class BucketResponse:
    """A storage bucket as returned by the bucket API endpoints."""

    id: str  # server-assigned bucket id (stringified)
    name: str  # unique bucket name used in object URLs
    owner: str | None  # owning user id, or None when unowned
    public: bool  # whether objects are readable without authentication
    file_size_limit: int | None  # per-object size cap; None = server default
    allowed_mime_types: list[str] | None  # MIME allow-list; None = no restriction
    created_at: str  # creation timestamp as returned by the server (string form)
    updated_at: str  # last-modified timestamp as returned by the server
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass
class ObjectResponse:
    """A stored object's metadata as returned by the object API."""

    id: str  # server-assigned object id (stringified)
    bucket_id: str  # id of the containing bucket (stringified)
    bucket: str  # name of the containing bucket
    name: str  # object path within the bucket
    owner: str  # uploading user id (stringified)
    size: int  # object size in bytes
    mime_type: str | None  # content type, when known
    etag: str | None  # entity tag, when the backend provides one
    created_at: str  # creation timestamp (server string form)
    updated_at: str  # last-modified timestamp (server string form)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass
class SignedUrlResponse:
    """A server-minted, time-limited URL for one object."""

    signed_url: str  # full URL including the signature token
    token: str  # the bare signature token
    expires_at: str  # absolute expiry timestamp (server string form)
    expires_in: int  # lifetime in seconds from issuance
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _make_storage_error(resp: httpx.Response) -> StorageError:
    """Translate a non-2xx storage response into a :class:`StorageError`.

    When the body is valid JSON, the code/message pair is extracted with
    ``_parse_error_detail``; otherwise the raw text (or an ``HTTP <status>``
    placeholder) is wrapped as a ``network_error``.
    """
    try:
        payload = resp.json()
    except Exception:
        fallback = resp.text or f"HTTP {resp.status_code}"
        return StorageError("network_error", fallback, resp.status_code)
    code, message = _parse_error_detail(payload)
    return StorageError(code, message, resp.status_code)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _parse_bucket(body: dict[str, Any]) -> BucketResponse:
    """Build a :class:`BucketResponse` from a raw JSON payload."""
    raw_owner = body.get("owner")
    return BucketResponse(
        id=str(body["id"]),
        name=body["name"],
        # Falsy/missing owner is normalised to None.
        owner=str(raw_owner) if raw_owner else None,
        public=body["public"],
        file_size_limit=body.get("file_size_limit"),
        allowed_mime_types=body.get("allowed_mime_types"),
        created_at=body["created_at"],
        updated_at=body["updated_at"],
    )
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _parse_object(body: dict[str, Any]) -> ObjectResponse:
    """Build an :class:`ObjectResponse` from a raw JSON payload."""
    return ObjectResponse(
        id=str(body["id"]),
        bucket_id=str(body["bucket_id"]),
        bucket=body["bucket"],
        name=body["name"],
        owner=str(body["owner"]),
        size=body["size"],
        # Optional fields default to None when absent from the payload.
        mime_type=body.get("mime_type"),
        etag=body.get("etag"),
        created_at=body["created_at"],
        updated_at=body["updated_at"],
    )
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class StorageBucket:
    """Operations scoped to one named bucket.

    Created by :meth:`StorageClient.from_`; shares the parent client's HTTP
    session and auth headers. Every call returns a ``SupythonResponse``
    carrying either ``data`` or a ``StorageError`` (status 0 for transport
    failures that never produced an HTTP response).
    """

    def __init__(self, client: StorageClient, bucket_name: str) -> None:
        self._client = client
        self._bucket_name = bucket_name

    def _headers(self) -> dict[str, str]:
        # Header assembly lives on the parent client so auth stays in one place.
        return self._client._headers()

    def _object_url(self, path: str) -> str:
        # Canonical object endpoint for this bucket.
        return f"{self._client._url}/object/{self._bucket_name}/{path}"

    async def upload(
        self,
        path: str,
        data: bytes,
        *,
        content_type: str | None = None,
    ) -> SupythonResponse[ObjectResponse]:
        """Upload *data* to *path* as a multipart ``file`` field."""
        form = {"file": (path, data, content_type or "application/octet-stream")}
        try:
            resp = await self._client._http.post(
                self._object_url(path), files=form, headers=self._headers()
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))
        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))
        return SupythonResponse(data=_parse_object(resp.json()))

    async def download(self, path: str) -> SupythonResponse[bytes]:
        """Fetch the raw bytes stored at *path*."""
        try:
            resp = await self._client._http.get(
                self._object_url(path), headers=self._headers()
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))
        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))
        return SupythonResponse(data=resp.content)

    async def remove(self, path: str) -> SupythonResponse[None]:
        """Delete the object at *path*."""
        try:
            resp = await self._client._http.delete(
                self._object_url(path), headers=self._headers()
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))
        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))
        return SupythonResponse(data=None)

    async def create_signed_url(
        self, path: str, *, expires_in: int | None = None
    ) -> SupythonResponse[SignedUrlResponse]:
        """Ask the server to mint a time-limited signed URL for *path*.

        When *expires_in* is omitted, no body is sent and the server's
        default lifetime applies.
        """
        payload: dict[str, Any] = {}
        if expires_in is not None:
            payload["expires_in"] = expires_in
        try:
            resp = await self._client._http.post(
                f"{self._client._url}/object/sign/{self._bucket_name}/{path}",
                json=payload or None,
                headers=self._headers(),
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))
        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))
        body = resp.json()
        return SupythonResponse(
            data=SignedUrlResponse(
                signed_url=body["signed_url"],
                token=body["token"],
                expires_at=body["expires_at"],
                expires_in=body["expires_in"],
            )
        )

    def get_public_url(self, path: str) -> str:
        """Return the unauthenticated URL for an object in a public bucket."""
        return f"{self._client._url}/object/public/{self._bucket_name}/{path}"
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
class StorageClient:
    """Async client for the storage API (bucket CRUD + per-bucket views).

    Owns a dedicated ``httpx.AsyncClient``; call :meth:`aclose` when the
    client is no longer needed so the underlying connection pool is
    released (previously the client was never closed — a resource leak).
    """

    def __init__(self, base_url: str, anon_key: str, client: Any) -> None:
        self._url = base_url
        self._anon_key = anon_key
        self._client = client  # parent client; supplies the current access token
        self._http = httpx.AsyncClient()

    async def aclose(self) -> None:
        """Release the underlying HTTP connection pool.

        Fix: ``__init__`` creates an ``httpx.AsyncClient`` that was never
        closed anywhere, leaking connections when a StorageClient was
        discarded. Callers should ``await storage.aclose()`` on shutdown.
        """
        await self._http.aclose()

    def _headers(self) -> dict[str, str]:
        """Auth headers: API key, plus bearer token when a session exists."""
        headers: dict[str, str] = {}
        if self._anon_key:
            headers["apikey"] = self._anon_key
        access_token = self._client._access_token
        if access_token:
            headers["Authorization"] = f"Bearer {access_token}"
        return headers

    def from_(self, bucket_name: str) -> StorageBucket:
        """Return a bucket-scoped view (trailing underscore avoids the keyword)."""
        return StorageBucket(self, bucket_name)

    async def create_bucket(
        self,
        *,
        name: str,
        public: bool = False,
        file_size_limit: int | None = None,
        allowed_mime_types: list[str] | None = None,
    ) -> SupythonResponse[BucketResponse]:
        """Create a bucket; optional limits are only sent when provided."""
        body: dict[str, Any] = {"name": name, "public": public}
        if file_size_limit is not None:
            body["file_size_limit"] = file_size_limit
        if allowed_mime_types is not None:
            body["allowed_mime_types"] = allowed_mime_types

        try:
            resp = await self._http.post(
                f"{self._url}/bucket", json=body, headers=self._headers()
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))

        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))

        return SupythonResponse(data=_parse_bucket(resp.json()))

    async def list_buckets(self) -> SupythonResponse[list[BucketResponse]]:
        """List all buckets visible to the current credentials."""
        try:
            resp = await self._http.get(f"{self._url}/bucket", headers=self._headers())
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))

        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))

        return SupythonResponse(data=[_parse_bucket(b) for b in resp.json()])

    async def get_bucket(self, name: str) -> SupythonResponse[BucketResponse]:
        """Fetch a single bucket by name."""
        try:
            resp = await self._http.get(
                f"{self._url}/bucket/{name}", headers=self._headers()
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))

        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))

        return SupythonResponse(data=_parse_bucket(resp.json()))

    async def delete_bucket(self, name: str) -> SupythonResponse[None]:
        """Delete a bucket by name."""
        try:
            resp = await self._http.delete(
                f"{self._url}/bucket/{name}", headers=self._headers()
            )
        except httpx.HTTPError as exc:
            return SupythonResponse(error=StorageError("network_error", str(exc), 0))

        if resp.status_code >= 400:
            return SupythonResponse(error=_make_storage_error(resp))

        return SupythonResponse(data=None)
|
supython/client/py.typed
ADDED
|
File without changes
|
supython/db.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import json
|
|
3
|
+
import logging
|
|
4
|
+
import os
|
|
5
|
+
from collections.abc import AsyncGenerator
|
|
6
|
+
from contextlib import asynccontextmanager
|
|
7
|
+
from typing import Any, cast
|
|
8
|
+
|
|
9
|
+
import asyncpg
|
|
10
|
+
from fastapi import FastAPI
|
|
11
|
+
|
|
12
|
+
from .settings import get_settings
|
|
13
|
+
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
_pool: asyncpg.Pool | None = None
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
async def _connection_setup(conn: asyncpg.Connection) -> None:
    """Pool connection hook: apply the configured statement timeout.

    A non-positive setting disables the timeout entirely.
    """
    limit = get_settings().db_statement_timeout_ms
    if limit > 0:
        await conn.execute(f"set statement_timeout = {int(limit)}")
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
async def init_pool() -> asyncpg.Pool:
    """Create the process-wide pool on first call; later calls reuse it."""
    global _pool
    if _pool is not None:
        return _pool
    settings = get_settings()
    _pool = await asyncpg.create_pool(
        settings.database_url,
        min_size=settings.db_pool_min_size,
        max_size=settings.db_pool_max_size,
        setup=_connection_setup,  # applies per-connection statement timeout
    )
    return _pool
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
async def close_pool() -> None:
    """Close and forget the module-level pool; a no-op when none exists."""
    global _pool
    if _pool is None:
        return
    await _pool.close()
    _pool = None
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def get_pool() -> asyncpg.Pool:
    """Return the live pool, raising if :func:`init_pool` has not run."""
    pool = _pool
    if pool is None:
        raise RuntimeError("DB pool not initialised. Call init_pool() first.")
    return pool
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@asynccontextmanager
async def acquire() -> AsyncGenerator[asyncpg.Connection, None]:
    """Check a plain connection (no role switch, no GUCs) out of the pool."""
    async with get_pool().acquire() as raw:
        # asyncpg types acquire() loosely; narrow for callers.
        yield cast(asyncpg.Connection, raw)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
@asynccontextmanager
async def as_role(role: str, claims: dict[str, Any]) -> AsyncGenerator[asyncpg.Connection, None]:
    """Yield a connection switched to *role* with *claims* as the JWT GUC.

    Mirrors PostgREST's per-request role switch: SQL run on the yielded
    connection gets the same RLS verdict a PostgREST request with the same
    JWT payload would. *role* must appear in ``db_allowed_roles``; both the
    role and the ``request.jwt.claims`` GUC are set with ``LOCAL`` scope
    inside a transaction, so they reset before the connection is returned
    to the pool.
    """
    db_pool = get_pool()

    allowed = get_settings().db_allowed_roles
    if role not in allowed:
        raise ValueError(f"role {role!r} not in {sorted(allowed)}")

    async with db_pool.acquire() as scoped, scoped.transaction():
        await scoped.execute(f'set local role "{role}"')
        await scoped.execute(
            "select set_config('request.jwt.claims', $1, true)",
            json.dumps(claims),
        )
        yield cast(asyncpg.Connection, scoped)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@asynccontextmanager
async def as_service_role(
    *,
    claims: dict[str, Any] | None = None,
) -> AsyncGenerator[asyncpg.Connection, None]:
    """Yield a connection elevated to ``service_role`` for internal work.

    Unlike :func:`as_role` — the JWT-driven switch PostgREST mirrors — the
    role here is chosen by framework code, never by a token's ``role``
    claim. ``service_role`` bypasses RLS.

    When *claims* is given it is installed as ``request.jwt.claims`` via
    ``set_config(..., true)`` so helpers like ``auth.uid()`` report the
    acting user inside the block. This is purely informational: it neither
    grants nor restricts anything, it just makes audit/stamping helpers
    return meaningful values during server-side work done on a user's
    behalf.

    ``SET LOCAL ROLE`` and the transaction-scoped GUC both reset on
    ``COMMIT``, so the connection is clean before it re-enters the pool.
    """
    db_pool = get_pool()
    async with db_pool.acquire() as elevated, elevated.transaction():
        await elevated.execute("set local role service_role")
        if claims is not None:
            await elevated.execute(
                "select set_config('request.jwt.claims', $1, true)",
                json.dumps(claims),
            )
        yield cast(asyncpg.Connection, elevated)
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def _maybe_enable_slow_callback_warnings() -> None:
    """Enable asyncio debug mode when ``SUPYTHON_SLOW_CALLBACK_MS`` is set.

    The CLI's ``dev`` command sets this env var; production paths leave it
    unset so the non-trivial debug-mode overhead is never paid there.
    Non-integer or non-positive values are ignored.
    """
    raw_value = os.environ.get("SUPYTHON_SLOW_CALLBACK_MS")
    if not raw_value:
        return
    try:
        limit_ms = int(raw_value)
    except ValueError:
        logger.warning("SUPYTHON_SLOW_CALLBACK_MS=%r is not an int; ignoring", raw_value)
        return
    if limit_ms <= 0:
        return

    loop = asyncio.get_running_loop()
    loop.set_debug(True)
    loop.slow_callback_duration = limit_ms / 1000.0
    logger.info(
        "asyncio debug enabled; warning on callbacks > %dms (dev mode)",
        limit_ms,
    )
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: guarantee a pool for the app's lifetime.

    Only closes the pool on shutdown when this hook was the one that
    created it, so a pool installed beforehand (e.g. by tests) survives
    the app shutting down.
    """
    _maybe_enable_slow_callback_warnings()
    owns_pool = _pool is None
    await init_pool()
    try:
        yield
    finally:
        if owns_pool:
            await close_pool()
|
supython/db_admin.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"""Database administration helpers."""
|
|
2
|
+
|
|
3
|
+
import asyncpg
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
async def rotate_role_password(conn: asyncpg.Connection, role: str, password: str) -> None:
    """Set a new password for *role* via ``ALTER ROLE``.

    ``ALTER ROLE`` cannot take bind parameters, so both the role name (an
    identifier) and the password (a literal) must be escaped by hand.

    Fix: *role* was previously interpolated raw into the statement, which
    allowed SQL injection through the role argument and broke role names
    that require quoting. The identifier is now double-quoted with embedded
    quotes doubled, matching PostgreSQL's quoting rules. (Note: quoting
    makes the name case-sensitive, as quoted identifiers always are.)
    """
    quoted_role = '"' + role.replace('"', '""') + '"'
    quoted_password = "'" + password.replace("'", "''") + "'"
    await conn.execute(f"alter role {quoted_role} with password {quoted_password}")
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"""Filesystem-loaded edge functions.
|
|
2
|
+
|
|
3
|
+
Layout: every ``*.py`` under ``settings.functions_dir`` becomes a route at
|
|
4
|
+
``/functions/<relative path without .py>``. See :mod:`.loader` for discovery
|
|
5
|
+
rules and :mod:`.router` for the dispatcher contract.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from .context import Ctx, FunctionUser, PostgrestClient, StorageClient
|
|
9
|
+
from .loader import FunctionRegistry
|
|
10
|
+
from .schemas import FunctionMeta
|
|
11
|
+
|
|
12
|
+
__all__ = [
|
|
13
|
+
"Ctx",
|
|
14
|
+
"FunctionUser",
|
|
15
|
+
"FunctionMeta",
|
|
16
|
+
"FunctionRegistry",
|
|
17
|
+
"PostgrestClient",
|
|
18
|
+
"StorageClient",
|
|
19
|
+
]
|
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
"""Per-request context object passed to every user function.
|
|
2
|
+
|
|
3
|
+
This is the *one* module in supython that is allowed to import across feature
|
|
4
|
+
packages: it composes ``storage``, ``mailer``, and ``postgrest`` into the
|
|
5
|
+
``Ctx`` value that handlers receive. Functions are the edge layer — they
|
|
6
|
+
exist precisely so user code can stitch sibling subsystems together — so the
|
|
7
|
+
loader/dispatcher pair is the deliberate exception to the no-cross-import
|
|
8
|
+
rule that applies elsewhere.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
from collections.abc import AsyncIterator, Awaitable, Callable
|
|
14
|
+
from dataclasses import dataclass, field
|
|
15
|
+
from typing import TYPE_CHECKING, Any
|
|
16
|
+
from uuid import UUID
|
|
17
|
+
|
|
18
|
+
import asyncpg
|
|
19
|
+
import httpx
|
|
20
|
+
|
|
21
|
+
from ..mailer import EmailBackend, EmailMessage, get_mailer
|
|
22
|
+
from ..settings import Settings, get_settings
|
|
23
|
+
from ..storage import service as storage_service
|
|
24
|
+
from ..storage.backends import StorageBackend, get_backend
|
|
25
|
+
from ..storage.schemas import ObjectResponse, SignedUrlResponse
|
|
26
|
+
|
|
27
|
+
if TYPE_CHECKING: # pragma: no cover
|
|
28
|
+
from fastapi import Request
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# ---------------------------------------------------------------------------
|
|
32
|
+
# User
|
|
33
|
+
# ---------------------------------------------------------------------------
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class FunctionUser:
    """Caller identity decoded from the bearer token, or ``None`` for anon."""

    id: UUID | None
    email: str | None
    role: str
    claims: dict[str, Any] = field(default_factory=dict)

    @classmethod
    def from_claims(cls, claims: dict[str, Any]) -> FunctionUser:
        """Build an identity from raw JWT claims, tolerating junk values.

        A malformed ``sub`` yields ``id=None``; non-string ``email`` becomes
        ``None``; a missing or non-string ``role`` falls back to ``"anon"``.
        The full claims dict is kept verbatim.
        """
        raw_sub = claims.get("sub")
        user_id: UUID | None = None
        if isinstance(raw_sub, str):
            try:
                user_id = UUID(raw_sub)
            except ValueError:
                user_id = None
        raw_email = claims.get("email")
        raw_role = claims.get("role") or "anon"
        return cls(
            id=user_id,
            email=raw_email if isinstance(raw_email, str) else None,
            role=raw_role if isinstance(raw_role, str) else "anon",
            claims=claims,
        )
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# ---------------------------------------------------------------------------
|
|
63
|
+
# Storage facade
|
|
64
|
+
# ---------------------------------------------------------------------------
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class StorageClient:
    """Facade over ``storage.service`` bound to the request's connection.

    Authorization still flows entirely through the role-scoped connection;
    this class only saves handlers from threading ``conn`` and ``backend``
    by hand, so they can write ``await ctx.storage.upload(...)``.
    """

    def __init__(self, conn: asyncpg.Connection, backend: StorageBackend) -> None:
        self._db = conn
        self._store = backend

    async def upload(
        self,
        *,
        bucket: str,
        path: str,
        data: AsyncIterator[bytes],
        content_type: str | None = None,
    ) -> ObjectResponse:
        """Stream *data* into ``bucket``/``path`` via the storage service."""
        return await storage_service.upload_object(
            self._db,
            self._store,
            bucket_name=bucket,
            path=path,
            data=data,
            content_type=content_type,
        )

    async def download(
        self,
        *,
        bucket: str,
        path: str,
        byte_range: tuple[int, int | None] | None = None,
    ):
        """Read the object at ``bucket``/``path``; *byte_range* is optional."""
        return await storage_service.download_object(
            self._db,
            self._store,
            bucket_name=bucket,
            path=path,
            byte_range=byte_range,
        )

    async def delete(self, *, bucket: str, path: str) -> None:
        """Remove the object at ``bucket``/``path``."""
        await storage_service.delete_object(
            self._db, self._store, bucket_name=bucket, path=path
        )

    async def get_metadata(self, *, bucket: str, path: str) -> ObjectResponse:
        """Fetch object metadata without touching the stored bytes."""
        return await storage_service.get_object_metadata(self._db, bucket, path)

    async def sign(
        self, *, bucket: str, path: str, expires_in: int | None = None
    ) -> SignedUrlResponse:
        """Mint a signed URL for the object; server default lifetime if unset."""
        return await storage_service.issue_signed_url(
            self._db, bucket_name=bucket, path=path, expires_in=expires_in
        )
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
# ---------------------------------------------------------------------------
|
|
139
|
+
# PostgREST forwarding client
|
|
140
|
+
# ---------------------------------------------------------------------------
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
class PostgrestClient:
    """Request-scoped HTTP client aimed at the configured PostgREST.

    An authenticated caller's bearer token is forwarded so the upstream RLS
    verdict matches whatever ``ctx.db`` would see; anonymous callers send no
    ``Authorization`` header, letting PostgREST resolve them to its ``anon``
    role. The dispatcher builds one per request and ``aclose()``s it in a
    ``finally`` block — treat it as a borrowed handle.
    """

    def __init__(self, base_url: str, jwt: str | None) -> None:
        default_headers: dict[str, str] = {}
        if jwt:
            default_headers["Authorization"] = f"Bearer {jwt}"
        self._http = httpx.AsyncClient(
            base_url=base_url.rstrip("/"),
            headers=default_headers,
        )

    async def get(self, url: str, **kw: Any) -> httpx.Response:
        return await self._http.get(url, **kw)

    async def post(self, url: str, **kw: Any) -> httpx.Response:
        return await self._http.post(url, **kw)

    async def patch(self, url: str, **kw: Any) -> httpx.Response:
        return await self._http.patch(url, **kw)

    async def put(self, url: str, **kw: Any) -> httpx.Response:
        return await self._http.put(url, **kw)

    async def delete(self, url: str, **kw: Any) -> httpx.Response:
        return await self._http.delete(url, **kw)

    async def request(self, method: str, url: str, **kw: Any) -> httpx.Response:
        """Escape hatch for verbs without a dedicated wrapper."""
        return await self._http.request(method, url, **kw)

    async def aclose(self) -> None:
        """Release the underlying connection; called by the dispatcher."""
        await self._http.aclose()
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
# ---------------------------------------------------------------------------
|
|
187
|
+
# send_email kwargs wrapper
|
|
188
|
+
# ---------------------------------------------------------------------------
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _make_send_email(
    backend: EmailBackend,
) -> Callable[..., Awaitable[None]]:
    """Wrap ``backend.send(EmailMessage)`` in the kwargs form handlers use.

    The documented surface is
    ``await ctx.send_email(to="x@y", subject="Hi", text="...", html=None)``;
    ``to`` may be a single address or a list of addresses.
    """

    async def send_email(
        *,
        to: str | list[str],
        subject: str,
        text: str,
        html: str | None = None,
    ) -> None:
        if isinstance(to, str):
            recipients = [to]
        else:
            recipients = list(to)
        await backend.send(
            EmailMessage(to=recipients, subject=subject, text=text, html=html)
        )

    return send_email
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
# ---------------------------------------------------------------------------
|
|
215
|
+
# Ctx
|
|
216
|
+
# ---------------------------------------------------------------------------
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
@dataclass
class Ctx:
    """The ``ctx`` argument every user handler receives.

    Lifetime: one HTTP request. ``db`` is a live, role-scoped connection
    already inside ``db.as_role(...)`` — handlers can call
    ``await ctx.db.fetchrow(...)`` directly. ``postgrest`` is closed by the
    dispatcher in ``finally``; everything else is plain references.
    """

    db: asyncpg.Connection  # role-scoped connection for this request
    user: FunctionUser | None  # decoded caller identity; None for anon
    storage: StorageClient  # storage facade bound to ``db``
    postgrest: PostgrestClient  # forwards the caller's bearer token when present
    send_email: Callable[..., Awaitable[None]]  # kwargs wrapper over the mailer backend
    request: Request  # the underlying FastAPI request
    settings: Settings  # resolved application settings
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def build_ctx(
    *,
    conn: asyncpg.Connection,
    user: FunctionUser | None,
    request: Request,
    raw_jwt: str | None,
    backend: StorageBackend | None = None,
    mailer: EmailBackend | None = None,
    settings: Settings | None = None,
) -> Ctx:
    """Assemble a ``Ctx`` for one request.

    Kept as a free function (rather than a ``Ctx`` classmethod) so tests can
    construct a context directly with fake ``backend`` / ``mailer`` values
    without going through the dispatcher.
    """
    cfg = settings or get_settings()
    return Ctx(
        db=conn,
        user=user,
        storage=StorageClient(conn, backend or get_backend()),
        postgrest=PostgrestClient(cfg.postgrest_url, raw_jwt),
        send_email=_make_send_email(mailer or get_mailer()),
        request=request,
        settings=cfg,
    )
|