svc-infra 0.1.595__py3-none-any.whl → 0.1.706__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of svc-infra might be problematic. Click here for more details.
- svc_infra/__init__.py +58 -2
- svc_infra/apf_payments/models.py +133 -42
- svc_infra/apf_payments/provider/aiydan.py +121 -47
- svc_infra/apf_payments/provider/base.py +30 -9
- svc_infra/apf_payments/provider/stripe.py +156 -62
- svc_infra/apf_payments/schemas.py +18 -9
- svc_infra/apf_payments/service.py +98 -41
- svc_infra/apf_payments/settings.py +5 -1
- svc_infra/api/__init__.py +61 -0
- svc_infra/api/fastapi/__init__.py +15 -0
- svc_infra/api/fastapi/admin/__init__.py +3 -0
- svc_infra/api/fastapi/admin/add.py +245 -0
- svc_infra/api/fastapi/apf_payments/router.py +128 -70
- svc_infra/api/fastapi/apf_payments/setup.py +13 -6
- svc_infra/api/fastapi/auth/__init__.py +65 -0
- svc_infra/api/fastapi/auth/_cookies.py +6 -2
- svc_infra/api/fastapi/auth/add.py +17 -14
- svc_infra/api/fastapi/auth/gaurd.py +45 -16
- svc_infra/api/fastapi/auth/mfa/models.py +3 -1
- svc_infra/api/fastapi/auth/mfa/pre_auth.py +10 -6
- svc_infra/api/fastapi/auth/mfa/router.py +15 -8
- svc_infra/api/fastapi/auth/mfa/security.py +1 -2
- svc_infra/api/fastapi/auth/mfa/utils.py +2 -1
- svc_infra/api/fastapi/auth/mfa/verify.py +9 -2
- svc_infra/api/fastapi/auth/policy.py +0 -1
- svc_infra/api/fastapi/auth/providers.py +3 -1
- svc_infra/api/fastapi/auth/routers/apikey_router.py +6 -6
- svc_infra/api/fastapi/auth/routers/oauth_router.py +146 -52
- svc_infra/api/fastapi/auth/routers/session_router.py +6 -2
- svc_infra/api/fastapi/auth/security.py +31 -10
- svc_infra/api/fastapi/auth/sender.py +8 -1
- svc_infra/api/fastapi/auth/state.py +3 -1
- svc_infra/api/fastapi/auth/ws_security.py +275 -0
- svc_infra/api/fastapi/billing/router.py +73 -0
- svc_infra/api/fastapi/billing/setup.py +19 -0
- svc_infra/api/fastapi/cache/add.py +9 -5
- svc_infra/api/fastapi/db/__init__.py +5 -1
- svc_infra/api/fastapi/db/http.py +3 -1
- svc_infra/api/fastapi/db/nosql/__init__.py +39 -1
- svc_infra/api/fastapi/db/nosql/mongo/add.py +47 -32
- svc_infra/api/fastapi/db/nosql/mongo/crud_router.py +30 -11
- svc_infra/api/fastapi/db/sql/__init__.py +5 -1
- svc_infra/api/fastapi/db/sql/add.py +71 -26
- svc_infra/api/fastapi/db/sql/crud_router.py +210 -22
- svc_infra/api/fastapi/db/sql/health.py +3 -1
- svc_infra/api/fastapi/db/sql/session.py +18 -0
- svc_infra/api/fastapi/db/sql/users.py +18 -6
- svc_infra/api/fastapi/dependencies/ratelimit.py +78 -14
- svc_infra/api/fastapi/docs/add.py +173 -0
- svc_infra/api/fastapi/docs/landing.py +4 -2
- svc_infra/api/fastapi/docs/scoped.py +62 -15
- svc_infra/api/fastapi/dual/__init__.py +12 -2
- svc_infra/api/fastapi/dual/dualize.py +1 -1
- svc_infra/api/fastapi/dual/protected.py +126 -4
- svc_infra/api/fastapi/dual/public.py +25 -0
- svc_infra/api/fastapi/dual/router.py +40 -13
- svc_infra/api/fastapi/dx.py +33 -2
- svc_infra/api/fastapi/ease.py +10 -2
- svc_infra/api/fastapi/http/concurrency.py +2 -1
- svc_infra/api/fastapi/http/conditional.py +3 -1
- svc_infra/api/fastapi/middleware/debug.py +4 -1
- svc_infra/api/fastapi/middleware/errors/catchall.py +6 -2
- svc_infra/api/fastapi/middleware/errors/exceptions.py +1 -1
- svc_infra/api/fastapi/middleware/errors/handlers.py +54 -8
- svc_infra/api/fastapi/middleware/graceful_shutdown.py +104 -0
- svc_infra/api/fastapi/middleware/idempotency.py +197 -70
- svc_infra/api/fastapi/middleware/idempotency_store.py +187 -0
- svc_infra/api/fastapi/middleware/optimistic_lock.py +42 -0
- svc_infra/api/fastapi/middleware/ratelimit.py +125 -28
- svc_infra/api/fastapi/middleware/ratelimit_store.py +43 -10
- svc_infra/api/fastapi/middleware/request_id.py +27 -11
- svc_infra/api/fastapi/middleware/request_size_limit.py +3 -3
- svc_infra/api/fastapi/middleware/timeout.py +177 -0
- svc_infra/api/fastapi/openapi/apply.py +5 -3
- svc_infra/api/fastapi/openapi/conventions.py +9 -2
- svc_infra/api/fastapi/openapi/mutators.py +165 -20
- svc_infra/api/fastapi/openapi/pipeline.py +1 -1
- svc_infra/api/fastapi/openapi/security.py +3 -1
- svc_infra/api/fastapi/ops/add.py +75 -0
- svc_infra/api/fastapi/pagination.py +47 -20
- svc_infra/api/fastapi/routers/__init__.py +43 -15
- svc_infra/api/fastapi/routers/ping.py +1 -0
- svc_infra/api/fastapi/setup.py +188 -57
- svc_infra/api/fastapi/tenancy/add.py +19 -0
- svc_infra/api/fastapi/tenancy/context.py +112 -0
- svc_infra/api/fastapi/versioned.py +101 -0
- svc_infra/app/README.md +5 -5
- svc_infra/app/__init__.py +3 -1
- svc_infra/app/env.py +69 -1
- svc_infra/app/logging/add.py +9 -2
- svc_infra/app/logging/formats.py +12 -5
- svc_infra/billing/__init__.py +23 -0
- svc_infra/billing/async_service.py +147 -0
- svc_infra/billing/jobs.py +241 -0
- svc_infra/billing/models.py +177 -0
- svc_infra/billing/quotas.py +103 -0
- svc_infra/billing/schemas.py +36 -0
- svc_infra/billing/service.py +123 -0
- svc_infra/bundled_docs/README.md +5 -0
- svc_infra/bundled_docs/__init__.py +1 -0
- svc_infra/bundled_docs/getting-started.md +6 -0
- svc_infra/cache/__init__.py +9 -0
- svc_infra/cache/add.py +170 -0
- svc_infra/cache/backend.py +7 -6
- svc_infra/cache/decorators.py +81 -15
- svc_infra/cache/demo.py +2 -2
- svc_infra/cache/keys.py +24 -4
- svc_infra/cache/recache.py +26 -14
- svc_infra/cache/resources.py +14 -5
- svc_infra/cache/tags.py +19 -44
- svc_infra/cache/utils.py +3 -1
- svc_infra/cli/__init__.py +52 -8
- svc_infra/cli/__main__.py +4 -0
- svc_infra/cli/cmds/__init__.py +39 -2
- svc_infra/cli/cmds/db/nosql/mongo/mongo_cmds.py +7 -4
- svc_infra/cli/cmds/db/nosql/mongo/mongo_scaffold_cmds.py +7 -5
- svc_infra/cli/cmds/db/ops_cmds.py +270 -0
- svc_infra/cli/cmds/db/sql/alembic_cmds.py +103 -18
- svc_infra/cli/cmds/db/sql/sql_export_cmds.py +88 -0
- svc_infra/cli/cmds/db/sql/sql_scaffold_cmds.py +3 -3
- svc_infra/cli/cmds/docs/docs_cmds.py +142 -0
- svc_infra/cli/cmds/dx/__init__.py +12 -0
- svc_infra/cli/cmds/dx/dx_cmds.py +116 -0
- svc_infra/cli/cmds/health/__init__.py +179 -0
- svc_infra/cli/cmds/health/health_cmds.py +8 -0
- svc_infra/cli/cmds/help.py +4 -0
- svc_infra/cli/cmds/jobs/__init__.py +1 -0
- svc_infra/cli/cmds/jobs/jobs_cmds.py +47 -0
- svc_infra/cli/cmds/obs/obs_cmds.py +36 -15
- svc_infra/cli/cmds/sdk/__init__.py +0 -0
- svc_infra/cli/cmds/sdk/sdk_cmds.py +112 -0
- svc_infra/cli/foundation/runner.py +6 -2
- svc_infra/data/add.py +61 -0
- svc_infra/data/backup.py +58 -0
- svc_infra/data/erasure.py +45 -0
- svc_infra/data/fixtures.py +42 -0
- svc_infra/data/retention.py +61 -0
- svc_infra/db/__init__.py +15 -0
- svc_infra/db/crud_schema.py +9 -9
- svc_infra/db/inbox.py +67 -0
- svc_infra/db/nosql/__init__.py +3 -0
- svc_infra/db/nosql/core.py +30 -9
- svc_infra/db/nosql/indexes.py +3 -1
- svc_infra/db/nosql/management.py +1 -1
- svc_infra/db/nosql/mongo/README.md +13 -13
- svc_infra/db/nosql/mongo/client.py +19 -2
- svc_infra/db/nosql/mongo/settings.py +6 -2
- svc_infra/db/nosql/repository.py +35 -15
- svc_infra/db/nosql/resource.py +20 -3
- svc_infra/db/nosql/scaffold.py +9 -3
- svc_infra/db/nosql/service.py +3 -1
- svc_infra/db/nosql/types.py +6 -2
- svc_infra/db/ops.py +384 -0
- svc_infra/db/outbox.py +108 -0
- svc_infra/db/sql/apikey.py +37 -9
- svc_infra/db/sql/authref.py +9 -3
- svc_infra/db/sql/constants.py +12 -8
- svc_infra/db/sql/core.py +2 -2
- svc_infra/db/sql/management.py +11 -8
- svc_infra/db/sql/repository.py +99 -26
- svc_infra/db/sql/resource.py +5 -0
- svc_infra/db/sql/scaffold.py +6 -2
- svc_infra/db/sql/service.py +15 -5
- svc_infra/db/sql/templates/models_schemas/auth/models.py.tmpl +7 -56
- svc_infra/db/sql/templates/setup/env_async.py.tmpl +34 -12
- svc_infra/db/sql/templates/setup/env_sync.py.tmpl +29 -7
- svc_infra/db/sql/tenant.py +88 -0
- svc_infra/db/sql/uniq_hooks.py +9 -3
- svc_infra/db/sql/utils.py +138 -51
- svc_infra/db/sql/versioning.py +14 -0
- svc_infra/deploy/__init__.py +538 -0
- svc_infra/documents/__init__.py +100 -0
- svc_infra/documents/add.py +264 -0
- svc_infra/documents/ease.py +233 -0
- svc_infra/documents/models.py +114 -0
- svc_infra/documents/storage.py +264 -0
- svc_infra/dx/add.py +65 -0
- svc_infra/dx/changelog.py +74 -0
- svc_infra/dx/checks.py +68 -0
- svc_infra/exceptions.py +141 -0
- svc_infra/health/__init__.py +864 -0
- svc_infra/http/__init__.py +13 -0
- svc_infra/http/client.py +105 -0
- svc_infra/jobs/builtins/outbox_processor.py +40 -0
- svc_infra/jobs/builtins/webhook_delivery.py +95 -0
- svc_infra/jobs/easy.py +33 -0
- svc_infra/jobs/loader.py +50 -0
- svc_infra/jobs/queue.py +116 -0
- svc_infra/jobs/redis_queue.py +256 -0
- svc_infra/jobs/runner.py +79 -0
- svc_infra/jobs/scheduler.py +53 -0
- svc_infra/jobs/worker.py +40 -0
- svc_infra/loaders/__init__.py +186 -0
- svc_infra/loaders/base.py +142 -0
- svc_infra/loaders/github.py +311 -0
- svc_infra/loaders/models.py +147 -0
- svc_infra/loaders/url.py +235 -0
- svc_infra/logging/__init__.py +374 -0
- svc_infra/mcp/svc_infra_mcp.py +91 -33
- svc_infra/obs/README.md +2 -0
- svc_infra/obs/add.py +65 -9
- svc_infra/obs/cloud_dash.py +2 -1
- svc_infra/obs/grafana/dashboards/http-overview.json +45 -0
- svc_infra/obs/metrics/__init__.py +3 -4
- svc_infra/obs/metrics/asgi.py +13 -7
- svc_infra/obs/metrics/http.py +9 -5
- svc_infra/obs/metrics/sqlalchemy.py +13 -9
- svc_infra/obs/metrics.py +6 -5
- svc_infra/obs/settings.py +6 -2
- svc_infra/security/add.py +217 -0
- svc_infra/security/audit.py +92 -10
- svc_infra/security/audit_service.py +4 -3
- svc_infra/security/headers.py +15 -2
- svc_infra/security/hibp.py +14 -4
- svc_infra/security/jwt_rotation.py +74 -22
- svc_infra/security/lockout.py +11 -5
- svc_infra/security/models.py +54 -12
- svc_infra/security/oauth_models.py +73 -0
- svc_infra/security/org_invites.py +5 -3
- svc_infra/security/passwords.py +3 -1
- svc_infra/security/permissions.py +25 -2
- svc_infra/security/session.py +1 -1
- svc_infra/security/signed_cookies.py +21 -1
- svc_infra/storage/__init__.py +93 -0
- svc_infra/storage/add.py +253 -0
- svc_infra/storage/backends/__init__.py +11 -0
- svc_infra/storage/backends/local.py +339 -0
- svc_infra/storage/backends/memory.py +216 -0
- svc_infra/storage/backends/s3.py +353 -0
- svc_infra/storage/base.py +239 -0
- svc_infra/storage/easy.py +185 -0
- svc_infra/storage/settings.py +195 -0
- svc_infra/testing/__init__.py +685 -0
- svc_infra/utils.py +7 -3
- svc_infra/webhooks/__init__.py +69 -0
- svc_infra/webhooks/add.py +339 -0
- svc_infra/webhooks/encryption.py +115 -0
- svc_infra/webhooks/fastapi.py +39 -0
- svc_infra/webhooks/router.py +55 -0
- svc_infra/webhooks/service.py +70 -0
- svc_infra/webhooks/signing.py +34 -0
- svc_infra/websocket/__init__.py +79 -0
- svc_infra/websocket/add.py +140 -0
- svc_infra/websocket/client.py +282 -0
- svc_infra/websocket/config.py +69 -0
- svc_infra/websocket/easy.py +76 -0
- svc_infra/websocket/exceptions.py +61 -0
- svc_infra/websocket/manager.py +344 -0
- svc_infra/websocket/models.py +49 -0
- svc_infra-0.1.706.dist-info/LICENSE +21 -0
- svc_infra-0.1.706.dist-info/METADATA +356 -0
- svc_infra-0.1.706.dist-info/RECORD +357 -0
- svc_infra-0.1.595.dist-info/METADATA +0 -80
- svc_infra-0.1.595.dist-info/RECORD +0 -253
- {svc_infra-0.1.595.dist-info → svc_infra-0.1.706.dist-info}/WHEEL +0 -0
- {svc_infra-0.1.595.dist-info → svc_infra-0.1.706.dist-info}/entry_points.txt +0 -0
|
@@ -4,18 +4,22 @@ import os
|
|
|
4
4
|
import socket
|
|
5
5
|
import subprocess
|
|
6
6
|
from pathlib import Path
|
|
7
|
+
from typing import Any, Callable
|
|
7
8
|
from urllib.parse import urlparse
|
|
8
9
|
|
|
9
10
|
import typer
|
|
10
11
|
|
|
12
|
+
from svc_infra.obs.cloud_dash import push_dashboards_from_pkg
|
|
13
|
+
from svc_infra.utils import render_template, write
|
|
14
|
+
|
|
11
15
|
# --- NEW: load .env automatically (best-effort) ---
|
|
16
|
+
load_dotenv: Callable[..., Any] | None
|
|
12
17
|
try:
|
|
13
|
-
from dotenv import load_dotenv
|
|
18
|
+
from dotenv import load_dotenv as _real_load_dotenv
|
|
14
19
|
except Exception: # pragma: no cover
|
|
15
20
|
load_dotenv = None
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
from svc_infra.utils import render_template, write
|
|
21
|
+
else:
|
|
22
|
+
load_dotenv = _real_load_dotenv
|
|
19
23
|
|
|
20
24
|
|
|
21
25
|
def _run(cmd: list[str], *, env: dict | None = None):
|
|
@@ -25,7 +29,9 @@ def _run(cmd: list[str], *, env: dict | None = None):
|
|
|
25
29
|
def _emit_local_stack(root: Path, metrics_url: str):
|
|
26
30
|
write(
|
|
27
31
|
root / "docker-compose.yml",
|
|
28
|
-
render_template(
|
|
32
|
+
render_template(
|
|
33
|
+
"svc_infra.obs.providers.grafana.templates", "docker-compose.yml.tmpl", {}
|
|
34
|
+
),
|
|
29
35
|
)
|
|
30
36
|
p = urlparse(metrics_url)
|
|
31
37
|
prom_yml = render_template(
|
|
@@ -102,7 +108,7 @@ def up():
|
|
|
102
108
|
- Else → Local mode (Grafana + Prometheus).
|
|
103
109
|
"""
|
|
104
110
|
# NEW: load .env once, best-effort, without crashing if package missing
|
|
105
|
-
if load_dotenv:
|
|
111
|
+
if load_dotenv is not None:
|
|
106
112
|
try:
|
|
107
113
|
load_dotenv(dotenv_path=Path(".env"), override=False)
|
|
108
114
|
except Exception:
|
|
@@ -110,7 +116,9 @@ def up():
|
|
|
110
116
|
|
|
111
117
|
root = Path(".obs")
|
|
112
118
|
root.mkdir(exist_ok=True)
|
|
113
|
-
metrics_url = os.getenv(
|
|
119
|
+
metrics_url = os.getenv(
|
|
120
|
+
"SVC_INFRA_METRICS_URL", "http://host.docker.internal:8000/metrics"
|
|
121
|
+
)
|
|
114
122
|
|
|
115
123
|
cloud_url = os.getenv("GRAFANA_CLOUD_URL", "").strip()
|
|
116
124
|
cloud_token = os.getenv("GRAFANA_CLOUD_TOKEN", "").strip()
|
|
@@ -131,7 +139,14 @@ def up():
|
|
|
131
139
|
):
|
|
132
140
|
_emit_local_agent(root, metrics_url)
|
|
133
141
|
_run(
|
|
134
|
-
[
|
|
142
|
+
[
|
|
143
|
+
"docker",
|
|
144
|
+
"compose",
|
|
145
|
+
"-f",
|
|
146
|
+
str(root / "docker-compose.cloud.yml"),
|
|
147
|
+
"up",
|
|
148
|
+
"-d",
|
|
149
|
+
],
|
|
135
150
|
env=os.environ.copy(),
|
|
136
151
|
)
|
|
137
152
|
typer.echo("[cloud] local Grafana Agent started (pushing metrics to Cloud)")
|
|
@@ -146,7 +161,10 @@ def up():
|
|
|
146
161
|
env["GRAFANA_PORT"] = str(local_graf)
|
|
147
162
|
env["PROM_PORT"] = str(local_prom)
|
|
148
163
|
_emit_local_stack(root, metrics_url)
|
|
149
|
-
_run(
|
|
164
|
+
_run(
|
|
165
|
+
["docker", "compose", "-f", str(root / "docker-compose.yml"), "up", "-d"],
|
|
166
|
+
env=env,
|
|
167
|
+
)
|
|
150
168
|
typer.echo(f"Local Grafana → http://localhost:{local_graf} (admin/admin)")
|
|
151
169
|
typer.echo(f"Local Prometheus → http://localhost:{local_prom}")
|
|
152
170
|
|
|
@@ -155,11 +173,13 @@ def down():
|
|
|
155
173
|
root = Path(".obs")
|
|
156
174
|
if (root / "docker-compose.yml").exists():
|
|
157
175
|
subprocess.run(
|
|
158
|
-
["docker", "compose", "-f", str(root / "docker-compose.yml"), "down"],
|
|
176
|
+
["docker", "compose", "-f", str(root / "docker-compose.yml"), "down"],
|
|
177
|
+
check=False,
|
|
159
178
|
)
|
|
160
179
|
if (root / "docker-compose.cloud.yml").exists():
|
|
161
180
|
subprocess.run(
|
|
162
|
-
["docker", "compose", "-f", str(root / "docker-compose.cloud.yml"), "down"],
|
|
181
|
+
["docker", "compose", "-f", str(root / "docker-compose.cloud.yml"), "down"],
|
|
182
|
+
check=False,
|
|
163
183
|
)
|
|
164
184
|
typer.echo("Stopped local obs services.")
|
|
165
185
|
|
|
@@ -171,7 +191,7 @@ def scaffold(target: str = typer.Option(..., help="compose|railway|k8s|fly")):
|
|
|
171
191
|
out.mkdir(parents=True, exist_ok=True)
|
|
172
192
|
|
|
173
193
|
base = files("svc_infra.obs.templates.sidecars").joinpath(target)
|
|
174
|
-
for p in base.rglob("*"):
|
|
194
|
+
for p in base.rglob("*"): # type: ignore[attr-defined]
|
|
175
195
|
if p.is_file():
|
|
176
196
|
rel = p.relative_to(base)
|
|
177
197
|
dst = out / rel
|
|
@@ -182,6 +202,7 @@ def scaffold(target: str = typer.Option(..., help="compose|railway|k8s|fly")):
|
|
|
182
202
|
|
|
183
203
|
|
|
184
204
|
def register(app: typer.Typer) -> None:
|
|
185
|
-
app
|
|
186
|
-
app.command("
|
|
187
|
-
app.command("
|
|
205
|
+
# Attach to 'obs' group app
|
|
206
|
+
app.command("up")(up)
|
|
207
|
+
app.command("down")(down)
|
|
208
|
+
app.command("scaffold")(scaffold)
|
|
File without changes
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import subprocess
|
|
4
|
+
|
|
5
|
+
import typer
|
|
6
|
+
|
|
7
|
+
app = typer.Typer(
|
|
8
|
+
no_args_is_help=True, add_completion=False, help="Generate SDKs from OpenAPI."
|
|
9
|
+
)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _echo(cmd: list[str]):
|
|
13
|
+
typer.echo("$ " + " ".join(cmd))
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _parse_bool(val: str | bool | None, default: bool = True) -> bool:
|
|
17
|
+
if isinstance(val, bool):
|
|
18
|
+
return val
|
|
19
|
+
if val is None:
|
|
20
|
+
return default
|
|
21
|
+
s = str(val).strip().lower()
|
|
22
|
+
if s in {"1", "true", "yes", "y"}:
|
|
23
|
+
return True
|
|
24
|
+
if s in {"0", "false", "no", "n"}:
|
|
25
|
+
return False
|
|
26
|
+
return default
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@app.command("ts")
|
|
30
|
+
def sdk_ts(
|
|
31
|
+
openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
|
|
32
|
+
outdir: str = typer.Option("sdk-ts", help="Output directory"),
|
|
33
|
+
dry_run: str = typer.Option(
|
|
34
|
+
"true", help="Print commands instead of running (true/false)"
|
|
35
|
+
),
|
|
36
|
+
):
|
|
37
|
+
"""Generate a TypeScript SDK (openapi-typescript-codegen as default)."""
|
|
38
|
+
cmd = [
|
|
39
|
+
"npx",
|
|
40
|
+
"openapi-typescript-codegen",
|
|
41
|
+
"--input",
|
|
42
|
+
openapi,
|
|
43
|
+
"--output",
|
|
44
|
+
outdir,
|
|
45
|
+
]
|
|
46
|
+
if _parse_bool(dry_run, True):
|
|
47
|
+
_echo(cmd)
|
|
48
|
+
return
|
|
49
|
+
subprocess.check_call(cmd)
|
|
50
|
+
typer.secho(f"TS SDK generated → {outdir}", fg=typer.colors.GREEN)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@app.command("py")
|
|
54
|
+
def sdk_py(
|
|
55
|
+
openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
|
|
56
|
+
outdir: str = typer.Option("sdk-py", help="Output directory"),
|
|
57
|
+
package_name: str = typer.Option("client_sdk", help="Python package name"),
|
|
58
|
+
dry_run: str = typer.Option(
|
|
59
|
+
"true", help="Print commands instead of running (true/false)"
|
|
60
|
+
),
|
|
61
|
+
):
|
|
62
|
+
"""Generate a Python SDK via openapi-generator-cli with "python" generator."""
|
|
63
|
+
cmd = [
|
|
64
|
+
"npx",
|
|
65
|
+
"-y",
|
|
66
|
+
"@openapitools/openapi-generator-cli",
|
|
67
|
+
"generate",
|
|
68
|
+
"-i",
|
|
69
|
+
openapi,
|
|
70
|
+
"-g",
|
|
71
|
+
"python",
|
|
72
|
+
"-o",
|
|
73
|
+
outdir,
|
|
74
|
+
"--additional-properties",
|
|
75
|
+
f"packageName={package_name}",
|
|
76
|
+
]
|
|
77
|
+
if _parse_bool(dry_run, True):
|
|
78
|
+
_echo(cmd)
|
|
79
|
+
return
|
|
80
|
+
subprocess.check_call(cmd)
|
|
81
|
+
typer.secho(f"Python SDK generated → {outdir}", fg=typer.colors.GREEN)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@app.command("postman")
|
|
85
|
+
def sdk_postman(
|
|
86
|
+
openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
|
|
87
|
+
out: str = typer.Option(
|
|
88
|
+
"postman_collection.json", help="Output Postman collection"
|
|
89
|
+
),
|
|
90
|
+
dry_run: str = typer.Option(
|
|
91
|
+
"true", help="Print commands instead of running (true/false)"
|
|
92
|
+
),
|
|
93
|
+
):
|
|
94
|
+
"""Convert OpenAPI to a Postman collection via openapi-to-postmanv2."""
|
|
95
|
+
cmd = [
|
|
96
|
+
"npx",
|
|
97
|
+
"-y",
|
|
98
|
+
"openapi-to-postmanv2",
|
|
99
|
+
"-s",
|
|
100
|
+
openapi,
|
|
101
|
+
"-o",
|
|
102
|
+
out,
|
|
103
|
+
]
|
|
104
|
+
if _parse_bool(dry_run, True):
|
|
105
|
+
_echo(cmd)
|
|
106
|
+
return
|
|
107
|
+
subprocess.check_call(cmd)
|
|
108
|
+
typer.secho(f"Postman collection generated → {out}", fg=typer.colors.GREEN)
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def register(root: typer.Typer):
|
|
112
|
+
root.add_typer(app, name="sdk")
|
|
@@ -25,7 +25,9 @@ def candidate_cmds(root: Path, prog: str, argv: List[str]) -> List[List[str]]:
|
|
|
25
25
|
cmds.append([prog, *argv])
|
|
26
26
|
|
|
27
27
|
py = shutil.which("python3") or shutil.which("python") or "python"
|
|
28
|
-
module =
|
|
28
|
+
module = (
|
|
29
|
+
prog.replace("-", "_") + ".cli_shim"
|
|
30
|
+
) # e.g., svc-infra -> svc_infra.cli_shim
|
|
29
31
|
cmds.append([py, "-m", module, *argv])
|
|
30
32
|
|
|
31
33
|
return cmds
|
|
@@ -54,4 +56,6 @@ async def run_from_root(root: Path, prog: str, argv: List[str]) -> str:
|
|
|
54
56
|
except Exception as e:
|
|
55
57
|
last_exc = e
|
|
56
58
|
continue
|
|
57
|
-
raise RuntimeError(
|
|
59
|
+
raise RuntimeError(
|
|
60
|
+
f"All runners failed in {root} for: {prog} {' '.join(argv)}"
|
|
61
|
+
) from last_exc
|
svc_infra/data/add.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from typing import Callable, Iterable, Optional
|
|
5
|
+
|
|
6
|
+
from fastapi import FastAPI
|
|
7
|
+
|
|
8
|
+
from svc_infra.cli.cmds.db.sql.alembic_cmds import cmd_setup_and_migrate
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def add_data_lifecycle(
|
|
12
|
+
app: FastAPI,
|
|
13
|
+
*,
|
|
14
|
+
auto_migrate: bool = True,
|
|
15
|
+
database_url: str | None = None,
|
|
16
|
+
discover_packages: Optional[list[str]] = None,
|
|
17
|
+
with_payments: bool | None = None,
|
|
18
|
+
on_load_fixtures: Optional[Callable[[], None]] = None,
|
|
19
|
+
retention_jobs: Optional[Iterable[Callable[[], None]]] = None,
|
|
20
|
+
erasure_job: Optional[Callable[[str], None]] = None,
|
|
21
|
+
) -> None:
|
|
22
|
+
"""
|
|
23
|
+
Wire data lifecycle conveniences:
|
|
24
|
+
|
|
25
|
+
- auto_migrate: run end-to-end Alembic setup-and-migrate on startup (idempotent).
|
|
26
|
+
- on_load_fixtures: optional callback to load reference/fixture data once at startup.
|
|
27
|
+
- retention_jobs: optional list of callables to register purge tasks (scheduler integration is external).
|
|
28
|
+
- erasure_job: optional callable to trigger a GDPR erasure workflow for a given principal ID.
|
|
29
|
+
|
|
30
|
+
This helper is intentionally minimal: it coordinates existing building blocks
|
|
31
|
+
and offers extension points. Jobs should be scheduled using svc_infra.jobs helpers.
|
|
32
|
+
"""
|
|
33
|
+
|
|
34
|
+
async def _run_lifecycle() -> None:
|
|
35
|
+
# Startup
|
|
36
|
+
if auto_migrate:
|
|
37
|
+
cmd_setup_and_migrate(
|
|
38
|
+
database_url=database_url,
|
|
39
|
+
overwrite_scaffold=False,
|
|
40
|
+
create_db_if_missing=True,
|
|
41
|
+
create_followup_revision=True,
|
|
42
|
+
initial_message="initial schema",
|
|
43
|
+
followup_message="autogen",
|
|
44
|
+
discover_packages=discover_packages,
|
|
45
|
+
with_payments=with_payments if with_payments is not None else False,
|
|
46
|
+
)
|
|
47
|
+
if on_load_fixtures:
|
|
48
|
+
res = on_load_fixtures()
|
|
49
|
+
if inspect.isawaitable(res):
|
|
50
|
+
await res
|
|
51
|
+
|
|
52
|
+
app.add_event_handler("startup", _run_lifecycle)
|
|
53
|
+
|
|
54
|
+
# Store optional jobs on app.state for external schedulers to discover/register.
|
|
55
|
+
if retention_jobs is not None:
|
|
56
|
+
app.state.data_retention_jobs = list(retention_jobs)
|
|
57
|
+
if erasure_job is not None:
|
|
58
|
+
app.state.data_erasure_job = erasure_job
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
__all__ = ["add_data_lifecycle"]
|
svc_infra/data/backup.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from typing import Callable, Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass(frozen=True)
|
|
9
|
+
class BackupHealthReport:
|
|
10
|
+
ok: bool
|
|
11
|
+
last_success: Optional[datetime]
|
|
12
|
+
retention_days: Optional[int]
|
|
13
|
+
message: str = ""
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def verify_backups(
|
|
17
|
+
*, last_success: Optional[datetime] = None, retention_days: Optional[int] = None
|
|
18
|
+
) -> BackupHealthReport:
|
|
19
|
+
"""Return a basic backup health report.
|
|
20
|
+
|
|
21
|
+
In production, callers should plug a provider-specific checker and translate into this report.
|
|
22
|
+
"""
|
|
23
|
+
if last_success is None:
|
|
24
|
+
return BackupHealthReport(
|
|
25
|
+
ok=False,
|
|
26
|
+
last_success=None,
|
|
27
|
+
retention_days=retention_days,
|
|
28
|
+
message="no_backup_seen",
|
|
29
|
+
)
|
|
30
|
+
now = datetime.now(timezone.utc)
|
|
31
|
+
age_days = (now - last_success).total_seconds() / 86400.0
|
|
32
|
+
ok = retention_days is None or age_days <= max(1, retention_days)
|
|
33
|
+
return BackupHealthReport(
|
|
34
|
+
ok=ok, last_success=last_success, retention_days=retention_days
|
|
35
|
+
)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
__all__ = ["BackupHealthReport", "verify_backups"]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def make_backup_verification_job(
|
|
42
|
+
checker: Callable[[], BackupHealthReport],
|
|
43
|
+
*,
|
|
44
|
+
on_report: Optional[Callable[[BackupHealthReport], None]] = None,
|
|
45
|
+
):
|
|
46
|
+
"""Return a callable suitable for scheduling in a job runner.
|
|
47
|
+
|
|
48
|
+
The checker should perform provider-specific checks and return a BackupHealthReport.
|
|
49
|
+
If on_report is provided, it will be invoked with the report.
|
|
50
|
+
"""
|
|
51
|
+
|
|
52
|
+
def _job() -> BackupHealthReport:
|
|
53
|
+
rep = checker()
|
|
54
|
+
if on_report:
|
|
55
|
+
on_report(rep)
|
|
56
|
+
return rep
|
|
57
|
+
|
|
58
|
+
return _job
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from typing import Any, Awaitable, Callable, Iterable, Optional, Protocol
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class SqlSession(Protocol): # minimal protocol for tests/integration
|
|
8
|
+
async def execute(self, stmt: Any) -> Any:
|
|
9
|
+
pass
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass(frozen=True)
|
|
13
|
+
class ErasureStep:
|
|
14
|
+
name: str
|
|
15
|
+
run: Callable[[SqlSession, str], Awaitable[int] | int]
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass(frozen=True)
|
|
19
|
+
class ErasurePlan:
|
|
20
|
+
steps: Iterable[ErasureStep]
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
async def run_erasure(
|
|
24
|
+
session: SqlSession,
|
|
25
|
+
principal_id: str,
|
|
26
|
+
plan: ErasurePlan,
|
|
27
|
+
*,
|
|
28
|
+
on_audit: Optional[Callable[[str, dict[str, Any]], None]] = None,
|
|
29
|
+
) -> int:
|
|
30
|
+
"""Run an erasure plan and optionally emit an audit event.
|
|
31
|
+
|
|
32
|
+
Returns total affected rows across steps.
|
|
33
|
+
"""
|
|
34
|
+
total = 0
|
|
35
|
+
for s in plan.steps:
|
|
36
|
+
res = s.run(session, principal_id)
|
|
37
|
+
if hasattr(res, "__await__"):
|
|
38
|
+
res = await res
|
|
39
|
+
total += int(res or 0)
|
|
40
|
+
if on_audit:
|
|
41
|
+
on_audit("erasure.completed", {"principal_id": principal_id, "affected": total})
|
|
42
|
+
return total
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
__all__ = ["ErasureStep", "ErasurePlan", "run_erasure"]
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Awaitable, Callable, Iterable, Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
async def run_fixtures(
|
|
9
|
+
loaders: Iterable[Callable[[], None | Awaitable[None]]],
|
|
10
|
+
*,
|
|
11
|
+
run_once_file: Optional[str] = None,
|
|
12
|
+
) -> None:
|
|
13
|
+
"""Run a sequence of fixture loaders (sync or async).
|
|
14
|
+
|
|
15
|
+
- If run_once_file is provided and exists, does nothing.
|
|
16
|
+
- On success, creates the run_once_file sentinel (parent dirs included).
|
|
17
|
+
"""
|
|
18
|
+
if run_once_file:
|
|
19
|
+
sentinel = Path(run_once_file)
|
|
20
|
+
if sentinel.exists():
|
|
21
|
+
return
|
|
22
|
+
for fn in loaders:
|
|
23
|
+
res = fn()
|
|
24
|
+
if inspect.isawaitable(res):
|
|
25
|
+
await res
|
|
26
|
+
if run_once_file:
|
|
27
|
+
sentinel.parent.mkdir(parents=True, exist_ok=True)
|
|
28
|
+
Path(run_once_file).write_text("ok")
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def make_on_load_fixtures(
|
|
32
|
+
*loaders: Callable[[], None | Awaitable[None]], run_once_file: Optional[str] = None
|
|
33
|
+
) -> Callable[[], Awaitable[None]]:
|
|
34
|
+
"""Return an async callable suitable for add_data_lifecycle(on_load_fixtures=...)."""
|
|
35
|
+
|
|
36
|
+
async def _runner() -> None:
|
|
37
|
+
await run_fixtures(loaders, run_once_file=run_once_file)
|
|
38
|
+
|
|
39
|
+
return _runner
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
__all__ = ["run_fixtures", "make_on_load_fixtures"]
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from datetime import datetime, timedelta, timezone
|
|
5
|
+
from typing import Any, Iterable, Optional, Protocol, Sequence
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class SqlSession(Protocol): # minimal protocol for tests/integration
|
|
9
|
+
async def execute(self, stmt: Any) -> Any:
|
|
10
|
+
pass
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
|
|
14
|
+
class RetentionPolicy:
|
|
15
|
+
name: str
|
|
16
|
+
model: Any # SQLAlchemy model or test double exposing columns
|
|
17
|
+
older_than_days: int
|
|
18
|
+
soft_delete_field: Optional[str] = "deleted_at"
|
|
19
|
+
extra_where: Optional[Sequence[Any]] = None
|
|
20
|
+
hard_delete: bool = False
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
async def purge_policy(session: SqlSession, policy: RetentionPolicy) -> int:
|
|
24
|
+
"""Execute a single retention purge according to policy.
|
|
25
|
+
|
|
26
|
+
If hard_delete is False and soft_delete_field exists on model, set timestamp; else DELETE.
|
|
27
|
+
Returns number of affected rows (best-effort; test doubles may return an int directly).
|
|
28
|
+
"""
|
|
29
|
+
cutoff = datetime.now(timezone.utc) - timedelta(days=policy.older_than_days)
|
|
30
|
+
m = policy.model
|
|
31
|
+
where = list(policy.extra_where or [])
|
|
32
|
+
created_col = getattr(m, "created_at", None)
|
|
33
|
+
if created_col is not None and hasattr(created_col, "__le__"):
|
|
34
|
+
where.append(created_col <= cutoff)
|
|
35
|
+
|
|
36
|
+
# Soft-delete path when available and requested
|
|
37
|
+
if (
|
|
38
|
+
not policy.hard_delete
|
|
39
|
+
and policy.soft_delete_field
|
|
40
|
+
and hasattr(m, policy.soft_delete_field)
|
|
41
|
+
):
|
|
42
|
+
stmt = m.update().where(*where).values({policy.soft_delete_field: cutoff})
|
|
43
|
+
res = await session.execute(stmt)
|
|
44
|
+
return getattr(res, "rowcount", 0)
|
|
45
|
+
|
|
46
|
+
# Hard delete fallback
|
|
47
|
+
stmt = m.delete().where(*where)
|
|
48
|
+
res = await session.execute(stmt)
|
|
49
|
+
return getattr(res, "rowcount", 0)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
async def run_retention_purge(
|
|
53
|
+
session: SqlSession, policies: Iterable[RetentionPolicy]
|
|
54
|
+
) -> int:
|
|
55
|
+
total = 0
|
|
56
|
+
for p in policies:
|
|
57
|
+
total += await purge_policy(session, p)
|
|
58
|
+
return total
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
__all__ = ["RetentionPolicy", "purge_policy", "run_retention_purge"]
|
svc_infra/db/__init__.py
CHANGED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
from svc_infra.db.ops import (
|
|
2
|
+
drop_table_safe,
|
|
3
|
+
get_database_url,
|
|
4
|
+
kill_blocking_queries,
|
|
5
|
+
run_sync_sql,
|
|
6
|
+
wait_for_database,
|
|
7
|
+
)
|
|
8
|
+
|
|
9
|
+
__all__ = [
|
|
10
|
+
"drop_table_safe",
|
|
11
|
+
"get_database_url",
|
|
12
|
+
"kill_blocking_queries",
|
|
13
|
+
"run_sync_sql",
|
|
14
|
+
"wait_for_database",
|
|
15
|
+
]
|
svc_infra/db/crud_schema.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
from dataclasses import dataclass
|
|
4
|
-
from typing import Any, Optional, Sequence
|
|
4
|
+
from typing import Any, Optional, Sequence, cast
|
|
5
5
|
|
|
6
6
|
from pydantic import BaseModel, ConfigDict, create_model
|
|
7
7
|
|
|
@@ -27,9 +27,9 @@ class FieldSpec:
|
|
|
27
27
|
exclude_from_update: bool = False
|
|
28
28
|
|
|
29
29
|
|
|
30
|
-
def _opt(t: type[Any]) -> tuple[
|
|
30
|
+
def _opt(t: type[Any]) -> tuple[Any, Any]:
|
|
31
31
|
# convenience: Optional[t] with default None
|
|
32
|
-
return (t | None, None)
|
|
32
|
+
return (t | None, None)
|
|
33
33
|
|
|
34
34
|
|
|
35
35
|
def make_crud_schemas_from_specs(
|
|
@@ -40,9 +40,9 @@ def make_crud_schemas_from_specs(
|
|
|
40
40
|
update_name: Optional[str],
|
|
41
41
|
json_encoders: Optional[dict[type[Any], Any]] = None,
|
|
42
42
|
) -> tuple[type[BaseModel], type[BaseModel], type[BaseModel]]:
|
|
43
|
-
ann_read: dict[str, tuple[
|
|
44
|
-
ann_create: dict[str, tuple[
|
|
45
|
-
ann_update: dict[str, tuple[
|
|
43
|
+
ann_read: dict[str, tuple[Any, Any]] = {}
|
|
44
|
+
ann_create: dict[str, tuple[Any, Any]] = {}
|
|
45
|
+
ann_update: dict[str, tuple[Any, Any]] = {}
|
|
46
46
|
|
|
47
47
|
for s in specs:
|
|
48
48
|
# READ: include unless excluded; all fields Optional
|
|
@@ -60,9 +60,9 @@ def make_crud_schemas_from_specs(
|
|
|
60
60
|
if not s.exclude_from_update:
|
|
61
61
|
ann_update[s.name] = _opt(s.typ)
|
|
62
62
|
|
|
63
|
-
Read = create_model(read_name or "Read", **ann_read)
|
|
64
|
-
Create = create_model(create_name or "Create", **ann_create)
|
|
65
|
-
Update = create_model(update_name or "Update", **ann_update)
|
|
63
|
+
Read = create_model(read_name or "Read", **cast(dict[str, Any], ann_read))
|
|
64
|
+
Create = create_model(create_name or "Create", **cast(dict[str, Any], ann_create))
|
|
65
|
+
Update = create_model(update_name or "Update", **cast(dict[str, Any], ann_update))
|
|
66
66
|
|
|
67
67
|
cfg = ConfigDict(from_attributes=True)
|
|
68
68
|
if json_encoders:
|
svc_infra/db/inbox.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from typing import Protocol
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class InboxStore(Protocol):
    """Structural interface for an inbox (deduplication) key store.

    Implementations track processed message keys with a TTL so that
    duplicate deliveries can be detected and skipped.
    """

    def mark_if_new(self, key: str, ttl_seconds: int = 24 * 3600) -> bool:
        """Mark key as processed if not seen; return True if newly marked, False if duplicate."""
        ...

    def purge_expired(self) -> int:
        """Optional: remove expired keys, return number purged."""
        ...

    def is_marked(self, key: str) -> bool:
        """Return True if key is already marked (not expired), without modifying it."""
        ...
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class InMemoryInboxStore:
    """Process-local inbox store: maps each key to its expiry timestamp (epoch seconds)."""

    def __init__(self) -> None:
        self._keys: dict[str, float] = {}

    def mark_if_new(self, key: str, ttl_seconds: int = 24 * 3600) -> bool:
        """Mark *key* as processed; return False when it is a live (unexpired) duplicate."""
        now = time.time()
        expiry = self._keys.get(key)
        if expiry is not None and expiry > now:
            return False  # already marked and still within TTL
        self._keys[key] = now + ttl_seconds
        return True

    def purge_expired(self) -> int:
        """Drop every expired entry and return how many were removed."""
        now = time.time()
        expired = [key for key, expiry in self._keys.items() if expiry <= now]
        for key in expired:
            del self._keys[key]
        return len(expired)

    def is_marked(self, key: str) -> bool:
        """Return True when *key* is present and not yet expired (read-only check)."""
        expiry = self._keys.get(key)
        return expiry is not None and expiry > time.time()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class SqlInboxStore:
    """Skeleton for a SQL-backed inbox store (dedupe table).

    Implementations should:
    - INSERT key with expires_at if not exists (unique constraint)
    - Return False on duplicate key violations
    - Periodically DELETE expired rows
    """

    def __init__(self, session_factory):
        # Factory that produces DB sessions; intentionally untyped in this skeleton
        # (presumably a sessionmaker-style callable — confirm in implementations).
        self._session_factory = session_factory

    def mark_if_new(
        self, key: str, ttl_seconds: int = 24 * 3600
    ) -> bool:  # pragma: no cover - skeleton
        # Subclasses: attempt the dedupe INSERT and translate a unique-key
        # violation into a False return.
        raise NotImplementedError

    def purge_expired(self) -> int:  # pragma: no cover - skeleton
        # Subclasses: DELETE rows whose expires_at has passed; return rowcount.
        raise NotImplementedError

    def is_marked(self, key: str) -> bool:  # pragma: no cover - skeleton
        # Subclasses: read-only existence check for a live (unexpired) key.
        raise NotImplementedError
|