svc-infra 0.1.506__py3-none-any.whl → 0.1.654__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- svc_infra/apf_payments/README.md +732 -0
- svc_infra/apf_payments/alembic.py +11 -0
- svc_infra/apf_payments/models.py +339 -0
- svc_infra/apf_payments/provider/__init__.py +4 -0
- svc_infra/apf_payments/provider/aiydan.py +797 -0
- svc_infra/apf_payments/provider/base.py +270 -0
- svc_infra/apf_payments/provider/registry.py +31 -0
- svc_infra/apf_payments/provider/stripe.py +873 -0
- svc_infra/apf_payments/schemas.py +333 -0
- svc_infra/apf_payments/service.py +892 -0
- svc_infra/apf_payments/settings.py +67 -0
- svc_infra/api/fastapi/__init__.py +6 -0
- svc_infra/api/fastapi/admin/__init__.py +3 -0
- svc_infra/api/fastapi/admin/add.py +231 -0
- svc_infra/api/fastapi/apf_payments/__init__.py +0 -0
- svc_infra/api/fastapi/apf_payments/router.py +1082 -0
- svc_infra/api/fastapi/apf_payments/setup.py +73 -0
- svc_infra/api/fastapi/auth/add.py +15 -6
- svc_infra/api/fastapi/auth/gaurd.py +67 -5
- svc_infra/api/fastapi/auth/mfa/router.py +18 -9
- svc_infra/api/fastapi/auth/routers/account.py +3 -2
- svc_infra/api/fastapi/auth/routers/apikey_router.py +11 -5
- svc_infra/api/fastapi/auth/routers/oauth_router.py +82 -37
- svc_infra/api/fastapi/auth/routers/session_router.py +63 -0
- svc_infra/api/fastapi/auth/security.py +3 -1
- svc_infra/api/fastapi/auth/settings.py +2 -0
- svc_infra/api/fastapi/auth/state.py +1 -1
- svc_infra/api/fastapi/billing/router.py +64 -0
- svc_infra/api/fastapi/billing/setup.py +19 -0
- svc_infra/api/fastapi/cache/add.py +9 -5
- svc_infra/api/fastapi/db/nosql/mongo/add.py +33 -27
- svc_infra/api/fastapi/db/sql/add.py +40 -18
- svc_infra/api/fastapi/db/sql/crud_router.py +176 -14
- svc_infra/api/fastapi/db/sql/session.py +16 -0
- svc_infra/api/fastapi/db/sql/users.py +14 -2
- svc_infra/api/fastapi/dependencies/ratelimit.py +116 -0
- svc_infra/api/fastapi/docs/add.py +160 -0
- svc_infra/api/fastapi/docs/landing.py +1 -1
- svc_infra/api/fastapi/docs/scoped.py +254 -0
- svc_infra/api/fastapi/dual/dualize.py +38 -33
- svc_infra/api/fastapi/dual/router.py +48 -1
- svc_infra/api/fastapi/dx.py +3 -3
- svc_infra/api/fastapi/http/__init__.py +0 -0
- svc_infra/api/fastapi/http/concurrency.py +14 -0
- svc_infra/api/fastapi/http/conditional.py +33 -0
- svc_infra/api/fastapi/http/deprecation.py +21 -0
- svc_infra/api/fastapi/middleware/errors/handlers.py +45 -7
- svc_infra/api/fastapi/middleware/graceful_shutdown.py +87 -0
- svc_infra/api/fastapi/middleware/idempotency.py +116 -0
- svc_infra/api/fastapi/middleware/idempotency_store.py +187 -0
- svc_infra/api/fastapi/middleware/optimistic_lock.py +37 -0
- svc_infra/api/fastapi/middleware/ratelimit.py +119 -0
- svc_infra/api/fastapi/middleware/ratelimit_store.py +84 -0
- svc_infra/api/fastapi/middleware/request_id.py +23 -0
- svc_infra/api/fastapi/middleware/request_size_limit.py +36 -0
- svc_infra/api/fastapi/middleware/timeout.py +148 -0
- svc_infra/api/fastapi/openapi/mutators.py +768 -55
- svc_infra/api/fastapi/ops/add.py +73 -0
- svc_infra/api/fastapi/pagination.py +363 -0
- svc_infra/api/fastapi/paths/auth.py +14 -14
- svc_infra/api/fastapi/paths/prefix.py +0 -1
- svc_infra/api/fastapi/paths/user.py +1 -1
- svc_infra/api/fastapi/routers/ping.py +1 -0
- svc_infra/api/fastapi/setup.py +48 -15
- svc_infra/api/fastapi/tenancy/add.py +19 -0
- svc_infra/api/fastapi/tenancy/context.py +112 -0
- svc_infra/api/fastapi/versioned.py +101 -0
- svc_infra/app/README.md +5 -5
- svc_infra/billing/__init__.py +23 -0
- svc_infra/billing/async_service.py +147 -0
- svc_infra/billing/jobs.py +230 -0
- svc_infra/billing/models.py +131 -0
- svc_infra/billing/quotas.py +101 -0
- svc_infra/billing/schemas.py +33 -0
- svc_infra/billing/service.py +115 -0
- svc_infra/bundled_docs/README.md +5 -0
- svc_infra/bundled_docs/__init__.py +1 -0
- svc_infra/bundled_docs/getting-started.md +6 -0
- svc_infra/cache/__init__.py +4 -0
- svc_infra/cache/add.py +158 -0
- svc_infra/cache/backend.py +5 -2
- svc_infra/cache/decorators.py +19 -1
- svc_infra/cache/keys.py +24 -4
- svc_infra/cli/__init__.py +32 -8
- svc_infra/cli/__main__.py +4 -0
- svc_infra/cli/cmds/__init__.py +10 -0
- svc_infra/cli/cmds/db/nosql/mongo/mongo_cmds.py +4 -3
- svc_infra/cli/cmds/db/nosql/mongo/mongo_scaffold_cmds.py +4 -4
- svc_infra/cli/cmds/db/sql/alembic_cmds.py +120 -14
- svc_infra/cli/cmds/db/sql/sql_export_cmds.py +80 -0
- svc_infra/cli/cmds/db/sql/sql_scaffold_cmds.py +5 -4
- svc_infra/cli/cmds/docs/docs_cmds.py +140 -0
- svc_infra/cli/cmds/dx/__init__.py +12 -0
- svc_infra/cli/cmds/dx/dx_cmds.py +99 -0
- svc_infra/cli/cmds/help.py +4 -0
- svc_infra/cli/cmds/jobs/__init__.py +1 -0
- svc_infra/cli/cmds/jobs/jobs_cmds.py +43 -0
- svc_infra/cli/cmds/obs/obs_cmds.py +4 -3
- svc_infra/cli/cmds/sdk/__init__.py +0 -0
- svc_infra/cli/cmds/sdk/sdk_cmds.py +102 -0
- svc_infra/data/add.py +61 -0
- svc_infra/data/backup.py +53 -0
- svc_infra/data/erasure.py +45 -0
- svc_infra/data/fixtures.py +40 -0
- svc_infra/data/retention.py +55 -0
- svc_infra/db/inbox.py +67 -0
- svc_infra/db/nosql/mongo/README.md +13 -13
- svc_infra/db/outbox.py +104 -0
- svc_infra/db/sql/apikey.py +1 -1
- svc_infra/db/sql/authref.py +61 -0
- svc_infra/db/sql/core.py +2 -2
- svc_infra/db/sql/repository.py +52 -12
- svc_infra/db/sql/resource.py +5 -0
- svc_infra/db/sql/scaffold.py +16 -4
- svc_infra/db/sql/templates/models_schemas/auth/schemas.py.tmpl +1 -1
- svc_infra/db/sql/templates/setup/env_async.py.tmpl +199 -76
- svc_infra/db/sql/templates/setup/env_sync.py.tmpl +231 -79
- svc_infra/db/sql/tenant.py +79 -0
- svc_infra/db/sql/utils.py +18 -4
- svc_infra/db/sql/versioning.py +14 -0
- svc_infra/docs/acceptance-matrix.md +71 -0
- svc_infra/docs/acceptance.md +44 -0
- svc_infra/docs/admin.md +425 -0
- svc_infra/docs/adr/0002-background-jobs-and-scheduling.md +40 -0
- svc_infra/docs/adr/0003-webhooks-framework.md +24 -0
- svc_infra/docs/adr/0004-tenancy-model.md +42 -0
- svc_infra/docs/adr/0005-data-lifecycle.md +86 -0
- svc_infra/docs/adr/0006-ops-slos-and-metrics.md +47 -0
- svc_infra/docs/adr/0007-docs-and-sdks.md +83 -0
- svc_infra/docs/adr/0008-billing-primitives.md +143 -0
- svc_infra/docs/adr/0009-acceptance-harness.md +40 -0
- svc_infra/docs/adr/0010-timeouts-and-resource-limits.md +54 -0
- svc_infra/docs/adr/0011-admin-scope-and-impersonation.md +73 -0
- svc_infra/docs/api.md +59 -0
- svc_infra/docs/auth.md +11 -0
- svc_infra/docs/billing.md +190 -0
- svc_infra/docs/cache.md +76 -0
- svc_infra/docs/cli.md +74 -0
- svc_infra/docs/contributing.md +34 -0
- svc_infra/docs/data-lifecycle.md +52 -0
- svc_infra/docs/database.md +14 -0
- svc_infra/docs/docs-and-sdks.md +62 -0
- svc_infra/docs/environment.md +114 -0
- svc_infra/docs/getting-started.md +63 -0
- svc_infra/docs/idempotency.md +111 -0
- svc_infra/docs/jobs.md +67 -0
- svc_infra/docs/observability.md +16 -0
- svc_infra/docs/ops.md +37 -0
- svc_infra/docs/rate-limiting.md +125 -0
- svc_infra/docs/repo-review.md +48 -0
- svc_infra/docs/security.md +176 -0
- svc_infra/docs/tenancy.md +35 -0
- svc_infra/docs/timeouts-and-resource-limits.md +147 -0
- svc_infra/docs/versioned-integrations.md +146 -0
- svc_infra/docs/webhooks.md +112 -0
- svc_infra/dx/add.py +63 -0
- svc_infra/dx/changelog.py +74 -0
- svc_infra/dx/checks.py +67 -0
- svc_infra/http/__init__.py +13 -0
- svc_infra/http/client.py +72 -0
- svc_infra/jobs/builtins/outbox_processor.py +38 -0
- svc_infra/jobs/builtins/webhook_delivery.py +90 -0
- svc_infra/jobs/easy.py +32 -0
- svc_infra/jobs/loader.py +45 -0
- svc_infra/jobs/queue.py +81 -0
- svc_infra/jobs/redis_queue.py +191 -0
- svc_infra/jobs/runner.py +75 -0
- svc_infra/jobs/scheduler.py +41 -0
- svc_infra/jobs/worker.py +40 -0
- svc_infra/mcp/svc_infra_mcp.py +85 -28
- svc_infra/obs/README.md +2 -0
- svc_infra/obs/add.py +54 -7
- svc_infra/obs/grafana/dashboards/http-overview.json +45 -0
- svc_infra/obs/metrics/__init__.py +53 -0
- svc_infra/obs/metrics.py +52 -0
- svc_infra/security/add.py +201 -0
- svc_infra/security/audit.py +130 -0
- svc_infra/security/audit_service.py +73 -0
- svc_infra/security/headers.py +52 -0
- svc_infra/security/hibp.py +95 -0
- svc_infra/security/jwt_rotation.py +53 -0
- svc_infra/security/lockout.py +96 -0
- svc_infra/security/models.py +255 -0
- svc_infra/security/org_invites.py +128 -0
- svc_infra/security/passwords.py +77 -0
- svc_infra/security/permissions.py +149 -0
- svc_infra/security/session.py +98 -0
- svc_infra/security/signed_cookies.py +80 -0
- svc_infra/webhooks/__init__.py +16 -0
- svc_infra/webhooks/add.py +322 -0
- svc_infra/webhooks/fastapi.py +37 -0
- svc_infra/webhooks/router.py +55 -0
- svc_infra/webhooks/service.py +67 -0
- svc_infra/webhooks/signing.py +30 -0
- svc_infra-0.1.654.dist-info/METADATA +154 -0
- svc_infra-0.1.654.dist-info/RECORD +352 -0
- svc_infra/api/fastapi/deps.py +0 -3
- svc_infra-0.1.506.dist-info/METADATA +0 -78
- svc_infra-0.1.506.dist-info/RECORD +0 -213
- /svc_infra/{api/fastapi/schemas → apf_payments}/__init__.py +0 -0
- {svc_infra-0.1.506.dist-info → svc_infra-0.1.654.dist-info}/WHEEL +0 -0
- {svc_infra-0.1.506.dist-info → svc_infra-0.1.654.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from typing import Optional
|
|
5
|
+
|
|
6
|
+
import typer
|
|
7
|
+
|
|
8
|
+
from svc_infra.jobs.easy import easy_jobs
|
|
9
|
+
from svc_infra.jobs.loader import schedule_from_env
|
|
10
|
+
from svc_infra.jobs.worker import process_one
|
|
11
|
+
|
|
12
|
+
app = typer.Typer(help="Background jobs and scheduler commands")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@app.command("run")
def run(
    poll_interval: float = typer.Option(0.5, help="Sleep seconds between loops when idle"),
    max_loops: Optional[int] = typer.Option(None, help="Max loops before exit (for tests)"),
):
    """Run scheduler ticks and process jobs in a simple loop."""

    queue, scheduler = easy_jobs()
    # Pick up any schedule described in the environment (JSON), if present.
    schedule_from_env(scheduler)

    async def _noop_handler(job):
        # Default handler does nothing; users should write their own runners
        return None

    async def _loop():
        iterations = 0
        while True:
            await scheduler.tick()
            if not await process_one(queue, _noop_handler):
                # Queue was empty this pass — back off briefly before polling again.
                await asyncio.sleep(poll_interval)
            if max_loops is None:
                continue
            iterations += 1
            if iterations >= max_loops:
                break

    asyncio.run(_loop())
|
|
@@ -182,6 +182,7 @@ def scaffold(target: str = typer.Option(..., help="compose|railway|k8s|fly")):
|
|
|
182
182
|
|
|
183
183
|
|
|
184
184
|
def register(app: typer.Typer) -> None:
    """Attach the obs commands ('up', 'down', 'scaffold') to the given group app."""
    # Attach to 'obs' group app
    for cmd_name, cmd_fn in (("up", up), ("down", down), ("scaffold", scaffold)):
        app.command(cmd_name)(cmd_fn)
|
|
File without changes
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import subprocess
|
|
4
|
+
|
|
5
|
+
import typer
|
|
6
|
+
|
|
7
|
+
app = typer.Typer(no_args_is_help=True, add_completion=False, help="Generate SDKs from OpenAPI.")
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _echo(cmd: list[str]):
    """Print a shell-style preview ('$ ...') of the command without running it."""
    rendered = " ".join(cmd)
    typer.echo("$ " + rendered)
|
14
|
+
def _parse_bool(val: str | bool | None, default: bool = True) -> bool:
|
|
15
|
+
if isinstance(val, bool):
|
|
16
|
+
return val
|
|
17
|
+
if val is None:
|
|
18
|
+
return default
|
|
19
|
+
s = str(val).strip().lower()
|
|
20
|
+
if s in {"1", "true", "yes", "y"}:
|
|
21
|
+
return True
|
|
22
|
+
if s in {"0", "false", "no", "n"}:
|
|
23
|
+
return False
|
|
24
|
+
return default
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@app.command("ts")
def sdk_ts(
    openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
    outdir: str = typer.Option("sdk-ts", help="Output directory"),
    dry_run: str = typer.Option("true", help="Print commands instead of running (true/false)"),
):
    """Generate a TypeScript SDK (openapi-typescript-codegen as default)."""
    # Build the npx invocation up front so the dry-run preview matches exactly.
    command = ["npx", "openapi-typescript-codegen", "--input", openapi, "--output", outdir]
    if not _parse_bool(dry_run, True):
        subprocess.check_call(command)
        typer.secho(f"TS SDK generated → {outdir}", fg=typer.colors.GREEN)
        return
    _echo(command)
+
|
|
49
|
+
@app.command("py")
def sdk_py(
    openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
    outdir: str = typer.Option("sdk-py", help="Output directory"),
    package_name: str = typer.Option("client_sdk", help="Python package name"),
    dry_run: str = typer.Option("true", help="Print commands instead of running (true/false)"),
):
    """Generate a Python SDK via openapi-generator-cli with "python" generator."""
    # Assemble the generator invocation; '-y' lets npx auto-install the CLI.
    command = ["npx", "-y", "@openapitools/openapi-generator-cli", "generate"]
    command += ["-i", openapi]
    command += ["-g", "python"]
    command += ["-o", outdir]
    command += ["--additional-properties", f"packageName={package_name}"]
    if not _parse_bool(dry_run, True):
        subprocess.check_call(command)
        typer.secho(f"Python SDK generated → {outdir}", fg=typer.colors.GREEN)
        return
    _echo(command)
77
|
+
|
|
78
|
+
@app.command("postman")
def sdk_postman(
    openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
    out: str = typer.Option("postman_collection.json", help="Output Postman collection"),
    dry_run: str = typer.Option("true", help="Print commands instead of running (true/false)"),
):
    """Convert OpenAPI to a Postman collection via openapi-to-postmanv2."""
    # '-s' is the source spec, '-o' the collection output path.
    command = ["npx", "-y", "openapi-to-postmanv2", "-s", openapi, "-o", out]
    if not _parse_bool(dry_run, True):
        subprocess.check_call(command)
        typer.secho(f"Postman collection generated → {out}", fg=typer.colors.GREEN)
        return
    _echo(command)
+
|
|
101
|
+
def register(root: typer.Typer):
    """Mount the SDK command group on the root CLI under the name 'sdk'."""
    root.add_typer(app, name="sdk")
|
svc_infra/data/add.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from typing import Callable, Iterable, Optional
|
|
5
|
+
|
|
6
|
+
from fastapi import FastAPI
|
|
7
|
+
|
|
8
|
+
from svc_infra.cli.cmds.db.sql.alembic_cmds import cmd_setup_and_migrate
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def add_data_lifecycle(
    app: FastAPI,
    *,
    auto_migrate: bool = True,
    database_url: str | None = None,
    discover_packages: Optional[list[str]] = None,
    with_payments: bool | None = None,
    on_load_fixtures: Optional[Callable[[], None]] = None,
    retention_jobs: Optional[Iterable[Callable[[], None]]] = None,
    erasure_job: Optional[Callable[[str], None]] = None,
) -> None:
    """
    Wire data lifecycle conveniences:

    - auto_migrate: run end-to-end Alembic setup-and-migrate on startup (idempotent).
    - on_load_fixtures: optional callback to load reference/fixture data once at startup.
    - retention_jobs: optional list of callables to register purge tasks (scheduler integration is external).
    - erasure_job: optional callable to trigger a GDPR erasure workflow for a given principal ID.

    This helper is intentionally minimal: it coordinates existing building blocks
    and offers extension points. Jobs should be scheduled using svc_infra.jobs helpers.
    """

    async def _run_lifecycle() -> None:
        # Startup
        if auto_migrate:
            # NOTE(review): cmd_setup_and_migrate appears to be a synchronous call; a
            # long migration would block the event loop during startup — confirm this
            # is acceptable for the deployments that enable auto_migrate.
            cmd_setup_and_migrate(
                database_url=database_url,
                overwrite_scaffold=False,
                create_db_if_missing=True,
                create_followup_revision=True,
                initial_message="initial schema",
                followup_message="autogen",
                discover_packages=discover_packages,
                with_payments=with_payments,
            )
        if on_load_fixtures:
            # The fixture callback may be sync or async; await only awaitables.
            res = on_load_fixtures()
            if inspect.isawaitable(res):
                await res  # type: ignore[misc]

    app.add_event_handler("startup", _run_lifecycle)

    # Store optional jobs on app.state for external schedulers to discover/register.
    if retention_jobs is not None:
        app.state.data_retention_jobs = list(retention_jobs)
    if erasure_job is not None:
        app.state.data_erasure_job = erasure_job
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
__all__ = ["add_data_lifecycle"]
|
svc_infra/data/backup.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from typing import Callable, Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass(frozen=True)
class BackupHealthReport:
    """Outcome of a backup health check."""

    ok: bool  # True when a recent-enough backup was observed
    last_success: Optional[datetime]  # timestamp of the most recent successful backup, if any
    retention_days: Optional[int]  # freshness window (days) the check was evaluated against
    message: str = ""  # machine-readable detail, e.g. "no_backup_seen"
|
+
|
|
16
|
+
def verify_backups(
    *, last_success: Optional[datetime] = None, retention_days: Optional[int] = None
) -> BackupHealthReport:
    """Build a basic backup health report.

    In production, callers should plug a provider-specific checker and translate into this report.
    """
    if last_success is None:
        return BackupHealthReport(
            ok=False, last_success=None, retention_days=retention_days, message="no_backup_seen"
        )
    age_days = (datetime.now(timezone.utc) - last_success).total_seconds() / 86400.0
    # Healthy when no window was given, or the backup age fits inside the window
    # (floored at one day so retention_days=0 cannot make every backup stale).
    fresh = retention_days is None or age_days <= max(1, retention_days)
    return BackupHealthReport(ok=fresh, last_success=last_success, retention_days=retention_days)
32
|
+
|
|
33
|
+
__all__ = ["BackupHealthReport", "verify_backups"]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def make_backup_verification_job(
    checker: Callable[[], BackupHealthReport],
    *,
    on_report: Optional[Callable[[BackupHealthReport], None]] = None,
):
    """Wrap *checker* into a zero-arg callable suitable for a job runner.

    The checker performs provider-specific checks and returns a BackupHealthReport;
    when *on_report* is supplied it observes each report before it is returned.
    """

    def _job() -> BackupHealthReport:
        report = checker()
        if on_report:
            on_report(report)
        return report

    return _job
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from typing import Any, Awaitable, Callable, Iterable, Optional, Protocol
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class SqlSession(Protocol):  # minimal protocol for tests/integration
    """Structural stand-in for an async SQLAlchemy-style session."""

    async def execute(self, stmt: Any) -> Any:
        """Execute a statement and return the driver result."""
        pass
|
+
|
|
12
|
+
@dataclass(frozen=True)
class ErasureStep:
    """One named unit of work inside an erasure plan."""

    name: str
    # Receives (session, principal_id); returns the affected-row count,
    # either directly or as an awaitable.
    run: Callable[[SqlSession, str], Awaitable[int] | int]
+
|
|
18
|
+
@dataclass(frozen=True)
class ErasurePlan:
    """Ordered collection of erasure steps executed by run_erasure."""

    steps: Iterable[ErasureStep]
|
22
|
+
|
|
23
|
+
async def run_erasure(
    session: SqlSession,
    principal_id: str,
    plan: ErasurePlan,
    *,
    on_audit: Optional[Callable[[str, dict[str, Any]], None]] = None,
) -> int:
    """Execute every step of *plan* for *principal_id*, optionally emitting an audit event.

    Returns the total affected-row count across all steps.
    """
    affected = 0
    for step in plan.steps:
        outcome = step.run(session, principal_id)
        # Steps may be sync or async; await only when an awaitable came back.
        if hasattr(outcome, "__await__"):
            outcome = await outcome  # type: ignore[misc]
        affected += int(outcome or 0)
    if on_audit:
        on_audit("erasure.completed", {"principal_id": principal_id, "affected": affected})
    return affected
45
|
+
__all__ = ["ErasureStep", "ErasurePlan", "run_erasure"]
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Awaitable, Callable, Iterable, Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
async def run_fixtures(
    loaders: Iterable[Callable[[], None | Awaitable[None]]], *, run_once_file: Optional[str] = None
) -> None:
    """Run a sequence of fixture loaders (sync or async) in order.

    - When run_once_file is given and already exists, this is a no-op.
    - After all loaders succeed, the sentinel file is created (parents included).
    """
    sentinel = Path(run_once_file) if run_once_file else None
    if sentinel is not None and sentinel.exists():
        return
    for loader in loaders:
        outcome = loader()
        if inspect.isawaitable(outcome):
            await outcome  # type: ignore[misc]
    if sentinel is not None:
        sentinel.parent.mkdir(parents=True, exist_ok=True)
        sentinel.write_text("ok")
|
+
|
|
29
|
+
def make_on_load_fixtures(
    *loaders: Callable[[], None | Awaitable[None]], run_once_file: Optional[str] = None
) -> Callable[[], Awaitable[None]]:
    """Bundle *loaders* into an async zero-arg callable for add_data_lifecycle(on_load_fixtures=...)."""

    async def _on_startup() -> None:
        await run_fixtures(loaders, run_once_file=run_once_file)

    return _on_startup
|
+
|
|
40
|
+
__all__ = ["run_fixtures", "make_on_load_fixtures"]
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from datetime import datetime, timedelta, timezone
|
|
5
|
+
from typing import Any, Iterable, Optional, Protocol, Sequence
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class SqlSession(Protocol):  # minimal protocol for tests/integration
    """Structural stand-in for an async SQLAlchemy-style session."""

    async def execute(self, stmt: Any) -> Any:
        """Execute a statement and return the driver result."""
        pass
+
|
|
13
|
+
@dataclass(frozen=True)
class RetentionPolicy:
    """Declarative description of one data-retention purge."""

    name: str
    model: Any  # SQLAlchemy model or test double exposing columns
    older_than_days: int  # rows whose created_at is older than this are purged
    soft_delete_field: Optional[str] = "deleted_at"  # timestamp column set instead of deleting, when present
    extra_where: Optional[Sequence[Any]] = None  # additional WHERE clauses ANDed onto the purge
    hard_delete: bool = False  # force DELETE even when a soft-delete column exists
|
+
|
|
23
|
+
async def purge_policy(session: SqlSession, policy: RetentionPolicy) -> int:
    """Execute a single retention purge according to *policy*.

    When hard_delete is False and the model exposes soft_delete_field, the column is
    stamped with the cutoff timestamp; otherwise rows are deleted. Returns the number
    of affected rows (best-effort; test doubles may omit rowcount).
    """
    cutoff = datetime.now(timezone.utc) - timedelta(days=policy.older_than_days)
    model = policy.model
    conditions = list(policy.extra_where or [])
    created_at = getattr(model, "created_at", None)
    # NOTE(review): hasattr(x, "__le__") is true for nearly every Python object, so
    # this guard mostly documents intent (the column must support comparison).
    if created_at is not None and hasattr(created_at, "__le__"):
        conditions.append(created_at <= cutoff)  # type: ignore[operator]

    soft_field = policy.soft_delete_field
    if not policy.hard_delete and soft_field and hasattr(model, soft_field):
        # Soft-delete path when available and requested
        stmt = model.update().where(*conditions).values({soft_field: cutoff})  # type: ignore[attr-defined]
    else:
        # Hard delete fallback
        stmt = model.delete().where(*conditions)  # type: ignore[attr-defined]
    result = await session.execute(stmt)
    return getattr(result, "rowcount", 0)
|
+
|
|
48
|
+
async def run_retention_purge(session: SqlSession, policies: Iterable[RetentionPolicy]) -> int:
    """Apply each retention policy in turn and return the summed affected-row count."""
    affected = 0
    for policy in policies:
        affected += await purge_policy(session, policy)
    return affected
|
+
|
|
55
|
+
__all__ = ["RetentionPolicy", "purge_policy", "run_retention_purge"]
|
svc_infra/db/inbox.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import time
|
|
4
|
+
from typing import Protocol
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class InboxStore(Protocol):
    """Structural interface for inbox (idempotent-consumer) dedupe stores."""

    def mark_if_new(self, key: str, ttl_seconds: int = 24 * 3600) -> bool:
        """Mark key as processed if not seen; return True if newly marked, False if duplicate."""
        ...

    def purge_expired(self) -> int:
        """Optional: remove expired keys, return number purged."""
        ...

    def is_marked(self, key: str) -> bool:
        """Return True if key is already marked (not expired), without modifying it."""
        ...
|
+
|
|
21
|
+
class InMemoryInboxStore:
    """Process-local inbox store for tests and single-process runs.

    Keys map to their absolute expiry time (epoch seconds); expired keys may be
    re-marked and are reclaimed lazily via purge_expired().
    """

    def __init__(self) -> None:
        # key -> absolute expiry timestamp (epoch seconds)
        self._keys: dict[str, float] = {}

    def mark_if_new(self, key: str, ttl_seconds: int = 24 * 3600) -> bool:
        """Record *key*; return False when it is already marked and unexpired."""
        now = time.time()
        expiry = self._keys.get(key)
        if expiry is not None and expiry > now:
            return False
        self._keys[key] = now + ttl_seconds
        return True

    def purge_expired(self) -> int:
        """Drop every expired key and return how many were removed."""
        now = time.time()
        expired = [k for k, expiry in self._keys.items() if expiry <= now]
        for k in expired:
            del self._keys[k]
        return len(expired)

    def is_marked(self, key: str) -> bool:
        """Report whether *key* is currently marked (unexpired), without side effects."""
        expiry = self._keys.get(key)
        return expiry is not None and expiry > time.time()
|
+
|
|
46
|
+
class SqlInboxStore:
    """Skeleton for a SQL-backed inbox store (dedupe table).

    Implementations should:
    - INSERT key with expires_at if not exists (unique constraint)
    - Return False on duplicate key violations
    - Periodically DELETE expired rows
    """

    def __init__(self, session_factory):
        # Factory that produces DB sessions; kept for concrete implementations.
        self._session_factory = session_factory

    def mark_if_new(
        self, key: str, ttl_seconds: int = 24 * 3600
    ) -> bool:  # pragma: no cover - skeleton
        raise NotImplementedError

    def purge_expired(self) -> int:  # pragma: no cover - skeleton
        raise NotImplementedError

    def is_marked(self, key: str) -> bool:  # pragma: no cover - skeleton
        raise NotImplementedError
|
|
@@ -29,17 +29,17 @@ We provide four CLI commands. You can register them on your Typer app or invoke
|
|
|
29
29
|
|
|
30
30
|
### Commands
|
|
31
31
|
|
|
32
|
-
- `mongo
|
|
33
|
-
- `mongo
|
|
34
|
-
- `mongo
|
|
35
|
-
- `mongo
|
|
32
|
+
- `mongo scaffold` — create both document **and** CRUD schemas
|
|
33
|
+
- `mongo scaffold-documents` — create only the **document** model (Pydantic)
|
|
34
|
+
- `mongo scaffold-schemas` — create only the **CRUD schemas**
|
|
35
|
+
- `mongo scaffold-resources` — create a starter `resources.py` with a `RESOURCES` list
|
|
36
36
|
|
|
37
37
|
### Typical usage
|
|
38
38
|
|
|
39
39
|
#### A) Scaffold documents + schemas together
|
|
40
40
|
|
|
41
41
|
```bash
|
|
42
|
-
yourapp mongo
|
|
42
|
+
yourapp mongo scaffold \
|
|
43
43
|
--entity-name Product \
|
|
44
44
|
--documents-dir ./src/your_app/products \
|
|
45
45
|
--schemas-dir ./src/your_app/products \
|
|
@@ -57,7 +57,7 @@ src/your_app/products/schemas.py # ProductRead/ProductCreate/ProductUpdate
|
|
|
57
57
|
B) Documents only
|
|
58
58
|
|
|
59
59
|
```bash
|
|
60
|
-
yourapp mongo
|
|
60
|
+
yourapp mongo scaffold-documents \
|
|
61
61
|
--dest-dir ./src/your_app/products \
|
|
62
62
|
--entity-name Product \
|
|
63
63
|
--documents-filename product_doc.py
|
|
@@ -66,7 +66,7 @@ yourapp mongo-scaffold-documents \
|
|
|
66
66
|
C) Schemas only
|
|
67
67
|
|
|
68
68
|
```bash
|
|
69
|
-
yourapp mongo
|
|
69
|
+
yourapp mongo scaffold-schemas \
|
|
70
70
|
--dest-dir ./src/your_app/products \
|
|
71
71
|
--entity-name Product \
|
|
72
72
|
--schemas-filename product_schemas.py
|
|
@@ -75,7 +75,7 @@ yourapp mongo-scaffold-schemas \
|
|
|
75
75
|
D) Starter resources.py
|
|
76
76
|
|
|
77
77
|
```bash
|
|
78
|
-
yourapp mongo
|
|
78
|
+
yourapp mongo scaffold-resources \
|
|
79
79
|
--dest-dir ./src/your_app/mongo \
|
|
80
80
|
--filename resources.py \
|
|
81
81
|
--overwrite
|
|
@@ -131,7 +131,7 @@ There are two flavors:
|
|
|
131
131
|
A) Async, minimal (connect, create collections, apply indexes)
|
|
132
132
|
|
|
133
133
|
```bash
|
|
134
|
-
yourapp mongo
|
|
134
|
+
yourapp mongo prepare \
|
|
135
135
|
--resources your_app.mongo.resources:RESOURCES \
|
|
136
136
|
--mongo-url "$MONGO_URL" \
|
|
137
137
|
--mongo-db "$MONGO_DB"
|
|
@@ -140,7 +140,7 @@ yourapp mongo-prepare \
|
|
|
140
140
|
B) Synchronous wrapper (end-to-end convenience)
|
|
141
141
|
|
|
142
142
|
```bash
|
|
143
|
-
yourapp mongo
|
|
143
|
+
yourapp mongo setup-and-prepare \
|
|
144
144
|
--resources your_app.mongo.resources:RESOURCES \
|
|
145
145
|
--mongo-url "$MONGO_URL" \
|
|
146
146
|
--mongo-db "$MONGO_DB"
|
|
@@ -149,7 +149,7 @@ yourapp mongo-setup-and-prepare \
|
|
|
149
149
|
You can also ping connectivity:
|
|
150
150
|
|
|
151
151
|
```bash
|
|
152
|
-
yourapp mongo
|
|
152
|
+
yourapp mongo ping --mongo-url "$MONGO_URL" --mongo-db "$MONGO_DB"
|
|
153
153
|
```
|
|
154
154
|
|
|
155
155
|
Behind the scenes, preparation also locks a service ID to a DB name to prevent accidental cross-DB usage. You can pass --allow-rebind if you intentionally move environments.
|
|
@@ -430,9 +430,9 @@ NoSqlResource(
|
|
|
430
430
|
• If using explicit schemas with PyObjectId, make sure model_config.json_encoders includes {PyObjectId: str}.
|
|
431
431
|
• When using auto-schemas, we expose ObjectId-like fields as str so no custom encoder is needed.
|
|
432
432
|
• Connected to wrong DB name
|
|
433
|
-
|
|
433
|
+
• The system locks a service_id to the DB name once prepared. If you change DBs, run `mongo prepare` with --allow-rebind.
|
|
434
434
|
• Indexes not created
|
|
435
|
-
|
|
435
|
+
• Double-check RESOURCES[indexes]. Run `mongo prepare` again and inspect the output dictionary of created indexes.
|
|
436
436
|
|
|
437
437
|
⸻
|
|
438
438
|
|
svc_infra/db/outbox.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from typing import Any, Dict, Iterable, List, Optional, Protocol
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass
class OutboxMessage:
    """A single transactional-outbox record awaiting dispatch."""

    id: int  # store-assigned identifier
    topic: str
    payload: Dict[str, Any]
    created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    attempts: int = 0  # attempts > 0 means already dispatched to the job queue
    processed_at: Optional[datetime] = None  # set once processing completed
|
17
|
+
|
|
18
|
+
class OutboxStore(Protocol):
|
|
19
|
+
def enqueue(self, topic: str, payload: Dict[str, Any]) -> OutboxMessage:
|
|
20
|
+
pass
|
|
21
|
+
|
|
22
|
+
def fetch_next(self, *, topics: Optional[Iterable[str]] = None) -> Optional[OutboxMessage]:
|
|
23
|
+
"""Return the next undispatched, unprocessed message (FIFO per-topic), or None.
|
|
24
|
+
|
|
25
|
+
Notes:
|
|
26
|
+
- Messages with attempts > 0 are considered "dispatched" to the job queue and won't be re-enqueued.
|
|
27
|
+
- Delivery retries are handled by the job queue worker, not by re-reading the outbox.
|
|
28
|
+
"""
|
|
29
|
+
pass
|
|
30
|
+
|
|
31
|
+
def mark_processed(self, msg_id: int) -> None:
|
|
32
|
+
pass
|
|
33
|
+
|
|
34
|
+
def mark_failed(self, msg_id: int) -> None:
|
|
35
|
+
pass
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class InMemoryOutboxStore:
|
|
39
|
+
"""Simple in-memory outbox for tests and local runs."""
|
|
40
|
+
|
|
41
|
+
def __init__(self):
|
|
42
|
+
self._seq = 0
|
|
43
|
+
self._messages: List[OutboxMessage] = []
|
|
44
|
+
|
|
45
|
+
def enqueue(self, topic: str, payload: Dict[str, Any]) -> OutboxMessage:
|
|
46
|
+
self._seq += 1
|
|
47
|
+
msg = OutboxMessage(id=self._seq, topic=topic, payload=dict(payload))
|
|
48
|
+
self._messages.append(msg)
|
|
49
|
+
return msg
|
|
50
|
+
|
|
51
|
+
def fetch_next(self, *, topics: Optional[Iterable[str]] = None) -> Optional[OutboxMessage]:
|
|
52
|
+
allowed = set(topics) if topics else None
|
|
53
|
+
for msg in self._messages:
|
|
54
|
+
if msg.processed_at is not None:
|
|
55
|
+
continue
|
|
56
|
+
# skip already dispatched messages (attempts>0)
|
|
57
|
+
if msg.attempts > 0:
|
|
58
|
+
continue
|
|
59
|
+
if allowed is not None and msg.topic not in allowed:
|
|
60
|
+
continue
|
|
61
|
+
return msg
|
|
62
|
+
return None
|
|
63
|
+
|
|
64
|
+
def mark_processed(self, msg_id: int) -> None:
|
|
65
|
+
for msg in self._messages:
|
|
66
|
+
if msg.id == msg_id:
|
|
67
|
+
msg.processed_at = datetime.now(timezone.utc)
|
|
68
|
+
return
|
|
69
|
+
|
|
70
|
+
def mark_failed(self, msg_id: int) -> None:
|
|
71
|
+
for msg in self._messages:
|
|
72
|
+
if msg.id == msg_id:
|
|
73
|
+
msg.attempts += 1
|
|
74
|
+
return
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class SqlOutboxStore:
|
|
78
|
+
"""Skeleton for a SQL-backed outbox store.
|
|
79
|
+
|
|
80
|
+
Implementations should:
|
|
81
|
+
- INSERT on enqueue
|
|
82
|
+
- SELECT FOR UPDATE SKIP LOCKED (or equivalent) to fetch next
|
|
83
|
+
- UPDATE processed_at (and attempts on failure)
|
|
84
|
+
"""
|
|
85
|
+
|
|
86
|
+
def __init__(self, session_factory):
|
|
87
|
+
self._session_factory = session_factory
|
|
88
|
+
|
|
89
|
+
# Placeholders to outline the API; not implemented here.
|
|
90
|
+
def enqueue(
|
|
91
|
+
self, topic: str, payload: Dict[str, Any]
|
|
92
|
+
) -> OutboxMessage: # pragma: no cover - skeleton
|
|
93
|
+
raise NotImplementedError
|
|
94
|
+
|
|
95
|
+
def fetch_next(
|
|
96
|
+
self, *, topics: Optional[Iterable[str]] = None
|
|
97
|
+
) -> Optional[OutboxMessage]: # pragma: no cover - skeleton
|
|
98
|
+
raise NotImplementedError
|
|
99
|
+
|
|
100
|
+
def mark_processed(self, msg_id: int) -> None: # pragma: no cover - skeleton
|
|
101
|
+
raise NotImplementedError
|
|
102
|
+
|
|
103
|
+
def mark_failed(self, msg_id: int) -> None: # pragma: no cover - skeleton
|
|
104
|
+
raise NotImplementedError
|
svc_infra/db/sql/apikey.py
CHANGED
|
@@ -40,7 +40,7 @@ def get_apikey_model() -> type:
|
|
|
40
40
|
def bind_apikey_model(user_model: Type, *, table_name: str = "api_keys") -> type:
|
|
41
41
|
"""
|
|
42
42
|
Create and register an ApiKey model bound to the provided user_model and table name.
|
|
43
|
-
Call this once during app boot (e.g., inside
|
|
43
|
+
Call this once during app boot (e.g., inside add_auth_users when enable_api_keys=True).
|
|
44
44
|
"""
|
|
45
45
|
|
|
46
46
|
class ApiKey(ModelBase): # type: ignore[misc, valid-type]
|