svc-infra 0.1.600__py3-none-any.whl → 0.1.640__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of svc-infra might be problematic. Click here for more details.
- svc_infra/api/fastapi/admin/__init__.py +3 -0
- svc_infra/api/fastapi/admin/add.py +231 -0
- svc_infra/api/fastapi/billing/router.py +64 -0
- svc_infra/api/fastapi/billing/setup.py +19 -0
- svc_infra/api/fastapi/db/sql/add.py +32 -13
- svc_infra/api/fastapi/db/sql/crud_router.py +178 -16
- svc_infra/api/fastapi/db/sql/session.py +16 -0
- svc_infra/api/fastapi/dependencies/ratelimit.py +57 -7
- svc_infra/api/fastapi/docs/add.py +160 -0
- svc_infra/api/fastapi/docs/landing.py +1 -1
- svc_infra/api/fastapi/middleware/errors/handlers.py +45 -7
- svc_infra/api/fastapi/middleware/graceful_shutdown.py +87 -0
- svc_infra/api/fastapi/middleware/ratelimit.py +59 -1
- svc_infra/api/fastapi/middleware/ratelimit_store.py +12 -6
- svc_infra/api/fastapi/middleware/timeout.py +148 -0
- svc_infra/api/fastapi/openapi/mutators.py +114 -0
- svc_infra/api/fastapi/ops/add.py +73 -0
- svc_infra/api/fastapi/pagination.py +3 -1
- svc_infra/api/fastapi/routers/ping.py +1 -0
- svc_infra/api/fastapi/setup.py +11 -1
- svc_infra/api/fastapi/tenancy/add.py +19 -0
- svc_infra/api/fastapi/tenancy/context.py +112 -0
- svc_infra/app/README.md +5 -5
- svc_infra/billing/__init__.py +23 -0
- svc_infra/billing/async_service.py +147 -0
- svc_infra/billing/jobs.py +230 -0
- svc_infra/billing/models.py +131 -0
- svc_infra/billing/quotas.py +101 -0
- svc_infra/billing/schemas.py +33 -0
- svc_infra/billing/service.py +115 -0
- svc_infra/bundled_docs/README.md +5 -0
- svc_infra/bundled_docs/__init__.py +1 -0
- svc_infra/bundled_docs/getting-started.md +6 -0
- svc_infra/cache/__init__.py +4 -0
- svc_infra/cache/add.py +158 -0
- svc_infra/cache/backend.py +5 -2
- svc_infra/cache/decorators.py +19 -1
- svc_infra/cache/keys.py +24 -4
- svc_infra/cli/__init__.py +28 -8
- svc_infra/cli/cmds/__init__.py +8 -0
- svc_infra/cli/cmds/db/nosql/mongo/mongo_cmds.py +4 -3
- svc_infra/cli/cmds/db/nosql/mongo/mongo_scaffold_cmds.py +4 -4
- svc_infra/cli/cmds/db/sql/alembic_cmds.py +80 -11
- svc_infra/cli/cmds/db/sql/sql_export_cmds.py +80 -0
- svc_infra/cli/cmds/db/sql/sql_scaffold_cmds.py +3 -3
- svc_infra/cli/cmds/docs/docs_cmds.py +140 -0
- svc_infra/cli/cmds/dx/__init__.py +12 -0
- svc_infra/cli/cmds/dx/dx_cmds.py +99 -0
- svc_infra/cli/cmds/help.py +4 -0
- svc_infra/cli/cmds/obs/obs_cmds.py +4 -3
- svc_infra/cli/cmds/sdk/__init__.py +0 -0
- svc_infra/cli/cmds/sdk/sdk_cmds.py +102 -0
- svc_infra/data/add.py +61 -0
- svc_infra/data/backup.py +53 -0
- svc_infra/data/erasure.py +45 -0
- svc_infra/data/fixtures.py +40 -0
- svc_infra/data/retention.py +55 -0
- svc_infra/db/nosql/mongo/README.md +13 -13
- svc_infra/db/sql/repository.py +51 -11
- svc_infra/db/sql/resource.py +5 -0
- svc_infra/db/sql/templates/setup/env_async.py.tmpl +9 -1
- svc_infra/db/sql/templates/setup/env_sync.py.tmpl +9 -2
- svc_infra/db/sql/tenant.py +79 -0
- svc_infra/db/sql/utils.py +18 -4
- svc_infra/docs/acceptance-matrix.md +71 -0
- svc_infra/docs/acceptance.md +44 -0
- svc_infra/docs/admin.md +425 -0
- svc_infra/docs/adr/0002-background-jobs-and-scheduling.md +40 -0
- svc_infra/docs/adr/0003-webhooks-framework.md +24 -0
- svc_infra/docs/adr/0004-tenancy-model.md +42 -0
- svc_infra/docs/adr/0005-data-lifecycle.md +86 -0
- svc_infra/docs/adr/0006-ops-slos-and-metrics.md +47 -0
- svc_infra/docs/adr/0007-docs-and-sdks.md +83 -0
- svc_infra/docs/adr/0008-billing-primitives.md +143 -0
- svc_infra/docs/adr/0009-acceptance-harness.md +40 -0
- svc_infra/docs/adr/0010-timeouts-and-resource-limits.md +54 -0
- svc_infra/docs/adr/0011-admin-scope-and-impersonation.md +73 -0
- svc_infra/docs/api.md +59 -0
- svc_infra/docs/auth.md +11 -0
- svc_infra/docs/billing.md +190 -0
- svc_infra/docs/cache.md +76 -0
- svc_infra/docs/cli.md +74 -0
- svc_infra/docs/contributing.md +34 -0
- svc_infra/docs/data-lifecycle.md +52 -0
- svc_infra/docs/database.md +14 -0
- svc_infra/docs/docs-and-sdks.md +62 -0
- svc_infra/docs/environment.md +114 -0
- svc_infra/docs/getting-started.md +63 -0
- svc_infra/docs/idempotency.md +111 -0
- svc_infra/docs/jobs.md +67 -0
- svc_infra/docs/observability.md +16 -0
- svc_infra/docs/ops.md +37 -0
- svc_infra/docs/rate-limiting.md +125 -0
- svc_infra/docs/repo-review.md +48 -0
- svc_infra/docs/security.md +176 -0
- svc_infra/docs/tenancy.md +35 -0
- svc_infra/docs/timeouts-and-resource-limits.md +147 -0
- svc_infra/docs/webhooks.md +112 -0
- svc_infra/dx/add.py +63 -0
- svc_infra/dx/changelog.py +74 -0
- svc_infra/dx/checks.py +67 -0
- svc_infra/http/__init__.py +13 -0
- svc_infra/http/client.py +72 -0
- svc_infra/jobs/builtins/webhook_delivery.py +14 -2
- svc_infra/jobs/queue.py +9 -1
- svc_infra/jobs/runner.py +75 -0
- svc_infra/jobs/worker.py +17 -1
- svc_infra/mcp/svc_infra_mcp.py +85 -28
- svc_infra/obs/add.py +54 -7
- svc_infra/obs/grafana/dashboards/http-overview.json +45 -0
- svc_infra/security/headers.py +15 -2
- svc_infra/security/hibp.py +6 -2
- svc_infra/security/permissions.py +1 -0
- svc_infra/webhooks/service.py +10 -2
- {svc_infra-0.1.600.dist-info → svc_infra-0.1.640.dist-info}/METADATA +40 -14
- {svc_infra-0.1.600.dist-info → svc_infra-0.1.640.dist-info}/RECORD +118 -44
- {svc_infra-0.1.600.dist-info → svc_infra-0.1.640.dist-info}/WHEEL +0 -0
- {svc_infra-0.1.600.dist-info → svc_infra-0.1.640.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import sys
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, Optional
|
|
9
|
+
|
|
10
|
+
import typer
|
|
11
|
+
from sqlalchemy import text
|
|
12
|
+
|
|
13
|
+
from svc_infra.db.sql.utils import build_engine
|
|
14
|
+
|
|
15
|
+
try: # SQLAlchemy async extras are optional
|
|
16
|
+
from sqlalchemy.ext.asyncio import AsyncEngine
|
|
17
|
+
except Exception: # pragma: no cover - fallback when async extras unavailable
|
|
18
|
+
AsyncEngine = None # type: ignore[assignment]
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def export_tenant(
    table: str = typer.Argument(..., help="Qualified table name to export (e.g., public.items)"),
    tenant_id: str = typer.Option(..., "--tenant-id", help="Tenant id value to filter by."),
    tenant_field: str = typer.Option("tenant_id", help="Column name for tenant id filter."),
    output: Optional[Path] = typer.Option(
        None, "--output", help="Output file; defaults to stdout."
    ),
    limit: Optional[int] = typer.Option(None, help="Max rows to export."),
    database_url: Optional[str] = typer.Option(
        None, "--database-url", help="Overrides env SQL_URL for this command."
    ),
):
    """Export rows for a tenant from a given SQL table as a JSON array.

    Identifiers (table and column names) cannot be bound parameters, so they
    are interpolated into the statement; to prevent SQL injection from CLI
    arguments they are first validated against a strict identifier pattern.
    The tenant id and limit values ARE passed as bound parameters.
    """
    import re  # local: only needed for identifier validation

    if database_url:
        os.environ["SQL_URL"] = database_url

    url = os.getenv("SQL_URL")
    if not url:
        typer.echo("SQL_URL is required (or pass --database-url)", err=True)
        raise typer.Exit(code=2)

    # Reject anything that is not a plain (optionally schema-qualified)
    # identifier before interpolating into SQL.
    ident = r"[A-Za-z_][A-Za-z0-9_]*"
    if not re.fullmatch(rf"{ident}(\.{ident})?", table):
        typer.echo(f"Invalid table name: {table!r}", err=True)
        raise typer.Exit(code=2)
    if not re.fullmatch(ident, tenant_field):
        typer.echo(f"Invalid tenant field: {tenant_field!r}", err=True)
        raise typer.Exit(code=2)

    engine = build_engine(url)

    query = f"SELECT * FROM {table} WHERE {tenant_field} = :tenant_id"
    params: dict[str, Any] = {"tenant_id": tenant_id}
    if limit and limit > 0:
        query += " LIMIT :limit"
        params["limit"] = int(limit)

    stmt = text(query)

    is_async_engine = AsyncEngine is not None and isinstance(engine, AsyncEngine)

    rows: list[dict[str, Any]]
    if is_async_engine:

        async def _fetch() -> list[dict[str, Any]]:
            # Dispose the engine so connections are returned even on error.
            try:
                async with engine.connect() as conn:  # type: ignore[call-arg]
                    result = await conn.execute(stmt, params)
                    return [dict(row) for row in result.mappings()]
            finally:
                await engine.dispose()

        rows = asyncio.run(_fetch())
    else:
        try:
            with engine.connect() as conn:  # type: ignore[attr-defined]
                result = conn.execute(stmt, params)
                rows = [dict(row) for row in result.mappings()]
        finally:
            engine.dispose()

    data = json.dumps(rows, indent=2)
    if output:
        output.write_text(data)
        typer.echo(str(output))
    else:
        sys.stdout.write(data)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def register(app_root: typer.Typer) -> None:
    """Attach the tenant-export command directly to the provided 'sql' group app."""
    app_root.command("export-tenant")(export_tenant)
|
|
@@ -134,6 +134,6 @@ def cmd_scaffold_schemas(
|
|
|
134
134
|
|
|
135
135
|
|
|
136
136
|
def register(app: typer.Typer) -> None:
    """Attach the scaffold commands to the given Typer group."""
    for cmd_name, fn in (
        ("scaffold", cmd_scaffold),
        ("scaffold-models", cmd_scaffold_models),
        ("scaffold-schemas", cmd_scaffold_schemas),
    ):
        app.command(cmd_name)(fn)
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
from importlib.resources import as_file
|
|
5
|
+
from importlib.resources import files as pkg_files
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Dict, List
|
|
8
|
+
|
|
9
|
+
import click
|
|
10
|
+
import typer
|
|
11
|
+
from typer.core import TyperGroup
|
|
12
|
+
|
|
13
|
+
from svc_infra.app.root import resolve_project_root
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _norm(name: str) -> str:
|
|
17
|
+
return name.strip().lower().replace(" ", "-").replace("_", "-")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _discover_fs_topics(docs_dir: Path) -> Dict[str, Path]:
    """Map normalized topic names to *.md files found directly in docs_dir."""
    if not (docs_dir.exists() and docs_dir.is_dir()):
        return {}
    return {
        _norm(candidate.stem): candidate
        for candidate in sorted(docs_dir.glob("*.md"))
        if candidate.is_file()
    }
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _discover_pkg_topics() -> Dict[str, Path]:
|
|
30
|
+
"""
|
|
31
|
+
Discover docs shipped inside the installed package at svc_infra/docs/*,
|
|
32
|
+
using importlib.resources so this works for wheels, sdists, and zipped wheels.
|
|
33
|
+
"""
|
|
34
|
+
topics: Dict[str, Path] = {}
|
|
35
|
+
try:
|
|
36
|
+
docs_root = pkg_files("svc_infra").joinpath("docs")
|
|
37
|
+
# docs_root is a Traversable; it may be inside a zip. Iterate safely.
|
|
38
|
+
for entry in docs_root.iterdir():
|
|
39
|
+
if entry.name.endswith(".md"):
|
|
40
|
+
# materialize to a real tempfile path if needed
|
|
41
|
+
with as_file(entry) as concrete:
|
|
42
|
+
p = Path(concrete)
|
|
43
|
+
if p.exists() and p.is_file():
|
|
44
|
+
topics[_norm(p.stem)] = p
|
|
45
|
+
except Exception:
|
|
46
|
+
# If the package has no docs directory, just return empty.
|
|
47
|
+
pass
|
|
48
|
+
return topics
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def _resolve_docs_dir(ctx: click.Context) -> Path | None:
|
|
52
|
+
"""
|
|
53
|
+
Optional override precedence:
|
|
54
|
+
1) SVC_INFRA_DOCS_DIR env var
|
|
55
|
+
2) *Only when working inside the svc-infra repo itself*: repo-root /docs
|
|
56
|
+
"""
|
|
57
|
+
# 1) Env var
|
|
58
|
+
env_dir = os.getenv("SVC_INFRA_DOCS_DIR")
|
|
59
|
+
if env_dir:
|
|
60
|
+
p = Path(env_dir).expanduser()
|
|
61
|
+
if p.exists():
|
|
62
|
+
return p
|
|
63
|
+
|
|
64
|
+
# 2) In-repo convenience (so `svc-infra docs` works inside this repo)
|
|
65
|
+
try:
|
|
66
|
+
root = resolve_project_root()
|
|
67
|
+
proj_docs = root / "docs"
|
|
68
|
+
if proj_docs.exists():
|
|
69
|
+
return proj_docs
|
|
70
|
+
except Exception:
|
|
71
|
+
pass
|
|
72
|
+
|
|
73
|
+
return None
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class DocsGroup(TyperGroup):
    """Click group that exposes each docs topic as a dynamic subcommand.

    Topics are discovered from an optional filesystem docs dir (env override /
    in-repo docs) and from docs bundled in the installed package; on name
    clashes the filesystem copy wins.
    """

    def list_commands(self, ctx: click.Context) -> List[str]:
        names: List[str] = list(super().list_commands(ctx) or [])
        dir_to_use = _resolve_docs_dir(ctx)
        fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
        pkg = _discover_pkg_topics()
        names.extend(fs.keys())
        names.extend(k for k in pkg.keys() if k not in fs)
        return sorted(set(names))

    def get_command(self, ctx: click.Context, name: str) -> click.Command | None:
        # Static commands (e.g. 'show') take precedence over dynamic topics.
        cmd = super().get_command(ctx, name)
        if cmd is not None:
            return cmd

        key = _norm(name)

        # Filesystem topics first, then package-bundled ones. The original
        # built two identical click commands here; one closure suffices.
        dir_to_use = _resolve_docs_dir(ctx)
        fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
        file_path = fs.get(key)
        if file_path is None:
            file_path = _discover_pkg_topics().get(key)
        if file_path is None:
            return None

        @click.command(name=name)
        def _show() -> None:
            click.echo(file_path.read_text(encoding="utf-8", errors="replace"))

        return _show
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def register(app: typer.Typer) -> None:
    """Mount the 'docs' sub-app: dynamic topic commands plus an explicit 'show'."""
    docs_app = typer.Typer(no_args_is_help=True, add_completion=False, cls=DocsGroup)

    @docs_app.callback(invoke_without_command=True)
    def _docs_options() -> None:
        # No group-level options; dynamic commands and 'show' handle topics.
        return None

    @docs_app.command("show", help="Show docs for a topic (alternative to dynamic subcommand)")
    def show(topic: str) -> None:
        key = _norm(topic)
        dir_to_use = _resolve_docs_dir(click.get_current_context())
        fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
        # Filesystem topic wins; fall back to the package-bundled copy.
        source = fs.get(key)
        if source is None:
            source = _discover_pkg_topics().get(key)
        if source is None:
            raise typer.BadParameter(f"Unknown topic: {topic}")
        typer.echo(source.read_text(encoding="utf-8", errors="replace"))

    app.add_typer(docs_app, name="docs")
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
import typer
|
|
7
|
+
|
|
8
|
+
from svc_infra.dx.changelog import Commit, generate_release_section
|
|
9
|
+
from svc_infra.dx.checks import check_migrations_up_to_date, check_openapi_problem_schema
|
|
10
|
+
|
|
11
|
+
app = typer.Typer(no_args_is_help=True, add_completion=False)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@app.command("openapi")
|
|
15
|
+
def cmd_openapi(path: str = typer.Argument(..., help="Path to OpenAPI JSON")):
|
|
16
|
+
try:
|
|
17
|
+
check_openapi_problem_schema(path=path)
|
|
18
|
+
except Exception as e: # noqa: BLE001
|
|
19
|
+
typer.secho(f"OpenAPI check failed: {e}", fg=typer.colors.RED, err=True)
|
|
20
|
+
raise typer.Exit(2)
|
|
21
|
+
typer.secho("OpenAPI checks passed", fg=typer.colors.GREEN)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@app.command("migrations")
|
|
25
|
+
def cmd_migrations(project_root: str = typer.Option(".", help="Project root")):
|
|
26
|
+
try:
|
|
27
|
+
check_migrations_up_to_date(project_root=project_root)
|
|
28
|
+
except Exception as e: # noqa: BLE001
|
|
29
|
+
typer.secho(f"Migrations check failed: {e}", fg=typer.colors.RED, err=True)
|
|
30
|
+
raise typer.Exit(2)
|
|
31
|
+
typer.secho("Migrations checks passed", fg=typer.colors.GREEN)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@app.command("changelog")
|
|
35
|
+
def cmd_changelog(
|
|
36
|
+
version: str = typer.Argument(..., help="Version (e.g., 0.1.604)"),
|
|
37
|
+
commits_file: str = typer.Option(None, help="Path to JSON lines of commits (sha,subject)"),
|
|
38
|
+
):
|
|
39
|
+
"""Generate a changelog section from commit messages.
|
|
40
|
+
|
|
41
|
+
Expects Conventional Commits style for best grouping; falls back to Other.
|
|
42
|
+
If commits_file is omitted, prints an example format.
|
|
43
|
+
"""
|
|
44
|
+
import json
|
|
45
|
+
import sys
|
|
46
|
+
|
|
47
|
+
if not commits_file:
|
|
48
|
+
typer.echo(
|
|
49
|
+
'# Provide --commits-file with JSONL: {"sha": "<sha>", "subject": "feat: ..."}',
|
|
50
|
+
err=True,
|
|
51
|
+
)
|
|
52
|
+
raise typer.Exit(2)
|
|
53
|
+
rows = [
|
|
54
|
+
json.loads(line) for line in Path(commits_file).read_text().splitlines() if line.strip()
|
|
55
|
+
]
|
|
56
|
+
commits = [Commit(sha=r["sha"], subject=r["subject"]) for r in rows]
|
|
57
|
+
out = generate_release_section(version=version, commits=commits)
|
|
58
|
+
sys.stdout.write(out)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@app.command("ci")
|
|
62
|
+
def cmd_ci(
|
|
63
|
+
run: bool = typer.Option(False, help="Execute the steps; default just prints a plan"),
|
|
64
|
+
openapi: str | None = typer.Option(None, help="Path to OpenAPI JSON to lint"),
|
|
65
|
+
project_root: str = typer.Option(".", help="Project root for migrations check"),
|
|
66
|
+
):
|
|
67
|
+
"""Print (or run) the CI steps locally to mirror the workflow."""
|
|
68
|
+
steps: list[list[str]] = []
|
|
69
|
+
# Lint, typecheck, tests
|
|
70
|
+
steps.append(["flake8", "--select=E,F"]) # mirrors CI
|
|
71
|
+
steps.append(["mypy", "src"]) # mirrors CI
|
|
72
|
+
if openapi:
|
|
73
|
+
steps.append([sys.executable, "-m", "svc_infra.cli", "dx", "openapi", openapi])
|
|
74
|
+
steps.append(
|
|
75
|
+
[sys.executable, "-m", "svc_infra.cli", "dx", "migrations", "--project-root", project_root]
|
|
76
|
+
)
|
|
77
|
+
steps.append(["pytest", "-q", "-W", "error"]) # mirrors CI
|
|
78
|
+
|
|
79
|
+
if not run:
|
|
80
|
+
typer.echo("CI dry-run plan:")
|
|
81
|
+
for cmd in steps:
|
|
82
|
+
typer.echo(" $ " + " ".join(cmd))
|
|
83
|
+
return
|
|
84
|
+
|
|
85
|
+
import subprocess
|
|
86
|
+
|
|
87
|
+
for cmd in steps:
|
|
88
|
+
typer.echo("Running: " + " ".join(cmd))
|
|
89
|
+
res = subprocess.run(cmd)
|
|
90
|
+
if res.returncode != 0:
|
|
91
|
+
raise typer.Exit(res.returncode)
|
|
92
|
+
typer.echo("All CI steps passed")
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def main():  # pragma: no cover - CLI entrypoint
    """Console-script entrypoint: delegate to the module-level Typer app."""
    app()


__all__ = ["main", "app"]
|
svc_infra/cli/cmds/help.py
CHANGED
|
@@ -21,4 +21,8 @@ How to run (pick what fits your workflow):
|
|
|
21
21
|
Notes:
|
|
22
22
|
* Make sure you’re in the right virtual environment (or use `pipx`).
|
|
23
23
|
* You can point `--project-root` at your Alembic root; if omitted we auto-detect.
|
|
24
|
+
|
|
25
|
+
Learn more:
|
|
26
|
+
* Explore available topics: `svc-infra docs --help`
|
|
27
|
+
* Show a topic directly: `svc-infra docs <topic>` or `svc-infra docs show <topic>`
|
|
24
28
|
"""
|
|
@@ -182,6 +182,7 @@ def scaffold(target: str = typer.Option(..., help="compose|railway|k8s|fly")):
|
|
|
182
182
|
|
|
183
183
|
|
|
184
184
|
def register(app: typer.Typer) -> None:
    """Attach the observability commands to the provided 'obs' group app."""
    for cmd_name, fn in (("up", up), ("down", down), ("scaffold", scaffold)):
        app.command(cmd_name)(fn)
|
|
File without changes
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import subprocess
|
|
4
|
+
|
|
5
|
+
import typer
|
|
6
|
+
|
|
7
|
+
app = typer.Typer(no_args_is_help=True, add_completion=False, help="Generate SDKs from OpenAPI.")
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _echo(cmd: list[str]):
    """Print a shell-style preview ('$ cmd ...') of the command, for dry runs."""
    typer.echo("$ " + " ".join(cmd))
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _parse_bool(val: str | bool | None, default: bool = True) -> bool:
|
|
15
|
+
if isinstance(val, bool):
|
|
16
|
+
return val
|
|
17
|
+
if val is None:
|
|
18
|
+
return default
|
|
19
|
+
s = str(val).strip().lower()
|
|
20
|
+
if s in {"1", "true", "yes", "y"}:
|
|
21
|
+
return True
|
|
22
|
+
if s in {"0", "false", "no", "n"}:
|
|
23
|
+
return False
|
|
24
|
+
return default
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@app.command("ts")
|
|
28
|
+
def sdk_ts(
|
|
29
|
+
openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
|
|
30
|
+
outdir: str = typer.Option("sdk-ts", help="Output directory"),
|
|
31
|
+
dry_run: str = typer.Option("true", help="Print commands instead of running (true/false)"),
|
|
32
|
+
):
|
|
33
|
+
"""Generate a TypeScript SDK (openapi-typescript-codegen as default)."""
|
|
34
|
+
cmd = [
|
|
35
|
+
"npx",
|
|
36
|
+
"openapi-typescript-codegen",
|
|
37
|
+
"--input",
|
|
38
|
+
openapi,
|
|
39
|
+
"--output",
|
|
40
|
+
outdir,
|
|
41
|
+
]
|
|
42
|
+
if _parse_bool(dry_run, True):
|
|
43
|
+
_echo(cmd)
|
|
44
|
+
return
|
|
45
|
+
subprocess.check_call(cmd)
|
|
46
|
+
typer.secho(f"TS SDK generated → {outdir}", fg=typer.colors.GREEN)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@app.command("py")
|
|
50
|
+
def sdk_py(
|
|
51
|
+
openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
|
|
52
|
+
outdir: str = typer.Option("sdk-py", help="Output directory"),
|
|
53
|
+
package_name: str = typer.Option("client_sdk", help="Python package name"),
|
|
54
|
+
dry_run: str = typer.Option("true", help="Print commands instead of running (true/false)"),
|
|
55
|
+
):
|
|
56
|
+
"""Generate a Python SDK via openapi-generator-cli with "python" generator."""
|
|
57
|
+
cmd = [
|
|
58
|
+
"npx",
|
|
59
|
+
"-y",
|
|
60
|
+
"@openapitools/openapi-generator-cli",
|
|
61
|
+
"generate",
|
|
62
|
+
"-i",
|
|
63
|
+
openapi,
|
|
64
|
+
"-g",
|
|
65
|
+
"python",
|
|
66
|
+
"-o",
|
|
67
|
+
outdir,
|
|
68
|
+
"--additional-properties",
|
|
69
|
+
f"packageName={package_name}",
|
|
70
|
+
]
|
|
71
|
+
if _parse_bool(dry_run, True):
|
|
72
|
+
_echo(cmd)
|
|
73
|
+
return
|
|
74
|
+
subprocess.check_call(cmd)
|
|
75
|
+
typer.secho(f"Python SDK generated → {outdir}", fg=typer.colors.GREEN)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@app.command("postman")
|
|
79
|
+
def sdk_postman(
|
|
80
|
+
openapi: str = typer.Argument(..., help="Path to OpenAPI JSON"),
|
|
81
|
+
out: str = typer.Option("postman_collection.json", help="Output Postman collection"),
|
|
82
|
+
dry_run: str = typer.Option("true", help="Print commands instead of running (true/false)"),
|
|
83
|
+
):
|
|
84
|
+
"""Convert OpenAPI to a Postman collection via openapi-to-postmanv2."""
|
|
85
|
+
cmd = [
|
|
86
|
+
"npx",
|
|
87
|
+
"-y",
|
|
88
|
+
"openapi-to-postmanv2",
|
|
89
|
+
"-s",
|
|
90
|
+
openapi,
|
|
91
|
+
"-o",
|
|
92
|
+
out,
|
|
93
|
+
]
|
|
94
|
+
if _parse_bool(dry_run, True):
|
|
95
|
+
_echo(cmd)
|
|
96
|
+
return
|
|
97
|
+
subprocess.check_call(cmd)
|
|
98
|
+
typer.secho(f"Postman collection generated → {out}", fg=typer.colors.GREEN)
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def register(root: typer.Typer):
    """Mount the SDK-generation commands under the 'sdk' subcommand group."""
    root.add_typer(app, name="sdk")
|
svc_infra/data/add.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from typing import Callable, Iterable, Optional
|
|
5
|
+
|
|
6
|
+
from fastapi import FastAPI
|
|
7
|
+
|
|
8
|
+
from svc_infra.cli.cmds.db.sql.alembic_cmds import cmd_setup_and_migrate
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def add_data_lifecycle(
    app: FastAPI,
    *,
    auto_migrate: bool = True,
    database_url: str | None = None,
    discover_packages: Optional[list[str]] = None,
    with_payments: bool | None = None,
    on_load_fixtures: Optional[Callable[[], None]] = None,
    retention_jobs: Optional[Iterable[Callable[[], None]]] = None,
    erasure_job: Optional[Callable[[str], None]] = None,
) -> None:
    """
    Wire data lifecycle conveniences:

    - auto_migrate: run end-to-end Alembic setup-and-migrate on startup (idempotent).
    - on_load_fixtures: optional callback to load reference/fixture data once at startup.
      May be sync or async; an awaitable result is awaited.
    - retention_jobs: optional list of callables to register purge tasks (scheduler integration is external).
    - erasure_job: optional callable to trigger a GDPR erasure workflow for a given principal ID.

    This helper is intentionally minimal: it coordinates existing building blocks
    and offers extension points. Jobs should be scheduled using svc_infra.jobs helpers.
    """

    async def _run_lifecycle() -> None:
        # Startup: migrations first, then fixtures, so fixtures see the final schema.
        if auto_migrate:
            cmd_setup_and_migrate(
                database_url=database_url,
                overwrite_scaffold=False,
                create_db_if_missing=True,
                create_followup_revision=True,
                initial_message="initial schema",
                followup_message="autogen",
                discover_packages=discover_packages,
                with_payments=with_payments,
            )
        if on_load_fixtures:
            res = on_load_fixtures()
            # Support both sync and async fixture loaders.
            if inspect.isawaitable(res):
                await res  # type: ignore[misc]

    app.add_event_handler("startup", _run_lifecycle)

    # Store optional jobs on app.state for external schedulers to discover/register.
    if retention_jobs is not None:
        app.state.data_retention_jobs = list(retention_jobs)
    if erasure_job is not None:
        app.state.data_erasure_job = erasure_job


__all__ = ["add_data_lifecycle"]
|
svc_infra/data/backup.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
from typing import Callable, Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass(frozen=True)
class BackupHealthReport:
    # Overall verdict of the backup check.
    ok: bool
    # Timestamp of the most recent successful backup, if one was observed.
    last_success: Optional[datetime]
    # Configured retention window in days (None = no freshness requirement).
    retention_days: Optional[int]
    # Optional machine-readable reason (e.g. "no_backup_seen").
    message: str = ""


def verify_backups(
    *, last_success: Optional[datetime] = None, retention_days: Optional[int] = None
) -> BackupHealthReport:
    """Return a basic backup health report.

    In production, callers should plug a provider-specific checker and translate into this report.
    """
    if last_success is None:
        return BackupHealthReport(
            ok=False, last_success=None, retention_days=retention_days, message="no_backup_seen"
        )
    elapsed = datetime.now(timezone.utc) - last_success
    age_days = elapsed.total_seconds() / 86400.0
    # Healthy when no retention window is configured, or the latest backup is
    # within the window (floored at one day).
    healthy = retention_days is None or age_days <= max(1, retention_days)
    return BackupHealthReport(ok=healthy, last_success=last_success, retention_days=retention_days)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
__all__ = ["BackupHealthReport", "verify_backups"]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def make_backup_verification_job(
    checker: Callable[[], BackupHealthReport],
    *,
    on_report: Optional[Callable[[BackupHealthReport], None]] = None,
):
    """Return a callable suitable for scheduling in a job runner.

    The checker should perform provider-specific checks and return a
    BackupHealthReport. If on_report is provided, it is invoked with each
    report before the report is returned.
    """

    def _job() -> BackupHealthReport:
        report = checker()
        if on_report:
            on_report(report)
        return report

    return _job
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass
|
|
4
|
+
from typing import Any, Awaitable, Callable, Iterable, Optional, Protocol
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class SqlSession(Protocol):  # minimal protocol for tests/integration
    # Structural stand-in for an async session: anything exposing an
    # awaitable ``execute(stmt)`` satisfies this protocol.
    async def execute(self, stmt: Any) -> Any:
        pass
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass(frozen=True)
class ErasureStep:
    """One named unit of erasure work; run returns the affected-row count."""

    name: str
    run: Callable[[SqlSession, str], Awaitable[int] | int]


@dataclass(frozen=True)
class ErasurePlan:
    """An ordered collection of erasure steps."""

    steps: Iterable[ErasureStep]


async def run_erasure(
    session: SqlSession,
    principal_id: str,
    plan: ErasurePlan,
    *,
    on_audit: Optional[Callable[[str, dict[str, Any]], None]] = None,
) -> int:
    """Run an erasure plan and optionally emit an audit event.

    Steps may be sync or async. Returns total affected rows across steps.
    """
    total = 0
    for step in plan.steps:
        outcome = step.run(session, principal_id)
        # Steps may return either an int or an awaitable of int.
        if hasattr(outcome, "__await__"):
            outcome = await outcome  # type: ignore[misc]
        total += int(outcome or 0)
    if on_audit:
        on_audit("erasure.completed", {"principal_id": principal_id, "affected": total})
    return total


__all__ = ["ErasureStep", "ErasurePlan", "run_erasure"]
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Awaitable, Callable, Iterable, Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
async def run_fixtures(
    loaders: Iterable[Callable[[], None | Awaitable[None]]], *, run_once_file: Optional[str] = None
) -> None:
    """Run a sequence of fixture loaders (sync or async).

    - If run_once_file is provided and exists, does nothing.
    - On success, creates the run_once_file sentinel (parent dirs included).
    """
    sentinel = Path(run_once_file) if run_once_file else None
    if sentinel is not None and sentinel.exists():
        return
    for loader in loaders:
        outcome = loader()
        if inspect.isawaitable(outcome):  # type: ignore[arg-type]
            await outcome  # type: ignore[misc]
    if sentinel is not None:
        # Only mark the sentinel once every loader has completed.
        sentinel.parent.mkdir(parents=True, exist_ok=True)
        sentinel.write_text("ok")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def make_on_load_fixtures(
    *loaders: Callable[[], None | Awaitable[None]], run_once_file: Optional[str] = None
) -> Callable[[], Awaitable[None]]:
    """Return an async callable suitable for add_data_lifecycle(on_load_fixtures=...)."""

    async def _runner() -> None:
        # All sequencing and run-once handling lives in run_fixtures.
        await run_fixtures(loaders, run_once_file=run_once_file)

    return _runner


__all__ = ["run_fixtures", "make_on_load_fixtures"]
|