svc-infra 0.1.621__py3-none-any.whl → 0.1.623__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of svc-infra might be problematic.

Files changed (33)
  1. svc_infra/cli/cmds/docs/docs_cmds.py +102 -179
  2. svc_infra/docs/acceptance-matrix.md +71 -0
  3. svc_infra/docs/acceptance.md +44 -0
  4. svc_infra/docs/adr/0002-background-jobs-and-scheduling.md +40 -0
  5. svc_infra/docs/adr/0003-webhooks-framework.md +24 -0
  6. svc_infra/docs/adr/0004-tenancy-model.md +42 -0
  7. svc_infra/docs/adr/0005-data-lifecycle.md +86 -0
  8. svc_infra/docs/adr/0006-ops-slos-and-metrics.md +47 -0
  9. svc_infra/docs/adr/0007-docs-and-sdks.md +83 -0
  10. svc_infra/docs/adr/0008-billing-primitives.md +109 -0
  11. svc_infra/docs/adr/0009-acceptance-harness.md +40 -0
  12. svc_infra/docs/api.md +59 -0
  13. svc_infra/docs/auth.md +11 -0
  14. svc_infra/docs/cache.md +18 -0
  15. svc_infra/docs/cli.md +74 -0
  16. svc_infra/docs/contributing.md +34 -0
  17. svc_infra/docs/data-lifecycle.md +52 -0
  18. svc_infra/docs/database.md +14 -0
  19. svc_infra/docs/docs-and-sdks.md +62 -0
  20. svc_infra/docs/environment.md +114 -0
  21. svc_infra/docs/idempotency.md +111 -0
  22. svc_infra/docs/jobs.md +67 -0
  23. svc_infra/docs/observability.md +16 -0
  24. svc_infra/docs/ops.md +33 -0
  25. svc_infra/docs/rate-limiting.md +121 -0
  26. svc_infra/docs/repo-review.md +48 -0
  27. svc_infra/docs/security.md +155 -0
  28. svc_infra/docs/tenancy.md +35 -0
  29. svc_infra/docs/webhooks.md +112 -0
  30. {svc_infra-0.1.621.dist-info → svc_infra-0.1.623.dist-info}/METADATA +16 -16
  31. {svc_infra-0.1.621.dist-info → svc_infra-0.1.623.dist-info}/RECORD +33 -5
  32. {svc_infra-0.1.621.dist-info → svc_infra-0.1.623.dist-info}/WHEEL +0 -0
  33. {svc_infra-0.1.621.dist-info → svc_infra-0.1.623.dist-info}/entry_points.txt +0 -0
@@ -1,9 +1,8 @@
  from __future__ import annotations
 
- import importlib.util
  import os
- import sys
- from importlib.metadata import PackageNotFoundError, distribution
+ from importlib.resources import as_file
+ from importlib.resources import files as pkg_files
  from pathlib import Path
  from typing import Dict, List
 
@@ -11,14 +10,10 @@ import click
  import typer
  from typer.core import TyperGroup
 
+ from svc_infra.app.root import resolve_project_root
 
- def _norm(name: str) -> str:
- """Normalize a topic name for stable CLI commands.
 
- - Lowercase
- - Replace spaces and underscores with hyphens
- - Strip leading/trailing whitespace
- """
+ def _norm(name: str) -> str:
  return name.strip().lower().replace(" ", "-").replace("_", "-")
 
 
@@ -32,190 +27,95 @@ def _discover_fs_topics(docs_dir: Path) -> Dict[str, Path]:
 
 
  def _discover_pkg_topics() -> Dict[str, Path]:
- """Discover docs packaged under 'docs/' in the installed distribution.
-
- Works in external projects without a local docs/ by inspecting the wheel
- metadata and, as a fallback, searching for a top-level docs/ next to the
- installed package directory in site-packages.
+ """
+ Discover docs shipped inside the installed package at svc_infra/docs/*,
+ using importlib.resources so this works for wheels, sdists, and zipped wheels.
  """
  topics: Dict[str, Path] = {}
-
- # 1) Prefer distribution metadata (RECORD) for both hyphen/underscore names
- dist = None
- for name in ("svc-infra", "svc_infra"):
- try:
- dist = distribution(name)
- break
- except PackageNotFoundError:
- dist = None
-
- if dist is not None:
- files = getattr(dist, "files", None) or []
- for f in files:
- s = str(f)
- if not s.startswith("docs/") or not s.endswith(".md"):
- continue
- topic_name = _norm(Path(s).stem)
- try:
- abs_path = Path(dist.locate_file(f))
- if abs_path.exists() and abs_path.is_file():
- topics[topic_name] = abs_path
- except Exception:
- # Best effort; continue to next
- continue
-
- # 2) Fallback: site-packages sibling 'docs/' directory (and repo-root docs in editable installs)
  try:
- spec = importlib.util.find_spec("svc_infra")
- if spec and spec.submodule_search_locations:
- pkg_dir = Path(next(iter(spec.submodule_search_locations)))
- candidates = [
- pkg_dir.parent / "docs", # site-packages/docs OR src/docs
- pkg_dir / "docs", # site-packages/svc_infra/docs OR src/svc_infra/docs
- pkg_dir.parent.parent
- / "docs", # repo-root/docs when running editable from repo (src/svc_infra → ../../docs)
- ]
- for candidate in candidates:
- if candidate.exists() and candidate.is_dir():
- for p in sorted(candidate.glob("*.md")):
- if p.is_file():
- topics.setdefault(_norm(p.stem), p)
- # If one candidate had docs, that's sufficient
- if any(k for k in topics):
- break
- except Exception:
- # Optional fallback only
- pass
-
- # 3) Last-resort: scan sys.path entries that look like site-/dist-packages for a top-level docs/
- # directory containing markdown files. This covers non-standard installs/editable modes.
- try:
- if not topics:
- for entry in sys.path:
- try:
- if not entry or ("site-packages" not in entry and "dist-packages" not in entry):
- continue
- docs_dir = Path(entry) / "docs"
- if docs_dir.exists() and docs_dir.is_dir():
- found = _discover_fs_topics(docs_dir)
- if found:
- # Merge but do not override anything already found
- for k, v in found.items():
- topics.setdefault(k, v)
- # If we found one valid docs dir, it's enough
- break
- except Exception:
- continue
+ docs_root = pkg_files("svc_infra").joinpath("docs")
+ # docs_root is a Traversable; it may be inside a zip. Iterate safely.
+ for entry in docs_root.iterdir():
+ if entry.name.endswith(".md"):
+ # materialize to a real tempfile path if needed
+ with as_file(entry) as concrete:
+ p = Path(concrete)
+ if p.exists() and p.is_file():
+ topics[_norm(p.stem)] = p
  except Exception:
+ # If the package has no docs directory, just return empty.
  pass
+ return topics
 
- # 4) Parse dist-info/RECORD or egg-info/SOURCES.txt to enumerate docs if available
- try:
- if not topics:
- spec = importlib.util.find_spec("svc_infra")
- base_dir: Path | None = None
- if spec and spec.submodule_search_locations:
- base_dir = Path(next(iter(spec.submodule_search_locations))).parent
- # Fallback to first site-packages on sys.path
- if base_dir is None:
- for entry in sys.path:
- if entry and "site-packages" in entry:
- base_dir = Path(entry)
- break
- if base_dir and base_dir.exists():
- # Check for both hyphen and underscore dist-info names
- candidates = list(base_dir.glob("svc_infra-*.dist-info")) + list(
- base_dir.glob("svc-infra-*.dist-info")
- )
- for di in candidates:
- record = di / "RECORD"
- if record.exists():
- try:
- for line in record.read_text(
- encoding="utf-8", errors="ignore"
- ).splitlines():
- rel = line.split(",", 1)[0]
- if rel.startswith("docs/") and rel.endswith(".md"):
- abs_p = base_dir / rel
- if abs_p.exists() and abs_p.is_file():
- topics.setdefault(_norm(Path(rel).stem), abs_p)
- except Exception:
- continue
- # egg-info fallback
- if not topics:
- egg_candidates = list(base_dir.glob("svc_infra-*.egg-info")) + list(
- base_dir.glob("svc-infra-*.egg-info")
- )
- for ei in egg_candidates:
- sources = ei / "SOURCES.txt"
- if sources.exists():
- try:
- for rel in sources.read_text(
- encoding="utf-8", errors="ignore"
- ).splitlines():
- rel = rel.strip()
- if rel.startswith("docs/") and rel.endswith(".md"):
- abs_p = base_dir / rel
- if abs_p.exists() and abs_p.is_file():
- topics.setdefault(_norm(Path(rel).stem), abs_p)
- except Exception:
- continue
- except Exception:
- pass
 
- # 5) Deep fallback: recursively search site-packages/dist-packages for any 'docs' folder
- # containing markdown files (limited depth to keep overhead reasonable).
+ def _resolve_docs_dir(ctx: click.Context) -> Path | None:
+ """
+ Optional override precedence:
+ 1) --docs-dir CLI option
+ 2) SVC_INFRA_DOCS_DIR env var
+ 3) *Only when working inside the svc-infra repo itself*: repo-root /docs
+ """
+ # 1) CLI option on this or parent contexts
+ current: click.Context | None = ctx
+ while current is not None:
+ docs_dir_opt = (current.params or {}).get("docs_dir")
+ if docs_dir_opt:
+ path = docs_dir_opt if isinstance(docs_dir_opt, Path) else Path(docs_dir_opt)
+ path = path.expanduser()
+ if path.exists():
+ return path
+ current = current.parent
+
+ # 2) Env var
+ env_dir = os.getenv("SVC_INFRA_DOCS_DIR")
+ if env_dir:
+ p = Path(env_dir).expanduser()
+ if p.exists():
+ return p
+
+ # 3) In-repo convenience (so `svc-infra docs` works inside this repo)
  try:
- if not topics:
- for entry in sys.path:
- if not entry or ("site-packages" not in entry and "dist-packages" not in entry):
- continue
- base = Path(entry)
- if not base.exists() or not base.is_dir():
- continue
- base_parts = len(base.parts)
- for root, dirs, files in os.walk(base):
- root_path = Path(root)
- # Limit search depth to avoid expensive scans
- if len(root_path.parts) - base_parts > 4:
- # prune
- dirs[:] = []
- continue
- if root_path.name == "docs":
- for p in sorted(root_path.glob("*.md")):
- if p.is_file():
- topics.setdefault(_norm(p.stem), p)
- # do not break; there might be multiple doc dirs
+ root = resolve_project_root()
+ proj_docs = root / "docs"
+ if proj_docs.exists():
+ return proj_docs
  except Exception:
  pass
 
- return topics
-
-
- def _resolve_docs_dir(ctx: click.Context) -> Path | None:
- # Deprecated: we no longer read docs from arbitrary paths or env.
- # All docs are sourced from the packaged svc-infra distribution only.
  return None
 
 
  class DocsGroup(TyperGroup):
  def list_commands(self, ctx: click.Context) -> List[str]:
  names: List[str] = list(super().list_commands(ctx) or [])
+ dir_to_use = _resolve_docs_dir(ctx)
+ fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
  pkg = _discover_pkg_topics()
- names.extend([k for k in pkg.keys()])
- # Deduplicate and sort
- return sorted({*names})
+ names.extend(fs.keys())
+ names.extend([k for k in pkg.keys() if k not in fs])
+ return sorted(set(names))
 
  def get_command(self, ctx: click.Context, name: str) -> click.Command | None:
- # Built-ins first (e.g., list, show)
  cmd = super().get_command(ctx, name)
  if cmd is not None:
  return cmd
 
- # Packaged topics only
+ key = _norm(name)
+
+ dir_to_use = _resolve_docs_dir(ctx)
+ fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
+ if key in fs:
+ file_path = fs[key]
+
+ @click.command(name=name)
+ def _show_fs() -> None:
+ click.echo(file_path.read_text(encoding="utf-8", errors="replace"))
+
+ return _show_fs
+
  pkg = _discover_pkg_topics()
- if name in pkg:
- file_path = pkg[name]
+ if key in pkg:
+ file_path = pkg[key]
 
  @click.command(name=name)
  def _show_pkg() -> None:
@@ -227,39 +127,62 @@ class DocsGroup(TyperGroup):
 
 
  def register(app: typer.Typer) -> None:
- """Register the `docs` command group with dynamic topic subcommands."""
-
  docs_app = typer.Typer(no_args_is_help=True, add_completion=False, cls=DocsGroup)
 
  @docs_app.callback(invoke_without_command=True)
  def _docs_options(
+ docs_dir: Path | None = typer.Option(
+ None,
+ "--docs-dir",
+ help="Path to a docs directory to read from (overrides packaged docs)",
+ ),
  topic: str | None = typer.Option(None, "--topic", help="Topic to show directly"),
  ) -> None:
- """Support --topic at group level (packaged docs only)."""
  if topic:
+ key = _norm(topic)
+ ctx = click.get_current_context()
+ dir_to_use = _resolve_docs_dir(ctx)
+ fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
+ if key in fs:
+ typer.echo(fs[key].read_text(encoding="utf-8", errors="replace"))
+ raise typer.Exit(code=0)
  pkg = _discover_pkg_topics()
- if topic in pkg:
- typer.echo(pkg[topic].read_text(encoding="utf-8", errors="replace"))
+ if key in pkg:
+ typer.echo(pkg[key].read_text(encoding="utf-8", errors="replace"))
  raise typer.Exit(code=0)
  raise typer.BadParameter(f"Unknown topic: {topic}")
 
  @docs_app.command("list", help="List available documentation topics")
  def list_topics() -> None:
+ ctx = click.get_current_context()
+ dir_to_use = _resolve_docs_dir(ctx)
+ fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
  pkg = _discover_pkg_topics()
 
- # Print packaged topics only
  def _print(name: str, path: Path) -> None:
- typer.echo(f"{name}\t{path}")
+ try:
+ typer.echo(f"{name}\t{path}")
+ except Exception:
+ typer.echo(name)
 
- for name, path in pkg.items():
+ for name, path in fs.items():
  _print(name, path)
+ for name, path in pkg.items():
+ if name not in fs:
+ _print(name, path)
 
- # Also support a generic "show" command
  @docs_app.command("show", help="Show docs for a topic (alternative to dynamic subcommand)")
  def show(topic: str) -> None:
+ key = _norm(topic)
+ ctx = click.get_current_context()
+ dir_to_use = _resolve_docs_dir(ctx)
+ fs = _discover_fs_topics(dir_to_use) if dir_to_use else {}
+ if key in fs:
+ typer.echo(fs[key].read_text(encoding="utf-8", errors="replace"))
+ return
  pkg = _discover_pkg_topics()
- if topic in pkg:
- typer.echo(pkg[topic].read_text(encoding="utf-8", errors="replace"))
+ if key in pkg:
+ typer.echo(pkg[key].read_text(encoding="utf-8", errors="replace"))
  return
  raise typer.BadParameter(f"Unknown topic: {topic}")
 
@@ -0,0 +1,71 @@
+ # Acceptance Matrix (A-IDs)
+
+ This document maps Acceptance scenarios (A-IDs) to endpoints, CLIs, fixtures, and seed data. Use it to drive the CI promotion gate and local `make accept` runs.
+
+ ## A0. Harness
+ - Stack: docker-compose.test.yml (api, db, redis)
+ - Makefile targets: accept, compose_up, wait, seed, down
+ - Tests bootstrap: tests/acceptance/conftest.py (BASE_URL), _auth.py, _seed.py, _http.py
+
+ ## A1. Security & Auth
+ - A1-01 Register → Verify → Login → /auth/me
+ - Endpoints: POST /auth/register, POST /auth/verify, POST /auth/login, GET /auth/me
+ - Fixtures: admin, user
+ - A1-02 Password policy & breach check
+ - Endpoints: POST /auth/register
+ - A1-03 Lockout escalation and cooldown
+ - Endpoints: POST /auth/login
+ - A1-04 RBAC/ABAC enforced
+ - Endpoints: GET /admin/*, resource GET with owner guard
+ - A1-05 Session list & revoke
+ - Endpoints: GET/DELETE /auth/sessions
+ - A1-06 API keys lifecycle
+ - Endpoints: POST/GET/DELETE /auth/api-keys, usage via Authorization header
+ - A1-07 MFA lifecycle
+ - Endpoints: /auth/mfa/*
+
+ ## A2. Rate Limiting
+ - A2-01 Global limit → 429 with Retry-After
+ - A2-02 Per-route & tenant override honored
+ - A2-03 Window reset
+
+ ## A3. Idempotency & Concurrency
+ - A3-01 Same Idempotency-Key → identical 2xx
+ - A3-02 Conflicting payload + same key → 409
+ - A3-03 Optimistic lock mismatch → 409; success increments version
+
+ ## A4. Jobs & Scheduling
+ - A4-01 Custom job consumed
+ - A4-02 Backoff & DLQ
+ - A4-03 Cron tick observed
+
+ ## A5. Webhooks
+ - A5-01 Producer → delivery (HMAC verified)
+ - A5-02 Retry stops on success
+ - A5-03 Secret rotation window accepts old+new
+
+ ## A6. Tenancy
+ - A6-01 tenant_id injected on create; list scoped
+ - A6-02 Cross-tenant → 404/403
+ - A6-03 Per-tenant quotas enforced
+
+ ## A7. Data Lifecycle
+ - A7-01 Soft delete hides; undelete restores
+ - A7-02 GDPR erasure steps with audit
+ - A7-03 Retention purge soft→hard
+ - A7-04 Backup verification healthy
+
+ ## A8. SLOs & Ops
+ - A8-01 Metrics http_server_* and db_pool_* present
+ - A8-02 Maintenance mode 503; circuit breaker trips/recover
+ - A8-03 Liveness/readiness under DB up/down
+
+ ## A9. OpenAPI & Error Contracts
+ - A9-01 /openapi.json valid; examples present
+ - A9-02 Problem+JSON conforms
+ - A9-03 Spectral + API Doctor pass
+
+ ## A10. CLI & DX
+ - A10-01 DB migrate/rollback/seed
+ - A10-02 Jobs runner consumes a sample job
+ - A10-03 SDK smoke-import and /ping
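For orientation, here is what one of these scenarios can look like as a test. This is a minimal sketch of an A3-01 style check, assuming an httpx `client` fixture and a hypothetical `/payments` route; the shipped acceptance suite defines its own fixtures and endpoints.

```python
# Illustrative A3-01 sketch; the "client" fixture and /payments route are assumptions.
import uuid

import httpx


def test_same_idempotency_key_returns_identical_2xx(client: httpx.Client) -> None:
    key = str(uuid.uuid4())
    payload = {"amount": 100, "currency": "usd"}
    headers = {"Idempotency-Key": key}

    first = client.post("/payments", json=payload, headers=headers)
    second = client.post("/payments", json=payload, headers=headers)

    # Both calls succeed, and the second is a replay of the first response.
    assert first.status_code // 100 == 2
    assert second.status_code == first.status_code
    assert second.json() == first.json()
```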
@@ -0,0 +1,44 @@
+ # Pre-Deploy Acceptance (Promotion Gate)
+
+ This guide describes the acceptance harness that runs post-build against an ephemeral stack. Artifacts are promoted only if acceptance checks pass.
+
+ ## Stack
+ - docker-compose.test.yml: api (uvicorn serving tests.acceptance.app), optional db/redis (via profiles), and a tester container to run pytest inside
+ - Makefile targets: accept, compose_up, wait, seed, down
+ - Health probes: /healthz (liveness), /readyz (readiness), /startupz (startup)
+
+ ## Workflow
+ 1. Build image
+ 2. docker compose up -d (test stack)
+ 3. CLI DB checks & seed: run `sql setup-and-migrate`, `sql current`, `sql downgrade -1`, `sql upgrade head` against an ephemeral SQLite DB, then call `sql seed tests.acceptance._seed:acceptance_seed` (no-op by default)
+ 4. Run pytest inside tester: docker compose run --rm tester (Makefile wires this)
+ 5. OpenAPI lint & API Doctor
+ 6. Teardown
+
+ ## Supply-chain & Matrix (v1 scope)
+ - SBOM: generate and upload as artifact; image scan (Trivy/Grype) with severity gate.
+ - Provenance: sign/attest images (cosign/SLSA) on best-effort basis.
+ - Backend matrix: run acceptance against two stacks via COMPOSE_PROFILES:
+ 1) in-memory stores (default), 2) Redis + Postgres (COMPOSE_PROFILES=pg-redis).
+
+ ## Additional Acceptance Checks (fast wins)
+ - Headers/CORS: assert HSTS, X-Content-Type-Options, Referrer-Policy, X-Frame-Options/SameSite; OPTIONS preflight behavior.
+ - Resilience: restart DB/Redis during request; expect breaker trip and recovery.
+ - DR drill: restore a tiny SQL dump then run smoke.
+ - OpenAPI invariants: no orphan routes; servers block correctness for versions; 100% examples for public JSON; stable operationIds; reject /auth/{id} path via lint rule.
+ - CLI contracts: `svc-infra --help` and key subcommands exit 0 and print expected flags.
+
+ ## Local usage
+ - make accept (runs the full flow locally)
+ - make down (tears down the stack)
+ - To run tests manually: docker compose run --rm tester
+ - To target a different backend: COMPOSE_PROFILES=pg-redis make accept
+
+ ## Files
+ - tests/acceptance/conftest.py: BASE_URL, httpx client, fixtures
+ - tests/acceptance/_auth.py: login/register helpers
+ - tests/acceptance/_seed.py: seed users/tenants/api keys
+ - tests/acceptance/_http.py: HTTP helpers
+
+ ## Scenarios
+ See docs/acceptance-matrix.md for A-IDs and mapping to endpoints.
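The test bootstrap files listed above can be as small as the sketch below; the fixture name, default BASE_URL, and readiness check are assumptions for illustration, not the packaged conftest.

```python
# tests/acceptance/conftest.py -- minimal sketch, assuming the stack is already
# up (make compose_up + the wait target) and reachable at BASE_URL.
import os

import httpx
import pytest

BASE_URL = os.getenv("BASE_URL", "http://localhost:8000")


@pytest.fixture(scope="session")
def client():
    # One shared HTTP client for the whole acceptance session.
    with httpx.Client(base_url=BASE_URL, timeout=10.0) as c:
        yield c


@pytest.fixture(scope="session", autouse=True)
def _stack_is_ready(client):
    # Fail the whole session fast if readiness never came up.
    client.get("/readyz").raise_for_status()
```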
@@ -0,0 +1,40 @@
+ # ADR 0002: Background Jobs & Scheduling
+
+ Date: 2025-10-15
+
+ Status: Accepted
+
+ ## Context
+ We need production-grade background job processing and simple scheduling with a one-call setup. The library already includes in-memory queue/scheduler for tests/local. We need a production backend and a minimal runner.
+
+ ## Decision
+ - JobQueue protocol defines enqueue/reserve/ack/fail with retry and exponential backoff (base seconds * attempts). Jobs have: id, name, payload, available_at, attempts, max_attempts, backoff_seconds, last_error.
+ - Backends:
+ - InMemoryJobQueue for tests/local.
+ - RedisJobQueue for production using Redis primitives with visibility timeout and atomic operations.
+ - Scheduler:
+ - InMemoryScheduler providing interval-based scheduling via next_run_at. Cron parsing is out of scope initially; a simple YAML loader can be added later.
+ - Runner:
+ - A CLI loop `svc-infra jobs run` will tick the scheduler and process jobs in a loop with small sleep/backoff.
+ - Configuration:
+ - One-call `easy_jobs()` returns (queue, scheduler). Picks backend via `JOBS_DRIVER` env (memory|redis). Redis URL via `REDIS_URL`.
+
+ ## Alternatives Considered
+ - Using RQ/Huey/Celery: heavier dependency and less control over API ergonomic goals; we prefer thin primitives aligned with svc-infra patterns.
+ - SQL-backed queue first: we will consider later; Redis is sufficient for v1.
+
+ ## Consequences
+ - Enables outbox/webhook processors on a reliable queue.
+ - Minimal cognitive load: consistent APIs, ENV-driven.
+ - Future work: SQL queue, cron YAML loader, metrics, concurrency controls.
+
+ ## Redis Data Model (initial)
+ - List `jobs:ready` holds ready job IDs; a ZSET `jobs:delayed` with score=available_at keeps delayed jobs; a HASH per job `job:{id}` stores fields.
+ - Reserve uses RPOPLPUSH from `jobs:ready` to `jobs:processing` or BRPOPLPUSH with timeout; sets `visible_at` on job as now+vt and increments `attempts`.
+ - Ack removes job from `jobs:processing` and deletes `job:{id}`.
+ - Fail increments attempts and computes next available_at = now + backoff_seconds * attempts; moves job to delayed ZSET.
+ - A housekeeping step periodically moves due jobs from delayed ZSET to ready list. Reserve also checks ZSET for due jobs opportunistically.
+
+ ## Testing Strategy
+ - Unit tests cover enqueue/reserve/ack/fail, visibility timeout behavior, and DLQ after max_attempts.
+ - Runner tests cover one iteration loop processing.
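To make the queue contract concrete, here is a minimal in-memory sketch of the enqueue/reserve/ack/fail surface and the backoff rule (base seconds * attempts). Field names follow the ADR, but the class itself is illustrative, not the packaged InMemoryJobQueue or RedisJobQueue.

```python
# Minimal sketch of the JobQueue surface described in the ADR; not the shipped code.
from __future__ import annotations

import time
import uuid
from dataclasses import dataclass, field


@dataclass
class Job:
    name: str
    payload: dict
    id: str = field(default_factory=lambda: uuid.uuid4().hex)
    available_at: float = field(default_factory=time.time)
    attempts: int = 0
    max_attempts: int = 5
    backoff_seconds: float = 10.0
    last_error: str | None = None


class SketchJobQueue:
    def __init__(self) -> None:
        self._jobs: dict[str, Job] = {}

    def enqueue(self, job: Job) -> str:
        self._jobs[job.id] = job
        return job.id

    def reserve(self) -> Job | None:
        # Return the first job whose available_at has passed; count the attempt.
        now = time.time()
        for job in self._jobs.values():
            if job.available_at <= now:
                job.attempts += 1
                return job
        return None

    def ack(self, job_id: str) -> None:
        self._jobs.pop(job_id, None)

    def fail(self, job_id: str, error: str) -> None:
        job = self._jobs[job_id]
        job.last_error = error
        if job.attempts >= job.max_attempts:
            self._jobs.pop(job_id)  # a real backend would move this to a DLQ
        else:
            # Backoff rule from the ADR: base seconds * attempts.
            job.available_at = time.time() + job.backoff_seconds * job.attempts
```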
@@ -0,0 +1,24 @@
+ # ADR 0003: Webhooks Framework
+
+ Date: 2025-10-15
+
+ Status: Accepted
+
+ ## Context
+ Services need a consistent way to publish domain events to external consumers via webhooks, verify inbound signatures, and handle retries with backoff. We already have an outbox pattern, a job queue, and a webhook delivery worker.
+
+ ## Decision
+ - Event Schema: minimal fields {topic, payload, version, created_at}. Versioning included to evolve payloads.
+ - Signing: HMAC-SHA256 over canonical JSON payload; header `X-Signature` carries hex digest. Future: include timestamp and v1 signature header variant.
+ - Outbox → Job Queue: Producer writes events to Outbox; outbox tick enqueues delivery jobs; worker performs HTTP POST with signature.
+ - Subscriptions: In-memory subscription store maps topic → {url, secret}. Persistence deferred.
+ - Verification: Provide helper for verifying incoming webhook requests by recomputing the HMAC.
+ - Retry: Already handled by JobQueue backoff; DLQ after max attempts.
+
+ ## Consequences
+ - Clear boundary: producers don't call HTTP directly; they publish to Outbox.
+ - Deterministic signing & verification across producer/consumer.
+ - Extensibility: timestamped signed headers, secret rotation, persisted subscriptions are future extensions.
+
+ ## Testing
+ - Unit tests for verification helper and end-to-end publish→outbox→queue→delivery using in-memory components and a fake HTTP client.
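A short sketch of the signing scheme described in the Decision section: HMAC-SHA256 over canonical JSON, hex digest carried in `X-Signature`. Only the header name and algorithm come from the ADR; the helper names are illustrative.

```python
# Illustrative signing/verification pair; helper names are not the package's API.
import hashlib
import hmac
import json


def _canonical(payload: dict) -> bytes:
    # Canonical JSON: sorted keys, compact separators.
    return json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8")


def sign(payload: dict, secret: str) -> str:
    return hmac.new(secret.encode("utf-8"), _canonical(payload), hashlib.sha256).hexdigest()


def verify(payload: dict, secret: str, signature_header: str) -> bool:
    # Constant-time comparison to avoid timing side channels.
    return hmac.compare_digest(sign(payload, secret), signature_header)


# Producer side: attach the digest when POSTing the event body.
headers = {"X-Signature": sign({"topic": "invoice.paid", "version": 1}, "whsec_example")}
```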
@@ -0,0 +1,42 @@
+ # ADR-0004: Tenancy Model and Enforcement
+
+ Date: 2025-10-15
+
+ Status: Proposed
+
+ ## Context
+
+ The framework needs a consistent, ergonomic multi-tenant story across modules (API scaffolding, SQL/Mongo persistence, auth/security, payments, jobs, webhooks). Existing patterns already reference `tenant_id` in many places (payments models and service, audit/session models, SQL/Mongo scaffolds). However, enforcement and app ergonomics were not unified.
+
+ ## Decision
+
+ Adopt a default "soft-tenant" isolation model via a `tenant_id` column and centralized enforcement primitives:
+
+ - Resolution: `resolve_tenant_id` and `require_tenant_id` FastAPI dependencies in `api.fastapi.tenancy.context`. Resolution order: override hook → identity (user/api_key) → `X-Tenant-Id` header → `request.state.tenant_id`.
+ - Enforcement in SQL: `TenantSqlService(SqlService)` that scopes list/get/update/delete/search/count with a `where` clause and injects `tenant_id` on create when the model supports it. Repository methods accept optional `where` filters.
+ - Router ergonomics: `make_tenant_crud_router_plus_sql` which requires `TenantId` and uses `TenantSqlService` under the hood. This keeps route code simple while enforcing scoping.
+ - Extensibility: `set_tenant_resolver` hook to override resolution logic per app; `tenant_field` parameter to support custom column names. Future: schema-per-tenant or db-per-tenant via alternate repository/service implementations.
+
+ ## Alternatives considered
+
+ 1) Enforce tenancy at the ORM layer (SQLAlchemy events/session) – rejected for clarity and testability; we prefer explicit service/dep composition.
+ 2) Global middleware that rewrites queries – rejected due to SQLAlchemy complexity and opacity.
+ 3) Only rely on developers to remember filters – rejected due to footguns.
+
+ ## Consequences
+
+ - Clear default behavior with escape hatches. Minimal changes for consumers using CRUD builders and SqlService.
+ - Requires models to include an optional or required `tenant_id` column for scoping.
+ - Non-SQL stores should add equivalent wrappers; Mongo scaffolds already include `tenant_id` fields and can mirror these patterns later.
+
+ ## Implementation Notes
+
+ - New modules: `api.fastapi.tenancy.context`, `db.sql.tenant`. Repository updated to accept `where` filters.
+ - CRUD router extended with `make_tenant_crud_router_plus_sql` to require `TenantId`.
+ - Tests added: `tests/tenancy/*` for resolution and service scoping.
+
+ ## Open Items
+
+ - Per-tenant quotas & rate limit overrides (tie into rate limit dependency/middleware via a resolver that returns per-tenant config).
+ - Export tenant CLI (dump/import data for a specific tenant).
+ - Docs: isolation guidance (column vs schema vs db), migration guidance.
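As a rough illustration of the resolution order above (override hook → identity → `X-Tenant-Id` header → `request.state.tenant_id`), here is a FastAPI-style dependency sketch. The names mirror the ADR, but the bodies are assumptions rather than the packaged `api.fastapi.tenancy.context` implementation.

```python
# Sketch of the documented resolution order; not svc_infra's actual context module.
from typing import Callable, Optional

from fastapi import Depends, Header, HTTPException, Request

_tenant_resolver: Optional[Callable[[Request], Optional[str]]] = None


def set_tenant_resolver(resolver: Callable[[Request], Optional[str]]) -> None:
    """Install an app-specific override hook (step 1 in the resolution order)."""
    global _tenant_resolver
    _tenant_resolver = resolver


def resolve_tenant_id(
    request: Request,
    x_tenant_id: Optional[str] = Header(default=None),
) -> Optional[str]:
    # 1) override hook
    if _tenant_resolver is not None:
        tid = _tenant_resolver(request)
        if tid:
            return tid
    # 2) authenticated identity (user / api key), if middleware attached one
    identity = getattr(request.state, "identity", None)
    if identity is not None and getattr(identity, "tenant_id", None):
        return identity.tenant_id
    # 3) explicit X-Tenant-Id header
    if x_tenant_id:
        return x_tenant_id
    # 4) whatever earlier middleware stashed on request.state
    return getattr(request.state, "tenant_id", None)


def require_tenant_id(tenant_id: Optional[str] = Depends(resolve_tenant_id)) -> str:
    if not tenant_id:
        raise HTTPException(status_code=400, detail="Tenant could not be resolved")
    return tenant_id
```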
@@ -0,0 +1,86 @@
+ # ADR 0005: Data Lifecycle — Soft Delete, Retention, Erasure, Backups
+
+ Date: 2025-10-16
+ Status: Accepted
+
+ ## Context
+ We need a coherent Data Lifecycle story in svc-infra that covers:
+ - Migrations & fixtures: simple way to run DB setup/migrations and load reference data.
+ - Soft delete conventions: consistent filtering and model scaffolding support.
+ - Retention policies: periodic purging of expired records per model/table.
+ - GDPR/PII erasure: queued workflow to scrub user-related data while preserving legal audit.
+ - Backups/PITR verification: a job that exercises restore checks or at least validates backup health signals.
+
+ Existing building blocks:
+ - Migrations CLI with end-to-end "setup-and-migrate" and new `sql seed` command for executing a user-specified seed callable.
+ - Code: `src/svc_infra/cli/cmds/db/sql/alembic_cmds.py` (cmd_setup_and_migrate, cmd_seed)
+ - Soft delete support in repository and scaffold:
+ - Repo filtering: `src/svc_infra/db/sql/repository.py` (soft_delete flags, `deleted_at` timestamp, optional active flag)
+ - Model scaffolding: `src/svc_infra/db/sql/scaffold.py` (optional `deleted_at` field)
+ - Easy-setup helper to coordinate lifecycle bits:
+ - `src/svc_infra/data/add.py` provides a startup hook to auto-migrate and optional callbacks for fixtures, retention jobs, and an erasure job.
+
+ Gaps:
+ - No standardized fixture loader contract beyond the callback surface.
+ - No built-in retention policy registry or purge execution job.
+ - No opinionated GDPR erasure workflow and audit trail.
+ - No backup/PITR verification job implementation.
+
+ ## Decision
+ Introduce minimal, composable primitives that keep svc-infra flexible while providing a clear path to production-grade lifecycle.
+
+ 1) Fixture Loader Contract
+ - Provide a simple callable signature for deterministic, idempotent fixture loading: `Callable[[], None | Awaitable[None]]`.
+ - Document best practices: UPSERT by natural keys, avoid random IDs, guard on existing rows.
+ - Expose via `add_data_lifecycle(on_load_fixtures=...)` (already available); add docs and tests.
+
+ 2) Retention Policy Registry
+ - Define a registry API that allows services to register per-resource retention rules.
+ - Basic shape:
+ - `RetentionPolicy(name: str, model: type, where: list[Any] | None, older_than_days: int, soft_delete_field: str = "deleted_at")`
+ - A purge function computes a cutoff timestamp and issues DELETE or marks soft-delete fields.
+ - Execution model: a periodic job (via jobs scheduler) calls `run_retention_purge(registry)`.
+ - Keep SQL-only first; room for NoSQL extensions later.
+
+ 3) GDPR Erasure Workflow
+ - Provide a single callable entrypoint `erase_principal(principal_id: str) -> None | Awaitable[None]`.
+ - Default strategy: enqueue a job that runs a configurable erasure plan composed of steps (delete/soft-delete/overwrite) across tables.
+ - Add an audit log entry per erasure request with outcome and timestamp (reuse `security.audit` helpers if feasible).
+ - Keep the plan provider pluggable so apps specify which tables/columns participate.
+
+ 4) Backup/PITR Verification Job
+ - Define an interface `verify_backups() -> HealthReport` with a minimal default implementation that:
+ - Queries the backup system or driver for last successful backup timestamp and retention window.
+ - Emits metrics/logs and returns a structured status.
+ - Defer full "restore drill" capability; provide extension hook only.
+
+ ## Interfaces
+ - Registry
+ - `register_retention(policy: RetentionPolicy) -> None`
+ - `run_retention_purge(session_factory, policies: list[RetentionPolicy]) -> PurgeReport`
+ - Erasure
+ - `erase_principal(principal_id: str, plan: ErasurePlan, session_factory) -> ErasureReport`
+ - Fixtures
+ - `load_fixtures()` as provided by caller via `add_data_lifecycle`.
+ - Backup
+ - `verify_backups() -> BackupHealthReport`
+
+ ## Alternatives Considered
+ - Heavy-weight DSL for retention and erasure: rejected for now; keep APIs Pythonic and pluggable.
+ - Trigger-level soft delete enforcement: skipped to avoid provider lock-in; enforced at repository and query layer.
+ - Full restore drill automation: out of scope for v1; introduce later behind provider integrations.
+
+ ## Consequences
+ - Minimal surface that doesn't over-constrain adopters; provides default patterns and contracts.
+ - Requires additional test scaffolds and example docs to demonstrate usage.
+ - SQL-focused initial implementation; other backends can plug via similar interfaces.
+
+ ## Rollout & Testing
+ - Add unit/integration tests for fixture loader, retention purge logic, and erasure workflow skeleton.
+ - Provide docs in `docs/database.md` with examples and operational guidance.
+
+ ## References
+ - `src/svc_infra/db/sql/repository.py` soft-delete handling
+ - `src/svc_infra/db/sql/scaffold.py` deleted_at field scaffolding
+ - `src/svc_infra/data/add.py` data lifecycle helper
+ - `src/svc_infra/cli/cmds/db/sql/alembic_cmds.py` migrations & seed
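A compact sketch of the retention primitives named in the Decision and Interfaces sections: the `RetentionPolicy` shape and a purge pass that computes the cutoff and hard-deletes soft-deleted rows. The SQLAlchemy usage (and taking a `Session` rather than a session factory) is an assumption for illustration; the packaged API may differ.

```python
# Sketch of the retention primitives described above; not the packaged implementation.
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from typing import Any, Sequence

from sqlalchemy import delete
from sqlalchemy.orm import Session


@dataclass(frozen=True)
class RetentionPolicy:
    name: str
    model: type
    where: Sequence[Any] | None
    older_than_days: int
    soft_delete_field: str = "deleted_at"


def run_retention_purge(session: Session, policies: Sequence[RetentionPolicy]) -> dict[str, int]:
    """Hard-delete rows whose soft-delete timestamp is older than each policy's cutoff."""
    report: dict[str, int] = {}
    now = datetime.now(timezone.utc)
    for policy in policies:
        cutoff = now - timedelta(days=policy.older_than_days)
        column = getattr(policy.model, policy.soft_delete_field)
        stmt = delete(policy.model).where(column.is_not(None), column < cutoff)
        for extra in policy.where or ():
            stmt = stmt.where(extra)
        result = session.execute(stmt)
        report[policy.name] = result.rowcount or 0
    session.commit()
    return report
```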