svc-infra 0.1.595__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of svc-infra might be problematic.
- svc_infra/__init__.py +58 -2
- svc_infra/apf_payments/models.py +68 -38
- svc_infra/apf_payments/provider/__init__.py +2 -2
- svc_infra/apf_payments/provider/aiydan.py +39 -23
- svc_infra/apf_payments/provider/base.py +8 -3
- svc_infra/apf_payments/provider/registry.py +3 -5
- svc_infra/apf_payments/provider/stripe.py +74 -52
- svc_infra/apf_payments/schemas.py +84 -83
- svc_infra/apf_payments/service.py +27 -16
- svc_infra/apf_payments/settings.py +12 -11
- svc_infra/api/__init__.py +61 -0
- svc_infra/api/fastapi/__init__.py +34 -0
- svc_infra/api/fastapi/admin/__init__.py +3 -0
- svc_infra/api/fastapi/admin/add.py +240 -0
- svc_infra/api/fastapi/apf_payments/router.py +94 -73
- svc_infra/api/fastapi/apf_payments/setup.py +10 -9
- svc_infra/api/fastapi/auth/__init__.py +65 -0
- svc_infra/api/fastapi/auth/_cookies.py +1 -3
- svc_infra/api/fastapi/auth/add.py +14 -15
- svc_infra/api/fastapi/auth/gaurd.py +32 -20
- svc_infra/api/fastapi/auth/mfa/models.py +3 -4
- svc_infra/api/fastapi/auth/mfa/pre_auth.py +13 -9
- svc_infra/api/fastapi/auth/mfa/router.py +9 -8
- svc_infra/api/fastapi/auth/mfa/security.py +4 -7
- svc_infra/api/fastapi/auth/mfa/utils.py +5 -3
- svc_infra/api/fastapi/auth/policy.py +0 -1
- svc_infra/api/fastapi/auth/providers.py +3 -3
- svc_infra/api/fastapi/auth/routers/apikey_router.py +19 -21
- svc_infra/api/fastapi/auth/routers/oauth_router.py +98 -52
- svc_infra/api/fastapi/auth/routers/session_router.py +6 -5
- svc_infra/api/fastapi/auth/security.py +25 -15
- svc_infra/api/fastapi/auth/sender.py +5 -0
- svc_infra/api/fastapi/auth/settings.py +18 -19
- svc_infra/api/fastapi/auth/state.py +5 -4
- svc_infra/api/fastapi/auth/ws_security.py +275 -0
- svc_infra/api/fastapi/billing/router.py +71 -0
- svc_infra/api/fastapi/billing/setup.py +19 -0
- svc_infra/api/fastapi/cache/add.py +9 -5
- svc_infra/api/fastapi/db/__init__.py +5 -1
- svc_infra/api/fastapi/db/http.py +10 -9
- svc_infra/api/fastapi/db/nosql/__init__.py +39 -1
- svc_infra/api/fastapi/db/nosql/mongo/add.py +35 -30
- svc_infra/api/fastapi/db/nosql/mongo/crud_router.py +39 -21
- svc_infra/api/fastapi/db/sql/__init__.py +5 -1
- svc_infra/api/fastapi/db/sql/add.py +62 -25
- svc_infra/api/fastapi/db/sql/crud_router.py +205 -30
- svc_infra/api/fastapi/db/sql/session.py +19 -2
- svc_infra/api/fastapi/db/sql/users.py +18 -9
- svc_infra/api/fastapi/dependencies/ratelimit.py +76 -14
- svc_infra/api/fastapi/docs/add.py +163 -0
- svc_infra/api/fastapi/docs/landing.py +6 -6
- svc_infra/api/fastapi/docs/scoped.py +75 -36
- svc_infra/api/fastapi/dual/__init__.py +12 -2
- svc_infra/api/fastapi/dual/dualize.py +2 -2
- svc_infra/api/fastapi/dual/protected.py +123 -10
- svc_infra/api/fastapi/dual/public.py +25 -0
- svc_infra/api/fastapi/dual/router.py +18 -8
- svc_infra/api/fastapi/dx.py +33 -2
- svc_infra/api/fastapi/ease.py +59 -7
- svc_infra/api/fastapi/http/concurrency.py +2 -1
- svc_infra/api/fastapi/http/conditional.py +2 -2
- svc_infra/api/fastapi/middleware/debug.py +4 -1
- svc_infra/api/fastapi/middleware/errors/exceptions.py +2 -5
- svc_infra/api/fastapi/middleware/errors/handlers.py +50 -10
- svc_infra/api/fastapi/middleware/graceful_shutdown.py +95 -0
- svc_infra/api/fastapi/middleware/idempotency.py +190 -68
- svc_infra/api/fastapi/middleware/idempotency_store.py +187 -0
- svc_infra/api/fastapi/middleware/optimistic_lock.py +39 -0
- svc_infra/api/fastapi/middleware/ratelimit.py +125 -28
- svc_infra/api/fastapi/middleware/ratelimit_store.py +45 -13
- svc_infra/api/fastapi/middleware/request_id.py +24 -10
- svc_infra/api/fastapi/middleware/request_size_limit.py +3 -3
- svc_infra/api/fastapi/middleware/timeout.py +176 -0
- svc_infra/api/fastapi/object_router.py +1060 -0
- svc_infra/api/fastapi/openapi/apply.py +4 -3
- svc_infra/api/fastapi/openapi/conventions.py +13 -6
- svc_infra/api/fastapi/openapi/mutators.py +144 -17
- svc_infra/api/fastapi/openapi/pipeline.py +2 -2
- svc_infra/api/fastapi/openapi/responses.py +4 -6
- svc_infra/api/fastapi/openapi/security.py +1 -1
- svc_infra/api/fastapi/ops/add.py +73 -0
- svc_infra/api/fastapi/pagination.py +47 -32
- svc_infra/api/fastapi/routers/__init__.py +16 -10
- svc_infra/api/fastapi/routers/ping.py +1 -0
- svc_infra/api/fastapi/setup.py +167 -54
- svc_infra/api/fastapi/tenancy/add.py +20 -0
- svc_infra/api/fastapi/tenancy/context.py +113 -0
- svc_infra/api/fastapi/versioned.py +102 -0
- svc_infra/app/README.md +5 -5
- svc_infra/app/__init__.py +3 -1
- svc_infra/app/env.py +70 -4
- svc_infra/app/logging/add.py +10 -2
- svc_infra/app/logging/filter.py +1 -1
- svc_infra/app/logging/formats.py +13 -5
- svc_infra/app/root.py +3 -3
- svc_infra/billing/__init__.py +40 -0
- svc_infra/billing/async_service.py +167 -0
- svc_infra/billing/jobs.py +231 -0
- svc_infra/billing/models.py +146 -0
- svc_infra/billing/quotas.py +101 -0
- svc_infra/billing/schemas.py +34 -0
- svc_infra/bundled_docs/README.md +5 -0
- svc_infra/bundled_docs/__init__.py +1 -0
- svc_infra/bundled_docs/getting-started.md +6 -0
- svc_infra/cache/__init__.py +21 -5
- svc_infra/cache/add.py +167 -0
- svc_infra/cache/backend.py +9 -7
- svc_infra/cache/decorators.py +75 -20
- svc_infra/cache/demo.py +2 -2
- svc_infra/cache/keys.py +26 -6
- svc_infra/cache/recache.py +26 -27
- svc_infra/cache/resources.py +6 -5
- svc_infra/cache/tags.py +19 -44
- svc_infra/cache/ttl.py +2 -3
- svc_infra/cache/utils.py +4 -3
- svc_infra/cli/__init__.py +44 -8
- svc_infra/cli/__main__.py +4 -0
- svc_infra/cli/cmds/__init__.py +39 -2
- svc_infra/cli/cmds/db/nosql/mongo/mongo_cmds.py +18 -14
- svc_infra/cli/cmds/db/nosql/mongo/mongo_scaffold_cmds.py +9 -10
- svc_infra/cli/cmds/db/ops_cmds.py +267 -0
- svc_infra/cli/cmds/db/sql/alembic_cmds.py +97 -29
- svc_infra/cli/cmds/db/sql/sql_export_cmds.py +80 -0
- svc_infra/cli/cmds/db/sql/sql_scaffold_cmds.py +13 -13
- svc_infra/cli/cmds/docs/docs_cmds.py +139 -0
- svc_infra/cli/cmds/dx/__init__.py +12 -0
- svc_infra/cli/cmds/dx/dx_cmds.py +110 -0
- svc_infra/cli/cmds/health/__init__.py +179 -0
- svc_infra/cli/cmds/health/health_cmds.py +8 -0
- svc_infra/cli/cmds/help.py +4 -0
- svc_infra/cli/cmds/jobs/__init__.py +1 -0
- svc_infra/cli/cmds/jobs/jobs_cmds.py +42 -0
- svc_infra/cli/cmds/obs/obs_cmds.py +31 -13
- svc_infra/cli/cmds/sdk/__init__.py +0 -0
- svc_infra/cli/cmds/sdk/sdk_cmds.py +102 -0
- svc_infra/cli/foundation/runner.py +4 -5
- svc_infra/cli/foundation/typer_bootstrap.py +1 -2
- svc_infra/data/__init__.py +83 -0
- svc_infra/data/add.py +61 -0
- svc_infra/data/backup.py +56 -0
- svc_infra/data/erasure.py +46 -0
- svc_infra/data/fixtures.py +42 -0
- svc_infra/data/retention.py +56 -0
- svc_infra/db/__init__.py +15 -0
- svc_infra/db/crud_schema.py +14 -13
- svc_infra/db/inbox.py +67 -0
- svc_infra/db/nosql/__init__.py +2 -0
- svc_infra/db/nosql/constants.py +1 -1
- svc_infra/db/nosql/core.py +19 -5
- svc_infra/db/nosql/indexes.py +12 -9
- svc_infra/db/nosql/management.py +4 -4
- svc_infra/db/nosql/mongo/README.md +13 -13
- svc_infra/db/nosql/mongo/client.py +21 -4
- svc_infra/db/nosql/mongo/settings.py +1 -1
- svc_infra/db/nosql/repository.py +46 -27
- svc_infra/db/nosql/resource.py +28 -16
- svc_infra/db/nosql/scaffold.py +14 -12
- svc_infra/db/nosql/service.py +2 -1
- svc_infra/db/nosql/service_with_hooks.py +4 -3
- svc_infra/db/nosql/utils.py +4 -4
- svc_infra/db/ops.py +380 -0
- svc_infra/db/outbox.py +105 -0
- svc_infra/db/sql/apikey.py +34 -15
- svc_infra/db/sql/authref.py +8 -6
- svc_infra/db/sql/constants.py +5 -1
- svc_infra/db/sql/core.py +13 -13
- svc_infra/db/sql/management.py +5 -6
- svc_infra/db/sql/repository.py +92 -26
- svc_infra/db/sql/resource.py +18 -12
- svc_infra/db/sql/scaffold.py +11 -11
- svc_infra/db/sql/service.py +2 -1
- svc_infra/db/sql/service_with_hooks.py +4 -3
- svc_infra/db/sql/templates/models_schemas/auth/models.py.tmpl +7 -56
- svc_infra/db/sql/templates/setup/env_async.py.tmpl +34 -12
- svc_infra/db/sql/templates/setup/env_sync.py.tmpl +29 -7
- svc_infra/db/sql/tenant.py +80 -0
- svc_infra/db/sql/uniq.py +8 -7
- svc_infra/db/sql/uniq_hooks.py +12 -11
- svc_infra/db/sql/utils.py +105 -47
- svc_infra/db/sql/versioning.py +14 -0
- svc_infra/db/utils.py +3 -3
- svc_infra/deploy/__init__.py +531 -0
- svc_infra/documents/__init__.py +100 -0
- svc_infra/documents/add.py +263 -0
- svc_infra/documents/ease.py +233 -0
- svc_infra/documents/models.py +114 -0
- svc_infra/documents/storage.py +262 -0
- svc_infra/dx/__init__.py +58 -0
- svc_infra/dx/add.py +63 -0
- svc_infra/dx/changelog.py +74 -0
- svc_infra/dx/checks.py +68 -0
- svc_infra/exceptions.py +141 -0
- svc_infra/health/__init__.py +863 -0
- svc_infra/http/__init__.py +13 -0
- svc_infra/http/client.py +101 -0
- svc_infra/jobs/__init__.py +79 -0
- svc_infra/jobs/builtins/outbox_processor.py +38 -0
- svc_infra/jobs/builtins/webhook_delivery.py +93 -0
- svc_infra/jobs/easy.py +33 -0
- svc_infra/jobs/loader.py +49 -0
- svc_infra/jobs/queue.py +106 -0
- svc_infra/jobs/redis_queue.py +242 -0
- svc_infra/jobs/runner.py +75 -0
- svc_infra/jobs/scheduler.py +53 -0
- svc_infra/jobs/worker.py +40 -0
- svc_infra/loaders/__init__.py +186 -0
- svc_infra/loaders/base.py +143 -0
- svc_infra/loaders/github.py +309 -0
- svc_infra/loaders/models.py +147 -0
- svc_infra/loaders/url.py +229 -0
- svc_infra/logging/__init__.py +375 -0
- svc_infra/mcp/__init__.py +82 -0
- svc_infra/mcp/svc_infra_mcp.py +91 -33
- svc_infra/obs/README.md +2 -0
- svc_infra/obs/add.py +68 -11
- svc_infra/obs/cloud_dash.py +2 -1
- svc_infra/obs/grafana/dashboards/http-overview.json +45 -0
- svc_infra/obs/metrics/__init__.py +6 -7
- svc_infra/obs/metrics/asgi.py +8 -7
- svc_infra/obs/metrics/base.py +13 -13
- svc_infra/obs/metrics/http.py +3 -3
- svc_infra/obs/metrics/sqlalchemy.py +14 -13
- svc_infra/obs/metrics.py +9 -8
- svc_infra/resilience/__init__.py +44 -0
- svc_infra/resilience/circuit_breaker.py +328 -0
- svc_infra/resilience/retry.py +289 -0
- svc_infra/security/__init__.py +167 -0
- svc_infra/security/add.py +213 -0
- svc_infra/security/audit.py +97 -18
- svc_infra/security/audit_service.py +10 -9
- svc_infra/security/headers.py +15 -2
- svc_infra/security/hibp.py +14 -7
- svc_infra/security/jwt_rotation.py +78 -29
- svc_infra/security/lockout.py +23 -16
- svc_infra/security/models.py +77 -44
- svc_infra/security/oauth_models.py +73 -0
- svc_infra/security/org_invites.py +12 -12
- svc_infra/security/passwords.py +3 -3
- svc_infra/security/permissions.py +31 -7
- svc_infra/security/session.py +7 -8
- svc_infra/security/signed_cookies.py +26 -6
- svc_infra/storage/__init__.py +93 -0
- svc_infra/storage/add.py +250 -0
- svc_infra/storage/backends/__init__.py +11 -0
- svc_infra/storage/backends/local.py +331 -0
- svc_infra/storage/backends/memory.py +213 -0
- svc_infra/storage/backends/s3.py +334 -0
- svc_infra/storage/base.py +239 -0
- svc_infra/storage/easy.py +181 -0
- svc_infra/storage/settings.py +193 -0
- svc_infra/testing/__init__.py +682 -0
- svc_infra/utils.py +170 -5
- svc_infra/webhooks/__init__.py +69 -0
- svc_infra/webhooks/add.py +327 -0
- svc_infra/webhooks/encryption.py +115 -0
- svc_infra/webhooks/fastapi.py +37 -0
- svc_infra/webhooks/router.py +55 -0
- svc_infra/webhooks/service.py +69 -0
- svc_infra/webhooks/signing.py +34 -0
- svc_infra/websocket/__init__.py +79 -0
- svc_infra/websocket/add.py +139 -0
- svc_infra/websocket/client.py +283 -0
- svc_infra/websocket/config.py +57 -0
- svc_infra/websocket/easy.py +76 -0
- svc_infra/websocket/exceptions.py +61 -0
- svc_infra/websocket/manager.py +343 -0
- svc_infra/websocket/models.py +49 -0
- svc_infra-1.1.0.dist-info/LICENSE +21 -0
- svc_infra-1.1.0.dist-info/METADATA +362 -0
- svc_infra-1.1.0.dist-info/RECORD +364 -0
- svc_infra-0.1.595.dist-info/METADATA +0 -80
- svc_infra-0.1.595.dist-info/RECORD +0 -253
- {svc_infra-0.1.595.dist-info → svc_infra-1.1.0.dist-info}/WHEEL +0 -0
- {svc_infra-0.1.595.dist-info → svc_infra-1.1.0.dist-info}/entry_points.txt +0 -0
svc_infra/db/ops.py
ADDED
@@ -0,0 +1,380 @@

```python
"""Database operations utilities for one-off administrative tasks.

This module provides synchronous database utilities for operations that
don't fit the normal async SQLAlchemy workflow, such as:
- Waiting for database readiness at startup
- Executing maintenance SQL
- Dropping tables with lock handling
- Terminating blocking queries

These utilities use psycopg2 directly for maximum reliability in
edge cases where the ORM might not be available or appropriate.

Example:
    >>> from svc_infra.db.ops import wait_for_database, run_sync_sql
    >>>
    >>> # Wait for database before app starts
    >>> wait_for_database(timeout=30)
    >>>
    >>> # Run maintenance query
    >>> run_sync_sql("VACUUM ANALYZE my_table")
"""

from __future__ import annotations

import logging
import sys
import time
from collections.abc import Sequence
from typing import Any, cast

from .sql.utils import get_database_url_from_env

logger = logging.getLogger(__name__)

# Timeout for individual database operations (seconds)
DEFAULT_STATEMENT_TIMEOUT = 30

# Default wait-for-database settings
DEFAULT_WAIT_TIMEOUT = 30
DEFAULT_WAIT_INTERVAL = 1.0


def _flush() -> None:
    """Force flush stdout/stderr for containerized log visibility."""
    sys.stdout.flush()
    sys.stderr.flush()


def _get_connection(url: str | None = None, connect_timeout: int = 10) -> Any:
    """
    Get a psycopg2 connection.

    Args:
        url: Database URL. If None, resolved from environment.
        connect_timeout: Connection timeout in seconds.

    Returns:
        psycopg2 connection object

    Raises:
        ImportError: If psycopg2 is not installed
        RuntimeError: If no database URL is available
    """
    try:
        import psycopg2
    except ImportError as e:
        raise ImportError(
            "psycopg2 is required for db.ops utilities. Install with: pip install psycopg2-binary"
        ) from e

    if url is None:
        url = get_database_url_from_env(required=True)

    # Add connect_timeout to connection options
    return psycopg2.connect(url, connect_timeout=connect_timeout)


def wait_for_database(
    url: str | None = None,
    timeout: float = DEFAULT_WAIT_TIMEOUT,
    interval: float = DEFAULT_WAIT_INTERVAL,
    verbose: bool = True,
) -> bool:
    """
    Wait for database to be ready, with retries.

    Useful for container startup scripts where the database may not
    be immediately available.

    Args:
        url: Database URL. If None, resolved from environment.
        timeout: Maximum time to wait in seconds (default: 30)
        interval: Time between retry attempts in seconds (default: 1.0)
        verbose: If True, log progress messages

    Returns:
        True if database is ready, False if timeout reached

    Example:
        >>> # In container startup script
        >>> if not wait_for_database(timeout=60):
        ...     sys.exit(1)
        >>> # Database is ready, continue with app startup
    """
    if url is None:
        url = get_database_url_from_env(required=True)

    start = time.monotonic()
    attempt = 0

    while True:
        attempt += 1
        elapsed = time.monotonic() - start

        if elapsed >= timeout:
            if verbose:
                logger.error(f"Database not ready after {timeout}s ({attempt} attempts)")
                _flush()
            return False

        try:
            conn = _get_connection(url, connect_timeout=min(5, int(timeout - elapsed)))
            conn.close()
            if verbose:
                logger.info(f"Database ready after {elapsed:.1f}s ({attempt} attempts)")
                _flush()
            return True
        except Exception as e:
            if verbose:
                remaining = timeout - elapsed
                logger.debug(f"Database not ready ({e}), retrying... ({remaining:.0f}s remaining)")
                _flush()
            time.sleep(interval)


def run_sync_sql(
    sql: str,
    params: Sequence[Any] | None = None,
    url: str | None = None,
    timeout: int = DEFAULT_STATEMENT_TIMEOUT,
    fetch: bool = False,
) -> list[tuple[Any, ...]] | None:
    """
    Execute SQL synchronously with a statement timeout.

    This is useful for one-off administrative queries that don't fit
    the normal async SQLAlchemy workflow.

    Args:
        sql: SQL statement to execute
        params: Optional parameters for parameterized queries
        url: Database URL. If None, resolved from environment.
        timeout: Statement timeout in seconds (default: 30)
        fetch: If True, return fetched rows; if False, return None

    Returns:
        List of tuples if fetch=True, otherwise None

    Raises:
        psycopg2.Error: On database errors
        TimeoutError: If statement exceeds timeout

    Example:
        >>> # Run a maintenance query
        >>> run_sync_sql("VACUUM ANALYZE users")
        >>>
        >>> # Fetch data with timeout
        >>> rows = run_sync_sql(
        ...     "SELECT id, name FROM users WHERE active = %s",
        ...     params=(True,),
        ...     fetch=True,
        ...     timeout=10
        ... )
    """
    conn = _get_connection(url)
    try:
        with conn.cursor() as cur:
            # Set statement timeout (PostgreSQL-specific)
            cur.execute(f"SET statement_timeout = '{timeout}s'")

            if params:
                cur.execute(sql, params)
            else:
                cur.execute(sql)

            if fetch:
                return cast("list[tuple[Any, ...]]", cur.fetchall())

        conn.commit()
        return None
    finally:
        conn.close()


def kill_blocking_queries(
    table_name: str,
    url: str | None = None,
    timeout: int = DEFAULT_STATEMENT_TIMEOUT,
    dry_run: bool = False,
) -> list[dict[str, Any]]:
    """
    Terminate queries blocking operations on a specific table.

    This is useful before DROP TABLE or ALTER TABLE operations that
    might be blocked by long-running queries or idle transactions.

    Args:
        table_name: Name of the table (can include schema as 'schema.table')
        url: Database URL. If None, resolved from environment.
        timeout: Statement timeout in seconds (default: 30)
        dry_run: If True, only report blocking queries without terminating

    Returns:
        List of dicts with info about terminated (or found) queries:
        [{"pid": 123, "query": "SELECT...", "state": "active", "terminated": True}]

    Example:
        >>> # Check what would be terminated
        >>> blocking = kill_blocking_queries("embeddings", dry_run=True)
        >>> print(f"Found {len(blocking)} blocking queries")
        >>>
        >>> # Actually terminate them
        >>> kill_blocking_queries("embeddings")
    """
    # Query to find blocking queries on a table
    find_blocking_sql = """
        SELECT pid, state, query, age(clock_timestamp(), query_start) as duration
        FROM pg_stat_activity
        WHERE pid != pg_backend_pid()
          AND state != 'idle'
          AND (
            query ILIKE %s
            OR query ILIKE %s
            OR query ILIKE %s
          )
        ORDER BY query_start;
    """

    # Patterns to match queries involving the table
    patterns = (
        f"%{table_name}%",
        f"%{table_name.split('.')[-1]}%",  # Just table name without schema
        f"%{table_name.replace('.', '%')}%",  # Handle schema.table pattern
    )

    conn = _get_connection(url)
    terminated: list[dict[str, Any]] = []

    try:
        with conn.cursor() as cur:
            cur.execute(f"SET statement_timeout = '{timeout}s'")
            cur.execute(find_blocking_sql, patterns)
            rows = cur.fetchall()

            for pid, state, query, duration in rows:
                info = {
                    "pid": pid,
                    "state": state,
                    "query": query[:200] + "..." if len(query) > 200 else query,
                    "duration": str(duration),
                    "terminated": False,
                }

                if not dry_run:
                    try:
                        cur.execute("SELECT pg_terminate_backend(%s)", (pid,))
                        info["terminated"] = True
                        logger.info(f"Terminated query PID {pid}: {query[:100]}...")
                    except Exception as e:
                        logger.warning(f"Failed to terminate PID {pid}: {e}")
                        info["error"] = str(e)

                terminated.append(info)

            conn.commit()
    finally:
        conn.close()

    _flush()
    return terminated


def drop_table_safe(
    table_name: str,
    url: str | None = None,
    timeout: int = DEFAULT_STATEMENT_TIMEOUT,
    kill_blocking: bool = True,
    if_exists: bool = True,
    cascade: bool = False,
) -> bool:
    """
    Drop a table safely with lock handling.

    Handles common issues with DROP TABLE:
    - Terminates blocking queries first (optional)
    - Uses statement timeout to avoid hanging
    - Handles 'table does not exist' gracefully

    Args:
        table_name: Name of table to drop (can include schema)
        url: Database URL. If None, resolved from environment.
        timeout: Statement timeout in seconds (default: 30)
        kill_blocking: If True, terminate blocking queries first (default: True)
        if_exists: If True, don't error if table doesn't exist (default: True)
        cascade: If True, drop dependent objects (default: False)

    Returns:
        True if table was dropped (or didn't exist), False on error

    Example:
        >>> # Drop table, killing any blocking queries first
        >>> drop_table_safe("embeddings", cascade=True)
        True
        >>>
        >>> # Safe to call even if table doesn't exist
        >>> drop_table_safe("nonexistent_table")
        True
    """
    if url is None:
        url = get_database_url_from_env(required=True)

    # Kill blocking queries first if requested
    if kill_blocking:
        blocked = kill_blocking_queries(table_name, url=url, timeout=timeout)
        if blocked:
            logger.info(f"Terminated {len(blocked)} blocking queries before DROP")
            # Brief pause to let connections clean up
            time.sleep(0.5)

    # Build DROP statement
    drop_sql = "DROP TABLE"
    if if_exists:
        drop_sql += " IF EXISTS"
    drop_sql += f" {table_name}"
    if cascade:
        drop_sql += " CASCADE"

    try:
        run_sync_sql(drop_sql, url=url, timeout=timeout)
        logger.info(f"Dropped table: {table_name}")
        _flush()
        return True
    except Exception as e:
        logger.error(f"Failed to drop table {table_name}: {e}")
        _flush()
        return False


def get_database_url(
    required: bool = True,
    normalize: bool = True,
) -> str | None:
    """
    Convenience wrapper for get_database_url_from_env().

    This is the recommended way to get the database URL, as it
    handles all common environment variable names and normalizations.

    Args:
        required: If True, raise RuntimeError when no URL is found
        normalize: If True, convert postgres:// to postgresql://

    Returns:
        Database URL string, or None if not found and not required

    Example:
        >>> url = get_database_url()
        >>> print(url)
        'postgresql://user:pass@host:5432/db'
    """
    return get_database_url_from_env(required=required, normalize=normalize)


__all__ = [
    "wait_for_database",
    "run_sync_sql",
    "kill_blocking_queries",
    "drop_table_safe",
    "get_database_url",
]
```
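The module above is aimed at container entrypoints and one-off maintenance scripts. A minimal sketch of how the pieces might be combined in such a script; the table names and timeout values are illustrative, only the imported functions come from `svc_infra.db.ops`:

```python
# Hypothetical container entrypoint built on svc_infra.db.ops.
import sys

from svc_infra.db.ops import drop_table_safe, run_sync_sql, wait_for_database


def main() -> int:
    # Block until the database accepts connections, or give up after 60s.
    if not wait_for_database(timeout=60):
        return 1

    # One-off maintenance before the app starts (table name is illustrative).
    run_sync_sql("VACUUM ANALYZE users", timeout=120)

    # Clean up a scratch table; blocking queries are terminated first by default.
    drop_table_safe("tmp_import_staging", cascade=True)
    return 0


if __name__ == "__main__":
    sys.exit(main())
```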
svc_infra/db/outbox.py
ADDED
@@ -0,0 +1,105 @@

```python
from __future__ import annotations

from collections.abc import Iterable
from dataclasses import dataclass, field
from datetime import UTC, datetime
from typing import Any, Protocol


@dataclass
class OutboxMessage:
    id: int
    topic: str
    payload: dict[str, Any]
    created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
    attempts: int = 0
    processed_at: datetime | None = None


class OutboxStore(Protocol):
    def enqueue(self, topic: str, payload: dict[str, Any]) -> OutboxMessage:
        pass

    def fetch_next(self, *, topics: Iterable[str] | None = None) -> OutboxMessage | None:
        """Return the next undispatched, unprocessed message (FIFO per-topic), or None.

        Notes:
        - Messages with attempts > 0 are considered "dispatched" to the job queue and won't be re-enqueued.
        - Delivery retries are handled by the job queue worker, not by re-reading the outbox.
        """
        pass

    def mark_processed(self, msg_id: int) -> None:
        pass

    def mark_failed(self, msg_id: int) -> None:
        pass


class InMemoryOutboxStore:
    """Simple in-memory outbox for tests and local runs."""

    def __init__(self):
        self._seq = 0
        self._messages: list[OutboxMessage] = []

    def enqueue(self, topic: str, payload: dict[str, Any]) -> OutboxMessage:
        self._seq += 1
        msg = OutboxMessage(id=self._seq, topic=topic, payload=dict(payload))
        self._messages.append(msg)
        return msg

    def fetch_next(self, *, topics: Iterable[str] | None = None) -> OutboxMessage | None:
        allowed = set(topics) if topics else None
        for msg in self._messages:
            if msg.processed_at is not None:
                continue
            # skip already dispatched messages (attempts>0)
            if msg.attempts > 0:
                continue
            if allowed is not None and msg.topic not in allowed:
                continue
            return msg
        return None

    def mark_processed(self, msg_id: int) -> None:
        for msg in self._messages:
            if msg.id == msg_id:
                msg.processed_at = datetime.now(UTC)
                return

    def mark_failed(self, msg_id: int) -> None:
        for msg in self._messages:
            if msg.id == msg_id:
                msg.attempts += 1
                return


class SqlOutboxStore:
    """Skeleton for a SQL-backed outbox store.

    Implementations should:
    - INSERT on enqueue
    - SELECT FOR UPDATE SKIP LOCKED (or equivalent) to fetch next
    - UPDATE processed_at (and attempts on failure)
    """

    def __init__(self, session_factory):
        self._session_factory = session_factory

    # Placeholders to outline the API; not implemented here.
    def enqueue(
        self, topic: str, payload: dict[str, Any]
    ) -> OutboxMessage:  # pragma: no cover - skeleton
        raise NotImplementedError

    def fetch_next(
        self, *, topics: Iterable[str] | None = None
    ) -> OutboxMessage | None:  # pragma: no cover - skeleton
        raise NotImplementedError

    def mark_processed(self, msg_id: int) -> None:  # pragma: no cover - skeleton
        raise NotImplementedError

    def mark_failed(self, msg_id: int) -> None:  # pragma: no cover - skeleton
        raise NotImplementedError
```
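`SqlOutboxStore` only outlines the contract. A rough sketch of the SELECT ... FOR UPDATE SKIP LOCKED fetch its docstring calls for, assuming a hypothetical `outbox` table (id, topic, payload, attempts, processed_at) and a synchronous SQLAlchemy session; neither is shipped in this package:

```python
from sqlalchemy import text

from svc_infra.db.outbox import OutboxMessage

# Illustrative query: the "outbox" table and its columns are assumptions for
# this sketch. SKIP LOCKED lets concurrent workers claim different rows.
FETCH_SQL = text(
    """
    SELECT id, topic, payload, attempts, processed_at
    FROM outbox
    WHERE processed_at IS NULL AND attempts = 0
    ORDER BY id
    LIMIT 1
    FOR UPDATE SKIP LOCKED
    """
)


def fetch_next_skip_locked(session) -> OutboxMessage | None:
    """Claim the oldest undispatched row inside the caller's transaction."""
    row = session.execute(FETCH_SQL).first()
    if row is None:
        return None
    return OutboxMessage(
        id=row.id,
        topic=row.topic,
        payload=row.payload,
        attempts=row.attempts,
        processed_at=row.processed_at,
    )
```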
svc_infra/db/sql/apikey.py
CHANGED
```diff
@@ -4,30 +4,47 @@ import hashlib
 import hmac
 import os
 import uuid
-from datetime import
-
-
-
+from datetime import UTC, datetime
+
+from sqlalchemy import (
+    JSON,
+    Boolean,
+    DateTime,
+    ForeignKey,
+    Index,
+    String,
+    UniqueConstraint,
+    text,
+)
 from sqlalchemy.ext.mutable import MutableDict, MutableList
 from sqlalchemy.orm import Mapped, declared_attr, mapped_column, relationship
 
+from svc_infra.app.env import require_secret
 from svc_infra.db.sql.base import ModelBase
 from svc_infra.db.sql.types import GUID
 
-
+
+def _get_apikey_secret() -> str:
+    """Get APIKEY_HASH_SECRET, requiring it in production."""
+    return require_secret(
+        os.getenv("APIKEY_HASH_SECRET"),
+        "APIKEY_HASH_SECRET",
+        dev_default="dev-only-apikey-hmac-secret-not-for-production",
+    )
 
 
 def _hmac_sha256(s: str) -> str:
-
+    secret = _get_apikey_secret()
+    return hmac.new(secret.encode(), s.encode(), hashlib.sha256).hexdigest()
 
 
 def _now() -> datetime:
-    return datetime.now(
+    return datetime.now(UTC)
 
 
 # -------------------- Factory & registry --------------------
 
-_ApiKeyModel:
+_ApiKeyModel: type | None = None
 
 
 def get_apikey_model() -> type:
@@ -37,19 +54,19 @@ def get_apikey_model() -> type:
     return _ApiKeyModel
 
 
-def bind_apikey_model(user_model:
+def bind_apikey_model(user_model: type[ModelBase], *, table_name: str = "api_keys") -> type:
     """
     Create and register an ApiKey model bound to the provided user_model and table name.
     Call this once during app boot (e.g., inside add_auth_users when enable_api_keys=True).
     """
 
-    class ApiKey(ModelBase):
+    class ApiKey(ModelBase):
         __tablename__ = table_name
 
         id: Mapped[uuid.UUID] = mapped_column(GUID(), primary_key=True, default=uuid.uuid4)
 
         @declared_attr
-        def user_id(cls) -> Mapped[uuid.UUID | None]:
+        def user_id(cls) -> Mapped[uuid.UUID | None]:
             return mapped_column(
                 GUID(),
                 ForeignKey(f"{user_model.__tablename__}.id", ondelete="SET NULL"),
@@ -58,7 +75,7 @@ def bind_apikey_model(user_model: Type, *, table_name: str = "api_keys") -> type
             )
 
         @declared_attr
-        def user(cls):
+        def user(cls):
             return relationship(user_model.__name__, lazy="selectin")
 
         name: Mapped[str] = mapped_column(String(128), nullable=False)
@@ -74,7 +91,9 @@ def bind_apikey_model(user_model: Type, *, table_name: str = "api_keys") -> type
         meta: Mapped[dict] = mapped_column(MutableDict.as_mutable(JSON), default=dict)
 
         created_at = mapped_column(
-            DateTime(timezone=True),
+            DateTime(timezone=True),
+            server_default=text("CURRENT_TIMESTAMP"),
+            nullable=False,
         )
         updated_at = mapped_column(
             DateTime(timezone=True),
@@ -115,7 +134,7 @@ def bind_apikey_model(user_model: Type, *, table_name: str = "api_keys") -> type
     return ApiKey
 
 
-def try_autobind_apikey_model(*, require_env: bool = False) ->
+def try_autobind_apikey_model(*, require_env: bool = False) -> type | None:
     """
     If API keys aren’t bound yet, try to discover the User model and bind.
     - If require_env=True, only bind when AUTH_ENABLE_API_KEYS is truthy.
@@ -133,7 +152,7 @@ def try_autobind_apikey_model(*, require_env: bool = False) -> Optional[type]:
     from svc_infra.db.sql.base import ModelBase
 
     # SQLAlchemy 2.x: iterate registry mappers to get mapped classes
-    for mapper in list(
+    for mapper in list(ModelBase.registry.mappers):
         cls = mapper.class_
         if getattr(cls, "__svc_infra_auth_user__", False):
             return bind_apikey_model(cls)  # binds and returns ApiKey
```
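The new `_get_apikey_secret` helper routes the HMAC secret through `require_secret`, so a dev-only default is tolerated outside production while `APIKEY_HASH_SECRET` must be set in production. A sketch of how a presented key could then be checked against a stored digest; the `stored_digest` lookup is hypothetical, only `_hmac_sha256` comes from the module above:

```python
import hmac

from svc_infra.db.sql.apikey import _hmac_sha256


def verify_api_key(presented_key: str, stored_digest: str) -> bool:
    """Illustrative check; assumes stored_digest was saved as _hmac_sha256(raw_key)."""
    # Recompute the keyed digest for the presented key and compare in constant time.
    return hmac.compare_digest(_hmac_sha256(presented_key), stored_digest)
```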
svc_infra/db/sql/authref.py
CHANGED
```diff
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from typing import Optional, Tuple
-
 from sqlalchemy import ForeignKeyConstraint
 from sqlalchemy.sql.type_api import TypeEngine
 
@@ -9,7 +7,7 @@ from svc_infra.db.sql.base import ModelBase
 from svc_infra.db.sql.types import GUID
 
 
-def _find_auth_mapper() ->
+def _find_auth_mapper() -> tuple[str, TypeEngine, str] | None:
     """
     Returns (table_name, pk_sqlatype, pk_name) for the auth user model.
     Looks for any mapped class with __svc_infra_auth_user__ = True that
@@ -22,17 +20,21 @@ def _find_auth_mapper() -> Optional[Tuple[str, TypeEngine, str]]:
             table = mapper.local_table or getattr(cls, "__table__", None)
             if table is None:
                 continue
-
+            table_name = getattr(table, "name", None)
+            if not isinstance(table_name, str) or not table_name:
+                continue
+            # SQLAlchemy's primary_key is iterable; don't rely on .columns typing.
+            pk_cols = list(table.primary_key)
             if len(pk_cols) != 1:
                 continue  # require single-column PK
             pk_col = pk_cols[0]
-            return (
+            return (table_name, pk_col.type, pk_col.name)
         except Exception:
             pass
     return None
 
 
-def resolve_auth_table_pk() ->
+def resolve_auth_table_pk() -> tuple[str, TypeEngine, str]:
     """
     Single source of truth for the auth table and PK.
     Falls back to ('users', GUID(), 'id') if nothing is marked.
```
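`resolve_auth_table_pk()` now returns a plain `tuple[str, TypeEngine, str]`. A sketch of how a dependent table might consume that tuple to point a column at the auth user's primary key; the `ProjectMember` model is illustrative and not part of svc-infra:

```python
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy.orm import Mapped, mapped_column

from svc_infra.db.sql.authref import resolve_auth_table_pk
from svc_infra.db.sql.base import ModelBase

# Resolve the auth user's table and primary key once at import time;
# falls back to ("users", GUID(), "id") when no auth model is marked.
auth_table, auth_pk_type, auth_pk_name = resolve_auth_table_pk()


class ProjectMember(ModelBase):  # illustrative model, not shipped by svc-infra
    __tablename__ = "project_members"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Column typed to match the resolved auth PK type (GUID by default).
    user_id = mapped_column(auth_pk_type, nullable=True, index=True)

    __table_args__ = (
        # FK target is e.g. "users.id" unless a custom auth model is registered.
        ForeignKeyConstraint(["user_id"], [f"{auth_table}.{auth_pk_name}"]),
    )
```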
svc_infra/db/sql/constants.py
CHANGED
```diff
@@ -1,12 +1,16 @@
 from __future__ import annotations
 
 import re
-from
+from collections.abc import Sequence
 
 # Environment variable names to look up for DB URL
+# Order matters: svc-infra canonical names first, then common PaaS names
 DEFAULT_DB_ENV_VARS: Sequence[str] = (
     "SQL_URL",
     "DB_URL",
+    "DATABASE_URL",  # Heroku, Railway (public)
+    "DATABASE_URL_PRIVATE",  # Railway (private networking)
+    "PRIVATE_SQL_URL",  # Legacy svc-infra naming
 )
 
 # Regex used to detect async drivers from URL drivername
```
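The new ordering comment matters because the first variable in `DEFAULT_DB_ENV_VARS` that is set wins. A minimal sketch of that resolution order, including the postgres:// to postgresql:// normalization described by `db.ops.get_database_url`; the packaged implementation lives in `svc_infra.db.sql.utils.get_database_url_from_env`, and this helper is only illustrative:

```python
import os

from svc_infra.db.sql.constants import DEFAULT_DB_ENV_VARS


def resolve_db_url_sketch() -> str | None:
    """Illustrative resolution: the first set variable in DEFAULT_DB_ENV_VARS wins."""
    for name in DEFAULT_DB_ENV_VARS:
        url = os.getenv(name)
        if url:
            # Normalize the legacy scheme so SQLAlchemy/psycopg2 accept it.
            if url.startswith("postgres://"):
                url = "postgresql://" + url[len("postgres://"):]
            return url
    return None
```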