mcp_hangar-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_hangar/__init__.py +139 -0
- mcp_hangar/application/__init__.py +1 -0
- mcp_hangar/application/commands/__init__.py +67 -0
- mcp_hangar/application/commands/auth_commands.py +118 -0
- mcp_hangar/application/commands/auth_handlers.py +296 -0
- mcp_hangar/application/commands/commands.py +59 -0
- mcp_hangar/application/commands/handlers.py +189 -0
- mcp_hangar/application/discovery/__init__.py +21 -0
- mcp_hangar/application/discovery/discovery_metrics.py +283 -0
- mcp_hangar/application/discovery/discovery_orchestrator.py +497 -0
- mcp_hangar/application/discovery/lifecycle_manager.py +315 -0
- mcp_hangar/application/discovery/security_validator.py +414 -0
- mcp_hangar/application/event_handlers/__init__.py +50 -0
- mcp_hangar/application/event_handlers/alert_handler.py +191 -0
- mcp_hangar/application/event_handlers/audit_handler.py +203 -0
- mcp_hangar/application/event_handlers/knowledge_base_handler.py +120 -0
- mcp_hangar/application/event_handlers/logging_handler.py +69 -0
- mcp_hangar/application/event_handlers/metrics_handler.py +152 -0
- mcp_hangar/application/event_handlers/persistent_audit_store.py +217 -0
- mcp_hangar/application/event_handlers/security_handler.py +604 -0
- mcp_hangar/application/mcp/tooling.py +158 -0
- mcp_hangar/application/ports/__init__.py +9 -0
- mcp_hangar/application/ports/observability.py +237 -0
- mcp_hangar/application/queries/__init__.py +52 -0
- mcp_hangar/application/queries/auth_handlers.py +237 -0
- mcp_hangar/application/queries/auth_queries.py +118 -0
- mcp_hangar/application/queries/handlers.py +227 -0
- mcp_hangar/application/read_models/__init__.py +11 -0
- mcp_hangar/application/read_models/provider_views.py +139 -0
- mcp_hangar/application/sagas/__init__.py +11 -0
- mcp_hangar/application/sagas/group_rebalance_saga.py +137 -0
- mcp_hangar/application/sagas/provider_failover_saga.py +266 -0
- mcp_hangar/application/sagas/provider_recovery_saga.py +172 -0
- mcp_hangar/application/services/__init__.py +9 -0
- mcp_hangar/application/services/provider_service.py +208 -0
- mcp_hangar/application/services/traced_provider_service.py +211 -0
- mcp_hangar/bootstrap/runtime.py +328 -0
- mcp_hangar/context.py +178 -0
- mcp_hangar/domain/__init__.py +117 -0
- mcp_hangar/domain/contracts/__init__.py +57 -0
- mcp_hangar/domain/contracts/authentication.py +225 -0
- mcp_hangar/domain/contracts/authorization.py +229 -0
- mcp_hangar/domain/contracts/event_store.py +178 -0
- mcp_hangar/domain/contracts/metrics_publisher.py +59 -0
- mcp_hangar/domain/contracts/persistence.py +383 -0
- mcp_hangar/domain/contracts/provider_runtime.py +146 -0
- mcp_hangar/domain/discovery/__init__.py +20 -0
- mcp_hangar/domain/discovery/conflict_resolver.py +267 -0
- mcp_hangar/domain/discovery/discovered_provider.py +185 -0
- mcp_hangar/domain/discovery/discovery_service.py +412 -0
- mcp_hangar/domain/discovery/discovery_source.py +192 -0
- mcp_hangar/domain/events.py +433 -0
- mcp_hangar/domain/exceptions.py +525 -0
- mcp_hangar/domain/model/__init__.py +70 -0
- mcp_hangar/domain/model/aggregate.py +58 -0
- mcp_hangar/domain/model/circuit_breaker.py +152 -0
- mcp_hangar/domain/model/event_sourced_api_key.py +413 -0
- mcp_hangar/domain/model/event_sourced_provider.py +423 -0
- mcp_hangar/domain/model/event_sourced_role_assignment.py +268 -0
- mcp_hangar/domain/model/health_tracker.py +183 -0
- mcp_hangar/domain/model/load_balancer.py +185 -0
- mcp_hangar/domain/model/provider.py +810 -0
- mcp_hangar/domain/model/provider_group.py +656 -0
- mcp_hangar/domain/model/tool_catalog.py +105 -0
- mcp_hangar/domain/policies/__init__.py +19 -0
- mcp_hangar/domain/policies/provider_health.py +187 -0
- mcp_hangar/domain/repository.py +249 -0
- mcp_hangar/domain/security/__init__.py +85 -0
- mcp_hangar/domain/security/input_validator.py +710 -0
- mcp_hangar/domain/security/rate_limiter.py +387 -0
- mcp_hangar/domain/security/roles.py +237 -0
- mcp_hangar/domain/security/sanitizer.py +387 -0
- mcp_hangar/domain/security/secrets.py +501 -0
- mcp_hangar/domain/services/__init__.py +20 -0
- mcp_hangar/domain/services/audit_service.py +376 -0
- mcp_hangar/domain/services/image_builder.py +328 -0
- mcp_hangar/domain/services/provider_launcher.py +1046 -0
- mcp_hangar/domain/value_objects.py +1138 -0
- mcp_hangar/errors.py +818 -0
- mcp_hangar/fastmcp_server.py +1105 -0
- mcp_hangar/gc.py +134 -0
- mcp_hangar/infrastructure/__init__.py +79 -0
- mcp_hangar/infrastructure/async_executor.py +133 -0
- mcp_hangar/infrastructure/auth/__init__.py +37 -0
- mcp_hangar/infrastructure/auth/api_key_authenticator.py +388 -0
- mcp_hangar/infrastructure/auth/event_sourced_store.py +567 -0
- mcp_hangar/infrastructure/auth/jwt_authenticator.py +360 -0
- mcp_hangar/infrastructure/auth/middleware.py +340 -0
- mcp_hangar/infrastructure/auth/opa_authorizer.py +243 -0
- mcp_hangar/infrastructure/auth/postgres_store.py +659 -0
- mcp_hangar/infrastructure/auth/projections.py +366 -0
- mcp_hangar/infrastructure/auth/rate_limiter.py +311 -0
- mcp_hangar/infrastructure/auth/rbac_authorizer.py +323 -0
- mcp_hangar/infrastructure/auth/sqlite_store.py +624 -0
- mcp_hangar/infrastructure/command_bus.py +112 -0
- mcp_hangar/infrastructure/discovery/__init__.py +110 -0
- mcp_hangar/infrastructure/discovery/docker_source.py +289 -0
- mcp_hangar/infrastructure/discovery/entrypoint_source.py +249 -0
- mcp_hangar/infrastructure/discovery/filesystem_source.py +383 -0
- mcp_hangar/infrastructure/discovery/kubernetes_source.py +247 -0
- mcp_hangar/infrastructure/event_bus.py +260 -0
- mcp_hangar/infrastructure/event_sourced_repository.py +443 -0
- mcp_hangar/infrastructure/event_store.py +396 -0
- mcp_hangar/infrastructure/knowledge_base/__init__.py +259 -0
- mcp_hangar/infrastructure/knowledge_base/contracts.py +202 -0
- mcp_hangar/infrastructure/knowledge_base/memory.py +177 -0
- mcp_hangar/infrastructure/knowledge_base/postgres.py +545 -0
- mcp_hangar/infrastructure/knowledge_base/sqlite.py +513 -0
- mcp_hangar/infrastructure/metrics_publisher.py +36 -0
- mcp_hangar/infrastructure/observability/__init__.py +10 -0
- mcp_hangar/infrastructure/observability/langfuse_adapter.py +534 -0
- mcp_hangar/infrastructure/persistence/__init__.py +33 -0
- mcp_hangar/infrastructure/persistence/audit_repository.py +371 -0
- mcp_hangar/infrastructure/persistence/config_repository.py +398 -0
- mcp_hangar/infrastructure/persistence/database.py +333 -0
- mcp_hangar/infrastructure/persistence/database_common.py +330 -0
- mcp_hangar/infrastructure/persistence/event_serializer.py +280 -0
- mcp_hangar/infrastructure/persistence/event_upcaster.py +166 -0
- mcp_hangar/infrastructure/persistence/in_memory_event_store.py +150 -0
- mcp_hangar/infrastructure/persistence/recovery_service.py +312 -0
- mcp_hangar/infrastructure/persistence/sqlite_event_store.py +386 -0
- mcp_hangar/infrastructure/persistence/unit_of_work.py +409 -0
- mcp_hangar/infrastructure/persistence/upcasters/README.md +13 -0
- mcp_hangar/infrastructure/persistence/upcasters/__init__.py +7 -0
- mcp_hangar/infrastructure/query_bus.py +153 -0
- mcp_hangar/infrastructure/saga_manager.py +401 -0
- mcp_hangar/logging_config.py +209 -0
- mcp_hangar/metrics.py +1007 -0
- mcp_hangar/models.py +31 -0
- mcp_hangar/observability/__init__.py +54 -0
- mcp_hangar/observability/health.py +487 -0
- mcp_hangar/observability/metrics.py +319 -0
- mcp_hangar/observability/tracing.py +433 -0
- mcp_hangar/progress.py +542 -0
- mcp_hangar/retry.py +613 -0
- mcp_hangar/server/__init__.py +120 -0
- mcp_hangar/server/__main__.py +6 -0
- mcp_hangar/server/auth_bootstrap.py +340 -0
- mcp_hangar/server/auth_cli.py +335 -0
- mcp_hangar/server/auth_config.py +305 -0
- mcp_hangar/server/bootstrap.py +735 -0
- mcp_hangar/server/cli.py +161 -0
- mcp_hangar/server/config.py +224 -0
- mcp_hangar/server/context.py +215 -0
- mcp_hangar/server/http_auth_middleware.py +165 -0
- mcp_hangar/server/lifecycle.py +467 -0
- mcp_hangar/server/state.py +117 -0
- mcp_hangar/server/tools/__init__.py +16 -0
- mcp_hangar/server/tools/discovery.py +186 -0
- mcp_hangar/server/tools/groups.py +75 -0
- mcp_hangar/server/tools/health.py +301 -0
- mcp_hangar/server/tools/provider.py +939 -0
- mcp_hangar/server/tools/registry.py +320 -0
- mcp_hangar/server/validation.py +113 -0
- mcp_hangar/stdio_client.py +229 -0
- mcp_hangar-0.2.0.dist-info/METADATA +347 -0
- mcp_hangar-0.2.0.dist-info/RECORD +160 -0
- mcp_hangar-0.2.0.dist-info/WHEEL +4 -0
- mcp_hangar-0.2.0.dist-info/entry_points.txt +2 -0
- mcp_hangar-0.2.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,624 @@
"""SQLite-based persistent storage for API keys and roles.

Provides lightweight persistent storage for single-instance deployments.
Uses WAL mode for better concurrent read performance.

For multi-instance deployments, use PostgresApiKeyStore instead.

Note: This store emits domain events for all write operations via
an optional event_publisher callback. For full CQRS integration,
inject the EventBus.publish method as the event_publisher.
"""

from datetime import datetime, timezone
import json
from pathlib import Path
import secrets
import sqlite3
import threading
from typing import Callable

import structlog

from ...domain.contracts.authentication import ApiKeyMetadata, IApiKeyStore
from ...domain.contracts.authorization import IRoleStore
from ...domain.events import ApiKeyCreated, ApiKeyRevoked, RoleAssigned, RoleRevoked
from ...domain.exceptions import ExpiredCredentialsError, RevokedCredentialsError
from ...domain.security.roles import BUILTIN_ROLES
from ...domain.value_objects import Permission, Principal, PrincipalId, PrincipalType, Role

logger = structlog.get_logger(__name__)


# SQLite Schema
SQLITE_SCHEMA = """
-- API Keys table
CREATE TABLE IF NOT EXISTS api_keys (
    key_hash TEXT PRIMARY KEY,
    key_id TEXT NOT NULL UNIQUE,
    principal_id TEXT NOT NULL,
    name TEXT NOT NULL,
    tenant_id TEXT,
    groups TEXT DEFAULT '[]',
    created_at TEXT NOT NULL,
    expires_at TEXT,
    last_used_at TEXT,
    revoked INTEGER NOT NULL DEFAULT 0,
    revoked_at TEXT,
    metadata TEXT DEFAULT '{}'
);

CREATE INDEX IF NOT EXISTS idx_api_keys_principal_id ON api_keys(principal_id);
CREATE INDEX IF NOT EXISTS idx_api_keys_key_id ON api_keys(key_id);

-- Roles table
CREATE TABLE IF NOT EXISTS roles (
    name TEXT PRIMARY KEY,
    description TEXT,
    permissions TEXT NOT NULL DEFAULT '[]',
    is_builtin INTEGER NOT NULL DEFAULT 0,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL
);

-- Role assignments table
CREATE TABLE IF NOT EXISTS role_assignments (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    principal_id TEXT NOT NULL,
    role_name TEXT NOT NULL REFERENCES roles(name) ON DELETE CASCADE,
    scope TEXT NOT NULL DEFAULT 'global',
    assigned_at TEXT NOT NULL,
    assigned_by TEXT,
    UNIQUE(principal_id, role_name, scope)
);

CREATE INDEX IF NOT EXISTS idx_role_assignments_principal_scope
    ON role_assignments(principal_id, scope);
"""


class SQLiteApiKeyStore(IApiKeyStore):
    """SQLite-based API key store.

    Suitable for single-instance deployments or development.
    Uses WAL mode for better concurrent read performance.

    WARNING: For multi-instance deployments, use PostgresApiKeyStore
    which provides proper distributed locking.

    Events emitted:
    - ApiKeyCreated: When a new key is created
    - ApiKeyRevoked: When a key is revoked
    """

    MAX_KEYS_PER_PRINCIPAL = 100

    def __init__(
        self,
        db_path: str | Path,
        event_publisher: Callable | None = None,
    ):
        """Initialize the SQLite store.

        Args:
            db_path: Path to SQLite database file.
            event_publisher: Optional callback for publishing domain events.
                For CQRS integration, pass EventBus.publish.
        """
        self._db_path = str(db_path)
        self._local = threading.local()
        self._initialized = False
        self._event_publisher = event_publisher

    def _get_connection(self) -> sqlite3.Connection:
        """Get thread-local connection."""
        if not hasattr(self._local, "connection") or self._local.connection is None:
            conn = sqlite3.connect(self._db_path, check_same_thread=False)
            conn.row_factory = sqlite3.Row
            # Enable WAL mode for better concurrent reads
            conn.execute("PRAGMA journal_mode=WAL")
            conn.execute("PRAGMA foreign_keys=ON")
            self._local.connection = conn
        return self._local.connection

    def initialize(self) -> None:
        """Create tables if they don't exist."""
        if self._initialized:
            return

        conn = self._get_connection()
        conn.executescript(SQLITE_SCHEMA)
        conn.commit()
        self._initialized = True
        logger.info("sqlite_api_key_store_initialized", db_path=self._db_path)

    def get_principal_for_key(self, key_hash: str) -> Principal | None:
        """Look up principal for an API key hash."""
        conn = self._get_connection()
        cursor = conn.execute(
            """
            SELECT principal_id, tenant_id, groups, name, key_id,
                   expires_at, revoked, metadata
            FROM api_keys
            WHERE key_hash = ?
            """,
            (key_hash,),
        )

        row = cursor.fetchone()
        if row is None:
            return None

        # Check revocation
        if row["revoked"]:
            raise RevokedCredentialsError(
                message="API key has been revoked",
                auth_method="api_key",
            )

        # Check expiration
        if row["expires_at"]:
            expires_at = datetime.fromisoformat(row["expires_at"])
            if expires_at < datetime.now(timezone.utc):
                raise ExpiredCredentialsError(
                    message="API key has expired",
                    auth_method="api_key",
                    expired_at=expires_at.timestamp(),
                )

        # Update last_used_at
        try:
            conn.execute(
                """
                UPDATE api_keys
                SET last_used_at = ?
                WHERE key_hash = ?
                """,
                (datetime.now(timezone.utc).isoformat(), key_hash),
            )
            conn.commit()
        except Exception as e:
            logger.warning("failed_to_update_last_used", error=str(e))

        # Parse groups from JSON
        groups = json.loads(row["groups"]) if row["groups"] else []
        metadata = json.loads(row["metadata"]) if row["metadata"] else {}

        return Principal(
            id=PrincipalId(row["principal_id"]),
            type=PrincipalType.SERVICE_ACCOUNT,
            tenant_id=row["tenant_id"],
            groups=frozenset(groups),
            metadata={"key_id": row["key_id"], "key_name": row["name"], **metadata},
        )

    def create_key(
        self,
        principal_id: str,
        name: str,
        expires_at: datetime | None = None,
        groups: frozenset[str] | None = None,
        tenant_id: str | None = None,
        created_by: str = "system",
    ) -> str:
        """Create a new API key.

        Emits: ApiKeyCreated event
        """
        from .api_key_authenticator import ApiKeyAuthenticator

        conn = self._get_connection()

        # Check key count for principal
        cursor = conn.execute(
            """
            SELECT COUNT(*) as count FROM api_keys
            WHERE principal_id = ? AND revoked = 0
            """,
            (principal_id,),
        )
        count = cursor.fetchone()["count"]

        if count >= self.MAX_KEYS_PER_PRINCIPAL:
            raise ValueError(f"Principal {principal_id} has reached maximum API keys ({self.MAX_KEYS_PER_PRINCIPAL})")

        # Generate key
        raw_key = ApiKeyAuthenticator.generate_key()
        key_hash = ApiKeyAuthenticator._hash_key(raw_key)
        key_id = secrets.token_urlsafe(8)

        now = datetime.now(timezone.utc).isoformat()

        # Insert
        conn.execute(
            """
            INSERT INTO api_keys
            (key_hash, key_id, principal_id, name, tenant_id, groups, created_at, expires_at)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                key_hash,
                key_id,
                principal_id,
                name,
                tenant_id,
                json.dumps(list(groups or [])),
                now,
                expires_at.isoformat() if expires_at else None,
            ),
        )
        conn.commit()

        logger.info(
            "api_key_created",
            key_id=key_id,
            principal_id=principal_id,
            name=name,
            expires_at=expires_at.isoformat() if expires_at else None,
        )

        # Emit domain event
        if self._event_publisher:
            self._event_publisher(
                ApiKeyCreated(
                    key_id=key_id,
                    principal_id=principal_id,
                    key_name=name,
                    expires_at=expires_at.timestamp() if expires_at else None,
                    created_by=created_by,
                )
            )

        return raw_key

    def revoke_key(self, key_id: str, revoked_by: str = "system", reason: str = "") -> bool:
        """Revoke an API key.

        Emits: ApiKeyRevoked event
        """
        conn = self._get_connection()

        # Get principal_id before revoking (for event)
        cursor = conn.execute(
            """
            SELECT principal_id FROM api_keys WHERE key_id = ? AND revoked = 0
            """,
            (key_id,),
        )
        row = cursor.fetchone()
        principal_id = row["principal_id"] if row else None

        cursor = conn.execute(
            """
            UPDATE api_keys
            SET revoked = 1, revoked_at = ?
            WHERE key_id = ? AND revoked = 0
            """,
            (datetime.now(timezone.utc).isoformat(), key_id),
        )
        conn.commit()

        if cursor.rowcount > 0:
            logger.info("api_key_revoked", key_id=key_id)

            # Emit domain event
            if self._event_publisher and principal_id:
                self._event_publisher(
                    ApiKeyRevoked(
                        key_id=key_id,
                        principal_id=principal_id,
                        revoked_by=revoked_by,
                        reason=reason,
                    )
                )

            return True
        return False

    def list_keys(self, principal_id: str) -> list[ApiKeyMetadata]:
        """List API keys for a principal."""
        conn = self._get_connection()

        cursor = conn.execute(
            """
            SELECT key_id, name, principal_id, created_at,
                   expires_at, last_used_at, revoked
            FROM api_keys
            WHERE principal_id = ?
            ORDER BY created_at DESC
            """,
            (principal_id,),
        )

        return [
            ApiKeyMetadata(
                key_id=row["key_id"],
                name=row["name"],
                principal_id=row["principal_id"],
                created_at=datetime.fromisoformat(row["created_at"]) if row["created_at"] else None,
                expires_at=datetime.fromisoformat(row["expires_at"]) if row["expires_at"] else None,
                last_used_at=datetime.fromisoformat(row["last_used_at"]) if row["last_used_at"] else None,
                revoked=bool(row["revoked"]),
            )
            for row in cursor.fetchall()
        ]

    def count_keys(self, principal_id: str) -> int:
        """Count active keys for a principal."""
        conn = self._get_connection()

        cursor = conn.execute(
            """
            SELECT COUNT(*) as count FROM api_keys
            WHERE principal_id = ? AND revoked = 0
            """,
            (principal_id,),
        )

        return cursor.fetchone()["count"]

    def close(self) -> None:
        """Close the database connection."""
        if hasattr(self._local, "connection") and self._local.connection:
            try:
                self._local.connection.commit()  # Ensure any pending changes
            except Exception:
                pass
            self._local.connection.close()
            self._local.connection = None
        self._initialized = False


class SQLiteRoleStore(IRoleStore):
    """SQLite-based role store.

    Suitable for single-instance deployments or development.

    Events emitted:
    - RoleAssigned: When a role is assigned to a principal
    - RoleRevoked: When a role is revoked from a principal
    """

    def __init__(
        self,
        db_path: str | Path,
        event_publisher: Callable | None = None,
    ):
        """Initialize the SQLite store.

        Args:
            db_path: Path to SQLite database file.
            event_publisher: Optional callback for publishing domain events.
        """
        self._db_path = str(db_path)
        self._local = threading.local()
        self._initialized = False
        self._event_publisher = event_publisher

    def _get_connection(self) -> sqlite3.Connection:
        """Get thread-local connection."""
        if not hasattr(self._local, "connection") or self._local.connection is None:
            conn = sqlite3.connect(self._db_path, check_same_thread=False)
            conn.row_factory = sqlite3.Row
            conn.execute("PRAGMA journal_mode=WAL")
            conn.execute("PRAGMA foreign_keys=ON")
            self._local.connection = conn
        return self._local.connection

    def initialize(self) -> None:
        """Create tables and seed built-in roles."""
        if self._initialized:
            return

        conn = self._get_connection()
        conn.executescript(SQLITE_SCHEMA)

        # Seed built-in roles
        now = datetime.now(timezone.utc).isoformat()
        for role_name, role in BUILTIN_ROLES.items():
            permissions_json = json.dumps(
                [
                    {"resource_type": p.resource_type, "action": p.action, "resource_id": p.resource_id}
                    for p in role.permissions
                ]
            )

            conn.execute(
                """
                INSERT OR REPLACE INTO roles (name, description, permissions, is_builtin, created_at, updated_at)
                VALUES (?, ?, ?, 1, ?, ?)
                """,
                (role_name, role.description, permissions_json, now, now),
            )

        conn.commit()
        self._initialized = True
        logger.info("sqlite_role_store_initialized", db_path=self._db_path)

    def get_role(self, role_name: str) -> Role | None:
        """Get role by name."""
        conn = self._get_connection()

        cursor = conn.execute(
            """
            SELECT name, description, permissions
            FROM roles
            WHERE name = ?
            """,
            (role_name,),
        )

        row = cursor.fetchone()
        if row is None:
            return None

        permissions_list = json.loads(row["permissions"]) if row["permissions"] else []
        permissions = frozenset(
            Permission(
                resource_type=p["resource_type"],
                action=p["action"],
                resource_id=p.get("resource_id", "*"),
            )
            for p in permissions_list
        )

        return Role(name=row["name"], description=row["description"] or "", permissions=permissions)

    def add_role(self, role: Role) -> None:
        """Add a custom role."""
        conn = self._get_connection()

        permissions_json = json.dumps(
            [
                {"resource_type": p.resource_type, "action": p.action, "resource_id": p.resource_id}
                for p in role.permissions
            ]
        )

        now = datetime.now(timezone.utc).isoformat()
        conn.execute(
            """
            INSERT OR REPLACE INTO roles (name, description, permissions, is_builtin, created_at, updated_at)
            VALUES (?, ?, ?, 0, ?, ?)
            """,
            (role.name, role.description, permissions_json, now, now),
        )

        conn.commit()
        logger.info("role_created", role_name=role.name)

    def get_roles_for_principal(
        self,
        principal_id: str,
        scope: str = "*",
    ) -> list[Role]:
        """Get all roles assigned to a principal."""
        conn = self._get_connection()

        if scope == "*":
            cursor = conn.execute(
                """
                SELECT r.name, r.description, r.permissions
                FROM roles r
                JOIN role_assignments a ON r.name = a.role_name
                WHERE a.principal_id = ?
                """,
                (principal_id,),
            )
        else:
            cursor = conn.execute(
                """
                SELECT r.name, r.description, r.permissions
                FROM roles r
                JOIN role_assignments a ON r.name = a.role_name
                WHERE a.principal_id = ? AND (a.scope = ? OR a.scope = 'global')
                """,
                (principal_id, scope),
            )

        roles = []
        for row in cursor.fetchall():
            permissions_list = json.loads(row["permissions"]) if row["permissions"] else []
            permissions = frozenset(
                Permission(
                    resource_type=p["resource_type"],
                    action=p["action"],
                    resource_id=p.get("resource_id", "*"),
                )
                for p in permissions_list
            )
            roles.append(Role(name=row["name"], description=row["description"] or "", permissions=permissions))

        return roles

    def assign_role(
        self,
        principal_id: str,
        role_name: str,
        scope: str = "global",
        assigned_by: str = "system",
    ) -> None:
        """Assign a role to a principal.

        Emits: RoleAssigned event
        """
        conn = self._get_connection()

        # Verify role exists
        cursor = conn.execute("SELECT 1 FROM roles WHERE name = ?", (role_name,))
        if cursor.fetchone() is None:
            raise ValueError(f"Unknown role: {role_name}")

        now = datetime.now(timezone.utc).isoformat()
        cursor = conn.execute(
            """
            INSERT OR IGNORE INTO role_assignments (principal_id, role_name, scope, assigned_at)
            VALUES (?, ?, ?, ?)
            """,
            (principal_id, role_name, scope, now),
        )

        conn.commit()

        # Only emit event if actually inserted (not ignored due to duplicate)
        if cursor.rowcount > 0:
            logger.info("role_assigned", principal_id=principal_id, role_name=role_name, scope=scope)

            if self._event_publisher:
                self._event_publisher(
                    RoleAssigned(
                        principal_id=principal_id,
                        role_name=role_name,
                        scope=scope,
                        assigned_by=assigned_by,
                    )
                )

    def revoke_role(
        self,
        principal_id: str,
        role_name: str,
        scope: str = "global",
        revoked_by: str = "system",
    ) -> None:
        """Revoke a role from a principal.

        Emits: RoleRevoked event
        """
        conn = self._get_connection()

        cursor = conn.execute(
            """
            DELETE FROM role_assignments
            WHERE principal_id = ? AND role_name = ? AND scope = ?
            """,
            (principal_id, role_name, scope),
        )

        conn.commit()

        if cursor.rowcount > 0:
            logger.info("role_revoked", principal_id=principal_id, role_name=role_name, scope=scope)

            if self._event_publisher:
                self._event_publisher(
                    RoleRevoked(
                        principal_id=principal_id,
                        role_name=role_name,
                        scope=scope,
                        revoked_by=revoked_by,
                    )
                )

    def close(self) -> None:
        """Close the database connection."""
        if hasattr(self._local, "connection") and self._local.connection:
            try:
                self._local.connection.commit()  # Ensure any pending changes
                # Checkpoint WAL to make data visible to new connections
                self._local.connection.execute("PRAGMA wal_checkpoint(TRUNCATE)")
            except Exception:
                pass
            self._local.connection.close()
            self._local.connection = None
        self._initialized = False