mcp-hangar 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_hangar/__init__.py +139 -0
- mcp_hangar/application/__init__.py +1 -0
- mcp_hangar/application/commands/__init__.py +67 -0
- mcp_hangar/application/commands/auth_commands.py +118 -0
- mcp_hangar/application/commands/auth_handlers.py +296 -0
- mcp_hangar/application/commands/commands.py +59 -0
- mcp_hangar/application/commands/handlers.py +189 -0
- mcp_hangar/application/discovery/__init__.py +21 -0
- mcp_hangar/application/discovery/discovery_metrics.py +283 -0
- mcp_hangar/application/discovery/discovery_orchestrator.py +497 -0
- mcp_hangar/application/discovery/lifecycle_manager.py +315 -0
- mcp_hangar/application/discovery/security_validator.py +414 -0
- mcp_hangar/application/event_handlers/__init__.py +50 -0
- mcp_hangar/application/event_handlers/alert_handler.py +191 -0
- mcp_hangar/application/event_handlers/audit_handler.py +203 -0
- mcp_hangar/application/event_handlers/knowledge_base_handler.py +120 -0
- mcp_hangar/application/event_handlers/logging_handler.py +69 -0
- mcp_hangar/application/event_handlers/metrics_handler.py +152 -0
- mcp_hangar/application/event_handlers/persistent_audit_store.py +217 -0
- mcp_hangar/application/event_handlers/security_handler.py +604 -0
- mcp_hangar/application/mcp/tooling.py +158 -0
- mcp_hangar/application/ports/__init__.py +9 -0
- mcp_hangar/application/ports/observability.py +237 -0
- mcp_hangar/application/queries/__init__.py +52 -0
- mcp_hangar/application/queries/auth_handlers.py +237 -0
- mcp_hangar/application/queries/auth_queries.py +118 -0
- mcp_hangar/application/queries/handlers.py +227 -0
- mcp_hangar/application/read_models/__init__.py +11 -0
- mcp_hangar/application/read_models/provider_views.py +139 -0
- mcp_hangar/application/sagas/__init__.py +11 -0
- mcp_hangar/application/sagas/group_rebalance_saga.py +137 -0
- mcp_hangar/application/sagas/provider_failover_saga.py +266 -0
- mcp_hangar/application/sagas/provider_recovery_saga.py +172 -0
- mcp_hangar/application/services/__init__.py +9 -0
- mcp_hangar/application/services/provider_service.py +208 -0
- mcp_hangar/application/services/traced_provider_service.py +211 -0
- mcp_hangar/bootstrap/runtime.py +328 -0
- mcp_hangar/context.py +178 -0
- mcp_hangar/domain/__init__.py +117 -0
- mcp_hangar/domain/contracts/__init__.py +57 -0
- mcp_hangar/domain/contracts/authentication.py +225 -0
- mcp_hangar/domain/contracts/authorization.py +229 -0
- mcp_hangar/domain/contracts/event_store.py +178 -0
- mcp_hangar/domain/contracts/metrics_publisher.py +59 -0
- mcp_hangar/domain/contracts/persistence.py +383 -0
- mcp_hangar/domain/contracts/provider_runtime.py +146 -0
- mcp_hangar/domain/discovery/__init__.py +20 -0
- mcp_hangar/domain/discovery/conflict_resolver.py +267 -0
- mcp_hangar/domain/discovery/discovered_provider.py +185 -0
- mcp_hangar/domain/discovery/discovery_service.py +412 -0
- mcp_hangar/domain/discovery/discovery_source.py +192 -0
- mcp_hangar/domain/events.py +433 -0
- mcp_hangar/domain/exceptions.py +525 -0
- mcp_hangar/domain/model/__init__.py +70 -0
- mcp_hangar/domain/model/aggregate.py +58 -0
- mcp_hangar/domain/model/circuit_breaker.py +152 -0
- mcp_hangar/domain/model/event_sourced_api_key.py +413 -0
- mcp_hangar/domain/model/event_sourced_provider.py +423 -0
- mcp_hangar/domain/model/event_sourced_role_assignment.py +268 -0
- mcp_hangar/domain/model/health_tracker.py +183 -0
- mcp_hangar/domain/model/load_balancer.py +185 -0
- mcp_hangar/domain/model/provider.py +810 -0
- mcp_hangar/domain/model/provider_group.py +656 -0
- mcp_hangar/domain/model/tool_catalog.py +105 -0
- mcp_hangar/domain/policies/__init__.py +19 -0
- mcp_hangar/domain/policies/provider_health.py +187 -0
- mcp_hangar/domain/repository.py +249 -0
- mcp_hangar/domain/security/__init__.py +85 -0
- mcp_hangar/domain/security/input_validator.py +710 -0
- mcp_hangar/domain/security/rate_limiter.py +387 -0
- mcp_hangar/domain/security/roles.py +237 -0
- mcp_hangar/domain/security/sanitizer.py +387 -0
- mcp_hangar/domain/security/secrets.py +501 -0
- mcp_hangar/domain/services/__init__.py +20 -0
- mcp_hangar/domain/services/audit_service.py +376 -0
- mcp_hangar/domain/services/image_builder.py +328 -0
- mcp_hangar/domain/services/provider_launcher.py +1046 -0
- mcp_hangar/domain/value_objects.py +1138 -0
- mcp_hangar/errors.py +818 -0
- mcp_hangar/fastmcp_server.py +1105 -0
- mcp_hangar/gc.py +134 -0
- mcp_hangar/infrastructure/__init__.py +79 -0
- mcp_hangar/infrastructure/async_executor.py +133 -0
- mcp_hangar/infrastructure/auth/__init__.py +37 -0
- mcp_hangar/infrastructure/auth/api_key_authenticator.py +388 -0
- mcp_hangar/infrastructure/auth/event_sourced_store.py +567 -0
- mcp_hangar/infrastructure/auth/jwt_authenticator.py +360 -0
- mcp_hangar/infrastructure/auth/middleware.py +340 -0
- mcp_hangar/infrastructure/auth/opa_authorizer.py +243 -0
- mcp_hangar/infrastructure/auth/postgres_store.py +659 -0
- mcp_hangar/infrastructure/auth/projections.py +366 -0
- mcp_hangar/infrastructure/auth/rate_limiter.py +311 -0
- mcp_hangar/infrastructure/auth/rbac_authorizer.py +323 -0
- mcp_hangar/infrastructure/auth/sqlite_store.py +624 -0
- mcp_hangar/infrastructure/command_bus.py +112 -0
- mcp_hangar/infrastructure/discovery/__init__.py +110 -0
- mcp_hangar/infrastructure/discovery/docker_source.py +289 -0
- mcp_hangar/infrastructure/discovery/entrypoint_source.py +249 -0
- mcp_hangar/infrastructure/discovery/filesystem_source.py +383 -0
- mcp_hangar/infrastructure/discovery/kubernetes_source.py +247 -0
- mcp_hangar/infrastructure/event_bus.py +260 -0
- mcp_hangar/infrastructure/event_sourced_repository.py +443 -0
- mcp_hangar/infrastructure/event_store.py +396 -0
- mcp_hangar/infrastructure/knowledge_base/__init__.py +259 -0
- mcp_hangar/infrastructure/knowledge_base/contracts.py +202 -0
- mcp_hangar/infrastructure/knowledge_base/memory.py +177 -0
- mcp_hangar/infrastructure/knowledge_base/postgres.py +545 -0
- mcp_hangar/infrastructure/knowledge_base/sqlite.py +513 -0
- mcp_hangar/infrastructure/metrics_publisher.py +36 -0
- mcp_hangar/infrastructure/observability/__init__.py +10 -0
- mcp_hangar/infrastructure/observability/langfuse_adapter.py +534 -0
- mcp_hangar/infrastructure/persistence/__init__.py +33 -0
- mcp_hangar/infrastructure/persistence/audit_repository.py +371 -0
- mcp_hangar/infrastructure/persistence/config_repository.py +398 -0
- mcp_hangar/infrastructure/persistence/database.py +333 -0
- mcp_hangar/infrastructure/persistence/database_common.py +330 -0
- mcp_hangar/infrastructure/persistence/event_serializer.py +280 -0
- mcp_hangar/infrastructure/persistence/event_upcaster.py +166 -0
- mcp_hangar/infrastructure/persistence/in_memory_event_store.py +150 -0
- mcp_hangar/infrastructure/persistence/recovery_service.py +312 -0
- mcp_hangar/infrastructure/persistence/sqlite_event_store.py +386 -0
- mcp_hangar/infrastructure/persistence/unit_of_work.py +409 -0
- mcp_hangar/infrastructure/persistence/upcasters/README.md +13 -0
- mcp_hangar/infrastructure/persistence/upcasters/__init__.py +7 -0
- mcp_hangar/infrastructure/query_bus.py +153 -0
- mcp_hangar/infrastructure/saga_manager.py +401 -0
- mcp_hangar/logging_config.py +209 -0
- mcp_hangar/metrics.py +1007 -0
- mcp_hangar/models.py +31 -0
- mcp_hangar/observability/__init__.py +54 -0
- mcp_hangar/observability/health.py +487 -0
- mcp_hangar/observability/metrics.py +319 -0
- mcp_hangar/observability/tracing.py +433 -0
- mcp_hangar/progress.py +542 -0
- mcp_hangar/retry.py +613 -0
- mcp_hangar/server/__init__.py +120 -0
- mcp_hangar/server/__main__.py +6 -0
- mcp_hangar/server/auth_bootstrap.py +340 -0
- mcp_hangar/server/auth_cli.py +335 -0
- mcp_hangar/server/auth_config.py +305 -0
- mcp_hangar/server/bootstrap.py +735 -0
- mcp_hangar/server/cli.py +161 -0
- mcp_hangar/server/config.py +224 -0
- mcp_hangar/server/context.py +215 -0
- mcp_hangar/server/http_auth_middleware.py +165 -0
- mcp_hangar/server/lifecycle.py +467 -0
- mcp_hangar/server/state.py +117 -0
- mcp_hangar/server/tools/__init__.py +16 -0
- mcp_hangar/server/tools/discovery.py +186 -0
- mcp_hangar/server/tools/groups.py +75 -0
- mcp_hangar/server/tools/health.py +301 -0
- mcp_hangar/server/tools/provider.py +939 -0
- mcp_hangar/server/tools/registry.py +320 -0
- mcp_hangar/server/validation.py +113 -0
- mcp_hangar/stdio_client.py +229 -0
- mcp_hangar-0.2.0.dist-info/METADATA +347 -0
- mcp_hangar-0.2.0.dist-info/RECORD +160 -0
- mcp_hangar-0.2.0.dist-info/WHEEL +4 -0
- mcp_hangar-0.2.0.dist-info/entry_points.txt +2 -0
- mcp_hangar-0.2.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,567 @@
|
|
|
1
|
+
"""Event Sourced Repository for Auth aggregates.
|
|
2
|
+
|
|
3
|
+
Stores API Keys and Role Assignments by persisting their domain events
|
|
4
|
+
and rebuilding state on load. Provides:
|
|
5
|
+
- Event persistence via IEventStore
|
|
6
|
+
- Snapshot support for performance
|
|
7
|
+
- Event publishing via EventBus
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from datetime import datetime, timezone
|
|
11
|
+
import hashlib
|
|
12
|
+
import secrets
|
|
13
|
+
import threading
|
|
14
|
+
from typing import Protocol
|
|
15
|
+
|
|
16
|
+
from ...domain.contracts.authentication import ApiKeyMetadata, IApiKeyStore
|
|
17
|
+
from ...domain.contracts.authorization import IRoleStore
|
|
18
|
+
from ...domain.contracts.event_store import IEventStore
|
|
19
|
+
from ...domain.events import ApiKeyCreated, DomainEvent
|
|
20
|
+
from ...domain.exceptions import ExpiredCredentialsError, RevokedCredentialsError
|
|
21
|
+
from ...domain.model.event_sourced_api_key import ApiKeySnapshot, EventSourcedApiKey
|
|
22
|
+
from ...domain.model.event_sourced_role_assignment import EventSourcedRoleAssignment, RoleAssignmentSnapshot
|
|
23
|
+
from ...domain.security.roles import BUILTIN_ROLES
|
|
24
|
+
from ...domain.value_objects import Principal, Role
|
|
25
|
+
from ...logging_config import get_logger
|
|
26
|
+
|
|
27
|
+
logger = get_logger(__name__)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class IEventPublisher(Protocol):
    """Structural interface for event publishing (Dependency Inversion).

    Any object exposing a compatible ``publish`` method satisfies this
    protocol; the stores in this module use it to forward persisted
    domain events to an external bus (e.g. EventBus) without depending
    on a concrete implementation.
    """

    def publish(self, event: DomainEvent) -> None:
        """Publish a single domain event to subscribers."""
        ...
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _generate_key() -> str:
|
|
39
|
+
"""Generate a new API key."""
|
|
40
|
+
return f"mcp_{secrets.token_urlsafe(32)}"
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _hash_key(key: str) -> str:
|
|
44
|
+
"""Hash an API key for storage."""
|
|
45
|
+
return hashlib.sha256(key.encode()).hexdigest()
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class EventSourcedApiKeyStore(IApiKeyStore):
    """Event Sourced API Key Store.

    Persists API keys as event streams and rebuilds state on load.

    Stream naming: "api_key:{key_hash}"

    Features:
    - Full audit trail via events
    - Snapshot support for large streams
    - Event publishing for integrations
    """

    STREAM_PREFIX = "api_key"
    SNAPSHOT_INTERVAL = 50  # Events between snapshots
    MAX_KEYS_PER_PRINCIPAL = 100  # Hard cap enforced in create_key()

    def __init__(
        self,
        event_store: IEventStore,
        event_publisher: IEventPublisher | None = None,
        snapshot_store: dict[str, ApiKeySnapshot] | None = None,
    ):
        """Initialize the event sourced store.

        Args:
            event_store: Event store for persistence.
            event_publisher: Optional publisher for events (e.g., EventBus).
            snapshot_store: Optional snapshot cache, keyed by key hash.
        """
        self._event_store = event_store
        self._event_publisher = event_publisher
        self._snapshot_store = snapshot_store or {}
        self._lock = threading.RLock()

        # Index: key_hash -> (key_id, principal_id)
        # Built lazily by scanning events on first access.
        self._index: dict[str, tuple[str, str]] | None = None
        # Reverse index: principal_id -> set of key_hashes
        self._principal_index: dict[str, set[str]] | None = None

    def _build_index(self) -> None:
        """Build the in-memory indexes by scanning all api_key streams.

        Expensive, but only done once; later calls return immediately.

        FIX: the build now runs under ``self._lock`` with a double-check,
        and both indexes are populated in local dicts and assigned only
        once fully built. The previous version assigned ``self._index = {}``
        before populating it without holding the lock, so a concurrent
        caller could observe a non-None but empty/partial index and
        silently miss existing keys (``_save_key`` already takes the lock
        for its index updates).
        """
        if self._index is not None:
            return

        with self._lock:
            # Double-check: another thread may have finished the build
            # while we were waiting on the lock.
            if self._index is not None:
                return

            index: dict[str, tuple[str, str]] = {}
            principal_index: dict[str, set[str]] = {}

            # Scan all streams with our prefix.
            for stream_id in self._event_store.list_streams(f"{self.STREAM_PREFIX}:"):
                key_hash = stream_id.split(":", 1)[1]
                events = list(self._event_store.read_stream(stream_id))

                # The creation event carries the immutable identity metadata.
                for event in events:
                    if isinstance(event, ApiKeyCreated):
                        index[key_hash] = (event.key_id, event.principal_id)
                        principal_index.setdefault(event.principal_id, set()).add(key_hash)
                        break

            # Publish both indexes atomically, only once fully built.
            self._index = index
            self._principal_index = principal_index

        logger.info(
            "api_key_index_built",
            total_keys=len(self._index),
            total_principals=len(self._principal_index),
        )

    def _stream_id(self, key_hash: str) -> str:
        """Get stream ID for a key hash."""
        return f"{self.STREAM_PREFIX}:{key_hash}"

    def _load_key(self, key_hash: str) -> EventSourcedApiKey | None:
        """Load an API key aggregate by replaying its event stream.

        Uses a cached snapshot as the starting point when available;
        otherwise replays the full stream from the creation event.

        Returns:
            The rehydrated aggregate, or None when no key exists for
            the given hash.
        """
        stream_id = self._stream_id(key_hash)

        # Try snapshot first
        snapshot = self._snapshot_store.get(key_hash)
        start_version = snapshot.version if snapshot else 0

        # Read events (after snapshot version if available)
        events = list(self._event_store.read_stream(stream_id, from_version=start_version))

        if not events and not snapshot:
            return None

        # Identity metadata comes from the index (built on first access).
        self._build_index()
        if key_hash not in self._index:
            return None

        key_id, principal_id = self._index[key_hash]

        if snapshot:
            key = EventSourcedApiKey.from_snapshot(snapshot, events)
        else:
            # No snapshot: the creation event is needed for full metadata,
            # so re-read the stream from version zero.
            all_events = list(self._event_store.read_stream(stream_id))
            creation_event = next((e for e in all_events if isinstance(e, ApiKeyCreated)), None)

            if not creation_event:
                return None

            key = EventSourcedApiKey.from_events(
                key_hash=key_hash,
                key_id=key_id,
                principal_id=principal_id,
                name=creation_event.key_name,
                events=all_events,
                expires_at=(
                    datetime.fromtimestamp(creation_event.expires_at, tz=timezone.utc)
                    if creation_event.expires_at
                    else None
                ),
            )

        return key

    def _publish_events(self, events: list[DomainEvent]) -> None:
        """Publish events if a publisher is configured (no-op otherwise)."""
        if self._event_publisher:
            for event in events:
                self._event_publisher.publish(event)

    def _maybe_create_snapshot(
        self,
        key_hash: str,
        new_version: int,
        create_snapshot_fn: callable,
    ) -> None:
        """Create a snapshot once SNAPSHOT_INTERVAL events accumulated.

        Args:
            key_hash: Snapshot-store key (the API key's hash). Renamed
                from the misleading ``key_id``: callers pass ``key.key_hash``
                and ``_load_key`` reads the snapshot store by hash.
            new_version: Stream version after the latest append.
            create_snapshot_fn: Zero-argument factory producing the snapshot.
        """
        if new_version < self.SNAPSHOT_INTERVAL:
            return

        existing = self._snapshot_store.get(key_hash)
        existing_version = existing.version if existing else 0
        events_since = new_version - existing_version

        if events_since >= self.SNAPSHOT_INTERVAL:
            self._snapshot_store[key_hash] = create_snapshot_fn()

    def _save_key(self, key: EventSourcedApiKey) -> None:
        """Append the aggregate's uncommitted events, update the indexes,
        snapshot if due, and publish the events."""
        events = key.collect_events()
        if not events:
            return

        stream_id = self._stream_id(key.key_hash)

        # Append events; expected_version is the version before these events
        # were applied (optimistic concurrency).
        new_version = self._event_store.append(
            stream_id=stream_id,
            events=events,
            expected_version=key.version - len(events),
        )

        # Update indexes (only if already built; otherwise the next
        # _build_index scan will pick the key up from the stream).
        with self._lock:
            if self._index is not None:
                self._index[key.key_hash] = (key.key_id, key.principal_id)
                if key.principal_id not in self._principal_index:
                    self._principal_index[key.principal_id] = set()
                self._principal_index[key.principal_id].add(key.key_hash)

        # Create snapshot if needed
        self._maybe_create_snapshot(key.key_hash, new_version, key.create_snapshot)

        # Publish events
        self._publish_events(events)

        logger.debug(
            "api_key_events_saved",
            key_id=key.key_id,
            events_count=len(events),
            new_version=new_version,
        )

    # =========================================================================
    # IApiKeyStore Implementation
    # =========================================================================

    def get_principal_for_key(self, key_hash: str) -> Principal | None:
        """Look up the principal for an API key hash.

        Returns:
            The principal the key authenticates, or None if no such key.

        Raises:
            RevokedCredentialsError: If the key has been revoked.
            ExpiredCredentialsError: If the key has expired.
        """
        key = self._load_key(key_hash)

        if key is None:
            return None

        if key.is_revoked:
            raise RevokedCredentialsError("API key has been revoked")

        if key.is_expired:
            raise ExpiredCredentialsError("API key has expired")

        # Record usage.
        # NOTE(review): the usage event is recorded on the aggregate but is
        # not persisted here (no _save_key call) — presumably to avoid an
        # event-store write on every authentication; confirm this is
        # intentional, otherwise last_used_at is never durably updated.
        key.record_usage()

        return key.to_principal()

    def create_key(
        self,
        principal_id: str,
        name: str,
        expires_at: datetime | None = None,
        groups: frozenset[str] | None = None,
        tenant_id: str | None = None,
        created_by: str = "system",
    ) -> str:
        """Create a new API key and return the raw (unhashed) key.

        Raises:
            ValueError: If the principal already holds
                MAX_KEYS_PER_PRINCIPAL keys.
        """
        # Build index to check limits
        self._build_index()

        # Check key limit
        existing_keys = self._principal_index.get(principal_id, set())
        if len(existing_keys) >= self.MAX_KEYS_PER_PRINCIPAL:
            raise ValueError(f"Principal {principal_id} has reached maximum API keys ({self.MAX_KEYS_PER_PRINCIPAL})")

        # Generate key; only the hash is persisted.
        raw_key = _generate_key()
        key_hash = _hash_key(raw_key)
        key_id = secrets.token_urlsafe(8)

        # Create aggregate
        key = EventSourcedApiKey.create(
            key_hash=key_hash,
            key_id=key_id,
            principal_id=principal_id,
            name=name,
            created_by=created_by,
            tenant_id=tenant_id,
            groups=groups,
            expires_at=expires_at,
        )

        # Save
        self._save_key(key)

        logger.info(
            "api_key_created",
            key_id=key_id,
            principal_id=principal_id,
            name=name,
        )

        return raw_key

    def revoke_key(
        self,
        key_id: str,
        revoked_by: str = "system",
        reason: str = "",
    ) -> bool:
        """Revoke an API key by its public key_id.

        Returns:
            True if the key was revoked; False if it does not exist or
            is already revoked.
        """
        # Find key by key_id (the index maps hash -> (key_id, principal_id),
        # so this is a linear scan over known keys).
        self._build_index()

        key_hash = None
        for kh, (kid, _) in self._index.items():
            if kid == key_id:
                key_hash = kh
                break

        if key_hash is None:
            return False

        key = self._load_key(key_hash)
        if key is None or key.is_revoked:
            return False

        key.revoke(revoked_by=revoked_by, reason=reason)
        self._save_key(key)

        logger.info(
            "api_key_revoked",
            key_id=key_id,
            revoked_by=revoked_by,
            reason=reason,
        )

        return True

    def list_keys(self, principal_id: str) -> list[ApiKeyMetadata]:
        """List metadata for all of a principal's API keys (including
        revoked/expired ones)."""
        self._build_index()

        key_hashes = self._principal_index.get(principal_id, set())
        result = []

        for key_hash in key_hashes:
            key = self._load_key(key_hash)
            if key:
                result.append(
                    ApiKeyMetadata(
                        key_id=key.key_id,
                        name=key.name,
                        principal_id=key.principal_id,
                        created_at=key.created_at,
                        expires_at=key.expires_at,
                        last_used_at=key.last_used_at,
                        revoked=key.is_revoked,
                    )
                )

        return result

    def count_keys(self, principal_id: str) -> int:
        """Count the principal's currently valid (not revoked/expired) keys."""
        self._build_index()

        key_hashes = self._principal_index.get(principal_id, set())
        count = 0

        for key_hash in key_hashes:
            key = self._load_key(key_hash)
            if key and key.is_valid:
                count += 1

        return count
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
class EventSourcedRoleStore(IRoleStore):
    """Event Sourced Role Store.

    Persists role assignments as event streams and rebuilds state on load.

    Stream naming: "role_assignment:{principal_id}"

    Features:
    - Full audit trail via events
    - Snapshot support for large streams
    - Event publishing for integrations
    """

    STREAM_PREFIX = "role_assignment"
    SNAPSHOT_INTERVAL = 50  # Events between snapshots

    def __init__(
        self,
        event_store: IEventStore,
        event_publisher: IEventPublisher | None = None,
        snapshot_store: dict[str, RoleAssignmentSnapshot] | None = None,
    ):
        """Initialize the event sourced store.

        Args:
            event_store: Event store for persistence.
            event_publisher: Optional publisher for events (e.g., EventBus).
            snapshot_store: Optional snapshot cache, keyed by principal_id.
        """
        self._event_store = event_store
        self._event_publisher = event_publisher
        self._snapshot_store = snapshot_store or {}
        # NOTE(review): this lock is never acquired anywhere in the class;
        # _custom_roles and snapshot updates are unsynchronized. Confirm
        # whether single-threaded use is assumed or guards were omitted.
        self._lock = threading.RLock()

        # Custom roles (in addition to built-in BUILTIN_ROLES)
        self._custom_roles: dict[str, Role] = {}

    def _stream_id(self, principal_id: str) -> str:
        """Get stream ID for a principal."""
        return f"{self.STREAM_PREFIX}:{principal_id}"

    def _load_assignment(self, principal_id: str) -> EventSourcedRoleAssignment:
        """Load the role-assignment aggregate by replaying its events.

        Returns an empty aggregate when the principal has no stream yet
        (never returns None).
        """
        stream_id = self._stream_id(principal_id)

        # Try snapshot first
        snapshot = self._snapshot_store.get(principal_id)
        start_version = snapshot.version if snapshot else 0

        # Read events (after snapshot version if available)
        events = list(self._event_store.read_stream(stream_id, from_version=start_version))

        if snapshot:
            return EventSourcedRoleAssignment.from_snapshot(snapshot, events)
        elif events:
            return EventSourcedRoleAssignment.from_events(principal_id, events)
        else:
            return EventSourcedRoleAssignment(principal_id)

    def _publish_events(self, events: list[DomainEvent]) -> None:
        """Publish events if a publisher is configured (no-op otherwise)."""
        if self._event_publisher:
            for event in events:
                self._event_publisher.publish(event)

    def _maybe_create_snapshot(
        self,
        principal_id: str,
        new_version: int,
        create_snapshot_fn: callable,
    ) -> None:
        """Create a snapshot once SNAPSHOT_INTERVAL events accumulated.

        Args:
            principal_id: Snapshot-store key. Renamed from the misleading
                ``key_id`` (copy-paste from the API key store): callers pass
                ``assignment.principal_id`` and ``_load_assignment`` reads
                the snapshot store by principal_id.
            new_version: Stream version after the latest append.
            create_snapshot_fn: Zero-argument factory producing the snapshot.
        """
        if new_version < self.SNAPSHOT_INTERVAL:
            return

        existing = self._snapshot_store.get(principal_id)
        existing_version = existing.version if existing else 0
        events_since = new_version - existing_version

        if events_since >= self.SNAPSHOT_INTERVAL:
            self._snapshot_store[principal_id] = create_snapshot_fn()

    def _save_assignment(self, assignment: EventSourcedRoleAssignment) -> None:
        """Append the aggregate's uncommitted events, snapshot if due,
        and publish the events."""
        events = assignment.collect_events()
        if not events:
            return

        stream_id = self._stream_id(assignment.principal_id)
        # Optimistic concurrency: expect the stream's current version.
        current_version = self._event_store.get_stream_version(stream_id)

        # Append events
        new_version = self._event_store.append(
            stream_id=stream_id,
            events=events,
            expected_version=current_version,
        )

        # Create snapshot if needed
        self._maybe_create_snapshot(
            assignment.principal_id,
            new_version,
            assignment.create_snapshot,
        )

        # Publish events
        self._publish_events(events)

        logger.debug(
            "role_assignment_events_saved",
            principal_id=assignment.principal_id,
            events_count=len(events),
            new_version=new_version,
        )

    # =========================================================================
    # IRoleStore Implementation
    # =========================================================================

    def get_role(self, role_name: str) -> Role | None:
        """Get a role by name; built-in roles shadow custom ones."""
        # Check built-in first
        if role_name in BUILTIN_ROLES:
            return BUILTIN_ROLES[role_name]

        # Check custom roles
        return self._custom_roles.get(role_name)

    def add_role(self, role: Role) -> None:
        """Register a custom role.

        Raises:
            ValueError: If the name collides with a built-in role.
        """
        if role.name in BUILTIN_ROLES:
            raise ValueError(f"Cannot override built-in role: {role.name}")

        self._custom_roles[role.name] = role
        logger.info("custom_role_added", role_name=role.name)

    def get_roles_for_principal(
        self,
        principal_id: str,
        scope: str = "*",
    ) -> list[Role]:
        """Resolve all Role objects assigned to a principal in a scope.

        Role names without a matching Role definition are silently skipped.
        """
        assignment = self._load_assignment(principal_id)
        role_names = assignment.get_role_names(scope)

        roles = []
        for name in role_names:
            role = self.get_role(name)
            if role:
                roles.append(role)

        return roles

    def assign_role(
        self,
        principal_id: str,
        role_name: str,
        scope: str = "global",
        assigned_by: str = "system",
    ) -> None:
        """Assign a role to a principal (no-op if already assigned).

        Raises:
            ValueError: If the role name is unknown.
        """
        # Verify role exists
        if self.get_role(role_name) is None:
            raise ValueError(f"Unknown role: {role_name}")

        assignment = self._load_assignment(principal_id)

        # assign_role returns falsy when nothing changed; only persist on change.
        if assignment.assign_role(role_name, scope, assigned_by):
            self._save_assignment(assignment)
            logger.info(
                "role_assigned",
                principal_id=principal_id,
                role_name=role_name,
                scope=scope,
                assigned_by=assigned_by,
            )

    def revoke_role(
        self,
        principal_id: str,
        role_name: str,
        scope: str = "global",
        revoked_by: str = "system",
    ) -> None:
        """Revoke a role from a principal (no-op if not assigned)."""
        assignment = self._load_assignment(principal_id)

        # revoke_role returns falsy when nothing changed; only persist on change.
        if assignment.revoke_role(role_name, scope, revoked_by):
            self._save_assignment(assignment)
            logger.info(
                "role_revoked",
                principal_id=principal_id,
                role_name=role_name,
                scope=scope,
                revoked_by=revoked_by,
            )
|