mcp-hangar 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (160)
  1. mcp_hangar/__init__.py +139 -0
  2. mcp_hangar/application/__init__.py +1 -0
  3. mcp_hangar/application/commands/__init__.py +67 -0
  4. mcp_hangar/application/commands/auth_commands.py +118 -0
  5. mcp_hangar/application/commands/auth_handlers.py +296 -0
  6. mcp_hangar/application/commands/commands.py +59 -0
  7. mcp_hangar/application/commands/handlers.py +189 -0
  8. mcp_hangar/application/discovery/__init__.py +21 -0
  9. mcp_hangar/application/discovery/discovery_metrics.py +283 -0
  10. mcp_hangar/application/discovery/discovery_orchestrator.py +497 -0
  11. mcp_hangar/application/discovery/lifecycle_manager.py +315 -0
  12. mcp_hangar/application/discovery/security_validator.py +414 -0
  13. mcp_hangar/application/event_handlers/__init__.py +50 -0
  14. mcp_hangar/application/event_handlers/alert_handler.py +191 -0
  15. mcp_hangar/application/event_handlers/audit_handler.py +203 -0
  16. mcp_hangar/application/event_handlers/knowledge_base_handler.py +120 -0
  17. mcp_hangar/application/event_handlers/logging_handler.py +69 -0
  18. mcp_hangar/application/event_handlers/metrics_handler.py +152 -0
  19. mcp_hangar/application/event_handlers/persistent_audit_store.py +217 -0
  20. mcp_hangar/application/event_handlers/security_handler.py +604 -0
  21. mcp_hangar/application/mcp/tooling.py +158 -0
  22. mcp_hangar/application/ports/__init__.py +9 -0
  23. mcp_hangar/application/ports/observability.py +237 -0
  24. mcp_hangar/application/queries/__init__.py +52 -0
  25. mcp_hangar/application/queries/auth_handlers.py +237 -0
  26. mcp_hangar/application/queries/auth_queries.py +118 -0
  27. mcp_hangar/application/queries/handlers.py +227 -0
  28. mcp_hangar/application/read_models/__init__.py +11 -0
  29. mcp_hangar/application/read_models/provider_views.py +139 -0
  30. mcp_hangar/application/sagas/__init__.py +11 -0
  31. mcp_hangar/application/sagas/group_rebalance_saga.py +137 -0
  32. mcp_hangar/application/sagas/provider_failover_saga.py +266 -0
  33. mcp_hangar/application/sagas/provider_recovery_saga.py +172 -0
  34. mcp_hangar/application/services/__init__.py +9 -0
  35. mcp_hangar/application/services/provider_service.py +208 -0
  36. mcp_hangar/application/services/traced_provider_service.py +211 -0
  37. mcp_hangar/bootstrap/runtime.py +328 -0
  38. mcp_hangar/context.py +178 -0
  39. mcp_hangar/domain/__init__.py +117 -0
  40. mcp_hangar/domain/contracts/__init__.py +57 -0
  41. mcp_hangar/domain/contracts/authentication.py +225 -0
  42. mcp_hangar/domain/contracts/authorization.py +229 -0
  43. mcp_hangar/domain/contracts/event_store.py +178 -0
  44. mcp_hangar/domain/contracts/metrics_publisher.py +59 -0
  45. mcp_hangar/domain/contracts/persistence.py +383 -0
  46. mcp_hangar/domain/contracts/provider_runtime.py +146 -0
  47. mcp_hangar/domain/discovery/__init__.py +20 -0
  48. mcp_hangar/domain/discovery/conflict_resolver.py +267 -0
  49. mcp_hangar/domain/discovery/discovered_provider.py +185 -0
  50. mcp_hangar/domain/discovery/discovery_service.py +412 -0
  51. mcp_hangar/domain/discovery/discovery_source.py +192 -0
  52. mcp_hangar/domain/events.py +433 -0
  53. mcp_hangar/domain/exceptions.py +525 -0
  54. mcp_hangar/domain/model/__init__.py +70 -0
  55. mcp_hangar/domain/model/aggregate.py +58 -0
  56. mcp_hangar/domain/model/circuit_breaker.py +152 -0
  57. mcp_hangar/domain/model/event_sourced_api_key.py +413 -0
  58. mcp_hangar/domain/model/event_sourced_provider.py +423 -0
  59. mcp_hangar/domain/model/event_sourced_role_assignment.py +268 -0
  60. mcp_hangar/domain/model/health_tracker.py +183 -0
  61. mcp_hangar/domain/model/load_balancer.py +185 -0
  62. mcp_hangar/domain/model/provider.py +810 -0
  63. mcp_hangar/domain/model/provider_group.py +656 -0
  64. mcp_hangar/domain/model/tool_catalog.py +105 -0
  65. mcp_hangar/domain/policies/__init__.py +19 -0
  66. mcp_hangar/domain/policies/provider_health.py +187 -0
  67. mcp_hangar/domain/repository.py +249 -0
  68. mcp_hangar/domain/security/__init__.py +85 -0
  69. mcp_hangar/domain/security/input_validator.py +710 -0
  70. mcp_hangar/domain/security/rate_limiter.py +387 -0
  71. mcp_hangar/domain/security/roles.py +237 -0
  72. mcp_hangar/domain/security/sanitizer.py +387 -0
  73. mcp_hangar/domain/security/secrets.py +501 -0
  74. mcp_hangar/domain/services/__init__.py +20 -0
  75. mcp_hangar/domain/services/audit_service.py +376 -0
  76. mcp_hangar/domain/services/image_builder.py +328 -0
  77. mcp_hangar/domain/services/provider_launcher.py +1046 -0
  78. mcp_hangar/domain/value_objects.py +1138 -0
  79. mcp_hangar/errors.py +818 -0
  80. mcp_hangar/fastmcp_server.py +1105 -0
  81. mcp_hangar/gc.py +134 -0
  82. mcp_hangar/infrastructure/__init__.py +79 -0
  83. mcp_hangar/infrastructure/async_executor.py +133 -0
  84. mcp_hangar/infrastructure/auth/__init__.py +37 -0
  85. mcp_hangar/infrastructure/auth/api_key_authenticator.py +388 -0
  86. mcp_hangar/infrastructure/auth/event_sourced_store.py +567 -0
  87. mcp_hangar/infrastructure/auth/jwt_authenticator.py +360 -0
  88. mcp_hangar/infrastructure/auth/middleware.py +340 -0
  89. mcp_hangar/infrastructure/auth/opa_authorizer.py +243 -0
  90. mcp_hangar/infrastructure/auth/postgres_store.py +659 -0
  91. mcp_hangar/infrastructure/auth/projections.py +366 -0
  92. mcp_hangar/infrastructure/auth/rate_limiter.py +311 -0
  93. mcp_hangar/infrastructure/auth/rbac_authorizer.py +323 -0
  94. mcp_hangar/infrastructure/auth/sqlite_store.py +624 -0
  95. mcp_hangar/infrastructure/command_bus.py +112 -0
  96. mcp_hangar/infrastructure/discovery/__init__.py +110 -0
  97. mcp_hangar/infrastructure/discovery/docker_source.py +289 -0
  98. mcp_hangar/infrastructure/discovery/entrypoint_source.py +249 -0
  99. mcp_hangar/infrastructure/discovery/filesystem_source.py +383 -0
  100. mcp_hangar/infrastructure/discovery/kubernetes_source.py +247 -0
  101. mcp_hangar/infrastructure/event_bus.py +260 -0
  102. mcp_hangar/infrastructure/event_sourced_repository.py +443 -0
  103. mcp_hangar/infrastructure/event_store.py +396 -0
  104. mcp_hangar/infrastructure/knowledge_base/__init__.py +259 -0
  105. mcp_hangar/infrastructure/knowledge_base/contracts.py +202 -0
  106. mcp_hangar/infrastructure/knowledge_base/memory.py +177 -0
  107. mcp_hangar/infrastructure/knowledge_base/postgres.py +545 -0
  108. mcp_hangar/infrastructure/knowledge_base/sqlite.py +513 -0
  109. mcp_hangar/infrastructure/metrics_publisher.py +36 -0
  110. mcp_hangar/infrastructure/observability/__init__.py +10 -0
  111. mcp_hangar/infrastructure/observability/langfuse_adapter.py +534 -0
  112. mcp_hangar/infrastructure/persistence/__init__.py +33 -0
  113. mcp_hangar/infrastructure/persistence/audit_repository.py +371 -0
  114. mcp_hangar/infrastructure/persistence/config_repository.py +398 -0
  115. mcp_hangar/infrastructure/persistence/database.py +333 -0
  116. mcp_hangar/infrastructure/persistence/database_common.py +330 -0
  117. mcp_hangar/infrastructure/persistence/event_serializer.py +280 -0
  118. mcp_hangar/infrastructure/persistence/event_upcaster.py +166 -0
  119. mcp_hangar/infrastructure/persistence/in_memory_event_store.py +150 -0
  120. mcp_hangar/infrastructure/persistence/recovery_service.py +312 -0
  121. mcp_hangar/infrastructure/persistence/sqlite_event_store.py +386 -0
  122. mcp_hangar/infrastructure/persistence/unit_of_work.py +409 -0
  123. mcp_hangar/infrastructure/persistence/upcasters/README.md +13 -0
  124. mcp_hangar/infrastructure/persistence/upcasters/__init__.py +7 -0
  125. mcp_hangar/infrastructure/query_bus.py +153 -0
  126. mcp_hangar/infrastructure/saga_manager.py +401 -0
  127. mcp_hangar/logging_config.py +209 -0
  128. mcp_hangar/metrics.py +1007 -0
  129. mcp_hangar/models.py +31 -0
  130. mcp_hangar/observability/__init__.py +54 -0
  131. mcp_hangar/observability/health.py +487 -0
  132. mcp_hangar/observability/metrics.py +319 -0
  133. mcp_hangar/observability/tracing.py +433 -0
  134. mcp_hangar/progress.py +542 -0
  135. mcp_hangar/retry.py +613 -0
  136. mcp_hangar/server/__init__.py +120 -0
  137. mcp_hangar/server/__main__.py +6 -0
  138. mcp_hangar/server/auth_bootstrap.py +340 -0
  139. mcp_hangar/server/auth_cli.py +335 -0
  140. mcp_hangar/server/auth_config.py +305 -0
  141. mcp_hangar/server/bootstrap.py +735 -0
  142. mcp_hangar/server/cli.py +161 -0
  143. mcp_hangar/server/config.py +224 -0
  144. mcp_hangar/server/context.py +215 -0
  145. mcp_hangar/server/http_auth_middleware.py +165 -0
  146. mcp_hangar/server/lifecycle.py +467 -0
  147. mcp_hangar/server/state.py +117 -0
  148. mcp_hangar/server/tools/__init__.py +16 -0
  149. mcp_hangar/server/tools/discovery.py +186 -0
  150. mcp_hangar/server/tools/groups.py +75 -0
  151. mcp_hangar/server/tools/health.py +301 -0
  152. mcp_hangar/server/tools/provider.py +939 -0
  153. mcp_hangar/server/tools/registry.py +320 -0
  154. mcp_hangar/server/validation.py +113 -0
  155. mcp_hangar/stdio_client.py +229 -0
  156. mcp_hangar-0.2.0.dist-info/METADATA +347 -0
  157. mcp_hangar-0.2.0.dist-info/RECORD +160 -0
  158. mcp_hangar-0.2.0.dist-info/WHEEL +4 -0
  159. mcp_hangar-0.2.0.dist-info/entry_points.txt +2 -0
  160. mcp_hangar-0.2.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,396 @@
1
+ """Event Store for persisting domain events.
2
+
3
+ Provides append-only storage of domain events with optimistic concurrency control.
4
+ """
5
+
6
+ from abc import ABC, abstractmethod
7
+ from dataclasses import dataclass, field
8
+ import json
9
+ from pathlib import Path
10
+ import threading
11
+ import time
12
+ from typing import Any, Callable, Dict, List, Optional
13
+
14
+ from ..domain.events import DomainEvent
15
+ from ..logging_config import get_logger
16
+
17
+ logger = get_logger(__name__)
18
+
19
+
20
@dataclass
class StoredEvent:
    """Persisted domain event plus the stream-positioning metadata."""

    stream_id: str  # identifier of the stream this record belongs to
    version: int  # position within the stream, assigned at append time
    event_type: str  # class name of the originating domain event
    event_id: str  # unique id carried by the domain event
    occurred_at: float  # timestamp of the domain occurrence
    data: Dict[str, Any]  # serialized event payload
    stored_at: float = field(default_factory=time.time)  # persistence timestamp

    # Field order used for (de)serialization; mirrors the dataclass fields.
    _FIELDS = (
        "stream_id",
        "version",
        "event_type",
        "event_id",
        "occurred_at",
        "data",
        "stored_at",
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for serialization."""
        return {name: getattr(self, name) for name in self._FIELDS}

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "StoredEvent":
        """Create from dictionary; a missing ``stored_at`` defaults to now."""
        payload = dict(d)
        payload.setdefault("stored_at", time.time())
        return cls(
            stream_id=payload["stream_id"],
            version=payload["version"],
            event_type=payload["event_type"],
            event_id=payload["event_id"],
            occurred_at=payload["occurred_at"],
            data=payload["data"],
            stored_at=payload["stored_at"],
        )
56
+
57
+
58
class ConcurrencyError(Exception):
    """Raised when an optimistic-concurrency version check fails."""

    def __init__(self, stream_id: str, expected_version: int, actual_version: int):
        message = (
            f"Concurrency conflict on stream '{stream_id}': "
            f"expected version {expected_version}, actual {actual_version}"
        )
        super().__init__(message)
        # Keep the conflicting values available for programmatic handling.
        self.stream_id = stream_id
        self.expected_version = expected_version
        self.actual_version = actual_version
69
+
70
+
71
class StreamNotFoundError(Exception):
    """Raised when a requested event stream does not exist."""

    def __init__(self, stream_id: str):
        super().__init__(f"Stream '{stream_id}' not found")
        # Retain the id so callers can react without parsing the message.
        self.stream_id = stream_id
77
+
78
+
79
class EventStore(ABC):
    """Abstract interface for append-only, versioned event storage."""

    @abstractmethod
    def append(self, stream_id: str, events: List[DomainEvent], expected_version: int) -> int:
        """Append *events* to a stream using optimistic concurrency.

        Args:
            stream_id: Identifier of the target event stream.
            events: Domain events to persist, in order.
            expected_version: Version the caller believes the stream is at
                (-1 when creating a new stream).

        Returns:
            The stream version after the append.

        Raises:
            ConcurrencyError: When expected_version does not match the
                stream's actual current version.
        """

    @abstractmethod
    def load(self, stream_id: str, from_version: int = 0, to_version: Optional[int] = None) -> List[StoredEvent]:
        """Return stored events within an inclusive version range.

        Args:
            stream_id: Identifier of the event stream.
            from_version: First version to include.
            to_version: Last version to include; None means "to the end".

        Returns:
            Stored events in version order (empty for an unknown stream).
        """

    @abstractmethod
    def get_version(self, stream_id: str) -> int:
        """Return the stream's current version, or -1 when it does not exist."""

    @abstractmethod
    def get_all_stream_ids(self) -> List[str]:
        """Return the identifiers of every stream held by this store."""

    @abstractmethod
    def stream_exists(self, stream_id: str) -> bool:
        """Return True when the stream exists in the store."""
134
+
135
+
136
class InMemoryEventStore(EventStore):
    """In-memory event store for testing and development.

    All operations are serialized through a single re-entrant lock.
    Subscribers are notified synchronously, once per appended event.
    """

    def __init__(self):
        self._streams: Dict[str, List[StoredEvent]] = {}
        self._lock = threading.RLock()
        self._subscribers: List[Callable[[StoredEvent], None]] = []

    def append(self, stream_id: str, events: List[DomainEvent], expected_version: int) -> int:
        """Append events with optimistic concurrency.

        Returns:
            The stream version after the append.

        Raises:
            ConcurrencyError: If expected_version differs from the stream's
                current version.
        """
        with self._lock:
            current_version = self.get_version(stream_id)

            if expected_version != current_version:
                raise ConcurrencyError(stream_id, expected_version, current_version)

            stream = self._streams.setdefault(stream_id, [])
            new_version = current_version

            for event in events:
                new_version += 1
                stored = StoredEvent(
                    stream_id=stream_id,
                    version=new_version,
                    event_type=type(event).__name__,
                    event_id=event.event_id,
                    occurred_at=event.occurred_at,
                    data=event.to_dict(),
                )
                stream.append(stored)

                # Notify subscribers.  Iterate a snapshot so a callback that
                # calls subscribe() mid-notification cannot invalidate the
                # iteration.
                for subscriber in list(self._subscribers):
                    try:
                        subscriber(stored)
                    except Exception as e:
                        # Best-effort fan-out: a failing subscriber must not
                        # abort the append or starve other subscribers.
                        logger.error(f"Event subscriber error: {e}")

            return new_version

    def load(self, stream_id: str, from_version: int = 0, to_version: Optional[int] = None) -> List[StoredEvent]:
        """Load events within the inclusive [from_version, to_version] range."""
        with self._lock:
            stream = self._streams.get(stream_id)
            if stream is None:
                return []

            result = []
            for event in stream:
                if event.version < from_version:
                    continue
                if to_version is not None and event.version > to_version:
                    break  # stream is version-ordered; nothing further matches
                result.append(event)

            return result

    def get_version(self, stream_id: str) -> int:
        """Return the last appended version, or -1 for an unknown/empty stream."""
        with self._lock:
            stream = self._streams.get(stream_id)
            if not stream:
                return -1
            return stream[-1].version

    def get_all_stream_ids(self) -> List[str]:
        """Return the identifiers of every stream currently held."""
        with self._lock:
            return list(self._streams.keys())

    def stream_exists(self, stream_id: str) -> bool:
        """Return True when the stream has been created."""
        with self._lock:
            return stream_id in self._streams

    def subscribe(self, callback: Callable[[StoredEvent], None]) -> None:
        """Register *callback* for every future stored event.

        Fix: registration now takes the store lock, matching every other
        method, so concurrent subscribe/append interleavings are safe.
        """
        with self._lock:
            self._subscribers.append(callback)

    def clear(self) -> None:
        """Drop every stream (for testing); subscribers remain registered."""
        with self._lock:
            self._streams.clear()

    @property
    def total_events(self) -> int:
        """Total number of events across all streams."""
        with self._lock:
            return sum(len(stream) for stream in self._streams.values())
230
+
231
+
232
class FileEventStore(EventStore):
    """File-based event store persisting each stream as a JSONL file.

    A per-stream in-memory cache always mirrors the *complete* on-disk
    contents of a stream; all public methods are guarded by a re-entrant
    lock.
    """

    def __init__(self, storage_path: str):
        self._storage_path = Path(storage_path)
        self._storage_path.mkdir(parents=True, exist_ok=True)
        self._lock = threading.RLock()
        # Maps stream_id -> the full, unfiltered list of events.
        self._cache: Dict[str, List[StoredEvent]] = {}

    def _stream_file(self, stream_id: str) -> Path:
        """Get file path for a stream (path separators are sanitized)."""
        # Sanitize stream_id for filesystem
        safe_id = stream_id.replace("/", "_").replace("\\", "_")
        return self._storage_path / f"{safe_id}.jsonl"

    def _load_full_stream(self, stream_id: str) -> List[StoredEvent]:
        """Return the complete stream, reading it from disk on first access.

        Caller must hold the lock.  The cache entry is the mutable list that
        append() extends, so it always reflects the file contents.
        """
        if stream_id in self._cache:
            return self._cache[stream_id]

        events: List[StoredEvent] = []
        stream_file = self._stream_file(stream_id)
        if stream_file.exists():
            with open(stream_file, "r") as f:
                for line in f:
                    if line.strip():
                        events.append(StoredEvent.from_dict(json.loads(line)))

        self._cache[stream_id] = events
        return events

    def append(self, stream_id: str, events: List[DomainEvent], expected_version: int) -> int:
        """Append events with optimistic concurrency.

        Raises:
            ConcurrencyError: If expected_version differs from the stream's
                current version.
        """
        with self._lock:
            current_version = self.get_version(stream_id)

            if expected_version != current_version:
                raise ConcurrencyError(stream_id, expected_version, current_version)

            cached = self._load_full_stream(stream_id)
            new_version = current_version

            with open(self._stream_file(stream_id), "a") as f:
                for event in events:
                    new_version += 1
                    stored = StoredEvent(
                        stream_id=stream_id,
                        version=new_version,
                        event_type=type(event).__name__,
                        event_id=event.event_id,
                        occurred_at=event.occurred_at,
                        data=event.to_dict(),
                    )
                    f.write(json.dumps(stored.to_dict()) + "\n")
                    cached.append(stored)

            return new_version

    def load(self, stream_id: str, from_version: int = 0, to_version: Optional[int] = None) -> List[StoredEvent]:
        """Load events within the inclusive [from_version, to_version] range.

        BUG FIX: the cache now always holds the complete stream.  The
        previous implementation cached the filtered slice, so an initial
        partial read (from_version > 0) poisoned the cache and later full
        reads -- including get_version() -- returned incomplete history.
        """
        with self._lock:
            full = self._load_full_stream(stream_id)
            return [
                e
                for e in full
                if e.version >= from_version and (to_version is None or e.version <= to_version)
            ]

    def get_version(self, stream_id: str) -> int:
        """Return the last appended version, or -1 for an unknown stream."""
        with self._lock:
            events = self._load_full_stream(stream_id)
            return events[-1].version if events else -1

    def get_all_stream_ids(self) -> List[str]:
        """Get all stream IDs.

        NOTE(review): this mapping is lossy -- sanitization replaces both
        '/' and '\\' with '_', and the reversal here turns *every* '_' back
        into '/'.  Stream ids that legitimately contain underscores are
        reported incorrectly; confirm callers only use '/'-separated ids,
        or persist the original id inside the file.
        """
        with self._lock:
            return [file.stem.replace("_", "/") for file in self._storage_path.glob("*.jsonl")]

    def stream_exists(self, stream_id: str) -> bool:
        """Return True when the stream's backing file exists on disk."""
        return self._stream_file(stream_id).exists()

    def clear(self) -> None:
        """Delete every stream file and drop the cache (for testing)."""
        with self._lock:
            for file in self._storage_path.glob("*.jsonl"):
                file.unlink()
            self._cache.clear()
332
+
333
+
334
class EventStoreSnapshot:
    """Manages snapshots for event-sourced aggregates.

    Snapshots are stored one-per-stream as JSON files; saving overwrites
    any previous snapshot for the same stream.
    """

    def __init__(self, storage_path: str, snapshot_interval: int = 100):
        self._storage_path = Path(storage_path)
        self._storage_path.mkdir(parents=True, exist_ok=True)
        self._snapshot_interval = snapshot_interval  # events between snapshots
        self._lock = threading.RLock()

    def _snapshot_file(self, stream_id: str) -> Path:
        """Return the snapshot path for *stream_id* (separators sanitized)."""
        safe_id = stream_id.replace("/", "_").replace("\\", "_")
        return self._storage_path / (safe_id + ".snapshot.json")

    def save_snapshot(self, stream_id: str, version: int, state: Dict[str, Any]) -> None:
        """Persist *state* as the latest snapshot for *stream_id*."""
        record = {
            "stream_id": stream_id,
            "version": version,
            "state": state,
            "created_at": time.time(),
        }
        with self._lock:
            self._snapshot_file(stream_id).write_text(json.dumps(record))

    def load_snapshot(self, stream_id: str) -> Optional[Dict[str, Any]]:
        """Return the latest snapshot dict, or None when none has been saved."""
        with self._lock:
            path = self._snapshot_file(stream_id)
            if not path.exists():
                return None
            return json.loads(path.read_text())

    def should_snapshot(self, events_since_snapshot: int) -> bool:
        """True once enough events have accumulated since the last snapshot."""
        return events_since_snapshot >= self._snapshot_interval

    def clear(self) -> None:
        """Delete every snapshot file (for testing)."""
        with self._lock:
            for path in self._storage_path.glob("*.snapshot.json"):
                path.unlink()
379
+
380
+
381
# Process-wide singleton, lazily created on first access.
_event_store: Optional[EventStore] = None


def get_event_store() -> EventStore:
    """Return the global event store, creating an in-memory one on demand."""
    global _event_store
    if _event_store is None:
        _event_store = InMemoryEventStore()
    return _event_store


def set_event_store(store: EventStore) -> None:
    """Replace the global event store (e.g. with a persistent backend)."""
    global _event_store
    _event_store = store
@@ -0,0 +1,259 @@
1
+ """Knowledge Base package.
2
+
3
+ Provides pluggable storage backends for:
4
+ - Tool result caching
5
+ - Audit logging
6
+ - Provider state tracking
7
+ - Metrics
8
+
9
+ Supported drivers:
10
+ - PostgreSQL (requires asyncpg)
11
+ - SQLite (uses aiosqlite, included)
12
+ - Memory (for testing)
13
+
14
+ Usage:
15
+ from mcp_hangar.infrastructure.knowledge_base import (
16
+ get_knowledge_base,
17
+ init_knowledge_base,
18
+ KnowledgeBaseConfig,
19
+ )
20
+
21
+ # Initialize from config
22
+ config = KnowledgeBaseConfig.from_dict({
23
+ "enabled": True,
24
+ "dsn": "postgresql://user:pass@localhost/db",
25
+ })
26
+ kb = await init_knowledge_base(config)
27
+
28
+ # Use
29
+ await kb.audit_log(AuditEntry(event_type="test"))
30
+ await kb.cache_set("math", "add", {"a": 1}, {"result": 2})
31
+ """
32
+
33
+ from typing import Optional
34
+
35
+ from ...logging_config import get_logger
36
+ from .contracts import (
37
+ AuditEntry,
38
+ IKnowledgeBase,
39
+ KnowledgeBaseConfig,
40
+ KnowledgeBaseDriver,
41
+ MetricEntry,
42
+ ProviderStateEntry,
43
+ )
44
+
45
+ logger = get_logger(__name__)
46
+
47
# Process-wide singleton state, populated by init_knowledge_base().
_instance: Optional[IKnowledgeBase] = None
_config: Optional[KnowledgeBaseConfig] = None


def get_knowledge_base() -> Optional[IKnowledgeBase]:
    """Return the global knowledge base instance.

    None when not yet initialized or when the feature is disabled.
    """
    return _instance


def get_config() -> Optional[KnowledgeBaseConfig]:
    """Return the configuration of the current knowledge base, if any."""
    return _config


def is_available() -> bool:
    """True when a knowledge base instance has been initialized."""
    return _instance is not None
68
+
69
+
70
def _mask_dsn_password(dsn: str) -> str:
    """Return *dsn* with any password in the userinfo section replaced by ***.

    BUG FIX: the previous inline masking split on the first '@' and blindly
    dropped everything after the last ':' in the prefix, which mangled
    password-less DSNs (``postgresql://user@host`` became
    ``postgresql:***@host``, destroying the scheme) and mishandled passwords
    containing '@'.
    """
    if "@" not in dsn:
        return dsn
    # The password itself may contain '@'; the host part follows the last one.
    head, host = dsn.rsplit("@", 1)
    scheme, sep, userinfo = head.rpartition("://")
    if ":" not in userinfo:
        return dsn  # no password present; nothing to mask
    user = userinfo.split(":", 1)[0]
    return f"{scheme}{sep}{user}:***@{host}"


async def init_knowledge_base(config: KnowledgeBaseConfig) -> Optional[IKnowledgeBase]:
    """Initialize knowledge base from configuration.

    Creates the appropriate driver based on config and runs migrations.
    The module-level singleton is published only after initialization
    succeeds, so a failed init cannot leave half-configured global state.

    Args:
        config: Knowledge base configuration

    Returns:
        Initialized knowledge base instance, or None if disabled/failed
    """
    global _instance, _config

    if not config.enabled:
        logger.info("knowledge_base_disabled")
        return None

    # Create driver based on config.  Imports are deferred so optional
    # backends (asyncpg, aiosqlite) need not be installed unless used.
    if config.driver == KnowledgeBaseDriver.POSTGRES:
        from .postgres import PostgresKnowledgeBase

        instance = PostgresKnowledgeBase(config)

    elif config.driver == KnowledgeBaseDriver.SQLITE:
        from .sqlite import SQLiteKnowledgeBase

        instance = SQLiteKnowledgeBase(config)

    elif config.driver == KnowledgeBaseDriver.MEMORY:
        from .memory import MemoryKnowledgeBase

        instance = MemoryKnowledgeBase(config)

    else:
        logger.error("unknown_kb_driver", driver=config.driver)
        return None

    # Initialize (runs migrations).
    success = await instance.initialize()

    if not success:
        logger.error("knowledge_base_init_failed", driver=config.driver.value)
        return None

    # Publish globals only on success.
    _instance = instance
    _config = config

    logger.info(
        "knowledge_base_ready",
        driver=config.driver.value,
        dsn=_mask_dsn_password(config.dsn),
    )

    return _instance
132
+
133
+
134
async def close_knowledge_base() -> None:
    """Release the global knowledge base and forget its configuration."""
    global _instance, _config

    current = _instance
    if current:
        await current.close()
        _instance = None

    _config = None
143
+
144
+
145
+ # Convenience functions that use global instance
146
+
147
+
148
async def audit_log(
    event_type: str,
    provider: Optional[str] = None,
    tool: Optional[str] = None,
    arguments: Optional[dict] = None,
    result_summary: Optional[str] = None,
    duration_ms: Optional[int] = None,
    success: bool = True,
    error_message: Optional[str] = None,
    correlation_id: Optional[str] = None,
) -> bool:
    """Log an audit entry via the global instance; False when unavailable."""
    kb = _instance
    if not kb:
        return False

    entry = AuditEntry(
        event_type=event_type,
        provider=provider,
        tool=tool,
        arguments=arguments,
        result_summary=result_summary,
        duration_ms=duration_ms,
        success=success,
        error_message=error_message,
        correlation_id=correlation_id,
    )
    return await kb.audit_log(entry)
176
+
177
+
178
async def record_state_change(
    provider_id: str,
    old_state: Optional[str],
    new_state: str,
    reason: Optional[str] = None,
) -> bool:
    """Record a provider state transition; False when no instance is set."""
    kb = _instance
    if not kb:
        return False

    entry = ProviderStateEntry(
        provider_id=provider_id,
        old_state=old_state,
        new_state=new_state,
        reason=reason,
    )
    return await kb.record_state_change(entry)
196
+
197
+
198
async def record_metric(
    provider_id: str,
    metric_name: str,
    metric_value: float,
    labels: Optional[dict] = None,
) -> bool:
    """Record a provider metric; False when no instance is set."""
    kb = _instance
    if not kb:
        return False

    entry = MetricEntry(
        provider_id=provider_id,
        metric_name=metric_name,
        metric_value=metric_value,
        labels=labels,
    )
    return await kb.record_metric(entry)
216
+
217
+
218
async def cache_get(provider: str, tool: str, arguments: dict) -> Optional[dict]:
    """Fetch a cached tool result; None when no instance is set or on miss."""
    kb = _instance
    if not kb:
        return None
    return await kb.cache_get(provider, tool, arguments)
223
+
224
+
225
async def cache_set(
    provider: str,
    tool: str,
    arguments: dict,
    result: dict,
    ttl_s: Optional[int] = None,
) -> bool:
    """Store a tool result in the cache; False when no instance is set."""
    kb = _instance
    if not kb:
        return False
    return await kb.cache_set(provider, tool, arguments, result, ttl_s)
236
+
237
+
238
# Public API of the knowledge base package.
__all__ = [
    # Configuration types
    "KnowledgeBaseConfig",
    "KnowledgeBaseDriver",
    # Storage contracts and entry records
    "IKnowledgeBase",
    "AuditEntry",
    "ProviderStateEntry",
    "MetricEntry",
    # Lifecycle / instance management
    "init_knowledge_base",
    "close_knowledge_base",
    "get_knowledge_base",
    "get_config",
    "is_available",
    # Convenience wrappers over the global instance
    "audit_log",
    "record_state_change",
    "record_metric",
    "cache_get",
    "cache_set",
]