mcp_hangar-0.2.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_hangar/__init__.py +139 -0
- mcp_hangar/application/__init__.py +1 -0
- mcp_hangar/application/commands/__init__.py +67 -0
- mcp_hangar/application/commands/auth_commands.py +118 -0
- mcp_hangar/application/commands/auth_handlers.py +296 -0
- mcp_hangar/application/commands/commands.py +59 -0
- mcp_hangar/application/commands/handlers.py +189 -0
- mcp_hangar/application/discovery/__init__.py +21 -0
- mcp_hangar/application/discovery/discovery_metrics.py +283 -0
- mcp_hangar/application/discovery/discovery_orchestrator.py +497 -0
- mcp_hangar/application/discovery/lifecycle_manager.py +315 -0
- mcp_hangar/application/discovery/security_validator.py +414 -0
- mcp_hangar/application/event_handlers/__init__.py +50 -0
- mcp_hangar/application/event_handlers/alert_handler.py +191 -0
- mcp_hangar/application/event_handlers/audit_handler.py +203 -0
- mcp_hangar/application/event_handlers/knowledge_base_handler.py +120 -0
- mcp_hangar/application/event_handlers/logging_handler.py +69 -0
- mcp_hangar/application/event_handlers/metrics_handler.py +152 -0
- mcp_hangar/application/event_handlers/persistent_audit_store.py +217 -0
- mcp_hangar/application/event_handlers/security_handler.py +604 -0
- mcp_hangar/application/mcp/tooling.py +158 -0
- mcp_hangar/application/ports/__init__.py +9 -0
- mcp_hangar/application/ports/observability.py +237 -0
- mcp_hangar/application/queries/__init__.py +52 -0
- mcp_hangar/application/queries/auth_handlers.py +237 -0
- mcp_hangar/application/queries/auth_queries.py +118 -0
- mcp_hangar/application/queries/handlers.py +227 -0
- mcp_hangar/application/read_models/__init__.py +11 -0
- mcp_hangar/application/read_models/provider_views.py +139 -0
- mcp_hangar/application/sagas/__init__.py +11 -0
- mcp_hangar/application/sagas/group_rebalance_saga.py +137 -0
- mcp_hangar/application/sagas/provider_failover_saga.py +266 -0
- mcp_hangar/application/sagas/provider_recovery_saga.py +172 -0
- mcp_hangar/application/services/__init__.py +9 -0
- mcp_hangar/application/services/provider_service.py +208 -0
- mcp_hangar/application/services/traced_provider_service.py +211 -0
- mcp_hangar/bootstrap/runtime.py +328 -0
- mcp_hangar/context.py +178 -0
- mcp_hangar/domain/__init__.py +117 -0
- mcp_hangar/domain/contracts/__init__.py +57 -0
- mcp_hangar/domain/contracts/authentication.py +225 -0
- mcp_hangar/domain/contracts/authorization.py +229 -0
- mcp_hangar/domain/contracts/event_store.py +178 -0
- mcp_hangar/domain/contracts/metrics_publisher.py +59 -0
- mcp_hangar/domain/contracts/persistence.py +383 -0
- mcp_hangar/domain/contracts/provider_runtime.py +146 -0
- mcp_hangar/domain/discovery/__init__.py +20 -0
- mcp_hangar/domain/discovery/conflict_resolver.py +267 -0
- mcp_hangar/domain/discovery/discovered_provider.py +185 -0
- mcp_hangar/domain/discovery/discovery_service.py +412 -0
- mcp_hangar/domain/discovery/discovery_source.py +192 -0
- mcp_hangar/domain/events.py +433 -0
- mcp_hangar/domain/exceptions.py +525 -0
- mcp_hangar/domain/model/__init__.py +70 -0
- mcp_hangar/domain/model/aggregate.py +58 -0
- mcp_hangar/domain/model/circuit_breaker.py +152 -0
- mcp_hangar/domain/model/event_sourced_api_key.py +413 -0
- mcp_hangar/domain/model/event_sourced_provider.py +423 -0
- mcp_hangar/domain/model/event_sourced_role_assignment.py +268 -0
- mcp_hangar/domain/model/health_tracker.py +183 -0
- mcp_hangar/domain/model/load_balancer.py +185 -0
- mcp_hangar/domain/model/provider.py +810 -0
- mcp_hangar/domain/model/provider_group.py +656 -0
- mcp_hangar/domain/model/tool_catalog.py +105 -0
- mcp_hangar/domain/policies/__init__.py +19 -0
- mcp_hangar/domain/policies/provider_health.py +187 -0
- mcp_hangar/domain/repository.py +249 -0
- mcp_hangar/domain/security/__init__.py +85 -0
- mcp_hangar/domain/security/input_validator.py +710 -0
- mcp_hangar/domain/security/rate_limiter.py +387 -0
- mcp_hangar/domain/security/roles.py +237 -0
- mcp_hangar/domain/security/sanitizer.py +387 -0
- mcp_hangar/domain/security/secrets.py +501 -0
- mcp_hangar/domain/services/__init__.py +20 -0
- mcp_hangar/domain/services/audit_service.py +376 -0
- mcp_hangar/domain/services/image_builder.py +328 -0
- mcp_hangar/domain/services/provider_launcher.py +1046 -0
- mcp_hangar/domain/value_objects.py +1138 -0
- mcp_hangar/errors.py +818 -0
- mcp_hangar/fastmcp_server.py +1105 -0
- mcp_hangar/gc.py +134 -0
- mcp_hangar/infrastructure/__init__.py +79 -0
- mcp_hangar/infrastructure/async_executor.py +133 -0
- mcp_hangar/infrastructure/auth/__init__.py +37 -0
- mcp_hangar/infrastructure/auth/api_key_authenticator.py +388 -0
- mcp_hangar/infrastructure/auth/event_sourced_store.py +567 -0
- mcp_hangar/infrastructure/auth/jwt_authenticator.py +360 -0
- mcp_hangar/infrastructure/auth/middleware.py +340 -0
- mcp_hangar/infrastructure/auth/opa_authorizer.py +243 -0
- mcp_hangar/infrastructure/auth/postgres_store.py +659 -0
- mcp_hangar/infrastructure/auth/projections.py +366 -0
- mcp_hangar/infrastructure/auth/rate_limiter.py +311 -0
- mcp_hangar/infrastructure/auth/rbac_authorizer.py +323 -0
- mcp_hangar/infrastructure/auth/sqlite_store.py +624 -0
- mcp_hangar/infrastructure/command_bus.py +112 -0
- mcp_hangar/infrastructure/discovery/__init__.py +110 -0
- mcp_hangar/infrastructure/discovery/docker_source.py +289 -0
- mcp_hangar/infrastructure/discovery/entrypoint_source.py +249 -0
- mcp_hangar/infrastructure/discovery/filesystem_source.py +383 -0
- mcp_hangar/infrastructure/discovery/kubernetes_source.py +247 -0
- mcp_hangar/infrastructure/event_bus.py +260 -0
- mcp_hangar/infrastructure/event_sourced_repository.py +443 -0
- mcp_hangar/infrastructure/event_store.py +396 -0
- mcp_hangar/infrastructure/knowledge_base/__init__.py +259 -0
- mcp_hangar/infrastructure/knowledge_base/contracts.py +202 -0
- mcp_hangar/infrastructure/knowledge_base/memory.py +177 -0
- mcp_hangar/infrastructure/knowledge_base/postgres.py +545 -0
- mcp_hangar/infrastructure/knowledge_base/sqlite.py +513 -0
- mcp_hangar/infrastructure/metrics_publisher.py +36 -0
- mcp_hangar/infrastructure/observability/__init__.py +10 -0
- mcp_hangar/infrastructure/observability/langfuse_adapter.py +534 -0
- mcp_hangar/infrastructure/persistence/__init__.py +33 -0
- mcp_hangar/infrastructure/persistence/audit_repository.py +371 -0
- mcp_hangar/infrastructure/persistence/config_repository.py +398 -0
- mcp_hangar/infrastructure/persistence/database.py +333 -0
- mcp_hangar/infrastructure/persistence/database_common.py +330 -0
- mcp_hangar/infrastructure/persistence/event_serializer.py +280 -0
- mcp_hangar/infrastructure/persistence/event_upcaster.py +166 -0
- mcp_hangar/infrastructure/persistence/in_memory_event_store.py +150 -0
- mcp_hangar/infrastructure/persistence/recovery_service.py +312 -0
- mcp_hangar/infrastructure/persistence/sqlite_event_store.py +386 -0
- mcp_hangar/infrastructure/persistence/unit_of_work.py +409 -0
- mcp_hangar/infrastructure/persistence/upcasters/README.md +13 -0
- mcp_hangar/infrastructure/persistence/upcasters/__init__.py +7 -0
- mcp_hangar/infrastructure/query_bus.py +153 -0
- mcp_hangar/infrastructure/saga_manager.py +401 -0
- mcp_hangar/logging_config.py +209 -0
- mcp_hangar/metrics.py +1007 -0
- mcp_hangar/models.py +31 -0
- mcp_hangar/observability/__init__.py +54 -0
- mcp_hangar/observability/health.py +487 -0
- mcp_hangar/observability/metrics.py +319 -0
- mcp_hangar/observability/tracing.py +433 -0
- mcp_hangar/progress.py +542 -0
- mcp_hangar/retry.py +613 -0
- mcp_hangar/server/__init__.py +120 -0
- mcp_hangar/server/__main__.py +6 -0
- mcp_hangar/server/auth_bootstrap.py +340 -0
- mcp_hangar/server/auth_cli.py +335 -0
- mcp_hangar/server/auth_config.py +305 -0
- mcp_hangar/server/bootstrap.py +735 -0
- mcp_hangar/server/cli.py +161 -0
- mcp_hangar/server/config.py +224 -0
- mcp_hangar/server/context.py +215 -0
- mcp_hangar/server/http_auth_middleware.py +165 -0
- mcp_hangar/server/lifecycle.py +467 -0
- mcp_hangar/server/state.py +117 -0
- mcp_hangar/server/tools/__init__.py +16 -0
- mcp_hangar/server/tools/discovery.py +186 -0
- mcp_hangar/server/tools/groups.py +75 -0
- mcp_hangar/server/tools/health.py +301 -0
- mcp_hangar/server/tools/provider.py +939 -0
- mcp_hangar/server/tools/registry.py +320 -0
- mcp_hangar/server/validation.py +113 -0
- mcp_hangar/stdio_client.py +229 -0
- mcp_hangar-0.2.0.dist-info/METADATA +347 -0
- mcp_hangar-0.2.0.dist-info/RECORD +160 -0
- mcp_hangar-0.2.0.dist-info/WHEEL +4 -0
- mcp_hangar-0.2.0.dist-info/entry_points.txt +2 -0
- mcp_hangar-0.2.0.dist-info/licenses/LICENSE +21 -0
mcp_hangar/infrastructure/knowledge_base/sqlite.py
@@ -0,0 +1,513 @@
+"""SQLite implementation of IKnowledgeBase."""
+
+from datetime import datetime, timedelta, timezone
+import hashlib
+import json
+from pathlib import Path
+from typing import Any, Optional
+
+import aiosqlite
+
+from ...logging_config import get_logger
+from .contracts import AuditEntry, IKnowledgeBase, KnowledgeBaseConfig, MetricEntry, ProviderStateEntry
+
+logger = get_logger(__name__)
+
+# SQL Migrations for SQLite
+MIGRATIONS = [
+    {
+        "version": 1,
+        "name": "initial_schema",
+        "sql": """
+            CREATE TABLE IF NOT EXISTS schema_migrations (
+                version INTEGER PRIMARY KEY,
+                name TEXT NOT NULL,
+                applied_at TEXT DEFAULT (datetime('now'))
+            );
+
+            CREATE TABLE IF NOT EXISTS tool_cache (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                provider TEXT NOT NULL,
+                tool TEXT NOT NULL,
+                arguments_hash TEXT NOT NULL,
+                result TEXT NOT NULL,
+                created_at TEXT DEFAULT (datetime('now')),
+                expires_at TEXT NOT NULL,
+                UNIQUE(provider, tool, arguments_hash)
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_tool_cache_lookup
+                ON tool_cache(provider, tool, arguments_hash);
+            CREATE INDEX IF NOT EXISTS idx_tool_cache_expires
+                ON tool_cache(expires_at);
+
+            CREATE TABLE IF NOT EXISTS audit_log (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                timestamp TEXT DEFAULT (datetime('now')),
+                event_type TEXT NOT NULL,
+                provider TEXT,
+                tool TEXT,
+                arguments TEXT,
+                result_summary TEXT,
+                duration_ms INTEGER,
+                success INTEGER NOT NULL,
+                error_message TEXT,
+                correlation_id TEXT
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_audit_log_timestamp
+                ON audit_log(timestamp DESC);
+            CREATE INDEX IF NOT EXISTS idx_audit_log_provider
+                ON audit_log(provider, timestamp DESC);
+
+            CREATE TABLE IF NOT EXISTS provider_state_history (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                provider_id TEXT NOT NULL,
+                old_state TEXT,
+                new_state TEXT NOT NULL,
+                timestamp TEXT DEFAULT (datetime('now')),
+                reason TEXT
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_provider_state_provider
+                ON provider_state_history(provider_id, timestamp DESC);
+
+            CREATE TABLE IF NOT EXISTS provider_metrics (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                provider_id TEXT NOT NULL,
+                timestamp TEXT DEFAULT (datetime('now')),
+                metric_name TEXT NOT NULL,
+                metric_value REAL NOT NULL,
+                labels TEXT DEFAULT '{}'
+            );
+
+            CREATE INDEX IF NOT EXISTS idx_provider_metrics_lookup
+                ON provider_metrics(provider_id, metric_name, timestamp DESC);
+        """,
+    },
+]
+
+
+class SQLiteKnowledgeBase(IKnowledgeBase):
+    """SQLite implementation of knowledge base."""
+
+    def __init__(self, config: KnowledgeBaseConfig):
+        self._config = config
+        self._db_path = self._parse_dsn(config.dsn)
+        self._initialized = False
+
+    def _parse_dsn(self, dsn: str) -> str:
+        """Parse DSN to get database path."""
+        if dsn.startswith("sqlite://"):
+            return dsn.replace("sqlite://", "")
+        elif dsn.startswith("sqlite:///"):
+            return dsn.replace("sqlite:///", "")
+        elif dsn.endswith(".db"):
+            return dsn
+        else:
+            # Default to data directory
+            return "data/knowledge_base.db"
+
+    async def initialize(self) -> bool:
+        """Create database and run migrations."""
+        try:
+            # Ensure directory exists
+            db_path = Path(self._db_path)
+            db_path.parent.mkdir(parents=True, exist_ok=True)
+
+            # Run migrations
+            async with aiosqlite.connect(self._db_path) as db:
+                await self._run_migrations(db)
+
+            self._initialized = True
+            logger.info("sqlite_kb_initialized", path=self._db_path)
+            return True
+
+        except Exception as e:
+            logger.error("sqlite_kb_init_failed", error=str(e))
+            return False
+
+    async def _run_migrations(self, db: aiosqlite.Connection) -> None:
+        """Run pending migrations."""
+        # Get current version
+        try:
+            async with db.execute("SELECT MAX(version) FROM schema_migrations") as cursor:
+                row = await cursor.fetchone()
+                current_version = row[0] if row and row[0] else 0
+        except (aiosqlite.OperationalError, aiosqlite.DatabaseError):
+            # Table doesn't exist yet - this is first run
+            current_version = 0
+
+        for migration in MIGRATIONS:
+            if migration["version"] <= current_version:
+                continue
+
+            logger.info(
+                "applying_migration",
+                version=migration["version"],
+                name=migration["name"],
+            )
+
+            await db.executescript(migration["sql"])
+            await db.execute(
+                "INSERT INTO schema_migrations (version, name) VALUES (?, ?)",
+                (migration["version"], migration["name"]),
+            )
+            await db.commit()
+
+        # Get final version
+        async with db.execute("SELECT MAX(version) FROM schema_migrations") as cursor:
+            row = await cursor.fetchone()
+            final_version = row[0] if row and row[0] else 0
+
+        logger.info("sqlite_kb_schema_ready", version=final_version)
+
+    async def close(self) -> None:
+        """No persistent connections to close for SQLite."""
+        self._initialized = False
+        logger.info("sqlite_kb_closed")
+
+    async def is_healthy(self) -> bool:
+        """Check if database is accessible."""
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                async with db.execute("SELECT 1") as cursor:
+                    await cursor.fetchone()
+            return True
+        except (aiosqlite.Error, OSError) as e:
+            logger.debug("sqlite_health_check_failed", error=str(e))
+            return False
+
+    def _hash_arguments(self, arguments: dict) -> str:
+        """Create hash of arguments for cache key."""
+        serialized = json.dumps(arguments, sort_keys=True, default=str)
+        return hashlib.sha256(serialized.encode()).hexdigest()[:32]
+
+    # === Cache Operations ===
+
+    async def cache_get(self, provider: str, tool: str, arguments: dict) -> Optional[dict]:
+        args_hash = self._hash_arguments(arguments)
+        now = datetime.now(timezone.utc).isoformat()
+
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                async with db.execute(
+                    """
+                    SELECT result FROM tool_cache
+                    WHERE provider = ? AND tool = ? AND arguments_hash = ?
+                    AND expires_at > ?
+                    """,
+                    (provider, tool, args_hash, now),
+                ) as cursor:
+                    row = await cursor.fetchone()
+                    if row:
+                        logger.debug("cache_hit", provider=provider, tool=tool)
+                        return json.loads(row[0])
+                    return None
+        except Exception as e:
+            logger.warning("cache_get_failed", error=str(e))
+            return None
+
+    async def cache_set(
+        self,
+        provider: str,
+        tool: str,
+        arguments: dict,
+        result: Any,
+        ttl_s: Optional[int] = None,
+    ) -> bool:
+        args_hash = self._hash_arguments(arguments)
+        ttl = ttl_s or self._config.cache_ttl_s
+        expires_at = (datetime.now(timezone.utc) + timedelta(seconds=ttl)).isoformat()
+
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                await db.execute(
+                    """
+                    INSERT INTO tool_cache (provider, tool, arguments_hash, result, expires_at)
+                    VALUES (?, ?, ?, ?, ?)
+                    ON CONFLICT (provider, tool, arguments_hash)
+                    DO UPDATE SET result = excluded.result,
+                        expires_at = excluded.expires_at,
+                        created_at = datetime('now')
+                    """,
+                    (provider, tool, args_hash, json.dumps(result, default=str), expires_at),
+                )
+                await db.commit()
+            return True
+        except Exception as e:
+            logger.warning("cache_set_failed", error=str(e))
+            return False
+
+    async def cache_invalidate(self, provider: Optional[str] = None, tool: Optional[str] = None) -> int:
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                if provider and tool:
+                    cursor = await db.execute(
+                        "DELETE FROM tool_cache WHERE provider = ? AND tool = ?",
+                        (provider, tool),
+                    )
+                elif provider:
+                    cursor = await db.execute("DELETE FROM tool_cache WHERE provider = ?", (provider,))
+                else:
+                    cursor = await db.execute("DELETE FROM tool_cache")
+                await db.commit()
+                return cursor.rowcount
+        except Exception as e:
+            logger.warning("cache_invalidate_failed", error=str(e))
+            return 0
+
+    async def cache_cleanup(self) -> int:
+        now = datetime.now(timezone.utc).isoformat()
+
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                cursor = await db.execute("DELETE FROM tool_cache WHERE expires_at < ?", (now,))
+                await db.commit()
+                logger.info("cache_cleanup", deleted=cursor.rowcount)
+                return cursor.rowcount
+        except Exception as e:
+            logger.warning("cache_cleanup_failed", error=str(e))
+            return 0
+
+    # === Audit Operations ===
+
+    async def audit_log(self, entry: AuditEntry) -> bool:
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                await db.execute(
+                    """
+                    INSERT INTO audit_log
+                    (event_type, provider, tool, arguments, result_summary,
+                     duration_ms, success, error_message, correlation_id)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    """,
+                    (
+                        entry.event_type,
+                        entry.provider,
+                        entry.tool,
+                        json.dumps(entry.arguments, default=str) if entry.arguments else None,
+                        entry.result_summary,
+                        entry.duration_ms,
+                        1 if entry.success else 0,
+                        entry.error_message,
+                        entry.correlation_id,
+                    ),
+                )
+                await db.commit()
+            return True
+        except Exception as e:
+            logger.warning("audit_log_failed", error=str(e))
+            return False
+
+    async def audit_query(
+        self,
+        provider: Optional[str] = None,
+        tool: Optional[str] = None,
+        success: Optional[bool] = None,
+        since: Optional[datetime] = None,
+        limit: int = 100,
+    ) -> list[AuditEntry]:
+        try:
+            conditions = []
+            params = []
+
+            if provider:
+                conditions.append("provider = ?")
+                params.append(provider)
+            if tool:
+                conditions.append("tool = ?")
+                params.append(tool)
+            if success is not None:
+                conditions.append("success = ?")
+                params.append(1 if success else 0)
+            if since:
+                conditions.append("timestamp >= ?")
+                params.append(since.isoformat())
+
+            where_clause = " AND ".join(conditions) if conditions else "1=1"
+            params.append(limit)
+
+            async with aiosqlite.connect(self._db_path) as db:
+                async with db.execute(
+                    f"""
+                    SELECT event_type, provider, tool, arguments, result_summary,
+                           duration_ms, success, error_message, correlation_id, timestamp
+                    FROM audit_log
+                    WHERE {where_clause}
+                    ORDER BY timestamp DESC
+                    LIMIT ?
+                    """,
+                    params,
+                ) as cursor:
+                    rows = await cursor.fetchall()
+
+            return [
+                AuditEntry(
+                    event_type=row[0],
+                    provider=row[1],
+                    tool=row[2],
+                    arguments=json.loads(row[3]) if row[3] else None,
+                    result_summary=row[4],
+                    duration_ms=row[5],
+                    success=bool(row[6]),
+                    error_message=row[7],
+                    correlation_id=row[8],
+                    timestamp=datetime.fromisoformat(row[9]) if row[9] else None,
+                )
+                for row in rows
+            ]
+        except Exception as e:
+            logger.warning("audit_query_failed", error=str(e))
+            return []
+
+    async def audit_stats(self, hours: int = 24) -> dict:
+        since = (datetime.now(timezone.utc) - timedelta(hours=hours)).isoformat()
+
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                async with db.execute(
+                    """
+                    SELECT
+                        COUNT(*) as total,
+                        SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as success_count,
+                        SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as error_count,
+                        COUNT(DISTINCT provider) as providers,
+                        COUNT(DISTINCT tool) as tools,
+                        AVG(duration_ms) as avg_duration_ms
+                    FROM audit_log
+                    WHERE timestamp > ?
+                    """,
+                    (since,),
+                ) as cursor:
+                    row = await cursor.fetchone()
+                    if row:
+                        return {
+                            "total": row[0] or 0,
+                            "success_count": row[1] or 0,
+                            "error_count": row[2] or 0,
+                            "providers": row[3] or 0,
+                            "tools": row[4] or 0,
+                            "avg_duration_ms": row[5],
+                        }
+                    return {}
+        except Exception as e:
+            logger.warning("audit_stats_failed", error=str(e))
+            return {}
+
+    # === Provider State Operations ===
+
+    async def record_state_change(self, entry: ProviderStateEntry) -> bool:
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                await db.execute(
+                    """
+                    INSERT INTO provider_state_history
+                    (provider_id, old_state, new_state, reason)
+                    VALUES (?, ?, ?, ?)
+                    """,
+                    (entry.provider_id, entry.old_state, entry.new_state, entry.reason),
+                )
+                await db.commit()
+            return True
+        except Exception as e:
+            logger.warning("record_state_failed", error=str(e))
+            return False
+
+    async def get_state_history(self, provider_id: str, limit: int = 100) -> list[ProviderStateEntry]:
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                async with db.execute(
+                    """
+                    SELECT provider_id, old_state, new_state, reason, timestamp
+                    FROM provider_state_history
+                    WHERE provider_id = ?
+                    ORDER BY timestamp DESC
+                    LIMIT ?
+                    """,
+                    (provider_id, limit),
+                ) as cursor:
+                    rows = await cursor.fetchall()
+                    return [
+                        ProviderStateEntry(
+                            provider_id=row[0],
+                            old_state=row[1],
+                            new_state=row[2],
+                            reason=row[3],
+                            timestamp=datetime.fromisoformat(row[4]) if row[4] else None,
+                        )
+                        for row in rows
+                    ]
+        except Exception as e:
+            logger.warning("get_state_history_failed", error=str(e))
+            return []
+
+    # === Metrics Operations ===
+
+    async def record_metric(self, entry: MetricEntry) -> bool:
+        try:
+            async with aiosqlite.connect(self._db_path) as db:
+                await db.execute(
+                    """
+                    INSERT INTO provider_metrics
+                    (provider_id, metric_name, metric_value, labels)
+                    VALUES (?, ?, ?, ?)
+                    """,
+                    (
+                        entry.provider_id,
+                        entry.metric_name,
+                        entry.metric_value,
+                        json.dumps(entry.labels or {}),
+                    ),
+                )
+                await db.commit()
+            return True
+        except Exception as e:
+            logger.warning("record_metric_failed", error=str(e))
+            return False
+
+    async def get_metrics(
+        self,
+        provider_id: str,
+        metric_name: Optional[str] = None,
+        since: Optional[datetime] = None,
+        limit: int = 100,
+    ) -> list[MetricEntry]:
+        try:
+            conditions = ["provider_id = ?"]
+            params = [provider_id]
+
+            if metric_name:
+                conditions.append("metric_name = ?")
+                params.append(metric_name)
+            if since:
+                conditions.append("timestamp >= ?")
+                params.append(since.isoformat())
+
+            params.append(limit)
+
+            async with aiosqlite.connect(self._db_path) as db:
+                async with db.execute(
+                    f"""
+                    SELECT provider_id, metric_name, metric_value, labels, timestamp
+                    FROM provider_metrics
+                    WHERE {" AND ".join(conditions)}
+                    ORDER BY timestamp DESC
+                    LIMIT ?
+                    """,
+                    params,
+                ) as cursor:
+                    rows = await cursor.fetchall()
+                    return [
+                        MetricEntry(
+                            provider_id=row[0],
+                            metric_name=row[1],
+                            metric_value=row[2],
+                            labels=json.loads(row[3]) if row[3] else None,
+                            timestamp=datetime.fromisoformat(row[4]) if row[4] else None,
+                        )
+                        for row in rows
+                    ]
+        except Exception as e:
+            logger.warning("get_metrics_failed", error=str(e))
+            return []
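For orientation, a minimal usage sketch of the class above. The KnowledgeBaseConfig keyword arguments (dsn, cache_ttl_s) are inferred from the attribute reads in this diff; the actual constructor lives in contracts.py (+202, not shown), so treat the construction line as an assumption.

    import asyncio

    from mcp_hangar.infrastructure.knowledge_base.contracts import KnowledgeBaseConfig
    from mcp_hangar.infrastructure.knowledge_base.sqlite import SQLiteKnowledgeBase


    async def main() -> None:
        # Assumed kwargs: only config.dsn and config.cache_ttl_s are read above.
        config = KnowledgeBaseConfig(dsn="data/kb.db", cache_ttl_s=300)
        kb = SQLiteKnowledgeBase(config)
        if not await kb.initialize():  # creates the file and applies MIGRATIONS
            raise RuntimeError("knowledge base failed to initialize")

        args = {"query": "status"}
        await kb.cache_set("github", "search", args, {"hits": 3})  # default TTL from config
        print(await kb.cache_get("github", "search", args))  # {'hits': 3} until expiry
        await kb.close()


    asyncio.run(main())

Note that _parse_dsn tests the "sqlite://" prefix before "sqlite:///", so the three-slash branch is unreachable and such DSNs are stripped to an absolute path; a bare *.db path sidesteps the question.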
mcp_hangar/infrastructure/metrics_publisher.py
@@ -0,0 +1,36 @@
+"""Prometheus Metrics Publisher - Infrastructure implementation.
+
+This adapter implements the IMetricsPublisher contract using Prometheus metrics.
+"""
+
+from ..domain.contracts.metrics_publisher import IMetricsPublisher
+
+
+class PrometheusMetricsPublisher(IMetricsPublisher):
+    """Prometheus implementation of metrics publisher."""
+
+    def __init__(self):
+        """Initialize with lazy import to avoid circular dependencies."""
+        self._metrics = None
+
+    def _ensure_metrics(self):
+        """Lazy load metrics module."""
+        if self._metrics is None:
+            from mcp_hangar import metrics
+
+            self._metrics = metrics
+
+    def record_cold_start(self, provider_id: str, duration_s: float, mode: str) -> None:
+        """Record a cold start event."""
+        self._ensure_metrics()
+        self._metrics.record_cold_start(provider_id, duration_s, mode)
+
+    def begin_cold_start(self, provider_id: str) -> None:
+        """Mark the beginning of a cold start."""
+        self._ensure_metrics()
+        self._metrics.cold_start_begin(provider_id)
+
+    def end_cold_start(self, provider_id: str) -> None:
+        """Mark the end of a cold start."""
+        self._ensure_metrics()
+        self._metrics.cold_start_end(provider_id)
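A quick sketch of how the adapter above might be driven; the provider id and the "docker" mode string are illustrative values, and the wrapped functions (record_cold_start, cold_start_begin, cold_start_end) are assumed to exist in mcp_hangar/metrics.py (+1007, not shown) exactly as the adapter calls them.

    from mcp_hangar.infrastructure.metrics_publisher import PrometheusMetricsPublisher

    publisher = PrometheusMetricsPublisher()  # no metrics import happens yet
    publisher.begin_cold_start("github")      # first call triggers the lazy import
    # ... provider start-up work ...
    publisher.end_cold_start("github")
    publisher.record_cold_start("github", 1.8, "docker")  # duration in seconds, launch mode

Per its docstring, the lazy import exists to break a circular dependency: the adapter is constructible before mcp_hangar.metrics is importable, and only touches it on first use.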
mcp_hangar/infrastructure/observability/__init__.py
@@ -0,0 +1,10 @@
+"""Observability infrastructure adapters."""
+
+from .langfuse_adapter import LangfuseAdapter, LangfuseConfig, LangfuseObservabilityAdapter, LangfuseSpanHandle
+
+__all__ = [
+    "LangfuseAdapter",
+    "LangfuseObservabilityAdapter",
+    "LangfuseSpanHandle",
+    "LangfuseConfig",
+]
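The re-export means both import paths below resolve to the same object; a tiny check, assuming the package is installed:

    from mcp_hangar.infrastructure.observability import LangfuseAdapter
    from mcp_hangar.infrastructure.observability.langfuse_adapter import LangfuseAdapter as Direct

    assert LangfuseAdapter is Direct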