kailash 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. kailash/__init__.py +33 -1
  2. kailash/access_control/__init__.py +129 -0
  3. kailash/access_control/managers.py +461 -0
  4. kailash/access_control/rule_evaluators.py +467 -0
  5. kailash/access_control_abac.py +825 -0
  6. kailash/config/__init__.py +27 -0
  7. kailash/config/database_config.py +359 -0
  8. kailash/database/__init__.py +28 -0
  9. kailash/database/execution_pipeline.py +499 -0
  10. kailash/middleware/__init__.py +306 -0
  11. kailash/middleware/auth/__init__.py +33 -0
  12. kailash/middleware/auth/access_control.py +436 -0
  13. kailash/middleware/auth/auth_manager.py +422 -0
  14. kailash/middleware/auth/jwt_auth.py +477 -0
  15. kailash/middleware/auth/kailash_jwt_auth.py +616 -0
  16. kailash/middleware/communication/__init__.py +37 -0
  17. kailash/middleware/communication/ai_chat.py +989 -0
  18. kailash/middleware/communication/api_gateway.py +802 -0
  19. kailash/middleware/communication/events.py +470 -0
  20. kailash/middleware/communication/realtime.py +710 -0
  21. kailash/middleware/core/__init__.py +21 -0
  22. kailash/middleware/core/agent_ui.py +890 -0
  23. kailash/middleware/core/schema.py +643 -0
  24. kailash/middleware/core/workflows.py +396 -0
  25. kailash/middleware/database/__init__.py +63 -0
  26. kailash/middleware/database/base.py +113 -0
  27. kailash/middleware/database/base_models.py +525 -0
  28. kailash/middleware/database/enums.py +106 -0
  29. kailash/middleware/database/migrations.py +12 -0
  30. kailash/{api/database.py → middleware/database/models.py} +183 -291
  31. kailash/middleware/database/repositories.py +685 -0
  32. kailash/middleware/database/session_manager.py +19 -0
  33. kailash/middleware/mcp/__init__.py +38 -0
  34. kailash/middleware/mcp/client_integration.py +585 -0
  35. kailash/middleware/mcp/enhanced_server.py +576 -0
  36. kailash/nodes/__init__.py +25 -3
  37. kailash/nodes/admin/__init__.py +35 -0
  38. kailash/nodes/admin/audit_log.py +794 -0
  39. kailash/nodes/admin/permission_check.py +864 -0
  40. kailash/nodes/admin/role_management.py +823 -0
  41. kailash/nodes/admin/security_event.py +1519 -0
  42. kailash/nodes/admin/user_management.py +944 -0
  43. kailash/nodes/ai/a2a.py +24 -7
  44. kailash/nodes/ai/ai_providers.py +1 -0
  45. kailash/nodes/ai/embedding_generator.py +11 -11
  46. kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
  47. kailash/nodes/ai/llm_agent.py +407 -2
  48. kailash/nodes/ai/self_organizing.py +85 -10
  49. kailash/nodes/api/auth.py +287 -6
  50. kailash/nodes/api/rest.py +151 -0
  51. kailash/nodes/auth/__init__.py +17 -0
  52. kailash/nodes/auth/directory_integration.py +1228 -0
  53. kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
  54. kailash/nodes/auth/mfa.py +2338 -0
  55. kailash/nodes/auth/risk_assessment.py +872 -0
  56. kailash/nodes/auth/session_management.py +1093 -0
  57. kailash/nodes/auth/sso.py +1040 -0
  58. kailash/nodes/base.py +344 -13
  59. kailash/nodes/base_cycle_aware.py +4 -2
  60. kailash/nodes/base_with_acl.py +1 -1
  61. kailash/nodes/code/python.py +293 -12
  62. kailash/nodes/compliance/__init__.py +9 -0
  63. kailash/nodes/compliance/data_retention.py +1888 -0
  64. kailash/nodes/compliance/gdpr.py +2004 -0
  65. kailash/nodes/data/__init__.py +22 -2
  66. kailash/nodes/data/async_connection.py +469 -0
  67. kailash/nodes/data/async_sql.py +757 -0
  68. kailash/nodes/data/async_vector.py +598 -0
  69. kailash/nodes/data/readers.py +767 -0
  70. kailash/nodes/data/retrieval.py +360 -1
  71. kailash/nodes/data/sharepoint_graph.py +397 -21
  72. kailash/nodes/data/sql.py +94 -5
  73. kailash/nodes/data/streaming.py +68 -8
  74. kailash/nodes/data/vector_db.py +54 -4
  75. kailash/nodes/enterprise/__init__.py +13 -0
  76. kailash/nodes/enterprise/batch_processor.py +741 -0
  77. kailash/nodes/enterprise/data_lineage.py +497 -0
  78. kailash/nodes/logic/convergence.py +31 -9
  79. kailash/nodes/logic/operations.py +14 -3
  80. kailash/nodes/mixins/__init__.py +8 -0
  81. kailash/nodes/mixins/event_emitter.py +201 -0
  82. kailash/nodes/mixins/mcp.py +9 -4
  83. kailash/nodes/mixins/security.py +165 -0
  84. kailash/nodes/monitoring/__init__.py +7 -0
  85. kailash/nodes/monitoring/performance_benchmark.py +2497 -0
  86. kailash/nodes/rag/__init__.py +284 -0
  87. kailash/nodes/rag/advanced.py +1615 -0
  88. kailash/nodes/rag/agentic.py +773 -0
  89. kailash/nodes/rag/conversational.py +999 -0
  90. kailash/nodes/rag/evaluation.py +875 -0
  91. kailash/nodes/rag/federated.py +1188 -0
  92. kailash/nodes/rag/graph.py +721 -0
  93. kailash/nodes/rag/multimodal.py +671 -0
  94. kailash/nodes/rag/optimized.py +933 -0
  95. kailash/nodes/rag/privacy.py +1059 -0
  96. kailash/nodes/rag/query_processing.py +1335 -0
  97. kailash/nodes/rag/realtime.py +764 -0
  98. kailash/nodes/rag/registry.py +547 -0
  99. kailash/nodes/rag/router.py +837 -0
  100. kailash/nodes/rag/similarity.py +1854 -0
  101. kailash/nodes/rag/strategies.py +566 -0
  102. kailash/nodes/rag/workflows.py +575 -0
  103. kailash/nodes/security/__init__.py +19 -0
  104. kailash/nodes/security/abac_evaluator.py +1411 -0
  105. kailash/nodes/security/audit_log.py +91 -0
  106. kailash/nodes/security/behavior_analysis.py +1893 -0
  107. kailash/nodes/security/credential_manager.py +401 -0
  108. kailash/nodes/security/rotating_credentials.py +760 -0
  109. kailash/nodes/security/security_event.py +132 -0
  110. kailash/nodes/security/threat_detection.py +1103 -0
  111. kailash/nodes/testing/__init__.py +9 -0
  112. kailash/nodes/testing/credential_testing.py +499 -0
  113. kailash/nodes/transform/__init__.py +10 -2
  114. kailash/nodes/transform/chunkers.py +592 -1
  115. kailash/nodes/transform/processors.py +484 -14
  116. kailash/nodes/validation.py +321 -0
  117. kailash/runtime/access_controlled.py +1 -1
  118. kailash/runtime/async_local.py +41 -7
  119. kailash/runtime/docker.py +1 -1
  120. kailash/runtime/local.py +474 -55
  121. kailash/runtime/parallel.py +1 -1
  122. kailash/runtime/parallel_cyclic.py +1 -1
  123. kailash/runtime/testing.py +210 -2
  124. kailash/utils/migrations/__init__.py +25 -0
  125. kailash/utils/migrations/generator.py +433 -0
  126. kailash/utils/migrations/models.py +231 -0
  127. kailash/utils/migrations/runner.py +489 -0
  128. kailash/utils/secure_logging.py +342 -0
  129. kailash/workflow/__init__.py +16 -0
  130. kailash/workflow/cyclic_runner.py +3 -4
  131. kailash/workflow/graph.py +70 -2
  132. kailash/workflow/resilience.py +249 -0
  133. kailash/workflow/templates.py +726 -0
  134. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
  135. kailash-0.4.0.dist-info/RECORD +223 -0
  136. kailash/api/__init__.py +0 -17
  137. kailash/api/__main__.py +0 -6
  138. kailash/api/studio_secure.py +0 -893
  139. kailash/mcp/__main__.py +0 -13
  140. kailash/mcp/server_new.py +0 -336
  141. kailash/mcp/servers/__init__.py +0 -12
  142. kailash-0.3.1.dist-info/RECORD +0 -136
  143. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
  144. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
  145. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
  146. {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
--- a/kailash/nodes/data/__init__.py
+++ b/kailash/nodes/data/__init__.py
@@ -80,11 +80,24 @@ Example Workflows:
     workflow.connect('process', 'publish')
 """

+from kailash.nodes.data.async_connection import (
+    AsyncConnectionManager,
+    get_connection_manager,
+)
+
+# Async nodes
+from kailash.nodes.data.async_sql import AsyncSQLDatabaseNode
+from kailash.nodes.data.async_vector import AsyncPostgreSQLVectorNode
 from kailash.nodes.data.directory import DirectoryReaderNode
 from kailash.nodes.data.event_generation import EventGeneratorNode
 from kailash.nodes.data.file_discovery import FileDiscoveryNode
-from kailash.nodes.data.readers import CSVReaderNode, JSONReaderNode, TextReaderNode
-from kailash.nodes.data.retrieval import RelevanceScorerNode
+from kailash.nodes.data.readers import (
+    CSVReaderNode,
+    DocumentProcessorNode,
+    JSONReaderNode,
+    TextReaderNode,
+)
+from kailash.nodes.data.retrieval import HybridRetrieverNode, RelevanceScorerNode
 from kailash.nodes.data.sharepoint_graph import (
     SharePointGraphReader,
     SharePointGraphWriter,
@@ -113,6 +126,7 @@ __all__ = [
     "FileDiscoveryNode",
     # Readers
     "CSVReaderNode",
+    "DocumentProcessorNode",
     "JSONReaderNode",
     "TextReaderNode",
     "SharePointGraphReader",
@@ -126,6 +140,7 @@ __all__ = [
     "QuerySourceNode",
     # Retrieval
     "RelevanceScorerNode",
+    "HybridRetrieverNode",
     # SQL
     "SQLDatabaseNode",
     # Vector DB
@@ -137,4 +152,9 @@ __all__ = [
     "StreamPublisherNode",
     "WebSocketNode",
     "EventStreamNode",
+    # Async
+    "AsyncSQLDatabaseNode",
+    "AsyncConnectionManager",
+    "get_connection_manager",
+    "AsyncPostgreSQLVectorNode",
 ]
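The hunk above widens the public surface of kailash.nodes.data with the new async connection, SQL, and vector nodes. As a minimal sketch (assuming the 0.4.0 wheel is installed; this snippet is illustrative and not taken from the package's own docs), the new names can be imported directly from the package:

# Names newly listed in __all__ of kailash.nodes.data in 0.4.0.
from kailash.nodes.data import (
    AsyncConnectionManager,
    AsyncPostgreSQLVectorNode,
    AsyncSQLDatabaseNode,
    HybridRetrieverNode,
    get_connection_manager,
)

# AsyncConnectionManager is a process-wide singleton (see the new module below),
# so the helper and the classmethod should hand back the same object.
assert get_connection_manager() is AsyncConnectionManager.get_instance()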
--- /dev/null
+++ b/kailash/nodes/data/async_connection.py
@@ -0,0 +1,469 @@
+"""Asynchronous connection manager for database connection pooling.
+
+This module provides centralized connection pool management for async database
+operations across the Kailash SDK and external repositories. It manages connection
+lifecycles, provides health monitoring, and ensures efficient resource utilization.
+
+Design Philosophy:
+1. Singleton pattern for global connection management
+2. Multi-tenant connection isolation
+3. Health monitoring and auto-recovery
+4. Configurable pool parameters
+5. Graceful shutdown handling
+6. Thread-safe operations
+
+Key Features:
+- Connection pool management for PostgreSQL, MySQL, SQLite
+- Automatic connection validation and recovery
+- Pool metrics and monitoring
+- Multi-tenant support with isolated pools
+- Connection encryption support
+- Graceful degradation under load
+"""
+
+import asyncio
+import logging
+import time
+from collections import defaultdict
+from contextlib import asynccontextmanager
+from dataclasses import dataclass, field
+from threading import Lock
+from typing import Any, AsyncContextManager, Dict, Optional
+
+from kailash.sdk_exceptions import NodeExecutionError
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class PoolMetrics:
+    """Metrics for a connection pool."""
+
+    created_at: float = field(default_factory=time.time)
+    total_connections: int = 0
+    active_connections: int = 0
+    idle_connections: int = 0
+    total_requests: int = 0
+    failed_requests: int = 0
+    avg_wait_time: float = 0.0
+    last_health_check: float = field(default_factory=time.time)
+    is_healthy: bool = True
+
+
+@dataclass
+class PoolConfig:
+    """Configuration for a connection pool."""
+
+    min_size: int = 1
+    max_size: int = 20
+    max_queries: int = 50000
+    max_inactive_connection_lifetime: float = 300.0
+    connection_timeout: float = 10.0
+    command_timeout: float = 60.0
+    pool_timeout: float = 30.0
+    health_check_interval: float = 60.0
+    retry_attempts: int = 3
+    retry_delay: float = 1.0
+
+
+class AsyncConnectionManager:
+    """Centralized async connection pool manager.
+
+    This singleton class manages all database connection pools across the SDK,
+    providing efficient connection reuse, health monitoring, and multi-tenant
+    isolation.
+
+    Features:
+    - Singleton pattern ensures single manager instance
+    - Per-tenant connection pool isolation
+    - Automatic health checks and recovery
+    - Connection pool metrics
+    - Graceful shutdown
+
+    Example:
+        >>> manager = AsyncConnectionManager.get_instance()
+        >>> async with manager.get_connection(
+        ...     tenant_id="tenant1",
+        ...     db_config={"type": "postgresql", "host": "localhost", ...}
+        ... ) as conn:
+        ...     result = await conn.fetch("SELECT * FROM users")
+    """
+
+    _instance = None
+    _lock = Lock()
+
+    def __new__(cls):
+        """Ensure singleton instance."""
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self):
+        """Initialize connection manager."""
+        if not hasattr(self, "_initialized"):
+            self._pools: Dict[str, Dict[str, Any]] = defaultdict(dict)
+            self._metrics: Dict[str, Dict[str, PoolMetrics]] = defaultdict(dict)
+            self._configs: Dict[str, Dict[str, PoolConfig]] = defaultdict(dict)
+            self._health_check_tasks: Dict[str, asyncio.Task] = {}
+            self._shutdown = False
+            self._initialized = True
+            logger.info("AsyncConnectionManager initialized")
+
+    @classmethod
+    def get_instance(cls) -> "AsyncConnectionManager":
+        """Get singleton instance."""
+        return cls()
+
+    def _get_pool_key(self, db_config: dict) -> str:
+        """Generate unique key for connection pool."""
+        # Create deterministic key from connection parameters
+        key_parts = [
+            db_config.get("type", "unknown"),
+            db_config.get("host", "localhost"),
+            str(db_config.get("port", 0)),
+            db_config.get("database", "default"),
+            db_config.get("user", ""),
+        ]
+        return "|".join(key_parts)
+
+    async def get_pool(
+        self, tenant_id: str, db_config: dict, pool_config: Optional[PoolConfig] = None
+    ) -> Any:
+        """Get or create connection pool for tenant and database.
+
+        Args:
+            tenant_id: Tenant identifier for isolation
+            db_config: Database connection configuration
+            pool_config: Optional pool configuration overrides
+
+        Returns:
+            Database connection pool
+        """
+        if self._shutdown:
+            raise NodeExecutionError("Connection manager is shutting down")
+
+        pool_key = self._get_pool_key(db_config)
+
+        # Check if pool exists
+        if pool_key in self._pools[tenant_id]:
+            pool = self._pools[tenant_id][pool_key]
+            # Validate pool health
+            if await self._validate_pool(tenant_id, pool_key, pool):
+                self._metrics[tenant_id][pool_key].total_requests += 1
+                return pool
+
+        # Create new pool
+        pool = await self._create_pool(tenant_id, db_config, pool_config)
+        self._pools[tenant_id][pool_key] = pool
+
+        # Initialize metrics
+        self._metrics[tenant_id][pool_key] = PoolMetrics()
+        self._configs[tenant_id][pool_key] = pool_config or PoolConfig()
+
+        # Start health check task
+        task_key = f"{tenant_id}:{pool_key}"
+        if task_key in self._health_check_tasks:
+            self._health_check_tasks[task_key].cancel()
+
+        self._health_check_tasks[task_key] = asyncio.create_task(
+            self._health_check_loop(tenant_id, pool_key)
+        )
+
+        return pool
+
+    async def _create_pool(
+        self, tenant_id: str, db_config: dict, pool_config: Optional[PoolConfig] = None
+    ) -> Any:
+        """Create new connection pool."""
+        config = pool_config or PoolConfig()
+        db_type = db_config.get("type", "").lower()
+
+        try:
+            if db_type == "postgresql":
+                return await self._create_postgresql_pool(db_config, config)
+            elif db_type == "mysql":
+                return await self._create_mysql_pool(db_config, config)
+            elif db_type == "sqlite":
+                return await self._create_sqlite_pool(db_config, config)
+            else:
+                raise NodeExecutionError(f"Unsupported database type: {db_type}")
+        except Exception as e:
+            logger.error(f"Failed to create pool for tenant {tenant_id}: {e}")
+            raise NodeExecutionError(f"Connection pool creation failed: {str(e)}")
+
+    async def _create_postgresql_pool(
+        self, db_config: dict, pool_config: PoolConfig
+    ) -> Any:
+        """Create PostgreSQL connection pool."""
+        try:
+            import asyncpg
+        except ImportError:
+            raise NodeExecutionError("asyncpg not installed")
+
+        dsn = db_config.get("connection_string")
+        if not dsn:
+            dsn = (
+                f"postgresql://{db_config.get('user')}:{db_config.get('password')}@"
+                f"{db_config.get('host')}:{db_config.get('port', 5432)}/"
+                f"{db_config.get('database')}"
+            )
+
+        return await asyncpg.create_pool(
+            dsn,
+            min_size=pool_config.min_size,
+            max_size=pool_config.max_size,
+            max_queries=pool_config.max_queries,
+            max_inactive_connection_lifetime=pool_config.max_inactive_connection_lifetime,
+            timeout=pool_config.pool_timeout,
+            command_timeout=pool_config.command_timeout,
+        )
+
+    async def _create_mysql_pool(self, db_config: dict, pool_config: PoolConfig) -> Any:
+        """Create MySQL connection pool."""
+        try:
+            import aiomysql
+        except ImportError:
+            raise NodeExecutionError("aiomysql not installed")
+
+        return await aiomysql.create_pool(
+            host=db_config.get("host"),
+            port=db_config.get("port", 3306),
+            user=db_config.get("user"),
+            password=db_config.get("password"),
+            db=db_config.get("database"),
+            minsize=pool_config.min_size,
+            maxsize=pool_config.max_size,
+            pool_recycle=int(pool_config.max_inactive_connection_lifetime),
+            connect_timeout=int(pool_config.connection_timeout),
+        )
+
+    async def _create_sqlite_pool(
+        self, db_config: dict, pool_config: PoolConfig
+    ) -> Any:
+        """Create SQLite connection pool (mock pool for consistency)."""
+        try:
+            import aiosqlite
+        except ImportError:
+            raise NodeExecutionError("aiosqlite not installed")
+
+        # SQLite doesn't support true pooling, return config for connection creation
+        return {
+            "type": "sqlite",
+            "database": db_config.get("database"),
+            "timeout": pool_config.command_timeout,
+        }
+
+    async def _validate_pool(self, tenant_id: str, pool_key: str, pool: Any) -> bool:
+        """Validate pool health."""
+        metrics = self._metrics[tenant_id][pool_key]
+
+        # Check if pool is marked unhealthy
+        if not metrics.is_healthy:
+            logger.warning(f"Pool {pool_key} for tenant {tenant_id} is unhealthy")
+            return False
+
+        # Quick validation based on pool type
+        if hasattr(pool, "_closed"):
+            # asyncpg pool
+            return not pool._closed
+        elif hasattr(pool, "closed"):
+            # aiomysql pool
+            return not pool.closed
+        elif isinstance(pool, dict) and pool.get("type") == "sqlite":
+            # SQLite mock pool
+            return True
+
+        return True
+
+    @asynccontextmanager
+    async def get_connection(
+        self, tenant_id: str, db_config: dict, pool_config: Optional[PoolConfig] = None
+    ) -> AsyncContextManager[Any]:
+        """Get database connection from pool.
+
+        Args:
+            tenant_id: Tenant identifier
+            db_config: Database configuration
+            pool_config: Optional pool configuration
+
+        Yields:
+            Database connection
+        """
+        pool = await self.get_pool(tenant_id, db_config, pool_config)
+        pool_key = self._get_pool_key(db_config)
+        metrics = self._metrics[tenant_id][pool_key]
+
+        start_time = time.time()
+
+        try:
+            if isinstance(pool, dict) and pool.get("type") == "sqlite":
+                # SQLite special handling
+                import aiosqlite
+
+                async with aiosqlite.connect(pool["database"]) as conn:
+                    conn.row_factory = aiosqlite.Row
+                    metrics.active_connections += 1
+                    yield conn
+            else:
+                # PostgreSQL/MySQL connection acquisition
+                async with pool.acquire() as conn:
+                    wait_time = time.time() - start_time
+                    metrics.avg_wait_time = (
+                        metrics.avg_wait_time * metrics.total_requests + wait_time
+                    ) / (metrics.total_requests + 1)
+                    metrics.active_connections += 1
+                    yield conn
+        except Exception as e:
+            metrics.failed_requests += 1
+            logger.error(f"Connection acquisition failed: {e}")
+            raise
+        finally:
+            metrics.active_connections -= 1
+
+    async def _health_check_loop(self, tenant_id: str, pool_key: str):
+        """Background health check for connection pool."""
+        config = self._configs[tenant_id][pool_key]
+
+        while not self._shutdown:
+            try:
+                await asyncio.sleep(config.health_check_interval)
+
+                pool = self._pools[tenant_id].get(pool_key)
+                if not pool:
+                    break
+
+                metrics = self._metrics[tenant_id][pool_key]
+                metrics.last_health_check = time.time()
+
+                # Perform health check based on pool type
+                if hasattr(pool, "fetchval"):
+                    # PostgreSQL
+                    try:
+                        async with pool.acquire() as conn:
+                            await conn.fetchval("SELECT 1")
+                        metrics.is_healthy = True
+                    except Exception as e:
+                        logger.error(f"PostgreSQL health check failed: {e}")
+                        metrics.is_healthy = False
+                elif hasattr(pool, "acquire"):
+                    # MySQL
+                    try:
+                        async with pool.acquire() as conn:
+                            async with conn.cursor() as cursor:
+                                await cursor.execute("SELECT 1")
+                        metrics.is_healthy = True
+                    except Exception as e:
+                        logger.error(f"MySQL health check failed: {e}")
+                        metrics.is_healthy = False
+
+                # Update pool metrics
+                if hasattr(pool, "_holders"):
+                    # asyncpg
+                    metrics.total_connections = len(pool._holders)
+                    metrics.idle_connections = pool._queue.qsize()
+                elif hasattr(pool, "_free_pool"):
+                    # aiomysql
+                    metrics.total_connections = pool.size
+                    metrics.idle_connections = len(pool._free_pool)
+
+            except asyncio.CancelledError:
+                break
+            except Exception as e:
+                logger.error(f"Health check error for {pool_key}: {e}")
+
+    async def close_tenant_pools(self, tenant_id: str):
+        """Close all pools for a tenant."""
+        if tenant_id not in self._pools:
+            return
+
+        logger.info(f"Closing all pools for tenant {tenant_id}")
+
+        # Cancel health check tasks
+        for pool_key in self._pools[tenant_id]:
+            task_key = f"{tenant_id}:{pool_key}"
+            if task_key in self._health_check_tasks:
+                self._health_check_tasks[task_key].cancel()
+
+        # Close pools
+        for pool_key, pool in self._pools[tenant_id].items():
+            try:
+                if hasattr(pool, "close"):
+                    await pool.close()
+                if hasattr(pool, "wait_closed"):
+                    await pool.wait_closed()
+            except Exception as e:
+                logger.error(f"Error closing pool {pool_key}: {e}")
+
+        # Clean up references
+        del self._pools[tenant_id]
+        del self._metrics[tenant_id]
+        del self._configs[tenant_id]
+
+    async def shutdown(self):
+        """Shutdown all connection pools."""
+        logger.info("Shutting down AsyncConnectionManager")
+        self._shutdown = True
+
+        # Cancel all health check tasks
+        for task in self._health_check_tasks.values():
+            task.cancel()
+
+        # Close all pools
+        for tenant_id in list(self._pools.keys()):
+            await self.close_tenant_pools(tenant_id)
+
+        self._health_check_tasks.clear()
+
+    def get_metrics(self, tenant_id: Optional[str] = None) -> dict:
+        """Get connection pool metrics.
+
+        Args:
+            tenant_id: Optional tenant ID to filter metrics
+
+        Returns:
+            Dictionary of metrics by tenant and pool
+        """
+        if tenant_id:
+            return {
+                pool_key: {
+                    "created_at": metrics.created_at,
+                    "total_connections": metrics.total_connections,
+                    "active_connections": metrics.active_connections,
+                    "idle_connections": metrics.idle_connections,
+                    "total_requests": metrics.total_requests,
+                    "failed_requests": metrics.failed_requests,
+                    "avg_wait_time": metrics.avg_wait_time,
+                    "last_health_check": metrics.last_health_check,
+                    "is_healthy": metrics.is_healthy,
+                }
+                for pool_key, metrics in self._metrics.get(tenant_id, {}).items()
+            }
+        else:
+            return {
+                tenant_id: {
+                    pool_key: {
+                        "created_at": metrics.created_at,
+                        "total_connections": metrics.total_connections,
+                        "active_connections": metrics.active_connections,
+                        "idle_connections": metrics.idle_connections,
+                        "total_requests": metrics.total_requests,
+                        "failed_requests": metrics.failed_requests,
+                        "avg_wait_time": metrics.avg_wait_time,
+                        "last_health_check": metrics.last_health_check,
+                        "is_healthy": metrics.is_healthy,
+                    }
+                    for pool_key, metrics in tenant_metrics.items()
+                }
+                for tenant_id, tenant_metrics in self._metrics.items()
+            }
+
+
+# Global instance for easy access
+_connection_manager = AsyncConnectionManager()
+
+
+def get_connection_manager() -> AsyncConnectionManager:
+    """Get the global connection manager instance."""
+    return _connection_manager
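To show how the new module is meant to be driven, here is a short usage sketch patterned on the class docstring above. The tenant id, PostgreSQL credentials, and pool sizes are placeholder values for illustration, not defaults shipped with the SDK:

import asyncio

from kailash.nodes.data.async_connection import PoolConfig, get_connection_manager


async def main():
    # Placeholder connection details; "type" may also be "mysql" or "sqlite".
    db_config = {
        "type": "postgresql",
        "host": "localhost",
        "port": 5432,
        "database": "app",
        "user": "app",
        "password": "secret",
    }
    manager = get_connection_manager()

    # Pools are created lazily per (tenant, connection) pair and reused afterwards.
    async with manager.get_connection(
        tenant_id="tenant1",
        db_config=db_config,
        pool_config=PoolConfig(min_size=1, max_size=5),
    ) as conn:
        print(await conn.fetchval("SELECT 1"))  # asyncpg connection under the hood

    print(manager.get_metrics("tenant1"))  # per-pool health and usage counters
    await manager.shutdown()  # cancels health checks and closes all pools


asyncio.run(main())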