kailash 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +25 -3
- kailash/nodes/admin/__init__.py +35 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1519 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +1 -0
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +407 -2
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +293 -12
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +91 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +132 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
- kailash-0.4.0.dist-info/RECORD +223 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.1.dist-info/RECORD +0 -136
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,757 @@
|
|
1
|
+
"""Asynchronous SQL database node for the Kailash SDK.
|
2
|
+
|
3
|
+
This module provides async nodes for interacting with relational databases using SQL.
|
4
|
+
It supports PostgreSQL, MySQL, and SQLite through database-specific async libraries,
|
5
|
+
providing high-performance concurrent database operations.
|
6
|
+
|
7
|
+
Design Philosophy:
|
8
|
+
1. Async-first design for high concurrency
|
9
|
+
2. Database-agnostic interface with adapter pattern
|
10
|
+
3. Connection pooling for performance
|
11
|
+
4. Safe parameterized queries
|
12
|
+
5. Flexible result formats
|
13
|
+
6. Transaction support
|
14
|
+
7. Compatible with external repositories
|
15
|
+
|
16
|
+
Key Features:
|
17
|
+
- Non-blocking database operations
|
18
|
+
- Connection pooling with configurable limits
|
19
|
+
- Support for PostgreSQL (asyncpg), MySQL (aiomysql), SQLite (aiosqlite)
|
20
|
+
- Parameterized queries to prevent SQL injection
|
21
|
+
- Multiple fetch modes (one, all, many, iterator)
|
22
|
+
- Transaction management
|
23
|
+
- Timeout handling
|
24
|
+
- Retry logic with exponential backoff
|
25
|
+
"""
|
26
|
+
|
27
|
+
import asyncio
|
28
|
+
import json
|
29
|
+
import os
|
30
|
+
from abc import ABC, abstractmethod
|
31
|
+
from dataclasses import dataclass
|
32
|
+
from datetime import date, datetime
|
33
|
+
from decimal import Decimal
|
34
|
+
from enum import Enum
|
35
|
+
from typing import Any, AsyncIterator, Optional, Union
|
36
|
+
|
37
|
+
from kailash.nodes.base import Node, NodeParameter, register_node
|
38
|
+
from kailash.sdk_exceptions import NodeExecutionError, NodeValidationError
|
39
|
+
|
40
|
+
|
41
|
+
class DatabaseType(Enum):
    """Supported database types.

    Each member selects a driver-specific adapter: asyncpg for PostgreSQL,
    aiomysql for MySQL, and aiosqlite for SQLite.
    """

    POSTGRESQL = "postgresql"  # handled by PostgreSQLAdapter (asyncpg)
    MYSQL = "mysql"  # handled by MySQLAdapter (aiomysql)
    SQLITE = "sqlite"  # handled by SQLiteAdapter (aiosqlite)
|
47
|
+
|
48
|
+
|
49
|
+
class FetchMode(Enum):
    """Result fetch modes.

    Controls how ``DatabaseAdapter.execute`` shapes its return value.
    """

    ONE = "one"  # Fetch single row (dict or None)
    ALL = "all"  # Fetch all rows (list of dicts)
    MANY = "many"  # Fetch specific number of rows (requires fetch_size)
    ITERATOR = "iterator"  # Return async iterator (not implemented by adapters yet)
|
56
|
+
|
57
|
+
|
58
|
+
@dataclass
class DatabaseConfig:
    """Database connection configuration.

    Either a full ``connection_string`` (DSN) or the individual host/database
    fields must be provided; ``__post_init__`` enforces this at construction.
    """

    type: DatabaseType
    host: Optional[str] = None
    port: Optional[int] = None
    database: Optional[str] = None
    user: Optional[str] = None
    password: Optional[str] = None
    connection_string: Optional[str] = None
    pool_size: int = 10
    max_pool_size: int = 20
    pool_timeout: float = 30.0
    command_timeout: float = 60.0

    def __post_init__(self):
        """Validate configuration."""
        if self.connection_string:
            # A full DSN supersedes the individual connection fields.
            return
        if self.type == DatabaseType.SQLITE:
            # SQLite only needs a filesystem path in ``database``.
            if not self.database:
                raise ValueError("SQLite requires database path")
        elif not (self.host and self.database):
            raise ValueError(
                f"{self.type.value} requires host and database or connection_string"
            )
|
85
|
+
|
86
|
+
|
87
|
+
class DatabaseAdapter(ABC):
    """Abstract base class for database adapters.

    Concrete adapters wrap a driver-specific connection pool and expose a
    uniform async API: ``connect``/``disconnect``, ``execute``/``execute_many``
    and explicit transaction control.
    """

    def __init__(self, config: DatabaseConfig):
        self.config = config
        self._pool = None  # created lazily by connect()

    def _convert_row(self, row: dict) -> dict:
        """Convert database-specific types to JSON-serializable types."""

        def adapt(value: Any) -> Any:
            # Decimal, datetime and date are not JSON-serializable as-is.
            if isinstance(value, Decimal):
                return float(value)
            if isinstance(value, (datetime, date)):
                return value.isoformat()
            return value

        return {key: adapt(value) for key, value in row.items()}

    @abstractmethod
    async def connect(self) -> None:
        """Establish connection pool."""
        ...

    @abstractmethod
    async def disconnect(self) -> None:
        """Close connection pool."""
        ...

    @abstractmethod
    async def execute(
        self,
        query: str,
        params: Optional[Union[tuple, dict]] = None,
        fetch_mode: FetchMode = FetchMode.ALL,
        fetch_size: Optional[int] = None,
    ) -> Any:
        """Execute query and return results."""
        ...

    @abstractmethod
    async def execute_many(
        self, query: str, params_list: list[Union[tuple, dict]]
    ) -> None:
        """Execute query multiple times with different parameters."""
        ...

    @abstractmethod
    async def begin_transaction(self) -> Any:
        """Begin a transaction."""
        ...

    @abstractmethod
    async def commit_transaction(self, transaction: Any) -> None:
        """Commit a transaction."""
        ...

    @abstractmethod
    async def rollback_transaction(self, transaction: Any) -> None:
        """Rollback a transaction."""
        ...
|
153
|
+
|
154
|
+
|
155
|
+
class PostgreSQLAdapter(DatabaseAdapter):
    """PostgreSQL adapter using asyncpg.

    Translates the generic ``:name`` parameter style used by the node into
    asyncpg's positional ``$n`` placeholders and manages an asyncpg pool.
    """

    async def connect(self) -> None:
        """Establish connection pool."""
        try:
            import asyncpg
        except ImportError:
            raise NodeExecutionError(
                "asyncpg not installed. Install with: pip install asyncpg"
            )

        if self.config.connection_string:
            dsn = self.config.connection_string
        else:
            dsn = (
                f"postgresql://{self.config.user}:{self.config.password}@"
                f"{self.config.host}:{self.config.port or 5432}/{self.config.database}"
            )

        self._pool = await asyncpg.create_pool(
            dsn,
            min_size=1,
            max_size=self.config.max_pool_size,
            timeout=self.config.pool_timeout,
            command_timeout=self.config.command_timeout,
        )

    async def disconnect(self) -> None:
        """Close connection pool."""
        if self._pool:
            await self._pool.close()

    @staticmethod
    def _to_positional(query: str, params: dict) -> tuple:
        """Rewrite ``:name`` placeholders as ``$n`` and order values to match.

        Longer names are replaced first so that ``:id`` cannot clobber the
        prefix of ``:id2`` (the original replaced in arbitrary dict order).
        NOTE(review): this is textual substitution, not SQL parsing — a
        ``:name`` occurring inside a string literal would also be rewritten.

        Returns:
            (rewritten query, list of parameter values in placeholder order)
        """
        ordered = list(params.items())
        positions = {key: index for index, (key, _) in enumerate(ordered, 1)}
        for key in sorted(positions, key=len, reverse=True):
            query = query.replace(f":{key}", f"${positions[key]}")
        return query, [value for _, value in ordered]

    async def execute(
        self,
        query: str,
        params: Optional[Union[tuple, dict]] = None,
        fetch_mode: FetchMode = FetchMode.ALL,
        fetch_size: Optional[int] = None,
    ) -> Any:
        """Execute query and return results.

        Raises:
            ValueError: if fetch_mode is MANY and fetch_size is missing.
            NotImplementedError: for ITERATOR mode.
        """
        async with self._pool.acquire() as conn:
            # asyncpg only accepts positional ($n) parameters.
            if isinstance(params, dict):
                query, params = self._to_positional(query, params)

            if fetch_mode == FetchMode.ONE:
                row = await conn.fetchrow(query, *(params or []))
                return self._convert_row(dict(row)) if row else None
            elif fetch_mode == FetchMode.ALL:
                rows = await conn.fetch(query, *(params or []))
                return [self._convert_row(dict(row)) for row in rows]
            elif fetch_mode == FetchMode.MANY:
                if not fetch_size:
                    raise ValueError("fetch_size required for MANY mode")
                # asyncpg has no fetchmany(); fetch everything and slice.
                rows = await conn.fetch(query, *(params or []))
                return [self._convert_row(dict(row)) for row in rows[:fetch_size]]
            elif fetch_mode == FetchMode.ITERATOR:
                raise NotImplementedError("Iterator mode not yet implemented")

    async def execute_many(
        self, query: str, params_list: list[Union[tuple, dict]]
    ) -> None:
        """Execute query multiple times with different parameters.

        Bug fix vs. the original: the old code only rewrote the FIRST named
        placeholder (``if i == 1``), so statements with several ``:name``
        parameters were sent to asyncpg with unrecognized placeholders.
        The query is now rewritten once using the first dict's keys, and
        every subsequent dict is flattened in that same key order.
        """
        async with self._pool.acquire() as conn:
            converted_params = []
            key_order = None
            for params in params_list:
                if isinstance(params, dict):
                    if key_order is None:
                        # Rewrite all placeholders once, longest names first.
                        key_order = list(params)
                        positions = {k: i for i, k in enumerate(key_order, 1)}
                        for key in sorted(key_order, key=len, reverse=True):
                            query = query.replace(f":{key}", f"${positions[key]}")
                    converted_params.append([params[key] for key in key_order])
                else:
                    converted_params.append(params)

            await conn.executemany(query, converted_params)

    async def begin_transaction(self) -> Any:
        """Begin a transaction; returns an opaque (connection, tx) pair."""
        conn = await self._pool.acquire()
        tx = conn.transaction()
        await tx.start()
        return (conn, tx)

    async def commit_transaction(self, transaction: Any) -> None:
        """Commit a transaction and release its connection back to the pool."""
        conn, tx = transaction
        await tx.commit()
        await self._pool.release(conn)

    async def rollback_transaction(self, transaction: Any) -> None:
        """Rollback a transaction and release its connection back to the pool."""
        conn, tx = transaction
        await tx.rollback()
        await self._pool.release(conn)
|
259
|
+
|
260
|
+
|
261
|
+
class MySQLAdapter(DatabaseAdapter):
    """MySQL adapter using aiomysql.

    Rewrites the node's ``:name`` parameter style into pyformat
    ``%(name)s`` placeholders, which aiomysql understands natively.
    """

    async def connect(self) -> None:
        """Establish connection pool."""
        try:
            import aiomysql
        except ImportError:
            raise NodeExecutionError(
                "aiomysql not installed. Install with: pip install aiomysql"
            )

        self._pool = await aiomysql.create_pool(
            host=self.config.host,
            port=self.config.port or 3306,
            user=self.config.user,
            password=self.config.password,
            db=self.config.database,
            minsize=1,
            maxsize=self.config.max_pool_size,
            pool_recycle=3600,  # recycle connections hourly to dodge server timeouts
        )

    async def disconnect(self) -> None:
        """Close connection pool."""
        if self._pool:
            self._pool.close()
            await self._pool.wait_closed()

    async def execute(
        self,
        query: str,
        params: Optional[Union[tuple, dict]] = None,
        fetch_mode: FetchMode = FetchMode.ALL,
        fetch_size: Optional[int] = None,
    ) -> Any:
        """Execute query and return results.

        Bug fix vs. the original: aiomysql connections do not autocommit, so
        DML executed through this method was silently rolled back when the
        connection was released.  The result is now fetched first and the
        connection committed before returning (a no-op for pure SELECTs),
        matching execute_many's behavior.

        Raises:
            ValueError: if fetch_mode is MANY and fetch_size is missing.
            NotImplementedError: for ITERATOR mode.
        """
        async with self._pool.acquire() as conn:
            async with conn.cursor() as cursor:
                if isinstance(params, dict):
                    # Rewrite ``:name`` to pyformat ``%(name)s``; longest
                    # names first so ``:id`` cannot clobber ``:id2``.
                    for key in sorted(params, key=len, reverse=True):
                        query = query.replace(f":{key}", f"%({key})s")
                await cursor.execute(query, params)

                if fetch_mode == FetchMode.ONE:
                    row = await cursor.fetchone()
                    result = None
                    if row and cursor.description:
                        columns = [desc[0] for desc in cursor.description]
                        result = self._convert_row(dict(zip(columns, row)))
                elif fetch_mode == FetchMode.ALL:
                    rows = await cursor.fetchall()
                    result = []
                    if rows and cursor.description:
                        columns = [desc[0] for desc in cursor.description]
                        result = [
                            self._convert_row(dict(zip(columns, row))) for row in rows
                        ]
                elif fetch_mode == FetchMode.MANY:
                    if not fetch_size:
                        raise ValueError("fetch_size required for MANY mode")
                    rows = await cursor.fetchmany(fetch_size)
                    result = []
                    if rows and cursor.description:
                        columns = [desc[0] for desc in cursor.description]
                        result = [
                            self._convert_row(dict(zip(columns, row))) for row in rows
                        ]
                else:
                    raise NotImplementedError("Iterator mode not yet implemented")

            # Persist any DML; harmless for read-only statements.
            await conn.commit()
            return result

    async def execute_many(
        self, query: str, params_list: list[Union[tuple, dict]]
    ) -> None:
        """Execute query multiple times with different parameters."""
        async with self._pool.acquire() as conn:
            async with conn.cursor() as cursor:
                await cursor.executemany(query, params_list)
            await conn.commit()

    async def begin_transaction(self) -> Any:
        """Begin a transaction; the raw connection is the transaction handle."""
        conn = await self._pool.acquire()
        await conn.begin()
        return conn

    async def commit_transaction(self, transaction: Any) -> None:
        """Commit a transaction and release its connection back to the pool."""
        await transaction.commit()
        await self._pool.release(transaction)

    async def rollback_transaction(self, transaction: Any) -> None:
        """Rollback a transaction and release its connection back to the pool."""
        await transaction.rollback()
        await self._pool.release(transaction)
|
351
|
+
|
352
|
+
|
353
|
+
class SQLiteAdapter(DatabaseAdapter):
    """SQLite adapter using aiosqlite.

    SQLite has no true connection pooling, so a fresh connection is opened
    per operation; transactions hold a dedicated connection open instead.
    """

    async def connect(self) -> None:
        """Verify the driver is available and remember the database path."""
        try:
            import aiosqlite
        except ImportError:
            raise NodeExecutionError(
                "aiosqlite not installed. Install with: pip install aiosqlite"
            )

        # SQLite doesn't have true connection pooling;
        # connections are opened per-operation for simplicity.
        self._aiosqlite = aiosqlite
        self._db_path = self.config.database

    async def disconnect(self) -> None:
        """Close connection."""
        # Connections are managed per-operation for SQLite
        pass

    async def execute(
        self,
        query: str,
        params: Optional[Union[tuple, dict]] = None,
        fetch_mode: FetchMode = FetchMode.ALL,
        fetch_size: Optional[int] = None,
    ) -> Any:
        """Execute query and return results.

        Bug fix vs. the original: ``db.commit()`` was placed AFTER the
        ``return`` statements, so it was unreachable for every fetch mode
        and DML statements were never persisted.  The commit now runs
        before the result is returned (sqlite3 accepts ``:name`` dict
        parameters natively, so no placeholder rewriting is needed).

        Raises:
            ValueError: if fetch_mode is MANY and fetch_size is missing.
            NotImplementedError: for ITERATOR mode.
        """
        async with self._aiosqlite.connect(self._db_path) as db:
            db.row_factory = self._aiosqlite.Row
            cursor = await db.execute(query, params or [])

            if fetch_mode == FetchMode.ONE:
                row = await cursor.fetchone()
                result = self._convert_row(dict(row)) if row else None
            elif fetch_mode == FetchMode.ALL:
                rows = await cursor.fetchall()
                result = [self._convert_row(dict(row)) for row in rows]
            elif fetch_mode == FetchMode.MANY:
                if not fetch_size:
                    raise ValueError("fetch_size required for MANY mode")
                rows = await cursor.fetchmany(fetch_size)
                result = [self._convert_row(dict(row)) for row in rows]
            else:
                raise NotImplementedError("Iterator mode not yet implemented")

            # Commit before returning so DML issued through any fetch mode
            # is actually persisted.
            await db.commit()
            return result

    async def execute_many(
        self, query: str, params_list: list[Union[tuple, dict]]
    ) -> None:
        """Execute query multiple times with different parameters."""
        async with self._aiosqlite.connect(self._db_path) as db:
            await db.executemany(query, params_list)
            await db.commit()

    async def begin_transaction(self) -> Any:
        """Begin a transaction on a dedicated connection and return it."""
        db = await self._aiosqlite.connect(self._db_path)
        db.row_factory = self._aiosqlite.Row
        await db.execute("BEGIN")
        return db

    async def commit_transaction(self, transaction: Any) -> None:
        """Commit a transaction and close its dedicated connection."""
        await transaction.commit()
        await transaction.close()

    async def rollback_transaction(self, transaction: Any) -> None:
        """Rollback a transaction and close its dedicated connection."""
        await transaction.rollback()
        await transaction.close()
|
425
|
+
|
426
|
+
|
427
|
+
@register_node()
class AsyncSQLDatabaseNode(Node):
    """Asynchronous SQL database node for high-concurrency database operations.

    This node provides non-blocking database operations with connection pooling,
    supporting PostgreSQL, MySQL, and SQLite databases. It's designed for
    high-concurrency scenarios and can handle hundreds of simultaneous connections.

    Parameters:
        database_type: Type of database (postgresql, mysql, sqlite)
        connection_string: Full database connection string (optional)
        host: Database host (required if no connection_string)
        port: Database port (optional, uses defaults)
        database: Database name
        user: Database user
        password: Database password
        query: SQL query to execute
        params: Query parameters (dict or tuple)
        fetch_mode: How to fetch results (one, all, many)
        fetch_size: Number of rows for 'many' mode
        pool_size: Initial connection pool size
        max_pool_size: Maximum connection pool size
        timeout: Query timeout in seconds

    Example:
        >>> node = AsyncSQLDatabaseNode(
        ...     name="fetch_users",
        ...     database_type="postgresql",
        ...     host="localhost",
        ...     database="myapp",
        ...     user="dbuser",
        ...     password="dbpass",
        ...     query="SELECT * FROM users WHERE active = :active",
        ...     params={"active": True},
        ...     fetch_mode="all"
        ... )
        >>> result = await node.async_run()
        >>> users = result["data"]
    """

    def __init__(self, **config):
        # Adapter is created lazily on first query; see _get_adapter().
        self._adapter: Optional[DatabaseAdapter] = None
        self._connected = False
        # Extract access control manager before passing to parent
        self.access_control_manager = config.pop("access_control_manager", None)
        super().__init__(**config)

    def get_parameters(self) -> dict[str, NodeParameter]:
        """Define the parameters this node accepts."""
        params = [
            NodeParameter(
                name="database_type",
                type=str,
                required=True,
                default="postgresql",
                description="Type of database: postgresql, mysql, or sqlite",
            ),
            NodeParameter(
                name="connection_string",
                type=str,
                required=False,
                description="Full database connection string (overrides individual params)",
            ),
            NodeParameter(
                name="host", type=str, required=False, description="Database host"
            ),
            NodeParameter(
                name="port", type=int, required=False, description="Database port"
            ),
            NodeParameter(
                name="database", type=str, required=False, description="Database name"
            ),
            NodeParameter(
                name="user", type=str, required=False, description="Database user"
            ),
            NodeParameter(
                name="password",
                type=str,
                required=False,
                description="Database password",
            ),
            NodeParameter(
                name="query",
                type=str,
                required=True,
                description="SQL query to execute",
            ),
            NodeParameter(
                name="params",
                type=Any,
                required=False,
                description="Query parameters as dict or tuple",
            ),
            NodeParameter(
                name="fetch_mode",
                type=str,
                required=False,
                default="all",
                description="Fetch mode: one, all, many",
            ),
            NodeParameter(
                name="fetch_size",
                type=int,
                required=False,
                description="Number of rows to fetch in 'many' mode",
            ),
            NodeParameter(
                name="pool_size",
                type=int,
                required=False,
                default=10,
                description="Initial connection pool size",
            ),
            NodeParameter(
                name="max_pool_size",
                type=int,
                required=False,
                default=20,
                description="Maximum connection pool size",
            ),
            NodeParameter(
                name="timeout",
                type=float,
                required=False,
                default=60.0,
                description="Query timeout in seconds",
            ),
            NodeParameter(
                name="user_context",
                type=Any,
                required=False,
                description="User context for access control",
            ),
        ]

        # Convert list to dict as required by base class
        return {param.name: param for param in params}

    def _validate_config(self):
        """Validate node configuration.

        Raises:
            NodeValidationError: on unknown database type, missing connection
                details, or an inconsistent fetch_mode/fetch_size combination.
        """
        super()._validate_config()

        # Validate database type
        db_type = self.config.get("database_type", "").lower()
        if db_type not in ["postgresql", "mysql", "sqlite"]:
            raise NodeValidationError(
                f"Invalid database_type: {db_type}. "
                "Must be one of: postgresql, mysql, sqlite"
            )

        # Validate connection parameters (a DSN overrides individual fields)
        if not self.config.get("connection_string"):
            if db_type != "sqlite":
                if not self.config.get("host") or not self.config.get("database"):
                    raise NodeValidationError(
                        f"{db_type} requires host and database or connection_string"
                    )
            else:
                if not self.config.get("database"):
                    raise NodeValidationError("SQLite requires database path")

        # Validate fetch mode
        fetch_mode = self.config.get("fetch_mode", "all").lower()
        if fetch_mode not in ["one", "all", "many", "iterator"]:
            raise NodeValidationError(
                f"Invalid fetch_mode: {fetch_mode}. "
                "Must be one of: one, all, many, iterator"
            )

        if fetch_mode == "many" and not self.config.get("fetch_size"):
            raise NodeValidationError("fetch_size required when fetch_mode is 'many'")

    async def _get_adapter(self) -> DatabaseAdapter:
        """Get or create database adapter, connecting its pool on first use."""
        if not self._adapter:
            db_type = DatabaseType(self.config["database_type"].lower())
            db_config = DatabaseConfig(
                type=db_type,
                host=self.config.get("host"),
                port=self.config.get("port"),
                database=self.config.get("database"),
                user=self.config.get("user"),
                password=self.config.get("password"),
                connection_string=self.config.get("connection_string"),
                pool_size=self.config.get("pool_size", 10),
                max_pool_size=self.config.get("max_pool_size", 20),
                command_timeout=self.config.get("timeout", 60.0),
            )

            if db_type == DatabaseType.POSTGRESQL:
                self._adapter = PostgreSQLAdapter(db_config)
            elif db_type == DatabaseType.MYSQL:
                self._adapter = MySQLAdapter(db_config)
            elif db_type == DatabaseType.SQLITE:
                self._adapter = SQLiteAdapter(db_config)
            else:
                raise NodeExecutionError(f"Unsupported database type: {db_type}")

        if not self._connected:
            await self._adapter.connect()
            self._connected = True

        return self._adapter

    def _mask_result(self, result: Any, user_context: Any) -> Any:
        """Apply per-user data masking to query results when access control is on.

        Lists are masked row by row; single-row dicts are masked directly;
        anything else passes through unchanged.
        """
        if not (self.access_control_manager and user_context):
            return result
        if isinstance(result, list):
            return [
                self.access_control_manager.apply_data_masking(
                    user_context, self.metadata.name, row
                )
                for row in result
            ]
        if isinstance(result, dict):
            return self.access_control_manager.apply_data_masking(
                user_context, self.metadata.name, result
            )
        return result

    async def async_run(self, **inputs) -> dict[str, Any]:
        """Execute database query asynchronously with optional access control.

        Runtime inputs override construction-time config.  Failures are
        retried up to three times with exponential backoff before the error
        is wrapped and propagated.

        Returns:
            dict with a "result" key holding data, row_count, query and
            database_type.

        Raises:
            NodeExecutionError: if no query is provided, access is denied,
                or the query ultimately fails after retries (original
                exception chained as __cause__).
        """
        try:
            # Get runtime parameters (inputs take precedence over config)
            query = inputs.get("query", self.config.get("query"))
            params = inputs.get("params", self.config.get("params"))
            fetch_mode = FetchMode(
                inputs.get("fetch_mode", self.config.get("fetch_mode", "all")).lower()
            )
            fetch_size = inputs.get("fetch_size", self.config.get("fetch_size"))
            user_context = inputs.get("user_context")

            if not query:
                raise NodeExecutionError("No query provided")

            # Check access control if enabled
            if self.access_control_manager and user_context:
                from kailash.access_control import NodePermission

                decision = self.access_control_manager.check_node_access(
                    user_context, self.metadata.name, NodePermission.EXECUTE
                )
                if not decision.allowed:
                    raise NodeExecutionError(f"Access denied: {decision.reason}")

            # Get adapter and execute query
            adapter = await self._get_adapter()

            # Retry with exponential backoff: 1s, 2s, then give up.
            max_retries = 3
            retry_delay = 1.0

            for attempt in range(max_retries):
                try:
                    result = await adapter.execute(
                        query=query,
                        params=params,
                        fetch_mode=fetch_mode,
                        fetch_size=fetch_size,
                    )

                    # Apply data masking if access control is enabled
                    result = self._mask_result(result, user_context)

                    return {
                        "result": {
                            "data": result,
                            "row_count": (
                                len(result)
                                if isinstance(result, list)
                                else (1 if result else 0)
                            ),
                            "query": query,
                            "database_type": self.config["database_type"],
                        }
                    }

                except Exception:
                    if attempt < max_retries - 1:
                        await asyncio.sleep(retry_delay * (2**attempt))
                        continue
                    raise

        except Exception as e:
            # Chain the original exception so the root cause stays visible.
            raise NodeExecutionError(f"Database query failed: {e}") from e

    def run(self, **inputs) -> dict[str, Any]:
        """Synchronous run method - delegates to async_run."""
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No event loop running; asyncio.run() is safe to call directly.
            return asyncio.run(self.async_run(**inputs))

        # Already inside an event loop (e.g. Jupyter): patch it so that
        # asyncio.run() can be nested.
        import nest_asyncio

        nest_asyncio.apply()
        return asyncio.run(self.async_run(**inputs))

    async def process(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """Async process method for middleware compatibility."""
        return await self.async_run(**inputs)

    async def cleanup(self):
        """Clean up database connections."""
        if self._adapter and self._connected:
            await self._adapter.disconnect()
            self._connected = False
            self._adapter = None

    def __del__(self):
        """Best-effort cleanup of pooled connections at garbage collection.

        Uses getattr so a partially-constructed instance (e.g. __init__
        raised before setting attributes) cannot raise from __del__.
        """
        if getattr(self, "_adapter", None) and getattr(self, "_connected", False):
            # Schedule cleanup in the event loop if it exists
            try:
                loop = asyncio.get_event_loop()
                if not loop.is_closed():
                    # Fire-and-forget: __del__ cannot await.
                    loop.create_task(self.cleanup())
            except RuntimeError:
                # No event loop, can't clean up async resources
                pass
|