kailash 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +25 -3
- kailash/nodes/admin/__init__.py +35 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1519 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +1 -0
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +407 -2
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +293 -12
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +91 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +132 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
- kailash-0.4.0.dist-info/RECORD +223 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.1.dist-info/RECORD +0 -136
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.1.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,685 @@
|
|
1
|
+
"""
|
2
|
+
Enhanced Database Repositories for Kailash Middleware
|
3
|
+
|
4
|
+
Uses SDK database nodes for all database operations instead of raw SQLAlchemy
|
5
|
+
or other database libraries. Provides consistent patterns and automatic features
|
6
|
+
like connection pooling, retry logic, and monitoring.
|
7
|
+
"""
|
8
|
+
|
9
|
+
import json
|
10
|
+
import logging
|
11
|
+
from datetime import datetime, timezone
|
12
|
+
from typing import Any, Dict, List, Optional, Union
|
13
|
+
from uuid import uuid4
|
14
|
+
|
15
|
+
from ...nodes.data import AsyncSQLDatabaseNode, SQLDatabaseNode
|
16
|
+
from ...nodes.security import CredentialManagerNode
|
17
|
+
from ...nodes.transform import DataTransformer
|
18
|
+
|
19
|
+
logger = logging.getLogger(__name__)
|
20
|
+
|
21
|
+
|
22
|
+
class BaseRepository:
    """Base repository class using SDK database nodes.

    Owns one database node (async by default), a DataTransformer for
    result mapping, and a CredentialManagerNode for database secrets.
    Subclasses issue SQL through :meth:`_execute_query`.
    """

    def __init__(self, connection_string: str, table_name: str, use_async: bool = True):
        """Initialize the repository for *table_name*.

        Args:
            connection_string: Database connection string for the SDK node.
            table_name: Managed table; also used to derive node names.
            use_async: Use AsyncSQLDatabaseNode with connection pooling when
                True, otherwise the synchronous SQLDatabaseNode.
        """
        self.connection_string = connection_string
        self.table_name = table_name
        self.use_async = use_async

        # Use appropriate database node
        if use_async:
            self.db_node = AsyncSQLDatabaseNode(
                name=f"{table_name}_async_db",
                connection_string=connection_string,
                pool_size=10,
                max_overflow=20,
            )
        else:
            self.db_node = SQLDatabaseNode(
                name=f"{table_name}_db", connection_string=connection_string
            )

        # Data transformer for result mapping
        self.transformer = DataTransformer(name=f"{table_name}_transformer")

        # Credential management for database security
        self.credential_manager = CredentialManagerNode(
            name=f"{table_name}_credentials",
            credential_name="database_secrets",
            credential_type="database",
        )

    async def _execute_query(
        self, query: str, params: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Execute a database query using the SDK node.

        Args:
            query: SQL statement to run.
            params: Bind parameters; an empty dict is substituted when None.

        Returns:
            The raw result dict produced by the database node.

        Raises:
            Exception: Re-raises whatever the node raised, after logging.
        """
        try:
            if self.use_async:
                result = await self.db_node.execute(query=query, params=params or {})
            else:
                # NOTE(review): blocking call inside an async method; assumes
                # the sync node's execute() is cheap enough not to stall the
                # event loop — confirm against the node implementation.
                result = self.db_node.execute(query=query, params=params or {})

            return result
        except Exception as e:
            logger.error(f"Database query failed: {e}")
            raise

    async def _transform_result(self, data: Any, schema: Dict[str, str]) -> Any:
        """Transform a database result using the DataTransformer.

        Falsy *data* (None, empty list/dict) is returned unchanged without
        invoking the transformer.
        """
        if not data:
            return data

        result = await self.transformer.execute(data=data, schema=schema)

        return result["result"]

    async def _log_operation(
        self,
        operation: str,
        entity_id: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None,
    ):
        """Log a database operation for the audit trail.

        *details* is accepted for call-site symmetry but is currently not
        included in the emitted log record.
        """
        logger.info(
            f"Database operation: {self.table_name}_{operation} for entity {entity_id}"
        )
|
84
|
+
|
85
|
+
|
86
|
+
class SessionRepository(BaseRepository):
    """Repository for session management using SDK database nodes."""

    def __init__(self, connection_string: str):
        super().__init__(connection_string, "sessions", use_async=True)

    async def create_session(self, user_id: str) -> str:
        """Create a new session for *user_id* and return its generated id."""
        session_id = str(uuid4())
        await self._execute_query(
            "INSERT INTO sessions (id, user_id, created_at, active) VALUES (?, ?, ?, ?)",
            {
                "id": session_id,
                "user_id": user_id,
                "created_at": datetime.now(timezone.utc),
                "active": True,
            },
        )
        await self._log_operation("create", session_id)
        return session_id

    async def get_session(self, session_id: str) -> Optional[Dict[str, Any]]:
        """Get session by ID, or None when no row matches."""
        result = await self._execute_query(
            "SELECT * FROM sessions WHERE id = ?", {"id": session_id}
        )
        # Evaluate the nested lookup once; the previous version repeated it
        # and carried an unreachable [{}] default behind the truthiness guard.
        rows = result.get("result", {}).get("rows")
        return rows[0] if rows else None
|
117
|
+
|
118
|
+
|
119
|
+
class WorkflowRepository(BaseRepository):
    """Repository for workflow management using SDK database nodes."""

    def __init__(self, connection_string: str):
        super().__init__(connection_string, "workflows", use_async=True)

    async def create_workflow(self, workflow_config: Dict[str, Any]) -> str:
        """Create a new workflow from *workflow_config* and return its id."""
        workflow_id = str(uuid4())
        await self._execute_query(
            "INSERT INTO workflows (id, name, config, created_at) VALUES (?, ?, ?, ?)",
            {
                "id": workflow_id,
                "name": workflow_config.get("name", "unnamed"),
                "config": json.dumps(workflow_config),
                "created_at": datetime.now(timezone.utc),
            },
        )
        await self._log_operation("create", workflow_id)
        return workflow_id

    async def get_workflow(self, workflow_id: str) -> Optional[Dict[str, Any]]:
        """Get workflow by ID, or None when no row matches."""
        result = await self._execute_query(
            "SELECT * FROM workflows WHERE id = ?", {"id": workflow_id}
        )
        # Evaluate the nested lookup once; the previous version repeated it
        # and carried an unreachable [{}] default behind the truthiness guard.
        rows = result.get("result", {}).get("rows")
        return rows[0] if rows else None
|
150
|
+
|
151
|
+
|
152
|
+
class MiddlewareWorkflowRepository(BaseRepository):
    """Workflow repository using SDK database nodes.

    Provides CRUD over the ``workflows`` table; rows are returned as plain
    dicts with JSON columns parsed.
    """

    def __init__(self, connection_string: str):
        super().__init__(connection_string, "workflows")
        self._ensure_table()

    def _ensure_table(self):
        """Ensure the workflows table exists (synchronous DDL at construction)."""
        create_table_query = """
        CREATE TABLE IF NOT EXISTS workflows (
            id VARCHAR(255) PRIMARY KEY,
            name VARCHAR(255) NOT NULL,
            description TEXT,
            config JSON NOT NULL,
            created_by VARCHAR(255),
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            is_active BOOLEAN DEFAULT TRUE,
            metadata JSON
        )
        """

        # Execute synchronously for table creation
        sync_db = SQLDatabaseNode(
            name="table_creator", connection_string=self.connection_string
        )
        sync_db.execute(query=create_table_query)

    async def create(self, workflow_data: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new workflow and return the stored record.

        Args:
            workflow_data: Must contain ``name``; ``id``, ``description``,
                ``config``, ``created_by`` and ``metadata`` are optional.

        Raises:
            RuntimeError: If the INSERT returned no row.
        """
        workflow_id = workflow_data.get("id", str(uuid4()))

        query = """
        INSERT INTO workflows (id, name, description, config, created_by, metadata)
        VALUES (:id, :name, :description, :config, :created_by, :metadata)
        RETURNING *
        """

        params = {
            "id": workflow_id,
            "name": workflow_data["name"],
            "description": workflow_data.get("description", ""),
            "config": json.dumps(workflow_data.get("config", {})),
            "created_by": workflow_data.get("created_by"),
            "metadata": json.dumps(workflow_data.get("metadata", {})),
        }

        result = await self._execute_query(query, params)

        if result["rows"]:
            workflow = await self._transform_workflow(result["rows"][0])
            await self._log_operation(
                "create", workflow_id, {"name": workflow_data["name"]}
            )
            return workflow

        # RuntimeError is more precise than the bare Exception previously
        # raised; callers catching Exception are unaffected.
        raise RuntimeError("Failed to create workflow")

    async def get(self, workflow_id: str) -> Optional[Dict[str, Any]]:
        """Get an active workflow by ID, or None when not found."""
        query = "SELECT * FROM workflows WHERE id = :id AND is_active = TRUE"
        result = await self._execute_query(query, {"id": workflow_id})

        if result["rows"]:
            workflow = await self._transform_workflow(result["rows"][0])
            await self._log_operation("read", workflow_id)
            return workflow

        return None

    async def list(
        self,
        limit: int = 100,
        offset: int = 0,
        created_by: Optional[str] = None,
        is_active: bool = True,
    ) -> List[Dict[str, Any]]:
        """List workflows with optional filtering.

        NOTE: the method name shadows the builtin ``list`` but is part of
        the public interface and is kept unchanged.
        """
        query = """
        SELECT * FROM workflows
        WHERE is_active = :is_active
        """
        params = {"is_active": is_active}

        if created_by:
            query += " AND created_by = :created_by"
            params["created_by"] = created_by

        query += " ORDER BY created_at DESC LIMIT :limit OFFSET :offset"
        params.update({"limit": limit, "offset": offset})

        result = await self._execute_query(query, params)

        workflows = []
        for row in result["rows"]:
            workflow = await self._transform_workflow(row)
            workflows.append(workflow)

        await self._log_operation("list", details={"count": len(workflows)})
        return workflows

    async def update(self, workflow_id: str, updates: Dict[str, Any]) -> Dict[str, Any]:
        """Update a workflow's mutable fields and return the stored record.

        Only ``name``, ``description``, ``config`` and ``metadata`` keys in
        *updates* are applied; ``updated_at`` is always refreshed.

        Raises:
            RuntimeError: If no row matched *workflow_id*.
        """
        set_clauses = []
        params = {"id": workflow_id}

        if "name" in updates:
            set_clauses.append("name = :name")
            params["name"] = updates["name"]

        if "description" in updates:
            set_clauses.append("description = :description")
            params["description"] = updates["description"]

        if "config" in updates:
            set_clauses.append("config = :config")
            params["config"] = json.dumps(updates["config"])

        if "metadata" in updates:
            set_clauses.append("metadata = :metadata")
            params["metadata"] = json.dumps(updates["metadata"])

        set_clauses.append("updated_at = CURRENT_TIMESTAMP")

        query = f"""
        UPDATE workflows
        SET {', '.join(set_clauses)}
        WHERE id = :id
        RETURNING *
        """

        result = await self._execute_query(query, params)

        if result["rows"]:
            workflow = await self._transform_workflow(result["rows"][0])
            await self._log_operation(
                "update", workflow_id, {"updates": list(updates.keys())}
            )
            return workflow

        raise RuntimeError("Workflow not found")

    async def delete(self, workflow_id: str, soft_delete: bool = True):
        """Delete workflow (soft delete by default: marks is_active FALSE)."""
        if soft_delete:
            query = """
            UPDATE workflows
            SET is_active = FALSE, updated_at = CURRENT_TIMESTAMP
            WHERE id = :id
            """
        else:
            query = "DELETE FROM workflows WHERE id = :id"

        await self._execute_query(query, {"id": workflow_id})
        await self._log_operation("delete", workflow_id, {"soft_delete": soft_delete})

    async def _transform_workflow(self, row: Dict[str, Any]) -> Dict[str, Any]:
        """Transform a database row into a workflow dict with parsed JSON."""
        schema = {
            "id": "string",
            "name": "string",
            "description": "string",
            "config": "json",
            "created_by": "string",
            "created_at": "datetime",
            "updated_at": "datetime",
            "is_active": "boolean",
            "metadata": "json",
        }

        workflow = await self._transform_result(row, schema)

        # Parse JSON fields that came back as raw strings
        if isinstance(workflow.get("config"), str):
            workflow["config"] = json.loads(workflow["config"])
        if isinstance(workflow.get("metadata"), str):
            workflow["metadata"] = json.loads(workflow["metadata"])

        return workflow
|
332
|
+
|
333
|
+
|
334
|
+
class MiddlewareExecutionRepository(BaseRepository):
    """Execution repository using SDK database nodes.

    Tracks workflow execution records in the ``executions`` table.
    """

    def __init__(self, connection_string: str):
        super().__init__(connection_string, "executions")
        self._ensure_table()

    def _ensure_table(self):
        """Ensure the executions table exists (synchronous DDL at construction)."""
        create_table_query = """
        CREATE TABLE IF NOT EXISTS executions (
            id VARCHAR(255) PRIMARY KEY,
            workflow_id VARCHAR(255) NOT NULL,
            session_id VARCHAR(255),
            user_id VARCHAR(255),
            status VARCHAR(50) NOT NULL,
            inputs JSON,
            outputs JSON,
            error TEXT,
            started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            completed_at TIMESTAMP,
            metadata JSON
        )
        """

        sync_db = SQLDatabaseNode(
            name="table_creator", connection_string=self.connection_string
        )
        sync_db.execute(query=create_table_query)

    async def create(self, execution_data: Dict[str, Any]) -> Dict[str, Any]:
        """Create an execution record in 'pending' status.

        Args:
            execution_data: Must contain ``workflow_id``; ``id``,
                ``session_id``, ``user_id``, ``inputs`` and ``metadata``
                are optional.

        Raises:
            RuntimeError: If the INSERT returned no row.
        """
        execution_id = execution_data.get("id", str(uuid4()))

        query = """
        INSERT INTO executions (
            id, workflow_id, session_id, user_id, status, inputs, metadata
        ) VALUES (
            :id, :workflow_id, :session_id, :user_id, :status, :inputs, :metadata
        ) RETURNING *
        """

        params = {
            "id": execution_id,
            "workflow_id": execution_data["workflow_id"],
            "session_id": execution_data.get("session_id"),
            "user_id": execution_data.get("user_id"),
            "status": "pending",
            "inputs": json.dumps(execution_data.get("inputs", {})),
            "metadata": json.dumps(execution_data.get("metadata", {})),
        }

        result = await self._execute_query(query, params)

        if result["rows"]:
            execution = await self._transform_execution(result["rows"][0])
            await self._log_operation(
                "create",
                execution_id,
                {
                    "workflow_id": execution_data["workflow_id"],
                    "user_id": execution_data.get("user_id"),
                },
            )
            return execution

        # RuntimeError is more precise than the bare Exception previously
        # raised; callers catching Exception are unaffected.
        raise RuntimeError("Failed to create execution")

    async def update_status(
        self,
        execution_id: str,
        status: str,
        outputs: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Update execution status; sets completed_at on terminal states.

        Raises:
            RuntimeError: If no row matched *execution_id*.
        """
        query = """
        UPDATE executions
        SET status = :status,
            outputs = :outputs,
            error = :error,
            completed_at = CASE WHEN :status IN ('completed', 'failed') THEN CURRENT_TIMESTAMP ELSE NULL END
        WHERE id = :id
        RETURNING *
        """

        params = {
            "id": execution_id,
            "status": status,
            "outputs": json.dumps(outputs) if outputs else None,
            "error": error,
        }

        result = await self._execute_query(query, params)

        if result["rows"]:
            execution = await self._transform_execution(result["rows"][0])
            await self._log_operation("update_status", execution_id, {"status": status})
            return execution

        raise RuntimeError("Execution not found")

    async def get(self, execution_id: str) -> Optional[Dict[str, Any]]:
        """Get execution by ID, or None when not found."""
        query = "SELECT * FROM executions WHERE id = :id"
        result = await self._execute_query(query, {"id": execution_id})

        if result["rows"]:
            return await self._transform_execution(result["rows"][0])

        return None

    async def list_by_workflow(
        self, workflow_id: str, limit: int = 100, status: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List executions for a workflow, newest first, optionally by status."""
        query = "SELECT * FROM executions WHERE workflow_id = :workflow_id"
        params = {"workflow_id": workflow_id}

        if status:
            query += " AND status = :status"
            params["status"] = status

        query += " ORDER BY started_at DESC LIMIT :limit"
        params["limit"] = limit

        result = await self._execute_query(query, params)

        executions = []
        for row in result["rows"]:
            execution = await self._transform_execution(row)
            executions.append(execution)

        return executions

    async def _transform_execution(self, row: Dict[str, Any]) -> Dict[str, Any]:
        """Transform a database row into an execution dict with parsed JSON."""
        schema = {
            "id": "string",
            "workflow_id": "string",
            "session_id": "string",
            "user_id": "string",
            "status": "string",
            "inputs": "json",
            "outputs": "json",
            "error": "string",
            "started_at": "datetime",
            "completed_at": "datetime",
            "metadata": "json",
        }

        execution = await self._transform_result(row, schema)

        # Parse JSON fields that came back as raw strings
        for field in ["inputs", "outputs", "metadata"]:
            if isinstance(execution.get(field), str):
                execution[field] = json.loads(execution[field])

        return execution
|
493
|
+
|
494
|
+
|
495
|
+
class MiddlewareUserRepository(BaseRepository):
    """User repository using SDK database nodes."""

    def __init__(self, connection_string: str):
        super().__init__(connection_string, "users")
        self._ensure_table()

    def _ensure_table(self):
        """Ensure the users table exists (synchronous DDL at construction)."""
        create_table_query = """
        CREATE TABLE IF NOT EXISTS users (
            id VARCHAR(255) PRIMARY KEY,
            username VARCHAR(255) UNIQUE NOT NULL,
            email VARCHAR(255) UNIQUE NOT NULL,
            full_name VARCHAR(255),
            is_active BOOLEAN DEFAULT TRUE,
            is_superuser BOOLEAN DEFAULT FALSE,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            metadata JSON
        )
        """

        sync_db = SQLDatabaseNode(
            name="table_creator", connection_string=self.connection_string
        )
        sync_db.execute(query=create_table_query)

    async def create(self, user_data: Dict[str, Any]) -> Dict[str, Any]:
        """Create a new user and return the stored record.

        Args:
            user_data: Must contain ``username`` and ``email``; ``id``,
                ``full_name`` and ``metadata`` are optional.

        Raises:
            RuntimeError: If the INSERT returned no row.
        """
        user_id = user_data.get("id", str(uuid4()))

        query = """
        INSERT INTO users (id, username, email, full_name, metadata)
        VALUES (:id, :username, :email, :full_name, :metadata)
        RETURNING *
        """

        params = {
            "id": user_id,
            "username": user_data["username"],
            "email": user_data["email"],
            "full_name": user_data.get("full_name", ""),
            "metadata": json.dumps(user_data.get("metadata", {})),
        }

        result = await self._execute_query(query, params)

        if result["rows"]:
            user = await self._transform_user(result["rows"][0])
            await self._log_operation(
                "create", user_id, {"username": user_data["username"]}
            )
            return user

        # RuntimeError is more precise than the bare Exception previously
        # raised; callers catching Exception are unaffected.
        raise RuntimeError("Failed to create user")

    async def get_by_username(self, username: str) -> Optional[Dict[str, Any]]:
        """Get an active user by username, or None when not found."""
        query = "SELECT * FROM users WHERE username = :username AND is_active = TRUE"
        result = await self._execute_query(query, {"username": username})

        if result["rows"]:
            return await self._transform_user(result["rows"][0])

        return None

    async def _transform_user(self, row: Dict[str, Any]) -> Dict[str, Any]:
        """Transform a database row into a user dict with parsed JSON."""
        schema = {
            "id": "string",
            "username": "string",
            "email": "string",
            "full_name": "string",
            "is_active": "boolean",
            "is_superuser": "boolean",
            "created_at": "datetime",
            "updated_at": "datetime",
            "metadata": "json",
        }

        user = await self._transform_result(row, schema)

        if isinstance(user.get("metadata"), str):
            user["metadata"] = json.loads(user["metadata"])

        return user
|
582
|
+
|
583
|
+
|
584
|
+
class MiddlewarePermissionRepository(BaseRepository):
    """Permission repository using SDK database nodes.

    Manages the ``permissions`` catalog and the ``user_permissions``
    grant table.
    """

    def __init__(self, connection_string: str):
        super().__init__(connection_string, "permissions")
        self._ensure_table()

    def _ensure_table(self):
        """Ensure permission tables exist (synchronous DDL at construction)."""
        create_tables_query = """
        CREATE TABLE IF NOT EXISTS permissions (
            id VARCHAR(255) PRIMARY KEY,
            resource VARCHAR(255) NOT NULL,
            action VARCHAR(255) NOT NULL,
            description TEXT,
            UNIQUE(resource, action)
        );

        CREATE TABLE IF NOT EXISTS user_permissions (
            user_id VARCHAR(255) NOT NULL,
            permission_id VARCHAR(255) NOT NULL,
            granted_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            granted_by VARCHAR(255),
            expires_at TIMESTAMP,
            PRIMARY KEY (user_id, permission_id)
        );
        """

        sync_db = SQLDatabaseNode(
            name="table_creator", connection_string=self.connection_string
        )
        # The node runs one statement per call, so split the DDL on ';'
        # (safe here: the statements contain no embedded semicolons).
        for query in create_tables_query.split(";"):
            if query.strip():
                sync_db.execute(query=query.strip())

    async def grant_permission(
        self,
        user_id: str,
        resource: str,
        action: str,
        granted_by: Optional[str] = None,
        expires_at: Optional[datetime] = None,
    ):
        """Grant a resource/action permission to a user (upsert on re-grant)."""
        # First ensure permission exists
        perm_query = """
        INSERT INTO permissions (id, resource, action)
        VALUES (:id, :resource, :action)
        ON CONFLICT (resource, action) DO NOTHING
        RETURNING id
        """

        perm_id = f"{resource}:{action}"
        perm_params = {"id": perm_id, "resource": resource, "action": action}

        await self._execute_query(perm_query, perm_params)

        # Grant to user
        grant_query = """
        INSERT INTO user_permissions (user_id, permission_id, granted_by, expires_at)
        VALUES (:user_id, :permission_id, :granted_by, :expires_at)
        ON CONFLICT (user_id, permission_id) DO UPDATE
        SET granted_at = CURRENT_TIMESTAMP, granted_by = :granted_by, expires_at = :expires_at
        """

        grant_params = {
            "user_id": user_id,
            "permission_id": perm_id,
            "granted_by": granted_by,
            "expires_at": expires_at,
        }

        await self._execute_query(grant_query, grant_params)
        await self._log_operation(
            "grant",
            user_id,
            {"resource": resource, "action": action, "granted_by": granted_by},
        )

    async def check_permission(self, user_id: str, resource: str, action: str) -> bool:
        """Check if a user holds a non-expired permission for resource/action."""
        query = """
        SELECT 1 FROM user_permissions up
        JOIN permissions p ON up.permission_id = p.id
        WHERE up.user_id = :user_id
        AND p.resource = :resource
        AND p.action = :action
        AND (up.expires_at IS NULL OR up.expires_at > CURRENT_TIMESTAMP)
        """

        params = {"user_id": user_id, "resource": resource, "action": action}

        result = await self._execute_query(query, params)
        # Truthiness of the row list replaces the previous len(...) > 0.
        has_permission = bool(result["rows"])

        await self._log_operation(
            "check",
            user_id,
            {"resource": resource, "action": action, "granted": has_permission},
        )

        return has_permission
|
@@ -0,0 +1,19 @@
|
|
1
|
+
"""
|
2
|
+
Database Session Manager for Kailash Middleware
|
3
|
+
|
4
|
+
Provides database session management with middleware integration.
|
5
|
+
"""
|
6
|
+
|
7
|
+
from typing import Any, Dict, Optional
|
8
|
+
|
9
|
+
|
10
|
+
class MiddlewareDatabaseManager:
    """Database manager with middleware integration.

    Placeholder implementation: construction currently performs no work.
    """

    def __init__(self) -> None:
        """Create a manager; no setup is required yet."""
|
15
|
+
|
16
|
+
|
17
|
+
def get_middleware_db_session():
    """Get middleware database session.

    Placeholder: returns None until session wiring is implemented.
    """
    return None
|