kailash 0.6.0__py3-none-any.whl → 0.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/access_control/__init__.py +1 -1
- kailash/core/actors/adaptive_pool_controller.py +630 -0
- kailash/core/actors/connection_actor.py +3 -3
- kailash/core/ml/__init__.py +1 -0
- kailash/core/ml/query_patterns.py +544 -0
- kailash/core/monitoring/__init__.py +19 -0
- kailash/core/monitoring/connection_metrics.py +488 -0
- kailash/core/optimization/__init__.py +1 -0
- kailash/core/resilience/__init__.py +17 -0
- kailash/core/resilience/circuit_breaker.py +382 -0
- kailash/gateway/api.py +7 -5
- kailash/gateway/enhanced_gateway.py +1 -1
- kailash/middleware/auth/access_control.py +11 -11
- kailash/middleware/communication/ai_chat.py +7 -7
- kailash/middleware/communication/api_gateway.py +5 -15
- kailash/middleware/gateway/checkpoint_manager.py +45 -8
- kailash/middleware/gateway/event_store.py +66 -26
- kailash/middleware/mcp/enhanced_server.py +2 -2
- kailash/nodes/admin/permission_check.py +110 -30
- kailash/nodes/admin/schema.sql +387 -0
- kailash/nodes/admin/tenant_isolation.py +249 -0
- kailash/nodes/admin/transaction_utils.py +244 -0
- kailash/nodes/admin/user_management.py +37 -9
- kailash/nodes/ai/ai_providers.py +55 -3
- kailash/nodes/ai/llm_agent.py +115 -13
- kailash/nodes/data/query_pipeline.py +641 -0
- kailash/nodes/data/query_router.py +895 -0
- kailash/nodes/data/sql.py +24 -0
- kailash/nodes/data/workflow_connection_pool.py +451 -23
- kailash/nodes/monitoring/__init__.py +3 -5
- kailash/nodes/monitoring/connection_dashboard.py +822 -0
- kailash/nodes/rag/__init__.py +1 -3
- kailash/resources/registry.py +6 -0
- kailash/runtime/async_local.py +7 -0
- kailash/utils/export.py +152 -0
- kailash/workflow/builder.py +42 -0
- kailash/workflow/graph.py +86 -17
- kailash/workflow/templates.py +4 -9
- {kailash-0.6.0.dist-info → kailash-0.6.2.dist-info}/METADATA +14 -1
- {kailash-0.6.0.dist-info → kailash-0.6.2.dist-info}/RECORD +45 -31
- {kailash-0.6.0.dist-info → kailash-0.6.2.dist-info}/WHEEL +0 -0
- {kailash-0.6.0.dist-info → kailash-0.6.2.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.0.dist-info → kailash-0.6.2.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.0.dist-info → kailash-0.6.2.dist-info}/top_level.txt +0 -0
```diff
@@ -13,7 +13,7 @@ import logging
 import time
 import uuid
 from dataclasses import dataclass, field
-from datetime import datetime
+from datetime import UTC, datetime
 from enum import Enum
 from typing import Any, AsyncIterator, Callable, Dict, List, Optional
```
```diff
@@ -55,7 +55,7 @@ class RequestEvent:
     event_id: str = field(default_factory=lambda: f"evt_{uuid.uuid4().hex[:12]}")
     event_type: EventType = EventType.REQUEST_CREATED
     request_id: str = ""
-    timestamp: datetime = field(default_factory=datetime.utcnow)
+    timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
     sequence_number: int = 0
     data: Dict[str, Any] = field(default_factory=dict)
     metadata: Dict[str, Any] = field(default_factory=dict)
```
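The two hunks above migrate the event store's timestamps from the deprecated, naive `datetime.utcnow` to timezone-aware `datetime.now(UTC)`. A minimal standalone sketch of the same default-factory pattern (not the package's actual dataclass):

```python
from dataclasses import dataclass, field
from datetime import UTC, datetime


@dataclass
class Event:
    # datetime.utcnow() is deprecated since Python 3.12 and returns a naive value;
    # a lambda is needed because datetime.now(UTC) takes an argument.
    timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))


print(Event().timestamp.tzinfo)  # UTC
```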
```diff
@@ -103,6 +103,7 @@ class EventStore:
         # In-memory buffer
         self._buffer: List[RequestEvent] = []
         self._buffer_lock = asyncio.Lock()
+        self._flush_in_progress = False
 
         # Event stream
         self._event_stream: List[RequestEvent] = []
```
```diff
@@ -120,7 +121,16 @@ class EventStore:
         self.flush_count = 0
 
         # Start flush task
-        self._flush_task = asyncio.create_task(self._flush_loop())
+        try:
+            self._flush_task = asyncio.create_task(self._flush_loop())
+        except RuntimeError:
+            # If no event loop is running, defer task creation
+            self._flush_task = None
+
+    async def _ensure_flush_task(self):
+        """Ensure the flush task is running."""
+        if self._flush_task is None:
+            self._flush_task = asyncio.create_task(self._flush_loop())
 
     async def append(
         self,
```
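This hunk makes the flush task lazy: `asyncio.create_task` raises `RuntimeError` when no event loop is running (for example, when the store is constructed synchronously), so creation is deferred to `_ensure_flush_task`, which `append` awaits in the next hunk. A rough standalone sketch of the pattern, with simplified names:

```python
import asyncio


class LazyFlusher:
    """Defers its background task until code runs inside an event loop."""

    def __init__(self) -> None:
        try:
            self._flush_task = asyncio.create_task(self._flush_loop())
        except RuntimeError:
            # No running loop yet (synchronous construction); create it later.
            self._flush_task = None

    async def _ensure_flush_task(self) -> None:
        if self._flush_task is None:
            self._flush_task = asyncio.create_task(self._flush_loop())

    async def _flush_loop(self) -> None:
        while True:
            await asyncio.sleep(1.0)

    async def append(self, item: str) -> None:
        await self._ensure_flush_task()  # first async call starts the loop


flusher = LazyFlusher()                  # constructed outside any loop: task is None


async def main() -> None:
    await flusher.append("x")
    assert flusher._flush_task is not None


asyncio.run(main())
```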
```diff
@@ -130,6 +140,9 @@ class EventStore:
         metadata: Optional[Dict[str, Any]] = None,
     ) -> RequestEvent:
         """Append an event to the store."""
+        # Ensure flush task is running
+        await self._ensure_flush_task()
+
         async with self._buffer_lock:
             # Get next sequence number
             sequence = self._sequences.get(request_id, 0)
```
```diff
@@ -148,19 +161,27 @@ class EventStore:
             self._buffer.append(event)
             self.event_count += 1
 
-            #
-
-                await self._flush_buffer()
+            # Check if we need to flush (but don't flush inside the lock)
+            needs_flush = len(self._buffer) >= self.batch_size
 
-
-
+        # Apply projections outside the lock
+        await self._apply_projections(event)
 
-
-
-
-
+        # Flush if needed (outside the lock to avoid deadlock)
+        if needs_flush and not self._flush_in_progress:
+            # Set flag to prevent concurrent flushes
+            self._flush_in_progress = True
+            try:
+                await self._flush_buffer()
+            finally:
+                self._flush_in_progress = False
 
-
+        logger.debug(
+            f"Appended event {event.event_type.value} for request {request_id} "
+            f"(seq: {sequence})"
+        )
+
+        return event
 
     async def get_events(
         self,
```
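This `append` rewrite decides whether to flush while holding the buffer lock, but performs the flush (and the projection callbacks) only after releasing it, with a `_flush_in_progress` flag guarding against overlapping flushes. Since `_flush_buffer` re-acquires the same lock, flushing inside `async with self._buffer_lock` would deadlock. A condensed sketch of the decide-inside, act-outside pattern (simplified names, not the package's API):

```python
import asyncio


class BatchingBuffer:
    def __init__(self, batch_size: int = 100) -> None:
        self._items: list = []
        self._lock = asyncio.Lock()
        self._flush_in_progress = False
        self.batch_size = batch_size

    async def append(self, item) -> None:
        async with self._lock:
            self._items.append(item)
            needs_flush = len(self._items) >= self.batch_size  # decide under the lock

        # Act outside the lock: _flush re-acquires it, so calling it inside the
        # `async with` above would deadlock on a non-reentrant asyncio.Lock.
        if needs_flush and not self._flush_in_progress:
            self._flush_in_progress = True
            try:
                await self._flush()
            finally:
                self._flush_in_progress = False

    async def _flush(self) -> None:
        async with self._lock:
            batch, self._items = self._items, []
        # ... write `batch` to storage ...
```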
```diff
@@ -233,6 +254,9 @@ class EventStore:
         follow: bool = False,
     ) -> AsyncIterator[RequestEvent]:
         """Stream events as they occur."""
+        # Ensure buffer is flushed before streaming
+        await self._flush_buffer()
+
         last_index = 0
 
         while True:
```
```diff
@@ -294,12 +318,19 @@ class EventStore:
 
     async def _flush_buffer(self) -> None:
         """Flush event buffer to storage."""
-
-
-
-
-
-
+        # Acquire lock with timeout to prevent deadlock
+        try:
+            # Use wait_for to add timeout on lock acquisition
+            async with asyncio.timeout(1.0):  # 1 second timeout
+                async with self._buffer_lock:
+                    if not self._buffer:
+                        return
+
+                    events_to_flush = self._buffer.copy()
+                    self._buffer.clear()
+        except asyncio.TimeoutError:
+            logger.warning("Timeout acquiring buffer lock during flush")
+            return
 
         # Add to in-memory stream
         async with self._stream_lock:
```
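`_flush_buffer` now wraps lock acquisition in `asyncio.timeout` (Python 3.11+), so a stuck lock degrades into a logged warning instead of a hang. A small standalone version of that idiom (the one-second budget mirrors the diff but is otherwise arbitrary):

```python
import asyncio
import logging

logger = logging.getLogger(__name__)


async def drain(lock: asyncio.Lock, buffer: list) -> list:
    """Copy-and-clear `buffer` under `lock`, giving up after one second."""
    try:
        async with asyncio.timeout(1.0):   # cancels the block if it overruns
            async with lock:
                batch = buffer.copy()
                buffer.clear()
                return batch
    except TimeoutError:                   # asyncio.TimeoutError is an alias on 3.11+
        logger.warning("Timeout acquiring buffer lock during flush")
        return []
```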
```diff
@@ -317,10 +348,16 @@ class EventStore:
         while True:
             try:
                 await asyncio.sleep(self.flush_interval)
-                await self._flush_buffer()
+                if not self._flush_in_progress:
+                    self._flush_in_progress = True
+                    try:
+                        await self._flush_buffer()
+                    finally:
+                        self._flush_in_progress = False
             except asyncio.CancelledError:
                 # Final flush before shutdown
-                await self._flush_buffer()
+                if not self._flush_in_progress:
+                    await self._flush_buffer()
                 break
             except Exception as e:
                 logger.error(f"Flush error: {e}")
```
```diff
@@ -388,11 +425,14 @@ class EventStore:
 
     async def close(self) -> None:
         """Close event store and flush remaining events."""
-        self._flush_task.cancel()
-        try:
-            await self._flush_task
-        except asyncio.CancelledError:
-            pass
+        if self._flush_task is not None:
+            self._flush_task.cancel()
+            try:
+                await self._flush_task
+            except asyncio.CancelledError:
+                pass
+        # Final flush
+        await self._flush_buffer()
 
 
 # Example projection handlers
```
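`close()` now tolerates a never-created flush task and always performs a final flush after stopping the loop. Cancel-then-await is the usual way to stop a background task cleanly; a brief sketch:

```python
import asyncio


async def stop_background_task(task: asyncio.Task | None) -> None:
    """Cancel a background task, if any, and wait for it to unwind."""
    if task is not None:
        task.cancel()
        try:
            await task  # re-raises the CancelledError from inside the task
        except asyncio.CancelledError:
            pass
```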
```diff
@@ -389,7 +389,7 @@ result = {'execution_result': execution_result}
         tool_node = self.tools[tool_name]
 
         try:
-            result = tool_node.
+            result = tool_node.execute(tool_input=arguments)
 
             # Emit middleware event
             if self.event_stream:
```
```diff
@@ -433,7 +433,7 @@ result = {'execution_result': execution_result}
         resource_node = self.resources[uri]
 
         try:
-            result = resource_node.
+            result = resource_node.execute({"resource_uri": uri})
 
             # Emit middleware event
             if self.event_stream:
```
```diff
@@ -721,36 +721,67 @@ class PermissionCheckNode(Node):
         }
 
     def _get_user_context(self, user_id: str, tenant_id: str) -> Optional[UserContext]:
-        """Get user context for permission evaluation."""
-        # Query user data from unified admin schema
-
-        SELECT user_id, email,
+        """Get user context for permission evaluation with strict tenant isolation."""
+        # Query user data and assigned roles from unified admin schema
+        user_query = """
+        SELECT user_id, email, attributes, status, tenant_id
         FROM users
         WHERE user_id = $1 AND tenant_id = $2 AND status = 'active'
         """
 
+        # Get assigned roles from user_role_assignments table with strict tenant isolation
+        roles_query = """
+        SELECT role_id
+        FROM user_role_assignments
+        WHERE user_id = $1 AND tenant_id = $2 AND is_active = true
+        """
+
         try:
-
-
+            # Get user data - strict tenant check
+            user_result = self._db_node.run(
+                query=user_query, parameters=[user_id, tenant_id], result_format="dict"
             )
 
-
-            user_rows = result.get("data", [])
+            user_rows = user_result.get("data", [])
             if not user_rows:
+                # User not found in this tenant - strict tenant isolation
+                self.logger.debug(f"User {user_id} not found in tenant {tenant_id}")
                 return None
 
             user_data = user_rows[0]
 
+            # Verify tenant isolation - ensure user belongs to the requested tenant
+            if user_data.get("tenant_id") != tenant_id:
+                self.logger.warning(
+                    f"Tenant isolation violation: User {user_id} belongs to {user_data.get('tenant_id')} but permission check requested for {tenant_id}"
+                )
+                return None
+
+            # Get assigned roles - also with strict tenant isolation
+            roles_result = self._db_node.run(
+                query=roles_query, parameters=[user_id, tenant_id], result_format="dict"
+            )
+
+            role_rows = roles_result.get("data", [])
+            assigned_roles = [row["role_id"] for row in role_rows]
+
+            # Log for debugging tenant isolation
+            self.logger.debug(
+                f"User {user_id} in tenant {tenant_id} has roles: {assigned_roles}"
+            )
+
             return UserContext(
                 user_id=user_data["user_id"],
                 tenant_id=user_data["tenant_id"],
                 email=user_data["email"],
-                roles=
+                roles=assigned_roles,
                 attributes=user_data.get("attributes", {}),
             )
         except Exception as e:
             # Log the error and return None to indicate user not found
-            self.logger.warning(
+            self.logger.warning(
+                f"Failed to get user context for {user_id} in tenant {tenant_id}: {e}"
+            )
             return None
 
     def _check_rbac_permission(
```
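The `PermissionCheckNode` changes enforce tenant isolation twice: the user and role queries are filtered by `tenant_id`, and the returned row's `tenant_id` is re-checked against the requested tenant before roles are loaded. A minimal illustration of that defence-in-depth check, using a hypothetical `fetch_user` callable in place of the SDK's database node:

```python
from typing import Any, Callable, Optional


def get_user_for_tenant(
    fetch_user: Callable[[str, str], Optional[dict]],  # hypothetical tenant-scoped lookup
    user_id: str,
    tenant_id: str,
) -> Optional[dict[str, Any]]:
    """Return the user only if it genuinely belongs to the requested tenant."""
    user = fetch_user(user_id, tenant_id)  # the query already filters on tenant_id
    if user is None:
        return None
    # Defence in depth: verify the tenant on the returned row as well, so a
    # mis-scoped query can never leak another tenant's user.
    if user.get("tenant_id") != tenant_id:
        return None
    return user
```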
```diff
@@ -826,39 +857,57 @@ class PermissionCheckNode(Node):
         return permissions
 
     def _get_role_permissions(self, role_id: str, tenant_id: str) -> Set[str]:
-        """Get permissions for a specific role including inherited permissions."""
-        # Query role and its hierarchy
+        """Get permissions for a specific role including inherited permissions with strict tenant isolation."""
+        # Query role and its hierarchy with strict tenant boundaries
         query = """
         WITH RECURSIVE role_hierarchy AS (
-            SELECT role_id, permissions, parent_roles
+            SELECT role_id, permissions, parent_roles, tenant_id
             FROM roles
             WHERE role_id = $1 AND tenant_id = $2 AND is_active = true
 
             UNION ALL
 
-            SELECT r.role_id, r.permissions, r.parent_roles
+            SELECT r.role_id, r.permissions, r.parent_roles, r.tenant_id
             FROM roles r
             JOIN role_hierarchy rh ON r.role_id = ANY(
                 SELECT jsonb_array_elements_text(rh.parent_roles)
             )
-            WHERE r.tenant_id = $
+            WHERE r.tenant_id = $3 AND r.is_active = true
         )
         SELECT DISTINCT unnest(
             CASE
                 WHEN jsonb_typeof(permissions) = 'array'
                 THEN ARRAY(SELECT jsonb_array_elements_text(permissions))
+                WHEN permissions IS NOT NULL AND permissions::text != 'null'
+                THEN ARRAY[permissions::text]
                 ELSE ARRAY[]::text[]
             END
         ) as permission
         FROM role_hierarchy
+        WHERE tenant_id = $4
         """
 
-
-
-
-
+        try:
+            result = self._db_node.run(
+                query=query,
+                parameters=[role_id, tenant_id, tenant_id, tenant_id],
+                result_format="dict",
+            )
+            permission_rows = result.get("data", [])
 
-
+            permissions = {
+                row["permission"] for row in permission_rows if row["permission"]
+            }
+            self.logger.debug(
+                f"Role {role_id} in tenant {tenant_id} has permissions: {permissions}"
+            )
+
+            return permissions
+        except Exception as e:
+            self.logger.warning(
+                f"Failed to get permissions for role {role_id} in tenant {tenant_id}: {e}"
+            )
+            return set()
 
     def _build_permission_explanation(
         self,
```
```diff
@@ -1189,23 +1238,54 @@ class PermissionCheckNode(Node):
         }
 
     def _get_role_direct_permissions(self, role_id: str, tenant_id: str) -> Set[str]:
-        """Get direct permissions for a role (no inheritance)."""
+        """Get direct permissions for a role (no inheritance) with proper format handling."""
         query = """
         SELECT permissions
         FROM roles
         WHERE role_id = $1 AND tenant_id = $2 AND is_active = true
         """
 
-
-
-
-
-
+        try:
+            result = self._db_node.run(
+                query=query, parameters=[role_id, tenant_id], result_format="dict"
+            )
+            role_rows = result.get("data", [])
+            role_data = role_rows[0] if role_rows else None
 
-
-
+            if not role_data:
+                self.logger.debug(f"Role {role_id} not found in tenant {tenant_id}")
+                return set()
 
-
+            permissions_data = role_data.get("permissions", [])
+
+            # Handle different permission storage formats
+            if isinstance(permissions_data, list):
+                permissions = set(permissions_data)
+            elif isinstance(permissions_data, str):
+                try:
+                    # Try to parse as JSON array
+                    import json
+
+                    parsed = json.loads(permissions_data)
+                    permissions = (
+                        set(parsed) if isinstance(parsed, list) else {permissions_data}
+                    )
+                except (json.JSONDecodeError, TypeError):
+                    # Treat as single permission string
+                    permissions = {permissions_data} if permissions_data else set()
+            else:
+                permissions = set()
+
+            self.logger.debug(
+                f"Role {role_id} direct permissions in tenant {tenant_id}: {permissions}"
+            )
+            return permissions
+
+        except Exception as e:
+            self.logger.warning(
+                f"Failed to get direct permissions for role {role_id} in tenant {tenant_id}: {e}"
+            )
+            return set()
 
     def _explain_permission(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
         """Provide detailed explanation of permission logic."""
```
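`_get_role_direct_permissions` now accepts permissions stored either as a JSON array, a JSON-encoded string, or a bare string. A standalone normalizer with the same branching (the storage formats are assumptions, not the SDK's schema):

```python
import json


def normalize_permissions(raw) -> set[str]:
    """Coerce a stored permissions value (list, JSON string, or plain string) into a set."""
    if isinstance(raw, list):
        return set(raw)
    if isinstance(raw, str):
        try:
            parsed = json.loads(raw)
            return set(parsed) if isinstance(parsed, list) else {raw}
        except (json.JSONDecodeError, TypeError):
            return {raw} if raw else set()
    return set()


assert normalize_permissions(["read", "write"]) == {"read", "write"}
assert normalize_permissions('["read"]') == {"read"}
assert normalize_permissions("admin:*") == {"admin:*"}
```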
```diff
@@ -1625,7 +1705,7 @@ class PermissionCheckNode(Node):
         audit_query = """
             INSERT INTO admin_audit_log (
                 user_id, action, resource_type, resource_id,
-                operation,
+                operation, context, success, tenant_id, created_at
             ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
         """
 
```