kailash 0.6.1__py3-none-any.whl → 0.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/core/actors/connection_actor.py +3 -3
- kailash/gateway/api.py +7 -5
- kailash/gateway/enhanced_gateway.py +1 -1
- kailash/middleware/auth/access_control.py +5 -5
- kailash/middleware/gateway/checkpoint_manager.py +45 -8
- kailash/nodes/admin/permission_check.py +110 -30
- kailash/nodes/admin/schema.sql +387 -0
- kailash/nodes/admin/tenant_isolation.py +249 -0
- kailash/nodes/admin/transaction_utils.py +244 -0
- kailash/nodes/admin/user_management.py +37 -9
- kailash/nodes/ai/ai_providers.py +55 -3
- kailash/nodes/ai/llm_agent.py +115 -13
- kailash/nodes/data/sql.py +24 -0
- kailash/resources/registry.py +6 -0
- kailash/runtime/async_local.py +7 -0
- kailash/utils/export.py +152 -0
- kailash/workflow/builder.py +42 -0
- kailash/workflow/graph.py +86 -17
- kailash/workflow/templates.py +4 -9
- {kailash-0.6.1.dist-info → kailash-0.6.2.dist-info}/METADATA +2 -1
- {kailash-0.6.1.dist-info → kailash-0.6.2.dist-info}/RECORD +26 -23
- {kailash-0.6.1.dist-info → kailash-0.6.2.dist-info}/WHEEL +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.2.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.2.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.2.dist-info}/top_level.txt +0 -0
kailash/__init__.py
CHANGED
@@ -47,7 +47,7 @@ class Message:
|
|
47
47
|
type: MessageType = MessageType.QUERY
|
48
48
|
payload: Any = None
|
49
49
|
reply_to: Optional[asyncio.Queue] = None
|
50
|
-
timestamp: datetime = field(default_factory=datetime.utcnow)
|
50
|
+
timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
|
51
51
|
|
52
52
|
|
53
53
|
@dataclass
|
@@ -70,8 +70,8 @@ class ConnectionStats:
|
|
70
70
|
total_execution_time: float = 0.0
|
71
71
|
health_checks_passed: int = 0
|
72
72
|
health_checks_failed: int = 0
|
73
|
-
created_at: datetime = field(default_factory=datetime.utcnow)
|
74
|
-
last_used_at: datetime = field(default_factory=datetime.utcnow)
|
73
|
+
created_at: datetime = field(default_factory=lambda: datetime.now(UTC))
|
74
|
+
last_used_at: datetime = field(default_factory=lambda: datetime.now(UTC))
|
75
75
|
health_score: float = 100.0
|
76
76
|
|
77
77
|
|
kailash/gateway/api.py
CHANGED
@@ -11,7 +11,7 @@ from typing import Any, Dict, List, Optional, Union
|
|
11
11
|
|
12
12
|
from fastapi import APIRouter, BackgroundTasks, Depends, FastAPI, HTTPException
|
13
13
|
from fastapi.responses import JSONResponse
|
14
|
-
from pydantic import BaseModel, Field
|
14
|
+
from pydantic import BaseModel, ConfigDict, Field
|
15
15
|
|
16
16
|
from ..resources.registry import ResourceRegistry
|
17
17
|
from .enhanced_gateway import (
|
@@ -37,14 +37,15 @@ class ResourceReferenceModel(BaseModel):
|
|
37
37
|
None, description="Reference to credentials secret"
|
38
38
|
)
|
39
39
|
|
40
|
-
|
41
|
-
|
40
|
+
model_config = ConfigDict(
|
41
|
+
json_schema_extra={
|
42
42
|
"example": {
|
43
43
|
"type": "database",
|
44
44
|
"config": {"host": "localhost", "port": 5432, "database": "myapp"},
|
45
45
|
"credentials_ref": "db_credentials",
|
46
46
|
}
|
47
47
|
}
|
48
|
+
)
|
48
49
|
|
49
50
|
|
50
51
|
class WorkflowRequestModel(BaseModel):
|
@@ -59,8 +60,8 @@ class WorkflowRequestModel(BaseModel):
|
|
59
60
|
None, description="Additional context variables"
|
60
61
|
)
|
61
62
|
|
62
|
-
|
63
|
-
|
63
|
+
model_config = ConfigDict(
|
64
|
+
json_schema_extra={
|
64
65
|
"example": {
|
65
66
|
"inputs": {"user_id": 123, "action": "process"},
|
66
67
|
"resources": {
|
@@ -74,6 +75,7 @@ class WorkflowRequestModel(BaseModel):
|
|
74
75
|
"context": {"environment": "production", "trace_id": "abc123"},
|
75
76
|
}
|
76
77
|
}
|
78
|
+
)
|
77
79
|
|
78
80
|
|
79
81
|
class WorkflowResponseModel(BaseModel):
|
@@ -40,7 +40,7 @@ class WorkflowRequest:
|
|
40
40
|
inputs: Dict[str, Any] = field(default_factory=dict)
|
41
41
|
resources: Dict[str, Union[str, ResourceReference]] = field(default_factory=dict)
|
42
42
|
context: Dict[str, Any] = field(default_factory=dict)
|
43
|
-
timestamp: datetime = field(default_factory=datetime.utcnow)
|
43
|
+
timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
|
44
44
|
|
45
45
|
def to_dict(self) -> Dict[str, Any]:
|
46
46
|
"""Convert to JSON-serializable dict."""
|
@@ -60,11 +60,11 @@ class MiddlewareAccessControlManager:
|
|
60
60
|
self.enable_audit = enable_audit
|
61
61
|
|
62
62
|
# Kailash nodes for operations
|
63
|
-
self.user_mgmt_node = UserManagementNode(
|
64
|
-
self.role_mgmt_node = RoleManagementNode(
|
65
|
-
self.permission_check_node = PermissionCheckNode(
|
66
|
-
self.audit_node = AuditLogNode(
|
67
|
-
self.security_event_node = SecurityEventNode(
|
63
|
+
self.user_mgmt_node = UserManagementNode()
|
64
|
+
self.role_mgmt_node = RoleManagementNode()
|
65
|
+
self.permission_check_node = PermissionCheckNode()
|
66
|
+
self.audit_node = AuditLogNode() if enable_audit else None
|
67
|
+
self.security_event_node = SecurityEventNode()
|
68
68
|
|
69
69
|
async def check_session_access(
|
70
70
|
self, user_context: UserContext, session_id: str, action: str = "access"
|
@@ -175,8 +175,32 @@ class CheckpointManager:
|
|
175
175
|
compression_enabled: bool = True,
|
176
176
|
compression_threshold_bytes: int = 1024, # 1KB
|
177
177
|
retention_hours: int = 24,
|
178
|
+
# Backward compatibility parameter
|
179
|
+
storage: Optional[DiskStorage] = None,
|
178
180
|
):
|
179
|
-
"""Initialize checkpoint manager.
|
181
|
+
"""Initialize checkpoint manager.
|
182
|
+
|
183
|
+
Args:
|
184
|
+
memory_storage: Memory storage backend (optional)
|
185
|
+
disk_storage: Disk storage backend (optional)
|
186
|
+
cloud_storage: Cloud storage backend (optional)
|
187
|
+
compression_enabled: Enable compression for large checkpoints
|
188
|
+
compression_threshold_bytes: Minimum size for compression
|
189
|
+
retention_hours: Hours to retain checkpoints
|
190
|
+
storage: DEPRECATED - Use disk_storage instead
|
191
|
+
"""
|
192
|
+
# Handle backward compatibility
|
193
|
+
if storage is not None:
|
194
|
+
import warnings
|
195
|
+
|
196
|
+
warnings.warn(
|
197
|
+
"The 'storage' parameter is deprecated. Use 'disk_storage' instead.",
|
198
|
+
DeprecationWarning,
|
199
|
+
stacklevel=2,
|
200
|
+
)
|
201
|
+
if disk_storage is None:
|
202
|
+
disk_storage = storage
|
203
|
+
|
180
204
|
self.memory_storage = memory_storage or MemoryStorage()
|
181
205
|
self.disk_storage = disk_storage or DiskStorage()
|
182
206
|
self.cloud_storage = cloud_storage # Optional cloud backend
|
@@ -189,11 +213,23 @@ class CheckpointManager:
|
|
189
213
|
self.load_count = 0
|
190
214
|
self.compression_ratio_sum = 0.0
|
191
215
|
|
192
|
-
#
|
193
|
-
self._gc_task =
|
216
|
+
# Initialize garbage collection task (will be started when first used)
|
217
|
+
self._gc_task = None
|
218
|
+
self._gc_started = False
|
219
|
+
|
220
|
+
def _ensure_gc_started(self):
|
221
|
+
"""Ensure garbage collection task is started (lazy initialization)."""
|
222
|
+
if not self._gc_started:
|
223
|
+
try:
|
224
|
+
self._gc_task = asyncio.create_task(self._garbage_collection_loop())
|
225
|
+
self._gc_started = True
|
226
|
+
except RuntimeError:
|
227
|
+
# No event loop running, GC will be started later
|
228
|
+
pass
|
194
229
|
|
195
230
|
async def save_checkpoint(self, checkpoint: Checkpoint) -> None:
|
196
231
|
"""Save checkpoint to storage."""
|
232
|
+
self._ensure_gc_started()
|
197
233
|
start_time = time.time()
|
198
234
|
|
199
235
|
# Serialize checkpoint
|
@@ -391,8 +427,9 @@ class CheckpointManager:
|
|
391
427
|
|
392
428
|
async def close(self) -> None:
|
393
429
|
"""Close checkpoint manager and cleanup."""
|
394
|
-
self._gc_task
|
395
|
-
|
396
|
-
|
397
|
-
|
398
|
-
|
430
|
+
if self._gc_task is not None:
|
431
|
+
self._gc_task.cancel()
|
432
|
+
try:
|
433
|
+
await self._gc_task
|
434
|
+
except asyncio.CancelledError:
|
435
|
+
pass
|
@@ -721,36 +721,67 @@ class PermissionCheckNode(Node):
|
|
721
721
|
}
|
722
722
|
|
723
723
|
def _get_user_context(self, user_id: str, tenant_id: str) -> Optional[UserContext]:
|
724
|
-
"""Get user context for permission evaluation."""
|
725
|
-
# Query user data from unified admin schema
|
726
|
-
|
727
|
-
SELECT user_id, email,
|
724
|
+
"""Get user context for permission evaluation with strict tenant isolation."""
|
725
|
+
# Query user data and assigned roles from unified admin schema
|
726
|
+
user_query = """
|
727
|
+
SELECT user_id, email, attributes, status, tenant_id
|
728
728
|
FROM users
|
729
729
|
WHERE user_id = $1 AND tenant_id = $2 AND status = 'active'
|
730
730
|
"""
|
731
731
|
|
732
|
+
# Get assigned roles from user_role_assignments table with strict tenant isolation
|
733
|
+
roles_query = """
|
734
|
+
SELECT role_id
|
735
|
+
FROM user_role_assignments
|
736
|
+
WHERE user_id = $1 AND tenant_id = $2 AND is_active = true
|
737
|
+
"""
|
738
|
+
|
732
739
|
try:
|
733
|
-
|
734
|
-
|
740
|
+
# Get user data - strict tenant check
|
741
|
+
user_result = self._db_node.run(
|
742
|
+
query=user_query, parameters=[user_id, tenant_id], result_format="dict"
|
735
743
|
)
|
736
744
|
|
737
|
-
|
738
|
-
user_rows = result.get("data", [])
|
745
|
+
user_rows = user_result.get("data", [])
|
739
746
|
if not user_rows:
|
747
|
+
# User not found in this tenant - strict tenant isolation
|
748
|
+
self.logger.debug(f"User {user_id} not found in tenant {tenant_id}")
|
740
749
|
return None
|
741
750
|
|
742
751
|
user_data = user_rows[0]
|
743
752
|
|
753
|
+
# Verify tenant isolation - ensure user belongs to the requested tenant
|
754
|
+
if user_data.get("tenant_id") != tenant_id:
|
755
|
+
self.logger.warning(
|
756
|
+
f"Tenant isolation violation: User {user_id} belongs to {user_data.get('tenant_id')} but permission check requested for {tenant_id}"
|
757
|
+
)
|
758
|
+
return None
|
759
|
+
|
760
|
+
# Get assigned roles - also with strict tenant isolation
|
761
|
+
roles_result = self._db_node.run(
|
762
|
+
query=roles_query, parameters=[user_id, tenant_id], result_format="dict"
|
763
|
+
)
|
764
|
+
|
765
|
+
role_rows = roles_result.get("data", [])
|
766
|
+
assigned_roles = [row["role_id"] for row in role_rows]
|
767
|
+
|
768
|
+
# Log for debugging tenant isolation
|
769
|
+
self.logger.debug(
|
770
|
+
f"User {user_id} in tenant {tenant_id} has roles: {assigned_roles}"
|
771
|
+
)
|
772
|
+
|
744
773
|
return UserContext(
|
745
774
|
user_id=user_data["user_id"],
|
746
775
|
tenant_id=user_data["tenant_id"],
|
747
776
|
email=user_data["email"],
|
748
|
-
roles=
|
777
|
+
roles=assigned_roles,
|
749
778
|
attributes=user_data.get("attributes", {}),
|
750
779
|
)
|
751
780
|
except Exception as e:
|
752
781
|
# Log the error and return None to indicate user not found
|
753
|
-
self.logger.warning(
|
782
|
+
self.logger.warning(
|
783
|
+
f"Failed to get user context for {user_id} in tenant {tenant_id}: {e}"
|
784
|
+
)
|
754
785
|
return None
|
755
786
|
|
756
787
|
def _check_rbac_permission(
|
@@ -826,39 +857,57 @@ class PermissionCheckNode(Node):
|
|
826
857
|
return permissions
|
827
858
|
|
828
859
|
def _get_role_permissions(self, role_id: str, tenant_id: str) -> Set[str]:
|
829
|
-
"""Get permissions for a specific role including inherited permissions."""
|
830
|
-
# Query role and its hierarchy
|
860
|
+
"""Get permissions for a specific role including inherited permissions with strict tenant isolation."""
|
861
|
+
# Query role and its hierarchy with strict tenant boundaries
|
831
862
|
query = """
|
832
863
|
WITH RECURSIVE role_hierarchy AS (
|
833
|
-
SELECT role_id, permissions, parent_roles
|
864
|
+
SELECT role_id, permissions, parent_roles, tenant_id
|
834
865
|
FROM roles
|
835
866
|
WHERE role_id = $1 AND tenant_id = $2 AND is_active = true
|
836
867
|
|
837
868
|
UNION ALL
|
838
869
|
|
839
|
-
SELECT r.role_id, r.permissions, r.parent_roles
|
870
|
+
SELECT r.role_id, r.permissions, r.parent_roles, r.tenant_id
|
840
871
|
FROM roles r
|
841
872
|
JOIN role_hierarchy rh ON r.role_id = ANY(
|
842
873
|
SELECT jsonb_array_elements_text(rh.parent_roles)
|
843
874
|
)
|
844
|
-
WHERE r.tenant_id = $
|
875
|
+
WHERE r.tenant_id = $3 AND r.is_active = true
|
845
876
|
)
|
846
877
|
SELECT DISTINCT unnest(
|
847
878
|
CASE
|
848
879
|
WHEN jsonb_typeof(permissions) = 'array'
|
849
880
|
THEN ARRAY(SELECT jsonb_array_elements_text(permissions))
|
881
|
+
WHEN permissions IS NOT NULL AND permissions::text != 'null'
|
882
|
+
THEN ARRAY[permissions::text]
|
850
883
|
ELSE ARRAY[]::text[]
|
851
884
|
END
|
852
885
|
) as permission
|
853
886
|
FROM role_hierarchy
|
887
|
+
WHERE tenant_id = $4
|
854
888
|
"""
|
855
889
|
|
856
|
-
|
857
|
-
|
858
|
-
|
859
|
-
|
890
|
+
try:
|
891
|
+
result = self._db_node.run(
|
892
|
+
query=query,
|
893
|
+
parameters=[role_id, tenant_id, tenant_id, tenant_id],
|
894
|
+
result_format="dict",
|
895
|
+
)
|
896
|
+
permission_rows = result.get("data", [])
|
860
897
|
|
861
|
-
|
898
|
+
permissions = {
|
899
|
+
row["permission"] for row in permission_rows if row["permission"]
|
900
|
+
}
|
901
|
+
self.logger.debug(
|
902
|
+
f"Role {role_id} in tenant {tenant_id} has permissions: {permissions}"
|
903
|
+
)
|
904
|
+
|
905
|
+
return permissions
|
906
|
+
except Exception as e:
|
907
|
+
self.logger.warning(
|
908
|
+
f"Failed to get permissions for role {role_id} in tenant {tenant_id}: {e}"
|
909
|
+
)
|
910
|
+
return set()
|
862
911
|
|
863
912
|
def _build_permission_explanation(
|
864
913
|
self,
|
@@ -1189,23 +1238,54 @@ class PermissionCheckNode(Node):
|
|
1189
1238
|
}
|
1190
1239
|
|
1191
1240
|
def _get_role_direct_permissions(self, role_id: str, tenant_id: str) -> Set[str]:
|
1192
|
-
"""Get direct permissions for a role (no inheritance)."""
|
1241
|
+
"""Get direct permissions for a role (no inheritance) with proper format handling."""
|
1193
1242
|
query = """
|
1194
1243
|
SELECT permissions
|
1195
1244
|
FROM roles
|
1196
1245
|
WHERE role_id = $1 AND tenant_id = $2 AND is_active = true
|
1197
1246
|
"""
|
1198
1247
|
|
1199
|
-
|
1200
|
-
|
1201
|
-
|
1202
|
-
|
1203
|
-
|
1248
|
+
try:
|
1249
|
+
result = self._db_node.run(
|
1250
|
+
query=query, parameters=[role_id, tenant_id], result_format="dict"
|
1251
|
+
)
|
1252
|
+
role_rows = result.get("data", [])
|
1253
|
+
role_data = role_rows[0] if role_rows else None
|
1204
1254
|
|
1205
|
-
|
1206
|
-
|
1255
|
+
if not role_data:
|
1256
|
+
self.logger.debug(f"Role {role_id} not found in tenant {tenant_id}")
|
1257
|
+
return set()
|
1207
1258
|
|
1208
|
-
|
1259
|
+
permissions_data = role_data.get("permissions", [])
|
1260
|
+
|
1261
|
+
# Handle different permission storage formats
|
1262
|
+
if isinstance(permissions_data, list):
|
1263
|
+
permissions = set(permissions_data)
|
1264
|
+
elif isinstance(permissions_data, str):
|
1265
|
+
try:
|
1266
|
+
# Try to parse as JSON array
|
1267
|
+
import json
|
1268
|
+
|
1269
|
+
parsed = json.loads(permissions_data)
|
1270
|
+
permissions = (
|
1271
|
+
set(parsed) if isinstance(parsed, list) else {permissions_data}
|
1272
|
+
)
|
1273
|
+
except (json.JSONDecodeError, TypeError):
|
1274
|
+
# Treat as single permission string
|
1275
|
+
permissions = {permissions_data} if permissions_data else set()
|
1276
|
+
else:
|
1277
|
+
permissions = set()
|
1278
|
+
|
1279
|
+
self.logger.debug(
|
1280
|
+
f"Role {role_id} direct permissions in tenant {tenant_id}: {permissions}"
|
1281
|
+
)
|
1282
|
+
return permissions
|
1283
|
+
|
1284
|
+
except Exception as e:
|
1285
|
+
self.logger.warning(
|
1286
|
+
f"Failed to get direct permissions for role {role_id} in tenant {tenant_id}: {e}"
|
1287
|
+
)
|
1288
|
+
return set()
|
1209
1289
|
|
1210
1290
|
def _explain_permission(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
|
1211
1291
|
"""Provide detailed explanation of permission logic."""
|
@@ -1625,7 +1705,7 @@ class PermissionCheckNode(Node):
|
|
1625
1705
|
audit_query = """
|
1626
1706
|
INSERT INTO admin_audit_log (
|
1627
1707
|
user_id, action, resource_type, resource_id,
|
1628
|
-
operation,
|
1708
|
+
operation, context, success, tenant_id, created_at
|
1629
1709
|
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
1630
1710
|
"""
|
1631
1711
|
|