kailash 0.3.2__py3-none-any.whl → 0.4.0__py3-none-any.whl
- kailash/__init__.py +33 -1
- kailash/access_control/__init__.py +129 -0
- kailash/access_control/managers.py +461 -0
- kailash/access_control/rule_evaluators.py +467 -0
- kailash/access_control_abac.py +825 -0
- kailash/config/__init__.py +27 -0
- kailash/config/database_config.py +359 -0
- kailash/database/__init__.py +28 -0
- kailash/database/execution_pipeline.py +499 -0
- kailash/middleware/__init__.py +306 -0
- kailash/middleware/auth/__init__.py +33 -0
- kailash/middleware/auth/access_control.py +436 -0
- kailash/middleware/auth/auth_manager.py +422 -0
- kailash/middleware/auth/jwt_auth.py +477 -0
- kailash/middleware/auth/kailash_jwt_auth.py +616 -0
- kailash/middleware/communication/__init__.py +37 -0
- kailash/middleware/communication/ai_chat.py +989 -0
- kailash/middleware/communication/api_gateway.py +802 -0
- kailash/middleware/communication/events.py +470 -0
- kailash/middleware/communication/realtime.py +710 -0
- kailash/middleware/core/__init__.py +21 -0
- kailash/middleware/core/agent_ui.py +890 -0
- kailash/middleware/core/schema.py +643 -0
- kailash/middleware/core/workflows.py +396 -0
- kailash/middleware/database/__init__.py +63 -0
- kailash/middleware/database/base.py +113 -0
- kailash/middleware/database/base_models.py +525 -0
- kailash/middleware/database/enums.py +106 -0
- kailash/middleware/database/migrations.py +12 -0
- kailash/{api/database.py → middleware/database/models.py} +183 -291
- kailash/middleware/database/repositories.py +685 -0
- kailash/middleware/database/session_manager.py +19 -0
- kailash/middleware/mcp/__init__.py +38 -0
- kailash/middleware/mcp/client_integration.py +585 -0
- kailash/middleware/mcp/enhanced_server.py +576 -0
- kailash/nodes/__init__.py +25 -3
- kailash/nodes/admin/__init__.py +35 -0
- kailash/nodes/admin/audit_log.py +794 -0
- kailash/nodes/admin/permission_check.py +864 -0
- kailash/nodes/admin/role_management.py +823 -0
- kailash/nodes/admin/security_event.py +1519 -0
- kailash/nodes/admin/user_management.py +944 -0
- kailash/nodes/ai/a2a.py +24 -7
- kailash/nodes/ai/ai_providers.py +1 -0
- kailash/nodes/ai/embedding_generator.py +11 -11
- kailash/nodes/ai/intelligent_agent_orchestrator.py +99 -11
- kailash/nodes/ai/llm_agent.py +407 -2
- kailash/nodes/ai/self_organizing.py +85 -10
- kailash/nodes/api/auth.py +287 -6
- kailash/nodes/api/rest.py +151 -0
- kailash/nodes/auth/__init__.py +17 -0
- kailash/nodes/auth/directory_integration.py +1228 -0
- kailash/nodes/auth/enterprise_auth_provider.py +1328 -0
- kailash/nodes/auth/mfa.py +2338 -0
- kailash/nodes/auth/risk_assessment.py +872 -0
- kailash/nodes/auth/session_management.py +1093 -0
- kailash/nodes/auth/sso.py +1040 -0
- kailash/nodes/base.py +344 -13
- kailash/nodes/base_cycle_aware.py +4 -2
- kailash/nodes/base_with_acl.py +1 -1
- kailash/nodes/code/python.py +283 -10
- kailash/nodes/compliance/__init__.py +9 -0
- kailash/nodes/compliance/data_retention.py +1888 -0
- kailash/nodes/compliance/gdpr.py +2004 -0
- kailash/nodes/data/__init__.py +22 -2
- kailash/nodes/data/async_connection.py +469 -0
- kailash/nodes/data/async_sql.py +757 -0
- kailash/nodes/data/async_vector.py +598 -0
- kailash/nodes/data/readers.py +767 -0
- kailash/nodes/data/retrieval.py +360 -1
- kailash/nodes/data/sharepoint_graph.py +397 -21
- kailash/nodes/data/sql.py +94 -5
- kailash/nodes/data/streaming.py +68 -8
- kailash/nodes/data/vector_db.py +54 -4
- kailash/nodes/enterprise/__init__.py +13 -0
- kailash/nodes/enterprise/batch_processor.py +741 -0
- kailash/nodes/enterprise/data_lineage.py +497 -0
- kailash/nodes/logic/convergence.py +31 -9
- kailash/nodes/logic/operations.py +14 -3
- kailash/nodes/mixins/__init__.py +8 -0
- kailash/nodes/mixins/event_emitter.py +201 -0
- kailash/nodes/mixins/mcp.py +9 -4
- kailash/nodes/mixins/security.py +165 -0
- kailash/nodes/monitoring/__init__.py +7 -0
- kailash/nodes/monitoring/performance_benchmark.py +2497 -0
- kailash/nodes/rag/__init__.py +284 -0
- kailash/nodes/rag/advanced.py +1615 -0
- kailash/nodes/rag/agentic.py +773 -0
- kailash/nodes/rag/conversational.py +999 -0
- kailash/nodes/rag/evaluation.py +875 -0
- kailash/nodes/rag/federated.py +1188 -0
- kailash/nodes/rag/graph.py +721 -0
- kailash/nodes/rag/multimodal.py +671 -0
- kailash/nodes/rag/optimized.py +933 -0
- kailash/nodes/rag/privacy.py +1059 -0
- kailash/nodes/rag/query_processing.py +1335 -0
- kailash/nodes/rag/realtime.py +764 -0
- kailash/nodes/rag/registry.py +547 -0
- kailash/nodes/rag/router.py +837 -0
- kailash/nodes/rag/similarity.py +1854 -0
- kailash/nodes/rag/strategies.py +566 -0
- kailash/nodes/rag/workflows.py +575 -0
- kailash/nodes/security/__init__.py +19 -0
- kailash/nodes/security/abac_evaluator.py +1411 -0
- kailash/nodes/security/audit_log.py +91 -0
- kailash/nodes/security/behavior_analysis.py +1893 -0
- kailash/nodes/security/credential_manager.py +401 -0
- kailash/nodes/security/rotating_credentials.py +760 -0
- kailash/nodes/security/security_event.py +132 -0
- kailash/nodes/security/threat_detection.py +1103 -0
- kailash/nodes/testing/__init__.py +9 -0
- kailash/nodes/testing/credential_testing.py +499 -0
- kailash/nodes/transform/__init__.py +10 -2
- kailash/nodes/transform/chunkers.py +592 -1
- kailash/nodes/transform/processors.py +484 -14
- kailash/nodes/validation.py +321 -0
- kailash/runtime/access_controlled.py +1 -1
- kailash/runtime/async_local.py +41 -7
- kailash/runtime/docker.py +1 -1
- kailash/runtime/local.py +474 -55
- kailash/runtime/parallel.py +1 -1
- kailash/runtime/parallel_cyclic.py +1 -1
- kailash/runtime/testing.py +210 -2
- kailash/utils/migrations/__init__.py +25 -0
- kailash/utils/migrations/generator.py +433 -0
- kailash/utils/migrations/models.py +231 -0
- kailash/utils/migrations/runner.py +489 -0
- kailash/utils/secure_logging.py +342 -0
- kailash/workflow/__init__.py +16 -0
- kailash/workflow/cyclic_runner.py +3 -4
- kailash/workflow/graph.py +70 -2
- kailash/workflow/resilience.py +249 -0
- kailash/workflow/templates.py +726 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/METADATA +253 -20
- kailash-0.4.0.dist-info/RECORD +223 -0
- kailash/api/__init__.py +0 -17
- kailash/api/__main__.py +0 -6
- kailash/api/studio_secure.py +0 -893
- kailash/mcp/__main__.py +0 -13
- kailash/mcp/server_new.py +0 -336
- kailash/mcp/servers/__init__.py +0 -12
- kailash-0.3.2.dist-info/RECORD +0 -136
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/WHEEL +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.2.dist-info → kailash-0.4.0.dist-info}/top_level.txt +0 -0
kailash/runtime/local.py
CHANGED
@@ -1,47 +1,43 @@
-"""
+"""Unified Runtime Engine with Enterprise Capabilities.
 
-This module provides a
-
-
-
+This module provides a unified, production-ready execution engine that seamlessly
+integrates all enterprise features through the composable node architecture. It
+combines sync/async execution, enterprise security, monitoring, and resource
+management - all implemented through existing enterprise nodes and SDK patterns.
 
 Examples:
-    Basic workflow execution:
+    Basic workflow execution (backward compatible):
 
     >>> from kailash.runtime.local import LocalRuntime
     >>> runtime = LocalRuntime(debug=True, enable_cycles=True)
-    >>> results = runtime.execute(workflow, parameters={"input": "data"})
+    >>> results, run_id = runtime.execute(workflow, parameters={"input": "data"})
 
-
+    Enterprise configuration with security:
 
-    >>> from kailash.
-    >>>
-    >>>
-
-    ...
-    ...
-    ...     parameters={"initial_value": 10}
+    >>> from kailash.access_control import UserContext
+    >>> user_context = UserContext(user_id="user123", roles=["analyst"])
+    >>> runtime = LocalRuntime(
+    ...     user_context=user_context,
+    ...     enable_monitoring=True,
+    ...     enable_security=True
     ... )
-    >>>
-    >>> tasks = task_manager.get_tasks_for_workflow(workflow.workflow_id)
-    >>> metrics = task_manager.get_performance_summary()
+    >>> results, run_id = runtime.execute(workflow, parameters={"data": input_data})
 
-
+    Full enterprise features:
 
     >>> runtime = LocalRuntime(
-    ...
-    ...
-    ...
-
-    ...
-    ...     parameters=input_params,
-    ...     run_id="production_run_001"
+    ...     enable_async=True,  # Async node execution
+    ...     enable_monitoring=True,  # Performance tracking
+    ...     enable_security=True,  # Access control
+    ...     enable_audit=True,  # Compliance logging
+    ...     max_concurrency=10  # Parallel execution
     ... )
 """
 
+import asyncio
 import logging
 from datetime import UTC, datetime
-from typing import Any
+from typing import Any, Optional
 
 import networkx as nx
 
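The docstring above surfaces a breaking change in this release: execute() now returns a (results, run_id) tuple rather than a bare results dict. A minimal migration sketch, assuming `workflow` has already been built as in the docstring examples:

    from kailash.runtime.local import LocalRuntime

    runtime = LocalRuntime(debug=True, enable_cycles=True)

    # 0.3.x callers did: results = runtime.execute(workflow, ...)
    # 0.4.0 callers unpack the tuple; run_id may be None when tracking is off.
    results, run_id = runtime.execute(workflow, parameters={"input": "data"})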
@@ -61,39 +57,217 @@ logger = logging.getLogger(__name__)
 
 
 class LocalRuntime:
-    """
-
-    This class provides a
-    handles both traditional workflows and advanced cyclic patterns
+    """Unified runtime with enterprise capabilities.
+
+    This class provides a comprehensive, production-ready execution engine that
+    seamlessly handles both traditional workflows and advanced cyclic patterns,
+    with full enterprise feature integration through composable nodes.
+
+    Enterprise Features (Composably Integrated):
+    - Access control via existing AccessControlManager and security nodes
+    - Real-time monitoring via TaskManager and MetricsCollector
+    - Audit logging via AuditLogNode and SecurityEventNode
+    - Resource management via enterprise monitoring nodes
+    - Async execution support for AsyncNode instances
+    - Performance optimization via PerformanceBenchmarkNode
     """
 
-    def __init__(
-
+    def __init__(
+        self,
+        debug: bool = False,
+        enable_cycles: bool = True,
+        enable_async: bool = True,
+        max_concurrency: int = 10,
+        user_context: Optional[Any] = None,
+        enable_monitoring: bool = True,
+        enable_security: bool = False,
+        enable_audit: bool = False,
+        resource_limits: Optional[dict[str, Any]] = None,
+    ):
+        """Initialize the unified runtime.
 
         Args:
             debug: Whether to enable debug logging.
             enable_cycles: Whether to enable cyclic workflow support.
+            enable_async: Whether to enable async execution for async nodes.
+            max_concurrency: Maximum concurrent async operations.
+            user_context: User context for access control (optional).
+            enable_monitoring: Whether to enable performance monitoring.
+            enable_security: Whether to enable security features.
+            enable_audit: Whether to enable audit logging.
+            resource_limits: Resource limits (memory_mb, cpu_cores, etc.).
         """
         self.debug = debug
         self.enable_cycles = enable_cycles
+        self.enable_async = enable_async
+        self.max_concurrency = max_concurrency
+        self.user_context = user_context
+        self.enable_monitoring = enable_monitoring
+        self.enable_security = enable_security
+        self.enable_audit = enable_audit
+        self.resource_limits = resource_limits or {}
         self.logger = logger
 
+        # Enterprise feature managers (lazy initialization)
+        self._access_control_manager = None
+
         # Initialize cyclic workflow executor if enabled
         if enable_cycles:
             self.cyclic_executor = CyclicWorkflowExecutor()
 
+        # Configure logging
         if debug:
             self.logger.setLevel(logging.DEBUG)
         else:
             self.logger.setLevel(logging.INFO)
 
+        # Enterprise execution context
+        self._execution_context = {
+            "security_enabled": enable_security,
+            "monitoring_enabled": enable_monitoring,
+            "audit_enabled": enable_audit,
+            "async_enabled": enable_async,
+            "resource_limits": self.resource_limits,
+            "user_context": user_context,
+        }
+
     def execute(
         self,
         workflow: Workflow,
         task_manager: TaskManager | None = None,
         parameters: dict[str, dict[str, Any]] | None = None,
     ) -> tuple[dict[str, Any], str | None]:
-        """Execute a workflow
+        """Execute a workflow with unified enterprise capabilities.
+
+        Args:
+            workflow: Workflow to execute.
+            task_manager: Optional task manager for tracking.
+            parameters: Optional parameter overrides per node.
+
+        Returns:
+            Tuple of (results dict, run_id).
+
+        Raises:
+            RuntimeExecutionError: If execution fails.
+            WorkflowValidationError: If workflow is invalid.
+            PermissionError: If access control denies execution.
+        """
+        # For backward compatibility, run the async version in a sync wrapper
+        try:
+            # Check if we're already in an event loop
+            loop = asyncio.get_running_loop()
+            # If we're in an event loop, run synchronously instead
+            return self._execute_sync(
+                workflow=workflow, task_manager=task_manager, parameters=parameters
+            )
+        except RuntimeError:
+            # No event loop running, safe to use asyncio.run
+            return asyncio.run(
+                self._execute_async(
+                    workflow=workflow, task_manager=task_manager, parameters=parameters
+                )
+            )
+
+    async def execute_async(
+        self,
+        workflow: Workflow,
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
+        """Execute a workflow asynchronously (for AsyncLocalRuntime compatibility).
+
+        Args:
+            workflow: Workflow to execute.
+            task_manager: Optional task manager for tracking.
+            parameters: Optional parameter overrides per node.
+
+        Returns:
+            Tuple of (results dict, run_id).
+
+        Raises:
+            RuntimeExecutionError: If execution fails.
+            WorkflowValidationError: If workflow is invalid.
+            PermissionError: If access control denies execution.
+        """
+        return await self._execute_async(
+            workflow=workflow, task_manager=task_manager, parameters=parameters
+        )
+
+    def _execute_sync(
+        self,
+        workflow: Workflow,
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
+        """Execute workflow synchronously when already in an event loop.
+
+        This method creates a new event loop in a separate thread to avoid
+        conflicts with existing event loops. This ensures backward compatibility
+        when LocalRuntime.execute() is called from within async contexts.
+
+        Args:
+            workflow: Workflow to execute.
+            task_manager: Optional task manager for tracking.
+            parameters: Optional parameter overrides per node.
+
+        Returns:
+            Tuple of (results dict, run_id).
+
+        Raises:
+            RuntimeExecutionError: If execution fails.
+            WorkflowValidationError: If workflow is invalid.
+        """
+        # Create new event loop for sync execution
+        import threading
+
+        result_container = []
+        exception_container = []
+
+        def run_in_thread():
+            """Run async execution in separate thread."""
+            loop = None
+            try:
+                # Create new event loop in thread
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+                result = loop.run_until_complete(
+                    self._execute_async(
+                        workflow=workflow,
+                        task_manager=task_manager,
+                        parameters=parameters,
+                    )
+                )
+                result_container.append(result)
+            except Exception as e:
+                exception_container.append(e)
+            finally:
+                if loop:
+                    loop.close()
+
+        thread = threading.Thread(target=run_in_thread)
+        thread.start()
+        thread.join()
+
+        if exception_container:
+            raise exception_container[0]
+
+        return result_container[0]
+
+    async def _execute_async(
+        self,
+        workflow: Workflow,
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
+        """Core async execution implementation with enterprise features.
+
+        This method orchestrates the entire workflow execution including:
+        - Security checks via AccessControlManager (if enabled)
+        - Audit logging via AuditLogNode (if enabled)
+        - Performance monitoring via TaskManager/MetricsCollector
+        - Async node detection and execution
+        - Resource limit enforcement
+        - Error handling and recovery
 
         Args:
             workflow: Workflow to execute.
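The wrapper logic above gives the runtime three entry points: execute() for plain synchronous callers, _execute_sync() (a worker thread with its own event loop) when execute() is invoked while a loop is already running, and execute_async() for callers that can await. A short sketch of the two supported call styles, assuming `runtime` and `workflow` from the earlier examples:

    import asyncio

    # Synchronous context: no loop is running, so execute() falls through
    # to asyncio.run() internally.
    results, run_id = runtime.execute(workflow)

    # Async context: await execute_async() directly rather than calling
    # execute(), which would otherwise detour through a worker thread.
    async def main():
        return await runtime.execute_async(workflow)

    results, run_id = asyncio.run(main())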
@@ -106,6 +280,7 @@ class LocalRuntime:
         Raises:
             RuntimeExecutionError: If execution fails.
             WorkflowValidationError: If workflow is invalid.
+            PermissionError: If access control denies execution.
         """
         if not workflow:
             raise RuntimeExecutionError("No workflow provided")
@@ -113,10 +288,28 @@
         run_id = None
 
         try:
+            # Enterprise Security Check: Validate user access to workflow
+            if self.enable_security and self.user_context:
+                self._check_workflow_access(workflow)
+
             # Validate workflow with runtime parameters (Session 061)
             workflow.validate(runtime_parameters=parameters)
 
-            #
+            # Enterprise Audit: Log workflow execution start
+            if self.enable_audit:
+                await self._log_audit_event_async(
+                    "workflow_execution_start",
+                    {
+                        "workflow_id": workflow.workflow_id,
+                        "user_context": self._serialize_user_context(),
+                        "parameters": parameters,
+                    },
+                )
+
+            # Initialize enhanced tracking with enterprise context
+            if task_manager is None and self.enable_monitoring:
+                task_manager = TaskManager()
+
             if task_manager:
                 try:
                     run_id = task_manager.create_run(
@@ -124,7 +317,9 @@
                         metadata={
                             "parameters": parameters,
                             "debug": self.debug,
-                            "runtime": "
+                            "runtime": "unified_enterprise",
+                            "enterprise_features": self._execution_context,
+                            "user_context": self._serialize_user_context(),
                         },
                     )
                 except Exception as e:
@@ -151,17 +346,30 @@
                         f"Cyclic workflow execution failed: {e}"
                     ) from e
             else:
-                # Execute standard DAG workflow
+                # Execute standard DAG workflow with enterprise features
                 self.logger.info(
-                    "Standard DAG workflow detected, using
+                    "Standard DAG workflow detected, using unified enterprise execution"
                 )
-                results = self.
+                results = await self._execute_workflow_async(
                     workflow=workflow,
                     task_manager=task_manager,
                     run_id=run_id,
                     parameters=parameters or {},
                 )
 
+            # Enterprise Audit: Log successful completion
+            if self.enable_audit:
+                await self._log_audit_event_async(
+                    "workflow_execution_completed",
+                    {
+                        "workflow_id": workflow.workflow_id,
+                        "run_id": run_id,
+                        "result_summary": {
+                            k: type(v).__name__ for k, v in results.items()
+                        },
+                    },
+                )
+
             # Mark run as completed
             if task_manager and run_id:
                 try:
@@ -172,6 +380,15 @@
             return results, run_id
 
         except WorkflowValidationError:
+            # Enterprise Audit: Log validation failure
+            if self.enable_audit:
+                await self._log_audit_event_async(
+                    "workflow_validation_failed",
+                    {
+                        "workflow_id": workflow.workflow_id,
+                        "error": "Validation failed",
+                    },
+                )
             # Re-raise validation errors as-is
             if task_manager and run_id:
                 try:
@@ -181,7 +398,34 @@
                 except Exception:
                     pass
             raise
+        except PermissionError as e:
+            # Enterprise Audit: Log access denial
+            if self.enable_audit:
+                await self._log_audit_event_async(
+                    "workflow_access_denied",
+                    {
+                        "workflow_id": workflow.workflow_id,
+                        "user_context": self._serialize_user_context(),
+                        "error": str(e),
+                    },
+                )
+            # Re-raise permission errors as-is
+            if task_manager and run_id:
+                try:
+                    task_manager.update_run_status(run_id, "failed", error=str(e))
+                except Exception:
+                    pass
+            raise
         except Exception as e:
+            # Enterprise Audit: Log execution failure
+            if self.enable_audit:
+                await self._log_audit_event_async(
+                    "workflow_execution_failed",
+                    {
+                        "workflow_id": workflow.workflow_id,
+                        "error": str(e),
+                    },
+                )
             # Mark run as failed
             if task_manager and run_id:
                 try:
@@ -191,10 +435,10 @@
 
             # Wrap other errors in RuntimeExecutionError
             raise RuntimeExecutionError(
-                f"
+                f"Unified enterprise workflow execution failed: {type(e).__name__}: {e}"
             ) from e
 
-    def
+    async def _execute_workflow_async(
         self,
         workflow: Workflow,
         task_manager: TaskManager | None,
@@ -303,10 +547,16 @@
                 if self.debug:
                     self.logger.debug(f"Node {node_id} inputs: {inputs}")
 
-                # Execute node with metrics collection
+                # Execute node with unified async/sync support and metrics collection
                 collector = MetricsCollector()
                 with collector.collect(node_id=node_id) as metrics_context:
-
+                    # Unified async/sync execution
+                    if self.enable_async and hasattr(node_instance, "execute_async"):
+                        # Use async execution method that includes validation
+                        outputs = await node_instance.execute_async(**inputs)
+                    else:
+                        # Standard synchronous execution
+                        outputs = node_instance.execute(**inputs)
 
                 # Get performance metrics
                 performance_metrics = metrics_context.result()
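Node dispatch here is duck-typed: when enable_async is set, any node exposing an execute_async coroutine is awaited; everything else goes through the synchronous execute() path. A minimal sketch of the two node shapes the runtime distinguishes (illustrative classes, not SDK types):

    class SyncOnlyNode:
        def execute(self, **inputs):
            # Chosen when enable_async=False or no execute_async exists.
            return {"result": inputs}

    class AsyncCapableNode:
        async def execute_async(self, **inputs):
            # Preferred by the runtime when enable_async=True.
            return {"result": inputs}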
@@ -433,21 +683,75 @@
                     )
 
                 for source_key, target_key in mapping.items():
-
-
+                    # Handle nested output access (e.g., "result.files")
+                    if "." in source_key:
+                        # Navigate nested structure
+                        value = source_outputs
+                        parts = source_key.split(".")
+                        found = True
+
                         if self.debug:
-                        self.logger.debug(
-
-
+                            self.logger.debug(f" Navigating nested path: {source_key}")
+                            self.logger.debug(f" Starting value: {value}")
+
+                        for i, part in enumerate(parts):
+                            if isinstance(value, dict) and part in value:
+                                value = value[part]
+                                if self.debug:
+                                    self.logger.debug(
+                                        f" Part '{part}' found, value type: {type(value)}"
+                                    )
+                            else:
+                                # Check if it's a direct key in source_outputs (for backwards compatibility)
+                                if i == 0 and source_key in source_outputs:
+                                    value = source_outputs[source_key]
+                                    if self.debug:
+                                        self.logger.debug(
+                                            f" Found direct key '{source_key}' in source_outputs"
+                                        )
+                                    break
+                                else:
+                                    found = False
+                                    if self.debug:
+                                        self.logger.debug(
+                                            f" MISSING: Nested path '{source_key}' - failed at part '{part}'"
+                                        )
+                                        self.logger.debug(
+                                            f" Current value type: {type(value)}"
+                                        )
+                                        if isinstance(value, dict):
+                                            self.logger.debug(
+                                                f" Available keys: {list(value.keys())}"
+                                            )
+                                    self.logger.warning(
+                                        f"Source output '{source_key}' not found in node '{source_node_id}'. "
+                                        f"Available outputs: {list(source_outputs.keys())}"
+                                    )
+                                    break
+
+                        if found:
+                            inputs[target_key] = value
+                            if self.debug:
+                                self.logger.debug(
+                                    f" MAPPED: {source_key} -> {target_key} (type: {type(value)})"
+                                )
                     else:
-
-
-
+                        # Simple key mapping
+                        if source_key in source_outputs:
+                            inputs[target_key] = source_outputs[source_key]
+                            if self.debug:
+                                self.logger.debug(
+                                    f" MAPPED: {source_key} -> {target_key} (type: {type(source_outputs[source_key])})"
+                                )
+                        else:
+                            if self.debug:
+                                self.logger.debug(
+                                    f" MISSING: {source_key} not in {list(source_outputs.keys())}"
+                                )
+                            self.logger.warning(
+                                f"Source output '{source_key}' not found in node '{source_node_id}'. "
+                                f"Available outputs: {list(source_outputs.keys())}"
                             )
-                        self.logger.warning(
-                            f"Source output '{source_key}' not found in node '{source_node_id}'. "
-                            f"Available outputs: {list(source_outputs.keys())}"
-                        )
                 else:
                     if self.debug:
                         self.logger.debug(
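The mapping logic above lets a connection's source key reach into nested output dicts via dotted paths, with a fallback to treating the whole dotted string as a literal key for backwards compatibility. A data-only sketch of what now resolves, using hypothetical node outputs:

    # Upstream node output:
    source_outputs = {"result": {"files": ["a.csv", "b.csv"], "count": 2}}

    # Connection mapping with a dotted source key:
    mapping = {"result.files": "files"}

    # The runtime walks ["result", "files"] and passes
    # {"files": ["a.csv", "b.csv"]} to the downstream node; a plain key
    # such as "result" still maps the whole dict as before.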
@@ -542,3 +846,118 @@
             )
 
         return warnings
+
+    # Enterprise Feature Helper Methods
+
+    def _check_workflow_access(self, workflow: Workflow) -> None:
+        """Check if user has access to execute the workflow."""
+        if not self.enable_security or not self.user_context:
+            return
+
+        try:
+            # Use existing AccessControlManager pattern
+            from kailash.access_control import (
+                WorkflowPermission,
+                get_access_control_manager,
+            )
+
+            if self._access_control_manager is None:
+                self._access_control_manager = get_access_control_manager()
+
+            decision = self._access_control_manager.check_workflow_access(
+                self.user_context, workflow.workflow_id, WorkflowPermission.EXECUTE
+            )
+            if not decision.allowed:
+                raise PermissionError(
+                    f"Access denied to workflow '{workflow.workflow_id}': {decision.reason}"
+                )
+        except ImportError:
+            # Access control not available, log and continue
+            self.logger.warning(
+                "Access control system not available, skipping security check"
+            )
+        except Exception as e:
+            if isinstance(e, PermissionError):
+                raise
+            # Log but don't fail on access control errors
+            self.logger.warning(f"Access control check failed: {e}")
+
+    def _log_audit_event(self, event_type: str, event_data: dict[str, Any]) -> None:
+        """Log audit events using enterprise audit logging (synchronous)."""
+        if not self.enable_audit:
+            return
+
+        try:
+            # Use existing AuditLogNode pattern
+            from kailash.nodes.security.audit_log import AuditLogNode
+
+            audit_node = AuditLogNode()
+            # Use the SDK pattern - execute the node
+            audit_node.execute(
+                event_type=event_type,
+                event_data=event_data,
+                user_context=self.user_context,
+                timestamp=datetime.now(UTC),
+            )
+        except ImportError:
+            # Audit logging not available, fall back to standard logging
+            self.logger.info(f"AUDIT: {event_type} - {event_data}")
+        except Exception as e:
+            # Audit logging failures shouldn't stop execution
+            self.logger.warning(f"Audit logging failed: {e}")
+
+    async def _log_audit_event_async(
+        self, event_type: str, event_data: dict[str, Any]
+    ) -> None:
+        """Log audit events using enterprise audit logging (asynchronous)."""
+        if not self.enable_audit:
+            return
+
+        try:
+            # Use existing AuditLogNode pattern
+            from kailash.nodes.security.audit_log import AuditLogNode
+
+            audit_node = AuditLogNode()
+            # Use the SDK pattern - try async first, fallback to sync
+            if hasattr(audit_node, "async_run"):
+                await audit_node.async_run(
+                    event_type=event_type,
+                    event_data=event_data,
+                    user_context=self.user_context,
+                    timestamp=datetime.now(UTC),
+                )
+            else:
+                # Fallback to sync execution
+                audit_node.execute(
+                    event_type=event_type,
+                    event_data=event_data,
+                    user_context=self.user_context,
+                    timestamp=datetime.now(UTC),
+                )
+        except ImportError:
+            # Audit logging not available, fall back to standard logging
+            self.logger.info(f"AUDIT: {event_type} - {event_data}")
+        except Exception as e:
+            # Audit logging failures shouldn't stop execution
+            self.logger.warning(f"Audit logging failed: {e}")
+
+    def _serialize_user_context(self) -> dict[str, Any] | None:
+        """Serialize user context for logging/tracking."""
+        if not self.user_context:
+            return None
+
+        try:
+            # Try to use model_dump if it's a Pydantic model
+            if hasattr(self.user_context, "model_dump"):
+                return self.user_context.model_dump()
+            # Try to use dict() if it's a Pydantic model
+            elif hasattr(self.user_context, "dict"):
+                return self.user_context.dict()
+            # Convert to dict if possible
+            elif hasattr(self.user_context, "__dict__"):
+                return self.user_context.__dict__
+            else:
+                return {"user_context": str(self.user_context)}
+        except Exception as e:
+            self.logger.warning(f"Failed to serialize user context: {e}")
+            return {"user_context": str(self.user_context)}
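Note that these helpers are deliberately fail-open: a missing kailash.access_control or audit-log module degrades to a warning or a plain log line, and only an explicit PermissionError from the access decision aborts the run. A usage sketch combining the new flags, assuming `workflow` and `input_data` exist and that UserContext matches the module docstring example:

    from kailash.access_control import UserContext
    from kailash.runtime.local import LocalRuntime

    user = UserContext(user_id="user123", roles=["analyst"])
    runtime = LocalRuntime(
        user_context=user,
        enable_security=True,  # runs _check_workflow_access() before validation
        enable_audit=True,     # start/complete/fail events via AuditLogNode
    )

    try:
        results, run_id = runtime.execute(workflow, parameters={"data": input_data})
    except PermissionError as exc:
        # Raised (and audited as "workflow_access_denied") when the
        # AccessControlManager decision is not allowed.
        print(exc)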
kailash/runtime/parallel.py
CHANGED
@@ -398,7 +398,7 @@ class ParallelRuntime:
                 async def execute_with_metrics():
                     with collector.collect(node_id=node_id) as context:
                         result = await loop.run_in_executor(
-                            None, lambda: node_instance.
+                            None, lambda: node_instance.execute(**inputs)
                         )
                         return result, context.result()
 
kailash/runtime/parallel_cyclic.py
CHANGED
@@ -380,7 +380,7 @@ class ParallelCyclicRuntime:
                 # Execute node with metrics collection
                 collector = MetricsCollector()
                 with collector.collect(node_id=node_id) as metrics_context:
-                    outputs = node_instance.
+                    outputs = node_instance.execute(**inputs)
 
                 # Get performance metrics
                 performance_metrics = metrics_context.result()
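Both parallel runtimes now invoke nodes through the public execute() entry point, and ParallelRuntime still offloads that call with run_in_executor so the blocking node body never stalls the event loop. A generic sketch of that offloading pattern, independent of the SDK types:

    import asyncio

    async def run_sync_node(node_instance, inputs):
        loop = asyncio.get_running_loop()
        # Push the blocking execute() call onto the default thread pool
        # so the event loop stays responsive.
        return await loop.run_in_executor(
            None, lambda: node_instance.execute(**inputs)
        )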
|