kailash 0.1.5__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +740 -0
  3. kailash/api/__main__.py +6 -0
  4. kailash/api/auth.py +668 -0
  5. kailash/api/custom_nodes.py +285 -0
  6. kailash/api/custom_nodes_secure.py +377 -0
  7. kailash/api/database.py +620 -0
  8. kailash/api/studio.py +915 -0
  9. kailash/api/studio_secure.py +893 -0
  10. kailash/mcp/__init__.py +53 -0
  11. kailash/mcp/__main__.py +13 -0
  12. kailash/mcp/ai_registry_server.py +712 -0
  13. kailash/mcp/client.py +447 -0
  14. kailash/mcp/client_new.py +334 -0
  15. kailash/mcp/server.py +293 -0
  16. kailash/mcp/server_new.py +336 -0
  17. kailash/mcp/servers/__init__.py +12 -0
  18. kailash/mcp/servers/ai_registry.py +289 -0
  19. kailash/nodes/__init__.py +4 -2
  20. kailash/nodes/ai/__init__.py +2 -0
  21. kailash/nodes/ai/a2a.py +714 -67
  22. kailash/nodes/ai/intelligent_agent_orchestrator.py +31 -37
  23. kailash/nodes/ai/iterative_llm_agent.py +1280 -0
  24. kailash/nodes/ai/llm_agent.py +324 -1
  25. kailash/nodes/ai/self_organizing.py +5 -6
  26. kailash/nodes/base.py +15 -2
  27. kailash/nodes/base_async.py +45 -0
  28. kailash/nodes/base_cycle_aware.py +374 -0
  29. kailash/nodes/base_with_acl.py +338 -0
  30. kailash/nodes/code/python.py +135 -27
  31. kailash/nodes/data/__init__.py +1 -2
  32. kailash/nodes/data/readers.py +16 -6
  33. kailash/nodes/data/sql.py +699 -256
  34. kailash/nodes/data/writers.py +16 -6
  35. kailash/nodes/logic/__init__.py +8 -0
  36. kailash/nodes/logic/convergence.py +642 -0
  37. kailash/nodes/logic/loop.py +153 -0
  38. kailash/nodes/logic/operations.py +187 -27
  39. kailash/nodes/mixins/__init__.py +11 -0
  40. kailash/nodes/mixins/mcp.py +228 -0
  41. kailash/nodes/mixins.py +387 -0
  42. kailash/runtime/__init__.py +2 -1
  43. kailash/runtime/access_controlled.py +458 -0
  44. kailash/runtime/local.py +106 -33
  45. kailash/runtime/parallel_cyclic.py +529 -0
  46. kailash/sdk_exceptions.py +90 -5
  47. kailash/security.py +845 -0
  48. kailash/tracking/manager.py +38 -15
  49. kailash/tracking/models.py +1 -1
  50. kailash/tracking/storage/filesystem.py +30 -2
  51. kailash/utils/__init__.py +8 -0
  52. kailash/workflow/__init__.py +18 -0
  53. kailash/workflow/convergence.py +270 -0
  54. kailash/workflow/cycle_analyzer.py +889 -0
  55. kailash/workflow/cycle_builder.py +579 -0
  56. kailash/workflow/cycle_config.py +725 -0
  57. kailash/workflow/cycle_debugger.py +860 -0
  58. kailash/workflow/cycle_exceptions.py +615 -0
  59. kailash/workflow/cycle_profiler.py +741 -0
  60. kailash/workflow/cycle_state.py +338 -0
  61. kailash/workflow/cyclic_runner.py +985 -0
  62. kailash/workflow/graph.py +500 -39
  63. kailash/workflow/migration.py +809 -0
  64. kailash/workflow/safety.py +365 -0
  65. kailash/workflow/templates.py +763 -0
  66. kailash/workflow/validation.py +751 -0
  67. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/METADATA +259 -12
  68. kailash-0.2.1.dist-info/RECORD +125 -0
  69. kailash/nodes/mcp/__init__.py +0 -11
  70. kailash/nodes/mcp/client.py +0 -554
  71. kailash/nodes/mcp/resource.py +0 -682
  72. kailash/nodes/mcp/server.py +0 -577
  73. kailash-0.1.5.dist-info/RECORD +0 -88
  74. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/WHEEL +0 -0
  75. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/entry_points.txt +0 -0
  76. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/licenses/LICENSE +0 -0
  77. {kailash-0.1.5.dist-info → kailash-0.2.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,458 @@
1
+ """
2
+ Access-Controlled Runtime for Kailash SDK
3
+
4
+ This module provides an access-controlled runtime that wraps the standard runtime
5
+ to add permission checks. The standard runtime remains unchanged, ensuring complete
6
+ backward compatibility.
7
+
8
+ Users who don't need access control continue using LocalRuntime as normal.
9
+ Users who need access control use AccessControlledRuntime instead.
10
+
11
+ Example without access control (existing code):
12
+ >>> from kailash.runtime.local import LocalRuntime
13
+ >>> from kailash.workflow import Workflow
14
+ >>> runtime = LocalRuntime()
15
+ >>> workflow = Workflow(workflow_id="test", name="Test")
16
+ >>> result, run_id = runtime.execute(workflow) # Works exactly as before
17
+
18
+ Example with access control (opt-in):
19
+ >>> from kailash.runtime.access_controlled import AccessControlledRuntime
20
+ >>> from kailash.access_control import UserContext, get_access_control_manager
21
+ >>> user = UserContext(user_id="123", tenant_id="abc", email="user@test.com", roles=["analyst"])
22
+ >>> runtime = AccessControlledRuntime(user_context=user)
23
+ >>> # Access control manager is disabled by default for compatibility
24
+ >>> acm = get_access_control_manager()
25
+ >>> acm.enabled # Should be False by default
26
+ False
27
+ """
28
+
29
+ import logging
30
+ from typing import Any, Dict, List, Optional, Tuple
31
+
32
+ from kailash.access_control import (
33
+ AccessControlManager,
34
+ NodePermission,
35
+ PermissionEffect,
36
+ PermissionRule,
37
+ UserContext,
38
+ WorkflowPermission,
39
+ get_access_control_manager,
40
+ )
41
+ from kailash.nodes.base import Node
42
+ from kailash.runtime.local import LocalRuntime
43
+ from kailash.workflow import Workflow
44
+
45
+ logger = logging.getLogger(__name__)
46
+
47
+
48
class AccessControlledRuntime:
    """
    Runtime with transparent access control layer.

    Wraps the standard LocalRuntime and adds access control checks without
    modifying the original runtime or requiring any changes to existing
    nodes or workflows. Intended as a drop-in replacement for LocalRuntime
    when role-based access control, data masking, or permission-based
    conditional routing is needed.

    Error Handling:
        - Denied workflow access raises PermissionError with a clear message.
        - Denied node execution skips the node (or redirects if configured).
        - Denied output reads yield ``{"_access_denied": True}``.

    Side Effects:
        - Logs access decisions for audit purposes.
        - May redirect execution to alternative nodes.
        - Applies data masking to sensitive outputs.

    Example:
        >>> from kailash.runtime.access_controlled import AccessControlledRuntime
        >>> from kailash.access_control import UserContext
        >>> user = UserContext(user_id="123", tenant_id="abc", email="user@test.com", roles=["analyst"])
        >>> runtime = AccessControlledRuntime(user_context=user)
        >>> isinstance(runtime, AccessControlledRuntime)
        True
    """

    def __init__(
        self, user_context: UserContext, base_runtime: Optional[LocalRuntime] = None
    ):
        """
        Initialize access-controlled runtime.

        Args:
            user_context: The user context for access control decisions.
            base_runtime: The underlying runtime to use (defaults to LocalRuntime).
        """
        self.user_context = user_context
        self.base_runtime = base_runtime or LocalRuntime()
        self.acm = get_access_control_manager()

        # Per-run bookkeeping: nodes skipped by access control, and node
        # outputs captured for permission-based conditional routing.
        self._skipped_nodes: set[str] = set()
        self._node_outputs: Dict[str, Any] = {}

    def execute(
        self, workflow: Workflow, parameters: Optional[Dict[str, Any]] = None
    ) -> Tuple[Any, str]:
        """
        Execute workflow with access control.

        This method has the exact same signature as the standard runtime,
        ensuring complete compatibility.

        Args:
            workflow: The workflow to execute.
            parameters: Optional per-node parameter overrides.

        Returns:
            Tuple of (results, run_id) from the underlying runtime.

        Raises:
            PermissionError: If the user lacks EXECUTE permission on the workflow.
        """
        # BUGFIX: reset per-run state so skipped nodes and cached outputs from
        # a previous execute() call cannot leak into this run when the same
        # runtime instance is reused.
        self._skipped_nodes.clear()
        self._node_outputs.clear()

        # Check workflow-level access before touching the base runtime.
        workflow_decision = self.acm.check_workflow_access(
            self.user_context, workflow.workflow_id, WorkflowPermission.EXECUTE
        )

        if not workflow_decision.allowed:
            raise PermissionError(f"Access denied: {workflow_decision.reason}")

        # For simplicity, directly execute with the base runtime.
        # A full implementation would route through _create_controlled_workflow()
        # so every node is wrapped with per-node permission checks.
        return self.base_runtime.execute(workflow, parameters)

    def _create_controlled_workflow(self, workflow: Workflow) -> Workflow:
        """
        Create a workflow wrapper that enforces access control.

        The returned workflow mirrors the original's structure, but every
        node instance is replaced with an access-controlled wrapper; the
        original workflow is left untouched.
        """
        controlled = Workflow(
            workflow_id=workflow.workflow_id,
            name=workflow.name,
            description=workflow.description,
            version=workflow.version,
        )

        # NOTE(review): graph.copy() is presumably a shallow copy -- node
        # objects are shared until explicitly replaced below. Confirm against
        # the graph implementation.
        controlled.graph = workflow.graph.copy()

        # Wrap each node with access control.
        for node_id in workflow.graph.nodes:
            node_data = workflow.graph.nodes[node_id]
            original_node = node_data.get("node")

            if original_node:
                wrapped_node = self._create_controlled_node(node_id, original_node)
                controlled.graph.nodes[node_id]["node"] = wrapped_node

        return controlled

    def _create_controlled_node(self, node_id: str, original_node: Node) -> Node:
        """
        Create an access-controlled wrapper for a node.

        The wrapper intercepts the node's run() method to add EXECUTE and
        READ_OUTPUT permission checks without modifying the original node.
        """
        runtime = self  # Capture runtime reference for the closure below

        class AccessControlledNodeWrapper(Node):
            """Dynamic wrapper that adds access control to any node"""

            def __init__(self):
                # Deliberately do NOT initialize the Node base class; this
                # proxy only stores a reference and delegates to the original.
                self._original_node = original_node
                self._node_id = node_id
                # Mirror the original node's public attributes.
                for attr, value in original_node.__dict__.items():
                    if not attr.startswith("_"):
                        setattr(self, attr, value)

            def get_parameters(self):
                """Delegate to original node"""
                return self._original_node.get_parameters()

            def validate_config(self):
                """Delegate to original node if it has the method"""
                if hasattr(self._original_node, "validate_config"):
                    return self._original_node.validate_config()
                return True

            def get_output_schema(self):
                """Delegate to original node"""
                if hasattr(self._original_node, "get_output_schema"):
                    return self._original_node.get_output_schema()
                return None

            def run(self, **inputs) -> Any:
                """Execute with access control checks"""
                # 1. May this user execute the node at all?
                execute_decision = runtime.acm.check_node_access(
                    runtime.user_context,
                    self._node_id,
                    NodePermission.EXECUTE,
                    runtime_context={"inputs": inputs},
                )

                if not execute_decision.allowed:
                    # Node execution denied: record the skip for routing.
                    logger.info(
                        f"Node {self._node_id} skipped for user {runtime.user_context.user_id}"
                    )
                    runtime._skipped_nodes.add(self._node_id)

                    # Redirect to an alternative node when configured.
                    if execute_decision.redirect_node:
                        return {"_redirect_to": execute_decision.redirect_node}

                    # Otherwise the node contributes nothing downstream.
                    return {}

                # Execute the original node.
                result = self._original_node.run(**inputs)

                # 2. May this user read the node's output?
                output_decision = runtime.acm.check_node_access(
                    runtime.user_context,
                    self._node_id,
                    NodePermission.READ_OUTPUT,
                    runtime_context={"output": result},
                )

                if not output_decision.allowed:
                    # Mask the entire output.
                    return {"_access_denied": True}

                # 3. Mask individual fields when required.
                if output_decision.masked_fields and isinstance(result, dict):
                    result = runtime._mask_fields(result, output_decision.masked_fields)

                # Keep output available for permission-based routing.
                runtime._node_outputs[self._node_id] = result

                return result

        # Create instance of wrapper.
        wrapper = AccessControlledNodeWrapper()

        # Preserve node metadata so logs and introspection show the
        # original node type rather than the generic wrapper name.
        wrapper.__class__.__name__ = f"Controlled{original_node.__class__.__name__}"
        wrapper.__class__.__module__ = original_node.__class__.__module__

        return wrapper

    @staticmethod
    def _mask_fields(data: Dict[str, Any], fields: List[str]) -> Dict[str, Any]:
        """Return a shallow copy of ``data`` with the given fields masked."""
        masked = data.copy()
        for field in fields:
            if field in masked:
                masked[field] = "***MASKED***"
        return masked

    def _handle_conditional_routing(
        self, node_id: str, true_path: List[str], false_path: List[str]
    ) -> List[str]:
        """
        Determine which path to take based on permissions.

        Used for conditional nodes where the branch depends on user
        permissions rather than on data conditions.
        """
        # Delegate the routing decision to the access control manager.
        return self.acm.get_permission_based_route(
            self.user_context, node_id, true_path, false_path
        )
296
+
297
+
298
class AccessControlConfig:
    """
    Declarative access-control configuration for workflows.

    Collects PermissionRule objects describing who may do what, separately
    from workflow code, so administrators can change permissions without
    touching workflow definitions. Rules are applied to an
    AccessControlManager in one batch via :meth:`apply_to_manager`.

    Example:
        >>> config = AccessControlConfig()
        >>> config.add_workflow_permission(
        ...     workflow_id="analytics",
        ...     permission=WorkflowPermission.EXECUTE,
        ...     role="analyst"
        ... )
        >>> config.add_node_permission(
        ...     workflow_id="analytics",
        ...     node_id="sensitive_data",
        ...     permission=NodePermission.READ_OUTPUT,
        ...     role="admin"
        ... )
    """

    def __init__(self):
        # Ordered rule list; each rule id embeds its index at creation time.
        self.rules: List[PermissionRule] = []

    def add_workflow_permission(
        self,
        workflow_id: str,
        permission: WorkflowPermission,
        user_id: Optional[str] = None,
        role: Optional[str] = None,
        effect: PermissionEffect = PermissionEffect.ALLOW,
    ):
        """Add a workflow-level permission rule"""
        rule_id = f"workflow_{workflow_id}_{permission.value}_{len(self.rules)}"
        self.rules.append(
            PermissionRule(
                id=rule_id,
                resource_type="workflow",
                resource_id=workflow_id,
                permission=permission,
                effect=effect,
                user_id=user_id,
                role=role,
            )
        )

    def add_node_permission(
        self,
        workflow_id: str,
        node_id: str,
        permission: NodePermission,
        user_id: Optional[str] = None,
        role: Optional[str] = None,
        effect: PermissionEffect = PermissionEffect.ALLOW,
        masked_fields: Optional[List[str]] = None,
        redirect_node: Optional[str] = None,
    ):
        """Add a node-level permission rule"""
        # NOTE: workflow_id only namespaces the rule id; the rule itself is
        # keyed by node_id alone.
        rule_id = f"node_{workflow_id}_{node_id}_{permission.value}_{len(self.rules)}"
        node_rule = PermissionRule(
            id=rule_id,
            resource_type="node",
            resource_id=node_id,
            permission=permission,
            effect=effect,
            user_id=user_id,
            role=role,
        )

        # Optional behaviors (field masking, redirection) ride along as
        # rule conditions.
        if masked_fields:
            node_rule.conditions["masked_fields"] = masked_fields

        if redirect_node:
            node_rule.conditions["redirect_node"] = redirect_node

        self.rules.append(node_rule)

    def apply_to_manager(self, manager: AccessControlManager):
        """Register every collected rule with the given manager."""
        for entry in self.rules:
            manager.add_rule(entry)
404
+
405
+
406
def execute_with_access_control(
    workflow: Workflow,
    user_context: UserContext,
    parameters: Optional[Dict[str, Any]] = None,
    access_config: Optional[AccessControlConfig] = None,
) -> Tuple[Any, str]:
    """
    Convenience function to execute a workflow with access control.

    Creates an AccessControlledRuntime for the given user, optionally
    installs an access configuration on the global manager first, and then
    runs the workflow.

    Args:
        workflow: The workflow to execute.
        user_context: User context for access control decisions.
        parameters: Optional runtime parameters for workflow execution.
        access_config: Optional access control configuration to apply.

    Returns:
        Tuple containing:
            - result: The workflow execution result.
            - run_id: Unique identifier for this execution run.

    Raises:
        PermissionError: If user lacks permission to execute workflow.
        ValueError: If workflow or user_context is invalid.

    Side Effects:
        - Applies access control rules to the global manager if a config
          is provided, and enables access control globally (it remains
          enabled after this call returns).
        - Logs audit events for access decisions.

    Example:
        >>> from kailash.runtime.access_controlled import execute_with_access_control
        >>> callable(execute_with_access_control)
        True
    """
    # When a configuration is supplied, install its rules on the global
    # manager and switch access control on.
    if access_config:
        manager = get_access_control_manager()
        access_config.apply_to_manager(manager)
        manager.enabled = True  # Enable access control

    # Run the workflow through a user-scoped, access-controlled runtime.
    controlled_runtime = AccessControlledRuntime(user_context)
    return controlled_runtime.execute(workflow, parameters)
kailash/runtime/local.py CHANGED
@@ -1,4 +1,43 @@
1
- """Local runtime engine for executing workflows."""
1
+ """Enhanced Local Runtime Engine with Comprehensive Cycle Support.
2
+
3
+ This module provides a sophisticated local execution engine for workflows with
4
+ advanced support for both traditional DAG workflows and complex cyclic patterns.
5
+ It offers comprehensive task tracking, performance monitoring, and debugging
6
+ capabilities for development and production use.
7
+
8
+ Examples:
9
+ Basic workflow execution:
10
+
11
+ >>> from kailash.runtime.local import LocalRuntime
12
+ >>> runtime = LocalRuntime(debug=True, enable_cycles=True)
13
+ >>> results = runtime.execute(workflow, parameters={"input": "data"})
14
+
15
+ With comprehensive tracking:
16
+
17
+ >>> from kailash.tracking import TaskManager
18
+ >>> runtime = LocalRuntime(enable_cycles=True)
19
+ >>> task_manager = TaskManager()
20
+ >>> results = runtime.execute(
21
+ ... workflow,
22
+ ... task_manager=task_manager,
23
+ ... parameters={"initial_value": 10}
24
+ ... )
25
+ >>> # Access detailed execution information
26
+ >>> tasks = task_manager.get_tasks_for_workflow(workflow.workflow_id)
27
+ >>> metrics = task_manager.get_performance_summary()
28
+
29
+ Production configuration:
30
+
31
+ >>> runtime = LocalRuntime(
32
+ ... debug=False, # Optimized for performance
33
+ ... enable_cycles=True # Support cyclic patterns
34
+ ... )
35
+ >>> results = runtime.execute(
36
+ ... workflow,
37
+ ... parameters=input_params,
38
+ ... run_id="production_run_001"
39
+ ... )
40
+ """
2
41
 
3
42
  import logging
4
43
  from datetime import datetime, timezone
@@ -16,22 +55,33 @@ from kailash.tracking import TaskManager, TaskStatus
16
55
  from kailash.tracking.metrics_collector import MetricsCollector
17
56
  from kailash.tracking.models import TaskMetrics
18
57
  from kailash.workflow import Workflow
58
+ from kailash.workflow.cyclic_runner import CyclicWorkflowExecutor
19
59
 
20
60
  logger = logging.getLogger(__name__)
21
61
 
22
62
 
23
63
  class LocalRuntime:
24
- """Local execution engine for workflows."""
64
+ """Local execution engine for workflows.
25
65
 
26
- def __init__(self, debug: bool = False):
66
+ This class provides a robust, production-ready execution engine that seamlessly
67
+ handles both traditional workflows and advanced cyclic patterns.
68
+ """
69
+
70
+ def __init__(self, debug: bool = False, enable_cycles: bool = True):
27
71
  """Initialize the local runtime.
28
72
 
29
73
  Args:
30
- debug: Whether to enable debug logging
74
+ debug: Whether to enable debug logging.
75
+ enable_cycles: Whether to enable cyclic workflow support.
31
76
  """
32
77
  self.debug = debug
78
+ self.enable_cycles = enable_cycles
33
79
  self.logger = logger
34
80
 
81
+ # Initialize cyclic workflow executor if enabled
82
+ if enable_cycles:
83
+ self.cyclic_executor = CyclicWorkflowExecutor()
84
+
35
85
  if debug:
36
86
  self.logger.setLevel(logging.DEBUG)
37
87
  else:
@@ -46,16 +96,16 @@ class LocalRuntime:
46
96
  """Execute a workflow locally.
47
97
 
48
98
  Args:
49
- workflow: Workflow to execute
50
- task_manager: Optional task manager for tracking
51
- parameters: Optional parameter overrides per node
99
+ workflow: Workflow to execute.
100
+ task_manager: Optional task manager for tracking.
101
+ parameters: Optional parameter overrides per node.
52
102
 
53
103
  Returns:
54
- Tuple of (results dict, run_id)
104
+ Tuple of (results dict, run_id).
55
105
 
56
106
  Raises:
57
- RuntimeExecutionError: If execution fails
58
- WorkflowValidationError: If workflow is invalid
107
+ RuntimeExecutionError: If execution fails.
108
+ WorkflowValidationError: If workflow is invalid.
59
109
  """
60
110
  if not workflow:
61
111
  raise RuntimeExecutionError("No workflow provided")
@@ -81,13 +131,36 @@ class LocalRuntime:
81
131
  self.logger.warning(f"Failed to create task run: {e}")
82
132
  # Continue without tracking
83
133
 
84
- # Execute workflow
85
- results = self._execute_workflow(
86
- workflow=workflow,
87
- task_manager=task_manager,
88
- run_id=run_id,
89
- parameters=parameters or {},
90
- )
134
+ # Check for cyclic workflows and delegate accordingly
135
+ if self.enable_cycles and workflow.has_cycles():
136
+ self.logger.info(
137
+ "Cyclic workflow detected, using CyclicWorkflowExecutor"
138
+ )
139
+ # Use cyclic executor for workflows with cycles
140
+ try:
141
+ # Pass run_id to cyclic executor if available
142
+ cyclic_results, cyclic_run_id = self.cyclic_executor.execute(
143
+ workflow, parameters, task_manager, run_id
144
+ )
145
+ results = cyclic_results
146
+ # Update run_id if task manager is being used
147
+ if not run_id:
148
+ run_id = cyclic_run_id
149
+ except Exception as e:
150
+ raise RuntimeExecutionError(
151
+ f"Cyclic workflow execution failed: {e}"
152
+ ) from e
153
+ else:
154
+ # Execute standard DAG workflow
155
+ self.logger.info(
156
+ "Standard DAG workflow detected, using local execution"
157
+ )
158
+ results = self._execute_workflow(
159
+ workflow=workflow,
160
+ task_manager=task_manager,
161
+ run_id=run_id,
162
+ parameters=parameters or {},
163
+ )
91
164
 
92
165
  # Mark run as completed
93
166
  if task_manager and run_id:
@@ -131,16 +204,16 @@ class LocalRuntime:
131
204
  """Execute the workflow nodes in topological order.
132
205
 
133
206
  Args:
134
- workflow: Workflow to execute
135
- task_manager: Task manager for tracking
136
- run_id: Run ID for tracking
137
- parameters: Parameter overrides
207
+ workflow: Workflow to execute.
208
+ task_manager: Task manager for tracking.
209
+ run_id: Run ID for tracking.
210
+ parameters: Parameter overrides.
138
211
 
139
212
  Returns:
140
- Dictionary of node results
213
+ Dictionary of node results.
141
214
 
142
215
  Raises:
143
- WorkflowExecutionError: If execution fails
216
+ WorkflowExecutionError: If execution fails.
144
217
  """
145
218
  # Get execution order
146
219
  try:
@@ -304,17 +377,17 @@ class LocalRuntime:
304
377
  """Prepare inputs for a node execution.
305
378
 
306
379
  Args:
307
- workflow: The workflow being executed
308
- node_id: Current node ID
309
- node_instance: Current node instance
310
- node_outputs: Outputs from previously executed nodes
311
- parameters: Parameter overrides
380
+ workflow: The workflow being executed.
381
+ node_id: Current node ID.
382
+ node_instance: Current node instance.
383
+ node_outputs: Outputs from previously executed nodes.
384
+ parameters: Parameter overrides.
312
385
 
313
386
  Returns:
314
- Dictionary of inputs for the node
387
+ Dictionary of inputs for the node.
315
388
 
316
389
  Raises:
317
- WorkflowExecutionError: If input preparation fails
390
+ WorkflowExecutionError: If input preparation fails.
318
391
  """
319
392
  inputs = {}
320
393
 
@@ -353,11 +426,11 @@ class LocalRuntime:
353
426
  """Determine if execution should stop when a node fails.
354
427
 
355
428
  Args:
356
- workflow: The workflow being executed
357
- node_id: Failed node ID
429
+ workflow: The workflow being executed.
430
+ node_id: Failed node ID.
358
431
 
359
432
  Returns:
360
- Whether to stop execution
433
+ Whether to stop execution.
361
434
  """
362
435
  # Check if any downstream nodes depend on this node
363
436
  has_dependents = workflow.graph.out_degree(node_id) > 0