kailash 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. kailash/access_control.py +40 -39
  2. kailash/api/auth.py +26 -32
  3. kailash/api/custom_nodes.py +29 -29
  4. kailash/api/custom_nodes_secure.py +35 -35
  5. kailash/api/database.py +17 -17
  6. kailash/api/gateway.py +19 -19
  7. kailash/api/mcp_integration.py +24 -23
  8. kailash/api/studio.py +45 -45
  9. kailash/api/workflow_api.py +8 -8
  10. kailash/cli/commands.py +5 -8
  11. kailash/manifest.py +42 -42
  12. kailash/mcp/__init__.py +1 -1
  13. kailash/mcp/ai_registry_server.py +20 -20
  14. kailash/mcp/client.py +9 -11
  15. kailash/mcp/client_new.py +10 -10
  16. kailash/mcp/server.py +1 -2
  17. kailash/mcp/server_enhanced.py +449 -0
  18. kailash/mcp/servers/ai_registry.py +6 -6
  19. kailash/mcp/utils/__init__.py +31 -0
  20. kailash/mcp/utils/cache.py +267 -0
  21. kailash/mcp/utils/config.py +263 -0
  22. kailash/mcp/utils/formatters.py +293 -0
  23. kailash/mcp/utils/metrics.py +418 -0
  24. kailash/nodes/ai/agents.py +9 -9
  25. kailash/nodes/ai/ai_providers.py +33 -34
  26. kailash/nodes/ai/embedding_generator.py +31 -32
  27. kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
  28. kailash/nodes/ai/iterative_llm_agent.py +48 -48
  29. kailash/nodes/ai/llm_agent.py +32 -33
  30. kailash/nodes/ai/models.py +13 -13
  31. kailash/nodes/ai/self_organizing.py +44 -44
  32. kailash/nodes/api/auth.py +11 -11
  33. kailash/nodes/api/graphql.py +13 -13
  34. kailash/nodes/api/http.py +19 -19
  35. kailash/nodes/api/monitoring.py +20 -20
  36. kailash/nodes/api/rate_limiting.py +9 -13
  37. kailash/nodes/api/rest.py +29 -29
  38. kailash/nodes/api/security.py +44 -47
  39. kailash/nodes/base.py +21 -23
  40. kailash/nodes/base_async.py +7 -7
  41. kailash/nodes/base_cycle_aware.py +12 -12
  42. kailash/nodes/base_with_acl.py +5 -5
  43. kailash/nodes/code/python.py +66 -57
  44. kailash/nodes/data/directory.py +6 -6
  45. kailash/nodes/data/event_generation.py +10 -10
  46. kailash/nodes/data/file_discovery.py +28 -31
  47. kailash/nodes/data/readers.py +8 -8
  48. kailash/nodes/data/retrieval.py +10 -10
  49. kailash/nodes/data/sharepoint_graph.py +17 -17
  50. kailash/nodes/data/sources.py +5 -5
  51. kailash/nodes/data/sql.py +13 -13
  52. kailash/nodes/data/streaming.py +25 -25
  53. kailash/nodes/data/vector_db.py +22 -22
  54. kailash/nodes/data/writers.py +7 -7
  55. kailash/nodes/logic/async_operations.py +17 -17
  56. kailash/nodes/logic/convergence.py +11 -11
  57. kailash/nodes/logic/loop.py +4 -4
  58. kailash/nodes/logic/operations.py +11 -11
  59. kailash/nodes/logic/workflow.py +8 -9
  60. kailash/nodes/mixins/mcp.py +17 -17
  61. kailash/nodes/mixins.py +8 -10
  62. kailash/nodes/transform/chunkers.py +3 -3
  63. kailash/nodes/transform/formatters.py +7 -7
  64. kailash/nodes/transform/processors.py +10 -10
  65. kailash/runtime/access_controlled.py +18 -18
  66. kailash/runtime/async_local.py +17 -19
  67. kailash/runtime/docker.py +20 -22
  68. kailash/runtime/local.py +16 -16
  69. kailash/runtime/parallel.py +23 -23
  70. kailash/runtime/parallel_cyclic.py +27 -27
  71. kailash/runtime/runner.py +6 -6
  72. kailash/runtime/testing.py +20 -20
  73. kailash/sdk_exceptions.py +0 -58
  74. kailash/security.py +14 -26
  75. kailash/tracking/manager.py +38 -38
  76. kailash/tracking/metrics_collector.py +15 -14
  77. kailash/tracking/models.py +53 -53
  78. kailash/tracking/storage/base.py +7 -17
  79. kailash/tracking/storage/database.py +22 -23
  80. kailash/tracking/storage/filesystem.py +38 -40
  81. kailash/utils/export.py +21 -21
  82. kailash/utils/templates.py +2 -3
  83. kailash/visualization/api.py +30 -34
  84. kailash/visualization/dashboard.py +17 -17
  85. kailash/visualization/performance.py +16 -16
  86. kailash/visualization/reports.py +25 -27
  87. kailash/workflow/builder.py +8 -8
  88. kailash/workflow/convergence.py +13 -12
  89. kailash/workflow/cycle_analyzer.py +30 -32
  90. kailash/workflow/cycle_builder.py +12 -12
  91. kailash/workflow/cycle_config.py +16 -15
  92. kailash/workflow/cycle_debugger.py +40 -40
  93. kailash/workflow/cycle_exceptions.py +29 -29
  94. kailash/workflow/cycle_profiler.py +21 -21
  95. kailash/workflow/cycle_state.py +20 -22
  96. kailash/workflow/cyclic_runner.py +44 -44
  97. kailash/workflow/graph.py +40 -40
  98. kailash/workflow/mermaid_visualizer.py +9 -11
  99. kailash/workflow/migration.py +22 -22
  100. kailash/workflow/mock_registry.py +6 -6
  101. kailash/workflow/runner.py +9 -9
  102. kailash/workflow/safety.py +12 -13
  103. kailash/workflow/state.py +8 -11
  104. kailash/workflow/templates.py +19 -19
  105. kailash/workflow/validation.py +14 -14
  106. kailash/workflow/visualization.py +22 -22
  107. {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/METADATA +53 -5
  108. kailash-0.3.2.dist-info/RECORD +136 -0
  109. kailash-0.3.0.dist-info/RECORD +0 -130
  110. {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/WHEEL +0 -0
  111. {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/entry_points.txt +0 -0
  112. {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/licenses/LICENSE +0 -0
  113. {kailash-0.3.0.dist-info → kailash-0.3.2.dist-info}/top_level.txt +0 -0
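The bulk of this release is a mechanical typing-modernization pass across the runtime and node modules, visible in the hunks below: typing.Dict/List/Set/Tuple/Deque/Optional annotations are replaced by built-in generics (PEP 585) and X | None unions (PEP 604), datetime.now(timezone.utc) becomes datetime.now(UTC), and redundant "r" modes are dropped from open() calls. A minimal before/after sketch of the pattern (the function itself is illustrative, not taken from the diff):

    # Before (0.3.0 style)
    from typing import Dict, List, Optional

    def summarize(rows: List[Dict[str, int]], label: Optional[str] = None) -> Dict[str, int]:
        ...

    # After (0.3.2 style): built-in generics (PEP 585) and unions (PEP 604)
    def summarize(rows: list[dict[str, int]], label: str | None = None) -> dict[str, int]:
        ...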
kailash/runtime/access_controlled.py CHANGED
@@ -27,7 +27,7 @@ Example with access control (opt-in):
 """
 
 import logging
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any
 
 from kailash.access_control import (
     AccessControlManager,
@@ -108,7 +108,7 @@ class AccessControlledRuntime:
     """
 
     def __init__(
-        self, user_context: UserContext, base_runtime: Optional[LocalRuntime] = None
+        self, user_context: UserContext, base_runtime: LocalRuntime | None = None
     ):
         """
         Initialize access-controlled runtime.
@@ -123,11 +123,11 @@ class AccessControlledRuntime:
 
         # Track skipped nodes for alternative routing
        self._skipped_nodes: set[str] = set()
-        self._node_outputs: Dict[str, Any] = {}
+        self._node_outputs: dict[str, Any] = {}
 
     def execute(
-        self, workflow: Workflow, parameters: Optional[Dict[str, Any]] = None
-    ) -> Tuple[Any, str]:
+        self, workflow: Workflow, parameters: dict[str, Any] | None = None
+    ) -> tuple[Any, str]:
         """
         Execute workflow with access control.
 
@@ -272,7 +272,7 @@ class AccessControlledRuntime:
         return wrapper
 
     @staticmethod
-    def _mask_fields(data: Dict[str, Any], fields: List[str]) -> Dict[str, Any]:
+    def _mask_fields(data: dict[str, Any], fields: list[str]) -> dict[str, Any]:
         """Mask sensitive fields in data"""
         masked = data.copy()
         for field in fields:
@@ -281,8 +281,8 @@ class AccessControlledRuntime:
         return masked
 
     def _handle_conditional_routing(
-        self, node_id: str, true_path: List[str], false_path: List[str]
-    ) -> List[str]:
+        self, node_id: str, true_path: list[str], false_path: list[str]
+    ) -> list[str]:
         """
         Determine which path to take based on permissions.
 
@@ -345,14 +345,14 @@ class AccessControlConfig:
     """
 
     def __init__(self):
-        self.rules: List[PermissionRule] = []
+        self.rules: list[PermissionRule] = []
 
     def add_workflow_permission(
         self,
         workflow_id: str,
         permission: WorkflowPermission,
-        user_id: Optional[str] = None,
-        role: Optional[str] = None,
+        user_id: str | None = None,
+        role: str | None = None,
         effect: PermissionEffect = PermissionEffect.ALLOW,
     ):
         """Add a workflow-level permission rule"""
@@ -372,11 +372,11 @@ class AccessControlConfig:
         workflow_id: str,
         node_id: str,
         permission: NodePermission,
-        user_id: Optional[str] = None,
-        role: Optional[str] = None,
+        user_id: str | None = None,
+        role: str | None = None,
         effect: PermissionEffect = PermissionEffect.ALLOW,
-        masked_fields: Optional[List[str]] = None,
-        redirect_node: Optional[str] = None,
+        masked_fields: list[str] | None = None,
+        redirect_node: str | None = None,
     ):
         """Add a node-level permission rule"""
         rule = PermissionRule(
@@ -406,9 +406,9 @@
 def execute_with_access_control(
     workflow: Workflow,
     user_context: UserContext,
-    parameters: Optional[Dict[str, Any]] = None,
-    access_config: Optional[AccessControlConfig] = None,
-) -> Tuple[Any, str]:
+    parameters: dict[str, Any] | None = None,
+    access_config: AccessControlConfig | None = None,
+) -> tuple[Any, str]:
     """
     Convenience function to execute a workflow with access control.

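The Optional-to-union rewrite above is purely syntactic: on Python 3.10+ a PEP 604 union is a real runtime object that compares equal to the typing spelling it replaces, so callers of AccessControlledRuntime and execute_with_access_control see no behavioral change. A quick sanity check:

    from typing import Optional, Union

    # PEP 604 unions (Python 3.10+) compare equal to the
    # typing.Optional / typing.Union forms they replace.
    assert (int | None) == Optional[int] == Union[int, None]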
kailash/runtime/async_local.py CHANGED
@@ -6,8 +6,8 @@ database queries, or LLM interactions.
 """
 
 import logging
-from datetime import datetime, timezone
-from typing import Any, Dict, Optional, Tuple
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 
@@ -60,9 +60,9 @@ class AsyncLocalRuntime:
     async def execute(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager] = None,
-        parameters: Optional[Dict[str, Dict[str, Any]]] = None,
-    ) -> Tuple[Dict[str, Any], Optional[str]]:
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute a workflow asynchronously.
 
         Args:
@@ -144,10 +144,10 @@ class AsyncLocalRuntime:
     async def _execute_workflow(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager],
-        run_id: Optional[str],
-        parameters: Dict[str, Dict[str, Any]],
-    ) -> Dict[str, Any]:
+        task_manager: TaskManager | None,
+        run_id: str | None,
+        parameters: dict[str, dict[str, Any]],
+    ) -> dict[str, Any]:
         """Execute the workflow nodes asynchronously.
 
         Args:
@@ -195,7 +195,7 @@ class AsyncLocalRuntime:
                     run_id=run_id,
                     node_id=node_id,
                     node_type=node_instance.__class__.__name__,
-                    started_at=datetime.now(timezone.utc),
+                    started_at=datetime.now(UTC),
                 )
             except Exception as e:
                 self.logger.warning(
@@ -220,7 +220,7 @@ class AsyncLocalRuntime:
                 task.update_status(TaskStatus.RUNNING)
 
                 # Execute node - check if it supports async execution
-                start_time = datetime.now(timezone.utc)
+                start_time = datetime.now(UTC)
 
                 if isinstance(node_instance, AsyncNode):
                     # Use async execution
@@ -229,9 +229,7 @@ class AsyncLocalRuntime:
                     # Fall back to synchronous execution
                     outputs = node_instance.run(**inputs)
 
-                execution_time = (
-                    datetime.now(timezone.utc) - start_time
-                ).total_seconds()
+                execution_time = (datetime.now(UTC) - start_time).total_seconds()
 
                 # Store outputs
                 node_outputs[node_id] = outputs
@@ -245,7 +243,7 @@ class AsyncLocalRuntime:
                    task.update_status(
                         TaskStatus.COMPLETED,
                         result=outputs,
-                        ended_at=datetime.now(timezone.utc),
+                        ended_at=datetime.now(UTC),
                         metadata={"execution_time": execution_time},
                     )
 
@@ -262,7 +260,7 @@ class AsyncLocalRuntime:
                     task.update_status(
                         TaskStatus.FAILED,
                         error=str(e),
-                        ended_at=datetime.now(timezone.utc),
+                        ended_at=datetime.now(UTC),
                     )
 
                 # Determine if we should continue or stop
@@ -287,9 +285,9 @@ class AsyncLocalRuntime:
         workflow: Workflow,
         node_id: str,
         node_instance: Any,
-        node_outputs: Dict[str, Dict[str, Any]],
-        parameters: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        node_outputs: dict[str, dict[str, Any]],
+        parameters: dict[str, Any],
+    ) -> dict[str, Any]:
         """Prepare inputs for a node execution.
 
         Args:
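The timezone.utc to UTC substitution running through AsyncLocalRuntime relies on datetime.UTC, an alias added in Python 3.11. It is the same object, so the timestamps recorded on tasks are unchanged:

    from datetime import UTC, datetime, timezone

    # datetime.UTC (Python 3.11+) is literally timezone.utc,
    # so datetime.now(UTC) yields the same aware datetime.
    assert UTC is timezone.utc
    print(datetime.now(UTC).isoformat())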
kailash/runtime/docker.py CHANGED
@@ -21,7 +21,7 @@ import subprocess
 import sys
 import tempfile
 from pathlib import Path
-from typing import Any, Dict, Optional, Tuple
+from typing import Any
 
 from kailash.nodes.base import Node
 
@@ -50,8 +50,8 @@ class DockerNodeWrapper:
         node: Node,
         node_id: str,
         base_image: str = "python:3.11-slim",
-        work_dir: Optional[Path] = None,
-        sdk_path: Optional[Path] = None,
+        work_dir: Path | None = None,
+        sdk_path: Path | None = None,
     ):
         """
         Initialize a Docker node wrapper.
@@ -335,7 +335,7 @@ ENTRYPOINT ["/app/entrypoint.py"]
             logger.error(error_msg)
             raise RuntimeError(error_msg)
 
-    def prepare_inputs(self, inputs: Dict[str, Any]):
+    def prepare_inputs(self, inputs: dict[str, Any]):
         """
         Prepare inputs for node execution.
 
@@ -349,8 +349,8 @@ ENTRYPOINT ["/app/entrypoint.py"]
     def run_container(
         self,
         network: str = None,
-        env_vars: Dict[str, str] = None,
-        resource_limits: Dict[str, str] = None,
+        env_vars: dict[str, str] = None,
+        resource_limits: dict[str, str] = None,
     ) -> bool:
         """
         Run the node in a Docker container.
@@ -416,13 +416,13 @@ ENTRYPOINT ["/app/entrypoint.py"]
         # Check if there's an error file
         error_file = self.output_dir / "error.json"
         if error_file.exists():
-            with open(error_file, "r") as f:
+            with open(error_file) as f:
                 error_data = json.load(f)
                 error_msg = f"Node execution error: {error_data.get('error', 'Unknown error')}"
 
         raise NodeExecutionError(error_msg)
 
-    def get_results(self) -> Dict[str, Any]:
+    def get_results(self) -> dict[str, Any]:
         """
         Get the results of node execution.
 
@@ -431,12 +431,12 @@ ENTRYPOINT ["/app/entrypoint.py"]
         """
         result_file = self.output_dir / "result.json"
         if result_file.exists():
-            with open(result_file, "r") as f:
+            with open(result_file) as f:
                 return json.load(f)
 
         error_file = self.output_dir / "error.json"
         if error_file.exists():
-            with open(error_file, "r") as f:
+            with open(error_file) as f:
                 error_data = json.load(f)
                 raise NodeExecutionError(
                     f"Node {self.node_id} execution failed: {error_data.get('error', 'Unknown error')}"
@@ -464,10 +464,10 @@ class DockerRuntime:
         self,
         base_image: str = "python:3.11-slim",
         network_name: str = "kailash-network",
-        work_dir: Optional[str] = None,
-        sdk_path: Optional[str] = None,
-        resource_limits: Optional[Dict[str, str]] = None,
-        task_manager: Optional[TaskManager] = None,
+        work_dir: str | None = None,
+        sdk_path: str | None = None,
+        resource_limits: dict[str, str] | None = None,
+        task_manager: TaskManager | None = None,
     ):
         """
         Initialize the Docker runtime.
@@ -516,14 +516,14 @@ class DockerRuntime:
         # Track node wrappers
         self.node_wrappers = {}
 
-    def _create_task_run(self, workflow: Workflow) -> Optional[str]:
+    def _create_task_run(self, workflow: Workflow) -> str | None:
         """Create a task run if task manager is available."""
         if self.task_manager:
             return self.task_manager.create_run(workflow.name)
         return None
 
     def _update_task_status(
-        self, run_id: Optional[str], node_id: str, status: str, output: Any = None
+        self, run_id: str | None, node_id: str, status: str, output: Any = None
     ):
         """Update task status if task manager is available."""
         if self.task_manager and run_id:
@@ -534,9 +534,7 @@ class DockerRuntime:
             )
             self.task_manager.update_run_status(run_id, "failed", error_msg)
 
-    def _complete_task_run(
-        self, run_id: Optional[str], status: str, result: Any = None
-    ):
+    def _complete_task_run(self, run_id: str | None, status: str, result: Any = None):
         """Complete task run if task manager is available."""
         if self.task_manager and run_id:
             if status == "completed":
@@ -567,9 +565,9 @@ class DockerRuntime:
     def execute(
         self,
         workflow: Workflow,
-        inputs: Dict[str, Dict[str, Any]] = None,
-        node_resource_limits: Dict[str, Dict[str, str]] = None,
-    ) -> Tuple[Dict[str, Dict[str, Any]], str]:
+        inputs: dict[str, dict[str, Any]] = None,
+        node_resource_limits: dict[str, dict[str, str]] = None,
+    ) -> tuple[dict[str, dict[str, Any]], str]:
         """
         Execute a workflow using Docker containers.

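Two details in the docker.py hunks are worth flagging. Dropping the explicit "r" from open() is behavior-preserving, since "r" is the default mode (text encoding still follows the locale unless passed explicitly). Meanwhile, parameters such as env_vars: dict[str, str] = None keep a None default without a | None union, a pattern type checkers flag as implicit Optional. A sketch of both points (the file name and the standalone function are illustrative):

    import json
    from pathlib import Path

    path = Path("result.json")
    path.write_text(json.dumps({"ok": True}), encoding="utf-8")

    # open(path) is equivalent to open(path, "r"); pass encoding
    # explicitly when the result must not depend on the locale.
    with open(path, encoding="utf-8") as f:
        assert json.load(f) == {"ok": True}

    # Explicit spelling that mypy/pyright accept without implicit-Optional
    # support; 0.3.2 leaves these docker.py parameters as dict[str, str] = None.
    def run_container(network: str | None = None, env_vars: dict[str, str] | None = None) -> bool:
        return True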
kailash/runtime/local.py CHANGED
@@ -40,8 +40,8 @@ Examples:
 """
 
 import logging
-from datetime import datetime, timezone
-from typing import Any, Dict, List, Optional, Tuple
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 
@@ -90,9 +90,9 @@ class LocalRuntime:
     def execute(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager] = None,
-        parameters: Optional[Dict[str, Dict[str, Any]]] = None,
-    ) -> Tuple[Dict[str, Any], Optional[str]]:
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute a workflow locally.
 
         Args:
@@ -197,10 +197,10 @@ class LocalRuntime:
     def _execute_workflow(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager],
-        run_id: Optional[str],
-        parameters: Dict[str, Dict[str, Any]],
-    ) -> Dict[str, Any]:
+        task_manager: TaskManager | None,
+        run_id: str | None,
+        parameters: dict[str, dict[str, Any]],
+    ) -> dict[str, Any]:
         """Execute the workflow nodes in topological order.
 
         Args:
@@ -273,7 +273,7 @@ class LocalRuntime:
                     run_id=run_id,
                     node_id=node_id,
                     node_type=node_instance.__class__.__name__,
-                    started_at=datetime.now(timezone.utc),
+                    started_at=datetime.now(UTC),
                     metadata=node_metadata,
                 )
                 # Start the task
@@ -329,7 +329,7 @@ class LocalRuntime:
                         task.task_id,
                         TaskStatus.COMPLETED,
                         result=outputs,
-                        ended_at=datetime.now(timezone.utc),
+                        ended_at=datetime.now(UTC),
                         metadata={"execution_time": performance_metrics.duration},
                     )
 
@@ -350,7 +350,7 @@ class LocalRuntime:
                         task.task_id,
                         TaskStatus.FAILED,
                         error=str(e),
-                        ended_at=datetime.now(timezone.utc),
+                        ended_at=datetime.now(UTC),
                     )
 
                 # Determine if we should continue
@@ -375,9 +375,9 @@ class LocalRuntime:
         workflow: Workflow,
         node_id: str,
         node_instance: Node,
-        node_outputs: Dict[str, Dict[str, Any]],
-        parameters: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        node_outputs: dict[str, dict[str, Any]],
+        parameters: dict[str, Any],
+    ) -> dict[str, Any]:
         """Prepare inputs for a node execution.
 
         Args:
@@ -476,7 +476,7 @@ class LocalRuntime:
         # Future: implement configurable error handling policies
         return has_dependents
 
-    def validate_workflow(self, workflow: Workflow) -> List[str]:
+    def validate_workflow(self, workflow: Workflow) -> list[str]:
         """Validate a workflow before execution.
 
         Args:
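LocalRuntime.execute now advertises tuple[dict[str, Any], str | None]: the per-node results plus an optional run id. A hedged usage sketch based only on the signature shown above (the workflow object, node id, and parameter names are assumptions, not taken from the diff):

    from kailash.runtime.local import LocalRuntime

    runtime = LocalRuntime()
    # parameters maps node id -> {parameter: value}, matching the
    # dict[str, dict[str, Any]] annotation; names here are invented.
    results, run_id = runtime.execute(
        workflow,  # assumed: a kailash Workflow built elsewhere
        parameters={"reader": {"file_path": "data.csv"}},
    )
    print(run_id, sorted(results))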
kailash/runtime/parallel.py CHANGED
@@ -8,8 +8,8 @@ import asyncio
 import logging
 import time
 from collections import deque
-from datetime import datetime, timezone
-from typing import Any, Deque, Dict, Optional, Set, Tuple
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 
@@ -66,9 +66,9 @@ class ParallelRuntime:
     async def execute(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager] = None,
-        parameters: Optional[Dict[str, Dict[str, Any]]] = None,
-    ) -> Tuple[Dict[str, Any], Optional[str]]:
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute a workflow with parallel node execution.
 
         Args:
@@ -159,10 +159,10 @@ class ParallelRuntime:
     async def _execute_workflow_parallel(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager],
-        run_id: Optional[str],
-        parameters: Dict[str, Dict[str, Any]],
-    ) -> Dict[str, Any]:
+        task_manager: TaskManager | None,
+        run_id: str | None,
+        parameters: dict[str, dict[str, Any]],
+    ) -> dict[str, Any]:
         """Execute the workflow nodes in parallel where possible.
 
         This method uses a dynamic scheduling approach to run independent nodes
@@ -323,11 +323,11 @@ class ParallelRuntime:
         self,
         workflow: Workflow,
         node_id: str,
-        node_outputs: Dict[str, Dict[str, Any]],
-        parameters: Dict[str, Any],
-        task_manager: Optional[TaskManager],
-        run_id: Optional[str],
-    ) -> Tuple[Dict[str, Any], bool]:
+        node_outputs: dict[str, dict[str, Any]],
+        parameters: dict[str, Any],
+        task_manager: TaskManager | None,
+        run_id: str | None,
+    ) -> tuple[dict[str, Any], bool]:
         """Execute a single node asynchronously.
 
         Args:
@@ -359,7 +359,7 @@ class ParallelRuntime:
                     run_id=run_id,
                     node_id=node_id,
                     node_type=node_instance.__class__.__name__,
-                    started_at=datetime.now(timezone.utc),
+                    started_at=datetime.now(UTC),
                 )
             except Exception as e:
                 self.logger.warning(f"Failed to create task for node '{node_id}': {e}")
@@ -409,7 +409,7 @@ class ParallelRuntime:
                 task.update_status(
                     TaskStatus.COMPLETED,
                     result=outputs,
-                    ended_at=datetime.now(timezone.utc),
+                    ended_at=datetime.now(UTC),
                     metadata={"execution_time": performance_metrics.duration},
                 )
 
@@ -431,7 +431,7 @@ class ParallelRuntime:
             # Update task status
             if task:
                 task.update_status(
-                    TaskStatus.FAILED, error=str(e), ended_at=datetime.now(timezone.utc)
+                    TaskStatus.FAILED, error=str(e), ended_at=datetime.now(UTC)
                 )
 
             # Return error result
@@ -448,9 +448,9 @@ class ParallelRuntime:
         workflow: Workflow,
         node_id: str,
         node_instance: Any,
-        node_outputs: Dict[str, Dict[str, Any]],
-        parameters: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        node_outputs: dict[str, dict[str, Any]],
+        parameters: dict[str, Any],
+    ) -> dict[str, Any]:
         """Prepare inputs for a node execution.
 
         Args:
@@ -520,9 +520,9 @@ class ParallelRuntime:
         self,
         workflow: Workflow,
         failed_node: str,
-        failed_nodes: Set[str],
-        pending_nodes: Set[str],
-        ready_nodes: Deque[str],
+        failed_nodes: set[str],
+        pending_nodes: set[str],
+        ready_nodes: deque[str],
     ) -> None:
         """Mark all dependent nodes as failed.

kailash/runtime/parallel_cyclic.py CHANGED
@@ -2,8 +2,8 @@
 
 import logging
 from concurrent.futures import ThreadPoolExecutor, as_completed
-from datetime import datetime, timezone
-from typing import Any, Dict, List, Optional, Set, Tuple
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 
@@ -56,10 +56,10 @@ class ParallelCyclicRuntime:
     def execute(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager] = None,
-        parameters: Optional[Dict[str, Dict[str, Any]]] = None,
-        parallel_nodes: Optional[Set[str]] = None,
-    ) -> Tuple[Dict[str, Any], Optional[str]]:
+        task_manager: TaskManager | None = None,
+        parameters: dict[str, dict[str, Any]] | None = None,
+        parallel_nodes: set[str] | None = None,
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute a workflow with parallel and cyclic support.
 
         Args:
@@ -108,9 +108,9 @@ class ParallelCyclicRuntime:
     def _execute_cyclic_workflow(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager],
-        parameters: Optional[Dict[str, Dict[str, Any]]],
-    ) -> Tuple[Dict[str, Any], str]:
+        task_manager: TaskManager | None,
+        parameters: dict[str, dict[str, Any]] | None,
+    ) -> tuple[dict[str, Any], str]:
         """Execute a cyclic workflow with potential parallel optimizations.
 
         Args:
@@ -141,10 +141,10 @@ class ParallelCyclicRuntime:
     def _execute_parallel_dag(
         self,
         workflow: Workflow,
-        task_manager: Optional[TaskManager],
-        parameters: Optional[Dict[str, Dict[str, Any]]],
-        parallel_nodes: Optional[Set[str]],
-    ) -> Tuple[Dict[str, Any], Optional[str]]:
+        task_manager: TaskManager | None,
+        parameters: dict[str, dict[str, Any]] | None,
+        parallel_nodes: set[str] | None,
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute a DAG workflow with parallel node execution.
 
         Args:
@@ -252,8 +252,8 @@ class ParallelCyclicRuntime:
             raise
 
     def _analyze_parallel_groups(
-        self, workflow: Workflow, parallel_nodes: Optional[Set[str]]
-    ) -> List[List[str]]:
+        self, workflow: Workflow, parallel_nodes: set[str] | None
+    ) -> list[list[str]]:
         """Analyze workflow to identify groups of nodes that can be executed in parallel.
 
         Args:
@@ -320,11 +320,11 @@ class ParallelCyclicRuntime:
         self,
         workflow: Workflow,
         node_id: str,
-        previous_results: Dict[str, Any],
-        parameters: Optional[Dict[str, Dict[str, Any]]],
-        task_manager: Optional[TaskManager],
-        run_id: Optional[str],
-    ) -> Dict[str, Any]:
+        previous_results: dict[str, Any],
+        parameters: dict[str, dict[str, Any]] | None,
+        task_manager: TaskManager | None,
+        run_id: str | None,
+    ) -> dict[str, Any]:
         """Execute a single node in isolation.
 
         Args:
@@ -356,7 +356,7 @@ class ParallelCyclicRuntime:
                 run_id=run_id,
                 node_id=node_id,
                 node_type=node_instance.__class__.__name__,
-                started_at=datetime.now(timezone.utc),
+                started_at=datetime.now(UTC),
                 metadata={},
             )
             if task:
@@ -397,7 +397,7 @@ class ParallelCyclicRuntime:
                     task.task_id,
                     TaskStatus.COMPLETED,
                     result=outputs,
-                    ended_at=datetime.now(timezone.utc),
+                    ended_at=datetime.now(UTC),
                     metadata={"execution_time": performance_metrics.duration},
                 )
                 task_manager.update_task_metrics(task.task_id, task_metrics)
@@ -415,7 +415,7 @@ class ParallelCyclicRuntime:
                     task.task_id,
                     TaskStatus.FAILED,
                     error=str(e),
-                    ended_at=datetime.now(timezone.utc),
+                    ended_at=datetime.now(UTC),
                 )
 
             self.logger.error(f"Node {node_id} failed: {e}", exc_info=self.debug)
@@ -428,9 +428,9 @@ class ParallelCyclicRuntime:
         workflow: Workflow,
         node_id: str,
         node_instance: Node,
-        previous_results: Dict[str, Any],
-        parameters: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        previous_results: dict[str, Any],
+        parameters: dict[str, Any],
+    ) -> dict[str, Any]:
         """Prepare inputs for a node execution in parallel context.
 
         Args:
@@ -509,7 +509,7 @@ class ParallelCyclicRuntime:
             return False
 
     def _should_stop_on_group_error(
-        self, workflow: Workflow, failed_node: str, node_group: List[str]
+        self, workflow: Workflow, failed_node: str, node_group: list[str]
     ) -> bool:
         """Determine if execution should stop when a node in a parallel group fails.
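Taken together, the new syntax sets a hard interpreter floor: built-in generics need Python 3.9+, X | None in eagerly evaluated signatures needs 3.10+, and datetime.UTC needs 3.11+. The METADATA hunk (+53 -5) is not reproduced in this diff, so whether Requires-Python was bumped to match is not visible here; a defensive check of the following kind would surface a mismatch early (an illustration, not kailash code):

    import sys

    # datetime.UTC, used throughout the 0.3.2 runtime modules, first
    # appeared in Python 3.11; older interpreters fail at import time
    # with ImportError on "from datetime import UTC".
    if sys.version_info < (3, 11):
        raise RuntimeError("kailash 0.3.2 runtime modules require Python 3.11+")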