kailash 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +40 -39
  3. kailash/api/auth.py +26 -32
  4. kailash/api/custom_nodes.py +29 -29
  5. kailash/api/custom_nodes_secure.py +35 -35
  6. kailash/api/database.py +17 -17
  7. kailash/api/gateway.py +19 -19
  8. kailash/api/mcp_integration.py +24 -23
  9. kailash/api/studio.py +45 -45
  10. kailash/api/workflow_api.py +8 -8
  11. kailash/cli/commands.py +5 -8
  12. kailash/manifest.py +42 -42
  13. kailash/mcp/__init__.py +1 -1
  14. kailash/mcp/ai_registry_server.py +20 -20
  15. kailash/mcp/client.py +9 -11
  16. kailash/mcp/client_new.py +10 -10
  17. kailash/mcp/server.py +1 -2
  18. kailash/mcp/server_enhanced.py +449 -0
  19. kailash/mcp/servers/ai_registry.py +6 -6
  20. kailash/mcp/utils/__init__.py +31 -0
  21. kailash/mcp/utils/cache.py +267 -0
  22. kailash/mcp/utils/config.py +263 -0
  23. kailash/mcp/utils/formatters.py +293 -0
  24. kailash/mcp/utils/metrics.py +418 -0
  25. kailash/nodes/ai/agents.py +9 -9
  26. kailash/nodes/ai/ai_providers.py +33 -34
  27. kailash/nodes/ai/embedding_generator.py +31 -32
  28. kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
  29. kailash/nodes/ai/iterative_llm_agent.py +48 -48
  30. kailash/nodes/ai/llm_agent.py +32 -33
  31. kailash/nodes/ai/models.py +13 -13
  32. kailash/nodes/ai/self_organizing.py +44 -44
  33. kailash/nodes/api/auth.py +11 -11
  34. kailash/nodes/api/graphql.py +13 -13
  35. kailash/nodes/api/http.py +19 -19
  36. kailash/nodes/api/monitoring.py +20 -20
  37. kailash/nodes/api/rate_limiting.py +9 -13
  38. kailash/nodes/api/rest.py +29 -29
  39. kailash/nodes/api/security.py +44 -47
  40. kailash/nodes/base.py +21 -23
  41. kailash/nodes/base_async.py +7 -7
  42. kailash/nodes/base_cycle_aware.py +12 -12
  43. kailash/nodes/base_with_acl.py +5 -5
  44. kailash/nodes/code/python.py +56 -55
  45. kailash/nodes/data/directory.py +6 -6
  46. kailash/nodes/data/event_generation.py +10 -10
  47. kailash/nodes/data/file_discovery.py +28 -31
  48. kailash/nodes/data/readers.py +8 -8
  49. kailash/nodes/data/retrieval.py +10 -10
  50. kailash/nodes/data/sharepoint_graph.py +17 -17
  51. kailash/nodes/data/sources.py +5 -5
  52. kailash/nodes/data/sql.py +13 -13
  53. kailash/nodes/data/streaming.py +25 -25
  54. kailash/nodes/data/vector_db.py +22 -22
  55. kailash/nodes/data/writers.py +7 -7
  56. kailash/nodes/logic/async_operations.py +17 -17
  57. kailash/nodes/logic/convergence.py +11 -11
  58. kailash/nodes/logic/loop.py +4 -4
  59. kailash/nodes/logic/operations.py +11 -11
  60. kailash/nodes/logic/workflow.py +8 -9
  61. kailash/nodes/mixins/mcp.py +17 -17
  62. kailash/nodes/mixins.py +8 -10
  63. kailash/nodes/transform/chunkers.py +3 -3
  64. kailash/nodes/transform/formatters.py +7 -7
  65. kailash/nodes/transform/processors.py +10 -10
  66. kailash/runtime/access_controlled.py +18 -18
  67. kailash/runtime/async_local.py +17 -19
  68. kailash/runtime/docker.py +20 -22
  69. kailash/runtime/local.py +16 -16
  70. kailash/runtime/parallel.py +23 -23
  71. kailash/runtime/parallel_cyclic.py +27 -27
  72. kailash/runtime/runner.py +6 -6
  73. kailash/runtime/testing.py +20 -20
  74. kailash/sdk_exceptions.py +0 -58
  75. kailash/security.py +14 -26
  76. kailash/tracking/manager.py +38 -38
  77. kailash/tracking/metrics_collector.py +15 -14
  78. kailash/tracking/models.py +53 -53
  79. kailash/tracking/storage/base.py +7 -17
  80. kailash/tracking/storage/database.py +22 -23
  81. kailash/tracking/storage/filesystem.py +38 -40
  82. kailash/utils/export.py +21 -21
  83. kailash/utils/templates.py +2 -3
  84. kailash/visualization/api.py +30 -34
  85. kailash/visualization/dashboard.py +17 -17
  86. kailash/visualization/performance.py +16 -16
  87. kailash/visualization/reports.py +25 -27
  88. kailash/workflow/builder.py +8 -8
  89. kailash/workflow/convergence.py +13 -12
  90. kailash/workflow/cycle_analyzer.py +30 -32
  91. kailash/workflow/cycle_builder.py +12 -12
  92. kailash/workflow/cycle_config.py +16 -15
  93. kailash/workflow/cycle_debugger.py +40 -40
  94. kailash/workflow/cycle_exceptions.py +29 -29
  95. kailash/workflow/cycle_profiler.py +21 -21
  96. kailash/workflow/cycle_state.py +20 -22
  97. kailash/workflow/cyclic_runner.py +44 -44
  98. kailash/workflow/graph.py +40 -40
  99. kailash/workflow/mermaid_visualizer.py +9 -11
  100. kailash/workflow/migration.py +22 -22
  101. kailash/workflow/mock_registry.py +6 -6
  102. kailash/workflow/runner.py +9 -9
  103. kailash/workflow/safety.py +12 -13
  104. kailash/workflow/state.py +8 -11
  105. kailash/workflow/templates.py +19 -19
  106. kailash/workflow/validation.py +14 -14
  107. kailash/workflow/visualization.py +22 -22
  108. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/METADATA +53 -5
  109. kailash-0.3.1.dist-info/RECORD +136 -0
  110. kailash-0.3.0.dist-info/RECORD +0 -130
  111. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
  112. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
  113. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
  114. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
kailash/workflow/graph.py CHANGED
@@ -4,8 +4,8 @@ import json
 import logging
 import uuid
 import warnings
-from datetime import datetime, timezone
-from typing import Any, Dict, List, Optional, Tuple
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 import yaml
@@ -38,10 +38,10 @@ class NodeInstance(BaseModel):
 
     node_id: str = Field(..., description="Unique identifier for this instance")
     node_type: str = Field(..., description="Type of node")
-    config: Dict[str, Any] = Field(
+    config: dict[str, Any] = Field(
         default_factory=dict, description="Node configuration"
     )
-    position: Tuple[float, float] = Field(default=(0, 0), description="Visual position")
+    position: tuple[float, float] = Field(default=(0, 0), description="Visual position")
 
 
 class Connection(BaseModel):
@@ -59,23 +59,21 @@ class CyclicConnection(Connection):
     cycle: bool = Field(
         default=False, description="Whether this connection creates a cycle"
     )
-    max_iterations: Optional[int] = Field(
+    max_iterations: int | None = Field(
         default=None, description="Maximum cycle iterations"
     )
-    convergence_check: Optional[str] = Field(
+    convergence_check: str | None = Field(
         default=None, description="Convergence condition expression"
     )
-    cycle_id: Optional[str] = Field(
+    cycle_id: str | None = Field(
         default=None, description="Logical cycle group identifier"
     )
-    timeout: Optional[float] = Field(
-        default=None, description="Cycle timeout in seconds"
-    )
-    memory_limit: Optional[int] = Field(default=None, description="Memory limit in MB")
-    condition: Optional[str] = Field(
+    timeout: float | None = Field(default=None, description="Cycle timeout in seconds")
+    memory_limit: int | None = Field(default=None, description="Memory limit in MB")
+    condition: str | None = Field(
         default=None, description="Conditional cycle routing expression"
     )
-    parent_cycle: Optional[str] = Field(
+    parent_cycle: str | None = Field(
         default=None, description="Parent cycle for nested cycles"
     )
 
@@ -90,7 +88,7 @@ class Workflow:
         description: str = "",
         version: str = "1.0.0",
         author: str = "",
-        metadata: Optional[Dict[str, Any]] = None,
+        metadata: dict[str, Any] | None = None,
     ):
         """Initialize a workflow.
 
@@ -118,7 +116,7 @@ class Workflow:
         if "version" not in self.metadata and version:
             self.metadata["version"] = version
         if "created_at" not in self.metadata:
-            self.metadata["created_at"] = datetime.now(timezone.utc).isoformat()
+            self.metadata["created_at"] = datetime.now(UTC).isoformat()
 
         # Create directed graph for the workflow
         self.graph = nx.DiGraph()
@@ -212,7 +210,7 @@ class Workflow:
         logger.info(f"Added node '{node_id}' of type '{node_type}'")
 
     def _add_node_internal(
-        self, node_id: str, node_type: str, config: Optional[Dict[str, Any]] = None
+        self, node_id: str, node_type: str, config: dict[str, Any] | None = None
    ) -> None:
         """Add a node to the workflow (internal method).
 
@@ -229,15 +227,15 @@ class Workflow:
         self,
         source_node: str,
         target_node: str,
-        mapping: Optional[Dict[str, str]] = None,
+        mapping: dict[str, str] | None = None,
         cycle: bool = False,
-        max_iterations: Optional[int] = None,
-        convergence_check: Optional[str] = None,
-        cycle_id: Optional[str] = None,
-        timeout: Optional[float] = None,
-        memory_limit: Optional[int] = None,
-        condition: Optional[str] = None,
-        parent_cycle: Optional[str] = None,
+        max_iterations: int | None = None,
+        convergence_check: str | None = None,
+        cycle_id: str | None = None,
+        timeout: float | None = None,
+        memory_limit: int | None = None,
+        condition: str | None = None,
+        parent_cycle: str | None = None,
     ) -> None:
         """Connect two nodes in the workflow.
 
@@ -443,7 +441,7 @@ class Workflow:
             f"Connected '{source_node}' to '{target_node}' with mapping: {mapping}"
         )
 
-    def create_cycle(self, cycle_id: Optional[str] = None):
+    def create_cycle(self, cycle_id: str | None = None):
         """
         Create a new CycleBuilder for intuitive cycle configuration.
 
@@ -541,7 +539,7 @@ class Workflow:
             source_node=from_node, target_node=to_node, mapping={from_output: to_input}
         )
 
-    def get_node(self, node_id: str) -> Optional[Node]:
+    def get_node(self, node_id: str) -> Node | None:
         """Get node instance by ID.
 
         Args:
@@ -561,7 +559,7 @@ class Workflow:
         # Fallback to _node_instances
         return self._node_instances.get(node_id)
 
-    def separate_dag_and_cycle_edges(self) -> Tuple[List[Tuple], List[Tuple]]:
+    def separate_dag_and_cycle_edges(self) -> tuple[list[tuple], list[tuple]]:
         """Separate DAG edges from cycle edges.
 
         Returns:
@@ -578,7 +576,7 @@ class Workflow:
 
         return dag_edges, cycle_edges
 
-    def get_cycle_groups(self) -> Dict[str, List[Tuple]]:
+    def get_cycle_groups(self) -> dict[str, list[tuple]]:
         """Get cycle edges grouped by cycle_id with enhanced multi-node cycle detection.
 
         For multi-node cycles like A → B → C → A where only C → A is marked as cycle,
@@ -678,7 +676,7 @@ class Workflow:
         _, cycle_edges = self.separate_dag_and_cycle_edges()
         return len(cycle_edges) > 0
 
-    def get_execution_order(self) -> List[str]:
+    def get_execution_order(self) -> list[str]:
         """Get topological execution order for nodes, handling cycles gracefully.
 
         Returns:
@@ -711,7 +709,7 @@ class Workflow:
         # This shouldn't happen, but handle gracefully
         raise WorkflowValidationError("Unable to determine execution order")
 
-    def validate(self, runtime_parameters: Optional[Dict[str, Any]] = None) -> None:
+    def validate(self, runtime_parameters: dict[str, Any] | None = None) -> None:
         """Validate the workflow structure.
 
         Args:
@@ -843,8 +841,8 @@ class Workflow:
         )
 
     def run(
-        self, task_manager: Optional[TaskManager] = None, **overrides
-    ) -> Tuple[Dict[str, Any], Optional[str]]:
+        self, task_manager: TaskManager | None = None, **overrides
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute the workflow.
 
         Args:
@@ -863,9 +861,9 @@ class Workflow:
 
     def execute(
         self,
-        inputs: Optional[Dict[str, Any]] = None,
-        task_manager: Optional[TaskManager] = None,
-    ) -> Dict[str, Any]:
+        inputs: dict[str, Any] | None = None,
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
         """Execute the workflow.
 
         Args:
@@ -963,7 +961,9 @@ class Workflow:
             )
 
             # Process each mapping pair
-            for i, (src, dst) in enumerate(zip(from_outputs, to_inputs)):
+            for i, (src, dst) in enumerate(
+                zip(from_outputs, to_inputs, strict=False)
+            ):
                 if src in source_results:
                     node_inputs[dst] = source_results[src]
 
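The one hunk above that goes beyond annotation rewriting makes zip's truncation behavior explicit. A minimal sketch of what the strict flag does (it exists since Python 3.10, PEP 618; the values are illustrative, not from the SDK):

# strict=False keeps zip's historical behavior: silently truncate to the
# shorter iterable, exactly what the 0.3.0 code did implicitly.
pairs = list(zip(["a", "b", "c"], [1, 2], strict=False))
assert pairs == [("a", 1), ("b", 2)]  # "c" is dropped without error
# zip(..., strict=True) would instead raise ValueError on a length mismatch.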
@@ -1045,7 +1045,7 @@ class Workflow:
                 f"Failed to export workflow to '{output_path}': {e}"
             ) from e
 
-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         """Convert workflow to dictionary.
 
         Returns:
@@ -1107,7 +1107,7 @@ class Workflow:
         raise ValueError(f"Unsupported format: {format}")
 
     @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> "Workflow":
+    def from_dict(cls, data: dict[str, Any]) -> "Workflow":
         """Create workflow from dictionary.
 
         Args:
@@ -1228,9 +1228,9 @@ class Workflow:
         self,
         state_model: BaseModel,
         wrap_state: bool = True,
-        task_manager: Optional[TaskManager] = None,
+        task_manager: TaskManager | None = None,
         **overrides,
-    ) -> Tuple[BaseModel, Dict[str, Any]]:
+    ) -> tuple[BaseModel, dict[str, Any]]:
         """Execute the workflow with state management.
 
         This method provides a simplified interface for executing workflows
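Every other hunk in graph.py, and most hunks in the files below, applies the same mechanical modernization: typing.Dict/List/Optional/Tuple become builtin generics (PEP 585) and X | None unions (PEP 604), and timezone.utc becomes the UTC alias added in Python 3.11. A before/after sketch with illustrative names, not SDK code:

# 0.3.0 style:
# from typing import Any, Dict, Optional
# def node_config(node_id: str) -> Optional[Dict[str, Any]]: ...

# 0.3.1 style (X | Y unions need Python >= 3.10, datetime.UTC needs >= 3.11)
from datetime import UTC, datetime
from typing import Any

def node_config(node_id: str) -> dict[str, Any] | None:
    """Illustrative only: shows the annotation style, not a real SDK helper."""
    return {"node_id": node_id, "created_at": datetime.now(UTC).isoformat()}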
kailash/workflow/mermaid_visualizer.py CHANGED
@@ -5,8 +5,6 @@ offering a text-based format that can be embedded in markdown files and
 rendered in various documentation platforms.
 """
 
-from typing import Dict, Optional, Tuple
-
 from kailash.workflow.graph import Workflow
 
 
@@ -27,7 +25,7 @@ class MermaidVisualizer:
         self,
         workflow: Workflow,
         direction: str = "TB",
-        node_styles: Optional[Dict[str, str]] = None,
+        node_styles: dict[str, str] | None = None,
     ):
         """Initialize the Mermaid visualizer.
 
@@ -40,7 +38,7 @@ class MermaidVisualizer:
         self.direction = direction
         self.node_styles = node_styles or self._default_node_styles()
 
-    def _default_node_styles(self) -> Dict[str, str]:
+    def _default_node_styles(self) -> dict[str, str]:
         """Get default node styles for different node types.
 
         Returns:
@@ -79,7 +77,7 @@ class MermaidVisualizer:
         # Use line break without parentheses to avoid Mermaid parsing issues
         return f"{clean_type}<br/>{node_id}"
 
-    def _get_pattern_edge_label(self, source: str, target: str, data: Dict) -> str:
+    def _get_pattern_edge_label(self, source: str, target: str, data: dict) -> str:
         """Get a pattern-oriented edge label.
 
         Args:
@@ -262,7 +260,7 @@ class MermaidVisualizer:
             return node_type[:-4]
         return node_type
 
-    def _get_node_shape(self, node_type: str) -> Tuple[str, str]:
+    def _get_node_shape(self, node_type: str) -> tuple[str, str]:
         """Get the shape brackets for a node type.
 
         Args:
@@ -460,7 +458,7 @@ class MermaidVisualizer:
 
         return "\n".join(lines)
 
-    def _get_edge_label(self, source: str, target: str, data: Dict) -> str:
+    def _get_edge_label(self, source: str, target: str, data: dict) -> str:
         """Get label for an edge.
 
         Args:
@@ -491,7 +489,7 @@ class MermaidVisualizer:
 
         return ""
 
-    def generate_markdown(self, title: Optional[str] = None) -> str:
+    def generate_markdown(self, title: str | None = None) -> str:
         """Generate a complete markdown section with the Mermaid diagram.
 
         Args:
@@ -559,7 +557,7 @@ class MermaidVisualizer:
 
         return "\n".join(lines)
 
-    def save_markdown(self, filepath: str, title: Optional[str] = None) -> None:
+    def save_markdown(self, filepath: str, title: str | None = None) -> None:
         """Save the Mermaid diagram as a markdown file.
 
         Args:
@@ -596,7 +594,7 @@ def add_mermaid_to_workflow():
         visualizer = MermaidVisualizer(self, direction=direction)
         return visualizer.generate()
 
-    def to_mermaid_markdown(self, title: Optional[str] = None) -> str:
+    def to_mermaid_markdown(self, title: str | None = None) -> str:
         """Generate markdown with embedded Mermaid diagram.
 
         Args:
@@ -608,7 +606,7 @@ def add_mermaid_to_workflow():
         visualizer = MermaidVisualizer(self)
         return visualizer.generate_markdown(title)
 
-    def save_mermaid_markdown(self, filepath: str, title: Optional[str] = None) -> None:
+    def save_mermaid_markdown(self, filepath: str, title: str | None = None) -> None:
         """Save workflow as markdown with Mermaid diagram.
 
         Args:
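These hunks change only annotations, so the visualizer's usage is unchanged. A hedged sketch of the API as it appears in this diff (the import path and the Workflow constructor arguments are assumptions; only the later keyword parameters are visible above):

from kailash.workflow.graph import Workflow
from kailash.workflow.mermaid_visualizer import MermaidVisualizer  # path assumed

workflow = Workflow("wf-1", name="Demo")  # constructor args assumed; node setup omitted
viz = MermaidVisualizer(workflow, direction="TB")  # node_styles: dict[str, str] | None
print(viz.generate_markdown(title="Demo Workflow"))  # markdown with embedded Mermaid
viz.save_markdown("demo.md", title="Demo Workflow")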
kailash/workflow/migration.py CHANGED
@@ -103,7 +103,7 @@ See Also:
 import re
 from collections import defaultdict
 from dataclasses import dataclass
-from typing import Any, Dict, List, Optional
+from typing import Any
 
 from . import Workflow
 from .templates import CycleTemplates
@@ -113,11 +113,11 @@ from .templates import CycleTemplates
 class CyclificationOpportunity:
     """Represents an opportunity to convert a DAG pattern to a cycle."""
 
-    nodes: List[str]
+    nodes: list[str]
     pattern_type: str
     confidence: float
     description: str
-    suggested_convergence: Optional[str] = None
+    suggested_convergence: str | None = None
     estimated_benefit: str = "unknown"
     implementation_complexity: str = "medium"
 
@@ -127,10 +127,10 @@ class CyclificationSuggestion:
     """Detailed suggestion for converting nodes to a cycle."""
 
     opportunity: CyclificationOpportunity
-    implementation_steps: List[str]
+    implementation_steps: list[str]
     code_example: str
     expected_outcome: str
-    risks: List[str]
+    risks: list[str]
 
 
 class DAGToCycleConverter:
@@ -150,9 +150,9 @@ class DAGToCycleConverter:
         """
         self.workflow = workflow
         self.graph = workflow.graph
-        self.opportunities: List[CyclificationOpportunity] = []
+        self.opportunities: list[CyclificationOpportunity] = []
 
-    def analyze_cyclification_opportunities(self) -> List[CyclificationOpportunity]:
+    def analyze_cyclification_opportunities(self) -> list[CyclificationOpportunity]:
         """
         Analyze workflow for patterns that could benefit from cyclification.
 
@@ -322,7 +322,7 @@ class DAGToCycleConverter:
             )
         self.opportunities.append(opportunity)
 
-    def _find_related_nodes(self, node_id: str) -> List[str]:
+    def _find_related_nodes(self, node_id: str) -> list[str]:
         """Find nodes that are closely related to the given node."""
         related = []
 
@@ -343,7 +343,7 @@ class DAGToCycleConverter:
         # Check if there's an edge between the nodes in either direction
         return graph.has_edge(node1, node2) or graph.has_edge(node2, node1)
 
-    def generate_detailed_suggestions(self) -> List[CyclificationSuggestion]:
+    def generate_detailed_suggestions(self) -> list[CyclificationSuggestion]:
         """
         Generate detailed suggestions with implementation guidance.
 
@@ -602,9 +602,9 @@ print(f"Created convergence cycle: {{cycle_id}}")
 
     def convert_to_cycle(
         self,
-        nodes: List[str],
+        nodes: list[str],
         convergence_strategy: str = "error_reduction",
-        cycle_type: Optional[str] = None,
+        cycle_type: str | None = None,
         **kwargs,
     ) -> str:
         """
@@ -643,7 +643,7 @@ print(f"Created convergence cycle: {{cycle_id}}")
         else:
             raise ValueError(f"Unknown cycle type: {cycle_type}")
 
-    def _detect_cycle_type(self, nodes: List[str], strategy: str) -> str:
+    def _detect_cycle_type(self, nodes: list[str], strategy: str) -> str:
         """Detect the most appropriate cycle type for given nodes and strategy."""
         if strategy == "error_reduction" or strategy == "quality_improvement":
             return "optimization"
@@ -659,7 +659,7 @@ print(f"Created convergence cycle: {{cycle_id}}")
         # Default to optimization for unknown strategies
         return "optimization"
 
-    def _convert_to_optimization_cycle(self, nodes: List[str], **kwargs) -> str:
+    def _convert_to_optimization_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to optimization cycle."""
         if len(nodes) < 2:
             raise ValueError("Optimization cycle requires at least 2 nodes")
@@ -668,14 +668,14 @@ print(f"Created convergence cycle: {{cycle_id}}")
             self.workflow, processor_node=nodes[0], evaluator_node=nodes[1], **kwargs
         )
 
-    def _convert_to_retry_cycle(self, nodes: List[str], **kwargs) -> str:
+    def _convert_to_retry_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to retry cycle."""
         if len(nodes) < 1:
             raise ValueError("Retry cycle requires at least 1 node")
 
         return CycleTemplates.retry_cycle(self.workflow, target_node=nodes[0], **kwargs)
 
-    def _convert_to_data_quality_cycle(self, nodes: List[str], **kwargs) -> str:
+    def _convert_to_data_quality_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to data quality cycle."""
         if len(nodes) < 2:
             raise ValueError("Data quality cycle requires at least 2 nodes")
@@ -684,7 +684,7 @@ print(f"Created convergence cycle: {{cycle_id}}")
             self.workflow, cleaner_node=nodes[0], validator_node=nodes[1], **kwargs
         )
 
-    def _convert_to_batch_processing_cycle(self, nodes: List[str], **kwargs) -> str:
+    def _convert_to_batch_processing_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to batch processing cycle."""
         if len(nodes) < 1:
             raise ValueError("Batch processing cycle requires at least 1 node")
@@ -693,7 +693,7 @@ print(f"Created convergence cycle: {{cycle_id}}")
             self.workflow, processor_node=nodes[0], **kwargs
         )
 
-    def _convert_to_convergence_cycle(self, nodes: List[str], **kwargs) -> str:
+    def _convert_to_convergence_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to convergence cycle."""
         if len(nodes) < 1:
             raise ValueError("Convergence cycle requires at least 1 node")
@@ -702,7 +702,7 @@ print(f"Created convergence cycle: {{cycle_id}}")
             self.workflow, processor_node=nodes[0], **kwargs
         )
 
-    def generate_migration_report(self) -> Dict[str, Any]:
+    def generate_migration_report(self) -> dict[str, Any]:
         """
         Generate comprehensive migration report with analysis and recommendations.
 
@@ -745,8 +745,8 @@ print(f"Created convergence cycle: {{cycle_id}}")
         }
 
     def _generate_migration_recommendations(
-        self, opportunities: List[CyclificationOpportunity]
-    ) -> List[str]:
+        self, opportunities: list[CyclificationOpportunity]
+    ) -> list[str]:
         """Generate high-level recommendations for migration."""
         recommendations = []
 
@@ -779,8 +779,8 @@ print(f"Created convergence cycle: {{cycle_id}}")
         return recommendations
 
     def _suggest_implementation_order(
-        self, opportunities: List[CyclificationOpportunity]
-    ) -> List[Dict[str, Any]]:
+        self, opportunities: list[CyclificationOpportunity]
+    ) -> list[dict[str, Any]]:
         """Suggest order for implementing cyclification opportunities."""
         # Sort by: confidence desc, complexity asc (low=1, medium=2, high=3)
         complexity_score = {"low": 1, "medium": 2, "high": 3}
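A hedged usage sketch of the converter API pieced together from these hunks; the node ids and the workflow object are placeholders:

from kailash.workflow.migration import DAGToCycleConverter  # module path per the file list

converter = DAGToCycleConverter(workflow)  # any existing Workflow instance
for opp in converter.analyze_cyclification_opportunities():
    print(opp.pattern_type, opp.confidence, opp.suggested_convergence)

report = converter.generate_migration_report()  # dict[str, Any] with recommendations
cycle_id = converter.convert_to_cycle(
    ["cleaner", "validator"],                    # placeholder node ids
    convergence_strategy="quality_improvement",  # _detect_cycle_type maps this to "optimization"
)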
kailash/workflow/mock_registry.py CHANGED
@@ -1,6 +1,6 @@
 """Mock node registry for tests."""
 
-from typing import Any, Dict, Type
+from typing import Any
 
 from kailash.nodes.base import Node, NodeRegistry
 from kailash.sdk_exceptions import NodeConfigurationError
@@ -15,15 +15,15 @@ class MockNode(Node):
         self.name = name or node_id
         self.config = kwargs.copy()
 
-    def process(self, data: Dict[str, Any]) -> Dict[str, Any]:
+    def process(self, data: dict[str, Any]) -> dict[str, Any]:
         """Process data."""
         return {"value": data.get("value", 0) * 2}
 
-    def execute(self, **kwargs) -> Dict[str, Any]:
+    def execute(self, **kwargs) -> dict[str, Any]:
         """Execute node with keyword arguments."""
         return self.process(kwargs)
 
-    def get_parameters(self) -> Dict[str, Any]:
+    def get_parameters(self) -> dict[str, Any]:
         """Get node parameters."""
         return {}
 
@@ -50,10 +50,10 @@ for node_type in NODE_TYPES:
 class MockRegistry:
     """Mock node registry for testing."""
 
-    _registry: Dict[str, Type[Node]] = {node_type: MockNode for node_type in NODE_TYPES}
+    _registry: dict[str, type[Node]] = {node_type: MockNode for node_type in NODE_TYPES}
 
     @classmethod
-    def get(cls, node_type: str) -> Type[Node]:
+    def get(cls, node_type: str) -> type[Node]:
         """Get node class by type name."""
         if node_type not in cls._registry:
             raise NodeConfigurationError(
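A sketch of how the mock registry behaves per the hunks above; the MockNode constructor signature and the registered type name are assumptions:

from kailash.workflow.mock_registry import MockRegistry  # path assumed

node_cls = MockRegistry.get("SomeNodeType")  # placeholder; must be in NODE_TYPES,
                                             # else NodeConfigurationError is raised
node = node_cls("n1")                        # constructor args assumed from __init__ body
assert node.execute(value=21) == {"value": 42}  # process() doubles "value"
assert node.get_parameters() == {}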
kailash/workflow/runner.py CHANGED
@@ -5,7 +5,7 @@ allowing for complex multi-stage processing pipelines.
 """
 
 import logging
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any
 
 from pydantic import BaseModel
 
@@ -23,8 +23,8 @@ class WorkflowConnection:
         self,
         source_workflow_id: str,
         target_workflow_id: str,
-        condition: Optional[Dict[str, Any]] = None,
-        state_mapping: Optional[Dict[str, str]] = None,
+        condition: dict[str, Any] | None = None,
+        state_mapping: dict[str, str] | None = None,
     ):
         """Initialize a workflow connection.
 
@@ -88,7 +88,7 @@ class WorkflowConnection:
         )
         return True
 
-    def map_state(self, state: BaseModel) -> Dict[str, Any]:
+    def map_state(self, state: BaseModel) -> dict[str, Any]:
         """Map state fields according to the mapping configuration.
 
         Args:
@@ -142,8 +142,8 @@ class WorkflowRunner:
         self,
         source_workflow_id: str,
         target_workflow_id: str,
-        condition: Optional[Dict[str, Any]] = None,
-        state_mapping: Optional[Dict[str, str]] = None,
+        condition: dict[str, Any] | None = None,
+        state_mapping: dict[str, str] | None = None,
     ) -> None:
         """Connect two workflows.
 
@@ -182,7 +182,7 @@ class WorkflowRunner:
 
     def get_next_workflows(
         self, current_workflow_id: str, state: BaseModel
-    ) -> List[Tuple[str, Dict[str, Any]]]:
+    ) -> list[tuple[str, dict[str, Any]]]:
         """Get the next workflows to execute based on current state.
 
         Args:
@@ -206,9 +206,9 @@ class WorkflowRunner:
         self,
         entry_workflow_id: str,
         initial_state: BaseModel,
-        task_manager: Optional[TaskManager] = None,
+        task_manager: TaskManager | None = None,
         max_steps: int = 10,  # Prevent infinite loops
-    ) -> Tuple[BaseModel, Dict[str, Dict[str, Any]]]:
+    ) -> tuple[BaseModel, dict[str, dict[str, Any]]]:
         """Execute a sequence of connected workflows.
 
         Args:
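Annotations only here as well. A hedged sketch of the connection API from the visible signatures; the runner constructor, the condition dict shape, and the field names are assumptions:

from pydantic import BaseModel
from kailash.workflow.runner import WorkflowConnection, WorkflowRunner  # path assumed

class PipelineState(BaseModel):  # stand-in state model
    records: int = 0

state = PipelineState(records=100)

conn = WorkflowConnection(
    source_workflow_id="ingest",
    target_workflow_id="report",
    condition=None,                              # condition dict shape not shown in this diff
    state_mapping={"records": "input_records"},  # rename state fields between workflows
)
mapped = conn.map_state(state)  # -> dict[str, Any] for the target workflow

runner = WorkflowRunner()  # constructor not shown in this diff; assumed no-arg
next_steps = runner.get_next_workflows("ingest", state)  # -> list[tuple[str, dict[str, Any]]]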
kailash/workflow/safety.py CHANGED
@@ -4,7 +4,6 @@ import logging
 import threading
 import time
 from contextlib import contextmanager
-from typing import Dict, Optional, Set
 
 import psutil
 
@@ -18,13 +17,13 @@ class CycleSafetyManager:
 
     def __init__(self):
         """Initialize cycle safety manager."""
-        self.active_cycles: Dict[str, "CycleMonitor"] = {}
+        self.active_cycles: dict[str, "CycleMonitor"] = {}
        self.global_memory_limit = None  # MB
        self.global_timeout = None  # seconds
        self._lock = threading.Lock()
 
     def set_global_limits(
-        self, memory_limit: Optional[int] = None, timeout: Optional[float] = None
+        self, memory_limit: int | None = None, timeout: float | None = None
     ) -> None:
         """Set global resource limits.
 
@@ -38,9 +37,9 @@ class CycleSafetyManager:
     def start_monitoring(
         self,
         cycle_id: str,
-        max_iterations: Optional[int] = None,
-        timeout: Optional[float] = None,
-        memory_limit: Optional[int] = None,
+        max_iterations: int | None = None,
+        timeout: float | None = None,
+        memory_limit: int | None = None,
     ) -> "CycleMonitor":
         """Start monitoring a cycle.
 
@@ -88,7 +87,7 @@ class CycleSafetyManager:
             monitor.stop()
             del self.active_cycles[cycle_id]
 
-    def check_all_cycles(self) -> Dict[str, bool]:
+    def check_all_cycles(self) -> dict[str, bool]:
         """Check all active cycles for violations.
 
         Returns:
@@ -102,7 +101,7 @@ class CycleSafetyManager:
 
         return violations
 
-    def get_cycle_status(self, cycle_id: str) -> Optional[Dict[str, any]]:
+    def get_cycle_status(self, cycle_id: str) -> dict[str, any] | None:
         """Get status of a specific cycle.
 
         Args:
@@ -116,7 +115,7 @@ class CycleSafetyManager:
             return self.active_cycles[cycle_id].get_status()
         return None
 
-    def detect_deadlocks(self) -> Set[str]:
+    def detect_deadlocks(self) -> set[str]:
         """Detect potential deadlocks in active cycles.
 
         Returns:
@@ -138,9 +137,9 @@ class CycleMonitor:
     def __init__(
         self,
         cycle_id: str,
-        max_iterations: Optional[int] = None,
-        timeout: Optional[float] = None,
-        memory_limit: Optional[int] = None,
+        max_iterations: int | None = None,
+        timeout: float | None = None,
+        memory_limit: int | None = None,
     ):
         """Initialize cycle monitor.
 
@@ -256,7 +255,7 @@ class CycleMonitor:
         time_since_progress = time.time() - self.last_progress_time
         return time_since_progress > stall_threshold
 
-    def get_status(self) -> Dict[str, any]:
+    def get_status(self) -> dict[str, any]:
         """Get current monitor status.
 
         Returns:
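A hedged sketch of the safety-manager API from these hunks; the limits are illustrative:

from kailash.workflow.safety import CycleSafetyManager  # path per the file list

manager = CycleSafetyManager()
manager.set_global_limits(memory_limit=512, timeout=300.0)  # MB, seconds

monitor = manager.start_monitoring(
    "cycle-1", max_iterations=100, timeout=60.0, memory_limit=256
)
violations = manager.check_all_cycles()       # dict[str, bool]: cycle_id -> violation flag
status = manager.get_cycle_status("cycle-1")  # dict[str, any] | None, per the hunk above
stalled = manager.detect_deadlocks()          # set[str] of potentially stalled cycles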