kailash 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
Files changed (117)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +40 -39
  3. kailash/api/auth.py +26 -32
  4. kailash/api/custom_nodes.py +29 -29
  5. kailash/api/custom_nodes_secure.py +35 -35
  6. kailash/api/database.py +17 -17
  7. kailash/api/gateway.py +19 -19
  8. kailash/api/mcp_integration.py +24 -23
  9. kailash/api/studio.py +45 -45
  10. kailash/api/workflow_api.py +8 -8
  11. kailash/cli/commands.py +5 -8
  12. kailash/manifest.py +42 -42
  13. kailash/mcp/__init__.py +1 -1
  14. kailash/mcp/ai_registry_server.py +20 -20
  15. kailash/mcp/client.py +9 -11
  16. kailash/mcp/client_new.py +10 -10
  17. kailash/mcp/server.py +1 -2
  18. kailash/mcp/server_enhanced.py +449 -0
  19. kailash/mcp/servers/ai_registry.py +6 -6
  20. kailash/mcp/utils/__init__.py +31 -0
  21. kailash/mcp/utils/cache.py +267 -0
  22. kailash/mcp/utils/config.py +263 -0
  23. kailash/mcp/utils/formatters.py +293 -0
  24. kailash/mcp/utils/metrics.py +418 -0
  25. kailash/nodes/ai/agents.py +9 -9
  26. kailash/nodes/ai/ai_providers.py +33 -34
  27. kailash/nodes/ai/embedding_generator.py +31 -32
  28. kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
  29. kailash/nodes/ai/iterative_llm_agent.py +48 -48
  30. kailash/nodes/ai/llm_agent.py +32 -33
  31. kailash/nodes/ai/models.py +13 -13
  32. kailash/nodes/ai/self_organizing.py +44 -44
  33. kailash/nodes/api/__init__.py +5 -0
  34. kailash/nodes/api/auth.py +11 -11
  35. kailash/nodes/api/graphql.py +13 -13
  36. kailash/nodes/api/http.py +19 -19
  37. kailash/nodes/api/monitoring.py +463 -0
  38. kailash/nodes/api/rate_limiting.py +9 -13
  39. kailash/nodes/api/rest.py +29 -29
  40. kailash/nodes/api/security.py +819 -0
  41. kailash/nodes/base.py +24 -26
  42. kailash/nodes/base_async.py +7 -7
  43. kailash/nodes/base_cycle_aware.py +12 -12
  44. kailash/nodes/base_with_acl.py +5 -5
  45. kailash/nodes/code/python.py +56 -55
  46. kailash/nodes/data/__init__.py +6 -0
  47. kailash/nodes/data/directory.py +6 -6
  48. kailash/nodes/data/event_generation.py +297 -0
  49. kailash/nodes/data/file_discovery.py +598 -0
  50. kailash/nodes/data/readers.py +8 -8
  51. kailash/nodes/data/retrieval.py +10 -10
  52. kailash/nodes/data/sharepoint_graph.py +17 -17
  53. kailash/nodes/data/sources.py +5 -5
  54. kailash/nodes/data/sql.py +13 -13
  55. kailash/nodes/data/streaming.py +25 -25
  56. kailash/nodes/data/vector_db.py +22 -22
  57. kailash/nodes/data/writers.py +7 -7
  58. kailash/nodes/logic/async_operations.py +17 -17
  59. kailash/nodes/logic/convergence.py +11 -11
  60. kailash/nodes/logic/loop.py +4 -4
  61. kailash/nodes/logic/operations.py +11 -11
  62. kailash/nodes/logic/workflow.py +8 -9
  63. kailash/nodes/mixins/mcp.py +17 -17
  64. kailash/nodes/mixins.py +8 -10
  65. kailash/nodes/transform/chunkers.py +3 -3
  66. kailash/nodes/transform/formatters.py +7 -7
  67. kailash/nodes/transform/processors.py +11 -11
  68. kailash/runtime/access_controlled.py +18 -18
  69. kailash/runtime/async_local.py +18 -20
  70. kailash/runtime/docker.py +24 -26
  71. kailash/runtime/local.py +55 -31
  72. kailash/runtime/parallel.py +25 -25
  73. kailash/runtime/parallel_cyclic.py +29 -29
  74. kailash/runtime/runner.py +6 -6
  75. kailash/runtime/testing.py +22 -22
  76. kailash/sdk_exceptions.py +0 -58
  77. kailash/security.py +14 -26
  78. kailash/tracking/manager.py +38 -38
  79. kailash/tracking/metrics_collector.py +15 -14
  80. kailash/tracking/models.py +53 -53
  81. kailash/tracking/storage/base.py +7 -17
  82. kailash/tracking/storage/database.py +22 -23
  83. kailash/tracking/storage/filesystem.py +38 -40
  84. kailash/utils/export.py +21 -21
  85. kailash/utils/templates.py +8 -9
  86. kailash/visualization/api.py +30 -34
  87. kailash/visualization/dashboard.py +17 -17
  88. kailash/visualization/performance.py +32 -19
  89. kailash/visualization/reports.py +30 -28
  90. kailash/workflow/builder.py +8 -8
  91. kailash/workflow/convergence.py +13 -12
  92. kailash/workflow/cycle_analyzer.py +38 -33
  93. kailash/workflow/cycle_builder.py +12 -12
  94. kailash/workflow/cycle_config.py +16 -15
  95. kailash/workflow/cycle_debugger.py +40 -40
  96. kailash/workflow/cycle_exceptions.py +29 -29
  97. kailash/workflow/cycle_profiler.py +21 -21
  98. kailash/workflow/cycle_state.py +20 -22
  99. kailash/workflow/cyclic_runner.py +45 -45
  100. kailash/workflow/graph.py +57 -45
  101. kailash/workflow/mermaid_visualizer.py +9 -11
  102. kailash/workflow/migration.py +22 -22
  103. kailash/workflow/mock_registry.py +6 -6
  104. kailash/workflow/runner.py +9 -9
  105. kailash/workflow/safety.py +12 -13
  106. kailash/workflow/state.py +8 -11
  107. kailash/workflow/templates.py +19 -19
  108. kailash/workflow/validation.py +14 -14
  109. kailash/workflow/visualization.py +32 -24
  110. kailash-0.3.1.dist-info/METADATA +476 -0
  111. kailash-0.3.1.dist-info/RECORD +136 -0
  112. kailash-0.2.2.dist-info/METADATA +0 -121
  113. kailash-0.2.2.dist-info/RECORD +0 -126
  114. {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
  115. {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
  116. {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
  117. {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
kailash/workflow/builder.py +8 -8

@@ -2,7 +2,7 @@

 import logging
 import uuid
-from typing import Any, Dict, List, Optional
+from typing import Any

 from kailash.sdk_exceptions import ConnectionError, WorkflowValidationError
 from kailash.workflow.graph import Workflow
@@ -15,15 +15,15 @@ class WorkflowBuilder:

     def __init__(self):
         """Initialize an empty workflow builder."""
-        self.nodes: Dict[str, Dict[str, Any]] = {}
-        self.connections: List[Dict[str, str]] = []
-        self._metadata: Dict[str, Any] = {}
+        self.nodes: dict[str, dict[str, Any]] = {}
+        self.connections: list[dict[str, str]] = []
+        self._metadata: dict[str, Any] = {}

     def add_node(
         self,
         node_type: str,
-        node_id: Optional[str] = None,
-        config: Optional[Dict[str, Any]] = None,
+        node_id: str | None = None,
+        config: dict[str, Any] | None = None,
     ) -> str:
         """
         Add a node to the workflow.
@@ -106,7 +106,7 @@ class WorkflowBuilder:
         self._metadata.update(kwargs)
         return self

-    def build(self, workflow_id: Optional[str] = None, **kwargs) -> Workflow:
+    def build(self, workflow_id: str | None = None, **kwargs) -> Workflow:
         """
         Build and return a Workflow instance.

@@ -193,7 +193,7 @@ class WorkflowBuilder:
         return self

     @classmethod
-    def from_dict(cls, config: Dict[str, Any]) -> "WorkflowBuilder":
+    def from_dict(cls, config: dict[str, Any]) -> "WorkflowBuilder":
         """
         Create builder from dictionary configuration.

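The builder.py hunks are representative of the pattern applied across most of the 117 changed files: typing.Dict, List, Optional and Union annotations are replaced with PEP 585 built-in generics (dict, list) and PEP 604 unions (X | None), which, absent a from __future__ import annotations escape hatch, implies a Python 3.10+ floor. A minimal before/after sketch of the style, reusing the add_node signature from this diff (the function body is invented for illustration):

    from typing import Any

    # kailash 0.2.2 style (removed in this release):
    #   def add_node(self, node_type: str, node_id: Optional[str] = None,
    #                config: Optional[Dict[str, Any]] = None) -> str: ...

    # kailash 0.3.1 style, matching the WorkflowBuilder.add_node hunk above;
    # the body is a stand-in, not the package's implementation.
    def add_node(node_type: str, node_id: str | None = None,
                 config: dict[str, Any] | None = None) -> str:
        config = config or {}
        return node_id or f"{node_type}_1"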
kailash/workflow/convergence.py +13 -12

@@ -2,7 +2,8 @@

 import logging
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any

 if TYPE_CHECKING:
     from kailash.workflow.cycle_state import CycleState
@@ -14,7 +15,7 @@ class ConvergenceCondition(ABC):
     """Base class for cycle convergence conditions."""

     @abstractmethod
-    def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+    def evaluate(self, results: dict[str, Any], cycle_state: "CycleState") -> bool:
         """Evaluate if cycle should terminate.

         Args:
@@ -48,7 +49,7 @@ class ExpressionCondition(ConvergenceCondition):
         """
         self.expression = expression

-    def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+    def evaluate(self, results: dict[str, Any], cycle_state: "CycleState") -> bool:
         """Evaluate expression with results and cycle state context."""
         # Create evaluation context
         context = {
@@ -117,8 +118,8 @@ class CallbackCondition(ConvergenceCondition):

     def __init__(
         self,
-        callback: Callable[[Dict[str, Any], "CycleState"], bool],
-        name: Optional[str] = None,
+        callback: Callable[[dict[str, Any], "CycleState"], bool],
+        name: str | None = None,
     ):
         """Initialize with callback function.

@@ -129,7 +130,7 @@ class CallbackCondition(ConvergenceCondition):
         self.callback = callback
         self.name = name or callback.__name__

-    def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+    def evaluate(self, results: dict[str, Any], cycle_state: "CycleState") -> bool:
         """Evaluate callback with results and cycle state."""
         try:
             return self.callback(results, cycle_state)
@@ -154,7 +155,7 @@ class MaxIterationsCondition(ConvergenceCondition):
         """
         self.max_iterations = max_iterations

-    def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+    def evaluate(self, results: dict[str, Any], cycle_state: "CycleState") -> bool:
         """Check if maximum iterations reached."""
         return cycle_state.iteration >= self.max_iterations

@@ -166,7 +167,7 @@ class MaxIterationsCondition(ConvergenceCondition):

 class CompoundCondition(ConvergenceCondition):
     """Combine multiple conditions with AND/OR logic."""
-    def __init__(self, conditions: List[ConvergenceCondition], operator: str = "OR"):
+    def __init__(self, conditions: list[ConvergenceCondition], operator: str = "OR"):
         """Initialize with list of conditions.

         Args:
@@ -178,7 +179,7 @@ class CompoundCondition(ConvergenceCondition):
         if self.operator not in ["AND", "OR"]:
             raise ValueError("Operator must be 'AND' or 'OR'")

-    def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+    def evaluate(self, results: dict[str, Any], cycle_state: "CycleState") -> bool:
         """Evaluate all conditions with specified operator."""
         evaluations = [cond.evaluate(results, cycle_state) for cond in self.conditions]

@@ -196,7 +197,7 @@ class CompoundCondition(ConvergenceCondition):

 class AdaptiveCondition(ConvergenceCondition):
     """Adaptive convergence that changes based on iteration progress."""
-    def __init__(self, stages: List[tuple[int, ConvergenceCondition]]):
+    def __init__(self, stages: list[tuple[int, ConvergenceCondition]]):
         """Initialize with stages of conditions.

         Args:
@@ -205,7 +206,7 @@ class AdaptiveCondition(ConvergenceCondition):
         """
         self.stages = sorted(stages, key=lambda x: x[0])

-    def evaluate(self, results: Dict[str, Any], cycle_state: "CycleState") -> bool:
+    def evaluate(self, results: dict[str, Any], cycle_state: "CycleState") -> bool:
         """Evaluate condition based on current iteration stage."""
         current_iteration = cycle_state.iteration

@@ -229,7 +230,7 @@


 def create_convergence_condition(
-    spec: Union[str, int, Callable, Dict],
+    spec: str | int | Callable | dict,
 ) -> ConvergenceCondition:
     """Factory function to create convergence conditions from various specs.

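The convergence.py hunks also expose the shape of the condition API: callbacks are now annotated Callable[[dict[str, Any], "CycleState"], bool], with Callable imported from collections.abc. A usage sketch assembled only from the constructors visible above; the "error" result key and the 0.01 threshold are invented for illustration:

    from kailash.workflow.convergence import (
        CallbackCondition,
        CompoundCondition,
        MaxIterationsCondition,
    )

    def error_below_threshold(results, cycle_state) -> bool:
        # "error" is a hypothetical key produced by the cycle's own nodes.
        return results.get("error", float("inf")) < 0.01

    # Terminate when the callback reports convergence OR 50 iterations have run
    # (operator="OR" is the default visible in CompoundCondition.__init__).
    stop = CompoundCondition(
        [CallbackCondition(error_below_threshold), MaxIterationsCondition(50)],
        operator="OR",
    )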
kailash/workflow/cycle_analyzer.py +38 -33

@@ -36,7 +36,7 @@ import json
 import logging
 from datetime import datetime
 from pathlib import Path
-from typing import Any, Dict, List, Optional
+from typing import Any

 from kailash.workflow.cycle_debugger import CycleDebugger, CycleExecutionTrace
 from kailash.workflow.cycle_profiler import CycleProfiler
@@ -70,7 +70,7 @@ class CycleAnalyzer:
         analysis_level: str = "standard",
         enable_profiling: bool = True,
         enable_debugging: bool = True,
-        output_directory: Optional[str] = None,
+        output_directory: str | None = None,
     ):
         """
         Initialize cycle analyzer.
@@ -84,7 +84,14 @@ class CycleAnalyzer:
         self.analysis_level = analysis_level
         self.enable_profiling = enable_profiling
         self.enable_debugging = enable_debugging
-        self.output_directory = Path(output_directory) if output_directory else None
+
+        # Set output directory - use centralized location if not specified
+        if output_directory:
+            self.output_directory = Path(output_directory)
+        else:
+            # Use centralized output directory by default
+            project_root = Path(__file__).parent.parent.parent.parent
+            self.output_directory = project_root / "data" / "outputs" / "cycle_analysis"

         # Initialize components based on configuration
         debug_level = {
@@ -106,9 +113,9 @@ class CycleAnalyzer:
         )

         # Analysis session tracking
-        self.current_session: Optional[str] = None
-        self.session_traces: List[CycleExecutionTrace] = []
-        self.analysis_history: List[Dict[str, Any]] = []
+        self.current_session: str | None = None
+        self.session_traces: list[CycleExecutionTrace] = []
+        self.analysis_history: list[dict[str, Any]] = []

         # Create output directory if specified
         if self.output_directory:
@@ -142,10 +149,10 @@ class CycleAnalyzer:
         self,
         cycle_id: str,
         workflow_id: str,
-        max_iterations: Optional[int] = None,
-        timeout: Optional[float] = None,
-        convergence_condition: Optional[str] = None,
-    ) -> Optional[CycleExecutionTrace]:
+        max_iterations: int | None = None,
+        timeout: float | None = None,
+        convergence_condition: str | None = None,
+    ) -> CycleExecutionTrace | None:
         """
         Start analysis for a new cycle execution.

@@ -186,10 +193,10 @@ class CycleAnalyzer:
     def track_iteration(
         self,
         trace: CycleExecutionTrace,
-        input_data: Dict[str, Any],
-        output_data: Dict[str, Any],
-        convergence_value: Optional[float] = None,
-        node_executions: Optional[List[str]] = None,
+        input_data: dict[str, Any],
+        output_data: dict[str, Any],
+        convergence_value: float | None = None,
+        node_executions: list[str] | None = None,
     ):
         """
         Track a single cycle iteration with input/output data.
@@ -227,7 +234,7 @@ class CycleAnalyzer:
         trace: CycleExecutionTrace,
         converged: bool,
         termination_reason: str,
-        convergence_iteration: Optional[int] = None,
+        convergence_iteration: int | None = None,
     ):
         """
         Complete cycle analysis and generate insights.
@@ -269,7 +276,7 @@ class CycleAnalyzer:
         if self.analysis_level == "comprehensive":
             self._generate_immediate_insights(trace)

-    def generate_cycle_report(self, trace: CycleExecutionTrace) -> Dict[str, Any]:
+    def generate_cycle_report(self, trace: CycleExecutionTrace) -> dict[str, Any]:
         """
         Generate comprehensive report for a single cycle.

@@ -326,9 +333,7 @@ class CycleAnalyzer:

         return report

-    def generate_session_report(
-        self, session_id: Optional[str] = None
-    ) -> Dict[str, Any]:
+    def generate_session_report(self, session_id: str | None = None) -> dict[str, Any]:
         """
         Generate comprehensive report for an analysis session.

@@ -407,7 +412,7 @@ class CycleAnalyzer:

         return report

-    def get_real_time_metrics(self, trace: CycleExecutionTrace) -> Dict[str, Any]:
+    def get_real_time_metrics(self, trace: CycleExecutionTrace) -> dict[str, Any]:
         """
         Get real-time metrics for an active cycle.

@@ -480,7 +485,7 @@ class CycleAnalyzer:

     def export_analysis_data(
         self,
-        filepath: Optional[str] = None,
+        filepath: str | None = None,
         format: str = "json",
         include_traces: bool = True,
     ):
@@ -598,7 +603,7 @@ class CycleAnalyzer:
                 f"Slow iterations detected for cycle '{trace.cycle_id}' - avg: {stats['avg_iteration_time']:.3f}s"
             )

-    def _generate_advanced_analysis(self, trace: CycleExecutionTrace) -> Dict[str, Any]:
+    def _generate_advanced_analysis(self, trace: CycleExecutionTrace) -> dict[str, Any]:
         """Generate advanced analysis insights for comprehensive mode."""
         convergence_trend = trace.get_convergence_trend()

@@ -650,8 +655,8 @@ class CycleAnalyzer:
         }

     def _generate_session_insights(
-        self, traces: List[CycleExecutionTrace]
-    ) -> Dict[str, Any]:
+        self, traces: list[CycleExecutionTrace]
+    ) -> dict[str, Any]:
         """Generate insights across multiple cycles in a session."""
         if not traces:
             return {}
@@ -685,7 +690,7 @@ class CycleAnalyzer:
         return insights

     def _calculate_real_time_health_score(
-        self, trace: CycleExecutionTrace, recent_iterations: List
+        self, trace: CycleExecutionTrace, recent_iterations: list
     ) -> float:
         """Calculate real-time health score for an active cycle."""
         score_components = []
@@ -725,8 +730,8 @@ class CycleAnalyzer:
         )

     def _generate_real_time_alerts(
-        self, trace: CycleExecutionTrace, recent_iterations: List
-    ) -> List[str]:
+        self, trace: CycleExecutionTrace, recent_iterations: list
+    ) -> list[str]:
         """Generate real-time alerts for potential issues."""
         alerts = []

@@ -764,7 +769,7 @@ class CycleAnalyzer:

         return alerts

-    def _calculate_convergence_stability(self, values: List[float]) -> float:
+    def _calculate_convergence_stability(self, values: list[float]) -> float:
         """Calculate stability score for convergence values."""
         if len(values) < 2:
             return 1.0
@@ -781,7 +786,7 @@ class CycleAnalyzer:
         # Lower CV means more stable
         return max(0.0, 1.0 - min(1.0, cv))

-    def _calculate_skewness(self, data: List[float]) -> float:
+    def _calculate_skewness(self, data: list[float]) -> float:
         """Calculate skewness of data distribution."""
         if len(data) < 3:
             return 0.0
@@ -798,7 +803,7 @@ class CycleAnalyzer:
         skewness = sum((x - mean_val) ** 3 for x in data) / (n * std_dev**3)
         return skewness

-    def _analyze_performance_trend(self, iteration_times: List[float]) -> str:
+    def _analyze_performance_trend(self, iteration_times: list[float]) -> str:
         """Analyze performance trend over iterations."""
         if len(iteration_times) < 3:
             return "insufficient_data"
@@ -823,7 +828,7 @@ class CycleAnalyzer:

     def _analyze_resource_efficiency(
         self, trace: CycleExecutionTrace
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """Analyze resource usage efficiency."""
         memory_values = [
             iter.memory_usage_mb for iter in trace.iterations if iter.memory_usage_mb
@@ -860,7 +865,7 @@ class CycleAnalyzer:

         return efficiency

-    def _export_cycle_report(self, report: Dict[str, Any], cycle_id: str):
+    def _export_cycle_report(self, report: dict[str, Any], cycle_id: str):
         """Export cycle report to file."""
         if not self.output_directory:
             return
@@ -875,7 +880,7 @@ class CycleAnalyzer:

         logger.debug(f"Exported cycle report to {filepath}")

-    def _export_session_report(self, report: Dict[str, Any], session_id: str):
+    def _export_session_report(self, report: dict[str, Any], session_id: str):
         """Export session report to file."""
         if not self.output_directory:
             return
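Beyond re-typing, cycle_analyzer.py changes behaviour: in 0.2.2 an omitted output_directory disabled report export entirely, whereas 0.3.1 now falls back to a centralized "data/outputs/cycle_analysis" directory computed four levels above the module, which for an installed wheel resolves relative to the environment rather than your project checkout. A hedged sketch of pinning the location explicitly; the keyword names come from the __init__ hunks above and the directory value is an example:

    from kailash.workflow.cycle_analyzer import CycleAnalyzer

    # Passing output_directory explicitly keeps reports in a predictable place
    # instead of the new default derived from the installed module's path.
    analyzer = CycleAnalyzer(
        analysis_level="comprehensive",        # the level that triggers immediate insights above
        enable_profiling=True,
        enable_debugging=True,
        output_directory="reports/cycle_analysis",  # hypothetical local path
    )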
kailash/workflow/cycle_builder.py +12 -12

@@ -40,7 +40,7 @@ Examples:
 """

 import logging
-from typing import TYPE_CHECKING, Dict, Optional
+from typing import TYPE_CHECKING

 from kailash.sdk_exceptions import WorkflowValidationError
 from kailash.workflow.cycle_exceptions import (
@@ -75,7 +75,7 @@ class CycleBuilder:
         ... .build()
     """

-    def __init__(self, workflow: "Workflow", cycle_id: Optional[str] = None):
+    def __init__(self, workflow: "Workflow", cycle_id: str | None = None):
         """
         Initialize a new CycleBuilder.

@@ -87,23 +87,23 @@ class CycleBuilder:
         self._cycle_id = cycle_id

         # Connection parameters
-        self._source_node: Optional[str] = None
-        self._target_node: Optional[str] = None
-        self._mapping: Optional[Dict[str, str]] = None
+        self._source_node: str | None = None
+        self._target_node: str | None = None
+        self._mapping: dict[str, str] | None = None

         # Cycle parameters
-        self._max_iterations: Optional[int] = None
-        self._convergence_check: Optional[str] = None
-        self._timeout: Optional[float] = None
-        self._memory_limit: Optional[int] = None
-        self._condition: Optional[str] = None
-        self._parent_cycle: Optional[str] = None
+        self._max_iterations: int | None = None
+        self._convergence_check: str | None = None
+        self._timeout: float | None = None
+        self._memory_limit: int | None = None
+        self._condition: str | None = None
+        self._parent_cycle: str | None = None

     def connect(
         self,
         source_node: str,
         target_node: str,
-        mapping: Optional[Dict[str, str]] = None,
+        mapping: dict[str, str] | None = None,
     ) -> "CycleBuilder":
         """
         Configure the source and target nodes for the cycle connection.
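cycle_builder.py only re-types the fluent builder. The sketch below is limited to the calls actually visible in this diff, the constructor and connect(); the node names and mapping keys are placeholders, and the intermediate fluent configuration methods are omitted because their names do not appear in these hunks:

    from kailash.workflow.cycle_builder import CycleBuilder
    from kailash.workflow.graph import Workflow

    def attach_retry_cycle(workflow: Workflow) -> CycleBuilder:
        """Wire a cycle between two hypothetical nodes of an existing workflow."""
        cycle = CycleBuilder(workflow, cycle_id="retry_cycle")
        cycle.connect(
            source_node="evaluator",
            target_node="optimizer",
            mapping={"score": "previous_score"},  # now typed dict[str, str] | None
        )
        # Remaining parameters (max_iterations, timeout, convergence, memory_limit)
        # are set through the fluent API before the trailing .build() shown in the
        # class docstring; their method names are not visible in this diff.
        return cycle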
kailash/workflow/cycle_config.py +16 -15

@@ -93,8 +93,9 @@ See Also:
 """

 import logging
+from collections.abc import Callable
 from dataclasses import dataclass, field
-from typing import Any, Callable, Dict, Optional, Union
+from typing import Any

 from kailash.workflow.cycle_exceptions import CycleConfigurationError

@@ -150,26 +151,26 @@ class CycleConfig:
     """

     # Termination conditions (at least one required)
-    max_iterations: Optional[int] = None
-    convergence_check: Optional[Union[str, Callable]] = None
-    timeout: Optional[float] = None
+    max_iterations: int | None = None
+    convergence_check: str | Callable | None = None
+    timeout: float | None = None

     # Safety and resource limits
-    memory_limit: Optional[int] = None
+    memory_limit: int | None = None
     iteration_safety_factor: float = 1.5  # Multiplier for max_iterations safety

     # Cycle metadata and identification
-    cycle_id: Optional[str] = None
-    parent_cycle: Optional[str] = None
+    cycle_id: str | None = None
+    parent_cycle: str | None = None
     description: str = ""

     # Execution control and conditions
-    condition: Optional[str] = None  # When to execute the cycle
+    condition: str | None = None  # When to execute the cycle
     priority: int = 0  # Execution priority for multiple cycles

     # Advanced configuration
-    retry_policy: Dict[str, Any] = field(default_factory=dict)
-    metadata: Dict[str, Any] = field(default_factory=dict)
+    retry_policy: dict[str, Any] = field(default_factory=dict)
+    metadata: dict[str, Any] = field(default_factory=dict)

     def __post_init__(self):
         """
@@ -378,7 +379,7 @@ class CycleConfig:
             ],
         )

-    def get_effective_max_iterations(self) -> Optional[int]:
+    def get_effective_max_iterations(self) -> int | None:
         """
         Get the effective maximum iterations with safety factor applied.

@@ -401,7 +402,7 @@ class CycleConfig:
             return None
         return int(self.max_iterations * self.iteration_safety_factor)

-    def to_dict(self) -> Dict[str, Any]:
+    def to_dict(self) -> dict[str, Any]:
         """
         Convert configuration to dictionary format.

@@ -433,7 +434,7 @@ class CycleConfig:
         return result

     @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> "CycleConfig":
+    def from_dict(cls, data: dict[str, Any]) -> "CycleConfig":
         """
         Create configuration from dictionary data.

@@ -508,7 +509,7 @@ class CycleConfig:

         return CycleConfig(**merged_data)

-    def create_template(self, template_name: str) -> Dict[str, Any]:
+    def create_template(self, template_name: str) -> dict[str, Any]:
         """
         Create a reusable template from this configuration.

@@ -600,7 +601,7 @@ class CycleTemplates:
     def optimization_loop(
         max_iterations: int = 100,
         convergence_threshold: float = 0.01,
-        timeout: Optional[float] = None,
+        timeout: float | None = None,
     ) -> CycleConfig:
         """
         Create configuration for optimization cycles.
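Finally, cycle_config.py keeps the same dataclass surface under the new annotations. A sketch using only the fields and methods visible in these hunks; the expression string and identifiers are illustrative:

    from kailash.workflow.cycle_config import CycleConfig, CycleTemplates

    config = CycleConfig(
        max_iterations=100,
        convergence_check="error < 0.01",  # str | Callable | None; the expression grammar is not shown here
        timeout=300.0,
        cycle_id="training_loop",          # hypothetical identifier
    )

    # iteration_safety_factor defaults to 1.5, so 100 -> 150 per the visible implementation.
    print(config.get_effective_max_iterations())

    # Round-trip through plain dicts via the to_dict()/from_dict() pair shown above.
    restored = CycleConfig.from_dict(config.to_dict())

    # Prebuilt template with the defaults shown in CycleTemplates.optimization_loop.
    opt = CycleTemplates.optimization_loop(max_iterations=100, convergence_threshold=0.01)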