kailash 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/__init__.py +5 -0
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +463 -0
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +819 -0
- kailash/nodes/base.py +24 -26
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +56 -55
- kailash/nodes/data/__init__.py +6 -0
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +297 -0
- kailash/nodes/data/file_discovery.py +598 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +11 -11
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +18 -20
- kailash/runtime/docker.py +24 -26
- kailash/runtime/local.py +55 -31
- kailash/runtime/parallel.py +25 -25
- kailash/runtime/parallel_cyclic.py +29 -29
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +22 -22
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +8 -9
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +32 -19
- kailash/visualization/reports.py +30 -28
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +38 -33
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +45 -45
- kailash/workflow/graph.py +57 -45
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +32 -24
- kailash-0.3.1.dist-info/METADATA +476 -0
- kailash-0.3.1.dist-info/RECORD +136 -0
- kailash-0.2.2.dist-info/METADATA +0 -121
- kailash-0.2.2.dist-info/RECORD +0 -126
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
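
Nearly all of the churn in the workflow modules below is the same mechanical change: annotations move from `typing`-module generics (Dict, List, Optional, Set) to the builtin generics of PEP 585 and the `X | None` union syntax of PEP 604. A before/after sketch of the pattern (illustrative only, not code from the package; the `|` syntax needs Python 3.10+ unless `from __future__ import annotations` is in effect):

# 0.2.2 style: typing-module generics
from typing import Dict, List, Optional

def cycle_status_old(cycle_id: str) -> Optional[Dict[str, List[str]]]:
    ...

# 0.3.1 style: builtin generics (PEP 585) plus union syntax (PEP 604)
def cycle_status_new(cycle_id: str) -> dict[str, list[str]] | None:
    ...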
kailash/workflow/migration.py
CHANGED
@@ -103,7 +103,7 @@ See Also:
 import re
 from collections import defaultdict
 from dataclasses import dataclass
-from typing import Any
+from typing import Any

 from . import Workflow
 from .templates import CycleTemplates
@@ -113,11 +113,11 @@ from .templates import CycleTemplates
 class CyclificationOpportunity:
     """Represents an opportunity to convert a DAG pattern to a cycle."""

-    nodes:
+    nodes: list[str]
     pattern_type: str
     confidence: float
     description: str
-    suggested_convergence:
+    suggested_convergence: str | None = None
     estimated_benefit: str = "unknown"
     implementation_complexity: str = "medium"

@@ -127,10 +127,10 @@ class CyclificationSuggestion:
     """Detailed suggestion for converting nodes to a cycle."""

     opportunity: CyclificationOpportunity
-    implementation_steps:
+    implementation_steps: list[str]
     code_example: str
     expected_outcome: str
-    risks:
+    risks: list[str]


 class DAGToCycleConverter:
@@ -150,9 +150,9 @@ class DAGToCycleConverter:
         """
         self.workflow = workflow
         self.graph = workflow.graph
-        self.opportunities:
+        self.opportunities: list[CyclificationOpportunity] = []

-    def analyze_cyclification_opportunities(self) ->
+    def analyze_cyclification_opportunities(self) -> list[CyclificationOpportunity]:
         """
         Analyze workflow for patterns that could benefit from cyclification.

@@ -322,7 +322,7 @@
             )
             self.opportunities.append(opportunity)

-    def _find_related_nodes(self, node_id: str) ->
+    def _find_related_nodes(self, node_id: str) -> list[str]:
         """Find nodes that are closely related to the given node."""
         related = []

@@ -343,7 +343,7 @@
         # Check if there's an edge between the nodes in either direction
         return graph.has_edge(node1, node2) or graph.has_edge(node2, node1)

-    def generate_detailed_suggestions(self) ->
+    def generate_detailed_suggestions(self) -> list[CyclificationSuggestion]:
         """
         Generate detailed suggestions with implementation guidance.

@@ -602,9 +602,9 @@ print(f"Created convergence cycle: {{cycle_id}}")

     def convert_to_cycle(
         self,
-        nodes:
+        nodes: list[str],
         convergence_strategy: str = "error_reduction",
-        cycle_type:
+        cycle_type: str | None = None,
         **kwargs,
     ) -> str:
         """
@@ -643,7 +643,7 @@
         else:
             raise ValueError(f"Unknown cycle type: {cycle_type}")

-    def _detect_cycle_type(self, nodes:
+    def _detect_cycle_type(self, nodes: list[str], strategy: str) -> str:
         """Detect the most appropriate cycle type for given nodes and strategy."""
         if strategy == "error_reduction" or strategy == "quality_improvement":
             return "optimization"
@@ -659,7 +659,7 @@
         # Default to optimization for unknown strategies
         return "optimization"

-    def _convert_to_optimization_cycle(self, nodes:
+    def _convert_to_optimization_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to optimization cycle."""
         if len(nodes) < 2:
             raise ValueError("Optimization cycle requires at least 2 nodes")
@@ -668,14 +668,14 @@
             self.workflow, processor_node=nodes[0], evaluator_node=nodes[1], **kwargs
         )

-    def _convert_to_retry_cycle(self, nodes:
+    def _convert_to_retry_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to retry cycle."""
         if len(nodes) < 1:
             raise ValueError("Retry cycle requires at least 1 node")

         return CycleTemplates.retry_cycle(self.workflow, target_node=nodes[0], **kwargs)

-    def _convert_to_data_quality_cycle(self, nodes:
+    def _convert_to_data_quality_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to data quality cycle."""
         if len(nodes) < 2:
             raise ValueError("Data quality cycle requires at least 2 nodes")
@@ -684,7 +684,7 @@
             self.workflow, cleaner_node=nodes[0], validator_node=nodes[1], **kwargs
         )

-    def _convert_to_batch_processing_cycle(self, nodes:
+    def _convert_to_batch_processing_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to batch processing cycle."""
         if len(nodes) < 1:
             raise ValueError("Batch processing cycle requires at least 1 node")
@@ -693,7 +693,7 @@
             self.workflow, processor_node=nodes[0], **kwargs
         )

-    def _convert_to_convergence_cycle(self, nodes:
+    def _convert_to_convergence_cycle(self, nodes: list[str], **kwargs) -> str:
         """Convert nodes to convergence cycle."""
         if len(nodes) < 1:
             raise ValueError("Convergence cycle requires at least 1 node")
@@ -702,7 +702,7 @@
             self.workflow, processor_node=nodes[0], **kwargs
         )

-    def generate_migration_report(self) ->
+    def generate_migration_report(self) -> dict[str, Any]:
         """
         Generate comprehensive migration report with analysis and recommendations.

@@ -745,8 +745,8 @@
         }

     def _generate_migration_recommendations(
-        self, opportunities:
-    ) ->
+        self, opportunities: list[CyclificationOpportunity]
+    ) -> list[str]:
         """Generate high-level recommendations for migration."""
         recommendations = []

@@ -779,8 +779,8 @@
         return recommendations

     def _suggest_implementation_order(
-        self, opportunities:
-    ) ->
+        self, opportunities: list[CyclificationOpportunity]
+    ) -> list[dict[str, Any]]:
         """Suggest order for implementing cyclification opportunities."""
         # Sort by: confidence desc, complexity asc (low=1, medium=2, high=3)
         complexity_score = {"low": 1, "medium": 2, "high": 3}
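
Read together, these hunks pin down DAGToCycleConverter's public surface. A hedged usage sketch against the annotated signatures above; the Workflow constructor arguments and node ids are assumptions:

from kailash.workflow import Workflow
from kailash.workflow.migration import DAGToCycleConverter

workflow = Workflow("wf-001", name="etl")  # constructor signature assumed

converter = DAGToCycleConverter(workflow)

# Now annotated to return list[CyclificationOpportunity].
for opp in converter.analyze_cyclification_opportunities():
    print(opp.pattern_type, opp.confidence, opp.suggested_convergence)

# nodes is list[str]; cycle_type is str | None and is auto-detected when None.
cycle_id = converter.convert_to_cycle(
    ["cleaner", "validator"],  # hypothetical node ids
    convergence_strategy="quality_improvement",
)

report = converter.generate_migration_report()  # -> dict[str, Any]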
kailash/workflow/mock_registry.py
CHANGED
@@ -1,6 +1,6 @@
 """Mock node registry for tests."""

-from typing import Any
+from typing import Any

 from kailash.nodes.base import Node, NodeRegistry
 from kailash.sdk_exceptions import NodeConfigurationError
@@ -15,15 +15,15 @@ class MockNode(Node):
         self.name = name or node_id
         self.config = kwargs.copy()

-    def process(self, data:
+    def process(self, data: dict[str, Any]) -> dict[str, Any]:
         """Process data."""
         return {"value": data.get("value", 0) * 2}

-    def execute(self, **kwargs) ->
+    def execute(self, **kwargs) -> dict[str, Any]:
         """Execute node with keyword arguments."""
         return self.process(kwargs)

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, Any]:
         """Get node parameters."""
         return {}

@@ -50,10 +50,10 @@ for node_type in NODE_TYPES:
 class MockRegistry:
     """Mock node registry for testing."""

-    _registry:
+    _registry: dict[str, type[Node]] = {node_type: MockNode for node_type in NODE_TYPES}

     @classmethod
-    def get(cls, node_type: str) ->
+    def get(cls, node_type: str) -> type[Node]:
         """Get node class by type name."""
         if node_type not in cls._registry:
             raise NodeConfigurationError(
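
A short sketch of the test doubles as annotated above; the MockNode constructor signature and the registered type names are assumptions, but the process/execute bodies are visible in the hunks:

from kailash.workflow.mock_registry import MockRegistry

node_cls = MockRegistry.get("MockNode")  # -> type[Node]; type name assumed
node = node_cls("n1")                    # __init__ signature assumed

# execute(**kwargs) forwards to process(kwargs), which doubles "value".
assert node.execute(value=21) == {"value": 42}
assert node.get_parameters() == {}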
kailash/workflow/runner.py
CHANGED
@@ -5,7 +5,7 @@ allowing for complex multi-stage processing pipelines.
 """

 import logging
-from typing import Any
+from typing import Any

 from pydantic import BaseModel

@@ -23,8 +23,8 @@ class WorkflowConnection:
         self,
         source_workflow_id: str,
         target_workflow_id: str,
-        condition:
-        state_mapping:
+        condition: dict[str, Any] | None = None,
+        state_mapping: dict[str, str] | None = None,
     ):
         """Initialize a workflow connection.

@@ -88,7 +88,7 @@ class WorkflowConnection:
         )
         return True

-    def map_state(self, state: BaseModel) ->
+    def map_state(self, state: BaseModel) -> dict[str, Any]:
         """Map state fields according to the mapping configuration.

         Args:
@@ -142,8 +142,8 @@ class WorkflowRunner:
         self,
         source_workflow_id: str,
         target_workflow_id: str,
-        condition:
-        state_mapping:
+        condition: dict[str, Any] | None = None,
+        state_mapping: dict[str, str] | None = None,
     ) -> None:
         """Connect two workflows.

@@ -182,7 +182,7 @@

     def get_next_workflows(
         self, current_workflow_id: str, state: BaseModel
-    ) ->
+    ) -> list[tuple[str, dict[str, Any]]]:
         """Get the next workflows to execute based on current state.

         Args:
@@ -206,9 +206,9 @@
         self,
         entry_workflow_id: str,
         initial_state: BaseModel,
-        task_manager:
+        task_manager: TaskManager | None = None,
         max_steps: int = 10,  # Prevent infinite loops
-    ) ->
+    ) -> tuple[BaseModel, dict[str, dict[str, Any]]]:
         """Execute a sequence of connected workflows.

         Args:
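
WorkflowConnection's constructor and map_state now carry full annotations. A sketch against those signatures; the state model and the condition schema are assumptions:

from pydantic import BaseModel
from kailash.workflow.runner import WorkflowConnection

class PipelineState(BaseModel):  # hypothetical state model
    records: int = 0
    done: bool = False

conn = WorkflowConnection(
    source_workflow_id="extract",
    target_workflow_id="load",
    condition=None,                      # dict[str, Any] | None as of 0.3.1
    state_mapping={"records": "count"},  # dict[str, str] | None
)

mapped = conn.map_state(PipelineState(records=10))  # -> dict[str, Any]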
kailash/workflow/safety.py
CHANGED
@@ -4,7 +4,6 @@ import logging
 import threading
 import time
 from contextlib import contextmanager
-from typing import Dict, Optional, Set

 import psutil

@@ -18,13 +17,13 @@ class CycleSafetyManager:

     def __init__(self):
         """Initialize cycle safety manager."""
-        self.active_cycles:
+        self.active_cycles: dict[str, "CycleMonitor"] = {}
         self.global_memory_limit = None  # MB
         self.global_timeout = None  # seconds
         self._lock = threading.Lock()

     def set_global_limits(
-        self, memory_limit:
+        self, memory_limit: int | None = None, timeout: float | None = None
     ) -> None:
         """Set global resource limits.

@@ -38,9 +37,9 @@
     def start_monitoring(
         self,
         cycle_id: str,
-        max_iterations:
-        timeout:
-        memory_limit:
+        max_iterations: int | None = None,
+        timeout: float | None = None,
+        memory_limit: int | None = None,
     ) -> "CycleMonitor":
         """Start monitoring a cycle.

@@ -88,7 +87,7 @@
             monitor.stop()
             del self.active_cycles[cycle_id]

-    def check_all_cycles(self) ->
+    def check_all_cycles(self) -> dict[str, bool]:
         """Check all active cycles for violations.

         Returns:
@@ -102,7 +101,7 @@

         return violations

-    def get_cycle_status(self, cycle_id: str) ->
+    def get_cycle_status(self, cycle_id: str) -> dict[str, any] | None:
         """Get status of a specific cycle.

         Args:
@@ -116,7 +115,7 @@
             return self.active_cycles[cycle_id].get_status()
         return None

-    def detect_deadlocks(self) ->
+    def detect_deadlocks(self) -> set[str]:
         """Detect potential deadlocks in active cycles.

         Returns:
@@ -138,9 +137,9 @@ class CycleMonitor:
     def __init__(
         self,
         cycle_id: str,
-        max_iterations:
-        timeout:
-        memory_limit:
+        max_iterations: int | None = None,
+        timeout: float | None = None,
+        memory_limit: int | None = None,
     ):
         """Initialize cycle monitor.

@@ -256,7 +255,7 @@
         time_since_progress = time.time() - self.last_progress_time
         return time_since_progress > stall_threshold

-    def get_status(self) ->
+    def get_status(self) -> dict[str, any]:
         """Get current monitor status.

         Returns:
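
safety.py drops its typing import outright. Note that two of the new return annotations use the builtin `any` rather than `typing.Any` (`dict[str, any]`); that evaluates at runtime but type checkers will reject it. A sketch of the manager API as annotated above (limit values are illustrative; memory is in MB and timeouts in seconds, per the inline comments):

from kailash.workflow.safety import CycleSafetyManager

manager = CycleSafetyManager()
manager.set_global_limits(memory_limit=512, timeout=60.0)  # MB, seconds

monitor = manager.start_monitoring(
    "cycle-1",
    max_iterations=100,  # int | None
    timeout=30.0,        # float | None
    memory_limit=256,    # int | None
)

violations = manager.check_all_cycles()       # -> dict[str, bool]
status = manager.get_cycle_status("cycle-1")  # -> dict | None
suspects = manager.detect_deadlocks()         # -> set[str]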
kailash/workflow/state.py
CHANGED
@@ -6,7 +6,7 @@ making it easier to handle state transitions in a predictable manner.

 import logging
 from copy import deepcopy
-from typing import Any, Generic,
+from typing import Any, Generic, TypeVar

 from pydantic import BaseModel

@@ -24,7 +24,7 @@ class StateManager:
     """

     @staticmethod
-    def update_in(state_obj: BaseModel, path:
+    def update_in(state_obj: BaseModel, path: list[str], value: Any) -> BaseModel:
         """Update a nested property in the state and return a new state object.

         Args:
@@ -61,10 +61,7 @@
             if isinstance(next_obj, BaseModel):
                 next_obj = next_obj.model_copy(deep=True)
                 setattr(current, key, next_obj)
-            elif isinstance(next_obj, dict):
-                next_obj = deepcopy(next_obj)
-                setattr(current, key, next_obj)
-            elif isinstance(next_obj, list):
+            elif isinstance(next_obj, dict) or isinstance(next_obj, list):
                 next_obj = deepcopy(next_obj)
                 setattr(current, key, next_obj)

@@ -80,7 +77,7 @@

     @staticmethod
     def batch_update(
-        state_obj: BaseModel, updates:
+        state_obj: BaseModel, updates: list[tuple[list[str], Any]]
     ) -> BaseModel:
         """Apply multiple updates to the state atomically.

@@ -108,7 +105,7 @@
         return new_state

     @staticmethod
-    def get_in(state_obj: BaseModel, path:
+    def get_in(state_obj: BaseModel, path: list[str]) -> Any:
         """Get the value at a nested path.

         Args:
@@ -175,7 +172,7 @@ class WorkflowStateWrapper(Generic[StateT]):
         """
         self._state = state

-    def update_in(self, path:
+    def update_in(self, path: list[str], value: Any) -> "WorkflowStateWrapper[StateT]":
         """Update state at path and return new wrapper.

         Args:
@@ -189,7 +186,7 @@
         return WorkflowStateWrapper(new_state)

     def batch_update(
-        self, updates:
+        self, updates: list[tuple[list[str], Any]]
     ) -> "WorkflowStateWrapper[StateT]":
         """Apply multiple updates to the state atomically.

@@ -202,7 +199,7 @@
         new_state = StateManager.batch_update(self._state, updates)
         return WorkflowStateWrapper(new_state)

-    def get_in(self, path:
+    def get_in(self, path: list[str]) -> Any:
         """Get the value at a nested path.

         Args:
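
The path-based state helpers now spell out their signatures, and the separate dict and list branches of update_in collapse into one (equivalent to `isinstance(next_obj, (dict, list))`); both deep-copy the container before reattaching it, which is what keeps updates non-destructive. A sketch of the flow with hypothetical models:

from pydantic import BaseModel
from kailash.workflow.state import StateManager, WorkflowStateWrapper

class Counter(BaseModel):  # hypothetical nested model
    count: int = 0

class AppState(BaseModel):  # hypothetical root model
    inner: Counter = Counter()
    tags: list[str] = []

state = AppState()

# Each call returns a new state object; the original is untouched.
s1 = StateManager.update_in(state, ["inner", "count"], 5)
s2 = StateManager.batch_update(s1, [(["inner", "count"], 7), (["tags"], ["a"])])

assert state.inner.count == 0
assert StateManager.get_in(s2, ["inner", "count"]) == 7

# The wrapper offers the same operations fluently.
wrapped = WorkflowStateWrapper(s2).update_in(["tags"], ["a", "b"])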
kailash/workflow/templates.py
CHANGED
@@ -113,7 +113,7 @@ See Also:
 import math
 import time
 from dataclasses import dataclass
-from typing import Any
+from typing import Any

 from ..nodes.code import PythonCodeNode
 from . import Workflow
@@ -125,11 +125,11 @@ class CycleTemplate:

     name: str
     description: str
-    nodes:
-    convergence_condition:
+    nodes: list[str]
+    convergence_condition: str | None = None
     max_iterations: int = 100
-    timeout:
-    parameters:
+    timeout: float | None = None
+    parameters: dict[str, Any] | None = None


 class CycleTemplates:
@@ -142,7 +142,7 @@ class CycleTemplates:
         evaluator_node: str,
         convergence: str = "quality > 0.9",
         max_iterations: int = 50,
-        cycle_id:
+        cycle_id: str | None = None,
     ) -> str:
         """
         Add an optimization cycle pattern to workflow.
@@ -195,7 +195,7 @@
         max_retries: int = 3,
         backoff_strategy: str = "exponential",
         success_condition: str = "success == True",
-        cycle_id:
+        cycle_id: str | None = None,
     ) -> str:
         """
         Add a retry cycle pattern to workflow.
@@ -300,7 +300,7 @@ result = {{
         validator_node: str,
         quality_threshold: float = 0.95,
         max_iterations: int = 10,
-        cycle_id:
+        cycle_id: str | None = None,
     ) -> str:
         """
         Add a data quality improvement cycle to workflow.
@@ -353,7 +353,7 @@ result = {{
         target_accuracy: float = 0.95,
         max_epochs: int = 100,
         early_stopping_patience: int = 10,
-        cycle_id:
+        cycle_id: str | None = None,
     ) -> str:
         """
         Add a machine learning training cycle to workflow.
@@ -462,7 +462,7 @@ result = {{
         processor_node: str,
         tolerance: float = 0.001,
         max_iterations: int = 1000,
-        cycle_id:
+        cycle_id: str | None = None,
     ) -> str:
         """
         Add a numerical convergence cycle to workflow.
@@ -560,8 +560,8 @@ previous_value = current_value
         workflow: Workflow,
         processor_node: str,
         batch_size: int = 100,
-        total_items:
-        cycle_id:
+        total_items: int | None = None,
+        cycle_id: str | None = None,
     ) -> str:
         """
         Add a batch processing cycle to workflow.
@@ -671,7 +671,7 @@ def add_optimization_cycle(
    evaluator_node: str,
    convergence: str = "quality > 0.9",
    max_iterations: int = 50,
-   cycle_id:
+   cycle_id: str | None = None,
 ) -> str:
    """Add an optimization cycle pattern to this workflow."""
    return CycleTemplates.optimization_cycle(
@@ -685,7 +685,7 @@ def add_retry_cycle(
    max_retries: int = 3,
    backoff_strategy: str = "exponential",
    success_condition: str = "success == True",
-   cycle_id:
+   cycle_id: str | None = None,
 ) -> str:
    """Add a retry cycle pattern to this workflow."""
    return CycleTemplates.retry_cycle(
@@ -699,7 +699,7 @@ def add_data_quality_cycle(
    validator_node: str,
    quality_threshold: float = 0.95,
    max_iterations: int = 10,
-   cycle_id:
+   cycle_id: str | None = None,
 ) -> str:
    """Add a data quality improvement cycle to this workflow."""
    return CycleTemplates.data_quality_cycle(
@@ -714,7 +714,7 @@ def add_learning_cycle(
    target_accuracy: float = 0.95,
    max_epochs: int = 100,
    early_stopping_patience: int = 10,
-   cycle_id:
+   cycle_id: str | None = None,
 ) -> str:
    """Add a machine learning training cycle to this workflow."""
    return CycleTemplates.learning_cycle(
@@ -733,7 +733,7 @@ def add_convergence_cycle(
    processor_node: str,
    tolerance: float = 0.001,
    max_iterations: int = 1000,
-   cycle_id:
+   cycle_id: str | None = None,
 ) -> str:
    """Add a numerical convergence cycle to this workflow."""
    return CycleTemplates.convergence_cycle(
@@ -745,8 +745,8 @@ def add_batch_processing_cycle(
    self,
    processor_node: str,
    batch_size: int = 100,
-   total_items:
-   cycle_id:
+   total_items: int | None = None,
+   cycle_id: str | None = None,
 ) -> str:
    """Add a batch processing cycle to this workflow."""
    return CycleTemplates.batch_processing_cycle(
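
templates.py gives the CycleTemplate dataclass and every factory's cycle_id parameter the same treatment. A sketch of the factory API; the call shapes are corroborated by the migration.py hunks above, while the Workflow constructor and node ids are assumptions:

from kailash.workflow import Workflow
from kailash.workflow.templates import CycleTemplates

workflow = Workflow("wf-002", name="optimizer")  # constructor signature assumed

# cycle_id is now str | None = None; each factory returns the cycle's id.
opt_id = CycleTemplates.optimization_cycle(
    workflow,
    processor_node="processor",  # hypothetical node ids
    evaluator_node="evaluator",
    convergence="quality > 0.9",
    max_iterations=50,
)

retry_id = CycleTemplates.retry_cycle(
    workflow,
    target_node="flaky_fetch",   # hypothetical node id
    max_retries=3,
    backoff_strategy="exponential",
)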
kailash/workflow/validation.py
CHANGED
@@ -116,7 +116,7 @@ See Also:
 import re
 from dataclasses import dataclass
 from enum import Enum
-from typing import Any
+from typing import Any

 from . import Workflow

@@ -137,10 +137,10 @@ class ValidationIssue:
     category: str
     code: str
     message: str
-    node_id:
-    cycle_id:
-    suggestion:
-    documentation_link:
+    node_id: str | None = None
+    cycle_id: str | None = None
+    suggestion: str | None = None
+    documentation_link: str | None = None


 class CycleLinter:
@@ -160,9 +160,9 @@ class CycleLinter:
         """
         self.workflow = workflow
         self.graph = workflow.graph
-        self.issues:
+        self.issues: list[ValidationIssue] = []

-    def check_all(self) ->
+    def check_all(self) -> list[ValidationIssue]:
         """
         Run all validation checks on the workflow.

@@ -536,7 +536,7 @@
             )
         )

-    def _get_cycle_id(self, cycle_nodes:
+    def _get_cycle_id(self, cycle_nodes: list[str]) -> str:
         """Generate a cycle identifier from cycle nodes."""
         return f"cycle_{'-'.join(sorted(cycle_nodes))}"

@@ -658,23 +658,23 @@
         node_id_lower = node_id.lower()
         return any(keyword in node_id_lower for keyword in file_keywords)

-    def get_issues_by_severity(self, severity: IssueSeverity) ->
+    def get_issues_by_severity(self, severity: IssueSeverity) -> list[ValidationIssue]:
         """Get all issues of a specific severity level."""
         return [issue for issue in self.issues if issue.severity == severity]

-    def get_issues_by_category(self, category: str) ->
+    def get_issues_by_category(self, category: str) -> list[ValidationIssue]:
         """Get all issues of a specific category."""
         return [issue for issue in self.issues if issue.category == category]

-    def get_issues_for_cycle(self, cycle_id: str) ->
+    def get_issues_for_cycle(self, cycle_id: str) -> list[ValidationIssue]:
         """Get all issues for a specific cycle."""
         return [issue for issue in self.issues if issue.cycle_id == cycle_id]

-    def get_issues_for_node(self, node_id: str) ->
+    def get_issues_for_node(self, node_id: str) -> list[ValidationIssue]:
         """Get all issues for a specific node."""
         return [issue for issue in self.issues if issue.node_id == node_id]

-    def generate_report(self) ->
+    def generate_report(self) -> dict[str, Any]:
         """
         Generate comprehensive validation report.

@@ -724,7 +724,7 @@
             "recommendations": self._generate_recommendations(),
         }

-    def _generate_recommendations(self) ->
+    def _generate_recommendations(self) -> list[str]:
         """Generate high-level recommendations based on found issues."""
         recommendations = []

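
validation.py makes ValidationIssue's metadata fields optional-by-default and types CycleLinter's accessors. A sketch of a lint pass; the Workflow constructor and the IssueSeverity member name are assumptions:

from kailash.workflow import Workflow
from kailash.workflow.validation import CycleLinter, IssueSeverity

workflow = Workflow("wf-003", name="pipeline")  # constructor signature assumed

linter = CycleLinter(workflow)
issues = linter.check_all()  # -> list[ValidationIssue]

for issue in issues:
    # node_id, cycle_id, suggestion, documentation_link are now str | None.
    print(issue.code, issue.message, issue.suggestion or "-")

errors = linter.get_issues_by_severity(IssueSeverity.ERROR)  # member name assumed
report = linter.generate_report()  # -> dict[str, Any]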