kailash 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/__init__.py +5 -0
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +463 -0
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +819 -0
- kailash/nodes/base.py +24 -26
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +56 -55
- kailash/nodes/data/__init__.py +6 -0
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +297 -0
- kailash/nodes/data/file_discovery.py +598 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +11 -11
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +18 -20
- kailash/runtime/docker.py +24 -26
- kailash/runtime/local.py +55 -31
- kailash/runtime/parallel.py +25 -25
- kailash/runtime/parallel_cyclic.py +29 -29
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +22 -22
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +8 -9
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +32 -19
- kailash/visualization/reports.py +30 -28
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +38 -33
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +45 -45
- kailash/workflow/graph.py +57 -45
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +32 -24
- kailash-0.3.1.dist-info/METADATA +476 -0
- kailash-0.3.1.dist-info/RECORD +136 -0
- kailash-0.2.2.dist-info/METADATA +0 -121
- kailash-0.2.2.dist-info/RECORD +0 -126
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
kailash/workflow/cyclic_runner.py
CHANGED
@@ -99,8 +99,8 @@ See Also:
 """
 
 import logging
-from datetime import
-from typing import Any,
+from datetime import UTC, datetime
+from typing import Any, Optional
 
 import networkx as nx
 
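A recurring change in this release is the move from naive timestamps to `datetime.now(UTC)`, making all tracking timestamps timezone-aware. Note that `datetime.UTC` is an alias added in Python 3.11 (earlier versions spell it `timezone.utc`); a minimal sketch of the difference:

```python
from datetime import UTC, datetime, timezone

naive = datetime.now()                # no tzinfo; ambiguous across timezones
aware = datetime.now(UTC)             # tzinfo=UTC; safe to compare and serialize
legacy = datetime.now(timezone.utc)   # pre-3.11 spelling of the same thing

assert aware.tzinfo is not None and naive.tzinfo is None
print(aware.isoformat())              # e.g. 2024-01-01T12:00:00+00:00
```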
@@ -127,15 +127,15 @@ class WorkflowState:
             run_id: Unique execution run ID
         """
         self.run_id = run_id
-        self.node_outputs:
-        self.execution_order:
-        self.metadata:
+        self.node_outputs: dict[str, Any] = {}
+        self.execution_order: list[str] = []
+        self.metadata: dict[str, Any] = {}
 
 
 class CyclicWorkflowExecutor:
     """Execution engine supporting cyclic workflows with fixed parameter propagation."""
 
-    def __init__(self, safety_manager:
+    def __init__(self, safety_manager: CycleSafetyManager | None = None):
         """Initialize cyclic workflow executor.
 
         Args:
@@ -148,10 +148,10 @@ class CyclicWorkflowExecutor:
     def execute(
         self,
         workflow: Workflow,
-        parameters:
-        task_manager:
-        run_id:
-    ) ->
+        parameters: dict[str, Any] | None = None,
+        task_manager: TaskManager | None = None,
+        run_id: str | None = None,
+    ) -> tuple[dict[str, Any], str]:
         """Execute workflow with cycle support.
 
         Args:
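The other recurring change: annotations move from the `typing.Optional[...]`/`Dict`/`List`/`Tuple` spellings to PEP 604 unions and PEP 585 builtin generics (`X | None`, `dict[...]`, `list[...]`, `tuple[...]`), which require Python 3.10+. `Optional` survives in the import only for quoted forward references such as `Optional["CycleGroup"]`. A minimal sketch of the equivalence:

```python
from typing import Any, Optional

# 0.2.2-era spelling:
def execute_old(parameters: Optional[dict] = None) -> dict:
    return parameters or {}

# 0.3.1 spelling (PEP 604 unions, PEP 585 builtin generics; Python 3.10+):
def execute_new(parameters: dict[str, Any] | None = None) -> dict[str, Any]:
    return parameters or {}
```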
@@ -168,7 +168,7 @@ class CyclicWorkflowExecutor:
             WorkflowValidationError: If workflow is invalid
         """
         # Validate workflow (including cycles)
-        workflow.validate()
+        workflow.validate(runtime_parameters=parameters)
 
         # Generate run ID if not provided
         if not run_id:
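The practical effect: parameters supplied to `execute()` now count toward required-input validation, so a workflow whose inputs only arrive at run time no longer fails `validate()`. A hedged sketch (the node and parameter names are illustrative):

```python
# Hypothetical workflow whose "reader" node requires a `file_path` input.
executor = CyclicWorkflowExecutor()

# 0.2.2: workflow.validate() ran with no knowledge of runtime parameters,
# so a required input provided only here raised WorkflowValidationError.
# 0.3.1: the same parameters are forwarded to validate(runtime_parameters=...).
results, run_id = executor.execute(
    workflow,
    parameters={"reader": {"file_path": "data/input.csv"}},
)
```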
@@ -223,10 +223,10 @@ class CyclicWorkflowExecutor:
     def _execute_with_cycles(
         self,
         workflow: Workflow,
-        parameters:
+        parameters: dict[str, Any] | None,
         run_id: str,
-        task_manager:
-    ) ->
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
         """Execute workflow with cycle handling.
 
         Args:
@@ -264,8 +264,8 @@ class CyclicWorkflowExecutor:
     def _create_execution_plan(
         self,
         workflow: Workflow,
-        dag_edges:
-        cycle_groups:
+        dag_edges: list[tuple],
+        cycle_groups: dict[str, list[tuple]],
     ) -> "ExecutionPlan":
         """Create execution plan handling cycles.
 
@@ -329,8 +329,8 @@ class CyclicWorkflowExecutor:
         workflow: Workflow,
         plan: "ExecutionPlan",
         state: WorkflowState,
-        task_manager:
-    ) ->
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
         """Execute the workflow plan.
 
         Args:
@@ -377,8 +377,8 @@ class CyclicWorkflowExecutor:
         workflow: Workflow,
         cycle_group: "CycleGroup",
         state: WorkflowState,
-        task_manager:
-    ) ->
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
         """Execute a cycle group with proper parameter propagation.
 
         Args:
@@ -432,7 +432,7 @@ class CyclicWorkflowExecutor:
             run_id=state.run_id,
             node_id=f"cycle_group_{cycle_id}",
             node_type="CycleGroup",
-            started_at=datetime.now(
+            started_at=datetime.now(UTC),
             metadata={
                 "cycle_id": cycle_id,
                 "max_iterations": cycle_config.get("max_iterations"),
@@ -463,7 +463,7 @@ class CyclicWorkflowExecutor:
             run_id=state.run_id,
             node_id=f"cycle_{cycle_id}_iteration_{loop_count}",
             node_type="CycleIteration",
-            started_at=datetime.now(
+            started_at=datetime.now(UTC),
             metadata={
                 "cycle_id": cycle_id,
                 "iteration": loop_count,
@@ -552,7 +552,7 @@ class CyclicWorkflowExecutor:
         task_manager.update_task_status(
             iteration_task_id,
             TaskStatus.COMPLETED,
-            ended_at=datetime.now(
+            ended_at=datetime.now(UTC),
             result=iteration_results,
             metadata={
                 "converged": (
@@ -579,7 +579,7 @@ class CyclicWorkflowExecutor:
         task_manager.update_task_status(
             cycle_task_id,
             TaskStatus.COMPLETED,
-            ended_at=datetime.now(
+            ended_at=datetime.now(UTC),
             result=results,
             metadata={
                 "total_iterations": loop_count,
@@ -603,11 +603,11 @@ class CyclicWorkflowExecutor:
         workflow: Workflow,
         node_id: str,
         state: WorkflowState,
-        cycle_state:
-        cycle_edges:
-        previous_iteration_results:
-        task_manager:
-        iteration:
+        cycle_state: CycleState | None = None,
+        cycle_edges: list[tuple] | None = None,
+        previous_iteration_results: dict[str, Any] | None = None,
+        task_manager: TaskManager | None = None,
+        iteration: int | None = None,
     ) -> Any:
         """Execute a single node with proper parameter handling for cycles.
 
@@ -757,7 +757,7 @@ class CyclicWorkflowExecutor:
             run_id=state.run_id,
             node_id=task_node_id,
             node_type=node.__class__.__name__,
-            started_at=datetime.now(
+            started_at=datetime.now(UTC),
             metadata=task_metadata,
         )
         if task:
@@ -794,7 +794,7 @@ class CyclicWorkflowExecutor:
                 task.task_id,
                 TaskStatus.COMPLETED,
                 result=result,
-                ended_at=datetime.now(
+                ended_at=datetime.now(UTC),
                 metadata={"execution_time": performance_metrics.duration},
             )
 
@@ -811,7 +811,7 @@ class CyclicWorkflowExecutor:
                 task.task_id,
                 TaskStatus.FAILED,
                 error=str(e),
-                ended_at=datetime.now(
+                ended_at=datetime.now(UTC),
             )
         except Exception as update_error:
             logger.warning(
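Note the defensive pattern visible in the failure path: the FAILED status update is itself wrapped, so a broken tracking backend can only log a warning rather than mask the original node exception. A minimal standalone sketch of the idea (function names are illustrative, not the SDK's API):

```python
import logging

logger = logging.getLogger(__name__)

def run_with_tracking(node_fn, update_status):
    try:
        return node_fn()
    except Exception as e:
        try:
            update_status("FAILED", error=str(e))
        except Exception as update_error:
            # Never let a tracking failure shadow the real error.
            logger.warning("Failed to record task failure: %s", update_error)
        raise  # re-raise the original node exception
```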
@@ -831,16 +831,16 @@ class ExecutionPlan:
 
     def __init__(self):
         """Initialize execution plan."""
-        self.stages:
-        self.cycle_groups:
+        self.stages: list["ExecutionStage"] = []
+        self.cycle_groups: dict[str, "CycleGroup"] = {}
 
     def add_cycle_group(
         self,
         cycle_id: str,
-        nodes:
-        entry_nodes:
-        exit_nodes:
-        edges:
+        nodes: set[str],
+        entry_nodes: set[str],
+        exit_nodes: set[str],
+        edges: list[tuple],
     ) -> None:
         """Add a cycle group to the plan.
 
@@ -859,7 +859,7 @@ class ExecutionPlan:
             edges=edges,
         )
 
-    def build_stages(self, topo_order:
+    def build_stages(self, topo_order: list[str], dag_graph: nx.DiGraph) -> None:
         """Build execution stages.
 
         Args:
@@ -914,7 +914,7 @@ class ExecutionStage:
     def __init__(
         self,
         is_cycle: bool,
-        nodes:
+        nodes: list[str] | None = None,
         cycle_group: Optional["CycleGroup"] = None,
     ):
         """Initialize execution stage.
@@ -935,10 +935,10 @@ class CycleGroup:
     def __init__(
         self,
         cycle_id: str,
-        nodes:
-        entry_nodes:
-        exit_nodes:
-        edges:
+        nodes: set[str],
+        entry_nodes: set[str],
+        exit_nodes: set[str],
+        edges: list[tuple],
     ):
         """Initialize cycle group.
 
@@ -955,7 +955,7 @@ class CycleGroup:
         self.exit_nodes = exit_nodes
         self.edges = edges
 
-    def get_execution_order(self, full_graph: nx.DiGraph) ->
+    def get_execution_order(self, full_graph: nx.DiGraph) -> list[str]:
         """Get execution order for nodes in cycle.
 
         Args:
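`ExecutionPlan` is defined above `ExecutionStage` and `CycleGroup`, so its new annotations quote the class names: string (forward-reference) annotations let a class refer to types defined later in the same module. A minimal sketch of the pattern:

```python
class Plan:
    def __init__(self):
        # Quoted names: Stage and Group are defined further down the module.
        self.stages: list["Stage"] = []
        self.groups: dict[str, "Group"] = {}

class Stage: ...
class Group: ...
```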
kailash/workflow/graph.py
CHANGED
@@ -4,8 +4,8 @@ import json
 import logging
 import uuid
 import warnings
-from datetime import
-from typing import Any
+from datetime import UTC, datetime
+from typing import Any
 
 import networkx as nx
 import yaml
@@ -38,10 +38,10 @@ class NodeInstance(BaseModel):
 
     node_id: str = Field(..., description="Unique identifier for this instance")
     node_type: str = Field(..., description="Type of node")
-    config:
+    config: dict[str, Any] = Field(
         default_factory=dict, description="Node configuration"
     )
-    position:
+    position: tuple[float, float] = Field(default=(0, 0), description="Visual position")
 
 
 class Connection(BaseModel):
@@ -59,23 +59,21 @@ class CyclicConnection(Connection):
     cycle: bool = Field(
         default=False, description="Whether this connection creates a cycle"
     )
-    max_iterations:
+    max_iterations: int | None = Field(
         default=None, description="Maximum cycle iterations"
     )
-    convergence_check:
+    convergence_check: str | None = Field(
         default=None, description="Convergence condition expression"
    )
-    cycle_id:
+    cycle_id: str | None = Field(
         default=None, description="Logical cycle group identifier"
     )
-    timeout:
-
-
-    memory_limit: Optional[int] = Field(default=None, description="Memory limit in MB")
-    condition: Optional[str] = Field(
+    timeout: float | None = Field(default=None, description="Cycle timeout in seconds")
+    memory_limit: int | None = Field(default=None, description="Memory limit in MB")
+    condition: str | None = Field(
         default=None, description="Conditional cycle routing expression"
     )
-    parent_cycle:
+    parent_cycle: str | None = Field(
         default=None, description="Parent cycle for nested cycles"
     )
 
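`CyclicConnection` keeps the same fields; only the spelling changes, and Pydantic resolves `X | None` identically to `Optional[X]`. A minimal standalone sketch of the pattern (a subset of the model's fields, not the full class):

```python
from pydantic import BaseModel, Field

class CyclicConnectionSketch(BaseModel):
    cycle: bool = Field(default=False, description="Whether this connection creates a cycle")
    max_iterations: int | None = Field(default=None, description="Maximum cycle iterations")
    convergence_check: str | None = Field(default=None, description="Convergence condition expression")

conn = CyclicConnectionSketch(cycle=True, max_iterations=50, convergence_check="error < 0.01")
print(conn.model_dump())  # pydantic v2; use .dict() on v1
```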
@@ -90,7 +88,7 @@ class Workflow:
         description: str = "",
         version: str = "1.0.0",
         author: str = "",
-        metadata:
+        metadata: dict[str, Any] | None = None,
     ):
         """Initialize a workflow.
 
@@ -118,7 +116,7 @@ class Workflow:
         if "version" not in self.metadata and version:
             self.metadata["version"] = version
         if "created_at" not in self.metadata:
-            self.metadata["created_at"] = datetime.now(
+            self.metadata["created_at"] = datetime.now(UTC).isoformat()
 
         # Create directed graph for the workflow
         self.graph = nx.DiGraph()
@@ -191,10 +189,12 @@ class Workflow:
 
         # Store node instance and metadata
         try:
+            # Use the node instance's actual config, which includes both original config and any updates
+            actual_config = node_instance.config.copy()
             node_instance_data = NodeInstance(
                 node_id=node_id,
                 node_type=node_type,
-                config=
+                config=actual_config,
                 position=(len(self.nodes) * 150, 100),
             )
             self.nodes[node_id] = node_instance_data
@@ -203,12 +203,14 @@ class Workflow:
 
         self._node_instances[node_id] = node_instance
 
-        # Add to graph
-        self.graph.add_node(
+        # Add to graph with actual config
+        self.graph.add_node(
+            node_id, node=node_instance, type=node_type, config=actual_config
+        )
         logger.info(f"Added node '{node_id}' of type '{node_type}'")
 
     def _add_node_internal(
-        self, node_id: str, node_type: str, config:
+        self, node_id: str, node_type: str, config: dict[str, Any] | None = None
     ) -> None:
         """Add a node to the workflow (internal method).
 
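The behavioral fix in these hunks: the graph now records the node instance's effective config (defaults and any updates the node applied to itself) rather than only what the caller passed in. A hedged sketch of what that changes when inspecting the graph; the node type and the `delimiter` default are purely illustrative:

```python
# Hypothetical: a reader node whose __init__ fills in delimiter="," by default.
workflow._add_node_internal("reader", "CSVReaderNode", config={"file_path": "data.csv"})

data = workflow.graph.nodes["reader"]
print(data["type"])    # "CSVReaderNode"
# 0.2.2 stored only the caller-supplied config here; 0.3.1 mirrors
# node_instance.config, so node-applied defaults now appear too:
print(data["config"])  # e.g. {"file_path": "data.csv", "delimiter": ","}
```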
@@ -225,15 +227,15 @@ class Workflow:
         self,
         source_node: str,
         target_node: str,
-        mapping:
+        mapping: dict[str, str] | None = None,
         cycle: bool = False,
-        max_iterations:
-        convergence_check:
-        cycle_id:
-        timeout:
-        memory_limit:
-        condition:
-        parent_cycle:
+        max_iterations: int | None = None,
+        convergence_check: str | None = None,
+        cycle_id: str | None = None,
+        timeout: float | None = None,
+        memory_limit: int | None = None,
+        condition: str | None = None,
+        parent_cycle: str | None = None,
     ) -> None:
         """Connect two nodes in the workflow.
 
@@ -439,7 +441,7 @@ class Workflow:
             f"Connected '{source_node}' to '{target_node}' with mapping: {mapping}"
         )
 
-    def create_cycle(self, cycle_id:
+    def create_cycle(self, cycle_id: str | None = None):
         """
         Create a new CycleBuilder for intuitive cycle configuration.
 
@@ -537,7 +539,7 @@ class Workflow:
             source_node=from_node, target_node=to_node, mapping={from_output: to_input}
         )
 
-    def get_node(self, node_id: str) ->
+    def get_node(self, node_id: str) -> Node | None:
         """Get node instance by ID.
 
         Args:
@@ -557,7 +559,7 @@ class Workflow:
         # Fallback to _node_instances
         return self._node_instances.get(node_id)
 
-    def separate_dag_and_cycle_edges(self) ->
+    def separate_dag_and_cycle_edges(self) -> tuple[list[tuple], list[tuple]]:
         """Separate DAG edges from cycle edges.
 
         Returns:
@@ -574,7 +576,7 @@ class Workflow:
 
         return dag_edges, cycle_edges
 
-    def get_cycle_groups(self) ->
+    def get_cycle_groups(self) -> dict[str, list[tuple]]:
         """Get cycle edges grouped by cycle_id with enhanced multi-node cycle detection.
 
         For multi-node cycles like A → B → C → A where only C → A is marked as cycle,
@@ -674,7 +676,7 @@ class Workflow:
         _, cycle_edges = self.separate_dag_and_cycle_edges()
         return len(cycle_edges) > 0
 
-    def get_execution_order(self) ->
+    def get_execution_order(self) -> list[str]:
         """Get topological execution order for nodes, handling cycles gracefully.
 
         Returns:
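`connect()` keeps its full cycle-aware signature; only the annotations change. A hedged usage sketch based on the parameters shown above (node names and values are illustrative):

```python
workflow.connect(
    "evaluator", "optimizer",
    mapping={"score": "previous_score"},  # route one node's output to another's input
    cycle=True,                           # mark this edge as closing a cycle
    max_iterations=25,
    convergence_check="score > 0.95",     # expression checked each iteration
    cycle_id="tuning_loop",
    timeout=60.0,                         # seconds
)
```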
@@ -707,9 +709,12 @@ class Workflow:
         # This shouldn't happen, but handle gracefully
         raise WorkflowValidationError("Unable to determine execution order")
 
-    def validate(self) -> None:
+    def validate(self, runtime_parameters: dict[str, Any] | None = None) -> None:
         """Validate the workflow structure.
 
+        Args:
+            runtime_parameters: Parameters that will be provided at runtime (Session 061)
+
         Raises:
             WorkflowValidationError: If workflow is invalid
         """
@@ -758,14 +763,19 @@ class Workflow:
                 # Check nested config
                 found_in_config = param_name in node_instance.config["config"]
 
-            if
+            # Session 061: Check if parameter will be provided at runtime
+            found_in_runtime = False
+            if runtime_parameters and node_id in runtime_parameters:
+                found_in_runtime = param_name in runtime_parameters[node_id]
+
+            if not found_in_config and not found_in_runtime:
                 if param_def.default is None:
                     missing_inputs.append(param_name)
 
         if missing_inputs:
             raise WorkflowValidationError(
                 f"Node '{node_id}' missing required inputs: {missing_inputs}. "
-                f"Provide these inputs via connections or
+                f"Provide these inputs via connections, node configuration, or runtime parameters"
             )
 
         logger.info(f"Workflow '{self.name}' validated successfully")
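As the check above shows, `runtime_parameters` is keyed by node ID, and each value is a dict of parameter names for that node; a required parameter counts as satisfied if it appears there, even when absent from the node's config. A minimal sketch (node and parameter names illustrative):

```python
# A "summarizer" node requiring `text` with no default previously always failed:
# WorkflowValidationError: Node 'summarizer' missing required inputs: ['text']. ...

workflow.validate(runtime_parameters={"summarizer": {"text": "..."}})  # now passes
workflow.validate()  # still raises, since nothing promises `text` at runtime
```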
@@ -831,8 +841,8 @@ class Workflow:
         )
 
     def run(
-        self, task_manager:
-    ) ->
+        self, task_manager: TaskManager | None = None, **overrides
+    ) -> tuple[dict[str, Any], str | None]:
         """Execute the workflow.
 
         Args:
@@ -851,9 +861,9 @@ class Workflow:
 
     def execute(
         self,
-        inputs:
-        task_manager:
-    ) ->
+        inputs: dict[str, Any] | None = None,
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
         """Execute the workflow.
 
         Args:
@@ -951,7 +961,9 @@ class Workflow:
         )
 
         # Process each mapping pair
-        for i, (src, dst) in enumerate(
+        for i, (src, dst) in enumerate(
+            zip(from_outputs, to_inputs, strict=False)
+        ):
             if src in source_results:
                 node_inputs[dst] = source_results[src]
 
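The explicit `strict=False` documents a deliberate choice: since Python 3.10, `zip()` accepts a `strict` flag that raises on length mismatch, and spelling out the default makes clear that truncating to the shorter mapping list is intended. For contrast:

```python
from_outputs, to_inputs = ["a", "b", "c"], ["x", "y"]

list(zip(from_outputs, to_inputs, strict=False))  # [('a', 'x'), ('b', 'y')]; extra 'c' ignored
list(zip(from_outputs, to_inputs, strict=True))   # ValueError: argument 2 is shorter
```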
@@ -1033,7 +1045,7 @@ class Workflow:
                 f"Failed to export workflow to '{output_path}': {e}"
             ) from e
 
-    def to_dict(self) ->
+    def to_dict(self) -> dict[str, Any]:
         """Convert workflow to dictionary.
 
         Returns:
@@ -1095,7 +1107,7 @@ class Workflow:
             raise ValueError(f"Unsupported format: {format}")
 
     @classmethod
-    def from_dict(cls, data:
+    def from_dict(cls, data: dict[str, Any]) -> "Workflow":
         """Create workflow from dictionary.
 
         Args:
@@ -1216,9 +1228,9 @@ class Workflow:
         self,
         state_model: BaseModel,
         wrap_state: bool = True,
-        task_manager:
+        task_manager: TaskManager | None = None,
         **overrides,
-    ) ->
+    ) -> tuple[BaseModel, dict[str, Any]]:
         """Execute the workflow with state management.
 
         This method provides a simplified interface for executing workflows
kailash/workflow/mermaid_visualizer.py
CHANGED
@@ -5,8 +5,6 @@ offering a text-based format that can be embedded in markdown files and
 rendered in various documentation platforms.
 """
 
-from typing import Dict, Optional, Tuple
-
 from kailash.workflow.graph import Workflow
 
 
@@ -27,7 +25,7 @@ class MermaidVisualizer:
         self,
         workflow: Workflow,
         direction: str = "TB",
-        node_styles:
+        node_styles: dict[str, str] | None = None,
     ):
         """Initialize the Mermaid visualizer.
 
@@ -40,7 +38,7 @@ class MermaidVisualizer:
         self.direction = direction
         self.node_styles = node_styles or self._default_node_styles()
 
-    def _default_node_styles(self) ->
+    def _default_node_styles(self) -> dict[str, str]:
         """Get default node styles for different node types.
 
         Returns:
@@ -79,7 +77,7 @@ class MermaidVisualizer:
         # Use line break without parentheses to avoid Mermaid parsing issues
         return f"{clean_type}<br/>{node_id}"
 
-    def _get_pattern_edge_label(self, source: str, target: str, data:
+    def _get_pattern_edge_label(self, source: str, target: str, data: dict) -> str:
         """Get a pattern-oriented edge label.
 
         Args:
@@ -262,7 +260,7 @@ class MermaidVisualizer:
             return node_type[:-4]
         return node_type
 
-    def _get_node_shape(self, node_type: str) ->
+    def _get_node_shape(self, node_type: str) -> tuple[str, str]:
         """Get the shape brackets for a node type.
 
         Args:
@@ -460,7 +458,7 @@ class MermaidVisualizer:
 
         return "\n".join(lines)
 
-    def _get_edge_label(self, source: str, target: str, data:
+    def _get_edge_label(self, source: str, target: str, data: dict) -> str:
         """Get label for an edge.
 
         Args:
@@ -491,7 +489,7 @@ class MermaidVisualizer:
 
         return ""
 
-    def generate_markdown(self, title:
+    def generate_markdown(self, title: str | None = None) -> str:
         """Generate a complete markdown section with the Mermaid diagram.
 
         Args:
@@ -559,7 +557,7 @@ class MermaidVisualizer:
 
         return "\n".join(lines)
 
-    def save_markdown(self, filepath: str, title:
+    def save_markdown(self, filepath: str, title: str | None = None) -> None:
         """Save the Mermaid diagram as a markdown file.
 
         Args:
@@ -596,7 +594,7 @@ def add_mermaid_to_workflow():
         visualizer = MermaidVisualizer(self, direction=direction)
         return visualizer.generate()
 
-    def to_mermaid_markdown(self, title:
+    def to_mermaid_markdown(self, title: str | None = None) -> str:
         """Generate markdown with embedded Mermaid diagram.
 
         Args:
@@ -608,7 +606,7 @@ def add_mermaid_to_workflow():
         visualizer = MermaidVisualizer(self)
         return visualizer.generate_markdown(title)
 
-    def save_mermaid_markdown(self, filepath: str, title:
+    def save_mermaid_markdown(self, filepath: str, title: str | None = None) -> None:
         """Save workflow as markdown with Mermaid diagram.
 
         Args: