kailash 0.6.6__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +35 -5
- kailash/access_control.py +64 -46
- kailash/adapters/__init__.py +5 -0
- kailash/adapters/mcp_platform_adapter.py +273 -0
- kailash/api/workflow_api.py +34 -3
- kailash/channels/__init__.py +21 -0
- kailash/channels/api_channel.py +409 -0
- kailash/channels/base.py +271 -0
- kailash/channels/cli_channel.py +661 -0
- kailash/channels/event_router.py +496 -0
- kailash/channels/mcp_channel.py +648 -0
- kailash/channels/session.py +423 -0
- kailash/mcp_server/discovery.py +57 -18
- kailash/middleware/communication/api_gateway.py +23 -3
- kailash/middleware/communication/realtime.py +83 -0
- kailash/middleware/core/agent_ui.py +1 -1
- kailash/middleware/gateway/storage_backends.py +393 -0
- kailash/middleware/mcp/enhanced_server.py +22 -16
- kailash/nexus/__init__.py +21 -0
- kailash/nexus/cli/__init__.py +5 -0
- kailash/nexus/cli/__main__.py +6 -0
- kailash/nexus/cli/main.py +176 -0
- kailash/nexus/factory.py +413 -0
- kailash/nexus/gateway.py +545 -0
- kailash/nodes/__init__.py +8 -5
- kailash/nodes/ai/iterative_llm_agent.py +988 -17
- kailash/nodes/ai/llm_agent.py +29 -9
- kailash/nodes/api/__init__.py +2 -2
- kailash/nodes/api/monitoring.py +1 -1
- kailash/nodes/base.py +29 -5
- kailash/nodes/base_async.py +54 -14
- kailash/nodes/code/async_python.py +1 -1
- kailash/nodes/code/python.py +50 -6
- kailash/nodes/data/async_sql.py +90 -0
- kailash/nodes/data/bulk_operations.py +939 -0
- kailash/nodes/data/query_builder.py +373 -0
- kailash/nodes/data/query_cache.py +512 -0
- kailash/nodes/monitoring/__init__.py +10 -0
- kailash/nodes/monitoring/deadlock_detector.py +964 -0
- kailash/nodes/monitoring/performance_anomaly.py +1078 -0
- kailash/nodes/monitoring/race_condition_detector.py +1151 -0
- kailash/nodes/monitoring/transaction_metrics.py +790 -0
- kailash/nodes/monitoring/transaction_monitor.py +931 -0
- kailash/nodes/security/behavior_analysis.py +414 -0
- kailash/nodes/system/__init__.py +17 -0
- kailash/nodes/system/command_parser.py +820 -0
- kailash/nodes/transaction/__init__.py +48 -0
- kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
- kailash/nodes/transaction/saga_coordinator.py +652 -0
- kailash/nodes/transaction/saga_state_storage.py +411 -0
- kailash/nodes/transaction/saga_step.py +467 -0
- kailash/nodes/transaction/transaction_context.py +756 -0
- kailash/nodes/transaction/two_phase_commit.py +978 -0
- kailash/nodes/transform/processors.py +17 -1
- kailash/nodes/validation/__init__.py +21 -0
- kailash/nodes/validation/test_executor.py +532 -0
- kailash/nodes/validation/validation_nodes.py +447 -0
- kailash/resources/factory.py +1 -1
- kailash/runtime/access_controlled.py +9 -7
- kailash/runtime/async_local.py +84 -21
- kailash/runtime/local.py +21 -2
- kailash/runtime/parameter_injector.py +187 -31
- kailash/runtime/runner.py +6 -4
- kailash/runtime/testing.py +1 -1
- kailash/security.py +22 -3
- kailash/servers/__init__.py +32 -0
- kailash/servers/durable_workflow_server.py +430 -0
- kailash/servers/enterprise_workflow_server.py +522 -0
- kailash/servers/gateway.py +183 -0
- kailash/servers/workflow_server.py +293 -0
- kailash/utils/data_validation.py +192 -0
- kailash/workflow/builder.py +382 -15
- kailash/workflow/cyclic_runner.py +102 -10
- kailash/workflow/validation.py +144 -8
- kailash/workflow/visualization.py +99 -27
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/METADATA +3 -2
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/RECORD +81 -40
- kailash/workflow/builder_improvements.py +0 -207
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/WHEEL +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.6.dist-info → kailash-0.8.0.dist-info}/top_level.txt +0 -0
kailash/workflow/builder.py
CHANGED
```diff
@@ -2,8 +2,9 @@

 import logging
 import uuid
-from typing import Any
+from typing import TYPE_CHECKING, Any

+from kailash.nodes.base import Node
 from kailash.sdk_exceptions import ConnectionError, WorkflowValidationError
 from kailash.workflow.graph import Workflow

```
```diff
@@ -18,23 +19,234 @@ class WorkflowBuilder:
         self.nodes: dict[str, dict[str, Any]] = {}
         self.connections: list[dict[str, str]] = []
         self._metadata: dict[str, Any] = {}
+        # Parameter injection capabilities
+        self.workflow_parameters: dict[str, Any] = {}
+        self.parameter_mappings: dict[str, dict[str, str]] = {}

-    def add_node(
+    def add_node(self, *args, **kwargs) -> str:
+        """
+        Unified add_node method supporting multiple API patterns.
+
+        Supported patterns:
+        1. add_node("NodeType", "node_id", {"param": value})  # Current/Preferred
+        2. add_node("node_id", NodeClass, param=value)  # Legacy fluent
+        3. add_node(NodeClass, "node_id", param=value)  # Alternative
+
+        Args:
+            *args: Positional arguments (pattern-dependent)
+            **kwargs: Keyword arguments for configuration
+
+        Returns:
+            Node ID (useful for method chaining)
+
+        Raises:
+            WorkflowValidationError: If node_id is already used or invalid pattern
+        """
+        # Pattern detection and routing
+        if len(args) == 0 and kwargs:
+            # Keyword-only pattern: add_node(node_type="NodeType", node_id="id", config={})
+            node_type = kwargs.pop("node_type", None)
+            node_id = kwargs.pop("node_id", None)
+            config = kwargs.pop("config", {})
+            # Any remaining kwargs are treated as config
+            config.update(kwargs)
+
+            if node_type is None:
+                raise WorkflowValidationError(
+                    "node_type is required when using keyword arguments"
+                )
+
+            return self._add_node_current(node_type, node_id, config)
+
+        elif len(args) == 1:
+            # Single argument with possible keywords
+            if isinstance(args[0], str) and kwargs:
+                # Pattern: add_node("NodeType", node_id="id", config={})
+                node_type = args[0]
+                node_id = kwargs.pop("node_id", None)
+                config = kwargs.pop("config", {})
+                # Any remaining kwargs are treated as config
+                config.update(kwargs)
+                return self._add_node_current(node_type, node_id, config)
+            elif isinstance(args[0], str):
+                # Pattern: add_node("NodeType")
+                return self._add_node_current(args[0], None, {})
+            elif hasattr(args[0], "__name__"):
+                # Pattern: add_node(NodeClass)
+                return self._add_node_alternative(args[0], None, **kwargs)
+            else:
+                if isinstance(args[0], Node):
+                    # Pattern: add_node(node_instance)
+                    return self._add_node_instance(args[0], None)
+
+        elif len(args) == 3 and isinstance(args[0], str) and isinstance(args[2], dict):
+            # Pattern 1: Current API - add_node("NodeType", "node_id", {"param": value})
+            return self._add_node_current(args[0], args[1], args[2])
+
+        elif len(args) >= 2 and isinstance(args[0], str):
+            # Pattern 2: Legacy fluent API - add_node("node_id", NodeClass, param=value)
+            if hasattr(args[1], "__name__") or isinstance(args[1], type):
+                return self._add_node_legacy_fluent(args[0], args[1], **kwargs)
+            elif isinstance(args[1], str):
+                # Two strings - assume current API: add_node("NodeType", "node_id")
+                config = kwargs if kwargs else (args[2] if len(args) > 2 else {})
+                return self._add_node_current(args[0], args[1], config)
+            else:
+                # Invalid second argument
+                raise WorkflowValidationError(
+                    f"Invalid node type: {type(args[1]).__name__}. "
+                    "Expected: str (node type name), Node class, or Node instance"
+                )
+
+        elif len(args) >= 2 and hasattr(args[0], "__name__"):
+            # Pattern 3: Alternative - add_node(NodeClass, "node_id", param=value)
+            # Handle both dict config and keyword args
+            if len(args) == 3 and isinstance(args[2], dict):
+                # Config provided as dict
+                return self._add_node_alternative(args[0], args[1], **args[2])
+            else:
+                # Config provided as kwargs
+                return self._add_node_alternative(args[0], args[1], **kwargs)
+
+        elif len(args) >= 2:
+            # Check if first arg is a Node instance
+            if isinstance(args[0], Node):
+                # Pattern 4: Instance - add_node(node_instance, "node_id") or add_node(node_instance, "node_id", config)
+                # Config is ignored for instances
+                return self._add_node_instance(args[0], args[1])
+            elif len(args) == 2:
+                # Invalid arguments for 2-arg call
+                raise WorkflowValidationError(
+                    f"Invalid node type: {type(args[0]).__name__}. "
+                    "Expected: str (node type name), Node class, or Node instance"
+                )
+
+        # For 3 or more args that don't match other patterns
+        # Error with helpful message
+        raise WorkflowValidationError(
+            f"Invalid add_node signature. Received {len(args)} args: {[type(arg).__name__ for arg in args]}\n"
+            f"Supported patterns:\n"
+            f"  add_node('NodeType', 'node_id', {{'param': value}})  # Preferred\n"
+            f"  add_node('node_id', NodeClass, param=value)  # Legacy\n"
+            f"  add_node(NodeClass, 'node_id', param=value)  # Alternative\n"
+            f"Examples:\n"
+            f"  add_node('HTTPRequestNode', 'api_call', {{'url': 'https://api.com'}})\n"
+            f"  add_node('csv_reader', CSVReaderNode, file_path='data.csv')"
+        )
+
+    def _add_node_current(
+        self, node_type: str, node_id: str | None, config: dict[str, Any]
+    ) -> str:
+        """Handle current API pattern: add_node('NodeType', 'node_id', {'param': value})"""
+        return self._add_node_unified(node_type, node_id, config)
+
+    def _add_node_legacy_fluent(
+        self, node_id: str, node_class_or_type: Any, **config
+    ) -> "WorkflowBuilder":
+        """Handle legacy fluent API pattern: add_node('node_id', NodeClass, param=value)"""
+        import warnings
+
+        # If it's a class, validate it's a Node subclass
+        if isinstance(node_class_or_type, type) and not issubclass(
+            node_class_or_type, Node
+        ):
+            raise WorkflowValidationError(
+                f"Invalid node type: {node_class_or_type}. Expected a Node subclass or string."
+            )
+
+        warnings.warn(
+            f"Legacy fluent API usage detected. "
+            f"Migration guide:\n"
+            f"  OLD: add_node('{node_id}', {getattr(node_class_or_type, '__name__', str(node_class_or_type))}, {list(config.keys())})\n"
+            f"  NEW: add_node('{getattr(node_class_or_type, '__name__', str(node_class_or_type))}', '{node_id}', {config})\n"
+            f"Legacy support will be removed in v0.8.0",
+            DeprecationWarning,
+            stacklevel=3,
+        )
+
+        if hasattr(node_class_or_type, "__name__"):
+            node_type = node_class_or_type.__name__
+        else:
+            node_type = str(node_class_or_type)
+
+        self._add_node_unified(node_type, node_id, config)
+        return self  # Return self for fluent chaining
+
+    def _add_node_alternative(
+        self, node_class: type, node_id: str | None, **config
+    ) -> str:
+        """Handle alternative pattern: add_node(NodeClass, 'node_id', param=value)"""
+        import warnings
+
+        # Validate that node_class is actually a Node subclass
+        if not isinstance(node_class, type) or not issubclass(node_class, Node):
+            raise WorkflowValidationError(
+                f"Invalid node type: {node_class}. Expected a Node subclass."
+            )
+
+        # Generate ID if not provided
+        if node_id is None:
+            node_id = f"node_{uuid.uuid4().hex[:8]}"
+
+        warnings.warn(
+            f"Alternative API usage detected. Consider using preferred pattern:\n"
+            f"  CURRENT: add_node({node_class.__name__}, '{node_id}', {list(config.keys())})\n"
+            f"  PREFERRED: add_node('{node_class.__name__}', '{node_id}', {config})",
+            UserWarning,
+            stacklevel=3,
+        )
+
+        # Store the class reference along with the type name
+        self.nodes[node_id] = {
+            "type": node_class.__name__,
+            "config": config,
+            "class": node_class,
+        }
+        logger.info(f"Added node '{node_id}' of type '{node_class.__name__}'")
+        return node_id
+
+    def _add_node_instance(self, node_instance: "Node", node_id: str | None) -> str:
+        """Handle instance pattern: add_node(node_instance, 'node_id')"""
+        import warnings
+
+        # Generate ID if not provided
+        if node_id is None:
+            node_id = f"node_{uuid.uuid4().hex[:8]}"
+
+        warnings.warn(
+            f"Instance-based API usage detected. Consider using preferred pattern:\n"
+            f"  CURRENT: add_node(<instance>, '{node_id}')\n"
+            f"  PREFERRED: add_node('{node_instance.__class__.__name__}', '{node_id}', {{'param': value}})",
+            UserWarning,
+            stacklevel=3,
+        )
+
+        # Store the instance
+        self.nodes[node_id] = {
+            "instance": node_instance,
+            "type": node_instance.__class__.__name__,
+        }
+        logger.info(
+            f"Added node '{node_id}' with instance of type '{node_instance.__class__.__name__}'"
+        )
+        return node_id
+
+    def _add_node_unified(
         self,
-        node_type: str
+        node_type: str,
         node_id: str | None = None,
         config: dict[str, Any] | None = None,
     ) -> str:
         """
-
+        Unified implementation for all add_node patterns.

         Args:
-        node_type: Node type name (string)
+            node_type: Node type name (string)
             node_id: Unique identifier for this node (auto-generated if not provided)
-        config: Configuration for the node
+            config: Configuration for the node

         Returns:
-        Node ID
+            Node ID

         Raises:
             WorkflowValidationError: If node_id is already used
```
|
|
48
260
|
f"Node ID '{node_id}' already exists in workflow"
|
49
261
|
)
|
50
262
|
|
51
|
-
# Import Node here to avoid circular imports
|
52
|
-
from kailash.nodes.base import Node
|
53
|
-
|
54
263
|
# Handle different input types
|
55
264
|
if isinstance(node_type, str):
|
56
265
|
# String node type name
|
```diff
@@ -80,6 +289,39 @@ class WorkflowBuilder:
         logger.info(f"Added node '{node_id}' of type '{type_name}'")
         return node_id

+    # Fluent API methods for backward compatibility
+    def add_node_fluent(
+        self, node_id: str, node_class_or_type: Any, **config
+    ) -> "WorkflowBuilder":
+        """
+        DEPRECATED: Fluent API for backward compatibility.
+        Use add_node(node_type, node_id, config) instead.
+
+        Args:
+            node_id: Node identifier
+            node_class_or_type: Node class or type
+            **config: Node configuration as keyword arguments
+
+        Returns:
+            Self for method chaining
+        """
+        import warnings
+
+        warnings.warn(
+            "Fluent API is deprecated. Use add_node(node_type, node_id, config) instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        if hasattr(node_class_or_type, "__name__"):
+            # Node class
+            self.add_node(node_class_or_type.__name__, node_id, config)
+        else:
+            # Assume string type
+            self.add_node(str(node_class_or_type), node_id, config)
+
+        return self
+
     def add_node_instance(self, node_instance: Any, node_id: str | None = None) -> str:
         """
         Add a node instance to the workflow.
```
```diff
@@ -124,7 +366,7 @@ class WorkflowBuilder:

     def add_connection(
         self, from_node: str, from_output: str, to_node: str, to_input: str
-    ) ->
+    ) -> "WorkflowBuilder":
         """
         Connect two nodes in the workflow.

```
```diff
@@ -161,6 +403,7 @@ class WorkflowBuilder:
         self.connections.append(connection)

         logger.info(f"Connected '{from_node}.{from_output}' to '{to_node}.{to_input}'")
+        return self

     def connect(
         self,
```
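Because `add_connection` now returns the builder instead of falling off the end of the method, connection setup can be chained. A small sketch (node IDs, the `PythonCodeNode` config key, and port names are illustrative):

```python
builder = WorkflowBuilder()
builder.add_node("PythonCodeNode", "extract", {"code": "result = {'rows': [1, 2]}"})
builder.add_node("PythonCodeNode", "transform", {"code": "result = rows"})
builder.add_node("PythonCodeNode", "load", {"code": "result = data"})

# Each call returns the builder, so connections chain naturally
(
    builder
    .add_connection("extract", "result", "transform", "rows")
    .add_connection("transform", "result", "load", "data")
)
```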
```diff
@@ -353,12 +596,108 @@ class WorkflowBuilder:
                     f"Failed to connect '{from_node}' to '{to_node}': {e}"
                 ) from e

+        # Parameter injection: Find nodes without incoming connections and inject parameters
+        if self.workflow_parameters:
+            nodes_with_inputs = set()
+            for conn in self.connections:
+                if not conn.get("is_workflow_input"):
+                    nodes_with_inputs.add(conn["to_node"])
+
+            nodes_without_inputs = set(self.nodes.keys()) - nodes_with_inputs
+
+            # For each node without inputs, check if it needs workflow parameters
+            for node_id in nodes_without_inputs:
+                node_info = self.nodes[node_id]
+                node_instance = workflow.get_node(node_id)
+
+                if hasattr(node_instance, "get_parameters"):
+                    params = node_instance.get_parameters()
+
+                    # Check which required parameters are missing from config
+                    for param_name, param_def in params.items():
+                        if param_def.required and param_name not in node_info["config"]:
+                            # Check if this parameter should come from workflow parameters
+                            if param_name in self.workflow_parameters:
+                                # Add to node config
+                                node_info["config"][param_name] = (
+                                    self.workflow_parameters[param_name]
+                                )
+                            elif node_id in self.parameter_mappings:
+                                # Check parameter mappings
+                                mapping = self.parameter_mappings[node_id]
+                                if param_name in mapping:
+                                    workflow_param = mapping[param_name]
+                                    if workflow_param in self.workflow_parameters:
+                                        node_info["config"][param_name] = (
+                                            self.workflow_parameters[workflow_param]
+                                        )
+
+            # Store workflow parameters in metadata for runtime reference
+            workflow.metadata["workflow_parameters"] = self.workflow_parameters
+            workflow.metadata["parameter_mappings"] = self.parameter_mappings
+
         logger.info(
             f"Built workflow '{workflow_id}' with "
             f"{len(self.nodes)} nodes and {len(self.connections)} connections"
         )
         return workflow

+    def set_workflow_parameters(self, **parameters) -> "WorkflowBuilder":
+        """
+        Set default parameters that will be passed to all nodes.
+
+        Args:
+            **parameters: Key-value pairs of workflow-level parameters
+
+        Returns:
+            Self for chaining
+        """
+        self.workflow_parameters.update(parameters)
+        return self
+
+    def add_parameter_mapping(
+        self, node_id: str, mappings: dict[str, str]
+    ) -> "WorkflowBuilder":
+        """
+        Add parameter mappings for a specific node.
+
+        Args:
+            node_id: Node to configure
+            mappings: Dict mapping workflow param names to node param names
+
+        Returns:
+            Self for chaining
+        """
+        if node_id not in self.parameter_mappings:
+            self.parameter_mappings[node_id] = {}
+        self.parameter_mappings[node_id].update(mappings)
+        return self
+
+    def add_input_connection(
+        self, to_node: str, to_input: str, from_workflow_param: str
+    ) -> "WorkflowBuilder":
+        """
+        Connect a workflow parameter directly to a node input.
+
+        Args:
+            to_node: Target node ID
+            to_input: Input parameter name on the node
+            from_workflow_param: Workflow parameter name
+
+        Returns:
+            Self for chaining
+        """
+        # Add a special connection type for workflow inputs
+        connection = {
+            "from_node": "__workflow_input__",
+            "from_output": from_workflow_param,
+            "to_node": to_node,
+            "to_input": to_input,
+            "is_workflow_input": True,
+        }
+        self.connections.append(connection)
+        return self
+
     def clear(self) -> "WorkflowBuilder":
         """
         Clear builder state.
```
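A sketch of the new parameter-injection surface (the node type and parameter names are illustrative, and `build()` is assumed to be the method the hunk above modifies). Note one quirk visible in the diff: the injection loop looks mappings up by *node* parameter name, so the dict passed to `add_parameter_mapping` effectively reads `{node_param: workflow_param}`, despite the docstring describing the reverse direction:

```python
builder = WorkflowBuilder()
builder.add_node("CSVReaderNode", "reader", {})  # required 'file_path' left unset

# Workflow-level defaults, injected at build time into entry nodes
# whose required parameters are missing from their config
builder.set_workflow_parameters(input_path="data.csv")

# Node param 'file_path' is filled from workflow param 'input_path'
builder.add_parameter_mapping("reader", {"file_path": "input_path"})

# Or wire a workflow parameter straight to a node input
builder.add_input_connection("reader", "file_path", "input_path")

workflow = builder.build()
# Parameters are also recorded in metadata for runtime reference
assert workflow.metadata["workflow_parameters"] == {"input_path": "data.csv"}
```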
```diff
@@ -369,6 +708,8 @@ class WorkflowBuilder:
         self.nodes = {}
         self.connections = []
         self._metadata = {}
+        self.workflow_parameters = {}
+        self.parameter_mappings = {}
         return self

     @classmethod
```
```diff
@@ -399,9 +740,21 @@ class WorkflowBuilder:
             # Dict format: {node_id: {type: "...", parameters: {...}}}
             for node_id, node_config in nodes_config.items():
                 node_type = node_config.get("type")
-
-
-
+
+                # Handle parameter naming inconsistencies - prefer 'parameters' over 'config'
+                if "parameters" in node_config:
+                    node_params = node_config["parameters"]
+                elif "config" in node_config:
+                    node_params = node_config["config"]
+                else:
+                    node_params = {}
+
+                # Ensure node_params is a dictionary
+                if not isinstance(node_params, dict):
+                    logger.warning(
+                        f"Node '{node_id}' parameters must be a dict, got {type(node_params)}. Using empty dict."
+                    )
+                    node_params = {}

                 if not node_type:
                     raise WorkflowValidationError(
```
```diff
@@ -414,7 +767,21 @@ class WorkflowBuilder:
             for node_config in nodes_config:
                 node_id = node_config.get("id")
                 node_type = node_config.get("type")
-
+
+                # Handle parameter naming inconsistencies - prefer 'parameters' over 'config'
+                if "parameters" in node_config:
+                    node_params = node_config["parameters"]
+                elif "config" in node_config:
+                    node_params = node_config["config"]
+                else:
+                    node_params = {}
+
+                # Ensure node_params is a dictionary
+                if not isinstance(node_params, dict):
+                    logger.warning(
+                        f"Node '{node_id}' parameters must be a dict, got {type(node_params)}. Using empty dict."
+                    )
+                    node_params = {}

                 if not node_id:
                     raise WorkflowValidationError("Node ID is required")
```
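Both branches of this classmethod now accept either key for node parameters. A sketch of the two accepted shapes; the entry-point name and the top-level `nodes` key are assumptions based on the `@classmethod` context above, so the call is left commented:

```python
# Dict format: {node_id: {type: "...", parameters: {...}}}
config = {
    "nodes": {
        "reader": {"type": "CSVReaderNode", "parameters": {"file_path": "in.csv"}},
        # 'config' is accepted as a fallback key for the same payload
        "writer": {"type": "CSVWriterNode", "config": {"file_path": "out.csv"}},
    },
}

# List format: [{id: "...", type: "...", parameters: {...}}]
config_list = {
    "nodes": [
        {"id": "reader", "type": "CSVReaderNode", "parameters": {"file_path": "in.csv"}},
    ],
}

# workflow = WorkflowBuilder.from_dict(config)  # hypothetical entry point
```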
kailash/workflow/cyclic_runner.py
CHANGED
```diff
@@ -184,7 +184,7 @@ class CyclicWorkflowExecutor:
         if not workflow.has_cycles():
             # No cycles, use standard DAG execution
             logger.info("No cycles detected, using standard DAG execution")
-            return self.dag_runner.
+            return self.dag_runner.execute(workflow, parameters), run_id

         # Execute with cycle support
         try:
```
```diff
@@ -370,18 +370,110 @@ class CyclicWorkflowExecutor:
                 )
                 results.update(cycle_results)
             else:
-                # Execute DAG nodes
-
-
-
-
-
-
-
-
+                # Execute DAG nodes using extracted method
+                dag_results = self._execute_dag_portion(
+                    workflow, stage.nodes, state, task_manager
+                )
+                results.update(dag_results)
+
+        return results
+
+    def _execute_dag_portion(
+        self,
+        workflow: Workflow,
+        dag_nodes: list[str],
+        state: WorkflowState,
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
+        """Execute DAG (non-cyclic) portion of the workflow.
+
+        Args:
+            workflow: Workflow instance
+            dag_nodes: List of DAG node IDs to execute
+            state: Workflow state
+            task_manager: Optional task manager for tracking
+
+        Returns:
+            Dictionary with node IDs as keys and their results as values
+        """
+        results = {}
+
+        for node_id in dag_nodes:
+            if node_id not in state.node_outputs:
+                logger.info(f"Executing DAG node: {node_id}")
+                node_result = self._execute_node(
+                    workflow, node_id, state, task_manager=task_manager
+                )
+                results[node_id] = node_result
+                state.node_outputs[node_id] = node_result
+
+        return results
+
+    def _execute_cycle_groups(
+        self,
+        workflow: Workflow,
+        cycle_groups: list["CycleGroup"],
+        state: WorkflowState,
+        task_manager: TaskManager | None = None,
+    ) -> dict[str, Any]:
+        """Execute cycle groups portion of the workflow.
+
+        Args:
+            workflow: Workflow instance
+            cycle_groups: List of cycle groups to execute
+            state: Workflow state
+            task_manager: Optional task manager for tracking
+
+        Returns:
+            Dictionary with node IDs as keys and their results as values
+        """
+        results = {}
+
+        for cycle_group in cycle_groups:
+            logger.info(f"Executing cycle group: {cycle_group.cycle_id}")
+            cycle_results = self._execute_cycle_group(
+                workflow, cycle_group, state, task_manager
+            )
+            results.update(cycle_results)

         return results

+    def _propagate_parameters(
+        self,
+        current_params: dict[str, Any],
+        current_results: dict[str, Any],
+        cycle_config: dict[str, Any] | None = None,
+    ) -> dict[str, Any]:
+        """Handle parameter propagation between cycle iterations.
+
+        Args:
+            current_params: Current iteration parameters
+            current_results: Results from current iteration
+            cycle_config: Cycle configuration (optional)
+
+        Returns:
+            Updated parameters for the next iteration
+        """
+        # Base propagation: copy current results for next iteration
+        next_params = current_results.copy() if current_results else {}
+
+        # Apply any cycle-specific parameter mappings if provided
+        if cycle_config and "parameter_mappings" in cycle_config:
+            mappings = cycle_config["parameter_mappings"]
+            for src_key, dst_key in mappings.items():
+                if src_key in current_results:
+                    next_params[dst_key] = current_results[src_key]
+
+        # Preserve any initial parameters that aren't overridden
+        for key, value in current_params.items():
+            if key not in next_params:
+                next_params[key] = value
+
+        # Filter out None values to avoid validation errors
+        next_params = self._filter_none_values(next_params)
+
+        return next_params
+
     def _execute_cycle_group(
         self,
         workflow: Workflow,
```
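To make the merge order of `_propagate_parameters` concrete, here is a standalone re-implementation with the same semantics, assuming `_filter_none_values` (not shown in this diff) simply drops `None` entries:

```python
from typing import Any


def propagate(
    current_params: dict[str, Any],
    current_results: dict[str, Any],
    cycle_config: dict[str, Any] | None = None,
) -> dict[str, Any]:
    # 1. Results of this iteration seed the next one
    next_params = dict(current_results or {})
    # 2. Explicit cycle mappings copy results under new names
    for src, dst in (cycle_config or {}).get("parameter_mappings", {}).items():
        if src in current_results:
            next_params[dst] = current_results[src]
    # 3. Initial parameters survive unless already overridden
    for key, value in current_params.items():
        next_params.setdefault(key, value)
    # 4. None values are dropped to avoid validation errors
    return {k: v for k, v in next_params.items() if v is not None}


assert propagate(
    {"seed": 1, "count": 0},
    {"count": 3, "noise": None},
    {"parameter_mappings": {"count": "prev_count"}},
) == {"count": 3, "prev_count": 3, "seed": 1}
```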
|