quantalogic 0.53.0__py3-none-any.whl → 0.56.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry, and is provided for informational purposes only.
- quantalogic/__init__.py +7 -0
- quantalogic/flow/flow.py +267 -80
- quantalogic/flow/flow_extractor.py +216 -87
- quantalogic/flow/flow_generator.py +157 -88
- quantalogic/flow/flow_manager.py +252 -125
- quantalogic/flow/flow_manager_schema.py +62 -43
- quantalogic/flow/flow_mermaid.py +151 -68
- quantalogic/flow/flow_validator.py +204 -77
- quantalogic/flow/flow_yaml.md +341 -156
- quantalogic/tools/safe_python_interpreter_tool.py +6 -1
- quantalogic/xml_parser.py +5 -1
- quantalogic/xml_tool_parser.py +4 -1
- {quantalogic-0.53.0.dist-info → quantalogic-0.56.0.dist-info}/METADATA +16 -6
- {quantalogic-0.53.0.dist-info → quantalogic-0.56.0.dist-info}/RECORD +17 -17
- {quantalogic-0.53.0.dist-info → quantalogic-0.56.0.dist-info}/LICENSE +0 -0
- {quantalogic-0.53.0.dist-info → quantalogic-0.56.0.dist-info}/WHEEL +0 -0
- {quantalogic-0.53.0.dist-info → quantalogic-0.56.0.dist-info}/entry_points.txt +0 -0
@@ -1,12 +1,13 @@
 import ast
 import re
 from collections import defaultdict
-from typing import Dict, List, Optional, Set
+from typing import Dict, List, Optional, Set, Union
 
 from pydantic import BaseModel
 
 from quantalogic.flow.flow_manager import WorkflowManager
 from quantalogic.flow.flow_manager_schema import (
+    BranchCondition,
     NodeDefinition,
     TransitionDefinition,
     WorkflowDefinition,
@@ -37,12 +38,14 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
     issues: List[NodeError] = []
     output_names: Set[str] = set()
 
+    # Validate function definitions
     for name, func_def in workflow_def.functions.items():
         if func_def.type == "embedded" and not func_def.code:
             issues.append(NodeError(node_name=None, description=f"Embedded function '{name}' is missing 'code'"))
         elif func_def.type == "external" and (not func_def.module or not func_def.function):
             issues.append(NodeError(node_name=None, description=f"External function '{name}' is missing 'module' or 'function'"))
 
+    # Validate nodes
     for name, node_def in workflow_def.nodes.items():
         if node_def.function and node_def.function not in workflow_def.functions:
             issues.append(NodeError(node_name=name, description=f"References undefined function '{node_def.function}'"))
@@ -65,11 +68,17 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
             llm = node_def.llm_config
             if not llm.model:
                 issues.append(NodeError(node_name=name, description="Missing 'model' in llm_config"))
-            if not llm.prompt_template:
-                issues.append(NodeError(node_name=name, description="Missing 'prompt_template' in llm_config"))
+            if not llm.prompt_template and not llm.prompt_file:
+                issues.append(NodeError(node_name=name, description="Missing 'prompt_template' or 'prompt_file' in llm_config"))
             if llm.temperature < 0 or llm.temperature > 1:
                 issues.append(NodeError(node_name=name, description=f"Has invalid temperature: {llm.temperature}"))
 
+        if node_def.template_config:
+            template = node_def.template_config
+            if not template.template and not template.template_file:
+                issues.append(NodeError(node_name=name, description="Missing 'template' or 'template_file' in template_config"))
+
+    # Validate main workflow structure
     issues.extend(validate_workflow_structure(workflow_def.workflow, workflow_def.nodes, is_main=True))
     issues.extend(check_circular_transitions(workflow_def))
 
@@ -81,17 +90,18 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
     # Add main workflow transitions
     for trans in workflow_def.workflow.transitions:
         from_node = trans.from_node
-        to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
+        to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else [tn if isinstance(tn, str) else tn.to_node for tn in trans.to_node]
         for to_node in to_nodes:
            successors[from_node].append(to_node)
            predecessors[to_node].append(from_node)
+            all_nodes.add(to_node)
 
     # Add sub-workflow transitions with namespaced node names
     for parent_name, node_def in workflow_def.nodes.items():
         if node_def.sub_workflow:
             for trans in node_def.sub_workflow.transitions:
                 from_node = f"{parent_name}/{trans.from_node}"
-                to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
+                to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else [tn if isinstance(tn, str) else tn.to_node for tn in trans.to_node]
                 namespaced_to_nodes = [f"{parent_name}/{to_node}" for to_node in to_nodes]
                 all_nodes.add(from_node)
                 all_nodes.update(namespaced_to_nodes)
@@ -100,7 +110,9 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
                     predecessors[to_node].append(from_node)
 
     # Define function to get ancestors, handling cycles with a visited set
-    def get_ancestors(node: str, visited: Set[str] =
+    def get_ancestors(node: str, visited: Set[str] = None) -> Set[str]:
+        if visited is None:
+            visited = set()
         if node in visited or node not in all_nodes:
             return set()
         visited.add(node)
@@ -115,9 +127,8 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
         if node_def.output:
             output_to_node[node_def.output] = node_name
         if node_def.sub_workflow:
-            for sub_node_name in
-                sub_node_def
-                if sub_node_def and sub_node_def.output:
+            for sub_node_name, sub_node_def in workflow_def.nodes.items():
+                if sub_node_def.output:
                     output_to_node[sub_node_def.output] = f"{node_name}/{sub_node_name}"
 
     # Check each node's inputs against ancestors' outputs, including sub-workflows
@@ -125,6 +136,24 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
         required_inputs = set()
         full_node_name = node_name
 
+        # Handle inputs_mapping
+        if node_def.inputs_mapping:
+            for input_name, mapping in node_def.inputs_mapping.items():
+                if mapping.startswith("lambda ctx:"):
+                    try:
+                        # Basic syntax check for lambda
+                        compile(mapping, "<string>", "eval")
+                    except SyntaxError:
+                        issues.append(NodeError(
+                            node_name=node_name,
+                            description=f"Invalid lambda expression in inputs_mapping for '{input_name}': {mapping}"
+                        ))
+                elif not mapping.isidentifier():
+                    issues.append(NodeError(
+                        node_name=node_name,
+                        description=f"Invalid context key in inputs_mapping for '{input_name}': {mapping}"
+                    ))
+
         if node_def.function:
             maybe_func_def = workflow_def.functions.get(node_def.function)
             if maybe_func_def is None:
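Note: the block added above accepts two forms for each `inputs_mapping` value, a plain context key (checked with `str.isidentifier()`) or a string starting with `lambda ctx:` whose syntax is verified with `compile(..., "eval")` and never executed. A minimal standalone sketch of that rule, outside the validator (function name and messages are illustrative, not part of the package):

```python
from typing import Dict, List


def check_inputs_mapping(node_name: str, inputs_mapping: Dict[str, str]) -> List[str]:
    """Return problems for a node's inputs_mapping (sketch of the rule added in 0.56.0)."""
    problems: List[str] = []
    for input_name, mapping in inputs_mapping.items():
        if mapping.startswith("lambda ctx:"):
            try:
                compile(mapping, "<string>", "eval")  # syntax check only; never executed here
            except SyntaxError:
                problems.append(f"{node_name}: invalid lambda for '{input_name}': {mapping}")
        elif not mapping.isidentifier():
            problems.append(f"{node_name}: invalid context key for '{input_name}': {mapping}")
    return problems


print(check_inputs_mapping("finalize", {
    "text": "lambda ctx: ctx['farewell'] if ctx['result'] == 'no' else ctx['ai_result']",  # accepted
    "other": "not a context key!",                                                         # flagged
}))
```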
@@ -133,7 +162,7 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
                     description=f"Function '{node_def.function}' not found in workflow functions"
                 ))
             else:
-                func_def = maybe_func_def
+                func_def = maybe_func_def
                 if func_def.type == "embedded" and func_def.code:
                     try:
                         params = get_function_params(func_def.code, node_def.function)
@@ -141,7 +170,7 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
                     except ValueError as e:
                         issues.append(NodeError(node_name=node_name, description=f"Failed to parse function '{node_def.function}': {e}"))
         elif node_def.llm_config:
-            prompt_template = node_def.llm_config.prompt_template
+            prompt_template = node_def.llm_config.prompt_template or ""
            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
            cleaned_inputs = set()
            for var in input_vars:
@@ -149,48 +178,75 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
                 if base_var.isidentifier():
                     cleaned_inputs.add(base_var)
             required_inputs = cleaned_inputs
+        elif node_def.template_config:
+            template = node_def.template_config.template or ""
+            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", template))
+            cleaned_inputs = set()
+            for var in input_vars:
+                base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
+                if base_var.isidentifier():
+                    cleaned_inputs.add(base_var)
+            required_inputs = cleaned_inputs
         elif node_def.sub_workflow:
-            for sub_node_name in
-
-            if sub_node_def:
-
-                if
-
-
-
-
-
-
-
-
-
-
-                    required_inputs = set(params)
-                except ValueError as e:
-                    issues.append(NodeError(
-                        node_name=full_node_name,
-                        description=f"Failed to parse function '{sub_node_def.function}': {e}"
-                    ))
-                elif sub_node_def.llm_config:
-                    prompt_template = sub_node_def.llm_config.prompt_template
-                    input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
-                    cleaned_inputs = set()
-                    for var in input_vars:
-                        base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
-                        if base_var.isidentifier():
-                            cleaned_inputs.add(base_var)
-                    required_inputs = cleaned_inputs
-
-            if required_inputs:
-                ancestors = get_ancestors(full_node_name)
-                for input_name in required_inputs:
-                    producer_node = output_to_node.get(input_name)
-                    if producer_node is None or producer_node not in ancestors:
+            for sub_node_name, sub_node_def in workflow_def.nodes.items():
+                full_node_name = f"{node_name}/{sub_node_name}"
+                if sub_node_def.function:
+                    maybe_func_def = workflow_def.functions.get(sub_node_def.function)
+                    if maybe_func_def is None:
+                        issues.append(NodeError(
+                            node_name=full_node_name,
+                            description=f"Function '{sub_node_def.function}' not found in workflow functions"
+                        ))
+                    else:
+                        func_def = maybe_func_def
+                        if func_def.type == "embedded" and func_def.code:
+                            try:
+                                params = get_function_params(func_def.code, sub_node_def.function)
+                                required_inputs = set(params)
+                            except ValueError as e:
                                 issues.append(NodeError(
                                     node_name=full_node_name,
-                                    description=f"
+                                    description=f"Failed to parse function '{sub_node_def.function}': {e}"
                                 ))
+                elif sub_node_def.llm_config:
+                    prompt_template = sub_node_def.llm_config.prompt_template or ""
+                    input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
+                    cleaned_inputs = set()
+                    for var in input_vars:
+                        base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
+                        if base_var.isidentifier():
+                            cleaned_inputs.add(base_var)
+                    required_inputs = cleaned_inputs
+                elif sub_node_def.template_config:
+                    template = sub_node_def.template_config.template or ""
+                    input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", template))
+                    cleaned_inputs = set()
+                    for var in input_vars:
+                        base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
+                        if base_var.isidentifier():
+                            cleaned_inputs.add(base_var)
+                    required_inputs = cleaned_inputs
+
+                if required_inputs:
+                    ancestors = get_ancestors(full_node_name)
+                    for input_name in required_inputs:
+                        # Check if input is mapped
+                        if node_def.inputs_mapping and input_name in node_def.inputs_mapping:
+                            mapping = node_def.inputs_mapping[input_name]
+                            if not mapping.startswith("lambda ctx:") and mapping in output_to_node:
+                                producer_node = output_to_node.get(mapping)
+                                if producer_node not in ancestors:
+                                    issues.append(NodeError(
+                                        node_name=full_node_name,
+                                        description=f"inputs_mapping for '{input_name}' maps to '{mapping}', but it is not produced by an ancestor"
+                                    ))
+                            continue
+                        producer_node = output_to_node.get(input_name)
+                        if producer_node is None or producer_node not in ancestors:
+                            issues.append(NodeError(
+                                node_name=full_node_name,
+                                description=f"Requires input '{input_name}', but it is not produced by any ancestor"
+                            ))
            continue
 
         if not required_inputs:
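Both the `llm_config` path and the new `template_config` path above derive a node's required inputs from `{{ ... }}` placeholders with the same regular expression. A self-contained sketch of that extraction (a simplified helper, not the package API):

```python
import re
from typing import Set


def extract_placeholder_inputs(template: str) -> Set[str]:
    """Collect base variable names referenced as {{ ... }} placeholders (sketch of the validator's rule)."""
    inputs: Set[str] = set()
    for var in re.findall(r"{{\s*([^}]+?)\s*}}", template):
        # Keep only the leading identifier of simple arithmetic expressions, e.g. "{{count + 1}}" -> "count"
        base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
        if base_var.isidentifier():
            inputs.add(base_var)
    return inputs


print(extract_placeholder_inputs("Response: {{text}} - {{result}}"))  # {'text', 'result'} (order may vary)
```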
@@ -198,6 +254,17 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
 
         ancestors = get_ancestors(full_node_name)
         for input_name in required_inputs:
+            # Check if input is mapped
+            if node_def.inputs_mapping and input_name in node_def.inputs_mapping:
+                mapping = node_def.inputs_mapping[input_name]
+                if not mapping.startswith("lambda ctx:") and mapping in output_to_node:
+                    producer_node = output_to_node.get(mapping)
+                    if producer_node not in ancestors:
+                        issues.append(NodeError(
+                            node_name=full_node_name,
+                            description=f"inputs_mapping for '{input_name}' maps to '{mapping}', but it is not produced by an ancestor"
+                        ))
+                continue
            producer_node = output_to_node.get(input_name)
            if producer_node is None or producer_node not in ancestors:
                issues.append(NodeError(
@@ -205,16 +272,30 @@ def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeE
                     description=f"Requires input '{input_name}', but it is not produced by any ancestor"
                 ))
 
+    # Validate observers
     for observer in workflow_def.observers:
         if observer not in workflow_def.functions:
             issues.append(NodeError(node_name=None, description=f"Observer '{observer}' references undefined function"))
 
+    # Validate convergence nodes
+    for conv_node in workflow_def.workflow.convergence_nodes:
+        if conv_node not in workflow_def.nodes:
+            issues.append(NodeError(node_name=conv_node, description="Convergence node is not defined in nodes"))
+        # Check if the convergence node has multiple incoming transitions
+        incoming = [t for t in workflow_def.workflow.transitions if
+                    (isinstance(t.to_node, str) and t.to_node == conv_node) or
+                    (isinstance(t.to_node, list) and any(isinstance(tn, str) and tn == conv_node or
+                                                         isinstance(tn, BranchCondition) and tn.to_node == conv_node
+                                                         for tn in t.to_node))]
+        if len(incoming) < 2:
+            issues.append(NodeError(node_name=conv_node, description="Convergence node has fewer than 2 incoming transitions"))
+
     return issues
 
 
-def validate_workflow_structure(structure: WorkflowStructure, nodes: Dict[str, NodeDefinition],
+def validate_workflow_structure(structure: WorkflowStructure, nodes: Dict[str, NodeDefinition],
                                 is_main: bool = False) -> List[NodeError]:
-    """Validate a WorkflowStructure for consistency."""
+    """Validate a WorkflowStructure for consistency, including branch and converge support."""
     issues: List[NodeError] = []
 
     if is_main and not structure.start:
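The convergence-node check added above only needs to count how many transitions can reach the node, treating plain string targets and `BranchCondition` targets alike. A standalone sketch of that count, using simplified stand-ins for the schema classes and illustrative data:

```python
from dataclasses import dataclass
from typing import List, Optional, Union


@dataclass
class BranchCondition:
    # Simplified stand-in for quantalogic.flow.flow_manager_schema.BranchCondition
    to_node: str
    condition: Optional[str] = None


@dataclass
class Transition:
    # Simplified stand-in for TransitionDefinition
    from_node: str
    to_node: Union[str, List[Union[str, BranchCondition]]]


def count_incoming(transitions: List[Transition], conv_node: str) -> int:
    """Count transitions that can reach conv_node; the validator requires at least 2."""
    def targets(t: Transition) -> List[str]:
        if isinstance(t.to_node, str):
            return [t.to_node]
        return [tn if isinstance(tn, str) else tn.to_node for tn in t.to_node]

    return sum(conv_node in targets(t) for t in transitions)


# Mirrors the branch/converge layout built in the sample main() further down.
transitions = [
    Transition("check", [BranchCondition("ai_node", "ctx['result'] == 'yes'"),
                         BranchCondition("goodbye", "ctx['result'] == 'no'")]),
    Transition("ai_node", "finalize"),
    Transition("goodbye", "finalize"),
]
print(count_incoming(transitions, "finalize"))  # 2 -> "finalize" qualifies as a convergence node
```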
@@ -225,11 +306,19 @@ def validate_workflow_structure(structure: WorkflowStructure, nodes: Dict[str, N
     for trans in structure.transitions:
         if trans.from_node not in nodes:
             issues.append(NodeError(node_name=trans.from_node, description="Transition from undefined node"))
-
+
+        to_nodes: List[Union[str, BranchCondition]] = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
         for to_node in to_nodes:
-            if to_node
-
-
+            target_node = to_node if isinstance(to_node, str) else to_node.to_node
+            if target_node not in nodes:
+                issues.append(NodeError(node_name=target_node, description=f"Transition to undefined node from '{trans.from_node}'"))
+            if isinstance(to_node, BranchCondition) and to_node.condition:
+                try:
+                    compile(to_node.condition, "<string>", "eval")
+                except SyntaxError:
+                    issues.append(NodeError(node_name=trans.from_node, description=f"Invalid branch condition syntax: {to_node.condition}"))
+
+        if trans.condition and isinstance(trans.to_node, str):
            try:
                compile(trans.condition, "<string>", "eval")
            except SyntaxError:
@@ -247,12 +336,17 @@ def check_circular_transitions(workflow_def: WorkflowDefinition) -> List[NodeErr
             cycle_nodes = list(path)[list(path).index(node):] + [node]
             cycle = " -> ".join(cycle_nodes)
             cycle_transitions = [
-                t for t in path_transitions
-                if t.from_node in cycle_nodes and
-                (isinstance(t.to_node, str) and t.to_node in cycle_nodes) or
-
+                t for t in path_transitions
+                if t.from_node in cycle_nodes and
+                ((isinstance(t.to_node, str) and t.to_node in cycle_nodes) or
+                 (isinstance(t.to_node, list) and any((isinstance(tn, str) and tn in cycle_nodes) or
+                                                      (isinstance(tn, BranchCondition) and tn.to_node in cycle_nodes)
+                                                      for tn in t.to_node)))
             ]
-            if all
+            # Check if all transitions in the cycle are unconditional
+            if all((t.condition is None if isinstance(t.to_node, str) else
+                    all(isinstance(tn, str) or (isinstance(tn, BranchCondition) and tn.condition is None) for tn in t.to_node))
+                   for t in cycle_transitions):
                issues.append(NodeError(node_name=None, description=f"Unconditional circular transition detected: {cycle}"))
                return
        if node in visited or node not in workflow_def.nodes:
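The reworked cycle check above flags a cycle only when every transition in it is unconditional; for branching targets, a plain string or a `BranchCondition` whose condition is None counts as unconditional. A small sketch of that rule under the same simplifying assumptions (stand-in type, string targets only, illustrative data):

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Transition:
    # Simplified stand-in for TransitionDefinition; branch targets (BranchCondition)
    # would additionally need their own condition to be None to count as unconditional.
    from_node: str
    to_node: str
    condition: Optional[str] = None


def cycle_is_unconditional(cycle_transitions: List[Transition]) -> bool:
    """True when no transition in the cycle carries a condition (the case the validator reports)."""
    return all(t.condition is None for t in cycle_transitions)


loop = [Transition("start", "outro"), Transition("outro", "start")]            # the intentional cycle in main()
gated = [Transition("a", "b"), Transition("b", "a", condition="ctx['retry']")]
print(cycle_is_unconditional(loop))   # True  -> "Unconditional circular transition detected"
print(cycle_is_unconditional(gated))  # False -> allowed, since the cycle can terminate
```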
@@ -265,7 +359,8 @@ def check_circular_transitions(workflow_def: WorkflowDefinition) -> List[NodeErr
             if trans.from_node == node:
                 path_transitions.append(trans)
                 to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
-                for
+                for to_node in to_nodes:
+                    next_node = to_node if isinstance(to_node, str) else to_node.to_node
                    dfs(next_node, visited, path, transitions, path_transitions)
                path_transitions.pop()
 
@@ -282,7 +377,7 @@ def check_circular_transitions(workflow_def: WorkflowDefinition) -> List[NodeErr
 
 
 def main():
-    """Build a sample workflow using WorkflowManager and validate it."""
+    """Build a sample workflow with branch, converge, template node, and input mapping using WorkflowManager and validate it."""
     manager = WorkflowManager()
 
     # Define functions
@@ -293,40 +388,72 @@ def main():
     )
     manager.add_function(
         name="say_goodbye",
-        type_="
-
-
+        type_="embedded",
+        code="def say_goodbye():\n return 'Goodbye, World!'"
+    )
+    manager.add_function(
+        name="check_condition",
+        type_="embedded",
+        code="def check_condition(text: str):\n return 'yes' if 'Hello' in text else 'no'"
+    )
+    manager.add_function(
+        name="finalize",
+        type_="embedded",
+        code="def finalize(text: str):\n return 'Done'"
     )
 
     # Add nodes for main workflow
-    manager.add_node(name="start", function="say_hello", output="
+    manager.add_node(name="start", function="say_hello", output="text")
+    manager.add_node(name="check", function="check_condition", output="result",
+                     inputs_mapping={"text": "text"})  # Mapping input to context key
+    manager.add_node(name="goodbye", function="say_goodbye", output="farewell")
+    manager.add_node(name="finalize", function="finalize", output="status",
+                     inputs_mapping={"text": "lambda ctx: ctx['farewell'] if ctx['result'] == 'no' else ctx['ai_result']"})
     manager.add_node(name="outro", function="non_existent")  # Intentional: undefined function
-
+
     # Add LLM node with valid temperature
     manager.add_node(
         name="ai_node",
         llm_config={
-            "model": "gpt-3.5-turbo",
-            "prompt_template": "{{
+            "model": "gpt-3.5-turbo",
+            "prompt_template": "{{text}}",
             "temperature": 0.7
-        }
+        },
+        output="ai_result"
+    )
+
+    # Add template node
+    manager.add_node(
+        name="template_node",
+        template_config={
+            "template": "Response: {{text}} - {{result}}"
+        },
+        output="template_output"
     )
 
     # Add nodes and sub-workflow
-    manager.add_node(name="nested_start", function="say_hello", output="
+    manager.add_node(name="nested_start", function="say_hello", output="nested_text")
     manager.add_node(name="nested_end", function="say_goodbye")
     sub_workflow = WorkflowStructure(start="nested_start")
-    sub_workflow.transitions.
-        TransitionDefinition(from_node="nested_start", to_node="nested_end"),
-        TransitionDefinition(from_node="nested_end", to_node="nested_start")  # Intentional: circular
-    ])
+    sub_workflow.transitions.append(TransitionDefinition(from_node="nested_start", to_node="nested_end"))
     manager.add_node(name="nested", sub_workflow=sub_workflow)
 
-    # Configure main workflow
+    # Configure main workflow with branch and converge
     manager.set_start_node("start")
+    manager.add_transition(from_node="start", to_node="check")
+    manager.add_transition(
+        from_node="check",
+        to_node=[
+            BranchCondition(to_node="ai_node", condition="ctx['result'] == 'yes'"),
+            BranchCondition(to_node="goodbye", condition="ctx['result'] == 'no'")
+        ]
+    )
+    manager.add_transition(from_node="ai_node", to_node="finalize")
+    manager.add_transition(from_node="goodbye", to_node="finalize")
+    manager.add_transition(from_node="finalize", to_node="template_node")
     manager.add_transition(from_node="start", to_node="outro")
     manager.add_transition(from_node="outro", to_node="start")  # Intentional: circular
-    manager.
+    manager.add_convergence_node("finalize")
 
     # Add observer with error handling
     try: