quantalogic 0.51.0__py3-none-any.whl → 0.52.1__py3-none-any.whl
This diff shows the changes between publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- quantalogic/agent.py +1 -1
- quantalogic/flow/__init__.py +17 -0
- quantalogic/flow/flow_extractor.py +32 -103
- quantalogic/flow/flow_generator.py +6 -2
- quantalogic/flow/flow_manager.py +33 -24
- quantalogic/flow/flow_manager_schema.py +2 -3
- quantalogic/flow/flow_mermaid.py +240 -0
- quantalogic/flow/flow_validator.py +335 -0
- quantalogic/flow/flow_yaml.md +313 -329
- quantalogic/tools/__init__.py +3 -2
- quantalogic/tools/tool.py +129 -3
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.1.dist-info}/METADATA +89 -2
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.1.dist-info}/RECORD +16 -14
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.1.dist-info}/LICENSE +0 -0
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.1.dist-info}/WHEEL +0 -0
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.1.dist-info}/entry_points.txt +0 -0
quantalogic/flow/flow_validator.py (new file)
@@ -0,0 +1,335 @@
import ast
import re
from collections import defaultdict
from typing import Dict, List, Set, Optional

from pydantic import ValidationError, BaseModel

from quantalogic.flow.flow_manager import WorkflowManager
from quantalogic.flow.flow_manager_schema import (
    FunctionDefinition,
    LLMConfig,
    NodeDefinition,
    TransitionDefinition,
    WorkflowDefinition,
    WorkflowStructure,
)


class NodeError(BaseModel):
    """Represents an error associated with a specific node or workflow component."""
    node_name: Optional[str] = None  # None if the error isn't tied to a specific node
    description: str


def get_function_params(code: str, func_name: str) -> List[str]:
    """Extract parameter names from an embedded function's code."""
    try:
        tree = ast.parse(code)
        for node in ast.walk(tree):
            if isinstance(node, ast.FunctionDef) and node.name == func_name:
                return [arg.arg for arg in node.args.args]
        raise ValueError(f"Function '{func_name}' not found in code")
    except SyntaxError as e:
        raise ValueError(f"Invalid syntax in code: {e}")


def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeError]:
    """Validate a workflow definition and return a list of NodeError objects."""
    issues: List[NodeError] = []
    output_names: Set[str] = set()

    for name, func_def in workflow_def.functions.items():
        if func_def.type == "embedded" and not func_def.code:
            issues.append(NodeError(node_name=None, description=f"Embedded function '{name}' is missing 'code'"))
        elif func_def.type == "external" and (not func_def.module or not func_def.function):
            issues.append(NodeError(node_name=None, description=f"External function '{name}' is missing 'module' or 'function'"))

    for name, node_def in workflow_def.nodes.items():
        if node_def.function and node_def.function not in workflow_def.functions:
            issues.append(NodeError(node_name=name, description=f"References undefined function '{node_def.function}'"))

        if node_def.output:
            if not node_def.output.isidentifier():
                issues.append(NodeError(node_name=name, description=f"Has invalid output name '{node_def.output}'"))
            elif node_def.output in output_names:
                issues.append(NodeError(node_name=name, description=f"Has duplicate output name '{node_def.output}'"))
            output_names.add(node_def.output)

        if node_def.sub_workflow:
            sub_issues = validate_workflow_structure(node_def.sub_workflow, workflow_def.nodes)
            issues.extend(
                NodeError(node_name=f"{name}/{issue.node_name}" if issue.node_name else name, description=issue.description)
                for issue in sub_issues
            )

        if node_def.llm_config:
            llm = node_def.llm_config
            if not llm.model:
                issues.append(NodeError(node_name=name, description="Missing 'model' in llm_config"))
            if not llm.prompt_template:
                issues.append(NodeError(node_name=name, description="Missing 'prompt_template' in llm_config"))
            if llm.temperature < 0 or llm.temperature > 1:
                issues.append(NodeError(node_name=name, description=f"Has invalid temperature: {llm.temperature}"))

    issues.extend(validate_workflow_structure(workflow_def.workflow, workflow_def.nodes, is_main=True))
    issues.extend(check_circular_transitions(workflow_def))

    # Build the unified graph for main workflow and sub-workflows
    successors = defaultdict(list)
    predecessors = defaultdict(list)
    all_nodes = set(workflow_def.nodes.keys())

    # Add main workflow transitions
    for trans in workflow_def.workflow.transitions:
        from_node = trans.from_node
        to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
        for to_node in to_nodes:
            successors[from_node].append(to_node)
            predecessors[to_node].append(from_node)

    # Add sub-workflow transitions with namespaced node names
    for parent_name, node_def in workflow_def.nodes.items():
        if node_def.sub_workflow:
            for trans in node_def.sub_workflow.transitions:
                from_node = f"{parent_name}/{trans.from_node}"
                to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
                namespaced_to_nodes = [f"{parent_name}/{to_node}" for to_node in to_nodes]
                all_nodes.add(from_node)
                all_nodes.update(namespaced_to_nodes)
                successors[from_node].extend(namespaced_to_nodes)
                for to_node in namespaced_to_nodes:
                    predecessors[to_node].append(from_node)

    # Define function to get ancestors, handling cycles with a visited set
    def get_ancestors(node: str, visited: Set[str] = None) -> Set[str]:
        if visited is None:
            visited = set()
        if node in visited or node not in all_nodes:
            return set()
        visited.add(node)
        ancestors = set(predecessors[node])
        for pred in predecessors[node]:
            ancestors.update(get_ancestors(pred, visited.copy()))
        return ancestors

    # Create output-to-node mapping, including sub-workflow nodes
    output_to_node = {}
    for node_name, node_def in workflow_def.nodes.items():
        if node_def.output:
            output_to_node[node_def.output] = node_name
        if node_def.sub_workflow:
            for sub_node_name in node_def.sub_workflow.__dict__.get("nodes", {}):
                sub_node_def = workflow_def.nodes.get(sub_node_name)
                if sub_node_def and sub_node_def.output:
                    output_to_node[sub_node_def.output] = f"{node_name}/{sub_node_name}"

    # Check each node's inputs against ancestors' outputs, including sub-workflows
    for node_name, node_def in workflow_def.nodes.items():
        required_inputs = set()
        full_node_name = node_name

        if node_def.function:
            func_def = workflow_def.functions.get(node_def.function)
            if func_def and func_def.type == "embedded" and func_def.code:
                try:
                    params = get_function_params(func_def.code, node_def.function)
                    required_inputs = set(params)
                except ValueError as e:
                    issues.append(NodeError(node_name=node_name, description=f"Failed to parse function '{node_def.function}': {e}"))
            else:
                pass
        elif node_def.llm_config:
            prompt_template = node_def.llm_config.prompt_template
            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
            cleaned_inputs = set()
            for var in input_vars:
                base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
                if base_var.isidentifier():
                    cleaned_inputs.add(base_var)
            required_inputs = cleaned_inputs
        elif node_def.sub_workflow:
            for sub_node_name in node_def.sub_workflow.__dict__.get("nodes", {}):
                sub_node_def = workflow_def.nodes.get(sub_node_name)
                if sub_node_def:
                    full_node_name = f"{node_name}/{sub_node_name}"
                    if sub_node_def.function:
                        func_def = workflow_def.functions.get(sub_node_def.function)
                        if func_def and func_def.type == "embedded" and func_def.code:
                            try:
                                params = get_function_params(func_def.code, sub_node_def.function)
                                required_inputs = set(params)
                            except ValueError as e:
                                issues.append(NodeError(node_name=full_node_name, description=f"Failed to parse function '{sub_node_def.function}': {e}"))
                    elif sub_node_def.llm_config:
                        prompt_template = sub_node_def.llm_config.prompt_template
                        input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
                        cleaned_inputs = set()
                        for var in input_vars:
                            base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
                            if base_var.isidentifier():
                                cleaned_inputs.add(base_var)
                        required_inputs = cleaned_inputs

                    if required_inputs:
                        ancestors = get_ancestors(full_node_name)
                        for input_name in required_inputs:
                            producer_node = output_to_node.get(input_name)
                            if producer_node is None or producer_node not in ancestors:
                                issues.append(NodeError(node_name=full_node_name, description=f"Requires input '{input_name}', but it is not produced by any ancestor"))
            continue

        if not required_inputs:
            continue

        ancestors = get_ancestors(full_node_name)
        for input_name in required_inputs:
            producer_node = output_to_node.get(input_name)
            if producer_node is None or producer_node not in ancestors:
                issues.append(NodeError(node_name=full_node_name, description=f"Requires input '{input_name}', but it is not produced by any ancestor"))

    for observer in workflow_def.observers:
        if observer not in workflow_def.functions:
            issues.append(NodeError(node_name=None, description=f"Observer '{observer}' references undefined function"))

    return issues


def validate_workflow_structure(structure: WorkflowStructure, nodes: Dict[str, NodeDefinition],
                                is_main: bool = False) -> List[NodeError]:
    """Validate a WorkflowStructure for consistency."""
    issues: List[NodeError] = []

    if is_main and not structure.start:
        issues.append(NodeError(node_name=None, description="Main workflow is missing a start node"))
    elif structure.start and structure.start not in nodes:
        issues.append(NodeError(node_name=structure.start, description="Start node is not defined in nodes"))

    for trans in structure.transitions:
        if trans.from_node not in nodes:
            issues.append(NodeError(node_name=trans.from_node, description="Transition from undefined node"))
        to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
        for to_node in to_nodes:
            if to_node not in nodes:
                issues.append(NodeError(node_name=to_node, description=f"Transition to undefined node from '{trans.from_node}'"))
        if trans.condition:
            try:
                compile(trans.condition, "<string>", "eval")
            except SyntaxError:
                issues.append(NodeError(node_name=trans.from_node, description=f"Invalid condition syntax in transition: {trans.condition}"))

    return issues


def check_circular_transitions(workflow_def: WorkflowDefinition) -> List[NodeError]:
    """Detect circular transitions in the workflow using DFS, allowing cycles with conditions."""
    issues: List[NodeError] = []

    def dfs(node: str, visited: Set[str], path: Set[str], transitions: List[TransitionDefinition], path_transitions: List[TransitionDefinition]) -> None:
        if node in path:
            cycle_nodes = list(path)[list(path).index(node):] + [node]
            cycle = " -> ".join(cycle_nodes)
            cycle_transitions = [
                t for t in path_transitions
                if t.from_node in cycle_nodes and
                (isinstance(t.to_node, str) and t.to_node in cycle_nodes) or
                (isinstance(t.to_node, list) and any(to in cycle_nodes for to in t.to_node))
            ]
            if all(t.condition is None for t in cycle_transitions):
                issues.append(NodeError(node_name=None, description=f"Unconditional circular transition detected: {cycle}"))
            return
        if node in visited or node not in workflow_def.nodes:
            return

        visited.add(node)
        path.add(node)

        for trans in transitions:
            if trans.from_node == node:
                path_transitions.append(trans)
                to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
                for next_node in to_nodes:
                    dfs(next_node, visited, path, transitions, path_transitions)
                path_transitions.pop()

        path.remove(node)

    if workflow_def.workflow.start:
        dfs(workflow_def.workflow.start, set(), set(), workflow_def.workflow.transitions, [])

    for node_name, node_def in workflow_def.nodes.items():
        if node_def.sub_workflow and node_def.sub_workflow.start:
            dfs(node_def.sub_workflow.start, set(), set(), node_def.sub_workflow.transitions, [])

    return issues


def main():
    """Build a sample workflow using WorkflowManager and validate it."""
    manager = WorkflowManager()

    # Define functions
    manager.add_function(
        name="say_hello",
        type_="embedded",
        code="def say_hello():\n return 'Hello, World!'"
    )
    manager.add_function(
        name="say_goodbye",
        type_="external",
        module="external_module",
        function="goodbye_func"
    )

    # Add nodes for main workflow
    manager.add_node(name="start", function="say_hello", output="result")
    manager.add_node(name="outro", function="non_existent")  # Intentional: undefined function

    # Add LLM node with valid temperature
    manager.add_node(
        name="ai_node",
        llm_config={
            "model": "gpt-3.5-turbo",
            "prompt_template": "{{input}}",
            "temperature": 0.7
        }
    )

    # Add nodes and sub-workflow
    manager.add_node(name="nested_start", function="say_hello", output="greeting")
    manager.add_node(name="nested_end", function="say_goodbye")
    sub_workflow = WorkflowStructure(start="nested_start")
    sub_workflow.transitions.extend([
        TransitionDefinition(from_node="nested_start", to_node="nested_end"),
        TransitionDefinition(from_node="nested_end", to_node="nested_start")  # Intentional: circular
    ])
    manager.add_node(name="nested", sub_workflow=sub_workflow)

    # Configure main workflow
    manager.set_start_node("start")
    manager.add_transition(from_node="start", to_node="outro")
    manager.add_transition(from_node="outro", to_node="start")  # Intentional: circular
    manager.add_transition(from_node="start", to_node="missing_node", strict=False)  # Intentional: undefined node

    # Add observer with error handling
    try:
        manager.add_observer("undefined_observer")  # Intentional: undefined observer
    except ValueError:
        pass  # Allow validation to proceed

    # Validate the constructed workflow
    workflow = manager.workflow
    issues = validate_workflow_definition(workflow)

    # Display results
    if issues:
        print("Issues found in workflow definition:")
        for issue in sorted(issues, key=lambda x: (x.node_name or '', x.description)):
            node_part = f"Node '{issue.node_name}'" if issue.node_name else "Workflow"
            print(f"- {node_part}: {issue.description}")
    else:
        print("No issues found in workflow definition.")


if __name__ == "__main__":
    main()
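
For context, here is a minimal sketch (not part of the diff) of how the validator infers a node's required inputs, assuming the new module is importable as quantalogic.flow.flow_validator after upgrading to 0.52.1: embedded-function nodes get their inputs from the function signature via get_function_params, while LLM nodes get theirs from the {{ ... }} placeholders in the prompt template.

    # Sketch: exercising the two input-inference mechanisms used above.
    import re

    from quantalogic.flow.flow_validator import get_function_params

    # Embedded-function nodes: parameters are read from the function signature with ast.
    code = "def greet(name, title):\n    return f'Hello, {title} {name}!'"
    print(get_function_params(code, "greet"))  # ['name', 'title']

    # LLM nodes: inputs are the identifiers inside {{ ... }} in the prompt template
    # (same regex as validate_workflow_definition).
    prompt = "Summarize {{ article }} in {{ max_words }} words."
    print(sorted(set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt))))  # ['article', 'max_words']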