quantalogic 0.80-py3-none-any.whl → 0.93-py3-none-any.whl
This diff shows the changes between publicly available package versions released to a supported registry, as they appear in their respective public registries. It is provided for informational purposes only.
- quantalogic/flow/__init__.py +16 -34
- quantalogic/main.py +11 -6
- quantalogic/tools/tool.py +8 -922
- quantalogic-0.93.dist-info/METADATA +475 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/RECORD +8 -54
- quantalogic/codeact/TODO.md +0 -14
- quantalogic/codeact/__init__.py +0 -0
- quantalogic/codeact/agent.py +0 -478
- quantalogic/codeact/cli.py +0 -50
- quantalogic/codeact/cli_commands/__init__.py +0 -0
- quantalogic/codeact/cli_commands/create_toolbox.py +0 -45
- quantalogic/codeact/cli_commands/install_toolbox.py +0 -20
- quantalogic/codeact/cli_commands/list_executor.py +0 -15
- quantalogic/codeact/cli_commands/list_reasoners.py +0 -15
- quantalogic/codeact/cli_commands/list_toolboxes.py +0 -47
- quantalogic/codeact/cli_commands/task.py +0 -215
- quantalogic/codeact/cli_commands/tool_info.py +0 -24
- quantalogic/codeact/cli_commands/uninstall_toolbox.py +0 -43
- quantalogic/codeact/config.yaml +0 -21
- quantalogic/codeact/constants.py +0 -9
- quantalogic/codeact/events.py +0 -85
- quantalogic/codeact/examples/README.md +0 -342
- quantalogic/codeact/examples/agent_sample.yaml +0 -29
- quantalogic/codeact/executor.py +0 -186
- quantalogic/codeact/history_manager.py +0 -94
- quantalogic/codeact/llm_util.py +0 -57
- quantalogic/codeact/plugin_manager.py +0 -92
- quantalogic/codeact/prompts/error_format.j2 +0 -11
- quantalogic/codeact/prompts/generate_action.j2 +0 -77
- quantalogic/codeact/prompts/generate_program.j2 +0 -52
- quantalogic/codeact/prompts/response_format.j2 +0 -11
- quantalogic/codeact/react_agent.py +0 -318
- quantalogic/codeact/reasoner.py +0 -185
- quantalogic/codeact/templates/toolbox/README.md.j2 +0 -10
- quantalogic/codeact/templates/toolbox/pyproject.toml.j2 +0 -16
- quantalogic/codeact/templates/toolbox/tools.py.j2 +0 -6
- quantalogic/codeact/templates.py +0 -7
- quantalogic/codeact/tools_manager.py +0 -258
- quantalogic/codeact/utils.py +0 -62
- quantalogic/codeact/xml_utils.py +0 -126
- quantalogic/flow/flow.py +0 -1070
- quantalogic/flow/flow_extractor.py +0 -783
- quantalogic/flow/flow_generator.py +0 -322
- quantalogic/flow/flow_manager.py +0 -676
- quantalogic/flow/flow_manager_schema.py +0 -287
- quantalogic/flow/flow_mermaid.py +0 -365
- quantalogic/flow/flow_validator.py +0 -479
- quantalogic/flow/flow_yaml.linkedin.md +0 -31
- quantalogic/flow/flow_yaml.md +0 -767
- quantalogic/flow/templates/prompt_check_inventory.j2 +0 -1
- quantalogic/flow/templates/system_check_inventory.j2 +0 -1
- quantalogic-0.80.dist-info/METADATA +0 -900
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/LICENSE +0 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/WHEEL +0 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/entry_points.txt +0 -0
quantalogic/flow/flow_extractor.py (deleted)
@@ -1,783 +0,0 @@
-import ast
-import os
-
-from loguru import logger
-
-from quantalogic.flow.flow_generator import generate_executable_script
-from quantalogic.flow.flow_manager import WorkflowManager
-from quantalogic.flow.flow_manager_schema import (
-    BranchCondition,
-    FunctionDefinition,
-    NodeDefinition,
-    TemplateConfig,
-    TransitionDefinition,
-    WorkflowDefinition,
-    WorkflowStructure,
-)
-
-
-class WorkflowExtractor(ast.NodeVisitor):
-    """
-    AST visitor to extract workflow nodes and structure from a Python file.
-
-    This class parses Python source code to identify workflow components defined with Nodes decorators
-    and Workflow construction, including branch, converge, and loop patterns, building a WorkflowDefinition
-    compatible with WorkflowManager. Fully supports input mappings and template nodes.
-    """
-
-    def __init__(self):
-        """Initialize the extractor with empty collections for workflow components."""
-        self.nodes = {}  # Maps node names to their definitions
-        self.functions = {}  # Maps function names to their code
-        self.transitions = []  # List of TransitionDefinition objects
-        self.start_node = None  # Starting node of the workflow
-        self.global_vars = {}  # Tracks global variable assignments (e.g., DEFAULT_LLM_PARAMS)
-        self.observers = []  # List of observer function names
-        self.convergence_nodes = []  # List of convergence nodes
-        # Added for loop support
-        self.in_loop = False  # Flag indicating if we're inside a loop
-        self.loop_nodes = []  # List of nodes within the current loop
-        self.loop_entry_node = None  # Node before the loop starts
-
-    def visit_Module(self, node):
-        """Log and explicitly process top-level statements in the module."""
-        logger.debug(f"Visiting module with {len(node.body)} top-level statements")
-        for item in node.body:
-            logger.debug(f"Processing top-level node: {type(item).__name__}")
-            if isinstance(item, ast.FunctionDef):
-                self.visit_FunctionDef(item)
-            elif isinstance(item, ast.AsyncFunctionDef):
-                self.visit_AsyncFunctionDef(item)
-            else:
-                self.visit(item)
-
-    def visit_Assign(self, node):
-        """Detect global variable assignments and workflow assignments."""
-        if len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
-            var_name = node.targets[0].id
-            value = node.value
-
-            # Handle global variable assignments (e.g., MODEL, DEFAULT_LLM_PARAMS)
-            if isinstance(value, ast.Dict):
-                self.global_vars[var_name] = {}
-                for k, v in zip(value.keys, value.values):
-                    if isinstance(k, ast.Constant):
-                        key = k.value
-                        if isinstance(v, ast.Constant):
-                            self.global_vars[var_name][key] = v.value
-                        elif isinstance(v, ast.Name) and v.id in self.global_vars:
-                            self.global_vars[var_name][key] = self.global_vars[v.id]
-                logger.debug(
-                    f"Captured global variable '{var_name}' with keys: {list(self.global_vars[var_name].keys())}"
-                )
-
-            # Handle simple constant assignments (e.g., MODEL = "gemini/gemini-2.0-flash")
-            elif isinstance(value, ast.Constant):
-                self.global_vars[var_name] = value.value
-                logger.debug(f"Captured global constant '{var_name}' with value: {value.value}")
-
-            # Handle workflow assignments, including parenthesized expressions
-            if isinstance(value, ast.Tuple) and len(value.elts) == 1:
-                value = value.elts[0]  # Unwrap single-element tuple from parentheses
-            if isinstance(value, ast.Call):
-                self.process_workflow_expr(value, var_name)
-
-        self.generic_visit(node)
-
-    def visit_FunctionDef(self, node):
-        """Extract node information from synchronous function definitions."""
-        logger.debug(f"Visiting synchronous function definition: '{node.name}'")
-        for decorator in node.decorator_list:
-            decorator_name = None
-            kwargs = {}
-            logger.debug(f"Examining decorator for '{node.name}': {ast.dump(decorator)}")
-
-            if (
-                isinstance(decorator, ast.Attribute)
-                and isinstance(decorator.value, ast.Name)
-                and decorator.value.id == "Nodes"
-            ):
-                decorator_name = decorator.attr
-                logger.debug(f"Found simple decorator 'Nodes.{decorator_name}' for '{node.name}'")
-
-            elif (
-                isinstance(decorator, ast.Call)
-                and isinstance(decorator.func, ast.Attribute)
-                and isinstance(decorator.func.value, ast.Name)
-                and decorator.func.value.id == "Nodes"
-            ):
-                decorator_name = decorator.func.attr
-                logger.debug(f"Found call decorator 'Nodes.{decorator_name}' for '{node.name}'")
-                for kw in decorator.keywords:
-                    if kw.arg is None and isinstance(kw.value, ast.Name):  # Handle **kwargs
-                        var_name = kw.value.id
-                        if var_name in self.global_vars:
-                            kwargs.update(self.global_vars[var_name])
-                            logger.debug(f"Unpacked '{var_name}' into kwargs: {self.global_vars[var_name]}")
-                    elif isinstance(kw.value, ast.Constant):
-                        kwargs[kw.arg] = kw.value.value
-                    elif kw.arg == "response_model" and isinstance(kw.value, ast.Name):
-                        kwargs[kw.arg] = ast.unparse(kw.value)
-                    elif kw.arg == "transformer" and isinstance(kw.value, ast.Lambda):
-                        kwargs[kw.arg] = ast.unparse(kw.value)
-
-            if decorator_name:
-                func_name = node.name
-                inputs = [arg.arg for arg in node.args.args]
-
-                if decorator_name == "define":
-                    output = kwargs.get("output")
-                    self.nodes[func_name] = {
-                        "type": "function",
-                        "function": func_name,
-                        "inputs": inputs,
-                        "output": output,
-                    }
-                    logger.debug(f"Registered function node '{func_name}' with output '{output}'")
-                elif decorator_name == "llm_node":
-                    llm_config = {
-                        key: value
-                        for key, value in kwargs.items()
-                        if key in [
-                            "model",
-                            "system_prompt",
-                            "system_prompt_file",
-                            "prompt_template",
-                            "prompt_file",
-                            "temperature",
-                            "max_tokens",
-                            "top_p",
-                            "presence_penalty",
-                            "frequency_penalty",
-                            "output",
-                        ]
-                    }
-                    self.nodes[func_name] = {
-                        "type": "llm",
-                        "llm_config": llm_config,
-                        "inputs": inputs,
-                        "output": llm_config.get("output"),
-                    }
-                    logger.debug(f"Registered LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                elif decorator_name == "validate_node":
-                    output = kwargs.get("output")
-                    self.nodes[func_name] = {
-                        "type": "function",
-                        "function": func_name,
-                        "inputs": inputs,
-                        "output": output,
-                    }
-                    logger.debug(f"Registered validate node '{func_name}' with output '{output}'")
-                elif decorator_name == "structured_llm_node":
-                    llm_config = {
-                        key: value
-                        for key, value in kwargs.items()
-                        if key in [
-                            "model",
-                            "system_prompt",
-                            "system_prompt_file",
-                            "prompt_template",
-                            "prompt_file",
-                            "temperature",
-                            "max_tokens",
-                            "top_p",
-                            "presence_penalty",
-                            "frequency_penalty",
-                            "output",
-                            "response_model",
-                        ]
-                    }
-                    self.nodes[func_name] = {
-                        "type": "structured_llm",
-                        "llm_config": llm_config,
-                        "inputs": inputs,
-                        "output": llm_config.get("output"),
-                    }
-                    logger.debug(f"Registered structured LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                elif decorator_name == "template_node":
-                    template_config = {
-                        "template": kwargs.get("template", ""),
-                        "template_file": kwargs.get("template_file"),
-                    }
-                    if "rendered_content" not in inputs:
-                        inputs.insert(0, "rendered_content")
-                    self.nodes[func_name] = {
-                        "type": "template",
-                        "template_config": template_config,
-                        "inputs": inputs,
-                        "output": kwargs.get("output"),
-                    }
-                    logger.debug(f"Registered template node '{func_name}' with config: {template_config}")
-                elif decorator_name == "transform_node":
-                    output = kwargs.get("output")
-                    self.nodes[func_name] = {
-                        "type": "function",
-                        "function": func_name,
-                        "inputs": inputs,
-                        "output": output,
-                    }
-                    logger.debug(f"Registered transform node '{func_name}' with output '{output}'")
-                else:
-                    logger.warning(f"Unsupported decorator 'Nodes.{decorator_name}' in function '{func_name}'")
-
-                func_code = ast.unparse(node)
-                self.functions[func_name] = {
-                    "type": "embedded",
-                    "code": func_code,
-                }
-            else:
-                logger.debug(f"No recognized 'Nodes' decorator found for '{node.name}'")
-
-        self.generic_visit(node)
-
-    def visit_AsyncFunctionDef(self, node):
-        """Extract node information from asynchronous function definitions."""
-        logger.debug(f"Visiting asynchronous function definition: '{node.name}'")
-        for decorator in node.decorator_list:
-            decorator_name = None
-            kwargs = {}
-            logger.debug(f"Examining decorator for '{node.name}': {ast.dump(decorator)}")
-
-            if (
-                isinstance(decorator, ast.Attribute)
-                and isinstance(decorator.value, ast.Name)
-                and decorator.value.id == "Nodes"
-            ):
-                decorator_name = decorator.attr
-                logger.debug(f"Found simple decorator 'Nodes.{decorator_name}' for '{node.name}'")
-
-            elif (
-                isinstance(decorator, ast.Call)
-                and isinstance(decorator.func, ast.Attribute)
-                and isinstance(decorator.func.value, ast.Name)
-                and decorator.func.value.id == "Nodes"
-            ):
-                decorator_name = decorator.func.attr
-                logger.debug(f"Found call decorator 'Nodes.{decorator_name}' for '{node.name}'")
-                for kw in decorator.keywords:
-                    if kw.arg is None and isinstance(kw.value, ast.Name):  # Handle **kwargs
-                        var_name = kw.value.id
-                        if var_name in self.global_vars:
-                            kwargs.update(self.global_vars[var_name])
-                            logger.debug(f"Unpacked '{var_name}' into kwargs: {self.global_vars[var_name]}")
-                    elif isinstance(kw.value, ast.Constant):
-                        kwargs[kw.arg] = kw.value.value
-                    elif kw.arg == "response_model" and isinstance(kw.value, ast.Name):
-                        kwargs[kw.arg] = ast.unparse(kw.value)
-                    elif kw.arg == "transformer" and isinstance(kw.value, ast.Lambda):
-                        kwargs[kw.arg] = ast.unparse(kw.value)
-
-            if decorator_name:
-                func_name = node.name
-                inputs = [arg.arg for arg in node.args.args]
-
-                if decorator_name == "define":
-                    output = kwargs.get("output")
-                    self.nodes[func_name] = {
-                        "type": "function",
-                        "function": func_name,
-                        "inputs": inputs,
-                        "output": output,
-                    }
-                    logger.debug(f"Registered function node '{func_name}' with output '{output}'")
-                elif decorator_name == "llm_node":
-                    llm_config = {
-                        key: value
-                        for key, value in kwargs.items()
-                        if key in [
-                            "model",
-                            "system_prompt",
-                            "system_prompt_file",
-                            "prompt_template",
-                            "prompt_file",
-                            "temperature",
-                            "max_tokens",
-                            "top_p",
-                            "presence_penalty",
-                            "frequency_penalty",
-                            "output",
-                        ]
-                    }
-                    self.nodes[func_name] = {
-                        "type": "llm",
-                        "llm_config": llm_config,
-                        "inputs": inputs,
-                        "output": llm_config.get("output"),
-                    }
-                    logger.debug(f"Registered LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                elif decorator_name == "validate_node":
-                    output = kwargs.get("output")
-                    self.nodes[func_name] = {
-                        "type": "function",
-                        "function": func_name,
-                        "inputs": inputs,
-                        "output": output,
-                    }
-                    logger.debug(f"Registered validate node '{func_name}' with output '{output}'")
-                elif decorator_name == "structured_llm_node":
-                    llm_config = {
-                        key: value
-                        for key, value in kwargs.items()
-                        if key in [
-                            "model",
-                            "system_prompt",
-                            "system_prompt_file",
-                            "prompt_template",
-                            "prompt_file",
-                            "temperature",
-                            "max_tokens",
-                            "top_p",
-                            "presence_penalty",
-                            "frequency_penalty",
-                            "output",
-                            "response_model",
-                        ]
-                    }
-                    self.nodes[func_name] = {
-                        "type": "structured_llm",
-                        "llm_config": llm_config,
-                        "inputs": inputs,
-                        "output": llm_config.get("output"),
-                    }
-                    logger.debug(f"Registered structured LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                elif decorator_name == "template_node":
-                    template_config = {
-                        "template": kwargs.get("template", ""),
-                        "template_file": kwargs.get("template_file"),
-                    }
-                    if "rendered_content" not in inputs:
-                        inputs.insert(0, "rendered_content")
-                    self.nodes[func_name] = {
-                        "type": "template",
-                        "template_config": template_config,
-                        "inputs": inputs,
-                        "output": kwargs.get("output"),
-                    }
-                    logger.debug(f"Registered template node '{func_name}' with config: {template_config}")
-                elif decorator_name == "transform_node":
-                    output = kwargs.get("output")
-                    self.nodes[func_name] = {
-                        "type": "function",
-                        "function": func_name,
-                        "inputs": inputs,
-                        "output": output,
-                    }
-                    logger.debug(f"Registered transform node '{func_name}' with output '{output}'")
-                else:
-                    logger.warning(f"Unsupported decorator 'Nodes.{decorator_name}' in function '{func_name}'")
-
-                func_code = ast.unparse(node)
-                self.functions[func_name] = {
-                    "type": "embedded",
-                    "code": func_code,
-                }
-            else:
-                logger.debug(f"No recognized 'Nodes' decorator found for '{node.name}'")
-
-        self.generic_visit(node)
-
-    def process_workflow_expr(self, expr, var_name):
-        """
-        Recursively process Workflow method chaining to build transitions, structure, and observers.
-
-        Args:
-            expr: The AST expression to process.
-            var_name: The variable name to which the workflow is assigned (for logging/context).
-
-        Returns:
-            str or None: The current node name or None if no specific node is returned.
-        """
-        if not isinstance(expr, ast.Call):
-            logger.debug(f"Skipping non-Call node in workflow processing for '{var_name}'")
-            return None
-
-        func = expr.func
-        logger.debug(f"Processing Call node with func type: {type(func).__name__} for '{var_name}'")
-
-        if isinstance(func, ast.Name) and func.id == "Workflow":
-            self.start_node = expr.args[0].value if expr.args else None
-            logger.debug(f"Workflow start node set to '{self.start_node}' for variable '{var_name}'")
-            return self.start_node
-        elif isinstance(func, ast.Attribute):
-            method_name = func.attr
-            obj = func.value
-            previous_node = self.process_workflow_expr(obj, var_name)
-
-            if method_name == "then":
-                next_node = expr.args[0].value if expr.args else None
-                condition = None
-                for keyword in expr.keywords:
-                    if keyword.arg == "condition" and keyword.value:
-                        condition = ast.unparse(keyword.value)
-                if previous_node and next_node:
-                    self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=next_node, condition=condition))
-                    logger.debug(f"Added transition: {previous_node} -> {next_node} (condition: {condition})")
-                return next_node
-
-            elif method_name == "sequence":
-                nodes = [arg.value for arg in expr.args]
-                if previous_node and nodes:
-                    self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=nodes[0]))
-                    logger.debug(f"Added sequence start transition: {previous_node} -> {nodes[0]}")
-                for i in range(len(nodes) - 1):
-                    self.transitions.append(TransitionDefinition(from_node=nodes[i], to_node=nodes[i + 1]))
-                    logger.debug(f"Added sequence transition: {nodes[i]} -> {nodes[i + 1]}")
-                return nodes[-1] if nodes else previous_node
-
-            elif method_name == "parallel":
-                to_nodes = [arg.value for arg in expr.args]
-                if previous_node:
-                    self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=to_nodes))
-                    logger.debug(f"Added parallel transition: {previous_node} -> {to_nodes}")
-                return None
-
-            elif method_name == "branch":
-                branches = []
-                if expr.args and isinstance(expr.args[0], ast.List):
-                    for elt in expr.args[0].elts:
-                        if isinstance(elt, ast.Tuple) and len(elt.elts) == 2:
-                            to_node = elt.elts[0].value
-                            cond = ast.unparse(elt.elts[1]) if elt.elts[1] else None
-                            branches.append(BranchCondition(to_node=to_node, condition=cond))
-                            logger.debug(f"Added branch: {previous_node} -> {to_node} (condition: {cond})")
-                if previous_node and branches:
-                    self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=branches))
-                return None
-
-            elif method_name == "converge":
-                conv_node = expr.args[0].value if expr.args else None
-                if conv_node and conv_node not in self.convergence_nodes:
-                    self.convergence_nodes.append(conv_node)
-                    logger.debug(f"Added convergence node: {conv_node}")
-                return conv_node
-
-            elif method_name == "node":
-                node_name = expr.args[0].value if expr.args else None
-                inputs_mapping = None
-                for keyword in expr.keywords:
-                    if keyword.arg == "inputs_mapping" and isinstance(keyword.value, ast.Dict):
-                        inputs_mapping = {}
-                        for k, v in zip(keyword.value.keys, keyword.value.values):
-                            key = k.value if isinstance(k, ast.Constant) else ast.unparse(k)
-                            if isinstance(v, ast.Constant):
-                                inputs_mapping[key] = v.value
-                            elif isinstance(v, ast.Lambda):
-                                inputs_mapping[key] = f"lambda ctx: {ast.unparse(v.body)}"
-                            else:
-                                inputs_mapping[key] = ast.unparse(v)
-                if node_name:
-                    if node_name in self.nodes and inputs_mapping:
-                        self.nodes[node_name]["inputs_mapping"] = inputs_mapping
-                        logger.debug(f"Added inputs_mapping to node '{node_name}': {inputs_mapping}")
-                    if previous_node:
-                        self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=node_name))
-                        logger.debug(f"Added node transition: {previous_node} -> {node_name}")
-                    # Add node to loop_nodes if inside a loop
-                    if self.in_loop:
-                        self.loop_nodes.append(node_name)
-                        logger.debug(f"Added '{node_name}' to loop_nodes in '{var_name}'")
-                return node_name
-
-            elif method_name == "add_sub_workflow":
-                sub_wf_name = expr.args[0].value if expr.args else None
-                sub_wf_obj = expr.args[1] if len(expr.args) > 1 else None
-                inputs = {}
-                inputs_mapping = None
-                output = None
-                if len(expr.args) > 2 and isinstance(expr.args[2], ast.Dict):
-                    inputs_mapping = {}
-                    for k, v in zip(expr.args[2].keys, expr.args[2].values):
-                        key = k.value if isinstance(k, ast.Constant) else ast.unparse(k)
-                        if isinstance(v, ast.Constant):
-                            inputs_mapping[key] = v.value
-                        elif isinstance(v, ast.Lambda):
-                            inputs_mapping[key] = f"lambda ctx: {ast.unparse(v.body)}"
-                        else:
-                            inputs_mapping[key] = ast.unparse(v)
-                    inputs = list(inputs_mapping.keys())
-                if len(expr.args) > 3:
-                    output = expr.args[3].value
-                if sub_wf_name and sub_wf_obj:
-                    sub_extractor = WorkflowExtractor()
-                    sub_extractor.process_workflow_expr(sub_wf_obj, f"{var_name}_{sub_wf_name}")
-                    self.nodes[sub_wf_name] = {
-                        "type": "sub_workflow",
-                        "sub_workflow": WorkflowStructure(
-                            start=sub_extractor.start_node,
-                            transitions=sub_extractor.transitions,
-                            convergence_nodes=sub_extractor.convergence_nodes,
-                        ),
-                        "inputs": inputs,
-                        "inputs_mapping": inputs_mapping,
-                        "output": output,
-                    }
-                    self.observers.extend(sub_extractor.observers)
-                    logger.debug(f"Added sub-workflow node '{sub_wf_name}' with start '{sub_extractor.start_node}' and inputs_mapping: {inputs_mapping}")
-                    if previous_node:
-                        self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=sub_wf_name))
-                return sub_wf_name
-
-            elif method_name == "add_observer":
-                if expr.args and isinstance(expr.args[0], (ast.Name, ast.Constant)):
-                    observer_name = expr.args[0].id if isinstance(expr.args[0], ast.Name) else expr.args[0].value
-                    if observer_name not in self.observers:
-                        self.observers.append(observer_name)
-                        logger.debug(f"Added observer '{observer_name}' to workflow '{var_name}'")
-                else:
-                    logger.warning(f"Unsupported observer argument in 'add_observer' for '{var_name}'")
-                return previous_node
-
-            elif method_name == "start_loop":
-                if previous_node is None:
-                    logger.warning(f"start_loop called without a previous node in '{var_name}'")
-                    return None
-                self.in_loop = True
-                self.loop_entry_node = previous_node
-                self.loop_nodes = []
-                logger.debug(f"Started loop after node '{previous_node}' in '{var_name}'")
-                return previous_node
-
-            elif method_name == "end_loop":
-                cond = None
-                next_node = None
-                for keyword in expr.keywords:
-                    if keyword.arg == "condition":
-                        cond = ast.unparse(keyword.value)
-                    elif keyword.arg == "next_node":
-                        next_node = (keyword.value.value
-                                     if isinstance(keyword.value, ast.Constant)
-                                     else ast.unparse(keyword.value))
-                if not cond or not next_node:
-                    logger.warning(f"end_loop in '{var_name}' missing condition or next_node")
-                    return None
-                if not self.loop_nodes:
-                    logger.warning(f"end_loop called without loop nodes in '{var_name}'")
-                    return None
-                first_loop_node = self.loop_nodes[0]
-                last_loop_node = self.loop_nodes[-1]
-                # Loop-back transition: last node to first node when condition is false
-                negated_cond = f"not ({cond})"
-                self.transitions.append(
-                    TransitionDefinition(
-                        from_node=last_loop_node,
-                        to_node=first_loop_node,
-                        condition=negated_cond
-                    )
-                )
-                # Exit transition: last node to next_node when condition is true
-                self.transitions.append(
-                    TransitionDefinition(
-                        from_node=last_loop_node,
-                        to_node=next_node,
-                        condition=cond
-                    )
-                )
-                logger.debug(f"Added loop transitions: '{last_loop_node}' -> '{first_loop_node}' "
-                             f"(not {cond}), '{last_loop_node}' -> '{next_node}' ({cond})")
-                self.in_loop = False
-                self.loop_nodes = []
-                self.loop_entry_node = None
-                return next_node
-
-            else:
-                logger.warning(f"Unsupported Workflow method '{method_name}' in variable '{var_name}'")
-                return None
-
-
-def extract_workflow_from_file(file_path):
-    """
-    Extract a WorkflowDefinition and global variables from a Python file containing a workflow.
-
-    Args:
-        file_path (str): Path to the Python file to parse.
-
-    Returns:
-        tuple: (WorkflowDefinition, Dict[str, Any]) - The workflow definition and captured global variables.
-    """
-    with open(file_path) as f:
-        source = f.read()
-    tree = ast.parse(source)
-
-    extractor = WorkflowExtractor()
-    extractor.visit(tree)
-
-    functions = {name: FunctionDefinition(**func) for name, func in extractor.functions.items()}
-
-    nodes = {}
-    from quantalogic.flow.flow_manager_schema import LLMConfig
-
-    for name, node_info in extractor.nodes.items():
-        if node_info["type"] == "function":
-            nodes[name] = NodeDefinition(
-                function=node_info["function"],
-                inputs_mapping=node_info.get("inputs_mapping"),
-                output=node_info["output"],
-                retries=3,
-                delay=1.0,
-                timeout=None,
-                parallel=False,
-            )
-        elif node_info["type"] == "llm":
-            llm_config = LLMConfig(**node_info["llm_config"])
-            nodes[name] = NodeDefinition(
-                llm_config=llm_config,
-                inputs_mapping=node_info.get("inputs_mapping"),
-                output=node_info["output"],
-                retries=3,
-                delay=1.0,
-                timeout=None,
-                parallel=False,
-            )
-        elif node_info["type"] == "structured_llm":
-            llm_config = LLMConfig(**node_info["llm_config"])
-            nodes[name] = NodeDefinition(
-                llm_config=llm_config,
-                inputs_mapping=node_info.get("inputs_mapping"),
-                output=node_info["output"],
-                retries=3,
-                delay=1.0,
-                timeout=None,
-                parallel=False,
-            )
-        elif node_info["type"] == "template":
-            template_config = TemplateConfig(**node_info["template_config"])
-            nodes[name] = NodeDefinition(
-                template_config=template_config,
-                inputs_mapping=node_info.get("inputs_mapping"),
-                output=node_info["output"],
-                retries=3,
-                delay=1.0,
-                timeout=None,
-                parallel=False,
-            )
-        elif node_info["type"] == "sub_workflow":
-            nodes[name] = NodeDefinition(
-                sub_workflow=node_info["sub_workflow"],
-                inputs_mapping=node_info.get("inputs_mapping"),
-                output=node_info["output"],
-                retries=3,
-                delay=1.0,
-                timeout=None,
-                parallel=False,
-            )
-
-    workflow_structure = WorkflowStructure(
-        start=extractor.start_node,
-        transitions=extractor.transitions,
-        convergence_nodes=extractor.convergence_nodes,
-    )
-
-    workflow_def = WorkflowDefinition(
-        functions=functions,
-        nodes=nodes,
-        workflow=workflow_structure,
-        observers=extractor.observers,
-    )
-
-    return workflow_def, extractor.global_vars
-
-
-def print_workflow_definition(workflow_def):
-    """
-    Utility function to print a WorkflowDefinition in a human-readable format.
-
-    Args:
-        workflow_def (WorkflowDefinition): The workflow definition to print.
-    """
-    print("### Workflow Definition ###")
-    print("\n#### Functions:")
-    for name, func in workflow_def.functions.items():
-        print(f"- {name}:")
-        print(f"  Type: {func.type}")
-        print(f"  Code (first line): {func.code.splitlines()[0][:50]}..." if func.code else "  Code: None")
-
-    print("\n#### Nodes:")
-    for name, node in workflow_def.nodes.items():
-        print(f"- {name}:")
-        if node.function:
-            print("  Type: Function")
-            print(f"  Function: {node.function}")
-        elif node.llm_config:
-            if node.llm_config.response_model:
-                print("  Type: Structured LLM")
-                print(f"  Response Model: {node.llm_config.response_model}")
-            else:
-                print("  Type: LLM")
-            print(f"  Model: {node.llm_config.model}")
-            print(f"  Prompt Template: {node.llm_config.prompt_template}")
-            if node.llm_config.prompt_file:
-                print(f"  Prompt File: {node.llm_config.prompt_file}")
-        elif node.template_config:
-            print("  Type: Template")
-            print(f"  Template: {node.template_config.template}")
-            if node.template_config.template_file:
-                print(f"  Template File: {node.template_config.template_file}")
-        elif node.sub_workflow:
-            print("  Type: Sub-Workflow")
-            print(f"  Start Node: {node.sub_workflow.start}")
-        if node.inputs_mapping:
-            print(f"  Inputs Mapping: {node.inputs_mapping}")
-        print(f"  Output: {node.output or 'None'}")
-
-    print("\n#### Workflow Structure:")
-    print(f"Start Node: {workflow_def.workflow.start}")
-    print("Transitions:")
-    for trans in workflow_def.workflow.transitions:
-        if isinstance(trans.to_node, list):
-            if all(isinstance(tn, BranchCondition) for tn in trans.to_node):
-                for branch in trans.to_node:
-                    cond_str = f" [Condition: {branch.condition}]" if branch.condition else ""
-                    print(f"- {trans.from_node} -> {branch.to_node}{cond_str}")
-            else:
-                print(f"- {trans.from_node} -> {trans.to_node} (parallel)")
-        else:
-            cond_str = f" [Condition: {trans.condition}]" if trans.condition else ""
-            print(f"- {trans.from_node} -> {trans.to_node}{cond_str}")
-    print("Convergence Nodes:")
-    for conv_node in workflow_def.workflow.convergence_nodes:
-        print(f"- {conv_node}")
-
-    print("\n#### Observers:")
-    for observer in workflow_def.observers:
-        print(f"- {observer}")
-
-
-def main():
-    """Demonstrate extracting a workflow from a Python file and saving it to YAML."""
-    import argparse
-    import sys
-
-    parser = argparse.ArgumentParser(description='Extract workflow from a Python file')
-    parser.add_argument('file_path', nargs='?', default="examples/flow/simple_story_generator/story_generator_agent.py",
-                        help='Path to the Python file containing the workflow')
-    parser.add_argument('--output', '-o', default="./generated_workflow.py",
-                        help='Output path for the executable Python script')
-    parser.add_argument('--yaml', '-y', default="workflow_definition.yaml",
-                        help='Output path for the YAML workflow definition')
-
-    args = parser.parse_args()
-    file_path = args.file_path
-    output_file_python = args.output
-    yaml_output_path = args.yaml
-
-    if not os.path.exists(file_path):
-        logger.error(f"File '{file_path}' not found. Please provide a valid file path.")
-        logger.info("Example usage: python -m quantalogic.flow.flow_extractor path/to/your/workflow_file.py")
-        sys.exit(1)
-
-    try:
-        workflow_def, global_vars = extract_workflow_from_file(file_path)
-        logger.info(f"Successfully extracted workflow from '{file_path}'")
-        print_workflow_definition(workflow_def)
-        generate_executable_script(workflow_def, global_vars, output_file_python)
-        logger.info(f"Executable script generated at '{output_file_python}'")
-
-        manager = WorkflowManager(workflow_def)
-        manager.save_to_yaml(yaml_output_path)
-        logger.info(f"Workflow saved to YAML file '{yaml_output_path}'")
-    except Exception as e:
-        logger.error(f"Failed to parse or save workflow from '{file_path}': {e}")
-
-
-if __name__ == "__main__":
-    main()