quantalogic 0.61.3__py3-none-any.whl → 0.92__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. quantalogic/agent.py +0 -1
  2. quantalogic/flow/__init__.py +16 -34
  3. quantalogic/main.py +11 -6
  4. quantalogic/tools/action_gen.py +1 -1
  5. quantalogic/tools/tool.py +8 -500
  6. quantalogic-0.92.dist-info/METADATA +448 -0
  7. {quantalogic-0.61.3.dist-info → quantalogic-0.92.dist-info}/RECORD +10 -33
  8. {quantalogic-0.61.3.dist-info → quantalogic-0.92.dist-info}/WHEEL +1 -1
  9. quantalogic-0.92.dist-info/entry_points.txt +3 -0
  10. quantalogic/codeact/__init__.py +0 -0
  11. quantalogic/codeact/agent.py +0 -499
  12. quantalogic/codeact/cli.py +0 -232
  13. quantalogic/codeact/constants.py +0 -9
  14. quantalogic/codeact/events.py +0 -78
  15. quantalogic/codeact/llm_util.py +0 -76
  16. quantalogic/codeact/prompts/error_format.j2 +0 -11
  17. quantalogic/codeact/prompts/generate_action.j2 +0 -26
  18. quantalogic/codeact/prompts/generate_program.j2 +0 -39
  19. quantalogic/codeact/prompts/response_format.j2 +0 -11
  20. quantalogic/codeact/tools_manager.py +0 -135
  21. quantalogic/codeact/utils.py +0 -135
  22. quantalogic/flow/flow.py +0 -960
  23. quantalogic/flow/flow_extractor.py +0 -723
  24. quantalogic/flow/flow_generator.py +0 -294
  25. quantalogic/flow/flow_manager.py +0 -637
  26. quantalogic/flow/flow_manager_schema.py +0 -255
  27. quantalogic/flow/flow_mermaid.py +0 -365
  28. quantalogic/flow/flow_validator.py +0 -479
  29. quantalogic/flow/flow_yaml.linkedin.md +0 -31
  30. quantalogic/flow/flow_yaml.md +0 -767
  31. quantalogic/flow/templates/prompt_check_inventory.j2 +0 -1
  32. quantalogic/flow/templates/system_check_inventory.j2 +0 -1
  33. quantalogic-0.61.3.dist-info/METADATA +0 -900
  34. quantalogic-0.61.3.dist-info/entry_points.txt +0 -6
  35. {quantalogic-0.61.3.dist-info → quantalogic-0.92.dist-info}/LICENSE +0 -0
quantalogic/flow/flow_extractor.py
@@ -1,723 +0,0 @@
- import ast
- import os
-
- from loguru import logger
-
- from quantalogic.flow.flow_generator import generate_executable_script
- from quantalogic.flow.flow_manager import WorkflowManager
- from quantalogic.flow.flow_manager_schema import (
-     BranchCondition,
-     FunctionDefinition,
-     NodeDefinition,
-     TemplateConfig,
-     TransitionDefinition,
-     WorkflowDefinition,
-     WorkflowStructure,
- )
-
-
- class WorkflowExtractor(ast.NodeVisitor):
-     """
-     AST visitor to extract workflow nodes and structure from a Python file.
-
-     This class parses Python source code to identify workflow components defined with Nodes decorators
-     and Workflow construction, including branch and converge patterns, building a WorkflowDefinition
-     compatible with WorkflowManager. Fully supports input mappings and template nodes.
-     """
-
-     def __init__(self):
-         """Initialize the extractor with empty collections for workflow components."""
-         self.nodes = {}  # Maps node names to their definitions
-         self.functions = {}  # Maps function names to their code
-         self.transitions = []  # List of TransitionDefinition objects
-         self.start_node = None  # Starting node of the workflow
-         self.global_vars = {}  # Tracks global variable assignments (e.g., DEFAULT_LLM_PARAMS)
-         self.observers = []  # List of observer function names
-         self.convergence_nodes = []  # List of convergence nodes
-
-     def visit_Module(self, node):
-         """Log and explicitly process top-level statements in the module."""
-         logger.debug(f"Visiting module with {len(node.body)} top-level statements")
-         for item in node.body:
-             logger.debug(f"Processing top-level node: {type(item).__name__}")
-             if isinstance(item, ast.FunctionDef):
-                 self.visit_FunctionDef(item)
-             elif isinstance(item, ast.AsyncFunctionDef):
-                 self.visit_AsyncFunctionDef(item)
-             else:
-                 self.visit(item)
-
-     def visit_Assign(self, node):
-         """Detect global variable assignments and workflow assignments."""
-         if len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
-             var_name = node.targets[0].id
-             value = node.value
-
-             # Handle global variable assignments (e.g., MODEL, DEFAULT_LLM_PARAMS)
-             if isinstance(value, ast.Dict):
-                 self.global_vars[var_name] = {}
-                 for k, v in zip(value.keys, value.values):
-                     if isinstance(k, ast.Constant):
-                         key = k.value
-                         if isinstance(v, ast.Constant):
-                             self.global_vars[var_name][key] = v.value
-                         elif isinstance(v, ast.Name) and v.id in self.global_vars:
-                             self.global_vars[var_name][key] = self.global_vars[v.id]
-                 logger.debug(
-                     f"Captured global variable '{var_name}' with keys: {list(self.global_vars[var_name].keys())}"
-                 )
-
-             # Handle simple constant assignments (e.g., MODEL = "gemini/gemini-2.0-flash")
-             elif isinstance(value, ast.Constant):
-                 self.global_vars[var_name] = value.value
-                 logger.debug(f"Captured global constant '{var_name}' with value: {value.value}")
-
-             # Handle workflow assignments, including parenthesized expressions
-             if isinstance(value, ast.Tuple) and len(value.elts) == 1:
-                 value = value.elts[0]  # Unwrap single-element tuple from parentheses
-             if isinstance(value, ast.Call):
-                 self.process_workflow_expr(value, var_name)
-
-         self.generic_visit(node)
-
-     def visit_FunctionDef(self, node):
-         """Extract node information from synchronous function definitions."""
-         logger.debug(f"Visiting synchronous function definition: '{node.name}'")
-         for decorator in node.decorator_list:
-             decorator_name = None
-             kwargs = {}
-             logger.debug(f"Examining decorator for '{node.name}': {ast.dump(decorator)}")
-
-             if (
-                 isinstance(decorator, ast.Attribute)
-                 and isinstance(decorator.value, ast.Name)
-                 and decorator.value.id == "Nodes"
-             ):
-                 decorator_name = decorator.attr
-                 logger.debug(f"Found simple decorator 'Nodes.{decorator_name}' for '{node.name}'")
-
-             elif (
-                 isinstance(decorator, ast.Call)
-                 and isinstance(decorator.func, ast.Attribute)
-                 and isinstance(decorator.func.value, ast.Name)
-                 and decorator.func.value.id == "Nodes"
-             ):
-                 decorator_name = decorator.func.attr
-                 logger.debug(f"Found call decorator 'Nodes.{decorator_name}' for '{node.name}'")
-                 for kw in decorator.keywords:
-                     if kw.arg is None and isinstance(kw.value, ast.Name):  # Handle **kwargs
-                         var_name = kw.value.id
-                         if var_name in self.global_vars:
-                             kwargs.update(self.global_vars[var_name])
-                             logger.debug(f"Unpacked '{var_name}' into kwargs: {self.global_vars[var_name]}")
-                     elif isinstance(kw.value, ast.Constant):
-                         kwargs[kw.arg] = kw.value.value
-                     elif kw.arg == "response_model" and isinstance(kw.value, ast.Name):
-                         kwargs[kw.arg] = ast.unparse(kw.value)
-                     elif kw.arg == "transformer" and isinstance(kw.value, ast.Lambda):
-                         kwargs[kw.arg] = ast.unparse(kw.value)
-
-             if decorator_name:
-                 func_name = node.name
-                 inputs = [arg.arg for arg in node.args.args]
-
-                 if decorator_name == "define":
-                     output = kwargs.get("output")
-                     self.nodes[func_name] = {
-                         "type": "function",
-                         "function": func_name,
-                         "inputs": inputs,
-                         "output": output,
-                     }
-                     logger.debug(f"Registered function node '{func_name}' with output '{output}'")
-                 elif decorator_name == "llm_node":
-                     llm_config = {
-                         key: value
-                         for key, value in kwargs.items()
-                         if key in [
-                             "model",
-                             "system_prompt",
-                             "system_prompt_file",
-                             "prompt_template",
-                             "prompt_file",
-                             "temperature",
-                             "max_tokens",
-                             "top_p",
-                             "presence_penalty",
-                             "frequency_penalty",
-                             "output",
-                         ]
-                     }
-                     self.nodes[func_name] = {
-                         "type": "llm",
-                         "llm_config": llm_config,
-                         "inputs": inputs,
-                         "output": llm_config.get("output"),
-                     }
-                     logger.debug(f"Registered LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                 elif decorator_name == "validate_node":
-                     output = kwargs.get("output")
-                     self.nodes[func_name] = {
-                         "type": "function",
-                         "function": func_name,
-                         "inputs": inputs,
-                         "output": output,
-                     }
-                     logger.debug(f"Registered validate node '{func_name}' with output '{output}'")
-                 elif decorator_name == "structured_llm_node":
-                     llm_config = {
-                         key: value
-                         for key, value in kwargs.items()
-                         if key in [
-                             "model",
-                             "system_prompt",
-                             "system_prompt_file",
-                             "prompt_template",
-                             "prompt_file",
-                             "temperature",
-                             "max_tokens",
-                             "top_p",
-                             "presence_penalty",
-                             "frequency_penalty",
-                             "output",
-                             "response_model",
-                         ]
-                     }
-                     self.nodes[func_name] = {
-                         "type": "structured_llm",
-                         "llm_config": llm_config,
-                         "inputs": inputs,
-                         "output": llm_config.get("output"),
-                     }
-                     logger.debug(f"Registered structured LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                 elif decorator_name == "template_node":
-                     template_config = {
-                         "template": kwargs.get("template", ""),
-                         "template_file": kwargs.get("template_file"),
-                     }
-                     if "rendered_content" not in inputs:
-                         inputs.insert(0, "rendered_content")
-                     self.nodes[func_name] = {
-                         "type": "template",
-                         "template_config": template_config,
-                         "inputs": inputs,
-                         "output": kwargs.get("output"),
-                     }
-                     logger.debug(f"Registered template node '{func_name}' with config: {template_config}")
-                 elif decorator_name == "transform_node":
-                     output = kwargs.get("output")
-                     self.nodes[func_name] = {
-                         "type": "function",
-                         "function": func_name,
-                         "inputs": inputs,
-                         "output": output,
-                     }
-                     logger.debug(f"Registered transform node '{func_name}' with output '{output}'")
-                 else:
-                     logger.warning(f"Unsupported decorator 'Nodes.{decorator_name}' in function '{func_name}'")
-
-                 func_code = ast.unparse(node)
-                 self.functions[func_name] = {
-                     "type": "embedded",
-                     "code": func_code,
-                 }
-             else:
-                 logger.debug(f"No recognized 'Nodes' decorator found for '{node.name}'")
-
-         self.generic_visit(node)
-
-     def visit_AsyncFunctionDef(self, node):
-         """Extract node information from asynchronous function definitions."""
-         logger.debug(f"Visiting asynchronous function definition: '{node.name}'")
-         for decorator in node.decorator_list:
-             decorator_name = None
-             kwargs = {}
-             logger.debug(f"Examining decorator for '{node.name}': {ast.dump(decorator)}")
-
-             if (
-                 isinstance(decorator, ast.Attribute)
-                 and isinstance(decorator.value, ast.Name)
-                 and decorator.value.id == "Nodes"
-             ):
-                 decorator_name = decorator.attr
-                 logger.debug(f"Found simple decorator 'Nodes.{decorator_name}' for '{node.name}'")
-
-             elif (
-                 isinstance(decorator, ast.Call)
-                 and isinstance(decorator.func, ast.Attribute)
-                 and isinstance(decorator.func.value, ast.Name)
-                 and decorator.func.value.id == "Nodes"
-             ):
-                 decorator_name = decorator.func.attr
-                 logger.debug(f"Found call decorator 'Nodes.{decorator_name}' for '{node.name}'")
-                 for kw in decorator.keywords:
-                     if kw.arg is None and isinstance(kw.value, ast.Name):  # Handle **kwargs
-                         var_name = kw.value.id
-                         if var_name in self.global_vars:
-                             kwargs.update(self.global_vars[var_name])
-                             logger.debug(f"Unpacked '{var_name}' into kwargs: {self.global_vars[var_name]}")
-                     elif isinstance(kw.value, ast.Constant):
-                         kwargs[kw.arg] = kw.value.value
-                     elif kw.arg == "response_model" and isinstance(kw.value, ast.Name):
-                         kwargs[kw.arg] = ast.unparse(kw.value)
-                     elif kw.arg == "transformer" and isinstance(kw.value, ast.Lambda):
-                         kwargs[kw.arg] = ast.unparse(kw.value)
-
-             if decorator_name:
-                 func_name = node.name
-                 inputs = [arg.arg for arg in node.args.args]
-
-                 if decorator_name == "define":
-                     output = kwargs.get("output")
-                     self.nodes[func_name] = {
-                         "type": "function",
-                         "function": func_name,
-                         "inputs": inputs,
-                         "output": output,
-                     }
-                     logger.debug(f"Registered function node '{func_name}' with output '{output}'")
-                 elif decorator_name == "llm_node":
-                     llm_config = {
-                         key: value
-                         for key, value in kwargs.items()
-                         if key in [
-                             "model",
-                             "system_prompt",
-                             "system_prompt_file",
-                             "prompt_template",
-                             "prompt_file",
-                             "temperature",
-                             "max_tokens",
-                             "top_p",
-                             "presence_penalty",
-                             "frequency_penalty",
-                             "output",
-                         ]
-                     }
-                     self.nodes[func_name] = {
-                         "type": "llm",
-                         "llm_config": llm_config,
-                         "inputs": inputs,
-                         "output": llm_config.get("output"),
-                     }
-                     logger.debug(f"Registered LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                 elif decorator_name == "validate_node":
-                     output = kwargs.get("output")
-                     self.nodes[func_name] = {
-                         "type": "function",
-                         "function": func_name,
-                         "inputs": inputs,
-                         "output": output,
-                     }
-                     logger.debug(f"Registered validate node '{func_name}' with output '{output}'")
-                 elif decorator_name == "structured_llm_node":
-                     llm_config = {
-                         key: value
-                         for key, value in kwargs.items()
-                         if key in [
-                             "model",
-                             "system_prompt",
-                             "system_prompt_file",
-                             "prompt_template",
-                             "prompt_file",
-                             "temperature",
-                             "max_tokens",
-                             "top_p",
-                             "presence_penalty",
-                             "frequency_penalty",
-                             "output",
-                             "response_model",
-                         ]
-                     }
-                     self.nodes[func_name] = {
-                         "type": "structured_llm",
-                         "llm_config": llm_config,
-                         "inputs": inputs,
-                         "output": llm_config.get("output"),
-                     }
-                     logger.debug(f"Registered structured LLM node '{func_name}' with model '{llm_config.get('model')}'")
-                 elif decorator_name == "template_node":
-                     template_config = {
-                         "template": kwargs.get("template", ""),
-                         "template_file": kwargs.get("template_file"),
-                     }
-                     if "rendered_content" not in inputs:
-                         inputs.insert(0, "rendered_content")
-                     self.nodes[func_name] = {
-                         "type": "template",
-                         "template_config": template_config,
-                         "inputs": inputs,
-                         "output": kwargs.get("output"),
-                     }
-                     logger.debug(f"Registered template node '{func_name}' with config: {template_config}")
-                 elif decorator_name == "transform_node":
-                     output = kwargs.get("output")
-                     self.nodes[func_name] = {
-                         "type": "function",
-                         "function": func_name,
-                         "inputs": inputs,
-                         "output": output,
-                     }
-                     logger.debug(f"Registered transform node '{func_name}' with output '{output}'")
-                 else:
-                     logger.warning(f"Unsupported decorator 'Nodes.{decorator_name}' in function '{func_name}'")
-
-                 func_code = ast.unparse(node)
-                 self.functions[func_name] = {
-                     "type": "embedded",
-                     "code": func_code,
-                 }
-             else:
-                 logger.debug(f"No recognized 'Nodes' decorator found for '{node.name}'")
-
-         self.generic_visit(node)
-
-     def process_workflow_expr(self, expr, var_name):
-         """
-         Recursively process Workflow method chaining to build transitions, structure, and observers.
-
-         Args:
-             expr: The AST expression to process.
-             var_name: The variable name to which the workflow is assigned (for logging/context).
-
-         Returns:
-             str or None: The current node name or None if no specific node is returned.
-         """
-         if not isinstance(expr, ast.Call):
-             logger.debug(f"Skipping non-Call node in workflow processing for '{var_name}'")
-             return None
-
-         func = expr.func
-         logger.debug(f"Processing Call node with func type: {type(func).__name__} for '{var_name}'")
-
-         if isinstance(func, ast.Name) and func.id == "Workflow":
-             self.start_node = expr.args[0].value if expr.args else None
-             logger.debug(f"Workflow start node set to '{self.start_node}' for variable '{var_name}'")
-             return self.start_node
-         elif isinstance(func, ast.Attribute):
-             method_name = func.attr
-             obj = func.value
-             previous_node = self.process_workflow_expr(obj, var_name)
-
-             if method_name == "then":
-                 next_node = expr.args[0].value if expr.args else None
-                 condition = None
-                 for keyword in expr.keywords:
-                     if keyword.arg == "condition" and keyword.value:
-                         condition = ast.unparse(keyword.value)
-                 if previous_node and next_node:
-                     self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=next_node, condition=condition))
-                     logger.debug(f"Added transition: {previous_node} -> {next_node} (condition: {condition})")
-                 return next_node
-
-             elif method_name == "sequence":
-                 nodes = [arg.value for arg in expr.args]
-                 if previous_node and nodes:
-                     self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=nodes[0]))
-                     logger.debug(f"Added sequence start transition: {previous_node} -> {nodes[0]}")
-                 for i in range(len(nodes) - 1):
-                     self.transitions.append(TransitionDefinition(from_node=nodes[i], to_node=nodes[i + 1]))
-                     logger.debug(f"Added sequence transition: {nodes[i]} -> {nodes[i + 1]}")
-                 return nodes[-1] if nodes else previous_node
-
-             elif method_name == "parallel":
-                 to_nodes = [arg.value for arg in expr.args]
-                 if previous_node:
-                     self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=to_nodes))
-                     logger.debug(f"Added parallel transition: {previous_node} -> {to_nodes}")
-                 return None
-
-             elif method_name == "branch":
-                 branches = []
-                 if expr.args and isinstance(expr.args[0], ast.List):
-                     for elt in expr.args[0].elts:
-                         if isinstance(elt, ast.Tuple) and len(elt.elts) == 2:
-                             to_node = elt.elts[0].value
-                             cond = ast.unparse(elt.elts[1]) if elt.elts[1] else None
-                             branches.append(BranchCondition(to_node=to_node, condition=cond))
-                             logger.debug(f"Added branch: {previous_node} -> {to_node} (condition: {cond})")
-                 if previous_node and branches:
-                     self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=branches))
-                 return None
-
-             elif method_name == "converge":
-                 conv_node = expr.args[0].value if expr.args else None
-                 if conv_node and conv_node not in self.convergence_nodes:
-                     self.convergence_nodes.append(conv_node)
-                     logger.debug(f"Added convergence node: {conv_node}")
-                 return conv_node
-
-             elif method_name == "node":
-                 node_name = expr.args[0].value if expr.args else None
-                 inputs_mapping = None
-                 for keyword in expr.keywords:
-                     if keyword.arg == "inputs_mapping" and isinstance(keyword.value, ast.Dict):
-                         inputs_mapping = {}
-                         for k, v in zip(keyword.value.keys, keyword.value.values):
-                             key = k.value if isinstance(k, ast.Constant) else ast.unparse(k)
-                             if isinstance(v, ast.Constant):
-                                 inputs_mapping[key] = v.value
-                             elif isinstance(v, ast.Lambda):
-                                 inputs_mapping[key] = f"lambda ctx: {ast.unparse(v.body)}"
-                             else:
-                                 inputs_mapping[key] = ast.unparse(v)
-                 if node_name:
-                     if node_name in self.nodes and inputs_mapping:
-                         self.nodes[node_name]["inputs_mapping"] = inputs_mapping
-                         logger.debug(f"Added inputs_mapping to node '{node_name}': {inputs_mapping}")
-                     if previous_node:
-                         self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=node_name))
-                         logger.debug(f"Added node transition: {previous_node} -> {node_name}")
-                 return node_name
-
-             elif method_name == "add_sub_workflow":
-                 sub_wf_name = expr.args[0].value if expr.args else None
-                 sub_wf_obj = expr.args[1] if len(expr.args) > 1 else None
-                 inputs = {}
-                 inputs_mapping = None
-                 output = None
-                 if len(expr.args) > 2 and isinstance(expr.args[2], ast.Dict):
-                     inputs_mapping = {}
-                     for k, v in zip(expr.args[2].keys, expr.args[2].values):
-                         key = k.value if isinstance(k, ast.Constant) else ast.unparse(k)
-                         if isinstance(v, ast.Constant):
-                             inputs_mapping[key] = v.value
-                         elif isinstance(v, ast.Lambda):
-                             inputs_mapping[key] = f"lambda ctx: {ast.unparse(v.body)}"
-                         else:
-                             inputs_mapping[key] = ast.unparse(v)
-                     inputs = list(inputs_mapping.keys())
-                 if len(expr.args) > 3:
-                     output = expr.args[3].value
-                 if sub_wf_name and sub_wf_obj:
-                     sub_extractor = WorkflowExtractor()
-                     sub_extractor.process_workflow_expr(sub_wf_obj, f"{var_name}_{sub_wf_name}")
-                     self.nodes[sub_wf_name] = {
-                         "type": "sub_workflow",
-                         "sub_workflow": WorkflowStructure(
-                             start=sub_extractor.start_node,
-                             transitions=sub_extractor.transitions,
-                             convergence_nodes=sub_extractor.convergence_nodes,
-                         ),
-                         "inputs": inputs,
-                         "inputs_mapping": inputs_mapping,
-                         "output": output,
-                     }
-                     self.observers.extend(sub_extractor.observers)
-                     logger.debug(f"Added sub-workflow node '{sub_wf_name}' with start '{sub_extractor.start_node}' and inputs_mapping: {inputs_mapping}")
-                     if previous_node:
-                         self.transitions.append(TransitionDefinition(from_node=previous_node, to_node=sub_wf_name))
-                 return sub_wf_name
-
-             elif method_name == "add_observer":
-                 if expr.args and isinstance(expr.args[0], (ast.Name, ast.Constant)):
-                     observer_name = expr.args[0].id if isinstance(expr.args[0], ast.Name) else expr.args[0].value
-                     if observer_name not in self.observers:
-                         self.observers.append(observer_name)
-                         logger.debug(f"Added observer '{observer_name}' to workflow '{var_name}'")
-                 else:
-                     logger.warning(f"Unsupported observer argument in 'add_observer' for '{var_name}'")
-                 return previous_node
-
-             else:
-                 logger.warning(f"Unsupported Workflow method '{method_name}' in variable '{var_name}'")
-         return None
-
-
- def extract_workflow_from_file(file_path):
-     """
-     Extract a WorkflowDefinition and global variables from a Python file containing a workflow.
-
-     Args:
-         file_path (str): Path to the Python file to parse.
-
-     Returns:
-         tuple: (WorkflowDefinition, Dict[str, Any]) - The workflow definition and captured global variables.
-     """
-     with open(file_path) as f:
-         source = f.read()
-     tree = ast.parse(source)
-
-     extractor = WorkflowExtractor()
-     extractor.visit(tree)
-
-     functions = {name: FunctionDefinition(**func) for name, func in extractor.functions.items()}
-
-     nodes = {}
-     from quantalogic.flow.flow_manager_schema import LLMConfig
-
-     for name, node_info in extractor.nodes.items():
-         if node_info["type"] == "function":
-             nodes[name] = NodeDefinition(
-                 function=node_info["function"],
-                 inputs_mapping=node_info.get("inputs_mapping"),
-                 output=node_info["output"],
-                 retries=3,
-                 delay=1.0,
-                 timeout=None,
-                 parallel=False,
-             )
-         elif node_info["type"] == "llm":
-             llm_config = LLMConfig(**node_info["llm_config"])
-             nodes[name] = NodeDefinition(
-                 llm_config=llm_config,
-                 inputs_mapping=node_info.get("inputs_mapping"),
-                 output=node_info["output"],
-                 retries=3,
-                 delay=1.0,
-                 timeout=None,
-                 parallel=False,
-             )
-         elif node_info["type"] == "structured_llm":
-             llm_config = LLMConfig(**node_info["llm_config"])
-             nodes[name] = NodeDefinition(
-                 llm_config=llm_config,
-                 inputs_mapping=node_info.get("inputs_mapping"),
-                 output=node_info["output"],
-                 retries=3,
-                 delay=1.0,
-                 timeout=None,
-                 parallel=False,
-             )
-         elif node_info["type"] == "template":
-             template_config = TemplateConfig(**node_info["template_config"])
-             nodes[name] = NodeDefinition(
-                 template_config=template_config,
-                 inputs_mapping=node_info.get("inputs_mapping"),
-                 output=node_info["output"],
-                 retries=3,
-                 delay=1.0,
-                 timeout=None,
-                 parallel=False,
-             )
-         elif node_info["type"] == "sub_workflow":
-             nodes[name] = NodeDefinition(
-                 sub_workflow=node_info["sub_workflow"],
-                 inputs_mapping=node_info.get("inputs_mapping"),
-                 output=node_info["output"],
-                 retries=3,
-                 delay=1.0,
-                 timeout=None,
-                 parallel=False,
-             )
-
-     workflow_structure = WorkflowStructure(
-         start=extractor.start_node,
-         transitions=extractor.transitions,
-         convergence_nodes=extractor.convergence_nodes,
-     )
-
-     workflow_def = WorkflowDefinition(
-         functions=functions,
-         nodes=nodes,
-         workflow=workflow_structure,
-         observers=extractor.observers,
-     )
-
-     return workflow_def, extractor.global_vars
-
-
- def print_workflow_definition(workflow_def):
-     """
-     Utility function to print a WorkflowDefinition in a human-readable format.
-
-     Args:
-         workflow_def (WorkflowDefinition): The workflow definition to print.
-     """
-     print("### Workflow Definition ###")
-     print("\n#### Functions:")
-     for name, func in workflow_def.functions.items():
-         print(f"- {name}:")
-         print(f"  Type: {func.type}")
-         print(f"  Code (first line): {func.code.splitlines()[0][:50]}..." if func.code else "  Code: None")
-
-     print("\n#### Nodes:")
-     for name, node in workflow_def.nodes.items():
-         print(f"- {name}:")
-         if node.function:
-             print("  Type: Function")
-             print(f"  Function: {node.function}")
-         elif node.llm_config:
-             if node.llm_config.response_model:
-                 print("  Type: Structured LLM")
-                 print(f"  Response Model: {node.llm_config.response_model}")
-             else:
-                 print("  Type: LLM")
-             print(f"  Model: {node.llm_config.model}")
-             print(f"  Prompt Template: {node.llm_config.prompt_template}")
-             if node.llm_config.prompt_file:
-                 print(f"  Prompt File: {node.llm_config.prompt_file}")
-         elif node.template_config:
-             print("  Type: Template")
-             print(f"  Template: {node.template_config.template}")
-             if node.template_config.template_file:
-                 print(f"  Template File: {node.template_config.template_file}")
-         elif node.sub_workflow:
-             print("  Type: Sub-Workflow")
-             print(f"  Start Node: {node.sub_workflow.start}")
-         if node.inputs_mapping:
-             print(f"  Inputs Mapping: {node.inputs_mapping}")
-         print(f"  Output: {node.output or 'None'}")
-
-     print("\n#### Workflow Structure:")
-     print(f"Start Node: {workflow_def.workflow.start}")
-     print("Transitions:")
-     for trans in workflow_def.workflow.transitions:
-         if isinstance(trans.to_node, list):
-             if all(isinstance(tn, BranchCondition) for tn in trans.to_node):
-                 for branch in trans.to_node:
-                     cond_str = f" [Condition: {branch.condition}]" if branch.condition else ""
-                     print(f"- {trans.from_node} -> {branch.to_node}{cond_str}")
-             else:
-                 print(f"- {trans.from_node} -> {trans.to_node} (parallel)")
-         else:
-             cond_str = f" [Condition: {trans.condition}]" if trans.condition else ""
-             print(f"- {trans.from_node} -> {trans.to_node}{cond_str}")
-     print("Convergence Nodes:")
-     for conv_node in workflow_def.workflow.convergence_nodes:
-         print(f"- {conv_node}")
-
-     print("\n#### Observers:")
-     for observer in workflow_def.observers:
-         print(f"- {observer}")
-
-
- def main():
-     """Demonstrate extracting a workflow from a Python file and saving it to YAML."""
-     import argparse
-     import sys
-
-     parser = argparse.ArgumentParser(description='Extract workflow from a Python file')
-     parser.add_argument('file_path', nargs='?', default="examples/flow/simple_story_generator/story_generator_agent.py",
-                         help='Path to the Python file containing the workflow')
-     parser.add_argument('--output', '-o', default="./generated_workflow.py",
-                         help='Output path for the executable Python script')
-     parser.add_argument('--yaml', '-y', default="workflow_definition.yaml",
-                         help='Output path for the YAML workflow definition')
-
-     args = parser.parse_args()
-     file_path = args.file_path
-     output_file_python = args.output
-     yaml_output_path = args.yaml
-
-     if not os.path.exists(file_path):
-         logger.error(f"File '{file_path}' not found. Please provide a valid file path.")
-         logger.info("Example usage: python -m quantalogic.flow.flow_extractor path/to/your/workflow_file.py")
-         sys.exit(1)
-
-     try:
-         workflow_def, global_vars = extract_workflow_from_file(file_path)
-         logger.info(f"Successfully extracted workflow from '{file_path}'")
-         print_workflow_definition(workflow_def)
-         generate_executable_script(workflow_def, global_vars, output_file_python)
-         logger.info(f"Executable script generated at '{output_file_python}'")
-
-         manager = WorkflowManager(workflow_def)
-         manager.save_to_yaml(yaml_output_path)
-         logger.info(f"Workflow saved to YAML file '{yaml_output_path}'")
-     except Exception as e:
-         logger.error(f"Failed to parse or save workflow from '{file_path}': {e}")
-
-
- if __name__ == "__main__":
-     main()
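
Note: per its own docstrings and __main__ block, the removed module could be run as "python -m quantalogic.flow.flow_extractor path/to/workflow_file.py" or called programmatically in 0.61.3. A minimal sketch of the programmatic path, based only on the deleted source above (the input script path is a hypothetical example):

from quantalogic.flow.flow_extractor import extract_workflow_from_file, print_workflow_definition

# Parse a decorator-based workflow script into a WorkflowDefinition plus
# any captured module-level globals (e.g., DEFAULT_LLM_PARAMS).
workflow_def, global_vars = extract_workflow_from_file("story_generator_agent.py")
print_workflow_definition(workflow_def)

This entry point is gone in 0.92 along with the rest of quantalogic/flow's implementation modules, as listed in the files-changed summary above.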