quantalogic 0.50.29__py3-none-any.whl → 0.52.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
quantalogic/flow/__init__.py CHANGED
@@ -3,13 +3,26 @@ Flow Package Initialization
 
 This module initializes the flow package and provides package-level imports.
 Now supports nested workflows for hierarchical flow definitions.
+
+Key Visualization Utilities:
+- generate_mermaid_diagram(): Convert workflow definitions to visual Mermaid flowcharts
+- Supports pastel-colored node styling
+- Generates interactive, readable workflow diagrams
+- Handles complex workflows with multiple node types
+
+- Generates descriptive labels
+- Supports conditional node detection
 """
 
 from loguru import logger
 
 # Expose key components for easy importing
 from .flow import Nodes, Workflow, WorkflowEngine
+from .flow_extractor import extract_workflow_from_file
+from .flow_generator import generate_executable_script
 from .flow_manager import WorkflowManager
+from .flow_mermaid import generate_mermaid_diagram
+from .flow_validator import validate_workflow_definition
 
 # Define which symbols are exported when using `from flow import *`
 __all__ = [
@@ -17,6 +30,10 @@ __all__ = [
     "Nodes",
     "Workflow",
     "WorkflowEngine",
+    "generate_mermaid_diagram",
+    "extract_workflow_from_file",
+    "generate_executable_script",
+    "validate_workflow_definition"
 ]
 
 # Package-level logger configuration
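
The new exports make the extractor, generator, Mermaid, and validator utilities importable directly from the package. A minimal usage sketch, assuming a workflow module "my_workflow.py" exists (the path is hypothetical, and the exact return values of validate_workflow_definition and generate_mermaid_diagram are not shown in this diff):

from quantalogic.flow import (
    extract_workflow_from_file,
    generate_executable_script,
    generate_mermaid_diagram,
    validate_workflow_definition,
)

# Hypothetical input: a Python file that builds a workflow with the Nodes/Workflow API
workflow_def, global_vars = extract_workflow_from_file("my_workflow.py")

validate_workflow_definition(workflow_def)     # assumed to report problems in the definition
print(generate_mermaid_diagram(workflow_def))  # assumed to return Mermaid flowchart source as text
generate_executable_script(workflow_def, global_vars, "standalone.py")
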
quantalogic/flow/flow.py CHANGED
@@ -71,7 +71,7 @@ class WorkflowEngine:
     def __init__(self, workflow, parent_engine: Optional["WorkflowEngine"] = None):
         """Initialize the WorkflowEngine with a workflow and optional parent for sub-workflows."""
         self.workflow = workflow
-        self.context = {}
+        self.context: Dict[str, Any] = {}
         self.observers: List[WorkflowObserver] = []
         self.parent_engine = parent_engine  # Link to parent engine for sub-workflow observer propagation
 
@@ -302,7 +302,7 @@ class Workflow:
 
 
 class Nodes:
-    NODE_REGISTRY = {}  # Registry to hold node functions and metadata
+    NODE_REGISTRY: Dict[str, Tuple[Callable, List[str], Optional[str]]] = {}  # Registry to hold node functions and metadata
 
     @classmethod
     def define(cls, output: Optional[str] = None):
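
The new NODE_REGISTRY annotation documents the registry's value shape: a (function, input parameter names, optional output key) triple. A small self-contained sketch of a registration under that type (the register helper and node below are illustrative, not part of the package):

from typing import Callable, Dict, List, Optional, Tuple

NODE_REGISTRY: Dict[str, Tuple[Callable, List[str], Optional[str]]] = {}

def register(name: str, func: Callable, inputs: List[str], output: Optional[str]) -> None:
    # Store the node function together with its input names and optional output key
    NODE_REGISTRY[name] = (func, inputs, output)

async def validate_order(order: dict) -> str:
    return "validated"

register("validate_order", validate_order, ["order"], "validation_status")
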
@@ -485,8 +485,8 @@ async def example_workflow():
     # Define Pydantic model for structured output
     class OrderDetails(BaseModel):
         order_id: str
-        items: List[str]
-        in_stock: bool
+        items_in_stock: List[str]
+        items_out_of_stock: List[str]
 
     # Define an example observer for progress
     async def progress_monitor(event: WorkflowEvent):
@@ -533,7 +533,9 @@ async def example_workflow():
         output="inventory_status",
     )
     async def check_inventory(items: List[str]) -> OrderDetails:
-        pass
+        # This is a placeholder function that would normally call an LLM
+        # The actual implementation is handled by the structured_llm_node decorator
+        return OrderDetails(order_id="123", items_in_stock=["item1"], items_out_of_stock=[])
 
     @Nodes.define(output="payment_status")
     async def process_payment(order: Dict[str, Any]) -> str:
@@ -572,11 +574,11 @@ async def example_workflow():
         .sequence("validate_order", "check_inventory")
         .then(
             "payment_shipping",
-            condition=lambda ctx: ctx.get("inventory_status").in_stock if ctx.get("inventory_status") else False,
+            condition=lambda ctx: len(ctx.get("inventory_status").items_out_of_stock) == 0 if ctx.get("inventory_status") else False,
         )
         .then(
             "notify_customer_out_of_stock",
-            condition=lambda ctx: not ctx.get("inventory_status").in_stock if ctx.get("inventory_status") else True,
+            condition=lambda ctx: len(ctx.get("inventory_status").items_out_of_stock) > 0 if ctx.get("inventory_status") else True,
         )
         .parallel("update_order_status", "send_confirmation_email")
         .node("update_order_status")
quantalogic/flow/flow_extractor.py CHANGED
@@ -3,6 +3,7 @@ import os
 
 from loguru import logger
 
+from quantalogic.flow.flow_generator import generate_executable_script  # Import from flow_generator
 from quantalogic.flow.flow_manager import WorkflowManager  # Added for YAML saving
 from quantalogic.flow.flow_manager_schema import (
     FunctionDefinition,
@@ -396,7 +397,7 @@ class WorkflowExtractor(ast.NodeVisitor):
                 "sub_workflow": WorkflowStructure(
                     start=sub_extractor.start_node,
                     transitions=[
-                        TransitionDefinition(from_=t[0], to=t[1], condition=t[2]) for t in sub_extractor.transitions
+                        TransitionDefinition(from_node=t[0], to_node=t[1], condition=t[2]) for t in sub_extractor.transitions
                     ],
                 ),
                 "inputs": list(inputs.keys()),
@@ -494,7 +495,7 @@ def extract_workflow_from_file(file_path):
 
     # Construct TransitionDefinition objects
     transitions = [
-        TransitionDefinition(**{"from": from_node, "to": to_node, "condition": cond})
+        TransitionDefinition(from_node=from_node, to_node=to_node, condition=cond)
         for from_node, to_node, cond in extractor.transitions
     ]
 
@@ -509,92 +510,7 @@ def extract_workflow_from_file(file_path):
     return workflow_def, extractor.global_vars
 
 
-def generate_executable_script(workflow_def: WorkflowDefinition, global_vars: dict, output_file: str) -> None:
-    """
-    Generate an executable Python script from a WorkflowDefinition with global variables.
-
-    Args:
-        workflow_def: The WorkflowDefinition object containing the workflow details.
-        global_vars: Dictionary of global variables extracted from the source file.
-        output_file: The path where the executable script will be written.
-
-    The generated script includes:
-    - A shebang using `uv run` for environment management.
-    - Metadata specifying the required Python version and dependencies.
-    - Global variables from the original script.
-    - Embedded functions included directly in the script.
-    - Workflow instantiation using direct chaining syntax.
-    - A default initial_context matching the example.
-    """
-    with open(output_file, "w") as f:
-        # Write the shebang and metadata
-        f.write("#!/usr/bin/env -S uv run\n")
-        f.write("# /// script\n")
-        f.write('# requires-python = ">=3.12"\n')
-        f.write("# dependencies = [\n")
-        f.write('#     "loguru",\n')
-        f.write('#     "litellm",\n')
-        f.write('#     "pydantic>=2.0",\n')
-        f.write('#     "anyio",\n')
-        f.write('#     "quantalogic>=0.35",\n')
-        f.write('#     "jinja2",\n')
-        f.write('#     "instructor[litellm]",\n')  # Kept for potential structured LLM support
-        f.write("# ]\n")
-        f.write("# ///\n\n")
-
-        # Write necessary imports
-        f.write("import anyio\n")
-        f.write("from typing import List\n")
-        f.write("from loguru import logger\n")
-        f.write("from quantalogic.flow import Nodes, Workflow\n\n")
-
-        # Write global variables
-        for var_name, value in global_vars.items():
-            f.write(f"{var_name} = {repr(value)}\n")
-        f.write("\n")
-
-        # Embed functions from workflow_def
-        for func_name, func_def in workflow_def.functions.items():
-            if func_def.type == "embedded":
-                f.write(func_def.code + "\n\n")
-
-        # Define workflow using chaining syntax
-        f.write("# Define the workflow using simplified syntax with automatic node registration\n")
-        f.write("workflow = (\n")
-        f.write(f'    Workflow("{workflow_def.workflow.start}")\n')
-        for trans in workflow_def.workflow.transitions:
-            from_node = trans.from_
-            to_node = trans.to
-            condition = trans.condition or "None"
-            if condition != "None":
-                # Ensure condition is formatted as a lambda if not already
-                if not condition.startswith("lambda ctx:"):
-                    condition = f"lambda ctx: {condition}"
-            f.write(f'    .then("{to_node}", condition={condition})\n')
-        for observer in workflow_def.observers:
-            f.write(f"    .add_observer({observer})\n")
-        f.write(")\n\n")
-
-        # Main asynchronous function to run the workflow
-        f.write("async def main():\n")
-        f.write('    """Main function to run the story generation workflow."""\n')
-        f.write("    initial_context = {\n")
-        f.write('        "genre": "science fiction",\n')
-        f.write('        "num_chapters": 3,\n')
-        f.write('        "chapters": [],\n')
-        f.write('        "completed_chapters": 0,\n')
-        f.write('        "style": "descriptive"\n')
-        f.write("    }  # Customize initial_context as needed\n")
-        f.write("    engine = workflow.build()\n")
-        f.write("    result = await engine.run(initial_context)\n")
-        f.write('    logger.info(f"Workflow result: {result}")\n\n')
-
-        # Entry point to execute the main function
-        f.write('if __name__ == "__main__":\n')
-        f.write("    anyio.run(main)\n")
-
-    # Set executable permissions (rwxr-xr-x)
-    os.chmod(output_file, 0o755)
+# The generate_executable_script function has been moved to flow_generator.py
 
 
 def print_workflow_definition(workflow_def):
@@ -635,11 +551,11 @@ def print_workflow_definition(workflow_def):
     print("Transitions:")
     for trans in workflow_def.workflow.transitions:
         condition_str = f" [Condition: {trans.condition}]" if trans.condition else ""
-        if isinstance(trans.to, list):
-            for to_node in trans.to:
-                print(f"- {trans.from_} -> {to_node}{condition_str}")
+        if isinstance(trans.to_node, list):
+            for to_node in trans.to_node:
+                print(f"- {trans.from_node} -> {to_node}{condition_str}")
         else:
-            print(f"- {trans.from_} -> {trans.to}{condition_str}")
+            print(f"- {trans.from_node} -> {trans.to_node}{condition_str}")
 
     print("\n#### Observers:")
     for observer in workflow_def.observers:
@@ -647,23 +563,39 @@
 
 
 def main():
-    """Demonstrate parsing the story_generator_agent.py workflow and saving it to YAML."""
-    from quantalogic.flow.flow_generator import generate_executable_script  # Ensure correct import
-
-    output_file_python = "./story_generator.py"
-    file_path = "examples/qflow/story_generator_agent.py"
-    yaml_output_path = "story_generator_workflow.yaml"  # Output YAML file path
+    """Demonstrate extracting a workflow from a Python file and saving it to YAML."""
+    import argparse
+    import sys
+
+    parser = argparse.ArgumentParser(description='Extract workflow from a Python file')
+    parser.add_argument('file_path', nargs='?', default="examples/qflow/story_generator_agent.py",
+                        help='Path to the Python file containing the workflow')
+    parser.add_argument('--output', '-o', default="./generated_workflow.py",
+                        help='Output path for the executable Python script')
+    parser.add_argument('--yaml', '-y', default="workflow_definition.yaml",
+                        help='Output path for the YAML workflow definition')
+
+    args = parser.parse_args()
+    file_path = args.file_path
+    output_file_python = args.output
+    yaml_output_path = args.yaml
+
+    if not os.path.exists(file_path):
+        logger.error(f"File '{file_path}' not found. Please provide a valid file path.")
+        logger.info("Example usage: python -m quantalogic.flow.flow_extractor path/to/your/workflow_file.py")
+        sys.exit(1)
+
     try:
         workflow_def, global_vars = extract_workflow_from_file(file_path)
         logger.info(f"Successfully extracted workflow from '{file_path}'")
         print_workflow_definition(workflow_def)
         generate_executable_script(workflow_def, global_vars, output_file_python)
+        logger.info(f"Executable script generated at '{output_file_python}'")
+
         # Save the workflow to a YAML file
         manager = WorkflowManager(workflow_def)
         manager.save_to_yaml(yaml_output_path)
        logger.info(f"Workflow saved to YAML file '{yaml_output_path}'")
-    except FileNotFoundError:
-        logger.error(f"File '{file_path}' not found. Please ensure it exists in the specified directory.")
     except Exception as e:
         logger.error(f"Failed to parse or save workflow from '{file_path}': {e}")
 
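
The reworked main() turns the extractor into a small CLI (python -m quantalogic.flow.flow_extractor path/to/workflow.py). The equivalent programmatic calls, using the same defaults as the argparse arguments above:

from quantalogic.flow.flow_extractor import extract_workflow_from_file, print_workflow_definition
from quantalogic.flow.flow_generator import generate_executable_script
from quantalogic.flow.flow_manager import WorkflowManager

workflow_def, global_vars = extract_workflow_from_file("examples/qflow/story_generator_agent.py")
print_workflow_definition(workflow_def)
generate_executable_script(workflow_def, global_vars, "./generated_workflow.py")
WorkflowManager(workflow_def).save_to_yaml("workflow_definition.yaml")
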
quantalogic/flow/flow_generator.py CHANGED
@@ -50,21 +50,28 @@ def generate_executable_script(workflow_def: WorkflowDefinition, global_vars: di
         # Embed functions from workflow_def
         for func_name, func_def in workflow_def.functions.items():
             if func_def.type == "embedded":
-                f.write(func_def.code + "\n\n")
+                if func_def.code is not None:
+                    f.write(func_def.code + "\n\n")
+                else:
+                    f.write("\n\n")
 
         # Define workflow using chaining syntax
         f.write("# Define the workflow using simplified syntax with automatic node registration\n")
         f.write("workflow = (\n")
         f.write(f'    Workflow("{workflow_def.workflow.start}")\n')
         for trans in workflow_def.workflow.transitions:
-            from_node = trans.from_
-            to_node = trans.to
+            _from_node = trans.from_node
+            to_node = trans.to_node
             condition = trans.condition or "None"
             if condition != "None":
                 # Ensure condition is formatted as a lambda if not already
                 if not condition.startswith("lambda ctx:"):
                     condition = f"lambda ctx: {condition}"
             f.write(f'    .then("{to_node}", condition={condition})\n')
+        # Add observers if any exist in the workflow definition
+        if hasattr(workflow_def, 'observers'):
+            for observer in workflow_def.observers:
+                f.write(f"    .add_observer({observer})\n")
         f.write(")\n\n")
 
         # Main asynchronous function to run the workflow
quantalogic/flow/flow_manager.py CHANGED
@@ -8,7 +8,7 @@ import urllib
 from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Type, Union
 
-import yaml
+import yaml  # type: ignore
 from loguru import logger
 from pydantic import BaseModel, ValidationError
 
@@ -16,6 +16,7 @@ from pydantic import BaseModel, ValidationError
 from quantalogic.flow.flow import Nodes, Workflow
 from quantalogic.flow.flow_manager_schema import (
     FunctionDefinition,
+    LLMConfig,
     NodeDefinition,
     TransitionDefinition,
     WorkflowDefinition,
@@ -41,10 +42,13 @@ class WorkflowManager:
         parallel: bool = False,
     ) -> None:
         """Add a new node to the workflow definition, supporting sub-workflows and LLM nodes."""
+        # Convert dict to LLMConfig if provided
+        llm_config_obj = LLMConfig(**llm_config) if llm_config is not None else None
+
         node = NodeDefinition(
             function=function,
             sub_workflow=sub_workflow,
-            llm_config=llm_config,
+            llm_config=llm_config_obj,
             output=output or (f"{name}_result" if function or llm_config else None),
             retries=retries,
             delay=delay,
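
add_node now accepts a plain dict for llm_config and normalizes it into an LLMConfig model before building the NodeDefinition. A sketch of such a call; the field names are assumptions based on llm_config.prompt_template being read later in this diff, and the model value is illustrative:

from quantalogic.flow.flow_manager import WorkflowManager

manager = WorkflowManager()
manager.add_node(
    name="summarize",
    llm_config={
        "model": "gpt-4o-mini",                      # assumed LLMConfig field; value illustrative
        "prompt_template": "Summarize: {{ text }}",  # used to infer the node's inputs
    },
)
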
@@ -61,7 +65,7 @@ class WorkflowManager:
         self.workflow.workflow.transitions = [
             t
             for t in self.workflow.workflow.transitions
-            if t.from_ != name and (isinstance(t.to, str) or name not in t.to)
+            if t.from_node != name and (isinstance(t.to_node, str) or name not in t.to_node)
         ]
         if self.workflow.workflow.start == name:
             self.workflow.workflow.start = None
@@ -95,22 +99,36 @@ class WorkflowManager:
 
     def add_transition(
         self,
-        from_: str,
-        to: Union[str, List[str]],
+        from_node: str,
+        to_node: Union[str, List[str]],
         condition: Optional[str] = None,
+        strict: bool = True,
     ) -> None:
-        """Add a transition between nodes, ensuring all nodes exist."""
-        if from_ not in self.workflow.nodes:
-            raise ValueError(f"Source node '{from_}' does not exist")
-        if isinstance(to, str):
-            if to not in self.workflow.nodes:
-                raise ValueError(f"Target node '{to}' does not exist")
-        else:
-            for t in to:
-                if t not in self.workflow.nodes:
-                    raise ValueError(f"Target node '{t}' does not exist")
-        # Use 'from' field name instead of the alias 'from_'
-        transition = TransitionDefinition(**{"from": from_, "to": to, "condition": condition})
+        """Add a transition between nodes.
+
+        Args:
+            from_node: Source node name
+            to_node: Target node name or list of target node names
+            condition: Optional condition for the transition
+            strict: If True, validates that all nodes exist before adding the transition.
+                    If False, allows adding transitions to non-existent nodes.
+        """
+        if strict:
+            if from_node not in self.workflow.nodes:
+                raise ValueError(f"Source node '{from_node}' does not exist")
+            if isinstance(to_node, str):
+                if to_node not in self.workflow.nodes:
+                    raise ValueError(f"Target node '{to_node}' does not exist")
+            else:
+                for t in to_node:
+                    if t not in self.workflow.nodes:
+                        raise ValueError(f"Target node '{t}' does not exist")
+        # Create TransitionDefinition with named parameters
+        transition = TransitionDefinition(
+            from_node=from_node,
+            to_node=to_node,
+            condition=condition
+        )
         self.workflow.workflow.transitions.append(transition)
 
     def set_start_node(self, name: str) -> None:
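
The new strict flag lets callers record transitions before the referenced nodes exist, which helps when building a definition front-to-back. A short sketch (node and function names are hypothetical):

from quantalogic.flow.flow_manager import WorkflowManager

manager = WorkflowManager()
# With the default strict=True this call would raise ValueError, since neither node exists yet
manager.add_transition(from_node="fetch", to_node="process", strict=False)
manager.add_node(name="fetch", function="fetch_data")
manager.add_node(name="process", function="process_data")
manager.set_start_node("fetch")
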
@@ -174,8 +192,12 @@ class WorkflowManager:
                 temp_path = temp_file.name
             module_name = f"temp_module_{hash(temp_path)}"
             spec = importlib.util.spec_from_file_location(module_name, temp_path)
+            if spec is None:
+                raise ValueError(f"Failed to create module spec from {temp_path}")
             module = importlib.util.module_from_spec(spec)
             sys.modules[module_name] = module
+            if spec.loader is None:
+                raise ValueError(f"Module spec has no loader for {temp_path}")
             spec.loader.exec_module(module)
             os.remove(temp_path)
             return module
@@ -186,8 +208,12 @@ class WorkflowManager:
         try:
             module_name = f"local_module_{hash(source)}"
             spec = importlib.util.spec_from_file_location(module_name, source)
+            if spec is None:
+                raise ValueError(f"Failed to create module spec from {source}")
             module = importlib.util.module_from_spec(spec)
             sys.modules[module_name] = module
+            if spec.loader is None:
+                raise ValueError(f"Module spec has no loader for {source}")
             spec.loader.exec_module(module)
             return module
         except Exception as e:
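
Both loaders gain the same guards because importlib.util.spec_from_file_location returns an Optional spec and spec.loader is itself Optional. The pattern in isolation:

import importlib.util
import sys
from types import ModuleType

def load_module_from_path(path: str, module_name: str) -> ModuleType:
    spec = importlib.util.spec_from_file_location(module_name, path)
    if spec is None:
        raise ValueError(f"Failed to create module spec from {path}")
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module  # register before exec so the module can self-reference
    if spec.loader is None:
        raise ValueError(f"Module spec has no loader for {path}")
    spec.loader.exec_module(module)
    return module
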
@@ -209,21 +235,40 @@ class WorkflowManager:
         functions: Dict[str, Callable] = {}
         for func_name, func_def in self.workflow.functions.items():
             if func_def.type == "embedded":
-                local_scope = {}
-                exec(func_def.code, local_scope)
-                if func_name not in local_scope:
-                    raise ValueError(f"Embedded function '{func_name}' not defined in code")
-                functions[func_name] = local_scope[func_name]
+                local_scope: Dict[str, Any] = {}
+                if func_def.code is not None:
+                    exec(func_def.code, local_scope)
+                    if func_name not in local_scope:
+                        raise ValueError(f"Embedded function '{func_name}' not defined in code")
+                    functions[func_name] = local_scope[func_name]
+                else:
+                    raise ValueError(f"Embedded function '{func_name}' has no code")
             elif func_def.type == "external":
                 try:
+                    if func_def.module is None:
+                        raise ValueError(f"External function '{func_name}' has no module specified")
                     module = self.import_module_from_source(func_def.module)
+                    if func_def.function is None:
+                        raise ValueError(f"External function '{func_name}' has no function name specified")
                     functions[func_name] = getattr(module, func_def.function)
                 except (ImportError, AttributeError) as e:
                     raise ValueError(f"Failed to import external function '{func_name}': {e}")
 
+        # Check if start node is set
         if not self.workflow.workflow.start:
             raise ValueError("Start node not set in workflow definition")
-        wf = Workflow(start_node=self.workflow.workflow.start)
+
+        # We need to ensure we have a valid string for the start node
+        # First check if it's None and provide a fallback
+        if self.workflow.workflow.start is None:
+            logger.warning("Start node was None, using 'start' as default")
+            start_node_name = "start"
+        else:
+            # Otherwise convert to string
+            start_node_name = str(self.workflow.workflow.start)
+
+        # Create the workflow with a valid start node
+        wf = Workflow(start_node=start_node_name)
 
         # Register observers
         for observer_name in self.workflow.observers:
@@ -235,12 +280,18 @@ class WorkflowManager:
         sub_workflows: Dict[str, Workflow] = {}
         for node_name, node_def in self.workflow.nodes.items():
             if node_def.sub_workflow:
-                sub_wf = Workflow(node_def.sub_workflow.start)
+                # Ensure we have a valid start node for the sub-workflow
+                if node_def.sub_workflow.start is None:
+                    logger.warning(f"Sub-workflow for node '{node_name}' has no start node, using '{node_name}_start' as default")
+                    start_node = f"{node_name}_start"
+                else:
+                    start_node = str(node_def.sub_workflow.start)
+                sub_wf = Workflow(start_node=start_node)
                 sub_workflows[node_name] = sub_wf
                 added_sub_nodes = set()
                 for trans in node_def.sub_workflow.transitions:
-                    from_node = trans.from_
-                    to_nodes = [trans.to] if isinstance(trans.to, str) else trans.to
+                    from_node = trans.from_node
+                    to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
                     if from_node not in added_sub_nodes:
                         sub_wf.node(from_node)
                         added_sub_nodes.add(from_node)
@@ -254,7 +305,9 @@ class WorkflowManager:
                     else:
                         sub_wf.then(to_nodes[0], condition=condition)
                 inputs = list(Nodes.NODE_REGISTRY[sub_wf.start_node][1])
-                wf.add_sub_workflow(node_name, sub_wf, inputs={k: k for k in inputs}, output=node_def.output)
+                # Ensure output is a string
+                output = node_def.output if node_def.output is not None else f"{node_name}_result"
+                wf.add_sub_workflow(node_name, sub_wf, inputs={k: k for k in inputs}, output=output)
             elif node_def.function:
                 if node_def.function not in functions:
                     raise ValueError(f"Function '{node_def.function}' for node '{node_name}' not found")
@@ -265,13 +318,15 @@ class WorkflowManager:
             elif node_def.llm_config:
                 llm_config = node_def.llm_config
                 # Extract inputs from prompt_template using regex
-                inputs = set(re.findall(r"{{\s*([^}]+?)\s*}}", llm_config.prompt_template))
+                # Extract inputs from prompt_template using regex
+                input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", llm_config.prompt_template))
                 cleaned_inputs = set()
-                for input_var in inputs:
+                for input_var in input_vars:
                     base_var = re.split(r"\s*[\+\-\*/]\s*", input_var.strip())[0].strip()
                     if base_var.isidentifier():
                         cleaned_inputs.add(base_var)
-                inputs_list = list(cleaned_inputs)
+                # Convert set to list for type compatibility
+                inputs_list: List[str] = list(cleaned_inputs)
 
                 # Define a dummy function to be decorated
                 async def dummy_func(**kwargs):
@@ -316,8 +371,8 @@ class WorkflowManager:
 
         added_nodes = set()
         for trans in self.workflow.workflow.transitions:
-            from_node = trans.from_
-            to_nodes = [trans.to] if isinstance(trans.to, str) else trans.to
+            from_node = trans.from_node
+            to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
             if from_node not in added_nodes and from_node not in sub_workflows:
                 wf.node(from_node)
                 added_nodes.add(from_node)
@@ -395,7 +450,7 @@ def main():
     manager.add_node(name="start", function="greet")
     manager.add_node(name="end", function="farewell")
     manager.set_start_node("start")
-    manager.add_transition(from_="start", to="end")
+    manager.add_transition(from_node="start", to_node="end")
     manager.add_observer("monitor")  # Add the observer
     manager.save_to_yaml("workflow.yaml")
     new_manager = WorkflowManager()
quantalogic/flow/flow_manager_schema.py CHANGED
@@ -128,12 +128,11 @@ class NodeDefinition(BaseModel):
 class TransitionDefinition(BaseModel):
     """Definition of a transition between nodes."""
 
-    from_: str = Field(
+    from_node: str = Field(
         ...,
         description="Source node name for the transition.",
-        alias="from",  # Supports YAML aliasing
     )
-    to: Union[str, List[str]] = Field(
+    to_node: Union[str, List[str]] = Field(
         ..., description="Target node(s). A string for sequential, a list for parallel execution."
     )
     condition: Optional[str] = Field(
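
Dropping the alias="from" is a breaking change for serialized definitions: YAML or dicts that used from:/to: keys under earlier versions will no longer validate and must be migrated to from_node:/to_node:. The new field names in use (values illustrative):

from quantalogic.flow.flow_manager_schema import TransitionDefinition

t = TransitionDefinition(from_node="start", to_node="end", condition=None)
assert t.from_node == "start" and t.to_node == "end"

# Parallel fan-out keeps the list form for to_node
fan_out = TransitionDefinition(from_node="start", to_node=["branch_a", "branch_b"], condition=None)
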
@@ -158,7 +157,7 @@ class WorkflowDefinition(BaseModel):
     )
     nodes: Dict[str, NodeDefinition] = Field(default_factory=dict, description="Dictionary of node definitions.")
     workflow: WorkflowStructure = Field(
-        default_factory=WorkflowStructure, description="Main workflow structure with start node and transitions."
+        default_factory=lambda: WorkflowStructure(start=None), description="Main workflow structure with start node and transitions."
     )
     observers: List[str] = Field(
         default_factory=list, description="List of observer function names to monitor workflow execution."
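
The lambda default makes the previously implicit start=None explicit, which keeps the field usable when WorkflowStructure expects start to be passed. A minimal check, assuming the remaining WorkflowDefinition fields also default as their default_factory declarations suggest:

from quantalogic.flow.flow_manager_schema import WorkflowDefinition

wd = WorkflowDefinition()  # workflow defaults to WorkflowStructure(start=None)
assert wd.workflow.start is None
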