quantalogic 0.51.0__py3-none-any.whl → 0.52.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- quantalogic/flow/__init__.py +17 -0
- quantalogic/flow/flow_extractor.py +32 -103
- quantalogic/flow/flow_generator.py +6 -2
- quantalogic/flow/flow_manager.py +33 -24
- quantalogic/flow/flow_manager_schema.py +2 -3
- quantalogic/flow/flow_mermaid.py +240 -0
- quantalogic/flow/flow_validator.py +335 -0
- quantalogic/flow/flow_yaml.md +313 -329
- quantalogic/tools/__init__.py +3 -2
- quantalogic/tools/tool.py +129 -3
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.0.dist-info}/METADATA +89 -2
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.0.dist-info}/RECORD +15 -13
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.0.dist-info}/LICENSE +0 -0
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.0.dist-info}/WHEEL +0 -0
- {quantalogic-0.51.0.dist-info → quantalogic-0.52.0.dist-info}/entry_points.txt +0 -0
quantalogic/flow/__init__.py
CHANGED
@@ -3,13 +3,26 @@ Flow Package Initialization
 
 This module initializes the flow package and provides package-level imports.
 Now supports nested workflows for hierarchical flow definitions.
+
+Key Visualization Utilities:
+- generate_mermaid_diagram(): Convert workflow definitions to visual Mermaid flowcharts
+- Supports pastel-colored node styling
+- Generates interactive, readable workflow diagrams
+- Handles complex workflows with multiple node types
+
+- Generates descriptive labels
+- Supports conditional node detection
 """
 
 from loguru import logger
 
 # Expose key components for easy importing
 from .flow import Nodes, Workflow, WorkflowEngine
+from .flow_extractor import extract_workflow_from_file
+from .flow_generator import generate_executable_script
 from .flow_manager import WorkflowManager
+from .flow_mermaid import generate_mermaid_diagram
+from .flow_validator import validate_workflow_definition
 
 # Define which symbols are exported when using `from flow import *`
 __all__ = [
@@ -17,6 +30,10 @@ __all__ = [
     "Nodes",
     "Workflow",
     "WorkflowEngine",
+    "generate_mermaid_diagram",
+    "extract_workflow_from_file",
+    "generate_executable_script",
+    "validate_workflow_definition"
 ]
 
 # Package-level logger configuration
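For context, here is a minimal usage sketch of the newly exported helpers, based on the call signatures visible elsewhere in this diff; the input path mirrors the default used in flow_extractor.py and is purely illustrative:
```python
from quantalogic.flow import (
    extract_workflow_from_file,
    generate_executable_script,
    generate_mermaid_diagram,
)

# Parse an existing workflow script into a WorkflowDefinition plus its module-level globals
workflow_def, global_vars = extract_workflow_from_file("examples/qflow/story_generator_agent.py")

# Render the definition as a Mermaid flowchart string
print(generate_mermaid_diagram(workflow_def, title="Story Generator"))

# Emit a standalone executable script for the same workflow
generate_executable_script(workflow_def, global_vars, "./generated_workflow.py")
```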
quantalogic/flow/flow_extractor.py
CHANGED
@@ -3,6 +3,7 @@ import os
 
 from loguru import logger
 
+from quantalogic.flow.flow_generator import generate_executable_script # Import from flow_generator
 from quantalogic.flow.flow_manager import WorkflowManager # Added for YAML saving
 from quantalogic.flow.flow_manager_schema import (
     FunctionDefinition,
@@ -396,7 +397,7 @@ class WorkflowExtractor(ast.NodeVisitor):
                 "sub_workflow": WorkflowStructure(
                     start=sub_extractor.start_node,
                     transitions=[
-                        TransitionDefinition(
+                        TransitionDefinition(from_node=t[0], to_node=t[1], condition=t[2]) for t in sub_extractor.transitions
                     ],
                 ),
                 "inputs": list(inputs.keys()),
@@ -494,7 +495,7 @@ def extract_workflow_from_file(file_path):
 
     # Construct TransitionDefinition objects
     transitions = [
-        TransitionDefinition(
+        TransitionDefinition(from_node=from_node, to_node=to_node, condition=cond)
        for from_node, to_node, cond in extractor.transitions
     ]
 
@@ -509,95 +510,7 @@ def extract_workflow_from_file(file_path):
     return workflow_def, extractor.global_vars
 
 
-
-    """
-    Generate an executable Python script from a WorkflowDefinition with global variables.
-
-    Args:
-        workflow_def: The WorkflowDefinition object containing the workflow details.
-        global_vars: Dictionary of global variables extracted from the source file.
-        output_file: The path where the executable script will be written.
-
-    The generated script includes:
-    - A shebang using `uv run` for environment management.
-    - Metadata specifying the required Python version and dependencies.
-    - Global variables from the original script.
-    - Embedded functions included directly in the script.
-    - Workflow instantiation using direct chaining syntax.
-    - A default initial_context matching the example.
-    """
-    with open(output_file, "w") as f:
-        # Write the shebang and metadata
-        f.write("#!/usr/bin/env -S uv run\n")
-        f.write("# /// script\n")
-        f.write('# requires-python = ">=3.12"\n')
-        f.write("# dependencies = [\n")
-        f.write('# "loguru",\n')
-        f.write('# "litellm",\n')
-        f.write('# "pydantic>=2.0",\n')
-        f.write('# "anyio",\n')
-        f.write('# "quantalogic>=0.35",\n')
-        f.write('# "jinja2",\n')
-        f.write('# "instructor[litellm]",\n') # Kept for potential structured LLM support
-        f.write("# ]\n")
-        f.write("# ///\n\n")
-
-        # Write necessary imports
-        f.write("import anyio\n")
-        f.write("from typing import List\n")
-        f.write("from loguru import logger\n")
-        f.write("from quantalogic.flow import Nodes, Workflow\n\n")
-
-        # Write global variables
-        for var_name, value in global_vars.items():
-            f.write(f"{var_name} = {repr(value)}\n")
-        f.write("\n")
-
-        # Embed functions from workflow_def
-        for func_name, func_def in workflow_def.functions.items():
-            if func_def.type == "embedded":
-                if func_def.code is not None:
-                    f.write(func_def.code + "\n\n")
-                else:
-                    f.write("\n\n")
-
-        # Define workflow using chaining syntax
-        f.write("# Define the workflow using simplified syntax with automatic node registration\n")
-        f.write("workflow = (\n")
-        f.write(f' Workflow("{workflow_def.workflow.start}")\n')
-        for trans in workflow_def.workflow.transitions:
-            _from_node = trans.from_
-            to_node = trans.to
-            condition = trans.condition or "None"
-            if condition != "None":
-                # Ensure condition is formatted as a lambda if not already
-                if not condition.startswith("lambda ctx:"):
-                    condition = f"lambda ctx: {condition}"
-            f.write(f' .then("{to_node}", condition={condition})\n')
-        for observer in workflow_def.observers:
-            f.write(f" .add_observer({observer})\n")
-        f.write(")\n\n")
-
-        # Main asynchronous function to run the workflow
-        f.write("async def main():\n")
-        f.write(' """Main function to run the story generation workflow."""\n')
-        f.write(" initial_context = {\n")
-        f.write(' "genre": "science fiction",\n')
-        f.write(' "num_chapters": 3,\n')
-        f.write(' "chapters": [],\n')
-        f.write(' "completed_chapters": 0,\n')
-        f.write(' "style": "descriptive"\n')
-        f.write(" } # Customize initial_context as needed\n")
-        f.write(" engine = workflow.build()\n")
-        f.write(" result = await engine.run(initial_context)\n")
-        f.write(' logger.info(f"Workflow result: {result}")\n\n')
-
-        # Entry point to execute the main function
-        f.write('if __name__ == "__main__":\n')
-        f.write(" anyio.run(main)\n")
-
-    # Set executable permissions (rwxr-xr-x)
-    os.chmod(output_file, 0o755)
+# The generate_executable_script function has been moved to flow_generator.py
 
 
 def print_workflow_definition(workflow_def):
@@ -638,11 +551,11 @@ def print_workflow_definition(workflow_def):
     print("Transitions:")
     for trans in workflow_def.workflow.transitions:
         condition_str = f" [Condition: {trans.condition}]" if trans.condition else ""
-        if isinstance(trans.
-            for to_node in trans.
-                print(f"- {trans.
+        if isinstance(trans.to_node, list):
+            for to_node in trans.to_node:
+                print(f"- {trans.from_node} -> {to_node}{condition_str}")
         else:
-            print(f"- {trans.
+            print(f"- {trans.from_node} -> {trans.to_node}{condition_str}")
 
     print("\n#### Observers:")
     for observer in workflow_def.observers:
@@ -650,23 +563,39 @@
 
 
 def main():
-    """Demonstrate
-
-
-
-
-
+    """Demonstrate extracting a workflow from a Python file and saving it to YAML."""
+    import argparse
+    import sys
+
+    parser = argparse.ArgumentParser(description='Extract workflow from a Python file')
+    parser.add_argument('file_path', nargs='?', default="examples/qflow/story_generator_agent.py",
+                        help='Path to the Python file containing the workflow')
+    parser.add_argument('--output', '-o', default="./generated_workflow.py",
+                        help='Output path for the executable Python script')
+    parser.add_argument('--yaml', '-y', default="workflow_definition.yaml",
+                        help='Output path for the YAML workflow definition')
+
+    args = parser.parse_args()
+    file_path = args.file_path
+    output_file_python = args.output
+    yaml_output_path = args.yaml
+
+    if not os.path.exists(file_path):
+        logger.error(f"File '{file_path}' not found. Please provide a valid file path.")
+        logger.info("Example usage: python -m quantalogic.flow.flow_extractor path/to/your/workflow_file.py")
+        sys.exit(1)
+
     try:
         workflow_def, global_vars = extract_workflow_from_file(file_path)
         logger.info(f"Successfully extracted workflow from '{file_path}'")
         print_workflow_definition(workflow_def)
         generate_executable_script(workflow_def, global_vars, output_file_python)
+        logger.info(f"Executable script generated at '{output_file_python}'")
+
         # Save the workflow to a YAML file
         manager = WorkflowManager(workflow_def)
        manager.save_to_yaml(yaml_output_path)
         logger.info(f"Workflow saved to YAML file '{yaml_output_path}'")
-    except FileNotFoundError:
-        logger.error(f"File '{file_path}' not found. Please ensure it exists in the specified directory.")
     except Exception as e:
         logger.error(f"Failed to parse or save workflow from '{file_path}': {e}")
 
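With the argparse handling added above, the extractor can also be run as a module. The flags and defaults below are taken from this diff, so an invocation would look roughly like:

    python -m quantalogic.flow.flow_extractor examples/qflow/story_generator_agent.py --output ./generated_workflow.py --yaml workflow_definition.yaml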
quantalogic/flow/flow_generator.py
CHANGED
@@ -60,14 +60,18 @@ def generate_executable_script(workflow_def: WorkflowDefinition, global_vars: di
         f.write("workflow = (\n")
         f.write(f' Workflow("{workflow_def.workflow.start}")\n')
         for trans in workflow_def.workflow.transitions:
-            _from_node = trans.
-            to_node = trans.
+            _from_node = trans.from_node
+            to_node = trans.to_node
             condition = trans.condition or "None"
             if condition != "None":
                 # Ensure condition is formatted as a lambda if not already
                 if not condition.startswith("lambda ctx:"):
                     condition = f"lambda ctx: {condition}"
             f.write(f' .then("{to_node}", condition={condition})\n')
+        # Add observers if any exist in the workflow definition
+        if hasattr(workflow_def, 'observers'):
+            for observer in workflow_def.observers:
+                f.write(f" .add_observer({observer})\n")
         f.write(")\n\n")
 
         # Main asynchronous function to run the workflow
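For reference, the f.write calls above emit a chained workflow block in the generated script along these lines; node and observer names are illustrative, and the exact spacing in the emitted file may differ:
```python
workflow = (
    Workflow("start_node")
    .then("next_node", condition=None)
    .then("final_node", condition=lambda ctx: ctx["done"])
    .add_observer(monitor)
)
```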
quantalogic/flow/flow_manager.py
CHANGED
@@ -65,7 +65,7 @@ class WorkflowManager:
         self.workflow.workflow.transitions = [
             t
             for t in self.workflow.workflow.transitions
-            if t.
+            if t.from_node != name and (isinstance(t.to_node, str) or name not in t.to_node)
         ]
         if self.workflow.workflow.start == name:
             self.workflow.workflow.start = None
@@ -99,27 +99,36 @@ class WorkflowManager:
 
     def add_transition(
         self,
-
-
+        from_node: str,
+        to_node: Union[str, List[str]],
         condition: Optional[str] = None,
+        strict: bool = True,
     ) -> None:
-        """Add a transition between nodes
-
-
-
-
-
-
-
-
-
+        """Add a transition between nodes.
+
+        Args:
+            from_node: Source node name
+            to_node: Target node name or list of target node names
+            condition: Optional condition for the transition
+            strict: If True, validates that all nodes exist before adding the transition.
+                If False, allows adding transitions to non-existent nodes.
+        """
+        if strict:
+            if from_node not in self.workflow.nodes:
+                raise ValueError(f"Source node '{from_node}' does not exist")
+            if isinstance(to_node, str):
+                if to_node not in self.workflow.nodes:
+                    raise ValueError(f"Target node '{to_node}' does not exist")
+            else:
+                for t in to_node:
+                    if t not in self.workflow.nodes:
+                        raise ValueError(f"Target node '{t}' does not exist")
         # Create TransitionDefinition with named parameters
-
-
-
-
-
-        transition = TransitionDefinition.model_validate(transition_dict)
+        transition = TransitionDefinition(
+            from_node=from_node,
+            to_node=to_node,
+            condition=condition
+        )
         self.workflow.workflow.transitions.append(transition)
 
     def set_start_node(self, name: str) -> None:
@@ -281,8 +290,8 @@ class WorkflowManager:
                 sub_workflows[node_name] = sub_wf
                 added_sub_nodes = set()
                 for trans in node_def.sub_workflow.transitions:
-                    from_node = trans.
-                    to_nodes = [trans.
+                    from_node = trans.from_node
+                    to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
                     if from_node not in added_sub_nodes:
                         sub_wf.node(from_node)
                         added_sub_nodes.add(from_node)
@@ -362,8 +371,8 @@ class WorkflowManager:
 
         added_nodes = set()
         for trans in self.workflow.workflow.transitions:
-            from_node = trans.
-            to_nodes = [trans.
+            from_node = trans.from_node
+            to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
             if from_node not in added_nodes and from_node not in sub_workflows:
                 wf.node(from_node)
                 added_nodes.add(from_node)
@@ -441,7 +450,7 @@ def main():
     manager.add_node(name="start", function="greet")
     manager.add_node(name="end", function="farewell")
     manager.set_start_node("start")
-    manager.add_transition(
+    manager.add_transition(from_node="start", to_node="end")
     manager.add_observer("monitor") # Add the observer
     manager.save_to_yaml("workflow.yaml")
     new_manager = WorkflowManager()
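The new `strict` flag changes how `add_transition` reacts to unknown nodes. A short sketch of the behaviour implied by the validation code above (function and node names are illustrative):
```python
from quantalogic.flow.flow_manager import WorkflowManager

manager = WorkflowManager()
manager.add_function(
    name="greet",
    type_="embedded",
    code="async def greet() -> str:\n    return 'hello'",
)
manager.add_node(name="start", function="greet", output="greeting")

# strict=True (the default) rejects transitions that point at undefined nodes
try:
    manager.add_transition(from_node="start", to_node="missing_node")
except ValueError as exc:
    print(exc)  # Target node 'missing_node' does not exist

# strict=False records the transition even though the target is not defined yet
manager.add_transition(from_node="start", to_node="missing_node", strict=False)
```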
quantalogic/flow/flow_manager_schema.py
CHANGED
@@ -128,12 +128,11 @@ class NodeDefinition(BaseModel):
 class TransitionDefinition(BaseModel):
     """Definition of a transition between nodes."""
 
-
+    from_node: str = Field(
         ...,
         description="Source node name for the transition.",
-        alias="from", # Supports YAML aliasing
     )
-
+    to_node: Union[str, List[str]] = Field(
         ..., description="Target node(s). A string for sequential, a list for parallel execution."
     )
     condition: Optional[str] = Field(
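Note that the `alias="from"` field alias is gone, so transitions are now declared with the explicit `from_node`/`to_node` names. A minimal construction sketch (node names are illustrative; `condition` is passed explicitly since its default is not shown in this diff):
```python
from quantalogic.flow.flow_manager_schema import TransitionDefinition

# A sequential transition: a single target node
t1 = TransitionDefinition(from_node="summarize_text", to_node="sentiment_analysis", condition=None)

# A parallel transition: a list of target nodes reached from the same source
t2 = TransitionDefinition(
    from_node="summarize_text",
    to_node=["sentiment_analysis", "keyword_extraction"],
    condition=None,
)
```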
quantalogic/flow/flow_mermaid.py
ADDED
@@ -0,0 +1,240 @@
+import re
+from typing import Dict, List, Optional, Set, Tuple
+
+from quantalogic.flow.flow_manager import WorkflowManager
+from quantalogic.flow.flow_manager_schema import NodeDefinition, WorkflowDefinition
+
+
+def get_node_label_and_type(node_name: str, node_def: Optional[NodeDefinition], has_conditions: bool) -> Tuple[str, str, str]:
+    """
+    Generate a label, type identifier, and shape for a node based on its definition and transition context.
+
+    Args:
+        node_name: The name of the node.
+        node_def: The NodeDefinition object from the workflow, or None if undefined.
+        has_conditions: True if the node has outgoing transitions with conditions.
+
+    Returns:
+        A tuple of (display label, type key for styling, shape identifier).
+    """
+    # No truncation unless necessary, escape quotes for safety
+    escaped_name = node_name.replace('"', '\\"')
+
+    # Use diamond shape for nodes with conditional transitions, rectangle otherwise
+    shape = "diamond" if has_conditions else "rect"
+
+    if not node_def:
+        return f"{escaped_name} (unknown)", "unknown", shape
+
+    if node_def.function:
+        return f"{escaped_name} (function)", "function", shape
+    elif node_def.llm_config:
+        if node_def.llm_config.response_model:
+            return f"{escaped_name} (structured LLM)", "structured_llm", shape
+        return f"{escaped_name} (LLM)", "llm", shape
+    elif node_def.sub_workflow:
+        return f"{escaped_name} (Sub-Workflow)", "sub_workflow", shape
+    return f"{escaped_name} (unknown)", "unknown", shape
+
+
+def generate_mermaid_diagram(
+    workflow_def: WorkflowDefinition,
+    include_subgraphs: bool = False,
+    title: Optional[str] = None,
+    include_legend: bool = True
+) -> str:
+    """
+    Generate a Mermaid flowchart diagram from a WorkflowDefinition with pastel colors and optimal UX.
+
+    Args:
+        workflow_def: The workflow definition to visualize.
+        include_subgraphs: If True, nests sub-workflows in Mermaid subgraphs.
+        title: Optional title for the diagram.
+        include_legend: If True, adds a comment-based legend explaining node types.
+
+    Returns:
+        A string containing the Mermaid syntax for the flowchart.
+
+    Raises:
+        ValueError: If node names contain invalid Mermaid characters.
+    """
+    # Pastel color scheme for a soft, user-friendly look
+    node_styles: Dict[str, str] = {
+        "function": "fill:#90CAF9,stroke:#42A5F5,stroke-width:2px",  # Pastel Blue
+        "structured_llm": "fill:#A5D6A7,stroke:#66BB6A,stroke-width:2px",  # Pastel Green
+        "llm": "fill:#CE93D8,stroke:#AB47BC,stroke-width:2px",  # Pastel Purple
+        "sub_workflow": "fill:#FFCCBC,stroke:#FF7043,stroke-width:2px",  # Pastel Orange
+        "unknown": "fill:#CFD8DC,stroke:#B0BEC5,stroke-width:2px"  # Pastel Grey
+    }
+
+    # Shape mappings for Mermaid syntax
+    shape_syntax: Dict[str, Tuple[str, str]] = {
+        "rect": ("[", "]"),  # Rectangle for standard nodes
+        "diamond": ("{{", "}}")  # Diamond for decision points
+    }
+
+    # Validate node names for Mermaid compatibility (alphanumeric, underscore, hyphen)
+    invalid_chars = r'[^a-zA-Z0-9_-]'
+    all_nodes: Set[str] = set()
+    if workflow_def.workflow.start:
+        if re.search(invalid_chars, workflow_def.workflow.start):
+            raise ValueError(f"Invalid node name '{workflow_def.workflow.start}' for Mermaid")
+        all_nodes.add(workflow_def.workflow.start)
+    for trans in workflow_def.workflow.transitions:
+        if re.search(invalid_chars, trans.from_node):
+            raise ValueError(f"Invalid node name '{trans.from_node}' for Mermaid")
+        all_nodes.add(trans.from_node)
+        if isinstance(trans.to_node, str):
+            if re.search(invalid_chars, trans.to_node):
+                raise ValueError(f"Invalid node name '{trans.to_node}' for Mermaid")
+            all_nodes.add(trans.to_node)
+        else:
+            for to_node in trans.to_node:
+                if re.search(invalid_chars, to_node):
+                    raise ValueError(f"Invalid node name '{to_node}' for Mermaid")
+                all_nodes.add(to_node)
+
+    # Determine which nodes have conditional transitions
+    conditional_nodes: Set[str] = set()
+    for trans in workflow_def.workflow.transitions:
+        if trans.condition and isinstance(trans.to_node, str):
+            conditional_nodes.add(trans.from_node)
+
+    # Generate node definitions and track types/shapes
+    node_defs: List[str] = []
+    node_types: Dict[str, str] = {}
+    node_shapes: Dict[str, str] = {}
+    for node in all_nodes:
+        node_def = workflow_def.nodes.get(node)
+        has_conditions = node in conditional_nodes
+        label, node_type, shape = get_node_label_and_type(node, node_def, has_conditions)
+        start_shape, end_shape = shape_syntax[shape]
+        node_defs.append(f'{node}{start_shape}"{label}"{end_shape}')
+        node_types[node] = node_type
+        node_shapes[node] = shape
+
+    # Generate arrows for transitions (all solid lines)
+    arrows: List[str] = []
+    for trans in workflow_def.workflow.transitions:
+        from_node = trans.from_node
+        if isinstance(trans.to_node, str):
+            to_node = trans.to_node
+            condition = trans.condition
+            if condition:
+                cond = condition.replace('"', '\\"')[:30] + ("..." if len(condition) > 30 else "")
+                arrows.append(f'{from_node} -->|"{cond}"| {to_node}')  # Solid arrow with condition
+            else:
+                arrows.append(f'{from_node} --> {to_node}')
+        else:
+            for to_node in trans.to_node:
+                arrows.append(f'{from_node} --> {to_node}')  # Solid arrow for parallel
+
+    # Assemble the Mermaid syntax
+    mermaid_code = "```mermaid\n"
+    mermaid_code += "graph TD\n"  # Top-down layout
+    if title:
+        mermaid_code += f" %% Diagram: {title}\n"
+
+    # Optional legend for UX
+    if include_legend:
+        mermaid_code += " %% Legend:\n"
+        mermaid_code += " %% - Rectangle: Process Step\n"
+        mermaid_code += " %% - Diamond: Decision Point\n"
+        mermaid_code += " %% - Colors: Blue (Function), Green (Structured LLM), Purple (LLM), Orange (Sub-Workflow), Grey (Unknown)\n"
+
+    # Add node definitions
+    for node_def in node_defs:
+        mermaid_code += f" {node_def}\n"
+
+    # Add transition arrows
+    for arrow in arrows:
+        mermaid_code += f" {arrow}\n"
+
+    # Add styles for node types (no stroke-dasharray for solid appearance)
+    for node, node_type in node_types.items():
+        if node_type in node_styles:
+            mermaid_code += f" style {node} {node_styles[node_type]}\n"
+
+    # Highlight the start node with a thicker border
+    if workflow_def.workflow.start and workflow_def.workflow.start in node_types:
+        mermaid_code += f" style {workflow_def.workflow.start} stroke-width:4px\n"
+
+    # Optional: Subgraphs for sub-workflows
+    if include_subgraphs:
+        for node, node_def in workflow_def.nodes.items():
+            if node_def and node_def.sub_workflow:
+                mermaid_code += f" subgraph {node}_sub[Sub-Workflow: {node}]\n"
+                sub_nodes = {node_def.sub_workflow.start} if node_def.sub_workflow.start else set()
+                for trans in node_def.sub_workflow.transitions:
+                    sub_nodes.add(trans.from_node)
+                    if isinstance(trans.to_node, str):
+                        sub_nodes.add(trans.to_node)
+                    else:
+                        sub_nodes.update(trans.to_node)
+                for sub_node in sub_nodes:
+                    mermaid_code += f" {sub_node}[[{sub_node}]]\n"
+                mermaid_code += " end\n"
+
+    mermaid_code += "```\n"
+    return mermaid_code
+
+
+def main() -> None:
+    """
+    Create a complex workflow and print its improved Mermaid diagram representation.
+    """
+    manager = WorkflowManager()
+
+    # Add functions
+    manager.add_function(
+        name="analyze_sentiment",
+        type_="embedded",
+        code="async def analyze_sentiment(summary: str) -> str:\n return 'positive' if 'good' in summary.lower() else 'negative'",
+    )
+    manager.add_function(
+        name="extract_keywords",
+        type_="embedded",
+        code="async def extract_keywords(summary: str) -> str:\n return 'key1, key2'",
+    )
+    manager.add_function(
+        name="publish_content",
+        type_="embedded",
+        code="async def publish_content(summary: str, sentiment: str, keywords: str) -> str:\n return 'Published'",
+    )
+    manager.add_function(
+        name="revise_content",
+        type_="embedded",
+        code="async def revise_content(summary: str) -> str:\n return 'Revised summary'",
+    )
+
+    # Add LLM node
+    llm_config = {
+        "model": "grok/xai",
+        "system_prompt": "You are a concise summarizer.",
+        "prompt_template": "Summarize the following text: {{ input_text }}",
+        "temperature": "0.5",
+        "max_tokens": "150",
+    }
+    manager.add_node(name="summarize_text", llm_config=llm_config, output="summary")
+
+    # Add function nodes
+    manager.add_node(name="sentiment_analysis", function="analyze_sentiment", output="sentiment")
+    manager.add_node(name="keyword_extraction", function="extract_keywords", output="keywords")
+    manager.add_node(name="publish", function="publish_content", output="status")
+    manager.add_node(name="revise", function="revise_content", output="revised_summary")
+
+    # Define workflow structure
+    manager.set_start_node("summarize_text")
+    manager.add_transition(from_node="summarize_text", to_node=["sentiment_analysis", "keyword_extraction"])
+    manager.add_transition(from_node="sentiment_analysis", to_node="publish", condition="ctx['sentiment'] == 'positive'")
+    manager.add_transition(from_node="sentiment_analysis", to_node="revise", condition="ctx['sentiment'] == 'negative'")
+    manager.add_transition(from_node="keyword_extraction", to_node="publish")
+
+    # Generate and print the diagram
+    workflow_def = manager.workflow
+    diagram = generate_mermaid_diagram(workflow_def, include_subgraphs=False, title="Content Processing Workflow")
+    print(diagram)
+
+
+if __name__ == "__main__":
+    main()