quantalogic 0.35.0__py3-none-any.whl → 0.40.0__py3-none-any.whl

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (107)
  1. quantalogic/__init__.py +0 -4
  2. quantalogic/agent.py +603 -363
  3. quantalogic/agent_config.py +233 -46
  4. quantalogic/agent_factory.py +34 -22
  5. quantalogic/coding_agent.py +16 -14
  6. quantalogic/config.py +2 -1
  7. quantalogic/console_print_events.py +4 -8
  8. quantalogic/console_print_token.py +2 -2
  9. quantalogic/docs_cli.py +15 -10
  10. quantalogic/event_emitter.py +258 -83
  11. quantalogic/flow/__init__.py +23 -0
  12. quantalogic/flow/flow.py +595 -0
  13. quantalogic/flow/flow_extractor.py +672 -0
  14. quantalogic/flow/flow_generator.py +89 -0
  15. quantalogic/flow/flow_manager.py +407 -0
  16. quantalogic/flow/flow_manager_schema.py +169 -0
  17. quantalogic/flow/flow_yaml.md +419 -0
  18. quantalogic/generative_model.py +109 -77
  19. quantalogic/get_model_info.py +5 -5
  20. quantalogic/interactive_text_editor.py +100 -73
  21. quantalogic/main.py +17 -21
  22. quantalogic/model_info_list.py +3 -3
  23. quantalogic/model_info_litellm.py +14 -14
  24. quantalogic/prompts.py +2 -1
  25. quantalogic/{llm.py → quantlitellm.py} +29 -39
  26. quantalogic/search_agent.py +4 -4
  27. quantalogic/server/models.py +4 -1
  28. quantalogic/task_file_reader.py +5 -5
  29. quantalogic/task_runner.py +20 -20
  30. quantalogic/tool_manager.py +10 -21
  31. quantalogic/tools/__init__.py +98 -68
  32. quantalogic/tools/composio/composio.py +416 -0
  33. quantalogic/tools/{generate_database_report_tool.py → database/generate_database_report_tool.py} +4 -9
  34. quantalogic/tools/database/sql_query_tool_advanced.py +261 -0
  35. quantalogic/tools/document_tools/markdown_to_docx_tool.py +620 -0
  36. quantalogic/tools/document_tools/markdown_to_epub_tool.py +438 -0
  37. quantalogic/tools/document_tools/markdown_to_html_tool.py +362 -0
  38. quantalogic/tools/document_tools/markdown_to_ipynb_tool.py +319 -0
  39. quantalogic/tools/document_tools/markdown_to_latex_tool.py +420 -0
  40. quantalogic/tools/document_tools/markdown_to_pdf_tool.py +623 -0
  41. quantalogic/tools/document_tools/markdown_to_pptx_tool.py +319 -0
  42. quantalogic/tools/duckduckgo_search_tool.py +2 -4
  43. quantalogic/tools/finance/alpha_vantage_tool.py +440 -0
  44. quantalogic/tools/finance/ccxt_tool.py +373 -0
  45. quantalogic/tools/finance/finance_llm_tool.py +387 -0
  46. quantalogic/tools/finance/google_finance.py +192 -0
  47. quantalogic/tools/finance/market_intelligence_tool.py +520 -0
  48. quantalogic/tools/finance/technical_analysis_tool.py +491 -0
  49. quantalogic/tools/finance/tradingview_tool.py +336 -0
  50. quantalogic/tools/finance/yahoo_finance.py +236 -0
  51. quantalogic/tools/git/bitbucket_clone_repo_tool.py +181 -0
  52. quantalogic/tools/git/bitbucket_operations_tool.py +326 -0
  53. quantalogic/tools/git/clone_repo_tool.py +189 -0
  54. quantalogic/tools/git/git_operations_tool.py +532 -0
  55. quantalogic/tools/google_packages/google_news_tool.py +480 -0
  56. quantalogic/tools/grep_app_tool.py +123 -186
  57. quantalogic/tools/{dalle_e.py → image_generation/dalle_e.py} +37 -27
  58. quantalogic/tools/jinja_tool.py +6 -10
  59. quantalogic/tools/language_handlers/__init__.py +22 -9
  60. quantalogic/tools/list_directory_tool.py +131 -42
  61. quantalogic/tools/llm_tool.py +45 -15
  62. quantalogic/tools/llm_vision_tool.py +59 -7
  63. quantalogic/tools/markitdown_tool.py +17 -5
  64. quantalogic/tools/nasa_packages/models.py +47 -0
  65. quantalogic/tools/nasa_packages/nasa_apod_tool.py +232 -0
  66. quantalogic/tools/nasa_packages/nasa_neows_tool.py +147 -0
  67. quantalogic/tools/nasa_packages/services.py +82 -0
  68. quantalogic/tools/presentation_tools/presentation_llm_tool.py +396 -0
  69. quantalogic/tools/product_hunt/product_hunt_tool.py +258 -0
  70. quantalogic/tools/product_hunt/services.py +63 -0
  71. quantalogic/tools/rag_tool/__init__.py +48 -0
  72. quantalogic/tools/rag_tool/document_metadata.py +15 -0
  73. quantalogic/tools/rag_tool/query_response.py +20 -0
  74. quantalogic/tools/rag_tool/rag_tool.py +566 -0
  75. quantalogic/tools/rag_tool/rag_tool_beta.py +264 -0
  76. quantalogic/tools/read_html_tool.py +24 -38
  77. quantalogic/tools/replace_in_file_tool.py +10 -10
  78. quantalogic/tools/safe_python_interpreter_tool.py +10 -24
  79. quantalogic/tools/search_definition_names.py +2 -2
  80. quantalogic/tools/sequence_tool.py +14 -23
  81. quantalogic/tools/sql_query_tool.py +17 -19
  82. quantalogic/tools/tool.py +39 -15
  83. quantalogic/tools/unified_diff_tool.py +1 -1
  84. quantalogic/tools/utilities/csv_processor_tool.py +234 -0
  85. quantalogic/tools/utilities/download_file_tool.py +179 -0
  86. quantalogic/tools/utilities/mermaid_validator_tool.py +661 -0
  87. quantalogic/tools/utils/__init__.py +1 -4
  88. quantalogic/tools/utils/create_sample_database.py +24 -38
  89. quantalogic/tools/utils/generate_database_report.py +74 -82
  90. quantalogic/tools/wikipedia_search_tool.py +17 -21
  91. quantalogic/utils/ask_user_validation.py +1 -1
  92. quantalogic/utils/async_utils.py +35 -0
  93. quantalogic/utils/check_version.py +3 -5
  94. quantalogic/utils/get_all_models.py +2 -1
  95. quantalogic/utils/git_ls.py +21 -7
  96. quantalogic/utils/lm_studio_model_info.py +9 -7
  97. quantalogic/utils/python_interpreter.py +113 -43
  98. quantalogic/utils/xml_utility.py +178 -0
  99. quantalogic/version_check.py +1 -1
  100. quantalogic/welcome_message.py +7 -7
  101. quantalogic/xml_parser.py +0 -1
  102. {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/METADATA +41 -1
  103. quantalogic-0.40.0.dist-info/RECORD +148 -0
  104. quantalogic-0.35.0.dist-info/RECORD +0 -102
  105. {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/LICENSE +0 -0
  106. {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/WHEEL +0 -0
  107. {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/entry_points.txt +0 -0
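The headline addition in 0.40.0 is the new quantalogic/flow package (items 11-17 above). Before the hunks below, here is a minimal sketch of what the chaining API appears to look like, pieced together from the generated-script template and the WorkflowManager code in this diff; the node functions, their decorator usage, and the context keys are illustrative assumptions, not documented API.

import anyio
from quantalogic.flow import Nodes, Workflow

# Illustrative nodes: the decorator form is inferred from the
# Nodes.define(output=...) call seen in flow_manager.py below;
# names and signatures are made up for this sketch.
@Nodes.define(output="greeting")
async def greet(user_name: str) -> str:
    return f"Hello, {user_name}!"

@Nodes.define(output="farewell")
async def goodbye(user_name: str) -> str:
    return f"Goodbye, {user_name}!"

# Chaining syntax as emitted by flow_generator.py below.
workflow = (
    Workflow("greet")
    .then("goodbye", condition=None)
)

async def main():
    engine = workflow.build()
    result = await engine.run({"user_name": "Ada"})
    print(result)

if __name__ == "__main__":
    anyio.run(main)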
quantalogic/flow/flow_generator.py
@@ -0,0 +1,89 @@
+ import os
+
+ from quantalogic.flow.flow_manager_schema import WorkflowDefinition
+
+
+ def generate_executable_script(workflow_def: WorkflowDefinition, global_vars: dict, output_file: str) -> None:
+     """
+     Generate an executable Python script from a WorkflowDefinition with global variables.
+
+     Args:
+         workflow_def: The WorkflowDefinition object containing the workflow details.
+         global_vars: Dictionary of global variables extracted from the source file.
+         output_file: The path where the executable script will be written.
+
+     The generated script includes:
+     - A shebang using `uv run` for environment management.
+     - Metadata specifying the required Python version and dependencies.
+     - Global variables from the original script.
+     - Embedded functions included directly in the script.
+     - Workflow instantiation using direct chaining syntax.
+     - A default initial_context matching the example.
+     """
+     with open(output_file, "w") as f:
+         # Write the shebang and metadata
+         f.write("#!/usr/bin/env -S uv run\n")
+         f.write("# /// script\n")
+         f.write('# requires-python = ">=3.12"\n')
+         f.write("# dependencies = [\n")
+         f.write('#     "loguru",\n')
+         f.write('#     "litellm",\n')
+         f.write('#     "pydantic>=2.0",\n')
+         f.write('#     "anyio",\n')
+         f.write('#     "quantalogic>=0.35",\n')
+         f.write('#     "jinja2",\n')
+         f.write('#     "instructor[litellm]",\n')  # Kept for potential structured LLM support
+         f.write("# ]\n")
+         f.write("# ///\n\n")
+
+         # Write necessary imports
+         f.write("import anyio\n")
+         f.write("from typing import List\n")
+         f.write("from loguru import logger\n")
+         f.write("from quantalogic.flow import Nodes, Workflow\n\n")
+
+         # Write global variables
+         for var_name, value in global_vars.items():
+             f.write(f"{var_name} = {repr(value)}\n")
+         f.write("\n")
+
+         # Embed functions from workflow_def
+         for func_name, func_def in workflow_def.functions.items():
+             if func_def.type == "embedded":
+                 f.write(func_def.code + "\n\n")
+
+         # Define workflow using chaining syntax
+         f.write("# Define the workflow using simplified syntax with automatic node registration\n")
+         f.write("workflow = (\n")
+         f.write(f'    Workflow("{workflow_def.workflow.start}")\n')
+         for trans in workflow_def.workflow.transitions:
+             from_node = trans.from_
+             to_node = trans.to
+             condition = trans.condition or "None"
+             if condition != "None":
+                 # Ensure condition is formatted as a lambda if not already
+                 if not condition.startswith("lambda ctx:"):
+                     condition = f"lambda ctx: {condition}"
+             f.write(f'    .then("{to_node}", condition={condition})\n')
+         f.write(")\n\n")
+
+         # Main asynchronous function to run the workflow
+         f.write("async def main():\n")
+         f.write('    """Main function to run the story generation workflow."""\n')
+         f.write("    initial_context = {\n")
+         f.write('        "genre": "science fiction",\n')
+         f.write('        "num_chapters": 3,\n')
+         f.write('        "chapters": [],\n')
+         f.write('        "completed_chapters": 0,\n')
+         f.write('        "style": "descriptive"\n')
+         f.write("    }  # Customize initial_context as needed\n")
+         f.write("    engine = workflow.build()\n")
+         f.write("    result = await engine.run(initial_context)\n")
+         f.write('    logger.info(f"Workflow result: {result}")\n\n')
+
+         # Entry point to execute the main function
+         f.write('if __name__ == "__main__":\n')
+         f.write("    anyio.run(main)\n")
+
+     # Set executable permissions (rwxr-xr-x)
+     os.chmod(output_file, 0o755)
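Combined with the WorkflowManager from the next hunk, the generator round-trips a YAML workflow definition into a standalone, uv-runnable script. A minimal usage sketch (the workflow.yaml name and the empty global_vars are assumptions):

from quantalogic.flow.flow_generator import generate_executable_script
from quantalogic.flow.flow_manager import WorkflowManager

manager = WorkflowManager()
manager.load_from_yaml("workflow.yaml")  # validated against WorkflowDefinition
generate_executable_script(manager.workflow, global_vars={}, output_file="run_workflow.py")
# run_workflow.py now carries the `uv run` shebang and is chmod 0o755,
# so it can be executed directly: ./run_workflow.py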
quantalogic/flow/flow_manager.py
@@ -0,0 +1,407 @@
+ import importlib
+ import importlib.util
+ import os
+ import re
+ import sys
+ import tempfile
+ import urllib.request
+ from pathlib import Path
+ from typing import Any, Callable, Dict, List, Optional, Type, Union
+
+ import yaml
+ from loguru import logger
+ from pydantic import BaseModel, ValidationError
+
+ # Import directly from flow.py to avoid circular import through __init__.py
+ from quantalogic.flow.flow import Nodes, Workflow
+ from quantalogic.flow.flow_manager_schema import (
+     FunctionDefinition,
+     NodeDefinition,
+     TransitionDefinition,
+     WorkflowDefinition,
+     WorkflowStructure,
+ )
+
+
+ class WorkflowManager:
+     def __init__(self, workflow: Optional[WorkflowDefinition] = None):
+         """Initialize the WorkflowManager with an optional workflow definition."""
+         self.workflow = workflow or WorkflowDefinition()
+
+     def add_node(
+         self,
+         name: str,
+         function: Optional[str] = None,
+         sub_workflow: Optional[WorkflowStructure] = None,
+         llm_config: Optional[Dict[str, Any]] = None,
+         output: Optional[str] = None,
+         retries: int = 3,
+         delay: float = 1.0,
+         timeout: Optional[float] = None,
+         parallel: bool = False,
+     ) -> None:
+         """Add a new node to the workflow definition, supporting sub-workflows and LLM nodes."""
+         node = NodeDefinition(
+             function=function,
+             sub_workflow=sub_workflow,
+             llm_config=llm_config,
+             output=output or (f"{name}_result" if function or llm_config else None),
+             retries=retries,
+             delay=delay,
+             timeout=timeout,
+             parallel=parallel,
+         )
+         self.workflow.nodes[name] = node
+
+     def remove_node(self, name: str) -> None:
+         """Remove a node and clean up related transitions and start node."""
+         if name not in self.workflow.nodes:
+             raise ValueError(f"Node '{name}' does not exist")
+         del self.workflow.nodes[name]
+         self.workflow.workflow.transitions = [
+             t
+             for t in self.workflow.workflow.transitions
+             if t.from_ != name and (t.to != name if isinstance(t.to, str) else name not in t.to)
+         ]
+         if self.workflow.workflow.start == name:
+             self.workflow.workflow.start = None
+
+     def update_node(
+         self,
+         name: str,
+         function: Optional[str] = None,
+         output: Optional[str] = None,
+         retries: Optional[int] = None,
+         delay: Optional[float] = None,
+         timeout: Optional[float] = None,
+         parallel: Optional[bool] = None,
+     ) -> None:
+         """Update specific fields of an existing node."""
+         if name not in self.workflow.nodes:
+             raise ValueError(f"Node '{name}' does not exist")
+         node = self.workflow.nodes[name]
+         if function is not None:
+             node.function = function
+         if output is not None:
+             node.output = output
+         if retries is not None:
+             node.retries = retries
+         if delay is not None:
+             node.delay = delay
+         if timeout is not None:
+             node.timeout = timeout
+         if parallel is not None:
+             node.parallel = parallel
+
+     def add_transition(
+         self,
+         from_: str,
+         to: Union[str, List[str]],
+         condition: Optional[str] = None,
+     ) -> None:
+         """Add a transition between nodes, ensuring all nodes exist."""
+         if from_ not in self.workflow.nodes:
+             raise ValueError(f"Source node '{from_}' does not exist")
+         if isinstance(to, str):
+             if to not in self.workflow.nodes:
+                 raise ValueError(f"Target node '{to}' does not exist")
+         else:
+             for t in to:
+                 if t not in self.workflow.nodes:
+                     raise ValueError(f"Target node '{t}' does not exist")
+         # Use the 'from' alias (the YAML key) rather than the Python field name 'from_'
+         transition = TransitionDefinition(**{"from": from_, "to": to, "condition": condition})
+         self.workflow.workflow.transitions.append(transition)
+
+     def set_start_node(self, name: str) -> None:
+         """Set the start node of the workflow."""
+         if name not in self.workflow.nodes:
+             raise ValueError(f"Node '{name}' does not exist")
+         self.workflow.workflow.start = name
+
+     def add_function(
+         self,
+         name: str,
+         type_: str,
+         code: Optional[str] = None,
+         module: Optional[str] = None,
+         function: Optional[str] = None,
+     ) -> None:
+         """Add a function definition to the workflow."""
+         func_def = FunctionDefinition(type=type_, code=code, module=module, function=function)
+         self.workflow.functions[name] = func_def
+
+     def add_observer(self, observer_name: str) -> None:
+         """Add an observer function name to the workflow."""
+         if observer_name not in self.workflow.functions:
+             raise ValueError(f"Observer function '{observer_name}' not defined in functions")
+         if observer_name not in self.workflow.observers:
+             self.workflow.observers.append(observer_name)
+             logger.debug(f"Added observer '{observer_name}' to workflow")
+
+     def _resolve_model(self, model_str: str) -> Type[BaseModel]:
+         """Resolve a string to a Pydantic model class for structured_llm_node."""
+         try:
+             module_name, class_name = model_str.split(":")
+             module = importlib.import_module(module_name)
+             model_class = getattr(module, class_name)
+             if not issubclass(model_class, BaseModel):
+                 raise ValueError(f"{model_str} is not a Pydantic model")
+             return model_class
+         except (ValueError, ImportError, AttributeError) as e:
+             raise ValueError(f"Failed to resolve response_model '{model_str}': {e}")
+
+     def import_module_from_source(self, source: str) -> Any:
+         """
+         Import a module from various sources: installed module name (e.g., PyPI), local file path, or remote URL.
+
+         Args:
+             source: The module specification (e.g., 'requests', '/path/to/file.py', 'https://example.com/module.py').
+
+         Returns:
+             The imported module object.
+
+         Raises:
+             ValueError: If the module cannot be imported, with suggestions for installation if it's a PyPI package.
+         """
+         if source.startswith("http://") or source.startswith("https://"):
+             # Handle remote URL
+             try:
+                 with urllib.request.urlopen(source) as response:
+                     code = response.read().decode("utf-8")
+                 with tempfile.NamedTemporaryFile(delete=False, suffix=".py") as temp_file:
+                     temp_file.write(code.encode("utf-8"))
+                     temp_path = temp_file.name
+                 module_name = f"temp_module_{hash(temp_path)}"
+                 spec = importlib.util.spec_from_file_location(module_name, temp_path)
+                 module = importlib.util.module_from_spec(spec)
+                 sys.modules[module_name] = module
+                 spec.loader.exec_module(module)
+                 os.remove(temp_path)
+                 return module
+             except Exception as e:
+                 raise ValueError(f"Failed to import module from URL '{source}': {e}")
+         elif os.path.isfile(source):
+             # Handle local file path
+             try:
+                 module_name = f"local_module_{hash(source)}"
+                 spec = importlib.util.spec_from_file_location(module_name, source)
+                 module = importlib.util.module_from_spec(spec)
+                 sys.modules[module_name] = module
+                 spec.loader.exec_module(module)
+                 return module
+             except Exception as e:
+                 raise ValueError(f"Failed to import module from file '{source}': {e}")
+         else:
+             # Assume an installed module name from PyPI or the system
+             try:
+                 return importlib.import_module(source)
+             except ImportError as e:
+                 logger.error(f"Module '{source}' not found: {e}")
+                 raise ValueError(
+                     f"Failed to import module '{source}': {e}. "
+                     f"This may be a PyPI package. Ensure it is installed using 'pip install {source}' "
+                     "or check if the module name is correct."
+                 )
+
+     def instantiate_workflow(self) -> Workflow:
+         """Instantiate a Workflow object based on the definitions stored in the WorkflowManager."""
+         functions: Dict[str, Callable] = {}
+         for func_name, func_def in self.workflow.functions.items():
+             if func_def.type == "embedded":
+                 local_scope = {}
+                 exec(func_def.code, local_scope)
+                 if func_name not in local_scope:
+                     raise ValueError(f"Embedded function '{func_name}' not defined in code")
+                 functions[func_name] = local_scope[func_name]
+             elif func_def.type == "external":
+                 try:
+                     module = self.import_module_from_source(func_def.module)
+                     functions[func_name] = getattr(module, func_def.function)
+                 except (ImportError, AttributeError) as e:
+                     raise ValueError(f"Failed to import external function '{func_name}': {e}")
+
+         if not self.workflow.workflow.start:
+             raise ValueError("Start node not set in workflow definition")
+         wf = Workflow(start_node=self.workflow.workflow.start)
+
+         # Register observers
+         for observer_name in self.workflow.observers:
+             if observer_name not in functions:
+                 raise ValueError(f"Observer '{observer_name}' not found in functions")
+             wf.add_observer(functions[observer_name])
+             logger.debug(f"Registered observer '{observer_name}' in workflow")
+
+         sub_workflows: Dict[str, Workflow] = {}
+         for node_name, node_def in self.workflow.nodes.items():
+             if node_def.sub_workflow:
+                 sub_wf = Workflow(node_def.sub_workflow.start)
+                 sub_workflows[node_name] = sub_wf
+                 added_sub_nodes = set()
+                 for trans in node_def.sub_workflow.transitions:
+                     from_node = trans.from_
+                     to_nodes = [trans.to] if isinstance(trans.to, str) else trans.to
+                     if from_node not in added_sub_nodes:
+                         sub_wf.node(from_node)
+                         added_sub_nodes.add(from_node)
+                     for to_node in to_nodes:
+                         if to_node not in added_sub_nodes:
+                             sub_wf.node(to_node)
+                             added_sub_nodes.add(to_node)
+                     condition = eval(f"lambda ctx: {trans.condition}") if trans.condition else None
+                     if len(to_nodes) > 1:
+                         sub_wf.parallel(*to_nodes)  # No condition support in parallel as per original
+                     else:
+                         sub_wf.then(to_nodes[0], condition=condition)
+                 inputs = list(Nodes.NODE_REGISTRY[sub_wf.start_node][1])
+                 wf.add_sub_workflow(node_name, sub_wf, inputs={k: k for k in inputs}, output=node_def.output)
+             elif node_def.function:
+                 if node_def.function not in functions:
+                     raise ValueError(f"Function '{node_def.function}' for node '{node_name}' not found")
+                 func = functions[node_def.function]
+                 Nodes.define(output=node_def.output)(func)
+             elif node_def.llm_config:
+                 llm_config = node_def.llm_config
+                 # Extract input variables from the Jinja2 prompt_template using a regex
+                 inputs = set(re.findall(r"{{\s*([^}]+?)\s*}}", llm_config.prompt_template))
+                 cleaned_inputs = set()
+                 for input_var in inputs:
+                     base_var = re.split(r"\s*[\+\-\*/]\s*", input_var.strip())[0].strip()
+                     if base_var.isidentifier():
+                         cleaned_inputs.add(base_var)
+                 inputs_list = list(cleaned_inputs)
+
+                 # Define a dummy function to be decorated
+                 async def dummy_func(**kwargs):
+                     pass  # This will be replaced by the decorator logic
+
+                 if llm_config.response_model:
+                     # Structured LLM node
+                     response_model = self._resolve_model(llm_config.response_model)
+                     decorated_func = Nodes.structured_llm_node(
+                         model=llm_config.model,
+                         system_prompt=llm_config.system_prompt or "",
+                         prompt_template=llm_config.prompt_template,
+                         response_model=response_model,
+                         output=node_def.output or f"{node_name}_result",
+                         temperature=llm_config.temperature,
+                         max_tokens=llm_config.max_tokens or 2000,
+                         top_p=llm_config.top_p,
+                         presence_penalty=llm_config.presence_penalty,
+                         frequency_penalty=llm_config.frequency_penalty,
+                         api_key=llm_config.api_key,
+                     )(dummy_func)
+                 else:
+                     # Plain LLM node
+                     decorated_func = Nodes.llm_node(
+                         model=llm_config.model,
+                         system_prompt=llm_config.system_prompt or "",
+                         prompt_template=llm_config.prompt_template,
+                         output=node_def.output or f"{node_name}_result",
+                         temperature=llm_config.temperature,
+                         max_tokens=llm_config.max_tokens or 2000,
+                         top_p=llm_config.top_p,
+                         presence_penalty=llm_config.presence_penalty,
+                         frequency_penalty=llm_config.frequency_penalty,
+                         api_key=llm_config.api_key,
+                     )(dummy_func)
+
+                 # Register the node in NODE_REGISTRY with the proper inputs
+                 Nodes.NODE_REGISTRY[node_name] = (decorated_func, inputs_list, node_def.output or f"{node_name}_result")
+                 logger.debug(
+                     f"Registered LLM node '{node_name}' with inputs {inputs_list} and output {node_def.output or f'{node_name}_result'}"
+                 )
+
+         added_nodes = set()
+         for trans in self.workflow.workflow.transitions:
+             from_node = trans.from_
+             to_nodes = [trans.to] if isinstance(trans.to, str) else trans.to
+             if from_node not in added_nodes and from_node not in sub_workflows:
+                 wf.node(from_node)
+                 added_nodes.add(from_node)
+             for to_node in to_nodes:
+                 if to_node not in added_nodes and to_node not in sub_workflows:
+                     wf.node(to_node)
+                     added_nodes.add(to_node)
+             condition = eval(f"lambda ctx: {trans.condition}") if trans.condition else None
+             if len(to_nodes) > 1:
+                 wf.parallel(*to_nodes)
+             else:
+                 wf.then(to_nodes[0], condition=condition)
+
+         return wf
+
+     def load_from_yaml(self, file_path: Union[str, Path]) -> None:
+         """Load a workflow from a YAML file with validation."""
+         file_path = Path(file_path)
+         if not file_path.exists():
+             raise FileNotFoundError(f"YAML file '{file_path}' not found")
+         with file_path.open("r") as f:
+             data = yaml.safe_load(f)
+         try:
+             self.workflow = WorkflowDefinition.model_validate(data)
+         except ValidationError as e:
+             raise ValueError(f"Invalid workflow YAML: {e}")
+
+     def save_to_yaml(self, file_path: Union[str, Path]) -> None:
+         """Save the workflow to a YAML file using aliases and multi-line block scalars for code."""
+         file_path = Path(file_path)
+
+         # Custom representer to use multi-line block scalars for multi-line strings
+         def str_representer(dumper, data):
+             if "\n" in data:  # Use block scalar for multi-line strings
+                 return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
+             return dumper.represent_scalar("tag:yaml.org,2002:str", data)
+
+         # Add the custom representer to the SafeDumper
+         yaml.add_representer(str, str_representer, Dumper=yaml.SafeDumper)
+
+         with file_path.open("w") as f:
+             yaml.safe_dump(
+                 self.workflow.model_dump(by_alias=True),
+                 f,
+                 default_flow_style=False,
+                 sort_keys=False,
+                 allow_unicode=True,
+                 width=120,  # Wider width to reduce wrapping
+             )
+
+
+ def main():
+     """Demonstrate usage of WorkflowManager with observer support."""
+     manager = WorkflowManager()
+     manager.add_function(
+         name="greet",
+         type_="embedded",
+         code="def greet(user_name): return f'Hello, {user_name}!'",
+     )
+     manager.add_function(
+         name="farewell",
+         type_="embedded",
+         code="def farewell(user_name): return f'Goodbye, {user_name}!'",
+     )
+     manager.add_function(
+         name="monitor",
+         type_="embedded",
+         code="""async def monitor(event):
+     print(f'[EVENT] {event.event_type.value} @ {event.node_name or "workflow"}')
+     if event.result:
+         print(f'Result: {event.result}')
+     if event.exception:
+         print(f'Error: {event.exception}')""",
+     )
+     manager.add_node(name="start", function="greet")
+     manager.add_node(name="end", function="farewell")
+     manager.set_start_node("start")
+     manager.add_transition(from_="start", to="end")
+     manager.add_observer("monitor")  # Add the observer
+     manager.save_to_yaml("workflow.yaml")
+     new_manager = WorkflowManager()
+     new_manager.load_from_yaml("workflow.yaml")
+     print(new_manager.workflow.model_dump())
+
+
+ if __name__ == "__main__":
+     main()
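For orientation, the workflow.yaml that main() above writes via save_to_yaml should look roughly as follows; this is inferred from the schema in the next hunk and model_dump(by_alias=True), with null-valued fields (module, function, llm_config, timeout, condition, ...) omitted for brevity, so treat it as a sketch rather than exact output. Note the `from` key produced by the TransitionDefinition alias, and the block scalar for the multi-line monitor code:

functions:
  greet:
    type: embedded
    code: "def greet(user_name): return f'Hello, {user_name}!'"
  farewell:
    type: embedded
    code: "def farewell(user_name): return f'Goodbye, {user_name}!'"
  monitor:
    type: embedded
    code: |-
      async def monitor(event):
          print(f'[EVENT] {event.event_type.value} @ {event.node_name or "workflow"}')
          if event.result:
              print(f'Result: {event.result}')
          if event.exception:
              print(f'Error: {event.exception}')
nodes:
  start:
    function: greet
    output: start_result
    retries: 3
    delay: 1.0
    parallel: false
  end:
    function: farewell
    output: end_result
    retries: 3
    delay: 1.0
    parallel: false
workflow:
  start: start
  transitions:
    - from: start
      to: end
observers:
  - monitor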
quantalogic/flow/flow_manager_schema.py
@@ -0,0 +1,169 @@
+ from typing import Any, Dict, List, Optional, Union
+
+ from pydantic import BaseModel, Field, model_validator
+
+
+ class FunctionDefinition(BaseModel):
+     """
+     Definition of a function used in the workflow.
+
+     This model supports both embedded functions (inline code) and external functions sourced
+     from Python modules, including PyPI packages, local files, or remote URLs.
+     """
+
+     type: str = Field(
+         ...,
+         description="Type of function source. Must be 'embedded' for inline code or 'external' for module-based functions.",
+     )
+     code: Optional[str] = Field(
+         None, description="Multi-line Python code for embedded functions. Required if type is 'embedded'."
+     )
+     module: Optional[str] = Field(
+         None,
+         description=(
+             "Source of the external module for 'external' functions. Can be:"
+             " - A PyPI package name (e.g., 'requests', 'numpy') installed in the environment."
+             " - A local file path (e.g., '/path/to/module.py')."
+             " - A remote URL (e.g., 'https://example.com/module.py')."
+             " Required if type is 'external'."
+         ),
+     )
+     function: Optional[str] = Field(
+         None,
+         description="Name of the function within the module for 'external' functions. Required if type is 'external'.",
+     )
+
+     @model_validator(mode="before")
+     @classmethod
+     def check_function_source(cls, data: Any) -> Any:
+         """Ensure the function definition is valid based on its type."""
+         type_ = data.get("type")
+         if type_ == "embedded":
+             if not data.get("code"):
+                 raise ValueError("Embedded functions require 'code' to be specified")
+             if data.get("module") or data.get("function"):
+                 raise ValueError("Embedded functions should not specify 'module' or 'function'")
+         elif type_ == "external":
+             if not data.get("module") or not data.get("function"):
+                 raise ValueError("External functions require both 'module' and 'function'")
+             if data.get("code"):
+                 raise ValueError("External functions should not specify 'code'")
+         else:
+             raise ValueError("Function type must be 'embedded' or 'external'")
+         return data
+
+
+ class LLMConfig(BaseModel):
+     """Configuration for LLM-based nodes."""
+
+     model: str = Field(
+         default="gpt-3.5-turbo", description="The LLM model to use (e.g., 'gpt-3.5-turbo', 'gemini/gemini-2.0-flash')."
+     )
+     system_prompt: Optional[str] = Field(None, description="System prompt defining the LLM's role or context.")
+     prompt_template: str = Field(
+         default="{{ input }}", description="Jinja2 template for the user prompt (e.g., 'Summarize {{ text }}')."
+     )
+     temperature: float = Field(
+         default=0.7, ge=0.0, le=1.0, description="Controls randomness of LLM output (0.0 to 1.0)."
+     )
+     max_tokens: Optional[int] = Field(None, ge=1, description="Maximum number of tokens in the response.")
+     top_p: float = Field(default=1.0, ge=0.0, le=1.0, description="Nucleus sampling parameter (0.0 to 1.0).")
+     presence_penalty: float = Field(
+         default=0.0, ge=-2.0, le=2.0, description="Penalty for repeating topics (-2.0 to 2.0)."
+     )
+     frequency_penalty: float = Field(
+         default=0.0, ge=-2.0, le=2.0, description="Penalty for repeating words (-2.0 to 2.0)."
+     )
+     stop: Optional[List[str]] = Field(None, description="List of stop sequences for LLM generation (e.g., ['\\n']).")
+     response_model: Optional[str] = Field(
+         None,
+         description=(
+             "Path to a Pydantic model for structured output (e.g., 'my_module:OrderDetails'). "
+             "If specified, uses structured_llm_node; otherwise, uses llm_node."
+         ),
+     )
+     api_key: Optional[str] = Field(None, description="Custom API key for the LLM provider, if required.")
+
+
+ class NodeDefinition(BaseModel):
+     """
+     Definition of a workflow node.
+
+     A node must specify exactly one of 'function', 'sub_workflow', or 'llm_config'.
+     """
+
+     function: Optional[str] = Field(
+         None, description="Name of the function to execute (references a FunctionDefinition)."
+     )
+     sub_workflow: Optional["WorkflowStructure"] = Field(
+         None, description="Nested workflow definition for sub-workflow nodes."
+     )
+     llm_config: Optional[LLMConfig] = Field(None, description="Configuration for LLM-based nodes.")
+     output: Optional[str] = Field(
+         None,
+         description=(
+             "Context key to store the node's result. Defaults to '<node_name>_result' "
+             "for function or LLM nodes if not specified."
+         ),
+     )
+     retries: int = Field(default=3, ge=0, description="Number of retry attempts on failure.")
+     delay: float = Field(default=1.0, ge=0.0, description="Delay in seconds between retries.")
+     timeout: Optional[float] = Field(
+         None, ge=0.0, description="Maximum execution time in seconds (null for no timeout)."
+     )
+     parallel: bool = Field(default=False, description="Whether the node can execute in parallel with others.")
+
+     @model_validator(mode="before")
+     @classmethod
+     def check_function_or_sub_workflow_or_llm(cls, data: Any) -> Any:
+         """Ensure a node has exactly one of 'function', 'sub_workflow', or 'llm_config'."""
+         func = data.get("function")
+         sub_wf = data.get("sub_workflow")
+         llm = data.get("llm_config")
+         if sum(x is not None for x in (func, sub_wf, llm)) != 1:
+             raise ValueError("Node must have exactly one of 'function', 'sub_workflow', or 'llm_config'")
+         return data
+
+
+ class TransitionDefinition(BaseModel):
+     """Definition of a transition between nodes."""
+
+     from_: str = Field(
+         ...,
+         description="Source node name for the transition.",
+         alias="from",  # Supports YAML aliasing
+     )
+     to: Union[str, List[str]] = Field(
+         ..., description="Target node(s). A string for sequential, a list for parallel execution."
+     )
+     condition: Optional[str] = Field(
+         None, description="Python expression using 'ctx' for conditional transitions (e.g., 'ctx.get(\"in_stock\")')."
+     )
+
+
+ class WorkflowStructure(BaseModel):
+     """Structure defining the workflow's execution flow."""
+
+     start: Optional[str] = Field(None, description="Name of the starting node.")
+     transitions: List[TransitionDefinition] = Field(
+         default_factory=list, description="List of transitions between nodes."
+     )
+
+
+ class WorkflowDefinition(BaseModel):
+     """Top-level definition of the workflow."""
+
+     functions: Dict[str, FunctionDefinition] = Field(
+         default_factory=dict, description="Dictionary of function definitions used in the workflow."
+     )
+     nodes: Dict[str, NodeDefinition] = Field(default_factory=dict, description="Dictionary of node definitions.")
+     workflow: WorkflowStructure = Field(
+         default_factory=WorkflowStructure, description="Main workflow structure with start node and transitions."
+     )
+     observers: List[str] = Field(
+         default_factory=list, description="List of observer function names to monitor workflow execution."
+     )
+
+
+ # Resolve forward reference for sub_workflow in NodeDefinition
+ NodeDefinition.model_rebuild()
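Closing the loop on the schema: a hedged sketch of declaring a structured LLM node through the manager. The llm_config keys mirror LLMConfig above, and the 'module:Class' string is the form _resolve_model() splits on; my_module and OrderDetails are hypothetical names, not part of the package.

from quantalogic.flow.flow_manager import WorkflowManager

manager = WorkflowManager()
manager.add_node(
    name="extract_order",
    llm_config={
        "model": "gpt-3.5-turbo",
        "system_prompt": "You extract order data from text.",
        "prompt_template": "Extract the order details from: {{ order_text }}",
        "response_model": "my_module:OrderDetails",  # hypothetical module:Class path
    },
)
manager.set_start_node("extract_order")
# instantiate_workflow() builds a structured_llm_node for this node; its inputs
# are parsed out of the Jinja2 template ({{ order_text }} -> "order_text") and
# the result lands in the context under "extract_order_result" by default.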