quantalogic 0.80__py3-none-any.whl → 0.93__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quantalogic/flow/__init__.py +16 -34
- quantalogic/main.py +11 -6
- quantalogic/tools/tool.py +8 -922
- quantalogic-0.93.dist-info/METADATA +475 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/RECORD +8 -54
- quantalogic/codeact/TODO.md +0 -14
- quantalogic/codeact/__init__.py +0 -0
- quantalogic/codeact/agent.py +0 -478
- quantalogic/codeact/cli.py +0 -50
- quantalogic/codeact/cli_commands/__init__.py +0 -0
- quantalogic/codeact/cli_commands/create_toolbox.py +0 -45
- quantalogic/codeact/cli_commands/install_toolbox.py +0 -20
- quantalogic/codeact/cli_commands/list_executor.py +0 -15
- quantalogic/codeact/cli_commands/list_reasoners.py +0 -15
- quantalogic/codeact/cli_commands/list_toolboxes.py +0 -47
- quantalogic/codeact/cli_commands/task.py +0 -215
- quantalogic/codeact/cli_commands/tool_info.py +0 -24
- quantalogic/codeact/cli_commands/uninstall_toolbox.py +0 -43
- quantalogic/codeact/config.yaml +0 -21
- quantalogic/codeact/constants.py +0 -9
- quantalogic/codeact/events.py +0 -85
- quantalogic/codeact/examples/README.md +0 -342
- quantalogic/codeact/examples/agent_sample.yaml +0 -29
- quantalogic/codeact/executor.py +0 -186
- quantalogic/codeact/history_manager.py +0 -94
- quantalogic/codeact/llm_util.py +0 -57
- quantalogic/codeact/plugin_manager.py +0 -92
- quantalogic/codeact/prompts/error_format.j2 +0 -11
- quantalogic/codeact/prompts/generate_action.j2 +0 -77
- quantalogic/codeact/prompts/generate_program.j2 +0 -52
- quantalogic/codeact/prompts/response_format.j2 +0 -11
- quantalogic/codeact/react_agent.py +0 -318
- quantalogic/codeact/reasoner.py +0 -185
- quantalogic/codeact/templates/toolbox/README.md.j2 +0 -10
- quantalogic/codeact/templates/toolbox/pyproject.toml.j2 +0 -16
- quantalogic/codeact/templates/toolbox/tools.py.j2 +0 -6
- quantalogic/codeact/templates.py +0 -7
- quantalogic/codeact/tools_manager.py +0 -258
- quantalogic/codeact/utils.py +0 -62
- quantalogic/codeact/xml_utils.py +0 -126
- quantalogic/flow/flow.py +0 -1070
- quantalogic/flow/flow_extractor.py +0 -783
- quantalogic/flow/flow_generator.py +0 -322
- quantalogic/flow/flow_manager.py +0 -676
- quantalogic/flow/flow_manager_schema.py +0 -287
- quantalogic/flow/flow_mermaid.py +0 -365
- quantalogic/flow/flow_validator.py +0 -479
- quantalogic/flow/flow_yaml.linkedin.md +0 -31
- quantalogic/flow/flow_yaml.md +0 -767
- quantalogic/flow/templates/prompt_check_inventory.j2 +0 -1
- quantalogic/flow/templates/system_check_inventory.j2 +0 -1
- quantalogic-0.80.dist-info/METADATA +0 -900
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/LICENSE +0 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/WHEEL +0 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/entry_points.txt +0 -0
quantalogic/flow/flow_manager.py
DELETED
@@ -1,676 +0,0 @@
import asyncio
import importlib
import importlib.util
import inspect
import os
import re
import subprocess
import sys
import tempfile
import urllib
import urllib.request
from pathlib import Path
from typing import Any, Callable, Dict, List, Optional, Type, Union

import yaml  # type: ignore
from loguru import logger
from pydantic import BaseModel, ValidationError

from quantalogic.flow.flow import Nodes, Workflow
from quantalogic.flow.flow_manager_schema import (
    BranchCondition,
    FunctionDefinition,
    LLMConfig,
    NodeDefinition,
    TemplateConfig,
    TransitionDefinition,
    WorkflowDefinition,
    WorkflowStructure,
)
class WorkflowManager:
    """Builds, edits, serializes, and instantiates workflow definitions."""

    def __init__(self, workflow: Optional[WorkflowDefinition] = None):
        """Initialize the WorkflowManager with an optional workflow definition."""
        self.workflow = workflow or WorkflowDefinition()
        self._ensure_dependencies()

    def _ensure_dependencies(self) -> None:
        """Ensure all specified dependencies are installed or available.

        URLs and local files are deferred to instantiation time; anything else
        is treated as a pip requirement and installed if not importable.
        """
        if not self.workflow.dependencies:
            return

        for dep in self.workflow.dependencies:
            if dep.startswith(("http://", "https://")):
                logger.debug(f"Dependency '{dep}' is a remote URL, will be fetched during instantiation")
            elif os.path.isfile(dep):
                logger.debug(f"Dependency '{dep}' is a local file, will be loaded during instantiation")
            else:
                # Strip any version specifier (e.g. "requests>=2.28.0" -> "requests").
                module_name = dep.split(">")[0].split("<")[0].split("=")[0].strip()
                try:
                    importlib.import_module(module_name)
                    logger.debug(f"Dependency '{dep}' is already installed")
                except ImportError:
                    logger.info(f"Installing dependency '{dep}' via pip")
                    try:
                        subprocess.check_call([sys.executable, "-m", "pip", "install", dep])
                        logger.debug(f"Successfully installed '{dep}'")
                    except subprocess.CalledProcessError as e:
                        raise ValueError(f"Failed to install dependency '{dep}': {e}")
def add_node(
    self,
    name: str,
    function: Optional[str] = None,
    sub_workflow: Optional[WorkflowStructure] = None,
    llm_config: Optional[Dict[str, Any]] = None,
    template_config: Optional[Dict[str, Any]] = None,
    inputs_mapping: Optional[Dict[str, Union[str, Callable]]] = None,
    output: Optional[str] = None,
    retries: int = 3,
    delay: float = 1.0,
    timeout: Optional[float] = None,
    parallel: bool = False,
) -> None:
    """Add a new node to the workflow definition with support for template nodes and inputs mapping."""
    llm_config_obj = None if llm_config is None else LLMConfig(**llm_config)
    template_config_obj = None if template_config is None else TemplateConfig(**template_config)

    # Callables cannot go into a YAML-serializable definition directly:
    # lambdas are stored as their source text, named functions by name.
    serializable_inputs_mapping = {}
    if inputs_mapping:
        for key, value in inputs_mapping.items():
            if not callable(value):
                serializable_inputs_mapping[key] = value
            elif hasattr(value, '__name__') and value.__name__ == '<lambda>':
                import inspect
                try:
                    source = inspect.getsource(value).strip()
                    serializable_inputs_mapping[key] = f"lambda ctx: {source.split(':')[-1].strip()}"
                except Exception:
                    # Source unavailable (e.g. defined in a REPL) — best effort.
                    serializable_inputs_mapping[key] = str(value)
            else:
                serializable_inputs_mapping[key] = value.__name__

    # Default the output name for executable node kinds.
    default_output = f"{name}_result" if function or llm_config or template_config else None
    self.workflow.nodes[name] = NodeDefinition(
        function=function,
        sub_workflow=sub_workflow,
        llm_config=llm_config_obj,
        template_config=template_config_obj,
        inputs_mapping=serializable_inputs_mapping,
        output=output or default_output,
        retries=retries,
        delay=delay,
        timeout=timeout,
        parallel=parallel,
    )
def remove_node(self, name: str) -> None:
    """Remove a node and clean up related transitions, start node, and convergence list.

    Args:
        name: Name of the node to remove.

    Raises:
        ValueError: If the node does not exist.
    """
    if name not in self.workflow.nodes:
        raise ValueError(f"Node '{name}' does not exist")
    del self.workflow.nodes[name]

    def _keep(t) -> bool:
        # Drop transitions originating from the removed node.
        if t.from_node == name:
            return False
        # Bug fix: also drop transitions whose string target IS the removed
        # node; previously isinstance(t.to_node, str) short-circuited the
        # check and such dangling transitions were kept.
        if isinstance(t.to_node, str):
            return t.to_node != name
        # Branch/parallel target list: keep only if no entry points at the node.
        return all(
            (isinstance(tn, str) and tn != name)
            or (isinstance(tn, BranchCondition) and tn.to_node != name)
            for tn in t.to_node
        )

    self.workflow.workflow.transitions = [
        t for t in self.workflow.workflow.transitions if _keep(t)
    ]
    if self.workflow.workflow.start == name:
        self.workflow.workflow.start = None
    if name in self.workflow.workflow.convergence_nodes:
        self.workflow.workflow.convergence_nodes.remove(name)
def update_node(
    self,
    name: str,
    function: Optional[str] = None,
    template_config: Optional[Dict[str, Any]] = None,
    inputs_mapping: Optional[Dict[str, Union[str, Callable]]] = None,
    output: Optional[str] = None,
    retries: Optional[int] = None,
    delay: Optional[float] = None,
    timeout: Optional[Union[float, None]] = None,
    parallel: Optional[bool] = None,
) -> None:
    """Update specific fields of an existing node with template and mapping support.

    Only arguments that are not None are applied; everything else is left as-is.
    """
    if name not in self.workflow.nodes:
        raise ValueError(f"Node '{name}' does not exist")
    node = self.workflow.nodes[name]

    if function is not None:
        node.function = function
    if template_config is not None:
        node.template_config = TemplateConfig(**template_config)

    if inputs_mapping is not None:
        # Same serialization rules as add_node: lambdas become source text,
        # named callables become their name, plain values pass through.
        serializable: Dict[str, Any] = {}
        for key, value in inputs_mapping.items():
            if not callable(value):
                serializable[key] = value
            elif hasattr(value, '__name__') and value.__name__ == '<lambda>':
                import inspect
                try:
                    src = inspect.getsource(value).strip()
                    serializable[key] = f"lambda ctx: {src.split(':')[-1].strip()}"
                except Exception:
                    serializable[key] = str(value)
            else:
                serializable[key] = value.__name__
        node.inputs_mapping = serializable

    if output is not None:
        node.output = output
    if retries is not None:
        node.retries = retries
    if delay is not None:
        node.delay = delay
    if timeout is not None:
        node.timeout = timeout
    if parallel is not None:
        node.parallel = parallel
def add_transition(
    self,
    from_node: str,
    to_node: Union[str, List[Union[str, BranchCondition]]],
    condition: Optional[str] = None,
    strict: bool = True,
) -> None:
    """Add a transition between nodes, supporting branching.

    Args:
        from_node: Source node name.
        to_node: Single target name, or a list mixing names and BranchCondition.
        condition: Optional Python expression over 'ctx' gating the transition.
        strict: When True, every referenced node must already exist.
    """
    if strict:
        if from_node not in self.workflow.nodes:
            raise ValueError(f"Source node '{from_node}' does not exist")
        targets = [to_node] if isinstance(to_node, str) else [
            t if isinstance(t, str) else t.to_node for t in to_node
        ]
        for target in targets:
            if target not in self.workflow.nodes:
                raise ValueError(f"Target node '{target}' does not exist")
    self.workflow.workflow.transitions.append(
        TransitionDefinition(
            from_node=from_node,
            to_node=to_node,
            condition=condition,
        )
    )
def add_loop(self, loop_nodes: List[str], condition: str, exit_node: str) -> None:
    """Add a loop construct to the workflow.

    Args:
        loop_nodes: Node names executed inside the loop, in order.
        condition: Python expression using 'ctx' that, when True, keeps the loop running.
        exit_node: Node to transition to when the loop condition is False.
    """
    if not loop_nodes:
        raise ValueError("Loop must contain at least one node")
    for node in [*loop_nodes, exit_node]:
        if node not in self.workflow.nodes:
            raise ValueError(f"Node '{node}' does not exist")
    # Chain the loop body nodes sequentially.
    for prev, nxt in zip(loop_nodes, loop_nodes[1:]):
        self.add_transition(from_node=prev, to_node=nxt)
    last = loop_nodes[-1]
    # Loop back to the head while the condition holds...
    self.add_transition(from_node=last, to_node=loop_nodes[0], condition=condition)
    # ...and exit once it no longer does.
    self.add_transition(from_node=last, to_node=exit_node, condition=f"not ({condition})")
def set_start_node(self, name: str) -> None:
    """Set the start node of the workflow.

    Raises:
        ValueError: If the named node has not been added.
    """
    if name not in self.workflow.nodes:
        raise ValueError(f"Node '{name}' does not exist")
    self.workflow.workflow.start = name
def add_convergence_node(self, name: str) -> None:
    """Add a convergence node to the workflow (idempotent).

    Raises:
        ValueError: If the named node has not been added.
    """
    if name not in self.workflow.nodes:
        raise ValueError(f"Node '{name}' does not exist")
    convergence = self.workflow.workflow.convergence_nodes
    if name not in convergence:
        convergence.append(name)
        logger.debug(f"Added convergence node '{name}'")
def add_function(
    self,
    name: str,
    type_: str,
    code: Optional[str] = None,
    module: Optional[str] = None,
    function: Optional[str] = None,
) -> None:
    """Add a function definition to the workflow.

    Args:
        name: Registry key for the function.
        type_: "embedded" (inline code) or "external" (module attribute).
        code: Source text, for embedded functions.
        module: Module path/URL/name, for external functions.
        function: Attribute name inside 'module', for external functions.
    """
    self.workflow.functions[name] = FunctionDefinition(
        type=type_, code=code, module=module, function=function
    )
def add_observer(self, observer_name: str) -> None:
    """Add an observer function name to the workflow (idempotent).

    Raises:
        ValueError: If the observer is not a defined function.
    """
    if observer_name not in self.workflow.functions:
        raise ValueError(f"Observer function '{observer_name}' not defined in functions")
    if observer_name not in self.workflow.observers:
        self.workflow.observers.append(observer_name)
        logger.debug(f"Added observer '{observer_name}' to workflow")
def _resolve_model(self, model_str: str) -> Type[BaseModel]:
    """Resolve a "module:ClassName" string to a Pydantic model class for structured_llm_node.

    Raises:
        ValueError: If the string is malformed, the module cannot be imported,
            the attribute is missing, or the class is not a Pydantic model.
    """
    try:
        module_name, class_name = model_str.split(":")
        model_class = getattr(importlib.import_module(module_name), class_name)
        if not issubclass(model_class, BaseModel):
            raise ValueError(f"{model_str} is not a Pydantic model")
        return model_class
    except (ValueError, ImportError, AttributeError) as e:
        raise ValueError(f"Failed to resolve response_model '{model_str}': {e}")
def import_module_from_source(self, source: str) -> Any:
    """Import a module from a URL, a local file path, or an installed module name.

    Args:
        source: http(s) URL, filesystem path, or importable module name.

    Returns:
        The imported module object.

    Raises:
        ValueError: If the module cannot be fetched, loaded, or imported.
    """
    if source.startswith("http://") or source.startswith("https://"):
        # Bug fix: 'import urllib' alone does not load the 'request'
        # submodule; import it explicitly before use.
        import urllib.request
        temp_path = None
        try:
            # NOTE(security): downloads and executes remote code — only use
            # with trusted workflow definitions.
            with urllib.request.urlopen(source) as response:
                code = response.read().decode("utf-8")
            with tempfile.NamedTemporaryFile(delete=False, suffix=".py") as temp_file:
                temp_file.write(code.encode("utf-8"))
                temp_path = temp_file.name
            module_name = f"temp_module_{hash(temp_path)}"
            spec = importlib.util.spec_from_file_location(module_name, temp_path)
            if spec is None:
                raise ValueError(f"Failed to create module spec from {temp_path}")
            module = importlib.util.module_from_spec(spec)
            sys.modules[module_name] = module
            if spec.loader is None:
                raise ValueError(f"Module spec has no loader for {temp_path}")
            spec.loader.exec_module(module)
            return module
        except Exception as e:
            raise ValueError(f"Failed to import module from URL '{source}': {e}")
        finally:
            # Bug fix: previously the temp file was removed only on success,
            # leaking it whenever loading failed.
            if temp_path is not None and os.path.exists(temp_path):
                os.remove(temp_path)
    elif os.path.isfile(source):
        try:
            module_name = f"local_module_{hash(source)}"
            spec = importlib.util.spec_from_file_location(module_name, source)
            if spec is None:
                raise ValueError(f"Failed to create module spec from {source}")
            module = importlib.util.module_from_spec(spec)
            sys.modules[module_name] = module
            if spec.loader is None:
                raise ValueError(f"Module spec has no loader for {source}")
            spec.loader.exec_module(module)
            return module
        except Exception as e:
            raise ValueError(f"Failed to import module from file '{source}': {e}")
    else:
        try:
            return importlib.import_module(source)
        except ImportError as e:
            logger.error(f"Module '{source}' not found: {e}")
            raise ValueError(
                f"Failed to import module '{source}': {e}. "
                f"Ensure it is installed using 'pip install {source}' or check the module name."
            )
def instantiate_workflow(self) -> Workflow:
    """Instantiate a Workflow object with full support for template_node, inputs_mapping, and loops.

    Materializes function definitions, registers every node in
    Nodes.NODE_REGISTRY, then wires transitions (sub-workflows, parallels,
    branches, loops, convergence nodes) into a Workflow.

    Returns:
        The fully wired Workflow.

    Raises:
        ValueError: On a missing start node, unresolvable functions/modules,
            or otherwise invalid definitions.
    """
    self._ensure_dependencies()

    # --- Materialize function definitions ---------------------------------
    functions: Dict[str, Callable] = {}
    for func_name, func_def in self.workflow.functions.items():
        if func_def.type == "embedded":
            local_scope: Dict[str, Any] = {}
            if func_def.code is not None:
                # NOTE(security): executes code embedded in the workflow
                # definition — only load definitions from trusted sources.
                exec(func_def.code, local_scope)
                if func_name not in local_scope:
                    raise ValueError(f"Embedded function '{func_name}' not defined in code")
                functions[func_name] = local_scope[func_name]
            else:
                raise ValueError(f"Embedded function '{func_name}' has no code")
        elif func_def.type == "external":
            try:
                if func_def.module is None:
                    raise ValueError(f"External function '{func_name}' has no module specified")
                module = self.import_module_from_source(func_def.module)
                if func_def.function is None:
                    raise ValueError(f"External function '{func_name}' has no function name specified")
                functions[func_name] = getattr(module, func_def.function)
            except (ImportError, AttributeError) as e:
                raise ValueError(f"Failed to import external function '{func_name}': {e}")

    if not self.workflow.workflow.start:
        raise ValueError("Start node not set in workflow definition")

    start_node_name = str(self.workflow.workflow.start) if self.workflow.workflow.start else "start"
    if self.workflow.workflow.start is None:
        logger.warning("Start node was None, using 'start' as default")

    # --- Register every node before creating the Workflow ------------------
    for node_name, node_def in self.workflow.nodes.items():
        if node_def.function:
            if node_def.function not in functions:
                raise ValueError(f"Function '{node_def.function}' for node '{node_name}' not found")
            func = functions[node_def.function]
            # Bug fix: derive the node's inputs from the wrapped function's
            # signature instead of the previously hard-coded ["user_name"].
            try:
                func_inputs = [
                    p.name
                    for p in inspect.signature(func).parameters.values()
                    if p.kind not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
                ]
            except (TypeError, ValueError):
                func_inputs = []
            Nodes.NODE_REGISTRY[node_name] = (
                Nodes.define(output=node_def.output)(func),
                func_inputs,
                node_def.output,
            )
        elif node_def.llm_config:
            llm_config = node_def.llm_config
            # Extract {{ placeholders }} from the inline template; file-based
            # templates resolve their inputs at render time.
            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", llm_config.prompt_template)) if not llm_config.prompt_file else set()
            cleaned_inputs = set()
            for input_var in input_vars:
                # Keep only the base identifier of simple arithmetic expressions.
                base_var = re.split(r"\s*[\+\-\*/]\s*", input_var.strip())[0].strip()
                if base_var.isidentifier():
                    cleaned_inputs.add(base_var)
            inputs_list: List[str] = list(cleaned_inputs)

            async def dummy_func(**kwargs):
                pass

            # The model may be overridden per-context via a serialized lambda
            # in inputs_mapping; default to the static model string.
            def model_callable(ctx):
                return llm_config.model
            if node_def.inputs_mapping and "model" in node_def.inputs_mapping:
                model_value = node_def.inputs_mapping["model"]
                if isinstance(model_value, str) and model_value.startswith("lambda ctx:"):
                    try:
                        # NOTE(security): eval of workflow-supplied lambda text.
                        model_callable = eval(model_value)
                    except Exception as e:
                        logger.warning(f"Failed to evaluate model lambda for {node_name}: {e}")
                        def model_callable(ctx):
                            return model_value

            if llm_config.response_model:
                response_model = self._resolve_model(llm_config.response_model)
                decorated_func = Nodes.structured_llm_node(
                    model=model_callable,
                    system_prompt=llm_config.system_prompt or "",
                    system_prompt_file=llm_config.system_prompt_file,
                    prompt_template=llm_config.prompt_template,
                    prompt_file=llm_config.prompt_file,
                    response_model=response_model,
                    output=node_def.output or f"{node_name}_result",
                    temperature=llm_config.temperature,
                    max_tokens=llm_config.max_tokens or 2000,
                    top_p=llm_config.top_p,
                    presence_penalty=llm_config.presence_penalty,
                    frequency_penalty=llm_config.frequency_penalty,
                    api_key=llm_config.api_key,
                )(dummy_func)
            else:
                decorated_func = Nodes.llm_node(
                    model=model_callable,
                    system_prompt=llm_config.system_prompt or "",
                    system_prompt_file=llm_config.system_prompt_file,
                    prompt_template=llm_config.prompt_template,
                    prompt_file=llm_config.prompt_file,
                    output=node_def.output or f"{node_name}_result",
                    temperature=llm_config.temperature,
                    max_tokens=llm_config.max_tokens or 2000,
                    top_p=llm_config.top_p,
                    presence_penalty=llm_config.presence_penalty,
                    frequency_penalty=llm_config.frequency_penalty,
                    api_key=llm_config.api_key,
                )(dummy_func)

            Nodes.NODE_REGISTRY[node_name] = (decorated_func, inputs_list, node_def.output or f"{node_name}_result")
        elif node_def.template_config:
            template_config = node_def.template_config
            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", template_config.template)) if not template_config.template_file else set()
            cleaned_inputs = {var.strip() for var in input_vars if var.strip().isidentifier()}
            inputs_list = list(cleaned_inputs)

            async def dummy_template_func(rendered_content: str, **kwargs):
                return rendered_content

            decorated_func = Nodes.template_node(
                output=node_def.output or f"{node_name}_result",
                template=template_config.template,
                template_file=template_config.template_file,
            )(dummy_template_func)

            Nodes.NODE_REGISTRY[node_name] = (decorated_func, ["rendered_content"] + inputs_list, node_def.output or f"{node_name}_result")

    # Create the Workflow instance after all nodes are registered.
    wf = Workflow(start_node=start_node_name)

    for observer_name in self.workflow.observers:
        if observer_name not in functions:
            raise ValueError(f"Observer '{observer_name}' not found in functions")
        wf.add_observer(functions[observer_name])
        logger.debug(f"Registered observer '{observer_name}' in workflow")

    sub_workflows: Dict[str, Workflow] = {}
    for node_name, node_def in self.workflow.nodes.items():
        # Re-hydrate serialized "lambda ctx:" strings in inputs_mapping.
        inputs_mapping = {}
        if node_def.inputs_mapping:
            for key, value in node_def.inputs_mapping.items():
                if isinstance(value, str) and value.startswith("lambda ctx:"):
                    try:
                        # NOTE(security): eval of workflow-supplied lambda text.
                        inputs_mapping[key] = eval(value)
                    except Exception as e:
                        logger.warning(f"Failed to evaluate lambda for {key} in {node_name}: {e}")
                        inputs_mapping[key] = value
                else:
                    inputs_mapping[key] = value

        if node_def.sub_workflow:
            start_node = str(node_def.sub_workflow.start) if node_def.sub_workflow.start else f"{node_name}_start"
            if node_def.sub_workflow.start is None:
                logger.warning(f"Sub-workflow for node '{node_name}' has no start node, using '{start_node}'")
            sub_wf = Workflow(start_node=start_node)
            sub_workflows[node_name] = sub_wf
            added_sub_nodes = set()
            for trans in node_def.sub_workflow.transitions:
                from_node = trans.from_node
                if from_node not in added_sub_nodes:
                    sub_wf.node(from_node)
                    added_sub_nodes.add(from_node)
                if isinstance(trans.to_node, str):
                    to_nodes = [trans.to_node]
                    condition = eval(f"lambda ctx: {trans.condition}") if trans.condition else None
                    if to_nodes[0] not in added_sub_nodes:
                        sub_wf.node(to_nodes[0])
                        added_sub_nodes.add(to_nodes[0])
                    sub_wf.then(to_nodes[0], condition=condition)
                elif all(isinstance(tn, str) for tn in trans.to_node):
                    # Plain fan-out to multiple nodes -> parallel execution.
                    to_nodes = trans.to_node
                    for to_node in to_nodes:
                        if to_node not in added_sub_nodes:
                            sub_wf.node(to_node)
                            added_sub_nodes.add(to_node)
                    sub_wf.parallel(*to_nodes)
                else:
                    # Conditional fan-out -> branch.
                    branches = [(tn.to_node, eval(f"lambda ctx: {tn.condition}") if tn.condition else None)
                                for tn in trans.to_node]
                    for to_node, _ in branches:
                        if to_node not in added_sub_nodes:
                            sub_wf.node(to_node)
                            added_sub_nodes.add(to_node)
                    sub_wf.branch(branches)
            inputs = list(Nodes.NODE_REGISTRY[sub_wf.start_node][1])
            output = node_def.output if node_def.output is not None else f"{node_name}_result"
            wf.add_sub_workflow(node_name, sub_wf, inputs={k: k for k in inputs}, output=output)
        else:
            wf.node(node_name, inputs_mapping=inputs_mapping if inputs_mapping else None)

    # Detect loops: a pair of mutually-conditioned transitions marks a loop;
    # a transition guarded by "not (<condition>)" marks the loop exit.
    loop_nodes = []
    loop_condition = None
    loop_exit_node = None
    for trans in self.workflow.workflow.transitions:
        if isinstance(trans.to_node, str) and trans.condition:
            if trans.to_node in loop_nodes and trans.from_node in loop_nodes:
                continue  # Already identified as part of loop
            if any(t.from_node == trans.to_node and t.to_node == trans.from_node for t in self.workflow.workflow.transitions):
                # Found a potential loop
                loop_nodes.append(trans.from_node)
                loop_nodes.append(trans.to_node)
                loop_condition = trans.condition
            elif trans.from_node in loop_nodes:
                # Check for exit transition
                if f"not ({loop_condition})" in trans.condition:
                    loop_exit_node = trans.to_node

    added_nodes = set()
    for trans in self.workflow.workflow.transitions:
        from_node = trans.from_node
        if from_node not in added_nodes and from_node not in sub_workflows:
            if loop_nodes and from_node == loop_nodes[0]:  # Start of loop
                wf.start_loop()
            wf.node(from_node)
            added_nodes.add(from_node)
        if isinstance(trans.to_node, str):
            to_nodes = [trans.to_node]
            condition = eval(f"lambda ctx: {trans.condition}") if trans.condition else None
            if to_nodes[0] not in added_nodes and to_nodes[0] not in sub_workflows:
                wf.node(to_nodes[0])
                added_nodes.add(to_nodes[0])
            if loop_nodes and to_nodes[0] == loop_exit_node and loop_condition:  # End of loop
                wf.end_loop(condition=eval(f"lambda ctx: {loop_condition}"), next_node=to_nodes[0])
            else:
                wf.then(to_nodes[0], condition=condition)
        elif all(isinstance(tn, str) for tn in trans.to_node):
            to_nodes = trans.to_node
            for to_node in to_nodes:
                if to_node not in added_nodes and to_node not in sub_workflows:
                    wf.node(to_node)
                    added_nodes.add(to_node)
            wf.parallel(*to_nodes)
        else:
            branches = [(tn.to_node, eval(f"lambda ctx: {tn.condition}") if tn.condition else None)
                        for tn in trans.to_node]
            for to_node, _ in branches:
                if to_node not in added_nodes and to_node not in sub_workflows:
                    wf.node(to_node)
                    added_nodes.add(to_node)
            wf.branch(branches)

    for conv_node in self.workflow.workflow.convergence_nodes:
        if conv_node not in added_nodes and conv_node not in sub_workflows:
            wf.node(conv_node)
            added_nodes.add(conv_node)
        wf.converge(conv_node)

    return wf
def load_from_yaml(self, file_path: Union[str, Path]) -> None:
    """Load a workflow from a YAML file with validation.

    Raises:
        FileNotFoundError: If the file does not exist.
        ValueError: If the YAML does not validate against WorkflowDefinition.
    """
    file_path = Path(file_path)
    if not file_path.exists():
        raise FileNotFoundError(f"YAML file '{file_path}' not found")
    with file_path.open("r") as f:
        data = yaml.safe_load(f)
    try:
        self.workflow = WorkflowDefinition.model_validate(data)
        self._ensure_dependencies()
    except ValidationError as e:
        raise ValueError(f"Invalid workflow YAML: {e}")
def save_to_yaml(self, file_path: Union[str, Path]) -> None:
    """Save the workflow to a YAML file using aliases and multi-line block scalars for code."""
    file_path = Path(file_path)

    def str_representer(dumper, data):
        # Emit multi-line strings (e.g. embedded code) as '|' block scalars.
        style = "|" if "\n" in data else None
        return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=style)

    # NOTE: registers the representer globally on yaml.SafeDumper.
    yaml.add_representer(str, str_representer, Dumper=yaml.SafeDumper)

    with file_path.open("w") as f:
        yaml.safe_dump(
            self.workflow.model_dump(by_alias=True),
            f,
            default_flow_style=False,
            sort_keys=False,
            allow_unicode=True,
            width=120,
        )
async def test_workflow():
    """Test the workflow execution with a loop."""
    manager = WorkflowManager()
    manager.workflow.dependencies = ["requests>=2.28.0"]

    # Embedded helper functions used by the workflow nodes and observer.
    embedded_functions = {
        "greet": "def greet(user_name): return f'Hello, {user_name}!'",
        "check_length": "def check_length(user_name): return len(user_name) < 5",
        "append_char": "def append_char(user_name): return user_name + 'x'",
        "farewell": "def farewell(user_name): return f'Goodbye, {user_name}!'",
        "monitor": """async def monitor(event):
    print(f'[EVENT] {event.event_type.value} @ {event.node_name or "workflow"}')
    if event.result:
        print(f'Result: {event.result}')
    if event.exception:
        print(f'Error: {event.exception}')""",
    }
    for fn_name, fn_code in embedded_functions.items():
        manager.add_function(name=fn_name, type_="embedded", code=fn_code)

    # Nodes: greet -> check (loop condition) -> append (mutates name) -> end.
    manager.add_node(name="start", function="greet", inputs_mapping={"user_name": "name_input"})
    manager.add_node(
        name="check",
        function="check_length",
        inputs_mapping={"user_name": "name_input"},
        output="continue_loop",
    )
    manager.add_node(
        name="append",
        function="append_char",
        inputs_mapping={"user_name": "name_input"},
        output="name_input",
    )
    manager.add_node(name="end", function="farewell", inputs_mapping={"user_name": "name_input"})

    manager.set_start_node("start")
    manager.add_loop(
        loop_nodes=["start", "check", "append"],
        condition="ctx.get('continue_loop', False)",
        exit_node="end",
    )
    manager.add_observer("monitor")
    manager.save_to_yaml("workflow.yaml")

    # Round-trip: load the YAML back and inspect the structure.
    new_manager = WorkflowManager()
    new_manager.load_from_yaml("workflow.yaml")
    print("Workflow structure:")
    print(new_manager.workflow.model_dump())

    # Execute the workflow.
    workflow = new_manager.instantiate_workflow()
    engine = workflow.build()
    initial_context = {"name_input": "Alice"}
    result = await engine.run(initial_context)
    print("\nExecution result:")
    print(result)
def main():
    """Run the workflow test."""
    asyncio.run(test_workflow())


if __name__ == "__main__":
    main()