quantalogic-0.80-py3-none-any.whl → quantalogic-0.93-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- quantalogic/flow/__init__.py +16 -34
- quantalogic/main.py +11 -6
- quantalogic/tools/tool.py +8 -922
- quantalogic-0.93.dist-info/METADATA +475 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/RECORD +8 -54
- quantalogic/codeact/TODO.md +0 -14
- quantalogic/codeact/__init__.py +0 -0
- quantalogic/codeact/agent.py +0 -478
- quantalogic/codeact/cli.py +0 -50
- quantalogic/codeact/cli_commands/__init__.py +0 -0
- quantalogic/codeact/cli_commands/create_toolbox.py +0 -45
- quantalogic/codeact/cli_commands/install_toolbox.py +0 -20
- quantalogic/codeact/cli_commands/list_executor.py +0 -15
- quantalogic/codeact/cli_commands/list_reasoners.py +0 -15
- quantalogic/codeact/cli_commands/list_toolboxes.py +0 -47
- quantalogic/codeact/cli_commands/task.py +0 -215
- quantalogic/codeact/cli_commands/tool_info.py +0 -24
- quantalogic/codeact/cli_commands/uninstall_toolbox.py +0 -43
- quantalogic/codeact/config.yaml +0 -21
- quantalogic/codeact/constants.py +0 -9
- quantalogic/codeact/events.py +0 -85
- quantalogic/codeact/examples/README.md +0 -342
- quantalogic/codeact/examples/agent_sample.yaml +0 -29
- quantalogic/codeact/executor.py +0 -186
- quantalogic/codeact/history_manager.py +0 -94
- quantalogic/codeact/llm_util.py +0 -57
- quantalogic/codeact/plugin_manager.py +0 -92
- quantalogic/codeact/prompts/error_format.j2 +0 -11
- quantalogic/codeact/prompts/generate_action.j2 +0 -77
- quantalogic/codeact/prompts/generate_program.j2 +0 -52
- quantalogic/codeact/prompts/response_format.j2 +0 -11
- quantalogic/codeact/react_agent.py +0 -318
- quantalogic/codeact/reasoner.py +0 -185
- quantalogic/codeact/templates/toolbox/README.md.j2 +0 -10
- quantalogic/codeact/templates/toolbox/pyproject.toml.j2 +0 -16
- quantalogic/codeact/templates/toolbox/tools.py.j2 +0 -6
- quantalogic/codeact/templates.py +0 -7
- quantalogic/codeact/tools_manager.py +0 -258
- quantalogic/codeact/utils.py +0 -62
- quantalogic/codeact/xml_utils.py +0 -126
- quantalogic/flow/flow.py +0 -1070
- quantalogic/flow/flow_extractor.py +0 -783
- quantalogic/flow/flow_generator.py +0 -322
- quantalogic/flow/flow_manager.py +0 -676
- quantalogic/flow/flow_manager_schema.py +0 -287
- quantalogic/flow/flow_mermaid.py +0 -365
- quantalogic/flow/flow_validator.py +0 -479
- quantalogic/flow/flow_yaml.linkedin.md +0 -31
- quantalogic/flow/flow_yaml.md +0 -767
- quantalogic/flow/templates/prompt_check_inventory.j2 +0 -1
- quantalogic/flow/templates/system_check_inventory.j2 +0 -1
- quantalogic-0.80.dist-info/METADATA +0 -900
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/LICENSE +0 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/WHEEL +0 -0
- {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/entry_points.txt +0 -0
quantalogic/flow/flow_generator.py (removed)
@@ -1,322 +0,0 @@
-import ast
-import os
-import re
-from typing import Dict, Optional
-
-from quantalogic.flow.flow_manager_schema import BranchCondition, WorkflowDefinition
-
-
-def generate_executable_script(
-    workflow_def: WorkflowDefinition,
-    global_vars: Dict[str, object],
-    output_file: str,
-    initial_context: Optional[Dict[str, object]] = None,
-) -> None:
-    """
-    Generate an executable Python script from a WorkflowDefinition with global variables using decorators.
-
-    Args:
-        workflow_def: The WorkflowDefinition object containing the workflow details.
-        global_vars: Dictionary of global variables extracted from the source file.
-        output_file: The path where the executable script will be written.
-        initial_context: Optional initial context; if None, inferred from the workflow with default values.
-
-    The generated script includes:
-    - A shebang using `uv run` for environment management.
-    - Metadata specifying the required Python version and dependencies.
-    - Global variables from the original script.
-    - Functions defined with appropriate Nodes decorators (e.g., @Nodes.define, @Nodes.llm_node).
-    - Workflow instantiation using direct chaining syntax with function names, including branch, converge, and loop support.
-    - Support for input mappings and template nodes via workflow configuration and decorators.
-    - A default initial_context inferred from the workflow with customization guidance.
-    """
-    # Infer initial context if not provided
-    if initial_context is None:
-        initial_context = {}
-        start_node = workflow_def.workflow.start
-        if start_node and start_node in workflow_def.nodes:
-            node_def = workflow_def.nodes[start_node]
-            if node_def.function and node_def.function in workflow_def.functions:
-                func_def = workflow_def.functions[node_def.function]
-                if func_def.type == "embedded" and func_def.code:
-                    try:
-                        tree = ast.parse(func_def.code)
-                        for node in ast.walk(tree):
-                            if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
-                                inputs = [param.arg for param in node.args.args]
-                                for input_name in inputs:
-                                    initial_context[input_name] = ""  # Default to empty string
-                                break
-                    except SyntaxError:
-                        pass
-            elif node_def.llm_config:
-                prompt = node_def.llm_config.prompt_template or ""
-                input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt))
-                cleaned_inputs = {
-                    re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
-                    for var in input_vars
-                    if var.strip().isidentifier()
-                }
-                for var in cleaned_inputs:
-                    initial_context[var] = ""
-            elif node_def.template_config:
-                template = node_def.template_config.template or ""
-                input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", template))
-                cleaned_inputs = {
-                    re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
-                    for var in input_vars
-                    if var.strip().isidentifier()
-                }
-                initial_context = {"rendered_content": "", **{var: "" for var in cleaned_inputs}}
-            elif node_def.sub_workflow:
-                sub_start = node_def.sub_workflow.start or f"{start_node}_start"
-                if sub_start in workflow_def.nodes:
-                    sub_node_def = workflow_def.nodes[sub_start]
-                    if sub_node_def.function in workflow_def.functions:
-                        func_def = workflow_def.functions[sub_node_def.function]
-                        if func_def.type == "embedded" and func_def.code:
-                            try:
-                                tree = ast.parse(func_def.code)
-                                for node in ast.walk(tree):
-                                    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
-                                        inputs = [param.arg for param in node.args.args]
-                                        for input_name in inputs:
-                                            initial_context[input_name] = ""
-                                        break
-                            except SyntaxError:
-                                pass
-            if node_def.inputs_mapping:
-                for key, value in node_def.inputs_mapping.items():
-                    if not value.startswith("lambda ctx:"):  # Static mappings only
-                        initial_context[value] = ""
-
-    # Detect loops
-    loop_nodes = []
-    loop_condition = None
-    loop_exit_node = None
-    for trans in workflow_def.workflow.transitions:
-        if isinstance(trans.to_node, str) and trans.condition:
-            # Check for loop-back transition
-            if any(t.from_node == trans.to_node and t.to_node == trans.from_node for t in workflow_def.workflow.transitions):
-                loop_nodes.append(trans.from_node)
-                loop_nodes.append(trans.to_node)
-                loop_condition = trans.condition
-            # Check for exit transition
-            elif loop_nodes and trans.from_node == loop_nodes[-1] and f"not ({loop_condition})" in trans.condition:
-                loop_exit_node = trans.to_node
-    loop_nodes = list(dict.fromkeys(loop_nodes))  # Remove duplicates, preserve order
-
-    with open(output_file, "w") as f:
-        # Shebang and metadata
-        f.write("#!/usr/bin/env -S uv run\n")
-        f.write("# /// script\n")
-        f.write('# requires-python = ">=3.12"\n')
-        f.write("# dependencies = [\n")
-        f.write('# "loguru",\n')
-        f.write('# "litellm",\n')
-        f.write('# "pydantic>=2.0",\n')
-        f.write('# "anyio",\n')
-        f.write('# "quantalogic>=0.35",\n')
-        f.write('# "jinja2",\n')
-        f.write('# "instructor[litellm]",\n')
-        f.write("# ]\n")
-        f.write("# ///\n\n")
-
-        # Imports
-        f.write("import anyio\n")
-        f.write("from typing import List\n")
-        f.write("from loguru import logger\n")
-        f.write("from quantalogic.flow import Nodes, Workflow\n\n")
-
-        # Global variables
-        for var_name, value in global_vars.items():
-            f.write(f"{var_name} = {repr(value)}\n")
-        f.write("\n")
-
-        # Define functions with decorators
-        for node_name, node_def in workflow_def.nodes.items():
-            if node_def.function and node_def.function in workflow_def.functions:
-                func_def = workflow_def.functions[node_def.function]
-                if func_def.type == "embedded" and func_def.code:
-                    code_lines = func_def.code.split('\n')
-                    func_body = "".join(
-                        line + "\n" for line in code_lines if not line.strip().startswith('@Nodes.')
-                    ).rstrip("\n")
-                    decorator = ""
-                    if node_def.llm_config:
-                        params = []
-                        if node_def.llm_config.model.startswith("lambda ctx:"):
-                            params.append(f"model={node_def.llm_config.model}")
-                        else:
-                            params.append(f"model={repr(node_def.llm_config.model)}")
-                        if node_def.llm_config.system_prompt_file:
-                            params.append(f"system_prompt_file={repr(node_def.llm_config.system_prompt_file)}")
-                        elif node_def.llm_config.system_prompt:
-                            params.append(f"system_prompt={repr(node_def.llm_config.system_prompt)}")
-                        if node_def.llm_config.prompt_template:
-                            params.append(f"prompt_template={repr(node_def.llm_config.prompt_template)}")
-                        if node_def.llm_config.prompt_file:
-                            params.append(f"prompt_file={repr(node_def.llm_config.prompt_file)}")
-                        params.append(f"output={repr(node_def.output or f'{node_name}_result')}")
-                        for param in ["temperature", "max_tokens", "top_p", "presence_penalty", "frequency_penalty"]:
-                            value = getattr(node_def.llm_config, param, None)
-                            if value is not None:
-                                params.append(f"{param}={repr(value)}")
-                        decorator = f"@Nodes.llm_node({', '.join(params)})\n"
-                    elif node_def.template_config:
-                        params = [f"output={repr(node_def.output or f'{node_name}_result')}"]
-                        if node_def.template_config.template:
-                            params.append(f"template={repr(node_def.template_config.template)}")
-                        if node_def.template_config.template_file:
-                            params.append(f"template_file={repr(node_def.template_config.template_file)}")
-                        decorator = f"@Nodes.template_node({', '.join(params)})\n"
-                    else:
-                        decorator = f"@Nodes.define(output={repr(node_def.output or f'{node_name}_result')})\n"
-                    f.write(f"{decorator}{func_body}\n\n")
-
-        # Define workflow using chaining syntax with loop support
-        f.write("# Define the workflow with branch, converge, and loop support\n")
-        f.write("workflow = (\n")
-        start_node = workflow_def.workflow.start
-        start_func = workflow_def.nodes[start_node].function if start_node in workflow_def.nodes and workflow_def.nodes[start_node].function else start_node
-        f.write(f' Workflow("{start_func}")\n')
-
-        added_nodes = set()
-        for node_name, node_def in workflow_def.nodes.items():
-            if node_name in added_nodes:
-                continue
-            func_name = node_def.function if node_def.function else node_name
-            if loop_nodes and node_name == loop_nodes[0]:
-                f.write(" .start_loop()\n")
-            if node_def.sub_workflow:
-                sub_start = node_def.sub_workflow.start or f"{node_name}_start"
-                sub_start_func = workflow_def.nodes[sub_start].function if sub_start in workflow_def.nodes and workflow_def.nodes[sub_start].function else sub_start
-                f.write(f' .add_sub_workflow("{node_name}", Workflow("{sub_start_func}"), ')
-                if node_def.inputs_mapping:
-                    inputs_mapping_str = "{"
-                    for k, v in node_def.inputs_mapping.items():
-                        if v.startswith("lambda ctx:"):
-                            inputs_mapping_str += f"{repr(k)}: {v}, "
-                        else:
-                            inputs_mapping_str += f"{repr(k)}: {repr(v)}, "
-                    inputs_mapping_str = inputs_mapping_str.rstrip(", ") + "}"
-                    f.write(f"inputs={inputs_mapping_str}, ")
-                else:
-                    inputs = []
-                    if sub_start in workflow_def.nodes and workflow_def.nodes[sub_start].function in workflow_def.functions:
-                        func_def = workflow_def.functions[workflow_def.nodes[sub_start].function]
-                        if func_def.code:
-                            try:
-                                tree = ast.parse(func_def.code)
-                                for node in ast.walk(tree):
-                                    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
-                                        inputs = [param.arg for param in node.args.args]
-                                        break
-                            except SyntaxError:
-                                pass
-                    f.write(f'inputs={{{", ".join(f"{k!r}: {k!r}" for k in inputs)}}}, ')
-                f.write(f'output="{node_def.output or f"{node_name}_result"}")\n')
-            else:
-                if node_def.inputs_mapping:
-                    inputs_mapping_str = "{"
-                    for k, v in node_def.inputs_mapping.items():
-                        if v.startswith("lambda ctx:"):
-                            inputs_mapping_str += f"{repr(k)}: {v}, "
-                        else:
-                            inputs_mapping_str += f"{repr(k)}: {repr(v)}, "
-                    inputs_mapping_str = inputs_mapping_str.rstrip(", ") + "}"
-                    f.write(f' .node("{func_name}", inputs_mapping={inputs_mapping_str})\n')
-                else:
-                    f.write(f' .node("{func_name}")\n')
-            added_nodes.add(node_name)
-
-        for trans in workflow_def.workflow.transitions:
-            from_node = trans.from_node
-            _from_func = workflow_def.nodes[from_node].function if from_node in workflow_def.nodes and workflow_def.nodes[from_node].function else from_node
-            if from_node not in added_nodes:
-                continue  # Skip if already added via .node()
-            to_node = trans.to_node
-            if isinstance(to_node, str):
-                if loop_nodes and from_node in loop_nodes and to_node in loop_nodes:
-                    continue  # Skip loop-back transition, handled by end_loop
-                to_func = workflow_def.nodes[to_node].function if to_node in workflow_def.nodes and workflow_def.nodes[to_node].function else to_node
-                condition = f"lambda ctx: {trans.condition}" if trans.condition else "None"
-                if loop_nodes and from_node == loop_nodes[-1] and to_node == loop_exit_node:
-                    f.write(f' .end_loop(condition=lambda ctx: {loop_condition}, next_node="{to_func}")\n')
-                else:
-                    f.write(f' .then("{to_func}", condition={condition})\n')
-            elif all(isinstance(tn, str) for tn in to_node):
-                to_funcs = [workflow_def.nodes[tn].function if tn in workflow_def.nodes and workflow_def.nodes[tn].function else tn for tn in to_node]
-                f.write(f' .parallel({", ".join(f"{n!r}" for n in to_funcs)})\n')
-            else:  # BranchCondition list
-                branches = []
-                for branch in to_node:
-                    branch_func = workflow_def.nodes[branch.to_node].function if branch.to_node in workflow_def.nodes and workflow_def.nodes[branch.to_node].function else branch.to_node
-                    cond = f"lambda ctx: {branch.condition}" if branch.condition else "None"
-                    branches.append(f'("{branch_func}", {cond})')
-                f.write(f' .branch([{", ".join(branches)}])\n')
-
-        for conv_node in workflow_def.workflow.convergence_nodes:
-            conv_func = workflow_def.nodes[conv_node].function if conv_node in workflow_def.nodes and workflow_def.nodes[conv_node].function else conv_node
-            f.write(f' .converge("{conv_func}")\n')
-
-        if hasattr(workflow_def, 'observers'):
-            for observer in workflow_def.observers:
-                f.write(f" .add_observer({observer})\n")
-        f.write(")\n\n")
-
-        # Main function
-        f.write("async def main():\n")
-        f.write(' """Main function to run the workflow."""\n')
-        f.write(" # Customize initial_context as needed\n")
-        f.write(" # Inferred required inputs:\n")
-        inferred_inputs = list(initial_context.keys())
-        f.write(f" # {', '.join(inferred_inputs) if inferred_inputs else 'None detected'}\n")
-        f.write(" initial_context = {\n")
-        for key, value in initial_context.items():
-            f.write(f" {repr(key)}: {repr(value)},\n")
-        f.write(" }\n")
-        f.write(" engine = workflow.build()\n")
-        f.write(" result = await engine.run(initial_context)\n")
-        f.write(' logger.info(f"Workflow result: {result}")\n\n')
-
-        # Entry point
-        f.write('if __name__ == "__main__":\n')
-        f.write(" anyio.run(main)\n")
-
-    os.chmod(output_file, 0o755)
-
-
-if __name__ == "__main__":
-    from quantalogic.flow.flow_manager import WorkflowManager
-
-    manager = WorkflowManager()
-    manager.add_function(
-        name="greet",
-        type_="embedded",
-        code="async def greet(name): return f'Hello, {name}!'",
-    )
-    manager.add_function(
-        name="check",
-        type_="embedded",
-        code="async def check(name): return len(name) > 3",
-    )
-    manager.add_function(
-        name="end",
-        type_="embedded",
-        code="async def end(greeting): return f'{greeting} Goodbye!'",
-    )
-    manager.add_node(name="start", function="greet", output="greeting", inputs_mapping={"name": "user_name"})
-    manager.add_node(name="check", function="check", output="condition")
-    manager.add_node(name="end", function="end", output="farewell")
-    manager.set_start_node("start")
-    manager.add_transition(
-        from_node="start",
-        to_node=[
-            BranchCondition(to_node="check", condition="ctx['name'] == 'Alice'")
-        ]
-    )
-    manager.add_convergence_node("end")
-    wf_def = manager.workflow
-    global_vars = {"MY_CONSTANT": 42}
-    generate_executable_script(wf_def, global_vars, "workflow_script.py")