quantalogic 0.61.3__py3-none-any.whl → 0.92__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. quantalogic/agent.py +0 -1
  2. quantalogic/flow/__init__.py +16 -34
  3. quantalogic/main.py +11 -6
  4. quantalogic/tools/action_gen.py +1 -1
  5. quantalogic/tools/tool.py +8 -500
  6. quantalogic-0.92.dist-info/METADATA +448 -0
  7. {quantalogic-0.61.3.dist-info → quantalogic-0.92.dist-info}/RECORD +10 -33
  8. {quantalogic-0.61.3.dist-info → quantalogic-0.92.dist-info}/WHEEL +1 -1
  9. quantalogic-0.92.dist-info/entry_points.txt +3 -0
  10. quantalogic/codeact/__init__.py +0 -0
  11. quantalogic/codeact/agent.py +0 -499
  12. quantalogic/codeact/cli.py +0 -232
  13. quantalogic/codeact/constants.py +0 -9
  14. quantalogic/codeact/events.py +0 -78
  15. quantalogic/codeact/llm_util.py +0 -76
  16. quantalogic/codeact/prompts/error_format.j2 +0 -11
  17. quantalogic/codeact/prompts/generate_action.j2 +0 -26
  18. quantalogic/codeact/prompts/generate_program.j2 +0 -39
  19. quantalogic/codeact/prompts/response_format.j2 +0 -11
  20. quantalogic/codeact/tools_manager.py +0 -135
  21. quantalogic/codeact/utils.py +0 -135
  22. quantalogic/flow/flow.py +0 -960
  23. quantalogic/flow/flow_extractor.py +0 -723
  24. quantalogic/flow/flow_generator.py +0 -294
  25. quantalogic/flow/flow_manager.py +0 -637
  26. quantalogic/flow/flow_manager_schema.py +0 -255
  27. quantalogic/flow/flow_mermaid.py +0 -365
  28. quantalogic/flow/flow_validator.py +0 -479
  29. quantalogic/flow/flow_yaml.linkedin.md +0 -31
  30. quantalogic/flow/flow_yaml.md +0 -767
  31. quantalogic/flow/templates/prompt_check_inventory.j2 +0 -1
  32. quantalogic/flow/templates/system_check_inventory.j2 +0 -1
  33. quantalogic-0.61.3.dist-info/METADATA +0 -900
  34. quantalogic-0.61.3.dist-info/entry_points.txt +0 -6
  35. {quantalogic-0.61.3.dist-info → quantalogic-0.92.dist-info}/LICENSE +0 -0
@@ -1,294 +0,0 @@
1
- import ast
2
- import os
3
- import re
4
- from typing import Dict, Optional
5
-
6
- from quantalogic.flow.flow import Nodes
7
- from quantalogic.flow.flow_manager_schema import BranchCondition, WorkflowDefinition
8
-
9
-
10
def _infer_code_inputs(code: str) -> list:
    """Return the parameter names of the first function defined in *code*.

    Parses *code* with ``ast`` and returns the positional parameter names of
    the first (sync or async) function definition found. Returns an empty
    list when the code does not parse or contains no function definition.
    """
    try:
        tree = ast.parse(code)
    except SyntaxError:
        return []
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            return [param.arg for param in node.args.args]
    return []


def _extract_placeholder_vars(text: str) -> set:
    """Collect variable names from Jinja2-style ``{{ ... }}`` placeholders.

    Only placeholders whose content is a single bare identifier are kept;
    arbitrary expressions (e.g. ``{{ a + b }}``) are dropped, matching the
    original filtering behavior.
    """
    raw_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", text))
    return {var.strip() for var in raw_vars if var.strip().isidentifier()}


def _format_inputs_mapping(inputs_mapping: Dict[str, str]) -> str:
    """Render an inputs mapping as a Python dict literal for the generated script.

    Values that are ``lambda ctx:`` expressions are emitted verbatim (as code);
    all other values are emitted as quoted string literals.
    """
    parts = [
        f"{repr(key)}: {value}" if value.startswith("lambda ctx:") else f"{repr(key)}: {repr(value)}"
        for key, value in inputs_mapping.items()
    ]
    return "{" + ", ".join(parts) + "}"


def generate_executable_script(
    workflow_def: "WorkflowDefinition",
    global_vars: Dict[str, object],
    output_file: str,
    initial_context: Optional[Dict[str, object]] = None,
) -> None:
    """
    Generate an executable Python script from a WorkflowDefinition with global variables using decorators.

    Args:
        workflow_def: The WorkflowDefinition object containing the workflow details.
        global_vars: Dictionary of global variables extracted from the source file.
        output_file: The path where the executable script will be written.
        initial_context: Optional initial context; if None, inferred from the workflow with default values.

    The generated script includes:
    - A shebang using `uv run` for environment management.
    - Metadata specifying the required Python version and dependencies (PEP 723).
    - Global variables from the original script.
    - Functions defined with appropriate Nodes decorators (e.g., @Nodes.define, @Nodes.llm_node).
    - Workflow instantiation using direct chaining syntax with function names, including branch and converge.
    - Support for input mappings and template nodes via workflow configuration and decorators.
    - A default initial_context inferred from the workflow with customization guidance.
    """
    # Infer initial context if not provided, by inspecting the start node's
    # function parameters, prompt/template placeholders, or sub-workflow.
    if initial_context is None:
        initial_context = {}
        start_node = workflow_def.workflow.start
        if start_node and start_node in workflow_def.nodes:
            node_def = workflow_def.nodes[start_node]
            if node_def.function and node_def.function in workflow_def.functions:
                func_def = workflow_def.functions[node_def.function]
                if func_def.type == "embedded" and func_def.code:
                    for input_name in _infer_code_inputs(func_def.code):
                        initial_context[input_name] = ""  # Default to empty string
            elif node_def.llm_config:
                prompt = node_def.llm_config.prompt_template or ""
                for var in _extract_placeholder_vars(prompt):
                    initial_context[var] = ""
            elif node_def.template_config:
                template = node_def.template_config.template or ""
                cleaned_inputs = _extract_placeholder_vars(template)
                # Template nodes always receive a rendered_content slot.
                initial_context = {"rendered_content": "", **{var: "" for var in cleaned_inputs}}
            elif node_def.sub_workflow:
                sub_start = node_def.sub_workflow.start or f"{start_node}_start"
                if sub_start in workflow_def.nodes:
                    sub_node_def = workflow_def.nodes[sub_start]
                    if sub_node_def.function in workflow_def.functions:
                        func_def = workflow_def.functions[sub_node_def.function]
                        if func_def.type == "embedded" and func_def.code:
                            for input_name in _infer_code_inputs(func_def.code):
                                initial_context[input_name] = ""
            if node_def.inputs_mapping:
                for key, value in node_def.inputs_mapping.items():
                    if not value.startswith("lambda ctx:"):  # Static mappings only
                        initial_context[value] = ""

    # Explicit UTF-8 so generated scripts are not locale-dependent.
    with open(output_file, "w", encoding="utf-8") as f:
        # Shebang and PEP 723 inline metadata so the script runs under `uv run`
        f.write("#!/usr/bin/env -S uv run\n")
        f.write("# /// script\n")
        f.write('# requires-python = ">=3.12"\n')
        f.write("# dependencies = [\n")
        f.write('#     "loguru",\n')
        f.write('#     "litellm",\n')
        f.write('#     "pydantic>=2.0",\n')
        f.write('#     "anyio",\n')
        f.write('#     "quantalogic>=0.35",\n')
        f.write('#     "jinja2",\n')
        f.write('#     "instructor[litellm]",\n')
        f.write("# ]\n")
        f.write("# ///\n\n")

        # Imports required by every generated script
        f.write("import anyio\n")
        f.write("from typing import List\n")
        f.write("from loguru import logger\n")
        f.write("from quantalogic.flow import Nodes, Workflow\n\n")

        # Re-emit global variables captured from the original source file
        for var_name, value in global_vars.items():
            f.write(f"{var_name} = {repr(value)}\n")
        f.write("\n")

        # Emit each node's function with the appropriate Nodes decorator
        for node_name, node_def in workflow_def.nodes.items():
            if node_def.function and node_def.function in workflow_def.functions:
                func_def = workflow_def.functions[node_def.function]
                if func_def.type == "embedded" and func_def.code:
                    # Strip any stale @Nodes. decorators carried in the stored code;
                    # the correct decorator is regenerated below.
                    code_lines = func_def.code.split('\n')
                    func_body = "".join(
                        line + "\n" for line in code_lines if not line.strip().startswith('@Nodes.')
                    ).rstrip("\n")
                    if node_def.llm_config:
                        params = []
                        if node_def.llm_config.model.startswith("lambda ctx:"):
                            # Dynamic model selector: emit the lambda as code
                            params.append(f"model={node_def.llm_config.model}")
                        else:
                            params.append(f"model={repr(node_def.llm_config.model)}")
                        if node_def.llm_config.system_prompt_file:
                            params.append(f"system_prompt_file={repr(node_def.llm_config.system_prompt_file)}")
                        elif node_def.llm_config.system_prompt:
                            params.append(f"system_prompt={repr(node_def.llm_config.system_prompt)}")
                        if node_def.llm_config.prompt_template:
                            params.append(f"prompt_template={repr(node_def.llm_config.prompt_template)}")
                        if node_def.llm_config.prompt_file:
                            params.append(f"prompt_file={repr(node_def.llm_config.prompt_file)}")
                        params.append(f"output={repr(node_def.output or f'{node_name}_result')}")
                        for param in ["temperature", "max_tokens", "top_p", "presence_penalty", "frequency_penalty"]:
                            value = getattr(node_def.llm_config, param, None)
                            if value is not None:
                                params.append(f"{param}={repr(value)}")
                        decorator = f"@Nodes.llm_node({', '.join(params)})\n"
                    elif node_def.template_config:
                        params = [f"output={repr(node_def.output or f'{node_name}_result')}"]
                        if node_def.template_config.template:
                            params.append(f"template={repr(node_def.template_config.template)}")
                        if node_def.template_config.template_file:
                            params.append(f"template_file={repr(node_def.template_config.template_file)}")
                        decorator = f"@Nodes.template_node({', '.join(params)})\n"
                    else:
                        decorator = f"@Nodes.define(output={repr(node_def.output or f'{node_name}_result')})\n"
                    f.write(f"{decorator}{func_body}\n\n")

        # Workflow instantiation using direct chaining syntax
        f.write("# Define the workflow with branch and converge support\n")
        f.write("workflow = (\n")
        start_node = workflow_def.workflow.start
        start_func = (
            workflow_def.nodes[start_node].function
            if start_node in workflow_def.nodes and workflow_def.nodes[start_node].function
            else start_node
        )
        f.write(f'    Workflow("{start_func}")\n')

        for node_name, node_def in workflow_def.nodes.items():
            func_name = node_def.function if node_def.function else node_name
            if node_def.sub_workflow:
                sub_start = node_def.sub_workflow.start or f"{node_name}_start"
                sub_start_func = (
                    workflow_def.nodes[sub_start].function
                    if sub_start in workflow_def.nodes and workflow_def.nodes[sub_start].function
                    else sub_start
                )
                f.write(f'    .add_sub_workflow("{node_name}", Workflow("{sub_start_func}"), ')
                if node_def.inputs_mapping:
                    f.write(f"inputs={_format_inputs_mapping(node_def.inputs_mapping)}, ")
                else:
                    # No explicit mapping: infer identity mapping from the
                    # sub-workflow start function's parameters.
                    inputs = []
                    if (
                        sub_start in workflow_def.nodes
                        and workflow_def.nodes[sub_start].function in workflow_def.functions
                    ):
                        func_def = workflow_def.functions[workflow_def.nodes[sub_start].function]
                        if func_def.code:
                            inputs = _infer_code_inputs(func_def.code)
                    f.write(f'inputs={{{", ".join(f"{k!r}: {k!r}" for k in inputs)}}}, ')
                f.write(f'output="{node_def.output or f"{node_name}_result"}")\n')
            else:
                if node_def.inputs_mapping:
                    f.write(f'    .node("{func_name}", inputs_mapping={_format_inputs_mapping(node_def.inputs_mapping)})\n')
                else:
                    f.write(f'    .node("{func_name}")\n')

        # Transitions: the source node is implicit in the chaining syntax,
        # so only the target(s) are emitted.
        for trans in workflow_def.workflow.transitions:
            to_node = trans.to_node
            if isinstance(to_node, str):
                to_func = (
                    workflow_def.nodes[to_node].function
                    if to_node in workflow_def.nodes and workflow_def.nodes[to_node].function
                    else to_node
                )
                condition = f"lambda ctx: {trans.condition}" if trans.condition else "None"
                f.write(f'    .then("{to_func}", condition={condition})\n')
            elif all(isinstance(tn, str) for tn in to_node):
                to_funcs = [
                    workflow_def.nodes[tn].function
                    if tn in workflow_def.nodes and workflow_def.nodes[tn].function
                    else tn
                    for tn in to_node
                ]
                f.write(f'    .parallel({", ".join(f"{n!r}" for n in to_funcs)})\n')
            else:  # List of BranchCondition objects
                branches = []
                for branch in to_node:
                    branch_func = (
                        workflow_def.nodes[branch.to_node].function
                        if branch.to_node in workflow_def.nodes and workflow_def.nodes[branch.to_node].function
                        else branch.to_node
                    )
                    cond = f"lambda ctx: {branch.condition}" if branch.condition else "None"
                    branches.append(f'("{branch_func}", {cond})')
                f.write(f'    .branch([{", ".join(branches)}])\n')

        for conv_node in workflow_def.workflow.convergence_nodes:
            conv_func = (
                workflow_def.nodes[conv_node].function
                if conv_node in workflow_def.nodes and workflow_def.nodes[conv_node].function
                else conv_node
            )
            f.write(f'    .converge("{conv_func}")\n')

        if hasattr(workflow_def, 'observers'):
            for observer in workflow_def.observers:
                f.write(f"    .add_observer({observer})\n")
        f.write(")\n\n")

        # Async entry point seeded with the (possibly inferred) initial context
        f.write("async def main():\n")
        f.write('    """Main function to run the workflow."""\n')
        f.write("    # Customize initial_context as needed\n")
        f.write("    # Inferred required inputs:\n")
        inferred_inputs = list(initial_context.keys())
        f.write(f"    # {', '.join(inferred_inputs) if inferred_inputs else 'None detected'}\n")
        f.write("    initial_context = {\n")
        for key, value in initial_context.items():
            f.write(f"        {repr(key)}: {repr(value)},\n")
        f.write("    }\n")
        f.write("    engine = workflow.build()\n")
        f.write("    result = await engine.run(initial_context)\n")
        f.write('    logger.info(f"Workflow result: {result}")\n\n')

        # Script entry point
        f.write('if __name__ == "__main__":\n')
        f.write("    anyio.run(main)\n")

    # Make the generated script directly executable (required by the shebang)
    os.chmod(output_file, 0o755)
260
-
261
-
262
if __name__ == "__main__":
    # Smoke test: build a tiny workflow in memory and emit it as an
    # executable script next to the current working directory.
    from quantalogic.flow.flow_manager import WorkflowManager

    manager = WorkflowManager()

    # Register the embedded async functions the demo workflow uses.
    demo_functions = [
        ("greet", "async def greet(name): return f'Hello, {name}!'"),
        ("check", "async def check(name): return len(name) > 3"),
        ("end", "async def end(greeting): return f'{greeting} Goodbye!'"),
    ]
    for func_name, func_code in demo_functions:
        manager.add_function(name=func_name, type_="embedded", code=func_code)

    # Wire the nodes: start branches to check, converging on end.
    manager.add_node(name="start", function="greet", output="greeting", inputs_mapping={"name": "user_name"})
    manager.add_node(name="check", function="check", output="condition")
    manager.add_node(name="end", function="end", output="farewell")
    manager.set_start_node("start")
    manager.add_transition(
        from_node="start",
        to_node=[
            BranchCondition(to_node="check", condition="ctx['name'] == 'Alice'")
        ],
    )
    manager.add_convergence_node("end")

    # Generate the standalone script from the assembled definition.
    generate_executable_script(manager.workflow, {"MY_CONSTANT": 42}, "workflow_script.py")