quantalogic 0.35.0__py3-none-any.whl → 0.40.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- quantalogic/__init__.py +0 -4
- quantalogic/agent.py +603 -363
- quantalogic/agent_config.py +233 -46
- quantalogic/agent_factory.py +34 -22
- quantalogic/coding_agent.py +16 -14
- quantalogic/config.py +2 -1
- quantalogic/console_print_events.py +4 -8
- quantalogic/console_print_token.py +2 -2
- quantalogic/docs_cli.py +15 -10
- quantalogic/event_emitter.py +258 -83
- quantalogic/flow/__init__.py +23 -0
- quantalogic/flow/flow.py +595 -0
- quantalogic/flow/flow_extractor.py +672 -0
- quantalogic/flow/flow_generator.py +89 -0
- quantalogic/flow/flow_manager.py +407 -0
- quantalogic/flow/flow_manager_schema.py +169 -0
- quantalogic/flow/flow_yaml.md +419 -0
- quantalogic/generative_model.py +109 -77
- quantalogic/get_model_info.py +5 -5
- quantalogic/interactive_text_editor.py +100 -73
- quantalogic/main.py +17 -21
- quantalogic/model_info_list.py +3 -3
- quantalogic/model_info_litellm.py +14 -14
- quantalogic/prompts.py +2 -1
- quantalogic/{llm.py → quantlitellm.py} +29 -39
- quantalogic/search_agent.py +4 -4
- quantalogic/server/models.py +4 -1
- quantalogic/task_file_reader.py +5 -5
- quantalogic/task_runner.py +20 -20
- quantalogic/tool_manager.py +10 -21
- quantalogic/tools/__init__.py +98 -68
- quantalogic/tools/composio/composio.py +416 -0
- quantalogic/tools/{generate_database_report_tool.py → database/generate_database_report_tool.py} +4 -9
- quantalogic/tools/database/sql_query_tool_advanced.py +261 -0
- quantalogic/tools/document_tools/markdown_to_docx_tool.py +620 -0
- quantalogic/tools/document_tools/markdown_to_epub_tool.py +438 -0
- quantalogic/tools/document_tools/markdown_to_html_tool.py +362 -0
- quantalogic/tools/document_tools/markdown_to_ipynb_tool.py +319 -0
- quantalogic/tools/document_tools/markdown_to_latex_tool.py +420 -0
- quantalogic/tools/document_tools/markdown_to_pdf_tool.py +623 -0
- quantalogic/tools/document_tools/markdown_to_pptx_tool.py +319 -0
- quantalogic/tools/duckduckgo_search_tool.py +2 -4
- quantalogic/tools/finance/alpha_vantage_tool.py +440 -0
- quantalogic/tools/finance/ccxt_tool.py +373 -0
- quantalogic/tools/finance/finance_llm_tool.py +387 -0
- quantalogic/tools/finance/google_finance.py +192 -0
- quantalogic/tools/finance/market_intelligence_tool.py +520 -0
- quantalogic/tools/finance/technical_analysis_tool.py +491 -0
- quantalogic/tools/finance/tradingview_tool.py +336 -0
- quantalogic/tools/finance/yahoo_finance.py +236 -0
- quantalogic/tools/git/bitbucket_clone_repo_tool.py +181 -0
- quantalogic/tools/git/bitbucket_operations_tool.py +326 -0
- quantalogic/tools/git/clone_repo_tool.py +189 -0
- quantalogic/tools/git/git_operations_tool.py +532 -0
- quantalogic/tools/google_packages/google_news_tool.py +480 -0
- quantalogic/tools/grep_app_tool.py +123 -186
- quantalogic/tools/{dalle_e.py → image_generation/dalle_e.py} +37 -27
- quantalogic/tools/jinja_tool.py +6 -10
- quantalogic/tools/language_handlers/__init__.py +22 -9
- quantalogic/tools/list_directory_tool.py +131 -42
- quantalogic/tools/llm_tool.py +45 -15
- quantalogic/tools/llm_vision_tool.py +59 -7
- quantalogic/tools/markitdown_tool.py +17 -5
- quantalogic/tools/nasa_packages/models.py +47 -0
- quantalogic/tools/nasa_packages/nasa_apod_tool.py +232 -0
- quantalogic/tools/nasa_packages/nasa_neows_tool.py +147 -0
- quantalogic/tools/nasa_packages/services.py +82 -0
- quantalogic/tools/presentation_tools/presentation_llm_tool.py +396 -0
- quantalogic/tools/product_hunt/product_hunt_tool.py +258 -0
- quantalogic/tools/product_hunt/services.py +63 -0
- quantalogic/tools/rag_tool/__init__.py +48 -0
- quantalogic/tools/rag_tool/document_metadata.py +15 -0
- quantalogic/tools/rag_tool/query_response.py +20 -0
- quantalogic/tools/rag_tool/rag_tool.py +566 -0
- quantalogic/tools/rag_tool/rag_tool_beta.py +264 -0
- quantalogic/tools/read_html_tool.py +24 -38
- quantalogic/tools/replace_in_file_tool.py +10 -10
- quantalogic/tools/safe_python_interpreter_tool.py +10 -24
- quantalogic/tools/search_definition_names.py +2 -2
- quantalogic/tools/sequence_tool.py +14 -23
- quantalogic/tools/sql_query_tool.py +17 -19
- quantalogic/tools/tool.py +39 -15
- quantalogic/tools/unified_diff_tool.py +1 -1
- quantalogic/tools/utilities/csv_processor_tool.py +234 -0
- quantalogic/tools/utilities/download_file_tool.py +179 -0
- quantalogic/tools/utilities/mermaid_validator_tool.py +661 -0
- quantalogic/tools/utils/__init__.py +1 -4
- quantalogic/tools/utils/create_sample_database.py +24 -38
- quantalogic/tools/utils/generate_database_report.py +74 -82
- quantalogic/tools/wikipedia_search_tool.py +17 -21
- quantalogic/utils/ask_user_validation.py +1 -1
- quantalogic/utils/async_utils.py +35 -0
- quantalogic/utils/check_version.py +3 -5
- quantalogic/utils/get_all_models.py +2 -1
- quantalogic/utils/git_ls.py +21 -7
- quantalogic/utils/lm_studio_model_info.py +9 -7
- quantalogic/utils/python_interpreter.py +113 -43
- quantalogic/utils/xml_utility.py +178 -0
- quantalogic/version_check.py +1 -1
- quantalogic/welcome_message.py +7 -7
- quantalogic/xml_parser.py +0 -1
- {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/METADATA +41 -1
- quantalogic-0.40.0.dist-info/RECORD +148 -0
- quantalogic-0.35.0.dist-info/RECORD +0 -102
- {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/LICENSE +0 -0
- {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/WHEEL +0 -0
- {quantalogic-0.35.0.dist-info → quantalogic-0.40.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,672 @@
|
|
1
|
+
import ast
|
2
|
+
import os
|
3
|
+
|
4
|
+
from loguru import logger
|
5
|
+
|
6
|
+
from quantalogic.flow.flow_manager import WorkflowManager # Added for YAML saving
|
7
|
+
from quantalogic.flow.flow_manager_schema import (
|
8
|
+
FunctionDefinition,
|
9
|
+
NodeDefinition,
|
10
|
+
TransitionDefinition,
|
11
|
+
WorkflowDefinition,
|
12
|
+
WorkflowStructure,
|
13
|
+
)
|
14
|
+
|
15
|
+
|
16
|
+
class WorkflowExtractor(ast.NodeVisitor):
    """
    AST visitor to extract workflow nodes and structure from a Python file.

    This class parses Python source code to identify workflow components defined with Nodes decorators
    and Workflow construction, building a WorkflowDefinition compatible with WorkflowManager.
    """

    # Keyword arguments recognized for @Nodes.llm_node(...).
    _LLM_CONFIG_KEYS = [
        "model",
        "system_prompt",
        "prompt_template",
        "temperature",
        "max_tokens",
        "top_p",
        "presence_penalty",
        "frequency_penalty",
        "output",
    ]
    # @Nodes.structured_llm_node(...) additionally accepts a response_model.
    _STRUCTURED_LLM_CONFIG_KEYS = [*_LLM_CONFIG_KEYS, "response_model"]

    def __init__(self):
        """Initialize the extractor with empty collections for workflow components."""
        self.nodes = {}  # Maps node names to their definitions
        self.functions = {}  # Maps function names to their code
        self.transitions = []  # List of (from_node, to_node, condition) tuples
        self.start_node = None  # Starting node of the workflow
        self.global_vars = {}  # Tracks global variable assignments (e.g., DEFAULT_LLM_PARAMS)
        self.observers = []  # List of observer function names

    def visit_Module(self, node):
        """Log and explicitly process top-level statements in the module."""
        logger.debug(f"Visiting module with {len(node.body)} top-level statements")
        for item in node.body:
            logger.debug(f"Processing top-level node: {type(item).__name__}")
            if isinstance(item, ast.FunctionDef):
                self.visit_FunctionDef(item)
            elif isinstance(item, ast.AsyncFunctionDef):
                self.visit_AsyncFunctionDef(item)
            else:
                self.visit(item)

    def visit_Assign(self, node):
        """Detect global variable assignments and workflow assignments."""
        if len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
            var_name = node.targets[0].id
            value = node.value

            # Handle global dict assignments (e.g., DEFAULT_LLM_PARAMS = {...})
            if isinstance(value, ast.Dict):
                self.global_vars[var_name] = {}
                for k, v in zip(value.keys, value.values):
                    if isinstance(k, ast.Constant):
                        key = k.value
                        if isinstance(v, ast.Constant):
                            self.global_vars[var_name][key] = v.value
                        elif isinstance(v, ast.Name) and v.id in self.global_vars:
                            # Resolve variable references to previously defined globals
                            self.global_vars[var_name][key] = self.global_vars[v.id]
                logger.debug(
                    f"Captured global variable '{var_name}' with keys: {list(self.global_vars[var_name].keys())}"
                )

            # Handle simple constant assignments (e.g., MODEL = "gemini/gemini-2.0-flash")
            elif isinstance(value, ast.Constant):
                self.global_vars[var_name] = value.value
                logger.debug(f"Captured global constant '{var_name}' with value: {value.value}")

            # Handle workflow assignments, including parenthesized expressions
            if isinstance(value, ast.Tuple) and len(value.elts) == 1:
                value = value.elts[0]  # Unwrap single-element tuple from parentheses
            if isinstance(value, ast.Call):
                self.process_workflow_expr(value, var_name)

        self.generic_visit(node)

    def _parse_nodes_decorator(self, decorator, func_label):
        """
        Identify a ``Nodes.*`` decorator and collect its keyword arguments.

        Args:
            decorator: The decorator AST node to inspect.
            func_label: Name of the decorated function (used for logging only).

        Returns:
            tuple: (decorator_name or None, dict of collected keyword arguments).
        """
        kwargs = {}

        # Simple attribute form (e.g., @Nodes.define)
        if (
            isinstance(decorator, ast.Attribute)
            and isinstance(decorator.value, ast.Name)
            and decorator.value.id == "Nodes"
        ):
            logger.debug(f"Found simple decorator 'Nodes.{decorator.attr}' for '{func_label}'")
            return decorator.attr, kwargs

        # Call form with arguments (e.g., @Nodes.llm_node(model=..., **DEFAULT_LLM_PARAMS))
        if (
            isinstance(decorator, ast.Call)
            and isinstance(decorator.func, ast.Attribute)
            and isinstance(decorator.func.value, ast.Name)
            and decorator.func.value.id == "Nodes"
        ):
            decorator_name = decorator.func.attr
            logger.debug(f"Found call decorator 'Nodes.{decorator_name}' for '{func_label}'")
            for kw in decorator.keywords:
                if kw.arg is None and isinstance(kw.value, ast.Name):  # Handle **kwargs expansion
                    var_name = kw.value.id
                    if var_name in self.global_vars:
                        kwargs.update(self.global_vars[var_name])
                        logger.debug(f"Unpacked '{var_name}' into kwargs: {self.global_vars[var_name]}")
                elif isinstance(kw.value, ast.Constant):
                    kwargs[kw.arg] = kw.value.value
                elif kw.arg == "response_model" and isinstance(kw.value, ast.Name):
                    # Keep the model class reference as source text.
                    kwargs[kw.arg] = ast.unparse(kw.value)
            return decorator_name, kwargs

        return None, kwargs

    def _register_decorated_function(self, node):
        """
        Shared handler for sync and async function definitions.

        Registers any Nodes-decorated function as a workflow node and stores
        its source code for embedding in generated scripts.
        """
        for decorator in node.decorator_list:
            logger.debug(f"Examining decorator for '{node.name}': {ast.dump(decorator)}")
            decorator_name, kwargs = self._parse_nodes_decorator(decorator, node.name)

            if decorator_name:
                func_name = node.name
                inputs = [arg.arg for arg in node.args.args]

                if decorator_name in ("define", "validate_node"):
                    # Both decorators produce an identical plain-function node.
                    self.nodes[func_name] = {
                        "type": "function",
                        "function": func_name,
                        "inputs": inputs,
                        "output": kwargs.get("output"),
                    }
                elif decorator_name == "llm_node":
                    llm_config = {key: value for key, value in kwargs.items() if key in self._LLM_CONFIG_KEYS}
                    self.nodes[func_name] = {
                        "type": "llm",
                        "llm_config": llm_config,
                        "inputs": inputs,
                        "output": llm_config.get("output"),
                    }
                elif decorator_name == "structured_llm_node":
                    llm_config = {
                        key: value for key, value in kwargs.items() if key in self._STRUCTURED_LLM_CONFIG_KEYS
                    }
                    self.nodes[func_name] = {
                        "type": "structured_llm",
                        "llm_config": llm_config,
                        "inputs": inputs,
                        "output": llm_config.get("output"),
                    }
                else:
                    logger.warning(f"Unsupported decorator 'Nodes.{decorator_name}' in function '{func_name}'")

                # Store the function code as embedded (even for unsupported decorators,
                # matching previous behavior).
                self.functions[func_name] = {
                    "type": "embedded",
                    "code": ast.unparse(node),
                }
            else:
                logger.debug(f"No recognized 'Nodes' decorator found for '{node.name}'")

        self.generic_visit(node)

    def visit_FunctionDef(self, node):
        """Extract node information from synchronous function definitions."""
        logger.debug(f"Visiting synchronous function definition: '{node.name}'")
        self._register_decorated_function(node)

    def visit_AsyncFunctionDef(self, node):
        """Extract node information from asynchronous function definitions."""
        logger.debug(f"Visiting asynchronous function definition: '{node.name}'")
        self._register_decorated_function(node)

    def process_workflow_expr(self, expr, var_name):
        """
        Recursively process Workflow method chaining to build transitions, structure, and observers.

        Args:
            expr: The AST expression to process.
            var_name: The variable name to which the workflow is assigned (for logging/context).

        Returns:
            str or None: The current node name or None if no specific node is returned.
        """
        if not isinstance(expr, ast.Call):
            logger.debug(f"Skipping non-Call node in workflow processing for '{var_name}'")
            return None

        func = expr.func
        logger.debug(f"Processing Call node with func type: {type(func).__name__} for '{var_name}'")

        if isinstance(func, ast.Name) and func.id == "Workflow":
            self.start_node = expr.args[0].value if expr.args else None
            logger.debug(f"Workflow start node set to '{self.start_node}' for variable '{var_name}'")
            return self.start_node
        elif isinstance(func, ast.Attribute):
            method_name = func.attr
            obj = func.value
            # Walk down the chain first so `previous_node` is the node this call follows.
            previous_node = self.process_workflow_expr(obj, var_name)

            if method_name == "then":
                next_node = expr.args[0].value if expr.args else None
                condition = None
                for keyword in expr.keywords:
                    if keyword.arg == "condition":
                        condition = ast.unparse(keyword.value)
                        if not isinstance(keyword.value, ast.Lambda):
                            logger.warning(
                                f"Non-lambda condition in 'then' for '{next_node}' may not be fully supported"
                            )
                if previous_node and next_node:
                    self.transitions.append((previous_node, next_node, condition))
                    logger.debug(f"Added transition: {previous_node} -> {next_node} (condition: {condition})")
                return next_node

            elif method_name == "sequence":
                nodes = [arg.value for arg in expr.args]
                # Guard against an empty sequence to avoid an IndexError on nodes[0].
                if previous_node and nodes:
                    self.transitions.append((previous_node, nodes[0], None))
                for i in range(len(nodes) - 1):
                    self.transitions.append((nodes[i], nodes[i + 1], None))
                    logger.debug(f"Added sequence transition: {nodes[i]} -> {nodes[i + 1]}")
                return nodes[-1] if nodes else previous_node

            elif method_name == "parallel":
                to_nodes = [arg.value for arg in expr.args]
                if previous_node:
                    for to_node in to_nodes:
                        self.transitions.append((previous_node, to_node, None))
                        logger.debug(f"Added parallel transition: {previous_node} -> {to_node}")
                return None  # Parallel transitions reset the current node

            elif method_name == "node":
                node_name = expr.args[0].value if expr.args else None
                if node_name and previous_node:
                    self.transitions.append((previous_node, node_name, None))
                    logger.debug(f"Added node transition: {previous_node} -> {node_name}")
                return node_name

            elif method_name == "add_sub_workflow":
                sub_wf_name = expr.args[0].value
                sub_wf_obj = expr.args[1]
                inputs = {}
                if len(expr.args) > 2 and isinstance(expr.args[2], ast.Dict):
                    inputs = {k.value: v.value for k, v in zip(expr.args[2].keys, expr.args[2].values)}
                # NOTE(review): output is read positionally (args[3]); verify callers never
                # pass it as a keyword argument.
                output = expr.args[3].value if len(expr.args) > 3 else None
                sub_extractor = WorkflowExtractor()
                sub_extractor.process_workflow_expr(sub_wf_obj, f"{var_name}_{sub_wf_name}")
                self.nodes[sub_wf_name] = {
                    "type": "sub_workflow",
                    "sub_workflow": WorkflowStructure(
                        start=sub_extractor.start_node,
                        transitions=[
                            TransitionDefinition(from_=t[0], to=t[1], condition=t[2])
                            for t in sub_extractor.transitions
                        ],
                    ),
                    "inputs": list(inputs.keys()),
                    "output": output,
                }
                # Propagate observers from sub-workflow
                self.observers.extend(sub_extractor.observers)
                logger.debug(f"Added sub-workflow node '{sub_wf_name}' with start '{sub_extractor.start_node}'")
                if previous_node:
                    self.transitions.append((previous_node, sub_wf_name, None))
                return sub_wf_name

            elif method_name == "add_observer":
                if expr.args and isinstance(expr.args[0], (ast.Name, ast.Constant)):
                    observer_name = expr.args[0].id if isinstance(expr.args[0], ast.Name) else expr.args[0].value
                    if observer_name not in self.observers:
                        self.observers.append(observer_name)
                        logger.debug(f"Added observer '{observer_name}' to workflow '{var_name}'")
                else:
                    logger.warning(f"Unsupported observer argument in 'add_observer' for '{var_name}'")
                return previous_node

            else:
                logger.warning(f"Unsupported Workflow method '{method_name}' in variable '{var_name}'")
        return None
|
425
|
+
|
426
|
+
|
427
|
+
def extract_workflow_from_file(file_path):
    """
    Extract a WorkflowDefinition and global variables from a Python file containing a workflow.

    Args:
        file_path (str): Path to the Python file to parse.

    Returns:
        tuple: (WorkflowDefinition, Dict[str, Any]) - The workflow definition and captured global variables.
    """
    # Read and parse the file
    with open(file_path) as f:
        source = f.read()
    tree = ast.parse(source)

    # Extract workflow components
    extractor = WorkflowExtractor()
    extractor.visit(tree)

    # Construct FunctionDefinition objects
    functions = {name: FunctionDefinition(**func) for name, func in extractor.functions.items()}

    # Construct NodeDefinition objects
    nodes = {}
    from quantalogic.flow.flow_manager_schema import LLMConfig  # Import LLMConfig explicitly

    # Shared defaults applied to every generated NodeDefinition.
    node_defaults = {"retries": 3, "delay": 1.0, "timeout": None, "parallel": False}

    for name, node_info in extractor.nodes.items():
        if node_info["type"] == "function":
            nodes[name] = NodeDefinition(
                function=node_info["function"],
                output=node_info["output"],
                **node_defaults,
            )
        elif node_info["type"] in ("llm", "structured_llm"):
            # Both LLM variants carry an llm_config; convert the dict to an
            # LLMConfig object so model (and response_model, when present) survive.
            llm_config = LLMConfig(**node_info["llm_config"])
            nodes[name] = NodeDefinition(
                llm_config=llm_config,
                output=node_info["output"],
                **node_defaults,
            )
        elif node_info["type"] == "sub_workflow":
            nodes[name] = NodeDefinition(
                sub_workflow=node_info["sub_workflow"],
                output=node_info["output"],
                **node_defaults,
            )

    # Construct TransitionDefinition objects ("from" is a keyword, hence the dict splat)
    transitions = [
        TransitionDefinition(**{"from": from_node, "to": to_node, "condition": cond})
        for from_node, to_node, cond in extractor.transitions
    ]

    # Build WorkflowStructure
    workflow_structure = WorkflowStructure(start=extractor.start_node, transitions=transitions)

    # Assemble WorkflowDefinition with observers
    workflow_def = WorkflowDefinition(
        functions=functions, nodes=nodes, workflow=workflow_structure, observers=extractor.observers
    )

    return workflow_def, extractor.global_vars
|
510
|
+
|
511
|
+
|
512
|
+
def generate_executable_script(workflow_def: WorkflowDefinition, global_vars: dict, output_file: str) -> None:
    """
    Generate an executable Python script from a WorkflowDefinition with global variables.

    Args:
        workflow_def: The WorkflowDefinition object containing the workflow details.
        global_vars: Dictionary of global variables extracted from the source file.
        output_file: The path where the executable script will be written.

    The generated script includes:
    - A shebang using `uv run` for environment management.
    - Metadata specifying the required Python version and dependencies.
    - Global variables from the original script.
    - Embedded functions included directly in the script.
    - Workflow instantiation using direct chaining syntax.
    - A default initial_context matching the example.
    """
    with open(output_file, "w") as f:
        # Write the shebang and inline PEP 723 script metadata for `uv run`.
        f.write("#!/usr/bin/env -S uv run\n")
        f.write("# /// script\n")
        f.write('# requires-python = ">=3.12"\n')
        f.write("# dependencies = [\n")
        f.write('# "loguru",\n')
        f.write('# "litellm",\n')
        f.write('# "pydantic>=2.0",\n')
        f.write('# "anyio",\n')
        f.write('# "quantalogic>=0.35",\n')
        f.write('# "jinja2",\n')
        f.write('# "instructor[litellm]",\n')  # Kept for potential structured LLM support
        f.write("# ]\n")
        f.write("# ///\n\n")

        # Write necessary imports for the generated script.
        f.write("import anyio\n")
        f.write("from typing import List\n")
        f.write("from loguru import logger\n")
        f.write("from quantalogic.flow import Nodes, Workflow\n\n")

        # Re-emit captured global variables via repr() so constants round-trip.
        for var_name, value in global_vars.items():
            f.write(f"{var_name} = {repr(value)}\n")
        f.write("\n")

        # Embed the source of every "embedded" function from workflow_def.
        for func_name, func_def in workflow_def.functions.items():
            if func_def.type == "embedded":
                f.write(func_def.code + "\n\n")

        # Define workflow using chaining syntax.
        f.write("# Define the workflow using simplified syntax with automatic node registration\n")
        f.write("workflow = (\n")
        f.write(f' Workflow("{workflow_def.workflow.start}")\n')
        for trans in workflow_def.workflow.transitions:
            # NOTE(review): from_node is assigned but never emitted — .then() chains
            # from the previous node, so this assumes transitions are already in
            # chain order. Verify against the extractor's transition ordering.
            from_node = trans.from_
            to_node = trans.to
            condition = trans.condition or "None"
            if condition != "None":
                # Ensure condition is formatted as a lambda if not already
                if not condition.startswith("lambda ctx:"):
                    condition = f"lambda ctx: {condition}"
            f.write(f' .then("{to_node}", condition={condition})\n')
        for observer in workflow_def.observers:
            # Observers are emitted as bare names; they must exist in the embedded code.
            f.write(f" .add_observer({observer})\n")
        f.write(")\n\n")

        # Main asynchronous function to run the workflow.
        # NOTE(review): the initial_context below is hard-coded for the story
        # generator example; customize for other workflows.
        f.write("async def main():\n")
        f.write(' """Main function to run the story generation workflow."""\n')
        f.write(" initial_context = {\n")
        f.write(' "genre": "science fiction",\n')
        f.write(' "num_chapters": 3,\n')
        f.write(' "chapters": [],\n')
        f.write(' "completed_chapters": 0,\n')
        f.write(' "style": "descriptive"\n')
        f.write(" } # Customize initial_context as needed\n")
        f.write(" engine = workflow.build()\n")
        f.write(" result = await engine.run(initial_context)\n")
        f.write(' logger.info(f"Workflow result: {result}")\n\n')

        # Entry point to execute the main function.
        f.write('if __name__ == "__main__":\n')
        f.write(" anyio.run(main)\n")

    # Set executable permissions (rwxr-xr-x) so the script can run directly.
    os.chmod(output_file, 0o755)
|
598
|
+
|
599
|
+
|
600
|
+
def print_workflow_definition(workflow_def):
    """
    Utility function to print a WorkflowDefinition in a human-readable format.

    Args:
        workflow_def (WorkflowDefinition): The workflow definition to print.
    """
    print("### Workflow Definition ###")

    # Functions section: type plus a truncated first line of each embedded body.
    print("\n#### Functions:")
    for name, func in workflow_def.functions.items():
        print(f"- {name}:")
        print(f" Type: {func.type}")
        print(f" Code (first line): {func.code.splitlines()[0][:50]}..." if func.code else " Code: None")

    # Nodes section: classify each node by which optional field is populated.
    print("\n#### Nodes:")
    for name, node in workflow_def.nodes.items():
        print(f"- {name}:")
        if node.function:
            print(" Type: Function")
            print(f" Function: {node.function}")
        elif node.llm_config:
            # A response_model distinguishes structured LLM nodes from plain ones.
            if node.llm_config.response_model:
                print(" Type: Structured LLM")
                print(f" Response Model: {node.llm_config.response_model}")
            else:
                print(" Type: LLM")
            print(f" Model: {node.llm_config.model}")
            print(f" Prompt Template: {node.llm_config.prompt_template}")
        elif node.sub_workflow:
            print(" Type: Sub-Workflow")
            print(f" Start Node: {node.sub_workflow.start}")
        print(f" Output: {node.output or 'None'}")

    # Workflow structure: start node and every transition (fan-out lists expanded).
    print("\n#### Workflow Structure:")
    print(f"Start Node: {workflow_def.workflow.start}")
    print("Transitions:")
    for trans in workflow_def.workflow.transitions:
        condition_str = f" [Condition: {trans.condition}]" if trans.condition else ""
        if isinstance(trans.to, list):
            for to_node in trans.to:
                print(f"- {trans.from_} -> {to_node}{condition_str}")
        else:
            print(f"- {trans.from_} -> {trans.to}{condition_str}")

    print("\n#### Observers:")
    for observer in workflow_def.observers:
        print(f"- {observer}")
|
647
|
+
|
648
|
+
|
649
|
+
def main():
    """Demonstrate parsing the story_generator_agent.py workflow and saving it to YAML."""
    from quantalogic.flow.flow_generator import generate_executable_script  # Ensure correct import

    source_path = "examples/qflow/story_generator_agent.py"
    script_target = "./story_generator.py"
    yaml_target = "story_generator_workflow.yaml"  # Output YAML file path

    try:
        definition, extracted_globals = extract_workflow_from_file(source_path)
        logger.info(f"Successfully extracted workflow from '{source_path}'")
        print_workflow_definition(definition)
        generate_executable_script(definition, extracted_globals, script_target)
        # Persist the extracted workflow as YAML alongside the generated script.
        WorkflowManager(definition).save_to_yaml(yaml_target)
        logger.info(f"Workflow saved to YAML file '{yaml_target}'")
    except FileNotFoundError:
        logger.error(f"File '{source_path}' not found. Please ensure it exists in the specified directory.")
    except Exception as e:
        logger.error(f"Failed to parse or save workflow from '{source_path}': {e}")
|
669
|
+
|
670
|
+
|
671
|
+
# Script entry point: run the extraction demo when executed directly.
if __name__ == "__main__":
    main()
|