quantalogic 0.80__py3-none-any.whl → 0.93__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. quantalogic/flow/__init__.py +16 -34
  2. quantalogic/main.py +11 -6
  3. quantalogic/tools/tool.py +8 -922
  4. quantalogic-0.93.dist-info/METADATA +475 -0
  5. {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/RECORD +8 -54
  6. quantalogic/codeact/TODO.md +0 -14
  7. quantalogic/codeact/__init__.py +0 -0
  8. quantalogic/codeact/agent.py +0 -478
  9. quantalogic/codeact/cli.py +0 -50
  10. quantalogic/codeact/cli_commands/__init__.py +0 -0
  11. quantalogic/codeact/cli_commands/create_toolbox.py +0 -45
  12. quantalogic/codeact/cli_commands/install_toolbox.py +0 -20
  13. quantalogic/codeact/cli_commands/list_executor.py +0 -15
  14. quantalogic/codeact/cli_commands/list_reasoners.py +0 -15
  15. quantalogic/codeact/cli_commands/list_toolboxes.py +0 -47
  16. quantalogic/codeact/cli_commands/task.py +0 -215
  17. quantalogic/codeact/cli_commands/tool_info.py +0 -24
  18. quantalogic/codeact/cli_commands/uninstall_toolbox.py +0 -43
  19. quantalogic/codeact/config.yaml +0 -21
  20. quantalogic/codeact/constants.py +0 -9
  21. quantalogic/codeact/events.py +0 -85
  22. quantalogic/codeact/examples/README.md +0 -342
  23. quantalogic/codeact/examples/agent_sample.yaml +0 -29
  24. quantalogic/codeact/executor.py +0 -186
  25. quantalogic/codeact/history_manager.py +0 -94
  26. quantalogic/codeact/llm_util.py +0 -57
  27. quantalogic/codeact/plugin_manager.py +0 -92
  28. quantalogic/codeact/prompts/error_format.j2 +0 -11
  29. quantalogic/codeact/prompts/generate_action.j2 +0 -77
  30. quantalogic/codeact/prompts/generate_program.j2 +0 -52
  31. quantalogic/codeact/prompts/response_format.j2 +0 -11
  32. quantalogic/codeact/react_agent.py +0 -318
  33. quantalogic/codeact/reasoner.py +0 -185
  34. quantalogic/codeact/templates/toolbox/README.md.j2 +0 -10
  35. quantalogic/codeact/templates/toolbox/pyproject.toml.j2 +0 -16
  36. quantalogic/codeact/templates/toolbox/tools.py.j2 +0 -6
  37. quantalogic/codeact/templates.py +0 -7
  38. quantalogic/codeact/tools_manager.py +0 -258
  39. quantalogic/codeact/utils.py +0 -62
  40. quantalogic/codeact/xml_utils.py +0 -126
  41. quantalogic/flow/flow.py +0 -1070
  42. quantalogic/flow/flow_extractor.py +0 -783
  43. quantalogic/flow/flow_generator.py +0 -322
  44. quantalogic/flow/flow_manager.py +0 -676
  45. quantalogic/flow/flow_manager_schema.py +0 -287
  46. quantalogic/flow/flow_mermaid.py +0 -365
  47. quantalogic/flow/flow_validator.py +0 -479
  48. quantalogic/flow/flow_yaml.linkedin.md +0 -31
  49. quantalogic/flow/flow_yaml.md +0 -767
  50. quantalogic/flow/templates/prompt_check_inventory.j2 +0 -1
  51. quantalogic/flow/templates/system_check_inventory.j2 +0 -1
  52. quantalogic-0.80.dist-info/METADATA +0 -900
  53. {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/LICENSE +0 -0
  54. {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/WHEEL +0 -0
  55. {quantalogic-0.80.dist-info → quantalogic-0.93.dist-info}/entry_points.txt +0 -0
@@ -1,479 +0,0 @@
1
- import ast
2
- import re
3
- from collections import defaultdict
4
- from typing import Dict, List, Optional, Set, Union
5
-
6
- from pydantic import BaseModel
7
-
8
- from quantalogic.flow.flow_manager import WorkflowManager
9
- from quantalogic.flow.flow_manager_schema import (
10
- BranchCondition,
11
- NodeDefinition,
12
- TransitionDefinition,
13
- WorkflowDefinition,
14
- WorkflowStructure,
15
- )
16
-
17
-
18
class NodeError(BaseModel):
    """Represents an error associated with a specific node or workflow component.

    Instances are collected by the validators below and reported to the user;
    field validation is handled by pydantic's BaseModel.
    """
    # None when the error applies to the workflow as a whole rather than one node.
    node_name: Optional[str] = None
    # Human-readable explanation of the problem.
    description: str
22
-
23
-
24
def get_function_params(code: str, func_name: str) -> List[str]:
    """Extract parameter names from an embedded function's code.

    Args:
        code: Python source expected to contain a ``def`` (or ``async def``)
            named *func_name*.
        func_name: Name of the function whose parameters are wanted.

    Returns:
        Parameter names in declaration order: positional-only, then regular,
        then keyword-only. ``*args``/``**kwargs`` are excluded, since workflow
        inputs are matched by explicit name.

    Raises:
        ValueError: If *code* does not parse, or no matching function exists.
    """
    try:
        tree = ast.parse(code)
    except SyntaxError as e:
        # Chain the original SyntaxError so callers can see where parsing failed.
        raise ValueError(f"Invalid syntax in code: {e}") from e
    for node in ast.walk(tree):
        # Accept both sync and async function definitions.
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) and node.name == func_name:
            args = node.args
            # Include positional-only and keyword-only parameters, which the
            # previous implementation silently dropped.
            return [a.arg for a in args.posonlyargs + args.args + args.kwonlyargs]
    raise ValueError(f"Function '{func_name}' not found in code")
34
-
35
-
36
def validate_workflow_definition(workflow_def: WorkflowDefinition) -> List[NodeError]:
    """Validate a workflow definition and return a list of NodeError objects.

    Checks performed:
      * function definitions carry the fields their type requires
      * nodes reference defined functions and use valid, unique output names
      * llm_config / template_config blocks are well-formed
      * structural validity and circular transitions (delegated to helpers)
      * every required input of a node is produced by some ancestor node,
        taking inputs_mapping and sub-workflows into account
      * observers and convergence nodes are defined and sensible

    Args:
        workflow_def: Complete workflow definition to check.

    Returns:
        List of NodeError records; empty when the definition is clean.
    """
    issues: List[NodeError] = []
    output_names: Set[str] = set()

    # Validate function definitions: embedded functions need inline code,
    # external ones need a module + function reference.
    for name, func_def in workflow_def.functions.items():
        if func_def.type == "embedded" and not func_def.code:
            issues.append(NodeError(node_name=None, description=f"Embedded function '{name}' is missing 'code'"))
        elif func_def.type == "external" and (not func_def.module or not func_def.function):
            issues.append(NodeError(node_name=None, description=f"External function '{name}' is missing 'module' or 'function'"))

    # Validate nodes
    for name, node_def in workflow_def.nodes.items():
        if node_def.function and node_def.function not in workflow_def.functions:
            issues.append(NodeError(node_name=name, description=f"References undefined function '{node_def.function}'"))

        if node_def.output:
            if not node_def.output.isidentifier():
                issues.append(NodeError(node_name=name, description=f"Has invalid output name '{node_def.output}'"))
            elif node_def.output in output_names:
                issues.append(NodeError(node_name=name, description=f"Has duplicate output name '{node_def.output}'"))
            output_names.add(node_def.output)

        if node_def.sub_workflow:
            # Recurse into the sub-workflow; prefix sub-node names with the
            # parent node so reported errors are unambiguous.
            sub_issues = validate_workflow_structure(node_def.sub_workflow, workflow_def.nodes)
            issues.extend(
                NodeError(node_name=f"{name}/{issue.node_name}" if issue.node_name else name, description=issue.description)
                for issue in sub_issues
            )

        if node_def.llm_config:
            llm = node_def.llm_config
            if not llm.model:
                issues.append(NodeError(node_name=name, description="Missing 'model' in llm_config"))
            if not llm.prompt_template and not llm.prompt_file:
                issues.append(NodeError(node_name=name, description="Missing 'prompt_template' or 'prompt_file' in llm_config"))
            # NOTE(review): temperature is restricted to [0, 1] here; some LLM
            # APIs accept up to 2.0 — confirm the intended bound.
            if llm.temperature < 0 or llm.temperature > 1:
                issues.append(NodeError(node_name=name, description=f"Has invalid temperature: {llm.temperature}"))

        if node_def.template_config:
            template = node_def.template_config
            if not template.template and not template.template_file:
                issues.append(NodeError(node_name=name, description="Missing 'template' or 'template_file' in template_config"))

    # Validate main workflow structure
    issues.extend(validate_workflow_structure(workflow_def.workflow, workflow_def.nodes, is_main=True))
    issues.extend(check_circular_transitions(workflow_def))

    # Build the unified graph for main workflow and sub-workflows so ancestry
    # can be computed over both levels at once.
    successors = defaultdict(list)
    predecessors = defaultdict(list)
    all_nodes = set(workflow_def.nodes.keys())

    # Add main workflow transitions
    for trans in workflow_def.workflow.transitions:
        from_node = trans.from_node
        # to_node may be a plain string, or a list mixing strings and BranchCondition.
        to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else [tn if isinstance(tn, str) else tn.to_node for tn in trans.to_node]
        for to_node in to_nodes:
            successors[from_node].append(to_node)
            predecessors[to_node].append(from_node)
            all_nodes.add(to_node)

    # Add sub-workflow transitions with namespaced ("parent/child") node names
    for parent_name, node_def in workflow_def.nodes.items():
        if node_def.sub_workflow:
            for trans in node_def.sub_workflow.transitions:
                from_node = f"{parent_name}/{trans.from_node}"
                to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else [tn if isinstance(tn, str) else tn.to_node for tn in trans.to_node]
                namespaced_to_nodes = [f"{parent_name}/{to_node}" for to_node in to_nodes]
                all_nodes.add(from_node)
                all_nodes.update(namespaced_to_nodes)
                successors[from_node].extend(namespaced_to_nodes)
                for to_node in namespaced_to_nodes:
                    predecessors[to_node].append(from_node)

    # Define function to get ancestors, handling cycles with a visited set.
    def get_ancestors(node: str, visited: Optional[Set[str]] = None) -> Set[str]:
        """Return all transitive predecessors of *node* in the unified graph."""
        if visited is None:
            visited = set()
        if node in visited or node not in all_nodes:
            return set()
        visited.add(node)
        ancestors = set(predecessors[node])
        for pred in predecessors[node]:
            # visited.copy() keeps each branch's cycle guard independent.
            ancestors.update(get_ancestors(pred, visited.copy()))
        return ancestors

    # Create output-to-node mapping, including sub-workflow nodes.
    output_to_node = {}
    for node_name, node_def in workflow_def.nodes.items():
        if node_def.output:
            output_to_node[node_def.output] = node_name
        if node_def.sub_workflow:
            # NOTE(review): this iterates ALL workflow nodes as if they were
            # sub-nodes of this parent (sub-workflow nodes live in the same
            # nodes dict), which re-maps every output to a namespaced name.
            # Confirm against the intended sub-workflow node ownership model.
            for sub_node_name, sub_node_def in workflow_def.nodes.items():
                if sub_node_def.output:
                    output_to_node[sub_node_def.output] = f"{node_name}/{sub_node_name}"

    # Check each node's inputs against ancestors' outputs, including sub-workflows.
    for node_name, node_def in workflow_def.nodes.items():
        required_inputs = set()
        full_node_name = node_name

        # Handle inputs_mapping: each mapping is either a "lambda ctx:" snippet
        # (syntax-checked here) or a bare context key (must be an identifier).
        if node_def.inputs_mapping:
            for input_name, mapping in node_def.inputs_mapping.items():
                if mapping.startswith("lambda ctx:"):
                    try:
                        # Basic syntax check for lambda
                        compile(mapping, "<string>", "eval")
                    except SyntaxError:
                        issues.append(NodeError(
                            node_name=node_name,
                            description=f"Invalid lambda expression in inputs_mapping for '{input_name}': {mapping}"
                        ))
                elif not mapping.isidentifier():
                    issues.append(NodeError(
                        node_name=node_name,
                        description=f"Invalid context key in inputs_mapping for '{input_name}': {mapping}"
                    ))

        if node_def.function:
            maybe_func_def = workflow_def.functions.get(node_def.function)
            if maybe_func_def is None:
                issues.append(NodeError(
                    node_name=node_name,
                    description=f"Function '{node_def.function}' not found in workflow functions"
                ))
            else:
                func_def = maybe_func_def
                if func_def.type == "embedded" and func_def.code:
                    try:
                        # A function node's required inputs are its parameter names.
                        params = get_function_params(func_def.code, node_def.function)
                        required_inputs = set(params)
                    except ValueError as e:
                        issues.append(NodeError(node_name=node_name, description=f"Failed to parse function '{node_def.function}': {e}"))
        elif node_def.llm_config:
            # An LLM node's required inputs are the {{ var }} placeholders of
            # its prompt template; arithmetic like {{ a + b }} keeps only the
            # leading identifier.
            prompt_template = node_def.llm_config.prompt_template or ""
            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
            cleaned_inputs = set()
            for var in input_vars:
                base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
                if base_var.isidentifier():
                    cleaned_inputs.add(base_var)
            required_inputs = cleaned_inputs
        elif node_def.template_config:
            # Template nodes work exactly like LLM prompts for input extraction.
            template = node_def.template_config.template or ""
            input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", template))
            cleaned_inputs = set()
            for var in input_vars:
                base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
                if base_var.isidentifier():
                    cleaned_inputs.add(base_var)
            required_inputs = cleaned_inputs
        elif node_def.sub_workflow:
            # NOTE(review): as above, this treats every workflow node as a
            # sub-node of this parent; confirm the intended scoping.
            for sub_node_name, sub_node_def in workflow_def.nodes.items():
                full_node_name = f"{node_name}/{sub_node_name}"
                if sub_node_def.function:
                    maybe_func_def = workflow_def.functions.get(sub_node_def.function)
                    if maybe_func_def is None:
                        issues.append(NodeError(
                            node_name=full_node_name,
                            description=f"Function '{sub_node_def.function}' not found in workflow functions"
                        ))
                    else:
                        func_def = maybe_func_def
                        if func_def.type == "embedded" and func_def.code:
                            try:
                                params = get_function_params(func_def.code, sub_node_def.function)
                                required_inputs = set(params)
                            except ValueError as e:
                                issues.append(NodeError(
                                    node_name=full_node_name,
                                    description=f"Failed to parse function '{sub_node_def.function}': {e}"
                                ))
                elif sub_node_def.llm_config:
                    prompt_template = sub_node_def.llm_config.prompt_template or ""
                    input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", prompt_template))
                    cleaned_inputs = set()
                    for var in input_vars:
                        base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
                        if base_var.isidentifier():
                            cleaned_inputs.add(base_var)
                    required_inputs = cleaned_inputs
                elif sub_node_def.template_config:
                    template = sub_node_def.template_config.template or ""
                    input_vars = set(re.findall(r"{{\s*([^}]+?)\s*}}", template))
                    cleaned_inputs = set()
                    for var in input_vars:
                        base_var = re.split(r"\s*[\+\-\*/]\s*", var.strip())[0].strip()
                        if base_var.isidentifier():
                            cleaned_inputs.add(base_var)
                    required_inputs = cleaned_inputs

                # Per-sub-node ancestry check against the namespaced graph.
                # NOTE(review): required_inputs is not reset between sub-node
                # iterations, so a sub-node with no inputs of its own may
                # inherit the previous sub-node's set — confirm intent.
                if required_inputs:
                    ancestors = get_ancestors(full_node_name)
                    for input_name in required_inputs:
                        # Check if input is mapped
                        if node_def.inputs_mapping and input_name in node_def.inputs_mapping:
                            mapping = node_def.inputs_mapping[input_name]
                            if not mapping.startswith("lambda ctx:") and mapping in output_to_node:
                                producer_node = output_to_node.get(mapping)
                                if producer_node not in ancestors:
                                    issues.append(NodeError(
                                        node_name=full_node_name,
                                        description=f"inputs_mapping for '{input_name}' maps to '{mapping}', but it is not produced by an ancestor"
                                    ))
                            continue
                        producer_node = output_to_node.get(input_name)
                        if producer_node is None or producer_node not in ancestors:
                            issues.append(NodeError(
                                node_name=full_node_name,
                                description=f"Requires input '{input_name}', but it is not produced by any ancestor"
                            ))
            # Sub-workflow nodes are fully handled above; skip the node-level check.
            continue

        if not required_inputs:
            continue

        # Node-level ancestry check: every required input must be produced by
        # an ancestor, either directly or via an explicit inputs_mapping entry.
        ancestors = get_ancestors(full_node_name)
        for input_name in required_inputs:
            # Check if input is mapped
            if node_def.inputs_mapping and input_name in node_def.inputs_mapping:
                mapping = node_def.inputs_mapping[input_name]
                if not mapping.startswith("lambda ctx:") and mapping in output_to_node:
                    producer_node = output_to_node.get(mapping)
                    if producer_node not in ancestors:
                        issues.append(NodeError(
                            node_name=full_node_name,
                            description=f"inputs_mapping for '{input_name}' maps to '{mapping}', but it is not produced by an ancestor"
                        ))
                # Mapped inputs (lambda or context key) are considered handled.
                continue
            producer_node = output_to_node.get(input_name)
            if producer_node is None or producer_node not in ancestors:
                issues.append(NodeError(
                    node_name=full_node_name,
                    description=f"Requires input '{input_name}', but it is not produced by any ancestor"
                ))

    # Validate observers: each must name a defined function.
    for observer in workflow_def.observers:
        if observer not in workflow_def.functions:
            issues.append(NodeError(node_name=None, description=f"Observer '{observer}' references undefined function"))

    # Validate convergence nodes
    for conv_node in workflow_def.workflow.convergence_nodes:
        if conv_node not in workflow_def.nodes:
            issues.append(NodeError(node_name=conv_node, description="Convergence node is not defined in nodes"))
        # Check if the convergence node has multiple incoming transitions —
        # a convergence point with fewer than two is meaningless.
        incoming = [t for t in workflow_def.workflow.transitions if
                    (isinstance(t.to_node, str) and t.to_node == conv_node) or
                    (isinstance(t.to_node, list) and any(isinstance(tn, str) and tn == conv_node or
                                                         isinstance(tn, BranchCondition) and tn.to_node == conv_node
                                                         for tn in t.to_node))]
        if len(incoming) < 2:
            issues.append(NodeError(node_name=conv_node, description="Convergence node has fewer than 2 incoming transitions"))

    return issues
294
-
295
-
296
def validate_workflow_structure(structure: WorkflowStructure, nodes: Dict[str, NodeDefinition],
                                is_main: bool = False) -> List[NodeError]:
    """Validate a WorkflowStructure for consistency, including branch and converge support.

    Verifies that the start node (mandatory for the main workflow) exists,
    that every transition connects defined nodes, and that any condition
    string — whether on a plain transition or a BranchCondition — is valid
    Python expression syntax.

    Args:
        structure: The workflow (or sub-workflow) structure to check.
        nodes: All node definitions the structure may reference.
        is_main: True when validating the top-level workflow, which must
            declare a start node.

    Returns:
        A list of NodeError records describing every problem found.
    """
    problems: List[NodeError] = []

    def _bad_expression(expression: str) -> bool:
        # True when the string is not a syntactically valid Python expression.
        try:
            compile(expression, "<string>", "eval")
        except SyntaxError:
            return True
        return False

    if is_main and not structure.start:
        problems.append(NodeError(node_name=None, description="Main workflow is missing a start node"))
    elif structure.start and structure.start not in nodes:
        problems.append(NodeError(node_name=structure.start, description="Start node is not defined in nodes"))

    for transition in structure.transitions:
        source = transition.from_node
        if source not in nodes:
            problems.append(NodeError(node_name=source, description="Transition from undefined node"))

        # Normalize to a list: to_node is either one node name or a list of
        # names / BranchCondition objects.
        destinations: List[Union[str, BranchCondition]]
        if isinstance(transition.to_node, str):
            destinations = [transition.to_node]
        else:
            destinations = transition.to_node

        for destination in destinations:
            target = destination if isinstance(destination, str) else destination.to_node
            if target not in nodes:
                problems.append(NodeError(node_name=target, description=f"Transition to undefined node from '{source}'"))
            if isinstance(destination, BranchCondition) and destination.condition:
                if _bad_expression(destination.condition):
                    problems.append(NodeError(node_name=source, description=f"Invalid branch condition syntax: {destination.condition}"))

        # A condition directly on the transition only applies to the
        # single-target (string) form.
        if transition.condition and isinstance(transition.to_node, str):
            if _bad_expression(transition.condition):
                problems.append(NodeError(node_name=source, description=f"Invalid condition syntax in transition: {transition.condition}"))

    return problems
328
-
329
-
330
def check_circular_transitions(workflow_def: WorkflowDefinition) -> List[NodeError]:
    """Detect circular transitions in the workflow using DFS, allowing cycles with conditions.

    A cycle is only reported as an error when every transition along it is
    unconditional — conditional cycles are legitimate loop constructs.
    Both the main workflow and each sub-workflow are searched independently.

    Args:
        workflow_def: Workflow definition whose transition graphs are searched.

    Returns:
        A list of NodeError records, one per unconditional cycle found.
    """
    issues: List[NodeError] = []

    def dfs(node: str, visited: Set[str], path: Set[str], transitions: List[TransitionDefinition], path_transitions: List[TransitionDefinition]) -> None:
        # Revisiting a node on the current path means we closed a cycle.
        if node in path:
            # NOTE(review): `path` is a set, so list(path) does not preserve
            # traversal order — the rendered cycle string may list nodes out
            # of order. Consider an ordered container if this matters.
            cycle_nodes = list(path)[list(path).index(node):] + [node]
            cycle = " -> ".join(cycle_nodes)
            # Transitions that actually participate in the detected cycle.
            cycle_transitions = [
                t for t in path_transitions
                if t.from_node in cycle_nodes and
                ((isinstance(t.to_node, str) and t.to_node in cycle_nodes) or
                 (isinstance(t.to_node, list) and any((isinstance(tn, str) and tn in cycle_nodes) or
                                                      (isinstance(tn, BranchCondition) and tn.to_node in cycle_nodes)
                                                      for tn in t.to_node)))
            ]
            # Check if all transitions in the cycle are unconditional;
            # only then is the cycle an error (it could never terminate).
            if all((t.condition is None if isinstance(t.to_node, str) else
                    all(isinstance(tn, str) or (isinstance(tn, BranchCondition) and tn.condition is None) for tn in t.to_node))
                   for t in cycle_transitions):
                issues.append(NodeError(node_name=None, description=f"Unconditional circular transition detected: {cycle}"))
            return
        # Skip fully-explored nodes and names that aren't defined nodes.
        if node in visited or node not in workflow_def.nodes:
            return

        visited.add(node)
        path.add(node)

        for trans in transitions:
            if trans.from_node == node:
                # Track the transition on the current path so cycle membership
                # can be reconstructed when a cycle closes.
                path_transitions.append(trans)
                to_nodes = [trans.to_node] if isinstance(trans.to_node, str) else trans.to_node
                for to_node in to_nodes:
                    next_node = to_node if isinstance(to_node, str) else to_node.to_node
                    dfs(next_node, visited, path, transitions, path_transitions)
                path_transitions.pop()

        path.remove(node)

    # Search the main workflow graph.
    if workflow_def.workflow.start:
        dfs(workflow_def.workflow.start, set(), set(), workflow_def.workflow.transitions, [])

    # Search each sub-workflow graph with fresh DFS state.
    for node_name, node_def in workflow_def.nodes.items():
        if node_def.sub_workflow and node_def.sub_workflow.start:
            dfs(node_def.sub_workflow.start, set(), set(), node_def.sub_workflow.transitions, [])

    return issues
377
-
378
-
379
def main():
    """Build a sample workflow with branch, converge, template node, and input mapping using WorkflowManager and validate it.

    Intentionally includes several invalid pieces (an undefined function,
    an unconditional cycle, an undefined observer) so the validation report
    demonstrates each error category.
    """
    manager = WorkflowManager()

    # Define functions
    # NOTE(review): the embedded code strings were recovered from a rendered
    # diff; the exact indentation after "\n" could not be verified — confirm
    # against the original source.
    manager.add_function(
        name="say_hello",
        type_="embedded",
        code="def say_hello():\n return 'Hello, World!'"
    )
    manager.add_function(
        name="say_goodbye",
        type_="embedded",
        code="def say_goodbye():\n return 'Goodbye, World!'"
    )
    manager.add_function(
        name="check_condition",
        type_="embedded",
        code="def check_condition(text: str):\n return 'yes' if 'Hello' in text else 'no'"
    )
    manager.add_function(
        name="finalize",
        type_="embedded",
        code="def finalize(text: str):\n return 'Done'"
    )

    # Add nodes for main workflow
    manager.add_node(name="start", function="say_hello", output="text")
    manager.add_node(name="check", function="check_condition", output="result",
                     inputs_mapping={"text": "text"})  # Mapping input to context key
    manager.add_node(name="goodbye", function="say_goodbye", output="farewell")
    manager.add_node(name="finalize", function="finalize", output="status",
                     inputs_mapping={"text": "lambda ctx: ctx['farewell'] if ctx['result'] == 'no' else ctx['ai_result']"})
    manager.add_node(name="outro", function="non_existent")  # Intentional: undefined function

    # Add LLM node with valid temperature
    manager.add_node(
        name="ai_node",
        llm_config={
            "model": "gpt-3.5-turbo",
            "prompt_template": "{{text}}",
            "temperature": 0.7
        },
        output="ai_result"
    )

    # Add template node
    manager.add_node(
        name="template_node",
        template_config={
            "template": "Response: {{text}} - {{result}}"
        },
        output="template_output"
    )

    # Add nodes and sub-workflow (nested_start -> nested_end)
    manager.add_node(name="nested_start", function="say_hello", output="nested_text")
    manager.add_node(name="nested_end", function="say_goodbye")
    sub_workflow = WorkflowStructure(start="nested_start")
    sub_workflow.transitions.append(TransitionDefinition(from_node="nested_start", to_node="nested_end"))
    manager.add_node(name="nested", sub_workflow=sub_workflow)

    # Configure main workflow with branch and converge
    manager.set_start_node("start")
    manager.add_transition(from_node="start", to_node="check")
    manager.add_transition(
        from_node="check",
        to_node=[
            BranchCondition(to_node="ai_node", condition="ctx['result'] == 'yes'"),
            BranchCondition(to_node="goodbye", condition="ctx['result'] == 'no'")
        ]
    )
    manager.add_transition(from_node="ai_node", to_node="finalize")
    manager.add_transition(from_node="goodbye", to_node="finalize")
    manager.add_transition(from_node="finalize", to_node="template_node")
    manager.add_transition(from_node="start", to_node="outro")
    manager.add_transition(from_node="outro", to_node="start")  # Intentional: circular
    manager.add_convergence_node("finalize")

    # Add observer with error handling
    try:
        manager.add_observer("undefined_observer")  # Intentional: undefined observer
    except ValueError:
        pass  # Allow validation to proceed

    # Validate the constructed workflow
    workflow = manager.workflow
    issues = validate_workflow_definition(workflow)

    # Display results, sorted by node name then description for stable output
    if issues:
        print("Issues found in workflow definition:")
        for issue in sorted(issues, key=lambda x: (x.node_name or '', x.description)):
            node_part = f"Node '{issue.node_name}'" if issue.node_name else "Workflow"
            print(f"- {node_part}: {issue.description}")
    else:
        print("No issues found in workflow definition.")


if __name__ == "__main__":
    main()
@@ -1,31 +0,0 @@
1
- Feeling like you're duct-taping AI components instead of building real solutions? 😩 We've ALL been there.
2
-
3
- Introducing **QuantaLogic Flow** 🧩: your new (free & open-source!) workflow architect.
4
-
5
- Think of it as an AI LEGO set:
6
-
7
- ✅ Build pipelines blazingly fast.
8
- ✅ Configure complex workflows in simple YAML.
9
- ✅ Get LLMs collaborating effectively.
10
- ✅ Automate tasks from paper analysis to story generation.
11
-
12
- Why are engineers loving it?
13
-
14
- ⚡️ Branching logic that *doesn't* induce stress.
15
- ⚡️ Validation that catches errors early.
16
- ⚡️ Python ↔️ YAML conversion that feels magical.
17
- ⚡️ Input mapping so clean, Marie Kondo would approve.
18
-
19
- So, what workflow automation feature would save *you* the most time?
20
-
21
- 1️⃣ Dynamic LLM content?
22
- 2️⃣ Visual branching?
23
- 3️⃣ Declarative YAML config?
24
-
25
- Comment below with your pick! I'll share pro tips for the most requested. 👇
26
-
27
- P.S. Know a dev drowning in manual pipeline work? Share this post and be their hero!
28
-
29
- Link in comments.
30
-
31
- #AIEngineering #WorkflowAutomation