abstractflow 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
Files changed (34)
  1. abstractflow/__init__.py +75 -95
  2. abstractflow/__main__.py +2 -0
  3. abstractflow/adapters/__init__.py +11 -0
  4. abstractflow/adapters/agent_adapter.py +124 -0
  5. abstractflow/adapters/control_adapter.py +615 -0
  6. abstractflow/adapters/effect_adapter.py +645 -0
  7. abstractflow/adapters/event_adapter.py +307 -0
  8. abstractflow/adapters/function_adapter.py +97 -0
  9. abstractflow/adapters/subflow_adapter.py +74 -0
  10. abstractflow/adapters/variable_adapter.py +317 -0
  11. abstractflow/cli.py +2 -0
  12. abstractflow/compiler.py +2027 -0
  13. abstractflow/core/__init__.py +5 -0
  14. abstractflow/core/flow.py +247 -0
  15. abstractflow/py.typed +2 -0
  16. abstractflow/runner.py +348 -0
  17. abstractflow/visual/__init__.py +43 -0
  18. abstractflow/visual/agent_ids.py +29 -0
  19. abstractflow/visual/builtins.py +789 -0
  20. abstractflow/visual/code_executor.py +214 -0
  21. abstractflow/visual/event_ids.py +33 -0
  22. abstractflow/visual/executor.py +2789 -0
  23. abstractflow/visual/interfaces.py +347 -0
  24. abstractflow/visual/models.py +252 -0
  25. abstractflow/visual/session_runner.py +168 -0
  26. abstractflow/visual/workspace_scoped_tools.py +261 -0
  27. abstractflow-0.3.0.dist-info/METADATA +413 -0
  28. abstractflow-0.3.0.dist-info/RECORD +32 -0
  29. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/licenses/LICENSE +2 -0
  30. abstractflow-0.1.0.dist-info/METADATA +0 -238
  31. abstractflow-0.1.0.dist-info/RECORD +0 -10
  32. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/WHEEL +0 -0
  33. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/entry_points.txt +0 -0
  34. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/top_level.txt +0 -0
abstractflow/__init__.py CHANGED
@@ -1,104 +1,84 @@
+ """AbstractFlow - Multi-agent orchestration layer for the Abstract Framework.
+
+ AbstractFlow enables composition of agents into pipelines and coordinates
+ their execution via AbstractRuntime. It provides:
+
+ - Flow: Declarative flow definition with nodes and edges
+ - FlowRunner: High-level interface for running flows
+ - compile_flow: Convert Flow to WorkflowSpec for direct runtime usage
+
+ Example:
+     >>> from abstractflow import Flow, FlowRunner
+     >>>
+     >>> # Define a simple flow
+     >>> flow = Flow("my_pipeline")
+     >>> flow.add_node("step1", lambda x: x * 2, input_key="value", output_key="doubled")
+     >>> flow.add_node("step2", lambda x: x + 10, input_key="doubled", output_key="result")
+     >>> flow.add_edge("step1", "step2")
+     >>> flow.set_entry("step1")
+     >>>
+     >>> # Run the flow
+     >>> runner = FlowRunner(flow)
+     >>> result = runner.run({"value": 5})
+     >>> print(result)  # {'result': 20, 'success': True}
+
+ For agent-based flows:
+     >>> from abstractflow import Flow, FlowRunner
+     >>> from abstractagent import create_react_agent
+     >>>
+     >>> planner = create_react_agent(provider="ollama", model="qwen3:4b")
+     >>> executor = create_react_agent(provider="ollama", model="qwen3:4b")
+     >>>
+     >>> flow = Flow("plan_and_execute")
+     >>> flow.add_node("plan", planner, output_key="plan")
+     >>> flow.add_node("execute", executor, input_key="plan")
+     >>> flow.add_edge("plan", "execute")
+     >>> flow.set_entry("plan")
+     >>>
+     >>> runner = FlowRunner(flow)
+     >>> result = runner.run({"context": {"task": "Build a REST API"}})
  """
- AbstractFlow - Diagram-based AI workflow generation.

- Built on top of AbstractCore for unified LLM provider access.
- """
-
- __version__ = "0.1.0"
- __author__ = "AbstractFlow Team"
- __email__ = "contact@abstractflow.ai"
+ __version__ = "0.3.0"
+ __author__ = "Laurent-Philippe Albou"
+ __email__ = "contact@abstractcore.ai"
  __license__ = "MIT"

- # Core imports that will be available when the package is fully implemented
- __all__ = [
-     "__version__",
-     "WorkflowBuilder",
-     "Node",
-     "LLMNode",
-     "TextNode",
-     "ConditionalNode",
-     "TransformNode",
- ]
-
- # Placeholder implementations - these will be replaced with actual implementations
- class WorkflowBuilder:
-     """
-     Visual workflow builder for creating AI-powered diagrams.
-
-     This is a placeholder implementation. The full version will provide:
-     - Drag-and-drop workflow creation
-     - Real-time execution monitoring
-     - Multi-provider LLM support via AbstractCore
-     - Export to various formats
-     """
-
-     def __init__(self):
-         """Initialize a new workflow builder."""
-         raise NotImplementedError(
-             "AbstractFlow is currently in development. "
-             "This placeholder package reserves the PyPI name. "
-             "Follow https://github.com/lpalbou/AbstractFlow for updates."
-         )
-
-
- class Node:
-     """Base class for all workflow nodes."""
-
-     def __init__(self, node_id: str):
-         """Initialize a workflow node."""
-         raise NotImplementedError(
-             "AbstractFlow is currently in development. "
-             "This placeholder package reserves the PyPI name. "
-             "Follow https://github.com/lpalbou/AbstractFlow for updates."
-         )
+ # Core classes
+ from .core.flow import Flow, FlowNode, FlowEdge

+ # Compiler
+ from .compiler import compile_flow

- class LLMNode(Node):
-     """Node for LLM-based text generation and processing."""
-
-     def __init__(self, provider: str, model: str, **kwargs):
-         """Initialize an LLM node with AbstractCore provider."""
-         raise NotImplementedError(
-             "AbstractFlow is currently in development. "
-             "This placeholder package reserves the PyPI name. "
-             "Follow https://github.com/lpalbou/AbstractFlow for updates."
-         )
+ # Runner
+ from .runner import FlowRunner

+ # Adapters (for advanced usage)
+ from .adapters import (
+     create_function_node_handler,
+     create_agent_node_handler,
+     create_subflow_node_handler,
+ )

- class TextNode(Node):
-     """Node for text input/output operations."""
-
-     def __init__(self, text_id: str):
-         """Initialize a text node."""
-         raise NotImplementedError(
-             "AbstractFlow is currently in development. "
-             "This placeholder package reserves the PyPI name. "
-             "Follow https://github.com/lpalbou/AbstractFlow for updates."
-         )
-
-
- class ConditionalNode(Node):
-     """Node for conditional branching in workflows."""
-
-     def __init__(self, condition: str):
-         """Initialize a conditional node."""
-         raise NotImplementedError(
-             "AbstractFlow is currently in development. "
-             "This placeholder package reserves the PyPI name. "
-             "Follow https://github.com/lpalbou/AbstractFlow for updates."
-         )
-
-
- class TransformNode(Node):
-     """Node for data transformation operations."""
-
-     def __init__(self, transform_func: str):
-         """Initialize a transform node."""
-         raise NotImplementedError(
-             "AbstractFlow is currently in development. "
-             "This placeholder package reserves the PyPI name. "
-             "Follow https://github.com/lpalbou/AbstractFlow for updates."
-         )
+ __all__ = [
+     # Version info
+     "__version__",
+     "__author__",
+     "__email__",
+     "__license__",
+     # Core classes
+     "Flow",
+     "FlowNode",
+     "FlowEdge",
+     # Compiler
+     "compile_flow",
+     # Runner
+     "FlowRunner",
+     # Adapters
+     "create_function_node_handler",
+     "create_agent_node_handler",
+     "create_subflow_node_handler",
+ ]


  def get_version() -> str:
@@ -107,5 +87,5 @@ def get_version() -> str:


  def is_development_version() -> bool:
-     """Check if this is a development/placeholder version."""
-     return True  # This will be False in the actual implementation
+     """Check if this is a development version."""
+     return False  # Now implemented!
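
A minimal usage sketch of the 0.3.0 public API described in the module docstring above; the runner calls mirror the docstring exactly, while the compile_flow call shape and the printed type name are assumptions based only on the docstring's one-line description of compile_flow:

# Sketch of the 0.3.0 public API (assumes abstractflow 0.3.0 is installed).
from abstractflow import Flow, FlowRunner, compile_flow

# The two-step pipeline from the module docstring.
flow = Flow("my_pipeline")
flow.add_node("step1", lambda x: x * 2, input_key="value", output_key="doubled")
flow.add_node("step2", lambda x: x + 10, input_key="doubled", output_key="result")
flow.add_edge("step1", "step2")
flow.set_entry("step1")

# High-level execution via FlowRunner.
runner = FlowRunner(flow)
result = runner.run({"value": 5})
print(result)  # per the docstring: {'result': 20, 'success': True}

# Lower-level path: compile to a WorkflowSpec for direct runtime usage.
# (Assumption: compile_flow takes the Flow and returns the spec object.)
spec = compile_flow(flow)
print(type(spec).__name__)
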
abstractflow/__main__.py CHANGED
@@ -8,3 +8,5 @@ from .cli import main

  if __name__ == "__main__":
      exit(main())
+
+
abstractflow/adapters/__init__.py ADDED
@@ -0,0 +1,11 @@
+ """AbstractFlow adapters for converting handlers to workflow nodes."""
+
+ from .function_adapter import create_function_node_handler
+ from .agent_adapter import create_agent_node_handler
+ from .subflow_adapter import create_subflow_node_handler
+
+ __all__ = [
+     "create_function_node_handler",
+     "create_agent_node_handler",
+     "create_subflow_node_handler",
+ ]
abstractflow/adapters/agent_adapter.py ADDED
@@ -0,0 +1,124 @@
+ """Adapter for using AbstractAgent agents as flow nodes."""
+
+ from __future__ import annotations
+
+ from typing import Any, Callable, Optional, TYPE_CHECKING
+
+ if TYPE_CHECKING:
+     from abstractruntime.core.models import RunState, StepPlan
+     from abstractagent.agents.base import BaseAgent
+
+
+ def create_agent_node_handler(
+     node_id: str,
+     agent: "BaseAgent",
+     next_node: Optional[str],
+     input_key: Optional[str] = None,
+     output_key: Optional[str] = None,
+ ) -> Callable:
+     """Create a node handler that runs an agent as a subworkflow.
+
+     Agent nodes execute the full agent workflow (ReAct loop, CodeAct, etc.)
+     as a subworkflow. The agent runs to completion before transitioning.
+
+     Args:
+         node_id: Unique identifier for this node
+         agent: The agent instance to run
+         next_node: ID of the next node to transition to (None for terminal)
+         input_key: Key in run.vars to read task/input from
+         output_key: Key in run.vars to write agent output to
+
+     Returns:
+         A node handler function compatible with AbstractRuntime
+
+     Example:
+         >>> from abstractagent import create_react_agent
+         >>> planner = create_react_agent(provider="ollama", model="qwen3:4b")
+         >>> handler = create_agent_node_handler("plan", planner, "search")
+     """
+     from abstractruntime.core.models import Effect, EffectType, StepPlan
+
+     def handler(run: "RunState", ctx: Any) -> "StepPlan":
+         """Start the agent as a subworkflow."""
+         # Determine task for the agent
+         task = ""
+
+         if input_key:
+             input_data = run.vars.get(input_key, {})
+             if isinstance(input_data, dict):
+                 task = input_data.get("task", "") or input_data.get("query", "")
+                 if not task:
+                     # Use the whole input as context
+                     task = str(input_data)
+             else:
+                 task = str(input_data)
+
+         # Fallback to flow's main task
+         if not task:
+             context = run.vars.get("context", {})
+             if isinstance(context, dict):
+                 task = context.get("task", "")
+
+         if not task:
+             task = f"Execute {node_id} step"
+
+         # Build initial vars for the agent subworkflow
+         max_iterations = getattr(agent, "_max_iterations", 25)
+         max_history_messages = getattr(agent, "_max_history_messages", -1)
+         max_tokens = getattr(agent, "_max_tokens", None)
+         if not isinstance(max_tokens, int) or max_tokens <= 0:
+             try:
+                 runtime = getattr(agent, "runtime", None)
+                 config = getattr(runtime, "config", None)
+                 base = config.to_limits_dict() if config is not None else {}
+                 max_tokens = int(base.get("max_tokens", 32768) or 32768)
+             except Exception:
+                 max_tokens = 32768
+
+         agent_vars = {
+             "context": {
+                 "task": task,
+                 "messages": [],
+             },
+             "scratchpad": {
+                 "iteration": 0,
+                 "max_iterations": max_iterations,
+                 "max_history_messages": max_history_messages,
+             },
+             "_runtime": {"inbox": []},
+             "_temp": {},
+             # Canonical _limits namespace for runtime awareness
+             "_limits": {
+                 "max_iterations": max_iterations,
+                 "current_iteration": 0,
+                 "max_tokens": max_tokens,
+                 "max_history_messages": max_history_messages,
+                 "estimated_tokens_used": 0,
+                 "warn_iterations_pct": 80,
+                 "warn_tokens_pct": 80,
+             },
+         }
+
+         # Inject any additional context from the flow
+         if input_key and isinstance(run.vars.get(input_key), dict):
+             # Merge additional context
+             for k, v in run.vars.get(input_key, {}).items():
+                 if k not in ("task", "query"):
+                     agent_vars["context"][k] = v
+
+         # Use START_SUBWORKFLOW effect to run agent durably
+         return StepPlan(
+             node_id=node_id,
+             effect=Effect(
+                 type=EffectType.START_SUBWORKFLOW,
+                 payload={
+                     "workflow_id": agent.workflow.workflow_id,
+                     "vars": agent_vars,
+                     "async": False,  # Sync: wait for completion
+                 },
+                 result_key=output_key or f"_flow.{node_id}.result",
+             ),
+             next_node=next_node,
+         )
+
+     return handler
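
The factory above only builds the per-node handler: it reads the task from run.vars[input_key] (falling back to context["task"], then to a generic label) and records the agent result under output_key or "_flow.<node_id>.result". A short wiring sketch, assuming abstractagent and a local Ollama model as in the docstring example; how the handlers get registered with AbstractRuntime is not shown in this diff (presumably compile_flow / FlowRunner do that wiring):

# Sketch: building two agent node handlers with the adapter factory.
# Assumes abstractagent is installed and an Ollama model is available,
# as in the docstring example above.
from abstractagent import create_react_agent
from abstractflow.adapters import create_agent_node_handler

planner = create_react_agent(provider="ollama", model="qwen3:4b")
executor = create_react_agent(provider="ollama", model="qwen3:4b")

# "plan" writes the planner's output to run.vars["plan"]; "execute" reads it
# back via input_key and, with no output_key, stores its result under
# "_flow.execute.result".
plan_handler = create_agent_node_handler(
    "plan", planner, next_node="execute", output_key="plan"
)
execute_handler = create_agent_node_handler(
    "execute", executor, next_node=None, input_key="plan"
)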