abstractflow 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. abstractflow/__init__.py +75 -95
  2. abstractflow/__main__.py +2 -0
  3. abstractflow/adapters/__init__.py +11 -0
  4. abstractflow/adapters/agent_adapter.py +124 -0
  5. abstractflow/adapters/control_adapter.py +615 -0
  6. abstractflow/adapters/effect_adapter.py +645 -0
  7. abstractflow/adapters/event_adapter.py +307 -0
  8. abstractflow/adapters/function_adapter.py +97 -0
  9. abstractflow/adapters/subflow_adapter.py +74 -0
  10. abstractflow/adapters/variable_adapter.py +317 -0
  11. abstractflow/cli.py +2 -0
  12. abstractflow/compiler.py +2027 -0
  13. abstractflow/core/__init__.py +5 -0
  14. abstractflow/core/flow.py +247 -0
  15. abstractflow/py.typed +2 -0
  16. abstractflow/runner.py +348 -0
  17. abstractflow/visual/__init__.py +43 -0
  18. abstractflow/visual/agent_ids.py +29 -0
  19. abstractflow/visual/builtins.py +789 -0
  20. abstractflow/visual/code_executor.py +214 -0
  21. abstractflow/visual/event_ids.py +33 -0
  22. abstractflow/visual/executor.py +2789 -0
  23. abstractflow/visual/interfaces.py +347 -0
  24. abstractflow/visual/models.py +252 -0
  25. abstractflow/visual/session_runner.py +168 -0
  26. abstractflow/visual/workspace_scoped_tools.py +261 -0
  27. abstractflow-0.3.0.dist-info/METADATA +413 -0
  28. abstractflow-0.3.0.dist-info/RECORD +32 -0
  29. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/licenses/LICENSE +2 -0
  30. abstractflow-0.1.0.dist-info/METADATA +0 -238
  31. abstractflow-0.1.0.dist-info/RECORD +0 -10
  32. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/WHEEL +0 -0
  33. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/entry_points.txt +0 -0
  34. {abstractflow-0.1.0.dist-info → abstractflow-0.3.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,168 @@
1
+ """Session-aware runner for VisualFlow executions.
2
+
3
+ This runner extends FlowRunner with:
4
+ - A durable session_id (defaults to root run_id)
5
+ - Auto-started custom event listener workflows ("On Event" nodes)
6
+
7
+ This keeps VisualFlow JSON portable: any host can execute a visual flow and its
8
+ event listeners using AbstractRuntime's durable semantics.
9
+ """
10
+
11
+ from __future__ import annotations
12
+
13
+ from typing import Any, Dict, List, Optional, TYPE_CHECKING
14
+
15
+ from ..runner import FlowRunner
16
+
17
+ if TYPE_CHECKING:
18
+ from abstractruntime.core.runtime import Runtime
19
+ from abstractruntime.core.spec import WorkflowSpec
20
+
21
+
22
class VisualSessionRunner(FlowRunner):
    """FlowRunner that starts event listener workflows within the same session.

    On `start()`, the root run's session_id defaults to its own run_id, and each
    configured event-listener workflow ("On Event" node) is started as a child
    run in that session and advanced to its first WAIT_EVENT.
    """

    def __init__(
        self,
        flow: Any,
        *,
        runtime: Optional["Runtime"] = None,
        event_listener_specs: Optional[List["WorkflowSpec"]] = None,
    ) -> None:
        super().__init__(flow, runtime=runtime)
        # Listener workflow specs to auto-start alongside the root run.
        self._event_listener_specs: List["WorkflowSpec"] = list(event_listener_specs or [])
        # run_ids of listener child runs started by this runner.
        self._event_listener_run_ids: List[str] = []

    @property
    def event_listener_run_ids(self) -> List[str]:
        """Return a copy of the listener child run_ids started by `start()`."""
        return list(self._event_listener_run_ids)

    def start(self, input_data: Optional[Dict[str, Any]] = None) -> str:
        """Start the root run, default its session_id, and launch listener runs."""
        run_id = super().start(input_data)

        # Default session_id to the root run_id for session-scoped events.
        try:
            state = self.runtime.get_state(run_id)
            if not getattr(state, "session_id", None):
                state.session_id = run_id  # type: ignore[attr-defined]
                self.runtime.run_store.save(state)
        except Exception:
            # Best-effort; session-scoped keys will fall back to run_id if missing.
            pass

        if not self._event_listener_specs:
            return run_id

        # Start listeners as child runs in the same session.
        for spec in self._event_listener_specs:
            try:
                child_run_id = self.runtime.start(
                    workflow=spec,
                    vars={},
                    session_id=run_id,
                    parent_run_id=run_id,
                )
                # Advance the listener to its first WAIT_EVENT (On Event node).
                self.runtime.tick(workflow=spec, run_id=child_run_id, max_steps=10)
                self._event_listener_run_ids.append(child_run_id)
            except Exception:
                # Listener start failures should surface during execution, but
                # we don't want to block root runs from starting.
                continue

        return run_id

    def run(self, input_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """Execute the root run and drive session-level listener runs.

        Rationale:
        - `EMIT_EVENT` resumes listener runs but does not (by default) execute them.
        - Hosts that only "run the main workflow" should still see event handler
          branches run.

        Returns:
            The root run's output dict on completion, or a FlowRunner-shaped
            ``{"waiting": True, "state": ..., "wait_key": ...}`` dict when the
            root run blocks on an external wait.

        Raises:
            RuntimeError: if the root run fails or is cancelled.
        """
        from abstractruntime.core.models import RunStatus, WaitReason

        run_id = self.start(input_data)
        runtime = self.runtime

        def _list_session_runs() -> List[str]:
            # Root run first, then children known to the store plus any
            # listener runs this instance started itself.
            out: List[str] = [run_id]
            try:
                rs = getattr(runtime, "run_store", None)
                if rs is not None and hasattr(rs, "list_children"):
                    children = rs.list_children(parent_run_id=run_id)  # type: ignore[attr-defined]
                    for c in children:
                        cid = getattr(c, "run_id", None)
                        if isinstance(cid, str) and cid and cid not in out:
                            out.append(cid)
            except Exception:
                pass
            for cid in self._event_listener_run_ids:
                if cid not in out:
                    out.append(cid)
            return out

        def _tick_child(child_run_id: str, *, max_steps: int = 100) -> None:
            # Drive a child forward if it is RUNNING, or WAITING on WAIT_UNTIL
            # (which can auto-unblock once its deadline passes).
            st = runtime.get_state(child_run_id)
            if st.status != RunStatus.RUNNING:
                until = (
                    st.status == RunStatus.WAITING
                    and st.waiting
                    and st.waiting.reason == WaitReason.UNTIL
                )
                if not until:
                    return
            reg = getattr(runtime, "workflow_registry", None)
            if reg is None:
                return
            wf = reg.get(st.workflow_id)
            if wf is None:
                return
            runtime.tick(workflow=wf, run_id=child_run_id, max_steps=max_steps)

        def _child_snapshot() -> List[Any]:
            # (run_id, status, wait_reason) for every non-root session run.
            snap: List[Any] = []
            for cid in _list_session_runs():
                if cid == run_id:
                    continue
                st = runtime.get_state(cid)
                snap.append((cid, st.status, st.waiting.reason if st.waiting else None))
            return snap

        previous_snapshot: Optional[List[Any]] = None
        while True:
            state = runtime.tick(workflow=self.workflow, run_id=run_id, max_steps=100)

            # Drive children that became RUNNING due to EMIT_EVENT (or subflows).
            for cid in _list_session_runs():
                if cid == run_id:
                    continue
                _tick_child(cid, max_steps=100)

            # Root completion/termination conditions.
            if state.status == RunStatus.COMPLETED:
                snapshot = _child_snapshot()
                # Session is done when every child is terminal or an idle
                # listener parked on WAIT_EVENT.
                all_idle_or_done = all(
                    status in (RunStatus.COMPLETED, RunStatus.FAILED, RunStatus.CANCELLED)
                    or (status == RunStatus.WAITING and reason == WaitReason.EVENT)
                    for _, status, reason in snapshot
                )
                # Fix: if no child changed state since the previous pass,
                # further looping cannot make progress (e.g. a RUNNING child
                # whose workflow is not in the registry) — settle instead of
                # spinning forever.
                stalled = previous_snapshot is not None and snapshot == previous_snapshot
                previous_snapshot = snapshot
                if all_idle_or_done or stalled:
                    # Cancel idle listeners to keep the session tidy.
                    for cid, status, reason in snapshot:
                        if status == RunStatus.WAITING and reason == WaitReason.EVENT:
                            try:
                                runtime.cancel_run(cid, reason="Session completed")
                            except Exception:
                                pass
                    return state.output or {}
                # Otherwise, keep driving until children settle into waits/terminal.
                continue

            if state.status == RunStatus.FAILED:
                raise RuntimeError(f"Flow failed: {state.error}")

            # Fix: surface cancellation instead of looping forever — the
            # original loop only handled COMPLETED/FAILED/WAITING.
            if state.status == RunStatus.CANCELLED:
                raise RuntimeError("Flow cancelled")

            if state.status == RunStatus.WAITING:
                # Preserve FlowRunner shape.
                return {
                    "waiting": True,
                    "state": state,
                    "wait_key": state.waiting.wait_key if state.waiting else None,
                }
@@ -0,0 +1,261 @@
1
+ """Workspace-scoped tool execution helpers.
2
+
3
+ These are host-friendly utilities to scope filesystem-ish tool calls (files + shell)
4
+ to a single "workspace root" folder:
5
+
6
+ - Relative paths resolve under `workspace_root`.
7
+ - Absolute paths are only allowed if they remain under `workspace_root`.
8
+ - `execute_command` defaults to `working_directory=workspace_root` when not specified.
9
+
10
+ This is implemented as a thin wrapper around an AbstractRuntime ToolExecutor that
11
+ rewrites/validates tool call arguments before delegating.
12
+ """
13
+
14
+ from __future__ import annotations
15
+
16
+ from dataclasses import dataclass
17
+ import os
18
+ from pathlib import Path
19
+ from typing import Any, Dict, List, Optional, Tuple
20
+
21
+
22
+ def _resolve_no_strict(path: Path) -> Path:
23
+ """Resolve without requiring the path to exist (best-effort across py versions)."""
24
+ try:
25
+ return path.resolve(strict=False)
26
+ except TypeError: # pragma: no cover (older python)
27
+ return path.resolve()
28
+
29
+
30
def _find_repo_root_from_here(*, start: Path, max_hops: int = 10) -> Optional[Path]:
    """Walk upward from *start* looking for a recognizable monorepo root.

    A directory counts as the root when it contains `docs/KnowledgeBase.md`,
    or the three sibling packages of the monorepo. Returns None when nothing
    is found within *max_hops* parent directories.
    """
    current = _resolve_no_strict(start)
    for _ in range(max_hops):
        if (current / "docs" / "KnowledgeBase.md").exists():
            return current
        markers = ("abstractflow", "abstractcore", "abstractruntime")
        if all((current / name).exists() for name in markers):
            return current
        parent = current.parent
        if parent == current:
            # Reached the filesystem root.
            break
        current = parent
    return None
def resolve_workspace_base_dir() -> Path:
    """Base directory against which relative workspace roots are resolved.

    Resolution order:
      1. `ABSTRACTFLOW_WORKSPACE_BASE_DIR` environment variable, when set.
      2. Best-effort monorepo root detection from this file's location.
      3. The current working directory.
    """
    override = os.getenv("ABSTRACTFLOW_WORKSPACE_BASE_DIR")
    if isinstance(override, str) and override.strip():
        return _resolve_no_strict(Path(override.strip()).expanduser())

    repo_root = _find_repo_root_from_here(start=Path(__file__).resolve().parent)
    if repo_root is not None:
        return repo_root

    return _resolve_no_strict(Path.cwd())
def _resolve_under_root(*, root: Path, user_path: str) -> Path:
    """Resolve *user_path* under *root*, rejecting anything that escapes it.

    Relative paths are joined onto *root*; absolute paths are accepted only if
    they still live under *root* after normalization.

    Raises:
        ValueError: when the resolved path is outside the workspace root.
    """
    candidate = Path(str(user_path or "").strip()).expanduser()
    if not candidate.is_absolute():
        candidate = root / candidate
    resolved = _resolve_no_strict(candidate)
    anchored = _resolve_no_strict(root)
    try:
        resolved.relative_to(anchored)
    except Exception as exc:
        raise ValueError(f"Path escapes workspace_root: '{user_path}'") from exc
    return resolved
+ def _normalize_arguments(raw: Any) -> Dict[str, Any]:
81
+ if raw is None:
82
+ return {}
83
+ if isinstance(raw, dict):
84
+ return dict(raw)
85
+ # Some models emit JSON strings for args.
86
+ if isinstance(raw, str) and raw.strip():
87
+ import json
88
+
89
+ try:
90
+ parsed = json.loads(raw)
91
+ except Exception:
92
+ return {}
93
+ return dict(parsed) if isinstance(parsed, dict) else {}
94
+ return {}
95
+
96
+
97
@dataclass(frozen=True)
class WorkspaceScope:
    """An absolute directory that all scoped tool calls must stay inside."""

    # Absolute workspace root directory (created on demand by `from_input_data`).
    root: Path

    @classmethod
    def from_input_data(
        cls, input_data: Dict[str, Any], *, key: str = "workspace_root", base_dir: Optional[Path] = None
    ) -> Optional["WorkspaceScope"]:
        """Build a scope from flow input data.

        Returns None when *key* is absent or blank. Relative roots resolve
        against *base_dir* (or `resolve_workspace_base_dir()`), and the
        directory is created if it does not exist yet.

        Raises:
            ValueError: when the configured workspace_root exists but is a file.
        """
        value = input_data.get(key)
        if not isinstance(value, str) or not value.strip():
            return None
        anchor = base_dir or resolve_workspace_base_dir()
        candidate = Path(value.strip()).expanduser()
        if not candidate.is_absolute():
            candidate = anchor / candidate
        candidate = _resolve_no_strict(candidate)
        if candidate.exists() and not candidate.is_dir():
            raise ValueError(f"workspace_root must be a directory (got file): {value}")
        candidate.mkdir(parents=True, exist_ok=True)
        return cls(root=candidate)
class WorkspaceScopedToolExecutor:
    """Wrap another ToolExecutor and scope filesystem-ish tool calls to a workspace root."""

    def __init__(self, *, scope: WorkspaceScope, delegate: Any):
        self._scope = scope
        self._delegate = delegate

    def set_timeout_s(self, timeout_s: Optional[float]) -> None:  # pragma: no cover (depends on delegate)
        """Forward timeout configuration to the delegate when it supports it."""
        forward = getattr(self._delegate, "set_timeout_s", None)
        if callable(forward):
            forward(timeout_s)

    def execute(self, *, tool_calls: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Rewrite/validate each call, delegate, then merge pre-blocked failures back in.

        Calls whose paths cannot be scoped are pre-blocked (returned as failed
        results) rather than crashing the whole batch.
        """
        calls = list(tool_calls or [])
        blocked: Dict[Tuple[int, str], Dict[str, Any]] = {}
        runnable: List[Dict[str, Any]] = []

        for index, call in enumerate(calls):
            tool = str(call.get("name", "") or "")
            cid = str(call.get("call_id") or call.get("id") or f"call_{index}")
            raw_args = _normalize_arguments(call.get("arguments"))

            try:
                scoped_args = self._rewrite_args(tool_name=tool, args=raw_args)
            except Exception as exc:
                blocked[(index, cid)] = {
                    "call_id": cid,
                    "name": tool,
                    "success": False,
                    "output": None,
                    "error": str(exc),
                }
                continue

            prepared = dict(call)
            prepared["name"] = tool
            prepared["call_id"] = cid
            prepared["arguments"] = scoped_args
            runnable.append(prepared)

        outcome = self._delegate.execute(tool_calls=runnable)

        # If the delegate didn't execute tools, we can't merge blocked results meaningfully.
        if not isinstance(outcome, dict) or outcome.get("mode") != "executed":
            return outcome

        raw_results = outcome.get("results")
        entries = raw_results if isinstance(raw_results, list) else []

        indexed: Dict[str, Dict[str, Any]] = {}
        for entry in entries:
            if not isinstance(entry, dict):
                continue
            rid = str(entry.get("call_id") or "")
            if rid:
                indexed[rid] = entry

        merged: List[Dict[str, Any]] = []
        for index, call in enumerate(calls):
            cid = str(call.get("call_id") or call.get("id") or f"call_{index}")
            if (index, cid) in blocked:
                merged.append(blocked[(index, cid)])
            elif cid in indexed:
                merged.append(indexed[cid])
            else:
                merged.append(
                    {
                        "call_id": cid,
                        "name": str(call.get("name", "") or ""),
                        "success": False,
                        "output": None,
                        "error": "Tool result missing (internal error)",
                    }
                )

        return {"mode": "executed", "results": merged}

    def _rewrite_args(self, *, tool_name: str, args: Dict[str, Any]) -> Dict[str, Any]:
        """Rewrite tool args so file operations are scoped under workspace_root.

        Raises:
            ValueError: when a required path is missing or escapes the root.
        """
        root = self._scope.root
        scoped = dict(args or {})

        def _scope_field(field: str, *, default_to_root: bool = False) -> None:
            value = scoped.get(field)
            blank = value is None or (isinstance(value, str) and not value.strip())
            if blank and default_to_root:
                scoped[field] = str(_resolve_no_strict(root))
                return
            if value is None:
                return
            scoped[field] = str(_resolve_under_root(root=root, user_path=str(value)))

        # Filesystem-ish tools (AbstractCore common tools)
        if tool_name == "list_files":
            _scope_field("directory_path", default_to_root=True)
        elif tool_name == "search_files":
            _scope_field("path", default_to_root=True)
        elif tool_name == "execute_command":
            _scope_field("working_directory", default_to_root=True)
        elif tool_name in ("analyze_code", "read_file", "write_file", "edit_file"):
            _scope_field("file_path")
            if "file_path" not in scoped:
                raise ValueError(f"{tool_name} requires file_path")

        return scoped
def build_scoped_tool_executor(*, scope: WorkspaceScope) -> Any:
    """Create a local tool executor wrapped with workspace scoping."""
    # Imported lazily so this module stays importable without abstractruntime.
    from abstractruntime.integrations.abstractcore.default_tools import get_default_tools
    from abstractruntime.integrations.abstractcore.tool_executor import MappingToolExecutor

    return WorkspaceScopedToolExecutor(
        scope=scope,
        delegate=MappingToolExecutor.from_tools(get_default_tools()),
    )