abstractflow-0.3.0-py3-none-any.whl → abstractflow-0.3.1-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- abstractflow/__init__.py +2 -2
- abstractflow/adapters/agent_adapter.py +2 -121
- abstractflow/adapters/control_adapter.py +2 -612
- abstractflow/adapters/effect_adapter.py +2 -642
- abstractflow/adapters/event_adapter.py +2 -304
- abstractflow/adapters/function_adapter.py +2 -94
- abstractflow/adapters/subflow_adapter.py +2 -71
- abstractflow/adapters/variable_adapter.py +2 -314
- abstractflow/cli.py +73 -28
- abstractflow/compiler.py +18 -2022
- abstractflow/core/flow.py +4 -240
- abstractflow/runner.py +59 -5
- abstractflow/visual/agent_ids.py +2 -26
- abstractflow/visual/builtins.py +2 -786
- abstractflow/visual/code_executor.py +2 -211
- abstractflow/visual/executor.py +319 -2140
- abstractflow/visual/interfaces.py +103 -10
- abstractflow/visual/models.py +26 -1
- abstractflow/visual/session_runner.py +23 -9
- abstractflow/visual/workspace_scoped_tools.py +11 -243
- abstractflow/workflow_bundle.py +290 -0
- abstractflow-0.3.1.dist-info/METADATA +186 -0
- abstractflow-0.3.1.dist-info/RECORD +33 -0
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/WHEEL +1 -1
- abstractflow-0.3.0.dist-info/METADATA +0 -413
- abstractflow-0.3.0.dist-info/RECORD +0 -32
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/entry_points.txt +0 -0
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {abstractflow-0.3.0.dist-info → abstractflow-0.3.1.dist-info}/top_level.txt +0 -0
--- abstractflow/adapters/event_adapter.py (0.3.0)
+++ abstractflow/adapters/event_adapter.py (0.3.1)
@@ -1,307 +1,5 @@
-"""
-
-This module provides durable, session-scoped custom events (Blueprint-style):
-- `on_event`: a listener node that waits for an event and then runs its branch
-- `on_schedule`: a listener node that waits for a schedule tick and then runs its branch
-- `emit_event`: an emitter node that signals listeners in the same session (or a target session)
-
-These are built on AbstractRuntime primitives:
-- WAIT_EVENT (durable pause)
-- WAIT_UNTIL (durable time wait)
-- EMIT_EVENT (durable dispatch + resume)
-"""
+"""Re-export: AbstractRuntime VisualFlow compiler adapter."""
 
 from __future__ import annotations
 
-from
-
-
-def create_on_event_node_handler(
-    *,
-    node_id: str,
-    next_node: Optional[str],
-    resolve_inputs: Optional[Callable[[Any], Dict[str, Any]]] = None,
-    default_name: str,
-    scope: str = "session",
-    flow: Optional[Any] = None,
-) -> Callable:
-    """Create an `on_event` node handler.
-
-    The node:
-    - pushes itself as the active control node so terminal branch nodes return here
-    - waits for a session-scoped event via WAIT_EVENT
-    - resumes into `next_node` when the event arrives
-    """
-    from abstractruntime.core.models import Effect, EffectType, StepPlan
-
-    from .control_adapter import _ensure_control
-
-    def _invalidate_pure_cache() -> None:
-        if flow is None:
-            return
-        node_outputs = getattr(flow, "_node_outputs", None)
-        pure_ids = getattr(flow, "_pure_node_ids", None)
-        if not isinstance(node_outputs, dict):
-            return
-        if not isinstance(pure_ids, (set, list, tuple)):
-            return
-        for nid in list(pure_ids):
-            if isinstance(nid, str) and nid:
-                node_outputs.pop(nid, None)
-
-    def _normalize_scope(raw: Any) -> str:
-        v = str(raw or "session").strip().lower() or "session"
-        if v not in {"session", "workflow", "run", "global"}:
-            v = "session"
-        return v
-
-    def handler(run: Any, ctx: Any) -> "StepPlan":
-        del ctx
-
-        _invalidate_pure_cache()
-
-        resolved: Dict[str, Any] = {}
-        if callable(resolve_inputs):
-            try:
-                resolved = resolve_inputs(run)
-            except Exception:
-                resolved = {}
-        resolved = resolved if isinstance(resolved, dict) else {}
-
-        # Blank/unspecified name is treated as "listen to any event" (wildcard).
-        # This avoids the surprising behavior of binding to an opaque node_id and
-        # makes older saved flows (that may have name="") still behave sensibly.
-        name_raw = resolved.get("name") or resolved.get("event_name") or default_name
-        name = str(name_raw or "").strip() or "*"
-        scope_norm = _normalize_scope(resolved.get("scope") if "scope" in resolved else scope)
-
-        _ctrl, stack, _frames = _ensure_control(run.vars)
-        if not stack or stack[-1] != node_id:
-            # Ensure this node is the active scheduler for its branch.
-            stack.append(node_id)
-
-        # If the event has no connected branch, we still wait and "consume" the event.
-        # This mirrors Blueprint semantics: an unconnected Custom Event is a no-op.
-        resume_to = next_node or node_id
-
-        effect = Effect(
-            type=EffectType.WAIT_EVENT,
-            payload={"scope": scope_norm, "name": name, "resume_to_node": resume_to},
-            result_key=f"_temp.effects.{node_id}",
-        )
-
-        return StepPlan(node_id=node_id, effect=effect, next_node=next_node)
-
-    return handler
-
-
-def create_on_schedule_node_handler(
-    *,
-    node_id: str,
-    next_node: Optional[str],
-    resolve_inputs: Optional[Callable[[Any], Dict[str, Any]]] = None,
-    schedule: str,
-    recurrent: bool = True,
-    flow: Optional[Any] = None,
-) -> Callable:
-    """Create an `on_schedule` node handler.
-
-    The node:
-    - (optionally) pushes itself as the active control node so terminal branch nodes return here
-    - waits for a time tick via WAIT_UNTIL
-    - resumes into `next_node` when the time elapses
-    """
-    import re
-    from datetime import datetime, timedelta, timezone
-
-    from abstractruntime.core.models import Effect, EffectType, StepPlan
-
-    from .control_adapter import _ensure_control
-
-    def _invalidate_pure_cache() -> None:
-        if flow is None:
-            return
-        node_outputs = getattr(flow, "_node_outputs", None)
-        pure_ids = getattr(flow, "_pure_node_ids", None)
-        if not isinstance(node_outputs, dict):
-            return
-        if not isinstance(pure_ids, (set, list, tuple)):
-            return
-        for nid in list(pure_ids):
-            if isinstance(nid, str) and nid:
-                node_outputs.pop(nid, None)
-
-    interval_re = re.compile(r"^\s*(\d+(?:\.\d+)?)\s*(ms|s|m|h|d)\s*$", re.IGNORECASE)
-    unit_seconds: Dict[str, float] = {"ms": 0.001, "s": 1.0, "m": 60.0, "h": 3600.0, "d": 86400.0}
-
-    def _parse_until(raw: str, *, now: datetime) -> tuple[str, Optional[float]]:
-        """Return (until_iso, interval_seconds_or_none)."""
-        s = str(raw or "").strip()
-        if not s:
-            raise ValueError("Missing schedule")
-
-        m = interval_re.match(s)
-        if m:
-            amount = float(m.group(1))
-            unit = str(m.group(2)).lower()
-            seconds = amount * unit_seconds.get(unit, 1.0)
-            until = (now + timedelta(seconds=float(seconds))).isoformat()
-            return until, float(seconds)
-
-        # ISO 8601 timestamp (treated as one-shot)
-        s2 = s[:-1] + "+00:00" if s.endswith("Z") else s
-        try:
-            dt = datetime.fromisoformat(s2)
-        except Exception as e:
-            raise ValueError(
-                f"Invalid schedule '{s}': expected interval like '30s', '5m', '1h' or an ISO timestamp"
-            ) from e
-        if dt.tzinfo is None:
-            dt = dt.replace(tzinfo=timezone.utc)
-        dt = dt.astimezone(timezone.utc)
-        return dt.isoformat(), None
-
-    def handler(run: Any, ctx: Any) -> "StepPlan":
-        del ctx
-
-        _invalidate_pure_cache()
-
-        resolved: Dict[str, Any] = {}
-        if callable(resolve_inputs):
-            try:
-                resolved = resolve_inputs(run)
-            except Exception:
-                resolved = {}
-        resolved = resolved if isinstance(resolved, dict) else {}
-
-        schedule_raw = resolved.get("schedule") if "schedule" in resolved else None
-        schedule_str = str(schedule_raw or schedule or "").strip()
-        if not schedule_str:
-            raise ValueError(f"on_schedule node '{node_id}' missing schedule")
-
-        recurrent_raw = resolved.get("recurrent") if "recurrent" in resolved else recurrent
-        recurrent_flag = bool(recurrent_raw) if recurrent_raw is not None else bool(recurrent)
-
-        now = datetime.now(timezone.utc)
-        until, interval_s = _parse_until(schedule_str, now=now)
-
-        # Absolute timestamps are one-shot; recurrence would cause a tight loop.
-        if interval_s is None:
-            recurrent_flag = False
-
-        if recurrent_flag:
-            _ctrl, stack, _frames = _ensure_control(run.vars)
-            if not stack or stack[-1] != node_id:
-                stack.append(node_id)
-
-        effect = Effect(
-            type=EffectType.WAIT_UNTIL,
-            payload={"until": until},
-            result_key=f"_temp.effects.{node_id}",
-        )
-
-        return StepPlan(node_id=node_id, effect=effect, next_node=next_node)
-
-    return handler
-
-
-def create_emit_event_node_handler(
-    *,
-    node_id: str,
-    next_node: Optional[str],
-    resolve_inputs: Callable[[Any], Dict[str, Any]],
-    default_name: str,
-    default_session_id: Optional[str] = None,
-    scope: str = "session",
-) -> Callable:
-    """Create an `emit_event` node handler.
-
-    The node resolves its inputs durably (via Visual data edges) and emits an EMIT_EVENT effect.
-
-    Inputs (resolved via `resolve_inputs`):
-    - name: str (optional, falls back to default_name)
-    - payload: dict|any (optional)
-    - session_id: str (optional, target session id for cross-workflow delivery)
-    """
-    from abstractruntime.core.models import Effect, EffectType, StepPlan
-
-    default_name2 = str(default_name or "").strip()
-    default_scope = str(scope or "session").strip()
-
-    def _normalize_scope(raw: Any) -> str:
-        v = str(raw or "session").strip().lower() or "session"
-        if v not in {"session", "workflow", "run", "global"}:
-            v = "session"
-        return v
-
-    def _next_seq(run_vars: Dict[str, Any]) -> int:
-        temp = run_vars.get("_temp")
-        if not isinstance(temp, dict):
-            temp = {}
-            run_vars["_temp"] = temp
-        seqs = temp.get("event_seq")
-        if not isinstance(seqs, dict):
-            seqs = {}
-            temp["event_seq"] = seqs
-        raw = seqs.get(node_id, 0)
-        try:
-            cur = int(raw or 0)
-        except Exception:
-            cur = 0
-        nxt = cur + 1
-        seqs[node_id] = nxt
-        return nxt
-
-    def handler(run: Any, ctx: Any) -> "StepPlan":
-        del ctx
-        resolved = resolve_inputs(run)
-
-        name_raw = resolved.get("name") or resolved.get("event_name") or default_name2
-        name = str(name_raw or "").strip()
-        if not name:
-            raise ValueError(f"emit_event node '{node_id}' missing event name")
-
-        scope_norm = _normalize_scope(resolved.get("scope") if "scope" in resolved else default_scope)
-
-        payload = resolved.get("payload")
-        if isinstance(payload, dict):
-            payload_dict: Dict[str, Any] = dict(payload)
-        elif payload is None:
-            payload_dict = {}
-        else:
-            # Event payloads are stored durably and delivered over the network.
-            # We normalize non-dict values under {"value": ...} for a stable shape.
-            payload_dict = {"value": payload}
-
-        target_session_id = resolved.get("session_id")
-        if target_session_id is None and isinstance(default_session_id, str) and default_session_id.strip():
-            target_session_id = default_session_id.strip()
-        if isinstance(target_session_id, str) and not target_session_id.strip():
-            target_session_id = None
-
-        seq = _next_seq(run.vars)
-        event_id = f"{run.run_id}:{node_id}:{seq}"
-
-        eff_payload: Dict[str, Any] = {
-            "scope": scope_norm,
-            "name": name,
-            "payload": payload_dict,
-            "event_id": event_id,
-            # IMPORTANT (Blueprint semantics + observability):
-            # - Emit should resume listeners durably, but hosts (WS loop / schedulers)
-            #   should drive execution so we can stream node_start/node_complete in-order.
-            # - This avoids "invisible" listener execution that happens inside the emitter tick.
-            "max_steps": 0,
-        }
-        if isinstance(target_session_id, str) and target_session_id.strip():
-            eff_payload["session_id"] = target_session_id.strip()
-
-        effect = Effect(
-            type=EffectType.EMIT_EVENT,
-            payload=eff_payload,
-            result_key=f"_temp.effects.{node_id}",
-        )
-
-        return StepPlan(node_id=node_id, effect=effect, next_node=next_node)
-
-    return handler
+from abstractruntime.visualflow_compiler.adapters.event_adapter import *  # noqa: F401,F403
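The 0.3.1 module body above is a thin compatibility shim: the event handler factories have moved into abstractruntime, and the old abstractflow path now star-imports from the new location shown in the diff. A minimal sketch of what that implies for downstream imports, assuming the factory names (e.g. `create_on_event_node_handler`) are unchanged after the move:

```python
# Sketch only: assumes the factories kept their names when they moved to
# abstractruntime.visualflow_compiler. If so, the old and new import paths
# resolve to the same objects, because the old module is now a star re-export.
from abstractflow.adapters import event_adapter as legacy
from abstractruntime.visualflow_compiler.adapters import event_adapter as moved

assert legacy.create_on_event_node_handler is moved.create_on_event_node_handler
assert legacy.create_emit_event_node_handler is moved.create_emit_event_node_handler
```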
--- abstractflow/adapters/function_adapter.py (0.3.0)
+++ abstractflow/adapters/function_adapter.py (0.3.1)
@@ -1,97 +1,5 @@
-"""
+"""Re-export: AbstractRuntime VisualFlow compiler adapter."""
 
 from __future__ import annotations
 
-from
-
-if TYPE_CHECKING:
-    from abstractruntime.core.models import RunState, StepPlan
-
-
-def create_function_node_handler(
-    node_id: str,
-    func: Callable[[Any], Any],
-    next_node: Optional[str],
-    input_key: Optional[str] = None,
-    output_key: Optional[str] = None,
-) -> Callable:
-    """Create a node handler that runs a Python function.
-
-    Function nodes execute synchronously within the workflow. They're ideal for:
-    - Data transformations
-    - Validation logic
-    - Aggregating results from previous nodes
-
-    Args:
-        node_id: Unique identifier for this node
-        func: The function to execute. Receives input data and returns result.
-        next_node: ID of the next node to transition to (None for terminal)
-        input_key: Key in run.vars to read input from (uses full vars if not set)
-        output_key: Key in run.vars to write output to
-
-    Returns:
-        A node handler function compatible with AbstractRuntime
-
-    Example:
-        >>> def double(x):
-        ...     return x * 2
-        >>> handler = create_function_node_handler("double", double, "next", "input", "result")
-    """
-    # Import here to avoid import-time dependency
-    from abstractruntime.core.models import StepPlan
-
-    def handler(run: "RunState", ctx: Any) -> "StepPlan":
-        """Execute the function and transition to next node."""
-        # Get input from vars
-        if input_key:
-            input_data = run.vars.get(input_key)
-        else:
-            input_data = run.vars
-
-        # Execute function
-        try:
-            result = func(input_data)
-        except Exception as e:
-            # Store error and fail the flow
-            run.vars["_flow_error"] = str(e)
-            run.vars["_flow_error_node"] = node_id
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"error": str(e), "success": False, "node": node_id},
-            )
-
-        # Store result in vars
-        if output_key:
-            _set_nested(run.vars, output_key, result)
-
-        # Continue to next node or complete
-        if next_node:
-            return StepPlan(node_id=node_id, next_node=next_node)
-        else:
-            # Terminal node - complete with result
-            return StepPlan(
-                node_id=node_id,
-                complete_output={"result": result, "success": True},
-            )
-
-    return handler
-
-
-def _set_nested(target: Dict[str, Any], dotted_key: str, value: Any) -> None:
-    """Set a nested dictionary value using dot notation.
-
-    Example:
-        >>> d = {}
-        >>> _set_nested(d, "a.b.c", 123)
-        >>> d
-        {'a': {'b': {'c': 123}}}
-    """
-    parts = dotted_key.split(".")
-    cur = target
-    for p in parts[:-1]:
-        nxt = cur.get(p)
-        if not isinstance(nxt, dict):
-            nxt = {}
-            cur[p] = nxt
-        cur = nxt
-    cur[parts[-1]] = value
+from abstractruntime.visualflow_compiler.adapters.function_adapter import *  # noqa: F401,F403
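The function adapter follows the same pattern. One side effect of a star re-export worth noting (general Python behavior, not something this diff states): `from ... import *` only pulls in public names, so a private helper such as the removed `_set_nested` is no longer reachable through the old module unless the upstream module exports it via `__all__`. A quick hedged check:

```python
# Hedged check: `import *` skips underscore-prefixed names unless the source
# module lists them in __all__, so the private dot-notation helper that used
# to live here is probably absent from this namespace in 0.3.1.
from abstractflow.adapters import function_adapter

print(hasattr(function_adapter, "create_function_node_handler"))  # expected True if the public name survived the move
print(hasattr(function_adapter, "_set_nested"))                   # expected False (private name, not star-exported)
```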
--- abstractflow/adapters/subflow_adapter.py (0.3.0)
+++ abstractflow/adapters/subflow_adapter.py (0.3.1)
@@ -1,74 +1,5 @@
-"""
+"""Re-export: AbstractRuntime VisualFlow compiler adapter."""
 
 from __future__ import annotations
 
-from
-
-if TYPE_CHECKING:
-    from abstractruntime.core.models import RunState, StepPlan
-    from abstractruntime.core.spec import WorkflowSpec
-
-
-def create_subflow_node_handler(
-    node_id: str,
-    nested_workflow: "WorkflowSpec",
-    next_node: Optional[str],
-    input_key: Optional[str] = None,
-    output_key: Optional[str] = None,
-) -> Callable:
-    """Create a node handler that runs a nested flow as a subworkflow.
-
-    Subflow nodes enable hierarchical flow composition. A nested flow runs
-    as a subworkflow with its own state, completing before the parent continues.
-
-    Args:
-        node_id: Unique identifier for this node
-        nested_workflow: The compiled WorkflowSpec of the nested flow
-        next_node: ID of the next node to transition to (None for terminal)
-        input_key: Key in run.vars to read input from
-        output_key: Key in run.vars to write output to
-
-    Returns:
-        A node handler function compatible with AbstractRuntime
-
-    Example:
-        >>> inner_flow = Flow("preprocessing")
-        >>> # ... define inner flow ...
-        >>> inner_spec = compile_flow(inner_flow)
-        >>> handler = create_subflow_node_handler("preprocess", inner_spec, "main")
-    """
-    from abstractruntime.core.models import Effect, EffectType, StepPlan
-
-    def handler(run: "RunState", ctx: Any) -> "StepPlan":
-        """Start the nested flow as a subworkflow."""
-        # Get input from parent flow's vars
-        subflow_vars: Dict[str, Any] = {}
-
-        if input_key:
-            input_data = run.vars.get(input_key, {})
-            if isinstance(input_data, dict):
-                subflow_vars = dict(input_data)
-            else:
-                subflow_vars = {"input": input_data}
-        else:
-            # Copy relevant vars to subflow
-            subflow_vars = {
-                "context": run.vars.get("context", {}),
-            }
-
-        # Use START_SUBWORKFLOW effect
-        return StepPlan(
-            node_id=node_id,
-            effect=Effect(
-                type=EffectType.START_SUBWORKFLOW,
-                payload={
-                    "workflow_id": nested_workflow.workflow_id,
-                    "vars": subflow_vars,
-                    "async": False,  # Sync: wait for completion
-                },
-                result_key=output_key or f"_flow.{node_id}.result",
-            ),
-            next_node=next_node,
-        )
-
-    return handler
+from abstractruntime.visualflow_compiler.adapters.subflow_adapter import *  # noqa: F401,F403
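For reference, the removed subflow factory (now reachable through the re-export above, assuming it kept its pre-0.3.1 signature at its new location) plans a START_SUBWORKFLOW effect per call. A minimal usage sketch based on the removed code's own docstring; `inner_spec` is a hypothetical stand-in for a compiled WorkflowSpec you would already have:

```python
# Sketch only, assuming the old signature survived the move. The handler
# returns a StepPlan whose effect starts the nested workflow synchronously
# and stores its result under output_key (or "_flow.<node_id>.result").
from types import SimpleNamespace

from abstractflow.adapters.subflow_adapter import create_subflow_node_handler

handler = create_subflow_node_handler(
    node_id="preprocess",
    nested_workflow=inner_spec,   # hypothetical: a compiled WorkflowSpec
    next_node="main",
    input_key="raw",
    output_key="preprocessed",
)
plan = handler(SimpleNamespace(vars={"raw": {"text": "hello"}}), ctx=None)
# plan.effect.type should be EffectType.START_SUBWORKFLOW; plan.next_node == "main"
```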