abstractflow 0.1.0-py3-none-any.whl → 0.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractflow/__init__.py +74 -94
- abstractflow/__main__.py +2 -0
- abstractflow/adapters/__init__.py +11 -0
- abstractflow/adapters/agent_adapter.py +5 -0
- abstractflow/adapters/control_adapter.py +5 -0
- abstractflow/adapters/effect_adapter.py +5 -0
- abstractflow/adapters/event_adapter.py +5 -0
- abstractflow/adapters/function_adapter.py +5 -0
- abstractflow/adapters/subflow_adapter.py +5 -0
- abstractflow/adapters/variable_adapter.py +5 -0
- abstractflow/cli.py +75 -28
- abstractflow/compiler.py +23 -0
- abstractflow/core/__init__.py +5 -0
- abstractflow/core/flow.py +11 -0
- abstractflow/py.typed +2 -0
- abstractflow/runner.py +402 -0
- abstractflow/visual/__init__.py +43 -0
- abstractflow/visual/agent_ids.py +5 -0
- abstractflow/visual/builtins.py +5 -0
- abstractflow/visual/code_executor.py +5 -0
- abstractflow/visual/event_ids.py +33 -0
- abstractflow/visual/executor.py +968 -0
- abstractflow/visual/interfaces.py +440 -0
- abstractflow/visual/models.py +277 -0
- abstractflow/visual/session_runner.py +182 -0
- abstractflow/visual/workspace_scoped_tools.py +29 -0
- abstractflow/workflow_bundle.py +290 -0
- abstractflow-0.3.1.dist-info/METADATA +186 -0
- abstractflow-0.3.1.dist-info/RECORD +33 -0
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/WHEEL +1 -1
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/licenses/LICENSE +2 -0
- abstractflow-0.1.0.dist-info/METADATA +0 -238
- abstractflow-0.1.0.dist-info/RECORD +0 -10
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/entry_points.txt +0 -0
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/top_level.txt +0 -0
abstractflow/visual/interfaces.py (new file)
@@ -0,0 +1,440 @@
+"""VisualFlow interface contracts (portable host validation).
+
+This module defines *declarative* workflow interface markers and best-effort
+validators so hosts (e.g. AbstractCode) can safely treat a workflow as a
+specialized capability with a known IO contract.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple
+
+from .models import VisualFlow
+
+
+ABSTRACTCODE_AGENT_V1 = "abstractcode.agent.v1"
+
+
+@dataclass(frozen=True)
+class VisualFlowInterfaceSpec:
+    interface_id: str
+    label: str
+    description: str
+    required_start_outputs: Mapping[str, str]
+    required_end_inputs: Mapping[str, str]
+    recommended_start_outputs: Mapping[str, str] = None  # type: ignore[assignment]
+    recommended_end_inputs: Mapping[str, str] = None  # type: ignore[assignment]
+
+
+def _pin_types(pins: Any) -> Dict[str, str]:
+    """Return {pin_id -> type_str} for a pin list.
+
+    VisualFlow stores pins inside the node's `data.inputs/outputs` lists.
+    """
+    out: Dict[str, str] = {}
+    if not isinstance(pins, list):
+        return out
+    for p in pins:
+        if not isinstance(p, dict):
+            continue
+        pid = p.get("id")
+        if not isinstance(pid, str) or not pid:
+            continue
+        ptype = p.get("type")
+        t = ptype.value if hasattr(ptype, "value") else str(ptype or "")
+        out[pid] = t
+    return out
+
+
+def _node_type_str(node: Any) -> str:
+    t = getattr(node, "type", None)
+    return t.value if hasattr(t, "value") else str(t or "")
+
+
+def _iter_nodes(flow: VisualFlow) -> Iterable[Any]:
+    for n in getattr(flow, "nodes", []) or []:
+        yield n
+
+
+def get_interface_specs() -> Dict[str, VisualFlowInterfaceSpec]:
+    """Return known interface specs (by id)."""
+    return {
+        ABSTRACTCODE_AGENT_V1: VisualFlowInterfaceSpec(
+            interface_id=ABSTRACTCODE_AGENT_V1,
+            label="RunnableFlow (v1)",
+            description=(
+                "Host-configurable prompt → response contract for running a workflow in chat-like clients (AbstractCode, AbstractObserver, etc)."
+            ),
+            required_start_outputs={
+                "provider": "provider",
+                "model": "model",
+                "prompt": "string",
+            },
+            required_end_inputs={
+                "response": "string",
+                "success": "boolean",
+                "meta": "object",
+            },
+            recommended_start_outputs={
+                "use_context": "boolean",
+                "memory": "memory",
+                "context": "object",
+                "system": "string",
+                "tools": "tools",
+                "max_iterations": "number",
+                "max_in_tokens": "number",
+                "temperature": "number",
+                "seed": "number",
+                "resp_schema": "object",
+            },
+            recommended_end_inputs={
+                # Optional but commonly wired for host UX:
+                "scratchpad": "object",
+            },
+        ),
+    }
+
+
+def validate_visual_flow_interface(flow: VisualFlow, interface_id: str) -> List[str]:
+    """Validate that a VisualFlow implements a known interface contract.
+
+    Returns a list of human-friendly error strings (empty when valid).
+    """
+    errors: List[str] = []
+    iid = str(interface_id or "").strip()
+    if not iid:
+        return ["interface_id is required"]
+
+    spec = get_interface_specs().get(iid)
+    if spec is None:
+        return [f"Unknown interface_id: {iid}"]
+
+    declared = getattr(flow, "interfaces", None)
+    declared_list = list(declared) if isinstance(declared, list) else []
+    if iid not in declared_list:
+        errors.append(f"Flow must declare interfaces: ['{iid}']")
+
+    starts = [n for n in _iter_nodes(flow) if _node_type_str(n) == "on_flow_start"]
+    if not starts:
+        errors.append("Flow must include an On Flow Start node (type=on_flow_start).")
+        return errors
+    if len(starts) > 1:
+        errors.append("Flow must include exactly one On Flow Start node (found multiple).")
+        return errors
+
+    ends = [n for n in _iter_nodes(flow) if _node_type_str(n) == "on_flow_end"]
+    if not ends:
+        errors.append("Flow must include at least one On Flow End node (type=on_flow_end).")
+        return errors
+
+    start = starts[0]
+    start_data = getattr(start, "data", None)
+    start_out = _pin_types(start_data.get("outputs") if isinstance(start_data, dict) else None)
+
+    for pin_id, expected_type in dict(spec.required_start_outputs).items():
+        if pin_id not in start_out:
+            errors.append(f"On Flow Start must expose an output pin '{pin_id}' ({expected_type}).")
+            continue
+        actual = start_out.get(pin_id) or ""
+        if expected_type and actual and actual != expected_type:
+            errors.append(
+                f"On Flow Start pin '{pin_id}' must be type '{expected_type}' (got '{actual}')."
+            )
+
+    # Validate all end nodes: whichever executes must satisfy the contract.
+    for end in ends:
+        end_data = getattr(end, "data", None)
+        end_in = _pin_types(end_data.get("inputs") if isinstance(end_data, dict) else None)
+        for pin_id, expected_type in dict(spec.required_end_inputs).items():
+            if pin_id not in end_in:
+                errors.append(
+                    f"On Flow End node '{getattr(end, 'id', '')}' must expose an input pin '{pin_id}' ({expected_type})."
+                )
+                continue
+            actual = end_in.get(pin_id) or ""
+            if expected_type and actual and actual != expected_type:
+                errors.append(
+                    f"On Flow End node '{getattr(end, 'id', '')}' pin '{pin_id}' must be type '{expected_type}' (got '{actual}')."
+                )
+
+    return errors
+
+
+def apply_visual_flow_interface_scaffold(
+    flow: VisualFlow,
+    interface_id: str,
+    *,
+    include_recommended: bool = True,
+) -> bool:
+    """Best-effort: apply a known interface's pin scaffolding to a VisualFlow.
+
+    This is intended for authoring UX:
+    - When a workflow is marked as implementing an interface, we ensure the
+      required pins exist on the expected nodes (On Flow Start / On Flow End).
+    - If those nodes are missing, we create them (unconnected) so the author
+      has a correct starting point.
+
+    Returns True if the flow was mutated.
+    """
+    iid = str(interface_id or "").strip()
+    spec = get_interface_specs().get(iid)
+    if spec is None:
+        return False
+
+    def _pin_dict(pin_id: str, type_str: str, *, label: Optional[str] = None) -> Dict[str, Any]:
+        return {"id": pin_id, "label": label or pin_id, "type": type_str}
+
+    def _ensure_pin(
+        pins: list[Any],
+        *,
+        pin_id: str,
+        type_str: str,
+        label: Optional[str] = None,
+    ) -> bool:
+        for p in pins:
+            if isinstance(p, dict) and p.get("id") == pin_id:
+                # Ensure type matches the interface contract.
+                if p.get("type") != type_str:
+                    p["type"] = type_str
+                    return True
+                return False
+        pins.append(_pin_dict(pin_id, type_str, label=label))
+        return True
+
+    def _ensure_exec_pin(pins: list[Any], *, pin_id: str, direction: str) -> bool:
+        # We keep exec pins present because most authoring UX expects them, even though the
+        # interface contract itself only speaks about data pins.
+        if not isinstance(direction, str) or direction not in {"in", "out"}:
+            direction = "out"
+        changed = False
+        for p in pins:
+            if isinstance(p, dict) and p.get("id") == pin_id:
+                if p.get("type") != "execution":
+                    p["type"] = "execution"
+                    changed = True
+                # exec pins typically have empty label; keep existing label if present.
+                return changed
+        # Prepend exec pins for readability.
+        pins.insert(0, {"id": pin_id, "label": "", "type": "execution"})
+        return True
+
+    def _reorder_pins(pins: list[Any], *, desired_ids: list[str]) -> bool:
+        """Reorder pins in-place so interface pins appear in a stable, readable order."""
+        if not isinstance(pins, list) or not desired_ids:
+            return False
+        ordered: list[Any] = []
+        seen: set[str] = set()
+
+        def _first_pin(pid: str) -> Any | None:
+            for p in pins:
+                if isinstance(p, dict) and p.get("id") == pid:
+                    return p
+            return None
+
+        for pid in desired_ids:
+            if pid in seen:
+                continue
+            p = _first_pin(pid)
+            if p is None:
+                continue
+            ordered.append(p)
+            seen.add(pid)
+
+        for p in pins:
+            pid = p.get("id") if isinstance(p, dict) else None
+            if isinstance(pid, str) and pid in seen:
+                continue
+            ordered.append(p)
+
+        if ordered == pins:
+            return False
+        pins[:] = ordered
+        return True
+
+    # Desired pins (required + optional recommended).
+    start_pins = dict(spec.required_start_outputs)
+    end_pins = dict(spec.required_end_inputs)
+    if include_recommended:
+        if isinstance(spec.recommended_start_outputs, Mapping):
+            for k, v in dict(spec.recommended_start_outputs).items():
+                start_pins.setdefault(str(k), str(v))
+        if isinstance(spec.recommended_end_inputs, Mapping):
+            for k, v in dict(spec.recommended_end_inputs).items():
+                end_pins.setdefault(str(k), str(v))
+
+    # Locate nodes.
+    nodes = list(getattr(flow, "nodes", []) or [])
+    used_ids = {str(getattr(n, "id", "") or "") for n in nodes}
+
+    def _unique_node_id(base: str) -> str:
+        b = str(base or "").strip() or "node"
+        if b not in used_ids:
+            used_ids.add(b)
+            return b
+        i = 2
+        while True:
+            cand = f"{b}-{i}"
+            if cand not in used_ids:
+                used_ids.add(cand)
+                return cand
+            i += 1
+
+    def _ensure_nodes() -> Tuple[Any, List[Any], bool]:
+        changed_local = False
+        starts = [n for n in nodes if _node_type_str(n) == "on_flow_start"]
+        ends = [n for n in nodes if _node_type_str(n) == "on_flow_end"]
+
+        if not starts:
+            try:
+                from .models import NodeType, Position, VisualNode
+            except Exception:
+                # Should not happen in normal installs; bail out gracefully.
+                return (None, ends, False)
+            start_id = _unique_node_id("start")
+            start = VisualNode(
+                id=start_id,
+                type=NodeType.ON_FLOW_START,
+                position=Position(x=-420.0, y=120.0),
+                data={
+                    "nodeType": "on_flow_start",
+                    "label": "On Flow Start",
+                    "icon": "🏁",
+                    "headerColor": "#C0392B",
+                    "inputs": [],
+                    "outputs": [{"id": "exec-out", "label": "", "type": "execution"}],
+                },
+            )
+            nodes.insert(0, start)
+            changed_local = True
+            starts = [start]
+
+        if not ends:
+            try:
+                from .models import NodeType, Position, VisualNode
+            except Exception:
+                return (starts[0], [], changed_local)
+            end_id = _unique_node_id("end")
+            end = VisualNode(
+                id=end_id,
+                type=NodeType.ON_FLOW_END,
+                position=Position(x=260.0, y=120.0),
+                data={
+                    "nodeType": "on_flow_end",
+                    "label": "On Flow End",
+                    "icon": "⏹",
+                    "headerColor": "#C0392B",
+                    "inputs": [{"id": "exec-in", "label": "", "type": "execution"}],
+                    "outputs": [],
+                },
+            )
+            nodes.append(end)
+            changed_local = True
+            ends = [end]
+
+        return (starts[0], ends, changed_local)
+
+    start_node, end_nodes, changed = _ensure_nodes()
+    if start_node is None:
+        return False
+
+    # Ensure pins on start.
+    start_data = getattr(start_node, "data", None)
+    if not isinstance(start_data, dict):
+        start_data = {}
+        setattr(start_node, "data", start_data)
+        changed = True
+    outputs = start_data.get("outputs")
+    if not isinstance(outputs, list):
+        outputs = []
+        start_data["outputs"] = outputs
+        changed = True
+    changed = _ensure_exec_pin(outputs, pin_id="exec-out", direction="out") or changed
+    for pid, t in start_pins.items():
+        changed = _ensure_pin(outputs, pin_id=str(pid), type_str=str(t), label=str(pid)) or changed
+
+    desired_start_order = [
+        "exec-out",
+        "use_context",
+        "memory",
+        "context",
+        "provider",
+        "model",
+        "system",
+        "prompt",
+        "tools",
+        "max_iterations",
+        "max_in_tokens",
+        "temperature",
+        "seed",
+        "resp_schema",
+    ]
+    changed = _reorder_pins(outputs, desired_ids=desired_start_order) or changed
+
+    # Ensure pins on all end nodes.
+    for end in end_nodes:
+        end_data = getattr(end, "data", None)
+        if not isinstance(end_data, dict):
+            end_data = {}
+            setattr(end, "data", end_data)
+            changed = True
+        inputs = end_data.get("inputs")
+        if not isinstance(inputs, list):
+            inputs = []
+            end_data["inputs"] = inputs
+            changed = True
+
+        # Backward-compat cleanup: remove deprecated interface pins (`result` / `raw_result`)
+        # when they are not part of the current desired contract.
+        deprecated_end_pins = {"result", "raw_result"}
+        if not any(pid in end_pins for pid in deprecated_end_pins):
+            removed: set[str] = set()
+            kept: list[Any] = []
+            for p in inputs:
+                pid = p.get("id") if isinstance(p, dict) else None
+                if isinstance(pid, str) and pid in deprecated_end_pins:
+                    removed.add(pid)
+                    changed = True
+                    continue
+                kept.append(p)
+            if removed:
+                inputs[:] = kept
+                # Remove edges that targeted the deprecated pins (best-effort).
+                try:
+                    flow_edges = getattr(flow, "edges", None)
+                    if isinstance(flow_edges, list):
+                        flow.edges = [
+                            e
+                            for e in flow_edges
+                            if not (
+                                getattr(e, "target", None) == getattr(end, "id", None)
+                                and getattr(e, "targetHandle", None) in removed
+                            )
+                        ]
+                except Exception:
+                    pass
+
+        changed = _ensure_exec_pin(inputs, pin_id="exec-in", direction="in") or changed
+        for pid, t in end_pins.items():
+            changed = _ensure_pin(inputs, pin_id=str(pid), type_str=str(t), label=str(pid)) or changed
+
+        # Keep interface pins in a predictable order for UX.
+        desired_end_order = ["exec-in", "response", "success", "meta", "scratchpad"]
+        changed = _reorder_pins(inputs, desired_ids=desired_end_order) or changed
+
+    # Write back nodes list if it was reconstructed.
+    try:
+        flow.nodes = nodes  # type: ignore[assignment]
+    except Exception:
+        pass
+
+    # Ensure entryNode points at the start when missing/empty.
+    try:
+        entry = getattr(flow, "entryNode", None)
+        if not isinstance(entry, str) or not entry.strip():
+            flow.entryNode = str(getattr(start_node, "id", "") or "") or None
+            changed = True
+    except Exception:
+        pass
+
+    return bool(changed)
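For orientation, below is a minimal, hypothetical usage sketch of the new helpers in `abstractflow/visual/interfaces.py`. It is not part of the published diff: only the names visible in the diff (`ABSTRACTCODE_AGENT_V1`, `apply_visual_flow_interface_scaffold`, `validate_visual_flow_interface`, `VisualFlow`) are used, and loading or saving the flow is assumed to happen elsewhere. Note that the scaffold only ensures node/pin structure; per the validator, the flow must also declare the interface id in its `interfaces` list to pass validation.

```python
# Hypothetical sketch (not part of the published package) combining the new
# interface helpers added in abstractflow 0.3.1.
from abstractflow.visual.interfaces import (
    ABSTRACTCODE_AGENT_V1,
    apply_visual_flow_interface_scaffold,
    validate_visual_flow_interface,
)
from abstractflow.visual.models import VisualFlow  # flow loading assumed elsewhere


def ensure_agent_contract(flow: VisualFlow) -> bool:
    """Scaffold and validate a flow against the abstractcode.agent.v1 contract."""
    # Best-effort: creates On Flow Start / On Flow End nodes and the required
    # pins when missing; returns True if the flow was mutated.
    mutated = apply_visual_flow_interface_scaffold(
        flow, ABSTRACTCODE_AGENT_V1, include_recommended=True
    )
    # Returns human-friendly error strings; an empty list means the flow
    # satisfies the interface contract (including the declared interfaces list).
    errors = validate_visual_flow_interface(flow, ABSTRACTCODE_AGENT_V1)
    if errors:
        raise ValueError(
            "Flow does not implement abstractcode.agent.v1:\n" + "\n".join(errors)
        )
    return mutated
```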