abstractflow 0.1.0__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractflow/__init__.py +74 -94
- abstractflow/__main__.py +2 -0
- abstractflow/adapters/__init__.py +11 -0
- abstractflow/adapters/agent_adapter.py +5 -0
- abstractflow/adapters/control_adapter.py +5 -0
- abstractflow/adapters/effect_adapter.py +5 -0
- abstractflow/adapters/event_adapter.py +5 -0
- abstractflow/adapters/function_adapter.py +5 -0
- abstractflow/adapters/subflow_adapter.py +5 -0
- abstractflow/adapters/variable_adapter.py +5 -0
- abstractflow/cli.py +75 -28
- abstractflow/compiler.py +23 -0
- abstractflow/core/__init__.py +5 -0
- abstractflow/core/flow.py +11 -0
- abstractflow/py.typed +2 -0
- abstractflow/runner.py +402 -0
- abstractflow/visual/__init__.py +43 -0
- abstractflow/visual/agent_ids.py +5 -0
- abstractflow/visual/builtins.py +5 -0
- abstractflow/visual/code_executor.py +5 -0
- abstractflow/visual/event_ids.py +33 -0
- abstractflow/visual/executor.py +968 -0
- abstractflow/visual/interfaces.py +440 -0
- abstractflow/visual/models.py +277 -0
- abstractflow/visual/session_runner.py +182 -0
- abstractflow/visual/workspace_scoped_tools.py +29 -0
- abstractflow/workflow_bundle.py +290 -0
- abstractflow-0.3.1.dist-info/METADATA +186 -0
- abstractflow-0.3.1.dist-info/RECORD +33 -0
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/WHEEL +1 -1
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/licenses/LICENSE +2 -0
- abstractflow-0.1.0.dist-info/METADATA +0 -238
- abstractflow-0.1.0.dist-info/RECORD +0 -10
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/entry_points.txt +0 -0
- {abstractflow-0.1.0.dist-info → abstractflow-0.3.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,277 @@
|
|
|
1
|
+
"""Pydantic models for the AbstractFlow visual workflow JSON format.
|
|
2
|
+
|
|
3
|
+
These models are intentionally kept in the `abstractflow` package so workflows
|
|
4
|
+
authored in the visual editor can be loaded and executed from any host (CLI,
|
|
5
|
+
AbstractCode, servers), not only the web backend.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from enum import Enum
|
|
11
|
+
from datetime import datetime, timezone
|
|
12
|
+
import uuid
|
|
13
|
+
from typing import Any, Dict, List, Optional
|
|
14
|
+
|
|
15
|
+
from pydantic import BaseModel, Field
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class PinType(str, Enum):
    """Types of pins with their colors.

    Values are the wire-format strings persisted in VisualFlow JSON; the
    color notes describe how the visual editor renders each pin type.
    """

    EXECUTION = "execution"  # White #FFFFFF - Flow control
    STRING = "string"  # Magenta #FF00FF - Text data
    NUMBER = "number"  # Green #00FF00 - Integer/Float
    BOOLEAN = "boolean"  # Red #FF0000 - True/False
    OBJECT = "object"  # Cyan #00FFFF - JSON objects
    MEMORY = "memory"  # Mint - Memory configuration object (KG/span/session controls)
    ASSERTION = "assertion"  # Teal - KG assertion object (subject/predicate/object + metadata)
    ASSERTIONS = "assertions"  # Teal - List of KG assertion objects (assertion[])
    ARRAY = "array"  # Orange #FF8800 - Collections
    TOOLS = "tools"  # Orange - Tool allowlist (string[])
    PROVIDER = "provider"  # Cyan-blue - LLM provider id/name (string-like)
    MODEL = "model"  # Purple - LLM model id/name (string-like)
    AGENT = "agent"  # Blue #4488FF - Agent reference
    ANY = "any"  # Gray #888888 - Accepts any type
|
+
class NodeType(str, Enum):
    """Types of nodes in the visual editor.

    Values are the wire-format strings stored in ``VisualNode.type``.
    Members are grouped by palette category (events, math, string, control,
    data, literals, effects); renaming a value is a breaking change for any
    previously saved workflow JSON.
    """

    # Event/Trigger nodes (entry points)
    ON_FLOW_START = "on_flow_start"
    ON_USER_REQUEST = "on_user_request"
    ON_AGENT_MESSAGE = "on_agent_message"
    ON_SCHEDULE = "on_schedule"
    ON_EVENT = "on_event"
    # Flow IO nodes
    ON_FLOW_END = "on_flow_end"
    # Core execution nodes
    AGENT = "agent"
    FUNCTION = "function"
    CODE = "code"
    SUBFLOW = "subflow"
    # Math
    ADD = "add"
    SUBTRACT = "subtract"
    MULTIPLY = "multiply"
    DIVIDE = "divide"
    MODULO = "modulo"
    POWER = "power"
    ABS = "abs"
    ROUND = "round"
    RANDOM_INT = "random_int"
    RANDOM_FLOAT = "random_float"
    # String
    CONCAT = "concat"
    SPLIT = "split"
    JOIN = "join"
    FORMAT = "format"
    STRING_TEMPLATE = "string_template"
    UPPERCASE = "uppercase"
    LOWERCASE = "lowercase"
    TRIM = "trim"
    SUBSTRING = "substring"
    LENGTH = "length"
    CONTAINS = "contains"
    REPLACE = "replace"
    # Control
    IF = "if"
    SWITCH = "switch"
    LOOP = "loop"
    WHILE = "while"
    FOR = "for"
    SEQUENCE = "sequence"
    PARALLEL = "parallel"
    COMPARE = "compare"
    NOT = "not"
    AND = "and"
    OR = "or"
    COALESCE = "coalesce"
    # Data
    GET = "get"
    SET = "set"
    MERGE = "merge"
    MAKE_ARRAY = "make_array"
    MAKE_OBJECT = "make_object"
    MAKE_CONTEXT = "make_context"
    MAKE_META = "make_meta"
    MAKE_SCRATCHPAD = "make_scratchpad"
    GET_ELEMENT = "get_element"
    GET_RANDOM_ELEMENT = "get_random_element"
    ARRAY_MAP = "array_map"
    ARRAY_FILTER = "array_filter"
    ARRAY_CONCAT = "array_concat"
    ARRAY_LENGTH = "array_length"
    ARRAY_APPEND = "array_append"
    ARRAY_DEDUP = "array_dedup"
    HAS_TOOLS = "has_tools"
    ADD_MESSAGE = "add_message"
    GET_CONTEXT = "get_context"
    GET_VAR = "get_var"
    SET_VAR = "set_var"
    SET_VARS = "set_vars"
    SET_VAR_PROPERTY = "set_var_property"
    PARSE_JSON = "parse_json"
    STRINGIFY_JSON = "stringify_json"
    FORMAT_TOOL_RESULTS = "format_tool_results"
    AGENT_TRACE_REPORT = "agent_trace_report"
    BREAK_OBJECT = "break_object"
    SYSTEM_DATETIME = "system_datetime"
    MODEL_CATALOG = "model_catalog"
    PROVIDER_CATALOG = "provider_catalog"
    PROVIDER_MODELS = "provider_models"
    # Literals
    LITERAL_STRING = "literal_string"
    LITERAL_NUMBER = "literal_number"
    LITERAL_BOOLEAN = "literal_boolean"
    LITERAL_JSON = "literal_json"
    JSON_SCHEMA = "json_schema"
    LITERAL_ARRAY = "literal_array"
    TOOL_PARAMETERS = "tool_parameters"
    # Effects
    ASK_USER = "ask_user"
    ANSWER_USER = "answer_user"
    LLM_CALL = "llm_call"
    WAIT_UNTIL = "wait_until"
    WAIT_EVENT = "wait_event"
    EMIT_EVENT = "emit_event"
    READ_FILE = "read_file"
    WRITE_FILE = "write_file"
    MEMORY_NOTE = "memory_note"
    MEMORY_QUERY = "memory_query"
    MEMORY_TAG = "memory_tag"
    MEMORY_COMPACT = "memory_compact"
    MEMORY_REHYDRATE = "memory_rehydrate"
    MEMORY_KG_ASSERT = "memory_kg_assert"
    MEMORY_KG_QUERY = "memory_kg_query"
    MEMORY_KG_RESOLVE = "memory_kg_resolve"
    MEMACT_COMPOSE = "memact_compose"
    TOOL_CALLS = "tool_calls"
    CALL_TOOL = "call_tool"
    TOOLS_ALLOWLIST = "tools_allowlist"
    BOOL_VAR = "bool_var"
    VAR_DECL = "var_decl"
|
|
156
|
+
class Pin(BaseModel):
    """A connection point on a node."""

    # Stable identifier; referenced by VisualEdge.sourceHandle / targetHandle.
    id: str
    # Human-readable name shown next to the pin in the editor.
    label: str
    # Pin type (see PinType for wire values and editor colors).
    type: PinType
|
|
164
|
+
class Position(BaseModel):
    """2D position on canvas.

    Coordinates are canvas-space floats as produced by the visual editor.
    """

    x: float
    y: float
|
171
|
+
class VisualNode(BaseModel):
    """A node in the visual flow editor."""

    # NOTE(review): ids are UUID4s truncated to 8 hex chars — compact, but
    # collision-prone at scale; confirm uniqueness is enforced elsewhere.
    id: str = Field(default_factory=lambda: str(uuid.uuid4())[:8])
    # Node kind; drives which inputs/outputs/data keys are meaningful.
    type: NodeType
    position: Position
    # Arbitrary per-node payload (JSON object), interpreted per NodeType.
    data: Dict[str, Any] = Field(default_factory=dict)
    # Node display properties (from template)
    label: Optional[str] = None
    icon: Optional[str] = None
    headerColor: Optional[str] = None
    inputs: List[Pin] = Field(default_factory=list)
    outputs: List[Pin] = Field(default_factory=list)
|
186
|
+
class VisualEdge(BaseModel):
    """An edge connecting two nodes.

    Field names are camelCase to match the editor's JSON wire format.
    """

    id: str = Field(default_factory=lambda: str(uuid.uuid4())[:8])
    # Source node id.
    source: str
    sourceHandle: str  # Pin ID on source node
    # Target node id.
    target: str
    targetHandle: str  # Pin ID on target node
    # Editor-only rendering hint; has no execution semantics.
    animated: bool = False
|
197
|
+
class VisualFlow(BaseModel):
    """A complete visual flow definition."""

    id: str = Field(default_factory=lambda: str(uuid.uuid4())[:8])
    name: str
    description: str = ""
    # Optional interface markers (host contracts).
    # Example: ["abstractcode.agent.v1"] to indicate this workflow can be run as a RunnableFlow in chat-like clients.
    interfaces: List[str] = Field(default_factory=list)
    nodes: List[VisualNode] = Field(default_factory=list)
    edges: List[VisualEdge] = Field(default_factory=list)
    # Id of the node execution starts from; None means the host must pick one.
    entryNode: Optional[str] = None
    # Timestamps are free-form strings here — presumably ISO 8601; verify
    # against whichever host writes them before parsing.
    created_at: Optional[str] = None
    updated_at: Optional[str] = None
|
213
|
+
class FlowCreateRequest(BaseModel):
    """Request to create a new flow.

    Mirrors VisualFlow minus the server-assigned fields (id, timestamps).
    """

    name: str
    description: str = ""
    interfaces: List[str] = Field(default_factory=list)
    nodes: List[VisualNode] = Field(default_factory=list)
    edges: List[VisualEdge] = Field(default_factory=list)
    entryNode: Optional[str] = None
|
224
|
+
class FlowUpdateRequest(BaseModel):
    """Request to update an existing flow.

    All fields are optional: None means "leave unchanged" (partial update).
    """

    name: Optional[str] = None
    description: Optional[str] = None
    interfaces: Optional[List[str]] = None
    nodes: Optional[List[VisualNode]] = None
    edges: Optional[List[VisualEdge]] = None
    entryNode: Optional[str] = None
|
235
|
+
class FlowRunRequest(BaseModel):
    """Request to execute a flow."""

    # Initial variables/inputs handed to the run (free-form JSON object).
    input_data: Dict[str, Any] = Field(default_factory=dict)
|
241
|
+
class FlowRunResult(BaseModel):
    """Result of a flow execution.

    Exactly one of the terminal shapes applies: success with ``result``,
    failure with ``error``, or a paused run with ``waiting=True`` plus the
    wait/prompt fields below.
    """

    success: bool
    result: Optional[Any] = None
    error: Optional[str] = None
    run_id: Optional[str] = None
    # True when the run paused (e.g. an ask-user step) instead of finishing.
    waiting: bool = False
    # Opaque key the host echoes back to resume the paused run.
    wait_key: Optional[str] = None
    # User-facing prompt/choices when the run is waiting on user input.
    prompt: Optional[str] = None
    choices: Optional[List[str]] = None
    allow_free_text: Optional[bool] = None
|
255
|
+
class ExecutionMetrics(BaseModel):
    """Optional per-step (or whole-run) execution metrics.

    These fields are best-effort and may be omitted depending on host/runtime capabilities.
    """

    # Wall-clock duration of the step/run in milliseconds.
    duration_ms: Optional[float] = None
    # LLM token counts, when the step involved a model call.
    input_tokens: Optional[int] = None
    output_tokens: Optional[int] = None
    # Generation throughput (output tokens per second), when measurable.
    tokens_per_s: Optional[float] = None
|
267
|
+
class ExecutionEvent(BaseModel):
    """Real-time execution event for WebSocket."""

    type: str  # "node_start", "node_complete", "flow_complete", "flow_error"
    # ISO 8601 UTC timestamp for event emission (host-side observability).
    ts: str = Field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
    runId: Optional[str] = None
    # Node the event refers to; None for flow-level events.
    nodeId: Optional[str] = None
    # Step output (node_complete) or final output (flow_complete).
    result: Optional[Any] = None
    error: Optional[str] = None
    # Optional best-effort metrics attached to this event.
    meta: Optional[ExecutionMetrics] = None
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
"""Session-aware runner for VisualFlow executions.
|
|
2
|
+
|
|
3
|
+
This runner extends FlowRunner with:
|
|
4
|
+
- A durable session_id (defaults to root run_id)
|
|
5
|
+
- Auto-started custom event listener workflows ("On Event" nodes)
|
|
6
|
+
|
|
7
|
+
This keeps VisualFlow JSON portable: any host can execute a visual flow and its
|
|
8
|
+
event listeners using AbstractRuntime's durable semantics.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
from typing import Any, Dict, List, Optional, TYPE_CHECKING
|
|
14
|
+
|
|
15
|
+
from ..runner import FlowRunner
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from abstractruntime.core.runtime import Runtime
|
|
19
|
+
from abstractruntime.core.spec import WorkflowSpec
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class VisualSessionRunner(FlowRunner):
    """FlowRunner that starts event listener workflows within the same session."""

    def __init__(
        self,
        flow: Any,
        *,
        runtime: Optional["Runtime"] = None,
        event_listener_specs: Optional[List["WorkflowSpec"]] = None,
    ) -> None:
        """Wrap *flow* and remember listener workflow specs to start alongside it.

        Args:
            flow: The visual flow to run (forwarded to FlowRunner).
            runtime: Optional runtime override (forwarded to FlowRunner).
            event_listener_specs: WorkflowSpecs for "On Event" listener
                workflows; each is started as a child run in start().
        """
        super().__init__(flow, runtime=runtime)
        # Defensive copies: callers may reuse/mutate their list.
        self._event_listener_specs: List["WorkflowSpec"] = list(event_listener_specs or [])
        self._event_listener_run_ids: List[str] = []

    @property
    def event_listener_run_ids(self) -> List[str]:
        # Return a copy so callers cannot mutate internal bookkeeping.
        return list(self._event_listener_run_ids)

    def start(
        self,
        input_data: Optional[Dict[str, Any]] = None,
        *,
        actor_id: Optional[str] = None,
        session_id: Optional[str] = None,
    ) -> str:
        """Start the root run, pin a session id, and spawn listener child runs.

        Returns the root run id. Listener runs are advanced to their first
        WAIT_EVENT so they are ready to receive emitted events.
        """
        run_id = super().start(input_data, actor_id=actor_id, session_id=session_id)

        # Default session_id to the root run_id for session-scoped events.
        # If a session_id is explicitly provided, preserve it.
        effective_session_id = session_id
        try:
            state = self.runtime.get_state(run_id)
            if not getattr(state, "session_id", None):
                state.session_id = run_id  # type: ignore[attr-defined]
                self.runtime.run_store.save(state)
            effective_session_id = str(getattr(state, "session_id", None) or run_id).strip() or run_id
        except Exception:
            # Best-effort; session-scoped keys will fall back to run_id if missing.
            effective_session_id = str(session_id).strip() if isinstance(session_id, str) and session_id.strip() else run_id

        if not self._event_listener_specs:
            return run_id

        # Start listeners as child runs in the same session.
        for spec in self._event_listener_specs:
            try:
                child_run_id = self.runtime.start(
                    workflow=spec,
                    vars={},
                    session_id=effective_session_id,
                    actor_id=actor_id,
                    parent_run_id=run_id,
                )
                # Advance the listener to its first WAIT_EVENT (On Event node).
                self.runtime.tick(workflow=spec, run_id=child_run_id, max_steps=10)
                self._event_listener_run_ids.append(child_run_id)
            except Exception:
                # Listener start failures should surface during execution, but
                # we don't want to hide root runs from starting.
                continue

        return run_id

    def run(
        self,
        input_data: Optional[Dict[str, Any]] = None,
        *,
        actor_id: Optional[str] = None,
        session_id: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Execute the root run and drive session-level listener runs.

        Rationale:
        - `EMIT_EVENT` resumes listener runs but does not (by default) execute them.
        - Hosts that only "run the main workflow" should still see event handler branches run.
        """
        # Imported lazily: abstractruntime is only needed when actually running.
        from abstractruntime.core.models import RunStatus, WaitReason

        run_id = self.start(input_data, actor_id=actor_id, session_id=session_id)
        runtime = self.runtime

        def _list_session_runs() -> List[str]:
            # Root run + discoverable children + listeners we started ourselves.
            out: List[str] = [run_id]
            try:
                rs = getattr(runtime, "run_store", None)
                if rs is not None and hasattr(rs, "list_children"):
                    children = rs.list_children(parent_run_id=run_id)  # type: ignore[attr-defined]
                    for c in children:
                        cid = getattr(c, "run_id", None)
                        if isinstance(cid, str) and cid and cid not in out:
                            out.append(cid)
            except Exception:
                pass
            for cid in self._event_listener_run_ids:
                if cid not in out:
                    out.append(cid)
            return out

        def _tick_child(child_run_id: str, *, max_steps: int = 100) -> None:
            # Advance a child run if it is RUNNING (or time-waiting); no-op otherwise.
            st = runtime.get_state(child_run_id)
            if st.status != RunStatus.RUNNING:
                # WAIT_UNTIL can auto-unblock, so still call tick() to allow progress.
                if st.status == RunStatus.WAITING and st.waiting and st.waiting.reason == WaitReason.UNTIL:
                    pass
                else:
                    return
            reg = getattr(runtime, "workflow_registry", None)
            if reg is None:
                return
            wf = reg.get(st.workflow_id)
            if wf is None:
                return
            runtime.tick(workflow=wf, run_id=child_run_id, max_steps=max_steps)

        while True:
            state = runtime.tick(workflow=self.workflow, run_id=run_id, max_steps=100)

            # Drive children that became RUNNING due to EMIT_EVENT (or subflows).
            for cid in _list_session_runs():
                if cid == run_id:
                    continue
                _tick_child(cid, max_steps=100)

            # Root completion/termination conditions.
            if state.status == RunStatus.COMPLETED:
                # If all children are idle listeners (WAITING EVENT) or terminal, close the session.
                all_idle_or_done = True
                for cid in _list_session_runs():
                    if cid == run_id:
                        continue
                    st = runtime.get_state(cid)
                    if st.status in (RunStatus.COMPLETED, RunStatus.FAILED, RunStatus.CANCELLED):
                        continue
                    if st.status == RunStatus.WAITING and st.waiting and st.waiting.reason == WaitReason.EVENT:
                        continue
                    all_idle_or_done = False
                if all_idle_or_done:
                    # Cancel idle listeners to keep the session tidy.
                    for cid in _list_session_runs():
                        if cid == run_id:
                            continue
                        st = runtime.get_state(cid)
                        if st.status == RunStatus.WAITING and st.waiting and st.waiting.reason == WaitReason.EVENT:
                            try:
                                runtime.cancel_run(cid, reason="Session completed")
                            except Exception:
                                pass
                    return self._normalize_completed_output(state.output)
                # Otherwise, keep driving until children settle into waits/terminal.
                continue

            if state.status == RunStatus.FAILED:
                raise RuntimeError(f"Flow failed: {state.error}")

            if state.status == RunStatus.WAITING:
                # Preserve FlowRunner shape.
                return {
                    "waiting": True,
                    "state": state,
                    "wait_key": state.waiting.wait_key if state.waiting else None,
                }
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""Workspace-scoped tool execution helpers (AbstractFlow re-export).
|
|
2
|
+
|
|
3
|
+
The core implementation lives in AbstractRuntime's AbstractCore integration so it can
|
|
4
|
+
be shared across hosts/clients. AbstractFlow re-exports the types to preserve existing
|
|
5
|
+
imports (`abstractflow.visual.workspace_scoped_tools`).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
from abstractruntime.integrations.abstractcore.workspace_scoped_tools import ( # noqa: F401
|
|
13
|
+
WorkspaceScope,
|
|
14
|
+
WorkspaceScopedToolExecutor,
|
|
15
|
+
resolve_workspace_base_dir,
|
|
16
|
+
resolve_user_path,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def build_scoped_tool_executor(*, scope: WorkspaceScope) -> Any:
    """Create a local tool executor wrapped with workspace scoping."""
    # Imported lazily so importing this module stays cheap for hosts that
    # never build a local executor.
    from abstractruntime.integrations.abstractcore.default_tools import get_default_tools
    from abstractruntime.integrations.abstractcore.tool_executor import MappingToolExecutor

    default_tools = get_default_tools()
    inner_executor = MappingToolExecutor.from_tools(default_tools)
    return WorkspaceScopedToolExecutor(delegate=inner_executor, scope=scope)