abstractgateway 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,213 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import logging
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+ from typing import Any, Dict, Optional
8
+
9
+ from abstractruntime import Runtime
10
+
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ def _parse_visual_listener_id(workflow_id: str) -> Optional[tuple[str, str]]:
16
+ """Return (flow_id, node_id) for a visual_event_listener_* workflow_id."""
17
+ prefix = "visual_event_listener_"
18
+ if not isinstance(workflow_id, str) or not workflow_id.startswith(prefix):
19
+ return None
20
+ rest = workflow_id[len(prefix) :]
21
+ parts = rest.split("_", 1)
22
+ if len(parts) != 2:
23
+ return None
24
+ flow_id, node_id = parts[0], parts[1]
25
+ if not flow_id or not node_id:
26
+ return None
27
+ return (flow_id, node_id)
28
+
29
+
30
+ def _parse_visual_agent_id(workflow_id: str) -> Optional[tuple[str, str]]:
31
+ """Return (flow_id, node_id) for a visual_react_agent_* workflow_id."""
32
+ prefix = "visual_react_agent_"
33
+ if not isinstance(workflow_id, str) or not workflow_id.startswith(prefix):
34
+ return None
35
+ rest = workflow_id[len(prefix) :]
36
+ parts = rest.split("_", 1)
37
+ if len(parts) != 2:
38
+ return None
39
+ flow_id, node_id = parts[0], parts[1]
40
+ if not flow_id or not node_id:
41
+ return None
42
+ return (flow_id, node_id)
43
+
44
+
45
def _require_visualflow_deps() -> None:
    """Fail fast with an actionable error when the optional AbstractFlow compiler is absent."""
    try:
        import abstractflow  # noqa: F401
    except Exception as e:  # pragma: no cover
        message = (
            "VisualFlow execution requires the AbstractFlow compiler library (not the web UI). "
            "Install: `pip install abstractgateway[visualflow]`"
        )
        raise RuntimeError(message) from e
53
+
54
+
55
@dataclass(frozen=True)
class VisualFlowRegistry:
    """Load VisualFlow JSON files from a directory."""

    # Directory containing one `*.json` VisualFlow document per flow.
    flows_dir: Path

    def load(self) -> Dict[str, Any]:
        """Parse every ``*.json`` under ``flows_dir`` into ``{flow_id: VisualFlow}``.

        Invalid files are skipped with a warning instead of aborting the whole load.

        Raises:
            FileNotFoundError: when ``flows_dir`` does not exist.
        """
        _require_visualflow_deps()
        from abstractflow.visual.models import VisualFlow

        base = Path(self.flows_dir).expanduser().resolve()
        if not base.exists():
            raise FileNotFoundError(f"flows_dir does not exist: {base}")

        loaded: Dict[str, Any] = {}
        for path in sorted(base.glob("*.json")):
            try:
                flow = VisualFlow.model_validate(json.loads(path.read_text(encoding="utf-8")))
            except Exception as e:
                logger.warning("Failed to load flow file %s: %s", path, e)
                continue
            # Prefer the flow's declared id; fall back to the file stem.
            key = str(getattr(flow, "id", "") or "").strip() or path.stem
            loaded[key] = flow
        return loaded
80
+
81
+
82
class VisualFlowGatewayHost:
    """Gateway host that starts/ticks runs from VisualFlow JSON.

    This host is optional. For dependency-light deployments prefer bundle mode (`.flow` bundles),
    which compiles VisualFlow via `abstractruntime.visualflow_compiler` without importing `abstractflow`.
    """

    def __init__(
        self,
        *,
        flows_dir: Path,
        flows: Dict[str, Any],
        run_store: Any,
        ledger_store: Any,
        artifact_store: Any,
    ) -> None:
        # Fail fast if the optional `abstractflow` compiler dependency is missing.
        _require_visualflow_deps()
        self._flows_dir = Path(flows_dir).expanduser().resolve()
        # Defensive copy: later mutation of the caller's dict must not affect the host.
        self._flows = dict(flows)
        self._run_store = run_store
        self._ledger_store = ledger_store
        self._artifact_store = artifact_store

    @property
    def flows_dir(self) -> Path:
        # Resolved absolute flows directory captured at construction time.
        return self._flows_dir

    @property
    def flows(self) -> Dict[str, Any]:
        # Shallow copy so callers cannot mutate the host's registry.
        return dict(self._flows)

    @property
    def run_store(self) -> Any:
        return self._run_store

    @property
    def ledger_store(self) -> Any:
        return self._ledger_store

    @property
    def artifact_store(self) -> Any:
        return self._artifact_store

    def start_run(
        self,
        *,
        flow_id: str,
        input_data: Dict[str, Any],
        actor_id: str = "gateway",
        bundle_id: Optional[str] = None,
    ) -> str:
        """Start a run of the given flow and return its run id.

        Raises:
            ValueError: when flow_id is empty.
            KeyError: when the flow is not registered.
        """
        # bundle_id is ignored for VisualFlow sources (kept for API compatibility with bundle mode).
        del bundle_id
        _require_visualflow_deps()
        from abstractflow.visual.executor import create_visual_runner
        from abstractflow.visual.workspace_scoped_tools import WorkspaceScope, build_scoped_tool_executor

        fid = str(flow_id or "").strip()
        if not fid:
            raise ValueError("flow_id is required")
        visual_flow = self._flows.get(fid)
        if visual_flow is None:
            raise KeyError(f"Flow '{fid}' not found in {self._flows_dir}")

        data = dict(input_data or {})
        # Tool access is scoped to a workspace when the input declares one;
        # otherwise no scoped tool executor is attached.
        scope = WorkspaceScope.from_input_data(data)
        tool_executor = build_scoped_tool_executor(scope=scope) if scope is not None else None

        vis_runner = create_visual_runner(
            visual_flow,
            flows=self._flows,
            run_store=self._run_store,
            ledger_store=self._ledger_store,
            artifact_store=self._artifact_store,
            tool_executor=tool_executor,
        )
        return str(vis_runner.start(data, actor_id=actor_id))

    def runtime_and_workflow_for_run(self, run_id: str) -> tuple[Runtime, Any]:
        """Return (runtime, workflow_spec) for the given run, ensuring derived VisualFlow workflows are registered."""

        run = self._run_store.load(run_id)
        if run is None:
            raise KeyError(f"Run '{run_id}' not found")
        workflow_id = getattr(run, "workflow_id", None)
        if not isinstance(workflow_id, str) or not workflow_id:
            raise ValueError(f"Run '{run_id}' missing workflow_id")

        # Determine which VisualFlow root we need to compile to ensure this workflow_id is registered.
        # Derived workflows (event listeners, react agents) embed their root flow id in the name.
        root_flow_id = workflow_id
        parsed = _parse_visual_listener_id(workflow_id)
        if parsed is not None:
            root_flow_id = parsed[0]
        parsed2 = _parse_visual_agent_id(workflow_id)
        if parsed2 is not None:
            root_flow_id = parsed2[0]

        visual_flow = self._flows.get(root_flow_id)
        if visual_flow is None:
            raise KeyError(f"Flow '{root_flow_id}' not found (needed for workflow '{workflow_id}')")

        # Rebuild tool scope from persisted vars (best-effort).
        _require_visualflow_deps()
        from abstractflow.visual.executor import create_visual_runner
        from abstractflow.visual.workspace_scoped_tools import WorkspaceScope, build_scoped_tool_executor

        vars0 = getattr(run, "vars", None)
        scope = WorkspaceScope.from_input_data(vars0) if isinstance(vars0, dict) else None
        tool_executor = build_scoped_tool_executor(scope=scope) if scope is not None else None

        # Compiling the root flow registers the root and all derived workflows on the runtime.
        runner = create_visual_runner(
            visual_flow,
            flows=self._flows,
            run_store=self._run_store,
            ledger_store=self._ledger_store,
            artifact_store=self._artifact_store,
            tool_executor=tool_executor,
        )
        runtime = runner.runtime

        # WorkflowSpec lookup: root uses runner.workflow; derived workflows use runtime.workflow_registry.
        if getattr(runner.workflow, "workflow_id", None) == workflow_id:
            return (runtime, runner.workflow)

        reg = getattr(runtime, "workflow_registry", None)
        if reg is None or not hasattr(reg, "get"):
            raise KeyError(f"Runtime has no workflow_registry for workflow '{workflow_id}'")
        wf = reg.get(workflow_id)
        if wf is None:
            raise KeyError(f"Workflow '{workflow_id}' not registered")
        return (runtime, wf)
213
+
@@ -0,0 +1,5 @@
1
"""Gateway routers package: re-exports the FastAPI router for app wiring."""

from .gateway import router as gateway_router

__all__ = ["gateway_router"]
4
+
5
+
@@ -0,0 +1,393 @@
1
+ """Run Gateway API (HTTP + SSE).
2
+
3
+ Backlog 307: Durable Run Gateway (Command Inbox + Ledger Stream)
4
+
5
+ This is intentionally replay-first:
6
+ - The durable ledger is the source of truth.
7
+ - SSE is an optimization; clients must be able to reconnect and replay by cursor.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ import asyncio
13
+ import json
14
+ import logging
15
+ from typing import Any, Dict, Optional
16
+
17
+ from fastapi import APIRouter, HTTPException, Query
18
+ from fastapi.responses import StreamingResponse
19
+ from pydantic import BaseModel, Field
20
+
21
+ from abstractruntime.storage.commands import CommandRecord
22
+
23
+ from ..service import get_gateway_service, run_summary
24
+
25
+
26
+ router = APIRouter(prefix="/gateway", tags=["gateway"])
27
+ logger = logging.getLogger(__name__)
28
+
29
+
30
class StartRunRequest(BaseModel):
    """Request body for POST /gateway/runs/start."""

    # Bundle selector; may be omitted when flow_id is already namespaced as "bundle:flow".
    bundle_id: Optional[str] = Field(
        default=None,
        description="Bundle id (when workflow source is 'bundle'). Optional if flow_id is already namespaced as 'bundle:flow'.",
    )
    flow_id: Optional[str] = Field(
        default=None,
        description=(
            "Workflow id to start (flow id or 'bundle:flow'). Optional when bundle_id is provided and the bundle has a single entrypoint."
        ),
    )
    # Initial run variables, forwarded verbatim to the workflow entrypoint.
    input_data: Dict[str, Any] = Field(default_factory=dict)
42
+
43
+
44
class StartRunResponse(BaseModel):
    """Response body for POST /gateway/runs/start: the durable run id."""

    run_id: str
46
+
47
+
48
class SubmitCommandRequest(BaseModel):
    """Request body for POST /gateway/commands (durable command inbox)."""

    command_id: str = Field(..., description="Client-supplied idempotency key (UUID recommended).")
    run_id: str = Field(..., description="Target run id (or session id for emit_event).")
    type: str = Field(..., description="pause|resume|cancel|emit_event")
    # Command-specific arguments (e.g. event name/payload for emit_event).
    payload: Dict[str, Any] = Field(default_factory=dict)
    ts: Optional[str] = Field(default=None, description="ISO timestamp (optional).")
    # Optional caller identifier, recorded for auditing/log correlation only.
    client_id: Optional[str] = None
55
+
56
+
57
class SubmitCommandResponse(BaseModel):
    """Append result: acceptance flag, idempotent-duplicate flag, and assigned sequence number."""

    accepted: bool
    duplicate: bool
    seq: int
61
+
62
+
63
def _require_bundle_host(svc: Any) -> Any:
    """Return the service's host, or raise HTTP 400 when bundle mode is not enabled.

    A bundle-capable host is identified by a dict-valued ``bundles`` attribute.
    """
    host = getattr(svc, "host", None)
    if not isinstance(getattr(host, "bundles", None), dict):
        raise HTTPException(status_code=400, detail="Bundle workflow source is not enabled on this gateway")
    return host
69
+
70
+
71
+ def _extract_entrypoint_inputs_from_visualflow(raw: Any) -> list[Dict[str, Any]]:
72
+ """Best-effort derive start input definitions from a VisualFlow JSON object."""
73
+ if not isinstance(raw, dict):
74
+ return []
75
+ nodes = raw.get("nodes")
76
+ if not isinstance(nodes, list):
77
+ return []
78
+
79
+ start_node: Optional[Dict[str, Any]] = None
80
+ for n in nodes:
81
+ if not isinstance(n, dict):
82
+ continue
83
+ t = n.get("type")
84
+ t_str = t.value if hasattr(t, "value") else str(t or "")
85
+ data = n.get("data") if isinstance(n.get("data"), dict) else {}
86
+ node_type = str(data.get("nodeType") or t_str or "").strip()
87
+ if node_type == "on_flow_start" or t_str == "on_flow_start":
88
+ start_node = n
89
+ break
90
+
91
+ if start_node is None:
92
+ return []
93
+
94
+ data = start_node.get("data") if isinstance(start_node.get("data"), dict) else {}
95
+ outputs = data.get("outputs")
96
+ pin_defaults = data.get("pinDefaults") if isinstance(data.get("pinDefaults"), dict) else {}
97
+ if not isinstance(outputs, list):
98
+ return []
99
+
100
+ out: list[Dict[str, Any]] = []
101
+ for p in outputs:
102
+ if not isinstance(p, dict):
103
+ continue
104
+ pid = str(p.get("id") or "").strip()
105
+ if not pid:
106
+ continue
107
+ ptype = str(p.get("type") or "").strip()
108
+ if ptype == "execution" or pid in {"exec-out", "exec"}:
109
+ continue
110
+ label = str(p.get("label") or pid).strip() or pid
111
+ item: Dict[str, Any] = {"id": pid, "label": label, "type": ptype or "unknown"}
112
+ if pid in pin_defaults:
113
+ item["default"] = pin_defaults.get(pid)
114
+ out.append(item)
115
+ return out
116
+
117
+
118
+ def _extract_node_index_from_visualflow(raw: Any) -> Dict[str, Dict[str, Any]]:
119
+ """Best-effort node metadata index {node_id -> {type,label,headerColor}}."""
120
+ if not isinstance(raw, dict):
121
+ return {}
122
+ nodes = raw.get("nodes")
123
+ if not isinstance(nodes, list):
124
+ return {}
125
+ out: Dict[str, Dict[str, Any]] = {}
126
+ for n in nodes:
127
+ if not isinstance(n, dict):
128
+ continue
129
+ node_id = str(n.get("id") or "").strip()
130
+ if not node_id:
131
+ continue
132
+ t = n.get("type")
133
+ t_str = t.value if hasattr(t, "value") else str(t or "")
134
+ data = n.get("data") if isinstance(n.get("data"), dict) else {}
135
+ node_type = str(data.get("nodeType") or t_str or "").strip() or "unknown"
136
+ label = str(data.get("label") or n.get("label") or node_id).strip() or node_id
137
+ header_color = data.get("headerColor") or n.get("headerColor")
138
+ item: Dict[str, Any] = {"type": node_type, "label": label}
139
+ if isinstance(header_color, str) and header_color.strip():
140
+ item["headerColor"] = header_color.strip()
141
+ out[node_id] = item
142
+ return out
143
+
144
+
145
@router.get("/bundles")
async def list_bundles() -> Dict[str, Any]:
    """List loaded bundles with their manifest entrypoints, sorted by bundle id."""
    svc = get_gateway_service()
    host = _require_bundle_host(svc)

    items: list[Dict[str, Any]] = []
    for bid, bundle in (getattr(host, "bundles", {}) or {}).items():
        man = getattr(bundle, "manifest", None)
        if man is None:
            # A bundle without a manifest has nothing to report.
            continue
        eps = [
            {
                "flow_id": getattr(ep, "flow_id", None),
                "name": getattr(ep, "name", None),
                "description": getattr(ep, "description", "") or "",
                "interfaces": list(getattr(ep, "interfaces", None) or []),
            }
            for ep in (getattr(man, "entrypoints", None) or [])
        ]
        items.append(
            {
                "bundle_id": str(bid),
                "bundle_version": getattr(man, "bundle_version", "0.0.0"),
                "created_at": getattr(man, "created_at", ""),
                "default_entrypoint": str(getattr(man, "default_entrypoint", "") or "") or None,
                "entrypoints": eps,
            }
        )

    items.sort(key=lambda it: str(it.get("bundle_id") or ""))
    return {"items": items, "default_bundle_id": getattr(host, "_default_bundle_id", None)}
179
+
180
+
181
@router.get("/bundles/{bundle_id}")
async def get_bundle(bundle_id: str) -> Dict[str, Any]:
    """Return one bundle's manifest details, including per-entrypoint input
    definitions and a node metadata index derived from the raw flow JSON.

    Raises 400 for a blank id and 404 when the bundle is unknown.
    """
    svc = get_gateway_service()
    host = _require_bundle_host(svc)

    bid = str(bundle_id or "").strip()
    if not bid:
        raise HTTPException(status_code=400, detail="bundle_id is required")

    bundles = getattr(host, "bundles", {}) or {}
    bundle = bundles.get(bid)
    if bundle is None:
        raise HTTPException(status_code=404, detail=f"Bundle '{bid}' not found")

    man = bundle.manifest
    entrypoints_out: list[Dict[str, Any]] = []
    for ep in list(man.entrypoints or []):
        fid = str(getattr(ep, "flow_id", "") or "").strip()
        # Best-effort: read the raw VisualFlow JSON so thin clients get UI metadata
        # (start inputs, node labels/colors) without compiling the flow.
        rel = man.flow_path_for(fid) if fid else None
        raw = bundle.read_json(rel) if isinstance(rel, str) and rel.strip() else None
        entrypoints_out.append(
            {
                "flow_id": fid,
                # Namespaced id accepted by the start-run endpoint.
                "workflow_id": f"{bid}:{fid}" if fid else None,
                "name": getattr(ep, "name", None),
                "description": str(getattr(ep, "description", "") or ""),
                "interfaces": list(getattr(ep, "interfaces", None) or []),
                "inputs": _extract_entrypoint_inputs_from_visualflow(raw),
                "node_index": _extract_node_index_from_visualflow(raw),
            }
        )

    # Only keep well-formed (non-blank string) flow keys, deterministically ordered.
    flow_ids = sorted([str(k) for k in (man.flows or {}).keys() if isinstance(k, str) and k.strip()])
    return {
        "bundle_id": str(man.bundle_id),
        "bundle_version": str(man.bundle_version or "0.0.0"),
        "created_at": str(man.created_at or ""),
        "default_entrypoint": str(getattr(man, "default_entrypoint", "") or "") or None,
        "entrypoints": entrypoints_out,
        "flows": flow_ids,
        "metadata": dict(getattr(man, "metadata", None) or {}),
    }
223
+
224
+
225
@router.get("/bundles/{bundle_id}/flows/{flow_id}")
async def get_bundle_flow(bundle_id: str, flow_id: str) -> Dict[str, Any]:
    """Return a VisualFlow JSON from a bundle (best-effort; intended for thin clients)."""
    svc = get_gateway_service()
    host = _require_bundle_host(svc)

    bid = str(bundle_id or "").strip()
    if not bid:
        raise HTTPException(status_code=400, detail="bundle_id is required")

    fid = str(flow_id or "").strip()
    if not fid:
        raise HTTPException(status_code=400, detail="flow_id is required")

    # Accept "bundle:flow" for convenience but enforce matching bundle_id.
    if ":" in fid:
        prefix, _, suffix = fid.partition(":")
        if not (prefix and suffix):
            raise HTTPException(status_code=400, detail="Invalid flow_id")
        if prefix != bid:
            raise HTTPException(status_code=400, detail="flow_id bundle prefix does not match bundle_id")
        fid = suffix.strip()

    bundle = (getattr(host, "bundles", {}) or {}).get(bid)
    if bundle is None:
        raise HTTPException(status_code=404, detail=f"Bundle '{bid}' not found")

    rel = bundle.manifest.flow_path_for(fid)
    if not isinstance(rel, str) or not rel.strip():
        raise HTTPException(status_code=404, detail=f"Flow '{fid}' not found in bundle '{bid}'")

    raw = bundle.read_json(rel)
    if not isinstance(raw, dict):
        raise HTTPException(status_code=500, detail="Flow JSON is invalid")

    return {"bundle_id": bid, "flow_id": fid, "workflow_id": f"{bid}:{fid}", "flow": raw}
263
+
264
+
265
@router.post("/runs/start", response_model=StartRunResponse)
async def start_run(req: StartRunRequest) -> StartRunResponse:
    """Start a workflow run and return its durable run id.

    Raises:
        HTTPException 400: neither flow_id nor bundle_id supplied, or the host rejected the start.
        HTTPException 404: the referenced flow/bundle does not exist.
    """
    svc = get_gateway_service()
    # Ensure the background runner is ticking before work is enqueued.
    svc.runner.start()

    flow_id = str(req.flow_id or "").strip()
    bundle_id = str(req.bundle_id).strip() if isinstance(req.bundle_id, str) and str(req.bundle_id).strip() else None
    if not flow_id and not bundle_id:
        raise HTTPException(status_code=400, detail="flow_id is required (or provide bundle_id to start a bundle entrypoint)")

    try:
        run_id = svc.host.start_run(
            flow_id=flow_id,
            bundle_id=bundle_id,
            input_data=dict(req.input_data or {}),
            actor_id="gateway",
        )
    except KeyError as e:
        # Best-effort error message: in bundle mode, KeyError can refer to either a bundle or a flow.
        msg = f"Flow '{flow_id}' not found" if flow_id else "Bundle not found"
        # Chain the cause so server logs keep the original traceback (flake8 B904 / PEP 3134).
        raise HTTPException(status_code=404, detail=msg) from e
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Failed to start run: {e}") from e
    return StartRunResponse(run_id=str(run_id))
289
+
290
+
291
@router.get("/runs/{run_id}")
async def get_run(run_id: str) -> Dict[str, Any]:
    """Return a summary of a single run.

    Raises:
        HTTPException 404: unknown run id.
        HTTPException 500: the run store failed to load the record.
    """
    svc = get_gateway_service()
    rs = svc.host.run_store
    try:
        run = rs.load(str(run_id))
    except Exception as e:
        # Chain the cause so the original store error stays in the traceback (flake8 B904).
        raise HTTPException(status_code=500, detail=f"Failed to load run: {e}") from e
    if run is None:
        raise HTTPException(status_code=404, detail=f"Run '{run_id}' not found")
    return run_summary(run)
302
+
303
+
304
@router.get("/runs/{run_id}/ledger")
async def get_ledger(
    run_id: str,
    after: int = Query(0, ge=0, description="Cursor: number of records already consumed."),
    limit: int = Query(200, ge=1, le=2000),
) -> Dict[str, Any]:
    """Return a page of durable ledger records starting at cursor `after`."""
    svc = get_gateway_service()
    records = svc.host.ledger_store.list(str(run_id))
    if not isinstance(records, list):
        records = []
    start = int(after or 0)
    page = records[start : start + int(limit)]
    # next_after is the cursor a client should pass to continue replaying.
    return {"items": page, "next_after": start + len(page)}
318
+
319
+
320
@router.get("/runs/{run_id}/ledger/stream")
async def stream_ledger(
    run_id: str,
    after: int = Query(0, ge=0, description="Cursor: number of records already consumed."),
    heartbeat_s: float = Query(5.0, gt=0.1, le=60.0),
) -> StreamingResponse:
    """Stream ledger records as Server-Sent Events, starting at cursor `after`.

    The durable ledger remains the source of truth: clients may disconnect at any
    time and resume by passing the last seen cursor as `after`. While no new
    records arrive, a `: keep-alive` comment is emitted every `heartbeat_s` seconds.
    """
    svc = get_gateway_service()
    run_id2 = str(run_id)

    async def _gen():
        # get_running_loop() is the supported way to get the loop from inside a
        # coroutine; get_event_loop() is deprecated here (since Python 3.10) and
        # can silently create a new loop in edge cases. Hoisted out of the poll loop.
        loop = asyncio.get_running_loop()
        cursor = max(0, int(after or 0))
        last_emit = loop.time()
        while True:
            ledger = svc.host.ledger_store.list(run_id2)
            if not isinstance(ledger, list):
                ledger = []
            if cursor < len(ledger):
                # Drain everything past the cursor; SSE id mirrors the new cursor.
                while cursor < len(ledger):
                    item = ledger[cursor]
                    data = json.dumps({"cursor": cursor + 1, "record": item}, ensure_ascii=False)
                    yield f"id: {cursor + 1}\n".encode("utf-8")
                    yield b"event: step\n"
                    yield f"data: {data}\n\n".encode("utf-8")
                    cursor += 1
                last_emit = loop.time()
            else:
                now = loop.time()
                if (now - last_emit) >= float(heartbeat_s):
                    yield b": keep-alive\n\n"
                    last_emit = now
            await asyncio.sleep(0.25)

    return StreamingResponse(_gen(), media_type="text/event-stream")
355
+
356
+
357
@router.post("/commands", response_model=SubmitCommandResponse)
async def submit_command(req: SubmitCommandRequest) -> SubmitCommandResponse:
    """Append a command to the durable command inbox (idempotent by command_id).

    Raises:
        HTTPException 400: unsupported command type or the store rejected the append.
    """
    svc = get_gateway_service()
    typ = str(req.type or "").strip()
    if typ not in {"pause", "resume", "cancel", "emit_event"}:
        raise HTTPException(status_code=400, detail="type must be one of pause|resume|cancel|emit_event")

    record = CommandRecord(
        command_id=str(req.command_id),
        run_id=str(req.run_id),
        type=typ,
        payload=dict(req.payload or {}),
        ts=str(req.ts) if isinstance(req.ts, str) and req.ts else "",
        client_id=str(req.client_id) if isinstance(req.client_id, str) and req.client_id else None,
        # seq is assigned by the store on append; 0 is a placeholder.
        seq=0,
    )

    try:
        res = svc.runner.command_store.append(record)
    except Exception as e:
        # Chain the cause so server logs keep the original traceback (flake8 B904).
        raise HTTPException(status_code=400, detail=f"Failed to append command: {e}") from e

    # Best-effort audit log: logging must never break command submission.
    try:
        logger.info(
            "gateway_command accepted=%s dup=%s seq=%s run_id=%s type=%s command_id=%s client_id=%s",
            bool(res.accepted),
            bool(res.duplicate),
            int(res.seq),
            str(req.run_id),
            str(req.type),
            str(req.command_id),
            str(req.client_id) if req.client_id else "",
        )
    except Exception:
        pass

    return SubmitCommandResponse(accepted=bool(res.accepted), duplicate=bool(res.duplicate), seq=int(res.seq))