abstractcode-0.2.0-py3-none-any.whl → abstractcode-0.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- abstractcode/__init__.py +1 -1
- abstractcode/cli.py +911 -9
- abstractcode/file_mentions.py +276 -0
- abstractcode/flow_cli.py +1413 -0
- abstractcode/fullscreen_ui.py +2473 -158
- abstractcode/gateway_cli.py +715 -0
- abstractcode/py.typed +1 -0
- abstractcode/react_shell.py +8140 -546
- abstractcode/recall.py +384 -0
- abstractcode/remember.py +184 -0
- abstractcode/terminal_markdown.py +557 -0
- abstractcode/theme.py +244 -0
- abstractcode/workflow_agent.py +1412 -0
- abstractcode/workflow_cli.py +229 -0
- abstractcode-0.3.1.dist-info/METADATA +158 -0
- abstractcode-0.3.1.dist-info/RECORD +21 -0
- {abstractcode-0.2.0.dist-info → abstractcode-0.3.1.dist-info}/WHEEL +1 -1
- abstractcode-0.2.0.dist-info/METADATA +0 -160
- abstractcode-0.2.0.dist-info/RECORD +0 -11
- {abstractcode-0.2.0.dist-info → abstractcode-0.3.1.dist-info}/entry_points.txt +0 -0
- {abstractcode-0.2.0.dist-info → abstractcode-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {abstractcode-0.2.0.dist-info → abstractcode-0.3.1.dist-info}/top_level.txt +0 -0
abstractcode/gateway_cli.py (new file)

@@ -0,0 +1,715 @@
from __future__ import annotations

import json
import os
import time
import uuid
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, Optional
from urllib.error import HTTPError
from urllib.request import Request, urlopen


def _env(name: str, fallback: Optional[str] = None) -> Optional[str]:
    v = os.getenv(name)
    if v is not None and str(v).strip():
        return v
    if fallback:
        v2 = os.getenv(fallback)
        if v2 is not None and str(v2).strip():
            return v2
    return None

def default_gateway_url() -> str:
    # Canonical env vars:
    # - ABSTRACTGATEWAY_URL (gateway)
    # - ABSTRACTFLOW_GATEWAY_URL (legacy compatibility)
    # AbstractCode convention:
    # - ABSTRACTCODE_GATEWAY_URL
    candidates = [
        "ABSTRACTCODE_GATEWAY_URL",
        "ABSTRACTFLOW_GATEWAY_URL",
        "ABSTRACTGATEWAY_URL",
    ]
    for name in candidates:
        v = os.getenv(name)
        if isinstance(v, str) and v.strip():
            return v.strip().rstrip("/")
    # AbstractGateway docs default to 8081.
    return "http://127.0.0.1:8081"


def default_gateway_token() -> Optional[str]:
    # Canonical env vars:
    # - ABSTRACTGATEWAY_AUTH_TOKEN
    # - ABSTRACTFLOW_GATEWAY_AUTH_TOKEN (legacy compatibility)
    # AbstractCode convention:
    # - ABSTRACTCODE_GATEWAY_TOKEN
    candidates = [
        "ABSTRACTCODE_GATEWAY_TOKEN",
        "ABSTRACTGATEWAY_AUTH_TOKEN",
        "ABSTRACTFLOW_GATEWAY_AUTH_TOKEN",
    ]
    for name in candidates:
        v = os.getenv(name)
        if isinstance(v, str) and v.strip():
            return v.strip()

    token_lists = [
        "ABSTRACTGATEWAY_AUTH_TOKENS",
        "ABSTRACTFLOW_GATEWAY_AUTH_TOKENS",
    ]
    for name in token_lists:
        raw = os.getenv(name)
        if not isinstance(raw, str) or not raw.strip():
            continue
        first = raw.split(",", 1)[0].strip()
        if first:
            return first

    return None

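A usage sketch for illustration (not part of the packaged file): the resolvers above prefer the ABSTRACTCODE_* variables and fall back to the ABSTRACTGATEWAY_* / ABSTRACTFLOW_* names; the values below are placeholders.

import os

# Placeholder environment for the sketch.
os.environ["ABSTRACTCODE_GATEWAY_URL"] = "http://127.0.0.1:8081/"
os.environ["ABSTRACTGATEWAY_AUTH_TOKEN"] = "dev-token"

from abstractcode.gateway_cli import default_gateway_token, default_gateway_url

print(default_gateway_url())    # http://127.0.0.1:8081  (trailing slash stripped)
print(default_gateway_token())  # dev-token  (ABSTRACTCODE_GATEWAY_TOKEN unset, so the next candidate wins)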
def _join_url(base_url: str, path: str) -> str:
    b = str(base_url or "").rstrip("/")
    p = str(path or "")
    if not p.startswith("/"):
        p = "/" + p
    return b + p


def _request_json(
    *,
    method: str,
    url: str,
    token: Optional[str],
    payload: Optional[Dict[str, Any]] = None,
    timeout_s: float = 30.0,
) -> Dict[str, Any]:
    body: Optional[bytes]
    headers = {"Accept": "application/json"}
    if token:
        headers["Authorization"] = f"Bearer {token}"
    if payload is None:
        body = None
    else:
        body = json.dumps(payload, ensure_ascii=False).encode("utf-8")
        headers["Content-Type"] = "application/json"

    req = Request(url=url, data=body, headers=headers, method=str(method).upper())
    try:
        with urlopen(req, timeout=float(timeout_s)) as resp:
            raw = resp.read().decode("utf-8")
            return json.loads(raw) if raw else {}
    except HTTPError as e:
        try:
            raw = e.read().decode("utf-8")
        except Exception:
            raw = ""
        detail = raw.strip() or str(e)
        raise RuntimeError(f"Gateway HTTP {e.code}: {detail}") from e

|
+
def _request_multipart(
|
|
116
|
+
*,
|
|
117
|
+
url: str,
|
|
118
|
+
token: Optional[str],
|
|
119
|
+
fields: Dict[str, str],
|
|
120
|
+
file_field: str,
|
|
121
|
+
filename: str,
|
|
122
|
+
content: bytes,
|
|
123
|
+
content_type: str = "application/octet-stream",
|
|
124
|
+
timeout_s: float = 60.0,
|
|
125
|
+
) -> Dict[str, Any]:
|
|
126
|
+
boundary = uuid.uuid4().hex
|
|
127
|
+
crlf = b"\r\n"
|
|
128
|
+
body = bytearray()
|
|
129
|
+
|
|
130
|
+
for k, v in (fields or {}).items():
|
|
131
|
+
body.extend(b"--" + boundary.encode("ascii") + crlf)
|
|
132
|
+
body.extend(f'Content-Disposition: form-data; name="{k}"'.encode("utf-8"))
|
|
133
|
+
body.extend(crlf + crlf)
|
|
134
|
+
body.extend(str(v).encode("utf-8"))
|
|
135
|
+
body.extend(crlf)
|
|
136
|
+
|
|
137
|
+
body.extend(b"--" + boundary.encode("ascii") + crlf)
|
|
138
|
+
body.extend(f'Content-Disposition: form-data; name="{file_field}"; filename="{filename}"'.encode("utf-8"))
|
|
139
|
+
body.extend(crlf)
|
|
140
|
+
body.extend(f"Content-Type: {content_type}".encode("utf-8"))
|
|
141
|
+
body.extend(crlf + crlf)
|
|
142
|
+
body.extend(bytes(content or b""))
|
|
143
|
+
body.extend(crlf)
|
|
144
|
+
body.extend(b"--" + boundary.encode("ascii") + b"--" + crlf)
|
|
145
|
+
|
|
146
|
+
headers = {
|
|
147
|
+
"Accept": "application/json",
|
|
148
|
+
"Content-Type": f"multipart/form-data; boundary={boundary}",
|
|
149
|
+
}
|
|
150
|
+
if token:
|
|
151
|
+
headers["Authorization"] = f"Bearer {token}"
|
|
152
|
+
|
|
153
|
+
req = Request(url=url, data=bytes(body), headers=headers, method="POST")
|
|
154
|
+
try:
|
|
155
|
+
with urlopen(req, timeout=float(timeout_s)) as resp:
|
|
156
|
+
raw = resp.read().decode("utf-8")
|
|
157
|
+
return json.loads(raw) if raw else {}
|
|
158
|
+
except HTTPError as e:
|
|
159
|
+
try:
|
|
160
|
+
raw = e.read().decode("utf-8")
|
|
161
|
+
except Exception:
|
|
162
|
+
raw = ""
|
|
163
|
+
detail = raw.strip() or str(e)
|
|
164
|
+
raise RuntimeError(f"Gateway HTTP {e.code}: {detail}") from e
|
|
165
|
+
|
|
166
|
+
|
|
def _split_bundle_ref(raw: str) -> tuple[str, Optional[str]]:
    s = str(raw or "").strip()
    if not s:
        return ("", None)
    if "@" not in s:
        return (s, None)
    a, b = s.split("@", 1)
    a = a.strip()
    b = b.strip() if b.strip() else None
    if not a:
        return ("", None)
    return (a, b)

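For illustration, the parser above accepts either a bare bundle id or an id@version pair; these example refs are hypothetical.

from abstractcode.gateway_cli import _split_bundle_ref

assert _split_bundle_ref("demo_bundle") == ("demo_bundle", None)
assert _split_bundle_ref("demo_bundle@1.2.0") == ("demo_bundle", "1.2.0")
assert _split_bundle_ref("   ") == ("", None)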
182
|
+
class GatewayApi:
|
|
183
|
+
base_url: str
|
|
184
|
+
token: Optional[str] = None
|
|
185
|
+
|
|
186
|
+
def start_run(self, *, flow_id: str, input_data: Dict[str, Any], bundle_id: Optional[str] = None) -> str:
|
|
187
|
+
body: Dict[str, Any] = {"flow_id": flow_id, "input_data": dict(input_data or {})}
|
|
188
|
+
if bundle_id:
|
|
189
|
+
body["bundle_id"] = bundle_id
|
|
190
|
+
resp = _request_json(
|
|
191
|
+
method="POST",
|
|
192
|
+
url=_join_url(self.base_url, "/api/gateway/runs/start"),
|
|
193
|
+
token=self.token,
|
|
194
|
+
payload=body,
|
|
195
|
+
)
|
|
196
|
+
run_id = resp.get("run_id")
|
|
197
|
+
if not isinstance(run_id, str) or not run_id.strip():
|
|
198
|
+
raise RuntimeError(f"Invalid gateway response: {resp}")
|
|
199
|
+
return run_id.strip()
|
|
200
|
+
|
|
201
|
+
def get_run(self, run_id: str) -> Dict[str, Any]:
|
|
202
|
+
return _request_json(
|
|
203
|
+
method="GET",
|
|
204
|
+
url=_join_url(self.base_url, f"/api/gateway/runs/{run_id}"),
|
|
205
|
+
token=self.token,
|
|
206
|
+
payload=None,
|
|
207
|
+
)
|
|
208
|
+
|
|
209
|
+
def get_ledger(self, run_id: str, *, after: int, limit: int = 200) -> Dict[str, Any]:
|
|
210
|
+
return _request_json(
|
|
211
|
+
method="GET",
|
|
212
|
+
url=_join_url(self.base_url, f"/api/gateway/runs/{run_id}/ledger?after={int(after)}&limit={int(limit)}"),
|
|
213
|
+
token=self.token,
|
|
214
|
+
payload=None,
|
|
215
|
+
)
|
|
216
|
+
|
|
217
|
+
def submit_command(
|
|
218
|
+
self,
|
|
219
|
+
*,
|
|
220
|
+
run_id: str,
|
|
221
|
+
typ: str,
|
|
222
|
+
payload: Dict[str, Any],
|
|
223
|
+
command_id: Optional[str] = None,
|
|
224
|
+
client_id: Optional[str] = None,
|
|
225
|
+
) -> Dict[str, Any]:
|
|
226
|
+
body = {
|
|
227
|
+
"command_id": command_id or f"cmd_{uuid.uuid4().hex}",
|
|
228
|
+
"run_id": run_id,
|
|
229
|
+
"type": typ,
|
|
230
|
+
"payload": dict(payload or {}),
|
|
231
|
+
"client_id": client_id,
|
|
232
|
+
}
|
|
233
|
+
return _request_json(
|
|
234
|
+
method="POST",
|
|
235
|
+
url=_join_url(self.base_url, "/api/gateway/commands"),
|
|
236
|
+
token=self.token,
|
|
237
|
+
payload=body,
|
|
238
|
+
)
|
|
239
|
+
|
|
    def kg_query(
        self,
        *,
        run_id: Optional[str] = None,
        session_id: Optional[str] = None,
        scope: str = "session",
        owner_id: Optional[str] = None,
        all_owners: bool = False,
        subject: Optional[str] = None,
        predicate: Optional[str] = None,
        object_value: Optional[str] = None,
        since: Optional[str] = None,
        until: Optional[str] = None,
        active_at: Optional[str] = None,
        query_text: Optional[str] = None,
        min_score: Optional[float] = None,
        limit: int = 500,
        order: str = "desc",
        timeout_s: float = 60.0,
    ) -> Dict[str, Any]:
        body: Dict[str, Any] = {
            "scope": str(scope or "session").strip().lower() or "session",
            "limit": int(limit),
            "order": str(order or "desc").strip().lower() or "desc",
        }
        if bool(all_owners):
            body["all_owners"] = True
        if run_id:
            body["run_id"] = str(run_id or "").strip()
        if session_id:
            body["session_id"] = str(session_id or "").strip()
        if owner_id:
            body["owner_id"] = owner_id
        if subject:
            body["subject"] = subject
        if predicate:
            body["predicate"] = predicate
        if object_value:
            body["object"] = object_value
        if since:
            body["since"] = since
        if until:
            body["until"] = until
        if active_at:
            body["active_at"] = active_at
        if query_text:
            body["query_text"] = query_text
        if min_score is not None:
            body["min_score"] = float(min_score)
        return _request_json(
            method="POST",
            url=_join_url(self.base_url, "/api/gateway/kg/query"),
            token=self.token,
            payload=body,
            timeout_s=float(timeout_s),
        )

    def list_bundles(self, *, all_versions: bool = False, include_deprecated: bool = False) -> Dict[str, Any]:
        qs = [
            "all_versions=true" if bool(all_versions) else "all_versions=false",
            "include_deprecated=true" if bool(include_deprecated) else "include_deprecated=false",
        ]
        return _request_json(
            method="GET",
            url=_join_url(self.base_url, f"/api/gateway/bundles?{'&'.join(qs)}"),
            token=self.token,
            payload=None,
        )

    def get_bundle(self, *, bundle_id: str, bundle_version: Optional[str] = None) -> Dict[str, Any]:
        bid = str(bundle_id or "").strip()
        if not bid:
            raise ValueError("bundle_id is required")
        qs = f"?bundle_version={bundle_version}" if isinstance(bundle_version, str) and bundle_version.strip() else ""
        return _request_json(
            method="GET",
            url=_join_url(self.base_url, f"/api/gateway/bundles/{bid}{qs}"),
            token=self.token,
            payload=None,
        )

    def upload_bundle(self, *, path: str, overwrite: bool = False, reload: bool = True) -> Dict[str, Any]:
        src = Path(str(path or "").strip()).expanduser().resolve()
        if not src.exists() or not src.is_file():
            raise FileNotFoundError(f"Bundle not found: {src}")
        content = src.read_bytes()
        return _request_multipart(
            url=_join_url(self.base_url, "/api/gateway/bundles/upload"),
            token=self.token,
            fields={"overwrite": "true" if overwrite else "false", "reload": "true" if reload else "false"},
            file_field="file",
            filename=src.name,
            content=content,
            content_type="application/octet-stream",
            timeout_s=60.0,
        )

    def remove_bundle(self, *, bundle_ref: str, reload: bool = True) -> Dict[str, Any]:
        bid, ver = _split_bundle_ref(bundle_ref)
        if not bid:
            raise ValueError("bundle_ref must be 'bundle_id' or 'bundle_id@version'")
        qs = []
        if ver:
            qs.append(f"bundle_version={ver}")
        if bool(reload):
            qs.append("reload=true")
        else:
            qs.append("reload=false")
        q = ("?" + "&".join(qs)) if qs else ""
        return _request_json(
            method="DELETE",
            url=_join_url(self.base_url, f"/api/gateway/bundles/{bid}{q}"),
            token=self.token,
            payload=None,
        )

    def deprecate_bundle(self, *, bundle_id: str, flow_id: Optional[str] = None, reason: Optional[str] = None) -> Dict[str, Any]:
        bid = str(bundle_id or "").strip()
        if not bid:
            raise ValueError("bundle_id is required")
        payload: Dict[str, Any] = {}
        fid = str(flow_id or "").strip()
        if fid:
            payload["flow_id"] = fid
        r = str(reason or "").strip()
        if r:
            payload["reason"] = r
        return _request_json(
            method="POST",
            url=_join_url(self.base_url, f"/api/gateway/bundles/{bid}/deprecate"),
            token=self.token,
            payload=payload,
        )

    def undeprecate_bundle(self, *, bundle_id: str, flow_id: Optional[str] = None) -> Dict[str, Any]:
        bid = str(bundle_id or "").strip()
        if not bid:
            raise ValueError("bundle_id is required")
        payload: Dict[str, Any] = {}
        fid = str(flow_id or "").strip()
        if fid:
            payload["flow_id"] = fid
        return _request_json(
            method="POST",
            url=_join_url(self.base_url, f"/api/gateway/bundles/{bid}/undeprecate"),
            token=self.token,
            payload=payload,
        )

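A usage sketch for illustration (flow id, session id, bundle path, and version are placeholders): the GatewayApi client above covers run start/inspection, knowledge-graph queries, and bundle management against the endpoints shown in the diff.

from abstractcode.gateway_cli import GatewayApi, default_gateway_token, default_gateway_url

api = GatewayApi(base_url=default_gateway_url(), token=default_gateway_token())

# Runs: start, inspect, page the step ledger.
run_id = api.start_run(flow_id="hello_flow", input_data={"question": "ping"})
print(api.get_run(run_id).get("status"))
for rec in api.get_ledger(run_id, after=0, limit=200).get("items") or []:
    print(rec.get("node_id"), rec.get("status"))

# Knowledge graph: session-scoped query.
kg = api.kg_query(session_id="SESSION_ID_HERE", scope="session", query_text="favorite color", limit=20)
for item in kg.get("items") or []:
    print(item.get("subject"), item.get("predicate"), item.get("object"))

# Bundles: upload, list, deprecate, remove.
api.upload_bundle(path="./dist/demo_bundle.zip", overwrite=True)
print(api.list_bundles(all_versions=True))
api.deprecate_bundle(bundle_id="demo_bundle", reason="superseded")
api.remove_bundle(bundle_ref="demo_bundle@0.1.0")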
def _extract_sub_run_id_from_step(record: Dict[str, Any]) -> Optional[str]:
    if not isinstance(record, dict):
        return None
    if record.get("status") != "waiting":
        return None
    result = record.get("result")
    if not isinstance(result, dict):
        return None
    wait = result.get("wait")
    if not isinstance(wait, dict):
        return None
    if wait.get("reason") != "subworkflow":
        return None
    details = wait.get("details")
    if not isinstance(details, dict):
        return None
    sub_run_id = details.get("sub_run_id")
    return sub_run_id.strip() if isinstance(sub_run_id, str) and sub_run_id.strip() else None

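For illustration, a waiting ledger record of the nested shape the extractor above walks; the values are placeholders.

record = {
    "node_id": "call_subflow",
    "status": "waiting",
    "result": {
        "wait": {
            "reason": "subworkflow",
            "details": {"sub_run_id": "run_abc123"},
        },
    },
}
# _extract_sub_run_id_from_step(record) would return "run_abc123".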
def _print_step(*, run_id: str, rec: Dict[str, Any]) -> None:
    node_id = rec.get("node_id") or rec.get("nodeId") or ""
    status = rec.get("status") or ""
    effect = rec.get("effect") if isinstance(rec.get("effect"), dict) else None
    effect_type = effect.get("type") if isinstance(effect, dict) else None
    prefix = run_id[:8]

    line = f"[{prefix}] {status} {node_id}"
    if effect_type:
        line += f" ({effect_type})"
    print(line)

    sub = _extract_sub_run_id_from_step(rec)
    if sub:
        print(f"[{prefix}] ↳ sub_run_id={sub}")


def _prompt_user(waiting: Dict[str, Any]) -> str:
    prompt = waiting.get("prompt") or "Please respond:"
    choices = waiting.get("choices")
    if isinstance(choices, list) and choices:
        print(str(prompt))
        for i, c in enumerate(choices, start=1):
            print(f"  {i}. {c}")
        raw = input("> ").strip()
        if raw.isdigit():
            idx = int(raw)
            if 1 <= idx <= len(choices):
                return str(choices[idx - 1])
        return raw
    return input(f"{prompt}\n> ").strip()

def run_gateway_flow_command(
    *,
    gateway_url: Optional[str],
    gateway_token: Optional[str],
    flow_id: str,
    bundle_id: Optional[str],
    input_data: Dict[str, Any],
    follow: bool,
    poll_s: float = 0.25,
) -> str:
    api = GatewayApi(base_url=str(gateway_url or default_gateway_url()), token=gateway_token or default_gateway_token())

    run_id = api.start_run(flow_id=flow_id, bundle_id=bundle_id, input_data=input_data)
    print(f"run_id={run_id}")
    if not follow:
        return run_id
    _follow_runs(api=api, root_run_id=run_id, poll_s=poll_s)
    return run_id


def attach_gateway_run_command(
    *,
    gateway_url: Optional[str],
    gateway_token: Optional[str],
    run_id: str,
    follow: bool,
    poll_s: float = 0.25,
) -> None:
    api = GatewayApi(base_url=str(gateway_url or default_gateway_url()), token=gateway_token or default_gateway_token())
    if not follow:
        state = api.get_run(run_id)
        print(json.dumps(state, indent=2, ensure_ascii=False))
        return

    _follow_runs(api=api, root_run_id=str(run_id), poll_s=poll_s)

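For illustration (flow id and input are placeholders): the command-level entry points resolve the URL and token from the environment when None is passed; with follow=True they stream ledger steps and answer wait-for-user prompts interactively.

from abstractcode.gateway_cli import attach_gateway_run_command, run_gateway_flow_command

run_id = run_gateway_flow_command(
    gateway_url=None,      # falls back to ABSTRACTCODE_GATEWAY_URL / default
    gateway_token=None,    # falls back to the env token resolution above
    flow_id="hello_flow",
    bundle_id=None,
    input_data={"question": "ping"},
    follow=False,
)

# Later: print the run state once, or re-attach with follow=True to stream it.
attach_gateway_run_command(gateway_url=None, gateway_token=None, run_id=run_id, follow=False)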
def query_gateway_kg_command(
    *,
    gateway_url: Optional[str],
    gateway_token: Optional[str],
    run_id: Optional[str],
    scope: str = "session",
    owner_id: Optional[str] = None,
    all_owners: bool = False,
    subject: Optional[str] = None,
    predicate: Optional[str] = None,
    object_value: Optional[str] = None,
    since: Optional[str] = None,
    until: Optional[str] = None,
    active_at: Optional[str] = None,
    query_text: Optional[str] = None,
    min_score: Optional[float] = None,
    limit: int = 0,
    order: str = "desc",
    fmt: str = "triples",
    pretty: bool = False,
) -> None:
    api = GatewayApi(base_url=str(gateway_url or default_gateway_url()), token=gateway_token or default_gateway_token())
    id_value = str(run_id or "").strip() if isinstance(run_id, str) else ""
    scope_norm = str(scope or "").strip().lower() or "session"
    if not id_value and not all_owners and scope_norm not in {"global"} and not owner_id:
        raise SystemExit("abstractcode gateway kg: id is required unless using --scope global or --all-owners (or provide --owner-id)")

    run_id_arg: Optional[str] = None
    session_id_arg: Optional[str] = None
    if id_value and scope_norm != "global" and not all_owners and not owner_id:
        # Prefer session_id for session scope (common id shape overlaps with run_ids).
        if scope_norm == "session":
            session_id_arg = id_value
        else:
            run_id_arg = id_value
    try:
        resp = api.kg_query(
            run_id=run_id_arg,
            session_id=session_id_arg,
            scope=str(scope_norm),
            owner_id=owner_id,
            all_owners=bool(all_owners),
            subject=subject,
            predicate=predicate,
            object_value=object_value,
            since=since,
            until=until,
            active_at=active_at,
            query_text=query_text,
            min_score=min_score,
            limit=int(limit),
            order=str(order),
        )
    except RuntimeError as e:
        # Convenience: when the user passes a session id (e.g. AbstractCode Web session_id),
        # the gateway won't find a RunState by that id. Retry as `session_id` for session scope.
        msg = str(e)
        is_run_not_found = "Gateway HTTP 404:" in msg and "not found" in msg and "Run '" in msg
        if (
            is_run_not_found
            and scope_norm in {"session", "all"}
            and not owner_id
            and not all_owners
            and id_value
        ):
            resp = api.kg_query(
                run_id=None,
                session_id=str(id_value),
                scope=str(scope_norm),
                owner_id=owner_id,
                all_owners=bool(all_owners),
                subject=subject,
                predicate=predicate,
                object_value=object_value,
                since=since,
                until=until,
                active_at=active_at,
                query_text=query_text,
                min_score=min_score,
                limit=int(limit),
                order=str(order),
            )
        else:
            raise

    warnings = resp.get("warnings")
    if isinstance(warnings, list) and warnings:
        for w in warnings:
            if isinstance(w, str) and w.strip():
                print(f"warning: {w.strip()}", file=os.sys.stderr)

    items = resp.get("items")
    if not isinstance(items, list):
        items = []

    fmt2 = str(fmt or "triples").strip().lower() or "triples"
    if fmt2 == "json":
        indent = 2 if bool(pretty) else None
        print(json.dumps(resp, indent=indent, ensure_ascii=False))
        return

    if fmt2 == "jsonl":
        for item in items:
            if isinstance(item, dict):
                print(json.dumps(item, ensure_ascii=False))
        return

    # Default: human-readable triples.
    for item in items:
        if not isinstance(item, dict):
            continue
        observed_at = str(item.get("observed_at") or "").strip()
        subj = str(item.get("subject") or "").strip()
        pred = str(item.get("predicate") or "").strip()
        obj = str(item.get("object") or "").strip()
        scope_v = str(item.get("scope") or "").strip()
        owner_v = str(item.get("owner_id") or "").strip()

        suffix_parts: list[str] = []
        if scope_v:
            suffix_parts.append(f"scope={scope_v}")
        if owner_v:
            suffix_parts.append(f"owner_id={owner_v}")
        conf = item.get("confidence")
        if isinstance(conf, (int, float)):
            suffix_parts.append(f"confidence={float(conf):.3f}")
        attrs = item.get("attributes")
        if isinstance(attrs, dict):
            ret = attrs.get("_retrieval")
            if isinstance(ret, dict) and isinstance(ret.get("score"), (int, float)):
                suffix_parts.append(f"score={float(ret['score']):.3f}")

        suffix = f" ({', '.join(suffix_parts)})" if suffix_parts else ""
        ts = f"[{observed_at}] " if observed_at else ""
        print(f"{ts}{subj} --{pred}--> {obj}{suffix}")

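For illustration (the id and printed values are placeholders): in the default "triples" format each item prints as one line built from observed_at, subject, predicate, object, and an optional scope/owner/confidence suffix.

from abstractcode.gateway_cli import query_gateway_kg_command

# Prints lines shaped like:
#   [2025-01-01T12:00:00Z] user --prefers--> dark_mode (scope=session, confidence=0.900)
query_gateway_kg_command(
    gateway_url=None,
    gateway_token=None,
    run_id="SESSION_ID_HERE",
    scope="session",
    fmt="triples",
)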
def _follow_runs(*, api: GatewayApi, root_run_id: str, poll_s: float) -> None:
    cursors: Dict[str, int] = {root_run_id: 0}
    active: Dict[str, bool] = {root_run_id: True}

    while True:
        # 1) Replay ledgers for all known runs (root + discovered subruns).
        for rid in list(cursors.keys()):
            cur = int(cursors.get(rid, 0))
            page = api.get_ledger(rid, after=cur, limit=200)
            items = page.get("items")
            if not isinstance(items, list):
                items = []
            for rec_any in items:
                rec = rec_any if isinstance(rec_any, dict) else None
                if rec is None:
                    continue
                _print_step(run_id=rid, rec=rec)
                sub = _extract_sub_run_id_from_step(rec)
                if sub and sub not in cursors:
                    cursors[sub] = 0
                    active[sub] = True

            cursors[rid] = int(page.get("next_after") or (cur + len(items)))

        # 2) Handle waiting/user prompts and stop conditions.
        any_active = False
        root_status: Optional[str] = None
        root_waiting: Optional[Dict[str, Any]] = None
        tool_blocked: list[tuple[str, str, Optional[str]]] = []
        for rid in list(active.keys()):
            if not active.get(rid):
                continue
            any_active = True
            state = api.get_run(rid)
            status = state.get("status")
            if rid == root_run_id:
                root_status = status if isinstance(status, str) else None
                root_waiting = state.get("waiting") if isinstance(state.get("waiting"), dict) else None
            if status in {"completed", "failed", "cancelled"}:
                active[rid] = False
                continue

            if status != "waiting":
                continue
            waiting = state.get("waiting")
            if not isinstance(waiting, dict):
                continue
            reason = waiting.get("reason")
            details = waiting.get("details") if isinstance(waiting.get("details"), dict) else {}
            mode = details.get("mode") if isinstance(details, dict) else None
            if reason in {"event", "job"} and isinstance(details, dict) and ("tool_calls" in details or "mode" in details):
                tool_blocked.append((rid, str(reason), str(mode) if mode is not None else None))

            if reason != "user":
                continue

            wait_key = waiting.get("wait_key")
            wait_key = wait_key.strip() if isinstance(wait_key, str) and wait_key.strip() else None
            if not wait_key:
                continue

            response = _prompt_user(waiting)
            api.submit_command(
                run_id=rid,
                typ="resume",
                payload={"wait_key": wait_key, "payload": {"response": response}},
            )

        # Root completion is the natural stop condition for "run".
        if isinstance(root_status, str) and root_status in {"completed", "failed", "cancelled"}:
            return

        # If the root is blocked waiting on a subworkflow, but a child is blocked on a tool wait,
        # stop to avoid hanging indefinitely (manual resume is required).
        if (
            isinstance(root_status, str)
            and root_status == "waiting"
            and isinstance(root_waiting, dict)
            and root_waiting.get("reason") == "subworkflow"
            and tool_blocked
        ):
            rid, reason, mode = tool_blocked[0]
            print(f"[{root_run_id[:8]}] blocked: subworkflow waiting on run={rid} reason={reason} mode={mode}")
            print(f"[{root_run_id[:8]}] resume tools via gateway, then re-attach")
            return

        # If the root is waiting on a non-user input, stop (manual resume required).
        if isinstance(root_status, str) and root_status == "waiting" and isinstance(root_waiting, dict):
            reason = root_waiting.get("reason")
            if reason not in {"user", "subworkflow", "until"}:
                details = root_waiting.get("details") if isinstance(root_waiting.get("details"), dict) else {}
                mode = details.get("mode") if isinstance(details, dict) else None
                print(f"[{root_run_id[:8]}] waiting reason={reason} mode={mode} (manual resume required)")
                return

        if not any_active:
            return

        time.sleep(float(poll_s))