dap-cli 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dap_cli/__init__.py +1 -0
- dap_cli/__main__.py +99 -0
- dap_cli/_dashboard/.gitkeep +5 -0
- dap_cli/_dashboard/BUNDLE_INFO.txt +3 -0
- dap_cli/_dashboard/package.json +52 -0
- dap_cli/_dashboard/server.js +38 -0
- dap_cli/bootstrap.py +262 -0
- dap_cli/commands/__init__.py +0 -0
- dap_cli/commands/cortex.py +688 -0
- dap_cli/commands/init.py +183 -0
- dap_cli/commands/project.py +134 -0
- dap_cli/commands/start.py +124 -0
- dap_cli/commands/status.py +136 -0
- dap_cli/commands/stop.py +35 -0
- dap_cli/dashboard.py +99 -0
- dap_cli/paths.py +27 -0
- dap_cli/process.py +130 -0
- dap_cli/py.typed +0 -0
- dap_cli-0.3.0.dist-info/METADATA +43 -0
- dap_cli-0.3.0.dist-info/RECORD +22 -0
- dap_cli-0.3.0.dist-info/WHEEL +4 -0
- dap_cli-0.3.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,688 @@
|
|
|
1
|
+
"""Cortex pipeline operations — implementation behind `dap project run/approve/reject/state cortex`.
|
|
2
|
+
|
|
3
|
+
Calls the DAP engine REST API. Does NOT import from `cortex.*` directly —
|
|
4
|
+
the cortex package lives in packages/cortex/ (Fase 1) and is loaded
|
|
5
|
+
only via the bundle JSON that ships inside it.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import importlib.resources
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
import re
|
|
14
|
+
import subprocess
|
|
15
|
+
import time
|
|
16
|
+
from datetime import date, datetime
|
|
17
|
+
from decimal import Decimal
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Any
|
|
20
|
+
|
|
21
|
+
import httpx
|
|
22
|
+
from rich.console import Console
|
|
23
|
+
from rich.progress import Progress, SpinnerColumn, TextColumn
|
|
24
|
+
|
|
25
|
+
console = Console()
|
|
26
|
+
|
|
27
|
+
DEFAULT_ENGINE_URL = "http://localhost:7333"
|
|
28
|
+
POLL_INTERVAL_SECONDS = 10
|
|
29
|
+
CORTEX_BUNDLE_NAME = "cortex-full.pipeline-bundle.json"
|
|
30
|
+
CORTEX_PROJECT_NAME = "cortex"
|
|
31
|
+
CORTEX_PIPELINE_KIND = "full"
|
|
32
|
+
|
|
33
|
+
# Status colours for display
|
|
34
|
+
_STATUS_STYLE: dict[str, str] = {
|
|
35
|
+
"running": "cyan",
|
|
36
|
+
"paused": "yellow",
|
|
37
|
+
"success": "green",
|
|
38
|
+
"failed": "red",
|
|
39
|
+
"aborted": "red",
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
# ---------------------------------------------------------------------------
|
|
44
|
+
# URL parsing
|
|
45
|
+
# ---------------------------------------------------------------------------
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def parse_issue_url(url: str) -> tuple[str, int]:
    """Extract ``(owner/repo, issue_number)`` from a GitHub issue URL.

    Accepted shape (trailing slashes are ignored):
        https://github.com/<owner>/<repo>/issues/<number>

    Raises:
        ValueError: when the URL does not match the expected shape.
    """
    issue_re = re.compile(r"https?://github\.com/([^/]+/[^/]+)/issues/(\d+)")
    m = issue_re.fullmatch(url.rstrip("/"))
    if m is None:
        raise ValueError(
            f"Cannot parse GitHub issue URL: {url!r}\n"
            "Expected format: https://github.com/<owner>/<repo>/issues/<number>"
        )
    owner_repo, number = m.groups()
    return owner_repo, int(number)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
# ---------------------------------------------------------------------------
|
|
66
|
+
# Bundle loading
|
|
67
|
+
# ---------------------------------------------------------------------------
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def load_cortex_bundle() -> dict[str, Any]:
    """Read cortex-full.pipeline-bundle.json from the installed cortex package.

    The bundle ships inside ``cortex.dap_bundles``; the `cortex` distribution
    (packages/cortex/) must therefore be importable from this environment.

    Raises:
        ImportError: when the package or the bundle file is missing, with
            remediation instructions for the operator.
    """
    try:
        resource = importlib.resources.files("cortex.dap_bundles") / CORTEX_BUNDLE_NAME
        raw = resource.read_text(encoding="utf-8")
    except (ModuleNotFoundError, FileNotFoundError) as exc:
        raise ImportError(
            "Could not load the Cortex pipeline bundle.\n\n"
            "The `cortex` package must be installed in this environment.\n"
            "Run: uv add --workspace cortex\n\n"
            "(This depends on packages/cortex/ being present in the DAP monorepo — "
            "see Etapa 2 / dap#170 for the migration status.)"
        ) from exc
    return dict(json.loads(raw))
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
# ---------------------------------------------------------------------------
|
|
91
|
+
# Engine health check
|
|
92
|
+
# ---------------------------------------------------------------------------
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _client(engine_url: str) -> httpx.Client:
    """Build a short-lived HTTP client rooted at the engine base URL.

    Trailing slashes are trimmed so route paths concatenate cleanly; a
    10-second timeout applies to every request.
    """
    base = engine_url.rstrip("/")
    return httpx.Client(base_url=base, timeout=10.0)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def check_engine(engine_url: str) -> None:
    """Raise SystemExit with a helpful message if the engine is unreachable.

    Probes GET /health; any httpx transport or status error counts as
    "engine unreachable" and exits the process with status 1.
    """
    try:
        with _client(engine_url) as client:
            health = client.get("/health")
            health.raise_for_status()
    except httpx.HTTPError as exc:
        console.print(f"[red]✗ DAP engine not reachable at {engine_url}[/red]")
        console.print("[dim] Start with: uv run dap-engine[/dim]")
        console.print(f"[dim] Error: {exc}[/dim]")
        raise SystemExit(1) from exc
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
# ---------------------------------------------------------------------------
|
|
113
|
+
# Pipeline import — idempotent
|
|
114
|
+
# ---------------------------------------------------------------------------
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def _find_pipeline_by_name(client: httpx.Client, name: str, version: str) -> str | None:
|
|
118
|
+
"""Return existing pipeline id if name+description matches, else None."""
|
|
119
|
+
resp = client.get("/pipelines", params={"limit": 200})
|
|
120
|
+
resp.raise_for_status()
|
|
121
|
+
for p in resp.json().get("items", []):
|
|
122
|
+
if p.get("name") == name:
|
|
123
|
+
return str(p["id"])
|
|
124
|
+
return None
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def ensure_pipeline_imported(engine_url: str, bundle: dict[str, Any]) -> str:
    """Import the Cortex bundle into the engine if not already present.

    Idempotent: if a pipeline with the bundle's name is already registered,
    its id is returned without re-importing.

    Args:
        engine_url: base URL of the DAP engine.
        bundle: parsed pipeline-bundle JSON (see load_cortex_bundle).

    Returns the pipeline_id (existing or freshly created).

    Raises:
        SystemExit: with status 1 when the import request is rejected.
    """
    pipeline_payload = bundle.get("pipeline", {})
    pipeline_name: str = pipeline_payload.get("name", "cortex-full")

    # Strip keys that are not part of PipelineImportRequest
    import_body = {
        "schema_version": bundle.get("schema_version", "pipeline-export/1"),
        "pipeline": pipeline_payload,
    }
    # bundled_agents is optional in the bundle format; forward it only when set.
    if "bundled_agents" in bundle:
        import_body["bundled_agents"] = bundle["bundled_agents"]

    with _client(engine_url) as client:
        # Version argument is unused by the lookup; matching is by name only.
        existing_id = _find_pipeline_by_name(client, pipeline_name, "")
        if existing_id:
            console.print(
                f"[dim] Pipeline '{pipeline_name}' already imported — id {existing_id}[/dim]"
            )
            return existing_id

        console.print(f"[cyan]→ Importing pipeline '{pipeline_name}'...[/cyan]")
        resp = client.post("/pipelines/import", json=import_body)
        if resp.status_code not in (200, 201):
            console.print(f"[red]✗ Failed to import pipeline: {resp.status_code}[/red]")
            console.print(f"[dim]{resp.text}[/dim]")
            raise SystemExit(1)
        pipeline_id = str(resp.json()["id"])
        console.print(f"[green]✓ Pipeline imported — id {pipeline_id}[/green]")
        return pipeline_id
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
# ---------------------------------------------------------------------------
|
|
163
|
+
# Project — create or reuse
|
|
164
|
+
# ---------------------------------------------------------------------------
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _collect_env_vars() -> dict[str, str]:
|
|
168
|
+
"""Collect Cortex-relevant env vars from the current process environment."""
|
|
169
|
+
keys = [
|
|
170
|
+
"GH_TOKEN_READ",
|
|
171
|
+
"GH_TOKEN_ISSUES",
|
|
172
|
+
"GH_TOKEN_CODE",
|
|
173
|
+
"GH_TOKEN_MERGE",
|
|
174
|
+
"CORTEX_DATABASE_URL",
|
|
175
|
+
"DAP_DATABASE_URL",
|
|
176
|
+
]
|
|
177
|
+
return {k: v for k in keys if (v := os.environ.get(k))}
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def ensure_project(engine_url: str, pipeline_id: str, workspace_path: str) -> str:
    """Create the 'cortex' project if it doesn't exist. Returns project_id.

    Args:
        engine_url: base URL of the DAP engine.
        pipeline_id: pipeline to bind under the CORTEX_PIPELINE_KIND slot.
        workspace_path: working directory bash nodes should run in.

    An existing (non-archived) project named 'cortex' is updated in place on
    every call — see the inline note below for why.

    Raises:
        SystemExit: with status 1 when project creation is rejected.
        httpx.HTTPStatusError: when the listing or update request fails.
    """
    with _client(engine_url) as client:
        # Look for existing project named 'cortex'
        resp = client.get("/projects", params={"limit": 200})
        resp.raise_for_status()
        for p in resp.json().get("items", []):
            if p.get("name") == CORTEX_PROJECT_NAME and p.get("archived_at") is None:
                proj_id = str(p["id"])
                console.print(
                    f"[dim] Project '{CORTEX_PROJECT_NAME}' already exists — id {proj_id}[/dim]"
                )
                # Keep working_directory and env_vars current on every run so
                # bash nodes (git-branch) always get the right cwd. Without
                # this, a project created before working_directory was set, or
                # one pointing to the wrong path, would silently pass the wrong
                # cwd to every bash node for the lifetime of the project (#224).
                #
                # Build from existing project data to preserve fields we don't
                # control (repo_url, default_branch) and merge pipeline bindings
                # rather than replacing them wholesale (Copilot review).
                update_payload = {
                    "name": p.get("name", CORTEX_PROJECT_NAME),
                    "description": p.get("description", "Cortex multi-agent pipeline"),
                    "working_directory": workspace_path,
                    "repo_url": p.get("repo_url"),
                    "default_branch": p.get("default_branch", "main"),
                    "pipelines": {
                        **p.get("pipelines", {}),
                        CORTEX_PIPELINE_KIND: pipeline_id,
                    },
                    "env_vars": _collect_env_vars(),
                }
                resp_put = client.put(f"/projects/{proj_id}", json=update_payload)
                resp_put.raise_for_status()
                return proj_id

        # No live 'cortex' project found — create one from scratch.
        console.print(f"[cyan]→ Creating project '{CORTEX_PROJECT_NAME}'...[/cyan]")
        payload = {
            "name": CORTEX_PROJECT_NAME,
            "description": "Cortex multi-agent pipeline",
            "working_directory": workspace_path,
            "pipelines": {CORTEX_PIPELINE_KIND: pipeline_id},
            "env_vars": _collect_env_vars(),
        }
        resp = client.post("/projects", json=payload)
        if resp.status_code not in (200, 201):
            console.print(f"[red]✗ Failed to create project: {resp.status_code}[/red]")
            console.print(f"[dim]{resp.text}[/dim]")
            raise SystemExit(1)
        proj_id = str(resp.json()["id"])
        console.print(f"[green]✓ Project created — id {proj_id}[/green]")
        return proj_id
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
# ---------------------------------------------------------------------------
|
|
236
|
+
# Run creation
|
|
237
|
+
# ---------------------------------------------------------------------------
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def create_run(
    engine_url: str,
    project_id: str,
    pipeline_id: str,
    issue_url: str,
    repo: str,
    issue_number: int,
    workspace_path: str,
) -> str:
    """POST /projects/{project_id}/run/full — returns run_id.

    The issue coordinates and workspace path are seeded into the run's
    initial state under ``extensions`` so pipeline nodes can read them.

    NOTE(review): ``pipeline_id`` is accepted but never sent in the request
    body — presumably the engine resolves the pipeline from the project's
    binding for this run kind; confirm before removing the parameter.

    Raises:
        SystemExit: with status 1 on any non-200/201 response.
    """
    initial_state: dict[str, Any] = {
        "extensions": {
            "issue_url": issue_url,
            "issue_number": issue_number,
            "repo": repo,
            "workspace_path": workspace_path,
        }
    }
    with _client(engine_url) as client:
        resp = client.post(
            f"/projects/{project_id}/run/{CORTEX_PIPELINE_KIND}",
            json={"initial_state": initial_state},
        )
        if resp.status_code not in (200, 201):
            console.print(f"[red]✗ Failed to create run: {resp.status_code}[/red]")
            console.print(f"[dim]{resp.text}[/dim]")
            raise SystemExit(1)
        run_id = str(resp.json()["id"])
        return run_id
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
# ---------------------------------------------------------------------------
|
|
272
|
+
# Polling + gate interaction
|
|
273
|
+
# ---------------------------------------------------------------------------
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
def _get_run(engine_url: str, run_id: str) -> dict[str, Any]:
    """Fetch GET /runs/{run_id} and return the decoded JSON body as a dict."""
    with _client(engine_url) as client:
        response = client.get(f"/runs/{run_id}")
    response.raise_for_status()
    return dict(response.json())
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def _get_run_state(engine_url: str, run_id: str) -> dict[str, Any]:
    """Fetch GET /runs/{run_id}/state and return the decoded JSON as a dict."""
    with _client(engine_url) as client:
        response = client.get(f"/runs/{run_id}/state")
    response.raise_for_status()
    return dict(response.json())
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def _approve_gate(engine_url: str, run_id: str, node_id: str) -> None:
    """POST the approve action for a paused gate node; raises on HTTP error."""
    endpoint = f"/runs/{run_id}/nodes/{node_id}/approve"
    with _client(engine_url) as client:
        client.post(endpoint).raise_for_status()
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def _reject_gate(engine_url: str, run_id: str, node_id: str, reason: str) -> None:
    """Abort the run (no direct reject endpoint; operator feedback goes in issue)."""
    # DAP has /abort but not /reject-with-feedback at gate level, so the
    # closest faithful action is aborting the run and echoing the reason.
    with _client(engine_url) as client:
        response = client.post(f"/runs/{run_id}/abort")
        # 409 means the run already left the abortable state — not an error here.
        if response.status_code not in (200, 409):
            response.raise_for_status()
        console.print(f"[yellow] Feedback: {reason}[/yellow]")
        console.print(
            "[dim] Run aborted. Comment on the GitHub issue with the rejection reason.[/dim]"
        )
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
def _find_pending_gate(engine_url: str, run_id: str) -> str | None:
    """Return the node_id of the pending gate from run state, or None.

    Best-effort: any HTTP failure while fetching run state yields None so
    callers can fall back to a default gate name themselves.
    """
    try:
        state = _get_run_state(engine_url, run_id)
    except httpx.HTTPError:
        return None
    # The gate node id is in state.next or state.extensions.pending_gate
    extensions = state.get("extensions") or {}
    pending = extensions.get("pending_gate") or extensions.get("pending_approval")
    if pending:
        return str(pending)
    # No gate id recorded in state. Callers (not this function) apply the
    # fallback to known gate node names from the Cortex full bundle.
    return None
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def _prompt_gate_approval(gate_node: str, no_interactive: bool) -> tuple[bool, str]:
|
|
326
|
+
"""Return (approved, reason). In non-interactive mode always approves."""
|
|
327
|
+
if no_interactive:
|
|
328
|
+
return True, ""
|
|
329
|
+
console.print(f"\n[yellow]⏸ Paused at gate:[/yellow] [bold]{gate_node}[/bold]")
|
|
330
|
+
answer = console.input(" Approve? [y/N]: ").strip().lower()
|
|
331
|
+
if answer == "y":
|
|
332
|
+
return True, ""
|
|
333
|
+
reason = console.input(" Rejection reason (optional): ").strip()
|
|
334
|
+
return False, reason
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
def _known_gate_for_node(node_id: str) -> str:
|
|
338
|
+
"""Map a status hint to the canonical gate node id."""
|
|
339
|
+
gate_map = {
|
|
340
|
+
"gate-phase1": "gate-phase1",
|
|
341
|
+
"gate-phase2": "gate-phase2",
|
|
342
|
+
"gate-phase3": "gate-phase3",
|
|
343
|
+
}
|
|
344
|
+
return gate_map.get(node_id, node_id)
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def _poll_until_settled(
    engine_url: str,
    run_id: str,
    label: str,
) -> tuple[str, dict[str, Any]]:
    """Poll GET /runs/{run_id} until status is terminal or paused.

    Shows a transient spinner while waiting; polls every
    POLL_INTERVAL_SECONDS. Any HTTP failure fetching the run exits the
    process with status 1.

    Returns (final_status, last_run_dict).
    """
    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        transient=True,  # spinner disappears once polling ends
        console=console,
    ) as progress:
        task = progress.add_task(label, total=None)  # indeterminate spinner
        last_status = "running"
        last_run: dict[str, Any] = {}
        while True:
            # Sleep first: a freshly created run rarely settles instantly.
            time.sleep(POLL_INTERVAL_SECONDS)
            try:
                last_run = _get_run(engine_url, run_id)
            except httpx.HTTPError as exc:
                console.print(f"[red]✗ Could not fetch run status: {exc}[/red]")
                raise SystemExit(1) from exc
            # NOTE(review): assumes final_status is ABSENT (not null) while
            # the run is in flight — .get's default only applies to missing
            # keys, so an explicit null would crash .upper() below; confirm
            # against the engine schema.
            last_status = last_run.get("final_status", "running")
            style = _STATUS_STYLE.get(last_status, "white")
            progress.update(
                task,
                description=f"[{style}]{last_status.upper()}[/{style}] — run {run_id[:8]}",
            )
            if last_status in ("success", "failed", "aborted", "paused"):
                break
    return last_status, last_run
|
|
381
|
+
|
|
382
|
+
|
|
383
|
+
def _handle_gate(
    engine_url: str,
    run_id: str,
    run: dict[str, Any],
    watch_only: bool,
    no_interactive: bool,
) -> bool:
    """Handle a paused gate. Returns True to continue polling, False to stop.

    Gate-node resolution order: pending gate from run state, then the run's
    current_node, then the "gate-phase1" default.
    """
    gate_node = _find_pending_gate(engine_url, run_id)
    if not gate_node:
        gate_node = run.get("current_node") or "gate-phase1"
    gate_node = _known_gate_for_node(gate_node)

    if watch_only:
        # Observer mode: report the pause, leave the gate untouched, stop polling.
        console.print(
            f"\n[yellow]⏸ Paused at:[/yellow] [bold]{gate_node}[/bold] "
            f"(--watch mode — not approving)"
        )
        console.print(f" Approve with: dap project run cortex --run-id {run_id} approve")
        return False

    approved, reason = _prompt_gate_approval(gate_node, no_interactive)
    if not approved:
        # Rejection aborts the run — no further polling makes sense.
        _reject_gate(engine_url, run_id, gate_node, reason)
        return False

    console.print(f"[green]✓ Approving gate {gate_node}...[/green]")
    try:
        _approve_gate(engine_url, run_id, gate_node)
    except httpx.HTTPError as exc:
        console.print(f"[red]✗ Approve failed: {exc}[/red]")
        raise SystemExit(1) from exc
    return True
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
def poll_and_handle(
    engine_url: str,
    run_id: str,
    no_interactive: bool,
    watch_only: bool,
) -> None:
    """Poll run status and handle gates until completion or failure.

    Loop: wait for the run to settle; when it pauses at a gate, delegate to
    _handle_gate (approve / reject / stop watching). Terminal statuses break
    the loop and a summary line is printed.

    Raises:
        SystemExit: with status 1 when the run did not end in "success".
    """
    start_time = time.monotonic()
    last_status = "running"

    while True:
        last_status, last_run = _poll_until_settled(engine_url, run_id, "Running pipeline...")
        if last_status != "paused":
            break
        should_continue = _handle_gate(engine_url, run_id, last_run, watch_only, no_interactive)
        if not should_continue:
            # Gate was rejected or we are in watch mode — no summary printed.
            return
        # Brief pause so the engine can leave "paused" before the next poll cycle.
        time.sleep(3)

    # Final report
    elapsed = int(time.monotonic() - start_time)
    style = _STATUS_STYLE.get(last_status, "white")
    icon = "✅" if last_status == "success" else "❌"
    console.print(
        f"\n{icon} [{style}]Pipeline {last_status or 'done'}[/{style}] "
        f"run {run_id[:8]} duration {elapsed}s"
    )
    if last_status != "success":
        raise SystemExit(1)
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+
# ---------------------------------------------------------------------------
|
|
450
|
+
# Default workspace path
|
|
451
|
+
# ---------------------------------------------------------------------------
|
|
452
|
+
|
|
453
|
+
|
|
454
|
+
def default_workspace_path(repo: str) -> str:
    """Return the conventional Cortex workspace path for a repo.

    Uses the same ``owner-name`` slug that ``cortex/init/profile.py:_repo_slug``
    produces — dash separator, not underscore (#224).
    """
    slug = "-".join(repo.split("/"))
    return str(os.path.expanduser(f"~/.cortex/projects/{slug}/repo"))
|
|
462
|
+
|
|
463
|
+
|
|
464
|
+
# ---------------------------------------------------------------------------
|
|
465
|
+
# Workspace sync
|
|
466
|
+
# ---------------------------------------------------------------------------
|
|
467
|
+
|
|
468
|
+
|
|
469
|
+
def _sync_workspace(ws_path: str) -> None:
    """Sync the workspace clone to its default branch before starting a run.

    Mirrors cortex-project cli.py:_sync_workspace. Non-fatal: logs a warning
    and continues if the workspace doesn't exist or git fails — execution.py
    _sync_base_branch is a per-agent fallback inside the pipeline (#245).

    Steps: fetch origin, resolve origin/HEAD to the default branch name
    (falling back to "main"), check it out, then hard-reset to origin's tip.
    Any local changes in the workspace are discarded by the reset.
    """
    # Expand ~ so --workspace ~/... works correctly.
    workspace = Path(ws_path).expanduser()
    cwd = str(workspace)
    if not workspace.exists():
        console.print(f"[yellow]⚠ Workspace not found at {workspace} — skipping sync[/yellow]")
        return
    try:
        subprocess.run(
            ["git", "fetch", "origin"],
            cwd=cwd,
            check=True,
            capture_output=True,
            timeout=60,
        )
        # origin/HEAD points at the remote's default branch; check=False so
        # a missing ref falls through to the "main" default below.
        result = subprocess.run(
            ["git", "symbolic-ref", "refs/remotes/origin/HEAD"],
            cwd=cwd,
            capture_output=True,
            text=True,
            timeout=5,
            check=False,
        )
        branch = result.stdout.strip().split("/")[-1] if result.returncode == 0 else "main"
        subprocess.run(
            ["git", "checkout", branch],
            cwd=cwd,
            check=True,
            capture_output=True,
            timeout=10,
        )
        subprocess.run(
            ["git", "reset", "--hard", f"origin/{branch}"],
            cwd=cwd,
            check=True,
            capture_output=True,
            timeout=30,
        )
        console.print(f"[dim] Workspace synced → origin/{branch}[/dim]")
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired, OSError) as e:
        # stderr is bytes here: the check=True git calls above don't use text=True.
        stderr = b""
        if isinstance(e, subprocess.CalledProcessError):
            stderr = e.stderr or b""
        stderr_str = stderr.decode("utf-8", errors="replace")[:200]
        detail = f" — {stderr_str}" if stderr_str else ""
        console.print(f"[yellow]⚠ Workspace sync failed: {e}{detail}[/yellow]")
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
# ---------------------------------------------------------------------------
|
|
525
|
+
# High-level command functions
|
|
526
|
+
# ---------------------------------------------------------------------------
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
def cortex_run(
    issue_url: str,
    engine_url: str,
    no_interactive: bool,
    watch: bool,
    workspace: str | None,
) -> None:
    """Implement `dap project run cortex <issue-url>`.

    End-to-end flow: parse the issue URL, verify the engine is up, load and
    import the pipeline bundle, sync the workspace, ensure the project
    exists, create the run, then monitor it (handling gates) to completion.

    Args:
        issue_url: GitHub issue URL driving the run.
        engine_url: base URL of the DAP engine.
        no_interactive: auto-approve gates instead of prompting.
        watch: observe only — never approve/reject gates.
        workspace: explicit workspace path; defaults to the conventional
            ~/.cortex/projects/<owner>-<repo>/repo location.

    Raises:
        SystemExit: on bad URL, unreachable engine, missing bundle, or any
            failed engine call (helpers exit with status 1).
    """
    # Parse URL
    try:
        repo, issue_number = parse_issue_url(issue_url)
    except ValueError as exc:
        console.print(f"[red]✗ {exc}[/red]")
        raise SystemExit(1) from exc

    console.print(f"[bold]Cortex pipeline[/bold] — {repo}#{issue_number}")
    console.print(f"[dim] Engine: {engine_url}[/dim]")

    # Check engine
    check_engine(engine_url)

    # Load bundle
    try:
        bundle = load_cortex_bundle()
    except ImportError as exc:
        console.print(f"[red]✗ {exc}[/red]")
        raise SystemExit(1) from exc

    # Import pipeline
    pipeline_id = ensure_pipeline_imported(engine_url, bundle)

    # Workspace
    ws_path = workspace or default_workspace_path(repo)

    # Sync workspace to default branch HEAD before the run so Phase 1 agents
    # read current code and the coder branches from the right baseline (#241).
    _sync_workspace(ws_path)

    # Create / reuse project
    project_id = ensure_project(engine_url, pipeline_id, ws_path)

    # Create run
    console.print(f"[cyan]→ Starting run for {repo}#{issue_number}...[/cyan]")
    run_id = create_run(
        engine_url=engine_url,
        project_id=project_id,
        pipeline_id=pipeline_id,
        issue_url=issue_url,
        repo=repo,
        issue_number=issue_number,
        workspace_path=ws_path,
    )
    console.print(f"[green]✓ Run created — id {run_id}[/green]")
    console.print(f"[dim] Poll: GET {engine_url}/runs/{run_id}[/dim]")

    # Monitor
    poll_and_handle(
        engine_url=engine_url,
        run_id=run_id,
        no_interactive=no_interactive,
        watch_only=watch,
    )
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
def cortex_approve(run_id: str, engine_url: str) -> None:
    """Implement `dap project approve cortex <run-id>`.

    Refuses to act unless the run is currently paused; otherwise resolves
    the pending gate (defaulting to gate-phase1) and approves it.
    """
    run = _get_run(engine_url, run_id)
    status = run.get("final_status")
    if status != "paused":
        console.print(f"[yellow]⚠ Run {run_id[:8]} is not paused (status={status})[/yellow]")
        raise SystemExit(1)

    pending = _find_pending_gate(engine_url, run_id)
    gate_node = _known_gate_for_node(pending or "gate-phase1")
    console.print(f"[cyan]→ Approving gate {gate_node} for run {run_id[:8]}...[/cyan]")
    try:
        _approve_gate(engine_url, run_id, gate_node)
    except httpx.HTTPError as exc:
        console.print(f"[red]✗ Approve failed: {exc}[/red]")
        raise SystemExit(1) from exc
    console.print(f"[green]✓ Approved — run {run_id[:8]} resuming[/green]")
|
|
610
|
+
|
|
611
|
+
|
|
612
|
+
def cortex_reject(run_id: str, reason: str, engine_url: str) -> None:
    """Implement `dap project reject cortex <run-id> [reason]`.

    Refuses to act unless the run is currently paused; otherwise resolves
    the pending gate (defaulting to gate-phase1) and aborts the run,
    echoing the operator's reason.
    """
    run = _get_run(engine_url, run_id)
    status = run.get("final_status")
    if status != "paused":
        console.print(f"[yellow]⚠ Run {run_id[:8]} is not paused (status={status})[/yellow]")
        raise SystemExit(1)

    pending = _find_pending_gate(engine_url, run_id)
    gate_node = _known_gate_for_node(pending or "gate-phase1")
    console.print(f"[cyan]→ Rejecting gate {gate_node} for run {run_id[:8]}...[/cyan]")
    _reject_gate(engine_url, run_id, gate_node, reason)
    console.print(f"[green]✓ Run {run_id[:8]} aborted[/green]")
|
|
625
|
+
|
|
626
|
+
|
|
627
|
+
def _state_to_dict(run: dict[str, Any], state: dict[str, Any]) -> dict[str, Any]:
|
|
628
|
+
"""Build a JSON-serialisable dict from run + run-state data."""
|
|
629
|
+
extensions = state.get("extensions") or {}
|
|
630
|
+
decisions_raw = extensions.get("decisions") or []
|
|
631
|
+
return {
|
|
632
|
+
"run_id": run.get("id", ""),
|
|
633
|
+
"status": run.get("final_status", "unknown"),
|
|
634
|
+
"issue_title": extensions.get("issue_title", ""),
|
|
635
|
+
"current_phase": extensions.get("current_phase", ""),
|
|
636
|
+
"pipeline_id": run.get("pipeline_id", ""),
|
|
637
|
+
"pipeline_version": run.get("pipeline_version", ""),
|
|
638
|
+
"project_id": run.get("project_id", ""),
|
|
639
|
+
"started_at": run.get("started_at"),
|
|
640
|
+
"ended_at": run.get("ended_at"),
|
|
641
|
+
"next_nodes": extensions.get("next_nodes") or [],
|
|
642
|
+
"task_assignments": extensions.get("task_assignments") or {},
|
|
643
|
+
"decisions": decisions_raw[-5:],
|
|
644
|
+
}
|
|
645
|
+
|
|
646
|
+
|
|
647
|
+
def _json_default(obj: object) -> str:
|
|
648
|
+
"""Fallback serialiser for json.dumps — handles datetime & Decimal."""
|
|
649
|
+
if isinstance(obj, (datetime, date)):
|
|
650
|
+
return obj.isoformat()
|
|
651
|
+
if isinstance(obj, Decimal):
|
|
652
|
+
return str(obj)
|
|
653
|
+
raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")
|
|
654
|
+
|
|
655
|
+
|
|
656
|
+
def cortex_state(run_id: str, engine_url: str, fmt: str = "table") -> None:
    """Implement `dap project state cortex <run-id>`.

    Args:
        run_id: engine run identifier.
        engine_url: base URL of the DAP engine.
        fmt: "table" (human-readable, default) or "json" (machine-readable).

    Raises:
        httpx.HTTPStatusError: for non-404 HTTP failures, and for 404 in
            table mode (json mode prints a {"error": "not_found"} document
            instead).
    """
    try:
        run = _get_run(engine_url, run_id)
    except httpx.HTTPStatusError as exc:
        # Only treat 404 as "not found" — other status codes are real errors
        # that should surface rather than be swallowed as "not_found".
        if exc.response.status_code == 404 and fmt == "json":  # noqa: PLR2004
            print(json.dumps({"error": "not_found", "run_id": run_id}))
            return
        raise
    # A redundant `except (httpx.HTTPError, SystemExit): raise` clause used to
    # sit here — pure dead code (re-raising is the default), removed.

    if fmt == "json":
        # Run state is best-effort: a failed state fetch degrades to {}.
        try:
            state = _get_run_state(engine_url, run_id)
        except httpx.HTTPError:
            state = {}
        result = _state_to_dict(run, state)
        print(json.dumps(result, indent=2, default=_json_default))
        return

    status = run.get("final_status", "unknown")
    style = _STATUS_STYLE.get(status, "white")
    console.print(f"Run [bold]{run_id}[/bold]")
    console.print(f" Status: [{style}]{status}[/{style}]")
    console.print(f" Pipeline: {run.get('pipeline_id', '?')} v{run.get('pipeline_version', '?')}")
    console.print(f" Project: {run.get('project_id', 'ad-hoc')}")
    started = run.get("started_at", "?")
    ended = run.get("ended_at", "—")
    console.print(f" Started: {started}")
    console.print(f" Ended: {ended}")
|