hyperloop 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hyperloop/__init__.py ADDED
File without changes
hyperloop/__main__.py ADDED
@@ -0,0 +1,5 @@
1
+ """Entry point for ``python -m hyperloop``."""
2
+
3
+ from hyperloop.cli import app
4
+
5
+ app()
File without changes
@@ -0,0 +1,254 @@
1
+ """GitStateStore — reads/writes task files in a git repo.
2
+
3
+ Implements the StateStore protocol by parsing YAML frontmatter task files
4
+ and using git for persistence.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import re
10
+ import subprocess
11
+ from typing import TYPE_CHECKING, cast
12
+
13
+ import yaml
14
+
15
+ from hyperloop.domain.model import Phase, Task, TaskStatus, World
16
+
17
+ if TYPE_CHECKING:
18
+ from pathlib import Path
19
+
20
# Map between kebab-case YAML values and TaskStatus enum members.
# Task files store status as kebab-case strings; the domain model uses the enum.
_STATUS_FROM_YAML: dict[str, TaskStatus] = {
    "not-started": TaskStatus.NOT_STARTED,
    "in-progress": TaskStatus.IN_PROGRESS,
    "needs-rebase": TaskStatus.NEEDS_REBASE,
    "complete": TaskStatus.COMPLETE,
    "failed": TaskStatus.FAILED,
}

# Inverse mapping, used when serializing a Task back to YAML frontmatter.
_STATUS_TO_YAML: dict[TaskStatus, str] = {v: k for k, v in _STATUS_FROM_YAML.items()}
30
+
31
+
32
+ def _parse_task_file(content: str) -> tuple[dict[str, object], str]:
33
+ """Parse a task file into (frontmatter_dict, body_text).
34
+
35
+ The file format is YAML frontmatter delimited by --- lines, followed by
36
+ markdown body.
37
+ """
38
+ match = re.match(r"^---\n(.*?)\n---\n(.*)", content, re.DOTALL)
39
+ if not match:
40
+ msg = "Task file missing YAML frontmatter"
41
+ raise ValueError(msg)
42
+ fm_raw = match.group(1)
43
+ body = match.group(2)
44
+ frontmatter: dict[str, object] = yaml.safe_load(fm_raw)
45
+ return frontmatter, body
46
+
47
+
48
def _frontmatter_to_task(fm: dict[str, object]) -> Task:
    """Build a Task domain object from parsed YAML frontmatter."""
    raw_phase = fm.get("phase")
    raw_branch = fm.get("branch")
    raw_pr = fm.get("pr")
    raw_deps = fm.get("deps")

    # deps is optional in the frontmatter; anything that isn't a list is
    # treated as "no dependencies".
    if isinstance(raw_deps, list):
        deps = tuple(str(dep) for dep in cast("list[object]", raw_deps))
    else:
        deps = ()

    return Task(
        id=str(fm["id"]),
        title=str(fm["title"]),
        spec_ref=str(fm["spec_ref"]),
        status=_STATUS_FROM_YAML[str(fm["status"])],
        phase=None if raw_phase is None else Phase(str(raw_phase)),
        deps=deps,
        round=int(fm.get("round", 0)),  # type: ignore[arg-type]
        branch=None if raw_branch is None else str(raw_branch),
        pr=None if raw_pr is None else str(raw_pr),
    )
77
+
78
+
79
def _serialize_task_file(fm: dict[str, object], body: str) -> str:
    """Render frontmatter dict and body back into a task file string."""
    # Emit well-known keys first, in a fixed order for readability; any
    # extra keys follow in their original insertion order.
    preferred = ("id", "title", "spec_ref", "status", "phase", "deps", "round", "branch", "pr")
    ordered_fm: dict[str, object] = {key: fm[key] for key in preferred if key in fm}
    for key, value in fm.items():
        ordered_fm.setdefault(key, value)

    fm_text = yaml.dump(
        ordered_fm,
        default_flow_style=False,
        sort_keys=False,
        allow_unicode=True,
    )
    return f"---\n{fm_text}---\n{body}"
98
+
99
+
100
class GitStateStore:
    """StateStore implementation backed by task files in a git repository.

    Task state lives in ``<repo>/<specs_dir>/tasks/<task_id>.md`` files with
    YAML frontmatter; persistence is delegated to git via subprocess calls.
    Non-"head" epochs are held in memory only and are lost across restarts.
    """

    def __init__(self, repo_path: Path, specs_dir: str = "specs") -> None:
        self._repo = repo_path
        self._specs_dir = specs_dir
        self._tasks_dir = repo_path / specs_dir / "tasks"
        # In-memory last-run markers; the "head" key is always read live from git.
        self._epochs: dict[str, str] = {}

    def _task_file_path(self, task_id: str) -> Path:
        """Return the on-disk path for a task's markdown file."""
        return self._tasks_dir / f"{task_id}.md"

    def _read_task_file(self, task_id: str) -> tuple[dict[str, object], str]:
        """Read and parse a task file. Raises KeyError if not found."""
        path = self._task_file_path(task_id)
        if not path.exists():
            raise KeyError(task_id)
        return _parse_task_file(path.read_text())

    def _write_task_file(self, task_id: str, fm: dict[str, object], body: str) -> None:
        """Write a task file to disk (does NOT commit)."""
        path = self._task_file_path(task_id)
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(_serialize_task_file(fm, body))

    def _git(self, *args: str) -> str:
        """Run a git command in the repo and return stripped stdout.

        Raises subprocess.CalledProcessError on a nonzero exit status.
        """
        result = subprocess.run(
            ["git", "-C", str(self._repo), *args],
            check=True,
            capture_output=True,
            text=True,
        )
        return result.stdout.strip()

    def _head_sha(self) -> str:
        """Return the current HEAD SHA, or "" when no HEAD exists.

        ``git rev-parse HEAD`` fails in a repo with no commits (or a
        non-repo directory); callers treat that as an empty epoch.
        """
        try:
            return self._git("rev-parse", "HEAD")
        except subprocess.CalledProcessError:
            return ""

    # -- StateStore protocol ------------------------------------------------

    def get_world(self) -> World:
        """Return a complete snapshot of all tasks, workers, and the current epoch."""
        tasks: dict[str, Task] = {}
        if self._tasks_dir.exists():
            # Sorted glob keeps the snapshot ordering deterministic.
            for task_file in sorted(self._tasks_dir.glob("task-*.md")):
                fm, _body = _parse_task_file(task_file.read_text())
                task = _frontmatter_to_task(fm)
                tasks[task.id] = task

        return World(tasks=tasks, workers={}, epoch=self._head_sha())

    def get_task(self, task_id: str) -> Task:
        """Return a single task by ID. Raises KeyError if not found."""
        fm, _body = self._read_task_file(task_id)
        return _frontmatter_to_task(fm)

    def transition_task(
        self,
        task_id: str,
        status: TaskStatus,
        phase: Phase | None,
        round: int | None = None,
    ) -> None:
        """Update a task's status, phase, and optionally round.

        ``phase=None`` clears the stored phase; ``round=None`` leaves the
        stored round untouched.
        """
        fm, body = self._read_task_file(task_id)
        fm["status"] = _STATUS_TO_YAML[status]
        fm["phase"] = str(phase) if phase is not None else None
        if round is not None:
            fm["round"] = round
        self._write_task_file(task_id, fm, body)

    def store_findings(self, task_id: str, detail: str) -> None:
        """Append findings detail text to the task file's Findings section."""
        fm, body = self._read_task_file(task_id)
        body = _append_to_findings(body, detail)
        self._write_task_file(task_id, fm, body)

    def get_findings(self, task_id: str) -> str:
        """Return stored findings for a task. Empty string if none."""
        _fm, body = self._read_task_file(task_id)
        return _extract_findings(body)

    def clear_findings(self, task_id: str) -> None:
        """Clear the findings section of a task file."""
        fm, body = self._read_task_file(task_id)
        body = _clear_findings(body)
        self._write_task_file(task_id, fm, body)

    def get_epoch(self, key: str) -> str:
        """Return content fingerprint. 'head' returns git HEAD SHA. Others use in-memory dict."""
        if key == "head":
            return self._head_sha()
        return self._epochs.get(key, "")

    def set_epoch(self, key: str, value: str) -> None:
        """Record a last-run marker (in memory only; not persisted to git)."""
        self._epochs[key] = value

    def read_file(self, path: str) -> str | None:
        """Read a file from the repo. Returns None if it does not exist."""
        file_path = self._repo / path
        if not file_path.exists():
            return None
        return file_path.read_text()

    def commit(self, message: str) -> None:
        """Stage all changes and create a git commit.

        Raises subprocess.CalledProcessError if git refuses the commit
        (e.g. when there is nothing staged).
        """
        self._git("add", "-A")
        self._git("commit", "-m", message)
214
+
215
+
216
+ # ---------------------------------------------------------------------------
217
+ # Body manipulation helpers
218
+ # ---------------------------------------------------------------------------
219
+
220
+
221
+ def _append_to_findings(body: str, detail: str) -> str:
222
+ """Append text to the ## Findings section of the markdown body."""
223
+ # Find the ## Findings section
224
+ findings_match = re.search(r"(## Findings\n)", body)
225
+ if not findings_match:
226
+ # No findings section — append one
227
+ return body.rstrip() + "\n\n## Findings\n" + detail
228
+ # Insert the detail at the end of the body (after existing findings content)
229
+ # The findings section runs from the header to the end of the body
230
+ findings_start = findings_match.end()
231
+ existing = body[findings_start:]
232
+ if existing.strip():
233
+ # There's existing content — append after it
234
+ return body.rstrip() + "\n" + detail
235
+ else:
236
+ # Empty findings section — add content
237
+ return body[:findings_start] + detail
238
+
239
+
240
+ def _extract_findings(body: str) -> str:
241
+ """Extract content from the ## Findings section. Returns empty string if none."""
242
+ findings_match = re.search(r"## Findings\n", body)
243
+ if not findings_match:
244
+ return ""
245
+ content = body[findings_match.end() :]
246
+ return content.strip()
247
+
248
+
249
+ def _clear_findings(body: str) -> str:
250
+ """Clear the content of the ## Findings section, preserving the header."""
251
+ findings_match = re.search(r"(## Findings\n)", body)
252
+ if not findings_match:
253
+ return body
254
+ return body[: findings_match.end()]
@@ -0,0 +1,273 @@
1
+ """LocalRuntime — spawns workers as subprocesses in git worktrees.
2
+
3
+ Uses the configured command (default: ``claude --dangerously-skip-permissions``)
4
+ to run worker agents. Each worker gets its own worktree on a dedicated branch.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ import os
11
+ import shutil
12
+ import subprocess
13
+ from pathlib import Path
14
+ from typing import TYPE_CHECKING
15
+
16
+ from hyperloop.domain.model import Verdict, WorkerHandle, WorkerResult
17
+
18
+ if TYPE_CHECKING:
19
+ from hyperloop.ports.runtime import WorkerPollStatus
20
+
21
+
22
+ def _clean_git_env() -> dict[str, str]:
23
+ """Return a copy of the environment with interfering GIT_* variables removed.
24
+
25
+ Git sets variables like GIT_INDEX_FILE, GIT_DIR, etc. when running hooks
26
+ or inside worktrees. These interfere when we spawn new git operations
27
+ targeting a different repo. Stripping them ensures each git command
28
+ operates on the repo specified via -C.
29
+ """
30
+ env = os.environ.copy()
31
+ for key in list(env):
32
+ if key.startswith("GIT_"):
33
+ del env[key]
34
+ return env
35
+
36
+
37
class LocalRuntime:
    """Runtime implementation using local git worktrees and subprocesses.

    Each spawned worker runs the configured shell command inside its own git
    worktree (on a dedicated branch), with the task prompt piped in on stdin.
    Workers report back via a ``.worker-result.json`` file in the worktree root.
    """

    def __init__(
        self,
        repo_path: str,
        worktree_base: str | None = None,
        command: str | None = None,
    ) -> None:
        self._repo_path = repo_path
        self._worktree_base = worktree_base or f"{repo_path}/worktrees/workers"
        # NOTE: the command string is executed through the shell (see spawn);
        # it is trusted operator configuration, never untrusted input.
        self._command = command or "claude --dangerously-skip-permissions"
        self._processes: dict[str, subprocess.Popen[bytes]] = {}
        self._worktrees: dict[str, str] = {}  # task_id -> worktree_path

    def _worktree_path_for(self, task_id: str) -> str:
        """Return the tracked worktree path for a task, or its default location."""
        return self._worktrees.get(task_id) or os.path.join(self._worktree_base, task_id)

    def spawn(self, task_id: str, role: str, prompt: str, branch: str) -> WorkerHandle:
        """Create a worktree, write the prompt, start the subprocess, return a handle."""
        worktree_path = os.path.join(self._worktree_base, task_id)

        # Ensure the parent directory exists
        os.makedirs(self._worktree_base, exist_ok=True)

        # Create the git worktree on a new branch rooted at HEAD
        subprocess.run(
            [
                "git",
                "-C",
                self._repo_path,
                "worktree",
                "add",
                worktree_path,
                "-b",
                branch,
                "HEAD",
            ],
            check=True,
            capture_output=True,
            env=_clean_git_env(),
        )

        # Write the prompt file so it is also inspectable after the fact
        prompt_path = os.path.join(worktree_path, "prompt.md")
        Path(prompt_path).write_text(prompt)

        # Start the subprocess with stdin from the prompt file; output is
        # discarded — the worker communicates via .worker-result.json.
        with open(prompt_path) as prompt_file:
            proc = subprocess.Popen(
                self._command,
                shell=True,
                cwd=worktree_path,
                stdin=prompt_file,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )

        self._processes[task_id] = proc
        self._worktrees[task_id] = worktree_path

        return WorkerHandle(
            task_id=task_id,
            role=role,
            agent_id=str(proc.pid),
            session_id=None,
        )

    def poll(self, handle: WorkerHandle) -> WorkerPollStatus:
        """Check subprocess status. Returns 'running', 'done', or 'failed'."""
        task_id = handle.task_id
        proc = self._processes.get(task_id)

        if proc is None:
            # No tracked process — check if worktree exists (orphan scenario)
            worktree_path = self._worktrees.get(task_id)
            if worktree_path and os.path.isdir(worktree_path):
                return "done"
            return "failed"

        returncode = proc.poll()

        if returncode is None:
            return "running"
        if returncode == 0:
            return "done"
        return "failed"

    def reap(self, handle: WorkerHandle) -> WorkerResult:
        """Read the result file, clean up worktree and branch, return WorkerResult."""
        task_id = handle.task_id
        worktree_path = self._worktree_path_for(task_id)

        result = self._read_result(worktree_path)
        # Capture the branch before the worktree disappears so it can be deleted.
        branch = self._get_worktree_branch(worktree_path)
        self._cleanup_worktree(task_id, worktree_path, branch)

        return result

    def cancel(self, handle: WorkerHandle) -> None:
        """Kill the subprocess and clean up the worktree and branch."""
        task_id = handle.task_id
        proc = self._processes.get(task_id)

        # Kill the process if it's still running
        if proc is not None:
            try:
                proc.kill()
                proc.wait(timeout=5)
            except (OSError, subprocess.TimeoutExpired):
                # OSError: process already gone. TimeoutExpired: kill was
                # delivered but reaping timed out — safe to proceed either way.
                pass

        worktree_path = self._worktree_path_for(task_id)
        branch = self._get_worktree_branch(worktree_path)
        self._cleanup_worktree(task_id, worktree_path, branch)

    def find_orphan(self, task_id: str, branch: str) -> WorkerHandle | None:
        """Check if a worktree exists for the given branch. Return a handle if so."""
        worktree_path = os.path.join(self._worktree_base, task_id)

        if not os.path.isdir(worktree_path):
            return None

        # Verify this worktree is checked out on the expected branch.
        current_branch = self._get_worktree_branch(worktree_path)
        if current_branch != branch:
            return None

        return WorkerHandle(
            task_id=task_id,
            role="unknown",
            agent_id="orphan",
            session_id=None,
        )

    # -- Private helpers -------------------------------------------------------

    def _read_result(self, worktree_path: str) -> WorkerResult:
        """Read and parse .worker-result.json from the worktree.

        A missing or malformed result file yields an ERROR verdict rather
        than raising, so a crashed worker cannot take down the orchestrator.
        """
        result_path = os.path.join(worktree_path, ".worker-result.json")

        if not os.path.exists(result_path):
            return WorkerResult(
                verdict=Verdict.ERROR,
                findings=0,
                detail="Worker result file not found",
            )

        try:
            with open(result_path) as f:
                data = json.load(f)
            return WorkerResult(
                verdict=Verdict(data["verdict"]),
                findings=int(data["findings"]),
                detail=str(data["detail"]),
            )
        except (json.JSONDecodeError, KeyError, ValueError) as exc:
            return WorkerResult(
                verdict=Verdict.ERROR,
                findings=0,
                detail=f"Failed to parse worker result: {exc}",
            )

    def _get_worktree_branch(self, worktree_path: str) -> str | None:
        """Get the branch name for a worktree, or None if it can't be determined."""
        if not os.path.isdir(worktree_path):
            return None

        try:
            result = subprocess.run(
                ["git", "-C", worktree_path, "branch", "--show-current"],
                check=True,
                capture_output=True,
                text=True,
                env=_clean_git_env(),
            )
            branch = result.stdout.strip()
            # Empty output means detached HEAD — treat as unknown.
            return branch if branch else None
        except subprocess.CalledProcessError:
            return None

    def _cleanup_worktree(self, task_id: str, worktree_path: str, branch: str | None) -> None:
        """Remove the worktree directory and delete the branch."""
        # Remove from internal tracking regardless of on-disk state
        self._processes.pop(task_id, None)
        self._worktrees.pop(task_id, None)

        if not os.path.isdir(worktree_path):
            return

        # Try git worktree remove first; fall back to shutil.rmtree
        env = _clean_git_env()
        try:
            subprocess.run(
                [
                    "git",
                    "-C",
                    self._repo_path,
                    "worktree",
                    "remove",
                    "--force",
                    worktree_path,
                ],
                check=True,
                capture_output=True,
                env=env,
            )
        except subprocess.CalledProcessError:
            # Force remove the directory if git worktree remove fails
            shutil.rmtree(worktree_path, ignore_errors=True)
            # Prune stale worktree references left behind by the manual removal
            subprocess.run(
                ["git", "-C", self._repo_path, "worktree", "prune"],
                capture_output=True,
                env=env,
            )

        # Delete the branch (best-effort; no check=True on purpose)
        if branch:
            subprocess.run(
                ["git", "-C", self._repo_path, "branch", "-D", branch],
                capture_output=True,
                env=env,
            )
hyperloop/cli.py ADDED
@@ -0,0 +1,166 @@
1
+ """CLI entry point — typer app with rich output formatting.
2
+
3
+ Provides the ``hyperloop run`` command that loads config, constructs
4
+ the orchestrator, and runs the loop with rich status output.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import sys
10
+ from pathlib import Path
11
+
12
+ import typer
13
+ from rich.console import Console
14
+ from rich.panel import Panel
15
+ from rich.table import Table
16
+
17
+ from hyperloop.config import Config, ConfigError, load_config
18
+
19
# Typer application object; ``no_args_is_help`` makes a bare ``hyperloop``
# invocation print usage instead of erroring.
app = typer.Typer(
    name="hyperloop",
    no_args_is_help=True,
)
# Shared rich console used for all CLI output in this module.
console = Console()
24
+
25
+
26
# Typer group callback: its docstring becomes the top-level --help text.
@app.callback()
def main() -> None:
    """AI agent orchestrator for composable process pipelines."""
29
+
30
+
31
def _config_table(cfg: Config) -> Table:
    """Build a rich table showing the current configuration."""
    table = Table(title="Configuration", show_header=True, header_style="bold cyan")
    table.add_column("Setting", style="bold")
    table.add_column("Value")

    # One (setting, rendered value) pair per row, in display order.
    rows: list[tuple[str, str]] = [
        ("repo", str(cfg.repo) if cfg.repo else "[dim]not set (will infer from git)[/dim]"),
        ("base_branch", cfg.base_branch),
        ("specs_dir", cfg.specs_dir),
        ("overlay", cfg.overlay or "[dim]none[/dim]"),
        ("runtime", cfg.runtime),
        ("max_workers", str(cfg.max_workers)),
        ("auto_merge", str(cfg.auto_merge)),
        ("merge_strategy", cfg.merge_strategy),
        ("delete_branch", str(cfg.delete_branch)),
        ("poll_interval", f"{cfg.poll_interval}s"),
        ("max_rounds", str(cfg.max_rounds)),
        ("max_rebase_attempts", str(cfg.max_rebase_attempts)),
    ]
    for setting, value in rows:
        table.add_row(setting, value)

    return table
51
+
52
+
53
@app.command()
def run(
    repo: str | None = typer.Option(
        None,
        help="GitHub repo (owner/repo). Inferred from git remote if not set.",
    ),
    branch: str | None = typer.Option(
        None,
        help="Base branch. Default: from config or 'main'.",
    ),
    config_file: Path | None = typer.Option(
        None,
        "--config",
        "-c",
        help="Config file path. Default: .hyperloop.yaml",
    ),
    max_workers: int | None = typer.Option(
        None,
        help="Max parallel workers.",
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        help="Show what would be done without executing.",
    ),
) -> None:
    """Run the orchestrator loop.

    Loads configuration (file values overridden by CLI options), prints it,
    then constructs the orchestrator over the current working directory and
    runs the loop to completion. Exits nonzero on config errors, a missing
    repo, or a halted loop.
    """
    # 1. Load config (file + CLI overrides)
    config_path = config_file or Path(".hyperloop.yaml")
    try:
        cfg = load_config(
            config_path,
            repo=repo,
            base_branch=branch,
            max_workers=max_workers,
        )
    except ConfigError as exc:
        console.print(f"[bold red]Config error:[/bold red] {exc}")
        raise typer.Exit(code=1) from exc

    # 2. Show config
    console.print()
    console.print(
        Panel(
            "[bold]hyperloop[/bold]",
            subtitle="AI agent orchestrator",
            style="blue",
        )
    )
    console.print(_config_table(cfg))
    console.print()

    # 3. Dry run — show config and exit
    if dry_run:
        console.print("[bold yellow]Dry run[/bold yellow] -- exiting without executing.")
        return

    # 4. Validate repo is set (required for actual run)
    if cfg.repo is None:
        console.print(
            "[bold red]Error:[/bold red] No repo specified. "
            "Use --repo owner/repo or set target.repo in .hyperloop.yaml"
        )
        raise typer.Exit(code=1)

    # 5. Construct runtime and state store, run loop.
    # Imports are deferred so `--help`/config errors stay fast and don't
    # require the adapter dependencies.
    from hyperloop.adapters.git_state import GitStateStore
    from hyperloop.adapters.local import LocalRuntime
    from hyperloop.domain.model import ActionStep, LoopStep, Process, RoleStep
    from hyperloop.loop import Orchestrator

    # Default process: loop(implementer, verifier) -> merge-pr
    default_process = Process(
        name="default",
        intake=(),
        pipeline=(
            LoopStep(
                steps=(
                    RoleStep(role="implementer", on_pass=None, on_fail=None),
                    RoleStep(role="verifier", on_pass=None, on_fail=None),
                )
            ),
            ActionStep(action="merge-pr"),
        ),
    )

    repo_path = Path.cwd()
    state = GitStateStore(repo_path, specs_dir=cfg.specs_dir)
    runtime = LocalRuntime(repo_path=str(repo_path))

    orchestrator = Orchestrator(
        state=state,
        runtime=runtime,
        process=default_process,
        max_workers=cfg.max_workers,
        max_rounds=cfg.max_rounds,
    )

    # 6. Recover and run
    with console.status("[bold green]Recovering state...[/bold green]"):
        orchestrator.recover()

    console.print("[bold green]Starting orchestrator loop[/bold green]")
    console.print()

    reason = orchestrator.run_loop()

    # 7. Final summary
    console.print()
    if "complete" in reason.lower():
        console.print(Panel(f"[bold green]{reason}[/bold green]", title="Done"))
    else:
        console.print(Panel(f"[bold red]{reason}[/bold red]", title="Halted"))
        # Use typer.Exit for consistency with the other failure paths above
        # (behaves like sys.exit(1) once typer unwinds the command).
        raise typer.Exit(code=1)