codex-autorunner 1.1.0__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +17 -7
- codex_autorunner/bootstrap.py +219 -1
- codex_autorunner/core/__init__.py +17 -1
- codex_autorunner/core/about_car.py +114 -1
- codex_autorunner/core/app_server_threads.py +6 -0
- codex_autorunner/core/config.py +236 -1
- codex_autorunner/core/context_awareness.py +38 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +71 -1
- codex_autorunner/core/flows/reconciler.py +4 -1
- codex_autorunner/core/flows/runtime.py +22 -0
- codex_autorunner/core/flows/store.py +61 -9
- codex_autorunner/core/flows/transition.py +23 -16
- codex_autorunner/core/flows/ux_helpers.py +18 -3
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/hub.py +198 -41
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +496 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/agent_backend.py +2 -5
- codex_autorunner/core/ports/run_event.py +1 -4
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +5 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/ticket_linter_cli.py +17 -0
- codex_autorunner/core/ticket_manager_cli.py +154 -92
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/utils.py +34 -6
- codex_autorunner/flows/review/service.py +23 -25
- codex_autorunner/flows/ticket_flow/definition.py +43 -1
- codex_autorunner/integrations/agents/__init__.py +2 -0
- codex_autorunner/integrations/agents/backend_orchestrator.py +18 -0
- codex_autorunner/integrations/agents/codex_backend.py +19 -8
- codex_autorunner/integrations/agents/runner.py +3 -8
- codex_autorunner/integrations/agents/wiring.py +8 -0
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +346 -58
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +202 -45
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +18 -7
- codex_autorunner/integrations/telegram/handlers/messages.py +26 -1
- codex_autorunner/integrations/telegram/helpers.py +1 -3
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +30 -0
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +10 -4
- codex_autorunner/integrations/telegram/transport.py +10 -3
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/server.py +2 -2
- codex_autorunner/static/agentControls.js +21 -5
- codex_autorunner/static/app.js +115 -11
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +46 -81
- codex_autorunner/static/index.html +303 -24
- codex_autorunner/static/messages.js +82 -4
- codex_autorunner/static/notifications.js +255 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/settings.js +3 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9125 -6742
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +41 -13
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +69 -19
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +28 -0
- codex_autorunner/static/workspace.js +258 -44
- codex_autorunner/static/workspaceFileBrowser.js +6 -4
- codex_autorunner/surfaces/cli/cli.py +1465 -155
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/web/app.py +253 -49
- codex_autorunner/surfaces/web/routes/__init__.py +4 -0
- codex_autorunner/surfaces/web/routes/analytics.py +29 -22
- codex_autorunner/surfaces/web/routes/file_chat.py +317 -36
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +219 -29
- codex_autorunner/surfaces/web/routes/messages.py +70 -39
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +1 -1
- codex_autorunner/surfaces/web/routes/shared.py +0 -3
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/runner_manager.py +2 -2
- codex_autorunner/surfaces/web/schemas.py +70 -18
- codex_autorunner/tickets/agent_pool.py +27 -0
- codex_autorunner/tickets/files.py +33 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +3 -0
- codex_autorunner/tickets/outbox.py +41 -5
- codex_autorunner/tickets/runner.py +350 -69
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.0.dist-info}/METADATA +15 -19
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.0.dist-info}/RECORD +125 -94
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -3302
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.0.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.0.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.0.dist-info}/top_level.txt +0 -0
|
@@ -28,12 +28,39 @@ from ...core.config import (
|
|
|
28
28
|
load_hub_config,
|
|
29
29
|
load_repo_config,
|
|
30
30
|
)
|
|
31
|
-
from ...core.
|
|
31
|
+
from ...core.flows import FlowController, FlowStore
|
|
32
|
+
from ...core.flows.models import FlowRunRecord, FlowRunStatus
|
|
33
|
+
from ...core.flows.ux_helpers import build_flow_status_snapshot, ensure_worker
|
|
34
|
+
from ...core.flows.worker_process import (
|
|
35
|
+
check_worker_health,
|
|
36
|
+
clear_worker_metadata,
|
|
37
|
+
register_worker_metadata,
|
|
38
|
+
)
|
|
32
39
|
from ...core.git_utils import GitError, run_git
|
|
33
40
|
from ...core.hub import HubSupervisor
|
|
41
|
+
from ...core.locks import file_lock
|
|
34
42
|
from ...core.logging_utils import log_event, setup_rotating_logger
|
|
35
43
|
from ...core.optional_dependencies import require_optional_dependencies
|
|
44
|
+
from ...core.runtime import (
|
|
45
|
+
DoctorReport,
|
|
46
|
+
RuntimeContext,
|
|
47
|
+
clear_stale_lock,
|
|
48
|
+
doctor,
|
|
49
|
+
hub_worktree_doctor_checks,
|
|
50
|
+
pma_doctor_checks,
|
|
51
|
+
)
|
|
36
52
|
from ...core.state import RunnerState, load_state, now_iso, save_state, state_lock
|
|
53
|
+
from ...core.templates import (
|
|
54
|
+
NetworkUnavailableError,
|
|
55
|
+
RefNotFoundError,
|
|
56
|
+
RepoNotConfiguredError,
|
|
57
|
+
TemplateNotFoundError,
|
|
58
|
+
fetch_template,
|
|
59
|
+
get_scan_record,
|
|
60
|
+
inject_provenance,
|
|
61
|
+
parse_template_ref,
|
|
62
|
+
scan_lock,
|
|
63
|
+
)
|
|
37
64
|
from ...core.usage import (
|
|
38
65
|
UsageError,
|
|
39
66
|
default_codex_home,
|
|
@@ -41,7 +68,9 @@ from ...core.usage import (
|
|
|
41
68
|
summarize_hub_usage,
|
|
42
69
|
summarize_repo_usage,
|
|
43
70
|
)
|
|
44
|
-
from ...core.utils import RepoNotFoundError, default_editor, find_repo_root
|
|
71
|
+
from ...core.utils import RepoNotFoundError, default_editor, find_repo_root, is_within
|
|
72
|
+
from ...flows.ticket_flow import build_ticket_flow_definition
|
|
73
|
+
from ...integrations.agents import build_backend_orchestrator
|
|
45
74
|
from ...integrations.agents.wiring import (
|
|
46
75
|
build_agent_backend_factory,
|
|
47
76
|
build_app_server_supervisor_factory,
|
|
@@ -55,15 +84,39 @@ from ...integrations.telegram.service import (
|
|
|
55
84
|
TelegramBotService,
|
|
56
85
|
)
|
|
57
86
|
from ...integrations.telegram.state import TelegramStateStore
|
|
87
|
+
from ...integrations.templates.scan_agent import (
|
|
88
|
+
TemplateScanError,
|
|
89
|
+
TemplateScanRejectedError,
|
|
90
|
+
format_template_scan_rejection,
|
|
91
|
+
run_template_scan,
|
|
92
|
+
)
|
|
58
93
|
from ...manifest import load_manifest
|
|
94
|
+
from ...tickets import AgentPool
|
|
95
|
+
from ...tickets.files import (
|
|
96
|
+
list_ticket_paths,
|
|
97
|
+
read_ticket,
|
|
98
|
+
safe_relpath,
|
|
99
|
+
ticket_is_done,
|
|
100
|
+
)
|
|
101
|
+
from ...tickets.frontmatter import split_markdown_frontmatter
|
|
102
|
+
from ...tickets.lint import (
|
|
103
|
+
lint_ticket_directory,
|
|
104
|
+
parse_ticket_index,
|
|
105
|
+
)
|
|
59
106
|
from ...voice import VoiceConfig
|
|
60
107
|
from ..web.app import create_hub_app
|
|
108
|
+
from .pma_cli import pma_app as pma_cli_app
|
|
61
109
|
|
|
62
110
|
logger = logging.getLogger("codex_autorunner.cli")
|
|
63
111
|
|
|
64
112
|
app = typer.Typer(add_completion=False)
|
|
65
113
|
hub_app = typer.Typer(add_completion=False)
|
|
66
114
|
telegram_app = typer.Typer(add_completion=False)
|
|
115
|
+
templates_app = typer.Typer(add_completion=False)
|
|
116
|
+
repos_app = typer.Typer(add_completion=False)
|
|
117
|
+
worktree_app = typer.Typer(add_completion=False)
|
|
118
|
+
flow_app = typer.Typer(add_completion=False)
|
|
119
|
+
ticket_flow_app = typer.Typer(add_completion=False)
|
|
67
120
|
|
|
68
121
|
|
|
69
122
|
def main() -> None:
|
|
@@ -78,20 +131,18 @@ def _raise_exit(message: str, *, cause: Optional[BaseException] = None) -> NoRet
|
|
|
78
131
|
raise typer.Exit(code=1)
|
|
79
132
|
|
|
80
133
|
|
|
81
|
-
def _require_repo_config(repo: Optional[Path], hub: Optional[Path]) ->
|
|
134
|
+
def _require_repo_config(repo: Optional[Path], hub: Optional[Path]) -> RuntimeContext:
|
|
82
135
|
try:
|
|
83
136
|
repo_root = find_repo_root(repo or Path.cwd())
|
|
84
137
|
except RepoNotFoundError as exc:
|
|
85
138
|
_raise_exit("No .git directory found for repo commands.", cause=exc)
|
|
86
139
|
try:
|
|
87
140
|
config = load_repo_config(repo_root, hub_path=hub)
|
|
88
|
-
|
|
141
|
+
backend_orchestrator = build_backend_orchestrator(repo_root, config)
|
|
142
|
+
return RuntimeContext(
|
|
89
143
|
repo_root,
|
|
90
144
|
config=config,
|
|
91
|
-
|
|
92
|
-
backend_factory=build_agent_backend_factory(repo_root, config),
|
|
93
|
-
app_server_supervisor_factory=build_app_server_supervisor_factory(config),
|
|
94
|
-
agent_id_validator=validate_agent_id,
|
|
145
|
+
backend_orchestrator=backend_orchestrator,
|
|
95
146
|
)
|
|
96
147
|
except ConfigError as exc:
|
|
97
148
|
_raise_exit(str(exc), cause=exc)
|
|
@@ -104,6 +155,161 @@ def _require_hub_config(path: Optional[Path]) -> HubConfig:
|
|
|
104
155
|
_raise_exit(str(exc), cause=exc)
|
|
105
156
|
|
|
106
157
|
|
|
158
|
+
def _load_hub_config_yaml(path: Path) -> dict:
|
|
159
|
+
if not path.exists():
|
|
160
|
+
_raise_exit(f"Hub config file not found: {path}")
|
|
161
|
+
try:
|
|
162
|
+
data = yaml.safe_load(path.read_text(encoding="utf-8"))
|
|
163
|
+
if not isinstance(data, dict):
|
|
164
|
+
_raise_exit(f"Hub config must be a YAML mapping: {path}")
|
|
165
|
+
return data
|
|
166
|
+
except yaml.YAMLError as exc:
|
|
167
|
+
_raise_exit(f"Invalid YAML in hub config: {exc}", cause=exc)
|
|
168
|
+
except OSError as exc:
|
|
169
|
+
_raise_exit(f"Failed to read hub config: {exc}", cause=exc)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _write_hub_config_yaml(path: Path, data: dict) -> None:
|
|
173
|
+
lock_path = path.parent / (path.name + ".lock")
|
|
174
|
+
with file_lock(lock_path):
|
|
175
|
+
path.write_text(yaml.safe_dump(data, sort_keys=False), encoding="utf-8")
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _require_templates_enabled(config: RepoConfig) -> None:
|
|
179
|
+
if not config.templates.enabled:
|
|
180
|
+
_raise_exit(
|
|
181
|
+
"Templates are disabled. Set templates.enabled=true in the hub config to enable."
|
|
182
|
+
)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def _find_template_repo(config: RepoConfig, repo_id: str):
|
|
186
|
+
for repo in config.templates.repos:
|
|
187
|
+
if repo.id == repo_id:
|
|
188
|
+
return repo
|
|
189
|
+
return None
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _fetch_template_with_scan(template: str, ctx: RuntimeContext, hub: Optional[Path]):
|
|
193
|
+
try:
|
|
194
|
+
parsed = parse_template_ref(template)
|
|
195
|
+
except ValueError as exc:
|
|
196
|
+
_raise_exit(str(exc), cause=exc)
|
|
197
|
+
|
|
198
|
+
repo_cfg = _find_template_repo(ctx.config, parsed.repo_id)
|
|
199
|
+
if repo_cfg is None:
|
|
200
|
+
_raise_exit(f"Template repo not configured: {parsed.repo_id}")
|
|
201
|
+
|
|
202
|
+
hub_config_path = _resolve_hub_config_path_for_cli(ctx.repo_root, hub)
|
|
203
|
+
if hub_config_path is None:
|
|
204
|
+
try:
|
|
205
|
+
hub_config = load_hub_config(ctx.repo_root)
|
|
206
|
+
hub_root = hub_config.root
|
|
207
|
+
except ConfigError as exc:
|
|
208
|
+
_raise_exit(str(exc), cause=exc)
|
|
209
|
+
else:
|
|
210
|
+
hub_root = hub_config_path.parent.parent.resolve()
|
|
211
|
+
|
|
212
|
+
try:
|
|
213
|
+
fetched = fetch_template(
|
|
214
|
+
repo=repo_cfg, hub_root=hub_root, template_ref=template
|
|
215
|
+
)
|
|
216
|
+
except NetworkUnavailableError as exc:
|
|
217
|
+
_raise_exit(
|
|
218
|
+
f"{str(exc)}\n"
|
|
219
|
+
"Hint: Fetch once while online to seed the cache. "
|
|
220
|
+
"If this template is untrusted, scanning may also require a working agent backend."
|
|
221
|
+
)
|
|
222
|
+
except (
|
|
223
|
+
RepoNotConfiguredError,
|
|
224
|
+
RefNotFoundError,
|
|
225
|
+
TemplateNotFoundError,
|
|
226
|
+
GitError,
|
|
227
|
+
) as exc:
|
|
228
|
+
_raise_exit(str(exc), cause=exc)
|
|
229
|
+
|
|
230
|
+
scan_record = None
|
|
231
|
+
if not fetched.trusted:
|
|
232
|
+
with scan_lock(hub_root, fetched.blob_sha):
|
|
233
|
+
scan_record = get_scan_record(hub_root, fetched.blob_sha)
|
|
234
|
+
if scan_record is None:
|
|
235
|
+
try:
|
|
236
|
+
scan_record = asyncio.run(
|
|
237
|
+
run_template_scan(ctx=ctx, template=fetched)
|
|
238
|
+
)
|
|
239
|
+
except TemplateScanRejectedError as exc:
|
|
240
|
+
_raise_exit(str(exc), cause=exc)
|
|
241
|
+
except TemplateScanError as exc:
|
|
242
|
+
_raise_exit(str(exc), cause=exc)
|
|
243
|
+
elif scan_record.decision != "approve":
|
|
244
|
+
_raise_exit(format_template_scan_rejection(scan_record))
|
|
245
|
+
|
|
246
|
+
return fetched, scan_record, hub_root
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _resolve_ticket_dir(repo_root: Path, ticket_dir: Optional[Path]) -> Path:
|
|
250
|
+
if ticket_dir is None:
|
|
251
|
+
return repo_root / ".codex-autorunner" / "tickets"
|
|
252
|
+
if ticket_dir.is_absolute():
|
|
253
|
+
return ticket_dir
|
|
254
|
+
return repo_root / ticket_dir
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
def _collect_ticket_indices(ticket_dir: Path) -> list[int]:
|
|
258
|
+
indices: list[int] = []
|
|
259
|
+
if not ticket_dir.exists() or not ticket_dir.is_dir():
|
|
260
|
+
return indices
|
|
261
|
+
for path in ticket_dir.iterdir():
|
|
262
|
+
if not path.is_file():
|
|
263
|
+
continue
|
|
264
|
+
idx = parse_ticket_index(path.name)
|
|
265
|
+
if idx is None:
|
|
266
|
+
continue
|
|
267
|
+
indices.append(idx)
|
|
268
|
+
return indices
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def _next_available_ticket_index(existing: list[int]) -> int:
|
|
272
|
+
if not existing:
|
|
273
|
+
return 1
|
|
274
|
+
seen = set(existing)
|
|
275
|
+
candidate = 1
|
|
276
|
+
while candidate in seen:
|
|
277
|
+
candidate += 1
|
|
278
|
+
return candidate
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def _ticket_filename(index: int, *, suffix: str, width: int) -> str:
|
|
282
|
+
return f"TICKET-{index:0{width}d}{suffix}.md"
|
|
283
|
+
|
|
284
|
+
|
|
285
|
+
def _normalize_ticket_suffix(suffix: Optional[str]) -> str:
|
|
286
|
+
if not suffix:
|
|
287
|
+
return ""
|
|
288
|
+
cleaned = suffix.strip()
|
|
289
|
+
if not cleaned:
|
|
290
|
+
return ""
|
|
291
|
+
if "/" in cleaned or "\\" in cleaned:
|
|
292
|
+
_raise_exit("Ticket suffix may not include path separators.")
|
|
293
|
+
if not cleaned.startswith("-"):
|
|
294
|
+
return f"-{cleaned}"
|
|
295
|
+
return cleaned
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def _apply_agent_override(content: str, agent: str) -> str:
|
|
299
|
+
fm_yaml, body = split_markdown_frontmatter(content)
|
|
300
|
+
if fm_yaml is None:
|
|
301
|
+
_raise_exit("Template is missing YAML frontmatter; cannot set agent.")
|
|
302
|
+
try:
|
|
303
|
+
data = yaml.safe_load(fm_yaml)
|
|
304
|
+
except yaml.YAMLError as exc:
|
|
305
|
+
_raise_exit(f"Template frontmatter is invalid YAML: {exc}")
|
|
306
|
+
if not isinstance(data, dict):
|
|
307
|
+
_raise_exit("Template frontmatter must be a YAML mapping to set agent.")
|
|
308
|
+
data["agent"] = agent
|
|
309
|
+
rendered = yaml.safe_dump(data, sort_keys=False).rstrip()
|
|
310
|
+
return f"---\n{rendered}\n---{body}"
|
|
311
|
+
|
|
312
|
+
|
|
107
313
|
def _build_server_url(config, path: str) -> str:
|
|
108
314
|
base_path = config.server_base_path or ""
|
|
109
315
|
if base_path.endswith("/") and path.startswith("/"):
|
|
@@ -122,6 +328,39 @@ def _resolve_hub_config_path_for_cli(
|
|
|
122
328
|
return find_nearest_hub_config_path(repo_root)
|
|
123
329
|
|
|
124
330
|
|
|
331
|
+
def _guard_unregistered_hub_repo(repo_root: Path, hub: Optional[Path]) -> None:
|
|
332
|
+
hub_config_path = _resolve_hub_config_path_for_cli(repo_root, hub)
|
|
333
|
+
if hub_config_path is None:
|
|
334
|
+
return
|
|
335
|
+
try:
|
|
336
|
+
hub_config = load_hub_config(hub_config_path)
|
|
337
|
+
except ConfigError as exc:
|
|
338
|
+
_raise_exit(str(exc), cause=exc)
|
|
339
|
+
|
|
340
|
+
repo_root = repo_root.resolve()
|
|
341
|
+
under_repos = is_within(hub_config.repos_root, repo_root)
|
|
342
|
+
under_worktrees = is_within(hub_config.worktrees_root, repo_root)
|
|
343
|
+
if not (under_repos or under_worktrees):
|
|
344
|
+
return
|
|
345
|
+
|
|
346
|
+
manifest = load_manifest(hub_config.manifest_path, hub_config.root)
|
|
347
|
+
if manifest.get_by_path(hub_config.root, repo_root) is not None:
|
|
348
|
+
return
|
|
349
|
+
|
|
350
|
+
lines = [
|
|
351
|
+
"Repo not registered in hub manifest. Run car hub scan or create via car hub worktree create.",
|
|
352
|
+
f"Detected hub root: {hub_config.root}",
|
|
353
|
+
f"Repo path: {repo_root}",
|
|
354
|
+
"Runs won't show up in the hub UI until registered.",
|
|
355
|
+
]
|
|
356
|
+
if under_worktrees:
|
|
357
|
+
lines.append(
|
|
358
|
+
"Hint: Worktree names should look like <base_repo_id>--<branch> under "
|
|
359
|
+
f"{hub_config.worktrees_root}"
|
|
360
|
+
)
|
|
361
|
+
_raise_exit("\n".join(lines))
|
|
362
|
+
|
|
363
|
+
|
|
125
364
|
def _resolve_repo_api_path(repo_root: Path, hub: Optional[Path], path: str) -> str:
|
|
126
365
|
if not path.startswith("/"):
|
|
127
366
|
path = f"/{path}"
|
|
@@ -223,7 +462,14 @@ def _require_optional_feature(
|
|
|
223
462
|
|
|
224
463
|
|
|
225
464
|
app.add_typer(hub_app, name="hub")
|
|
465
|
+
hub_app.add_typer(worktree_app, name="worktree")
|
|
226
466
|
app.add_typer(telegram_app, name="telegram")
|
|
467
|
+
app.add_typer(templates_app, name="templates")
|
|
468
|
+
templates_app.add_typer(repos_app, name="repos")
|
|
469
|
+
app.add_typer(flow_app, name="flow")
|
|
470
|
+
app.add_typer(ticket_flow_app, name="ticket-flow")
|
|
471
|
+
flow_app.add_typer(ticket_flow_app, name="ticket_flow")
|
|
472
|
+
app.add_typer(pma_cli_app, name="pma")
|
|
227
473
|
|
|
228
474
|
|
|
229
475
|
def _has_nested_git(path: Path) -> bool:
|
|
@@ -317,7 +563,6 @@ def status(
|
|
|
317
563
|
"""Show autorunner status."""
|
|
318
564
|
engine = _require_repo_config(repo, hub)
|
|
319
565
|
state = load_state(engine.state_path)
|
|
320
|
-
outstanding, _ = engine.docs.todos()
|
|
321
566
|
repo_key = str(engine.repo_root)
|
|
322
567
|
session_id = state.repo_to_session.get(repo_key) or state.repo_to_session.get(
|
|
323
568
|
f"{repo_key}:codex"
|
|
@@ -367,7 +612,6 @@ def status(
|
|
|
367
612
|
if opencode_record
|
|
368
613
|
else None
|
|
369
614
|
),
|
|
370
|
-
"outstanding_todos": len(outstanding),
|
|
371
615
|
}
|
|
372
616
|
typer.echo(json.dumps(payload, indent=2))
|
|
373
617
|
return
|
|
@@ -391,7 +635,340 @@ def status(
|
|
|
391
635
|
if opencode_record:
|
|
392
636
|
detail = f" (status={opencode_record.status}, last_seen={opencode_record.last_seen_at})"
|
|
393
637
|
typer.echo(f"Terminal session (opencode): {opencode_session_id}{detail}")
|
|
394
|
-
|
|
638
|
+
|
|
639
|
+
|
|
640
|
+
@templates_app.command("fetch")
|
|
641
|
+
def templates_fetch(
|
|
642
|
+
template: str = typer.Argument(
|
|
643
|
+
..., help="Template ref formatted as REPO_ID:PATH[@REF]"
|
|
644
|
+
),
|
|
645
|
+
out: Optional[Path] = typer.Option(
|
|
646
|
+
None, "--out", help="Write template content to a file"
|
|
647
|
+
),
|
|
648
|
+
output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
|
|
649
|
+
repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
|
|
650
|
+
hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
|
|
651
|
+
):
|
|
652
|
+
"""Fetch a template from a configured templates repo."""
|
|
653
|
+
ctx = _require_repo_config(repo, hub)
|
|
654
|
+
_require_templates_enabled(ctx.config)
|
|
655
|
+
fetched, scan_record, _hub_root = _fetch_template_with_scan(template, ctx, hub)
|
|
656
|
+
|
|
657
|
+
if out is not None:
|
|
658
|
+
out.parent.mkdir(parents=True, exist_ok=True)
|
|
659
|
+
out.write_text(fetched.content, encoding="utf-8")
|
|
660
|
+
typer.echo(f"Wrote template to {out}", err=True)
|
|
661
|
+
|
|
662
|
+
if output_json:
|
|
663
|
+
payload = {
|
|
664
|
+
"content": fetched.content,
|
|
665
|
+
"repo_id": fetched.repo_id,
|
|
666
|
+
"path": fetched.path,
|
|
667
|
+
"ref": fetched.ref,
|
|
668
|
+
"commit_sha": fetched.commit_sha,
|
|
669
|
+
"blob_sha": fetched.blob_sha,
|
|
670
|
+
"trusted": fetched.trusted,
|
|
671
|
+
"scan_decision": scan_record.to_dict() if scan_record else None,
|
|
672
|
+
}
|
|
673
|
+
typer.echo(json.dumps(payload, indent=2))
|
|
674
|
+
return
|
|
675
|
+
|
|
676
|
+
if out is None:
|
|
677
|
+
typer.echo(fetched.content, nl=False)
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
@templates_app.command("apply")
|
|
681
|
+
def templates_apply(
|
|
682
|
+
template: str = typer.Argument(
|
|
683
|
+
..., help="Template ref formatted as REPO_ID:PATH[@REF]"
|
|
684
|
+
),
|
|
685
|
+
ticket_dir: Optional[Path] = typer.Option(
|
|
686
|
+
None,
|
|
687
|
+
"--ticket-dir",
|
|
688
|
+
help="Ticket directory (default .codex-autorunner/tickets)",
|
|
689
|
+
),
|
|
690
|
+
at: Optional[int] = typer.Option(None, "--at", help="Explicit ticket index"),
|
|
691
|
+
next_index: bool = typer.Option(
|
|
692
|
+
True, "--next/--no-next", help="Use next available index (default)"
|
|
693
|
+
),
|
|
694
|
+
suffix: Optional[str] = typer.Option(
|
|
695
|
+
None, "--suffix", help="Optional filename suffix (e.g. -foo)"
|
|
696
|
+
),
|
|
697
|
+
set_agent: Optional[str] = typer.Option(
|
|
698
|
+
None, "--set-agent", help="Override frontmatter agent"
|
|
699
|
+
),
|
|
700
|
+
provenance: bool = typer.Option(
|
|
701
|
+
False,
|
|
702
|
+
"--provenance/--no-provenance",
|
|
703
|
+
help="Embed template provenance in ticket",
|
|
704
|
+
),
|
|
705
|
+
repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
|
|
706
|
+
hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
|
|
707
|
+
):
|
|
708
|
+
"""Apply a template by writing it into the ticket directory."""
|
|
709
|
+
ctx = _require_repo_config(repo, hub)
|
|
710
|
+
_require_templates_enabled(ctx.config)
|
|
711
|
+
|
|
712
|
+
fetched, scan_record, _hub_root = _fetch_template_with_scan(template, ctx, hub)
|
|
713
|
+
|
|
714
|
+
resolved_dir = _resolve_ticket_dir(ctx.repo_root, ticket_dir)
|
|
715
|
+
if resolved_dir.exists() and not resolved_dir.is_dir():
|
|
716
|
+
_raise_exit(f"Ticket dir is not a directory: {resolved_dir}")
|
|
717
|
+
try:
|
|
718
|
+
resolved_dir.mkdir(parents=True, exist_ok=True)
|
|
719
|
+
except OSError as exc:
|
|
720
|
+
_raise_exit(f"Unable to create ticket dir: {exc}")
|
|
721
|
+
|
|
722
|
+
if at is None and not next_index:
|
|
723
|
+
_raise_exit("Specify --at or leave --next enabled to pick an index.")
|
|
724
|
+
if at is not None and at < 1:
|
|
725
|
+
_raise_exit("Ticket index must be >= 1.")
|
|
726
|
+
|
|
727
|
+
existing_indices = _collect_ticket_indices(resolved_dir)
|
|
728
|
+
if at is None:
|
|
729
|
+
index = _next_available_ticket_index(existing_indices)
|
|
730
|
+
else:
|
|
731
|
+
index = at
|
|
732
|
+
if index in existing_indices:
|
|
733
|
+
_raise_exit(
|
|
734
|
+
f"Ticket index {index} already exists. Choose another index or open a gap."
|
|
735
|
+
)
|
|
736
|
+
|
|
737
|
+
normalized_suffix = _normalize_ticket_suffix(suffix)
|
|
738
|
+
width = max(3, max([len(str(i)) for i in existing_indices + [index]]))
|
|
739
|
+
filename = _ticket_filename(index, suffix=normalized_suffix, width=width)
|
|
740
|
+
path = resolved_dir / filename
|
|
741
|
+
if path.exists():
|
|
742
|
+
_raise_exit(f"Ticket already exists: {path}")
|
|
743
|
+
|
|
744
|
+
content = fetched.content
|
|
745
|
+
if set_agent:
|
|
746
|
+
if set_agent != "user":
|
|
747
|
+
try:
|
|
748
|
+
validate_agent_id(set_agent)
|
|
749
|
+
except ValueError as exc:
|
|
750
|
+
_raise_exit(str(exc), cause=exc)
|
|
751
|
+
content = _apply_agent_override(content, set_agent)
|
|
752
|
+
|
|
753
|
+
if provenance:
|
|
754
|
+
content = inject_provenance(content, fetched, scan_record)
|
|
755
|
+
|
|
756
|
+
try:
|
|
757
|
+
path.write_text(content, encoding="utf-8")
|
|
758
|
+
except OSError as exc:
|
|
759
|
+
_raise_exit(f"Failed to write ticket: {exc}")
|
|
760
|
+
|
|
761
|
+
metadata = {
|
|
762
|
+
"repo_id": fetched.repo_id,
|
|
763
|
+
"path": fetched.path,
|
|
764
|
+
"ref": fetched.ref,
|
|
765
|
+
"commit_sha": fetched.commit_sha,
|
|
766
|
+
"blob_sha": fetched.blob_sha,
|
|
767
|
+
"trusted": fetched.trusted,
|
|
768
|
+
"scan": scan_record.to_dict() if scan_record else None,
|
|
769
|
+
}
|
|
770
|
+
typer.echo(
|
|
771
|
+
"Created ticket "
|
|
772
|
+
f"{path} (index={index}, template={fetched.repo_id}:{fetched.path}@{fetched.ref})"
|
|
773
|
+
)
|
|
774
|
+
typer.echo(json.dumps(metadata, indent=2))
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
@repos_app.command("list")
|
|
778
|
+
def repos_list(
|
|
779
|
+
hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
|
|
780
|
+
output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
|
|
781
|
+
):
|
|
782
|
+
"""List configured template repos."""
|
|
783
|
+
config = _require_hub_config(hub)
|
|
784
|
+
hub_config_path = config.root / CONFIG_FILENAME
|
|
785
|
+
data = _load_hub_config_yaml(hub_config_path)
|
|
786
|
+
|
|
787
|
+
templates_config = data.get("templates", {})
|
|
788
|
+
if not isinstance(templates_config, dict):
|
|
789
|
+
templates_config = {}
|
|
790
|
+
repos = templates_config.get("repos", [])
|
|
791
|
+
if not isinstance(repos, list):
|
|
792
|
+
repos = []
|
|
793
|
+
|
|
794
|
+
if output_json:
|
|
795
|
+
payload = {"repos": repos}
|
|
796
|
+
typer.echo(json.dumps(payload, indent=2))
|
|
797
|
+
return
|
|
798
|
+
|
|
799
|
+
if not repos:
|
|
800
|
+
typer.echo("No template repos configured.")
|
|
801
|
+
return
|
|
802
|
+
|
|
803
|
+
typer.echo(f"Template repos ({len(repos)}):")
|
|
804
|
+
for repo in repos:
|
|
805
|
+
if not isinstance(repo, dict):
|
|
806
|
+
continue
|
|
807
|
+
repo_id = repo.get("id", "")
|
|
808
|
+
url = repo.get("url", "")
|
|
809
|
+
trusted = repo.get("trusted", False)
|
|
810
|
+
default_ref = repo.get("default_ref", "main")
|
|
811
|
+
trusted_text = "trusted" if trusted else "untrusted"
|
|
812
|
+
typer.echo(f" - {repo_id}: {url} [{trusted_text}] (default_ref={default_ref})")
|
|
813
|
+
|
|
814
|
+
|
|
815
|
+
@repos_app.command("add")
|
|
816
|
+
def repos_add(
|
|
817
|
+
repo_id: str = typer.Argument(..., help="Unique repo ID"),
|
|
818
|
+
url: str = typer.Argument(..., help="Git repo URL or path"),
|
|
819
|
+
trusted: Optional[bool] = typer.Option(
|
|
820
|
+
None, "--trusted/--untrusted", help="Trust level (default: untrusted)"
|
|
821
|
+
),
|
|
822
|
+
default_ref: str = typer.Option("main", "--default-ref", help="Default git ref"),
|
|
823
|
+
hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
|
|
824
|
+
):
|
|
825
|
+
"""Add a template repo to the hub config."""
|
|
826
|
+
config = _require_hub_config(hub)
|
|
827
|
+
hub_config_path = config.root / CONFIG_FILENAME
|
|
828
|
+
data = _load_hub_config_yaml(hub_config_path)
|
|
829
|
+
|
|
830
|
+
templates_config = data.get("templates", {})
|
|
831
|
+
if not isinstance(templates_config, dict):
|
|
832
|
+
templates_config = {}
|
|
833
|
+
enabled = templates_config.get("enabled", True)
|
|
834
|
+
if enabled is False:
|
|
835
|
+
_raise_exit(
|
|
836
|
+
"Templates are disabled. Set templates.enabled=true in the hub config to enable."
|
|
837
|
+
)
|
|
838
|
+
|
|
839
|
+
templates_config = data.setdefault("templates", {})
|
|
840
|
+
if not isinstance(templates_config, dict):
|
|
841
|
+
_raise_exit("Invalid templates config in hub config")
|
|
842
|
+
templates_config.setdefault("enabled", True)
|
|
843
|
+
repos = templates_config.setdefault("repos", [])
|
|
844
|
+
if not isinstance(repos, list):
|
|
845
|
+
_raise_exit("Invalid repos config in hub config")
|
|
846
|
+
|
|
847
|
+
existing_ids = {repo.get("id") for repo in repos if isinstance(repo, dict)}
|
|
848
|
+
if repo_id in existing_ids:
|
|
849
|
+
_raise_exit(f"Repo ID '{repo_id}' already exists. Use a unique ID.")
|
|
850
|
+
|
|
851
|
+
new_repo = {
|
|
852
|
+
"id": repo_id,
|
|
853
|
+
"url": url,
|
|
854
|
+
"default_ref": default_ref,
|
|
855
|
+
}
|
|
856
|
+
if trusted is not None:
|
|
857
|
+
new_repo["trusted"] = trusted
|
|
858
|
+
|
|
859
|
+
repos.append(new_repo)
|
|
860
|
+
|
|
861
|
+
try:
|
|
862
|
+
_write_hub_config_yaml(hub_config_path, data)
|
|
863
|
+
except OSError as exc:
|
|
864
|
+
_raise_exit(f"Failed to write hub config: {exc}", cause=exc)
|
|
865
|
+
|
|
866
|
+
typer.echo(f"Added template repo '{repo_id}' to hub config.")
|
|
867
|
+
|
|
868
|
+
|
|
869
|
+
@repos_app.command("remove")
def repos_remove(
    repo_id: str = typer.Argument(..., help="Repo ID to remove"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Remove a template repo from the hub config."""
    config = _require_hub_config(hub)
    config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(config_path)

    # Tolerate malformed config shapes: treat anything unexpected as empty.
    templates = data.get("templates", {})
    if not isinstance(templates, dict):
        templates = {}
    entries = templates.get("repos", [])
    if not isinstance(entries, list):
        entries = []

    # Keep every entry whose id differs; non-dict entries are dropped too.
    remaining = [
        entry
        for entry in entries
        if isinstance(entry, dict) and entry.get("id") != repo_id
    ]
    if len(remaining) == len(entries):
        _raise_exit(f"Repo ID '{repo_id}' not found in config.")

    templates["repos"] = remaining

    try:
        _write_hub_config_yaml(config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Removed template repo '{repo_id}' from hub config.")
|
|
902
|
+
|
|
903
|
+
|
|
904
|
+
@repos_app.command("trust")
def repos_trust(
    repo_id: str = typer.Argument(..., help="Repo ID to trust"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Mark a template repo as trusted (skip scanning)."""
    config = _require_hub_config(hub)
    config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(config_path)

    # Tolerate malformed config shapes: treat anything unexpected as empty.
    templates = data.get("templates", {})
    if not isinstance(templates, dict):
        templates = {}
    entries = templates.get("repos", [])
    if not isinstance(entries, list):
        entries = []

    # Find the matching entry in place so the mutation lands inside `data`.
    target = next(
        (
            entry
            for entry in entries
            if isinstance(entry, dict) and entry.get("id") == repo_id
        ),
        None,
    )
    if target is None:
        _raise_exit(f"Repo ID '{repo_id}' not found in config.")
    target["trusted"] = True

    try:
        _write_hub_config_yaml(config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Marked repo '{repo_id}' as trusted.")
|
|
937
|
+
|
|
938
|
+
|
|
939
|
+
@repos_app.command("untrust")
def repos_untrust(
    repo_id: str = typer.Argument(..., help="Repo ID to untrust"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Mark a template repo as untrusted (require scanning)."""
    config = _require_hub_config(hub)
    config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(config_path)

    # Tolerate malformed config shapes: treat anything unexpected as empty.
    templates = data.get("templates", {})
    if not isinstance(templates, dict):
        templates = {}
    entries = templates.get("repos", [])
    if not isinstance(entries, list):
        entries = []

    # Find the matching entry in place so the mutation lands inside `data`.
    target = next(
        (
            entry
            for entry in entries
            if isinstance(entry, dict) and entry.get("id") == repo_id
        ),
        None,
    )
    if target is None:
        _raise_exit(f"Repo ID '{repo_id}' not found in config.")
    target["trusted"] = False

    try:
        _write_hub_config_yaml(config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Marked repo '{repo_id}' as untrusted.")
|
|
395
972
|
|
|
396
973
|
|
|
397
974
|
@app.command()
|
|
@@ -635,52 +1212,6 @@ def usage(
|
|
|
635
1212
|
)
|
|
636
1213
|
|
|
637
1214
|
|
|
638
|
-
@app.command()
|
|
639
|
-
def run(
|
|
640
|
-
repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
|
|
641
|
-
hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
|
|
642
|
-
force: bool = typer.Option(False, "--force", help="Ignore existing lock"),
|
|
643
|
-
):
|
|
644
|
-
"""Run the autorunner loop."""
|
|
645
|
-
engine: Optional[Engine] = None
|
|
646
|
-
try:
|
|
647
|
-
engine = _require_repo_config(repo, hub)
|
|
648
|
-
engine.clear_stop_request()
|
|
649
|
-
engine.acquire_lock(force=force)
|
|
650
|
-
engine.run_loop()
|
|
651
|
-
except (ConfigError, LockError) as exc:
|
|
652
|
-
_raise_exit(str(exc), cause=exc)
|
|
653
|
-
finally:
|
|
654
|
-
if engine:
|
|
655
|
-
try:
|
|
656
|
-
engine.release_lock()
|
|
657
|
-
except OSError as exc:
|
|
658
|
-
logger.debug("Failed to release lock in run command: %s", exc)
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
@app.command()
|
|
662
|
-
def once(
|
|
663
|
-
repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
|
|
664
|
-
hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
|
|
665
|
-
force: bool = typer.Option(False, "--force", help="Ignore existing lock"),
|
|
666
|
-
):
|
|
667
|
-
"""Execute a single Codex run."""
|
|
668
|
-
engine: Optional[Engine] = None
|
|
669
|
-
try:
|
|
670
|
-
engine = _require_repo_config(repo, hub)
|
|
671
|
-
engine.clear_stop_request()
|
|
672
|
-
engine.acquire_lock(force=force)
|
|
673
|
-
engine.run_once()
|
|
674
|
-
except (ConfigError, LockError) as exc:
|
|
675
|
-
_raise_exit(str(exc), cause=exc)
|
|
676
|
-
finally:
|
|
677
|
-
if engine:
|
|
678
|
-
try:
|
|
679
|
-
engine.release_lock()
|
|
680
|
-
except OSError as exc:
|
|
681
|
-
logger.debug("Failed to release lock in once command: %s", exc)
|
|
682
|
-
|
|
683
|
-
|
|
684
1215
|
@app.command()
|
|
685
1216
|
def kill(
|
|
686
1217
|
repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
|
|
@@ -719,25 +1250,17 @@ def kill(
|
|
|
719
1250
|
def resume(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Resume a paused/running ticket flow (now uses ticket_flow).

    This command now uses ticket_flow for execution. For full control over
    flows, use 'car flow' commands instead.
    """
    # Resume is handled by 'car flow ticket_flow/start', which reuses an
    # active/paused run automatically; this stub only points users there.
    for line in (
        "The 'resume' command has been deprecated in favor of ticket_flow.",
        "Use 'car flow ticket_flow/start' to resume existing flows.",
    ):
        typer.echo(line)
    raise typer.Exit(code=0)
|
|
741
1264
|
|
|
742
1265
|
|
|
743
1266
|
@app.command()
|
|
@@ -816,14 +1339,22 @@ def doctor_cmd(
|
|
|
816
1339
|
|
|
817
1340
|
hub_config = load_hub_config(start_path)
|
|
818
1341
|
repo_config: Optional[RepoConfig] = None
|
|
1342
|
+
repo_root: Optional[Path] = None
|
|
819
1343
|
try:
|
|
820
1344
|
repo_root = find_repo_root(start_path)
|
|
821
1345
|
repo_config = derive_repo_config(hub_config, repo_root)
|
|
822
1346
|
except RepoNotFoundError:
|
|
823
1347
|
repo_config = None
|
|
824
1348
|
|
|
825
|
-
telegram_checks = telegram_doctor_checks(
|
|
826
|
-
|
|
1349
|
+
telegram_checks = telegram_doctor_checks(
|
|
1350
|
+
repo_config or hub_config, repo_root=repo_root
|
|
1351
|
+
)
|
|
1352
|
+
pma_checks = pma_doctor_checks(hub_config, repo_root=repo_root)
|
|
1353
|
+
hub_worktree_checks = hub_worktree_doctor_checks(hub_config)
|
|
1354
|
+
|
|
1355
|
+
report = DoctorReport(
|
|
1356
|
+
checks=report.checks + telegram_checks + pma_checks + hub_worktree_checks
|
|
1357
|
+
)
|
|
827
1358
|
except ConfigError as exc:
|
|
828
1359
|
_raise_exit(str(exc), cause=exc)
|
|
829
1360
|
if json_output:
|
|
@@ -875,7 +1406,7 @@ def serve(
|
|
|
875
1406
|
|
|
876
1407
|
@hub_app.command("create")
|
|
877
1408
|
def hub_create(
|
|
878
|
-
repo_id: str = typer.Argument(..., help="
|
|
1409
|
+
repo_id: str = typer.Argument(..., help="Base repo id to create and initialize"),
|
|
879
1410
|
repo_path: Optional[Path] = typer.Option(
|
|
880
1411
|
None,
|
|
881
1412
|
"--repo-path",
|
|
@@ -887,12 +1418,16 @@ def hub_create(
|
|
|
887
1418
|
True, "--git-init/--no-git-init", help="Run git init in the new repo"
|
|
888
1419
|
),
|
|
889
1420
|
):
|
|
890
|
-
"""Create a new git repo under the hub and initialize codex-autorunner files.
|
|
1421
|
+
"""Create a new base git repo under the hub and initialize codex-autorunner files.
|
|
1422
|
+
|
|
1423
|
+
For worktrees, use `car hub worktree create`.
|
|
1424
|
+
"""
|
|
891
1425
|
config = _require_hub_config(path)
|
|
892
1426
|
supervisor = HubSupervisor(
|
|
893
1427
|
config,
|
|
894
1428
|
backend_factory_builder=build_agent_backend_factory,
|
|
895
1429
|
app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
|
|
1430
|
+
backend_orchestrator_builder=build_backend_orchestrator,
|
|
896
1431
|
agent_id_validator=validate_agent_id,
|
|
897
1432
|
)
|
|
898
1433
|
try:
|
|
@@ -939,6 +1474,157 @@ def hub_clone(
|
|
|
939
1474
|
)
|
|
940
1475
|
|
|
941
1476
|
|
|
1477
|
+
def _worktree_snapshot_payload(snapshot) -> dict:
|
|
1478
|
+
return {
|
|
1479
|
+
"id": snapshot.id,
|
|
1480
|
+
"worktree_of": snapshot.worktree_of,
|
|
1481
|
+
"branch": snapshot.branch,
|
|
1482
|
+
"path": str(snapshot.path),
|
|
1483
|
+
"initialized": snapshot.initialized,
|
|
1484
|
+
"exists_on_disk": snapshot.exists_on_disk,
|
|
1485
|
+
"status": snapshot.status.value,
|
|
1486
|
+
}
|
|
1487
|
+
|
|
1488
|
+
|
|
1489
|
+
@worktree_app.command("create")
def hub_worktree_create(
    base_repo_id: str = typer.Argument(..., help="Base repo id to branch from"),
    branch: str = typer.Argument(..., help="Branch name for the new worktree"),
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    force: bool = typer.Option(False, "--force", help="Allow existing directory"),
    start_point: Optional[str] = typer.Option(
        None, "--start-point", help="Optional git ref to branch from"
    ),
):
    """Create a new hub-managed worktree."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        backend_orchestrator_builder=build_backend_orchestrator,
        agent_id_validator=validate_agent_id,
    )
    try:
        created = supervisor.create_worktree(
            base_repo_id=base_repo_id,
            branch=branch,
            force=force,
            start_point=start_point,
        )
    except Exception as exc:
        # Surface supervisor failures as a clean CLI exit.
        _raise_exit(str(exc), cause=exc)
    typer.echo(
        f"Created worktree {created.id} (branch={created.branch}) at {created.path}"
    )
|
|
1520
|
+
|
|
1521
|
+
|
|
1522
|
+
@worktree_app.command("list")
def hub_worktree_list(
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """List hub-managed worktrees."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    # use_cache=False forces a fresh enumeration; keep worktree entries only.
    payload = [
        _worktree_snapshot_payload(snap)
        for snap in supervisor.list_repos(use_cache=False)
        if snap.kind == "worktree"
    ]
    if output_json:
        typer.echo(json.dumps({"worktrees": payload}, indent=2))
        return
    if not payload:
        typer.echo("No worktrees found.")
        return
    typer.echo(f"Worktrees ({len(payload)}):")
    line_tmpl = " - {id} (base={worktree_of}, branch={branch}, status={status}, initialized={initialized}, exists={exists_on_disk}, path={path})"
    for item in payload:
        typer.echo(line_tmpl.format(**item))
|
|
1554
|
+
|
|
1555
|
+
|
|
1556
|
+
@worktree_app.command("scan")
def hub_worktree_scan(
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """Scan hub root and list discovered worktrees."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    # scan() walks the hub root on disk; keep worktree entries only.
    payload = [
        _worktree_snapshot_payload(snap)
        for snap in supervisor.scan()
        if snap.kind == "worktree"
    ]
    if output_json:
        typer.echo(json.dumps({"worktrees": payload}, indent=2))
        return
    if not payload:
        typer.echo("No worktrees found.")
        return
    typer.echo(f"Worktrees ({len(payload)}):")
    line_tmpl = " - {id} (base={worktree_of}, branch={branch}, status={status}, initialized={initialized}, exists={exists_on_disk}, path={path})"
    for item in payload:
        typer.echo(line_tmpl.format(**item))
|
|
1584
|
+
|
|
1585
|
+
|
|
1586
|
+
@worktree_app.command("cleanup")
def hub_worktree_cleanup(
    worktree_repo_id: str = typer.Argument(..., help="Worktree repo id to remove"),
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    delete_branch: bool = typer.Option(
        False, "--delete-branch", help="Delete the local branch"
    ),
    delete_remote: bool = typer.Option(
        False, "--delete-remote", help="Delete the remote branch"
    ),
    archive: bool = typer.Option(
        True, "--archive/--no-archive", help="Archive worktree snapshot"
    ),
    force_archive: bool = typer.Option(
        False, "--force-archive", help="Continue cleanup if archive fails"
    ),
    archive_note: Optional[str] = typer.Option(
        None, "--archive-note", help="Optional archive note"
    ),
):
    """Cleanup a hub-managed worktree."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    try:
        # Delegate all removal/archival decisions to the supervisor.
        supervisor.cleanup_worktree(
            worktree_repo_id=worktree_repo_id,
            delete_branch=delete_branch,
            delete_remote=delete_remote,
            archive=archive,
            force_archive=force_archive,
            archive_note=archive_note,
        )
    except Exception as exc:
        _raise_exit(str(exc), cause=exc)
    typer.echo("ok")
|
|
1626
|
+
|
|
1627
|
+
|
|
942
1628
|
@hub_app.command("serve")
|
|
943
1629
|
def hub_serve(
|
|
944
1630
|
path: Optional[Path] = typer.Option(None, "--path", help="Hub root path"),
|
|
@@ -986,6 +1672,117 @@ def hub_scan(path: Optional[Path] = typer.Option(None, "--path", help="Hub root
|
|
|
986
1672
|
)
|
|
987
1673
|
|
|
988
1674
|
|
|
1675
|
+
@hub_app.command("snapshot")
def hub_snapshot(
    path: Optional[Path] = typer.Option(None, "--path", help="Hub root path"),
    output_json: bool = typer.Option(
        True, "--json/--no-json", help="Emit JSON output (default: true)"
    ),
    pretty: bool = typer.Option(False, "--pretty", help="Pretty-print JSON output"),
):
    """Return a compact hub snapshot (repos + inbox items).

    Fetches /hub/repos and /hub/messages from a running hub server and
    prints either a JSON document (default) or a short text summary.
    Exits with an error if the hub server is unreachable.
    """
    config = _require_hub_config(path)
    repos_url = _build_server_url(config, "/hub/repos")
    messages_url = _build_server_url(config, "/hub/messages?limit=50")

    try:
        repos_response = _request_json(
            "GET", repos_url, token_env=config.server_auth_token_env
        )
        messages_response = _request_json(
            "GET", messages_url, token_env=config.server_auth_token_env
        )
    except (
        httpx.HTTPError,
        httpx.ConnectError,
        httpx.TimeoutException,
        OSError,
    ) as exc:
        logger.debug("Failed to fetch hub snapshot from server: %s", exc)
        _raise_exit(
            "Failed to connect to hub server. Ensure 'car hub serve' is running.",
            cause=exc,
        )

    # Normalize once: any non-dict response is treated as an empty payload.
    # (Previously the same isinstance check was repeated redundantly below.)
    repos_payload = repos_response if isinstance(repos_response, dict) else {}
    messages_payload = messages_response if isinstance(messages_response, dict) else {}

    repos = repos_payload.get("repos", [])
    messages_items = messages_payload.get("items", [])

    def _summarize_repo(repo: dict) -> dict:
        # Keep only the fields a snapshot consumer needs.
        if not isinstance(repo, dict):
            return {}
        return {
            "id": repo.get("id"),
            "display_name": repo.get("display_name"),
            "status": repo.get("status"),
            "initialized": repo.get("initialized"),
            "exists_on_disk": repo.get("exists_on_disk"),
            "last_run_id": repo.get("last_run_id"),
            "last_run_started_at": repo.get("last_run_started_at"),
            "last_run_finished_at": repo.get("last_run_finished_at"),
        }

    def _summarize_message(msg: dict) -> dict:
        if not isinstance(msg, dict):
            return {}
        dispatch = msg.get("dispatch", {})
        if not isinstance(dispatch, dict):
            dispatch = {}
        # Guard against a present-but-None body/title so truncation below
        # cannot raise TypeError on len(None).
        body = dispatch.get("body") or ""
        title = dispatch.get("title") or ""
        # Truncate long bodies so the snapshot stays compact.
        truncated_body = (body[:200] + "...") if len(body) > 200 else body
        return {
            "repo_id": msg.get("repo_id"),
            "repo_display_name": msg.get("repo_display_name"),
            "run_id": msg.get("run_id"),
            "run_created_at": msg.get("run_created_at"),
            "status": msg.get("status"),
            "seq": msg.get("seq"),
            "dispatch": {
                "mode": dispatch.get("mode"),
                "title": title,
                "body": truncated_body,
                "is_handoff": dispatch.get("is_handoff"),
            },
            "files_count": (
                len(msg.get("files", [])) if isinstance(msg.get("files"), list) else 0
            ),
        }

    snapshot = {
        "last_scan_at": repos_payload.get("last_scan_at"),
        "repos": [_summarize_repo(repo) for repo in repos],
        "inbox_items": [_summarize_message(msg) for msg in messages_items],
    }

    if not output_json:
        typer.echo(
            f"Hub Snapshot (repos={len(snapshot['repos'])}, inbox={len(snapshot['inbox_items'])})"
        )
        for repo in snapshot["repos"]:
            typer.echo(
                f"- {repo.get('id')}: status={repo.get('status')}, "
                f"initialized={repo.get('initialized')}, exists={repo.get('exists_on_disk')}"
            )
        for msg in snapshot["inbox_items"]:
            typer.echo(
                f"- Inbox: repo={msg.get('repo_id')}, run_id={msg.get('run_id')}, "
                f"title={msg.get('dispatch', {}).get('title')}"
            )
        return

    indent = 2 if pretty else None
    typer.echo(json.dumps(snapshot, indent=indent))
|
|
1784
|
+
|
|
1785
|
+
|
|
989
1786
|
@telegram_app.command("start")
|
|
990
1787
|
def telegram_start(
|
|
991
1788
|
path: Optional[Path] = typer.Option(None, "--path", help="Repo or hub root path"),
|
|
@@ -1116,108 +1913,621 @@ def telegram_state_check(
|
|
|
1116
1913
|
_raise_exit(f"Telegram state check failed: {exc}", cause=exc)
|
|
1117
1914
|
|
|
1118
1915
|
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1916
|
+
def _normalize_flow_run_id(run_id: Optional[str]) -> Optional[str]:
|
|
1917
|
+
if run_id is None:
|
|
1918
|
+
return None
|
|
1919
|
+
try:
|
|
1920
|
+
return str(uuid.UUID(str(run_id)))
|
|
1921
|
+
except ValueError:
|
|
1922
|
+
_raise_exit("Invalid run_id format; must be a UUID")
|
|
1923
|
+
|
|
1924
|
+
|
|
1925
|
+
def _ticket_flow_paths(engine: RuntimeContext) -> tuple[Path, Path, Path]:
|
|
1926
|
+
db_path = engine.repo_root / ".codex-autorunner" / "flows.db"
|
|
1927
|
+
artifacts_root = engine.repo_root / ".codex-autorunner" / "flows"
|
|
1928
|
+
ticket_dir = engine.repo_root / ".codex-autorunner" / "tickets"
|
|
1929
|
+
return db_path, artifacts_root, ticket_dir
|
|
1930
|
+
|
|
1931
|
+
|
|
1932
|
+
def _validate_tickets(ticket_dir: Path) -> list[str]:
|
|
1933
|
+
"""Validate all tickets in the directory and return a list of error messages."""
|
|
1934
|
+
errors: list[str] = []
|
|
1935
|
+
|
|
1936
|
+
if not ticket_dir.exists():
|
|
1937
|
+
return errors
|
|
1938
|
+
|
|
1939
|
+
ticket_root = ticket_dir.parent
|
|
1940
|
+
for path in sorted(ticket_dir.iterdir()):
|
|
1941
|
+
if not path.is_file():
|
|
1942
|
+
continue
|
|
1943
|
+
if path.name == "AGENTS.md":
|
|
1944
|
+
continue
|
|
1945
|
+
if parse_ticket_index(path.name) is None:
|
|
1946
|
+
rel_path = safe_relpath(path, ticket_root)
|
|
1947
|
+
errors.append(
|
|
1948
|
+
f"{rel_path}: Invalid ticket filename; expected TICKET-<number>[suffix].md (e.g. TICKET-001-foo.md)"
|
|
1949
|
+
)
|
|
1950
|
+
|
|
1951
|
+
# Check for directory-level errors (duplicate indices)
|
|
1952
|
+
dir_errors = lint_ticket_directory(ticket_dir)
|
|
1953
|
+
errors.extend(dir_errors)
|
|
1954
|
+
|
|
1955
|
+
# Check each ticket file for frontmatter errors
|
|
1956
|
+
ticket_paths = list_ticket_paths(ticket_dir)
|
|
1957
|
+
for path in ticket_paths:
|
|
1958
|
+
_, ticket_errors = read_ticket(path)
|
|
1959
|
+
for err in ticket_errors:
|
|
1960
|
+
errors.append(f"{path.relative_to(path.parent.parent)}: {err}")
|
|
1961
|
+
|
|
1962
|
+
return errors
|
|
1963
|
+
|
|
1964
|
+
|
|
1965
|
+
def _open_flow_store(engine: RuntimeContext) -> FlowStore:
    """Open (and initialize) the flow store backing this repo's flows.db."""
    db_path = _ticket_flow_paths(engine)[0]
    store = FlowStore(db_path, durable=engine.config.durable_writes)
    store.initialize()
    return store
|
|
1970
|
+
|
|
1971
|
+
|
|
1972
|
+
def _active_or_paused_run(records: list[FlowRunRecord]) -> Optional[FlowRunRecord]:
|
|
1973
|
+
if not records:
|
|
1974
|
+
return None
|
|
1975
|
+
latest = records[0]
|
|
1976
|
+
if latest.status in (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED):
|
|
1977
|
+
return latest
|
|
1978
|
+
return None
|
|
1979
|
+
|
|
1980
|
+
|
|
1981
|
+
def _resumable_run(records: list[FlowRunRecord]) -> tuple[Optional[FlowRunRecord], str]:
|
|
1982
|
+
"""Return a resumable run and the reason.
|
|
1983
|
+
|
|
1984
|
+
Returns (run, reason) where run may be None.
|
|
1985
|
+
Reason is one of: 'active', 'completed_pending', 'force_new', 'new_run'.
|
|
1986
|
+
"""
|
|
1987
|
+
if not records:
|
|
1988
|
+
return None, "new_run"
|
|
1989
|
+
latest = records[0]
|
|
1990
|
+
if latest.status in (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED):
|
|
1991
|
+
return latest, "active"
|
|
1992
|
+
if latest.status == FlowRunStatus.COMPLETED:
|
|
1993
|
+
return latest, "completed_pending"
|
|
1994
|
+
return None, "new_run"
|
|
1995
|
+
|
|
1996
|
+
|
|
1997
|
+
def _ticket_flow_status_payload(
    engine: RuntimeContext, record: FlowRunRecord, store: Optional[FlowStore]
) -> dict:
    """Build a JSON-safe status dict for a flow run from its status snapshot."""
    snapshot = build_flow_status_snapshot(engine.repo_root, record, store)
    health = snapshot.get("worker_health")

    payload = {
        "run_id": record.id,
        "flow_type": record.flow_type,
        "status": record.status.value,
        "current_step": record.current_step,
        "created_at": record.created_at,
        "started_at": record.started_at,
        "finished_at": record.finished_at,
        "last_event_seq": snapshot.get("last_event_seq"),
        "last_event_at": snapshot.get("last_event_at"),
        "current_ticket": snapshot.get("effective_current_ticket"),
        "ticket_progress": snapshot.get("ticket_progress"),
        "worker": None,
    }
    if health:
        # Flatten the worker-health object into plain fields.
        payload["worker"] = {
            "status": health.status,
            "pid": health.pid,
            "message": health.message,
        }
    return payload
|
|
2025
|
+
|
|
2026
|
+
|
|
2027
|
+
def _print_ticket_flow_status(payload: dict) -> None:
    """Pretty-print a ticket-flow status payload to the terminal."""
    typer.echo(f"Run id: {payload.get('run_id')}")
    typer.echo(f"Status: {payload.get('status')}")

    progress = payload.get("ticket_progress") or {}
    if isinstance(progress, dict):
        done = progress.get("done")
        total = progress.get("total")
        # Only print the counter when both numbers are real ints.
        if isinstance(done, int) and isinstance(total, int):
            typer.echo(f"Tickets: {done}/{total}")

    typer.echo(f"Current step: {payload.get('current_step')}")
    typer.echo(f"Current ticket: {payload.get('current_ticket') or 'n/a'}")
    typer.echo(f"Created at: {payload.get('created_at')}")
    typer.echo(f"Started at: {payload.get('started_at')}")
    typer.echo(f"Finished at: {payload.get('finished_at')}")
    typer.echo(
        f"Last event: {payload.get('last_event_at')} (seq={payload.get('last_event_seq')})"
    )

    worker = payload.get("worker") or {}
    run_status = payload.get("status") or ""
    if not worker:
        return

    terminal = run_status in {"completed", "failed", "stopped"}
    if not terminal:
        # Non-terminal run: show the live worker details verbatim.
        typer.echo(
            f"Worker: {worker.get('status')} pid={worker.get('pid')} {worker.get('message') or ''}".rstrip()
        )
        return

    # Terminal run: show minimal worker info or clarify state.
    worker_status = worker.get("status") or ""
    worker_pid = worker.get("pid")
    worker_msg = worker.get("message") or ""
    if worker_status == "absent" or "missing" in worker_msg.lower():
        typer.echo("Worker: exited")
    elif worker_status == "dead" or "not running" in worker_msg.lower():
        typer.echo(f"Worker: exited (pid={worker_pid})")
    else:
        typer.echo(
            f"Worker: {worker.get('status')} pid={worker.get('pid')} {worker.get('message') or ''}".rstrip()
        )
|
|
2064
|
+
|
|
2065
|
+
|
|
2066
|
+
def _start_ticket_flow_worker(
    repo_root: Path, run_id: str, is_terminal: bool = False
) -> None:
    """Ensure a worker process exists for *run_id*; reuse a healthy one."""
    outcome = ensure_worker(repo_root, run_id, is_terminal=is_terminal)
    if outcome["status"] == "reused":
        # An existing worker is already serving this run; nothing to start.
        return
|
|
2072
|
+
|
|
2073
|
+
|
|
2074
|
+
def _stop_ticket_flow_worker(repo_root: Path, run_id: str) -> None:
    """Best-effort stop of the worker process recorded for *run_id*."""
    health = check_worker_health(repo_root, run_id)

    if health.status in {"dead", "mismatch", "invalid"}:
        # Stale worker record: clear its metadata, ignoring any failure.
        try:
            clear_worker_metadata(health.artifact_path.parent)
        except Exception:
            pass

    pid = health.pid
    if not pid:
        return
    try:
        # check=False: the process may already be gone; that is fine.
        subprocess.run(["kill", str(pid)], check=False)
    except Exception:
        pass
|
|
2087
|
+
|
|
2088
|
+
|
|
2089
|
+
def _ticket_flow_controller(
    engine: RuntimeContext,
) -> tuple[FlowController, AgentPool]:
    """Build and initialize a FlowController for the ticket flow definition."""
    db_path, artifacts_root, _ = _ticket_flow_paths(engine)
    pool = AgentPool(engine.config)
    # Validate the definition up front so misconfiguration fails fast.
    definition = build_ticket_flow_definition(agent_pool=pool)
    definition.validate()
    controller = FlowController(
        definition=definition,
        db_path=db_path,
        artifacts_root=artifacts_root,
        durable=engine.config.durable_writes,
    )
    controller.initialize()
    return controller, pool
|
|
2104
|
+
|
|
2105
|
+
|
|
2106
|
+
@flow_app.command("worker")
def flow_worker(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(
        None, "--run-id", help="Flow run ID (required)"
    ),
):
    """Start a flow worker process for an existing run.

    Validates the run id, checks ticket lint state for ticket_flow runs,
    registers worker metadata, then drives the flow to completion via an
    in-process asyncio event loop.

    Raises:
        typer.Exit: if the run id is missing/invalid, the run does not
            exist, or ticket validation fails.
    """
    engine = _require_repo_config(repo, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)
    if not normalized_run_id:
        _raise_exit("--run-id is required for worker command")

    db_path, artifacts_root, ticket_dir = _ticket_flow_paths(engine)

    typer.echo(f"Starting flow worker for run {normalized_run_id}")

    async def _run_worker():
        typer.echo(f"Flow worker started for {normalized_run_id}")
        typer.echo(f"DB path: {db_path}")
        typer.echo(f"Artifacts root: {artifacts_root}")

        store = FlowStore(db_path, durable=engine.config.durable_writes)
        store.initialize()
        # try/finally guarantees the store is released even on unexpected
        # exceptions (the previous code closed it manually on each branch).
        try:
            record = store.get_flow_run(normalized_run_id)
            if not record:
                typer.echo(f"Flow run {normalized_run_id} not found", err=True)
                raise typer.Exit(code=1)

            if record.flow_type == "ticket_flow":
                # Refuse to run against a ticket set that fails lint.
                lint_errors = _validate_tickets(ticket_dir)
                if lint_errors:
                    typer.echo("Ticket validation failed:", err=True)
                    for err in lint_errors:
                        typer.echo(f" - {err}", err=True)
                    typer.echo("", err=True)
                    typer.echo(
                        "Fix the above errors before starting the ticket flow.",
                        err=True,
                    )
                    raise typer.Exit(code=1)
                ticket_paths = list_ticket_paths(ticket_dir)
                if not ticket_paths:
                    typer.echo(
                        "No tickets found. Create tickets or run ticket_flow bootstrap to get started."
                    )
                    typer.echo(
                        f" Ticket directory: {ticket_dir.relative_to(engine.repo_root)}"
                    )
                    typer.echo(" To bootstrap: car flow ticket_flow bootstrap")
                    raise typer.Exit(code=0)
        finally:
            store.close()

        # Metadata registration is best-effort: a failure is reported but
        # must not prevent the worker from running the flow.
        try:
            register_worker_metadata(
                engine.repo_root,
                normalized_run_id,
                artifacts_root=artifacts_root,
            )
        except Exception as exc:
            typer.echo(f"Failed to register worker metadata: {exc}", err=True)

        agent_pool: AgentPool | None = None

        def _build_definition(flow_type: str):
            # Builds the flow definition for the stored flow type; creates the
            # AgentPool as a side effect so the outer scope can close it later.
            nonlocal agent_pool
            if flow_type == "pr_flow":
                _raise_exit("PR flow is no longer supported. Use ticket_flow instead.")
            if flow_type == "ticket_flow":
                agent_pool = AgentPool(engine.config)
                return build_ticket_flow_definition(agent_pool=agent_pool)
            _raise_exit(f"Unknown flow type for run {normalized_run_id}: {flow_type}")
            return None

        definition = _build_definition(record.flow_type)
        definition.validate()

        controller = FlowController(
            definition=definition,
            db_path=db_path,
            artifacts_root=artifacts_root,
            durable=engine.config.durable_writes,
        )
        controller.initialize()

        record = controller.get_status(normalized_run_id)
        if not record:
            typer.echo(f"Flow run {normalized_run_id} not found", err=True)
            raise typer.Exit(code=1)

        # STOPPED/FAILED are terminal but restartable; any other terminal
        # status means there is nothing left to do.
        if record.status.is_terminal() and record.status not in {
            FlowRunStatus.STOPPED,
            FlowRunStatus.FAILED,
        }:
            typer.echo(
                f"Flow run {normalized_run_id} already completed (status={record.status})"
            )
            return

        action = "Resuming" if record.status != FlowRunStatus.PENDING else "Starting"
        typer.echo(
            f"{action} flow run {normalized_run_id} from step: {record.current_step}"
        )
        try:
            final_record = await controller.run_flow(normalized_run_id)
            typer.echo(
                f"Flow run {normalized_run_id} finished with status {final_record.status}"
            )
        finally:
            # Always attempt to tear down the agent pool, even if the flow
            # itself raised; a close failure is reported but not re-raised.
            if agent_pool is not None:
                try:
                    await agent_pool.close()
                except Exception:
                    typer.echo("Failed to close agent pool cleanly", err=True)

    asyncio.run(_run_worker())
|
2230
|
+
@ticket_flow_app.command("bootstrap")
def ticket_flow_bootstrap(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    force_new: bool = typer.Option(
        False, "--force-new", help="Always create a new run"
    ),
):
    """Bootstrap ticket_flow (seed TICKET-001 if needed) and start a run.

    If latest run is COMPLETED and new tickets are added, a new run is created
    (use --force-new to force a new run regardless of state)."""
    engine = _require_repo_config(repo, hub)
    _guard_unregistered_hub_repo(engine.repo_root, hub)
    db_path, artifacts_root, ticket_dir = _ticket_flow_paths(engine)
    ticket_dir.mkdir(parents=True, exist_ok=True)
    ticket_path = ticket_dir / "TICKET-001.md"

    store = _open_flow_store(engine)
    try:
        if not force_new:
            records = store.list_flow_runs(flow_type="ticket_flow")
            existing_run, reason = _resumable_run(records)
            if existing_run and reason == "active":
                # An active run exists: attach a worker to it instead of
                # creating a new run.
                _start_ticket_flow_worker(
                    engine.repo_root, existing_run.id, is_terminal=False
                )
                typer.echo(f"Reused active run: {existing_run.id}")
                typer.echo(
                    f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {existing_run.id}"
                )
                return
            elif existing_run and reason == "completed_pending":
                # Latest run finished but tickets remain open; require an
                # explicit --force-new before discarding dispatch history.
                existing_tickets = list_ticket_paths(ticket_dir)
                pending_count = len(
                    [t for t in existing_tickets if not ticket_is_done(t)]
                )
                if pending_count > 0:
                    typer.echo(
                        f"Warning: Latest run {existing_run.id} is COMPLETED with {pending_count} pending ticket(s)."
                    )
                    typer.echo(
                        "Use --force-new to start a fresh run (dispatch history will be reset)."
                    )
                    _raise_exit("Add --force-new to create a new run.")
    finally:
        store.close()

    # Seed a first planning ticket only when the directory is truly empty.
    existing_tickets = list_ticket_paths(ticket_dir)
    seeded = False
    if not existing_tickets and not ticket_path.exists():
        template = """---
agent: codex
done: false
title: Bootstrap ticket plan
goal: Capture scope and seed follow-up tickets
---

You are the first ticket in a new ticket_flow run.

- Read `.codex-autorunner/ISSUE.md`. If it is missing:
  - If GitHub is available, ask the user for the issue/PR URL or number and create `.codex-autorunner/ISSUE.md` from it.
  - If GitHub is not available, write `DISPATCH.md` with `mode: pause` asking the user to describe the work (or share a doc). After the reply, create `.codex-autorunner/ISSUE.md` with their input.
- If helpful, create or update workspace docs under `.codex-autorunner/workspace/`:
  - `active_context.md` for current context and links
  - `decisions.md` for decisions/rationale
  - `spec.md` for requirements and constraints
- Break the work into additional `TICKET-00X.md` files with clear owners/goals; keep this ticket open until they exist.
- Place any supporting artifacts in `.codex-autorunner/runs/<run_id>/dispatch/` if needed.
- Write `DISPATCH.md` to dispatch a message to the user:
  - Use `mode: pause` (handoff) to wait for user response. This pauses execution.
  - Use `mode: notify` (informational) to message the user but keep running.
"""
        ticket_path.write_text(template, encoding="utf-8")
        seeded = True

    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        run_id = str(uuid.uuid4())
        record = asyncio.run(
            controller.start_flow(
                input_data={},
                run_id=run_id,
                metadata={"seeded_ticket": seeded},
            )
        )
        _start_ticket_flow_worker(engine.repo_root, record.id, is_terminal=False)
    finally:
        # The controller/pool are only needed to register the run; the
        # detached worker process owns execution from here on.
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Started ticket_flow run: {run_id}")
    typer.echo(
        f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {run_id}"
    )
|
2327
|
+
@ticket_flow_app.command("start")
def ticket_flow_start(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    force_new: bool = typer.Option(
        False, "--force-new", help="Always create a new run"
    ),
):
    """Start or resume the latest ticket_flow run.

    If latest run is COMPLETED and new tickets are added, a new run is created
    (use --force-new to force a new run regardless of state)."""

    def _fail_on_lint_errors(errors) -> None:
        # Shared reporting for ticket lint failures; aborts via _raise_exit.
        # (The same block was previously duplicated verbatim in two places.)
        if not errors:
            return
        typer.echo("Ticket validation failed:", err=True)
        for err in errors:
            typer.echo(f" - {err}", err=True)
        typer.echo("", err=True)
        typer.echo("Fix the above errors before starting the ticket flow.", err=True)
        _raise_exit("")

    engine = _require_repo_config(repo, hub)
    _guard_unregistered_hub_repo(engine.repo_root, hub)
    _, _, ticket_dir = _ticket_flow_paths(engine)
    ticket_dir.mkdir(parents=True, exist_ok=True)

    store = _open_flow_store(engine)
    try:
        if not force_new:
            records = store.list_flow_runs(flow_type="ticket_flow")
            existing_run, reason = _resumable_run(records)
            if existing_run and reason == "active":
                # Validate tickets before reusing active run
                _fail_on_lint_errors(_validate_tickets(ticket_dir))
                _start_ticket_flow_worker(
                    engine.repo_root, existing_run.id, is_terminal=False
                )
                typer.echo(f"Reused active run: {existing_run.id}")
                typer.echo(
                    f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {existing_run.id}"
                )
                return
            elif existing_run and reason == "completed_pending":
                # Latest run finished but tickets remain open; require an
                # explicit --force-new before discarding dispatch history.
                existing_tickets = list_ticket_paths(ticket_dir)
                pending_count = len(
                    [t for t in existing_tickets if not ticket_is_done(t)]
                )
                if pending_count > 0:
                    typer.echo(
                        f"Warning: Latest run {existing_run.id} is COMPLETED with {pending_count} pending ticket(s)."
                    )
                    typer.echo(
                        "Use --force-new to start a fresh run (dispatch history will be reset)."
                    )
                    _raise_exit("Add --force-new to create a new run.")

    finally:
        store.close()

    _fail_on_lint_errors(_validate_tickets(ticket_dir))
    if not list_ticket_paths(ticket_dir):
        _raise_exit(
            "No tickets found under .codex-autorunner/tickets. Use bootstrap first."
        )

    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        run_id = str(uuid.uuid4())
        record = asyncio.run(controller.start_flow(input_data={}, run_id=run_id))
        _start_ticket_flow_worker(engine.repo_root, record.id, is_terminal=False)
    finally:
        # Local controller/pool are only needed to create the run record;
        # the detached worker process owns execution from here on.
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Started ticket_flow run: {run_id}")
    typer.echo(
        f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {run_id}"
    )
|
2415
|
+
@ticket_flow_app.command("status")
def ticket_flow_status(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(None, "--run-id", help="Flow run ID"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """Show status for a ticket_flow run.

    Without --run-id, the most recent ticket_flow run is shown. With --json,
    the status payload is emitted as indented JSON instead of the
    human-readable report.

    Raises:
        typer.Exit: if no matching ticket_flow run exists.
    """
    engine = _require_repo_config(repo, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)

    store = _open_flow_store(engine)
    try:
        record = None
        if normalized_run_id:
            record = store.get_flow_run(normalized_run_id)
        else:
            # No run id given: fall back to the newest ticket_flow run.
            records = store.list_flow_runs(flow_type="ticket_flow")
            record = records[0] if records else None
        if not record:
            _raise_exit("No ticket_flow runs found.")
        # Build the payload while the store is still open; it may need
        # further store reads.
        payload = _ticket_flow_status_payload(engine, record, store)
    finally:
        store.close()

    if output_json:
        typer.echo(json.dumps(payload, indent=2))
        return
    _print_ticket_flow_status(payload)
|
2446
|
+
@ticket_flow_app.command("resume")
def ticket_flow_resume(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(None, "--run-id", help="Flow run ID"),
):
    """Resume a paused ticket_flow run.

    Without --run-id, the most recent ticket_flow run is resumed. Tickets
    must pass validation before the run is resumed and a worker is started.

    Raises:
        typer.Exit: if no run exists, ticket validation fails, or the
            controller rejects the resume.
    """
    engine = _require_repo_config(repo, hub)
    _guard_unregistered_hub_repo(engine.repo_root, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)

    store = _open_flow_store(engine)
    try:
        record = None
        if normalized_run_id:
            record = store.get_flow_run(normalized_run_id)
        else:
            # No run id given: fall back to the newest ticket_flow run.
            records = store.list_flow_runs(flow_type="ticket_flow")
            record = records[0] if records else None
        if not record:
            _raise_exit("No ticket_flow runs found.")
        normalized_run_id = record.id
    finally:
        store.close()

    _, _, ticket_dir = _ticket_flow_paths(engine)
    lint_errors = _validate_tickets(ticket_dir)
    if lint_errors:
        typer.echo("Ticket validation failed:", err=True)
        for err in lint_errors:
            typer.echo(f" - {err}", err=True)
        typer.echo("", err=True)
        typer.echo("Fix the above errors before resuming the ticket flow.", err=True)
        _raise_exit("")

    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        try:
            updated = asyncio.run(controller.resume_flow(normalized_run_id))
        except ValueError as exc:
            # Controller signals an un-resumable run via ValueError; surface
            # it as a CLI error while preserving the exception chain.
            _raise_exit(str(exc), cause=exc)
        _start_ticket_flow_worker(engine.repo_root, normalized_run_id)
    finally:
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Resumed ticket_flow run: {updated.id}")
    typer.echo(
        f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {updated.id}"
    )
|
2498
|
+
@ticket_flow_app.command("stop")
def ticket_flow_stop(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(None, "--run-id", help="Flow run ID"),
):
    """Stop a ticket_flow run.

    Without --run-id, the most recent ticket_flow run is stopped. The worker
    process is stopped first, then the run is marked stopped through the
    controller.

    Raises:
        typer.Exit: if no matching ticket_flow run exists.
    """
    engine = _require_repo_config(repo, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)

    store = _open_flow_store(engine)
    try:
        record = None
        if normalized_run_id:
            record = store.get_flow_run(normalized_run_id)
        else:
            # No run id given: fall back to the newest ticket_flow run.
            records = store.list_flow_runs(flow_type="ticket_flow")
            record = records[0] if records else None
        if not record:
            _raise_exit("No ticket_flow runs found.")
        normalized_run_id = record.id
    finally:
        store.close()

    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        # Stop the worker before flipping the run state so the worker does
        # not keep executing against a stopped run.
        _stop_ticket_flow_worker(engine.repo_root, normalized_run_id)
        updated = asyncio.run(controller.stop_flow(normalized_run_id))
    finally:
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Stop requested for run: {updated.id} (status={updated.status.value})")
|
|
1222
2532
|
|
|
1223
2533
|
if __name__ == "__main__":
|