codex-autorunner 1.0.0__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/__init__.py +12 -1
- codex_autorunner/agents/codex/harness.py +1 -1
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/constants.py +3 -0
- codex_autorunner/agents/opencode/harness.py +6 -1
- codex_autorunner/agents/opencode/runtime.py +59 -18
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +36 -7
- codex_autorunner/bootstrap.py +226 -4
- codex_autorunner/cli.py +5 -1174
- codex_autorunner/codex_cli.py +20 -84
- codex_autorunner/core/__init__.py +20 -0
- codex_autorunner/core/about_car.py +119 -1
- codex_autorunner/core/app_server_ids.py +59 -0
- codex_autorunner/core/app_server_threads.py +17 -2
- codex_autorunner/core/app_server_utils.py +165 -0
- codex_autorunner/core/archive.py +349 -0
- codex_autorunner/core/codex_runner.py +6 -2
- codex_autorunner/core/config.py +433 -4
- codex_autorunner/core/context_awareness.py +38 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/drafts.py +58 -4
- codex_autorunner/core/exceptions.py +4 -0
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +96 -2
- codex_autorunner/core/flows/models.py +13 -0
- codex_autorunner/core/flows/reasons.py +52 -0
- codex_autorunner/core/flows/reconciler.py +134 -0
- codex_autorunner/core/flows/runtime.py +57 -4
- codex_autorunner/core/flows/store.py +142 -7
- codex_autorunner/core/flows/transition.py +27 -15
- codex_autorunner/core/flows/ux_helpers.py +272 -0
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/git_utils.py +62 -0
- codex_autorunner/core/hub.py +291 -20
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/notifications.py +14 -2
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +496 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/__init__.py +28 -0
- codex_autorunner/{integrations/agents → core/ports}/agent_backend.py +13 -8
- codex_autorunner/core/ports/backend_orchestrator.py +41 -0
- codex_autorunner/{integrations/agents → core/ports}/run_event.py +23 -6
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +62 -0
- codex_autorunner/core/supervisor_protocol.py +15 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/text_delta_coalescer.py +54 -0
- codex_autorunner/core/ticket_linter_cli.py +218 -0
- codex_autorunner/core/ticket_manager_cli.py +494 -0
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/update.py +4 -5
- codex_autorunner/core/update_paths.py +28 -0
- codex_autorunner/core/usage.py +164 -12
- codex_autorunner/core/utils.py +125 -15
- codex_autorunner/flows/review/__init__.py +17 -0
- codex_autorunner/{core/review.py → flows/review/service.py} +37 -34
- codex_autorunner/flows/ticket_flow/definition.py +52 -3
- codex_autorunner/integrations/agents/__init__.py +11 -19
- codex_autorunner/integrations/agents/backend_orchestrator.py +302 -0
- codex_autorunner/integrations/agents/codex_adapter.py +90 -0
- codex_autorunner/integrations/agents/codex_backend.py +177 -25
- codex_autorunner/integrations/agents/opencode_adapter.py +108 -0
- codex_autorunner/integrations/agents/opencode_backend.py +305 -32
- codex_autorunner/integrations/agents/runner.py +86 -0
- codex_autorunner/integrations/agents/wiring.py +279 -0
- codex_autorunner/integrations/app_server/client.py +7 -60
- codex_autorunner/integrations/app_server/env.py +2 -107
- codex_autorunner/{core/app_server_events.py → integrations/app_server/event_buffer.py} +15 -8
- codex_autorunner/integrations/telegram/adapter.py +65 -0
- codex_autorunner/integrations/telegram/config.py +46 -0
- codex_autorunner/integrations/telegram/constants.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/callbacks.py +7 -0
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +1496 -71
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +206 -48
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +20 -3
- codex_autorunner/integrations/telegram/handlers/messages.py +27 -1
- codex_autorunner/integrations/telegram/handlers/selections.py +61 -1
- codex_autorunner/integrations/telegram/helpers.py +22 -1
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +45 -10
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +338 -43
- codex_autorunner/integrations/telegram/transport.py +13 -4
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/routes/__init__.py +37 -76
- codex_autorunner/routes/agents.py +2 -137
- codex_autorunner/routes/analytics.py +2 -238
- codex_autorunner/routes/app_server.py +2 -131
- codex_autorunner/routes/base.py +2 -596
- codex_autorunner/routes/file_chat.py +4 -833
- codex_autorunner/routes/flows.py +4 -977
- codex_autorunner/routes/messages.py +4 -456
- codex_autorunner/routes/repos.py +2 -196
- codex_autorunner/routes/review.py +2 -147
- codex_autorunner/routes/sessions.py +2 -175
- codex_autorunner/routes/settings.py +2 -168
- codex_autorunner/routes/shared.py +2 -275
- codex_autorunner/routes/system.py +4 -193
- codex_autorunner/routes/usage.py +2 -86
- codex_autorunner/routes/voice.py +2 -119
- codex_autorunner/routes/workspace.py +2 -270
- codex_autorunner/server.py +4 -4
- codex_autorunner/static/agentControls.js +61 -16
- codex_autorunner/static/app.js +126 -14
- codex_autorunner/static/archive.js +826 -0
- codex_autorunner/static/archiveApi.js +37 -0
- codex_autorunner/static/autoRefresh.js +7 -7
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/dashboard.js +224 -171
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +114 -131
- codex_autorunner/static/index.html +375 -49
- codex_autorunner/static/messages.js +568 -87
- codex_autorunner/static/notifications.js +255 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/preserve.js +17 -0
- codex_autorunner/static/settings.js +128 -6
- codex_autorunner/static/smartRefresh.js +52 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9798 -6143
- codex_autorunner/static/tabs.js +152 -11
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminal.js +18 -0
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +137 -15
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +821 -98
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +39 -0
- codex_autorunner/static/workspace.js +389 -82
- codex_autorunner/static/workspaceFileBrowser.js +15 -13
- codex_autorunner/surfaces/__init__.py +5 -0
- codex_autorunner/surfaces/cli/__init__.py +6 -0
- codex_autorunner/surfaces/cli/cli.py +2534 -0
- codex_autorunner/surfaces/cli/codex_cli.py +20 -0
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/telegram/__init__.py +3 -0
- codex_autorunner/surfaces/web/__init__.py +1 -0
- codex_autorunner/surfaces/web/app.py +2223 -0
- codex_autorunner/surfaces/web/hub_jobs.py +192 -0
- codex_autorunner/surfaces/web/middleware.py +587 -0
- codex_autorunner/surfaces/web/pty_session.py +370 -0
- codex_autorunner/surfaces/web/review.py +6 -0
- codex_autorunner/surfaces/web/routes/__init__.py +82 -0
- codex_autorunner/surfaces/web/routes/agents.py +138 -0
- codex_autorunner/surfaces/web/routes/analytics.py +284 -0
- codex_autorunner/surfaces/web/routes/app_server.py +132 -0
- codex_autorunner/surfaces/web/routes/archive.py +357 -0
- codex_autorunner/surfaces/web/routes/base.py +615 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +1117 -0
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +1354 -0
- codex_autorunner/surfaces/web/routes/messages.py +490 -0
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +197 -0
- codex_autorunner/surfaces/web/routes/review.py +148 -0
- codex_autorunner/surfaces/web/routes/sessions.py +176 -0
- codex_autorunner/surfaces/web/routes/settings.py +169 -0
- codex_autorunner/surfaces/web/routes/shared.py +277 -0
- codex_autorunner/surfaces/web/routes/system.py +196 -0
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/routes/usage.py +89 -0
- codex_autorunner/surfaces/web/routes/voice.py +120 -0
- codex_autorunner/surfaces/web/routes/workspace.py +271 -0
- codex_autorunner/surfaces/web/runner_manager.py +25 -0
- codex_autorunner/surfaces/web/schemas.py +469 -0
- codex_autorunner/surfaces/web/static_assets.py +490 -0
- codex_autorunner/surfaces/web/static_refresh.py +86 -0
- codex_autorunner/surfaces/web/terminal_sessions.py +78 -0
- codex_autorunner/tickets/__init__.py +8 -1
- codex_autorunner/tickets/agent_pool.py +53 -4
- codex_autorunner/tickets/files.py +37 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +6 -1
- codex_autorunner/tickets/outbox.py +50 -2
- codex_autorunner/tickets/runner.py +396 -57
- codex_autorunner/web/__init__.py +5 -1
- codex_autorunner/web/app.py +2 -1949
- codex_autorunner/web/hub_jobs.py +2 -191
- codex_autorunner/web/middleware.py +2 -586
- codex_autorunner/web/pty_session.py +2 -369
- codex_autorunner/web/runner_manager.py +2 -24
- codex_autorunner/web/schemas.py +2 -376
- codex_autorunner/web/static_assets.py +4 -441
- codex_autorunner/web/static_refresh.py +2 -85
- codex_autorunner/web/terminal_sessions.py +2 -77
- codex_autorunner/workspace/paths.py +49 -33
- codex_autorunner-1.2.0.dist-info/METADATA +150 -0
- codex_autorunner-1.2.0.dist-info/RECORD +339 -0
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -2653
- codex_autorunner/core/static_assets.py +0 -55
- codex_autorunner-1.0.0.dist-info/METADATA +0 -246
- codex_autorunner-1.0.0.dist-info/RECORD +0 -251
- /codex_autorunner/{routes → surfaces/web/routes}/terminal_images.py +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,2534 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import ipaddress
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import os
|
|
6
|
+
import shlex
|
|
7
|
+
import subprocess
|
|
8
|
+
import uuid
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import NoReturn, Optional
|
|
11
|
+
|
|
12
|
+
import httpx
|
|
13
|
+
import typer
|
|
14
|
+
import uvicorn
|
|
15
|
+
import yaml
|
|
16
|
+
|
|
17
|
+
from ...agents.registry import validate_agent_id
|
|
18
|
+
from ...bootstrap import seed_hub_files, seed_repo_files
|
|
19
|
+
from ...core.config import (
|
|
20
|
+
CONFIG_FILENAME,
|
|
21
|
+
ConfigError,
|
|
22
|
+
HubConfig,
|
|
23
|
+
RepoConfig,
|
|
24
|
+
_normalize_base_path,
|
|
25
|
+
collect_env_overrides,
|
|
26
|
+
derive_repo_config,
|
|
27
|
+
find_nearest_hub_config_path,
|
|
28
|
+
load_hub_config,
|
|
29
|
+
load_repo_config,
|
|
30
|
+
)
|
|
31
|
+
from ...core.flows import FlowController, FlowStore
|
|
32
|
+
from ...core.flows.models import FlowRunRecord, FlowRunStatus
|
|
33
|
+
from ...core.flows.ux_helpers import build_flow_status_snapshot, ensure_worker
|
|
34
|
+
from ...core.flows.worker_process import (
|
|
35
|
+
check_worker_health,
|
|
36
|
+
clear_worker_metadata,
|
|
37
|
+
register_worker_metadata,
|
|
38
|
+
)
|
|
39
|
+
from ...core.git_utils import GitError, run_git
|
|
40
|
+
from ...core.hub import HubSupervisor
|
|
41
|
+
from ...core.locks import file_lock
|
|
42
|
+
from ...core.logging_utils import log_event, setup_rotating_logger
|
|
43
|
+
from ...core.optional_dependencies import require_optional_dependencies
|
|
44
|
+
from ...core.runtime import (
|
|
45
|
+
DoctorReport,
|
|
46
|
+
RuntimeContext,
|
|
47
|
+
clear_stale_lock,
|
|
48
|
+
doctor,
|
|
49
|
+
hub_worktree_doctor_checks,
|
|
50
|
+
pma_doctor_checks,
|
|
51
|
+
)
|
|
52
|
+
from ...core.state import RunnerState, load_state, now_iso, save_state, state_lock
|
|
53
|
+
from ...core.templates import (
|
|
54
|
+
NetworkUnavailableError,
|
|
55
|
+
RefNotFoundError,
|
|
56
|
+
RepoNotConfiguredError,
|
|
57
|
+
TemplateNotFoundError,
|
|
58
|
+
fetch_template,
|
|
59
|
+
get_scan_record,
|
|
60
|
+
inject_provenance,
|
|
61
|
+
parse_template_ref,
|
|
62
|
+
scan_lock,
|
|
63
|
+
)
|
|
64
|
+
from ...core.usage import (
|
|
65
|
+
UsageError,
|
|
66
|
+
default_codex_home,
|
|
67
|
+
parse_iso_datetime,
|
|
68
|
+
summarize_hub_usage,
|
|
69
|
+
summarize_repo_usage,
|
|
70
|
+
)
|
|
71
|
+
from ...core.utils import RepoNotFoundError, default_editor, find_repo_root, is_within
|
|
72
|
+
from ...flows.ticket_flow import build_ticket_flow_definition
|
|
73
|
+
from ...integrations.agents import build_backend_orchestrator
|
|
74
|
+
from ...integrations.agents.wiring import (
|
|
75
|
+
build_agent_backend_factory,
|
|
76
|
+
build_app_server_supervisor_factory,
|
|
77
|
+
)
|
|
78
|
+
from ...integrations.telegram.adapter import TelegramAPIError, TelegramBotClient
|
|
79
|
+
from ...integrations.telegram.doctor import telegram_doctor_checks
|
|
80
|
+
from ...integrations.telegram.service import (
|
|
81
|
+
TelegramBotConfig,
|
|
82
|
+
TelegramBotConfigError,
|
|
83
|
+
TelegramBotLockError,
|
|
84
|
+
TelegramBotService,
|
|
85
|
+
)
|
|
86
|
+
from ...integrations.telegram.state import TelegramStateStore
|
|
87
|
+
from ...integrations.templates.scan_agent import (
|
|
88
|
+
TemplateScanError,
|
|
89
|
+
TemplateScanRejectedError,
|
|
90
|
+
format_template_scan_rejection,
|
|
91
|
+
run_template_scan,
|
|
92
|
+
)
|
|
93
|
+
from ...manifest import load_manifest
|
|
94
|
+
from ...tickets import AgentPool
|
|
95
|
+
from ...tickets.files import (
|
|
96
|
+
list_ticket_paths,
|
|
97
|
+
read_ticket,
|
|
98
|
+
safe_relpath,
|
|
99
|
+
ticket_is_done,
|
|
100
|
+
)
|
|
101
|
+
from ...tickets.frontmatter import split_markdown_frontmatter
|
|
102
|
+
from ...tickets.lint import (
|
|
103
|
+
lint_ticket_directory,
|
|
104
|
+
parse_ticket_index,
|
|
105
|
+
)
|
|
106
|
+
from ...voice import VoiceConfig
|
|
107
|
+
from ..web.app import create_hub_app
|
|
108
|
+
from .pma_cli import pma_app as pma_cli_app
|
|
109
|
+
|
|
110
|
+
# Module-level CLI logger.
logger = logging.getLogger("codex_autorunner.cli")

# Root Typer application plus one sub-application per command group.
# The sub-apps are attached to the root via add_typer() later in this module,
# after the shared helper functions are defined.
app = typer.Typer(add_completion=False)
hub_app = typer.Typer(add_completion=False)
telegram_app = typer.Typer(add_completion=False)
templates_app = typer.Typer(add_completion=False)
repos_app = typer.Typer(add_completion=False)
worktree_app = typer.Typer(add_completion=False)
flow_app = typer.Typer(add_completion=False)
ticket_flow_app = typer.Typer(add_completion=False)
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def main() -> None:
    """Entrypoint for CLI execution: invokes the root Typer application."""
    app()
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def _raise_exit(message: str, *, cause: Optional[BaseException] = None) -> NoReturn:
    """Print *message* to stderr and abort the CLI with exit code 1.

    When *cause* is given it is chained onto the raised ``typer.Exit`` so the
    original traceback stays available.
    """
    typer.echo(message, err=True)
    exit_exc = typer.Exit(code=1)
    if cause is None:
        raise exit_exc
    raise exit_exc from cause
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def _require_repo_config(repo: Optional[Path], hub: Optional[Path]) -> RuntimeContext:
    """Build a RuntimeContext for repo-scoped commands, exiting the CLI on failure.

    Resolves the enclosing git repo root (from *repo* or the CWD), loads the
    repo config (optionally against an explicit *hub* path), and wires up the
    backend orchestrator for that repo.
    """
    try:
        repo_root = find_repo_root(repo or Path.cwd())
    except RepoNotFoundError as exc:
        _raise_exit("No .git directory found for repo commands.", cause=exc)
    try:
        config = load_repo_config(repo_root, hub_path=hub)
        backend_orchestrator = build_backend_orchestrator(repo_root, config)
        return RuntimeContext(
            repo_root,
            config=config,
            backend_orchestrator=backend_orchestrator,
        )
    except ConfigError as exc:
        # Both config loading and orchestrator construction surface ConfigError.
        _raise_exit(str(exc), cause=exc)
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def _require_hub_config(path: Optional[Path]) -> HubConfig:
    """Load the hub config starting at *path* (or the CWD), exiting on failure."""
    start = path if path is not None else Path.cwd()
    try:
        return load_hub_config(start)
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def _load_hub_config_yaml(path: Path) -> dict:
    """Read *path* as YAML and return its top-level mapping, exiting on error."""
    if not path.exists():
        _raise_exit(f"Hub config file not found: {path}")
    try:
        raw = path.read_text(encoding="utf-8")
    except OSError as exc:
        _raise_exit(f"Failed to read hub config: {exc}", cause=exc)
    try:
        data = yaml.safe_load(raw)
    except yaml.YAMLError as exc:
        _raise_exit(f"Invalid YAML in hub config: {exc}", cause=exc)
    if not isinstance(data, dict):
        _raise_exit(f"Hub config must be a YAML mapping: {path}")
    return data
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _write_hub_config_yaml(path: Path, data: dict) -> None:
    """Serialize *data* as YAML to *path* while holding a sibling ``.lock`` file."""
    serialized = yaml.safe_dump(data, sort_keys=False)
    lock_path = path.with_name(path.name + ".lock")
    with file_lock(lock_path):
        path.write_text(serialized, encoding="utf-8")
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _require_templates_enabled(config: RepoConfig) -> None:
|
|
179
|
+
if not config.templates.enabled:
|
|
180
|
+
_raise_exit(
|
|
181
|
+
"Templates are disabled. Set templates.enabled=true in the hub config to enable."
|
|
182
|
+
)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
def _find_template_repo(config: RepoConfig, repo_id: str):
|
|
186
|
+
for repo in config.templates.repos:
|
|
187
|
+
if repo.id == repo_id:
|
|
188
|
+
return repo
|
|
189
|
+
return None
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
def _fetch_template_with_scan(template: str, ctx: RuntimeContext, hub: Optional[Path]):
    """Fetch a template by ref and gate untrusted templates behind a scan.

    Returns a ``(fetched, scan_record, hub_root)`` tuple; ``scan_record`` stays
    None for trusted templates. Exits the CLI on any parse, fetch, or scan
    failure.
    """
    try:
        parsed = parse_template_ref(template)
    except ValueError as exc:
        _raise_exit(str(exc), cause=exc)

    repo_cfg = _find_template_repo(ctx.config, parsed.repo_id)
    if repo_cfg is None:
        _raise_exit(f"Template repo not configured: {parsed.repo_id}")

    # Derive the hub root: prefer an explicit/nearby hub config file, otherwise
    # load the hub config relative to the repo root.
    hub_config_path = _resolve_hub_config_path_for_cli(ctx.repo_root, hub)
    if hub_config_path is None:
        try:
            hub_config = load_hub_config(ctx.repo_root)
            hub_root = hub_config.root
        except ConfigError as exc:
            _raise_exit(str(exc), cause=exc)
    else:
        # The config file sits one directory below the hub root, so hop two
        # levels up from the file itself.
        hub_root = hub_config_path.parent.parent.resolve()

    try:
        fetched = fetch_template(
            repo=repo_cfg, hub_root=hub_root, template_ref=template
        )
    except NetworkUnavailableError as exc:
        _raise_exit(
            f"{str(exc)}\n"
            "Hint: Fetch once while online to seed the cache. "
            "If this template is untrusted, scanning may also require a working agent backend."
        )
    except (
        RepoNotConfiguredError,
        RefNotFoundError,
        TemplateNotFoundError,
        GitError,
    ) as exc:
        _raise_exit(str(exc), cause=exc)

    scan_record = None
    if not fetched.trusted:
        # Serialize scans per blob so concurrent CLI invocations don't race the
        # same template content.
        with scan_lock(hub_root, fetched.blob_sha):
            scan_record = get_scan_record(hub_root, fetched.blob_sha)
            if scan_record is None:
                # No cached decision: run the (async) scan agent to get one.
                try:
                    scan_record = asyncio.run(
                        run_template_scan(ctx=ctx, template=fetched)
                    )
                except TemplateScanRejectedError as exc:
                    _raise_exit(str(exc), cause=exc)
                except TemplateScanError as exc:
                    _raise_exit(str(exc), cause=exc)
            elif scan_record.decision != "approve":
                # Cached decision exists but was not an approval.
                _raise_exit(format_template_scan_rejection(scan_record))

    return fetched, scan_record, hub_root
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _resolve_ticket_dir(repo_root: Path, ticket_dir: Optional[Path]) -> Path:
|
|
250
|
+
if ticket_dir is None:
|
|
251
|
+
return repo_root / ".codex-autorunner" / "tickets"
|
|
252
|
+
if ticket_dir.is_absolute():
|
|
253
|
+
return ticket_dir
|
|
254
|
+
return repo_root / ticket_dir
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
def _collect_ticket_indices(ticket_dir: Path) -> list[int]:
    """Collect ticket indices parsed from regular-file names in *ticket_dir*.

    Returns an empty list when the directory is missing or not a directory;
    files whose names don't parse as ticket indices are skipped.
    """
    if not ticket_dir.exists() or not ticket_dir.is_dir():
        return []
    parsed = (
        parse_ticket_index(entry.name)
        for entry in ticket_dir.iterdir()
        if entry.is_file()
    )
    return [index for index in parsed if index is not None]
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def _next_available_ticket_index(existing: list[int]) -> int:
|
|
272
|
+
if not existing:
|
|
273
|
+
return 1
|
|
274
|
+
seen = set(existing)
|
|
275
|
+
candidate = 1
|
|
276
|
+
while candidate in seen:
|
|
277
|
+
candidate += 1
|
|
278
|
+
return candidate
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def _ticket_filename(index: int, *, suffix: str, width: int) -> str:
|
|
282
|
+
return f"TICKET-{index:0{width}d}{suffix}.md"
|
|
283
|
+
|
|
284
|
+
|
|
285
|
+
def _normalize_ticket_suffix(suffix: Optional[str]) -> str:
|
|
286
|
+
if not suffix:
|
|
287
|
+
return ""
|
|
288
|
+
cleaned = suffix.strip()
|
|
289
|
+
if not cleaned:
|
|
290
|
+
return ""
|
|
291
|
+
if "/" in cleaned or "\\" in cleaned:
|
|
292
|
+
_raise_exit("Ticket suffix may not include path separators.")
|
|
293
|
+
if not cleaned.startswith("-"):
|
|
294
|
+
return f"-{cleaned}"
|
|
295
|
+
return cleaned
|
|
296
|
+
|
|
297
|
+
|
|
298
|
+
def _apply_agent_override(content: str, agent: str) -> str:
    """Return *content* with ``agent`` set in its YAML frontmatter.

    Exits the CLI when the frontmatter is missing, is invalid YAML, or is not
    a mapping.
    """
    fm_yaml, body = split_markdown_frontmatter(content)
    if fm_yaml is None:
        _raise_exit("Template is missing YAML frontmatter; cannot set agent.")
    try:
        data = yaml.safe_load(fm_yaml)
    except yaml.YAMLError as exc:
        _raise_exit(f"Template frontmatter is invalid YAML: {exc}")
    if not isinstance(data, dict):
        _raise_exit("Template frontmatter must be a YAML mapping to set agent.")
    data["agent"] = agent
    rendered = yaml.safe_dump(data, sort_keys=False).rstrip()
    # NOTE(review): no separator is inserted between the closing "---" and the
    # body — presumably split_markdown_frontmatter leaves the body's leading
    # newline intact; confirm against that helper.
    return f"---\n{rendered}\n---{body}"
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def _build_server_url(config, path: str) -> str:
|
|
314
|
+
base_path = config.server_base_path or ""
|
|
315
|
+
if base_path.endswith("/") and path.startswith("/"):
|
|
316
|
+
base_path = base_path[:-1]
|
|
317
|
+
return f"http://{config.server_host}:{config.server_port}{base_path}{path}"
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
def _resolve_hub_config_path_for_cli(
    repo_root: Path, hub: Optional[Path]
) -> Optional[Path]:
    """Locate the hub config file to use for CLI commands.

    An explicit *hub* (file, or directory containing the config file) wins;
    otherwise search upward from *repo_root*. Returns None when no existing
    config file is found for an explicit *hub*.
    """
    if not hub:
        return find_nearest_hub_config_path(repo_root)
    candidate = hub / CONFIG_FILENAME if hub.is_dir() else hub
    return candidate if candidate.exists() else None
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
def _guard_unregistered_hub_repo(repo_root: Path, hub: Optional[Path]) -> None:
    """Exit if *repo_root* sits under a hub's managed roots but is unregistered.

    No-op when no hub config is found, when the repo lies outside both the
    hub's repos root and worktrees root, or when the hub manifest already has
    an entry for the repo's path.
    """
    hub_config_path = _resolve_hub_config_path_for_cli(repo_root, hub)
    if hub_config_path is None:
        return
    try:
        hub_config = load_hub_config(hub_config_path)
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)

    repo_root = repo_root.resolve()
    under_repos = is_within(hub_config.repos_root, repo_root)
    under_worktrees = is_within(hub_config.worktrees_root, repo_root)
    if not (under_repos or under_worktrees):
        # Repo is outside the hub's managed directories; nothing to guard.
        return

    manifest = load_manifest(hub_config.manifest_path, hub_config.root)
    if manifest.get_by_path(hub_config.root, repo_root) is not None:
        # Already registered in the manifest.
        return

    lines = [
        "Repo not registered in hub manifest. Run car hub scan or create via car hub worktree create.",
        f"Detected hub root: {hub_config.root}",
        f"Repo path: {repo_root}",
        "Runs won't show up in the hub UI until registered.",
    ]
    if under_worktrees:
        lines.append(
            "Hint: Worktree names should look like <base_repo_id>--<branch> under "
            f"{hub_config.worktrees_root}"
        )
    _raise_exit("\n".join(lines))
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def _resolve_repo_api_path(repo_root: Path, hub: Optional[Path], path: str) -> str:
    """Rewrite *path* to its hub-scoped form ``/repos/<id><path>`` when possible.

    Best-effort: returns *path* unchanged (with a leading slash ensured)
    whenever the hub config, the manifest file, or a manifest entry matching
    *repo_root* cannot be found or read.
    """
    if not path.startswith("/"):
        path = f"/{path}"
    hub_config_path = _resolve_hub_config_path_for_cli(repo_root, hub)
    if hub_config_path is None:
        return path
    hub_root = hub_config_path.parent.parent.resolve()
    # Read the optional hub.manifest override straight from the raw YAML; any
    # failure falls back to the default manifest location below.
    manifest_rel: Optional[str] = None
    try:
        raw = yaml.safe_load(hub_config_path.read_text(encoding="utf-8")) or {}
        if isinstance(raw, dict):
            hub_cfg = raw.get("hub")
            if isinstance(hub_cfg, dict):
                manifest_value = hub_cfg.get("manifest")
                if isinstance(manifest_value, str) and manifest_value.strip():
                    manifest_rel = manifest_value.strip()
    except (OSError, yaml.YAMLError, KeyError, ValueError) as exc:
        logger.debug("Failed to read hub config for manifest: %s", exc)
        manifest_rel = None
    manifest_path = hub_root / (manifest_rel or ".codex-autorunner/manifest.yml")
    if not manifest_path.exists():
        return path
    try:
        manifest = load_manifest(manifest_path, hub_root)
    except (OSError, ValueError, KeyError) as exc:
        logger.debug("Failed to load manifest: %s", exc)
        return path
    repo_root = repo_root.resolve()
    # Match the repo root against each manifest entry's resolved path.
    for entry in manifest.repos:
        candidate = (hub_root / entry.path).resolve()
        if candidate == repo_root:
            return f"/repos/{entry.id}{path}"
    return path
|
|
397
|
+
|
|
398
|
+
|
|
399
|
+
def _resolve_auth_token(env_name: str) -> Optional[str]:
|
|
400
|
+
if not env_name:
|
|
401
|
+
return None
|
|
402
|
+
value = os.environ.get(env_name)
|
|
403
|
+
if value is None:
|
|
404
|
+
return None
|
|
405
|
+
value = value.strip()
|
|
406
|
+
return value or None
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
def _require_auth_token(env_name: Optional[str]) -> Optional[str]:
    """Resolve the auth token for *env_name*, exiting if configured but unset.

    Returns None when no env var name is configured at all.
    """
    if not env_name:
        return None
    token = _resolve_auth_token(env_name)
    if token:
        return token
    _raise_exit(
        f"server.auth_token_env is set to {env_name}, but the environment variable is missing."
    )
|
|
418
|
+
|
|
419
|
+
|
|
420
|
+
def _is_loopback_host(host: str) -> bool:
|
|
421
|
+
if host == "localhost":
|
|
422
|
+
return True
|
|
423
|
+
try:
|
|
424
|
+
return ipaddress.ip_address(host).is_loopback
|
|
425
|
+
except ValueError:
|
|
426
|
+
return False
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def _enforce_bind_auth(host: str, token_env: str) -> None:
    """Refuse to bind a non-loopback host unless an auth token is available."""
    # Loopback binds never need a token; otherwise a resolvable token suffices.
    if _is_loopback_host(host) or _resolve_auth_token(token_env):
        return
    _raise_exit(
        "Refusing to bind to a non-loopback host without server.auth_token_env set."
    )
|
|
437
|
+
|
|
438
|
+
|
|
439
|
+
def _request_json(
    method: str,
    url: str,
    payload: Optional[dict] = None,
    token_env: Optional[str] = None,
) -> dict:
    """Issue a short-timeout HTTP request and return the decoded JSON object.

    Adds a Bearer header when *token_env* names a configured token env var
    (exiting if the variable is missing). Non-dict JSON bodies collapse to {}.
    Raises httpx.HTTPStatusError on non-2xx responses.
    """
    headers = None
    if token_env:
        headers = {"Authorization": f"Bearer {_require_auth_token(token_env)}"}
    response = httpx.request(method, url, json=payload, timeout=2.0, headers=headers)
    response.raise_for_status()
    body = response.json()
    return body if isinstance(body, dict) else {}
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
def _require_optional_feature(
    *, feature: str, deps: list[tuple[str, str]], extra: Optional[str] = None
) -> None:
    """Verify the optional dependencies for *feature* are importable.

    Exits the CLI with the ConfigError message when any dependency in *deps*
    is missing; *extra* is forwarded to the underlying checker.
    """
    try:
        require_optional_dependencies(feature=feature, deps=deps, extra=extra)
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
|
|
462
|
+
|
|
463
|
+
|
|
464
|
+
# Attach the sub-command groups to the root Typer app. Note `ticket_flow_app`
# is registered twice: as the top-level `ticket-flow` group and again under
# `flow` as `ticket_flow`.
app.add_typer(hub_app, name="hub")
hub_app.add_typer(worktree_app, name="worktree")
app.add_typer(telegram_app, name="telegram")
app.add_typer(templates_app, name="templates")
templates_app.add_typer(repos_app, name="repos")
app.add_typer(flow_app, name="flow")
app.add_typer(ticket_flow_app, name="ticket-flow")
flow_app.add_typer(ticket_flow_app, name="ticket_flow")
app.add_typer(pma_cli_app, name="pma")
|
|
473
|
+
|
|
474
|
+
|
|
475
|
+
def _has_nested_git(path: Path) -> bool:
    """Return True if any non-symlink subdirectory of *path*, at any depth,
    contains a ``.git`` entry.

    Filesystem errors while walking are treated as "no nested git found".
    """
    try:
        for entry in path.iterdir():
            if entry.is_symlink() or not entry.is_dir():
                continue
            if (entry / ".git").exists() or _has_nested_git(entry):
                return True
    except OSError:
        return False
    return False
|
|
487
|
+
|
|
488
|
+
|
|
489
|
+
@app.command()
def init(
    path: Optional[Path] = typer.Argument(None, help="Repo path; defaults to CWD"),
    force: bool = typer.Option(False, "--force", help="Overwrite existing files"),
    git_init: bool = typer.Option(False, "--git-init", help="Run git init if missing"),
    mode: str = typer.Option(
        "auto",
        "--mode",
        help="Initialization mode: repo, hub, or auto (default)",
    ),
):
    """Initialize a repo for Codex autorunner."""
    start_path = (path or Path.cwd()).resolve()
    # Normalize mode; an empty string falls back to "auto".
    mode = (mode or "auto").lower()
    if mode not in ("auto", "repo", "hub"):
        _raise_exit("Invalid mode; expected repo, hub, or auto")

    git_required = True
    target_root: Optional[Path] = None
    selected_mode = mode

    # First try to treat this as a repo init if requested or auto-detected via .git.
    if mode in ("auto", "repo"):
        try:
            target_root = find_repo_root(start_path)
            selected_mode = "repo"
        except RepoNotFoundError:
            target_root = None

    # If no git root was found, decide between hub or repo-with-git-init.
    if target_root is None:
        target_root = start_path
        # Auto mode picks "hub" when the directory contains nested git repos.
        if mode in ("hub",) or (mode == "auto" and _has_nested_git(target_root)):
            selected_mode = "hub"
            git_required = False
        elif git_init:
            selected_mode = "repo"
            try:
                # check=False: inspect the return code ourselves below.
                proc = run_git(["init"], target_root, check=False)
            except GitError as exc:
                _raise_exit(f"git init failed: {exc}")
            if proc.returncode != 0:
                # Prefer stderr, then stdout, then a generic exit-code message.
                detail = (
                    proc.stderr or proc.stdout or ""
                ).strip() or f"exit {proc.returncode}"
                _raise_exit(f"git init failed: {detail}")
        else:
            _raise_exit("No .git directory found; rerun with --git-init to create one")

    ca_dir = target_root / ".codex-autorunner"
    ca_dir.mkdir(parents=True, exist_ok=True)

    # Look for an existing hub config above/at the target; if none exists for a
    # repo init, hub files are seeded alongside the repo files below.
    hub_config_path = find_nearest_hub_config_path(target_root)
    try:
        if selected_mode == "hub":
            seed_hub_files(target_root, force=force)
            typer.echo(f"Initialized hub at {ca_dir}")
        else:
            seed_repo_files(target_root, force=force, git_required=git_required)
            typer.echo(f"Initialized repo at {ca_dir}")
            if hub_config_path is None:
                seed_hub_files(target_root, force=force)
                typer.echo(f"Initialized hub at {ca_dir}")
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    typer.echo("Init complete")
|
|
555
|
+
|
|
556
|
+
|
|
557
|
+
@app.command()
def status(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """Show autorunner status."""
    engine = _require_repo_config(repo, hub)
    state = load_state(engine.state_path)
    repo_key = str(engine.repo_root)
    # Session ids may be keyed by bare repo path (legacy) or "<path>:codex".
    session_id = state.repo_to_session.get(repo_key) or state.repo_to_session.get(
        f"{repo_key}:codex"
    )
    opencode_session_id = state.repo_to_session.get(f"{repo_key}:opencode")
    session_record = state.sessions.get(session_id) if session_id else None
    opencode_record = (
        state.sessions.get(opencode_session_id) if opencode_session_id else None
    )

    if output_json:
        hub_config_path = _resolve_hub_config_path_for_cli(engine.repo_root, hub)
        payload = {
            "repo": str(engine.repo_root),
            # hub root is the grandparent of the config file
            # (<hub>/.codex-autorunner/config) — TODO confirm layout.
            "hub": (
                str(hub_config_path.parent.parent.resolve())
                if hub_config_path
                else None
            ),
            "status": state.status,
            "last_run_id": state.last_run_id,
            "last_exit_code": state.last_exit_code,
            "last_run_started_at": state.last_run_started_at,
            "last_run_finished_at": state.last_run_finished_at,
            "runner_pid": state.runner_pid,
            "session_id": session_id,
            "session_record": (
                {
                    "repo_path": session_record.repo_path,
                    "created_at": session_record.created_at,
                    "last_seen_at": session_record.last_seen_at,
                    "status": session_record.status,
                    "agent": session_record.agent,
                }
                if session_record
                else None
            ),
            "opencode_session_id": opencode_session_id,
            "opencode_record": (
                {
                    "repo_path": opencode_record.repo_path,
                    "created_at": opencode_record.created_at,
                    "last_seen_at": opencode_record.last_seen_at,
                    "status": opencode_record.status,
                    "agent": opencode_record.agent,
                }
                if opencode_record
                else None
            ),
        }
        typer.echo(json.dumps(payload, indent=2))
        return

    # Human-readable output.
    typer.echo(f"Repo: {engine.repo_root}")
    typer.echo(f"Status: {state.status}")
    typer.echo(f"Last run id: {state.last_run_id}")
    typer.echo(f"Last exit code: {state.last_exit_code}")
    typer.echo(f"Last start: {state.last_run_started_at}")
    typer.echo(f"Last finish: {state.last_run_finished_at}")
    typer.echo(f"Runner pid: {state.runner_pid}")
    if not session_id and not opencode_session_id:
        typer.echo("Terminal session: none")
    if session_id:
        detail = ""
        if session_record:
            detail = f" (status={session_record.status}, last_seen={session_record.last_seen_at})"
        typer.echo(f"Terminal session (codex): {session_id}{detail}")
    # Skip the opencode line when it duplicates the codex session id.
    if opencode_session_id and opencode_session_id != session_id:
        detail = ""
        if opencode_record:
            detail = f" (status={opencode_record.status}, last_seen={opencode_record.last_seen_at})"
        typer.echo(f"Terminal session (opencode): {opencode_session_id}{detail}")
|
|
638
|
+
|
|
639
|
+
|
|
640
|
+
@templates_app.command("fetch")
def templates_fetch(
    template: str = typer.Argument(
        ..., help="Template ref formatted as REPO_ID:PATH[@REF]"
    ),
    out: Optional[Path] = typer.Option(
        None, "--out", help="Write template content to a file"
    ),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Fetch a template from a configured templates repo."""
    ctx = _require_repo_config(repo, hub)
    _require_templates_enabled(ctx.config)
    fetched, scan_record, _hub_root = _fetch_template_with_scan(template, ctx, hub)

    if out is not None:
        out.parent.mkdir(parents=True, exist_ok=True)
        out.write_text(fetched.content, encoding="utf-8")
        # Status message goes to stderr so stdout stays clean for piping.
        typer.echo(f"Wrote template to {out}", err=True)

    if output_json:
        payload = {
            "content": fetched.content,
            "repo_id": fetched.repo_id,
            "path": fetched.path,
            "ref": fetched.ref,
            "commit_sha": fetched.commit_sha,
            "blob_sha": fetched.blob_sha,
            "trusted": fetched.trusted,
            "scan_decision": scan_record.to_dict() if scan_record else None,
        }
        typer.echo(json.dumps(payload, indent=2))
        return

    # No --out and no --json: stream raw content to stdout without a trailing
    # newline so the bytes match the template exactly.
    if out is None:
        typer.echo(fetched.content, nl=False)
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
@templates_app.command("apply")
def templates_apply(
    template: str = typer.Argument(
        ..., help="Template ref formatted as REPO_ID:PATH[@REF]"
    ),
    ticket_dir: Optional[Path] = typer.Option(
        None,
        "--ticket-dir",
        help="Ticket directory (default .codex-autorunner/tickets)",
    ),
    at: Optional[int] = typer.Option(None, "--at", help="Explicit ticket index"),
    next_index: bool = typer.Option(
        True, "--next/--no-next", help="Use next available index (default)"
    ),
    suffix: Optional[str] = typer.Option(
        None, "--suffix", help="Optional filename suffix (e.g. -foo)"
    ),
    set_agent: Optional[str] = typer.Option(
        None, "--set-agent", help="Override frontmatter agent"
    ),
    provenance: bool = typer.Option(
        False,
        "--provenance/--no-provenance",
        help="Embed template provenance in ticket",
    ),
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Apply a template by writing it into the ticket directory."""
    ctx = _require_repo_config(repo, hub)
    _require_templates_enabled(ctx.config)

    fetched, scan_record, _hub_root = _fetch_template_with_scan(template, ctx, hub)

    # Resolve and create the destination directory before picking an index.
    resolved_dir = _resolve_ticket_dir(ctx.repo_root, ticket_dir)
    if resolved_dir.exists() and not resolved_dir.is_dir():
        _raise_exit(f"Ticket dir is not a directory: {resolved_dir}")
    try:
        resolved_dir.mkdir(parents=True, exist_ok=True)
    except OSError as exc:
        _raise_exit(f"Unable to create ticket dir: {exc}")

    # Index selection: either an explicit --at, or the next free slot.
    if at is None and not next_index:
        _raise_exit("Specify --at or leave --next enabled to pick an index.")
    if at is not None and at < 1:
        _raise_exit("Ticket index must be >= 1.")

    existing_indices = _collect_ticket_indices(resolved_dir)
    if at is None:
        index = _next_available_ticket_index(existing_indices)
    else:
        index = at
        if index in existing_indices:
            _raise_exit(
                f"Ticket index {index} already exists. Choose another index or open a gap."
            )

    normalized_suffix = _normalize_ticket_suffix(suffix)
    # Zero-pad width: at least 3 digits, widened to fit the largest index.
    width = max(3, max([len(str(i)) for i in existing_indices + [index]]))
    filename = _ticket_filename(index, suffix=normalized_suffix, width=width)
    path = resolved_dir / filename
    if path.exists():
        _raise_exit(f"Ticket already exists: {path}")

    content = fetched.content
    if set_agent:
        # "user" bypasses agent-id validation — presumably a reserved
        # pseudo-agent; verify against validate_agent_id's contract.
        if set_agent != "user":
            try:
                validate_agent_id(set_agent)
            except ValueError as exc:
                _raise_exit(str(exc), cause=exc)
        content = _apply_agent_override(content, set_agent)

    if provenance:
        content = inject_provenance(content, fetched, scan_record)

    try:
        path.write_text(content, encoding="utf-8")
    except OSError as exc:
        _raise_exit(f"Failed to write ticket: {exc}")

    # Echo a human summary line followed by the provenance metadata as JSON.
    metadata = {
        "repo_id": fetched.repo_id,
        "path": fetched.path,
        "ref": fetched.ref,
        "commit_sha": fetched.commit_sha,
        "blob_sha": fetched.blob_sha,
        "trusted": fetched.trusted,
        "scan": scan_record.to_dict() if scan_record else None,
    }
    typer.echo(
        "Created ticket "
        f"{path} (index={index}, template={fetched.repo_id}:{fetched.path}@{fetched.ref})"
    )
    typer.echo(json.dumps(metadata, indent=2))
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
@repos_app.command("list")
def repos_list(
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """List configured template repos."""
    config = _require_hub_config(hub)
    hub_config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(hub_config_path)

    # Tolerate malformed config: non-dict "templates" / non-list "repos"
    # degrade to empty rather than erroring.
    templates_config = data.get("templates", {})
    if not isinstance(templates_config, dict):
        templates_config = {}
    repos = templates_config.get("repos", [])
    if not isinstance(repos, list):
        repos = []

    if output_json:
        payload = {"repos": repos}
        typer.echo(json.dumps(payload, indent=2))
        return

    if not repos:
        typer.echo("No template repos configured.")
        return

    typer.echo(f"Template repos ({len(repos)}):")
    for repo in repos:
        # Skip non-dict entries silently; only well-formed entries are shown.
        if not isinstance(repo, dict):
            continue
        repo_id = repo.get("id", "")
        url = repo.get("url", "")
        trusted = repo.get("trusted", False)
        default_ref = repo.get("default_ref", "main")
        trusted_text = "trusted" if trusted else "untrusted"
        typer.echo(f" - {repo_id}: {url} [{trusted_text}] (default_ref={default_ref})")
|
|
813
|
+
|
|
814
|
+
|
|
815
|
+
@repos_app.command("add")
def repos_add(
    repo_id: str = typer.Argument(..., help="Unique repo ID"),
    url: str = typer.Argument(..., help="Git repo URL or path"),
    trusted: Optional[bool] = typer.Option(
        None, "--trusted/--untrusted", help="Trust level (default: untrusted)"
    ),
    default_ref: str = typer.Option("main", "--default-ref", help="Default git ref"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Add a template repo to the hub config."""
    config = _require_hub_config(hub)
    hub_config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(hub_config_path)

    # First pass: read-only check that templates are not explicitly disabled.
    templates_config = data.get("templates", {})
    if not isinstance(templates_config, dict):
        templates_config = {}
    enabled = templates_config.get("enabled", True)
    if enabled is False:
        _raise_exit(
            "Templates are disabled. Set templates.enabled=true in the hub config to enable."
        )

    # Second pass: re-fetch via setdefault so mutations land in `data` itself
    # (the first fetch may have been replaced by a fresh local dict above).
    templates_config = data.setdefault("templates", {})
    if not isinstance(templates_config, dict):
        _raise_exit("Invalid templates config in hub config")
    templates_config.setdefault("enabled", True)
    repos = templates_config.setdefault("repos", [])
    if not isinstance(repos, list):
        _raise_exit("Invalid repos config in hub config")

    existing_ids = {repo.get("id") for repo in repos if isinstance(repo, dict)}
    if repo_id in existing_ids:
        _raise_exit(f"Repo ID '{repo_id}' already exists. Use a unique ID.")

    new_repo = {
        "id": repo_id,
        "url": url,
        "default_ref": default_ref,
    }
    # Only record trust when the caller stated it; omitted means untrusted by
    # default at read time (see repos_list).
    if trusted is not None:
        new_repo["trusted"] = trusted

    repos.append(new_repo)

    try:
        _write_hub_config_yaml(hub_config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Added template repo '{repo_id}' to hub config.")
|
|
867
|
+
|
|
868
|
+
|
|
869
|
+
@repos_app.command("remove")
def repos_remove(
    repo_id: str = typer.Argument(..., help="Repo ID to remove"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Remove a template repo from the hub config."""
    config = _require_hub_config(hub)
    hub_config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(hub_config_path)

    # Tolerate malformed config shapes by degrading to empty collections.
    templates_config = data.get("templates", {})
    if not isinstance(templates_config, dict):
        templates_config = {}
    repos = templates_config.get("repos", [])
    if not isinstance(repos, list):
        repos = []

    original_count = len(repos)
    # Keep every well-formed entry whose id differs; malformed (non-dict)
    # entries are dropped as a side effect of the rebuild.
    filtered_repos = [
        repo for repo in repos if isinstance(repo, dict) and repo.get("id") != repo_id
    ]

    if len(filtered_repos) == original_count:
        _raise_exit(f"Repo ID '{repo_id}' not found in config.")

    templates_config["repos"] = filtered_repos

    try:
        _write_hub_config_yaml(hub_config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Removed template repo '{repo_id}' from hub config.")
|
|
902
|
+
|
|
903
|
+
|
|
904
|
+
@repos_app.command("trust")
def repos_trust(
    repo_id: str = typer.Argument(..., help="Repo ID to trust"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Mark a template repo as trusted (skip scanning)."""
    config = _require_hub_config(hub)
    hub_config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(hub_config_path)

    # Tolerate malformed config shapes by degrading to empty collections.
    templates_config = data.get("templates", {})
    if not isinstance(templates_config, dict):
        templates_config = {}
    repos = templates_config.get("repos", [])
    if not isinstance(repos, list):
        repos = []

    # Mutate the matching entry in place; `data` still references it.
    found = False
    for repo in repos:
        if isinstance(repo, dict) and repo.get("id") == repo_id:
            repo["trusted"] = True
            found = True
            break

    if not found:
        _raise_exit(f"Repo ID '{repo_id}' not found in config.")

    try:
        _write_hub_config_yaml(hub_config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Marked repo '{repo_id}' as trusted.")
|
|
937
|
+
|
|
938
|
+
|
|
939
|
+
@repos_app.command("untrust")
def repos_untrust(
    repo_id: str = typer.Argument(..., help="Repo ID to untrust"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Mark a template repo as untrusted (require scanning)."""
    config = _require_hub_config(hub)
    hub_config_path = config.root / CONFIG_FILENAME
    data = _load_hub_config_yaml(hub_config_path)

    # Tolerate malformed config shapes by degrading to empty collections.
    templates_config = data.get("templates", {})
    if not isinstance(templates_config, dict):
        templates_config = {}
    repos = templates_config.get("repos", [])
    if not isinstance(repos, list):
        repos = []

    # Mirror of repos_trust: flip the flag on the matching entry in place.
    found = False
    for repo in repos:
        if isinstance(repo, dict) and repo.get("id") == repo_id:
            repo["trusted"] = False
            found = True
            break

    if not found:
        _raise_exit(f"Repo ID '{repo_id}' not found in config.")

    try:
        _write_hub_config_yaml(hub_config_path, data)
    except OSError as exc:
        _raise_exit(f"Failed to write hub config: {exc}", cause=exc)

    typer.echo(f"Marked repo '{repo_id}' as untrusted.")
|
|
972
|
+
|
|
973
|
+
|
|
974
|
+
@app.command()
def sessions(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """List active terminal sessions."""
    engine = _require_repo_config(repo, hub)
    config = engine.config
    path = _resolve_repo_api_path(engine.repo_root, hub, "/api/sessions")
    url = _build_server_url(config, path)
    auth_token = _resolve_auth_token(config.server_auth_token_env)
    # Absolute paths are only requested when a token is available —
    # presumably the server gates them behind auth; verify server-side.
    if auth_token:
        url = f"{url}?include_abs_paths=1"
    payload = None
    source = "server"
    try:
        payload = _request_json("GET", url, token_env=config.server_auth_token_env)
    except (
        httpx.HTTPError,
        httpx.ConnectError,
        httpx.TimeoutException,
        OSError,
    ) as exc:
        # Server unreachable: degrade to reading the local state file.
        logger.debug(
            "Failed to fetch sessions from server, falling back to state: %s", exc
        )
        state = load_state(engine.state_path)
        payload = {
            "sessions": [
                {
                    "session_id": session_id,
                    "repo_path": record.repo_path,
                    "created_at": record.created_at,
                    "last_seen_at": record.last_seen_at,
                    "status": record.status,
                    # Liveness is unknowable without the server.
                    "alive": None,
                }
                for session_id, record in state.sessions.items()
            ],
            "repo_to_session": dict(state.repo_to_session),
        }
        source = "state"

    if output_json:
        # Only annotate the payload when it did not come from the server,
        # keeping server responses byte-compatible.
        if source != "server":
            payload["source"] = source
        typer.echo(json.dumps(payload, indent=2))
        return

    sessions_payload = payload.get("sessions", []) if isinstance(payload, dict) else []
    typer.echo(f"Sessions ({source}): {len(sessions_payload)}")
    for entry in sessions_payload:
        if not isinstance(entry, dict):
            continue
        session_id = entry.get("session_id") or "unknown"
        # Prefer the absolute path when the server included it.
        repo_path = entry.get("abs_repo_path") or entry.get("repo_path") or "unknown"
        status = entry.get("status") or "unknown"
        last_seen = entry.get("last_seen_at") or "unknown"
        alive = entry.get("alive")
        alive_text = "unknown" if alive is None else str(bool(alive))
        typer.echo(
            f"- {session_id}: repo={repo_path} status={status} last_seen={last_seen} alive={alive_text}"
        )
|
|
1038
|
+
|
|
1039
|
+
|
|
1040
|
+
@app.command("stop-session")
def stop_session(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    session_id: Optional[str] = typer.Option(
        None, "--session", help="Session id to stop"
    ),
):
    """Stop a terminal session by id or repo path."""
    engine = _require_repo_config(repo, hub)
    config = engine.config
    # Request body: identify the session either directly or by repo path.
    payload: dict[str, str] = {}
    if session_id:
        payload["session_id"] = session_id
    else:
        payload["repo_path"] = str(engine.repo_root)

    path = _resolve_repo_api_path(engine.repo_root, hub, "/api/sessions/stop")
    url = _build_server_url(config, path)
    try:
        response = _request_json(
            "POST", url, payload, token_env=config.server_auth_token_env
        )
        stopped_id = response.get("session_id", payload.get("session_id", ""))
        typer.echo(f"Stopped session {stopped_id}")
        return
    except (
        httpx.HTTPError,
        httpx.ConnectError,
        httpx.TimeoutException,
        OSError,
    ) as exc:
        # Server unreachable: fall through to editing local state directly.
        logger.debug(
            "Failed to stop session via server, falling back to state: %s", exc
        )

    with state_lock(engine.state_path):
        state = load_state(engine.state_path)
        target_id = payload.get("session_id")
        if not target_id:
            # Resolve a session id from the repo path, trying the legacy bare
            # key first, then the agent-suffixed keys.
            repo_lookup = payload.get("repo_path")
            if repo_lookup:
                target_id = (
                    state.repo_to_session.get(repo_lookup)
                    or state.repo_to_session.get(f"{repo_lookup}:codex")
                    or state.repo_to_session.get(f"{repo_lookup}:opencode")
                )
        if not target_id:
            _raise_exit("Session not found (server unavailable)")
        # Remove the session record and every repo mapping that points at it.
        state.sessions.pop(target_id, None)
        state.repo_to_session = {
            repo_key: sid
            for repo_key, sid in state.repo_to_session.items()
            if sid != target_id
        }
        save_state(engine.state_path, state)
    # NOTE: this only updates bookkeeping; the underlying process (if any)
    # is not signalled in the fallback path.
    typer.echo(f"Stopped session {target_id} (state only)")
|
|
1097
|
+
|
|
1098
|
+
|
|
1099
|
+
@app.command()
def usage(
    repo: Optional[Path] = typer.Option(
        None, "--repo", help="Repo or hub path; defaults to CWD"
    ),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    codex_home: Optional[Path] = typer.Option(
        None, "--codex-home", help="Override CODEX_HOME (defaults to env or ~/.codex)"
    ),
    since: Optional[str] = typer.Option(
        None,
        "--since",
        help="ISO timestamp filter, e.g. 2025-12-01 or 2025-12-01T12:00Z",
    ),
    until: Optional[str] = typer.Option(
        None, "--until", help="Upper bound ISO timestamp filter"
    ),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """Show Codex/OpenCode token usage for a repo or hub by reading local session logs."""
    try:
        since_dt = parse_iso_datetime(since)
        until_dt = parse_iso_datetime(until)
    except UsageError as exc:
        _raise_exit(str(exc), cause=exc)

    codex_root = (codex_home or default_codex_home()).expanduser()

    # Probe whether the target looks like an initialized single repo; if not,
    # fall back to hub-wide aggregation below.
    repo_root: Optional[Path] = None
    try:
        repo_root = find_repo_root(repo or Path.cwd())
    except RepoNotFoundError:
        repo_root = None

    if repo_root and (repo_root / ".codex-autorunner" / "state.sqlite3").exists():
        # Repo mode: `engine` is used after the else-branch returns.
        engine = _require_repo_config(repo, hub)
    else:
        # Hub mode: aggregate usage across every repo in the hub manifest.
        try:
            config = load_hub_config(hub or repo or Path.cwd())
        except ConfigError as exc:
            _raise_exit(str(exc), cause=exc)
        manifest = load_manifest(config.manifest_path, config.root)
        repo_map = [(entry.id, (config.root / entry.path)) for entry in manifest.repos]
        per_repo, unmatched = summarize_hub_usage(
            repo_map,
            codex_root,
            since=since_dt,
            until=until_dt,
        )
        if output_json:
            payload = {
                "mode": "hub",
                "hub_root": str(config.root),
                "codex_home": str(codex_root),
                "since": since,
                "until": until,
                "repos": {
                    repo_id: summary.to_dict() for repo_id, summary in per_repo.items()
                },
                "unmatched": unmatched.to_dict(),
            }
            typer.echo(json.dumps(payload, indent=2))
            return

        typer.echo(f"Hub: {config.root}")
        typer.echo(f"CODEX_HOME: {codex_root}")
        typer.echo(f"Repos: {len(per_repo)}")
        for repo_id, summary in per_repo.items():
            typer.echo(
                f"- {repo_id}: total={summary.totals.total_tokens} "
                f"(input={summary.totals.input_tokens}, cached={summary.totals.cached_input_tokens}, "
                f"output={summary.totals.output_tokens}, reasoning={summary.totals.reasoning_output_tokens}) "
                f"events={summary.events}"
            )
        # Usage events that could not be attributed to any manifest repo.
        if unmatched.events or unmatched.totals.total_tokens:
            typer.echo(
                f"- unmatched: total={unmatched.totals.total_tokens} events={unmatched.events}"
            )
        return

    # Repo mode continues here (hub mode returned above).
    summary = summarize_repo_usage(
        engine.repo_root,
        codex_root,
        since=since_dt,
        until=until_dt,
    )

    if output_json:
        payload = {
            "mode": "repo",
            "repo": str(engine.repo_root),
            "codex_home": str(codex_root),
            "since": since,
            "until": until,
            "usage": summary.to_dict(),
        }
        typer.echo(json.dumps(payload, indent=2))
        return

    typer.echo(f"Repo: {engine.repo_root}")
    typer.echo(f"CODEX_HOME: {codex_root}")
    typer.echo(
        f"Totals: total={summary.totals.total_tokens} "
        f"(input={summary.totals.input_tokens}, cached={summary.totals.cached_input_tokens}, "
        f"output={summary.totals.output_tokens}, reasoning={summary.totals.reasoning_output_tokens})"
    )
    typer.echo(f"Events counted: {summary.events}")
    if summary.latest_rate_limits:
        # `or {}` guards against explicit None values in the mapping.
        primary = summary.latest_rate_limits.get("primary", {}) or {}
        secondary = summary.latest_rate_limits.get("secondary", {}) or {}
        typer.echo(
            f"Latest rate limits: primary_used={primary.get('used_percent')}%/{primary.get('window_minutes')}m, "
            f"secondary_used={secondary.get('used_percent')}%/{secondary.get('window_minutes')}m"
        )
|
|
1213
|
+
|
|
1214
|
+
|
|
1215
|
+
@app.command()
def kill(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Force-kill a running autorunner and clear stale lock/state."""
    engine = _require_repo_config(repo, hub)
    pid = engine.kill_running_process()
    with state_lock(engine.state_path):
        state = load_state(engine.state_path)
        # Rebuild the state record preserving overrides and session maps, but
        # mark the run as errored and drop the runner pid. 137 follows the
        # 128+signal convention for a killed process — though SIGTERM (not
        # SIGKILL) is what is sent; NOTE(review): confirm intended code.
        new_state = RunnerState(
            last_run_id=state.last_run_id,
            status="error",
            last_exit_code=137,
            last_run_started_at=state.last_run_started_at,
            last_run_finished_at=now_iso(),
            autorunner_agent_override=state.autorunner_agent_override,
            autorunner_model_override=state.autorunner_model_override,
            autorunner_effort_override=state.autorunner_effort_override,
            autorunner_approval_policy=state.autorunner_approval_policy,
            autorunner_sandbox_mode=state.autorunner_sandbox_mode,
            autorunner_workspace_write_network=state.autorunner_workspace_write_network,
            runner_pid=None,
            sessions=state.sessions,
            repo_to_session=state.repo_to_session,
        )
        save_state(engine.state_path, new_state)
    clear_stale_lock(engine.lock_path)
    if pid:
        typer.echo(f"Sent SIGTERM to pid {pid}")
    else:
        typer.echo("No active autorunner process found; cleared stale lock if any.")
|
|
1247
|
+
|
|
1248
|
+
|
|
1249
|
+
@app.command()
def resume(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Resume a paused/running ticket flow (now uses ticket_flow).

    This command now uses ticket_flow for execution. For full control over
    flows, use 'car flow' commands instead.
    """
    # Resumption is handled by 'car flow ticket_flow/start', which reuses an
    # active/paused run automatically; this shim only points users there.
    deprecation_notices = (
        "The 'resume' command has been deprecated in favor of ticket_flow.",
        "Use 'car flow ticket_flow/start' to resume existing flows.",
    )
    for notice in deprecation_notices:
        typer.echo(notice)
    raise typer.Exit(code=0)
|
|
1264
|
+
|
|
1265
|
+
|
|
1266
|
+
@app.command()
def log(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[int] = typer.Option(None, "--run", help="Show a specific run"),
    tail: Optional[int] = typer.Option(None, "--tail", help="Tail last N lines"),
):
    """Show autorunner log output."""
    engine = _require_repo_config(repo, hub)
    if not engine.log_path.exists():
        _raise_exit("Log file not found; run init")

    # An explicit --run takes precedence over tailing.
    if run_id is not None:
        selected_block = engine.read_run_block(run_id)
        if not selected_block:
            _raise_exit("run not found")
        typer.echo(selected_block)
        return

    if tail is not None:
        typer.echo(engine.tail_log(tail))
        return

    # Default: show the most recently recorded run, if any.
    runner_state = load_state(engine.state_path)
    latest_run = runner_state.last_run_id
    if latest_run is None:
        typer.echo("No runs recorded yet")
        return
    latest_block = engine.read_run_block(latest_run)
    if not latest_block:
        typer.echo("No run block found (log may have rotated)")
        return
    typer.echo(latest_block)
|
|
1298
|
+
|
|
1299
|
+
|
|
1300
|
+
@app.command()
def edit(
    target: str = typer.Argument(..., help="active_context|decisions|spec"),
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
):
    """Open one of the docs in $EDITOR."""
    engine = _require_repo_config(repo, hub)
    config = engine.config
    key = target.lower()
    if key not in ("active_context", "decisions", "spec"):
        _raise_exit("Invalid target; choose active_context, decisions, or spec")
    path = config.doc_path(key)
    # ui.editor from config is the lowest-priority fallback; guard against a
    # non-dict "ui" section and a non-string or blank editor value.
    ui_cfg = config.raw.get("ui") if isinstance(config.raw, dict) else {}
    ui_cfg = ui_cfg if isinstance(ui_cfg, dict) else {}
    # ui_cfg is guaranteed to be a dict here, so no further isinstance check
    # is needed (the previous version re-checked redundantly).
    config_editor = ui_cfg.get("editor")
    if not isinstance(config_editor, str) or not config_editor.strip():
        config_editor = "vi"
    # Precedence: $VISUAL > $EDITOR > platform default (seeded with the
    # config-provided fallback).
    editor = (
        os.environ.get("VISUAL")
        or os.environ.get("EDITOR")
        or default_editor(fallback=config_editor)
    )
    # The editor setting may carry arguments (e.g. "code --wait"); fall back
    # to the raw string if shlex finds no tokens (whitespace-only value).
    editor_parts = shlex.split(editor) or [editor]
    typer.echo(f"Opening {path} with {' '.join(editor_parts)}")
    # NOTE(review): the editor's exit status is intentionally ignored here;
    # a non-zero editor exit is not treated as a CLI failure.
    subprocess.run([*editor_parts, str(path)])
|
|
1328
|
+
|
|
1329
|
+
|
|
1330
|
+
@app.command("doctor")
def doctor_cmd(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo or hub path"),
    json_output: bool = typer.Option(False, "--json", help="Output JSON for scripting"),
):
    """Validate repo or hub setup."""
    try:
        start_path = repo or Path.cwd()
        # Base checks first; feature-specific checks are layered on below.
        report = doctor(start_path)

        hub_config = load_hub_config(start_path)
        repo_config: Optional[RepoConfig] = None
        repo_root: Optional[Path] = None
        try:
            repo_root = find_repo_root(start_path)
            repo_config = derive_repo_config(hub_config, repo_root)
        except RepoNotFoundError:
            # Running from a bare hub (no repo) is fine; repo-scoped checks
            # simply fall back to the hub-level config.
            repo_config = None

        telegram_checks = telegram_doctor_checks(
            repo_config or hub_config, repo_root=repo_root
        )
        pma_checks = pma_doctor_checks(hub_config, repo_root=repo_root)
        hub_worktree_checks = hub_worktree_doctor_checks(hub_config)

        # Rebuild the report with all check groups concatenated.
        report = DoctorReport(
            checks=report.checks + telegram_checks + pma_checks + hub_worktree_checks
        )
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    if json_output:
        typer.echo(json.dumps(report.to_dict(), indent=2))
        # JSON mode still signals failure via the exit code for scripting.
        if report.has_errors():
            raise typer.Exit(code=1)
        return
    for check in report.checks:
        line = f"- {check.status.upper()}: {check.message}"
        if check.fix:
            line = f"{line} Fix: {check.fix}"
        typer.echo(line)
    if report.has_errors():
        _raise_exit("Doctor check failed")
    typer.echo("Doctor check passed")
|
|
1373
|
+
|
|
1374
|
+
|
|
1375
|
+
@app.command()
def serve(
    path: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    host: Optional[str] = typer.Option(None, "--host", help="Host to bind"),
    port: Optional[int] = typer.Option(None, "--port", help="Port to bind"),
    base_path: Optional[str] = typer.Option(
        None, "--base-path", help="Base path for the server"
    ),
):
    """Start the hub web server and UI API."""
    try:
        config = load_hub_config(path or Path.cwd())
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    # CLI flags win over configured defaults.
    serve_host = host or config.server_host
    serve_port = port or config.server_port
    if base_path is not None:
        serve_base = _normalize_base_path(base_path)
    else:
        serve_base = config.server_base_path
    # Enforce the bind/auth policy before announcing the address.
    _enforce_bind_auth(serve_host, config.server_auth_token_env)
    typer.echo(f"Serving hub on http://{serve_host}:{serve_port}{serve_base or ''}")
    uvicorn.run(
        create_hub_app(config.root, base_path=serve_base),
        host=serve_host,
        port=serve_port,
        root_path="",
        access_log=config.server_access_log,
    )
|
|
1405
|
+
|
|
1406
|
+
|
|
1407
|
+
@hub_app.command("create")
def hub_create(
    repo_id: str = typer.Argument(..., help="Base repo id to create and initialize"),
    repo_path: Optional[Path] = typer.Option(
        None,
        "--repo-path",
        help="Custom repo path relative to hub repos_root",
    ),
    path: Optional[Path] = typer.Option(None, "--path", help="Hub root path"),
    force: bool = typer.Option(False, "--force", help="Allow existing directory"),
    git_init: bool = typer.Option(
        True, "--git-init/--no-git-init", help="Run git init in the new repo"
    ),
):
    """Create a new base git repo under the hub and initialize codex-autorunner files.

    For worktrees, use `car hub worktree create`.
    """
    config = _require_hub_config(path)
    # Wire up the full builder set so repo creation can bootstrap agents.
    supervisor_kwargs = {
        "backend_factory_builder": build_agent_backend_factory,
        "app_server_supervisor_factory_builder": build_app_server_supervisor_factory,
        "backend_orchestrator_builder": build_backend_orchestrator,
        "agent_id_validator": validate_agent_id,
    }
    supervisor = HubSupervisor(config, **supervisor_kwargs)
    try:
        snapshot = supervisor.create_repo(
            repo_id, repo_path, git_init=git_init, force=force
        )
    except Exception as exc:
        _raise_exit(str(exc), cause=exc)
    typer.echo(f"Created repo {snapshot.id} at {snapshot.path}")
|
|
1440
|
+
|
|
1441
|
+
|
|
1442
|
+
@hub_app.command("clone")
def hub_clone(
    git_url: str = typer.Option(
        ..., "--git-url", help="Git URL or local path to clone"
    ),
    repo_id: Optional[str] = typer.Option(
        None, "--id", help="Repo id to register (defaults from git URL)"
    ),
    repo_path: Optional[Path] = typer.Option(
        None,
        "--repo-path",
        help="Custom repo path relative to hub repos_root",
    ),
    path: Optional[Path] = typer.Option(None, "--path", help="Hub root path"),
    force: bool = typer.Option(False, "--force", help="Allow existing directory"),
):
    """Clone a git repo under the hub and initialize codex-autorunner files."""
    config = _require_hub_config(path)
    supervisor_kwargs = {
        "backend_factory_builder": build_agent_backend_factory,
        "app_server_supervisor_factory_builder": build_app_server_supervisor_factory,
        "agent_id_validator": validate_agent_id,
    }
    supervisor = HubSupervisor(config, **supervisor_kwargs)
    try:
        snapshot = supervisor.clone_repo(
            git_url=git_url, repo_id=repo_id, repo_path=repo_path, force=force
        )
    except Exception as exc:
        _raise_exit(str(exc), cause=exc)
    typer.echo(
        f"Cloned repo {snapshot.id} at {snapshot.path} (status={snapshot.status.value})"
    )
|
|
1475
|
+
|
|
1476
|
+
|
|
1477
|
+
def _worktree_snapshot_payload(snapshot) -> dict:
    """Flatten a worktree snapshot into a JSON-serializable dict.

    Key order is preserved so JSON output stays stable across versions.
    """
    return dict(
        id=snapshot.id,
        worktree_of=snapshot.worktree_of,
        branch=snapshot.branch,
        path=str(snapshot.path),
        initialized=snapshot.initialized,
        exists_on_disk=snapshot.exists_on_disk,
        status=snapshot.status.value,
    )
|
|
1487
|
+
|
|
1488
|
+
|
|
1489
|
+
@worktree_app.command("create")
def hub_worktree_create(
    base_repo_id: str = typer.Argument(..., help="Base repo id to branch from"),
    branch: str = typer.Argument(..., help="Branch name for the new worktree"),
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    force: bool = typer.Option(False, "--force", help="Allow existing directory"),
    start_point: Optional[str] = typer.Option(
        None, "--start-point", help="Optional git ref to branch from"
    ),
):
    """Create a new hub-managed worktree."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        backend_orchestrator_builder=build_backend_orchestrator,
        agent_id_validator=validate_agent_id,
    )
    create_kwargs = {
        "base_repo_id": base_repo_id,
        "branch": branch,
        "force": force,
        "start_point": start_point,
    }
    try:
        snapshot = supervisor.create_worktree(**create_kwargs)
    except Exception as exc:
        _raise_exit(str(exc), cause=exc)
    typer.echo(
        f"Created worktree {snapshot.id} (branch={snapshot.branch}) at {snapshot.path}"
    )
|
|
1520
|
+
|
|
1521
|
+
|
|
1522
|
+
@worktree_app.command("list")
def hub_worktree_list(
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """List hub-managed worktrees."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    # Bypass the cache so freshly created/removed worktrees show up.
    worktrees = [
        _worktree_snapshot_payload(snap)
        for snap in supervisor.list_repos(use_cache=False)
        if snap.kind == "worktree"
    ]
    if output_json:
        typer.echo(json.dumps({"worktrees": worktrees}, indent=2))
        return
    if not worktrees:
        typer.echo("No worktrees found.")
        return
    typer.echo(f"Worktrees ({len(worktrees)}):")
    line_template = (
        " - {id} (base={worktree_of}, branch={branch}, status={status}, "
        "initialized={initialized}, exists={exists_on_disk}, path={path})"
    )
    for entry in worktrees:
        typer.echo(line_template.format(**entry))
|
|
1554
|
+
|
|
1555
|
+
|
|
1556
|
+
@worktree_app.command("scan")
def hub_worktree_scan(
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """Scan hub root and list discovered worktrees."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    # scan() re-discovers entries on disk; keep only worktree kinds.
    worktrees = [
        _worktree_snapshot_payload(snap)
        for snap in supervisor.scan()
        if snap.kind == "worktree"
    ]
    if output_json:
        typer.echo(json.dumps({"worktrees": worktrees}, indent=2))
        return
    if not worktrees:
        typer.echo("No worktrees found.")
        return
    typer.echo(f"Worktrees ({len(worktrees)}):")
    line_template = (
        " - {id} (base={worktree_of}, branch={branch}, status={status}, "
        "initialized={initialized}, exists={exists_on_disk}, path={path})"
    )
    for entry in worktrees:
        typer.echo(line_template.format(**entry))
|
|
1584
|
+
|
|
1585
|
+
|
|
1586
|
+
@worktree_app.command("cleanup")
def hub_worktree_cleanup(
    worktree_repo_id: str = typer.Argument(..., help="Worktree repo id to remove"),
    hub: Optional[Path] = typer.Option(None, "--path", "--hub", help="Hub root path"),
    delete_branch: bool = typer.Option(
        False, "--delete-branch", help="Delete the local branch"
    ),
    delete_remote: bool = typer.Option(
        False, "--delete-remote", help="Delete the remote branch"
    ),
    archive: bool = typer.Option(
        True, "--archive/--no-archive", help="Archive worktree snapshot"
    ),
    force_archive: bool = typer.Option(
        False, "--force-archive", help="Continue cleanup if archive fails"
    ),
    archive_note: Optional[str] = typer.Option(
        None, "--archive-note", help="Optional archive note"
    ),
):
    """Cleanup a hub-managed worktree."""
    config = _require_hub_config(hub)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    cleanup_kwargs = {
        "worktree_repo_id": worktree_repo_id,
        "delete_branch": delete_branch,
        "delete_remote": delete_remote,
        "archive": archive,
        "force_archive": force_archive,
        "archive_note": archive_note,
    }
    try:
        supervisor.cleanup_worktree(**cleanup_kwargs)
    except Exception as exc:
        _raise_exit(str(exc), cause=exc)
    typer.echo("ok")
|
|
1626
|
+
|
|
1627
|
+
|
|
1628
|
+
@hub_app.command("serve")
def hub_serve(
    path: Optional[Path] = typer.Option(None, "--path", help="Hub root path"),
    host: Optional[str] = typer.Option(None, "--host", help="Host to bind"),
    port: Optional[int] = typer.Option(None, "--port", help="Port to bind"),
    base_path: Optional[str] = typer.Option(
        None, "--base-path", help="Base path for the server"
    ),
):
    """Start the hub supervisor server."""
    config = _require_hub_config(path)
    if base_path is not None:
        serve_base = _normalize_base_path(base_path)
    else:
        serve_base = config.server_base_path
    # CLI flags win over configured defaults.
    serve_host = host or config.server_host
    serve_port = port or config.server_port
    _enforce_bind_auth(serve_host, config.server_auth_token_env)
    typer.echo(f"Serving hub on http://{serve_host}:{serve_port}{serve_base or ''}")
    uvicorn.run(
        create_hub_app(config.root, base_path=serve_base),
        host=serve_host,
        port=serve_port,
        root_path="",
        access_log=config.server_access_log,
    )
|
|
1655
|
+
|
|
1656
|
+
|
|
1657
|
+
@hub_app.command("scan")
def hub_scan(path: Optional[Path] = typer.Option(None, "--path", help="Hub root path")):
    """Trigger discovery/init and print repo statuses."""
    config = _require_hub_config(path)
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        agent_id_validator=validate_agent_id,
    )
    discovered = supervisor.scan()
    typer.echo(f"Scanned hub at {config.root} (repos_root={config.repos_root})")
    for snap in discovered:
        typer.echo(
            f"- {snap.id}: {snap.status.value}, initialized={snap.initialized}, exists={snap.exists_on_disk}"
        )
|
|
1673
|
+
|
|
1674
|
+
|
|
1675
|
+
@hub_app.command("snapshot")
def hub_snapshot(
    path: Optional[Path] = typer.Option(None, "--path", help="Hub root path"),
    output_json: bool = typer.Option(
        True, "--json/--no-json", help="Emit JSON output (default: true)"
    ),
    pretty: bool = typer.Option(False, "--pretty", help="Pretty-print JSON output"),
):
    """Return a compact hub snapshot (repos + inbox items)."""
    config = _require_hub_config(path)
    # The snapshot is assembled from the running hub server's HTTP API, not
    # from disk, so a live 'car hub serve' is required.
    repos_url = _build_server_url(config, "/hub/repos")
    messages_url = _build_server_url(config, "/hub/messages?limit=50")

    try:
        repos_response = _request_json(
            "GET", repos_url, token_env=config.server_auth_token_env
        )
        messages_response = _request_json(
            "GET", messages_url, token_env=config.server_auth_token_env
        )
    except (
        httpx.HTTPError,
        httpx.ConnectError,
        httpx.TimeoutException,
        OSError,
    ) as exc:
        logger.debug("Failed to fetch hub snapshot from server: %s", exc)
        _raise_exit(
            "Failed to connect to hub server. Ensure 'car hub serve' is running.",
            cause=exc,
        )

    # Defensive normalization: treat any non-dict response as empty.
    repos_payload = repos_response if isinstance(repos_response, dict) else {}
    messages_payload = messages_response if isinstance(messages_response, dict) else {}

    repos = repos_payload.get("repos", []) if isinstance(repos_payload, dict) else []
    messages_items = (
        messages_payload.get("items", []) if isinstance(messages_payload, dict) else []
    )

    def _summarize_repo(repo: dict) -> dict:
        # Keep only the compact, stable subset of repo fields.
        if not isinstance(repo, dict):
            return {}
        return {
            "id": repo.get("id"),
            "display_name": repo.get("display_name"),
            "status": repo.get("status"),
            "initialized": repo.get("initialized"),
            "exists_on_disk": repo.get("exists_on_disk"),
            "last_run_id": repo.get("last_run_id"),
            "last_run_started_at": repo.get("last_run_started_at"),
            "last_run_finished_at": repo.get("last_run_finished_at"),
        }

    def _summarize_message(msg: dict) -> dict:
        # Compact inbox entry; the dispatch body is truncated to keep the
        # snapshot small.
        if not isinstance(msg, dict):
            return {}
        dispatch = msg.get("dispatch", {})
        if not isinstance(dispatch, dict):
            dispatch = {}
        body = dispatch.get("body", "")
        title = dispatch.get("title", "")
        truncated_body = (body[:200] + "...") if len(body) > 200 else body
        return {
            "repo_id": msg.get("repo_id"),
            "repo_display_name": msg.get("repo_display_name"),
            "run_id": msg.get("run_id"),
            "run_created_at": msg.get("run_created_at"),
            "status": msg.get("status"),
            "seq": msg.get("seq"),
            "dispatch": {
                "mode": dispatch.get("mode"),
                "title": title,
                "body": truncated_body,
                "is_handoff": dispatch.get("is_handoff"),
            },
            "files_count": (
                len(msg.get("files", [])) if isinstance(msg.get("files"), list) else 0
            ),
        }

    snapshot = {
        "last_scan_at": (
            repos_payload.get("last_scan_at")
            if isinstance(repos_payload, dict)
            else None
        ),
        "repos": [_summarize_repo(repo) for repo in repos],
        "inbox_items": [_summarize_message(msg) for msg in messages_items],
    }

    if not output_json:
        # Human-readable rendering for --no-json.
        typer.echo(
            f"Hub Snapshot (repos={len(snapshot['repos'])}, inbox={len(snapshot['inbox_items'])})"
        )
        for repo in snapshot["repos"]:
            typer.echo(
                f"- {repo.get('id')}: status={repo.get('status')}, "
                f"initialized={repo.get('initialized')}, exists={repo.get('exists_on_disk')}"
            )
        for msg in snapshot["inbox_items"]:
            typer.echo(
                f"- Inbox: repo={msg.get('repo_id')}, run_id={msg.get('run_id')}, "
                f"title={msg.get('dispatch', {}).get('title')}"
            )
        return

    indent = 2 if pretty else None
    typer.echo(json.dumps(snapshot, indent=indent))
|
|
1784
|
+
|
|
1785
|
+
|
|
1786
|
+
@telegram_app.command("start")
def telegram_start(
    path: Optional[Path] = typer.Option(None, "--path", help="Repo or hub root path"),
):
    """Start the Telegram bot (polling)."""
    # Fail early with an actionable message if the optional extra is missing.
    _require_optional_feature(
        feature="telegram",
        deps=[("httpx", "httpx")],
        extra="telegram",
    )
    try:
        config = load_hub_config(path or Path.cwd())
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    telegram_cfg = TelegramBotConfig.from_raw(
        config.raw.get("telegram_bot") if isinstance(config.raw, dict) else None,
        root=config.root,
        # `x and {...}` keeps agent_binaries falsy (None/empty) when the
        # config exposes no agents attribute.
        agent_binaries=getattr(config, "agents", None)
        and {name: agent.binary for name, agent in config.agents.items()},
    )
    if not telegram_cfg.enabled:
        _raise_exit("telegram_bot is disabled; set telegram_bot.enabled: true")
    try:
        telegram_cfg.validate()
    except TelegramBotConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    # Dedicated rotating log for the Telegram service.
    logger = setup_rotating_logger("codex-autorunner-telegram", config.log)
    env_overrides = collect_env_overrides(env=os.environ, include_telegram=True)
    if env_overrides:
        logger.info("Environment overrides active: %s", ", ".join(env_overrides))
    log_event(
        logger,
        logging.INFO,
        "telegram.bot.starting",
        root=str(config.root),
        mode="hub",
    )
    # Voice settings come from hub-level repo defaults, not telegram config.
    voice_raw = config.repo_defaults.get("voice") if config.repo_defaults else None
    voice_config = VoiceConfig.from_raw(voice_raw, env=os.environ)
    update_repo_url = config.update_repo_url
    update_repo_ref = config.update_repo_ref

    async def _run() -> None:
        # Construct the service inside the event loop and poll until
        # shutdown; run_polling blocks for the lifetime of the bot.
        service = TelegramBotService(
            telegram_cfg,
            logger=logger,
            hub_root=config.root,
            manifest_path=config.manifest_path,
            voice_config=voice_config,
            housekeeping_config=config.housekeeping,
            update_repo_url=update_repo_url,
            update_repo_ref=update_repo_ref,
            update_skip_checks=config.update_skip_checks,
            app_server_auto_restart=config.app_server.auto_restart,
        )
        await service.run_polling()

    try:
        asyncio.run(_run())
    except TelegramBotLockError as exc:
        # Another bot instance already holds the lock.
        _raise_exit(str(exc), cause=exc)
|
|
1847
|
+
|
|
1848
|
+
|
|
1849
|
+
@telegram_app.command("health")
def telegram_health(
    path: Optional[Path] = typer.Option(None, "--path", help="Repo or hub root path"),
    timeout: float = typer.Option(5.0, "--timeout", help="Timeout (seconds)"),
):
    """Check Telegram API connectivity for the configured bot."""
    _require_optional_feature(
        feature="telegram",
        deps=[("httpx", "httpx")],
        extra="telegram",
    )
    try:
        config = load_hub_config(path or Path.cwd())
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    raw_section = config.raw.get("telegram_bot") if isinstance(config.raw, dict) else None
    agents = getattr(config, "agents", None)
    telegram_cfg = TelegramBotConfig.from_raw(
        raw_section,
        root=config.root,
        agent_binaries=agents
        and {name: agent.binary for name, agent in config.agents.items()},
    )
    if not telegram_cfg.enabled:
        _raise_exit("telegram_bot is disabled; set telegram_bot.enabled: true")
    bot_token = telegram_cfg.bot_token
    if not bot_token:
        _raise_exit(f"missing bot token env '{telegram_cfg.bot_token_env}'")
    # Floor the timeout so a zero/negative flag still yields a valid wait.
    effective_timeout = max(float(timeout), 0.1)

    async def _probe() -> None:
        # A single getMe round-trip verifies both token and connectivity.
        async with TelegramBotClient(bot_token) as client:
            await asyncio.wait_for(client.get_me(), timeout=effective_timeout)

    try:
        asyncio.run(_probe())
    except TelegramAPIError as exc:
        _raise_exit(f"Telegram health check failed: {exc}", cause=exc)
|
|
1885
|
+
|
|
1886
|
+
|
|
1887
|
+
@telegram_app.command("state-check")
def telegram_state_check(
    path: Optional[Path] = typer.Option(None, "--path", help="Repo or hub root path"),
):
    """Open the Telegram state DB and ensure schema migrations apply."""
    try:
        config = load_hub_config(path or Path.cwd())
    except ConfigError as exc:
        _raise_exit(str(exc), cause=exc)
    raw_section = config.raw.get("telegram_bot") if isinstance(config.raw, dict) else None
    agents = getattr(config, "agents", None)
    telegram_cfg = TelegramBotConfig.from_raw(
        raw_section,
        root=config.root,
        agent_binaries=agents
        and {name: agent.binary for name, agent in config.agents.items()},
    )
    if not telegram_cfg.enabled:
        _raise_exit("telegram_bot is disabled; set telegram_bot.enabled: true")

    try:
        store = TelegramStateStore(
            telegram_cfg.state_file,
            default_approval_mode=telegram_cfg.defaults.approval_mode,
        )
        # Opening a synchronous connection applies the schema/migrations as a
        # side effect, which is exactly what this command verifies.
        store._connection_sync()  # type: ignore[attr-defined]
    except Exception as exc:  # pragma: no cover - defensive runtime check
        _raise_exit(f"Telegram state check failed: {exc}", cause=exc)
|
|
1914
|
+
|
|
1915
|
+
|
|
1916
|
+
def _normalize_flow_run_id(run_id: Optional[str]) -> Optional[str]:
    """Canonicalize *run_id* to uuid string form; None passes through.

    Exits the CLI (via _raise_exit) when the value is not a valid UUID.
    """
    if run_id is None:
        return None
    candidate = str(run_id)
    try:
        return str(uuid.UUID(candidate))
    except ValueError:
        _raise_exit("Invalid run_id format; must be a UUID")
|
|
1923
|
+
|
|
1924
|
+
|
|
1925
|
+
def _ticket_flow_paths(engine: RuntimeContext) -> tuple[Path, Path, Path]:
    """Return (flows DB, flow artifacts root, ticket dir) for this repo.

    All three live under the repo's .codex-autorunner directory.
    """
    base = engine.repo_root / ".codex-autorunner"
    return base / "flows.db", base / "flows", base / "tickets"
|
|
1930
|
+
|
|
1931
|
+
|
|
1932
|
+
def _validate_tickets(ticket_dir: Path) -> list[str]:
    """Validate all tickets in the directory and return a list of error messages.

    Performs three passes: filename convention (TICKET-<number>[suffix].md),
    directory-level lint (e.g. duplicate indices), and per-ticket frontmatter
    checks. Returns an empty list when the directory does not exist.
    """
    errors: list[str] = []

    if not ticket_dir.exists():
        return errors

    ticket_root = ticket_dir.parent
    for path in sorted(ticket_dir.iterdir()):
        if not path.is_file():
            continue
        # AGENTS.md is agent documentation, not a ticket.
        if path.name == "AGENTS.md":
            continue
        if parse_ticket_index(path.name) is None:
            rel_path = safe_relpath(path, ticket_root)
            errors.append(
                f"{rel_path}: Invalid ticket filename; expected TICKET-<number>[suffix].md (e.g. TICKET-001-foo.md)"
            )

    # Check for directory-level errors (duplicate indices)
    errors.extend(lint_ticket_directory(ticket_dir))

    # Check each ticket file for frontmatter errors. Use safe_relpath for
    # consistency with the filename pass above (the previous
    # path.relative_to(path.parent.parent) computed the same prefix but can
    # raise ValueError for non-relative paths).
    for path in list_ticket_paths(ticket_dir):
        _, ticket_errors = read_ticket(path)
        rel_path = safe_relpath(path, ticket_root)
        for err in ticket_errors:
            errors.append(f"{rel_path}: {err}")

    return errors
|
|
1963
|
+
|
|
1964
|
+
|
|
1965
|
+
def _open_flow_store(engine: RuntimeContext) -> FlowStore:
    """Open and initialize the FlowStore backing this repo's flows DB."""
    flows_db, _, _ = _ticket_flow_paths(engine)
    flow_store = FlowStore(flows_db, durable=engine.config.durable_writes)
    flow_store.initialize()
    return flow_store
|
|
1970
|
+
|
|
1971
|
+
|
|
1972
|
+
def _active_or_paused_run(records: list[FlowRunRecord]) -> Optional[FlowRunRecord]:
|
|
1973
|
+
if not records:
|
|
1974
|
+
return None
|
|
1975
|
+
latest = records[0]
|
|
1976
|
+
if latest.status in (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED):
|
|
1977
|
+
return latest
|
|
1978
|
+
return None
|
|
1979
|
+
|
|
1980
|
+
|
|
1981
|
+
def _resumable_run(records: list[FlowRunRecord]) -> tuple[Optional[FlowRunRecord], str]:
|
|
1982
|
+
"""Return a resumable run and the reason.
|
|
1983
|
+
|
|
1984
|
+
Returns (run, reason) where run may be None.
|
|
1985
|
+
Reason is one of: 'active', 'completed_pending', 'force_new', 'new_run'.
|
|
1986
|
+
"""
|
|
1987
|
+
if not records:
|
|
1988
|
+
return None, "new_run"
|
|
1989
|
+
latest = records[0]
|
|
1990
|
+
if latest.status in (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED):
|
|
1991
|
+
return latest, "active"
|
|
1992
|
+
if latest.status == FlowRunStatus.COMPLETED:
|
|
1993
|
+
return latest, "completed_pending"
|
|
1994
|
+
return None, "new_run"
|
|
1995
|
+
|
|
1996
|
+
|
|
1997
|
+
def _ticket_flow_status_payload(
    engine: RuntimeContext, record: FlowRunRecord, store: Optional[FlowStore]
) -> dict:
    """Build a JSON-serializable status payload for a ticket_flow run record."""
    snapshot = build_flow_status_snapshot(engine.repo_root, record, store)
    health = snapshot.get("worker_health")

    # Flatten the worker-health object (if any) into a plain dict.
    worker_payload = None
    if health:
        worker_payload = {
            "status": health.status,
            "pid": health.pid,
            "message": health.message,
        }

    return {
        "run_id": record.id,
        "flow_type": record.flow_type,
        "status": record.status.value,
        "current_step": record.current_step,
        "created_at": record.created_at,
        "started_at": record.started_at,
        "finished_at": record.finished_at,
        "last_event_seq": snapshot.get("last_event_seq"),
        "last_event_at": snapshot.get("last_event_at"),
        "current_ticket": snapshot.get("effective_current_ticket"),
        "ticket_progress": snapshot.get("ticket_progress"),
        "worker": worker_payload,
    }
def _print_ticket_flow_status(payload: dict) -> None:
    """Pretty-print a ticket_flow status payload to the console."""
    echo = typer.echo
    echo(f"Run id: {payload.get('run_id')}")
    echo(f"Status: {payload.get('status')}")
    progress = payload.get("ticket_progress") or {}
    if isinstance(progress, dict):
        done = progress.get("done")
        total = progress.get("total")
        # Only print the counter when both numbers are actually present.
        if isinstance(done, int) and isinstance(total, int):
            echo(f"Tickets: {done}/{total}")
    echo(f"Current step: {payload.get('current_step')}")
    echo(f"Current ticket: {payload.get('current_ticket') or 'n/a'}")
    echo(f"Created at: {payload.get('created_at')}")
    echo(f"Started at: {payload.get('started_at')}")
    echo(f"Finished at: {payload.get('finished_at')}")
    echo(
        f"Last event: {payload.get('last_event_at')} (seq={payload.get('last_event_seq')})"
    )

    worker = payload.get("worker") or {}
    status = payload.get("status") or ""
    if not worker:
        return

    full_line = f"Worker: {worker.get('status')} pid={worker.get('pid')} {worker.get('message') or ''}".rstrip()
    if status not in {"completed", "failed", "stopped"}:
        # Non-terminal run: show the raw worker health line.
        echo(full_line)
        return

    # Terminal run: the worker has normally gone away, so report that tersely
    # instead of echoing a scary-looking dead/missing health line.
    worker_status = worker.get("status") or ""
    worker_msg = worker.get("message") or ""
    if worker_status == "absent" or "missing" in worker_msg.lower():
        echo("Worker: exited")
    elif worker_status == "dead" or "not running" in worker_msg.lower():
        echo(f"Worker: exited (pid={worker.get('pid')})")
    else:
        echo(full_line)
def _start_ticket_flow_worker(
    repo_root: Path, run_id: str, is_terminal: bool = False
) -> None:
    """Ensure a worker process is attached to *run_id* (spawn or reuse)."""
    outcome = ensure_worker(repo_root, run_id, is_terminal=is_terminal)
    if outcome["status"] == "reused":
        # A healthy worker already exists for this run; nothing more to do.
        return
def _stop_ticket_flow_worker(repo_root: Path, run_id: str) -> None:
    """Best-effort stop of the worker process attached to *run_id*.

    Clears stale worker metadata when the health check says the worker is
    gone or inconsistent, then sends SIGTERM to the recorded pid (if any).
    All failures are swallowed: stopping is advisory and must never crash
    the CLI command that requested it.
    """
    health = check_worker_health(repo_root, run_id)
    if health.status in {"dead", "mismatch", "invalid"}:
        try:
            clear_worker_metadata(health.artifact_path.parent)
        except Exception:
            pass
    if not health.pid:
        return
    try:
        # Signal the process directly instead of shelling out to `kill`:
        # portable (the external `kill` binary does not exist on Windows)
        # and avoids spawning a subprocess. ProcessLookupError for an
        # already-exited pid is swallowed below, matching the previous
        # best-effort `subprocess.run(..., check=False)` behavior.
        import os
        import signal

        os.kill(health.pid, signal.SIGTERM)
    except Exception:
        pass
def _ticket_flow_controller(
    engine: RuntimeContext,
) -> tuple[FlowController, AgentPool]:
    """Build an initialized FlowController plus its AgentPool for ticket_flow.

    The caller owns both objects: call ``controller.shutdown()`` and
    ``agent_pool.close()`` when finished (as the CLI commands below do).
    """
    db_path, artifacts_root, _unused_ticket_dir = _ticket_flow_paths(engine)
    pool = AgentPool(engine.config)
    flow_definition = build_ticket_flow_definition(agent_pool=pool)
    flow_definition.validate()
    flow_controller = FlowController(
        definition=flow_definition,
        db_path=db_path,
        artifacts_root=artifacts_root,
        durable=engine.config.durable_writes,
    )
    flow_controller.initialize()
    return flow_controller, pool
@flow_app.command("worker")
def flow_worker(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(
        None, "--run-id", help="Flow run ID (required)"
    ),
):
    """Start a flow worker process for an existing run.

    This is the long-running side of the worker lifecycle: the other CLI
    commands spawn this process (via ensure_worker) and it drives the
    run to completion through FlowController.run_flow().
    """
    engine = _require_repo_config(repo, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)
    if not normalized_run_id:
        _raise_exit("--run-id is required for worker command")

    db_path, artifacts_root, ticket_dir = _ticket_flow_paths(engine)

    typer.echo(f"Starting flow worker for run {normalized_run_id}")

    async def _run_worker():
        # Entire worker lifecycle runs inside one asyncio.run() call below.
        typer.echo(f"Flow worker started for {normalized_run_id}")
        typer.echo(f"DB path: {db_path}")
        typer.echo(f"Artifacts root: {artifacts_root}")

        # Short-lived store used only to look up the run record and
        # pre-validate tickets; it is closed before the controller opens
        # its own connection to the same database.
        store = FlowStore(db_path, durable=engine.config.durable_writes)
        store.initialize()

        record = store.get_flow_run(normalized_run_id)
        if not record:
            typer.echo(f"Flow run {normalized_run_id} not found", err=True)
            store.close()
            raise typer.Exit(code=1)

        if record.flow_type == "ticket_flow":
            # Refuse to run against a broken ticket directory.
            lint_errors = _validate_tickets(ticket_dir)
            if lint_errors:
                typer.echo("Ticket validation failed:", err=True)
                for err in lint_errors:
                    typer.echo(f" - {err}", err=True)
                typer.echo("", err=True)
                typer.echo(
                    "Fix the above errors before starting the ticket flow.",
                    err=True,
                )
                store.close()
                raise typer.Exit(code=1)
            ticket_paths = list_ticket_paths(ticket_dir)
            if not ticket_paths:
                # Nothing to do: exit 0 with guidance rather than failing.
                typer.echo(
                    "No tickets found. Create tickets or run ticket_flow bootstrap to get started."
                )
                typer.echo(
                    f" Ticket directory: {ticket_dir.relative_to(engine.repo_root)}"
                )
                typer.echo(" To bootstrap: car flow ticket_flow bootstrap")
                store.close()
                raise typer.Exit(code=0)

        store.close()

        # Record this process's identity so health checks / stop commands
        # can find it; registration failure is non-fatal (best effort).
        try:
            register_worker_metadata(
                engine.repo_root,
                normalized_run_id,
                artifacts_root=artifacts_root,
            )
        except Exception as exc:
            typer.echo(f"Failed to register worker metadata: {exc}", err=True)

        # Set as a side effect of _build_definition for ticket_flow runs;
        # closed in the finally block at the bottom.
        agent_pool: AgentPool | None = None

        def _build_definition(flow_type: str):
            # Maps the stored flow_type to a flow definition; exits for
            # unsupported or unknown types.
            nonlocal agent_pool
            if flow_type == "pr_flow":
                _raise_exit("PR flow is no longer supported. Use ticket_flow instead.")
            if flow_type == "ticket_flow":
                agent_pool = AgentPool(engine.config)
                return build_ticket_flow_definition(agent_pool=agent_pool)
            _raise_exit(f"Unknown flow type for run {normalized_run_id}: {flow_type}")
            # Unreachable (_raise_exit raises); keeps the function total.
            return None

        definition = _build_definition(record.flow_type)
        definition.validate()

        controller = FlowController(
            definition=definition,
            db_path=db_path,
            artifacts_root=artifacts_root,
            durable=engine.config.durable_writes,
        )
        controller.initialize()

        # Re-read the record through the controller in case its state
        # changed between the earlier store lookup and now.
        record = controller.get_status(normalized_run_id)
        if not record:
            typer.echo(f"Flow run {normalized_run_id} not found", err=True)
            raise typer.Exit(code=1)

        # STOPPED/FAILED terminal runs may still be re-driven; any other
        # terminal state (e.g. COMPLETED) means there is nothing to do.
        if record.status.is_terminal() and record.status not in {
            FlowRunStatus.STOPPED,
            FlowRunStatus.FAILED,
        }:
            typer.echo(
                f"Flow run {normalized_run_id} already completed (status={record.status})"
            )
            return

        action = "Resuming" if record.status != FlowRunStatus.PENDING else "Starting"
        typer.echo(
            f"{action} flow run {normalized_run_id} from step: {record.current_step}"
        )
        try:
            final_record = await controller.run_flow(normalized_run_id)
            typer.echo(
                f"Flow run {normalized_run_id} finished with status {final_record.status}"
            )
        finally:
            # Always release agent resources, even if run_flow raised.
            if agent_pool is not None:
                try:
                    await agent_pool.close()
                except Exception:
                    typer.echo("Failed to close agent pool cleanly", err=True)

    asyncio.run(_run_worker())
@ticket_flow_app.command("bootstrap")
def ticket_flow_bootstrap(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    force_new: bool = typer.Option(
        False, "--force-new", help="Always create a new run"
    ),
):
    """Bootstrap ticket_flow (seed TICKET-001 if needed) and start a run.

    If latest run is COMPLETED and new tickets are added, a new run is created
    (use --force-new to force a new run regardless of state)."""
    engine = _require_repo_config(repo, hub)
    _guard_unregistered_hub_repo(engine.repo_root, hub)
    db_path, artifacts_root, ticket_dir = _ticket_flow_paths(engine)
    ticket_dir.mkdir(parents=True, exist_ok=True)
    ticket_path = ticket_dir / "TICKET-001.md"

    # Unless --force-new was given, try to reuse or guard existing runs.
    store = _open_flow_store(engine)
    try:
        if not force_new:
            records = store.list_flow_runs(flow_type="ticket_flow")
            existing_run, reason = _resumable_run(records)
            if existing_run and reason == "active":
                # A RUNNING/PAUSED run exists: just (re)attach a worker.
                _start_ticket_flow_worker(
                    engine.repo_root, existing_run.id, is_terminal=False
                )
                typer.echo(f"Reused active run: {existing_run.id}")
                typer.echo(
                    f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {existing_run.id}"
                )
                return
            elif existing_run and reason == "completed_pending":
                # Latest run finished but some tickets are still open:
                # require an explicit --force-new to avoid silently
                # resetting dispatch history.
                existing_tickets = list_ticket_paths(ticket_dir)
                pending_count = len(
                    [t for t in existing_tickets if not ticket_is_done(t)]
                )
                if pending_count > 0:
                    typer.echo(
                        f"Warning: Latest run {existing_run.id} is COMPLETED with {pending_count} pending ticket(s)."
                    )
                    typer.echo(
                        "Use --force-new to start a fresh run (dispatch history will be reset)."
                    )
                    _raise_exit("Add --force-new to create a new run.")
    finally:
        store.close()

    # Seed a first planning ticket only when the directory is truly empty.
    existing_tickets = list_ticket_paths(ticket_dir)
    seeded = False
    if not existing_tickets and not ticket_path.exists():
        template = """---
agent: codex
done: false
title: Bootstrap ticket plan
goal: Capture scope and seed follow-up tickets
---

You are the first ticket in a new ticket_flow run.

- Read `.codex-autorunner/ISSUE.md`. If it is missing:
  - If GitHub is available, ask the user for the issue/PR URL or number and create `.codex-autorunner/ISSUE.md` from it.
  - If GitHub is not available, write `DISPATCH.md` with `mode: pause` asking the user to describe the work (or share a doc). After the reply, create `.codex-autorunner/ISSUE.md` with their input.
- If helpful, create or update workspace docs under `.codex-autorunner/workspace/`:
  - `active_context.md` for current context and links
  - `decisions.md` for decisions/rationale
  - `spec.md` for requirements and constraints
- Break the work into additional `TICKET-00X.md` files with clear owners/goals; keep this ticket open until they exist.
- Place any supporting artifacts in `.codex-autorunner/runs/<run_id>/dispatch/` if needed.
- Write `DISPATCH.md` to dispatch a message to the user:
  - Use `mode: pause` (handoff) to wait for user response. This pauses execution.
  - Use `mode: notify` (informational) to message the user but keep running.
"""
        ticket_path.write_text(template, encoding="utf-8")
        seeded = True

    # Create the run record, then hand it to a background worker process.
    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        run_id = str(uuid.uuid4())
        record = asyncio.run(
            controller.start_flow(
                input_data={},
                run_id=run_id,
                metadata={"seeded_ticket": seeded},
            )
        )
        _start_ticket_flow_worker(engine.repo_root, record.id, is_terminal=False)
    finally:
        # This CLI process only bootstraps; the worker owns the run now.
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Started ticket_flow run: {run_id}")
    typer.echo(
        f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {run_id}"
    )
@ticket_flow_app.command("start")
def ticket_flow_start(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    force_new: bool = typer.Option(
        False, "--force-new", help="Always create a new run"
    ),
):
    """Start or resume the latest ticket_flow run.

    If latest run is COMPLETED and new tickets are added, a new run is created
    (use --force-new to force a new run regardless of state)."""
    engine = _require_repo_config(repo, hub)
    _guard_unregistered_hub_repo(engine.repo_root, hub)
    _, _, ticket_dir = _ticket_flow_paths(engine)
    ticket_dir.mkdir(parents=True, exist_ok=True)

    # Unless --force-new was given, try to reuse or guard existing runs.
    store = _open_flow_store(engine)
    try:
        if not force_new:
            records = store.list_flow_runs(flow_type="ticket_flow")
            existing_run, reason = _resumable_run(records)
            if existing_run and reason == "active":
                # Validate tickets before reusing active run
                lint_errors = _validate_tickets(ticket_dir)
                if lint_errors:
                    typer.echo("Ticket validation failed:", err=True)
                    for err in lint_errors:
                        typer.echo(f" - {err}", err=True)
                    typer.echo("", err=True)
                    typer.echo(
                        "Fix the above errors before starting the ticket flow.",
                        err=True,
                    )
                    _raise_exit("")
                _start_ticket_flow_worker(
                    engine.repo_root, existing_run.id, is_terminal=False
                )
                typer.echo(f"Reused active run: {existing_run.id}")
                typer.echo(
                    f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {existing_run.id}"
                )
                return
            elif existing_run and reason == "completed_pending":
                # Latest run finished but tickets remain open: require an
                # explicit --force-new rather than silently resetting state.
                existing_tickets = list_ticket_paths(ticket_dir)
                pending_count = len(
                    [t for t in existing_tickets if not ticket_is_done(t)]
                )
                if pending_count > 0:
                    typer.echo(
                        f"Warning: Latest run {existing_run.id} is COMPLETED with {pending_count} pending ticket(s)."
                    )
                    typer.echo(
                        "Use --force-new to start a fresh run (dispatch history will be reset)."
                    )
                    _raise_exit("Add --force-new to create a new run.")

    finally:
        store.close()

    # Fresh-run path: validate tickets, then require at least one ticket.
    lint_errors = _validate_tickets(ticket_dir)
    if lint_errors:
        typer.echo("Ticket validation failed:", err=True)
        for err in lint_errors:
            typer.echo(f" - {err}", err=True)
        typer.echo("", err=True)
        typer.echo("Fix the above errors before starting the ticket flow.", err=True)
        _raise_exit("")
    if not list_ticket_paths(ticket_dir):
        _raise_exit(
            "No tickets found under .codex-autorunner/tickets. Use bootstrap first."
        )

    # Create the run record, then hand it to a background worker process.
    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        run_id = str(uuid.uuid4())
        record = asyncio.run(controller.start_flow(input_data={}, run_id=run_id))
        _start_ticket_flow_worker(engine.repo_root, record.id, is_terminal=False)
    finally:
        # This CLI process only launches; the worker owns the run now.
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Started ticket_flow run: {run_id}")
    typer.echo(
        f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {run_id}"
    )
|
2415
|
+
@ticket_flow_app.command("status")
def ticket_flow_status(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(None, "--run-id", help="Flow run ID"),
    output_json: bool = typer.Option(False, "--json", help="Emit JSON output"),
):
    """Show status for a ticket_flow run."""
    engine = _require_repo_config(repo, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)

    store = _open_flow_store(engine)
    try:
        # An explicit --run-id wins; otherwise fall back to the newest run.
        if normalized_run_id:
            record = store.get_flow_run(normalized_run_id)
        else:
            runs = store.list_flow_runs(flow_type="ticket_flow")
            record = runs[0] if runs else None
        if not record:
            _raise_exit("No ticket_flow runs found.")
        # Build the payload while the store is still open (it reads events).
        payload = _ticket_flow_status_payload(engine, record, store)
    finally:
        store.close()

    if output_json:
        typer.echo(json.dumps(payload, indent=2))
    else:
        _print_ticket_flow_status(payload)
@ticket_flow_app.command("resume")
def ticket_flow_resume(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(None, "--run-id", help="Flow run ID"),
):
    """Resume a paused ticket_flow run."""
    engine = _require_repo_config(repo, hub)
    _guard_unregistered_hub_repo(engine.repo_root, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)

    # Resolve the target run: explicit --run-id, or the newest run on file.
    store = _open_flow_store(engine)
    try:
        record = None
        if normalized_run_id:
            record = store.get_flow_run(normalized_run_id)
        else:
            records = store.list_flow_runs(flow_type="ticket_flow")
            record = records[0] if records else None
        if not record:
            _raise_exit("No ticket_flow runs found.")
        normalized_run_id = record.id
    finally:
        store.close()

    # Refuse to resume against a broken ticket directory.
    _, _, ticket_dir = _ticket_flow_paths(engine)
    lint_errors = _validate_tickets(ticket_dir)
    if lint_errors:
        typer.echo("Ticket validation failed:", err=True)
        for err in lint_errors:
            typer.echo(f" - {err}", err=True)
        typer.echo("", err=True)
        typer.echo("Fix the above errors before resuming the ticket flow.", err=True)
        _raise_exit("")

    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        try:
            # resume_flow raises ValueError for runs that are not resumable;
            # surface that as a clean CLI error instead of a traceback.
            updated = asyncio.run(controller.resume_flow(normalized_run_id))
        except ValueError as exc:
            _raise_exit(str(exc), cause=exc)
        _start_ticket_flow_worker(engine.repo_root, normalized_run_id)
    finally:
        # This CLI process only resumes; the worker drives the run.
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Resumed ticket_flow run: {updated.id}")
    typer.echo(
        f"Next: car flow ticket_flow status --repo {engine.repo_root} --run-id {updated.id}"
    )
@ticket_flow_app.command("stop")
def ticket_flow_stop(
    repo: Optional[Path] = typer.Option(None, "--repo", help="Repo path"),
    hub: Optional[Path] = typer.Option(None, "--hub", help="Hub root path"),
    run_id: Optional[str] = typer.Option(None, "--run-id", help="Flow run ID"),
):
    """Stop a ticket_flow run."""
    engine = _require_repo_config(repo, hub)
    normalized_run_id = _normalize_flow_run_id(run_id)

    store = _open_flow_store(engine)
    try:
        # An explicit --run-id wins; otherwise fall back to the newest run.
        if normalized_run_id:
            target = store.get_flow_run(normalized_run_id)
        else:
            runs = store.list_flow_runs(flow_type="ticket_flow")
            target = runs[0] if runs else None
        if not target:
            _raise_exit("No ticket_flow runs found.")
        normalized_run_id = target.id
    finally:
        store.close()

    controller, agent_pool = _ticket_flow_controller(engine)
    try:
        # Stop the worker process first, then mark the run stopped.
        _stop_ticket_flow_worker(engine.repo_root, normalized_run_id)
        updated = asyncio.run(controller.stop_flow(normalized_run_id))
    finally:
        controller.shutdown()
        asyncio.run(agent_pool.close())

    typer.echo(f"Stop requested for run: {updated.id} (status={updated.status.value})")
if __name__ == "__main__":
    # Allow direct script execution; delegates to the Typer application.
    app()