codex-autorunner 1.0.0__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/__init__.py +12 -1
- codex_autorunner/agents/codex/harness.py +1 -1
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/constants.py +3 -0
- codex_autorunner/agents/opencode/harness.py +6 -1
- codex_autorunner/agents/opencode/runtime.py +59 -18
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +36 -7
- codex_autorunner/bootstrap.py +226 -4
- codex_autorunner/cli.py +5 -1174
- codex_autorunner/codex_cli.py +20 -84
- codex_autorunner/core/__init__.py +20 -0
- codex_autorunner/core/about_car.py +119 -1
- codex_autorunner/core/app_server_ids.py +59 -0
- codex_autorunner/core/app_server_threads.py +17 -2
- codex_autorunner/core/app_server_utils.py +165 -0
- codex_autorunner/core/archive.py +349 -0
- codex_autorunner/core/codex_runner.py +6 -2
- codex_autorunner/core/config.py +433 -4
- codex_autorunner/core/context_awareness.py +38 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/drafts.py +58 -4
- codex_autorunner/core/exceptions.py +4 -0
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +96 -2
- codex_autorunner/core/flows/models.py +13 -0
- codex_autorunner/core/flows/reasons.py +52 -0
- codex_autorunner/core/flows/reconciler.py +134 -0
- codex_autorunner/core/flows/runtime.py +57 -4
- codex_autorunner/core/flows/store.py +142 -7
- codex_autorunner/core/flows/transition.py +27 -15
- codex_autorunner/core/flows/ux_helpers.py +272 -0
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/git_utils.py +62 -0
- codex_autorunner/core/hub.py +291 -20
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/notifications.py +14 -2
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +496 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/__init__.py +28 -0
- codex_autorunner/{integrations/agents → core/ports}/agent_backend.py +13 -8
- codex_autorunner/core/ports/backend_orchestrator.py +41 -0
- codex_autorunner/{integrations/agents → core/ports}/run_event.py +23 -6
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +62 -0
- codex_autorunner/core/supervisor_protocol.py +15 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/text_delta_coalescer.py +54 -0
- codex_autorunner/core/ticket_linter_cli.py +218 -0
- codex_autorunner/core/ticket_manager_cli.py +494 -0
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/update.py +4 -5
- codex_autorunner/core/update_paths.py +28 -0
- codex_autorunner/core/usage.py +164 -12
- codex_autorunner/core/utils.py +125 -15
- codex_autorunner/flows/review/__init__.py +17 -0
- codex_autorunner/{core/review.py → flows/review/service.py} +37 -34
- codex_autorunner/flows/ticket_flow/definition.py +52 -3
- codex_autorunner/integrations/agents/__init__.py +11 -19
- codex_autorunner/integrations/agents/backend_orchestrator.py +302 -0
- codex_autorunner/integrations/agents/codex_adapter.py +90 -0
- codex_autorunner/integrations/agents/codex_backend.py +177 -25
- codex_autorunner/integrations/agents/opencode_adapter.py +108 -0
- codex_autorunner/integrations/agents/opencode_backend.py +305 -32
- codex_autorunner/integrations/agents/runner.py +86 -0
- codex_autorunner/integrations/agents/wiring.py +279 -0
- codex_autorunner/integrations/app_server/client.py +7 -60
- codex_autorunner/integrations/app_server/env.py +2 -107
- codex_autorunner/{core/app_server_events.py → integrations/app_server/event_buffer.py} +15 -8
- codex_autorunner/integrations/telegram/adapter.py +65 -0
- codex_autorunner/integrations/telegram/config.py +46 -0
- codex_autorunner/integrations/telegram/constants.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/callbacks.py +7 -0
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +1496 -71
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +206 -48
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +20 -3
- codex_autorunner/integrations/telegram/handlers/messages.py +27 -1
- codex_autorunner/integrations/telegram/handlers/selections.py +61 -1
- codex_autorunner/integrations/telegram/helpers.py +22 -1
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +45 -10
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +338 -43
- codex_autorunner/integrations/telegram/transport.py +13 -4
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/routes/__init__.py +37 -76
- codex_autorunner/routes/agents.py +2 -137
- codex_autorunner/routes/analytics.py +2 -238
- codex_autorunner/routes/app_server.py +2 -131
- codex_autorunner/routes/base.py +2 -596
- codex_autorunner/routes/file_chat.py +4 -833
- codex_autorunner/routes/flows.py +4 -977
- codex_autorunner/routes/messages.py +4 -456
- codex_autorunner/routes/repos.py +2 -196
- codex_autorunner/routes/review.py +2 -147
- codex_autorunner/routes/sessions.py +2 -175
- codex_autorunner/routes/settings.py +2 -168
- codex_autorunner/routes/shared.py +2 -275
- codex_autorunner/routes/system.py +4 -193
- codex_autorunner/routes/usage.py +2 -86
- codex_autorunner/routes/voice.py +2 -119
- codex_autorunner/routes/workspace.py +2 -270
- codex_autorunner/server.py +4 -4
- codex_autorunner/static/agentControls.js +61 -16
- codex_autorunner/static/app.js +126 -14
- codex_autorunner/static/archive.js +826 -0
- codex_autorunner/static/archiveApi.js +37 -0
- codex_autorunner/static/autoRefresh.js +7 -7
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/dashboard.js +224 -171
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +114 -131
- codex_autorunner/static/index.html +375 -49
- codex_autorunner/static/messages.js +568 -87
- codex_autorunner/static/notifications.js +255 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/preserve.js +17 -0
- codex_autorunner/static/settings.js +128 -6
- codex_autorunner/static/smartRefresh.js +52 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9798 -6143
- codex_autorunner/static/tabs.js +152 -11
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminal.js +18 -0
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +137 -15
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +821 -98
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +39 -0
- codex_autorunner/static/workspace.js +389 -82
- codex_autorunner/static/workspaceFileBrowser.js +15 -13
- codex_autorunner/surfaces/__init__.py +5 -0
- codex_autorunner/surfaces/cli/__init__.py +6 -0
- codex_autorunner/surfaces/cli/cli.py +2534 -0
- codex_autorunner/surfaces/cli/codex_cli.py +20 -0
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/telegram/__init__.py +3 -0
- codex_autorunner/surfaces/web/__init__.py +1 -0
- codex_autorunner/surfaces/web/app.py +2223 -0
- codex_autorunner/surfaces/web/hub_jobs.py +192 -0
- codex_autorunner/surfaces/web/middleware.py +587 -0
- codex_autorunner/surfaces/web/pty_session.py +370 -0
- codex_autorunner/surfaces/web/review.py +6 -0
- codex_autorunner/surfaces/web/routes/__init__.py +82 -0
- codex_autorunner/surfaces/web/routes/agents.py +138 -0
- codex_autorunner/surfaces/web/routes/analytics.py +284 -0
- codex_autorunner/surfaces/web/routes/app_server.py +132 -0
- codex_autorunner/surfaces/web/routes/archive.py +357 -0
- codex_autorunner/surfaces/web/routes/base.py +615 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +1117 -0
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +1354 -0
- codex_autorunner/surfaces/web/routes/messages.py +490 -0
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +197 -0
- codex_autorunner/surfaces/web/routes/review.py +148 -0
- codex_autorunner/surfaces/web/routes/sessions.py +176 -0
- codex_autorunner/surfaces/web/routes/settings.py +169 -0
- codex_autorunner/surfaces/web/routes/shared.py +277 -0
- codex_autorunner/surfaces/web/routes/system.py +196 -0
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/routes/usage.py +89 -0
- codex_autorunner/surfaces/web/routes/voice.py +120 -0
- codex_autorunner/surfaces/web/routes/workspace.py +271 -0
- codex_autorunner/surfaces/web/runner_manager.py +25 -0
- codex_autorunner/surfaces/web/schemas.py +469 -0
- codex_autorunner/surfaces/web/static_assets.py +490 -0
- codex_autorunner/surfaces/web/static_refresh.py +86 -0
- codex_autorunner/surfaces/web/terminal_sessions.py +78 -0
- codex_autorunner/tickets/__init__.py +8 -1
- codex_autorunner/tickets/agent_pool.py +53 -4
- codex_autorunner/tickets/files.py +37 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +6 -1
- codex_autorunner/tickets/outbox.py +50 -2
- codex_autorunner/tickets/runner.py +396 -57
- codex_autorunner/web/__init__.py +5 -1
- codex_autorunner/web/app.py +2 -1949
- codex_autorunner/web/hub_jobs.py +2 -191
- codex_autorunner/web/middleware.py +2 -586
- codex_autorunner/web/pty_session.py +2 -369
- codex_autorunner/web/runner_manager.py +2 -24
- codex_autorunner/web/schemas.py +2 -376
- codex_autorunner/web/static_assets.py +4 -441
- codex_autorunner/web/static_refresh.py +2 -85
- codex_autorunner/web/terminal_sessions.py +2 -77
- codex_autorunner/workspace/paths.py +49 -33
- codex_autorunner-1.2.0.dist-info/METADATA +150 -0
- codex_autorunner-1.2.0.dist-info/RECORD +339 -0
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -2653
- codex_autorunner/core/static_assets.py +0 -55
- codex_autorunner-1.0.0.dist-info/METADATA +0 -246
- codex_autorunner-1.0.0.dist-info/RECORD +0 -251
- /codex_autorunner/{routes → surfaces/web/routes}/terminal_images.py +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1354 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import re
|
|
5
|
+
import shutil
|
|
6
|
+
import subprocess
|
|
7
|
+
import uuid
|
|
8
|
+
from dataclasses import asdict
|
|
9
|
+
from pathlib import Path, PurePosixPath
|
|
10
|
+
from typing import IO, Dict, Optional, Tuple, Union
|
|
11
|
+
from urllib.parse import quote
|
|
12
|
+
|
|
13
|
+
from fastapi import APIRouter, HTTPException, Request
|
|
14
|
+
from fastapi.responses import FileResponse, StreamingResponse
|
|
15
|
+
from pydantic import BaseModel, Field
|
|
16
|
+
|
|
17
|
+
from ....core.config import load_repo_config
|
|
18
|
+
from ....core.flows import (
|
|
19
|
+
FlowController,
|
|
20
|
+
FlowDefinition,
|
|
21
|
+
FlowEventType,
|
|
22
|
+
FlowRunRecord,
|
|
23
|
+
FlowRunStatus,
|
|
24
|
+
FlowStore,
|
|
25
|
+
)
|
|
26
|
+
from ....core.flows.reconciler import reconcile_flow_run
|
|
27
|
+
from ....core.flows.ux_helpers import (
|
|
28
|
+
bootstrap_check as ux_bootstrap_check,
|
|
29
|
+
)
|
|
30
|
+
from ....core.flows.ux_helpers import (
|
|
31
|
+
build_flow_status_snapshot,
|
|
32
|
+
ensure_worker,
|
|
33
|
+
issue_md_path,
|
|
34
|
+
seed_issue_from_github,
|
|
35
|
+
seed_issue_from_text,
|
|
36
|
+
)
|
|
37
|
+
from ....core.flows.worker_process import FlowWorkerHealth, check_worker_health
|
|
38
|
+
from ....core.runtime import RuntimeContext
|
|
39
|
+
from ....core.utils import atomic_write, find_repo_root
|
|
40
|
+
from ....flows.ticket_flow import build_ticket_flow_definition
|
|
41
|
+
from ....integrations.github.service import GitHubError, GitHubService
|
|
42
|
+
from ....tickets import AgentPool
|
|
43
|
+
from ....tickets.files import (
|
|
44
|
+
list_ticket_paths,
|
|
45
|
+
parse_ticket_index,
|
|
46
|
+
read_ticket,
|
|
47
|
+
safe_relpath,
|
|
48
|
+
)
|
|
49
|
+
from ....tickets.frontmatter import parse_markdown_frontmatter
|
|
50
|
+
from ....tickets.lint import lint_ticket_directory, lint_ticket_frontmatter
|
|
51
|
+
from ....tickets.outbox import parse_dispatch, resolve_outbox_paths
|
|
52
|
+
from ..schemas import (
|
|
53
|
+
TicketCreateRequest,
|
|
54
|
+
TicketDeleteResponse,
|
|
55
|
+
TicketResponse,
|
|
56
|
+
TicketUpdateRequest,
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
_logger = logging.getLogger(__name__)

# Tracked worker processes keyed by normalized run id:
# (process handle, stdout log file handle, stderr log file handle).
# Entries may hold None members; see _cleanup_worker_handle/_reap_dead_worker.
_active_workers: Dict[
    str, Tuple[Optional[subprocess.Popen], Optional[IO[bytes]], Optional[IO[bytes]]]
] = {}
# Per-(resolved repo root, flow type) caches so controllers/definitions are
# built once per process instead of per request.
_controller_cache: Dict[tuple[Path, str], FlowController] = {}
_definition_cache: Dict[tuple[Path, str], FlowDefinition] = {}
# Flow types this router can build definitions for.
_supported_flow_types = ("ticket_flow",)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _flow_paths(repo_root: Path) -> tuple[Path, Path]:
|
|
70
|
+
repo_root = repo_root.resolve()
|
|
71
|
+
db_path = repo_root / ".codex-autorunner" / "flows.db"
|
|
72
|
+
artifacts_root = repo_root / ".codex-autorunner" / "flows"
|
|
73
|
+
return db_path, artifacts_root
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _ticket_dir(repo_root: Path) -> Path:
|
|
77
|
+
repo_root = repo_root.resolve()
|
|
78
|
+
return repo_root / ".codex-autorunner" / "tickets"
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def _find_ticket_path_by_index(ticket_dir: Path, index: int) -> Optional[Path]:
    """Return the first ticket file whose parsed index equals *index*, or None."""
    return next(
        (
            candidate
            for candidate in list_ticket_paths(ticket_dir)
            if parse_ticket_index(candidate.name) == index
        ),
        None,
    )
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _require_flow_store(repo_root: Path) -> Optional[FlowStore]:
    """Open and initialize the flows database for *repo_root*.

    Returns the initialized ``FlowStore`` (caller owns it and must close it),
    or ``None`` when the database cannot be initialized.
    """
    db_path, _ = _flow_paths(repo_root)
    store = FlowStore(db_path)
    try:
        store.initialize()
        return store
    except Exception as exc:
        _logger.warning("Flows database unavailable at %s: %s", db_path, exc)
        # Bug fix: close the store opened above so its underlying connection
        # does not leak when initialization fails (previously it was dropped
        # without being closed).
        try:
            store.close()
        except Exception:
            pass
        return None
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def _safe_list_flow_runs(
    repo_root: Path, flow_type: Optional[str] = None, *, recover_stuck: bool = False
) -> list[FlowRunRecord]:
    """List flow runs for the repo, optionally reconciling stuck ones.

    Never raises: any store failure is logged at debug level and an empty
    list is returned. The store is always closed before returning.
    """
    db_path, _ = _flow_paths(repo_root)
    store = FlowStore(db_path)
    try:
        store.initialize()
        runs = store.list_flow_runs(flow_type=flow_type)
        if not recover_stuck:
            return runs
        # Recover flows stuck in active states whose workers have died.
        recovered: list[FlowRunRecord] = []
        for run in runs:
            recovered.append(
                reconcile_flow_run(repo_root, run, store, logger=_logger)[0]
            )
        return recovered
    except Exception as exc:
        _logger.debug("FlowStore list runs failed: %s", exc)
        return []
    finally:
        try:
            store.close()
        except Exception:
            pass
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _build_flow_definition(repo_root: Path, flow_type: str) -> FlowDefinition:
    """Build, validate, and cache the flow definition for *flow_type*.

    Raises HTTP 404 for unsupported flow types.
    """
    resolved_root = repo_root.resolve()
    cache_key = (resolved_root, flow_type)
    cached = _definition_cache.get(cache_key)
    if cached is not None:
        return cached

    if flow_type != "ticket_flow":
        raise HTTPException(status_code=404, detail=f"Unknown flow type: {flow_type}")

    config = load_repo_config(resolved_root)
    engine = RuntimeContext(
        repo_root=resolved_root,
        config=config,
    )
    definition = build_ticket_flow_definition(agent_pool=AgentPool(engine.config))

    definition.validate()
    _definition_cache[cache_key] = definition
    return definition
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def _get_flow_controller(repo_root: Path, flow_type: str) -> FlowController:
    """Return a cached, initialized FlowController for *flow_type*.

    Raises HTTP 503 when the controller cannot be initialized.
    """
    resolved_root = repo_root.resolve()
    cache_key = (resolved_root, flow_type)
    cached = _controller_cache.get(cache_key)
    if cached is not None:
        return cached

    db_path, artifacts_root = _flow_paths(resolved_root)
    controller = FlowController(
        definition=_build_flow_definition(resolved_root, flow_type),
        db_path=db_path,
        artifacts_root=artifacts_root,
    )
    try:
        controller.initialize()
    except Exception as exc:
        _logger.warning("Failed to initialize flow controller: %s", exc)
        raise HTTPException(
            status_code=503, detail="Flows unavailable; initialize the repo first."
        ) from exc
    _controller_cache[cache_key] = controller
    return controller
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _get_flow_record(repo_root: Path, run_id: str) -> FlowRunRecord:
    """Fetch one flow run record, raising the matching HTTP error when absent.

    Raises 503 when the flows database is unavailable, 404 when no record
    matches *run_id*. The store is always closed before returning.
    """
    store = _require_flow_store(repo_root)
    if store is None:
        raise HTTPException(status_code=503, detail="Flows database unavailable")
    try:
        record = store.get_flow_run(run_id)
    finally:
        try:
            store.close()
        except Exception:
            pass
    if record:
        return record
    raise HTTPException(status_code=404, detail=f"Flow run {run_id} not found")
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def _active_or_paused_run(records: list[FlowRunRecord]) -> Optional[FlowRunRecord]:
    """Return the newest record when it is running or paused, else None."""
    if not records:
        return None
    newest = records[0]
    active_states = (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED)
    return newest if newest.status in active_states else None
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _normalize_run_id(run_id: Union[str, uuid.UUID]) -> str:
|
|
198
|
+
try:
|
|
199
|
+
return str(uuid.UUID(str(run_id)))
|
|
200
|
+
except ValueError:
|
|
201
|
+
raise HTTPException(status_code=400, detail="Invalid run_id") from None
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def _validate_tickets(ticket_dir: Path) -> list[str]:
    """Validate all tickets in the directory and return a list of error messages."""
    if not ticket_dir.exists():
        return []

    # Directory-level problems first (e.g. duplicate indices).
    errors: list[str] = list(lint_ticket_directory(ticket_dir))

    # Then per-file frontmatter problems, prefixed with a short relative path.
    for path in list_ticket_paths(ticket_dir):
        _, ticket_errors = read_ticket(path)
        errors.extend(
            f"{path.relative_to(path.parent.parent)}: {err}" for err in ticket_errors
        )

    return errors
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def _cleanup_worker_handle(run_id: str) -> None:
    """Remove the tracked worker for *run_id*, terminating it and closing logs.

    Best-effort: every terminate/flush/close failure is swallowed.
    """
    handle = _active_workers.pop(run_id, None)
    if handle is None:
        return

    proc, stdout, stderr = handle
    if proc is not None and proc.poll() is None:
        try:
            proc.terminate()
        except Exception:
            pass

    for stream in (stdout, stderr):
        if stream is None or stream.closed:
            continue
        try:
            stream.flush()
        except Exception:
            pass
        try:
            stream.close()
        except Exception:
            pass
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _reap_dead_worker(run_id: str) -> None:
    """Clean up the tracked handle for *run_id* if its process has exited."""
    handle = _active_workers.get(run_id)
    if handle is None:
        return
    proc = handle[0]
    if proc is not None and proc.poll() is not None:
        _cleanup_worker_handle(run_id)
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
class FlowStartRequest(BaseModel):
    """Request body for starting (or resuming) a flow run."""

    # Arbitrary input payload forwarded to the flow run.
    input_data: Dict = Field(default_factory=dict)
    # Optional caller-supplied metadata attached to the run.
    metadata: Optional[Dict] = None
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
class BootstrapCheckResponse(BaseModel):
    """Result of probing whether ticket-flow bootstrap can proceed."""

    status: str
    # Whether GitHub-based seeding is available; None when not determined.
    github_available: Optional[bool] = None
    # Repository identifier when detected — exact format not shown here;
    # NOTE(review): confirm against ux_bootstrap_check's return value.
    repo: Optional[str] = None
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
class SeedIssueRequest(BaseModel):
    """Request body for seeding the issue document that drives a ticket flow."""

    issue_ref: Optional[str] = None  # GitHub issue number, #num, or URL
    plan_text: Optional[str] = None  # Freeform plan text when GitHub unavailable
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
class FlowWorkerHealthResponse(BaseModel):
    """Serializable view of a flow worker process's health."""

    status: str
    pid: Optional[int]
    is_alive: bool
    message: Optional[str] = None

    @classmethod
    def from_health(cls, health: FlowWorkerHealth) -> "FlowWorkerHealthResponse":
        """Copy the fields of a FlowWorkerHealth snapshot into this model."""
        return cls(
            status=health.status,
            pid=health.pid,
            is_alive=health.is_alive,
            message=health.message,
        )
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
class FlowStatusResponse(BaseModel):
    """API representation of one flow run's status."""

    id: str
    flow_type: str
    status: str
    current_step: Optional[str]
    created_at: str
    started_at: Optional[str]
    finished_at: Optional[str]
    error_message: Optional[str]
    # Raw per-run state blob as persisted by the flow.
    state: Dict = Field(default_factory=dict)
    # Convenience copy of state["reason_summary"] when that value is a string.
    reason_summary: Optional[str] = None
    ticket_progress: Optional[Dict[str, int]] = None
    last_event_seq: Optional[int] = None
    last_event_at: Optional[str] = None
    worker_health: Optional[FlowWorkerHealthResponse] = None

    @classmethod
    def from_record(
        cls,
        record: FlowRunRecord,
        *,
        last_event_seq: Optional[int] = None,
        last_event_at: Optional[str] = None,
        worker_health: Optional[FlowWorkerHealth] = None,
    ) -> "FlowStatusResponse":
        """Build a status response from a persisted FlowRunRecord.

        Event/worker enrichment is supplied by the caller; see
        _build_flow_status_response for the usual source of those values.
        """
        state = record.state or {}
        reason_summary = None
        # Only surface reason_summary when state is a dict holding a string.
        if isinstance(state, dict):
            value = state.get("reason_summary")
            if isinstance(value, str):
                reason_summary = value
        return cls(
            id=record.id,
            flow_type=record.flow_type,
            status=record.status.value,
            current_step=record.current_step,
            created_at=record.created_at,
            started_at=record.started_at,
            finished_at=record.finished_at,
            error_message=record.error_message,
            state=state,
            reason_summary=reason_summary,
            last_event_seq=last_event_seq,
            last_event_at=last_event_at,
            worker_health=(
                FlowWorkerHealthResponse.from_health(worker_health)
                if worker_health
                else None
            ),
        )
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
class FlowArtifactInfo(BaseModel):
    """Metadata describing one artifact produced by a flow run."""

    id: str
    kind: str
    path: str
    created_at: str
    metadata: Dict = Field(default_factory=dict)
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
def _build_flow_status_response(
    record: FlowRunRecord,
    repo_root: Path,
    *,
    store: Optional[FlowStore] = None,
) -> FlowStatusResponse:
    """Assemble a FlowStatusResponse for *record*, enriched from a snapshot."""
    snapshot = build_flow_status_snapshot(repo_root, record, store)
    response = FlowStatusResponse.from_record(
        record,
        last_event_seq=snapshot["last_event_seq"],
        last_event_at=snapshot["last_event_at"],
        worker_health=snapshot["worker_health"],
    )
    response.ticket_progress = snapshot.get("ticket_progress")
    snapshot_state = snapshot.get("state")
    if snapshot_state is not None:
        response.state = snapshot_state
    return response
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
def _start_flow_worker(repo_root: Path, run_id: str) -> Optional[subprocess.Popen]:
    """Spawn the worker process for *run_id*, or return None when one is reused."""
    normalized = _normalize_run_id(run_id)

    _reap_dead_worker(normalized)
    result = ensure_worker(repo_root, normalized)
    if result["status"] == "reused":
        _logger.info(
            "Worker already active for run %s (pid=%s), skipping spawn",
            normalized,
            result["health"].pid,
        )
        return None

    proc = result["proc"]
    _active_workers[normalized] = (proc, result["stdout"], result["stderr"])
    _logger.info("Started flow worker for run %s (pid=%d)", normalized, proc.pid)
    return proc
|
|
388
|
+
|
|
389
|
+
|
|
390
|
+
def _stop_worker(run_id: str, timeout: float = 10.0) -> None:
    """Stop the worker for *run_id*: terminate a tracked process (killing it
    after *timeout* seconds), or best-effort kill an untracked live pid."""
    normalized = _normalize_run_id(run_id)
    handle = _active_workers.get(normalized)
    if handle is None:
        # Nothing tracked here; the worker may still be alive from a previous
        # server process, so consult the on-disk health record.
        health = check_worker_health(find_repo_root(), normalized)
        if health.is_alive and health.pid:
            try:
                _logger.info(
                    "Stopping untracked worker for run %s (pid=%s)",
                    normalized,
                    health.pid,
                )
                subprocess.run(["kill", str(health.pid)], check=False)
            except Exception as exc:
                _logger.warning(
                    "Failed to stop untracked worker %s: %s", normalized, exc
                )
        return

    proc = handle[0]
    if proc is not None and proc.poll() is None:
        proc.terminate()
        try:
            proc.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            _logger.warning(
                "Worker for run %s did not exit in time, killing", normalized
            )
            proc.kill()
        except Exception as exc:
            _logger.warning("Error stopping worker %s: %s", normalized, exc)

    _cleanup_worker_handle(normalized)
|
|
423
|
+
|
|
424
|
+
|
|
425
|
+
def build_flow_routes() -> APIRouter:
|
|
426
|
+
router = APIRouter(prefix="/api/flows", tags=["flows"])
|
|
427
|
+
|
|
428
|
+
def _definition_info(definition: FlowDefinition) -> Dict:
|
|
429
|
+
return {
|
|
430
|
+
"type": definition.flow_type,
|
|
431
|
+
"name": definition.name,
|
|
432
|
+
"description": definition.description,
|
|
433
|
+
"input_schema": definition.input_schema or {},
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
def _resolve_outbox_for_record(record: FlowRunRecord, repo_root: Path):
|
|
437
|
+
workspace_root = Path(record.input_data.get("workspace_root") or repo_root)
|
|
438
|
+
runs_dir = Path(record.input_data.get("runs_dir") or ".codex-autorunner/runs")
|
|
439
|
+
return resolve_outbox_paths(
|
|
440
|
+
workspace_root=workspace_root, runs_dir=runs_dir, run_id=record.id
|
|
441
|
+
)
|
|
442
|
+
|
|
443
|
+
def _get_diff_stats_by_dispatch_seq(
|
|
444
|
+
repo_root: Path, *, run_id: str
|
|
445
|
+
) -> dict[int, dict[str, int]]:
|
|
446
|
+
"""Return mapping of dispatch_seq -> diff stats for the run."""
|
|
447
|
+
store = _require_flow_store(repo_root)
|
|
448
|
+
if store is None:
|
|
449
|
+
return {}
|
|
450
|
+
try:
|
|
451
|
+
events = store.get_events_by_type(run_id, FlowEventType.DIFF_UPDATED)
|
|
452
|
+
except Exception:
|
|
453
|
+
events = []
|
|
454
|
+
finally:
|
|
455
|
+
try:
|
|
456
|
+
store.close()
|
|
457
|
+
except Exception:
|
|
458
|
+
pass
|
|
459
|
+
|
|
460
|
+
by_seq: dict[int, dict[str, int]] = {}
|
|
461
|
+
for ev in events:
|
|
462
|
+
data = ev.data or {}
|
|
463
|
+
try:
|
|
464
|
+
seq_val = int(data.get("dispatch_seq") or 0)
|
|
465
|
+
except Exception:
|
|
466
|
+
continue
|
|
467
|
+
if seq_val <= 0:
|
|
468
|
+
continue
|
|
469
|
+
by_seq[seq_val] = {
|
|
470
|
+
"insertions": int(data.get("insertions") or 0),
|
|
471
|
+
"deletions": int(data.get("deletions") or 0),
|
|
472
|
+
"files_changed": int(data.get("files_changed") or 0),
|
|
473
|
+
}
|
|
474
|
+
return by_seq
|
|
475
|
+
|
|
476
|
+
@router.get("")
|
|
477
|
+
async def list_flow_definitions():
|
|
478
|
+
repo_root = find_repo_root()
|
|
479
|
+
definitions = [
|
|
480
|
+
_definition_info(_build_flow_definition(repo_root, flow_type))
|
|
481
|
+
for flow_type in _supported_flow_types
|
|
482
|
+
]
|
|
483
|
+
return {"definitions": definitions}
|
|
484
|
+
|
|
485
|
+
@router.get("/runs", response_model=list[FlowStatusResponse])
|
|
486
|
+
async def list_runs(flow_type: Optional[str] = None, reconcile: bool = False):
|
|
487
|
+
repo_root = find_repo_root()
|
|
488
|
+
store = _require_flow_store(repo_root)
|
|
489
|
+
records: list[FlowRunRecord] = []
|
|
490
|
+
try:
|
|
491
|
+
if store:
|
|
492
|
+
records = store.list_flow_runs(flow_type=flow_type)
|
|
493
|
+
if reconcile:
|
|
494
|
+
records = [
|
|
495
|
+
reconcile_flow_run(repo_root, rec, store, logger=_logger)[0]
|
|
496
|
+
for rec in records
|
|
497
|
+
]
|
|
498
|
+
else:
|
|
499
|
+
records = _safe_list_flow_runs(
|
|
500
|
+
repo_root, flow_type=flow_type, recover_stuck=reconcile
|
|
501
|
+
)
|
|
502
|
+
return [
|
|
503
|
+
_build_flow_status_response(rec, repo_root, store=store)
|
|
504
|
+
for rec in records
|
|
505
|
+
]
|
|
506
|
+
finally:
|
|
507
|
+
if store:
|
|
508
|
+
store.close()
|
|
509
|
+
|
|
510
|
+
@router.get("/{flow_type}")
|
|
511
|
+
async def get_flow_definition(flow_type: str):
|
|
512
|
+
repo_root = find_repo_root()
|
|
513
|
+
if flow_type not in _supported_flow_types:
|
|
514
|
+
raise HTTPException(
|
|
515
|
+
status_code=404, detail=f"Unknown flow type: {flow_type}"
|
|
516
|
+
)
|
|
517
|
+
definition = _build_flow_definition(repo_root, flow_type)
|
|
518
|
+
return _definition_info(definition)
|
|
519
|
+
|
|
520
|
+
async def _start_flow(
    flow_type: str,
    request: FlowStartRequest,
    *,
    force_new: bool = False,
    validate_tickets: bool = True,
) -> FlowStatusResponse:
    """Start a flow run of ``flow_type``, or reuse an active/paused one.

    Args:
        flow_type: Must be one of ``_supported_flow_types`` (404 otherwise).
        request: Start payload carrying ``input_data`` and ``metadata``.
        force_new: Skip reuse of an existing active/paused run.
        validate_tickets: For ``ticket_flow``, lint tickets before starting.

    Raises:
        HTTPException: 404 for unknown flow types, 400 for missing or
            invalid tickets.
    """
    if flow_type not in _supported_flow_types:
        raise HTTPException(
            status_code=404, detail=f"Unknown flow type: {flow_type}"
        )

    repo_root = find_repo_root()
    controller = _get_flow_controller(repo_root, flow_type)

    if flow_type == "ticket_flow" and validate_tickets:
        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
        # A forced fresh start with no tickets at all cannot do anything
        # useful, so point the caller at the bootstrap endpoint instead.
        if force_new and not list_ticket_paths(ticket_dir):
            raise HTTPException(
                status_code=400,
                detail=(
                    "No tickets found under .codex-autorunner/tickets. "
                    "Use /api/flows/ticket_flow/bootstrap to seed tickets."
                ),
            )

        lint_errors = _validate_tickets(ticket_dir)
        if lint_errors:
            raise HTTPException(
                status_code=400,
                detail={
                    "message": "Ticket validation failed",
                    "errors": lint_errors,
                },
            )

    # Reuse an active/paused run unless force_new is requested.
    if not force_new:
        runs = _safe_list_flow_runs(
            repo_root, flow_type=flow_type, recover_stuck=True
        )
        active = _active_or_paused_run(runs)
        if active:
            # Clean up any dead worker first, then (re)start one before
            # building the response so the run reports a live worker.
            _reap_dead_worker(active.id)
            _start_flow_worker(repo_root, active.id)
            store = _require_flow_store(repo_root)
            try:
                response = _build_flow_status_response(
                    active, repo_root, store=store
                )
            finally:
                if store:
                    store.close()
            # Flag for clients that no new run was created.
            response.state = response.state or {}
            response.state["hint"] = "active_run_reused"
            return response

    run_id = _normalize_run_id(uuid.uuid4())

    record = await controller.start_flow(
        input_data=request.input_data,
        run_id=run_id,
        metadata=request.metadata,
    )

    _start_flow_worker(repo_root, run_id)

    store = _require_flow_store(repo_root)
    try:
        return _build_flow_status_response(record, repo_root, store=store)
    finally:
        if store:
            store.close()
@router.post("/{flow_type}/start", response_model=FlowStatusResponse)
async def start_flow(flow_type: str, request: FlowStartRequest):
    """Start (or reuse) a run of ``flow_type``; ``metadata.force_new`` forces a fresh run."""
    metadata = request.metadata
    if not isinstance(metadata, dict):
        metadata = {}
    return await _start_flow(
        flow_type, request, force_new=bool(metadata.get("force_new"))
    )
@router.get("/ticket_flow/bootstrap-check", response_model=BootstrapCheckResponse)
async def bootstrap_check():
    """
    Determine whether ISSUE.md already exists and whether GitHub is available
    for fetching an issue before bootstrapping the ticket flow.
    """
    outcome = ux_bootstrap_check(
        find_repo_root(), github_service_factory=GitHubService
    )
    if outcome.status != "ready":
        return BootstrapCheckResponse(
            status=outcome.status,
            github_available=outcome.github_available,
            repo=outcome.repo_slug,
        )
    return BootstrapCheckResponse(status="ready")
@router.post("/ticket_flow/seed-issue")
async def seed_issue(request: SeedIssueRequest):
    """Create .codex-autorunner/ISSUE.md from GitHub issue or user-provided text."""
    repo_root = find_repo_root()
    issue_path = issue_md_path(repo_root)
    issue_path.parent.mkdir(parents=True, exist_ok=True)

    # GitHub-backed path
    if request.issue_ref:
        try:
            seed = seed_issue_from_github(
                repo_root, request.issue_ref, github_service_factory=GitHubService
            )
            atomic_write(issue_path, seed.content)
            return {
                "status": "ok",
                "source": "github",
                "issue_number": seed.issue_number,
                "repo": seed.repo_slug,
            }
        except GitHubError as exc:
            # GitHubError carries its own HTTP status (e.g. 404/403); pass it through.
            raise HTTPException(
                status_code=exc.status_code, detail=str(exc)
            ) from exc
        except RuntimeError as exc:
            # Configuration/usage errors surface as 400s.
            raise HTTPException(status_code=400, detail=str(exc)) from exc
        except Exception as exc:  # pragma: no cover - defensive
            _logger.exception("Failed to seed ISSUE.md from GitHub: %s", exc)
            raise HTTPException(
                status_code=500, detail="Failed to fetch issue from GitHub"
            ) from exc

    # Manual text path
    if request.plan_text:
        content = seed_issue_from_text(request.plan_text)
        atomic_write(issue_path, content)
        return {"status": "ok", "source": "user_input"}

    # Neither a GitHub reference nor raw text was supplied.
    raise HTTPException(
        status_code=400,
        detail="issue_ref or plan_text is required to seed ISSUE.md",
    )
@router.post("/ticket_flow/bootstrap", response_model=FlowStatusResponse)
async def bootstrap_ticket_flow(request: Optional[FlowStartRequest] = None):
    """Bootstrap the ticket flow: seed TICKET-001.md if needed, then start.

    Reuses an active/paused run unless ``metadata.force_new`` is set;
    otherwise seeds a bootstrap ticket when none exist and delegates to
    ``_start_flow``.
    """
    repo_root = find_repo_root()
    ticket_dir = repo_root / ".codex-autorunner" / "tickets"
    ticket_dir.mkdir(parents=True, exist_ok=True)
    ticket_path = ticket_dir / "TICKET-001.md"
    existing_tickets = list_ticket_paths(ticket_dir)
    tickets_exist = bool(existing_tickets)
    flow_request = request or FlowStartRequest()
    meta = flow_request.metadata if isinstance(flow_request.metadata, dict) else {}
    force_new = bool(meta.get("force_new"))

    if not force_new:
        records = _safe_list_flow_runs(
            repo_root, flow_type="ticket_flow", recover_stuck=True
        )
        active = _active_or_paused_run(records)
        if active:
            # Validate tickets before reusing active run
            lint_errors = _validate_tickets(ticket_dir)
            if lint_errors:
                raise HTTPException(
                    status_code=400,
                    detail={
                        "message": "Ticket validation failed",
                        "errors": lint_errors,
                    },
                )
            _reap_dead_worker(active.id)
            _start_flow_worker(repo_root, active.id)
            store = _require_flow_store(repo_root)
            try:
                resp = _build_flow_status_response(active, repo_root, store=store)
            finally:
                if store:
                    store.close()
            resp.state = resp.state or {}
            resp.state["hint"] = "active_run_reused"
            return resp

    seeded = False
    if not tickets_exist and not ticket_path.exists():
        template = """---
agent: codex
done: false
title: Bootstrap ticket plan
goal: Capture scope and seed follow-up tickets
---

You are the first ticket in a new ticket_flow run.

- Read `.codex-autorunner/ISSUE.md`. If it is missing:
  - If GitHub is available, ask the user for the issue/PR URL or number and create `.codex-autorunner/ISSUE.md` from it.
  - If GitHub is not available, write `DISPATCH.md` with `mode: pause` asking the user to describe the work (or share a doc). After the reply, create `.codex-autorunner/ISSUE.md` with their input.
- If helpful, create or update workspace docs under `.codex-autorunner/workspace/`:
  - `active_context.md` for current context and links
  - `decisions.md` for decisions/rationale
  - `spec.md` for requirements and constraints
- Break the work into additional `TICKET-00X.md` files with clear owners/goals; keep this ticket open until they exist.
- Place any supporting artifacts in `.codex-autorunner/runs/<run_id>/dispatch/` if needed.
- Write `DISPATCH.md` to dispatch a message to the user:
  - Use `mode: pause` (handoff) to wait for user response. This pauses execution.
  - Use `mode: notify` (informational) to message the user but keep running.
"""
        ticket_path.write_text(template, encoding="utf-8")
        seeded = True

    # ``meta`` was already normalized above; no need to recompute it here.
    payload = FlowStartRequest(
        input_data=flow_request.input_data,
        metadata=meta | {"seeded_ticket": seeded},
    )
    # Skip ticket validation only when pre-existing tickets are being reused
    # without a forced restart (they may still be mid-edit).
    validate_tickets = not tickets_exist or force_new
    return await _start_flow(
        "ticket_flow",
        payload,
        force_new=force_new,
        validate_tickets=validate_tickets,
    )
@router.get("/ticket_flow/tickets")
async def list_ticket_files():
    """List all ticket files with parsed frontmatter, bodies, and diff stats.

    Diff statistics are accumulated from DIFF_UPDATED events of the current
    active/paused ticket_flow run, keyed by the event's ``ticket_id``
    (presumably the ticket's repo-relative path — matches ``rel_path`` below;
    TODO confirm against the event producer).
    """
    repo_root = find_repo_root()
    ticket_dir = repo_root / ".codex-autorunner" / "tickets"
    # Compute cumulative diff stats per ticket for the active/paused run.
    runs = _safe_list_flow_runs(
        repo_root, flow_type="ticket_flow", recover_stuck=True
    )
    active_run = _active_or_paused_run(runs)
    diff_by_ticket: dict[str, dict[str, int]] = {}
    if active_run:
        store = _require_flow_store(repo_root)
        if store is not None:
            try:
                events = store.get_events_by_type(
                    active_run.id, FlowEventType.DIFF_UPDATED
                )
            except Exception:
                # Best-effort: missing diff stats should not break listing.
                events = []
            finally:
                try:
                    store.close()
                except Exception:
                    pass
            for ev in events:
                data = ev.data or {}
                ticket_id = data.get("ticket_id")
                if not isinstance(ticket_id, str) or not ticket_id.strip():
                    continue
                stats = diff_by_ticket.setdefault(
                    ticket_id,
                    {"insertions": 0, "deletions": 0, "files_changed": 0},
                )
                stats["insertions"] += int(data.get("insertions") or 0)
                stats["deletions"] += int(data.get("deletions") or 0)
                stats["files_changed"] += int(data.get("files_changed") or 0)

    tickets = []
    for path in list_ticket_paths(ticket_dir):
        doc, errors = read_ticket(path)
        idx = getattr(doc, "index", None) or parse_ticket_index(path.name)
        # When frontmatter is broken, still surface the raw ticket body so
        # the user can inspect and fix the file in the UI instead of seeing
        # an empty card.
        try:
            raw_body = path.read_text(encoding="utf-8")
            _, parsed_body = parse_markdown_frontmatter(raw_body)
        except Exception:
            parsed_body = None
        rel_path = safe_relpath(path, repo_root)
        tickets.append(
            {
                "path": rel_path,
                "index": idx,
                "frontmatter": asdict(doc.frontmatter) if doc else None,
                "body": doc.body if doc else parsed_body,
                "errors": errors,
                "diff_stats": diff_by_ticket.get(rel_path),
            }
        )
    return {
        "ticket_dir": safe_relpath(ticket_dir, repo_root),
        "tickets": tickets,
    }
@router.get("/ticket_flow/tickets/{index}", response_model=TicketResponse)
async def get_ticket(index: int):
    """Fetch a single ticket by index; return raw body even if frontmatter is invalid."""
    root = find_repo_root()
    tickets_dir = root / ".codex-autorunner" / "tickets"
    found = _find_ticket_path_by_index(tickets_dir, index)
    if not found:
        raise HTTPException(status_code=404, detail=f"Ticket {index:03d} not found")

    parsed_doc, lint_errors = read_ticket(found)
    if parsed_doc and not lint_errors:
        # Happy path: fully valid ticket.
        return TicketResponse(
            path=safe_relpath(found, root),
            index=parsed_doc.index,
            frontmatter=asdict(parsed_doc.frontmatter),
            body=parsed_doc.body,
        )

    # Mirror list endpoint: surface raw body for repair when frontmatter is broken.
    try:
        fallback_fm, fallback_body = parse_markdown_frontmatter(
            found.read_text(encoding="utf-8")
        )
    except Exception:
        fallback_fm, fallback_body = {}, None

    return TicketResponse(
        path=safe_relpath(found, root),
        index=parse_ticket_index(found.name),
        frontmatter=fallback_fm or {},
        body=fallback_body,
    )
@router.post("/ticket_flow/tickets", response_model=TicketResponse)
async def create_ticket(request: TicketCreateRequest):
    """Create a new ticket with auto-generated index."""
    root = find_repo_root()
    tickets_dir = root / ".codex-autorunner" / "tickets"
    tickets_dir.mkdir(parents=True, exist_ok=True)

    # Find next available index
    taken = {
        parsed
        for p in list_ticket_paths(tickets_dir)
        if (parsed := parse_ticket_index(p.name)) is not None
    }
    new_index = 1
    while new_index in taken:
        new_index += 1

    # Build frontmatter (quote scalars to avoid YAML parse issues with colons, etc.)
    def _quote(val: Optional[str]) -> str:
        if val is None:
            return ""
        return json.dumps(val)  # JSON string is valid YAML scalar

    parts = ["---\n", f"agent: {_quote(request.agent)}\n", "done: false\n"]
    if request.title:
        parts.append(f"title: {_quote(request.title)}\n")
    if request.goal:
        parts.append(f"goal: {_quote(request.goal)}\n")
    parts.append("---\n\n")
    parts.append(f"{request.body}\n")

    new_path = tickets_dir / f"TICKET-{new_index:03d}.md"
    atomic_write(new_path, "".join(parts))

    # Read back to validate and return
    doc, errors = read_ticket(new_path)
    if errors or not doc:
        raise HTTPException(
            status_code=400, detail=f"Failed to create valid ticket: {errors}"
        )

    return TicketResponse(
        path=safe_relpath(new_path, root),
        index=doc.index,
        frontmatter=asdict(doc.frontmatter),
        body=doc.body,
    )
@router.put("/ticket_flow/tickets/{index}", response_model=TicketResponse)
async def update_ticket(index: int, request: TicketUpdateRequest):
    """Update an existing ticket by index."""
    root = find_repo_root()
    tickets_dir = root / ".codex-autorunner" / "tickets"
    target = _find_ticket_path_by_index(tickets_dir, index)
    if not target:
        raise HTTPException(status_code=404, detail=f"Ticket {index:03d} not found")

    # Validate frontmatter before saving
    frontmatter, _body = parse_markdown_frontmatter(request.content)
    _, lint_errors = lint_ticket_frontmatter(frontmatter)
    if lint_errors:
        raise HTTPException(
            status_code=400,
            detail={"message": "Invalid ticket frontmatter", "errors": lint_errors},
        )

    atomic_write(target, request.content)

    # Read back to return validated data
    doc, read_errors = read_ticket(target)
    if read_errors or not doc:
        raise HTTPException(
            status_code=400, detail=f"Failed to save valid ticket: {read_errors}"
        )

    return TicketResponse(
        path=safe_relpath(target, root),
        index=doc.index,
        frontmatter=asdict(doc.frontmatter),
        body=doc.body,
    )
@router.delete("/ticket_flow/tickets/{index}", response_model=TicketDeleteResponse)
async def delete_ticket(index: int):
    """Delete a ticket by index."""
    root = find_repo_root()
    tickets_dir = root / ".codex-autorunner" / "tickets"
    target = _find_ticket_path_by_index(tickets_dir, index)
    if not target:
        raise HTTPException(status_code=404, detail=f"Ticket {index:03d} not found")

    # Remember the relative path before unlinking so we can report it back.
    removed_rel = safe_relpath(target, root)
    target.unlink()

    return TicketDeleteResponse(status="deleted", index=index, path=removed_rel)
@router.post("/{run_id}/stop", response_model=FlowStatusResponse)
async def stop_flow(run_id: uuid.UUID):
    """Stop a running flow: kill its worker, then mark the run stopped."""
    normalized = _normalize_run_id(run_id)
    root = find_repo_root()
    record = _get_flow_record(root, normalized)
    controller = _get_flow_controller(root, record.flow_type)

    # Terminate the worker before transitioning the run's state.
    _stop_worker(normalized)

    stopped = await controller.stop_flow(normalized)
    store = _require_flow_store(root)
    try:
        return _build_flow_status_response(stopped, root, store=store)
    finally:
        if store:
            store.close()
@router.post("/{run_id}/resume", response_model=FlowStatusResponse)
async def resume_flow(run_id: uuid.UUID):
    """Resume a paused flow run and restart its worker.

    For ticket_flow runs, tickets are linted first so a resume cannot start
    against an invalid ticket set.
    """
    run_id = _normalize_run_id(run_id)
    repo_root = find_repo_root()
    record = _get_flow_record(repo_root, run_id)
    controller = _get_flow_controller(repo_root, record.flow_type)

    # Validate tickets before resuming ticket_flow
    if record.flow_type == "ticket_flow":
        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
        lint_errors = _validate_tickets(ticket_dir)
        if lint_errors:
            raise HTTPException(
                status_code=400,
                detail={
                    "message": "Ticket validation failed",
                    "errors": lint_errors,
                },
            )

    # Transition the run first, then clean up any dead worker and spawn a
    # fresh one for the resumed run.
    updated = await controller.resume_flow(run_id)
    _reap_dead_worker(run_id)
    _start_flow_worker(repo_root, run_id)

    store = _require_flow_store(repo_root)
    try:
        return _build_flow_status_response(updated, repo_root, store=store)
    finally:
        if store:
            store.close()
@router.post("/{run_id}/reconcile", response_model=FlowStatusResponse)
async def reconcile_flow(run_id: uuid.UUID):
    """Reconcile a single flow run against on-disk state and return its status."""
    normalized = _normalize_run_id(run_id)
    root = find_repo_root()
    current = _get_flow_record(root, normalized)
    store = _require_flow_store(root)
    if not store:
        raise HTTPException(status_code=503, detail="Flow store unavailable")
    try:
        reconciled, *_rest = reconcile_flow_run(root, current, store, logger=_logger)
        return _build_flow_status_response(reconciled, root, store=store)
    finally:
        store.close()
@router.post("/{run_id}/archive")
async def archive_flow(
    run_id: uuid.UUID, delete_run: bool = True, force: bool = False
):
    """Archive a completed flow by moving tickets to the run's artifact directory.

    Args:
        run_id: The flow run to archive.
        delete_run: Whether to delete the run record after archiving.
        force: If True, allow archiving flows stuck in stopping/paused state
            by force-stopping the worker first.
    """
    run_id = _normalize_run_id(run_id)
    repo_root = find_repo_root()
    record = _get_flow_record(repo_root, run_id)

    # Allow archiving terminal flows, or force-archiving stuck flows
    if not FlowRunStatus(record.status).is_terminal():
        if force and record.status in (
            FlowRunStatus.STOPPING,
            FlowRunStatus.PAUSED,
        ):
            # Force-stop any remaining worker before archiving
            _stop_worker(run_id, timeout=2.0)
            _logger.info(
                "Force-archiving flow %s in %s state", run_id, record.status.value
            )
        else:
            raise HTTPException(
                status_code=400,
                detail="Can only archive completed/stopped/failed flows (use force=true for stuck flows)",
            )

    # Move tickets to run artifacts directory
    _, artifacts_root = _flow_paths(repo_root)
    archive_dir = artifacts_root / run_id / "archived_tickets"
    archive_dir.mkdir(parents=True, exist_ok=True)

    ticket_dir = repo_root / ".codex-autorunner" / "tickets"
    archived_count = 0
    for ticket_path in list_ticket_paths(ticket_dir):
        dest = archive_dir / ticket_path.name
        shutil.move(str(ticket_path), str(dest))
        archived_count += 1

    # Archive runs directory (dispatch_history, reply_history, etc.) to dismiss notifications
    outbox_paths = _resolve_outbox_for_record(record, repo_root)
    run_dir = outbox_paths.run_dir
    if run_dir.exists() and run_dir.is_dir():
        archived_runs_dir = artifacts_root / run_id / "archived_runs"
        shutil.move(str(run_dir), str(archived_runs_dir))

    # Delete run record if requested
    if delete_run:
        store = _require_flow_store(repo_root)
        if store:
            store.delete_flow_run(run_id)
            store.close()

    return {
        "status": "archived",
        "run_id": run_id,
        "tickets_archived": archived_count,
    }
@router.get("/{run_id}/status", response_model=FlowStatusResponse)
async def get_flow_status(run_id: uuid.UUID, reconcile: bool = False):
    """Return the current status of a flow run, optionally reconciling first."""
    normalized = _normalize_run_id(run_id)
    root = find_repo_root()

    # Clear out a dead worker before reading state so the status is fresh.
    _reap_dead_worker(normalized)

    current = _get_flow_record(root, normalized)
    store = _require_flow_store(root)
    try:
        if reconcile and store:
            current = reconcile_flow_run(root, current, store, logger=_logger)[0]
        return _build_flow_status_response(current, root, store=store)
    finally:
        if store:
            store.close()
@router.get("/{run_id}/events")
async def stream_flow_events(
    run_id: uuid.UUID, request: Request, after: Optional[int] = None
):
    """Stream flow events as Server-Sent Events.

    Resumption: the explicit ``after`` query parameter wins; otherwise the
    SSE ``Last-Event-ID`` header (sent by browsers on reconnect) is used when
    it parses as an integer sequence number.
    """
    run_id = _normalize_run_id(run_id)
    repo_root = find_repo_root()
    record = _get_flow_record(repo_root, run_id)
    controller = _get_flow_controller(repo_root, record.flow_type)

    async def event_stream():
        try:
            resume_after = after
            if resume_after is None:
                last_event_id = request.headers.get("Last-Event-ID")
                if last_event_id:
                    try:
                        resume_after = int(last_event_id)
                    except ValueError:
                        # Non-numeric header: ignore and stream from the start.
                        _logger.debug(
                            "Invalid Last-Event-ID %s for run %s",
                            last_event_id,
                            run_id,
                        )
            async for event in controller.stream_events(
                run_id, after_seq=resume_after
            ):
                data = event.model_dump(mode="json")
                # Emit the SSE ``id:`` field so clients can resume via
                # Last-Event-ID after a dropped connection.
                yield f"id: {event.seq}\ndata: {json.dumps(data)}\n\n"
        except Exception as e:
            _logger.exception("Error streaming events for run %s: %s", run_id, e)
            raise

    return StreamingResponse(
        event_stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            # Disable nginx proxy buffering so events flush immediately.
            "X-Accel-Buffering": "no",
        },
    )
@router.get("/{run_id}/dispatch_history")
async def get_dispatch_history(run_id: str):
    """Get dispatch history for a flow run.

    Returns all dispatches (agent->human communications) for this run.
    """
    normalized = _normalize_run_id(run_id)
    repo_root = find_repo_root()
    record = _get_flow_record(repo_root, normalized)
    paths = _resolve_outbox_for_record(record, repo_root)

    # Pull diff stats from FlowStore keyed by dispatch sequence number so we
    # can enrich dispatch history entries without relying on DISPATCH.md metadata.
    diff_by_seq = _get_diff_stats_by_dispatch_seq(repo_root, run_id=normalized)

    history_entries = []
    history_dir = paths.dispatch_history_dir
    if history_dir.exists() and history_dir.is_dir():
        # Entries are directories named by sequence; newest first.
        for entry in sorted(
            [p for p in history_dir.iterdir() if p.is_dir()],
            key=lambda p: p.name,
            reverse=True,
        ):
            dispatch_path = entry / "DISPATCH.md"
            dispatch, errors = (
                parse_dispatch(dispatch_path)
                if dispatch_path.exists()
                else (None, ["Dispatch file missing"])
            )
            dispatch_dict = asdict(dispatch) if dispatch else None
            if dispatch_dict and dispatch:
                # ``is_handoff`` is a property, so asdict() does not include it.
                dispatch_dict["is_handoff"] = dispatch.is_handoff
                # Add structured diff stats (per turn summary), matched by seq.
                try:
                    entry_seq_int = int(entry.name)
                except Exception:
                    entry_seq_int = 0
                if entry_seq_int and entry_seq_int in diff_by_seq:
                    dispatch_dict["diff_stats"] = diff_by_seq[entry_seq_int]
            attachments = []
            for child in sorted(entry.rglob("*")):
                # Everything except DISPATCH.md itself is a user-visible
                # attachment; hidden paths and directories are skipped.
                if child.name == "DISPATCH.md":
                    continue
                rel = child.relative_to(entry).as_posix()
                if any(part.startswith(".") for part in Path(rel).parts):
                    continue
                if child.is_dir():
                    continue
                attachments.append(
                    {
                        "name": child.name,
                        "rel_path": rel,
                        "path": safe_relpath(child, repo_root),
                        "size": child.stat().st_size if child.is_file() else None,
                        "url": f"api/flows/{normalized}/dispatch_history/{entry.name}/{quote(rel)}",
                    }
                )
            history_entries.append(
                {
                    "seq": entry.name,
                    "dispatch": dispatch_dict,
                    "errors": errors,
                    "attachments": attachments,
                    "path": safe_relpath(entry, repo_root),
                }
            )

    return {"run_id": normalized, "history": history_entries}
@router.get("/{run_id}/reply_history/{seq}/{file_path:path}")
def get_reply_history_file(run_id: str, seq: str, file_path: str):
    """Serve a file from a run's reply-history directory.

    ``seq`` must be a 4-digit sequence; ``file_path`` must be a bare file
    name (no separators or traversal), matching how reply history is laid
    out on disk.
    """
    repo_root = find_repo_root()
    # Normalize the run id before any lookup, consistent with the other
    # run-scoped endpoints (store keys use normalized ids).
    normalized = _normalize_run_id(run_id)
    db_path, _ = _flow_paths(repo_root)
    store = FlowStore(db_path)
    try:
        store.initialize()
        record = store.get_flow_run(normalized)
    finally:
        try:
            store.close()
        except Exception:
            pass
    if not record:
        raise HTTPException(status_code=404, detail="Run not found")

    if not (len(seq) == 4 and seq.isdigit()):
        raise HTTPException(status_code=400, detail="Invalid seq")
    # Reject traversal and anything that is not a plain file name.
    if ".." in file_path or file_path.startswith("/"):
        raise HTTPException(status_code=400, detail="Invalid file path")
    filename = os.path.basename(file_path)
    if filename != file_path:
        raise HTTPException(status_code=400, detail="Invalid file path")

    input_data = dict(record.input_data or {})
    workspace_root = Path(input_data.get("workspace_root") or repo_root)
    runs_dir = Path(input_data.get("runs_dir") or ".codex-autorunner/runs")
    from ....tickets.replies import resolve_reply_paths

    reply_paths = resolve_reply_paths(
        workspace_root=workspace_root, runs_dir=runs_dir, run_id=normalized
    )
    target = reply_paths.reply_history_dir / seq / filename
    if not target.exists() or not target.is_file():
        raise HTTPException(status_code=404, detail="File not found")
    return FileResponse(path=str(target), filename=filename)
@router.get("/{run_id}/dispatch_history/{seq}/{file_path:path}")
async def get_dispatch_file(run_id: str, seq: str, file_path: str):
    """Get an attachment file from a dispatch history entry.

    ``seq`` must be a 4-digit sequence directory; ``file_path`` is a
    relative path inside that directory, restricted to safe characters and
    checked against traversal before serving.
    """
    normalized = _normalize_run_id(run_id)
    repo_root = find_repo_root()
    record = _get_flow_record(repo_root, normalized)
    paths = _resolve_outbox_for_record(record, repo_root)

    base_history = paths.dispatch_history_dir.resolve()

    seq_clean = seq.strip()
    if not re.fullmatch(r"[0-9]{4}", seq_clean):
        raise HTTPException(
            status_code=400, detail="Invalid dispatch history sequence"
        )

    history_dir = (base_history / seq_clean).resolve()
    if not history_dir.is_relative_to(base_history) or not history_dir.is_dir():
        raise HTTPException(
            status_code=404, detail=f"Dispatch history not found for run {run_id}"
        )

    file_rel = PurePosixPath(file_path)
    if file_rel.is_absolute() or ".." in file_rel.parts or "\\" in file_path:
        raise HTTPException(status_code=400, detail="Invalid dispatch file path")

    # Whitelist path components to a conservative character set.
    safe_parts = [part for part in file_rel.parts if part not in {"", "."}]
    if any(not re.fullmatch(r"[A-Za-z0-9._-]+", part) for part in safe_parts):
        raise HTTPException(status_code=400, detail="Invalid dispatch file path")

    # Resolve once, inside the try: the original resolved twice and the
    # first .resolve() sat outside the except, letting OSError escape as a 500.
    try:
        resolved = (history_dir / Path(*safe_parts)).resolve()
    except OSError as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc

    if not resolved.exists():
        raise HTTPException(status_code=404, detail="File not found")

    # Final containment check against symlink escapes.
    if not resolved.is_relative_to(history_dir):
        raise HTTPException(
            status_code=403,
            detail="Access denied: file outside dispatch history directory",
        )

    return FileResponse(resolved, filename=resolved.name)
@router.get("/{run_id}/artifacts", response_model=list[FlowArtifactInfo])
|
|
1286
|
+
async def list_flow_artifacts(run_id: str):
|
|
1287
|
+
normalized = _normalize_run_id(run_id)
|
|
1288
|
+
repo_root = find_repo_root()
|
|
1289
|
+
record = _get_flow_record(repo_root, normalized)
|
|
1290
|
+
controller = _get_flow_controller(repo_root, record.flow_type)
|
|
1291
|
+
|
|
1292
|
+
artifacts = controller.get_artifacts(normalized)
|
|
1293
|
+
return [
|
|
1294
|
+
FlowArtifactInfo(
|
|
1295
|
+
id=art.id,
|
|
1296
|
+
kind=art.kind,
|
|
1297
|
+
path=art.path,
|
|
1298
|
+
created_at=art.created_at,
|
|
1299
|
+
metadata=art.metadata,
|
|
1300
|
+
)
|
|
1301
|
+
for art in artifacts
|
|
1302
|
+
]
|
|
1303
|
+
|
|
1304
|
+
@router.get("/{run_id}/artifact")
|
|
1305
|
+
async def get_flow_artifact(run_id: str, kind: Optional[str] = None):
|
|
1306
|
+
normalized = _normalize_run_id(run_id)
|
|
1307
|
+
repo_root = find_repo_root()
|
|
1308
|
+
record = _get_flow_record(repo_root, normalized)
|
|
1309
|
+
controller = _get_flow_controller(repo_root, record.flow_type)
|
|
1310
|
+
|
|
1311
|
+
artifacts_root = controller.get_artifacts_dir(normalized)
|
|
1312
|
+
if not artifacts_root:
|
|
1313
|
+
from fastapi import HTTPException
|
|
1314
|
+
|
|
1315
|
+
raise HTTPException(
|
|
1316
|
+
status_code=404, detail=f"Artifact directory not found for run {run_id}"
|
|
1317
|
+
)
|
|
1318
|
+
|
|
1319
|
+
artifacts = controller.get_artifacts(normalized)
|
|
1320
|
+
|
|
1321
|
+
if kind:
|
|
1322
|
+
matching = [a for a in artifacts if a.kind == kind]
|
|
1323
|
+
else:
|
|
1324
|
+
matching = artifacts
|
|
1325
|
+
|
|
1326
|
+
if not matching:
|
|
1327
|
+
from fastapi import HTTPException
|
|
1328
|
+
|
|
1329
|
+
raise HTTPException(
|
|
1330
|
+
status_code=404,
|
|
1331
|
+
detail=f"No artifact found for run {run_id} with kind={kind}",
|
|
1332
|
+
)
|
|
1333
|
+
|
|
1334
|
+
artifact = matching[0]
|
|
1335
|
+
artifact_path = Path(artifact.path)
|
|
1336
|
+
|
|
1337
|
+
if not artifact_path.exists():
|
|
1338
|
+
from fastapi import HTTPException
|
|
1339
|
+
|
|
1340
|
+
raise HTTPException(
|
|
1341
|
+
status_code=404, detail=f"Artifact file not found: {artifact.path}"
|
|
1342
|
+
)
|
|
1343
|
+
|
|
1344
|
+
if not artifact_path.resolve().is_relative_to(artifacts_root.resolve()):
|
|
1345
|
+
from fastapi import HTTPException
|
|
1346
|
+
|
|
1347
|
+
raise HTTPException(
|
|
1348
|
+
status_code=403,
|
|
1349
|
+
detail="Access denied: artifact path outside run directory",
|
|
1350
|
+
)
|
|
1351
|
+
|
|
1352
|
+
return FileResponse(artifact_path, filename=artifact_path.name)
|
|
1353
|
+
|
|
1354
|
+
return router
|