codex-autorunner 1.0.0-py3-none-any.whl → 1.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/__init__.py +12 -1
- codex_autorunner/agents/codex/harness.py +1 -1
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/constants.py +3 -0
- codex_autorunner/agents/opencode/harness.py +6 -1
- codex_autorunner/agents/opencode/runtime.py +59 -18
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +36 -7
- codex_autorunner/bootstrap.py +226 -4
- codex_autorunner/cli.py +5 -1174
- codex_autorunner/codex_cli.py +20 -84
- codex_autorunner/core/__init__.py +20 -0
- codex_autorunner/core/about_car.py +119 -1
- codex_autorunner/core/app_server_ids.py +59 -0
- codex_autorunner/core/app_server_threads.py +17 -2
- codex_autorunner/core/app_server_utils.py +165 -0
- codex_autorunner/core/archive.py +349 -0
- codex_autorunner/core/codex_runner.py +6 -2
- codex_autorunner/core/config.py +433 -4
- codex_autorunner/core/context_awareness.py +38 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/drafts.py +58 -4
- codex_autorunner/core/exceptions.py +4 -0
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +96 -2
- codex_autorunner/core/flows/models.py +13 -0
- codex_autorunner/core/flows/reasons.py +52 -0
- codex_autorunner/core/flows/reconciler.py +134 -0
- codex_autorunner/core/flows/runtime.py +57 -4
- codex_autorunner/core/flows/store.py +142 -7
- codex_autorunner/core/flows/transition.py +27 -15
- codex_autorunner/core/flows/ux_helpers.py +272 -0
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/git_utils.py +62 -0
- codex_autorunner/core/hub.py +291 -20
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/notifications.py +14 -2
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +496 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/__init__.py +28 -0
- codex_autorunner/{integrations/agents → core/ports}/agent_backend.py +13 -8
- codex_autorunner/core/ports/backend_orchestrator.py +41 -0
- codex_autorunner/{integrations/agents → core/ports}/run_event.py +23 -6
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +62 -0
- codex_autorunner/core/supervisor_protocol.py +15 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/text_delta_coalescer.py +54 -0
- codex_autorunner/core/ticket_linter_cli.py +218 -0
- codex_autorunner/core/ticket_manager_cli.py +494 -0
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/update.py +4 -5
- codex_autorunner/core/update_paths.py +28 -0
- codex_autorunner/core/usage.py +164 -12
- codex_autorunner/core/utils.py +125 -15
- codex_autorunner/flows/review/__init__.py +17 -0
- codex_autorunner/{core/review.py → flows/review/service.py} +37 -34
- codex_autorunner/flows/ticket_flow/definition.py +52 -3
- codex_autorunner/integrations/agents/__init__.py +11 -19
- codex_autorunner/integrations/agents/backend_orchestrator.py +302 -0
- codex_autorunner/integrations/agents/codex_adapter.py +90 -0
- codex_autorunner/integrations/agents/codex_backend.py +177 -25
- codex_autorunner/integrations/agents/opencode_adapter.py +108 -0
- codex_autorunner/integrations/agents/opencode_backend.py +305 -32
- codex_autorunner/integrations/agents/runner.py +86 -0
- codex_autorunner/integrations/agents/wiring.py +279 -0
- codex_autorunner/integrations/app_server/client.py +7 -60
- codex_autorunner/integrations/app_server/env.py +2 -107
- codex_autorunner/{core/app_server_events.py → integrations/app_server/event_buffer.py} +15 -8
- codex_autorunner/integrations/telegram/adapter.py +65 -0
- codex_autorunner/integrations/telegram/config.py +46 -0
- codex_autorunner/integrations/telegram/constants.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/callbacks.py +7 -0
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +1496 -71
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +206 -48
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +20 -3
- codex_autorunner/integrations/telegram/handlers/messages.py +27 -1
- codex_autorunner/integrations/telegram/handlers/selections.py +61 -1
- codex_autorunner/integrations/telegram/helpers.py +22 -1
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +45 -10
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +338 -43
- codex_autorunner/integrations/telegram/transport.py +13 -4
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/routes/__init__.py +37 -76
- codex_autorunner/routes/agents.py +2 -137
- codex_autorunner/routes/analytics.py +2 -238
- codex_autorunner/routes/app_server.py +2 -131
- codex_autorunner/routes/base.py +2 -596
- codex_autorunner/routes/file_chat.py +4 -833
- codex_autorunner/routes/flows.py +4 -977
- codex_autorunner/routes/messages.py +4 -456
- codex_autorunner/routes/repos.py +2 -196
- codex_autorunner/routes/review.py +2 -147
- codex_autorunner/routes/sessions.py +2 -175
- codex_autorunner/routes/settings.py +2 -168
- codex_autorunner/routes/shared.py +2 -275
- codex_autorunner/routes/system.py +4 -193
- codex_autorunner/routes/usage.py +2 -86
- codex_autorunner/routes/voice.py +2 -119
- codex_autorunner/routes/workspace.py +2 -270
- codex_autorunner/server.py +4 -4
- codex_autorunner/static/agentControls.js +61 -16
- codex_autorunner/static/app.js +126 -14
- codex_autorunner/static/archive.js +826 -0
- codex_autorunner/static/archiveApi.js +37 -0
- codex_autorunner/static/autoRefresh.js +7 -7
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/dashboard.js +224 -171
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +114 -131
- codex_autorunner/static/index.html +375 -49
- codex_autorunner/static/messages.js +568 -87
- codex_autorunner/static/notifications.js +255 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/preserve.js +17 -0
- codex_autorunner/static/settings.js +128 -6
- codex_autorunner/static/smartRefresh.js +52 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9798 -6143
- codex_autorunner/static/tabs.js +152 -11
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminal.js +18 -0
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +137 -15
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +821 -98
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +39 -0
- codex_autorunner/static/workspace.js +389 -82
- codex_autorunner/static/workspaceFileBrowser.js +15 -13
- codex_autorunner/surfaces/__init__.py +5 -0
- codex_autorunner/surfaces/cli/__init__.py +6 -0
- codex_autorunner/surfaces/cli/cli.py +2534 -0
- codex_autorunner/surfaces/cli/codex_cli.py +20 -0
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/telegram/__init__.py +3 -0
- codex_autorunner/surfaces/web/__init__.py +1 -0
- codex_autorunner/surfaces/web/app.py +2223 -0
- codex_autorunner/surfaces/web/hub_jobs.py +192 -0
- codex_autorunner/surfaces/web/middleware.py +587 -0
- codex_autorunner/surfaces/web/pty_session.py +370 -0
- codex_autorunner/surfaces/web/review.py +6 -0
- codex_autorunner/surfaces/web/routes/__init__.py +82 -0
- codex_autorunner/surfaces/web/routes/agents.py +138 -0
- codex_autorunner/surfaces/web/routes/analytics.py +284 -0
- codex_autorunner/surfaces/web/routes/app_server.py +132 -0
- codex_autorunner/surfaces/web/routes/archive.py +357 -0
- codex_autorunner/surfaces/web/routes/base.py +615 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +1117 -0
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +1354 -0
- codex_autorunner/surfaces/web/routes/messages.py +490 -0
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +197 -0
- codex_autorunner/surfaces/web/routes/review.py +148 -0
- codex_autorunner/surfaces/web/routes/sessions.py +176 -0
- codex_autorunner/surfaces/web/routes/settings.py +169 -0
- codex_autorunner/surfaces/web/routes/shared.py +277 -0
- codex_autorunner/surfaces/web/routes/system.py +196 -0
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/routes/usage.py +89 -0
- codex_autorunner/surfaces/web/routes/voice.py +120 -0
- codex_autorunner/surfaces/web/routes/workspace.py +271 -0
- codex_autorunner/surfaces/web/runner_manager.py +25 -0
- codex_autorunner/surfaces/web/schemas.py +469 -0
- codex_autorunner/surfaces/web/static_assets.py +490 -0
- codex_autorunner/surfaces/web/static_refresh.py +86 -0
- codex_autorunner/surfaces/web/terminal_sessions.py +78 -0
- codex_autorunner/tickets/__init__.py +8 -1
- codex_autorunner/tickets/agent_pool.py +53 -4
- codex_autorunner/tickets/files.py +37 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +6 -1
- codex_autorunner/tickets/outbox.py +50 -2
- codex_autorunner/tickets/runner.py +396 -57
- codex_autorunner/web/__init__.py +5 -1
- codex_autorunner/web/app.py +2 -1949
- codex_autorunner/web/hub_jobs.py +2 -191
- codex_autorunner/web/middleware.py +2 -586
- codex_autorunner/web/pty_session.py +2 -369
- codex_autorunner/web/runner_manager.py +2 -24
- codex_autorunner/web/schemas.py +2 -376
- codex_autorunner/web/static_assets.py +4 -441
- codex_autorunner/web/static_refresh.py +2 -85
- codex_autorunner/web/terminal_sessions.py +2 -77
- codex_autorunner/workspace/paths.py +49 -33
- codex_autorunner-1.2.0.dist-info/METADATA +150 -0
- codex_autorunner-1.2.0.dist-info/RECORD +339 -0
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -2653
- codex_autorunner/core/static_assets.py +0 -55
- codex_autorunner-1.0.0.dist-info/METADATA +0 -246
- codex_autorunner-1.0.0.dist-info/RECORD +0 -251
- /codex_autorunner/{routes → surfaces/web/routes}/terminal_images.py +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/top_level.txt +0 -0
codex_autorunner/routes/flows.py
CHANGED
@@ -1,980 +1,7 @@
-
-import logging
-import os
-import re
-import shutil
-import subprocess
-import uuid
-from dataclasses import asdict
-from pathlib import Path, PurePosixPath
-from typing import IO, Dict, Optional, Tuple, Union
-from urllib.parse import quote
+"""Backward-compatible flow routes."""
 
-
-from fastapi.responses import FileResponse, StreamingResponse
-from pydantic import BaseModel, Field
+import sys
 
-from ..
-from ..core.flows import (
-    FlowController,
-    FlowDefinition,
-    FlowRunRecord,
-    FlowRunStatus,
-    FlowStore,
-)
-from ..core.flows.store import UNSET
-from ..core.flows.transition import resolve_flow_transition
-from ..core.flows.worker_process import (
-    FlowWorkerHealth,
-    check_worker_health,
-    clear_worker_metadata,
-    spawn_flow_worker,
-)
-from ..core.utils import atomic_write, find_repo_root
-from ..flows.ticket_flow import build_ticket_flow_definition
-from ..tickets import AgentPool
-from ..tickets.files import (
-    list_ticket_paths,
-    parse_ticket_index,
-    read_ticket,
-    safe_relpath,
-)
-from ..tickets.frontmatter import parse_markdown_frontmatter
-from ..tickets.lint import lint_ticket_frontmatter
-from ..tickets.outbox import parse_dispatch, resolve_outbox_paths
-from ..web.schemas import (
-    TicketCreateRequest,
-    TicketDeleteResponse,
-    TicketResponse,
-    TicketUpdateRequest,
-)
+from ..surfaces.web.routes import flows as _flows
 
-
-
-_active_workers: Dict[
-    str, Tuple[Optional[subprocess.Popen], Optional[IO[bytes]], Optional[IO[bytes]]]
-] = {}
-_controller_cache: Dict[tuple[Path, str], FlowController] = {}
-_definition_cache: Dict[tuple[Path, str], FlowDefinition] = {}
-_supported_flow_types = ("ticket_flow",)
-
-
-def _flow_paths(repo_root: Path) -> tuple[Path, Path]:
-    repo_root = repo_root.resolve()
-    db_path = repo_root / ".codex-autorunner" / "flows.db"
-    artifacts_root = repo_root / ".codex-autorunner" / "flows"
-    return db_path, artifacts_root
-
-
-def _require_flow_store(repo_root: Path) -> Optional[FlowStore]:
-    db_path, _ = _flow_paths(repo_root)
-    store = FlowStore(db_path)
-    try:
-        store.initialize()
-        return store
-    except Exception as exc:
-        _logger.warning("Flows database unavailable at %s: %s", db_path, exc)
-        return None
-
-
-def _safe_list_flow_runs(
-    repo_root: Path, flow_type: Optional[str] = None, *, recover_stuck: bool = False
-) -> list[FlowRunRecord]:
-    db_path, _ = _flow_paths(repo_root)
-    store = FlowStore(db_path)
-    try:
-        store.initialize()
-        records = store.list_flow_runs(flow_type=flow_type)
-        if recover_stuck:
-            # Recover any flows stuck in active states with dead workers
-            records = [
-                _maybe_recover_stuck_flow(repo_root, rec, store) for rec in records
-            ]
-        return records
-    except Exception as exc:
-        _logger.debug("FlowStore list runs failed: %s", exc)
-        return []
-    finally:
-        try:
-            store.close()
-        except Exception:
-            pass
-
-
-def _build_flow_definition(repo_root: Path, flow_type: str) -> FlowDefinition:
-    repo_root = repo_root.resolve()
-    key = (repo_root, flow_type)
-    if key in _definition_cache:
-        return _definition_cache[key]
-
-    if flow_type == "ticket_flow":
-        engine = Engine(repo_root)
-        agent_pool = AgentPool(engine.config)
-        definition = build_ticket_flow_definition(agent_pool=agent_pool)
-    else:
-        raise HTTPException(status_code=404, detail=f"Unknown flow type: {flow_type}")
-
-    definition.validate()
-    _definition_cache[key] = definition
-    return definition
-
-
-def _get_flow_controller(repo_root: Path, flow_type: str) -> FlowController:
-    repo_root = repo_root.resolve()
-    key = (repo_root, flow_type)
-    if key in _controller_cache:
-        return _controller_cache[key]
-
-    db_path, artifacts_root = _flow_paths(repo_root)
-    definition = _build_flow_definition(repo_root, flow_type)
-
-    controller = FlowController(
-        definition=definition,
-        db_path=db_path,
-        artifacts_root=artifacts_root,
-    )
-    try:
-        controller.initialize()
-    except Exception as exc:
-        _logger.warning("Failed to initialize flow controller: %s", exc)
-        raise HTTPException(
-            status_code=503, detail="Flows unavailable; initialize the repo first."
-        ) from exc
-    _controller_cache[key] = controller
-    return controller
-
-
-def _get_flow_record(repo_root: Path, run_id: str) -> FlowRunRecord:
-    store = _require_flow_store(repo_root)
-    if store is None:
-        raise HTTPException(status_code=503, detail="Flows database unavailable")
-    try:
-        record = store.get_flow_run(run_id)
-    finally:
-        try:
-            store.close()
-        except Exception:
-            pass
-    if not record:
-        raise HTTPException(status_code=404, detail=f"Flow run {run_id} not found")
-    return record
-
-
-def _active_or_paused_run(records: list[FlowRunRecord]) -> Optional[FlowRunRecord]:
-    if not records:
-        return None
-    latest = records[0]
-    if latest.status in (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED):
-        return latest
-    return None
-
-
-def _normalize_run_id(run_id: Union[str, uuid.UUID]) -> str:
-    try:
-        return str(uuid.UUID(str(run_id)))
-    except ValueError:
-        raise HTTPException(status_code=400, detail="Invalid run_id") from None
-
-
-def _cleanup_worker_handle(run_id: str) -> None:
-    handle = _active_workers.pop(run_id, None)
-    if not handle:
-        return
-
-    proc, stdout, stderr = handle
-    if proc and proc.poll() is None:
-        try:
-            proc.terminate()
-        except Exception:
-            pass
-
-    for stream in (stdout, stderr):
-        if stream and not stream.closed:
-            try:
-                stream.flush()
-            except Exception:
-                pass
-            try:
-                stream.close()
-            except Exception:
-                pass
-
-
-def _reap_dead_worker(run_id: str) -> None:
-    handle = _active_workers.get(run_id)
-    if not handle:
-        return
-    proc, *_ = handle
-    if proc and proc.poll() is not None:
-        _cleanup_worker_handle(run_id)
-
-
-def _ensure_worker_not_stale(health: FlowWorkerHealth) -> None:
-    # Clear metadata if stale to allow clean respawn.
-    if health.status in {"dead", "mismatch", "invalid"}:
-        try:
-            clear_worker_metadata(health.artifact_path.parent)
-        except Exception:
-            _logger.debug("Failed to clear worker metadata: %s", health.artifact_path)
-
-
-def _maybe_recover_stuck_flow(
-    repo_root: Path, record: FlowRunRecord, store: FlowStore
-) -> FlowRunRecord:
-    """
-    Reconcile persisted flow state with worker health and inner ticket_engine status.
-
-    Delegates decision logic to a pure transition resolver to keep recovery predictable
-    and exhaustively testable.
-    """
-    if record.status not in (
-        FlowRunStatus.RUNNING,
-        FlowRunStatus.STOPPING,
-        FlowRunStatus.PAUSED,
-    ):
-        return record
-
-    health = check_worker_health(repo_root, record.id)
-
-    decision = resolve_flow_transition(record, health)
-
-    if (
-        decision.status == record.status
-        and decision.finished_at == record.finished_at
-        and decision.state == (record.state or {})
-    ):
-        return record
-
-    _logger.info(
-        "Recovering flow %s: %s -> %s (%s)",
-        record.id,
-        record.status.value,
-        decision.status.value,
-        decision.note or "reconcile",
-    )
-
-    updated = store.update_flow_run_status(
-        run_id=record.id,
-        status=decision.status,
-        state=decision.state,
-        finished_at=decision.finished_at if decision.finished_at else UNSET,
-    )
-    _ensure_worker_not_stale(health)
-    return updated or record
-
-
-class FlowStartRequest(BaseModel):
-    input_data: Dict = Field(default_factory=dict)
-    metadata: Optional[Dict] = None
-
-
-class FlowStatusResponse(BaseModel):
-    id: str
-    flow_type: str
-    status: str
-    current_step: Optional[str]
-    created_at: str
-    started_at: Optional[str]
-    finished_at: Optional[str]
-    error_message: Optional[str]
-    state: Dict = Field(default_factory=dict)
-
-    @classmethod
-    def from_record(cls, record: FlowRunRecord) -> "FlowStatusResponse":
-        return cls(
-            id=record.id,
-            flow_type=record.flow_type,
-            status=record.status.value,
-            current_step=record.current_step,
-            created_at=record.created_at,
-            started_at=record.started_at,
-            finished_at=record.finished_at,
-            error_message=record.error_message,
-            state=record.state,
-        )
-
-
-class FlowArtifactInfo(BaseModel):
-    id: str
-    kind: str
-    path: str
-    created_at: str
-    metadata: Dict = Field(default_factory=dict)
-
-
-def _start_flow_worker(repo_root: Path, run_id: str) -> Optional[subprocess.Popen]:
-    normalized_run_id = _normalize_run_id(run_id)
-
-    health = check_worker_health(repo_root, normalized_run_id)
-    _ensure_worker_not_stale(health)
-    if health.is_alive:
-        _logger.info(
-            "Worker already active for run %s (pid=%s), skipping spawn",
-            normalized_run_id,
-            health.pid,
-        )
-        return None
-
-    _reap_dead_worker(normalized_run_id)
-
-    proc, stdout_handle, stderr_handle = spawn_flow_worker(repo_root, normalized_run_id)
-    _active_workers[normalized_run_id] = (proc, stdout_handle, stderr_handle)
-    _logger.info("Started flow worker for run %s (pid=%d)", normalized_run_id, proc.pid)
-    return proc
-
-
-def _stop_worker(run_id: str, timeout: float = 10.0) -> None:
-    normalized_run_id = _normalize_run_id(run_id)
-    handle = _active_workers.get(normalized_run_id)
-    if not handle:
-        health = check_worker_health(find_repo_root(), normalized_run_id)
-        if health.is_alive and health.pid:
-            try:
-                _logger.info(
-                    "Stopping untracked worker for run %s (pid=%s)",
-                    normalized_run_id,
-                    health.pid,
-                )
-                subprocess.run(["kill", str(health.pid)], check=False)
-            except Exception as exc:
-                _logger.warning(
-                    "Failed to stop untracked worker %s: %s", normalized_run_id, exc
-                )
-        return
-
-    proc, *_ = handle
-    if proc and proc.poll() is None:
-        proc.terminate()
-        try:
-            proc.wait(timeout=timeout)
-        except subprocess.TimeoutExpired:
-            _logger.warning(
-                "Worker for run %s did not exit in time, killing", normalized_run_id
-            )
-            proc.kill()
-        except Exception as exc:
-            _logger.warning("Error stopping worker %s: %s", normalized_run_id, exc)
-
-    _cleanup_worker_handle(normalized_run_id)
-
-
-def build_flow_routes() -> APIRouter:
-    router = APIRouter(prefix="/api/flows", tags=["flows"])
-
-    def _definition_info(definition: FlowDefinition) -> Dict:
-        return {
-            "type": definition.flow_type,
-            "name": definition.name,
-            "description": definition.description,
-            "input_schema": definition.input_schema or {},
-        }
-
-    def _resolve_outbox_for_record(record: FlowRunRecord, repo_root: Path):
-        workspace_root = Path(record.input_data.get("workspace_root") or repo_root)
-        runs_dir = Path(record.input_data.get("runs_dir") or ".codex-autorunner/runs")
-        return resolve_outbox_paths(
-            workspace_root=workspace_root, runs_dir=runs_dir, run_id=record.id
-        )
-
-    @router.get("")
-    async def list_flow_definitions():
-        repo_root = find_repo_root()
-        definitions = [
-            _definition_info(_build_flow_definition(repo_root, flow_type))
-            for flow_type in _supported_flow_types
-        ]
-        return {"definitions": definitions}
-
-    @router.get("/runs", response_model=list[FlowStatusResponse])
-    async def list_runs(flow_type: Optional[str] = None):
-        repo_root = find_repo_root()
-        records = _safe_list_flow_runs(
-            repo_root, flow_type=flow_type, recover_stuck=True
-        )
-        return [FlowStatusResponse.from_record(rec) for rec in records]
-
-    @router.get("/{flow_type}")
-    async def get_flow_definition(flow_type: str):
-        repo_root = find_repo_root()
-        if flow_type not in _supported_flow_types:
-            raise HTTPException(
-                status_code=404, detail=f"Unknown flow type: {flow_type}"
-            )
-        definition = _build_flow_definition(repo_root, flow_type)
-        return _definition_info(definition)
-
-    async def _start_flow(
-        flow_type: str, request: FlowStartRequest, *, force_new: bool = False
-    ) -> FlowStatusResponse:
-        if flow_type not in _supported_flow_types:
-            raise HTTPException(
-                status_code=404, detail=f"Unknown flow type: {flow_type}"
-            )
-
-        repo_root = find_repo_root()
-        controller = _get_flow_controller(repo_root, flow_type)
-
-        # Reuse an active/paused run unless force_new is requested.
-        if not force_new:
-            runs = _safe_list_flow_runs(
-                repo_root, flow_type=flow_type, recover_stuck=True
-            )
-            active = _active_or_paused_run(runs)
-            if active:
-                _reap_dead_worker(active.id)
-                _start_flow_worker(repo_root, active.id)
-                response = FlowStatusResponse.from_record(active)
-                response.state = response.state or {}
-                response.state["hint"] = "active_run_reused"
-                return response
-
-        run_id = _normalize_run_id(uuid.uuid4())
-
-        record = await controller.start_flow(
-            input_data=request.input_data,
-            run_id=run_id,
-            metadata=request.metadata,
-        )
-
-        _start_flow_worker(repo_root, run_id)
-
-        return FlowStatusResponse.from_record(record)
-
-    @router.post("/{flow_type}/start", response_model=FlowStatusResponse)
-    async def start_flow(flow_type: str, request: FlowStartRequest):
-        meta = request.metadata if isinstance(request.metadata, dict) else {}
-        force_new = bool(meta.get("force_new"))
-        return await _start_flow(flow_type, request, force_new=force_new)
-
-    @router.post("/ticket_flow/bootstrap", response_model=FlowStatusResponse)
-    async def bootstrap_ticket_flow(request: Optional[FlowStartRequest] = None):
-        repo_root = find_repo_root()
-        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
-        ticket_dir.mkdir(parents=True, exist_ok=True)
-        ticket_path = ticket_dir / "TICKET-001.md"
-        flow_request = request or FlowStartRequest()
-        meta = flow_request.metadata if isinstance(flow_request.metadata, dict) else {}
-        force_new = bool(meta.get("force_new"))
-
-        if not force_new:
-            records = _safe_list_flow_runs(
-                repo_root, flow_type="ticket_flow", recover_stuck=True
-            )
-            active = _active_or_paused_run(records)
-            if active:
-                _reap_dead_worker(active.id)
-                _start_flow_worker(repo_root, active.id)
-                resp = FlowStatusResponse.from_record(active)
-                resp.state = resp.state or {}
-                resp.state["hint"] = "active_run_reused"
-                return resp
-
-        seeded = False
-        if not ticket_path.exists():
-            template = """---
-agent: codex
-done: false
-title: Bootstrap ticket plan
-goal: Capture scope and seed follow-up tickets
----
-
-You are the first ticket in a new ticket_flow run.
-
-- Read `.codex-autorunner/ISSUE.md` (or ask for the issue/PR URL if missing).
-- If helpful, create or update workspace docs under `.codex-autorunner/workspace/`:
-  - `active_context.md` for current context and links
-  - `decisions.md` for decisions/rationale
-  - `spec.md` for requirements and constraints
-- Break the work into additional `TICKET-00X.md` files with clear owners/goals; keep this ticket open until they exist.
-- Place any supporting artifacts in `.codex-autorunner/runs/<run_id>/dispatch/` if needed.
-- Write `DISPATCH.md` to dispatch a message to the user:
-  - Use `mode: pause` (handoff) to wait for user response. This pauses execution.
-  - Use `mode: notify` (informational) to message the user but keep running.
-"""
-            ticket_path.write_text(template, encoding="utf-8")
-            seeded = True
-
-        meta = flow_request.metadata if isinstance(flow_request.metadata, dict) else {}
-        payload = FlowStartRequest(
-            input_data=flow_request.input_data,
-            metadata=meta | {"seeded_ticket": seeded},
-        )
-        return await _start_flow("ticket_flow", payload, force_new=force_new)
-
-    @router.get("/ticket_flow/tickets")
-    async def list_ticket_files():
-        repo_root = find_repo_root()
-        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
-        tickets = []
-        for path in list_ticket_paths(ticket_dir):
-            doc, errors = read_ticket(path)
-            rel_path = safe_relpath(path, repo_root)
-            tickets.append(
-                {
-                    "path": rel_path,
-                    "index": getattr(doc, "index", None),
-                    "frontmatter": asdict(doc.frontmatter) if doc else None,
-                    "body": doc.body if doc else None,
-                    "errors": errors,
-                }
-            )
-        return {
-            "ticket_dir": safe_relpath(ticket_dir, repo_root),
-            "tickets": tickets,
-        }
-
-    @router.post("/ticket_flow/tickets", response_model=TicketResponse)
-    async def create_ticket(request: TicketCreateRequest):
-        """Create a new ticket with auto-generated index."""
-        repo_root = find_repo_root()
-        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
-        ticket_dir.mkdir(parents=True, exist_ok=True)
-
-        # Find next available index
-        existing_paths = list_ticket_paths(ticket_dir)
-        existing_indices = set()
-        for p in existing_paths:
-            idx = parse_ticket_index(p.name)
-            if idx is not None:
-                existing_indices.add(idx)
-
-        next_index = 1
-        while next_index in existing_indices:
-            next_index += 1
-
-        # Build frontmatter
-        title_line = f"title: {request.title}\n" if request.title else ""
-        goal_line = f"goal: {request.goal}\n" if request.goal else ""
-
-        content = (
-            "---\n"
-            f"agent: {request.agent}\n"
-            "done: false\n"
-            f"{title_line}"
-            f"{goal_line}"
-            "---\n\n"
-            f"{request.body}\n"
-        )
-
-        ticket_path = ticket_dir / f"TICKET-{next_index:03d}.md"
-        atomic_write(ticket_path, content)
-
-        # Read back to validate and return
-        doc, errors = read_ticket(ticket_path)
-        if errors or not doc:
-            raise HTTPException(
-                status_code=400, detail=f"Failed to create valid ticket: {errors}"
-            )
-
-        return TicketResponse(
-            path=safe_relpath(ticket_path, repo_root),
-            index=doc.index,
-            frontmatter=asdict(doc.frontmatter),
-            body=doc.body,
-        )
-
-    @router.put("/ticket_flow/tickets/{index}", response_model=TicketResponse)
-    async def update_ticket(index: int, request: TicketUpdateRequest):
-        """Update an existing ticket by index."""
-        repo_root = find_repo_root()
-        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
-        ticket_path = ticket_dir / f"TICKET-{index:03d}.md"
-
-        if not ticket_path.exists():
-            raise HTTPException(
-                status_code=404, detail=f"Ticket TICKET-{index:03d}.md not found"
-            )
-
-        # Validate frontmatter before saving
-        data, body = parse_markdown_frontmatter(request.content)
-        _, errors = lint_ticket_frontmatter(data)
-        if errors:
-            raise HTTPException(
-                status_code=400,
-                detail={"message": "Invalid ticket frontmatter", "errors": errors},
-            )
-
-        atomic_write(ticket_path, request.content)
-
-        # Read back to return validated data
-        doc, read_errors = read_ticket(ticket_path)
-        if read_errors or not doc:
-            raise HTTPException(
-                status_code=400, detail=f"Failed to save valid ticket: {read_errors}"
-            )
-
-        return TicketResponse(
-            path=safe_relpath(ticket_path, repo_root),
-            index=doc.index,
-            frontmatter=asdict(doc.frontmatter),
-            body=doc.body,
-        )
-
-    @router.delete("/ticket_flow/tickets/{index}", response_model=TicketDeleteResponse)
-    async def delete_ticket(index: int):
-        """Delete a ticket by index."""
-        repo_root = find_repo_root()
-        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
-        ticket_path = ticket_dir / f"TICKET-{index:03d}.md"
-
-        if not ticket_path.exists():
-            raise HTTPException(
-                status_code=404, detail=f"Ticket TICKET-{index:03d}.md not found"
-            )
-
-        rel_path = safe_relpath(ticket_path, repo_root)
-        ticket_path.unlink()
-
-        return TicketDeleteResponse(
-            status="deleted",
-            index=index,
-            path=rel_path,
-        )
-
-    @router.post("/{run_id}/stop", response_model=FlowStatusResponse)
-    async def stop_flow(run_id: uuid.UUID):
-        run_id = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, run_id)
-        controller = _get_flow_controller(repo_root, record.flow_type)
-
-        _stop_worker(run_id)
-
-        updated = await controller.stop_flow(run_id)
-        return FlowStatusResponse.from_record(updated)
-
-    @router.post("/{run_id}/resume", response_model=FlowStatusResponse)
-    async def resume_flow(run_id: uuid.UUID):
-        run_id = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, run_id)
-        controller = _get_flow_controller(repo_root, record.flow_type)
-
-        updated = await controller.resume_flow(run_id)
-        _reap_dead_worker(run_id)
-        _start_flow_worker(repo_root, run_id)
-
-        return FlowStatusResponse.from_record(updated)
-
-    @router.post("/{run_id}/archive")
-    async def archive_flow(
-        run_id: uuid.UUID, delete_run: bool = True, force: bool = False
-    ):
-        """Archive a completed flow by moving tickets to the run's artifact directory.
-
-        Args:
-            run_id: The flow run to archive.
-            delete_run: Whether to delete the run record after archiving.
-            force: If True, allow archiving flows stuck in stopping/paused state
-                by force-stopping the worker first.
-        """
-        run_id = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, run_id)
-
-        # Allow archiving terminal flows, or force-archiving stuck flows
-        if not FlowRunStatus(record.status).is_terminal():
-            if force and record.status in (
-                FlowRunStatus.STOPPING,
-                FlowRunStatus.PAUSED,
-            ):
-                # Force-stop any remaining worker before archiving
-                _stop_worker(run_id, timeout=2.0)
-                _logger.info(
-                    "Force-archiving flow %s in %s state", run_id, record.status.value
-                )
-            else:
-                raise HTTPException(
-                    status_code=400,
-                    detail="Can only archive completed/stopped/failed flows (use force=true for stuck flows)",
-                )
-
-        # Move tickets to run artifacts directory
-        _, artifacts_root = _flow_paths(repo_root)
-        archive_dir = artifacts_root / run_id / "archived_tickets"
-        archive_dir.mkdir(parents=True, exist_ok=True)
-
-        ticket_dir = repo_root / ".codex-autorunner" / "tickets"
-        archived_count = 0
-        for ticket_path in list_ticket_paths(ticket_dir):
-            dest = archive_dir / ticket_path.name
-            shutil.move(str(ticket_path), str(dest))
-            archived_count += 1
-
-        # Archive runs directory (dispatch_history, reply_history, etc.) to dismiss notifications
-        outbox_paths = _resolve_outbox_for_record(record, repo_root)
-        run_dir = outbox_paths.run_dir
-        if run_dir.exists() and run_dir.is_dir():
-            archived_runs_dir = artifacts_root / run_id / "archived_runs"
-            shutil.move(str(run_dir), str(archived_runs_dir))
-
-        # Delete run record if requested
-        if delete_run:
-            store = _require_flow_store(repo_root)
-            if store:
-                store.delete_flow_run(run_id)
-                store.close()
-
-        return {
-            "status": "archived",
-            "run_id": run_id,
-            "tickets_archived": archived_count,
-        }
-
-    @router.get("/{run_id}/status", response_model=FlowStatusResponse)
-    async def get_flow_status(run_id: uuid.UUID):
-        run_id = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-
-        _reap_dead_worker(run_id)
-
-        record = _get_flow_record(repo_root, run_id)
-
-        # If the worker died but status claims it's still active, recover the flow
-        store = _require_flow_store(repo_root)
-        if store:
-            try:
-                record = _maybe_recover_stuck_flow(repo_root, record, store)
-            finally:
-                store.close()
-
-        return FlowStatusResponse.from_record(record)
-
-    @router.get("/{run_id}/events")
-    async def stream_flow_events(run_id: uuid.UUID, after: Optional[int] = None):
-        run_id = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, run_id)
-        controller = _get_flow_controller(repo_root, record.flow_type)
-
-        async def event_stream():
-            try:
-                async for event in controller.stream_events(run_id, after_seq=after):
-                    data = event.model_dump(mode="json")
-                    yield f"data: {json.dumps(data)}\n\n"
-            except Exception as e:
-                _logger.exception("Error streaming events for run %s: %s", run_id, e)
-                raise
-
-        return StreamingResponse(
-            event_stream(),
-            media_type="text/event-stream",
-            headers={
-                "Cache-Control": "no-cache",
-                "Connection": "keep-alive",
-                "X-Accel-Buffering": "no",
-            },
-        )
-
-    @router.get("/{run_id}/dispatch_history")
-    async def get_dispatch_history(run_id: str):
-        """Get dispatch history for a flow run.
-
-        Returns all dispatches (agent->human communications) for this run.
-        """
-        normalized = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, normalized)
-        paths = _resolve_outbox_for_record(record, repo_root)
-
-        history_entries = []
-        history_dir = paths.dispatch_history_dir
-        if history_dir.exists() and history_dir.is_dir():
-            for entry in sorted(
-                [p for p in history_dir.iterdir() if p.is_dir()],
-                key=lambda p: p.name,
-                reverse=True,
-            ):
-                dispatch_path = entry / "DISPATCH.md"
-                dispatch, errors = (
-                    parse_dispatch(dispatch_path)
-                    if dispatch_path.exists()
-                    else (None, ["Dispatch file missing"])
-                )
-                dispatch_dict = asdict(dispatch) if dispatch else None
-                if dispatch_dict and dispatch:
-                    dispatch_dict["is_handoff"] = dispatch.is_handoff
-                attachments = []
-                for child in sorted(entry.rglob("*")):
-                    if child.name == "DISPATCH.md":
-                        continue
-                    rel = child.relative_to(entry).as_posix()
-                    if any(part.startswith(".") for part in Path(rel).parts):
-                        continue
-                    if child.is_dir():
-                        continue
-                    attachments.append(
-                        {
-                            "name": child.name,
-                            "rel_path": rel,
-                            "path": safe_relpath(child, repo_root),
-                            "size": child.stat().st_size if child.is_file() else None,
-                            "url": f"api/flows/{normalized}/dispatch_history/{entry.name}/{quote(rel)}",
-                        }
-                    )
-                history_entries.append(
-                    {
-                        "seq": entry.name,
-                        "dispatch": dispatch_dict,
-                        "errors": errors,
-                        "attachments": attachments,
-                        "path": safe_relpath(entry, repo_root),
-                    }
-                )
-
-        return {"run_id": normalized, "history": history_entries}
-
-    @router.get("/{run_id}/reply_history/{seq}/{file_path:path}")
-    def get_reply_history_file(run_id: str, seq: str, file_path: str):
-        repo_root = find_repo_root()
-        db_path, _ = _flow_paths(repo_root)
-        store = FlowStore(db_path)
-        try:
-            store.initialize()
-            record = store.get_flow_run(run_id)
-        finally:
-            try:
-                store.close()
-            except Exception:
-                pass
-        if not record:
-            raise HTTPException(status_code=404, detail="Run not found")
-
-        if not (len(seq) == 4 and seq.isdigit()):
-            raise HTTPException(status_code=400, detail="Invalid seq")
-        if ".." in file_path or file_path.startswith("/"):
-            raise HTTPException(status_code=400, detail="Invalid file path")
-        filename = os.path.basename(file_path)
-        if filename != file_path:
-            raise HTTPException(status_code=400, detail="Invalid file path")
-
-        input_data = dict(record.input_data or {})
-        workspace_root = Path(input_data.get("workspace_root") or repo_root)
-        runs_dir = Path(input_data.get("runs_dir") or ".codex-autorunner/runs")
-        from ..tickets.replies import resolve_reply_paths
-
-        reply_paths = resolve_reply_paths(
-            workspace_root=workspace_root, runs_dir=runs_dir, run_id=run_id
-        )
-        target = reply_paths.reply_history_dir / seq / filename
-        if not target.exists() or not target.is_file():
-            raise HTTPException(status_code=404, detail="File not found")
-        return FileResponse(path=str(target), filename=filename)
-
-    @router.get("/{run_id}/dispatch_history/{seq}/{file_path:path}")
-    async def get_dispatch_file(run_id: str, seq: str, file_path: str):
-        """Get an attachment file from a dispatch history entry."""
-        normalized = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, normalized)
-        paths = _resolve_outbox_for_record(record, repo_root)
-
-        base_history = paths.dispatch_history_dir.resolve()
-
-        seq_clean = seq.strip()
-        if not re.fullmatch(r"[0-9]{4}", seq_clean):
-            raise HTTPException(
-                status_code=400, detail="Invalid dispatch history sequence"
-            )
-
-        history_dir = (base_history / seq_clean).resolve()
-        if not history_dir.is_relative_to(base_history) or not history_dir.is_dir():
-            raise HTTPException(
-                status_code=404, detail=f"Dispatch history not found for run {run_id}"
-            )
-
-        file_rel = PurePosixPath(file_path)
-        if file_rel.is_absolute() or ".." in file_rel.parts or "\\" in file_path:
-            raise HTTPException(status_code=400, detail="Invalid dispatch file path")
-
-        safe_parts = [part for part in file_rel.parts if part not in {"", "."}]
-        if any(not re.fullmatch(r"[A-Za-z0-9._-]+", part) for part in safe_parts):
-            raise HTTPException(status_code=400, detail="Invalid dispatch file path")
-
-        target = (history_dir / Path(*safe_parts)).resolve()
-        try:
-            resolved = target.resolve()
-        except OSError as exc:
-            raise HTTPException(status_code=400, detail=str(exc)) from exc
-
-        if not resolved.exists():
-            raise HTTPException(status_code=404, detail="File not found")
-
-        if not resolved.is_relative_to(history_dir):
-            raise HTTPException(
-                status_code=403,
-                detail="Access denied: file outside dispatch history directory",
-            )
-
-        return FileResponse(resolved, filename=resolved.name)
-
-    @router.get("/{run_id}/artifacts", response_model=list[FlowArtifactInfo])
-    async def list_flow_artifacts(run_id: str):
-        normalized = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, normalized)
-        controller = _get_flow_controller(repo_root, record.flow_type)
-
-        artifacts = controller.get_artifacts(normalized)
-        return [
-            FlowArtifactInfo(
-                id=art.id,
-                kind=art.kind,
-                path=art.path,
-                created_at=art.created_at,
-                metadata=art.metadata,
-            )
-            for art in artifacts
-        ]
-
-    @router.get("/{run_id}/artifact")
-    async def get_flow_artifact(run_id: str, kind: Optional[str] = None):
-        normalized = _normalize_run_id(run_id)
-        repo_root = find_repo_root()
-        record = _get_flow_record(repo_root, normalized)
-        controller = _get_flow_controller(repo_root, record.flow_type)
-
-        artifacts_root = controller.get_artifacts_dir(normalized)
-        if not artifacts_root:
-            from fastapi import HTTPException
-
-            raise HTTPException(
-                status_code=404, detail=f"Artifact directory not found for run {run_id}"
-            )
-
-        artifacts = controller.get_artifacts(normalized)
-
-        if kind:
-            matching = [a for a in artifacts if a.kind == kind]
-        else:
-            matching = artifacts
-
-        if not matching:
-            from fastapi import HTTPException
-
-            raise HTTPException(
-                status_code=404,
-                detail=f"No artifact found for run {run_id} with kind={kind}",
-            )
-
-        artifact = matching[0]
-        artifact_path = Path(artifact.path)
-
-        if not artifact_path.exists():
-            from fastapi import HTTPException
-
-            raise HTTPException(
-                status_code=404, detail=f"Artifact file not found: {artifact.path}"
-            )
-
-        if not artifact_path.resolve().is_relative_to(artifacts_root.resolve()):
-            from fastapi import HTTPException
-
-            raise HTTPException(
-                status_code=403,
-                detail="Access denied: artifact path outside run directory",
-            )
-
-        return FileResponse(artifact_path, filename=artifact_path.name)
-
-    return router
+sys.modules[__name__] = _flows