codex-autorunner 0.1.2-py3-none-any.whl → 1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. codex_autorunner/__main__.py +4 -0
  2. codex_autorunner/agents/opencode/client.py +68 -35
  3. codex_autorunner/agents/opencode/logging.py +21 -5
  4. codex_autorunner/agents/opencode/run_prompt.py +1 -0
  5. codex_autorunner/agents/opencode/runtime.py +118 -30
  6. codex_autorunner/agents/opencode/supervisor.py +36 -48
  7. codex_autorunner/agents/registry.py +136 -8
  8. codex_autorunner/api.py +25 -0
  9. codex_autorunner/bootstrap.py +16 -35
  10. codex_autorunner/cli.py +157 -139
  11. codex_autorunner/core/about_car.py +44 -32
  12. codex_autorunner/core/adapter_utils.py +21 -0
  13. codex_autorunner/core/app_server_logging.py +7 -3
  14. codex_autorunner/core/app_server_prompts.py +27 -260
  15. codex_autorunner/core/app_server_threads.py +15 -26
  16. codex_autorunner/core/codex_runner.py +6 -0
  17. codex_autorunner/core/config.py +390 -100
  18. codex_autorunner/core/docs.py +10 -2
  19. codex_autorunner/core/drafts.py +82 -0
  20. codex_autorunner/core/engine.py +278 -262
  21. codex_autorunner/core/flows/__init__.py +25 -0
  22. codex_autorunner/core/flows/controller.py +178 -0
  23. codex_autorunner/core/flows/definition.py +82 -0
  24. codex_autorunner/core/flows/models.py +75 -0
  25. codex_autorunner/core/flows/runtime.py +351 -0
  26. codex_autorunner/core/flows/store.py +485 -0
  27. codex_autorunner/core/flows/transition.py +133 -0
  28. codex_autorunner/core/flows/worker_process.py +242 -0
  29. codex_autorunner/core/hub.py +15 -9
  30. codex_autorunner/core/locks.py +4 -0
  31. codex_autorunner/core/prompt.py +15 -7
  32. codex_autorunner/core/redaction.py +29 -0
  33. codex_autorunner/core/review_context.py +5 -8
  34. codex_autorunner/core/run_index.py +6 -0
  35. codex_autorunner/core/runner_process.py +5 -2
  36. codex_autorunner/core/state.py +0 -88
  37. codex_autorunner/core/static_assets.py +55 -0
  38. codex_autorunner/core/supervisor_utils.py +67 -0
  39. codex_autorunner/core/update.py +20 -11
  40. codex_autorunner/core/update_runner.py +2 -0
  41. codex_autorunner/core/utils.py +29 -2
  42. codex_autorunner/discovery.py +2 -4
  43. codex_autorunner/flows/ticket_flow/__init__.py +3 -0
  44. codex_autorunner/flows/ticket_flow/definition.py +91 -0
  45. codex_autorunner/integrations/agents/__init__.py +27 -0
  46. codex_autorunner/integrations/agents/agent_backend.py +142 -0
  47. codex_autorunner/integrations/agents/codex_backend.py +307 -0
  48. codex_autorunner/integrations/agents/opencode_backend.py +325 -0
  49. codex_autorunner/integrations/agents/run_event.py +71 -0
  50. codex_autorunner/integrations/app_server/client.py +576 -92
  51. codex_autorunner/integrations/app_server/supervisor.py +59 -33
  52. codex_autorunner/integrations/telegram/adapter.py +141 -167
  53. codex_autorunner/integrations/telegram/api_schemas.py +120 -0
  54. codex_autorunner/integrations/telegram/config.py +175 -0
  55. codex_autorunner/integrations/telegram/constants.py +16 -1
  56. codex_autorunner/integrations/telegram/dispatch.py +17 -0
  57. codex_autorunner/integrations/telegram/doctor.py +47 -0
  58. codex_autorunner/integrations/telegram/handlers/callbacks.py +0 -4
  59. codex_autorunner/integrations/telegram/handlers/commands/__init__.py +2 -0
  60. codex_autorunner/integrations/telegram/handlers/commands/execution.py +53 -57
  61. codex_autorunner/integrations/telegram/handlers/commands/files.py +2 -6
  62. codex_autorunner/integrations/telegram/handlers/commands/flows.py +227 -0
  63. codex_autorunner/integrations/telegram/handlers/commands/formatting.py +1 -1
  64. codex_autorunner/integrations/telegram/handlers/commands/github.py +41 -582
  65. codex_autorunner/integrations/telegram/handlers/commands/workspace.py +8 -8
  66. codex_autorunner/integrations/telegram/handlers/commands_runtime.py +133 -475
  67. codex_autorunner/integrations/telegram/handlers/commands_spec.py +11 -4
  68. codex_autorunner/integrations/telegram/handlers/messages.py +120 -9
  69. codex_autorunner/integrations/telegram/helpers.py +88 -16
  70. codex_autorunner/integrations/telegram/outbox.py +208 -37
  71. codex_autorunner/integrations/telegram/progress_stream.py +3 -10
  72. codex_autorunner/integrations/telegram/service.py +214 -40
  73. codex_autorunner/integrations/telegram/state.py +100 -2
  74. codex_autorunner/integrations/telegram/ticket_flow_bridge.py +322 -0
  75. codex_autorunner/integrations/telegram/transport.py +36 -3
  76. codex_autorunner/integrations/telegram/trigger_mode.py +53 -0
  77. codex_autorunner/manifest.py +2 -0
  78. codex_autorunner/plugin_api.py +22 -0
  79. codex_autorunner/routes/__init__.py +23 -14
  80. codex_autorunner/routes/analytics.py +239 -0
  81. codex_autorunner/routes/base.py +81 -109
  82. codex_autorunner/routes/file_chat.py +836 -0
  83. codex_autorunner/routes/flows.py +980 -0
  84. codex_autorunner/routes/messages.py +459 -0
  85. codex_autorunner/routes/system.py +6 -1
  86. codex_autorunner/routes/usage.py +87 -0
  87. codex_autorunner/routes/workspace.py +271 -0
  88. codex_autorunner/server.py +2 -1
  89. codex_autorunner/static/agentControls.js +1 -0
  90. codex_autorunner/static/agentEvents.js +248 -0
  91. codex_autorunner/static/app.js +25 -22
  92. codex_autorunner/static/autoRefresh.js +29 -1
  93. codex_autorunner/static/bootstrap.js +1 -0
  94. codex_autorunner/static/bus.js +1 -0
  95. codex_autorunner/static/cache.js +1 -0
  96. codex_autorunner/static/constants.js +20 -4
  97. codex_autorunner/static/dashboard.js +162 -196
  98. codex_autorunner/static/diffRenderer.js +37 -0
  99. codex_autorunner/static/docChatCore.js +324 -0
  100. codex_autorunner/static/docChatStorage.js +65 -0
  101. codex_autorunner/static/docChatVoice.js +65 -0
  102. codex_autorunner/static/docEditor.js +133 -0
  103. codex_autorunner/static/env.js +1 -0
  104. codex_autorunner/static/eventSummarizer.js +166 -0
  105. codex_autorunner/static/fileChat.js +182 -0
  106. codex_autorunner/static/health.js +155 -0
  107. codex_autorunner/static/hub.js +41 -118
  108. codex_autorunner/static/index.html +787 -858
  109. codex_autorunner/static/liveUpdates.js +1 -0
  110. codex_autorunner/static/loader.js +1 -0
  111. codex_autorunner/static/messages.js +470 -0
  112. codex_autorunner/static/mobileCompact.js +2 -1
  113. codex_autorunner/static/settings.js +24 -211
  114. codex_autorunner/static/styles.css +7567 -3865
  115. codex_autorunner/static/tabs.js +28 -5
  116. codex_autorunner/static/terminal.js +14 -0
  117. codex_autorunner/static/terminalManager.js +34 -59
  118. codex_autorunner/static/ticketChatActions.js +333 -0
  119. codex_autorunner/static/ticketChatEvents.js +16 -0
  120. codex_autorunner/static/ticketChatStorage.js +16 -0
  121. codex_autorunner/static/ticketChatStream.js +264 -0
  122. codex_autorunner/static/ticketEditor.js +750 -0
  123. codex_autorunner/static/ticketVoice.js +9 -0
  124. codex_autorunner/static/tickets.js +1315 -0
  125. codex_autorunner/static/utils.js +32 -3
  126. codex_autorunner/static/voice.js +1 -0
  127. codex_autorunner/static/workspace.js +672 -0
  128. codex_autorunner/static/workspaceApi.js +53 -0
  129. codex_autorunner/static/workspaceFileBrowser.js +504 -0
  130. codex_autorunner/tickets/__init__.py +20 -0
  131. codex_autorunner/tickets/agent_pool.py +377 -0
  132. codex_autorunner/tickets/files.py +85 -0
  133. codex_autorunner/tickets/frontmatter.py +55 -0
  134. codex_autorunner/tickets/lint.py +102 -0
  135. codex_autorunner/tickets/models.py +95 -0
  136. codex_autorunner/tickets/outbox.py +232 -0
  137. codex_autorunner/tickets/replies.py +179 -0
  138. codex_autorunner/tickets/runner.py +823 -0
  139. codex_autorunner/tickets/spec_ingest.py +77 -0
  140. codex_autorunner/web/app.py +269 -91
  141. codex_autorunner/web/middleware.py +3 -4
  142. codex_autorunner/web/schemas.py +89 -109
  143. codex_autorunner/web/static_assets.py +1 -44
  144. codex_autorunner/workspace/__init__.py +40 -0
  145. codex_autorunner/workspace/paths.py +319 -0
  146. {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.0.0.dist-info}/METADATA +18 -21
  147. codex_autorunner-1.0.0.dist-info/RECORD +251 -0
  148. {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.0.0.dist-info}/WHEEL +1 -1
  149. codex_autorunner/agents/execution/policy.py +0 -292
  150. codex_autorunner/agents/factory.py +0 -52
  151. codex_autorunner/agents/orchestrator.py +0 -358
  152. codex_autorunner/core/doc_chat.py +0 -1446
  153. codex_autorunner/core/snapshot.py +0 -580
  154. codex_autorunner/integrations/github/chatops.py +0 -268
  155. codex_autorunner/integrations/github/pr_flow.py +0 -1314
  156. codex_autorunner/routes/docs.py +0 -381
  157. codex_autorunner/routes/github.py +0 -327
  158. codex_autorunner/routes/runs.py +0 -250
  159. codex_autorunner/spec_ingest.py +0 -812
  160. codex_autorunner/static/docChatActions.js +0 -287
  161. codex_autorunner/static/docChatEvents.js +0 -300
  162. codex_autorunner/static/docChatRender.js +0 -205
  163. codex_autorunner/static/docChatStream.js +0 -361
  164. codex_autorunner/static/docs.js +0 -20
  165. codex_autorunner/static/docsClipboard.js +0 -69
  166. codex_autorunner/static/docsCrud.js +0 -257
  167. codex_autorunner/static/docsDocUpdates.js +0 -62
  168. codex_autorunner/static/docsDrafts.js +0 -16
  169. codex_autorunner/static/docsElements.js +0 -69
  170. codex_autorunner/static/docsInit.js +0 -285
  171. codex_autorunner/static/docsParse.js +0 -160
  172. codex_autorunner/static/docsSnapshot.js +0 -87
  173. codex_autorunner/static/docsSpecIngest.js +0 -263
  174. codex_autorunner/static/docsState.js +0 -127
  175. codex_autorunner/static/docsThreadRegistry.js +0 -44
  176. codex_autorunner/static/docsUi.js +0 -153
  177. codex_autorunner/static/docsVoice.js +0 -56
  178. codex_autorunner/static/github.js +0 -504
  179. codex_autorunner/static/logs.js +0 -678
  180. codex_autorunner/static/review.js +0 -157
  181. codex_autorunner/static/runs.js +0 -418
  182. codex_autorunner/static/snapshot.js +0 -124
  183. codex_autorunner/static/state.js +0 -94
  184. codex_autorunner/static/todoPreview.js +0 -27
  185. codex_autorunner/workspace.py +0 -16
  186. codex_autorunner-0.1.2.dist-info/RECORD +0 -222
  187. {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.0.0.dist-info}/entry_points.txt +0 -0
  188. {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.0.0.dist-info}/licenses/LICENSE +0 -0
  189. {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.0.0.dist-info}/top_level.txt +0 -0
codex_autorunner/routes/flows.py
@@ -0,0 +1,980 @@
+ import json
+ import logging
+ import os
+ import re
+ import shutil
+ import subprocess
+ import uuid
+ from dataclasses import asdict
+ from pathlib import Path, PurePosixPath
+ from typing import IO, Dict, Optional, Tuple, Union
+ from urllib.parse import quote
+
+ from fastapi import APIRouter, HTTPException
+ from fastapi.responses import FileResponse, StreamingResponse
+ from pydantic import BaseModel, Field
+
+ from ..core.engine import Engine
+ from ..core.flows import (
+     FlowController,
+     FlowDefinition,
+     FlowRunRecord,
+     FlowRunStatus,
+     FlowStore,
+ )
+ from ..core.flows.store import UNSET
+ from ..core.flows.transition import resolve_flow_transition
+ from ..core.flows.worker_process import (
+     FlowWorkerHealth,
+     check_worker_health,
+     clear_worker_metadata,
+     spawn_flow_worker,
+ )
+ from ..core.utils import atomic_write, find_repo_root
+ from ..flows.ticket_flow import build_ticket_flow_definition
+ from ..tickets import AgentPool
+ from ..tickets.files import (
+     list_ticket_paths,
+     parse_ticket_index,
+     read_ticket,
+     safe_relpath,
+ )
+ from ..tickets.frontmatter import parse_markdown_frontmatter
+ from ..tickets.lint import lint_ticket_frontmatter
+ from ..tickets.outbox import parse_dispatch, resolve_outbox_paths
+ from ..web.schemas import (
+     TicketCreateRequest,
+     TicketDeleteResponse,
+     TicketResponse,
+     TicketUpdateRequest,
+ )
+
+ _logger = logging.getLogger(__name__)
+
+ _active_workers: Dict[
+     str, Tuple[Optional[subprocess.Popen], Optional[IO[bytes]], Optional[IO[bytes]]]
+ ] = {}
+ _controller_cache: Dict[tuple[Path, str], FlowController] = {}
+ _definition_cache: Dict[tuple[Path, str], FlowDefinition] = {}
+ _supported_flow_types = ("ticket_flow",)
+
+
+ def _flow_paths(repo_root: Path) -> tuple[Path, Path]:
+     repo_root = repo_root.resolve()
+     db_path = repo_root / ".codex-autorunner" / "flows.db"
+     artifacts_root = repo_root / ".codex-autorunner" / "flows"
+     return db_path, artifacts_root
+
+
+ def _require_flow_store(repo_root: Path) -> Optional[FlowStore]:
+     db_path, _ = _flow_paths(repo_root)
+     store = FlowStore(db_path)
+     try:
+         store.initialize()
+         return store
+     except Exception as exc:
+         _logger.warning("Flows database unavailable at %s: %s", db_path, exc)
+         return None
+
+
+ def _safe_list_flow_runs(
+     repo_root: Path, flow_type: Optional[str] = None, *, recover_stuck: bool = False
+ ) -> list[FlowRunRecord]:
+     db_path, _ = _flow_paths(repo_root)
+     store = FlowStore(db_path)
+     try:
+         store.initialize()
+         records = store.list_flow_runs(flow_type=flow_type)
+         if recover_stuck:
+             # Recover any flows stuck in active states with dead workers
+             records = [
+                 _maybe_recover_stuck_flow(repo_root, rec, store) for rec in records
+             ]
+         return records
+     except Exception as exc:
+         _logger.debug("FlowStore list runs failed: %s", exc)
+         return []
+     finally:
+         try:
+             store.close()
+         except Exception:
+             pass
+
+
+ def _build_flow_definition(repo_root: Path, flow_type: str) -> FlowDefinition:
+     repo_root = repo_root.resolve()
+     key = (repo_root, flow_type)
+     if key in _definition_cache:
+         return _definition_cache[key]
+
+     if flow_type == "ticket_flow":
+         engine = Engine(repo_root)
+         agent_pool = AgentPool(engine.config)
+         definition = build_ticket_flow_definition(agent_pool=agent_pool)
+     else:
+         raise HTTPException(status_code=404, detail=f"Unknown flow type: {flow_type}")
+
+     definition.validate()
+     _definition_cache[key] = definition
+     return definition
+
+
+ def _get_flow_controller(repo_root: Path, flow_type: str) -> FlowController:
+     repo_root = repo_root.resolve()
+     key = (repo_root, flow_type)
+     if key in _controller_cache:
+         return _controller_cache[key]
+
+     db_path, artifacts_root = _flow_paths(repo_root)
+     definition = _build_flow_definition(repo_root, flow_type)
+
+     controller = FlowController(
+         definition=definition,
+         db_path=db_path,
+         artifacts_root=artifacts_root,
+     )
+     try:
+         controller.initialize()
+     except Exception as exc:
+         _logger.warning("Failed to initialize flow controller: %s", exc)
+         raise HTTPException(
+             status_code=503, detail="Flows unavailable; initialize the repo first."
+         ) from exc
+     _controller_cache[key] = controller
+     return controller
+
+
+ def _get_flow_record(repo_root: Path, run_id: str) -> FlowRunRecord:
+     store = _require_flow_store(repo_root)
+     if store is None:
+         raise HTTPException(status_code=503, detail="Flows database unavailable")
+     try:
+         record = store.get_flow_run(run_id)
+     finally:
+         try:
+             store.close()
+         except Exception:
+             pass
+     if not record:
+         raise HTTPException(status_code=404, detail=f"Flow run {run_id} not found")
+     return record
+
+
+ def _active_or_paused_run(records: list[FlowRunRecord]) -> Optional[FlowRunRecord]:
+     if not records:
+         return None
+     latest = records[0]
+     if latest.status in (FlowRunStatus.RUNNING, FlowRunStatus.PAUSED):
+         return latest
+     return None
+
+
+ def _normalize_run_id(run_id: Union[str, uuid.UUID]) -> str:
+     try:
+         return str(uuid.UUID(str(run_id)))
+     except ValueError:
+         raise HTTPException(status_code=400, detail="Invalid run_id") from None
+
+
+ def _cleanup_worker_handle(run_id: str) -> None:
+     handle = _active_workers.pop(run_id, None)
+     if not handle:
+         return
+
+     proc, stdout, stderr = handle
+     if proc and proc.poll() is None:
+         try:
+             proc.terminate()
+         except Exception:
+             pass
+
+     for stream in (stdout, stderr):
+         if stream and not stream.closed:
+             try:
+                 stream.flush()
+             except Exception:
+                 pass
+             try:
+                 stream.close()
+             except Exception:
+                 pass
+
+
+ def _reap_dead_worker(run_id: str) -> None:
+     handle = _active_workers.get(run_id)
+     if not handle:
+         return
+     proc, *_ = handle
+     if proc and proc.poll() is not None:
+         _cleanup_worker_handle(run_id)
+
+
+ def _ensure_worker_not_stale(health: FlowWorkerHealth) -> None:
+     # Clear metadata if stale to allow clean respawn.
+     if health.status in {"dead", "mismatch", "invalid"}:
+         try:
+             clear_worker_metadata(health.artifact_path.parent)
+         except Exception:
+             _logger.debug("Failed to clear worker metadata: %s", health.artifact_path)
+
+
+ def _maybe_recover_stuck_flow(
+     repo_root: Path, record: FlowRunRecord, store: FlowStore
+ ) -> FlowRunRecord:
+     """
+     Reconcile persisted flow state with worker health and inner ticket_engine status.
+
+     Delegates decision logic to a pure transition resolver to keep recovery predictable
+     and exhaustively testable.
+     """
+     if record.status not in (
+         FlowRunStatus.RUNNING,
+         FlowRunStatus.STOPPING,
+         FlowRunStatus.PAUSED,
+     ):
+         return record
+
+     health = check_worker_health(repo_root, record.id)
+
+     decision = resolve_flow_transition(record, health)
+
+     if (
+         decision.status == record.status
+         and decision.finished_at == record.finished_at
+         and decision.state == (record.state or {})
+     ):
+         return record
+
+     _logger.info(
+         "Recovering flow %s: %s -> %s (%s)",
+         record.id,
+         record.status.value,
+         decision.status.value,
+         decision.note or "reconcile",
+     )
+
+     updated = store.update_flow_run_status(
+         run_id=record.id,
+         status=decision.status,
+         state=decision.state,
+         finished_at=decision.finished_at if decision.finished_at else UNSET,
+     )
+     _ensure_worker_not_stale(health)
+     return updated or record
+
+
+ class FlowStartRequest(BaseModel):
+     input_data: Dict = Field(default_factory=dict)
+     metadata: Optional[Dict] = None
+
+
+ class FlowStatusResponse(BaseModel):
+     id: str
+     flow_type: str
+     status: str
+     current_step: Optional[str]
+     created_at: str
+     started_at: Optional[str]
+     finished_at: Optional[str]
+     error_message: Optional[str]
+     state: Dict = Field(default_factory=dict)
+
+     @classmethod
+     def from_record(cls, record: FlowRunRecord) -> "FlowStatusResponse":
+         return cls(
+             id=record.id,
+             flow_type=record.flow_type,
+             status=record.status.value,
+             current_step=record.current_step,
+             created_at=record.created_at,
+             started_at=record.started_at,
+             finished_at=record.finished_at,
+             error_message=record.error_message,
+             state=record.state,
+         )
+
+
+ class FlowArtifactInfo(BaseModel):
+     id: str
+     kind: str
+     path: str
+     created_at: str
+     metadata: Dict = Field(default_factory=dict)
+
+
+ def _start_flow_worker(repo_root: Path, run_id: str) -> Optional[subprocess.Popen]:
+     normalized_run_id = _normalize_run_id(run_id)
+
+     health = check_worker_health(repo_root, normalized_run_id)
+     _ensure_worker_not_stale(health)
+     if health.is_alive:
+         _logger.info(
+             "Worker already active for run %s (pid=%s), skipping spawn",
+             normalized_run_id,
+             health.pid,
+         )
+         return None
+
+     _reap_dead_worker(normalized_run_id)
+
+     proc, stdout_handle, stderr_handle = spawn_flow_worker(repo_root, normalized_run_id)
+     _active_workers[normalized_run_id] = (proc, stdout_handle, stderr_handle)
+     _logger.info("Started flow worker for run %s (pid=%d)", normalized_run_id, proc.pid)
+     return proc
+
+
+ def _stop_worker(run_id: str, timeout: float = 10.0) -> None:
+     normalized_run_id = _normalize_run_id(run_id)
+     handle = _active_workers.get(normalized_run_id)
+     if not handle:
+         health = check_worker_health(find_repo_root(), normalized_run_id)
+         if health.is_alive and health.pid:
+             try:
+                 _logger.info(
+                     "Stopping untracked worker for run %s (pid=%s)",
+                     normalized_run_id,
+                     health.pid,
+                 )
+                 subprocess.run(["kill", str(health.pid)], check=False)
+             except Exception as exc:
+                 _logger.warning(
+                     "Failed to stop untracked worker %s: %s", normalized_run_id, exc
+                 )
+         return
+
+     proc, *_ = handle
+     if proc and proc.poll() is None:
+         proc.terminate()
+         try:
+             proc.wait(timeout=timeout)
+         except subprocess.TimeoutExpired:
+             _logger.warning(
+                 "Worker for run %s did not exit in time, killing", normalized_run_id
+             )
+             proc.kill()
+         except Exception as exc:
+             _logger.warning("Error stopping worker %s: %s", normalized_run_id, exc)
+
+     _cleanup_worker_handle(normalized_run_id)
+
+
+ def build_flow_routes() -> APIRouter:
+     router = APIRouter(prefix="/api/flows", tags=["flows"])
+
+     def _definition_info(definition: FlowDefinition) -> Dict:
+         return {
+             "type": definition.flow_type,
+             "name": definition.name,
+             "description": definition.description,
+             "input_schema": definition.input_schema or {},
+         }
+
+     def _resolve_outbox_for_record(record: FlowRunRecord, repo_root: Path):
+         workspace_root = Path(record.input_data.get("workspace_root") or repo_root)
+         runs_dir = Path(record.input_data.get("runs_dir") or ".codex-autorunner/runs")
+         return resolve_outbox_paths(
+             workspace_root=workspace_root, runs_dir=runs_dir, run_id=record.id
+         )
+
+     @router.get("")
+     async def list_flow_definitions():
+         repo_root = find_repo_root()
+         definitions = [
+             _definition_info(_build_flow_definition(repo_root, flow_type))
+             for flow_type in _supported_flow_types
+         ]
+         return {"definitions": definitions}
+
+     @router.get("/runs", response_model=list[FlowStatusResponse])
+     async def list_runs(flow_type: Optional[str] = None):
+         repo_root = find_repo_root()
+         records = _safe_list_flow_runs(
+             repo_root, flow_type=flow_type, recover_stuck=True
+         )
+         return [FlowStatusResponse.from_record(rec) for rec in records]
+
+     @router.get("/{flow_type}")
+     async def get_flow_definition(flow_type: str):
+         repo_root = find_repo_root()
+         if flow_type not in _supported_flow_types:
+             raise HTTPException(
+                 status_code=404, detail=f"Unknown flow type: {flow_type}"
+             )
+         definition = _build_flow_definition(repo_root, flow_type)
+         return _definition_info(definition)
+
+     async def _start_flow(
+         flow_type: str, request: FlowStartRequest, *, force_new: bool = False
+     ) -> FlowStatusResponse:
+         if flow_type not in _supported_flow_types:
+             raise HTTPException(
+                 status_code=404, detail=f"Unknown flow type: {flow_type}"
+             )
+
+         repo_root = find_repo_root()
+         controller = _get_flow_controller(repo_root, flow_type)
+
+         # Reuse an active/paused run unless force_new is requested.
+         if not force_new:
+             runs = _safe_list_flow_runs(
+                 repo_root, flow_type=flow_type, recover_stuck=True
+             )
+             active = _active_or_paused_run(runs)
+             if active:
+                 _reap_dead_worker(active.id)
+                 _start_flow_worker(repo_root, active.id)
+                 response = FlowStatusResponse.from_record(active)
+                 response.state = response.state or {}
+                 response.state["hint"] = "active_run_reused"
+                 return response
+
+         run_id = _normalize_run_id(uuid.uuid4())
+
+         record = await controller.start_flow(
+             input_data=request.input_data,
+             run_id=run_id,
+             metadata=request.metadata,
+         )
+
+         _start_flow_worker(repo_root, run_id)
+
+         return FlowStatusResponse.from_record(record)
+
+     @router.post("/{flow_type}/start", response_model=FlowStatusResponse)
+     async def start_flow(flow_type: str, request: FlowStartRequest):
+         meta = request.metadata if isinstance(request.metadata, dict) else {}
+         force_new = bool(meta.get("force_new"))
+         return await _start_flow(flow_type, request, force_new=force_new)
+
+     @router.post("/ticket_flow/bootstrap", response_model=FlowStatusResponse)
+     async def bootstrap_ticket_flow(request: Optional[FlowStartRequest] = None):
+         repo_root = find_repo_root()
+         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
+         ticket_dir.mkdir(parents=True, exist_ok=True)
+         ticket_path = ticket_dir / "TICKET-001.md"
+         flow_request = request or FlowStartRequest()
+         meta = flow_request.metadata if isinstance(flow_request.metadata, dict) else {}
+         force_new = bool(meta.get("force_new"))
+
+         if not force_new:
+             records = _safe_list_flow_runs(
+                 repo_root, flow_type="ticket_flow", recover_stuck=True
+             )
+             active = _active_or_paused_run(records)
+             if active:
+                 _reap_dead_worker(active.id)
+                 _start_flow_worker(repo_root, active.id)
+                 resp = FlowStatusResponse.from_record(active)
+                 resp.state = resp.state or {}
+                 resp.state["hint"] = "active_run_reused"
+                 return resp
+
+         seeded = False
+         if not ticket_path.exists():
+             template = """---
+ agent: codex
+ done: false
+ title: Bootstrap ticket plan
+ goal: Capture scope and seed follow-up tickets
+ ---
+
+ You are the first ticket in a new ticket_flow run.
+
+ - Read `.codex-autorunner/ISSUE.md` (or ask for the issue/PR URL if missing).
+ - If helpful, create or update workspace docs under `.codex-autorunner/workspace/`:
+   - `active_context.md` for current context and links
+   - `decisions.md` for decisions/rationale
+   - `spec.md` for requirements and constraints
+ - Break the work into additional `TICKET-00X.md` files with clear owners/goals; keep this ticket open until they exist.
+ - Place any supporting artifacts in `.codex-autorunner/runs/<run_id>/dispatch/` if needed.
+ - Write `DISPATCH.md` to dispatch a message to the user:
+   - Use `mode: pause` (handoff) to wait for user response. This pauses execution.
+   - Use `mode: notify` (informational) to message the user but keep running.
+ """
+             ticket_path.write_text(template, encoding="utf-8")
+             seeded = True
+
+         meta = flow_request.metadata if isinstance(flow_request.metadata, dict) else {}
+         payload = FlowStartRequest(
+             input_data=flow_request.input_data,
+             metadata=meta | {"seeded_ticket": seeded},
+         )
+         return await _start_flow("ticket_flow", payload, force_new=force_new)
+
+     @router.get("/ticket_flow/tickets")
+     async def list_ticket_files():
+         repo_root = find_repo_root()
+         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
+         tickets = []
+         for path in list_ticket_paths(ticket_dir):
+             doc, errors = read_ticket(path)
+             rel_path = safe_relpath(path, repo_root)
+             tickets.append(
+                 {
+                     "path": rel_path,
+                     "index": getattr(doc, "index", None),
+                     "frontmatter": asdict(doc.frontmatter) if doc else None,
+                     "body": doc.body if doc else None,
+                     "errors": errors,
+                 }
+             )
+         return {
+             "ticket_dir": safe_relpath(ticket_dir, repo_root),
+             "tickets": tickets,
+         }
+
+     @router.post("/ticket_flow/tickets", response_model=TicketResponse)
+     async def create_ticket(request: TicketCreateRequest):
+         """Create a new ticket with auto-generated index."""
+         repo_root = find_repo_root()
+         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
+         ticket_dir.mkdir(parents=True, exist_ok=True)
+
+         # Find next available index
+         existing_paths = list_ticket_paths(ticket_dir)
+         existing_indices = set()
+         for p in existing_paths:
+             idx = parse_ticket_index(p.name)
+             if idx is not None:
+                 existing_indices.add(idx)
+
+         next_index = 1
+         while next_index in existing_indices:
+             next_index += 1
+
+         # Build frontmatter
+         title_line = f"title: {request.title}\n" if request.title else ""
+         goal_line = f"goal: {request.goal}\n" if request.goal else ""
+
+         content = (
+             "---\n"
+             f"agent: {request.agent}\n"
+             "done: false\n"
+             f"{title_line}"
+             f"{goal_line}"
+             "---\n\n"
+             f"{request.body}\n"
+         )
+
+         ticket_path = ticket_dir / f"TICKET-{next_index:03d}.md"
+         atomic_write(ticket_path, content)
+
+         # Read back to validate and return
+         doc, errors = read_ticket(ticket_path)
+         if errors or not doc:
+             raise HTTPException(
+                 status_code=400, detail=f"Failed to create valid ticket: {errors}"
+             )
+
+         return TicketResponse(
+             path=safe_relpath(ticket_path, repo_root),
+             index=doc.index,
+             frontmatter=asdict(doc.frontmatter),
+             body=doc.body,
+         )
+
+     @router.put("/ticket_flow/tickets/{index}", response_model=TicketResponse)
+     async def update_ticket(index: int, request: TicketUpdateRequest):
+         """Update an existing ticket by index."""
+         repo_root = find_repo_root()
+         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
+         ticket_path = ticket_dir / f"TICKET-{index:03d}.md"
+
+         if not ticket_path.exists():
+             raise HTTPException(
+                 status_code=404, detail=f"Ticket TICKET-{index:03d}.md not found"
+             )
+
+         # Validate frontmatter before saving
+         data, body = parse_markdown_frontmatter(request.content)
+         _, errors = lint_ticket_frontmatter(data)
+         if errors:
+             raise HTTPException(
+                 status_code=400,
+                 detail={"message": "Invalid ticket frontmatter", "errors": errors},
+             )
+
+         atomic_write(ticket_path, request.content)
+
+         # Read back to return validated data
+         doc, read_errors = read_ticket(ticket_path)
+         if read_errors or not doc:
+             raise HTTPException(
+                 status_code=400, detail=f"Failed to save valid ticket: {read_errors}"
+             )
+
+         return TicketResponse(
+             path=safe_relpath(ticket_path, repo_root),
+             index=doc.index,
+             frontmatter=asdict(doc.frontmatter),
+             body=doc.body,
+         )
+
+     @router.delete("/ticket_flow/tickets/{index}", response_model=TicketDeleteResponse)
+     async def delete_ticket(index: int):
+         """Delete a ticket by index."""
+         repo_root = find_repo_root()
+         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
+         ticket_path = ticket_dir / f"TICKET-{index:03d}.md"
+
+         if not ticket_path.exists():
+             raise HTTPException(
+                 status_code=404, detail=f"Ticket TICKET-{index:03d}.md not found"
+             )
+
+         rel_path = safe_relpath(ticket_path, repo_root)
+         ticket_path.unlink()
+
+         return TicketDeleteResponse(
+             status="deleted",
+             index=index,
+             path=rel_path,
+         )
+
+     @router.post("/{run_id}/stop", response_model=FlowStatusResponse)
+     async def stop_flow(run_id: uuid.UUID):
+         run_id = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, run_id)
+         controller = _get_flow_controller(repo_root, record.flow_type)
+
+         _stop_worker(run_id)
+
+         updated = await controller.stop_flow(run_id)
+         return FlowStatusResponse.from_record(updated)
+
+     @router.post("/{run_id}/resume", response_model=FlowStatusResponse)
+     async def resume_flow(run_id: uuid.UUID):
+         run_id = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, run_id)
+         controller = _get_flow_controller(repo_root, record.flow_type)
+
+         updated = await controller.resume_flow(run_id)
+         _reap_dead_worker(run_id)
+         _start_flow_worker(repo_root, run_id)
+
+         return FlowStatusResponse.from_record(updated)
+
+     @router.post("/{run_id}/archive")
+     async def archive_flow(
+         run_id: uuid.UUID, delete_run: bool = True, force: bool = False
+     ):
+         """Archive a completed flow by moving tickets to the run's artifact directory.
+
+         Args:
+             run_id: The flow run to archive.
+             delete_run: Whether to delete the run record after archiving.
+             force: If True, allow archiving flows stuck in stopping/paused state
+                 by force-stopping the worker first.
+         """
+         run_id = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, run_id)
+
+         # Allow archiving terminal flows, or force-archiving stuck flows
+         if not FlowRunStatus(record.status).is_terminal():
+             if force and record.status in (
+                 FlowRunStatus.STOPPING,
+                 FlowRunStatus.PAUSED,
+             ):
+                 # Force-stop any remaining worker before archiving
+                 _stop_worker(run_id, timeout=2.0)
+                 _logger.info(
+                     "Force-archiving flow %s in %s state", run_id, record.status.value
+                 )
+             else:
+                 raise HTTPException(
+                     status_code=400,
+                     detail="Can only archive completed/stopped/failed flows (use force=true for stuck flows)",
+                 )
+
+         # Move tickets to run artifacts directory
+         _, artifacts_root = _flow_paths(repo_root)
+         archive_dir = artifacts_root / run_id / "archived_tickets"
+         archive_dir.mkdir(parents=True, exist_ok=True)
+
+         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
+         archived_count = 0
+         for ticket_path in list_ticket_paths(ticket_dir):
+             dest = archive_dir / ticket_path.name
+             shutil.move(str(ticket_path), str(dest))
+             archived_count += 1
+
+         # Archive runs directory (dispatch_history, reply_history, etc.) to dismiss notifications
+         outbox_paths = _resolve_outbox_for_record(record, repo_root)
+         run_dir = outbox_paths.run_dir
+         if run_dir.exists() and run_dir.is_dir():
+             archived_runs_dir = artifacts_root / run_id / "archived_runs"
+             shutil.move(str(run_dir), str(archived_runs_dir))
+
+         # Delete run record if requested
+         if delete_run:
+             store = _require_flow_store(repo_root)
+             if store:
+                 store.delete_flow_run(run_id)
+                 store.close()
+
+         return {
+             "status": "archived",
+             "run_id": run_id,
+             "tickets_archived": archived_count,
+         }
+
+     @router.get("/{run_id}/status", response_model=FlowStatusResponse)
+     async def get_flow_status(run_id: uuid.UUID):
+         run_id = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+
+         _reap_dead_worker(run_id)
+
+         record = _get_flow_record(repo_root, run_id)
+
+         # If the worker died but status claims it's still active, recover the flow
+         store = _require_flow_store(repo_root)
+         if store:
+             try:
+                 record = _maybe_recover_stuck_flow(repo_root, record, store)
+             finally:
+                 store.close()
+
+         return FlowStatusResponse.from_record(record)
+
+     @router.get("/{run_id}/events")
+     async def stream_flow_events(run_id: uuid.UUID, after: Optional[int] = None):
+         run_id = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, run_id)
+         controller = _get_flow_controller(repo_root, record.flow_type)
+
+         async def event_stream():
+             try:
+                 async for event in controller.stream_events(run_id, after_seq=after):
+                     data = event.model_dump(mode="json")
+                     yield f"data: {json.dumps(data)}\n\n"
+             except Exception as e:
+                 _logger.exception("Error streaming events for run %s: %s", run_id, e)
+                 raise
+
+         return StreamingResponse(
+             event_stream(),
+             media_type="text/event-stream",
+             headers={
+                 "Cache-Control": "no-cache",
+                 "Connection": "keep-alive",
+                 "X-Accel-Buffering": "no",
+             },
+         )
+
+     @router.get("/{run_id}/dispatch_history")
+     async def get_dispatch_history(run_id: str):
+         """Get dispatch history for a flow run.
+
+         Returns all dispatches (agent->human communications) for this run.
+         """
+         normalized = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, normalized)
+         paths = _resolve_outbox_for_record(record, repo_root)
+
+         history_entries = []
+         history_dir = paths.dispatch_history_dir
+         if history_dir.exists() and history_dir.is_dir():
+             for entry in sorted(
+                 [p for p in history_dir.iterdir() if p.is_dir()],
+                 key=lambda p: p.name,
+                 reverse=True,
+             ):
+                 dispatch_path = entry / "DISPATCH.md"
+                 dispatch, errors = (
+                     parse_dispatch(dispatch_path)
+                     if dispatch_path.exists()
+                     else (None, ["Dispatch file missing"])
+                 )
+                 dispatch_dict = asdict(dispatch) if dispatch else None
+                 if dispatch_dict and dispatch:
+                     dispatch_dict["is_handoff"] = dispatch.is_handoff
+                 attachments = []
+                 for child in sorted(entry.rglob("*")):
+                     if child.name == "DISPATCH.md":
+                         continue
+                     rel = child.relative_to(entry).as_posix()
+                     if any(part.startswith(".") for part in Path(rel).parts):
+                         continue
+                     if child.is_dir():
+                         continue
+                     attachments.append(
+                         {
+                             "name": child.name,
+                             "rel_path": rel,
+                             "path": safe_relpath(child, repo_root),
+                             "size": child.stat().st_size if child.is_file() else None,
+                             "url": f"api/flows/{normalized}/dispatch_history/{entry.name}/{quote(rel)}",
+                         }
+                     )
+                 history_entries.append(
+                     {
+                         "seq": entry.name,
+                         "dispatch": dispatch_dict,
+                         "errors": errors,
+                         "attachments": attachments,
+                         "path": safe_relpath(entry, repo_root),
+                     }
+                 )
+
+         return {"run_id": normalized, "history": history_entries}
+
+     @router.get("/{run_id}/reply_history/{seq}/{file_path:path}")
+     def get_reply_history_file(run_id: str, seq: str, file_path: str):
+         repo_root = find_repo_root()
+         db_path, _ = _flow_paths(repo_root)
+         store = FlowStore(db_path)
+         try:
+             store.initialize()
+             record = store.get_flow_run(run_id)
+         finally:
+             try:
+                 store.close()
+             except Exception:
+                 pass
+         if not record:
+             raise HTTPException(status_code=404, detail="Run not found")
+
+         if not (len(seq) == 4 and seq.isdigit()):
+             raise HTTPException(status_code=400, detail="Invalid seq")
+         if ".." in file_path or file_path.startswith("/"):
+             raise HTTPException(status_code=400, detail="Invalid file path")
+         filename = os.path.basename(file_path)
+         if filename != file_path:
+             raise HTTPException(status_code=400, detail="Invalid file path")
+
+         input_data = dict(record.input_data or {})
+         workspace_root = Path(input_data.get("workspace_root") or repo_root)
+         runs_dir = Path(input_data.get("runs_dir") or ".codex-autorunner/runs")
+         from ..tickets.replies import resolve_reply_paths
+
+         reply_paths = resolve_reply_paths(
+             workspace_root=workspace_root, runs_dir=runs_dir, run_id=run_id
+         )
+         target = reply_paths.reply_history_dir / seq / filename
+         if not target.exists() or not target.is_file():
+             raise HTTPException(status_code=404, detail="File not found")
+         return FileResponse(path=str(target), filename=filename)
+
+     @router.get("/{run_id}/dispatch_history/{seq}/{file_path:path}")
+     async def get_dispatch_file(run_id: str, seq: str, file_path: str):
+         """Get an attachment file from a dispatch history entry."""
+         normalized = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, normalized)
+         paths = _resolve_outbox_for_record(record, repo_root)
+
+         base_history = paths.dispatch_history_dir.resolve()
+
+         seq_clean = seq.strip()
+         if not re.fullmatch(r"[0-9]{4}", seq_clean):
+             raise HTTPException(
+                 status_code=400, detail="Invalid dispatch history sequence"
+             )
+
+         history_dir = (base_history / seq_clean).resolve()
+         if not history_dir.is_relative_to(base_history) or not history_dir.is_dir():
+             raise HTTPException(
+                 status_code=404, detail=f"Dispatch history not found for run {run_id}"
+             )
+
+         file_rel = PurePosixPath(file_path)
+         if file_rel.is_absolute() or ".." in file_rel.parts or "\\" in file_path:
+             raise HTTPException(status_code=400, detail="Invalid dispatch file path")
+
+         safe_parts = [part for part in file_rel.parts if part not in {"", "."}]
+         if any(not re.fullmatch(r"[A-Za-z0-9._-]+", part) for part in safe_parts):
+             raise HTTPException(status_code=400, detail="Invalid dispatch file path")
+
+         target = (history_dir / Path(*safe_parts)).resolve()
+         try:
+             resolved = target.resolve()
+         except OSError as exc:
+             raise HTTPException(status_code=400, detail=str(exc)) from exc
+
+         if not resolved.exists():
+             raise HTTPException(status_code=404, detail="File not found")
+
+         if not resolved.is_relative_to(history_dir):
+             raise HTTPException(
+                 status_code=403,
+                 detail="Access denied: file outside dispatch history directory",
+             )
+
+         return FileResponse(resolved, filename=resolved.name)
+
+     @router.get("/{run_id}/artifacts", response_model=list[FlowArtifactInfo])
+     async def list_flow_artifacts(run_id: str):
+         normalized = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, normalized)
+         controller = _get_flow_controller(repo_root, record.flow_type)
+
+         artifacts = controller.get_artifacts(normalized)
+         return [
+             FlowArtifactInfo(
+                 id=art.id,
+                 kind=art.kind,
+                 path=art.path,
+                 created_at=art.created_at,
+                 metadata=art.metadata,
+             )
+             for art in artifacts
+         ]
+
+     @router.get("/{run_id}/artifact")
+     async def get_flow_artifact(run_id: str, kind: Optional[str] = None):
+         normalized = _normalize_run_id(run_id)
+         repo_root = find_repo_root()
+         record = _get_flow_record(repo_root, normalized)
+         controller = _get_flow_controller(repo_root, record.flow_type)
+
+         artifacts_root = controller.get_artifacts_dir(normalized)
+         if not artifacts_root:
+             from fastapi import HTTPException
+
+             raise HTTPException(
+                 status_code=404, detail=f"Artifact directory not found for run {run_id}"
+             )
+
+         artifacts = controller.get_artifacts(normalized)
+
+         if kind:
+             matching = [a for a in artifacts if a.kind == kind]
+         else:
+             matching = artifacts
+
+         if not matching:
+             from fastapi import HTTPException
+
+             raise HTTPException(
+                 status_code=404,
+                 detail=f"No artifact found for run {run_id} with kind={kind}",
+             )
+
+         artifact = matching[0]
+         artifact_path = Path(artifact.path)
+
+         if not artifact_path.exists():
+             from fastapi import HTTPException
+
+             raise HTTPException(
+                 status_code=404, detail=f"Artifact file not found: {artifact.path}"
+             )
+
+         if not artifact_path.resolve().is_relative_to(artifacts_root.resolve()):
+             from fastapi import HTTPException
+
+             raise HTTPException(
+                 status_code=403,
+                 detail="Access denied: artifact path outside run directory",
+             )
+
+         return FileResponse(artifact_path, filename=artifact_path.name)
+
+     return router
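
The hunk above adds the new `/api/flows` router (routes/flows.py in the file list). A minimal sketch of how a client might drive these endpoints, assuming a running codex-autorunner server at `http://localhost:8000` and the `httpx` package installed; the host, port, and payload values are illustrative assumptions, not part of the diff.

```python
# Illustrative sketch only: assumes a codex-autorunner 1.0.0 server is already
# listening on http://localhost:8000 (host/port are assumptions, not from the diff).
import httpx

BASE = "http://localhost:8000"

with httpx.Client(timeout=30.0) as client:
    # List registered flow definitions (this version ships only "ticket_flow").
    definitions = client.get(f"{BASE}/api/flows").json()["definitions"]
    print(definitions)

    # Bootstrap a ticket_flow run; the endpoint seeds TICKET-001.md when missing
    # and reuses an active/paused run unless metadata.force_new is true.
    run = client.post(
        f"{BASE}/api/flows/ticket_flow/bootstrap",
        json={"input_data": {}, "metadata": {"force_new": False}},
    ).json()
    run_id = run["id"]

    # Poll run status; this endpoint also reconciles flows whose worker died.
    status = client.get(f"{BASE}/api/flows/{run_id}/status").json()
    print(status["status"], status.get("current_step"))
```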