codex-autorunner 1.1.0__py3-none-any.whl → 1.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +17 -7
- codex_autorunner/bootstrap.py +219 -1
- codex_autorunner/core/__init__.py +17 -1
- codex_autorunner/core/about_car.py +124 -11
- codex_autorunner/core/app_server_threads.py +6 -0
- codex_autorunner/core/config.py +238 -3
- codex_autorunner/core/context_awareness.py +39 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +71 -1
- codex_autorunner/core/flows/reconciler.py +4 -1
- codex_autorunner/core/flows/runtime.py +22 -0
- codex_autorunner/core/flows/store.py +61 -9
- codex_autorunner/core/flows/transition.py +23 -16
- codex_autorunner/core/flows/ux_helpers.py +18 -3
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/hub.py +198 -41
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +683 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/agent_backend.py +2 -5
- codex_autorunner/core/ports/run_event.py +1 -4
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +5 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/ticket_linter_cli.py +17 -0
- codex_autorunner/core/ticket_manager_cli.py +154 -92
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/utils.py +34 -6
- codex_autorunner/flows/review/service.py +23 -25
- codex_autorunner/flows/ticket_flow/definition.py +43 -1
- codex_autorunner/integrations/agents/__init__.py +2 -0
- codex_autorunner/integrations/agents/backend_orchestrator.py +18 -0
- codex_autorunner/integrations/agents/codex_backend.py +19 -8
- codex_autorunner/integrations/agents/runner.py +3 -8
- codex_autorunner/integrations/agents/wiring.py +8 -0
- codex_autorunner/integrations/telegram/adapter.py +1 -1
- codex_autorunner/integrations/telegram/config.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +346 -58
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +202 -45
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +18 -7
- codex_autorunner/integrations/telegram/handlers/messages.py +34 -3
- codex_autorunner/integrations/telegram/helpers.py +1 -3
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +30 -0
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +10 -4
- codex_autorunner/integrations/telegram/transport.py +10 -3
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/server.py +2 -2
- codex_autorunner/static/agentControls.js +21 -5
- codex_autorunner/static/app.js +115 -11
- codex_autorunner/static/archive.js +274 -81
- codex_autorunner/static/archiveApi.js +21 -0
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/constants.js +1 -1
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +46 -81
- codex_autorunner/static/index.html +303 -24
- codex_autorunner/static/messages.js +82 -4
- codex_autorunner/static/notifications.js +288 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/settings.js +3 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9141 -6742
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminalManager.js +22 -3
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +41 -13
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +69 -19
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +28 -0
- codex_autorunner/static/workspace.js +258 -44
- codex_autorunner/static/workspaceFileBrowser.js +6 -4
- codex_autorunner/surfaces/cli/cli.py +1465 -155
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/web/app.py +253 -49
- codex_autorunner/surfaces/web/routes/__init__.py +4 -0
- codex_autorunner/surfaces/web/routes/analytics.py +29 -22
- codex_autorunner/surfaces/web/routes/archive.py +197 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +297 -36
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +219 -29
- codex_autorunner/surfaces/web/routes/messages.py +70 -39
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +1 -1
- codex_autorunner/surfaces/web/routes/shared.py +0 -3
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/runner_manager.py +2 -2
- codex_autorunner/surfaces/web/schemas.py +81 -18
- codex_autorunner/tickets/agent_pool.py +27 -0
- codex_autorunner/tickets/files.py +33 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +3 -0
- codex_autorunner/tickets/outbox.py +41 -5
- codex_autorunner/tickets/runner.py +350 -69
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/METADATA +15 -19
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/RECORD +132 -101
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -3302
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, Optional
|
|
6
|
+
|
|
7
|
+
from fastapi import APIRouter, HTTPException, Request
|
|
8
|
+
from fastapi.responses import FileResponse
|
|
9
|
+
|
|
10
|
+
from ....core.filebox import (
|
|
11
|
+
BOXES,
|
|
12
|
+
FileBoxEntry,
|
|
13
|
+
delete_file,
|
|
14
|
+
ensure_structure,
|
|
15
|
+
list_filebox,
|
|
16
|
+
migrate_legacy,
|
|
17
|
+
resolve_file,
|
|
18
|
+
save_file,
|
|
19
|
+
)
|
|
20
|
+
from ....core.hub import HubSupervisor
|
|
21
|
+
from ....core.utils import find_repo_root
|
|
22
|
+
|
|
23
|
+
logger = logging.getLogger(__name__)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def _serialize_entry(entry: FileBoxEntry, *, request: Request) -> dict[str, Any]:
|
|
27
|
+
base = request.scope.get("root_path", "") or ""
|
|
28
|
+
# Provide a download URL that survives base_path rewrites.
|
|
29
|
+
download = f"{base}/api/filebox/{entry.box}/{entry.name}"
|
|
30
|
+
return {
|
|
31
|
+
"name": entry.name,
|
|
32
|
+
"box": entry.box,
|
|
33
|
+
"size": entry.size,
|
|
34
|
+
"modified_at": entry.modified_at,
|
|
35
|
+
"source": entry.source,
|
|
36
|
+
"url": download,
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _serialize_listing(
    entries: dict[str, list[FileBoxEntry]], *, request: Request
) -> dict[str, Any]:
    """Serialize every box's entries, keyed by box name."""
    result: dict[str, Any] = {}
    for box_name, box_files in entries.items():
        result[box_name] = [
            _serialize_entry(item, request=request) for item in box_files
        ]
    return result
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _resolve_repo_root(request: Request) -> Path:
|
|
50
|
+
engine = getattr(request.app.state, "engine", None)
|
|
51
|
+
repo_root = getattr(engine, "repo_root", None)
|
|
52
|
+
if isinstance(repo_root, Path):
|
|
53
|
+
return repo_root
|
|
54
|
+
if isinstance(repo_root, str):
|
|
55
|
+
try:
|
|
56
|
+
return Path(repo_root)
|
|
57
|
+
except Exception:
|
|
58
|
+
pass
|
|
59
|
+
return find_repo_root()
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def build_filebox_routes() -> APIRouter:
    """Build the single-repo FileBox API router.

    Endpoints: list all boxes, list a single box, upload files via multipart
    form, download a file, and delete a file.
    """
    router = APIRouter(prefix="/api", tags=["filebox"])

    @router.get("/filebox")
    def list_box(request: Request) -> dict[str, Any]:
        repo_root = _resolve_repo_root(request)
        ensure_structure(repo_root)
        # Best-effort migration of the pre-FileBox layout; never block listing.
        try:
            migrate_legacy(repo_root)
        except Exception:
            logger.debug("FileBox legacy migration skipped", exc_info=True)
        entries = list_filebox(repo_root)
        return _serialize_listing(entries, request=request)

    @router.get("/filebox/{box}")
    def list_single_box(box: str, request: Request) -> dict[str, Any]:
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_repo_root(request)
        ensure_structure(repo_root)
        entries = list_filebox(repo_root)
        return {box: _serialize_listing(entries, request=request).get(box, [])}

    @router.post("/filebox/{box}")
    async def upload_file(box: str, request: Request) -> dict[str, Any]:
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_repo_root(request)
        ensure_structure(repo_root)

        form = await request.form()
        saved = []
        # Form keys are field names; each value is an UploadFile-like object.
        for filename, file in form.items():
            try:
                data = await file.read()
            except Exception as exc:  # pragma: no cover - defensive
                logger.warning("Failed to read upload: %s", exc)
                continue
            try:
                path = save_file(repo_root, box, filename, data)
                saved.append(path.name)
            except ValueError as exc:
                # save_file rejects unsafe names/boxes; surface as a 400.
                raise HTTPException(status_code=400, detail=str(exc)) from exc
        return {"status": "ok", "saved": saved}

    # FIX: the rendered source showed a garbled "(unknown)" placeholder here;
    # the handlers take a `filename` path parameter, so the route template
    # must be "/filebox/{box}/{filename}".
    @router.get("/filebox/{box}/{filename}")
    def download_file(box: str, filename: str, request: Request):
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_repo_root(request)
        entry = resolve_file(repo_root, box, filename)
        if entry is None:
            raise HTTPException(status_code=404, detail="File not found")
        return FileResponse(entry.path, filename=entry.name)

    @router.delete("/filebox/{box}/{filename}")
    def delete_file_entry(box: str, filename: str, request: Request) -> dict[str, Any]:
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_repo_root(request)
        removed = delete_file(repo_root, box, filename)
        if not removed:
            raise HTTPException(status_code=404, detail="File not found")
        return {"status": "ok"}

    return router
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _resolve_hub_repo_root(request: Request, repo_id: Optional[str]) -> Path:
|
|
131
|
+
supervisor: HubSupervisor | None = getattr(
|
|
132
|
+
request.app.state, "hub_supervisor", None
|
|
133
|
+
)
|
|
134
|
+
if supervisor is None:
|
|
135
|
+
raise HTTPException(status_code=404, detail="Hub supervisor unavailable")
|
|
136
|
+
snapshots = supervisor.list_repos()
|
|
137
|
+
candidates = [
|
|
138
|
+
snap for snap in snapshots if snap.initialized and snap.exists_on_disk
|
|
139
|
+
]
|
|
140
|
+
target = None
|
|
141
|
+
if repo_id:
|
|
142
|
+
target = next((snap for snap in candidates if snap.id == repo_id), None)
|
|
143
|
+
if target is None:
|
|
144
|
+
raise HTTPException(status_code=404, detail="Repo not found")
|
|
145
|
+
else:
|
|
146
|
+
if len(candidates) == 1:
|
|
147
|
+
target = candidates[0]
|
|
148
|
+
if target is None:
|
|
149
|
+
raise HTTPException(status_code=400, detail="repo_id is required")
|
|
150
|
+
return target.path
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def _serialize_hub_entry(
|
|
154
|
+
entry: FileBoxEntry, *, request: Request, repo_id: str
|
|
155
|
+
) -> dict[str, Any]:
|
|
156
|
+
base = request.scope.get("root_path", "") or ""
|
|
157
|
+
download = f"{base}/hub/filebox/{repo_id}/{entry.box}/{entry.name}"
|
|
158
|
+
return {
|
|
159
|
+
"name": entry.name,
|
|
160
|
+
"box": entry.box,
|
|
161
|
+
"size": entry.size,
|
|
162
|
+
"modified_at": entry.modified_at,
|
|
163
|
+
"source": entry.source,
|
|
164
|
+
"url": download,
|
|
165
|
+
"repo_id": repo_id,
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def build_hub_filebox_routes() -> APIRouter:
    """Build the hub (multi-repo) FileBox API router.

    Endpoints mirror the single-repo router but are keyed by ``repo_id``:
    list, upload, download, and delete.
    """
    router = APIRouter(prefix="/hub/filebox", tags=["filebox"])

    @router.get("/{repo_id}")
    def list_repo_filebox(repo_id: str, request: Request) -> dict[str, Any]:
        repo_root = _resolve_hub_repo_root(request, repo_id)
        # Best-effort migration of the pre-FileBox layout; never block listing.
        try:
            migrate_legacy(repo_root)
        except Exception:
            logger.debug("Hub FileBox legacy migration skipped", exc_info=True)
        entries = list_filebox(repo_root)
        serialized = {
            box: [
                _serialize_hub_entry(e, request=request, repo_id=repo_id) for e in files
            ]
            for box, files in entries.items()
        }
        return serialized

    @router.post("/{repo_id}/{box}")
    async def hub_upload(repo_id: str, box: str, request: Request) -> dict[str, Any]:
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_hub_repo_root(request, repo_id)
        ensure_structure(repo_root)
        form = await request.form()
        saved = []
        for filename, file in form.items():
            data = await file.read()
            path = save_file(repo_root, box, filename, data)
            saved.append(path.name)
        return {"status": "ok", "saved": saved}

    # FIX: the rendered source showed a garbled "(unknown)" placeholder here;
    # the handlers take a `filename` path parameter, so the route template
    # must end with "/{filename}".
    @router.get("/{repo_id}/{box}/{filename}")
    def hub_download(repo_id: str, box: str, filename: str, request: Request):
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_hub_repo_root(request, repo_id)
        entry = resolve_file(repo_root, box, filename)
        if entry is None:
            raise HTTPException(status_code=404, detail="File not found")
        return FileResponse(entry.path, filename=entry.name)

    @router.delete("/{repo_id}/{box}/{filename}")
    def hub_delete(
        repo_id: str, box: str, filename: str, request: Request
    ) -> dict[str, Any]:
        if box not in BOXES:
            raise HTTPException(status_code=400, detail="Invalid box")
        repo_root = _resolve_hub_repo_root(request, repo_id)
        removed = delete_file(repo_root, box, filename)
        if not removed:
            raise HTTPException(status_code=404, detail="File not found")
        return {"status": "ok"}

    return router
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
__all__ = ["build_filebox_routes", "build_hub_filebox_routes"]
|
|
@@ -14,12 +14,11 @@ from fastapi import APIRouter, HTTPException, Request
|
|
|
14
14
|
from fastapi.responses import FileResponse, StreamingResponse
|
|
15
15
|
from pydantic import BaseModel, Field
|
|
16
16
|
|
|
17
|
-
from ....agents.registry import validate_agent_id
|
|
18
17
|
from ....core.config import load_repo_config
|
|
19
|
-
from ....core.engine import Engine
|
|
20
18
|
from ....core.flows import (
|
|
21
19
|
FlowController,
|
|
22
20
|
FlowDefinition,
|
|
21
|
+
FlowEventType,
|
|
23
22
|
FlowRunRecord,
|
|
24
23
|
FlowRunStatus,
|
|
25
24
|
FlowStore,
|
|
@@ -36,12 +35,9 @@ from ....core.flows.ux_helpers import (
|
|
|
36
35
|
seed_issue_from_text,
|
|
37
36
|
)
|
|
38
37
|
from ....core.flows.worker_process import FlowWorkerHealth, check_worker_health
|
|
38
|
+
from ....core.runtime import RuntimeContext
|
|
39
39
|
from ....core.utils import atomic_write, find_repo_root
|
|
40
40
|
from ....flows.ticket_flow import build_ticket_flow_definition
|
|
41
|
-
from ....integrations.agents.wiring import (
|
|
42
|
-
build_agent_backend_factory,
|
|
43
|
-
build_app_server_supervisor_factory,
|
|
44
|
-
)
|
|
45
41
|
from ....integrations.github.service import GitHubError, GitHubService
|
|
46
42
|
from ....tickets import AgentPool
|
|
47
43
|
from ....tickets.files import (
|
|
@@ -51,7 +47,7 @@ from ....tickets.files import (
|
|
|
51
47
|
safe_relpath,
|
|
52
48
|
)
|
|
53
49
|
from ....tickets.frontmatter import parse_markdown_frontmatter
|
|
54
|
-
from ....tickets.lint import lint_ticket_frontmatter
|
|
50
|
+
from ....tickets.lint import lint_ticket_directory, lint_ticket_frontmatter
|
|
55
51
|
from ....tickets.outbox import parse_dispatch, resolve_outbox_paths
|
|
56
52
|
from ..schemas import (
|
|
57
53
|
TicketCreateRequest,
|
|
@@ -82,6 +78,14 @@ def _ticket_dir(repo_root: Path) -> Path:
|
|
|
82
78
|
return repo_root / ".codex-autorunner" / "tickets"
|
|
83
79
|
|
|
84
80
|
|
|
81
|
+
def _find_ticket_path_by_index(ticket_dir: Path, index: int) -> Optional[Path]:
    """Return the ticket file whose filename encodes *index*, or None."""
    return next(
        (
            candidate
            for candidate in list_ticket_paths(ticket_dir)
            if parse_ticket_index(candidate.name) == index
        ),
        None,
    )
|
|
87
|
+
|
|
88
|
+
|
|
85
89
|
def _require_flow_store(repo_root: Path) -> Optional[FlowStore]:
|
|
86
90
|
db_path, _ = _flow_paths(repo_root)
|
|
87
91
|
store = FlowStore(db_path)
|
|
@@ -126,12 +130,9 @@ def _build_flow_definition(repo_root: Path, flow_type: str) -> FlowDefinition:
|
|
|
126
130
|
|
|
127
131
|
if flow_type == "ticket_flow":
|
|
128
132
|
config = load_repo_config(repo_root)
|
|
129
|
-
engine =
|
|
130
|
-
repo_root,
|
|
133
|
+
engine = RuntimeContext(
|
|
134
|
+
repo_root=repo_root,
|
|
131
135
|
config=config,
|
|
132
|
-
backend_factory=build_agent_backend_factory(repo_root, config),
|
|
133
|
-
app_server_supervisor_factory=build_app_server_supervisor_factory(config),
|
|
134
|
-
agent_id_validator=validate_agent_id,
|
|
135
136
|
)
|
|
136
137
|
agent_pool = AgentPool(engine.config)
|
|
137
138
|
definition = build_ticket_flow_definition(agent_pool=agent_pool)
|
|
@@ -200,6 +201,27 @@ def _normalize_run_id(run_id: Union[str, uuid.UUID]) -> str:
|
|
|
200
201
|
raise HTTPException(status_code=400, detail="Invalid run_id") from None
|
|
201
202
|
|
|
202
203
|
|
|
204
|
+
def _validate_tickets(ticket_dir: Path) -> list[str]:
|
|
205
|
+
"""Validate all tickets in the directory and return a list of error messages."""
|
|
206
|
+
errors: list[str] = []
|
|
207
|
+
|
|
208
|
+
if not ticket_dir.exists():
|
|
209
|
+
return errors
|
|
210
|
+
|
|
211
|
+
# Check for directory-level errors (duplicate indices)
|
|
212
|
+
dir_errors = lint_ticket_directory(ticket_dir)
|
|
213
|
+
errors.extend(dir_errors)
|
|
214
|
+
|
|
215
|
+
# Check each ticket file for frontmatter errors
|
|
216
|
+
ticket_paths = list_ticket_paths(ticket_dir)
|
|
217
|
+
for path in ticket_paths:
|
|
218
|
+
_, ticket_errors = read_ticket(path)
|
|
219
|
+
for err in ticket_errors:
|
|
220
|
+
errors.append(f"{path.relative_to(path.parent.parent)}: {err}")
|
|
221
|
+
|
|
222
|
+
return errors
|
|
223
|
+
|
|
224
|
+
|
|
203
225
|
def _cleanup_worker_handle(run_id: str) -> None:
|
|
204
226
|
handle = _active_workers.pop(run_id, None)
|
|
205
227
|
if not handle:
|
|
@@ -276,6 +298,7 @@ class FlowStatusResponse(BaseModel):
|
|
|
276
298
|
error_message: Optional[str]
|
|
277
299
|
state: Dict = Field(default_factory=dict)
|
|
278
300
|
reason_summary: Optional[str] = None
|
|
301
|
+
ticket_progress: Optional[Dict[str, int]] = None
|
|
279
302
|
last_event_seq: Optional[int] = None
|
|
280
303
|
last_event_at: Optional[str] = None
|
|
281
304
|
worker_health: Optional[FlowWorkerHealthResponse] = None
|
|
@@ -337,6 +360,7 @@ def _build_flow_status_response(
|
|
|
337
360
|
last_event_at=snapshot["last_event_at"],
|
|
338
361
|
worker_health=snapshot["worker_health"],
|
|
339
362
|
)
|
|
363
|
+
resp.ticket_progress = snapshot.get("ticket_progress")
|
|
340
364
|
if snapshot.get("state") is not None:
|
|
341
365
|
resp.state = snapshot["state"]
|
|
342
366
|
return resp
|
|
@@ -416,6 +440,39 @@ def build_flow_routes() -> APIRouter:
|
|
|
416
440
|
workspace_root=workspace_root, runs_dir=runs_dir, run_id=record.id
|
|
417
441
|
)
|
|
418
442
|
|
|
443
|
+
def _get_diff_stats_by_dispatch_seq(
|
|
444
|
+
repo_root: Path, *, run_id: str
|
|
445
|
+
) -> dict[int, dict[str, int]]:
|
|
446
|
+
"""Return mapping of dispatch_seq -> diff stats for the run."""
|
|
447
|
+
store = _require_flow_store(repo_root)
|
|
448
|
+
if store is None:
|
|
449
|
+
return {}
|
|
450
|
+
try:
|
|
451
|
+
events = store.get_events_by_type(run_id, FlowEventType.DIFF_UPDATED)
|
|
452
|
+
except Exception:
|
|
453
|
+
events = []
|
|
454
|
+
finally:
|
|
455
|
+
try:
|
|
456
|
+
store.close()
|
|
457
|
+
except Exception:
|
|
458
|
+
pass
|
|
459
|
+
|
|
460
|
+
by_seq: dict[int, dict[str, int]] = {}
|
|
461
|
+
for ev in events:
|
|
462
|
+
data = ev.data or {}
|
|
463
|
+
try:
|
|
464
|
+
seq_val = int(data.get("dispatch_seq") or 0)
|
|
465
|
+
except Exception:
|
|
466
|
+
continue
|
|
467
|
+
if seq_val <= 0:
|
|
468
|
+
continue
|
|
469
|
+
by_seq[seq_val] = {
|
|
470
|
+
"insertions": int(data.get("insertions") or 0),
|
|
471
|
+
"deletions": int(data.get("deletions") or 0),
|
|
472
|
+
"files_changed": int(data.get("files_changed") or 0),
|
|
473
|
+
}
|
|
474
|
+
return by_seq
|
|
475
|
+
|
|
419
476
|
@router.get("")
|
|
420
477
|
async def list_flow_definitions():
|
|
421
478
|
repo_root = find_repo_root()
|
|
@@ -461,7 +518,11 @@ def build_flow_routes() -> APIRouter:
|
|
|
461
518
|
return _definition_info(definition)
|
|
462
519
|
|
|
463
520
|
async def _start_flow(
|
|
464
|
-
flow_type: str,
|
|
521
|
+
flow_type: str,
|
|
522
|
+
request: FlowStartRequest,
|
|
523
|
+
*,
|
|
524
|
+
force_new: bool = False,
|
|
525
|
+
validate_tickets: bool = True,
|
|
465
526
|
) -> FlowStatusResponse:
|
|
466
527
|
if flow_type not in _supported_flow_types:
|
|
467
528
|
raise HTTPException(
|
|
@@ -471,6 +532,27 @@ def build_flow_routes() -> APIRouter:
|
|
|
471
532
|
repo_root = find_repo_root()
|
|
472
533
|
controller = _get_flow_controller(repo_root, flow_type)
|
|
473
534
|
|
|
535
|
+
if flow_type == "ticket_flow" and validate_tickets:
|
|
536
|
+
ticket_dir = repo_root / ".codex-autorunner" / "tickets"
|
|
537
|
+
if force_new and not list_ticket_paths(ticket_dir):
|
|
538
|
+
raise HTTPException(
|
|
539
|
+
status_code=400,
|
|
540
|
+
detail=(
|
|
541
|
+
"No tickets found under .codex-autorunner/tickets. "
|
|
542
|
+
"Use /api/flows/ticket_flow/bootstrap to seed tickets."
|
|
543
|
+
),
|
|
544
|
+
)
|
|
545
|
+
|
|
546
|
+
lint_errors = _validate_tickets(ticket_dir)
|
|
547
|
+
if lint_errors:
|
|
548
|
+
raise HTTPException(
|
|
549
|
+
status_code=400,
|
|
550
|
+
detail={
|
|
551
|
+
"message": "Ticket validation failed",
|
|
552
|
+
"errors": lint_errors,
|
|
553
|
+
},
|
|
554
|
+
)
|
|
555
|
+
|
|
474
556
|
# Reuse an active/paused run unless force_new is requested.
|
|
475
557
|
if not force_new:
|
|
476
558
|
runs = _safe_list_flow_runs(
|
|
@@ -592,6 +674,16 @@ def build_flow_routes() -> APIRouter:
|
|
|
592
674
|
)
|
|
593
675
|
active = _active_or_paused_run(records)
|
|
594
676
|
if active:
|
|
677
|
+
# Validate tickets before reusing active run
|
|
678
|
+
lint_errors = _validate_tickets(ticket_dir)
|
|
679
|
+
if lint_errors:
|
|
680
|
+
raise HTTPException(
|
|
681
|
+
status_code=400,
|
|
682
|
+
detail={
|
|
683
|
+
"message": "Ticket validation failed",
|
|
684
|
+
"errors": lint_errors,
|
|
685
|
+
},
|
|
686
|
+
)
|
|
595
687
|
_reap_dead_worker(active.id)
|
|
596
688
|
_start_flow_worker(repo_root, active.id)
|
|
597
689
|
store = _require_flow_store(repo_root)
|
|
@@ -636,12 +728,51 @@ You are the first ticket in a new ticket_flow run.
|
|
|
636
728
|
input_data=flow_request.input_data,
|
|
637
729
|
metadata=meta | {"seeded_ticket": seeded},
|
|
638
730
|
)
|
|
639
|
-
|
|
731
|
+
validate_tickets = not tickets_exist or force_new
|
|
732
|
+
return await _start_flow(
|
|
733
|
+
"ticket_flow",
|
|
734
|
+
payload,
|
|
735
|
+
force_new=force_new,
|
|
736
|
+
validate_tickets=validate_tickets,
|
|
737
|
+
)
|
|
640
738
|
|
|
641
739
|
@router.get("/ticket_flow/tickets")
|
|
642
740
|
async def list_ticket_files():
|
|
643
741
|
repo_root = find_repo_root()
|
|
644
742
|
ticket_dir = repo_root / ".codex-autorunner" / "tickets"
|
|
743
|
+
# Compute cumulative diff stats per ticket for the active/paused run.
|
|
744
|
+
runs = _safe_list_flow_runs(
|
|
745
|
+
repo_root, flow_type="ticket_flow", recover_stuck=True
|
|
746
|
+
)
|
|
747
|
+
active_run = _active_or_paused_run(runs)
|
|
748
|
+
diff_by_ticket: dict[str, dict[str, int]] = {}
|
|
749
|
+
if active_run:
|
|
750
|
+
store = _require_flow_store(repo_root)
|
|
751
|
+
if store is not None:
|
|
752
|
+
try:
|
|
753
|
+
events = store.get_events_by_type(
|
|
754
|
+
active_run.id, FlowEventType.DIFF_UPDATED
|
|
755
|
+
)
|
|
756
|
+
except Exception:
|
|
757
|
+
events = []
|
|
758
|
+
finally:
|
|
759
|
+
try:
|
|
760
|
+
store.close()
|
|
761
|
+
except Exception:
|
|
762
|
+
pass
|
|
763
|
+
for ev in events:
|
|
764
|
+
data = ev.data or {}
|
|
765
|
+
ticket_id = data.get("ticket_id")
|
|
766
|
+
if not isinstance(ticket_id, str) or not ticket_id.strip():
|
|
767
|
+
continue
|
|
768
|
+
stats = diff_by_ticket.setdefault(
|
|
769
|
+
ticket_id,
|
|
770
|
+
{"insertions": 0, "deletions": 0, "files_changed": 0},
|
|
771
|
+
)
|
|
772
|
+
stats["insertions"] += int(data.get("insertions") or 0)
|
|
773
|
+
stats["deletions"] += int(data.get("deletions") or 0)
|
|
774
|
+
stats["files_changed"] += int(data.get("files_changed") or 0)
|
|
775
|
+
|
|
645
776
|
tickets = []
|
|
646
777
|
for path in list_ticket_paths(ticket_dir):
|
|
647
778
|
doc, errors = read_ticket(path)
|
|
@@ -662,6 +793,7 @@ You are the first ticket in a new ticket_flow run.
|
|
|
662
793
|
"frontmatter": asdict(doc.frontmatter) if doc else None,
|
|
663
794
|
"body": doc.body if doc else parsed_body,
|
|
664
795
|
"errors": errors,
|
|
796
|
+
"diff_stats": diff_by_ticket.get(rel_path),
|
|
665
797
|
}
|
|
666
798
|
)
|
|
667
799
|
return {
|
|
@@ -669,6 +801,39 @@ You are the first ticket in a new ticket_flow run.
|
|
|
669
801
|
"tickets": tickets,
|
|
670
802
|
}
|
|
671
803
|
|
|
804
|
+
@router.get("/ticket_flow/tickets/{index}", response_model=TicketResponse)
|
|
805
|
+
async def get_ticket(index: int):
|
|
806
|
+
"""Fetch a single ticket by index; return raw body even if frontmatter is invalid."""
|
|
807
|
+
repo_root = find_repo_root()
|
|
808
|
+
ticket_dir = repo_root / ".codex-autorunner" / "tickets"
|
|
809
|
+
ticket_path = _find_ticket_path_by_index(ticket_dir, index)
|
|
810
|
+
|
|
811
|
+
if not ticket_path:
|
|
812
|
+
raise HTTPException(status_code=404, detail=f"Ticket {index:03d} not found")
|
|
813
|
+
|
|
814
|
+
doc, errors = read_ticket(ticket_path)
|
|
815
|
+
if doc and not errors:
|
|
816
|
+
return TicketResponse(
|
|
817
|
+
path=safe_relpath(ticket_path, repo_root),
|
|
818
|
+
index=doc.index,
|
|
819
|
+
frontmatter=asdict(doc.frontmatter),
|
|
820
|
+
body=doc.body,
|
|
821
|
+
)
|
|
822
|
+
|
|
823
|
+
# Mirror list endpoint: surface raw body for repair when frontmatter is broken.
|
|
824
|
+
try:
|
|
825
|
+
raw_body = ticket_path.read_text(encoding="utf-8")
|
|
826
|
+
parsed_frontmatter, parsed_body = parse_markdown_frontmatter(raw_body)
|
|
827
|
+
except Exception:
|
|
828
|
+
parsed_frontmatter, parsed_body = {}, None
|
|
829
|
+
|
|
830
|
+
return TicketResponse(
|
|
831
|
+
path=safe_relpath(ticket_path, repo_root),
|
|
832
|
+
index=parse_ticket_index(ticket_path.name),
|
|
833
|
+
frontmatter=parsed_frontmatter or {},
|
|
834
|
+
body=parsed_body,
|
|
835
|
+
)
|
|
836
|
+
|
|
672
837
|
@router.post("/ticket_flow/tickets", response_model=TicketResponse)
|
|
673
838
|
async def create_ticket(request: TicketCreateRequest):
|
|
674
839
|
"""Create a new ticket with auto-generated index."""
|
|
@@ -688,13 +853,18 @@ You are the first ticket in a new ticket_flow run.
|
|
|
688
853
|
while next_index in existing_indices:
|
|
689
854
|
next_index += 1
|
|
690
855
|
|
|
691
|
-
# Build frontmatter
|
|
692
|
-
|
|
693
|
-
|
|
856
|
+
# Build frontmatter (quote scalars to avoid YAML parse issues with colons, etc.)
|
|
857
|
+
def _quote(val: Optional[str]) -> str:
|
|
858
|
+
return (
|
|
859
|
+
json.dumps(val) if val is not None else ""
|
|
860
|
+
) # JSON string is valid YAML scalar
|
|
861
|
+
|
|
862
|
+
title_line = f"title: {_quote(request.title)}\n" if request.title else ""
|
|
863
|
+
goal_line = f"goal: {_quote(request.goal)}\n" if request.goal else ""
|
|
694
864
|
|
|
695
865
|
content = (
|
|
696
866
|
"---\n"
|
|
697
|
-
f"agent: {request.agent}\n"
|
|
867
|
+
f"agent: {_quote(request.agent)}\n"
|
|
698
868
|
"done: false\n"
|
|
699
869
|
f"{title_line}"
|
|
700
870
|
f"{goal_line}"
|
|
@@ -724,12 +894,10 @@ You are the first ticket in a new ticket_flow run.
|
|
|
724
894
|
"""Update an existing ticket by index."""
|
|
725
895
|
repo_root = find_repo_root()
|
|
726
896
|
ticket_dir = repo_root / ".codex-autorunner" / "tickets"
|
|
727
|
-
ticket_path = ticket_dir
|
|
897
|
+
ticket_path = _find_ticket_path_by_index(ticket_dir, index)
|
|
728
898
|
|
|
729
|
-
if not ticket_path
|
|
730
|
-
raise HTTPException(
|
|
731
|
-
status_code=404, detail=f"Ticket TICKET-{index:03d}.md not found"
|
|
732
|
-
)
|
|
899
|
+
if not ticket_path:
|
|
900
|
+
raise HTTPException(status_code=404, detail=f"Ticket {index:03d} not found")
|
|
733
901
|
|
|
734
902
|
# Validate frontmatter before saving
|
|
735
903
|
data, body = parse_markdown_frontmatter(request.content)
|
|
@@ -761,12 +929,10 @@ You are the first ticket in a new ticket_flow run.
|
|
|
761
929
|
"""Delete a ticket by index."""
|
|
762
930
|
repo_root = find_repo_root()
|
|
763
931
|
ticket_dir = repo_root / ".codex-autorunner" / "tickets"
|
|
764
|
-
ticket_path = ticket_dir
|
|
932
|
+
ticket_path = _find_ticket_path_by_index(ticket_dir, index)
|
|
765
933
|
|
|
766
|
-
if not ticket_path
|
|
767
|
-
raise HTTPException(
|
|
768
|
-
status_code=404, detail=f"Ticket TICKET-{index:03d}.md not found"
|
|
769
|
-
)
|
|
934
|
+
if not ticket_path:
|
|
935
|
+
raise HTTPException(status_code=404, detail=f"Ticket {index:03d} not found")
|
|
770
936
|
|
|
771
937
|
rel_path = safe_relpath(ticket_path, repo_root)
|
|
772
938
|
ticket_path.unlink()
|
|
@@ -801,6 +967,19 @@ You are the first ticket in a new ticket_flow run.
|
|
|
801
967
|
record = _get_flow_record(repo_root, run_id)
|
|
802
968
|
controller = _get_flow_controller(repo_root, record.flow_type)
|
|
803
969
|
|
|
970
|
+
# Validate tickets before resuming ticket_flow
|
|
971
|
+
if record.flow_type == "ticket_flow":
|
|
972
|
+
ticket_dir = repo_root / ".codex-autorunner" / "tickets"
|
|
973
|
+
lint_errors = _validate_tickets(ticket_dir)
|
|
974
|
+
if lint_errors:
|
|
975
|
+
raise HTTPException(
|
|
976
|
+
status_code=400,
|
|
977
|
+
detail={
|
|
978
|
+
"message": "Ticket validation failed",
|
|
979
|
+
"errors": lint_errors,
|
|
980
|
+
},
|
|
981
|
+
)
|
|
982
|
+
|
|
804
983
|
updated = await controller.resume_flow(run_id)
|
|
805
984
|
_reap_dead_worker(run_id)
|
|
806
985
|
_start_flow_worker(repo_root, run_id)
|
|
@@ -935,7 +1114,7 @@ You are the first ticket in a new ticket_flow run.
|
|
|
935
1114
|
run_id, after_seq=resume_after
|
|
936
1115
|
):
|
|
937
1116
|
data = event.model_dump(mode="json")
|
|
938
|
-
yield f"id: {event.seq}\
|
|
1117
|
+
yield f"id: {event.seq}\ndata: {json.dumps(data)}\n\n"
|
|
939
1118
|
except Exception as e:
|
|
940
1119
|
_logger.exception("Error streaming events for run %s: %s", run_id, e)
|
|
941
1120
|
raise
|
|
@@ -961,6 +1140,10 @@ You are the first ticket in a new ticket_flow run.
|
|
|
961
1140
|
record = _get_flow_record(repo_root, normalized)
|
|
962
1141
|
paths = _resolve_outbox_for_record(record, repo_root)
|
|
963
1142
|
|
|
1143
|
+
# Pull diff stats from FlowStore keyed by dispatch sequence number so we
|
|
1144
|
+
# can enrich dispatch history entries without relying on DISPATCH.md metadata.
|
|
1145
|
+
diff_by_seq = _get_diff_stats_by_dispatch_seq(repo_root, run_id=normalized)
|
|
1146
|
+
|
|
964
1147
|
history_entries = []
|
|
965
1148
|
history_dir = paths.dispatch_history_dir
|
|
966
1149
|
if history_dir.exists() and history_dir.is_dir():
|
|
@@ -978,6 +1161,13 @@ You are the first ticket in a new ticket_flow run.
|
|
|
978
1161
|
dispatch_dict = asdict(dispatch) if dispatch else None
|
|
979
1162
|
if dispatch_dict and dispatch:
|
|
980
1163
|
dispatch_dict["is_handoff"] = dispatch.is_handoff
|
|
1164
|
+
# Add structured diff stats (per turn summary), matched by seq.
|
|
1165
|
+
try:
|
|
1166
|
+
entry_seq_int = int(entry.name)
|
|
1167
|
+
except Exception:
|
|
1168
|
+
entry_seq_int = 0
|
|
1169
|
+
if entry_seq_int and entry_seq_int in diff_by_seq:
|
|
1170
|
+
dispatch_dict["diff_stats"] = diff_by_seq[entry_seq_int]
|
|
981
1171
|
attachments = []
|
|
982
1172
|
for child in sorted(entry.rglob("*")):
|
|
983
1173
|
if child.name == "DISPATCH.md":
|