codex-autorunner 1.2.1__py3-none-any.whl → 1.3.0__py3-none-any.whl

Files changed (55)
  1. codex_autorunner/bootstrap.py +26 -5
  2. codex_autorunner/core/config.py +176 -59
  3. codex_autorunner/core/filesystem.py +24 -0
  4. codex_autorunner/core/flows/controller.py +50 -12
  5. codex_autorunner/core/flows/runtime.py +8 -3
  6. codex_autorunner/core/hub.py +293 -16
  7. codex_autorunner/core/lifecycle_events.py +44 -5
  8. codex_autorunner/core/pma_delivery.py +81 -0
  9. codex_autorunner/core/pma_dispatches.py +224 -0
  10. codex_autorunner/core/pma_lane_worker.py +122 -0
  11. codex_autorunner/core/pma_queue.py +167 -18
  12. codex_autorunner/core/pma_reactive.py +91 -0
  13. codex_autorunner/core/pma_safety.py +58 -0
  14. codex_autorunner/core/pma_sink.py +104 -0
  15. codex_autorunner/core/pma_transcripts.py +183 -0
  16. codex_autorunner/core/safe_paths.py +117 -0
  17. codex_autorunner/housekeeping.py +77 -23
  18. codex_autorunner/integrations/agents/codex_backend.py +18 -12
  19. codex_autorunner/integrations/agents/wiring.py +2 -0
  20. codex_autorunner/integrations/app_server/client.py +31 -0
  21. codex_autorunner/integrations/app_server/supervisor.py +3 -0
  22. codex_autorunner/integrations/telegram/constants.py +1 -1
  23. codex_autorunner/integrations/telegram/handlers/commands/execution.py +16 -15
  24. codex_autorunner/integrations/telegram/handlers/commands/files.py +5 -8
  25. codex_autorunner/integrations/telegram/handlers/commands/github.py +10 -6
  26. codex_autorunner/integrations/telegram/handlers/commands/shared.py +9 -8
  27. codex_autorunner/integrations/telegram/handlers/commands/workspace.py +85 -2
  28. codex_autorunner/integrations/telegram/handlers/commands_runtime.py +29 -8
  29. codex_autorunner/integrations/telegram/helpers.py +30 -2
  30. codex_autorunner/integrations/telegram/ticket_flow_bridge.py +54 -3
  31. codex_autorunner/static/docChatCore.js +2 -0
  32. codex_autorunner/static/hub.js +59 -0
  33. codex_autorunner/static/index.html +70 -54
  34. codex_autorunner/static/notificationBell.js +173 -0
  35. codex_autorunner/static/notifications.js +154 -36
  36. codex_autorunner/static/pma.js +96 -35
  37. codex_autorunner/static/styles.css +415 -4
  38. codex_autorunner/static/utils.js +5 -1
  39. codex_autorunner/surfaces/cli/cli.py +206 -129
  40. codex_autorunner/surfaces/cli/template_repos.py +157 -0
  41. codex_autorunner/surfaces/web/app.py +193 -5
  42. codex_autorunner/surfaces/web/routes/file_chat.py +109 -61
  43. codex_autorunner/surfaces/web/routes/flows.py +125 -67
  44. codex_autorunner/surfaces/web/routes/pma.py +638 -57
  45. codex_autorunner/tickets/agent_pool.py +6 -1
  46. codex_autorunner/tickets/outbox.py +27 -14
  47. codex_autorunner/tickets/replies.py +4 -10
  48. codex_autorunner/tickets/runner.py +1 -0
  49. codex_autorunner/workspace/paths.py +8 -3
  50. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/METADATA +1 -1
  51. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/RECORD +55 -45
  52. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/WHEEL +0 -0
  53. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/entry_points.txt +0 -0
  54. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/licenses/LICENSE +0 -0
  55. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/top_level.txt +0 -0
@@ -1,11 +1,11 @@
 import json
 import logging
-import os
 import re
 import shutil
 import subprocess
+import threading
 import uuid
-from dataclasses import asdict
+from dataclasses import asdict, dataclass
 from pathlib import Path, PurePosixPath
 from typing import IO, Dict, Optional, Tuple, Union
 from urllib.parse import quote
@@ -36,6 +36,7 @@ from ....core.flows.ux_helpers import (
 )
 from ....core.flows.worker_process import FlowWorkerHealth, check_worker_health
 from ....core.runtime import RuntimeContext
+from ....core.safe_paths import SafePathError, validate_single_filename
 from ....core.utils import atomic_write, find_repo_root
 from ....flows.ticket_flow import build_ticket_flow_definition
 from ....integrations.github.service import GitHubError, GitHubService
@@ -58,14 +59,25 @@ from ..schemas import (

 _logger = logging.getLogger(__name__)

-_active_workers: Dict[
-    str, Tuple[Optional[subprocess.Popen], Optional[IO[bytes]], Optional[IO[bytes]]]
-] = {}
-_controller_cache: Dict[tuple[Path, str], FlowController] = {}
-_definition_cache: Dict[tuple[Path, str], FlowDefinition] = {}
 _supported_flow_types = ("ticket_flow",)


+@dataclass
+class FlowRoutesState:
+    active_workers: Dict[
+        str, Tuple[Optional[subprocess.Popen], Optional[IO[bytes]], Optional[IO[bytes]]]
+    ]
+    controller_cache: Dict[tuple[Path, str], FlowController]
+    definition_cache: Dict[tuple[Path, str], FlowDefinition]
+    lock: threading.Lock
+
+    def __init__(self) -> None:
+        self.active_workers = {}
+        self.controller_cache = {}
+        self.definition_cache = {}
+        self.lock = threading.Lock()
+
+
 def _flow_paths(repo_root: Path) -> tuple[Path, Path]:
     repo_root = repo_root.resolve()
     db_path = repo_root / ".codex-autorunner" / "flows.db"
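The hunk above replaces the module-level `_active_workers` / `_controller_cache` / `_definition_cache` globals with a single lock-guarded state object. A minimal self-contained sketch of the same shape (field types simplified; this is an illustration, not the packaged class):

```python
import subprocess
import threading
from dataclasses import dataclass, field
from typing import Dict, IO, Optional, Tuple


# Simplified stand-in for the FlowRoutesState introduced in flows.py;
# FlowController/FlowDefinition are replaced with plain tuples/objects to stay runnable.
@dataclass
class FlowRoutesStateSketch:
    active_workers: Dict[
        str, Tuple[Optional[subprocess.Popen], Optional[IO[bytes]], Optional[IO[bytes]]]
    ] = field(default_factory=dict)
    controller_cache: Dict[tuple, object] = field(default_factory=dict)
    definition_cache: Dict[tuple, object] = field(default_factory=dict)
    lock: threading.Lock = field(default_factory=threading.Lock)


# Each call to build_flow_routes() owns its own instance, so two routers
# (for example in tests) no longer share caches or worker handles.
state_a = FlowRoutesStateSketch()
state_b = FlowRoutesStateSketch()
state_a.controller_cache[("repo", "ticket_flow")] = object()
assert ("repo", "ticket_flow") not in state_b.controller_cache
```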
@@ -122,11 +134,14 @@ def _safe_list_flow_runs(
         pass


-def _build_flow_definition(repo_root: Path, flow_type: str) -> FlowDefinition:
+def _build_flow_definition(
+    repo_root: Path, flow_type: str, state: FlowRoutesState
+) -> FlowDefinition:
     repo_root = repo_root.resolve()
     key = (repo_root, flow_type)
-    if key in _definition_cache:
-        return _definition_cache[key]
+    with state.lock:
+        if key in state.definition_cache:
+            return state.definition_cache[key]

     if flow_type == "ticket_flow":
         config = load_repo_config(repo_root)
@@ -140,18 +155,22 @@ def _build_flow_definition(repo_root: Path, flow_type: str) -> FlowDefinition:
         raise HTTPException(status_code=404, detail=f"Unknown flow type: {flow_type}")

     definition.validate()
-    _definition_cache[key] = definition
+    with state.lock:
+        state.definition_cache[key] = definition
     return definition


-def _get_flow_controller(repo_root: Path, flow_type: str) -> FlowController:
+def _get_flow_controller(
+    repo_root: Path, flow_type: str, state: FlowRoutesState
+) -> FlowController:
     repo_root = repo_root.resolve()
     key = (repo_root, flow_type)
-    if key in _controller_cache:
-        return _controller_cache[key]
+    with state.lock:
+        if key in state.controller_cache:
+            return state.controller_cache[key]

     db_path, artifacts_root = _flow_paths(repo_root)
-    definition = _build_flow_definition(repo_root, flow_type)
+    definition = _build_flow_definition(repo_root, flow_type, state)

     controller = FlowController(
         definition=definition,
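The caching in these hunks follows a check-under-lock, build-outside-lock, publish-under-lock shape: a slow build is not serialized, so two racing requests may both construct a value, but the publish step is harmless for idempotent values. A generic sketch of that pattern (hypothetical `get_or_build` helper, not the project's code):

```python
import threading
from typing import Callable, Dict, Hashable

_lock = threading.Lock()
_cache: Dict[Hashable, object] = {}


def get_or_build(key: Hashable, build: Callable[[], object]) -> object:
    # Fast path: consult the cache while holding the lock.
    with _lock:
        if key in _cache:
            return _cache[key]
    # Build outside the lock so a slow build does not block other requests;
    # two racing callers may both build, which is acceptable for idempotent values.
    value = build()
    with _lock:
        # Publish; keep whichever value landed first.
        return _cache.setdefault(key, value)
```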
@@ -165,7 +184,8 @@ def _get_flow_controller(repo_root: Path, flow_type: str) -> FlowController:
         raise HTTPException(
             status_code=503, detail="Flows unavailable; initialize the repo first."
         ) from exc
-    _controller_cache[key] = controller
+    with state.lock:
+        state.controller_cache[key] = controller
     return controller


@@ -222,8 +242,9 @@ def _validate_tickets(ticket_dir: Path) -> list[str]:
     return errors


-def _cleanup_worker_handle(run_id: str) -> None:
-    handle = _active_workers.pop(run_id, None)
+def _cleanup_worker_handle(run_id: str, state: FlowRoutesState) -> None:
+    with state.lock:
+        handle = state.active_workers.pop(run_id, None)
     if not handle:
         return

@@ -246,13 +267,14 @@ def _cleanup_worker_handle(run_id: str) -> None:
             pass


-def _reap_dead_worker(run_id: str) -> None:
-    handle = _active_workers.get(run_id)
+def _reap_dead_worker(run_id: str, state: FlowRoutesState) -> None:
+    with state.lock:
+        handle = state.active_workers.get(run_id)
     if not handle:
         return
     proc, *_ = handle
     if proc and proc.poll() is not None:
-        _cleanup_worker_handle(run_id)
+        _cleanup_worker_handle(run_id, state)


 class FlowStartRequest(BaseModel):
@@ -366,10 +388,12 @@ def _build_flow_status_response(
     return resp


-def _start_flow_worker(repo_root: Path, run_id: str) -> Optional[subprocess.Popen]:
+def _start_flow_worker(
+    repo_root: Path, run_id: str, state: FlowRoutesState
+) -> Optional[subprocess.Popen]:
     normalized_run_id = _normalize_run_id(run_id)

-    _reap_dead_worker(normalized_run_id)
+    _reap_dead_worker(normalized_run_id, state)
     result = ensure_worker(repo_root, normalized_run_id)
     if result["status"] == "reused":
         health = result["health"]
@@ -382,14 +406,16 @@ def _start_flow_worker(repo_root: Path, run_id: str) -> Optional[subprocess.Pope
     proc = result["proc"]
     stdout_handle = result["stdout"]
     stderr_handle = result["stderr"]
-    _active_workers[normalized_run_id] = (proc, stdout_handle, stderr_handle)
+    with state.lock:
+        state.active_workers[normalized_run_id] = (proc, stdout_handle, stderr_handle)
     _logger.info("Started flow worker for run %s (pid=%d)", normalized_run_id, proc.pid)
     return proc


-def _stop_worker(run_id: str, timeout: float = 10.0) -> None:
+def _stop_worker(run_id: str, state: FlowRoutesState, timeout: float = 10.0) -> None:
     normalized_run_id = _normalize_run_id(run_id)
-    handle = _active_workers.get(normalized_run_id)
+    with state.lock:
+        handle = state.active_workers.get(normalized_run_id)
     if not handle:
         health = check_worker_health(find_repo_root(), normalized_run_id)
         if health.is_alive and health.pid:
@@ -419,12 +445,19 @@ def _stop_worker(run_id: str, timeout: float = 10.0) -> None:
     except Exception as exc:
         _logger.warning("Error stopping worker %s: %s", normalized_run_id, exc)

-    _cleanup_worker_handle(normalized_run_id)
+    _cleanup_worker_handle(normalized_run_id, state)


 def build_flow_routes() -> APIRouter:
     router = APIRouter(prefix="/api/flows", tags=["flows"])

+    state = FlowRoutesState()
+
+    def _ensure_state_in_app(request: Request) -> FlowRoutesState:
+        if not hasattr(request.app.state, "flow_routes_state"):
+            request.app.state.flow_routes_state = state
+        return request.app.state.flow_routes_state
+
     def _definition_info(definition: FlowDefinition) -> Dict:
         return {
             "type": definition.flow_type,
@@ -474,16 +507,20 @@ def build_flow_routes() -> APIRouter:
         return by_seq

     @router.get("")
-    async def list_flow_definitions():
+    async def list_flow_definitions(request: Request):
+        state = _ensure_state_in_app(request)
         repo_root = find_repo_root()
         definitions = [
-            _definition_info(_build_flow_definition(repo_root, flow_type))
+            _definition_info(_build_flow_definition(repo_root, flow_type, state))
             for flow_type in _supported_flow_types
         ]
         return {"definitions": definitions}

     @router.get("/runs", response_model=list[FlowStatusResponse])
-    async def list_runs(flow_type: Optional[str] = None, reconcile: bool = False):
+    async def list_runs(
+        request: Request, flow_type: Optional[str] = None, reconcile: bool = False
+    ):
+        _ensure_state_in_app(request)
         repo_root = find_repo_root()
         store = _require_flow_store(repo_root)
         records: list[FlowRunRecord] = []
@@ -508,18 +545,20 @@ def build_flow_routes() -> APIRouter:
             store.close()

     @router.get("/{flow_type}")
-    async def get_flow_definition(flow_type: str):
+    async def get_flow_definition(request: Request, flow_type: str):
+        state = _ensure_state_in_app(request)
         repo_root = find_repo_root()
         if flow_type not in _supported_flow_types:
             raise HTTPException(
                 status_code=404, detail=f"Unknown flow type: {flow_type}"
             )
-        definition = _build_flow_definition(repo_root, flow_type)
+        definition = _build_flow_definition(repo_root, flow_type, state)
         return _definition_info(definition)

     async def _start_flow(
         flow_type: str,
         request: FlowStartRequest,
+        state: FlowRoutesState,
         *,
         force_new: bool = False,
         validate_tickets: bool = True,
@@ -530,7 +569,7 @@ def build_flow_routes() -> APIRouter:
             )

         repo_root = find_repo_root()
-        controller = _get_flow_controller(repo_root, flow_type)
+        controller = _get_flow_controller(repo_root, flow_type, state)

         if flow_type == "ticket_flow" and validate_tickets:
             ticket_dir = repo_root / ".codex-autorunner" / "tickets"
@@ -560,8 +599,8 @@ def build_flow_routes() -> APIRouter:
             )
         active = _active_or_paused_run(runs)
         if active:
-            _reap_dead_worker(active.id)
-            _start_flow_worker(repo_root, active.id)
+            _reap_dead_worker(active.id, state)
+            _start_flow_worker(repo_root, active.id, state)
             store = _require_flow_store(repo_root)
             try:
                 response = _build_flow_status_response(
@@ -582,7 +621,7 @@ def build_flow_routes() -> APIRouter:
             metadata=request.metadata,
         )

-        _start_flow_worker(repo_root, run_id)
+        _start_flow_worker(repo_root, run_id, state)

         store = _require_flow_store(repo_root)
         try:
@@ -592,10 +631,11 @@ def build_flow_routes() -> APIRouter:
             store.close()

     @router.post("/{flow_type}/start", response_model=FlowStatusResponse)
-    async def start_flow(flow_type: str, request: FlowStartRequest):
-        meta = request.metadata if isinstance(request.metadata, dict) else {}
+    async def start_flow(request: Request, flow_type: str, req: FlowStartRequest):
+        state = _ensure_state_in_app(request)
+        meta = req.metadata if isinstance(req.metadata, dict) else {}
         force_new = bool(meta.get("force_new"))
-        return await _start_flow(flow_type, request, force_new=force_new)
+        return await _start_flow(flow_type, req, state, force_new=force_new)

     @router.get("/ticket_flow/bootstrap-check", response_model=BootstrapCheckResponse)
     async def bootstrap_check():
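Note the rename of the Pydantic body parameter to `req`: FastAPI injects the raw `Request` for any parameter annotated with `Request` and parses the JSON body into the `FlowStartRequest` model, so both can coexist once their names no longer collide. A small illustrative endpoint with hypothetical names showing the same split:

```python
from typing import Optional

from fastapi import FastAPI, Request
from pydantic import BaseModel

app = FastAPI()


class StartPayload(BaseModel):
    metadata: Optional[dict] = None


@app.post("/start")
async def start(request: Request, req: StartPayload):
    # `request` is the raw Starlette Request (headers, app.state, client info);
    # `req` is the validated JSON body.
    return {
        "client": request.client.host if request.client else None,
        "force_new": bool((req.metadata or {}).get("force_new")),
    }
```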
@@ -657,7 +697,10 @@ def build_flow_routes() -> APIRouter:
         )

     @router.post("/ticket_flow/bootstrap", response_model=FlowStatusResponse)
-    async def bootstrap_ticket_flow(request: Optional[FlowStartRequest] = None):
+    async def bootstrap_ticket_flow(
+        http_request: Request, request: Optional[FlowStartRequest] = None
+    ):
+        state = _ensure_state_in_app(http_request)
         repo_root = find_repo_root()
         ticket_dir = repo_root / ".codex-autorunner" / "tickets"
         ticket_dir.mkdir(parents=True, exist_ok=True)
@@ -684,8 +727,8 @@ def build_flow_routes() -> APIRouter:
                     "errors": lint_errors,
                 },
             )
-            _reap_dead_worker(active.id)
-            _start_flow_worker(repo_root, active.id)
+            _reap_dead_worker(active.id, state)
+            _start_flow_worker(repo_root, active.id, state)
             store = _require_flow_store(repo_root)
             try:
                 resp = _build_flow_status_response(active, repo_root, store=store)
@@ -732,6 +775,7 @@ You are the first ticket in a new ticket_flow run.
         return await _start_flow(
             "ticket_flow",
             payload,
+            state,
             force_new=force_new,
             validate_tickets=validate_tickets,
         )
@@ -944,13 +988,14 @@ You are the first ticket in a new ticket_flow run.
         )

     @router.post("/{run_id}/stop", response_model=FlowStatusResponse)
-    async def stop_flow(run_id: uuid.UUID):
+    async def stop_flow(http_request: Request, run_id: uuid.UUID):
+        state = _ensure_state_in_app(http_request)
         run_id = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, run_id)
-        controller = _get_flow_controller(repo_root, record.flow_type)
+        controller = _get_flow_controller(repo_root, record.flow_type, state)

-        _stop_worker(run_id)
+        _stop_worker(run_id, state)

         updated = await controller.stop_flow(run_id)
         store = _require_flow_store(repo_root)
@@ -961,11 +1006,12 @@ You are the first ticket in a new ticket_flow run.
             store.close()

     @router.post("/{run_id}/resume", response_model=FlowStatusResponse)
-    async def resume_flow(run_id: uuid.UUID):
+    async def resume_flow(http_request: Request, run_id: uuid.UUID):
+        state = _ensure_state_in_app(http_request)
         run_id = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, run_id)
-        controller = _get_flow_controller(repo_root, record.flow_type)
+        controller = _get_flow_controller(repo_root, record.flow_type, state)

         # Validate tickets before resuming ticket_flow
         if record.flow_type == "ticket_flow":
@@ -981,8 +1027,8 @@ You are the first ticket in a new ticket_flow run.
                 )

         updated = await controller.resume_flow(run_id)
-        _reap_dead_worker(run_id)
-        _start_flow_worker(repo_root, run_id)
+        _reap_dead_worker(run_id, state)
+        _start_flow_worker(repo_root, run_id, state)

         store = _require_flow_store(repo_root)
         try:
@@ -992,7 +1038,7 @@ You are the first ticket in a new ticket_flow run.
             store.close()

     @router.post("/{run_id}/reconcile", response_model=FlowStatusResponse)
-    async def reconcile_flow(run_id: uuid.UUID):
+    async def reconcile_flow(http_request: Request, run_id: uuid.UUID):
         run_id = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, run_id)
@@ -1007,7 +1053,10 @@ You are the first ticket in a new ticket_flow run.

     @router.post("/{run_id}/archive")
     async def archive_flow(
-        run_id: uuid.UUID, delete_run: bool = True, force: bool = False
+        http_request: Request,
+        run_id: uuid.UUID,
+        delete_run: bool = True,
+        force: bool = False,
     ):
         """Archive a completed flow by moving tickets to the run's artifact directory.

@@ -1017,6 +1066,7 @@ You are the first ticket in a new ticket_flow run.
             force: If True, allow archiving flows stuck in stopping/paused state
                 by force-stopping the worker first.
         """
+        state = _ensure_state_in_app(http_request)
         run_id = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, run_id)
@@ -1028,7 +1078,7 @@ You are the first ticket in a new ticket_flow run.
             FlowRunStatus.PAUSED,
         ):
             # Force-stop any remaining worker before archiving
-            _stop_worker(run_id, timeout=2.0)
+            _stop_worker(run_id, state, timeout=2.0)
             _logger.info(
                 "Force-archiving flow %s in %s state", run_id, record.status.value
             )
@@ -1071,11 +1121,14 @@ You are the first ticket in a new ticket_flow run.
         }

     @router.get("/{run_id}/status", response_model=FlowStatusResponse)
-    async def get_flow_status(run_id: uuid.UUID, reconcile: bool = False):
+    async def get_flow_status(
+        http_request: Request, run_id: uuid.UUID, reconcile: bool = False
+    ):
+        state = _ensure_state_in_app(http_request)
         run_id = _normalize_run_id(run_id)
         repo_root = find_repo_root()

-        _reap_dead_worker(run_id)
+        _reap_dead_worker(run_id, state)

         record = _get_flow_record(repo_root, run_id)
         store = _require_flow_store(repo_root)
@@ -1089,18 +1142,19 @@ You are the first ticket in a new ticket_flow run.

     @router.get("/{run_id}/events")
     async def stream_flow_events(
-        run_id: uuid.UUID, request: Request, after: Optional[int] = None
+        http_request: Request, run_id: uuid.UUID, after: Optional[int] = None
     ):
+        state = _ensure_state_in_app(http_request)
         run_id = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, run_id)
-        controller = _get_flow_controller(repo_root, record.flow_type)
+        controller = _get_flow_controller(repo_root, record.flow_type, state)

         async def event_stream():
             try:
                 resume_after = after
                 if resume_after is None:
-                    last_event_id = request.headers.get("Last-Event-ID")
+                    last_event_id = http_request.headers.get("Last-Event-ID")
                     if last_event_id:
                         try:
                             resume_after = int(last_event_id)
@@ -1130,7 +1184,7 @@ You are the first ticket in a new ticket_flow run.
         )

     @router.get("/{run_id}/dispatch_history")
-    async def get_dispatch_history(run_id: str):
+    async def get_dispatch_history(http_request: Request, run_id: str):
         """Get dispatch history for a flow run.

         Returns all dispatches (agent->human communications) for this run.
@@ -1216,11 +1270,11 @@ You are the first ticket in a new ticket_flow run.

         if not (len(seq) == 4 and seq.isdigit()):
             raise HTTPException(status_code=400, detail="Invalid seq")
-        if ".." in file_path or file_path.startswith("/"):
-            raise HTTPException(status_code=400, detail="Invalid file path")
-        filename = os.path.basename(file_path)
-        if filename != file_path:
-            raise HTTPException(status_code=400, detail="Invalid file path")
+
+        try:
+            filename = validate_single_filename(file_path)
+        except SafePathError as exc:
+            raise HTTPException(status_code=400, detail=str(exc)) from exc

         input_data = dict(record.input_data or {})
         workspace_root = Path(input_data.get("workspace_root") or repo_root)
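The old inline checks are replaced by `validate_single_filename` from the new `core/safe_paths` module, which is not shown in this hunk. A hedged sketch of what such a validator typically enforces, offered as an assumption about its behavior rather than the packaged implementation:

```python
import os


class SafePathError(ValueError):
    """Raised when a user-supplied path is not a plain filename (illustrative)."""


def validate_single_filename(file_path: str) -> str:
    # Assumed behavior: reject separators, parent references, and absolute paths,
    # returning the bare filename only when it is already a single component.
    if not file_path or file_path in (".", ".."):
        raise SafePathError("Invalid file path")
    if "/" in file_path or "\\" in file_path or ".." in file_path:
        raise SafePathError("Invalid file path")
    if os.path.basename(file_path) != file_path:
        raise SafePathError("Invalid file path")
    return file_path
```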
@@ -1283,11 +1337,12 @@ You are the first ticket in a new ticket_flow run.
         return FileResponse(resolved, filename=resolved.name)

     @router.get("/{run_id}/artifacts", response_model=list[FlowArtifactInfo])
-    async def list_flow_artifacts(run_id: str):
+    async def list_flow_artifacts(http_request: Request, run_id: str):
+        state = _ensure_state_in_app(http_request)
         normalized = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, normalized)
-        controller = _get_flow_controller(repo_root, record.flow_type)
+        controller = _get_flow_controller(repo_root, record.flow_type, state)

         artifacts = controller.get_artifacts(normalized)
         return [
@@ -1302,11 +1357,14 @@ You are the first ticket in a new ticket_flow run.
         ]

     @router.get("/{run_id}/artifact")
-    async def get_flow_artifact(run_id: str, kind: Optional[str] = None):
+    async def get_flow_artifact(
+        http_request: Request, run_id: str, kind: Optional[str] = None
+    ):
+        state = _ensure_state_in_app(http_request)
         normalized = _normalize_run_id(run_id)
         repo_root = find_repo_root()
         record = _get_flow_record(repo_root, normalized)
-        controller = _get_flow_controller(repo_root, record.flow_type)
+        controller = _get_flow_controller(repo_root, record.flow_type, state)

         artifacts_root = controller.get_artifacts_dir(normalized)
         if not artifacts_root: