codex-autorunner 1.2.0__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. codex_autorunner/bootstrap.py +26 -5
  2. codex_autorunner/core/about_car.py +12 -12
  3. codex_autorunner/core/config.py +178 -61
  4. codex_autorunner/core/context_awareness.py +1 -0
  5. codex_autorunner/core/filesystem.py +24 -0
  6. codex_autorunner/core/flows/controller.py +50 -12
  7. codex_autorunner/core/flows/runtime.py +8 -3
  8. codex_autorunner/core/hub.py +293 -16
  9. codex_autorunner/core/lifecycle_events.py +44 -5
  10. codex_autorunner/core/pma_context.py +188 -1
  11. codex_autorunner/core/pma_delivery.py +81 -0
  12. codex_autorunner/core/pma_dispatches.py +224 -0
  13. codex_autorunner/core/pma_lane_worker.py +122 -0
  14. codex_autorunner/core/pma_queue.py +167 -18
  15. codex_autorunner/core/pma_reactive.py +91 -0
  16. codex_autorunner/core/pma_safety.py +58 -0
  17. codex_autorunner/core/pma_sink.py +104 -0
  18. codex_autorunner/core/pma_transcripts.py +183 -0
  19. codex_autorunner/core/safe_paths.py +117 -0
  20. codex_autorunner/housekeeping.py +77 -23
  21. codex_autorunner/integrations/agents/codex_backend.py +18 -12
  22. codex_autorunner/integrations/agents/wiring.py +2 -0
  23. codex_autorunner/integrations/app_server/client.py +31 -0
  24. codex_autorunner/integrations/app_server/supervisor.py +3 -0
  25. codex_autorunner/integrations/telegram/adapter.py +1 -1
  26. codex_autorunner/integrations/telegram/config.py +1 -1
  27. codex_autorunner/integrations/telegram/constants.py +1 -1
  28. codex_autorunner/integrations/telegram/handlers/commands/execution.py +16 -15
  29. codex_autorunner/integrations/telegram/handlers/commands/files.py +5 -8
  30. codex_autorunner/integrations/telegram/handlers/commands/github.py +10 -6
  31. codex_autorunner/integrations/telegram/handlers/commands/shared.py +9 -8
  32. codex_autorunner/integrations/telegram/handlers/commands/workspace.py +85 -2
  33. codex_autorunner/integrations/telegram/handlers/commands_runtime.py +29 -8
  34. codex_autorunner/integrations/telegram/handlers/messages.py +8 -2
  35. codex_autorunner/integrations/telegram/helpers.py +30 -2
  36. codex_autorunner/integrations/telegram/ticket_flow_bridge.py +54 -3
  37. codex_autorunner/static/archive.js +274 -81
  38. codex_autorunner/static/archiveApi.js +21 -0
  39. codex_autorunner/static/constants.js +1 -1
  40. codex_autorunner/static/docChatCore.js +2 -0
  41. codex_autorunner/static/hub.js +59 -0
  42. codex_autorunner/static/index.html +70 -54
  43. codex_autorunner/static/notificationBell.js +173 -0
  44. codex_autorunner/static/notifications.js +187 -36
  45. codex_autorunner/static/pma.js +96 -35
  46. codex_autorunner/static/styles.css +431 -4
  47. codex_autorunner/static/terminalManager.js +22 -3
  48. codex_autorunner/static/utils.js +5 -1
  49. codex_autorunner/surfaces/cli/cli.py +206 -129
  50. codex_autorunner/surfaces/cli/template_repos.py +157 -0
  51. codex_autorunner/surfaces/web/app.py +193 -5
  52. codex_autorunner/surfaces/web/routes/archive.py +197 -0
  53. codex_autorunner/surfaces/web/routes/file_chat.py +115 -87
  54. codex_autorunner/surfaces/web/routes/flows.py +125 -67
  55. codex_autorunner/surfaces/web/routes/pma.py +638 -57
  56. codex_autorunner/surfaces/web/schemas.py +11 -0
  57. codex_autorunner/tickets/agent_pool.py +6 -1
  58. codex_autorunner/tickets/outbox.py +27 -14
  59. codex_autorunner/tickets/replies.py +4 -10
  60. codex_autorunner/tickets/runner.py +1 -0
  61. codex_autorunner/workspace/paths.py +8 -3
  62. {codex_autorunner-1.2.0.dist-info → codex_autorunner-1.3.0.dist-info}/METADATA +1 -1
  63. {codex_autorunner-1.2.0.dist-info → codex_autorunner-1.3.0.dist-info}/RECORD +67 -57
  64. {codex_autorunner-1.2.0.dist-info → codex_autorunner-1.3.0.dist-info}/WHEEL +0 -0
  65. {codex_autorunner-1.2.0.dist-info → codex_autorunner-1.3.0.dist-info}/entry_points.txt +0 -0
  66. {codex_autorunner-1.2.0.dist-info → codex_autorunner-1.3.0.dist-info}/licenses/LICENSE +0 -0
  67. {codex_autorunner-1.2.0.dist-info → codex_autorunner-1.3.0.dist-info}/top_level.txt +0 -0
codex_autorunner/core/pma_context.py
@@ -19,6 +19,30 @@ PMA_MAX_MESSAGES = 10
  PMA_MAX_TEXT = 800
  PMA_MAX_TEMPLATE_REPOS = 25
  PMA_MAX_TEMPLATE_FIELD_CHARS = 120
+ PMA_MAX_PMA_FILES = 50
+ PMA_MAX_LIFECYCLE_EVENTS = 20
+
+ # Keep this short and stable; see ticket TICKET-001 for rationale.
+ PMA_FASTPATH = """<pma_fastpath>
+ You are PMA inside Codex Autorunner (CAR). Treat the filesystem as truth; prefer creating/updating CAR artifacts over "chat-only" plans.
+
+ First-turn routine:
+ 1) Read <user_message> and <hub_snapshot>.
+ 2) If hub_snapshot.inbox has entries, handle them first (these are paused runs needing input):
+    - Summarize the dispatch question.
+    - Answer it or propose the next minimal action.
+    - Include the item.open_url so the user can jump straight to the repo Inbox tab.
+ 3) If the request is new work:
+    - Identify the target repo(s).
+    - Prefer hub-owned worktrees for changes.
+    - Create/adjust repo tickets under each repo's `.codex-autorunner/tickets/`.
+
+ Web UI map (user perspective):
+ - Hub root: `/` (repos list + global notifications).
+ - Repo view: `/repos/<repo_id>/` tabs: Tickets | Inbox | Workspace | Terminal | Analytics | Archive.
+ - Tickets: edit queue; Inbox: paused run dispatches; Workspace: active_context/spec/decisions.
+ </pma_fastpath>
+ """

  # Defaults used when hub config is not available (should be rare).
  PMA_DOCS_MAX_CHARS = 12_000
@@ -192,13 +216,170 @@ def load_pma_prompt(hub_root: Path) -> str:
      return ""


+ def _render_ticket_flow_summary(summary: Optional[dict[str, Any]]) -> str:
+     if not summary:
+         return "null"
+     status = summary.get("status")
+     done_count = summary.get("done_count")
+     total_count = summary.get("total_count")
+     current_step = summary.get("current_step")
+     parts: list[str] = []
+     if status is not None:
+         parts.append(f"status={status}")
+     if done_count is not None and total_count is not None:
+         parts.append(f"done={done_count}/{total_count}")
+     if current_step is not None:
+         parts.append(f"step={current_step}")
+     if not parts:
+         return "null"
+     return " ".join(parts)
+
+
+ def _render_hub_snapshot(
+     snapshot: dict[str, Any],
+     *,
+     max_repos: int = PMA_MAX_REPOS,
+     max_messages: int = PMA_MAX_MESSAGES,
+     max_text_chars: int = PMA_MAX_TEXT,
+     max_template_repos: int = PMA_MAX_TEMPLATE_REPOS,
+     max_field_chars: int = PMA_MAX_TEMPLATE_FIELD_CHARS,
+     max_pma_files: int = PMA_MAX_PMA_FILES,
+     max_lifecycle_events: int = PMA_MAX_LIFECYCLE_EVENTS,
+ ) -> str:
+     lines: list[str] = []
+
+     inbox = snapshot.get("inbox") or []
+     if inbox:
+         lines.append("Inbox (paused runs needing attention):")
+         for item in list(inbox)[: max(0, max_messages)]:
+             repo_id = _truncate(str(item.get("repo_id", "")), max_field_chars)
+             run_id = _truncate(str(item.get("run_id", "")), max_field_chars)
+             seq = _truncate(str(item.get("seq", "")), max_field_chars)
+             dispatch = item.get("dispatch") or {}
+             mode = _truncate(str(dispatch.get("mode", "")), max_field_chars)
+             handoff = bool(dispatch.get("is_handoff"))
+             lines.append(
+                 f"- repo_id={repo_id} run_id={run_id} seq={seq} mode={mode} "
+                 f"handoff={str(handoff).lower()}"
+             )
+             title = dispatch.get("title")
+             if title:
+                 lines.append(f" title: {_truncate(str(title), max_text_chars)}")
+             body = dispatch.get("body")
+             if body:
+                 lines.append(f" body: {_truncate(str(body), max_text_chars)}")
+             files = item.get("files") or []
+             if files:
+                 display = [
+                     _truncate(str(name), max_field_chars)
+                     for name in list(files)[: max(0, max_pma_files)]
+                 ]
+                 lines.append(f" attachments: [{', '.join(display)}]")
+             open_url = item.get("open_url")
+             if open_url:
+                 lines.append(f" open_url: {_truncate(str(open_url), max_field_chars)}")
+         lines.append("")
+
+     repos = snapshot.get("repos") or []
+     if repos:
+         lines.append("Repos:")
+         for repo in list(repos)[: max(0, max_repos)]:
+             repo_id = _truncate(str(repo.get("id", "")), max_field_chars)
+             display_name = _truncate(str(repo.get("display_name", "")), max_field_chars)
+             status = _truncate(str(repo.get("status", "")), max_field_chars)
+             last_run_id = _truncate(str(repo.get("last_run_id", "")), max_field_chars)
+             last_exit = _truncate(str(repo.get("last_exit_code", "")), max_field_chars)
+             ticket_flow = _render_ticket_flow_summary(repo.get("ticket_flow"))
+             lines.append(
+                 f"- {repo_id} ({display_name}): status={status} "
+                 f"last_run_id={last_run_id} last_exit_code={last_exit} "
+                 f"ticket_flow={ticket_flow}"
+             )
+         lines.append("")
+
+     templates = snapshot.get("templates") or {}
+     template_repos = templates.get("repos") or []
+     template_scan = templates.get("last_scan")
+     if templates.get("enabled") or template_repos or template_scan:
+         enabled = bool(templates.get("enabled"))
+         lines.append("Templates:")
+         lines.append(f"- enabled={str(enabled).lower()}")
+         if template_repos:
+             items: list[str] = []
+             for repo in list(template_repos)[: max(0, max_template_repos)]:
+                 repo_id = _truncate(str(repo.get("id", "")), max_field_chars)
+                 default_ref = _truncate(
+                     str(repo.get("default_ref", "")), max_field_chars
+                 )
+                 trusted = bool(repo.get("trusted"))
+                 items.append(f"{repo_id}@{default_ref} trusted={str(trusted).lower()}")
+             lines.append(f"- repos: [{', '.join(items)}]")
+         if template_scan:
+             repo_id = _truncate(str(template_scan.get("repo_id", "")), max_field_chars)
+             decision = _truncate(
+                 str(template_scan.get("decision", "")), max_field_chars
+             )
+             severity = _truncate(
+                 str(template_scan.get("severity", "")), max_field_chars
+             )
+             scanned_at = _truncate(
+                 str(template_scan.get("scanned_at", "")), max_field_chars
+             )
+             lines.append(
+                 f"- last_scan: {repo_id} {decision} {severity} {scanned_at}".strip()
+             )
+         lines.append("")
+
+     pma_files = snapshot.get("pma_files") or {}
+     inbox_files = pma_files.get("inbox") or []
+     outbox_files = pma_files.get("outbox") or []
+     if inbox_files or outbox_files:
+         lines.append("PMA files:")
+         if inbox_files:
+             files = [
+                 _truncate(str(name), max_field_chars)
+                 for name in list(inbox_files)[: max(0, max_pma_files)]
+             ]
+             lines.append(f"- inbox: [{', '.join(files)}]")
+         if outbox_files:
+             files = [
+                 _truncate(str(name), max_field_chars)
+                 for name in list(outbox_files)[: max(0, max_pma_files)]
+             ]
+             lines.append(f"- outbox: [{', '.join(files)}]")
+         lines.append("")
+
+     lifecycle_events = snapshot.get("lifecycle_events") or []
+     if lifecycle_events:
+         lines.append("Lifecycle events (recent):")
+         for event in list(lifecycle_events)[: max(0, max_lifecycle_events)]:
+             timestamp = _truncate(str(event.get("timestamp", "")), max_field_chars)
+             event_type = _truncate(str(event.get("event_type", "")), max_field_chars)
+             repo_id = _truncate(str(event.get("repo_id", "")), max_field_chars)
+             run_id = _truncate(str(event.get("run_id", "")), max_field_chars)
+             lines.append(
+                 f"- {timestamp} {event_type} repo_id={repo_id} run_id={run_id}"
+             )
+         lines.append("")
+
+     if lines and lines[-1] == "":
+         lines.pop()
+     return "\n".join(lines)
+
+
  def format_pma_prompt(
      base_prompt: str,
      snapshot: dict[str, Any],
      message: str,
      hub_root: Optional[Path] = None,
  ) -> str:
-     snapshot_text = json.dumps(snapshot, sort_keys=True)
+     limits = snapshot.get("limits") or {}
+     snapshot_text = _render_hub_snapshot(
+         snapshot,
+         max_repos=limits.get("max_repos", PMA_MAX_REPOS),
+         max_messages=limits.get("max_messages", PMA_MAX_MESSAGES),
+         max_text_chars=limits.get("max_text_chars", PMA_MAX_TEXT),
+     )

      pma_docs: Optional[dict[str, Any]] = None
      if hub_root is not None:
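For orientation, here is a minimal sketch of the rendering this hunk introduces: a hand-built snapshot dict and the text the new helper produces for it. The repo values are invented, `_render_hub_snapshot` is a module-private helper imported only to show its output, and `_truncate` (not shown in this diff) is assumed to pass short strings through unchanged.

# Illustrative only: invented values, private helper imported for demonstration.
from codex_autorunner.core.pma_context import _render_hub_snapshot

snapshot = {
    "inbox": [],
    "repos": [
        {
            "id": "demo-repo",
            "display_name": "Demo Repo",
            "status": "idle",
            "last_run_id": "run-42",
            "last_exit_code": 0,
            "ticket_flow": {"status": "running", "done_count": 2, "total_count": 5},
        }
    ],
}

print(_render_hub_snapshot(snapshot))
# Expected output (only the "Repos:" section, since the other sections are empty):
# Repos:
# - demo-repo (Demo Repo): status=idle last_run_id=run-42 last_exit_code=0 ticket_flow=status=running done=2/5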
@@ -235,6 +416,7 @@ def format_pma_prompt(
              "</pma_workspace_docs>\n\n"
          )

+     prompt += f"{PMA_FASTPATH}\n\n"
      prompt += (
          "<hub_snapshot>\n"
          f"{snapshot_text}\n"
@@ -493,4 +675,9 @@ async def build_hub_snapshot(
          "templates": templates,
          "pma_files": pma_files,
          "lifecycle_events": lifecycle_events,
+         "limits": {
+             "max_repos": max_repos,
+             "max_messages": max_messages,
+             "max_text_chars": max_text_chars,
+         },
      }
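The snapshot now records the limits it was built with, and format_pma_prompt (earlier hunk) reads them back instead of assuming the module defaults. A tiny illustration with invented values:

# Invented values; shows the shape of the new "limits" entry and the fallback read.
snapshot = {
    "repos": [],
    "limits": {"max_repos": 5, "max_messages": 3, "max_text_chars": 400},
}
limits = snapshot.get("limits") or {}
print(limits.get("max_text_chars", 800))  # -> 400; 800 mirrors the PMA_MAX_TEXT default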
codex_autorunner/core/pma_delivery.py (new file)
@@ -0,0 +1,81 @@
+ from __future__ import annotations
+
+ import logging
+ from pathlib import Path
+ from typing import Any, Optional
+
+ from ..integrations.telegram.adapter import chunk_message
+ from ..integrations.telegram.constants import TELEGRAM_MAX_MESSAGE_LENGTH
+ from ..integrations.telegram.state import OutboxRecord, TelegramStateStore
+ from .pma_sink import PmaActiveSinkStore
+ from .time_utils import now_iso
+
+ logger = logging.getLogger(__name__)
+
+
+ async def deliver_pma_output_to_active_sink(
+     *,
+     hub_root: Path,
+     assistant_text: str,
+     turn_id: str,
+     lifecycle_event: Optional[dict[str, Any]],
+     telegram_state_path: Path,
+ ) -> bool:
+     if not lifecycle_event:
+         return False
+     if not assistant_text or not assistant_text.strip():
+         return False
+     if not isinstance(turn_id, str) or not turn_id:
+         return False
+
+     sink_store = PmaActiveSinkStore(hub_root)
+     sink = sink_store.load()
+     if not isinstance(sink, dict):
+         return False
+     if sink.get("kind") != "telegram":
+         return False
+
+     last_delivery = sink.get("last_delivery_turn_id")
+     if isinstance(last_delivery, str) and last_delivery == turn_id:
+         return False
+
+     chat_id = sink.get("chat_id")
+     thread_id = sink.get("thread_id")
+     if not isinstance(chat_id, int):
+         return False
+     if thread_id is not None and not isinstance(thread_id, int):
+         thread_id = None
+
+     chunks = chunk_message(
+         assistant_text, max_len=TELEGRAM_MAX_MESSAGE_LENGTH, with_numbering=True
+     )
+     if not chunks:
+         return False
+
+     store = TelegramStateStore(telegram_state_path)
+     try:
+         for idx, chunk in enumerate(chunks, 1):
+             record_id = f"pma:{turn_id}:{idx}"
+             record = OutboxRecord(
+                 record_id=record_id,
+                 chat_id=chat_id,
+                 thread_id=thread_id,
+                 reply_to_message_id=None,
+                 placeholder_message_id=None,
+                 text=chunk,
+                 created_at=now_iso(),
+                 operation="send",
+                 outbox_key=record_id,
+             )
+             await store.enqueue_outbox(record)
+     except Exception:
+         logger.exception("Failed to enqueue PMA output to Telegram outbox")
+         return False
+     finally:
+         await store.close()
+
+     sink_store.mark_delivered(turn_id)
+     return True
+
+
+ __all__ = ["deliver_pma_output_to_active_sink"]
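A hedged usage sketch for the new delivery helper. The paths and the lifecycle-event payload are invented; the import path follows the file list above.

# Illustrative only: paths and payload values are made up.
import asyncio
from pathlib import Path

from codex_autorunner.core.pma_delivery import deliver_pma_output_to_active_sink


async def main() -> None:
    delivered = await deliver_pma_output_to_active_sink(
        hub_root=Path("/srv/car-hub"),                           # hypothetical hub root
        assistant_text="Ticket T-12 created; run started.",      # PMA's reply text
        turn_id="turn-2024-0001",                                # de-dup key per turn
        lifecycle_event={"event_type": "pma_turn_completed"},    # any truthy event dict
        telegram_state_path=Path("/srv/car-hub/.codex-autorunner/telegram.db"),  # hypothetical
    )
    # False when no Telegram sink is active, the turn was already delivered,
    # or enqueueing to the outbox failed.
    print("delivered:", delivered)


asyncio.run(main())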
codex_autorunner/core/pma_dispatches.py (new file)
@@ -0,0 +1,224 @@
+ from __future__ import annotations
+
+ import logging
+ from dataclasses import dataclass
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import Any, Optional
+
+ import yaml
+
+ from ..tickets.frontmatter import parse_markdown_frontmatter
+ from .time_utils import now_iso
+ from .utils import atomic_write
+
+ logger = logging.getLogger(__name__)
+
+ PMA_DISPATCHES_DIRNAME = "dispatches"
+ PMA_DISPATCH_ALLOWED_PRIORITIES = {"info", "warn", "action"}
+ PMA_DISPATCH_ALLOWED_EXTS = {".md", ".markdown"}
+
+
+ @dataclass(frozen=True)
+ class PmaDispatch:
+     dispatch_id: str
+     title: str
+     body: str
+     priority: str
+     links: list[dict[str, str]]
+     created_at: str
+     resolved_at: Optional[str]
+     source_turn_id: Optional[str]
+     path: Path
+
+
+ def _dispatches_root(hub_root: Path) -> Path:
+     return hub_root / ".codex-autorunner" / "pma" / PMA_DISPATCHES_DIRNAME
+
+
+ def ensure_pma_dispatches_dir(hub_root: Path) -> Path:
+     root = _dispatches_root(hub_root)
+     root.mkdir(parents=True, exist_ok=True)
+     return root
+
+
+ def _normalize_priority(value: Any) -> str:
+     if not isinstance(value, str):
+         return "info"
+     priority = value.strip().lower()
+     if priority in PMA_DISPATCH_ALLOWED_PRIORITIES:
+         return priority
+     return "info"
+
+
+ def _normalize_links(value: Any) -> list[dict[str, str]]:
+     if not isinstance(value, list):
+         return []
+     links: list[dict[str, str]] = []
+     for item in value:
+         if not isinstance(item, dict):
+             continue
+         label = item.get("label")
+         href = item.get("href")
+         if not isinstance(label, str) or not isinstance(href, str):
+             continue
+         label = label.strip()
+         href = href.strip()
+         if not label or not href:
+             continue
+         links.append({"label": label, "href": href})
+     return links
+
+
+ def _created_at_from_path(path: Path) -> str:
+     try:
+         stamp = path.stat().st_mtime
+     except OSError:
+         return now_iso()
+     return datetime.fromtimestamp(stamp, tz=timezone.utc).isoformat()
+
+
+ def _parse_iso(value: Optional[str]) -> Optional[datetime]:
+     if not isinstance(value, str) or not value:
+         return None
+     try:
+         return datetime.fromisoformat(value.replace("Z", "+00:00"))
+     except Exception:
+         return None
+
+
+ def parse_pma_dispatch(path: Path) -> tuple[Optional[PmaDispatch], list[str]]:
+     errors: list[str] = []
+     try:
+         raw = path.read_text(encoding="utf-8")
+     except OSError as exc:
+         return None, [f"Failed to read dispatch: {exc}"]
+
+     data, body = parse_markdown_frontmatter(raw)
+     if not isinstance(data, dict):
+         data = {}
+
+     raw_title = data.get("title")
+     title = raw_title if isinstance(raw_title, str) else ""
+     body = body or ""
+     priority = _normalize_priority(data.get("priority"))
+     links = _normalize_links(data.get("links"))
+     created_at = (
+         data.get("created_at") if isinstance(data.get("created_at"), str) else None
+     )
+     resolved_at = (
+         data.get("resolved_at") if isinstance(data.get("resolved_at"), str) else None
+     )
+     source_turn_id = (
+         data.get("source_turn_id")
+         if isinstance(data.get("source_turn_id"), str)
+         else None
+     )
+
+     if not created_at:
+         created_at = _created_at_from_path(path)
+
+     dispatch_id = path.stem
+
+     dispatch = PmaDispatch(
+         dispatch_id=dispatch_id,
+         title=title,
+         body=body,
+         priority=priority,
+         links=links,
+         created_at=created_at,
+         resolved_at=resolved_at,
+         source_turn_id=source_turn_id,
+         path=path,
+     )
+     return dispatch, errors
+
+
+ def list_pma_dispatches(
+     hub_root: Path,
+     *,
+     include_resolved: bool = False,
+     limit: Optional[int] = None,
+ ) -> list[PmaDispatch]:
+     root = ensure_pma_dispatches_dir(hub_root)
+     dispatches: list[PmaDispatch] = []
+     for child in sorted(root.iterdir(), key=lambda p: p.name):
+         if child.suffix.lower() not in PMA_DISPATCH_ALLOWED_EXTS:
+             continue
+         dispatch, errors = parse_pma_dispatch(child)
+         if errors or dispatch is None:
+             continue
+         if not include_resolved and dispatch.resolved_at:
+             continue
+         dispatches.append(dispatch)
+
+     dispatches.sort(
+         key=lambda d: _parse_iso(d.created_at)
+         or datetime.min.replace(tzinfo=timezone.utc),
+         reverse=True,
+     )
+     if isinstance(limit, int) and limit > 0:
+         dispatches = dispatches[:limit]
+     return dispatches
+
+
+ def list_pma_dispatches_for_turn(hub_root: Path, turn_id: str) -> list[PmaDispatch]:
+     if not isinstance(turn_id, str) or not turn_id:
+         return []
+     dispatches = list_pma_dispatches(hub_root, include_resolved=False)
+     return [d for d in dispatches if d.source_turn_id == turn_id]
+
+
+ def find_pma_dispatch_path(hub_root: Path, dispatch_id: str) -> Optional[Path]:
+     if not isinstance(dispatch_id, str) or not dispatch_id:
+         return None
+     safe_id = dispatch_id.strip()
+     if not safe_id or "/" in safe_id or "\\" in safe_id:
+         return None
+     root = ensure_pma_dispatches_dir(hub_root)
+     for ext in PMA_DISPATCH_ALLOWED_EXTS:
+         candidate = root / f"{safe_id}{ext}"
+         if candidate.exists():
+             return candidate
+     return None
+
+
+ def resolve_pma_dispatch(path: Path) -> tuple[Optional[PmaDispatch], list[str]]:
+     dispatch, errors = parse_pma_dispatch(path)
+     if errors or dispatch is None:
+         return dispatch, errors
+
+     if dispatch.resolved_at:
+         return dispatch, []
+
+     data = {
+         "title": dispatch.title,
+         "priority": dispatch.priority,
+         "links": dispatch.links,
+         "created_at": dispatch.created_at,
+         "resolved_at": now_iso(),
+         "source_turn_id": dispatch.source_turn_id,
+     }
+     frontmatter = yaml.safe_dump(data, sort_keys=False).strip()
+     content = f"---\n{frontmatter}\n---\n\n{dispatch.body.strip()}\n"
+     try:
+         atomic_write(path, content)
+     except OSError as exc:
+         return dispatch, [f"Failed to resolve dispatch: {exc}"]
+
+     updated, parse_errors = parse_pma_dispatch(path)
+     if parse_errors:
+         return updated, parse_errors
+     return updated, []
+
+
+ __all__ = [
+     "PmaDispatch",
+     "PMA_DISPATCHES_DIRNAME",
+     "ensure_pma_dispatches_dir",
+     "parse_pma_dispatch",
+     "list_pma_dispatches",
+     "list_pma_dispatches_for_turn",
+     "find_pma_dispatch_path",
+     "resolve_pma_dispatch",
+ ]
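To make the on-disk format concrete, a sketch that writes one hand-invented dispatch under the directory created by ensure_pma_dispatches_dir and parses it back. The hub root, filename, title, and link are made up; the frontmatter parser from tickets.frontmatter is assumed to accept standard YAML frontmatter between the `---` markers.

# Invented example dispatch; shows the frontmatter fields the parser reads.
from pathlib import Path

from codex_autorunner.core.pma_dispatches import (
    ensure_pma_dispatches_dir,
    parse_pma_dispatch,
)

hub_root = Path("/srv/car-hub")  # hypothetical hub root
dispatch_dir = ensure_pma_dispatches_dir(hub_root)
dispatch_path = dispatch_dir / "2024-01-05-flaky-ci.md"
dispatch_path.write_text(
    "---\n"
    "title: Review flaky CI on demo-repo\n"
    "priority: action\n"
    "links:\n"
    "  - label: Repo inbox\n"
    "    href: /repos/demo-repo/#inbox\n"
    "source_turn_id: turn-2024-0001\n"
    "---\n\n"
    "CI failed twice on main; decide whether to pin or revert the dependency bump.\n",
    encoding="utf-8",
)

dispatch, errors = parse_pma_dispatch(dispatch_path)
# dispatch.dispatch_id == "2024-01-05-flaky-ci"; dispatch.priority == "action";
# created_at falls back to the file's mtime because the frontmatter omits it.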
codex_autorunner/core/pma_lane_worker.py (new file)
@@ -0,0 +1,122 @@
+ from __future__ import annotations
+
+ import asyncio
+ import logging
+ from typing import Any, Awaitable, Callable, Optional
+
+ from .pma_queue import PmaQueue, PmaQueueItem
+
+ logger = logging.getLogger(__name__)
+
+ PmaLaneExecutor = Callable[[PmaQueueItem], Awaitable[dict[str, Any]]]
+ PmaLaneResultHook = Callable[
+     [PmaQueueItem, dict[str, Any]],
+     Optional[Awaitable[None]],
+ ]
+
+
+ class PmaLaneWorker:
+     """Background worker that drains a PMA lane queue."""
+
+     def __init__(
+         self,
+         lane_id: str,
+         queue: PmaQueue,
+         executor: PmaLaneExecutor,
+         *,
+         log: Optional[logging.Logger] = None,
+         on_result: Optional[PmaLaneResultHook] = None,
+         poll_interval_seconds: float = 1.0,
+     ) -> None:
+         self.lane_id = lane_id
+         self._queue = queue
+         self._executor = executor
+         self._log = log or logger
+         self._on_result = on_result
+         self._poll_interval_seconds = max(0.1, poll_interval_seconds)
+         self._task: Optional[asyncio.Task[None]] = None
+         self._cancel_event = asyncio.Event()
+         self._lock = asyncio.Lock()
+
+     @property
+     def is_running(self) -> bool:
+         return self._task is not None and not self._task.done()
+
+     async def start(self) -> bool:
+         async with self._lock:
+             if self.is_running:
+                 return False
+             self._cancel_event = asyncio.Event()
+             self._task = asyncio.create_task(self._run())
+             self._log.info("PMA lane worker started (lane_id=%s)", self.lane_id)
+             return True
+
+     async def stop(self) -> None:
+         async with self._lock:
+             task = self._task
+             if task is None:
+                 return
+             self._cancel_event.set()
+         if task is not None:
+             try:
+                 await task
+             finally:
+                 self._log.info("PMA lane worker stopped (lane_id=%s)", self.lane_id)
+
+     async def _run(self) -> None:
+         await self._queue.replay_pending(self.lane_id)
+         while not self._cancel_event.is_set():
+             item = await self._queue.dequeue(self.lane_id)
+             if item is None:
+                 await self._queue.wait_for_lane_item(
+                     self.lane_id,
+                     self._cancel_event,
+                     poll_interval_seconds=self._poll_interval_seconds,
+                 )
+                 continue
+
+             if self._cancel_event.is_set():
+                 await self._queue.fail_item(item, "cancelled by lane stop")
+                 await self._notify(item, {"status": "error", "detail": "lane stopped"})
+                 continue
+
+             self._log.info(
+                 "PMA lane item started (lane_id=%s item_id=%s)",
+                 self.lane_id,
+                 item.item_id,
+             )
+             try:
+                 result = await self._executor(item)
+                 await self._queue.complete_item(item, result)
+                 self._log.info(
+                     "PMA lane item completed (lane_id=%s item_id=%s status=%s)",
+                     self.lane_id,
+                     item.item_id,
+                     result.get("status") if isinstance(result, dict) else None,
+                 )
+                 await self._notify(item, result)
+             except Exception as exc:
+                 self._log.exception("Failed to process PMA queue item %s", item.item_id)
+                 error_result = {"status": "error", "detail": str(exc)}
+                 await self._queue.fail_item(item, str(exc))
+                 self._log.info(
+                     "PMA lane item failed (lane_id=%s item_id=%s error=%s)",
+                     self.lane_id,
+                     item.item_id,
+                     str(exc),
+                 )
+                 await self._notify(item, error_result)
+
+     async def _notify(self, item: PmaQueueItem, result: dict[str, Any]) -> None:
+         if self._on_result is None:
+             return
+         try:
+             maybe = self._on_result(item, result)
+             if asyncio.iscoroutine(maybe):
+                 await maybe
+         except Exception:
+             self._log.exception(
+                 "PMA lane result hook failed (lane_id=%s item_id=%s)",
+                 self.lane_id,
+                 item.item_id,
+             )
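A minimal wiring sketch for the new worker. The PmaQueue constructor is outside this diff, so the queue is taken as an already-built instance, the lane id is invented, and the executor is a stub in place of a real PMA turn.

# Sketch only: `queue` is assumed to be constructed elsewhere.
import asyncio
from typing import Any

from codex_autorunner.core.pma_lane_worker import PmaLaneWorker
from codex_autorunner.core.pma_queue import PmaQueue, PmaQueueItem


async def handle_item(item: PmaQueueItem) -> dict[str, Any]:
    # A real executor would run the PMA turn for this lane item.
    return {"status": "ok", "item_id": item.item_id}


async def run_lane(queue: PmaQueue) -> None:
    worker = PmaLaneWorker("pma-main", queue, handle_item, poll_interval_seconds=0.5)
    await worker.start()         # returns False if this lane is already running
    try:
        await asyncio.sleep(10)  # let the worker drain items for a while
    finally:
        await worker.stop()      # sets the cancel event and awaits the drain task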