codex-autorunner 1.2.1__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. codex_autorunner/bootstrap.py +26 -5
  2. codex_autorunner/core/config.py +176 -59
  3. codex_autorunner/core/filesystem.py +24 -0
  4. codex_autorunner/core/flows/controller.py +50 -12
  5. codex_autorunner/core/flows/runtime.py +8 -3
  6. codex_autorunner/core/hub.py +293 -16
  7. codex_autorunner/core/lifecycle_events.py +44 -5
  8. codex_autorunner/core/pma_delivery.py +81 -0
  9. codex_autorunner/core/pma_dispatches.py +224 -0
  10. codex_autorunner/core/pma_lane_worker.py +122 -0
  11. codex_autorunner/core/pma_queue.py +167 -18
  12. codex_autorunner/core/pma_reactive.py +91 -0
  13. codex_autorunner/core/pma_safety.py +58 -0
  14. codex_autorunner/core/pma_sink.py +104 -0
  15. codex_autorunner/core/pma_transcripts.py +183 -0
  16. codex_autorunner/core/safe_paths.py +117 -0
  17. codex_autorunner/housekeeping.py +77 -23
  18. codex_autorunner/integrations/agents/codex_backend.py +18 -12
  19. codex_autorunner/integrations/agents/wiring.py +2 -0
  20. codex_autorunner/integrations/app_server/client.py +31 -0
  21. codex_autorunner/integrations/app_server/supervisor.py +3 -0
  22. codex_autorunner/integrations/telegram/constants.py +1 -1
  23. codex_autorunner/integrations/telegram/handlers/commands/execution.py +16 -15
  24. codex_autorunner/integrations/telegram/handlers/commands/files.py +5 -8
  25. codex_autorunner/integrations/telegram/handlers/commands/github.py +10 -6
  26. codex_autorunner/integrations/telegram/handlers/commands/shared.py +9 -8
  27. codex_autorunner/integrations/telegram/handlers/commands/workspace.py +85 -2
  28. codex_autorunner/integrations/telegram/handlers/commands_runtime.py +29 -8
  29. codex_autorunner/integrations/telegram/helpers.py +30 -2
  30. codex_autorunner/integrations/telegram/ticket_flow_bridge.py +54 -3
  31. codex_autorunner/static/docChatCore.js +2 -0
  32. codex_autorunner/static/hub.js +59 -0
  33. codex_autorunner/static/index.html +70 -54
  34. codex_autorunner/static/notificationBell.js +173 -0
  35. codex_autorunner/static/notifications.js +154 -36
  36. codex_autorunner/static/pma.js +96 -35
  37. codex_autorunner/static/styles.css +415 -4
  38. codex_autorunner/static/utils.js +5 -1
  39. codex_autorunner/surfaces/cli/cli.py +206 -129
  40. codex_autorunner/surfaces/cli/template_repos.py +157 -0
  41. codex_autorunner/surfaces/web/app.py +193 -5
  42. codex_autorunner/surfaces/web/routes/file_chat.py +109 -61
  43. codex_autorunner/surfaces/web/routes/flows.py +125 -67
  44. codex_autorunner/surfaces/web/routes/pma.py +638 -57
  45. codex_autorunner/tickets/agent_pool.py +6 -1
  46. codex_autorunner/tickets/outbox.py +27 -14
  47. codex_autorunner/tickets/replies.py +4 -10
  48. codex_autorunner/tickets/runner.py +1 -0
  49. codex_autorunner/workspace/paths.py +8 -3
  50. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/METADATA +1 -1
  51. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/RECORD +55 -45
  52. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/WHEEL +0 -0
  53. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/entry_points.txt +0 -0
  54. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/licenses/LICENSE +0 -0
  55. {codex_autorunner-1.2.1.dist-info → codex_autorunner-1.3.0.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,7 @@
1
1
  import asyncio
2
2
  import dataclasses
3
3
  import enum
4
+ import json
4
5
  import logging
5
6
  import re
6
7
  import shutil
@@ -18,6 +19,7 @@ from ..manifest import (
18
19
  sanitize_repo_id,
19
20
  save_manifest,
20
21
  )
22
+ from ..tickets.outbox import set_lifecycle_emitter
21
23
  from .archive import archive_worktree_snapshot, build_snapshot_id
22
24
  from .config import HubConfig, RepoConfig, derive_repo_config, load_hub_config
23
25
  from .git_utils import (
@@ -30,8 +32,17 @@ from .git_utils import (
30
32
  git_upstream_status,
31
33
  run_git,
32
34
  )
33
- from .lifecycle_events import LifecycleEvent, LifecycleEventEmitter, LifecycleEventStore
35
+ from .lifecycle_events import (
36
+ LifecycleEvent,
37
+ LifecycleEventEmitter,
38
+ LifecycleEventStore,
39
+ LifecycleEventType,
40
+ )
34
41
  from .locks import DEFAULT_RUNNER_CMD_HINTS, assess_lock, process_alive
42
+ from .pma_dispatch_interceptor import PmaDispatchInterceptor
43
+ from .pma_queue import PmaQueue
44
+ from .pma_reactive import PmaReactiveStore
45
+ from .pma_safety import PmaSafetyChecker, PmaSafetyConfig
35
46
  from .ports.backend_orchestrator import (
36
47
  BackendOrchestrator as BackendOrchestratorProtocol,
37
48
  )
@@ -284,9 +295,11 @@ class HubSupervisor:
284
295
  self._dispatch_interceptor_task: Optional[asyncio.Task] = None
285
296
  self._dispatch_interceptor_stop_event: Optional[threading.Event] = None
286
297
  self._dispatch_interceptor_thread: Optional[threading.Thread] = None
298
+ self._dispatch_interceptor: Optional[PmaDispatchInterceptor] = None
299
+ self._pma_safety_checker: Optional[PmaSafetyChecker] = None
300
+ self._wire_outbox_lifecycle()
287
301
  self._reconcile_startup()
288
302
  self._start_lifecycle_event_processor()
289
- self._start_dispatch_interceptor()
290
303
 
291
304
  @classmethod
292
305
  def from_path(
@@ -963,19 +976,7 @@ class HubSupervisor:
963
976
  return self._lifecycle_emitter._store
964
977
 
965
978
    def trigger_pma_from_lifecycle_event(self, event: LifecycleEvent) -> None:
        """Route a single lifecycle event through the PMA processing pipeline.

        Kept as a thin wrapper for backward compatibility with callers of the
        pre-1.3.0 API; all decision logic now lives in
        ``_process_lifecycle_event``.
        """
        self._process_lifecycle_event(event)
979
980
 
980
981
  def process_lifecycle_events(self) -> None:
981
982
  events = self.lifecycle_store.get_unprocessed(limit=100)
@@ -983,7 +984,7 @@ class HubSupervisor:
983
984
  return
984
985
  for event in events:
985
986
  try:
986
- self.trigger_pma_from_lifecycle_event(event)
987
+ self._process_lifecycle_event(event)
987
988
  except Exception as exc:
988
989
  logger.exception(
989
990
  "Failed to process lifecycle event %s: %s", event.event_id, exc
@@ -1015,6 +1016,26 @@ class HubSupervisor:
1015
1016
  def shutdown(self) -> None:
1016
1017
  self._stop_lifecycle_event_processor()
1017
1018
  self._stop_dispatch_interceptor()
1019
+ set_lifecycle_emitter(None)
1020
+
1021
+ def _wire_outbox_lifecycle(self) -> None:
1022
+ if not self.hub_config.pma.enabled:
1023
+ set_lifecycle_emitter(None)
1024
+ return
1025
+
1026
+ def _emit_outbox_event(
1027
+ event_type: str,
1028
+ repo_id: str,
1029
+ run_id: str,
1030
+ data: Dict[str, Any],
1031
+ origin: str,
1032
+ ) -> None:
1033
+ if event_type == "dispatch_created":
1034
+ self._lifecycle_emitter.emit_dispatch_created(
1035
+ repo_id, run_id, data=data, origin=origin
1036
+ )
1037
+
1038
+ set_lifecycle_emitter(_emit_outbox_event)
1018
1039
 
1019
1040
  def _start_dispatch_interceptor(self) -> None:
1020
1041
  if not self.hub_config.pma.enabled:
@@ -1093,6 +1114,262 @@ class HubSupervisor:
1093
1114
  ),
1094
1115
  )
1095
1116
 
1117
+ def _ensure_dispatch_interceptor(self) -> Optional[PmaDispatchInterceptor]:
1118
+ if not self.hub_config.pma.enabled:
1119
+ return None
1120
+ if not self.hub_config.pma.dispatch_interception_enabled:
1121
+ return None
1122
+ if self._dispatch_interceptor is None:
1123
+ self._dispatch_interceptor = PmaDispatchInterceptor(
1124
+ hub_root=self.hub_config.root,
1125
+ supervisor=self,
1126
+ on_intercept=self._on_dispatch_intercept,
1127
+ )
1128
+ return self._dispatch_interceptor
1129
+
1130
+ def _run_coroutine(self, coro: Any) -> Any:
1131
+ try:
1132
+ return asyncio.run(coro)
1133
+ except RuntimeError as exc:
1134
+ if "asyncio.run() cannot be called" not in str(exc):
1135
+ raise
1136
+ loop = asyncio.new_event_loop()
1137
+ try:
1138
+ return loop.run_until_complete(coro)
1139
+ finally:
1140
+ loop.close()
1141
+
1142
+ def _build_pma_lifecycle_message(
1143
+ self, event: LifecycleEvent, *, reason: str
1144
+ ) -> str:
1145
+ lines = [
1146
+ "Lifecycle event received.",
1147
+ f"type: {event.event_type.value}",
1148
+ f"repo_id: {event.repo_id}",
1149
+ f"run_id: {event.run_id}",
1150
+ f"event_id: {event.event_id}",
1151
+ ]
1152
+ if reason:
1153
+ lines.append(f"reason: {reason}")
1154
+ if event.data:
1155
+ try:
1156
+ payload = json.dumps(event.data, sort_keys=True, ensure_ascii=True)
1157
+ except Exception:
1158
+ payload = str(event.data)
1159
+ lines.append(f"data: {payload}")
1160
+ if event.event_type == LifecycleEventType.DISPATCH_CREATED:
1161
+ lines.append("Dispatch requires attention; check the repo inbox.")
1162
+ return "\n".join(lines)
1163
+
1164
    def get_pma_safety_checker(self) -> PmaSafetyChecker:
        """Return the lazily created, cached PMA safety checker.

        Limits are read from the raw ``pma`` section of the hub config so
        operators can tune them without a schema change; invalid, missing,
        or negative values fall back to the built-in defaults below.
        """
        if self._pma_safety_checker is not None:
            return self._pma_safety_checker

        # `raw` may be absent or non-dict on older configs — degrade to {}.
        raw = getattr(self.hub_config, "raw", {})
        pma_config = raw.get("pma", {}) if isinstance(raw, dict) else {}
        if not isinstance(pma_config, dict):
            pma_config = {}

        def _resolve_int(key: str, fallback: int) -> int:
            # Coerce config values to int; unparseable or negative values
            # fall back to the caller-supplied default.
            raw_value = pma_config.get(key, fallback)
            try:
                value = int(raw_value)
            except (TypeError, ValueError):
                return fallback
            return value if value >= 0 else fallback

        safety_config = PmaSafetyConfig(
            dedup_window_seconds=_resolve_int("dedup_window_seconds", 300),
            max_duplicate_actions=_resolve_int("max_duplicate_actions", 3),
            rate_limit_window_seconds=_resolve_int("rate_limit_window_seconds", 60),
            max_actions_per_window=_resolve_int("max_actions_per_window", 20),
            circuit_breaker_threshold=_resolve_int("circuit_breaker_threshold", 5),
            circuit_breaker_cooldown_seconds=_resolve_int(
                "circuit_breaker_cooldown_seconds", 600
            ),
            enable_dedup=bool(pma_config.get("enable_dedup", True)),
            enable_rate_limit=bool(pma_config.get("enable_rate_limit", True)),
            enable_circuit_breaker=bool(pma_config.get("enable_circuit_breaker", True)),
        )
        # Cache so the checker's internal state (rate windows, breaker)
        # persists across calls within this supervisor.
        self._pma_safety_checker = PmaSafetyChecker(
            self.hub_config.root, config=safety_config
        )
        return self._pma_safety_checker
1198
+
1199
    def _pma_reactive_gate(self, event: LifecycleEvent) -> tuple[bool, str]:
        """Decide whether *event* may trigger a reactive PMA turn.

        Returns ``(allowed, reason)``; the reason string doubles as the
        recorded processing decision. NOTE: check order matters — the
        debounce check mutates persistent state (``check_and_update``), so
        cheaper, side-effect-free gates run first.
        """
        pma = self.hub_config.pma
        # getattr defaults keep older config objects without the new
        # reactive fields working.
        reactive_enabled = getattr(pma, "reactive_enabled", True)
        if not reactive_enabled:
            return False, "reactive_disabled"

        origin = (event.origin or "").strip().lower()
        blocked_origins = getattr(pma, "reactive_origin_blocklist", [])
        if blocked_origins:
            # Case-insensitive origin blocklist; empty origins never match.
            blocked = {str(value).strip().lower() for value in blocked_origins}
            if origin and origin in blocked:
                logger.info(
                    "Skipping PMA reactive trigger for event %s due to origin=%s",
                    event.event_id,
                    origin,
                )
                return False, "reactive_origin_blocked"

        # Optional allowlist of event-type values; falsy means "allow all".
        allowlist = getattr(pma, "reactive_event_types", None)
        if allowlist:
            if event.event_type.value not in set(allowlist):
                return False, "reactive_filtered"

        debounce_seconds = int(getattr(pma, "reactive_debounce_seconds", 0) or 0)
        if debounce_seconds > 0:
            # Debounce per (type, repo, run); check_and_update records the
            # attempt, so this must come after the pure filters above.
            key = f"{event.event_type.value}:{event.repo_id}:{event.run_id}"
            store = PmaReactiveStore(self.hub_config.root)
            if not store.check_and_update(key, debounce_seconds):
                return False, "reactive_debounced"

        # Final gate: dedup / rate limit / circuit breaker.
        safety_checker = self.get_pma_safety_checker()
        safety_check = safety_checker.check_reactive_turn()
        if not safety_check.allowed:
            logger.info(
                "Blocked PMA reactive trigger for event %s: %s",
                event.event_id,
                safety_check.reason,
            )
            return False, safety_check.reason or "reactive_blocked"

        return True, "reactive_allowed"
1240
+
1241
    def _enqueue_pma_for_lifecycle_event(
        self, event: LifecycleEvent, *, reason: str
    ) -> bool:
        """Enqueue a PMA turn for *event* on the default PMA lane.

        Returns True when the event was handled (including the dedup case —
        a duplicate enqueue still counts as processed), False only when PMA
        is disabled. The payload schema is consumed by the PMA lane worker;
        keep keys stable.
        """
        if not self.hub_config.pma.enabled:
            return False

        async def _enqueue() -> tuple[object, Optional[str]]:
            queue = PmaQueue(self.hub_config.root)
            message = self._build_pma_lifecycle_message(event, reason=reason)
            payload = {
                "message": message,
                # agent/model/reasoning left None → lane worker defaults.
                "agent": None,
                "model": None,
                "reasoning": None,
                "client_turn_id": event.event_id,
                "stream": False,
                "hub_root": str(self.hub_config.root),
                "lifecycle_event": {
                    "event_id": event.event_id,
                    "event_type": event.event_type.value,
                    "repo_id": event.repo_id,
                    "run_id": event.run_id,
                    "timestamp": event.timestamp,
                    "data": event.data,
                    "origin": event.origin,
                },
            }
            # Idempotency keyed on the event id prevents double-enqueue of
            # the same lifecycle event across processor restarts.
            idempotency_key = f"lifecycle:{event.event_id}"
            return await queue.enqueue("pma:default", idempotency_key, payload)

        _, dupe_reason = self._run_coroutine(_enqueue())
        if dupe_reason:
            logger.info(
                "Deduped PMA queue item for lifecycle event %s: %s",
                event.event_id,
                dupe_reason,
            )
        return True
1279
+
1280
    def _process_lifecycle_event(self, event: LifecycleEvent) -> None:
        """Route one lifecycle event: intercept, gate, enqueue, or drop.

        Decision tree:
          * DISPATCH_CREATED → try the dispatch interceptor first; if it
            auto-resolves or ignores, the event is done. Otherwise (or when
            no interceptor is configured) pass the reactive gate and enqueue
            a PMA turn.
          * FLOW_PAUSED / COMPLETED / FAILED / STOPPED → reactive gate then
            enqueue.
          * Anything else → left unprocessed (decision stays "skip").
        Events are marked processed only when a terminal decision was
        reached, so unhandled events can be retried on the next sweep.
        """
        if event.processed:
            return
        event_id = event.event_id
        if not event_id:
            # Cannot mark processed without an id; leave for the store.
            return

        decision = "skip"
        processed = False

        if event.event_type == LifecycleEventType.DISPATCH_CREATED:
            if not self.hub_config.pma.enabled:
                decision = "pma_disabled"
                processed = True
            else:
                interceptor = self._ensure_dispatch_interceptor()
                # Resolve the repo snapshot; any listing failure is treated
                # the same as "repo not found".
                repo_snapshot = None
                try:
                    snapshots = self.list_repos()
                    for snap in snapshots:
                        if snap.id == event.repo_id:
                            repo_snapshot = snap
                            break
                except Exception:
                    repo_snapshot = None

                if repo_snapshot is None or not repo_snapshot.exists_on_disk:
                    decision = "repo_missing"
                    processed = True
                elif interceptor is not None:
                    result = self._run_coroutine(
                        interceptor.process_dispatch_event(event, repo_snapshot.path)
                    )
                    if result and result.action == "auto_resolved":
                        decision = "dispatch_auto_resolved"
                        processed = True
                    elif result and result.action == "ignore":
                        decision = "dispatch_ignored"
                        processed = True
                    else:
                        # Interceptor declined — escalate to a PMA turn if
                        # the reactive gate allows it.
                        allowed, gate_reason = self._pma_reactive_gate(event)
                        if not allowed:
                            decision = gate_reason
                            processed = True
                        else:
                            decision = "dispatch_escalated"
                            processed = self._enqueue_pma_for_lifecycle_event(
                                event, reason="dispatch_escalated"
                            )
                else:
                    # No interceptor configured: go straight to the gate.
                    allowed, gate_reason = self._pma_reactive_gate(event)
                    if not allowed:
                        decision = gate_reason
                        processed = True
                    else:
                        decision = "dispatch_enqueued"
                        processed = self._enqueue_pma_for_lifecycle_event(
                            event, reason="dispatch_created"
                        )
        elif event.event_type in (
            LifecycleEventType.FLOW_PAUSED,
            LifecycleEventType.FLOW_COMPLETED,
            LifecycleEventType.FLOW_FAILED,
            LifecycleEventType.FLOW_STOPPED,
        ):
            if not self.hub_config.pma.enabled:
                decision = "pma_disabled"
                processed = True
            else:
                allowed, gate_reason = self._pma_reactive_gate(event)
                if not allowed:
                    decision = gate_reason
                    processed = True
                else:
                    decision = "flow_enqueued"
                    processed = self._enqueue_pma_for_lifecycle_event(
                        event, reason=event.event_type.value
                    )

        if processed:
            self.lifecycle_store.mark_processed(event_id)
            # Keep the processed tail bounded.
            self.lifecycle_store.prune_processed(keep_last=50)

        logger.info(
            "Lifecycle event processed: event_id=%s type=%s repo_id=%s run_id=%s decision=%s processed=%s",
            event.event_id,
            event.event_type.value,
            event.repo_id,
            event.run_id,
            decision,
            processed,
        )
1372
+
1096
1373
  def _snapshot_from_record(self, record: DiscoveryRecord) -> RepoSnapshot:
1097
1374
  repo_path = record.absolute_path
1098
1375
  lock_path = repo_path / ".codex-autorunner" / "lock"
@@ -31,6 +31,7 @@ class LifecycleEvent:
31
31
  repo_id: str
32
32
  run_id: str
33
33
  data: dict[str, Any] = field(default_factory=dict)
34
+ origin: str = "system"
34
35
  timestamp: str = field(
35
36
  default_factory=lambda: datetime.now(timezone.utc).isoformat()
36
37
  )
@@ -100,11 +101,18 @@ class LifecycleEventStore:
100
101
  import uuid
101
102
 
102
103
  event_id = str(uuid.uuid4())
104
+ origin_raw = entry.get("origin")
105
+ origin = (
106
+ str(origin_raw).strip()
107
+ if isinstance(origin_raw, str) and origin_raw.strip()
108
+ else "system"
109
+ )
103
110
  event = LifecycleEvent(
104
111
  event_type=event_type,
105
112
  repo_id=str(entry.get("repo_id", "")),
106
113
  run_id=str(entry.get("run_id", "")),
107
114
  data=dict(entry.get("data", {})),
115
+ origin=origin,
108
116
  timestamp=str(entry.get("timestamp", "")),
109
117
  processed=bool(entry.get("processed", False)),
110
118
  event_id=event_id,
@@ -128,6 +136,7 @@ class LifecycleEventStore:
128
136
  "repo_id": event.repo_id,
129
137
  "run_id": event.run_id,
130
138
  "data": event.data,
139
+ "origin": event.origin,
131
140
  "timestamp": event.timestamp,
132
141
  "processed": event.processed,
133
142
  }
@@ -183,57 +192,87 @@ class LifecycleEventEmitter:
183
192
  return event.event_id
184
193
 
185
194
  def emit_flow_paused(
186
- self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
195
+ self,
196
+ repo_id: str,
197
+ run_id: str,
198
+ *,
199
+ data: Optional[dict[str, Any]] = None,
200
+ origin: str = "system",
187
201
  ) -> str:
188
202
  event = LifecycleEvent(
189
203
  event_type=LifecycleEventType.FLOW_PAUSED,
190
204
  repo_id=repo_id,
191
205
  run_id=run_id,
192
206
  data=data or {},
207
+ origin=origin,
193
208
  )
194
209
  return self.emit(event)
195
210
 
196
211
  def emit_flow_completed(
197
- self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
212
+ self,
213
+ repo_id: str,
214
+ run_id: str,
215
+ *,
216
+ data: Optional[dict[str, Any]] = None,
217
+ origin: str = "system",
198
218
  ) -> str:
199
219
  event = LifecycleEvent(
200
220
  event_type=LifecycleEventType.FLOW_COMPLETED,
201
221
  repo_id=repo_id,
202
222
  run_id=run_id,
203
223
  data=data or {},
224
+ origin=origin,
204
225
  )
205
226
  return self.emit(event)
206
227
 
207
228
  def emit_flow_failed(
208
- self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
229
+ self,
230
+ repo_id: str,
231
+ run_id: str,
232
+ *,
233
+ data: Optional[dict[str, Any]] = None,
234
+ origin: str = "system",
209
235
  ) -> str:
210
236
  event = LifecycleEvent(
211
237
  event_type=LifecycleEventType.FLOW_FAILED,
212
238
  repo_id=repo_id,
213
239
  run_id=run_id,
214
240
  data=data or {},
241
+ origin=origin,
215
242
  )
216
243
  return self.emit(event)
217
244
 
218
245
  def emit_flow_stopped(
219
- self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
246
+ self,
247
+ repo_id: str,
248
+ run_id: str,
249
+ *,
250
+ data: Optional[dict[str, Any]] = None,
251
+ origin: str = "system",
220
252
  ) -> str:
221
253
  event = LifecycleEvent(
222
254
  event_type=LifecycleEventType.FLOW_STOPPED,
223
255
  repo_id=repo_id,
224
256
  run_id=run_id,
225
257
  data=data or {},
258
+ origin=origin,
226
259
  )
227
260
  return self.emit(event)
228
261
 
229
262
  def emit_dispatch_created(
230
- self, repo_id: str, run_id: str, *, data: Optional[dict[str, Any]] = None
263
+ self,
264
+ repo_id: str,
265
+ run_id: str,
266
+ *,
267
+ data: Optional[dict[str, Any]] = None,
268
+ origin: str = "system",
231
269
  ) -> str:
232
270
  event = LifecycleEvent(
233
271
  event_type=LifecycleEventType.DISPATCH_CREATED,
234
272
  repo_id=repo_id,
235
273
  run_id=run_id,
236
274
  data=data or {},
275
+ origin=origin,
237
276
  )
238
277
  return self.emit(event)
239
278
 
@@ -0,0 +1,81 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+ from typing import Any, Optional
6
+
7
+ from ..integrations.telegram.adapter import chunk_message
8
+ from ..integrations.telegram.constants import TELEGRAM_MAX_MESSAGE_LENGTH
9
+ from ..integrations.telegram.state import OutboxRecord, TelegramStateStore
10
+ from .pma_sink import PmaActiveSinkStore
11
+ from .time_utils import now_iso
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ async def deliver_pma_output_to_active_sink(
17
+ *,
18
+ hub_root: Path,
19
+ assistant_text: str,
20
+ turn_id: str,
21
+ lifecycle_event: Optional[dict[str, Any]],
22
+ telegram_state_path: Path,
23
+ ) -> bool:
24
+ if not lifecycle_event:
25
+ return False
26
+ if not assistant_text or not assistant_text.strip():
27
+ return False
28
+ if not isinstance(turn_id, str) or not turn_id:
29
+ return False
30
+
31
+ sink_store = PmaActiveSinkStore(hub_root)
32
+ sink = sink_store.load()
33
+ if not isinstance(sink, dict):
34
+ return False
35
+ if sink.get("kind") != "telegram":
36
+ return False
37
+
38
+ last_delivery = sink.get("last_delivery_turn_id")
39
+ if isinstance(last_delivery, str) and last_delivery == turn_id:
40
+ return False
41
+
42
+ chat_id = sink.get("chat_id")
43
+ thread_id = sink.get("thread_id")
44
+ if not isinstance(chat_id, int):
45
+ return False
46
+ if thread_id is not None and not isinstance(thread_id, int):
47
+ thread_id = None
48
+
49
+ chunks = chunk_message(
50
+ assistant_text, max_len=TELEGRAM_MAX_MESSAGE_LENGTH, with_numbering=True
51
+ )
52
+ if not chunks:
53
+ return False
54
+
55
+ store = TelegramStateStore(telegram_state_path)
56
+ try:
57
+ for idx, chunk in enumerate(chunks, 1):
58
+ record_id = f"pma:{turn_id}:{idx}"
59
+ record = OutboxRecord(
60
+ record_id=record_id,
61
+ chat_id=chat_id,
62
+ thread_id=thread_id,
63
+ reply_to_message_id=None,
64
+ placeholder_message_id=None,
65
+ text=chunk,
66
+ created_at=now_iso(),
67
+ operation="send",
68
+ outbox_key=record_id,
69
+ )
70
+ await store.enqueue_outbox(record)
71
+ except Exception:
72
+ logger.exception("Failed to enqueue PMA output to Telegram outbox")
73
+ return False
74
+ finally:
75
+ await store.close()
76
+
77
+ sink_store.mark_delivered(turn_id)
78
+ return True
79
+
80
+
81
+ __all__ = ["deliver_pma_output_to_active_sink"]