codex-autorunner 1.1.0-py3-none-any.whl → 1.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +17 -7
- codex_autorunner/bootstrap.py +219 -1
- codex_autorunner/core/__init__.py +17 -1
- codex_autorunner/core/about_car.py +124 -11
- codex_autorunner/core/app_server_threads.py +6 -0
- codex_autorunner/core/config.py +238 -3
- codex_autorunner/core/context_awareness.py +39 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +71 -1
- codex_autorunner/core/flows/reconciler.py +4 -1
- codex_autorunner/core/flows/runtime.py +22 -0
- codex_autorunner/core/flows/store.py +61 -9
- codex_autorunner/core/flows/transition.py +23 -16
- codex_autorunner/core/flows/ux_helpers.py +18 -3
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/hub.py +198 -41
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +683 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/agent_backend.py +2 -5
- codex_autorunner/core/ports/run_event.py +1 -4
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +5 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/ticket_linter_cli.py +17 -0
- codex_autorunner/core/ticket_manager_cli.py +154 -92
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/utils.py +34 -6
- codex_autorunner/flows/review/service.py +23 -25
- codex_autorunner/flows/ticket_flow/definition.py +43 -1
- codex_autorunner/integrations/agents/__init__.py +2 -0
- codex_autorunner/integrations/agents/backend_orchestrator.py +18 -0
- codex_autorunner/integrations/agents/codex_backend.py +19 -8
- codex_autorunner/integrations/agents/runner.py +3 -8
- codex_autorunner/integrations/agents/wiring.py +8 -0
- codex_autorunner/integrations/telegram/adapter.py +1 -1
- codex_autorunner/integrations/telegram/config.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +346 -58
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +202 -45
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +18 -7
- codex_autorunner/integrations/telegram/handlers/messages.py +34 -3
- codex_autorunner/integrations/telegram/helpers.py +1 -3
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +30 -0
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +10 -4
- codex_autorunner/integrations/telegram/transport.py +10 -3
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/server.py +2 -2
- codex_autorunner/static/agentControls.js +21 -5
- codex_autorunner/static/app.js +115 -11
- codex_autorunner/static/archive.js +274 -81
- codex_autorunner/static/archiveApi.js +21 -0
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/constants.js +1 -1
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +46 -81
- codex_autorunner/static/index.html +303 -24
- codex_autorunner/static/messages.js +82 -4
- codex_autorunner/static/notifications.js +288 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/settings.js +3 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9141 -6742
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminalManager.js +22 -3
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +41 -13
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +69 -19
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +28 -0
- codex_autorunner/static/workspace.js +258 -44
- codex_autorunner/static/workspaceFileBrowser.js +6 -4
- codex_autorunner/surfaces/cli/cli.py +1465 -155
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/web/app.py +253 -49
- codex_autorunner/surfaces/web/routes/__init__.py +4 -0
- codex_autorunner/surfaces/web/routes/analytics.py +29 -22
- codex_autorunner/surfaces/web/routes/archive.py +197 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +297 -36
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +219 -29
- codex_autorunner/surfaces/web/routes/messages.py +70 -39
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +1 -1
- codex_autorunner/surfaces/web/routes/shared.py +0 -3
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/runner_manager.py +2 -2
- codex_autorunner/surfaces/web/schemas.py +81 -18
- codex_autorunner/tickets/agent_pool.py +27 -0
- codex_autorunner/tickets/files.py +33 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +3 -0
- codex_autorunner/tickets/outbox.py +41 -5
- codex_autorunner/tickets/runner.py +350 -69
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/METADATA +15 -19
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/RECORD +132 -101
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -3302
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.1.0.dist-info → codex_autorunner-1.2.1.dist-info}/top_level.txt +0 -0

codex_autorunner/integrations/telegram/handlers/commands/workspace.py
@@ -1,11 +1,15 @@
 from __future__ import annotations
 
+import dataclasses
 import logging
 from dataclasses import dataclass
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional, Sequence
 
 from .....agents.opencode.runtime import extract_session_id
+from .....core.app_server_threads import PMA_KEY, PMA_OPENCODE_KEY
+from .....core.flows import FlowStore
+from .....core.flows.models import FlowRunStatus
 from .....core.logging_utils import log_event
 from .....core.state import now_iso
 from .....core.utils import canonicalize_path, resolve_opencode_binary
@@ -108,6 +112,32 @@ class ResumeThreadData:
 
 
 class WorkspaceCommands(SharedHelpers):
+    def _resolve_workspace_path(
+        self,
+        record: Optional["TelegramTopicRecord"],
+        *,
+        allow_pma: bool = False,
+    ) -> tuple[Optional[str], Optional[str]]:
+        if record and record.workspace_path:
+            return record.workspace_path, None
+        if allow_pma and record and record.pma_enabled:
+            hub_root = getattr(self, "_hub_root", None)
+            if hub_root is None:
+                return None, "PMA unavailable; hub root not configured."
+            return str(hub_root), None
+        return None, "Topic not bound. Use /bind <repo_id> or /bind <path>."
+
+    def _record_with_workspace_path(
+        self,
+        record: Optional["TelegramTopicRecord"],
+        workspace_path: Optional[str],
+    ) -> Optional["TelegramTopicRecord"]:
+        if record is None or not workspace_path:
+            return record
+        if record.workspace_path == workspace_path:
+            return record
+        return dataclasses.replace(record, workspace_path=workspace_path)
+
     async def _apply_agent_change(
         self,
         chat_id: int,
@@ -165,8 +195,17 @@ class WorkspaceCommands(SharedHelpers):
             )
             return
         desired = normalize_agent(argv[0])
+        workspace_path, error = self._resolve_workspace_path(record, allow_pma=True)
+        if workspace_path is None:
+            await self._send_message(
+                message.chat_id,
+                error or "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return
         try:
-            client = await self._client_for_workspace(
+            client = await self._client_for_workspace(workspace_path)
         except AppServerUnavailableError as exc:
             log_event(
                 self._logger,
@@ -186,7 +225,7 @@ class WorkspaceCommands(SharedHelpers):
         if client is None:
             await self._send_message(
                 message.chat_id,
-                "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                error or "Topic not bound. Use /bind <repo_id> or /bind <path>.",
                 thread_id=message.thread_id,
                 reply_to=message.message_id,
             )
@@ -532,11 +571,37 @@ class WorkspaceCommands(SharedHelpers):
         return await self._router.update_topic(chat_id, thread_id, apply)
 
     async def _require_bound_record(
-        self,
+        self,
+        message: TelegramMessage,
+        *,
+        prompt: Optional[str] = None,
+        allow_pma: bool = False,
     ) -> Optional["TelegramTopicRecord"]:
         key = await self._resolve_topic_key(message.chat_id, message.thread_id)
         record = await self._router.get_topic(key)
-        if record is None
+        if record is None:
+            await self._send_message(
+                message.chat_id,
+                prompt or "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return None
+        if record.workspace_path:
+            await self._refresh_workspace_id(key, record)
+            return record
+        if allow_pma and record.pma_enabled:
+            hub_root = getattr(self, "_hub_root", None)
+            if hub_root is None:
+                await self._send_message(
+                    message.chat_id,
+                    "PMA unavailable; hub root not configured.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return None
+            return record
+        if not record.workspace_path:
             await self._send_message(
                 message.chat_id,
                 prompt or "Topic not bound. Use /bind <repo_id> or /bind <path>.",
@@ -544,7 +609,6 @@ class WorkspaceCommands(SharedHelpers):
                 reply_to=message.message_id,
             )
             return None
-        await self._refresh_workspace_id(key, record)
         return record
 
     async def _ensure_thread_id(
@@ -777,6 +841,45 @@ class WorkspaceCommands(SharedHelpers):
             return False
         return True
 
+    async def _handle_repos(
+        self, message: TelegramMessage, _args: str, _runtime: Any
+    ) -> None:
+        if not self._manifest_path or not self._hub_root:
+            await self._send_message(
+                message.chat_id,
+                "Hub manifest not configured.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return
+
+        try:
+            manifest = load_manifest(self._manifest_path, self._hub_root)
+        except Exception as exc:
+            await self._send_message(
+                message.chat_id,
+                f"Failed to load manifest: {exc}",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return
+
+        lines = ["Repositories:"]
+        for repo in manifest.repos:
+            if not repo.enabled:
+                continue
+            lines.append(f"- `{repo.id}` ({repo.path})")
+
+        lines.append("\nUse /bind <repo_id> to switch context.")
+
+        await self._send_message(
+            message.chat_id,
+            "\n".join(lines),
+            thread_id=message.thread_id,
+            reply_to=message.message_id,
+            parse_mode="Markdown",
+        )
+
     async def _handle_bind(self, message: TelegramMessage, args: str) -> None:
         key = await self._resolve_topic_key(message.chat_id, message.thread_id)
         if not args:
@@ -828,13 +931,19 @@ class WorkspaceCommands(SharedHelpers):
             scope=scope,
         )
         workspace_id = self._workspace_id_for_path(workspace_path)
-
-
-
-
-
-
-
+
+        def apply_bind_updates(record: TelegramTopicRecord) -> None:
+            if workspace_id:
+                record.workspace_id = workspace_id
+            # Mutual exclusion: Binding to a repo disables PMA mode.
+            record.pma_enabled = False
+
+        await self._router.update_topic(
+            chat_id,
+            thread_id,
+            apply_bind_updates,
+            scope=scope,
+        )
         await self._answer_callback(callback, "Bound to repo")
         await self._finalize_selection(
             key,
@@ -866,16 +975,173 @@ class WorkspaceCommands(SharedHelpers):
             scope=scope,
         )
         workspace_id = self._workspace_id_for_path(workspace_path)
-
-
+
+        def apply_bind_updates(record: TelegramTopicRecord) -> None:
+            if workspace_id:
+                record.workspace_id = workspace_id
+            # Mutual exclusion: Binding to a repo disables PMA mode.
+            record.pma_enabled = False
+
+        await self._router.update_topic(
+            message.chat_id,
+            message.thread_id,
+            apply_bind_updates,
+            scope=scope,
+        )
+        await self._send_message(
+            message.chat_id,
+            f"Bound to {repo_id or workspace_path}.",
+            thread_id=message.thread_id,
+            reply_to=message.message_id,
+        )
+
+    async def _handle_reset(self, message: TelegramMessage) -> None:
+        key = await self._resolve_topic_key(message.chat_id, message.thread_id)
+        record = await self._router.get_topic(key)
+        pma_enabled = bool(record and record.pma_enabled)
+        if pma_enabled:
+            registry = getattr(self, "_hub_thread_registry", None)
+            agent = self._effective_agent(record)
+            pma_key = PMA_OPENCODE_KEY if agent == "opencode" else PMA_KEY
+            if registry:
+                registry.reset_thread(pma_key)
+            await self._send_message(
                 message.chat_id,
-            message.
-
-
+                "PMA thread reset. Send a message to start a fresh PMA turn.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
             )
+            return
+        if record is None or not record.workspace_path:
+            await self._send_message(
+                message.chat_id,
+                "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return
+        agent = self._effective_agent(record)
+        if agent == "opencode":
+            supervisor = getattr(self, "_opencode_supervisor", None)
+            if supervisor is None:
+                await self._send_message(
+                    message.chat_id,
+                    "OpenCode backend unavailable; install opencode or switch to /agent codex.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+            workspace_root = self._canonical_workspace_root(record.workspace_path)
+            if workspace_root is None:
+                await self._send_message(
+                    message.chat_id,
+                    "Workspace unavailable.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+            try:
+                client = await supervisor.get_client(workspace_root)
+                session = await client.create_session(directory=str(workspace_root))
+            except Exception as exc:
+                log_event(
+                    self._logger,
+                    logging.WARNING,
+                    "telegram.opencode.session.failed",
+                    chat_id=message.chat_id,
+                    thread_id=message.thread_id,
+                    exc=exc,
+                )
+                await self._send_message(
+                    message.chat_id,
+                    "Failed to reset OpenCode thread.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+            session_id = extract_session_id(session, allow_fallback_id=True)
+            if not session_id:
+                await self._send_message(
+                    message.chat_id,
+                    "Failed to reset OpenCode thread.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+
+            def apply(record: "TelegramTopicRecord") -> None:
+                record.active_thread_id = session_id
+                if session_id in record.thread_ids:
+                    record.thread_ids.remove(session_id)
+                record.thread_ids.insert(0, session_id)
+                if len(record.thread_ids) > MAX_TOPIC_THREAD_HISTORY:
+                    record.thread_ids = record.thread_ids[:MAX_TOPIC_THREAD_HISTORY]
+                _set_thread_summary(
+                    record,
+                    session_id,
+                    last_used_at=now_iso(),
+                    workspace_path=record.workspace_path,
+                    rollout_path=record.rollout_path,
+                )
+
+            await self._router.update_topic(message.chat_id, message.thread_id, apply)
+            thread_id = session_id
+        else:
+            try:
+                client = await self._client_for_workspace(record.workspace_path)
+            except AppServerUnavailableError as exc:
+                log_event(
+                    self._logger,
+                    logging.WARNING,
+                    "telegram.app_server.unavailable",
+                    chat_id=message.chat_id,
+                    thread_id=message.thread_id,
+                    exc=exc,
+                )
+                await self._send_message(
+                    message.chat_id,
+                    "App server unavailable; try again or check logs.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+            if client is None:
+                await self._send_message(
+                    message.chat_id,
+                    "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+            thread = await client.thread_start(record.workspace_path, agent=agent)
+            if not await self._require_thread_workspace(
+                message, record.workspace_path, thread, action="thread_start"
+            ):
+                return
+            thread_id = _extract_thread_id(thread)
+            if not thread_id:
+                await self._send_message(
+                    message.chat_id,
+                    "Failed to reset thread.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return
+            await self._apply_thread_result(
+                message.chat_id, message.thread_id, thread, active_thread_id=thread_id
+            )
+        effort_label = (
+            record.effort or "default" if self._agent_supports_effort(agent) else "n/a"
+        )
         await self._send_message(
             message.chat_id,
-
+            "\n".join(
+                [
+                    f"Reset thread {thread_id}.",
+                    f"Agent: {agent}",
+                    f"Effort: {effort_label}",
+                ]
+            ),
             thread_id=message.thread_id,
             reply_to=message.message_id,
         )
@@ -883,6 +1149,20 @@ class WorkspaceCommands(SharedHelpers):
     async def _handle_new(self, message: TelegramMessage) -> None:
         key = await self._resolve_topic_key(message.chat_id, message.thread_id)
         record = await self._router.get_topic(key)
+        pma_enabled = bool(record and record.pma_enabled)
+        if pma_enabled:
+            registry = getattr(self, "_hub_thread_registry", None)
+            agent = self._effective_agent(record)
+            pma_key = PMA_OPENCODE_KEY if agent == "opencode" else PMA_KEY
+            if registry:
+                registry.reset_thread(pma_key)
+            await self._send_message(
+                message.chat_id,
+                "PMA session reset. Send a message to start a fresh PMA turn.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return
         if record is None or not record.workspace_path:
             await self._send_message(
                 message.chat_id,
@@ -1128,9 +1408,16 @@ class WorkspaceCommands(SharedHelpers):
         if await self._handle_resume_shortcuts(key, message, parsed_args):
             return
         record = await self._router.get_topic(key)
-        record = await self._ensure_resume_record(message, record)
+        record = await self._ensure_resume_record(message, record, allow_pma=True)
         if record is None:
             return
+        if record.pma_enabled and not parsed_args.show_unscoped:
+            parsed_args = ResumeCommandArgs(
+                trimmed=parsed_args.trimmed,
+                remaining=parsed_args.remaining,
+                show_unscoped=True,
+                refresh=parsed_args.refresh,
+            )
         if self._effective_agent(record) == "opencode":
             await self._handle_opencode_resume(
                 message,
@@ -1208,10 +1495,14 @@ class WorkspaceCommands(SharedHelpers):
             return False
 
     async def _ensure_resume_record(
-        self,
+        self,
+        message: TelegramMessage,
+        record: Optional["TelegramTopicRecord"],
+        *,
+        allow_pma: bool = False,
     ) -> Optional["TelegramTopicRecord"]:
         """Validate resume preconditions and return the topic record."""
-        if record is None
+        if record is None:
             await self._send_message(
                 message.chat_id,
                 "Topic not bound. Use /bind <repo_id> or /bind <path>.",
@@ -1219,6 +1510,28 @@ class WorkspaceCommands(SharedHelpers):
                 reply_to=message.message_id,
             )
             return None
+        if not record.workspace_path:
+            if allow_pma and record.pma_enabled:
+                workspace_path, error = self._resolve_workspace_path(
+                    record, allow_pma=True
+                )
+                if workspace_path is None:
+                    await self._send_message(
+                        message.chat_id,
+                        error or "PMA unavailable; hub root not configured.",
+                        thread_id=message.thread_id,
+                        reply_to=message.message_id,
+                    )
+                    return None
+                record = self._record_with_workspace_path(record, workspace_path)
+            else:
+                await self._send_message(
+                    message.chat_id,
+                    "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                    thread_id=message.thread_id,
+                    reply_to=message.message_id,
+                )
+                return None
         agent = self._effective_agent(record)
         if not self._agent_supports_resume(agent):
             await self._send_message(
@@ -1234,8 +1547,17 @@ class WorkspaceCommands(SharedHelpers):
         self, message: TelegramMessage, record: "TelegramTopicRecord"
     ) -> Optional[CodexAppServerClient]:
         """Resolve the app server client for the topic workspace."""
+        workspace_path, error = self._resolve_workspace_path(record, allow_pma=True)
+        if workspace_path is None:
+            await self._send_message(
+                message.chat_id,
+                error or "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                thread_id=message.thread_id,
+                reply_to=message.message_id,
+            )
+            return None
         try:
-            client = await self._client_for_workspace(
+            client = await self._client_for_workspace(workspace_path)
         except AppServerUnavailableError as exc:
             log_event(
                 self._logger,
@@ -1255,7 +1577,7 @@ class WorkspaceCommands(SharedHelpers):
         if client is None:
             await self._send_message(
                 message.chat_id,
-                "Topic not bound. Use /bind <repo_id> or /bind <path>.",
+                error or "Topic not bound. Use /bind <repo_id> or /bind <path>.",
                 thread_id=message.thread_id,
                 reply_to=message.message_id,
             )
@@ -1999,22 +2321,33 @@ class WorkspaceCommands(SharedHelpers):
         runtime = self._router.runtime_for(key)
         approval_policy, sandbox_policy = self._effective_policies(record)
         agent = self._effective_agent(record)
+        is_pma = bool(getattr(record, "pma_enabled", False))
+        workspace_label = record.workspace_path or ("hub" if is_pma else "unbound")
         effort_label = (
             record.effort or "default" if self._agent_supports_effort(agent) else "n/a"
         )
-        lines = [
-
-
-
-
-
-
-
-
-
-
-
-
+        lines = []
+        if is_pma:
+            lines.append("Mode: PMA (hub)")
+            # Show previous binding if available
+            if record.pma_prev_workspace_path:
+                lines.append(f"Previous binding: {record.pma_prev_workspace_path}")
+            lines.append("Use /pma off to restore previous binding.")
+        lines.extend(
+            [
+                f"Workspace: {workspace_label}",
+                f"Workspace ID: {record.workspace_id or 'unknown'}",
+                f"Active thread: {record.active_thread_id or 'none'}",
+                f"Active turn: {runtime.current_turn_id or 'none'}",
+                f"Agent: {agent}",
+                f"Resume: {'supported' if self._agent_supports_resume(agent) else 'unsupported'}",
+                f"Model: {record.model or 'default'}",
+                f"Effort: {effort_label}",
+                f"Approval mode: {record.approval_mode}",
+                f"Approval policy: {approval_policy or 'default'}",
+                f"Sandbox policy: {_format_sandbox_policy(sandbox_policy)}",
+            ]
+        )
         pending = await self._store.pending_approvals_for_key(key)
         if pending:
             lines.append(f"Pending approvals: {len(pending)}")
@@ -2033,8 +2366,136 @@ class WorkspaceCommands(SharedHelpers):
         lines.extend(_format_token_usage(token_usage))
         rate_limits = await self._read_rate_limits(record.workspace_path, agent=agent)
         lines.extend(_format_rate_limits(rate_limits))
-
+
+        manifest_path = getattr(self, "_manifest_path", None)
+        hub_root = getattr(self, "_hub_root", None)
+        if is_pma:
+            if hub_root:
+                lines.append(f"Hub root: {hub_root}")
+            if manifest_path:
+                lines.append(f"Manifest: {manifest_path}")
+            registry = getattr(self, "_hub_thread_registry", None)
+            if registry and hasattr(self, "_pma_registry_key"):
+                try:
+                    pma_key = self._pma_registry_key(record, message)
+                    pma_thread_id = registry.get_thread_id(pma_key) if pma_key else None
+                    # Show thread scoping
+                    require_topics = getattr(self._config, "require_topics", False)
+                    scoping = "per-topic" if require_topics else "global (per hub)"
+                    lines.append(f"PMA thread: {pma_thread_id or 'none'} ({scoping})")
+                except Exception:
+                    pass
+            if hub_root:
+                try:
+                    pma_dir = hub_root / ".codex-autorunner" / "pma"
+                    inbox_dir = pma_dir / "inbox"
+                    outbox_dir = pma_dir / "outbox"
+                    inbox_count = (
+                        len(
+                            [
+                                path
+                                for path in inbox_dir.iterdir()
+                                if path.is_file() and not path.name.startswith(".")
+                            ]
+                        )
+                        if inbox_dir.exists()
+                        else 0
+                    )
+                    outbox_count = (
+                        len(
+                            [
+                                path
+                                for path in outbox_dir.iterdir()
+                                if path.is_file() and not path.name.startswith(".")
+                            ]
+                        )
+                        if outbox_dir.exists()
+                        else 0
+                    )
+                    lines.append(
+                        f"PMA files: inbox {inbox_count}, outbox {outbox_count}"
+                    )
+                except Exception:
+                    pass
+        if is_pma and manifest_path and hub_root:
+            try:
+                manifest = load_manifest(manifest_path, hub_root)
+                enabled_repos = [repo for repo in manifest.repos if repo.enabled]
+                lines.append(
+                    f"Hub repos: {len(enabled_repos)}/{len(manifest.repos)} enabled"
+                )
+                active_count = 0
+                paused_count = 0
+                idle_count = 0
+                active_repos: list[str] = []
+                paused_repos: list[str] = []
+                for repo in manifest.repos:
+                    if not repo.enabled:
+                        continue
+                    repo_root = (hub_root / repo.path).resolve()
+                    db_path = repo_root / ".codex-autorunner" / "flows.db"
+                    if not db_path.exists():
+                        idle_count += 1
+                        continue
+
+                    store = FlowStore(db_path)
+                    try:
+                        store.initialize()
+                        runs = store.list_flow_runs(flow_type="ticket_flow")
+                        if runs:
+                            latest = runs[0]
+                            if latest.status.is_active():
+                                active_count += 1
+                                active_repos.append(repo.id)
+                            elif latest.status == FlowRunStatus.PAUSED:
+                                paused_count += 1
+                                paused_repos.append(repo.id)
+                            else:
+                                idle_count += 1
+                        else:
+                            idle_count += 1
+                    except Exception:
+                        pass
+                    finally:
+                        store.close()
+                lines.append(
+                    f"Hub flows: {active_count} active, {paused_count} paused, {idle_count} idle"
+                )
+                if active_repos:
+                    preview = ", ".join(active_repos[:5])
+                    suffix = "" if len(active_repos) <= 5 else "..."
+                    lines.append(f"Active repos: {preview}{suffix}")
+                if paused_repos:
+                    preview = ", ".join(paused_repos[:5])
+                    suffix = "" if len(paused_repos) <= 5 else "..."
+                    lines.append(f"Paused repos: {preview}{suffix}")
+            except Exception:
+                pass
+
+        if not record.workspace_path and not is_pma:
             lines.append("Use /bind <repo_id> or /bind <path>.")
+
+        if record.workspace_path and not is_pma:
+            repo_root = Path(record.workspace_path)
+            db_path = repo_root / ".codex-autorunner" / "flows.db"
+            if db_path.exists():
+                store = FlowStore(db_path)
+                try:
+                    store.initialize()
+                    runs = store.list_flow_runs(flow_type="ticket_flow")
+                    if runs:
+                        latest = runs[0]
+                        if latest.status.is_active():
+                            lines.append(
+                                f"Active Flow: {latest.status.value} (run {latest.id})"
+                            )
+                        elif latest.status == FlowRunStatus.PAUSED:
+                            lines.append(f"Active Flow: PAUSED (run {latest.id})")
+                except Exception:
+                    pass
+                finally:
+                    store.close()
+
         await self._send_message(
             message.chat_id,
             "\n".join(lines),