codex-autorunner 1.0.0__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/__init__.py +12 -1
- codex_autorunner/agents/codex/harness.py +1 -1
- codex_autorunner/agents/opencode/client.py +113 -4
- codex_autorunner/agents/opencode/constants.py +3 -0
- codex_autorunner/agents/opencode/harness.py +6 -1
- codex_autorunner/agents/opencode/runtime.py +59 -18
- codex_autorunner/agents/opencode/supervisor.py +4 -0
- codex_autorunner/agents/registry.py +36 -7
- codex_autorunner/bootstrap.py +226 -4
- codex_autorunner/cli.py +5 -1174
- codex_autorunner/codex_cli.py +20 -84
- codex_autorunner/core/__init__.py +20 -0
- codex_autorunner/core/about_car.py +119 -1
- codex_autorunner/core/app_server_ids.py +59 -0
- codex_autorunner/core/app_server_threads.py +17 -2
- codex_autorunner/core/app_server_utils.py +165 -0
- codex_autorunner/core/archive.py +349 -0
- codex_autorunner/core/codex_runner.py +6 -2
- codex_autorunner/core/config.py +433 -4
- codex_autorunner/core/context_awareness.py +38 -0
- codex_autorunner/core/docs.py +0 -122
- codex_autorunner/core/drafts.py +58 -4
- codex_autorunner/core/exceptions.py +4 -0
- codex_autorunner/core/filebox.py +265 -0
- codex_autorunner/core/flows/controller.py +96 -2
- codex_autorunner/core/flows/models.py +13 -0
- codex_autorunner/core/flows/reasons.py +52 -0
- codex_autorunner/core/flows/reconciler.py +134 -0
- codex_autorunner/core/flows/runtime.py +57 -4
- codex_autorunner/core/flows/store.py +142 -7
- codex_autorunner/core/flows/transition.py +27 -15
- codex_autorunner/core/flows/ux_helpers.py +272 -0
- codex_autorunner/core/flows/worker_process.py +32 -6
- codex_autorunner/core/git_utils.py +62 -0
- codex_autorunner/core/hub.py +291 -20
- codex_autorunner/core/lifecycle_events.py +253 -0
- codex_autorunner/core/notifications.py +14 -2
- codex_autorunner/core/path_utils.py +2 -1
- codex_autorunner/core/pma_audit.py +224 -0
- codex_autorunner/core/pma_context.py +496 -0
- codex_autorunner/core/pma_dispatch_interceptor.py +284 -0
- codex_autorunner/core/pma_lifecycle.py +527 -0
- codex_autorunner/core/pma_queue.py +367 -0
- codex_autorunner/core/pma_safety.py +221 -0
- codex_autorunner/core/pma_state.py +115 -0
- codex_autorunner/core/ports/__init__.py +28 -0
- codex_autorunner/{integrations/agents → core/ports}/agent_backend.py +13 -8
- codex_autorunner/core/ports/backend_orchestrator.py +41 -0
- codex_autorunner/{integrations/agents → core/ports}/run_event.py +23 -6
- codex_autorunner/core/prompt.py +0 -80
- codex_autorunner/core/prompts.py +56 -172
- codex_autorunner/core/redaction.py +0 -4
- codex_autorunner/core/review_context.py +11 -9
- codex_autorunner/core/runner_controller.py +35 -33
- codex_autorunner/core/runner_state.py +147 -0
- codex_autorunner/core/runtime.py +829 -0
- codex_autorunner/core/sqlite_utils.py +13 -4
- codex_autorunner/core/state.py +7 -10
- codex_autorunner/core/state_roots.py +62 -0
- codex_autorunner/core/supervisor_protocol.py +15 -0
- codex_autorunner/core/templates/__init__.py +39 -0
- codex_autorunner/core/templates/git_mirror.py +234 -0
- codex_autorunner/core/templates/provenance.py +56 -0
- codex_autorunner/core/templates/scan_cache.py +120 -0
- codex_autorunner/core/text_delta_coalescer.py +54 -0
- codex_autorunner/core/ticket_linter_cli.py +218 -0
- codex_autorunner/core/ticket_manager_cli.py +494 -0
- codex_autorunner/core/time_utils.py +11 -0
- codex_autorunner/core/types.py +18 -0
- codex_autorunner/core/update.py +4 -5
- codex_autorunner/core/update_paths.py +28 -0
- codex_autorunner/core/usage.py +164 -12
- codex_autorunner/core/utils.py +125 -15
- codex_autorunner/flows/review/__init__.py +17 -0
- codex_autorunner/{core/review.py → flows/review/service.py} +37 -34
- codex_autorunner/flows/ticket_flow/definition.py +52 -3
- codex_autorunner/integrations/agents/__init__.py +11 -19
- codex_autorunner/integrations/agents/backend_orchestrator.py +302 -0
- codex_autorunner/integrations/agents/codex_adapter.py +90 -0
- codex_autorunner/integrations/agents/codex_backend.py +177 -25
- codex_autorunner/integrations/agents/opencode_adapter.py +108 -0
- codex_autorunner/integrations/agents/opencode_backend.py +305 -32
- codex_autorunner/integrations/agents/runner.py +86 -0
- codex_autorunner/integrations/agents/wiring.py +279 -0
- codex_autorunner/integrations/app_server/client.py +7 -60
- codex_autorunner/integrations/app_server/env.py +2 -107
- codex_autorunner/{core/app_server_events.py → integrations/app_server/event_buffer.py} +15 -8
- codex_autorunner/integrations/telegram/adapter.py +65 -0
- codex_autorunner/integrations/telegram/config.py +46 -0
- codex_autorunner/integrations/telegram/constants.py +1 -1
- codex_autorunner/integrations/telegram/doctor.py +228 -6
- codex_autorunner/integrations/telegram/handlers/callbacks.py +7 -0
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +236 -74
- codex_autorunner/integrations/telegram/handlers/commands/files.py +314 -75
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +1496 -71
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +498 -37
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +206 -48
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +20 -3
- codex_autorunner/integrations/telegram/handlers/messages.py +27 -1
- codex_autorunner/integrations/telegram/handlers/selections.py +61 -1
- codex_autorunner/integrations/telegram/helpers.py +22 -1
- codex_autorunner/integrations/telegram/runtime.py +9 -4
- codex_autorunner/integrations/telegram/service.py +45 -10
- codex_autorunner/integrations/telegram/state.py +38 -0
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +338 -43
- codex_autorunner/integrations/telegram/transport.py +13 -4
- codex_autorunner/integrations/templates/__init__.py +27 -0
- codex_autorunner/integrations/templates/scan_agent.py +312 -0
- codex_autorunner/routes/__init__.py +37 -76
- codex_autorunner/routes/agents.py +2 -137
- codex_autorunner/routes/analytics.py +2 -238
- codex_autorunner/routes/app_server.py +2 -131
- codex_autorunner/routes/base.py +2 -596
- codex_autorunner/routes/file_chat.py +4 -833
- codex_autorunner/routes/flows.py +4 -977
- codex_autorunner/routes/messages.py +4 -456
- codex_autorunner/routes/repos.py +2 -196
- codex_autorunner/routes/review.py +2 -147
- codex_autorunner/routes/sessions.py +2 -175
- codex_autorunner/routes/settings.py +2 -168
- codex_autorunner/routes/shared.py +2 -275
- codex_autorunner/routes/system.py +4 -193
- codex_autorunner/routes/usage.py +2 -86
- codex_autorunner/routes/voice.py +2 -119
- codex_autorunner/routes/workspace.py +2 -270
- codex_autorunner/server.py +4 -4
- codex_autorunner/static/agentControls.js +61 -16
- codex_autorunner/static/app.js +126 -14
- codex_autorunner/static/archive.js +826 -0
- codex_autorunner/static/archiveApi.js +37 -0
- codex_autorunner/static/autoRefresh.js +7 -7
- codex_autorunner/static/chatUploads.js +137 -0
- codex_autorunner/static/dashboard.js +224 -171
- codex_autorunner/static/docChatCore.js +185 -13
- codex_autorunner/static/fileChat.js +68 -40
- codex_autorunner/static/fileboxUi.js +159 -0
- codex_autorunner/static/hub.js +114 -131
- codex_autorunner/static/index.html +375 -49
- codex_autorunner/static/messages.js +568 -87
- codex_autorunner/static/notifications.js +255 -0
- codex_autorunner/static/pma.js +1167 -0
- codex_autorunner/static/preserve.js +17 -0
- codex_autorunner/static/settings.js +128 -6
- codex_autorunner/static/smartRefresh.js +52 -0
- codex_autorunner/static/streamUtils.js +57 -0
- codex_autorunner/static/styles.css +9798 -6143
- codex_autorunner/static/tabs.js +152 -11
- codex_autorunner/static/templateReposSettings.js +225 -0
- codex_autorunner/static/terminal.js +18 -0
- codex_autorunner/static/ticketChatActions.js +165 -3
- codex_autorunner/static/ticketChatStream.js +17 -119
- codex_autorunner/static/ticketEditor.js +137 -15
- codex_autorunner/static/ticketTemplates.js +798 -0
- codex_autorunner/static/tickets.js +821 -98
- codex_autorunner/static/turnEvents.js +27 -0
- codex_autorunner/static/turnResume.js +33 -0
- codex_autorunner/static/utils.js +39 -0
- codex_autorunner/static/workspace.js +389 -82
- codex_autorunner/static/workspaceFileBrowser.js +15 -13
- codex_autorunner/surfaces/__init__.py +5 -0
- codex_autorunner/surfaces/cli/__init__.py +6 -0
- codex_autorunner/surfaces/cli/cli.py +2534 -0
- codex_autorunner/surfaces/cli/codex_cli.py +20 -0
- codex_autorunner/surfaces/cli/pma_cli.py +817 -0
- codex_autorunner/surfaces/telegram/__init__.py +3 -0
- codex_autorunner/surfaces/web/__init__.py +1 -0
- codex_autorunner/surfaces/web/app.py +2223 -0
- codex_autorunner/surfaces/web/hub_jobs.py +192 -0
- codex_autorunner/surfaces/web/middleware.py +587 -0
- codex_autorunner/surfaces/web/pty_session.py +370 -0
- codex_autorunner/surfaces/web/review.py +6 -0
- codex_autorunner/surfaces/web/routes/__init__.py +82 -0
- codex_autorunner/surfaces/web/routes/agents.py +138 -0
- codex_autorunner/surfaces/web/routes/analytics.py +284 -0
- codex_autorunner/surfaces/web/routes/app_server.py +132 -0
- codex_autorunner/surfaces/web/routes/archive.py +357 -0
- codex_autorunner/surfaces/web/routes/base.py +615 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +1117 -0
- codex_autorunner/surfaces/web/routes/filebox.py +227 -0
- codex_autorunner/surfaces/web/routes/flows.py +1354 -0
- codex_autorunner/surfaces/web/routes/messages.py +490 -0
- codex_autorunner/surfaces/web/routes/pma.py +1652 -0
- codex_autorunner/surfaces/web/routes/repos.py +197 -0
- codex_autorunner/surfaces/web/routes/review.py +148 -0
- codex_autorunner/surfaces/web/routes/sessions.py +176 -0
- codex_autorunner/surfaces/web/routes/settings.py +169 -0
- codex_autorunner/surfaces/web/routes/shared.py +277 -0
- codex_autorunner/surfaces/web/routes/system.py +196 -0
- codex_autorunner/surfaces/web/routes/templates.py +634 -0
- codex_autorunner/surfaces/web/routes/usage.py +89 -0
- codex_autorunner/surfaces/web/routes/voice.py +120 -0
- codex_autorunner/surfaces/web/routes/workspace.py +271 -0
- codex_autorunner/surfaces/web/runner_manager.py +25 -0
- codex_autorunner/surfaces/web/schemas.py +469 -0
- codex_autorunner/surfaces/web/static_assets.py +490 -0
- codex_autorunner/surfaces/web/static_refresh.py +86 -0
- codex_autorunner/surfaces/web/terminal_sessions.py +78 -0
- codex_autorunner/tickets/__init__.py +8 -1
- codex_autorunner/tickets/agent_pool.py +53 -4
- codex_autorunner/tickets/files.py +37 -16
- codex_autorunner/tickets/lint.py +50 -0
- codex_autorunner/tickets/models.py +6 -1
- codex_autorunner/tickets/outbox.py +50 -2
- codex_autorunner/tickets/runner.py +396 -57
- codex_autorunner/web/__init__.py +5 -1
- codex_autorunner/web/app.py +2 -1949
- codex_autorunner/web/hub_jobs.py +2 -191
- codex_autorunner/web/middleware.py +2 -586
- codex_autorunner/web/pty_session.py +2 -369
- codex_autorunner/web/runner_manager.py +2 -24
- codex_autorunner/web/schemas.py +2 -376
- codex_autorunner/web/static_assets.py +4 -441
- codex_autorunner/web/static_refresh.py +2 -85
- codex_autorunner/web/terminal_sessions.py +2 -77
- codex_autorunner/workspace/paths.py +49 -33
- codex_autorunner-1.2.0.dist-info/METADATA +150 -0
- codex_autorunner-1.2.0.dist-info/RECORD +339 -0
- codex_autorunner/core/adapter_utils.py +0 -21
- codex_autorunner/core/engine.py +0 -2653
- codex_autorunner/core/static_assets.py +0 -55
- codex_autorunner-1.0.0.dist-info/METADATA +0 -246
- codex_autorunner-1.0.0.dist-info/RECORD +0 -251
- /codex_autorunner/{routes → surfaces/web/routes}/terminal_images.py +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/WHEEL +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-1.0.0.dist-info → codex_autorunner-1.2.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,2223 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import shlex
|
|
5
|
+
import sys
|
|
6
|
+
import threading
|
|
7
|
+
from contextlib import ExitStack, asynccontextmanager
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Mapping, Optional
|
|
11
|
+
|
|
12
|
+
from fastapi import FastAPI, HTTPException
|
|
13
|
+
from fastapi.responses import HTMLResponse
|
|
14
|
+
from fastapi.staticfiles import StaticFiles
|
|
15
|
+
from starlette.middleware.base import BaseHTTPMiddleware
|
|
16
|
+
from starlette.middleware.gzip import GZipMiddleware
|
|
17
|
+
from starlette.routing import Mount
|
|
18
|
+
from starlette.types import ASGIApp
|
|
19
|
+
|
|
20
|
+
from ...agents.opencode.supervisor import OpenCodeSupervisor
|
|
21
|
+
from ...agents.registry import validate_agent_id
|
|
22
|
+
from ...bootstrap import ensure_hub_car_shim
|
|
23
|
+
from ...core.app_server_threads import (
|
|
24
|
+
AppServerThreadRegistry,
|
|
25
|
+
default_app_server_threads_path,
|
|
26
|
+
)
|
|
27
|
+
from ...core.config import (
|
|
28
|
+
AppServerConfig,
|
|
29
|
+
ConfigError,
|
|
30
|
+
HubConfig,
|
|
31
|
+
_is_loopback_host,
|
|
32
|
+
_normalize_base_path,
|
|
33
|
+
collect_env_overrides,
|
|
34
|
+
derive_repo_config,
|
|
35
|
+
load_hub_config,
|
|
36
|
+
load_repo_config,
|
|
37
|
+
resolve_env_for_root,
|
|
38
|
+
)
|
|
39
|
+
from ...core.flows.models import FlowRunStatus
|
|
40
|
+
from ...core.flows.reconciler import reconcile_flow_runs
|
|
41
|
+
from ...core.flows.store import FlowStore
|
|
42
|
+
from ...core.hub import HubSupervisor
|
|
43
|
+
from ...core.logging_utils import safe_log, setup_rotating_logger
|
|
44
|
+
from ...core.optional_dependencies import require_optional_dependencies
|
|
45
|
+
from ...core.request_context import get_request_id
|
|
46
|
+
from ...core.runtime import LockError, RuntimeContext
|
|
47
|
+
from ...core.state import load_state, persist_session_registry
|
|
48
|
+
from ...core.usage import (
|
|
49
|
+
UsageError,
|
|
50
|
+
default_codex_home,
|
|
51
|
+
get_hub_usage_series_cached,
|
|
52
|
+
get_hub_usage_summary_cached,
|
|
53
|
+
parse_iso_datetime,
|
|
54
|
+
)
|
|
55
|
+
from ...core.utils import (
|
|
56
|
+
build_opencode_supervisor,
|
|
57
|
+
reset_repo_root_context,
|
|
58
|
+
set_repo_root_context,
|
|
59
|
+
)
|
|
60
|
+
from ...housekeeping import run_housekeeping_once
|
|
61
|
+
from ...integrations.agents import build_backend_orchestrator
|
|
62
|
+
from ...integrations.agents.wiring import (
|
|
63
|
+
build_agent_backend_factory,
|
|
64
|
+
build_app_server_supervisor_factory,
|
|
65
|
+
)
|
|
66
|
+
from ...integrations.app_server.client import ApprovalHandler, NotificationHandler
|
|
67
|
+
from ...integrations.app_server.env import build_app_server_env
|
|
68
|
+
from ...integrations.app_server.event_buffer import AppServerEventBuffer
|
|
69
|
+
from ...integrations.app_server.supervisor import WorkspaceAppServerSupervisor
|
|
70
|
+
from ...manifest import load_manifest
|
|
71
|
+
from ...tickets.files import list_ticket_paths, safe_relpath, ticket_is_done
|
|
72
|
+
from ...tickets.models import Dispatch
|
|
73
|
+
from ...tickets.outbox import parse_dispatch, resolve_outbox_paths
|
|
74
|
+
from ...voice import VoiceConfig, VoiceService
|
|
75
|
+
from .hub_jobs import HubJobManager
|
|
76
|
+
from .middleware import (
|
|
77
|
+
AuthTokenMiddleware,
|
|
78
|
+
BasePathRouterMiddleware,
|
|
79
|
+
HostOriginMiddleware,
|
|
80
|
+
RequestIdMiddleware,
|
|
81
|
+
SecurityHeadersMiddleware,
|
|
82
|
+
)
|
|
83
|
+
from .routes import build_repo_router
|
|
84
|
+
from .routes.filebox import build_hub_filebox_routes
|
|
85
|
+
from .routes.pma import build_pma_routes
|
|
86
|
+
from .routes.system import build_system_routes
|
|
87
|
+
from .runner_manager import RunnerManager
|
|
88
|
+
from .schemas import (
|
|
89
|
+
HubCleanupWorktreeRequest,
|
|
90
|
+
HubCreateRepoRequest,
|
|
91
|
+
HubCreateWorktreeRequest,
|
|
92
|
+
HubJobResponse,
|
|
93
|
+
HubRemoveRepoRequest,
|
|
94
|
+
RunControlRequest,
|
|
95
|
+
)
|
|
96
|
+
from .static_assets import (
|
|
97
|
+
asset_version,
|
|
98
|
+
index_response_headers,
|
|
99
|
+
materialize_static_assets,
|
|
100
|
+
render_index_html,
|
|
101
|
+
require_static_assets,
|
|
102
|
+
)
|
|
103
|
+
from .terminal_sessions import parse_tui_idle_seconds, prune_terminal_registry
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
@dataclass(frozen=True)
class AppContext:
    """Aggregated per-repo server state handed to route builders.

    Frozen container bundling the runtime engine, agent supervisors,
    voice support, terminal/session bookkeeping, and static-asset
    metadata for a single repo-scoped web app instance.
    """

    base_path: str  # normalized URL base path the app is mounted under
    env: Mapping[str, str]  # resolved process environment for this repo root
    engine: RuntimeContext  # core runtime (config, notifier, backend orchestrator)
    manager: RunnerManager
    app_server_supervisor: Optional[WorkspaceAppServerSupervisor]  # None when no app-server command configured
    app_server_prune_interval: Optional[float]  # seconds between idle-handle prunes; None disables pruning
    app_server_threads: AppServerThreadRegistry
    app_server_events: AppServerEventBuffer  # buffer receiving app-server notifications (incl. approval errors)
    opencode_supervisor: Optional[OpenCodeSupervisor]  # None when the opencode command is unavailable
    opencode_prune_interval: Optional[float]
    voice_config: VoiceConfig
    voice_missing_reason: Optional[str]  # why voice was disabled (missing optional deps), if it was
    voice_service: Optional[VoiceService]
    terminal_sessions: dict  # live PTY/terminal sessions keyed per session id — assumed; confirm against terminal routes
    terminal_max_idle_seconds: Optional[float]  # None means terminals never idle out
    terminal_lock: asyncio.Lock  # guards terminal_sessions mutation
    session_registry: dict
    repo_to_session: dict
    session_state_last_write: float  # monotonic/epoch time of last registry persist — NOTE(review): confirm units at call sites
    session_state_dirty: bool
    static_dir: Path  # directory static assets were materialized into
    static_assets_context: Optional[object]  # ExitStack-like handle keeping packaged assets alive, if any
    asset_version: str  # cache-busting token for static asset URLs
    logger: logging.Logger
    tui_idle_seconds: Optional[float]  # idle cutoff for TUI sessions; None disables
    tui_idle_check_seconds: Optional[float]  # poll interval derived from tui_idle_seconds
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
@dataclass(frozen=True)
class HubAppContext:
    """Aggregated hub-level server state (multi-repo supervisor mode).

    Frozen counterpart of the per-repo context: instead of a single
    runtime engine it carries the hub supervisor and its job manager,
    plus shared app-server/opencode supervisors and static-asset info.
    """

    base_path: str  # normalized URL base path the hub app is mounted under
    config: HubConfig
    supervisor: HubSupervisor  # owns the managed repos/worktrees
    job_manager: HubJobManager  # background hub jobs (create repo, worktrees, ...)
    app_server_supervisor: Optional[WorkspaceAppServerSupervisor]  # None when no app-server command configured
    app_server_prune_interval: Optional[float]  # seconds between idle-handle prunes; None disables pruning
    app_server_threads: AppServerThreadRegistry
    app_server_events: AppServerEventBuffer
    opencode_supervisor: Optional[OpenCodeSupervisor]  # None when the opencode command is unavailable
    opencode_prune_interval: Optional[float]
    static_dir: Path  # directory static assets were materialized into
    static_assets_context: Optional[object]  # handle keeping packaged assets alive, if any
    asset_version: str  # cache-busting token for static asset URLs
    logger: logging.Logger
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
@dataclass(frozen=True)
class ServerOverrides:
    """Optional CLI/embedding-level overrides applied on top of config.

    Each field defaults to ``None``, meaning "use the configured value".
    """

    allowed_hosts: Optional[list[str]] = None  # Host-header allowlist override
    allowed_origins: Optional[list[str]] = None  # CORS/Origin allowlist override
    auth_token_env: Optional[str] = None  # env var name holding the auth token
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def _app_server_prune_interval(idle_ttl_seconds: Optional[int]) -> Optional[float]:
|
|
162
|
+
if not idle_ttl_seconds or idle_ttl_seconds <= 0:
|
|
163
|
+
return None
|
|
164
|
+
return float(min(600.0, max(60.0, idle_ttl_seconds / 2)))
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _normalize_approval_path(path: str, repo_root: Path) -> str:
|
|
168
|
+
raw = (path or "").strip()
|
|
169
|
+
if not raw:
|
|
170
|
+
return ""
|
|
171
|
+
if raw.startswith(("a/", "b/")):
|
|
172
|
+
raw = raw[2:]
|
|
173
|
+
if raw.startswith("./"):
|
|
174
|
+
raw = raw[2:]
|
|
175
|
+
candidate = Path(raw)
|
|
176
|
+
if candidate.is_absolute():
|
|
177
|
+
try:
|
|
178
|
+
candidate = candidate.relative_to(repo_root)
|
|
179
|
+
except ValueError:
|
|
180
|
+
return raw
|
|
181
|
+
return candidate.as_posix()
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def _extract_approval_paths(params: dict, *, repo_root: Path) -> list[str]:
    """Collect normalized file paths from an approval-request payload.

    Scans both the top-level *params* and its nested ``"item"`` mapping.
    List-valued keys (``files``, ``fileChanges``, ``paths``) and scalar
    keys (``path``, ``file``, ``name``) are all inspected; entries may
    be plain strings or dicts carrying one of the scalar keys.
    """
    collected: list[str] = []

    def _collect(entry: object) -> None:
        raw: object = entry
        if isinstance(entry, dict):
            raw = entry.get("path") or entry.get("file") or entry.get("name")
        if not isinstance(raw, str):
            return
        normalized = _normalize_approval_path(raw, repo_root)
        if normalized:
            collected.append(normalized)

    nested_item = params.get("item") if isinstance(params, dict) else None
    for payload in (params, nested_item):
        if not isinstance(payload, dict):
            continue
        for list_key in ("files", "fileChanges", "paths"):
            entries = payload.get(list_key)
            if isinstance(entries, list):
                for entry in entries:
                    _collect(entry)
        for scalar_key in ("path", "file", "name"):
            _collect(payload.get(scalar_key))
    return collected
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def _extract_turn_context(params: dict) -> tuple[Optional[str], Optional[str]]:
|
|
214
|
+
if not isinstance(params, dict):
|
|
215
|
+
return None, None
|
|
216
|
+
turn_id = params.get("turnId") or params.get("turn_id") or params.get("id")
|
|
217
|
+
thread_id = params.get("threadId") or params.get("thread_id")
|
|
218
|
+
turn = params.get("turn")
|
|
219
|
+
if isinstance(turn, dict):
|
|
220
|
+
turn_id = turn_id or turn.get("id") or turn.get("turnId")
|
|
221
|
+
thread_id = thread_id or turn.get("threadId") or turn.get("thread_id")
|
|
222
|
+
item = params.get("item")
|
|
223
|
+
if isinstance(item, dict):
|
|
224
|
+
thread_id = thread_id or item.get("threadId") or item.get("thread_id")
|
|
225
|
+
turn_id = str(turn_id) if isinstance(turn_id, str) and turn_id else None
|
|
226
|
+
thread_id = str(thread_id) if isinstance(thread_id, str) and thread_id else None
|
|
227
|
+
return thread_id, turn_id
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def _path_is_allowed_for_file_write(path: str) -> bool:
|
|
231
|
+
normalized = (path or "").strip()
|
|
232
|
+
if not normalized:
|
|
233
|
+
return False
|
|
234
|
+
# Canonical allowlist for all AI-assisted file edits via app-server approval:
|
|
235
|
+
# - tickets: .codex-autorunner/tickets/**
|
|
236
|
+
# - workspace docs: .codex-autorunner/workspace/**
|
|
237
|
+
allowed_prefixes = (
|
|
238
|
+
".codex-autorunner/tickets/",
|
|
239
|
+
".codex-autorunner/workspace/",
|
|
240
|
+
)
|
|
241
|
+
if normalized in (".codex-autorunner/tickets", ".codex-autorunner/workspace"):
|
|
242
|
+
return True
|
|
243
|
+
return any(
|
|
244
|
+
normalized == prefix.rstrip("/") or normalized.startswith(prefix)
|
|
245
|
+
for prefix in allowed_prefixes
|
|
246
|
+
)
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def _build_app_server_supervisor(
    config: AppServerConfig,
    *,
    logger: logging.Logger,
    event_prefix: str,
    base_env: Optional[Mapping[str, str]] = None,
    notification_handler: Optional[NotificationHandler] = None,
    approval_handler: Optional[ApprovalHandler] = None,
) -> tuple[Optional[WorkspaceAppServerSupervisor], Optional[float]]:
    """Construct the workspace app-server supervisor from *config*.

    Returns ``(supervisor, prune_interval_seconds)``; both are ``None``
    when no app-server command is configured. All timeout/backoff knobs
    are passed straight through from the config and its nested client
    settings.
    """
    if not config.command:
        # App-server integration is disabled entirely without a command.
        return None, None

    def _env_builder(
        workspace_root: Path, _workspace_id: str, state_dir: Path
    ) -> dict[str, str]:
        # Per-workspace environment factory; ensures the state dir exists
        # before the app-server process is launched into it.
        state_dir.mkdir(parents=True, exist_ok=True)
        return build_app_server_env(
            config.command,
            workspace_root,
            state_dir,
            logger=logger,
            event_prefix=event_prefix,
            base_env=base_env,
        )

    # The supervisor's construction expects a usable event loop; when
    # called from a sync context (tests, worker threads) install one.
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        asyncio.set_event_loop(asyncio.new_event_loop())

    supervisor = WorkspaceAppServerSupervisor(
        config.command,
        state_root=config.state_root,
        env_builder=_env_builder,
        logger=logger,
        auto_restart=config.auto_restart,
        max_handles=config.max_handles,
        idle_ttl_seconds=config.idle_ttl_seconds,
        request_timeout=config.request_timeout,
        turn_stall_timeout_seconds=config.turn_stall_timeout_seconds,
        turn_stall_poll_interval_seconds=config.turn_stall_poll_interval_seconds,
        turn_stall_recovery_min_interval_seconds=config.turn_stall_recovery_min_interval_seconds,
        max_message_bytes=config.client.max_message_bytes,
        oversize_preview_bytes=config.client.oversize_preview_bytes,
        max_oversize_drain_bytes=config.client.max_oversize_drain_bytes,
        restart_backoff_initial_seconds=config.client.restart_backoff_initial_seconds,
        restart_backoff_max_seconds=config.client.restart_backoff_max_seconds,
        restart_backoff_jitter_ratio=config.client.restart_backoff_jitter_ratio,
        notification_handler=notification_handler,
        approval_handler=approval_handler,
    )
    # Prune interval derives from the idle TTL (half, clamped to 60-600s).
    return supervisor, _app_server_prune_interval(config.idle_ttl_seconds)
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def _parse_command(raw: Optional[str]) -> list[str]:
|
|
304
|
+
if not raw:
|
|
305
|
+
return []
|
|
306
|
+
try:
|
|
307
|
+
return [part for part in shlex.split(raw) if part]
|
|
308
|
+
except ValueError:
|
|
309
|
+
return []
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
def _build_opencode_supervisor(
    config: AppServerConfig,
    *,
    workspace_root: Path,
    opencode_binary: Optional[str],
    opencode_command: Optional[list[str]],
    logger: logging.Logger,
    env: Mapping[str, str],
    subagent_models: Optional[Mapping[str, str]] = None,
    session_stall_timeout_seconds: Optional[float] = None,
    max_text_chars: Optional[int] = None,
) -> tuple[Optional[OpenCodeSupervisor], Optional[float]]:
    """Construct the OpenCode supervisor, reusing app-server config knobs.

    Delegates to the shared ``build_opencode_supervisor`` helper; returns
    ``(supervisor, prune_interval_seconds)``, or ``(None, None)`` when
    the helper reports no usable opencode command/binary.
    """
    supervisor = build_opencode_supervisor(
        opencode_command=opencode_command,
        opencode_binary=opencode_binary,
        workspace_root=workspace_root,
        logger=logger,
        # Timeouts and handle limits are shared with the app-server config.
        request_timeout=config.request_timeout,
        max_handles=config.max_handles,
        idle_ttl_seconds=config.idle_ttl_seconds,
        session_stall_timeout_seconds=session_stall_timeout_seconds,
        max_text_chars=max_text_chars,
        base_env=env,
        subagent_models=subagent_models,
    )
    if supervisor is None:
        # Best-effort: missing opencode is a soft skip, not an error.
        safe_log(
            logger,
            logging.INFO,
            "OpenCode command unavailable; skipping opencode supervisor.",
        )
        return None, None
    # Prune interval derives from the idle TTL (half, clamped to 60-600s).
    return supervisor, _app_server_prune_interval(config.idle_ttl_seconds)
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
def _build_app_context(
|
|
348
|
+
repo_root: Optional[Path],
|
|
349
|
+
base_path: Optional[str],
|
|
350
|
+
hub_config: Optional[HubConfig] = None,
|
|
351
|
+
) -> AppContext:
|
|
352
|
+
target_root = (repo_root or Path.cwd()).resolve()
|
|
353
|
+
if hub_config is None:
|
|
354
|
+
config = load_repo_config(target_root)
|
|
355
|
+
env = dict(os.environ)
|
|
356
|
+
else:
|
|
357
|
+
env = resolve_env_for_root(target_root)
|
|
358
|
+
config = derive_repo_config(hub_config, target_root, load_env=False)
|
|
359
|
+
normalized_base = (
|
|
360
|
+
_normalize_base_path(base_path)
|
|
361
|
+
if base_path is not None
|
|
362
|
+
else config.server_base_path
|
|
363
|
+
)
|
|
364
|
+
backend_orchestrator = build_backend_orchestrator(config.root, config)
|
|
365
|
+
engine = RuntimeContext(
|
|
366
|
+
config.root,
|
|
367
|
+
config=config,
|
|
368
|
+
backend_orchestrator=backend_orchestrator,
|
|
369
|
+
)
|
|
370
|
+
manager = RunnerManager(engine)
|
|
371
|
+
voice_config = VoiceConfig.from_raw(config.voice, env=env)
|
|
372
|
+
voice_missing_reason: Optional[str] = None
|
|
373
|
+
try:
|
|
374
|
+
require_optional_dependencies(
|
|
375
|
+
feature="voice",
|
|
376
|
+
deps=(
|
|
377
|
+
("httpx", "httpx"),
|
|
378
|
+
(("multipart", "python_multipart"), "python-multipart"),
|
|
379
|
+
),
|
|
380
|
+
extra="voice",
|
|
381
|
+
)
|
|
382
|
+
except ConfigError as exc:
|
|
383
|
+
voice_missing_reason = str(exc)
|
|
384
|
+
voice_config.enabled = False
|
|
385
|
+
terminal_max_idle_seconds = config.terminal_idle_timeout_seconds
|
|
386
|
+
if terminal_max_idle_seconds is not None and terminal_max_idle_seconds <= 0:
|
|
387
|
+
terminal_max_idle_seconds = None
|
|
388
|
+
tui_idle_seconds = parse_tui_idle_seconds(config)
|
|
389
|
+
tui_idle_check_seconds: Optional[float] = None
|
|
390
|
+
if tui_idle_seconds is not None:
|
|
391
|
+
tui_idle_check_seconds = min(10.0, max(1.0, tui_idle_seconds / 4))
|
|
392
|
+
# Construct asyncio primitives without assuming a loop already exists.
|
|
393
|
+
# This comes up in unit tests (sync context) and when mounting from a worker thread.
|
|
394
|
+
try:
|
|
395
|
+
terminal_lock = asyncio.Lock()
|
|
396
|
+
except RuntimeError:
|
|
397
|
+
asyncio.set_event_loop(asyncio.new_event_loop())
|
|
398
|
+
terminal_lock = asyncio.Lock()
|
|
399
|
+
logger = setup_rotating_logger(
|
|
400
|
+
f"repo[{engine.repo_root}]", engine.config.server_log
|
|
401
|
+
)
|
|
402
|
+
engine.notifier.set_logger(logger)
|
|
403
|
+
env_overrides = collect_env_overrides(env=env)
|
|
404
|
+
if env_overrides:
|
|
405
|
+
safe_log(
|
|
406
|
+
logger,
|
|
407
|
+
logging.INFO,
|
|
408
|
+
"Environment overrides active: %s",
|
|
409
|
+
", ".join(env_overrides),
|
|
410
|
+
)
|
|
411
|
+
safe_log(
|
|
412
|
+
logger,
|
|
413
|
+
logging.INFO,
|
|
414
|
+
f"Repo server ready at {engine.repo_root}",
|
|
415
|
+
)
|
|
416
|
+
app_server_events = AppServerEventBuffer()
|
|
417
|
+
|
|
418
|
+
async def _file_write_approval_handler(message: dict) -> str:
    """Decide app-server approval requests for a file-write-only session.

    Policy: only ``item/fileChange/requestApproval`` may be accepted, and
    only when every requested path is present and passes the file-write
    allowlist. Command execution and any unrecognized method are always
    declined.

    Args:
        message: JSON-RPC style request with ``method`` and ``params``.

    Returns:
        ``"accept"`` or ``"decline"`` (the app-server approval verbs).
    """
    method = message.get("method")
    params = message.get("params")
    # Defensive: params may be absent or malformed; treat as empty.
    params = params if isinstance(params, dict) else {}
    thread_id, turn_id = _extract_turn_context(params)

    async def _decline_with_notice(notice: str) -> str:
        # Surface the rejection to clients as an error event tied to the
        # originating thread/turn, then decline. Previously this block was
        # duplicated verbatim at each rejection site.
        await app_server_events.handle_notification(
            {
                "method": "error",
                "params": {
                    "message": notice,
                    "turnId": turn_id,
                    "threadId": thread_id,
                },
            }
        )
        return "decline"

    if method == "item/fileChange/requestApproval":
        paths = _extract_approval_paths(params, repo_root=engine.config.root)
        normalized = [path for path in paths if path]
        if not normalized:
            # A change with no explicit paths cannot be allowlist-checked.
            return await _decline_with_notice(
                "Rejected file change without explicit paths."
            )
        rejected = [
            path for path in normalized if not _path_is_allowed_for_file_write(path)
        ]
        if rejected:
            return await _decline_with_notice(
                "Rejected write outside allowlist: " + ", ".join(rejected)
            )
        return "accept"
    if method == "item/commandExecution/requestApproval":
        return await _decline_with_notice(
            "Rejected command execution in file write session."
        )
    # Unknown request methods are declined silently (no error event).
    return "decline"
|
|
470
|
+
|
|
471
|
+
app_server_supervisor, app_server_prune_interval = _build_app_server_supervisor(
|
|
472
|
+
engine.config.app_server,
|
|
473
|
+
logger=logger,
|
|
474
|
+
event_prefix="web.app_server",
|
|
475
|
+
base_env=env,
|
|
476
|
+
notification_handler=app_server_events.handle_notification,
|
|
477
|
+
approval_handler=_file_write_approval_handler,
|
|
478
|
+
)
|
|
479
|
+
app_server_threads = AppServerThreadRegistry(
|
|
480
|
+
default_app_server_threads_path(engine.repo_root)
|
|
481
|
+
)
|
|
482
|
+
opencode_command = config.agent_serve_command("opencode")
|
|
483
|
+
try:
|
|
484
|
+
opencode_binary = config.agent_binary("opencode")
|
|
485
|
+
except ConfigError:
|
|
486
|
+
opencode_binary = None
|
|
487
|
+
agent_config = config.agents.get("opencode")
|
|
488
|
+
subagent_models = agent_config.subagent_models if agent_config else None
|
|
489
|
+
opencode_supervisor, opencode_prune_interval = _build_opencode_supervisor(
|
|
490
|
+
config.app_server,
|
|
491
|
+
workspace_root=engine.repo_root,
|
|
492
|
+
opencode_binary=opencode_binary,
|
|
493
|
+
opencode_command=opencode_command,
|
|
494
|
+
logger=logger,
|
|
495
|
+
env=env,
|
|
496
|
+
subagent_models=subagent_models,
|
|
497
|
+
session_stall_timeout_seconds=config.opencode.session_stall_timeout_seconds,
|
|
498
|
+
max_text_chars=config.opencode.max_text_chars,
|
|
499
|
+
)
|
|
500
|
+
voice_service: Optional[VoiceService]
|
|
501
|
+
if voice_missing_reason:
|
|
502
|
+
voice_service = None
|
|
503
|
+
safe_log(
|
|
504
|
+
logger,
|
|
505
|
+
logging.WARNING,
|
|
506
|
+
voice_missing_reason,
|
|
507
|
+
)
|
|
508
|
+
else:
|
|
509
|
+
try:
|
|
510
|
+
voice_service = VoiceService(voice_config, logger=logger)
|
|
511
|
+
except Exception as exc:
|
|
512
|
+
voice_service = None
|
|
513
|
+
safe_log(
|
|
514
|
+
logger,
|
|
515
|
+
logging.WARNING,
|
|
516
|
+
"Voice service unavailable",
|
|
517
|
+
exc,
|
|
518
|
+
)
|
|
519
|
+
session_registry: dict = {}
|
|
520
|
+
repo_to_session: dict = {}
|
|
521
|
+
initial_state = load_state(engine.state_path)
|
|
522
|
+
session_registry = dict(initial_state.sessions)
|
|
523
|
+
repo_to_session = dict(initial_state.repo_to_session)
|
|
524
|
+
# Normalize persisted keys from older/newer versions:
|
|
525
|
+
# - Prefer bare repo keys for the default "codex" agent.
|
|
526
|
+
# - Preserve `repo:agent` keys for non-default agents (e.g. opencode).
|
|
527
|
+
normalized_repo_to_session: dict[str, str] = {}
|
|
528
|
+
for raw_key, session_id in repo_to_session.items():
|
|
529
|
+
key = str(raw_key)
|
|
530
|
+
if ":" in key:
|
|
531
|
+
repo, agent = key.split(":", 1)
|
|
532
|
+
agent_norm = agent.strip().lower()
|
|
533
|
+
if not agent_norm or agent_norm == "codex":
|
|
534
|
+
key = repo
|
|
535
|
+
else:
|
|
536
|
+
key = f"{repo}:{agent_norm}"
|
|
537
|
+
# Keep the first mapping we see to avoid surprising overrides.
|
|
538
|
+
normalized_repo_to_session.setdefault(key, session_id)
|
|
539
|
+
repo_to_session = normalized_repo_to_session
|
|
540
|
+
terminal_sessions: dict = {}
|
|
541
|
+
if session_registry or repo_to_session:
|
|
542
|
+
prune_terminal_registry(
|
|
543
|
+
engine.state_path,
|
|
544
|
+
terminal_sessions,
|
|
545
|
+
session_registry,
|
|
546
|
+
repo_to_session,
|
|
547
|
+
terminal_max_idle_seconds,
|
|
548
|
+
)
|
|
549
|
+
|
|
550
|
+
def _load_static_assets(
    cache_root: Path, max_cache_entries: int, max_cache_age_days: Optional[int]
) -> tuple[Path, Optional[ExitStack]]:
    """Materialize the static asset bundle and verify it is usable.

    Returns the asset directory together with an optional cleanup context.
    When verification fails, the cleanup context (if any) is closed first so
    no extraction directory leaks, the failure is logged, and the original
    exception propagates.
    """
    asset_dir, cleanup_ctx = materialize_static_assets(
        cache_root,
        max_cache_entries=max_cache_entries,
        max_cache_age_days=max_cache_age_days,
        logger=logger,
    )
    try:
        require_static_assets(asset_dir, logger)
    except Exception as exc:
        if cleanup_ctx is not None:
            cleanup_ctx.close()
        safe_log(
            logger,
            logging.WARNING,
            "Static assets requirement check failed",
            exc=exc,
        )
        raise
    else:
        return asset_dir, cleanup_ctx
|
|
572
|
+
|
|
573
|
+
try:
|
|
574
|
+
static_dir, static_context = _load_static_assets(
|
|
575
|
+
config.static_assets.cache_root,
|
|
576
|
+
config.static_assets.max_cache_entries,
|
|
577
|
+
config.static_assets.max_cache_age_days,
|
|
578
|
+
)
|
|
579
|
+
except Exception as exc:
|
|
580
|
+
if hub_config is None:
|
|
581
|
+
raise
|
|
582
|
+
hub_static = hub_config.static_assets
|
|
583
|
+
if hub_static.cache_root == config.static_assets.cache_root:
|
|
584
|
+
raise
|
|
585
|
+
safe_log(
|
|
586
|
+
logger,
|
|
587
|
+
logging.WARNING,
|
|
588
|
+
"Repo static assets unavailable; retrying with hub cache root %s",
|
|
589
|
+
hub_static.cache_root,
|
|
590
|
+
exc=exc,
|
|
591
|
+
)
|
|
592
|
+
static_dir, static_context = _load_static_assets(
|
|
593
|
+
hub_static.cache_root,
|
|
594
|
+
hub_static.max_cache_entries,
|
|
595
|
+
hub_static.max_cache_age_days,
|
|
596
|
+
)
|
|
597
|
+
return AppContext(
|
|
598
|
+
base_path=normalized_base,
|
|
599
|
+
env=env,
|
|
600
|
+
engine=engine,
|
|
601
|
+
manager=manager,
|
|
602
|
+
app_server_supervisor=app_server_supervisor,
|
|
603
|
+
app_server_prune_interval=app_server_prune_interval,
|
|
604
|
+
app_server_threads=app_server_threads,
|
|
605
|
+
app_server_events=app_server_events,
|
|
606
|
+
opencode_supervisor=opencode_supervisor,
|
|
607
|
+
opencode_prune_interval=opencode_prune_interval,
|
|
608
|
+
voice_config=voice_config,
|
|
609
|
+
voice_missing_reason=voice_missing_reason,
|
|
610
|
+
voice_service=voice_service,
|
|
611
|
+
terminal_sessions=terminal_sessions,
|
|
612
|
+
terminal_max_idle_seconds=terminal_max_idle_seconds,
|
|
613
|
+
terminal_lock=terminal_lock,
|
|
614
|
+
session_registry=session_registry,
|
|
615
|
+
repo_to_session=repo_to_session,
|
|
616
|
+
session_state_last_write=0.0,
|
|
617
|
+
session_state_dirty=False,
|
|
618
|
+
static_dir=static_dir,
|
|
619
|
+
static_assets_context=static_context,
|
|
620
|
+
asset_version=asset_version(static_dir),
|
|
621
|
+
logger=logger,
|
|
622
|
+
tui_idle_seconds=tui_idle_seconds,
|
|
623
|
+
tui_idle_check_seconds=tui_idle_check_seconds,
|
|
624
|
+
)
|
|
625
|
+
|
|
626
|
+
|
|
627
|
+
def _apply_app_context(app: FastAPI, context: AppContext) -> None:
    """Mirror every runtime handle from *context* onto ``app.state``.

    Route handlers and lifespan tasks read these attributes off the app
    state rather than the AppContext object, so each one is copied over.
    """
    state = app.state
    state.base_path = context.base_path
    state.env = context.env
    state.logger = context.logger
    state.engine = context.engine
    # Expose config consistently so routes need not reach through the engine.
    state.config = context.engine.config
    state.manager = context.manager
    # The remaining handles map 1:1 from context attribute to state attribute.
    for attr in (
        "app_server_supervisor",
        "app_server_prune_interval",
        "app_server_threads",
        "app_server_events",
        "opencode_supervisor",
        "opencode_prune_interval",
        "voice_config",
        "voice_missing_reason",
        "voice_service",
        "terminal_sessions",
        "terminal_max_idle_seconds",
        "terminal_lock",
        "session_registry",
        "repo_to_session",
        "session_state_last_write",
        "session_state_dirty",
        "static_dir",
        "static_assets_context",
        "asset_version",
    ):
        setattr(state, attr, getattr(context, attr))
|
|
653
|
+
|
|
654
|
+
|
|
655
|
+
def _build_hub_context(
    hub_root: Optional[Path], base_path: Optional[str]
) -> HubAppContext:
    """Assemble all long-lived services for the hub application.

    Loads hub configuration from *hub_root* (or the current directory),
    constructs the hub supervisor, app-server and OpenCode supervisors,
    and materializes static assets, bundling everything into a
    ``HubAppContext``.

    Args:
        hub_root: Hub root directory; falls back to ``Path.cwd()`` when None.
        base_path: Explicit URL base path; when None the configured
            ``server_base_path`` is used unchanged (only an explicit value
            is normalized).

    Raises:
        Propagates any failure from the static-asset requirement check
        (after closing the materialization context).
    """
    config = load_hub_config(hub_root or Path.cwd())
    # Only an explicitly supplied base_path is normalized; the configured
    # value is assumed to already be in canonical form.
    normalized_base = (
        _normalize_base_path(base_path)
        if base_path is not None
        else config.server_base_path
    )
    supervisor = HubSupervisor(
        config,
        backend_factory_builder=build_agent_backend_factory,
        app_server_supervisor_factory_builder=build_app_server_supervisor_factory,
        backend_orchestrator_builder=build_backend_orchestrator,
        agent_id_validator=validate_agent_id,
    )
    logger = setup_rotating_logger(f"hub[{config.root}]", config.server_log)
    env_overrides = collect_env_overrides()
    if env_overrides:
        safe_log(
            logger,
            logging.INFO,
            "Environment overrides active: %s",
            ", ".join(env_overrides),
        )
    safe_log(
        logger,
        logging.INFO,
        f"Hub app ready at {config.root}",
    )
    # Best-effort: a missing shim should not prevent hub startup.
    try:
        ensure_hub_car_shim(config.root, python_executable=sys.executable)
    except Exception as exc:
        safe_log(
            logger,
            logging.WARNING,
            "Failed to ensure hub car shim",
            exc=exc,
        )
    app_server_events = AppServerEventBuffer()
    app_server_supervisor, app_server_prune_interval = _build_app_server_supervisor(
        config.app_server,
        logger=logger,
        event_prefix="hub.app_server",
        notification_handler=app_server_events.handle_notification,
    )
    app_server_threads = AppServerThreadRegistry(
        default_app_server_threads_path(config.root)
    )
    opencode_command = config.agent_serve_command("opencode")
    # The opencode binary is optional; absence simply disables it here.
    try:
        opencode_binary = config.agent_binary("opencode")
    except ConfigError:
        opencode_binary = None
    agent_config = config.agents.get("opencode")
    subagent_models = agent_config.subagent_models if agent_config else None
    opencode_supervisor, opencode_prune_interval = _build_opencode_supervisor(
        config.app_server,
        workspace_root=config.root,
        opencode_binary=opencode_binary,
        opencode_command=opencode_command,
        logger=logger,
        env=resolve_env_for_root(config.root),
        subagent_models=subagent_models,
        session_stall_timeout_seconds=config.opencode.session_stall_timeout_seconds,
        max_text_chars=config.opencode.max_text_chars,
    )
    static_dir, static_context = materialize_static_assets(
        config.static_assets.cache_root,
        max_cache_entries=config.static_assets.max_cache_entries,
        max_cache_age_days=config.static_assets.max_cache_age_days,
        logger=logger,
    )
    try:
        require_static_assets(static_dir, logger)
    except Exception as exc:
        # Close the materialization context before propagating so the
        # extracted asset directory is not leaked.
        if static_context is not None:
            static_context.close()
        safe_log(
            logger,
            logging.WARNING,
            "Static assets requirement check failed",
            exc=exc,
        )
        raise
    return HubAppContext(
        base_path=normalized_base,
        config=config,
        supervisor=supervisor,
        job_manager=HubJobManager(logger=logger),
        app_server_supervisor=app_server_supervisor,
        app_server_prune_interval=app_server_prune_interval,
        app_server_threads=app_server_threads,
        app_server_events=app_server_events,
        opencode_supervisor=opencode_supervisor,
        opencode_prune_interval=opencode_prune_interval,
        static_dir=static_dir,
        static_assets_context=static_context,
        asset_version=asset_version(static_dir),
        logger=logger,
    )
|
|
756
|
+
|
|
757
|
+
|
|
758
|
+
def _apply_hub_context(app: FastAPI, context: HubAppContext) -> None:
    """Mirror hub-level context handles onto ``app.state`` for route modules."""
    state = app.state
    # Same-named attributes copy across directly.
    for attr in (
        "base_path",
        "logger",
        "config",
        "job_manager",
        "app_server_supervisor",
        "app_server_prune_interval",
        "app_server_threads",
        "app_server_events",
        "opencode_supervisor",
        "opencode_prune_interval",
        "static_dir",
        "static_assets_context",
        "asset_version",
    ):
        setattr(state, attr, getattr(context, attr))
    # Route modules look the hub supervisor up under a distinct state name.
    state.hub_supervisor = context.supervisor
|
|
773
|
+
|
|
774
|
+
|
|
775
|
+
def _app_lifespan(context: AppContext):
    """Build the FastAPI lifespan context manager for a repo app.

    On startup it spawns the background maintenance tasks (terminal cleanup,
    housekeeping, flow reconciliation, supervisor pruning, TUI idle
    notification). On shutdown it signals SSE/WebSocket consumers, cancels
    the tasks, closes terminal sessions and supervisors, persists session
    state, and releases the static-asset context.
    """

    @asynccontextmanager
    async def lifespan(app: FastAPI):
        # All background tasks are collected here so shutdown can cancel them.
        tasks: list[asyncio.Task] = []

        async def _cleanup_loop():
            # Periodically prune dead/idle terminal sessions from the registry.
            try:
                while True:
                    await asyncio.sleep(600)  # Check every 10 mins
                    try:
                        async with app.state.terminal_lock:
                            prune_terminal_registry(
                                app.state.engine.state_path,
                                app.state.terminal_sessions,
                                app.state.session_registry,
                                app.state.repo_to_session,
                                app.state.terminal_max_idle_seconds,
                            )
                    except Exception as exc:
                        safe_log(
                            app.state.logger,
                            logging.WARNING,
                            "Terminal cleanup task failed",
                            exc,
                        )
            except asyncio.CancelledError:
                return

        async def _housekeeping_loop():
            # Run blocking housekeeping work off-loop on a configured interval.
            config = app.state.config.housekeeping
            interval = max(config.interval_seconds, 1)  # guard against <=0 config
            try:
                while True:
                    try:
                        await asyncio.to_thread(
                            run_housekeeping_once,
                            config,
                            app.state.engine.repo_root,
                            logger=app.state.logger,
                        )
                    except Exception as exc:
                        safe_log(
                            app.state.logger,
                            logging.WARNING,
                            "Housekeeping task failed",
                            exc,
                        )
                    await asyncio.sleep(interval)
            except asyncio.CancelledError:
                return

        async def _flow_reconcile_loop():
            # Poll faster while any flow run is active, slower when idle.
            active_interval = 2.0
            idle_interval = 5.0
            try:
                while True:
                    result = await asyncio.to_thread(
                        reconcile_flow_runs,
                        app.state.engine.repo_root,
                        logger=app.state.logger,
                    )
                    interval = (
                        active_interval if result.summary.active > 0 else idle_interval
                    )
                    await asyncio.sleep(interval)
            except asyncio.CancelledError:
                return

        tasks.append(asyncio.create_task(_cleanup_loop()))
        if app.state.config.housekeeping.enabled:
            tasks.append(asyncio.create_task(_housekeeping_loop()))
        tasks.append(asyncio.create_task(_flow_reconcile_loop()))
        # Supervisor prune loops are only started when both the supervisor and
        # a truthy (non-zero) prune interval are present.
        app_server_supervisor = getattr(app.state, "app_server_supervisor", None)
        app_server_prune_interval = getattr(
            app.state, "app_server_prune_interval", None
        )
        if app_server_supervisor is not None and app_server_prune_interval:

            async def _app_server_prune_loop():
                try:
                    while True:
                        await asyncio.sleep(app_server_prune_interval)
                        try:
                            await app_server_supervisor.prune_idle()
                        except Exception as exc:
                            safe_log(
                                app.state.logger,
                                logging.WARNING,
                                "App-server prune task failed",
                                exc,
                            )
                except asyncio.CancelledError:
                    return

            tasks.append(asyncio.create_task(_app_server_prune_loop()))

        opencode_supervisor = getattr(app.state, "opencode_supervisor", None)
        opencode_prune_interval = getattr(app.state, "opencode_prune_interval", None)
        if opencode_supervisor is not None and opencode_prune_interval:

            async def _opencode_prune_loop():
                try:
                    while True:
                        await asyncio.sleep(opencode_prune_interval)
                        try:
                            await opencode_supervisor.prune_idle()
                        except Exception as exc:
                            safe_log(
                                app.state.logger,
                                logging.WARNING,
                                "OpenCode prune task failed",
                                exc,
                            )
                except asyncio.CancelledError:
                    return

            tasks.append(asyncio.create_task(_opencode_prune_loop()))

        if (
            context.tui_idle_seconds is not None
            and context.tui_idle_check_seconds is not None
        ):

            async def _tui_idle_loop():
                # Notify when a live terminal session has been idle too long.
                try:
                    while True:
                        await asyncio.sleep(context.tui_idle_check_seconds)
                        try:
                            async with app.state.terminal_lock:
                                terminal_sessions = app.state.terminal_sessions
                                session_registry = app.state.session_registry
                                for session_id, session in list(
                                    terminal_sessions.items()
                                ):
                                    if not session.pty.isalive():
                                        continue
                                    if not session.should_notify_idle(
                                        context.tui_idle_seconds
                                    ):
                                        continue
                                    record = session_registry.get(session_id)
                                    repo_path = record.repo_path if record else None
                                    notifier = getattr(
                                        app.state.engine, "notifier", None
                                    )
                                    if notifier:
                                        # NOTE(review): fire-and-forget task; no
                                        # reference is kept, so it could be GC'd
                                        # before completing — confirm intended.
                                        asyncio.create_task(
                                            notifier.notify_tui_idle_async(
                                                session_id=session_id,
                                                idle_seconds=context.tui_idle_seconds,
                                                repo_path=repo_path,
                                            )
                                        )
                        except Exception as exc:
                            safe_log(
                                app.state.logger,
                                logging.WARNING,
                                "TUI idle notification loop failed",
                                exc,
                            )
                except asyncio.CancelledError:
                    return

            tasks.append(asyncio.create_task(_tui_idle_loop()))

        # Shutdown event for graceful SSE/WebSocket termination during reload
        app.state.shutdown_event = asyncio.Event()
        app.state.active_websockets: set = set()

        try:
            yield
        finally:
            # Signal SSE streams to stop and close WebSocket connections
            app.state.shutdown_event.set()
            for ws in list(app.state.active_websockets):
                try:
                    await ws.close(code=1012)  # 1012 = Service Restart
                except Exception as exc:
                    safe_log(
                        app.state.logger,
                        logging.DEBUG,
                        "Failed to close websocket during shutdown",
                        exc=exc,
                    )
            app.state.active_websockets.clear()

            # Cancel all background tasks and wait for them to unwind.
            for task in tasks:
                task.cancel()
            if tasks:
                await asyncio.gather(*tasks, return_exceptions=True)
            async with app.state.terminal_lock:
                for session in app.state.terminal_sessions.values():
                    session.close()
                app.state.terminal_sessions.clear()
                app.state.session_registry.clear()
                app.state.repo_to_session.clear()
                # Persist the now-empty registries so stale sessions are not
                # resurrected on the next startup.
                persist_session_registry(
                    app.state.engine.state_path,
                    app.state.session_registry,
                    app.state.repo_to_session,
                )
            app_server_supervisor = getattr(app.state, "app_server_supervisor", None)
            if app_server_supervisor is not None:
                try:
                    await app_server_supervisor.close_all()
                except Exception as exc:
                    safe_log(
                        app.state.logger,
                        logging.WARNING,
                        "App-server shutdown failed",
                        exc,
                    )
            opencode_supervisor = getattr(app.state, "opencode_supervisor", None)
            if opencode_supervisor is not None:
                try:
                    await opencode_supervisor.close_all()
                except Exception as exc:
                    safe_log(
                        app.state.logger,
                        logging.WARNING,
                        "OpenCode shutdown failed",
                        exc,
                    )
            static_context = getattr(app.state, "static_assets_context", None)
            if static_context is not None:
                static_context.close()

    return lifespan
|
|
1003
|
+
|
|
1004
|
+
|
|
1005
|
+
def create_repo_app(
    repo_root: Path,
    server_overrides: Optional[ServerOverrides] = None,
    hub_config: Optional[HubConfig] = None,
) -> ASGIApp:
    """Build the FastAPI app for a single repo mounted under a hub.

    Args:
        repo_root: Repository to serve.
        server_overrides: Optional host/origin/auth-token overrides that take
            precedence over the repo's own config.
        hub_config: Hub configuration, used for static-asset fallback.

    Returns:
        The configured ASGI application.
    """
    # Hub-only: repo apps are always mounted under `/repos/<id>` and must not
    # apply their own base-path rewriting (the hub handles that globally).
    context = _build_app_context(repo_root, base_path="", hub_config=hub_config)
    app = FastAPI(redirect_slashes=False, lifespan=_app_lifespan(context))

    class _RepoRootContextMiddleware(BaseHTTPMiddleware):
        """Ensure find_repo_root() resolves to the mounted repo even when cwd differs."""

        def __init__(self, app, repo_root: Path):
            super().__init__(app)
            self.repo_root = repo_root

        async def dispatch(self, request, call_next):
            # Bind the repo root into the contextvar for the request duration.
            token = set_repo_root_context(self.repo_root)
            try:
                return await call_next(request)
            finally:
                reset_repo_root_context(token)

    # NOTE: Starlette applies middleware in reverse registration order, so the
    # registration sequence below is significant.
    app.add_middleware(_RepoRootContextMiddleware, repo_root=context.engine.repo_root)
    _apply_app_context(app, context)
    app.add_middleware(GZipMiddleware, minimum_size=500)
    static_files = CacheStaticFiles(directory=context.static_dir)
    app.state.static_files = static_files
    app.state.static_assets_lock = threading.Lock()
    app.state.hub_static_assets = (
        hub_config.static_assets if hub_config is not None else None
    )
    app.mount("/static", static_files, name="static")
    # Route handlers
    app.include_router(build_repo_router(context.static_dir))

    # Security settings come from repo config, then per-field overrides win.
    allowed_hosts = _resolve_allowed_hosts(
        context.engine.config.server_host, context.engine.config.server_allowed_hosts
    )
    allowed_origins = context.engine.config.server_allowed_origins
    auth_token_env = context.engine.config.server_auth_token_env
    if server_overrides is not None:
        if server_overrides.allowed_hosts is not None:
            allowed_hosts = list(server_overrides.allowed_hosts)
        if server_overrides.allowed_origins is not None:
            allowed_origins = list(server_overrides.allowed_origins)
        if server_overrides.auth_token_env is not None:
            auth_token_env = server_overrides.auth_token_env
    auth_token = _resolve_auth_token(auth_token_env, env=context.env)
    app.state.auth_token = auth_token
    if auth_token:
        # Auth middleware is only installed when a token is configured.
        app.add_middleware(
            AuthTokenMiddleware, auth_token=auth_token, base_path=context.base_path
        )
    app.add_middleware(
        HostOriginMiddleware,
        allowed_hosts=allowed_hosts,
        allowed_origins=allowed_origins,
    )
    app.add_middleware(RequestIdMiddleware)
    app.add_middleware(SecurityHeadersMiddleware)

    return app
|
|
1069
|
+
|
|
1070
|
+
|
|
1071
|
+
def create_app(
    repo_root: Optional[Path] = None,
    base_path: Optional[str] = None,
    server_overrides: Optional[ServerOverrides] = None,
    hub_config: Optional[HubConfig] = None,
) -> ASGIApp:
    """
    Public-facing factory for standalone repo apps (non-hub) retained for backward compatibility.
    """
    # Respect provided base_path when running directly; hub passes base_path="".
    context = _build_app_context(repo_root, base_path, hub_config=hub_config)
    app = FastAPI(redirect_slashes=False, lifespan=_app_lifespan(context))

    class _RepoRootContextMiddleware(BaseHTTPMiddleware):
        """Ensure find_repo_root() resolves to the mounted repo even when cwd differs."""

        def __init__(self, app, repo_root: Path):
            super().__init__(app)
            self.repo_root = repo_root

        async def dispatch(self, request, call_next):
            # Bind the repo root into the contextvar for the request duration.
            token = set_repo_root_context(self.repo_root)
            try:
                return await call_next(request)
            finally:
                reset_repo_root_context(token)

    # NOTE: Starlette applies middleware in reverse registration order, so the
    # registration sequence below is significant.
    app.add_middleware(_RepoRootContextMiddleware, repo_root=context.engine.repo_root)
    _apply_app_context(app, context)
    app.add_middleware(GZipMiddleware, minimum_size=500)
    static_files = CacheStaticFiles(directory=context.static_dir)
    app.state.static_files = static_files
    app.state.static_assets_lock = threading.Lock()
    app.state.hub_static_assets = (
        hub_config.static_assets if hub_config is not None else None
    )
    app.mount("/static", static_files, name="static")
    # Route handlers
    app.include_router(build_repo_router(context.static_dir))

    # Security settings come from repo config, then per-field overrides win.
    allowed_hosts = _resolve_allowed_hosts(
        context.engine.config.server_host, context.engine.config.server_allowed_hosts
    )
    allowed_origins = context.engine.config.server_allowed_origins
    auth_token_env = context.engine.config.server_auth_token_env
    if server_overrides is not None:
        if server_overrides.allowed_hosts is not None:
            allowed_hosts = list(server_overrides.allowed_hosts)
        if server_overrides.allowed_origins is not None:
            allowed_origins = list(server_overrides.allowed_origins)
        if server_overrides.auth_token_env is not None:
            auth_token_env = server_overrides.auth_token_env
    auth_token = _resolve_auth_token(auth_token_env, env=context.env)
    app.state.auth_token = auth_token
    if auth_token:
        # Auth middleware is only installed when a token is configured.
        app.add_middleware(
            AuthTokenMiddleware, auth_token=auth_token, base_path=context.base_path
        )
    # Unlike create_repo_app, a standalone app rewrites its own base path.
    if context.base_path:
        app.add_middleware(BasePathRouterMiddleware, base_path=context.base_path)
    app.add_middleware(
        HostOriginMiddleware,
        allowed_hosts=allowed_hosts,
        allowed_origins=allowed_origins,
    )
    app.add_middleware(RequestIdMiddleware)
    app.add_middleware(SecurityHeadersMiddleware)

    return app
|
|
1140
|
+
|
|
1141
|
+
|
|
1142
|
+
def create_hub_app(
|
|
1143
|
+
hub_root: Optional[Path] = None, base_path: Optional[str] = None
|
|
1144
|
+
) -> ASGIApp:
|
|
1145
|
+
context = _build_hub_context(hub_root, base_path)
|
|
1146
|
+
app = FastAPI(redirect_slashes=False)
|
|
1147
|
+
_apply_hub_context(app, context)
|
|
1148
|
+
app.add_middleware(GZipMiddleware, minimum_size=500)
|
|
1149
|
+
static_files = CacheStaticFiles(directory=context.static_dir)
|
|
1150
|
+
app.state.static_files = static_files
|
|
1151
|
+
app.state.static_assets_lock = threading.Lock()
|
|
1152
|
+
app.state.hub_static_assets = None
|
|
1153
|
+
app.mount("/static", static_files, name="static")
|
|
1154
|
+
raw_config = getattr(context.config, "raw", {})
|
|
1155
|
+
pma_config = raw_config.get("pma", {}) if isinstance(raw_config, dict) else {}
|
|
1156
|
+
if isinstance(pma_config, dict) and pma_config.get("enabled"):
|
|
1157
|
+
app.include_router(build_pma_routes())
|
|
1158
|
+
app.include_router(build_hub_filebox_routes())
|
|
1159
|
+
mounted_repos: set[str] = set()
|
|
1160
|
+
mount_errors: dict[str, str] = {}
|
|
1161
|
+
repo_apps: dict[str, ASGIApp] = {}
|
|
1162
|
+
repo_lifespans: dict[str, object] = {}
|
|
1163
|
+
mount_order: list[str] = []
|
|
1164
|
+
mount_lock: Optional[asyncio.Lock] = None
|
|
1165
|
+
|
|
1166
|
+
async def _get_mount_lock() -> asyncio.Lock:
|
|
1167
|
+
nonlocal mount_lock
|
|
1168
|
+
if mount_lock is None:
|
|
1169
|
+
mount_lock = asyncio.Lock()
|
|
1170
|
+
return mount_lock
|
|
1171
|
+
|
|
1172
|
+
app.state.hub_started = False
|
|
1173
|
+
repo_server_overrides: Optional[ServerOverrides] = None
|
|
1174
|
+
if context.config.repo_server_inherit:
|
|
1175
|
+
repo_server_overrides = ServerOverrides(
|
|
1176
|
+
allowed_hosts=_resolve_allowed_hosts(
|
|
1177
|
+
context.config.server_host, context.config.server_allowed_hosts
|
|
1178
|
+
),
|
|
1179
|
+
allowed_origins=list(context.config.server_allowed_origins),
|
|
1180
|
+
auth_token_env=context.config.server_auth_token_env,
|
|
1181
|
+
)
|
|
1182
|
+
|
|
1183
|
+
def _unwrap_fastapi(sub_app: ASGIApp) -> Optional[FastAPI]:
    """Peel middleware wrappers off *sub_app* until a FastAPI app is found.

    Middleware wrappers conventionally expose the wrapped app via an ``app``
    attribute; follow that chain. Returns None if no FastAPI instance is
    reachable (e.g. a plain ASGI callable).
    """
    current: ASGIApp = sub_app
    while not isinstance(current, FastAPI):
        nested = getattr(current, "app", None)
        if nested is None:
            return None
        current = nested
    return current
|
|
1191
|
+
|
|
1192
|
+
async def _start_repo_lifespan_locked(prefix: str, sub_app: ASGIApp) -> None:
    """Enter the lifespan context of a mounted repo app.

    Caller must hold the mount lock (hence the ``_locked`` suffix). Idempotent:
    a prefix with an already-entered lifespan is skipped. On failure the repo
    is recorded in ``mount_errors`` and fully unmounted so it does not serve
    requests in a half-started state.
    """
    if prefix in repo_lifespans:
        return
    fastapi_app = _unwrap_fastapi(sub_app)
    if fastapi_app is None:
        # Not a FastAPI app; nothing to start.
        return
    try:
        # Manually drive the sub-app's lifespan: mounted apps do not get
        # lifespan events from the parent automatically.
        ctx = fastapi_app.router.lifespan_context(fastapi_app)
        await ctx.__aenter__()
        repo_lifespans[prefix] = ctx
        safe_log(
            app.state.logger,
            logging.INFO,
            f"Repo app lifespan entered for {prefix}",
        )
    except Exception as exc:
        mount_errors[prefix] = str(exc)
        try:
            app.state.logger.warning("Repo lifespan failed for %s: %s", prefix, exc)
        except Exception as exc2:
            # Logging must never break mount bookkeeping.
            safe_log(
                app.state.logger,
                logging.DEBUG,
                f"Failed to log repo lifespan failure for {prefix}",
                exc=exc2,
            )
        await _unmount_repo_locked(prefix)
|
|
1219
|
+
|
|
1220
|
+
async def _stop_repo_lifespan_locked(prefix: str) -> None:
    """Exit a repo app's lifespan context, if one was entered.

    Caller must hold the mount lock. Pops the context first so a failing
    ``__aexit__`` cannot be retried on a half-closed context. Shutdown errors
    are logged and swallowed: one repo failing must not block hub shutdown.
    """
    ctx = repo_lifespans.pop(prefix, None)
    if ctx is None:
        return
    try:
        await ctx.__aexit__(None, None, None)
        safe_log(
            app.state.logger,
            logging.INFO,
            f"Repo app lifespan exited for {prefix}",
        )
    except Exception as exc:
        try:
            app.state.logger.warning(
                "Repo lifespan shutdown failed for %s: %s", prefix, exc
            )
        except Exception as exc2:
            # Logging must never break shutdown.
            safe_log(
                app.state.logger,
                logging.DEBUG,
                f"Failed to log repo lifespan shutdown failure for {prefix}",
                exc=exc2,
            )
|
|
1243
|
+
|
|
1244
|
+
def _detach_mount_locked(prefix: str) -> None:
    """Remove the Mount route and all bookkeeping for *prefix*.

    Caller must hold the mount lock. Starlette has no unmount API, so the
    route list is rebuilt without the matching Mount entry.
    """
    mount_path = f"/repos/{prefix}"
    app.router.routes = [
        route
        for route in app.router.routes
        if not (isinstance(route, Mount) and route.path == mount_path)
    ]
    mounted_repos.discard(prefix)
    repo_apps.pop(prefix, None)
    if prefix in mount_order:
        mount_order.remove(prefix)
|
|
1255
|
+
|
|
1256
|
+
async def _unmount_repo_locked(prefix: str) -> None:
    """Fully unmount a repo: stop its lifespan, then detach its route.

    Caller must hold the mount lock. Order matters: the lifespan is exited
    before the route disappears from the router.
    """
    await _stop_repo_lifespan_locked(prefix)
    _detach_mount_locked(prefix)
|
|
1259
|
+
|
|
1260
|
+
def _mount_repo_sync(prefix: str, repo_path: Path) -> bool:
    """Build and mount a repo sub-app under ``/repos/{prefix}`` (sync path).

    Used during initial, pre-event-loop setup, so no mount lock is taken and
    no lifespan is started (that happens later in ``lifespan``). Returns True
    when mounted (or already mounted), False when creation failed; failures
    are memoized in ``mount_errors`` so they are not retried on every call.

    NOTE(review): the ConfigError and generic Exception arms are identical —
    kept separate here to preserve behavior; candidates for merging.
    """
    if prefix in mounted_repos:
        return True
    if prefix in mount_errors:
        # Previously failed; do not retry until errors are cleared.
        return False
    try:
        # Hub already handles the base path; avoid reapplying it in child apps.
        sub_app = create_repo_app(
            repo_path,
            server_overrides=repo_server_overrides,
            hub_config=context.config,
        )
    except ConfigError as exc:
        mount_errors[prefix] = str(exc)
        try:
            app.state.logger.warning("Cannot mount repo %s: %s", prefix, exc)
        except Exception as exc2:
            safe_log(
                app.state.logger,
                logging.DEBUG,
                f"Failed to log mount error for {prefix}",
                exc=exc2,
            )
        return False
    except Exception as exc:
        mount_errors[prefix] = str(exc)
        try:
            app.state.logger.warning("Cannot mount repo %s: %s", prefix, exc)
        except Exception as exc2:
            safe_log(
                app.state.logger,
                logging.DEBUG,
                f"Failed to log mount error for {prefix}",
                exc=exc2,
            )
        return False
    fastapi_app = _unwrap_fastapi(sub_app)
    if fastapi_app is not None:
        # Tag the sub-app so it knows which repo it serves.
        fastapi_app.state.repo_id = prefix
    app.mount(f"/repos/{prefix}", sub_app)
    mounted_repos.add(prefix)
    repo_apps[prefix] = sub_app
    if prefix not in mount_order:
        mount_order.append(prefix)
    mount_errors.pop(prefix, None)
    return True
|
|
1306
|
+
|
|
1307
|
+
async def _refresh_mounts(snapshots, *, full_refresh: bool = True):
    """Reconcile mounted repo apps against the given repo snapshots.

    With ``full_refresh`` (the default) repos absent from the snapshot set are
    unmounted and stale mount errors are cleared; with ``full_refresh=False``
    only missing mounts are added (used after single-repo create operations).
    All mutation happens under the mount lock.
    """
    desired = {
        snap.id for snap in snapshots if snap.initialized and snap.exists_on_disk
    }
    # Local name intentionally shadows the factory-level slot; the shared
    # lock object itself comes from _get_mount_lock().
    mount_lock = await _get_mount_lock()
    async with mount_lock:
        if full_refresh:
            for prefix in list(mounted_repos):
                if prefix not in desired:
                    await _unmount_repo_locked(prefix)
            for prefix in list(mount_errors):
                if prefix not in desired:
                    mount_errors.pop(prefix, None)
        for snap in snapshots:
            if snap.id not in desired:
                continue
            if snap.id in mounted_repos or snap.id in mount_errors:
                # Already mounted, or previously failed (not retried here).
                continue
            # Hub already handles the base path; avoid reapplying it in child apps.
            try:
                sub_app = create_repo_app(
                    snap.path,
                    server_overrides=repo_server_overrides,
                    hub_config=context.config,
                )
            except ConfigError as exc:
                mount_errors[snap.id] = str(exc)
                try:
                    app.state.logger.warning(
                        "Cannot mount repo %s: %s", snap.id, exc
                    )
                except Exception as exc2:
                    safe_log(
                        app.state.logger,
                        logging.DEBUG,
                        f"Failed to log mount error for snapshot {snap.id}",
                        exc=exc2,
                    )
                continue
            except Exception as exc:
                mount_errors[snap.id] = str(exc)
                try:
                    app.state.logger.warning(
                        "Cannot mount repo %s: %s", snap.id, exc
                    )
                except Exception as exc2:
                    safe_log(
                        app.state.logger,
                        logging.DEBUG,
                        f"Failed to log mount error for snapshot {snap.id}",
                        exc=exc2,
                    )
                continue
            fastapi_app = _unwrap_fastapi(sub_app)
            if fastapi_app is not None:
                fastapi_app.state.repo_id = snap.id
            app.mount(f"/repos/{snap.id}", sub_app)
            mounted_repos.add(snap.id)
            repo_apps[snap.id] = sub_app
            if snap.id not in mount_order:
                mount_order.append(snap.id)
            mount_errors.pop(snap.id, None)
            if app.state.hub_started:
                # Hub is live: start the new repo's lifespan immediately.
                # During startup this is deferred to the hub lifespan.
                await _start_repo_lifespan_locked(snap.id, sub_app)
|
|
1371
|
+
|
|
1372
|
+
def _add_mount_info(repo_dict: dict) -> dict:
    """Add mount_status to repo dict for UI to know if navigation is possible.

    Decorates *repo_dict* in place with ``mounted`` and, when the repo failed
    to mount, ``mount_error``; returns the same dict for chaining.
    """
    repo_id = repo_dict.get("id")
    error = mount_errors.get(repo_id)
    if error is not None:
        repo_dict["mounted"] = False
        repo_dict["mount_error"] = error
    else:
        repo_dict["mounted"] = repo_id in mounted_repos
    return repo_dict
|
|
1383
|
+
|
|
1384
|
+
def _get_ticket_flow_summary(repo_path: Path) -> Optional[dict]:
    """Get ticket flow summary for a repo (status, done/total, step).

    Returns None if no ticket flow exists or repo is not initialized.
    Best-effort: any storage or parsing error also yields None rather than
    failing the enclosing repo listing.
    """
    db_path = repo_path / ".codex-autorunner" / "flows.db"
    if not db_path.exists():
        return None
    try:
        config = load_repo_config(repo_path)
        with FlowStore(db_path, durable=config.durable_writes) as store:
            # Get the latest ticket_flow run (any status)
            runs = store.list_flow_runs(flow_type="ticket_flow")
            if not runs:
                return None
            latest = runs[0]  # Already sorted by created_at DESC

            # Count tickets
            ticket_dir = repo_path / ".codex-autorunner" / "tickets"
            total = 0
            done = 0
            for path in list_ticket_paths(ticket_dir):
                total += 1
                try:
                    if ticket_is_done(path):
                        done += 1
                except Exception:
                    # Unreadable ticket counts toward total but not done.
                    continue

            if total == 0:
                return None

            # Extract current step from ticket_engine state; each isinstance
            # guard tolerates partially-written or legacy state payloads.
            state = latest.state if isinstance(latest.state, dict) else {}
            engine = state.get("ticket_engine") if isinstance(state, dict) else {}
            engine = engine if isinstance(engine, dict) else {}
            current_step = engine.get("total_turns")

            return {
                "status": latest.status.value,
                "done_count": done,
                "total_count": total,
                "current_step": current_step,
            }
    except Exception:
        return None
|
|
1430
|
+
|
|
1431
|
+
initial_snapshots = context.supervisor.scan()
|
|
1432
|
+
for snap in initial_snapshots:
|
|
1433
|
+
if snap.initialized and snap.exists_on_disk:
|
|
1434
|
+
_mount_repo_sync(snap.id, snap.path)
|
|
1435
|
+
|
|
1436
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Hub application lifespan.

    Startup: mark the hub live, launch background maintenance loops
    (housekeeping, app-server prune, opencode prune), then enter the lifespan
    of every already-mounted repo app under the mount lock.

    Shutdown: cancel and reap the background loops, exit repo lifespans in
    reverse mount order, detach all mounts, close supervisors, and release
    static-asset resources.

    Fix vs. previous revision: task handles from ``asyncio.create_task`` are
    now retained and cancelled on shutdown. Fire-and-forget tasks may be
    garbage-collected mid-flight (the asyncio docs require keeping a
    reference), and previously the loops were never stopped.
    """
    app.state.hub_started = True
    background_tasks: list[asyncio.Task] = []
    if app.state.config.housekeeping.enabled:
        # Never spin faster than once per second.
        interval = max(app.state.config.housekeeping.interval_seconds, 1)

        async def _housekeeping_loop():
            while True:
                try:
                    # run_housekeeping_once is blocking; keep it off the loop.
                    await asyncio.to_thread(
                        run_housekeeping_once,
                        app.state.config.housekeeping,
                        app.state.config.root,
                        logger=app.state.logger,
                    )
                except Exception as exc:
                    safe_log(
                        app.state.logger,
                        logging.WARNING,
                        "Housekeeping task failed",
                        exc,
                    )
                await asyncio.sleep(interval)

        background_tasks.append(asyncio.create_task(_housekeeping_loop()))
    app_server_supervisor = getattr(app.state, "app_server_supervisor", None)
    app_server_prune_interval = getattr(
        app.state, "app_server_prune_interval", None
    )
    if app_server_supervisor is not None and app_server_prune_interval:

        async def _app_server_prune_loop():
            while True:
                await asyncio.sleep(app_server_prune_interval)
                try:
                    await app_server_supervisor.prune_idle()
                except Exception as exc:
                    safe_log(
                        app.state.logger,
                        logging.WARNING,
                        "Hub app-server prune task failed",
                        exc,
                    )

        background_tasks.append(asyncio.create_task(_app_server_prune_loop()))
    opencode_supervisor = getattr(app.state, "opencode_supervisor", None)
    opencode_prune_interval = getattr(app.state, "opencode_prune_interval", None)
    if opencode_supervisor is not None and opencode_prune_interval:

        async def _opencode_prune_loop():
            while True:
                await asyncio.sleep(opencode_prune_interval)
                try:
                    await opencode_supervisor.prune_idle()
                except Exception as exc:
                    safe_log(
                        app.state.logger,
                        logging.WARNING,
                        "Hub opencode prune task failed",
                        exc,
                    )

        background_tasks.append(asyncio.create_task(_opencode_prune_loop()))
    mount_lock = await _get_mount_lock()
    async with mount_lock:
        # Repos mounted during pre-loop setup get their lifespans started now.
        for prefix in list(mount_order):
            sub_app = repo_apps.get(prefix)
            if sub_app is not None:
                await _start_repo_lifespan_locked(prefix, sub_app)
    try:
        yield
    finally:
        # Stop background loops first so they cannot race with teardown.
        for task in background_tasks:
            task.cancel()
        for task in background_tasks:
            try:
                await task
            except asyncio.CancelledError:
                pass
            except Exception:
                pass
        mount_lock = await _get_mount_lock()
        async with mount_lock:
            # Reverse mount order: last-mounted repo shuts down first.
            for prefix in list(reversed(mount_order)):
                await _stop_repo_lifespan_locked(prefix)
            for prefix in list(mounted_repos):
                _detach_mount_locked(prefix)
        app_server_supervisor = getattr(app.state, "app_server_supervisor", None)
        if app_server_supervisor is not None:
            try:
                await app_server_supervisor.close_all()
            except Exception as exc:
                safe_log(
                    app.state.logger,
                    logging.WARNING,
                    "Hub app-server shutdown failed",
                    exc,
                )
        opencode_supervisor = getattr(app.state, "opencode_supervisor", None)
        if opencode_supervisor is not None:
            try:
                await opencode_supervisor.close_all()
            except Exception as exc:
                safe_log(
                    app.state.logger,
                    logging.WARNING,
                    "Hub opencode shutdown failed",
                    exc,
                )
        static_context = getattr(app.state, "static_assets_context", None)
        if static_context is not None:
            static_context.close()
|
|
1539
|
+
|
|
1540
|
+
app.router.lifespan_context = lifespan
|
|
1541
|
+
|
|
1542
|
+
@app.get("/hub/usage")
|
|
1543
|
+
def hub_usage(since: Optional[str] = None, until: Optional[str] = None):
|
|
1544
|
+
try:
|
|
1545
|
+
since_dt = parse_iso_datetime(since)
|
|
1546
|
+
until_dt = parse_iso_datetime(until)
|
|
1547
|
+
except UsageError as exc:
|
|
1548
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
1549
|
+
|
|
1550
|
+
manifest = load_manifest(context.config.manifest_path, context.config.root)
|
|
1551
|
+
repo_map = [
|
|
1552
|
+
(repo.id, (context.config.root / repo.path)) for repo in manifest.repos
|
|
1553
|
+
]
|
|
1554
|
+
per_repo, unmatched, status = get_hub_usage_summary_cached(
|
|
1555
|
+
repo_map,
|
|
1556
|
+
default_codex_home(),
|
|
1557
|
+
config=context.config,
|
|
1558
|
+
since=since_dt,
|
|
1559
|
+
until=until_dt,
|
|
1560
|
+
)
|
|
1561
|
+
return {
|
|
1562
|
+
"mode": "hub",
|
|
1563
|
+
"hub_root": str(context.config.root),
|
|
1564
|
+
"codex_home": str(default_codex_home()),
|
|
1565
|
+
"since": since,
|
|
1566
|
+
"until": until,
|
|
1567
|
+
"status": status,
|
|
1568
|
+
"repos": [
|
|
1569
|
+
{
|
|
1570
|
+
"id": repo_id,
|
|
1571
|
+
"events": summary.events,
|
|
1572
|
+
"totals": summary.totals.to_dict(),
|
|
1573
|
+
"latest_rate_limits": summary.latest_rate_limits,
|
|
1574
|
+
}
|
|
1575
|
+
for repo_id, summary in per_repo.items()
|
|
1576
|
+
],
|
|
1577
|
+
"unmatched": unmatched.to_dict(),
|
|
1578
|
+
}
|
|
1579
|
+
|
|
1580
|
+
@app.get("/hub/usage/series")
|
|
1581
|
+
def hub_usage_series(
|
|
1582
|
+
since: Optional[str] = None,
|
|
1583
|
+
until: Optional[str] = None,
|
|
1584
|
+
bucket: str = "day",
|
|
1585
|
+
segment: str = "none",
|
|
1586
|
+
):
|
|
1587
|
+
try:
|
|
1588
|
+
since_dt = parse_iso_datetime(since)
|
|
1589
|
+
until_dt = parse_iso_datetime(until)
|
|
1590
|
+
except UsageError as exc:
|
|
1591
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
1592
|
+
|
|
1593
|
+
manifest = load_manifest(context.config.manifest_path, context.config.root)
|
|
1594
|
+
repo_map = [
|
|
1595
|
+
(repo.id, (context.config.root / repo.path)) for repo in manifest.repos
|
|
1596
|
+
]
|
|
1597
|
+
try:
|
|
1598
|
+
series, status = get_hub_usage_series_cached(
|
|
1599
|
+
repo_map,
|
|
1600
|
+
default_codex_home(),
|
|
1601
|
+
config=context.config,
|
|
1602
|
+
since=since_dt,
|
|
1603
|
+
until=until_dt,
|
|
1604
|
+
bucket=bucket,
|
|
1605
|
+
segment=segment,
|
|
1606
|
+
)
|
|
1607
|
+
except UsageError as exc:
|
|
1608
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
1609
|
+
return {
|
|
1610
|
+
"mode": "hub",
|
|
1611
|
+
"hub_root": str(context.config.root),
|
|
1612
|
+
"codex_home": str(default_codex_home()),
|
|
1613
|
+
"since": since,
|
|
1614
|
+
"until": until,
|
|
1615
|
+
"status": status,
|
|
1616
|
+
**series,
|
|
1617
|
+
}
|
|
1618
|
+
|
|
1619
|
+
@app.get("/hub/messages")
|
|
1620
|
+
async def hub_messages(limit: int = 100):
|
|
1621
|
+
"""Return paused ticket_flow dispatches across all repos.
|
|
1622
|
+
|
|
1623
|
+
The hub inbox is intentionally simple: it surfaces the latest archived
|
|
1624
|
+
dispatch for each paused ticket_flow run.
|
|
1625
|
+
"""
|
|
1626
|
+
|
|
1627
|
+
def _latest_dispatch(
|
|
1628
|
+
repo_root: Path, run_id: str, input_data: dict
|
|
1629
|
+
) -> Optional[dict]:
|
|
1630
|
+
try:
|
|
1631
|
+
workspace_root = Path(input_data.get("workspace_root") or repo_root)
|
|
1632
|
+
runs_dir = Path(input_data.get("runs_dir") or ".codex-autorunner/runs")
|
|
1633
|
+
outbox_paths = resolve_outbox_paths(
|
|
1634
|
+
workspace_root=workspace_root, runs_dir=runs_dir, run_id=run_id
|
|
1635
|
+
)
|
|
1636
|
+
history_dir = outbox_paths.dispatch_history_dir
|
|
1637
|
+
if not history_dir.exists() or not history_dir.is_dir():
|
|
1638
|
+
return None
|
|
1639
|
+
|
|
1640
|
+
def _dispatch_dict(dispatch: Dispatch) -> dict:
|
|
1641
|
+
return {
|
|
1642
|
+
"mode": dispatch.mode,
|
|
1643
|
+
"title": dispatch.title,
|
|
1644
|
+
"body": dispatch.body,
|
|
1645
|
+
"extra": dispatch.extra,
|
|
1646
|
+
"is_handoff": dispatch.is_handoff,
|
|
1647
|
+
}
|
|
1648
|
+
|
|
1649
|
+
def _list_files(dispatch_dir: Path) -> list[str]:
|
|
1650
|
+
files: list[str] = []
|
|
1651
|
+
for child in sorted(dispatch_dir.iterdir(), key=lambda p: p.name):
|
|
1652
|
+
if child.name.startswith("."):
|
|
1653
|
+
continue
|
|
1654
|
+
if child.name == "DISPATCH.md":
|
|
1655
|
+
continue
|
|
1656
|
+
if child.is_file():
|
|
1657
|
+
files.append(child.name)
|
|
1658
|
+
return files
|
|
1659
|
+
|
|
1660
|
+
seq_dirs: list[Path] = []
|
|
1661
|
+
for child in history_dir.iterdir():
|
|
1662
|
+
if not child.is_dir():
|
|
1663
|
+
continue
|
|
1664
|
+
name = child.name
|
|
1665
|
+
if len(name) == 4 and name.isdigit():
|
|
1666
|
+
seq_dirs.append(child)
|
|
1667
|
+
if not seq_dirs:
|
|
1668
|
+
return None
|
|
1669
|
+
|
|
1670
|
+
seq_dirs = sorted(seq_dirs, key=lambda p: p.name, reverse=True)
|
|
1671
|
+
handoff_candidate: Optional[dict] = None
|
|
1672
|
+
non_summary_candidate: Optional[dict] = None
|
|
1673
|
+
turn_summary_candidate: Optional[dict] = None
|
|
1674
|
+
error_candidate: Optional[dict] = None
|
|
1675
|
+
|
|
1676
|
+
for seq_dir in seq_dirs:
|
|
1677
|
+
seq = int(seq_dir.name)
|
|
1678
|
+
dispatch_path = seq_dir / "DISPATCH.md"
|
|
1679
|
+
dispatch, errors = parse_dispatch(dispatch_path)
|
|
1680
|
+
if errors or dispatch is None:
|
|
1681
|
+
if error_candidate is None:
|
|
1682
|
+
error_candidate = {
|
|
1683
|
+
"seq": seq,
|
|
1684
|
+
"dir": seq_dir,
|
|
1685
|
+
"errors": errors,
|
|
1686
|
+
}
|
|
1687
|
+
continue
|
|
1688
|
+
candidate = {"seq": seq, "dir": seq_dir, "dispatch": dispatch}
|
|
1689
|
+
if dispatch.is_handoff and handoff_candidate is None:
|
|
1690
|
+
handoff_candidate = candidate
|
|
1691
|
+
if (
|
|
1692
|
+
dispatch.mode != "turn_summary"
|
|
1693
|
+
and non_summary_candidate is None
|
|
1694
|
+
):
|
|
1695
|
+
non_summary_candidate = candidate
|
|
1696
|
+
if (
|
|
1697
|
+
dispatch.mode == "turn_summary"
|
|
1698
|
+
and turn_summary_candidate is None
|
|
1699
|
+
):
|
|
1700
|
+
turn_summary_candidate = candidate
|
|
1701
|
+
if (
|
|
1702
|
+
handoff_candidate
|
|
1703
|
+
and non_summary_candidate
|
|
1704
|
+
and turn_summary_candidate
|
|
1705
|
+
):
|
|
1706
|
+
break
|
|
1707
|
+
|
|
1708
|
+
selected = (
|
|
1709
|
+
handoff_candidate or non_summary_candidate or turn_summary_candidate
|
|
1710
|
+
)
|
|
1711
|
+
if not selected:
|
|
1712
|
+
if error_candidate:
|
|
1713
|
+
return {
|
|
1714
|
+
"seq": error_candidate["seq"],
|
|
1715
|
+
"dir": safe_relpath(error_candidate["dir"], repo_root),
|
|
1716
|
+
"dispatch": None,
|
|
1717
|
+
"errors": error_candidate["errors"],
|
|
1718
|
+
"files": [],
|
|
1719
|
+
}
|
|
1720
|
+
return None
|
|
1721
|
+
|
|
1722
|
+
selected_dir = selected["dir"]
|
|
1723
|
+
dispatch = selected["dispatch"]
|
|
1724
|
+
result = {
|
|
1725
|
+
"seq": selected["seq"],
|
|
1726
|
+
"dir": safe_relpath(selected_dir, repo_root),
|
|
1727
|
+
"dispatch": _dispatch_dict(dispatch),
|
|
1728
|
+
"errors": [],
|
|
1729
|
+
"files": _list_files(selected_dir),
|
|
1730
|
+
}
|
|
1731
|
+
if turn_summary_candidate is not None:
|
|
1732
|
+
result["turn_summary_seq"] = turn_summary_candidate["seq"]
|
|
1733
|
+
result["turn_summary"] = _dispatch_dict(
|
|
1734
|
+
turn_summary_candidate["dispatch"]
|
|
1735
|
+
)
|
|
1736
|
+
return result
|
|
1737
|
+
except Exception:
|
|
1738
|
+
return None
|
|
1739
|
+
|
|
1740
|
+
def _gather() -> list[dict]:
|
|
1741
|
+
messages: list[dict] = []
|
|
1742
|
+
try:
|
|
1743
|
+
snapshots = context.supervisor.list_repos()
|
|
1744
|
+
except Exception:
|
|
1745
|
+
return []
|
|
1746
|
+
for snap in snapshots:
|
|
1747
|
+
if not (snap.initialized and snap.exists_on_disk):
|
|
1748
|
+
continue
|
|
1749
|
+
repo_root = snap.path
|
|
1750
|
+
db_path = repo_root / ".codex-autorunner" / "flows.db"
|
|
1751
|
+
if not db_path.exists():
|
|
1752
|
+
continue
|
|
1753
|
+
try:
|
|
1754
|
+
config = load_repo_config(repo_root)
|
|
1755
|
+
with FlowStore(db_path, durable=config.durable_writes) as store:
|
|
1756
|
+
paused = store.list_flow_runs(
|
|
1757
|
+
flow_type="ticket_flow", status=FlowRunStatus.PAUSED
|
|
1758
|
+
)
|
|
1759
|
+
except Exception:
|
|
1760
|
+
continue
|
|
1761
|
+
if not paused:
|
|
1762
|
+
continue
|
|
1763
|
+
for record in paused:
|
|
1764
|
+
latest = _latest_dispatch(
|
|
1765
|
+
repo_root, str(record.id), dict(record.input_data or {})
|
|
1766
|
+
)
|
|
1767
|
+
if not latest or not latest.get("dispatch"):
|
|
1768
|
+
continue
|
|
1769
|
+
messages.append(
|
|
1770
|
+
{
|
|
1771
|
+
"repo_id": snap.id,
|
|
1772
|
+
"repo_display_name": snap.display_name,
|
|
1773
|
+
"repo_path": str(snap.path),
|
|
1774
|
+
"run_id": record.id,
|
|
1775
|
+
"run_created_at": record.created_at,
|
|
1776
|
+
"status": record.status.value,
|
|
1777
|
+
"seq": latest["seq"],
|
|
1778
|
+
"dispatch": latest["dispatch"],
|
|
1779
|
+
"files": latest.get("files") or [],
|
|
1780
|
+
"open_url": f"/repos/{snap.id}/?tab=inbox&run_id={record.id}",
|
|
1781
|
+
}
|
|
1782
|
+
)
|
|
1783
|
+
messages.sort(key=lambda m: (m.get("run_created_at") or ""), reverse=True)
|
|
1784
|
+
if limit and limit > 0:
|
|
1785
|
+
return messages[: int(limit)]
|
|
1786
|
+
return messages
|
|
1787
|
+
|
|
1788
|
+
items = await asyncio.to_thread(_gather)
|
|
1789
|
+
return {"items": items}
|
|
1790
|
+
|
|
1791
|
+
@app.get("/hub/repos")
|
|
1792
|
+
async def list_repos():
|
|
1793
|
+
safe_log(app.state.logger, logging.INFO, "Hub list_repos")
|
|
1794
|
+
snapshots = await asyncio.to_thread(context.supervisor.list_repos)
|
|
1795
|
+
await _refresh_mounts(snapshots)
|
|
1796
|
+
|
|
1797
|
+
def _enrich_repo(snap):
|
|
1798
|
+
repo_dict = _add_mount_info(snap.to_dict(context.config.root))
|
|
1799
|
+
if snap.initialized and snap.exists_on_disk:
|
|
1800
|
+
repo_dict["ticket_flow"] = _get_ticket_flow_summary(snap.path)
|
|
1801
|
+
else:
|
|
1802
|
+
repo_dict["ticket_flow"] = None
|
|
1803
|
+
return repo_dict
|
|
1804
|
+
|
|
1805
|
+
return {
|
|
1806
|
+
"last_scan_at": context.supervisor.state.last_scan_at,
|
|
1807
|
+
"repos": [_enrich_repo(snap) for snap in snapshots],
|
|
1808
|
+
}
|
|
1809
|
+
|
|
1810
|
+
@app.get("/hub/version")
|
|
1811
|
+
def hub_version():
|
|
1812
|
+
return {"asset_version": app.state.asset_version}
|
|
1813
|
+
|
|
1814
|
+
@app.post("/hub/repos/scan")
|
|
1815
|
+
async def scan_repos():
|
|
1816
|
+
safe_log(app.state.logger, logging.INFO, "Hub scan_repos")
|
|
1817
|
+
snapshots = await asyncio.to_thread(context.supervisor.scan)
|
|
1818
|
+
await _refresh_mounts(snapshots)
|
|
1819
|
+
|
|
1820
|
+
def _enrich_repo(snap):
|
|
1821
|
+
repo_dict = _add_mount_info(snap.to_dict(context.config.root))
|
|
1822
|
+
if snap.initialized and snap.exists_on_disk:
|
|
1823
|
+
repo_dict["ticket_flow"] = _get_ticket_flow_summary(snap.path)
|
|
1824
|
+
else:
|
|
1825
|
+
repo_dict["ticket_flow"] = None
|
|
1826
|
+
return repo_dict
|
|
1827
|
+
|
|
1828
|
+
return {
|
|
1829
|
+
"last_scan_at": context.supervisor.state.last_scan_at,
|
|
1830
|
+
"repos": [_enrich_repo(snap) for snap in snapshots],
|
|
1831
|
+
}
|
|
1832
|
+
|
|
1833
|
+
@app.post("/hub/jobs/scan", response_model=HubJobResponse)
|
|
1834
|
+
async def scan_repos_job():
|
|
1835
|
+
async def _run_scan():
|
|
1836
|
+
snapshots = await asyncio.to_thread(context.supervisor.scan)
|
|
1837
|
+
await _refresh_mounts(snapshots)
|
|
1838
|
+
return {"status": "ok"}
|
|
1839
|
+
|
|
1840
|
+
job = await context.job_manager.submit(
|
|
1841
|
+
"hub.scan_repos", _run_scan, request_id=get_request_id()
|
|
1842
|
+
)
|
|
1843
|
+
return job.to_dict()
|
|
1844
|
+
|
|
1845
|
+
@app.post("/hub/repos")
|
|
1846
|
+
async def create_repo(payload: HubCreateRepoRequest):
|
|
1847
|
+
git_url = payload.git_url
|
|
1848
|
+
repo_id = payload.repo_id
|
|
1849
|
+
if not repo_id and not git_url:
|
|
1850
|
+
raise HTTPException(status_code=400, detail="Missing repo id")
|
|
1851
|
+
repo_path_val = payload.path
|
|
1852
|
+
repo_path = Path(repo_path_val) if repo_path_val else None
|
|
1853
|
+
git_init = payload.git_init
|
|
1854
|
+
force = payload.force
|
|
1855
|
+
safe_log(
|
|
1856
|
+
app.state.logger,
|
|
1857
|
+
logging.INFO,
|
|
1858
|
+
"Hub create repo id=%s path=%s git_init=%s force=%s git_url=%s"
|
|
1859
|
+
% (repo_id, repo_path_val, git_init, force, bool(git_url)),
|
|
1860
|
+
)
|
|
1861
|
+
try:
|
|
1862
|
+
if git_url:
|
|
1863
|
+
snapshot = await asyncio.to_thread(
|
|
1864
|
+
context.supervisor.clone_repo,
|
|
1865
|
+
git_url=str(git_url),
|
|
1866
|
+
repo_id=str(repo_id) if repo_id else None,
|
|
1867
|
+
repo_path=repo_path,
|
|
1868
|
+
force=force,
|
|
1869
|
+
)
|
|
1870
|
+
else:
|
|
1871
|
+
snapshot = await asyncio.to_thread(
|
|
1872
|
+
context.supervisor.create_repo,
|
|
1873
|
+
str(repo_id),
|
|
1874
|
+
repo_path=repo_path,
|
|
1875
|
+
git_init=git_init,
|
|
1876
|
+
force=force,
|
|
1877
|
+
)
|
|
1878
|
+
except Exception as exc:
|
|
1879
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
1880
|
+
await _refresh_mounts([snapshot], full_refresh=False)
|
|
1881
|
+
return _add_mount_info(snapshot.to_dict(context.config.root))
|
|
1882
|
+
|
|
1883
|
+
@app.post("/hub/jobs/repos", response_model=HubJobResponse)
|
|
1884
|
+
async def create_repo_job(payload: HubCreateRepoRequest):
|
|
1885
|
+
async def _run_create_repo():
|
|
1886
|
+
git_url = payload.git_url
|
|
1887
|
+
repo_id = payload.repo_id
|
|
1888
|
+
if not repo_id and not git_url:
|
|
1889
|
+
raise ValueError("Missing repo id")
|
|
1890
|
+
repo_path_val = payload.path
|
|
1891
|
+
repo_path = Path(repo_path_val) if repo_path_val else None
|
|
1892
|
+
git_init = payload.git_init
|
|
1893
|
+
force = payload.force
|
|
1894
|
+
if git_url:
|
|
1895
|
+
snapshot = await asyncio.to_thread(
|
|
1896
|
+
context.supervisor.clone_repo,
|
|
1897
|
+
git_url=str(git_url),
|
|
1898
|
+
repo_id=str(repo_id) if repo_id else None,
|
|
1899
|
+
repo_path=repo_path,
|
|
1900
|
+
force=force,
|
|
1901
|
+
)
|
|
1902
|
+
else:
|
|
1903
|
+
snapshot = await asyncio.to_thread(
|
|
1904
|
+
context.supervisor.create_repo,
|
|
1905
|
+
str(repo_id),
|
|
1906
|
+
repo_path=repo_path,
|
|
1907
|
+
git_init=git_init,
|
|
1908
|
+
force=force,
|
|
1909
|
+
)
|
|
1910
|
+
await _refresh_mounts([snapshot], full_refresh=False)
|
|
1911
|
+
return _add_mount_info(snapshot.to_dict(context.config.root))
|
|
1912
|
+
|
|
1913
|
+
job = await context.job_manager.submit(
|
|
1914
|
+
"hub.create_repo", _run_create_repo, request_id=get_request_id()
|
|
1915
|
+
)
|
|
1916
|
+
return job.to_dict()
|
|
1917
|
+
|
|
1918
|
+
@app.get("/hub/repos/{repo_id}/remove-check")
|
|
1919
|
+
async def remove_repo_check(repo_id: str):
|
|
1920
|
+
safe_log(app.state.logger, logging.INFO, f"Hub remove-check {repo_id}")
|
|
1921
|
+
try:
|
|
1922
|
+
return await asyncio.to_thread(
|
|
1923
|
+
context.supervisor.check_repo_removal, repo_id
|
|
1924
|
+
)
|
|
1925
|
+
except Exception as exc:
|
|
1926
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
1927
|
+
|
|
1928
|
+
@app.post("/hub/repos/{repo_id}/remove")
|
|
1929
|
+
async def remove_repo(repo_id: str, payload: Optional[HubRemoveRepoRequest] = None):
|
|
1930
|
+
payload = payload or HubRemoveRepoRequest()
|
|
1931
|
+
force = payload.force
|
|
1932
|
+
delete_dir = payload.delete_dir
|
|
1933
|
+
delete_worktrees = payload.delete_worktrees
|
|
1934
|
+
safe_log(
|
|
1935
|
+
app.state.logger,
|
|
1936
|
+
logging.INFO,
|
|
1937
|
+
"Hub remove repo id=%s force=%s delete_dir=%s delete_worktrees=%s"
|
|
1938
|
+
% (repo_id, force, delete_dir, delete_worktrees),
|
|
1939
|
+
)
|
|
1940
|
+
try:
|
|
1941
|
+
await asyncio.to_thread(
|
|
1942
|
+
context.supervisor.remove_repo,
|
|
1943
|
+
repo_id,
|
|
1944
|
+
force=force,
|
|
1945
|
+
delete_dir=delete_dir,
|
|
1946
|
+
delete_worktrees=delete_worktrees,
|
|
1947
|
+
)
|
|
1948
|
+
except Exception as exc:
|
|
1949
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
1950
|
+
snapshots = await asyncio.to_thread(
|
|
1951
|
+
context.supervisor.list_repos, use_cache=False
|
|
1952
|
+
)
|
|
1953
|
+
await _refresh_mounts(snapshots)
|
|
1954
|
+
return {"status": "ok"}
|
|
1955
|
+
|
|
1956
|
+
@app.post("/hub/jobs/repos/{repo_id}/remove", response_model=HubJobResponse)
|
|
1957
|
+
async def remove_repo_job(
|
|
1958
|
+
repo_id: str, payload: Optional[HubRemoveRepoRequest] = None
|
|
1959
|
+
):
|
|
1960
|
+
payload = payload or HubRemoveRepoRequest()
|
|
1961
|
+
|
|
1962
|
+
async def _run_remove_repo():
|
|
1963
|
+
await asyncio.to_thread(
|
|
1964
|
+
context.supervisor.remove_repo,
|
|
1965
|
+
repo_id,
|
|
1966
|
+
force=payload.force,
|
|
1967
|
+
delete_dir=payload.delete_dir,
|
|
1968
|
+
delete_worktrees=payload.delete_worktrees,
|
|
1969
|
+
)
|
|
1970
|
+
snapshots = await asyncio.to_thread(
|
|
1971
|
+
context.supervisor.list_repos, use_cache=False
|
|
1972
|
+
)
|
|
1973
|
+
await _refresh_mounts(snapshots)
|
|
1974
|
+
return {"status": "ok"}
|
|
1975
|
+
|
|
1976
|
+
job = await context.job_manager.submit(
|
|
1977
|
+
"hub.remove_repo", _run_remove_repo, request_id=get_request_id()
|
|
1978
|
+
)
|
|
1979
|
+
return job.to_dict()
|
|
1980
|
+
|
|
1981
|
+
@app.post("/hub/worktrees/create")
|
|
1982
|
+
async def create_worktree(payload: HubCreateWorktreeRequest):
|
|
1983
|
+
base_repo_id = payload.base_repo_id
|
|
1984
|
+
branch = payload.branch
|
|
1985
|
+
force = payload.force
|
|
1986
|
+
start_point = payload.start_point
|
|
1987
|
+
safe_log(
|
|
1988
|
+
app.state.logger,
|
|
1989
|
+
logging.INFO,
|
|
1990
|
+
"Hub create worktree base=%s branch=%s force=%s start_point=%s"
|
|
1991
|
+
% (base_repo_id, branch, force, start_point),
|
|
1992
|
+
)
|
|
1993
|
+
try:
|
|
1994
|
+
snapshot = await asyncio.to_thread(
|
|
1995
|
+
context.supervisor.create_worktree,
|
|
1996
|
+
base_repo_id=str(base_repo_id),
|
|
1997
|
+
branch=str(branch),
|
|
1998
|
+
force=force,
|
|
1999
|
+
start_point=str(start_point) if start_point else None,
|
|
2000
|
+
)
|
|
2001
|
+
except Exception as exc:
|
|
2002
|
+
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
|
2003
|
+
await _refresh_mounts([snapshot], full_refresh=False)
|
|
2004
|
+
return _add_mount_info(snapshot.to_dict(context.config.root))
|
|
2005
|
+
|
|
2006
|
+
@app.post("/hub/jobs/worktrees/create", response_model=HubJobResponse)
async def create_worktree_job(payload: HubCreateWorktreeRequest):
    """Submit worktree creation as a background job.

    The job performs the same work as /hub/worktrees/create and resolves
    to the new worktree's snapshot dict with mount info attached.
    """

    async def _worker():
        start = payload.start_point
        snapshot = await asyncio.to_thread(
            context.supervisor.create_worktree,
            base_repo_id=str(payload.base_repo_id),
            branch=str(payload.branch),
            force=payload.force,
            start_point=str(start) if start else None,
        )
        await _refresh_mounts([snapshot], full_refresh=False)
        return _add_mount_info(snapshot.to_dict(context.config.root))

    job = await context.job_manager.submit(
        "hub.create_worktree", _worker, request_id=get_request_id()
    )
    return job.to_dict()
|
|
2023
|
+
|
|
2024
|
+
@app.post("/hub/worktrees/cleanup")
async def cleanup_worktree(payload: HubCleanupWorktreeRequest):
    """Tear down a worktree, optionally deleting branches and archiving.

    Returns {"status": "ok"}; any supervisor failure becomes a 400.
    """
    safe_log(
        app.state.logger,
        logging.INFO,
        "Hub cleanup worktree id=%s delete_branch=%s delete_remote=%s archive=%s force_archive=%s"
        % (
            payload.worktree_repo_id,
            payload.delete_branch,
            payload.delete_remote,
            payload.archive,
            payload.force_archive,
        ),
    )
    try:
        # Blocking git/filesystem work runs off the event loop.
        await asyncio.to_thread(
            context.supervisor.cleanup_worktree,
            worktree_repo_id=str(payload.worktree_repo_id),
            delete_branch=payload.delete_branch,
            delete_remote=payload.delete_remote,
            archive=payload.archive,
            force_archive=payload.force_archive,
            archive_note=payload.archive_note,
        )
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    return {"status": "ok"}
|
|
2057
|
+
|
|
2058
|
+
@app.post("/hub/jobs/worktrees/cleanup", response_model=HubJobResponse)
async def cleanup_worktree_job(payload: HubCleanupWorktreeRequest):
    """Submit worktree cleanup as a background job.

    Async variant of /hub/worktrees/cleanup: returns the job's dict
    immediately; the job resolves to {"status": "ok"} on success.
    """

    async def _run_cleanup_worktree():
        # Consistency/correctness fix: the sibling job workers
        # (_run_remove_repo, _run_create_worktree) are async and push the
        # blocking supervisor call through asyncio.to_thread. The original
        # worker here was a plain sync function calling the supervisor
        # directly, so the blocking git/filesystem cleanup could stall the
        # event loop when the job manager executes it.
        await asyncio.to_thread(
            context.supervisor.cleanup_worktree,
            worktree_repo_id=str(payload.worktree_repo_id),
            delete_branch=payload.delete_branch,
            delete_remote=payload.delete_remote,
            archive=payload.archive,
            force_archive=payload.force_archive,
            archive_note=payload.archive_note,
        )
        return {"status": "ok"}

    job = await context.job_manager.submit(
        "hub.cleanup_worktree", _run_cleanup_worktree, request_id=get_request_id()
    )
    return job.to_dict()
|
|
2075
|
+
|
|
2076
|
+
@app.get("/hub/jobs/{job_id}", response_model=HubJobResponse)
async def get_hub_job(job_id: str):
    """Return the state of a previously submitted hub job, or 404 if unknown."""
    job = await context.job_manager.get(job_id)
    if not job:
        raise HTTPException(status_code=404, detail="Job not found")
    return job.to_dict()
|
|
2082
|
+
|
|
2083
|
+
@app.post("/hub/repos/{repo_id}/run")
async def run_repo(repo_id: str, payload: Optional[RunControlRequest] = None):
    """Start (or run once) the autorunner for a repo and return its snapshot.

    Lock contention maps to 409; any other supervisor failure to 400.
    """
    once = payload.once if payload else False
    safe_log(
        app.state.logger,
        logging.INFO,
        "Hub run %s once=%s" % (repo_id, once),
    )
    try:
        snapshot = await asyncio.to_thread(
            context.supervisor.run_repo, repo_id, once=once
        )
    except Exception as exc:
        # LockError (repo already running / locked) -> 409, everything else -> 400.
        status = 409 if isinstance(exc, LockError) else 400
        raise HTTPException(status_code=status, detail=str(exc)) from exc
    await _refresh_mounts([snapshot], full_refresh=False)
    return _add_mount_info(snapshot.to_dict(context.config.root))
|
|
2101
|
+
|
|
2102
|
+
@app.post("/hub/repos/{repo_id}/stop")
async def stop_repo(repo_id: str):
    """Ask the supervisor to stop a repo's runner; returns the repo snapshot."""
    safe_log(app.state.logger, logging.INFO, f"Hub stop {repo_id}")
    try:
        stopped = await asyncio.to_thread(context.supervisor.stop_repo, repo_id)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    return _add_mount_info(stopped.to_dict(context.config.root))
|
|
2110
|
+
|
|
2111
|
+
@app.post("/hub/repos/{repo_id}/resume")
async def resume_repo(repo_id: str, payload: Optional[RunControlRequest] = None):
    """Resume a repo's runner (optionally for a single pass) and return its snapshot.

    Lock contention maps to 409; any other supervisor failure to 400.
    """
    once = payload.once if payload else False
    safe_log(
        app.state.logger,
        logging.INFO,
        "Hub resume %s once=%s" % (repo_id, once),
    )
    try:
        snapshot = await asyncio.to_thread(
            context.supervisor.resume_repo, repo_id, once=once
        )
    except Exception as exc:
        # LockError -> 409, any other supervisor failure -> 400.
        status = 409 if isinstance(exc, LockError) else 400
        raise HTTPException(status_code=status, detail=str(exc)) from exc
    await _refresh_mounts([snapshot], full_refresh=False)
    return _add_mount_info(snapshot.to_dict(context.config.root))
|
|
2129
|
+
|
|
2130
|
+
@app.post("/hub/repos/{repo_id}/kill")
async def kill_repo(repo_id: str):
    """Run supervisor.kill_repo for a repo; returns the repo snapshot."""
    safe_log(app.state.logger, logging.INFO, f"Hub kill {repo_id}")
    try:
        killed = await asyncio.to_thread(context.supervisor.kill_repo, repo_id)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    return _add_mount_info(killed.to_dict(context.config.root))
|
|
2138
|
+
|
|
2139
|
+
@app.post("/hub/repos/{repo_id}/init")
async def init_repo(repo_id: str):
    """Run supervisor.init_repo, refresh the repo's mount, return its snapshot."""
    safe_log(app.state.logger, logging.INFO, f"Hub init {repo_id}")
    try:
        snap = await asyncio.to_thread(context.supervisor.init_repo, repo_id)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    await _refresh_mounts([snap], full_refresh=False)
    return _add_mount_info(snap.to_dict(context.config.root))
|
|
2148
|
+
|
|
2149
|
+
@app.post("/hub/repos/{repo_id}/sync-main")
async def sync_repo_main(repo_id: str):
    """Run supervisor.sync_main for a repo, refresh its mount, return its snapshot."""
    safe_log(app.state.logger, logging.INFO, f"Hub sync main {repo_id}")
    try:
        snap = await asyncio.to_thread(context.supervisor.sync_main, repo_id)
    except Exception as exc:
        raise HTTPException(status_code=400, detail=str(exc)) from exc
    await _refresh_mounts([snap], full_refresh=False)
    return _add_mount_info(snap.to_dict(context.config.root))
|
|
2158
|
+
|
|
2159
|
+
@app.get("/", include_in_schema=False)
def hub_index():
    """Serve the hub UI's index.html, rendered with the current asset version."""
    index_file = context.static_dir / "index.html"
    if not index_file.exists():
        # Missing assets mean a broken install, not a client error.
        raise HTTPException(
            status_code=500, detail="Static UI assets missing; reinstall package"
        )
    page = render_index_html(context.static_dir, app.state.asset_version)
    return HTMLResponse(page, headers=index_response_headers())
|
|
2168
|
+
|
|
2169
|
+
# Mount shared system routes, then wrap the FastAPI app in the middleware
# stack. Wrapping order matters: the last wrapper applied here is the
# outermost and runs first on every request.
app.include_router(build_system_routes())

allowed_hosts = _resolve_allowed_hosts(
    context.config.server_host, context.config.server_allowed_hosts
)
allowed_origins = context.config.server_allowed_origins
auth_token = _resolve_auth_token(context.config.server_auth_token_env)
app.state.auth_token = auth_token
asgi_app: ASGIApp = app
# Auth (when configured) sits closest to the app so outer middleware
# still runs for unauthenticated requests.
if auth_token:
    asgi_app = AuthTokenMiddleware(asgi_app, auth_token, context.base_path)
if context.base_path:
    asgi_app = BasePathRouterMiddleware(asgi_app, context.base_path)
asgi_app = HostOriginMiddleware(asgi_app, allowed_hosts, allowed_origins)
asgi_app = RequestIdMiddleware(asgi_app)
# Security headers are outermost so they apply to every response,
# including middleware-generated errors.
asgi_app = SecurityHeadersMiddleware(asgi_app)

return asgi_app
|
|
2187
|
+
|
|
2188
|
+
|
|
2189
|
+
def _resolve_auth_token(
|
|
2190
|
+
env_name: str, *, env: Optional[Mapping[str, str]] = None
|
|
2191
|
+
) -> Optional[str]:
|
|
2192
|
+
if not env_name:
|
|
2193
|
+
return None
|
|
2194
|
+
source = env if env is not None else os.environ
|
|
2195
|
+
value = source.get(env_name)
|
|
2196
|
+
if value is None:
|
|
2197
|
+
return None
|
|
2198
|
+
value = value.strip()
|
|
2199
|
+
return value or None
|
|
2200
|
+
|
|
2201
|
+
|
|
2202
|
+
def _resolve_allowed_hosts(host: str, allowed_hosts: list[str]) -> list[str]:
|
|
2203
|
+
cleaned = [entry.strip() for entry in allowed_hosts if entry and entry.strip()]
|
|
2204
|
+
if cleaned:
|
|
2205
|
+
return cleaned
|
|
2206
|
+
if _is_loopback_host(host):
|
|
2207
|
+
return ["localhost", "127.0.0.1", "::1", "testserver"]
|
|
2208
|
+
return []
|
|
2209
|
+
|
|
2210
|
+
|
|
2211
|
+
# Far-future immutable caching for static assets. NOTE(review): presumably
# safe because index.html embeds app.state.asset_version into asset URLs,
# so new builds get new URLs — confirm against render_index_html.
_STATIC_CACHE_CONTROL = "public, max-age=31536000, immutable"
|
|
2212
|
+
|
|
2213
|
+
|
|
2214
|
+
class CacheStaticFiles(StaticFiles):
    """StaticFiles variant that stamps successful responses with a
    long-lived Cache-Control header (default: one-year immutable)."""

    def __init__(self, *args, cache_control: str = _STATIC_CACHE_CONTROL, **kwargs):
        super().__init__(*args, **kwargs)
        self._cache_control = cache_control

    async def get_response(self, path: str, scope):  # type: ignore[override]
        response = await super().get_response(path, scope)
        # Only stamp content responses (full, partial, not-modified);
        # setdefault preserves any header the base class already set.
        if response.status_code in {200, 206, 304}:
            response.headers.setdefault("Cache-Control", self._cache_control)
        return response
|