codex-autorunner 0.1.2__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_autorunner/__init__.py +12 -1
- codex_autorunner/__main__.py +4 -0
- codex_autorunner/agents/codex/harness.py +1 -1
- codex_autorunner/agents/opencode/client.py +68 -35
- codex_autorunner/agents/opencode/constants.py +3 -0
- codex_autorunner/agents/opencode/harness.py +6 -1
- codex_autorunner/agents/opencode/logging.py +21 -5
- codex_autorunner/agents/opencode/run_prompt.py +1 -0
- codex_autorunner/agents/opencode/runtime.py +176 -47
- codex_autorunner/agents/opencode/supervisor.py +36 -48
- codex_autorunner/agents/registry.py +155 -8
- codex_autorunner/api.py +25 -0
- codex_autorunner/bootstrap.py +22 -37
- codex_autorunner/cli.py +5 -1156
- codex_autorunner/codex_cli.py +20 -84
- codex_autorunner/core/__init__.py +4 -0
- codex_autorunner/core/about_car.py +49 -32
- codex_autorunner/core/adapter_utils.py +21 -0
- codex_autorunner/core/app_server_ids.py +59 -0
- codex_autorunner/core/app_server_logging.py +7 -3
- codex_autorunner/core/app_server_prompts.py +27 -260
- codex_autorunner/core/app_server_threads.py +26 -28
- codex_autorunner/core/app_server_utils.py +165 -0
- codex_autorunner/core/archive.py +349 -0
- codex_autorunner/core/codex_runner.py +12 -2
- codex_autorunner/core/config.py +587 -103
- codex_autorunner/core/docs.py +10 -2
- codex_autorunner/core/drafts.py +136 -0
- codex_autorunner/core/engine.py +1531 -866
- codex_autorunner/core/exceptions.py +4 -0
- codex_autorunner/core/flows/__init__.py +25 -0
- codex_autorunner/core/flows/controller.py +202 -0
- codex_autorunner/core/flows/definition.py +82 -0
- codex_autorunner/core/flows/models.py +88 -0
- codex_autorunner/core/flows/reasons.py +52 -0
- codex_autorunner/core/flows/reconciler.py +131 -0
- codex_autorunner/core/flows/runtime.py +382 -0
- codex_autorunner/core/flows/store.py +568 -0
- codex_autorunner/core/flows/transition.py +138 -0
- codex_autorunner/core/flows/ux_helpers.py +257 -0
- codex_autorunner/core/flows/worker_process.py +242 -0
- codex_autorunner/core/git_utils.py +62 -0
- codex_autorunner/core/hub.py +136 -16
- codex_autorunner/core/locks.py +4 -0
- codex_autorunner/core/notifications.py +14 -2
- codex_autorunner/core/ports/__init__.py +28 -0
- codex_autorunner/core/ports/agent_backend.py +150 -0
- codex_autorunner/core/ports/backend_orchestrator.py +41 -0
- codex_autorunner/core/ports/run_event.py +91 -0
- codex_autorunner/core/prompt.py +15 -7
- codex_autorunner/core/redaction.py +29 -0
- codex_autorunner/core/review_context.py +5 -8
- codex_autorunner/core/run_index.py +6 -0
- codex_autorunner/core/runner_process.py +5 -2
- codex_autorunner/core/state.py +0 -88
- codex_autorunner/core/state_roots.py +57 -0
- codex_autorunner/core/supervisor_protocol.py +15 -0
- codex_autorunner/core/supervisor_utils.py +67 -0
- codex_autorunner/core/text_delta_coalescer.py +54 -0
- codex_autorunner/core/ticket_linter_cli.py +201 -0
- codex_autorunner/core/ticket_manager_cli.py +432 -0
- codex_autorunner/core/update.py +24 -16
- codex_autorunner/core/update_paths.py +28 -0
- codex_autorunner/core/update_runner.py +2 -0
- codex_autorunner/core/usage.py +164 -12
- codex_autorunner/core/utils.py +120 -11
- codex_autorunner/discovery.py +2 -4
- codex_autorunner/flows/review/__init__.py +17 -0
- codex_autorunner/{core/review.py → flows/review/service.py} +15 -10
- codex_autorunner/flows/ticket_flow/__init__.py +3 -0
- codex_autorunner/flows/ticket_flow/definition.py +98 -0
- codex_autorunner/integrations/agents/__init__.py +17 -0
- codex_autorunner/integrations/agents/backend_orchestrator.py +284 -0
- codex_autorunner/integrations/agents/codex_adapter.py +90 -0
- codex_autorunner/integrations/agents/codex_backend.py +448 -0
- codex_autorunner/integrations/agents/opencode_adapter.py +108 -0
- codex_autorunner/integrations/agents/opencode_backend.py +598 -0
- codex_autorunner/integrations/agents/runner.py +91 -0
- codex_autorunner/integrations/agents/wiring.py +271 -0
- codex_autorunner/integrations/app_server/client.py +583 -152
- codex_autorunner/integrations/app_server/env.py +2 -107
- codex_autorunner/{core/app_server_events.py → integrations/app_server/event_buffer.py} +15 -8
- codex_autorunner/integrations/app_server/supervisor.py +59 -33
- codex_autorunner/integrations/telegram/adapter.py +204 -165
- codex_autorunner/integrations/telegram/api_schemas.py +120 -0
- codex_autorunner/integrations/telegram/config.py +221 -0
- codex_autorunner/integrations/telegram/constants.py +17 -2
- codex_autorunner/integrations/telegram/dispatch.py +17 -0
- codex_autorunner/integrations/telegram/doctor.py +47 -0
- codex_autorunner/integrations/telegram/handlers/callbacks.py +7 -4
- codex_autorunner/integrations/telegram/handlers/commands/__init__.py +2 -0
- codex_autorunner/integrations/telegram/handlers/commands/execution.py +53 -57
- codex_autorunner/integrations/telegram/handlers/commands/files.py +2 -6
- codex_autorunner/integrations/telegram/handlers/commands/flows.py +1364 -0
- codex_autorunner/integrations/telegram/handlers/commands/formatting.py +1 -1
- codex_autorunner/integrations/telegram/handlers/commands/github.py +41 -582
- codex_autorunner/integrations/telegram/handlers/commands/workspace.py +8 -8
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +137 -478
- codex_autorunner/integrations/telegram/handlers/commands_spec.py +17 -4
- codex_autorunner/integrations/telegram/handlers/messages.py +121 -9
- codex_autorunner/integrations/telegram/handlers/selections.py +61 -1
- codex_autorunner/integrations/telegram/helpers.py +111 -16
- codex_autorunner/integrations/telegram/outbox.py +208 -37
- codex_autorunner/integrations/telegram/progress_stream.py +3 -10
- codex_autorunner/integrations/telegram/service.py +221 -42
- codex_autorunner/integrations/telegram/state.py +100 -2
- codex_autorunner/integrations/telegram/ticket_flow_bridge.py +611 -0
- codex_autorunner/integrations/telegram/transport.py +39 -4
- codex_autorunner/integrations/telegram/trigger_mode.py +53 -0
- codex_autorunner/manifest.py +2 -0
- codex_autorunner/plugin_api.py +22 -0
- codex_autorunner/routes/__init__.py +37 -67
- codex_autorunner/routes/agents.py +2 -137
- codex_autorunner/routes/analytics.py +3 -0
- codex_autorunner/routes/app_server.py +2 -131
- codex_autorunner/routes/base.py +2 -624
- codex_autorunner/routes/file_chat.py +7 -0
- codex_autorunner/routes/flows.py +7 -0
- codex_autorunner/routes/messages.py +7 -0
- codex_autorunner/routes/repos.py +2 -196
- codex_autorunner/routes/review.py +2 -147
- codex_autorunner/routes/sessions.py +2 -175
- codex_autorunner/routes/settings.py +2 -168
- codex_autorunner/routes/shared.py +2 -275
- codex_autorunner/routes/system.py +4 -188
- codex_autorunner/routes/usage.py +3 -0
- codex_autorunner/routes/voice.py +2 -119
- codex_autorunner/routes/workspace.py +3 -0
- codex_autorunner/server.py +3 -2
- codex_autorunner/static/agentControls.js +41 -11
- codex_autorunner/static/agentEvents.js +248 -0
- codex_autorunner/static/app.js +35 -24
- codex_autorunner/static/archive.js +826 -0
- codex_autorunner/static/archiveApi.js +37 -0
- codex_autorunner/static/autoRefresh.js +36 -8
- codex_autorunner/static/bootstrap.js +1 -0
- codex_autorunner/static/bus.js +1 -0
- codex_autorunner/static/cache.js +1 -0
- codex_autorunner/static/constants.js +20 -4
- codex_autorunner/static/dashboard.js +344 -325
- codex_autorunner/static/diffRenderer.js +37 -0
- codex_autorunner/static/docChatCore.js +324 -0
- codex_autorunner/static/docChatStorage.js +65 -0
- codex_autorunner/static/docChatVoice.js +65 -0
- codex_autorunner/static/docEditor.js +133 -0
- codex_autorunner/static/env.js +1 -0
- codex_autorunner/static/eventSummarizer.js +166 -0
- codex_autorunner/static/fileChat.js +182 -0
- codex_autorunner/static/health.js +155 -0
- codex_autorunner/static/hub.js +126 -185
- codex_autorunner/static/index.html +839 -863
- codex_autorunner/static/liveUpdates.js +1 -0
- codex_autorunner/static/loader.js +1 -0
- codex_autorunner/static/messages.js +873 -0
- codex_autorunner/static/mobileCompact.js +2 -1
- codex_autorunner/static/preserve.js +17 -0
- codex_autorunner/static/settings.js +149 -217
- codex_autorunner/static/smartRefresh.js +52 -0
- codex_autorunner/static/styles.css +8850 -3876
- codex_autorunner/static/tabs.js +175 -11
- codex_autorunner/static/terminal.js +32 -0
- codex_autorunner/static/terminalManager.js +34 -59
- codex_autorunner/static/ticketChatActions.js +333 -0
- codex_autorunner/static/ticketChatEvents.js +16 -0
- codex_autorunner/static/ticketChatStorage.js +16 -0
- codex_autorunner/static/ticketChatStream.js +264 -0
- codex_autorunner/static/ticketEditor.js +844 -0
- codex_autorunner/static/ticketVoice.js +9 -0
- codex_autorunner/static/tickets.js +1988 -0
- codex_autorunner/static/utils.js +43 -3
- codex_autorunner/static/voice.js +1 -0
- codex_autorunner/static/workspace.js +765 -0
- codex_autorunner/static/workspaceApi.js +53 -0
- codex_autorunner/static/workspaceFileBrowser.js +504 -0
- codex_autorunner/surfaces/__init__.py +5 -0
- codex_autorunner/surfaces/cli/__init__.py +6 -0
- codex_autorunner/surfaces/cli/cli.py +1224 -0
- codex_autorunner/surfaces/cli/codex_cli.py +20 -0
- codex_autorunner/surfaces/telegram/__init__.py +3 -0
- codex_autorunner/surfaces/web/__init__.py +1 -0
- codex_autorunner/surfaces/web/app.py +2019 -0
- codex_autorunner/surfaces/web/hub_jobs.py +192 -0
- codex_autorunner/surfaces/web/middleware.py +587 -0
- codex_autorunner/surfaces/web/pty_session.py +370 -0
- codex_autorunner/surfaces/web/review.py +6 -0
- codex_autorunner/surfaces/web/routes/__init__.py +78 -0
- codex_autorunner/surfaces/web/routes/agents.py +138 -0
- codex_autorunner/surfaces/web/routes/analytics.py +277 -0
- codex_autorunner/surfaces/web/routes/app_server.py +132 -0
- codex_autorunner/surfaces/web/routes/archive.py +357 -0
- codex_autorunner/surfaces/web/routes/base.py +615 -0
- codex_autorunner/surfaces/web/routes/file_chat.py +836 -0
- codex_autorunner/surfaces/web/routes/flows.py +1164 -0
- codex_autorunner/surfaces/web/routes/messages.py +459 -0
- codex_autorunner/surfaces/web/routes/repos.py +197 -0
- codex_autorunner/surfaces/web/routes/review.py +148 -0
- codex_autorunner/surfaces/web/routes/sessions.py +176 -0
- codex_autorunner/surfaces/web/routes/settings.py +169 -0
- codex_autorunner/surfaces/web/routes/shared.py +280 -0
- codex_autorunner/surfaces/web/routes/system.py +196 -0
- codex_autorunner/surfaces/web/routes/usage.py +89 -0
- codex_autorunner/surfaces/web/routes/voice.py +120 -0
- codex_autorunner/surfaces/web/routes/workspace.py +271 -0
- codex_autorunner/surfaces/web/runner_manager.py +25 -0
- codex_autorunner/surfaces/web/schemas.py +417 -0
- codex_autorunner/surfaces/web/static_assets.py +490 -0
- codex_autorunner/surfaces/web/static_refresh.py +86 -0
- codex_autorunner/surfaces/web/terminal_sessions.py +78 -0
- codex_autorunner/tickets/__init__.py +27 -0
- codex_autorunner/tickets/agent_pool.py +399 -0
- codex_autorunner/tickets/files.py +89 -0
- codex_autorunner/tickets/frontmatter.py +55 -0
- codex_autorunner/tickets/lint.py +102 -0
- codex_autorunner/tickets/models.py +97 -0
- codex_autorunner/tickets/outbox.py +244 -0
- codex_autorunner/tickets/replies.py +179 -0
- codex_autorunner/tickets/runner.py +881 -0
- codex_autorunner/tickets/spec_ingest.py +77 -0
- codex_autorunner/web/__init__.py +5 -1
- codex_autorunner/web/app.py +2 -1771
- codex_autorunner/web/hub_jobs.py +2 -191
- codex_autorunner/web/middleware.py +2 -587
- codex_autorunner/web/pty_session.py +2 -369
- codex_autorunner/web/runner_manager.py +2 -24
- codex_autorunner/web/schemas.py +2 -396
- codex_autorunner/web/static_assets.py +4 -484
- codex_autorunner/web/static_refresh.py +2 -85
- codex_autorunner/web/terminal_sessions.py +2 -77
- codex_autorunner/workspace/__init__.py +40 -0
- codex_autorunner/workspace/paths.py +335 -0
- codex_autorunner-1.1.0.dist-info/METADATA +154 -0
- codex_autorunner-1.1.0.dist-info/RECORD +308 -0
- {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.1.0.dist-info}/WHEEL +1 -1
- codex_autorunner/agents/execution/policy.py +0 -292
- codex_autorunner/agents/factory.py +0 -52
- codex_autorunner/agents/orchestrator.py +0 -358
- codex_autorunner/core/doc_chat.py +0 -1446
- codex_autorunner/core/snapshot.py +0 -580
- codex_autorunner/integrations/github/chatops.py +0 -268
- codex_autorunner/integrations/github/pr_flow.py +0 -1314
- codex_autorunner/routes/docs.py +0 -381
- codex_autorunner/routes/github.py +0 -327
- codex_autorunner/routes/runs.py +0 -250
- codex_autorunner/spec_ingest.py +0 -812
- codex_autorunner/static/docChatActions.js +0 -287
- codex_autorunner/static/docChatEvents.js +0 -300
- codex_autorunner/static/docChatRender.js +0 -205
- codex_autorunner/static/docChatStream.js +0 -361
- codex_autorunner/static/docs.js +0 -20
- codex_autorunner/static/docsClipboard.js +0 -69
- codex_autorunner/static/docsCrud.js +0 -257
- codex_autorunner/static/docsDocUpdates.js +0 -62
- codex_autorunner/static/docsDrafts.js +0 -16
- codex_autorunner/static/docsElements.js +0 -69
- codex_autorunner/static/docsInit.js +0 -285
- codex_autorunner/static/docsParse.js +0 -160
- codex_autorunner/static/docsSnapshot.js +0 -87
- codex_autorunner/static/docsSpecIngest.js +0 -263
- codex_autorunner/static/docsState.js +0 -127
- codex_autorunner/static/docsThreadRegistry.js +0 -44
- codex_autorunner/static/docsUi.js +0 -153
- codex_autorunner/static/docsVoice.js +0 -56
- codex_autorunner/static/github.js +0 -504
- codex_autorunner/static/logs.js +0 -678
- codex_autorunner/static/review.js +0 -157
- codex_autorunner/static/runs.js +0 -418
- codex_autorunner/static/snapshot.js +0 -124
- codex_autorunner/static/state.js +0 -94
- codex_autorunner/static/todoPreview.js +0 -27
- codex_autorunner/workspace.py +0 -16
- codex_autorunner-0.1.2.dist-info/METADATA +0 -249
- codex_autorunner-0.1.2.dist-info/RECORD +0 -222
- /codex_autorunner/{routes → surfaces/web/routes}/terminal_images.py +0 -0
- {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.1.0.dist-info}/entry_points.txt +0 -0
- {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.1.0.dist-info}/licenses/LICENSE +0 -0
- {codex_autorunner-0.1.2.dist-info → codex_autorunner-1.1.0.dist-info}/top_level.txt +0 -0
|
@@ -1,1314 +0,0 @@
|
|
|
1
|
-
from __future__ import annotations
|
|
2
|
-
|
|
3
|
-
import asyncio
|
|
4
|
-
import json
|
|
5
|
-
import logging
|
|
6
|
-
import os
|
|
7
|
-
import re
|
|
8
|
-
import threading
|
|
9
|
-
import time
|
|
10
|
-
import uuid
|
|
11
|
-
from dataclasses import dataclass
|
|
12
|
-
from pathlib import Path
|
|
13
|
-
from typing import Any, Optional
|
|
14
|
-
|
|
15
|
-
from ...agents.opencode.supervisor import OpenCodeSupervisor
|
|
16
|
-
from ...core.config import ConfigError
|
|
17
|
-
from ...core.doc_chat import DocChatService
|
|
18
|
-
from ...core.engine import Engine, LockError
|
|
19
|
-
from ...core.hub import HubSupervisor
|
|
20
|
-
from ...core.locks import (
|
|
21
|
-
FileLock,
|
|
22
|
-
FileLockBusy,
|
|
23
|
-
FileLockError,
|
|
24
|
-
process_alive,
|
|
25
|
-
read_lock_info,
|
|
26
|
-
write_lock_info,
|
|
27
|
-
)
|
|
28
|
-
from ...core.logging_utils import log_event
|
|
29
|
-
from ...core.state import now_iso
|
|
30
|
-
from ...core.utils import atomic_write, read_json
|
|
31
|
-
from ...manifest import ManifestRepo, load_manifest
|
|
32
|
-
from ...spec_ingest import SpecIngestError, SpecIngestService
|
|
33
|
-
from ..app_server.supervisor import WorkspaceAppServerSupervisor
|
|
34
|
-
from .service import GitHubService, parse_pr_input
|
|
35
|
-
|
|
36
|
-
# Schema version stamped into persisted PR-flow state files.
PR_FLOW_VERSION = 1

# Baseline configuration; user settings from `github.pr_flow` are merged on top.
DEFAULT_PR_FLOW_CONFIG: dict[str, Any] = {
    "enabled": True,
    "max_cycles": 3,
    "stop_condition": "no_issues",
    "max_implementation_runs": None,
    "max_wallclock_seconds": None,
    "review_wait_seconds": 1800,
    "review_poll_interval_seconds": 60,
    # Which review sources feed the feedback bundle.
    "review": {
        "include_codex": True,
        "include_github": True,
        "include_checks": True,
        "severity_threshold": "minor",
    },
    # Optional comment-driven operations on the PR.
    "chatops": {
        "enabled": False,
        "poll_interval_seconds": 60,
        "allow_users": [],
        "allow_associations": [],
        "ignore_bots": True,
    },
}

# Substrings that mark review feedback as minor / non-blocking.
REVIEW_MINOR_KEYWORDS = (
    "nit",
    "minor",
    "optional",
    "non-blocking",
    "non blocking",
    "suggestion",
)

# Substrings that mark review feedback as major / blocking.
REVIEW_MAJOR_KEYWORDS = (
    "blocker",
    "must",
    "required",
    "error",
    "fail",
    "security",
    "bug",
)
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
class PrFlowError(Exception):
    """Domain error for PR-flow operations, carrying an HTTP-style status code.

    Defaults to 400 (bad request); callers raise with 409 for conflicts
    such as "already running" or "lock held".
    """

    def __init__(self, message: str, *, status_code: int = 400):
        """Record *message* on the exception and remember *status_code*."""
        super().__init__(message)
        self.status_code = status_code
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
@dataclass
class PrFlowReviewSummary:
    """Aggregate counts of review feedback gathered for one PR-flow cycle."""

    total: int  # all feedback items seen
    major: int  # blocking items
    minor: int  # non-blocking items
    resolved: int  # items already marked resolved
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
def _merge_defaults(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
|
|
93
|
-
merged = json.loads(json.dumps(base))
|
|
94
|
-
for key, value in override.items():
|
|
95
|
-
if isinstance(value, dict) and isinstance(merged.get(key), dict):
|
|
96
|
-
merged[key] = _merge_defaults(merged[key], value)
|
|
97
|
-
else:
|
|
98
|
-
merged[key] = value
|
|
99
|
-
return merged
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
def _pr_flow_config(raw_config: dict[str, Any]) -> dict[str, Any]:
    """Extract the ``github.pr_flow`` section of *raw_config*, merged over defaults.

    Tolerates missing or malformed sections by treating them as empty dicts.
    """
    github_cfg = raw_config.get("github") if isinstance(raw_config, dict) else None
    if not isinstance(github_cfg, dict):
        github_cfg = {}
    pr_flow = github_cfg.get("pr_flow")
    if not isinstance(pr_flow, dict):
        pr_flow = {}
    return _merge_defaults(DEFAULT_PR_FLOW_CONFIG, pr_flow)
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
def _workflow_root(repo_root: Path) -> Path:
|
|
111
|
-
return repo_root / ".codex-autorunner" / "pr_flow"
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
def _default_state() -> dict[str, Any]:
    """Build a fresh, idle PR-flow state record with every known key present.

    Persisted state is overlaid on this template so older state files
    missing newer keys still load cleanly.
    """
    return {
        "version": PR_FLOW_VERSION,
        # Identity and lifecycle.
        "id": None,
        "status": "idle",
        "source": None,
        "source_meta": None,
        "mode": None,
        "step": None,
        # Issue / PR references.
        "issue": None,
        "pr": None,
        "issue_number": None,
        "issue_title": None,
        "issue_url": None,
        "pr_number": None,
        "pr_url": None,
        "base_branch": None,
        "head_branch": None,
        # Worktree placement.
        "worktree_repo_id": None,
        "worktree_path": None,
        # Cycle limits and stop policy.
        "cycle": 0,
        "max_cycles": None,
        "stop_condition": None,
        "draft": None,
        "max_implementation_runs": None,
        "max_wallclock_seconds": None,
        # Review tracking.
        "review_summary": None,
        "review_bundle_path": None,
        "review_bundle_json_path": None,
        "review_snapshot_index": 0,
        "review_last_seen_at": None,
        # Artifacts and error reporting.
        "workflow_log_path": None,
        "final_report_path": None,
        "last_error": None,
        "stop_requested": False,
        # Worker bookkeeping.
        "worker_id": None,
        "worker_pid": None,
        "worker_started_at": None,
        # Timestamps.
        "started_at": None,
        "updated_at": None,
        "finished_at": None,
    }
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
def _slugify(value: str, *, max_len: int = 48) -> str:
|
|
159
|
-
raw = re.sub(r"[^a-zA-Z0-9._-]+", "-", (value or "").strip().lower()).strip("-")
|
|
160
|
-
if not raw:
|
|
161
|
-
return "work"
|
|
162
|
-
return raw[:max_len].strip("-") or "work"
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
def _classify_review_text(text: str) -> str:
    """Classify review feedback as "minor" or "major" via keyword matching.

    Major keywords win over minor ones, and unrecognized text defaults to
    "major" so ambiguous feedback is never silently downgraded.
    """
    lowered = (text or "").lower()
    for word in REVIEW_MAJOR_KEYWORDS:
        if word in lowered:
            return "major"
    for word in REVIEW_MINOR_KEYWORDS:
        if word in lowered:
            return "minor"
    return "major"
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
def _normalize_stop_condition(value: Optional[str]) -> Optional[str]:
|
|
175
|
-
if not value:
|
|
176
|
-
return None
|
|
177
|
-
raw = value.strip().lower()
|
|
178
|
-
if raw in ("minor", "minor_only", "minor-only"):
|
|
179
|
-
return "minor_only"
|
|
180
|
-
if raw in ("clean", "no_issues", "no-issues"):
|
|
181
|
-
return "no_issues"
|
|
182
|
-
return raw
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
def _format_review_summary(summary: Optional[PrFlowReviewSummary]) -> Optional[dict]:
|
|
186
|
-
if summary is None:
|
|
187
|
-
return None
|
|
188
|
-
return {
|
|
189
|
-
"total": summary.total,
|
|
190
|
-
"major": summary.major,
|
|
191
|
-
"minor": summary.minor,
|
|
192
|
-
"resolved": summary.resolved,
|
|
193
|
-
}
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
def _safe_text(value: Any, limit: int = 400) -> str:
|
|
197
|
-
text = str(value or "").strip()
|
|
198
|
-
if len(text) <= limit:
|
|
199
|
-
return text
|
|
200
|
-
return text[: limit - 3] + "..."
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
def _normalize_review_snippet(value: Any, limit: int = 100) -> str:
|
|
204
|
-
text = str(value or "").strip()
|
|
205
|
-
text = re.sub(r"\s+", " ", text)
|
|
206
|
-
for marker in ("- ", "* ", "• ", "- ", "* ", "• "):
|
|
207
|
-
if text.startswith(marker):
|
|
208
|
-
text = text[len(marker) :]
|
|
209
|
-
break
|
|
210
|
-
text = text.strip()
|
|
211
|
-
if len(text) <= limit:
|
|
212
|
-
return text
|
|
213
|
-
return text[: limit - 3] + "..."
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
def _latest_review_timestamp(threads: list[dict[str, Any]]) -> Optional[str]:
|
|
217
|
-
latest: Optional[str] = None
|
|
218
|
-
for thread in threads:
|
|
219
|
-
if not isinstance(thread, dict):
|
|
220
|
-
continue
|
|
221
|
-
comments = thread.get("comments")
|
|
222
|
-
if not isinstance(comments, list):
|
|
223
|
-
continue
|
|
224
|
-
for comment in comments:
|
|
225
|
-
if not isinstance(comment, dict):
|
|
226
|
-
continue
|
|
227
|
-
created_at = comment.get("createdAt")
|
|
228
|
-
if not isinstance(created_at, str):
|
|
229
|
-
continue
|
|
230
|
-
if latest is None or created_at > latest:
|
|
231
|
-
latest = created_at
|
|
232
|
-
return latest
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
def _has_new_review_feedback(
|
|
236
|
-
threads: list[dict[str, Any]], since: Optional[str]
|
|
237
|
-
) -> bool:
|
|
238
|
-
if not since:
|
|
239
|
-
return bool(threads)
|
|
240
|
-
latest = _latest_review_timestamp(threads)
|
|
241
|
-
if not latest:
|
|
242
|
-
return False
|
|
243
|
-
return latest > since
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
class PrFlowManager:
|
|
247
|
-
def __init__(
|
|
248
|
-
self,
|
|
249
|
-
repo_root: Path,
|
|
250
|
-
*,
|
|
251
|
-
app_server_supervisor: Optional[WorkspaceAppServerSupervisor] = None,
|
|
252
|
-
opencode_supervisor: Optional[OpenCodeSupervisor] = None,
|
|
253
|
-
logger: Optional[logging.Logger] = None,
|
|
254
|
-
hub_root: Optional[Path] = None,
|
|
255
|
-
) -> None:
|
|
256
|
-
self.repo_root = repo_root
|
|
257
|
-
self._app_server_supervisor = app_server_supervisor
|
|
258
|
-
self._opencode_supervisor = opencode_supervisor
|
|
259
|
-
self._logger = logger or logging.getLogger("codex_autorunner.pr_flow")
|
|
260
|
-
self._hub_root = hub_root
|
|
261
|
-
self._state_path = _workflow_root(repo_root) / "state.json"
|
|
262
|
-
self._lock_path = repo_root / ".codex-autorunner" / "locks" / "pr_flow.lock"
|
|
263
|
-
self._events_path = _workflow_root(repo_root) / "events.jsonl"
|
|
264
|
-
self._thread: Optional[threading.Thread] = None
|
|
265
|
-
self._thread_lock = threading.Lock()
|
|
266
|
-
self._events_lock = threading.Lock()
|
|
267
|
-
self._stop_event = threading.Event()
|
|
268
|
-
self._lock_handle: Optional[FileLock] = None
|
|
269
|
-
self._config = _pr_flow_config(self._load_engine().config.raw)
|
|
270
|
-
|
|
271
|
-
def status(self) -> dict[str, Any]:
|
|
272
|
-
state = self._load_state()
|
|
273
|
-
lock_info = read_lock_info(self._lock_path)
|
|
274
|
-
lock_alive = bool(lock_info.pid and process_alive(lock_info.pid))
|
|
275
|
-
is_running = bool(self._thread and self._thread.is_alive()) or lock_alive
|
|
276
|
-
state["running"] = is_running
|
|
277
|
-
if state.get("status") in ("running", "stopping") and not is_running:
|
|
278
|
-
state["status"] = "stopped"
|
|
279
|
-
state["last_error"] = "Recovered from restart"
|
|
280
|
-
state["stop_requested"] = False
|
|
281
|
-
state["updated_at"] = now_iso()
|
|
282
|
-
self._save_state(state)
|
|
283
|
-
return state
|
|
284
|
-
|
|
285
|
-
def start(self, *, payload: dict[str, Any]) -> dict[str, Any]:
|
|
286
|
-
with self._thread_lock:
|
|
287
|
-
state = self.status()
|
|
288
|
-
if state.get("status") in ("running", "stopping"):
|
|
289
|
-
raise PrFlowError("PR flow already running", status_code=409)
|
|
290
|
-
if self._thread and self._thread.is_alive():
|
|
291
|
-
raise PrFlowError("PR flow already running", status_code=409)
|
|
292
|
-
if not self._config.get("enabled", True):
|
|
293
|
-
raise PrFlowError("PR flow disabled by config", status_code=409)
|
|
294
|
-
self._acquire_lock()
|
|
295
|
-
thread_started = False
|
|
296
|
-
try:
|
|
297
|
-
state = self._initialize_state(payload=payload)
|
|
298
|
-
self._stop_event.clear()
|
|
299
|
-
state["worker_id"] = uuid.uuid4().hex
|
|
300
|
-
state["worker_pid"] = os.getpid()
|
|
301
|
-
state["worker_started_at"] = now_iso()
|
|
302
|
-
self._save_state(state)
|
|
303
|
-
self._thread = threading.Thread(
|
|
304
|
-
target=self._run_flow, args=(state["id"],), daemon=True
|
|
305
|
-
)
|
|
306
|
-
self._thread.start()
|
|
307
|
-
thread_started = True
|
|
308
|
-
self._emit_event("start", state=state, message="PR flow started")
|
|
309
|
-
return state
|
|
310
|
-
finally:
|
|
311
|
-
if not thread_started:
|
|
312
|
-
self._release_lock()
|
|
313
|
-
|
|
314
|
-
def stop(self) -> dict[str, Any]:
|
|
315
|
-
self._stop_event.set()
|
|
316
|
-
state = self._load_state()
|
|
317
|
-
state["stop_requested"] = True
|
|
318
|
-
if state.get("status") in ("running", "stopping"):
|
|
319
|
-
state["status"] = "stopping"
|
|
320
|
-
state["updated_at"] = now_iso()
|
|
321
|
-
self._save_state(state)
|
|
322
|
-
self._emit_event("stop_requested", state=state)
|
|
323
|
-
return state
|
|
324
|
-
|
|
325
|
-
def resume(self) -> dict[str, Any]:
|
|
326
|
-
with self._thread_lock:
|
|
327
|
-
state = self._load_state()
|
|
328
|
-
if state.get("status") not in ("stopped", "failed", "idle"):
|
|
329
|
-
raise PrFlowError("PR flow cannot be resumed in the current state")
|
|
330
|
-
if self._thread and self._thread.is_alive():
|
|
331
|
-
raise PrFlowError("PR flow already running", status_code=409)
|
|
332
|
-
self._acquire_lock()
|
|
333
|
-
thread_started = False
|
|
334
|
-
try:
|
|
335
|
-
self._stop_event.clear()
|
|
336
|
-
state["status"] = "running"
|
|
337
|
-
state["stop_requested"] = False
|
|
338
|
-
state["updated_at"] = now_iso()
|
|
339
|
-
state["last_error"] = None
|
|
340
|
-
state["worker_id"] = uuid.uuid4().hex
|
|
341
|
-
state["worker_pid"] = os.getpid()
|
|
342
|
-
state["worker_started_at"] = now_iso()
|
|
343
|
-
self._save_state(state)
|
|
344
|
-
self._thread = threading.Thread(
|
|
345
|
-
target=self._run_flow, args=(state["id"],), daemon=True
|
|
346
|
-
)
|
|
347
|
-
self._thread.start()
|
|
348
|
-
thread_started = True
|
|
349
|
-
self._emit_event("resume", state=state, message="PR flow resumed")
|
|
350
|
-
return state
|
|
351
|
-
finally:
|
|
352
|
-
if not thread_started:
|
|
353
|
-
self._release_lock()
|
|
354
|
-
|
|
355
|
-
def collect_reviews(self) -> dict[str, Any]:
|
|
356
|
-
state = self._load_state()
|
|
357
|
-
if not state.get("worktree_path"):
|
|
358
|
-
raise PrFlowError("PR flow has no active worktree")
|
|
359
|
-
summary, bundle_path, _review_data = self._collect_reviews(state)
|
|
360
|
-
state["review_summary"] = _format_review_summary(summary)
|
|
361
|
-
state["review_bundle_path"] = bundle_path
|
|
362
|
-
state["updated_at"] = now_iso()
|
|
363
|
-
self._save_state(state)
|
|
364
|
-
return state
|
|
365
|
-
|
|
366
|
-
def chatops_config(self) -> dict[str, Any]:
|
|
367
|
-
return self._config.get("chatops", {})
|
|
368
|
-
|
|
369
|
-
def _load_engine(self, repo_root: Optional[Path] = None) -> Engine:
|
|
370
|
-
root = repo_root or self.repo_root
|
|
371
|
-
return Engine(root)
|
|
372
|
-
|
|
373
|
-
def _log_line(self, state: dict[str, Any], message: str) -> None:
|
|
374
|
-
workflow_dir = self._workflow_dir(state)
|
|
375
|
-
workflow_dir.mkdir(parents=True, exist_ok=True)
|
|
376
|
-
log_path = workflow_dir / "workflow.log"
|
|
377
|
-
line = f"[{now_iso()}] {message}\n"
|
|
378
|
-
try:
|
|
379
|
-
with log_path.open("a", encoding="utf-8") as handle:
|
|
380
|
-
handle.write(line)
|
|
381
|
-
except Exception:
|
|
382
|
-
return
|
|
383
|
-
state["workflow_log_path"] = log_path.as_posix()
|
|
384
|
-
state["updated_at"] = now_iso()
|
|
385
|
-
self._save_state(state)
|
|
386
|
-
self._emit_event("log", state=state, message=message)
|
|
387
|
-
|
|
388
|
-
def _load_state(self) -> dict[str, Any]:
|
|
389
|
-
state = read_json(self._state_path) or {}
|
|
390
|
-
if not isinstance(state, dict):
|
|
391
|
-
state = {}
|
|
392
|
-
base = _default_state()
|
|
393
|
-
base.update(state)
|
|
394
|
-
return base
|
|
395
|
-
|
|
396
|
-
def _save_state(self, state: dict[str, Any]) -> None:
|
|
397
|
-
self._state_path.parent.mkdir(parents=True, exist_ok=True)
|
|
398
|
-
atomic_write(self._state_path, json.dumps(state, indent=2) + "\n")
|
|
399
|
-
|
|
400
|
-
def _initialize_state(self, *, payload: dict[str, Any]) -> dict[str, Any]:
|
|
401
|
-
mode = str(payload.get("mode") or "issue").strip().lower()
|
|
402
|
-
issue = payload.get("issue")
|
|
403
|
-
pr = payload.get("pr")
|
|
404
|
-
if mode not in ("issue", "pr"):
|
|
405
|
-
raise PrFlowError("mode must be 'issue' or 'pr'")
|
|
406
|
-
if mode == "issue" and not issue:
|
|
407
|
-
raise PrFlowError("issue is required for issue mode")
|
|
408
|
-
if mode == "pr" and not pr:
|
|
409
|
-
raise PrFlowError("pr is required for pr mode")
|
|
410
|
-
workflow_id = uuid.uuid4().hex
|
|
411
|
-
state = _default_state()
|
|
412
|
-
state.update(
|
|
413
|
-
{
|
|
414
|
-
"id": workflow_id,
|
|
415
|
-
"status": "running",
|
|
416
|
-
"source": payload.get("source"),
|
|
417
|
-
"source_meta": payload.get("source_meta"),
|
|
418
|
-
"mode": mode,
|
|
419
|
-
"step": "preflight",
|
|
420
|
-
"issue": issue,
|
|
421
|
-
"pr": pr,
|
|
422
|
-
"draft": payload.get("draft"),
|
|
423
|
-
"base_branch": payload.get("base_branch"),
|
|
424
|
-
"stop_condition": _normalize_stop_condition(
|
|
425
|
-
payload.get("stop_condition")
|
|
426
|
-
),
|
|
427
|
-
"max_cycles": payload.get("max_cycles"),
|
|
428
|
-
"max_implementation_runs": payload.get("max_implementation_runs"),
|
|
429
|
-
"max_wallclock_seconds": payload.get("max_wallclock_seconds"),
|
|
430
|
-
"started_at": now_iso(),
|
|
431
|
-
"updated_at": now_iso(),
|
|
432
|
-
"finished_at": None,
|
|
433
|
-
"last_error": None,
|
|
434
|
-
"stop_requested": False,
|
|
435
|
-
}
|
|
436
|
-
)
|
|
437
|
-
state["workflow_log_path"] = (
|
|
438
|
-
_workflow_root(self.repo_root) / workflow_id / "workflow.log"
|
|
439
|
-
).as_posix()
|
|
440
|
-
state["final_report_path"] = (
|
|
441
|
-
_workflow_root(self.repo_root) / "final_report.md"
|
|
442
|
-
).as_posix()
|
|
443
|
-
self._save_state(state)
|
|
444
|
-
return state
|
|
445
|
-
|
|
446
|
-
def _workflow_dir(self, state: dict[str, Any]) -> Path:
    """Return the artifact directory for this workflow (``current`` if no id)."""
    wf_id = state.get("id") or "current"
    return _workflow_root(self.repo_root) / str(wf_id)
def _acquire_lock(self) -> None:
    """Take the PR-flow file lock, raising PrFlowError if it is busy.

    A busy lock maps to HTTP 409; other lock failures keep their message.
    Lock-info metadata is best-effort and never fails the acquisition.
    """
    self._lock_path.parent.mkdir(parents=True, exist_ok=True)
    lock = FileLock(self._lock_path)
    try:
        lock.acquire(blocking=False)
    except FileLockBusy as exc:
        raise PrFlowError("PR flow lock already held", status_code=409) from exc
    except FileLockError as exc:
        raise PrFlowError(str(exc)) from exc
    self._lock_handle = lock
    try:
        # Record holder pid/start time for diagnostics; failure here is
        # non-fatal because the lock itself is already held.
        write_lock_info(
            self._lock_path,
            os.getpid(),
            started_at=now_iso(),
            lock_file=lock.file,
        )
    except Exception:
        pass
def _release_lock(self) -> None:
    """Release the PR-flow lock and blank the lock file, swallowing errors."""
    handle = self._lock_handle
    if handle is not None:
        try:
            handle.release()
        except Exception:
            # Best-effort: a failed release must not mask the flow outcome.
            pass
        self._lock_handle = None
    try:
        # Truncate the lock file so stale holder info is not left behind.
        atomic_write(self._lock_path, "")
    except Exception:
        pass
def _should_stop(self) -> bool:
    """True when either the in-process stop event or the persisted
    ``stop_requested`` flag asks the flow to halt."""
    if self._stop_event.is_set():
        return True
    # Re-read persisted state so an external stop request is honored.
    return bool(self._load_state().get("stop_requested"))
def _emit_event(
    self,
    event: str,
    *,
    state: Optional[dict[str, Any]] = None,
    level: str = "info",
    message: Optional[str] = None,
    payload: Optional[dict[str, Any]] = None,
) -> None:
    """Append one JSONL record to the events file (best-effort).

    Optional state enriches the record with workflow id/status/step/cycle.
    Write failures are silently ignored — events are observability only.
    """
    record: dict[str, Any] = {"ts": now_iso(), "event": event, "level": level}
    if message:
        record["message"] = message
    if state:
        record["workflow_id"] = state.get("id")
        record["status"] = state.get("status")
        record["step"] = state.get("step")
        record["cycle"] = state.get("cycle")
    if payload:
        record["payload"] = payload
    with self._events_lock:
        try:
            self._events_path.parent.mkdir(parents=True, exist_ok=True)
            with self._events_path.open("a", encoding="utf-8") as handle:
                handle.write(json.dumps(record) + "\n")
        except Exception:
            return
def events_path(self) -> Path:
    """Expose the JSONL events file path for consumers (e.g. API streaming)."""
    return self._events_path
def _run_flow(self, workflow_id: str) -> None:
    """Drive one workflow to completion, recording outcome and releasing
    the lock no matter how execution ends.

    A stale ``workflow_id`` (state was replaced) is a silent no-op.
    """
    state = self._load_state()
    if state.get("id") != workflow_id:
        return
    try:
        self._log_line(state, "PR flow starting.")
        self._emit_event("status", state=state, message="running")
        self._execute_flow(state)
        # Only promote to "completed" if nothing downstream already set a
        # terminal status (stopped/failed).
        if state.get("status") == "running":
            state["status"] = "completed"
            state["finished_at"] = now_iso()
            self._log_line(state, "PR flow completed.")
            self._emit_event("status", state=state, message="completed")
    except Exception as exc:
        state["status"] = "failed"
        state["last_error"] = str(exc)
        state["finished_at"] = now_iso()
        self._log_line(state, f"PR flow failed: {exc}")
        self._emit_event("status", state=state, level="error", message="failed")
    finally:
        state["updated_at"] = now_iso()
        if state.get("status") in ("completed", "failed", "stopped"):
            # Clear the flag so a finished workflow does not look stoppable.
            state["stop_requested"] = False
        self._save_state(state)
        self._write_final_report(state)
        self._release_lock()
def _execute_flow(self, state: dict[str, Any]) -> None:
    """Run the ordered workflow steps, resuming from ``state['step']``.

    Stop requests are honored before each step and once after the loop.
    The spec/ingest steps only apply to issue mode.
    """
    steps = [
        "preflight",
        "resolve_base",
        "link",
        "create_worktree",
        "spec",
        "ingest",
        "implement",
        "sync_pr",
        "review_loop",
    ]
    # Resume support: an unknown recorded step restarts from the beginning.
    current = state.get("step")
    start_index = steps.index(current) if current in steps else 0

    dispatch = {
        "preflight": self._preflight,
        "resolve_base": self._resolve_base,
        "link": self._link_issue_or_pr,
        "create_worktree": self._create_worktree,
        "implement": self._run_implementation,
        "sync_pr": self._sync_pr,
        "review_loop": self._review_loop,
    }
    for step in steps[start_index:]:
        state["step"] = step
        state["status"] = "running"
        state["updated_at"] = now_iso()
        self._save_state(state)
        self._emit_event("step", state=state, message=f"step:{step}")
        if self._should_stop():
            self._mark_stopped(state)
            return
        if step == "spec":
            # SPEC generation only makes sense when starting from an issue.
            if state.get("mode") == "issue":
                self._generate_spec(state)
        elif step == "ingest":
            if state.get("mode") == "issue":
                self._ingest_spec(state)
        else:
            dispatch[step](state)
    if self._should_stop():
        self._mark_stopped(state)
        return
    if state.get("status") == "running":
        state["status"] = "completed"
        state["finished_at"] = now_iso()
        self._save_state(state)
def _mark_stopped(self, state: dict[str, Any]) -> None:
    """Persist a terminal ``stopped`` status and announce it as an event."""
    ts = now_iso()
    state["status"] = "stopped"
    state["updated_at"] = ts
    state["finished_at"] = ts
    # The request has been honored; clear it so a later run is not blocked.
    state["stop_requested"] = False
    self._save_state(state)
    self._emit_event("status", state=state, message="stopped")
def _preflight(self, state: dict[str, Any]) -> None:
    """Verify gh availability/auth and repo readiness before starting.

    Raises PrFlowError (with an HTTP-ish status code where meaningful)
    on any unmet precondition.
    """
    engine = self._load_engine()
    gh = GitHubService(engine.repo_root, raw_config=engine.config.raw)
    if not gh.gh_available():
        raise PrFlowError("GitHub CLI (gh) not available", status_code=500)
    if not gh.gh_authenticated():
        raise PrFlowError(
            "GitHub CLI not authenticated (run `gh auth login`)",
            status_code=401,
        )
    # A concurrently running autorunner would fight over the worktree.
    if engine.runner_pid():
        raise PrFlowError("Autorunner is active; stop it before starting PR flow")
    # Issue mode creates a new branch from a clean tree; PR mode reuses one.
    if state.get("mode") == "issue" and not gh.is_clean():
        raise PrFlowError(
            "Working tree has uncommitted changes; clean it before starting PR flow"
        )
    self._log_line(state, "Preflight ok.")
def _resolve_base(self, state: dict[str, Any]) -> None:
    """Pin the base branch: explicit override > repo default > ``main``.

    In PR mode without an override the base comes from the PR itself
    (set later by :meth:`_link_issue_or_pr`), so nothing is done here.
    """
    engine = self._load_engine()
    gh = GitHubService(engine.repo_root, raw_config=engine.config.raw)
    repo = gh.repo_info()
    override = (state.get("base_branch") or "").strip()
    if state.get("mode") == "pr" and not override:
        return
    resolved = override or repo.default_branch or "main"
    state["base_branch"] = resolved
    self._save_state(state)
    self._log_line(state, f"Base branch resolved: {resolved}")
def _link_issue_or_pr(self, state: dict[str, Any]) -> None:
    """Resolve the issue or PR reference via gh and cache its metadata.

    Issue mode records number/title/url; PR mode records number/url and,
    when available, the head branch and (if unset) the base branch.
    """
    engine = self._load_engine()
    gh = GitHubService(engine.repo_root, raw_config=engine.config.raw)
    mode = state.get("mode")
    if mode == "issue":
        link_state = gh.link_issue(str(state.get("issue") or ""))
        issue = link_state.get("issue") or {}
        state["issue_number"] = issue.get("number")
        state["issue_title"] = issue.get("title")
        state["issue_url"] = issue.get("url")
        state["updated_at"] = now_iso()
        self._save_state(state)
        return
    if mode == "pr":
        number, url, head_ref, base_ref = self._resolve_pr_input(
            gh, str(state.get("pr") or "")
        )
        state["pr_number"] = number
        state["pr_url"] = url
        if head_ref:
            state["head_branch"] = head_ref
        # Never clobber an explicitly configured base branch.
        if base_ref and not state.get("base_branch"):
            state["base_branch"] = base_ref
        state["updated_at"] = now_iso()
        self._save_state(state)
def _resolve_pr_input(
    self, gh: GitHubService, pr_ref: str
) -> tuple[int, Optional[str], Optional[str], Optional[str]]:
    """Turn a user PR reference (``#123``, ``123`` or URL/slug form) into
    ``(number, url, head_ref, base_ref)`` using gh.

    Raises PrFlowError when the reference points at a different repo.
    """
    raw = (pr_ref or "").strip().removeprefix("#").strip()
    if raw.isdigit():
        number = int(raw)
    else:
        slug, number = parse_pr_input(raw)
        repo = gh.repo_info()
        # Cross-repo PRs are not supported by this flow.
        if slug and slug.lower() != repo.name_with_owner.lower():
            raise PrFlowError(
                f"PR must be in this repo ({repo.name_with_owner}); got {slug}"
            )
    pr_obj = gh.pr_view(number=number)
    return (
        int(pr_obj.get("number") or number),
        pr_obj.get("url"),
        pr_obj.get("headRefName"),
        pr_obj.get("baseRefName"),
    )
def _resolve_base_repo(self, hub: HubSupervisor) -> ManifestRepo:
    """Find the manifest repo backing ``self.repo_root``.

    If the current root is itself a worktree, follow ``worktree_of`` back
    to its base repo.  Raises PrFlowError when no manifest entry matches.
    """
    manifest = load_manifest(hub.hub_config.manifest_path, hub.hub_config.root)
    target = self.repo_root.resolve()
    for entry in manifest.repos:
        if (hub.hub_config.root / entry.path).resolve() != target:
            continue
        if entry.kind == "worktree" and entry.worktree_of:
            base = manifest.get(entry.worktree_of)
            if base:
                return base
        return entry
    raise PrFlowError("Unable to resolve base repo for worktree creation")
def _ensure_hub(self) -> HubSupervisor:
    """Construct a HubSupervisor from the configured hub root (or the repo
    root as fallback), raising PrFlowError when no hub config exists."""
    if self._hub_root is not None:
        return HubSupervisor.from_path(self._hub_root)
    try:
        return HubSupervisor.from_path(self.repo_root)
    except (ConfigError, ValueError) as exc:
        raise PrFlowError(
            "Hub config not found; PR flow requires hub worktrees"
        ) from exc
def _create_worktree(self, state: dict[str, Any]) -> None:
    """Create (idempotently) the hub worktree this workflow operates in.

    Issue mode derives a ``car/issue-N-slug`` branch off the base branch;
    PR mode reuses the PR head branch when known, otherwise synthesizes
    ``car/pr-N-fix``.  Records the worktree path/branch in state.
    """
    if state.get("worktree_path"):
        return  # already created on a previous attempt/resume
    hub = self._ensure_hub()
    base_repo = self._resolve_base_repo(hub)
    base_repo_path = (hub.hub_config.root / base_repo.path).resolve()
    mode = state.get("mode")
    base_branch = state.get("base_branch") or "main"
    branch = None
    start_point = f"origin/{base_branch}"
    if mode == "issue":
        issue_number = int(state.get("issue_number") or 0)
        slug = _slugify(state.get("issue_title") or "")
        branch = f"car/issue-{issue_number}-{slug}"
    elif mode == "pr":
        branch = state.get("head_branch") or ""
        if branch:
            # Existing PR branch: start the worktree on its remote tip.
            start_point = f"origin/{branch}"
        else:
            pr_number = int(state.get("pr_number") or 0)
            branch = f"car/pr-{pr_number}-fix"
    if not branch:
        raise PrFlowError("Unable to determine branch name for worktree")
    if mode == "pr" and state.get("pr_number"):
        # Make sure the PR head ref is fetchable before creating from it.
        self._ensure_pr_head_available(
            base_repo_path,
            pr_number=int(state.get("pr_number") or 0),
            branch=branch,
        )
    snapshot = hub.create_worktree(
        base_repo_id=base_repo.id,
        branch=branch,
        force=False,
        start_point=start_point,
    )
    state["worktree_repo_id"] = snapshot.id
    state["worktree_path"] = snapshot.path.as_posix()
    state["head_branch"] = branch
    state["updated_at"] = now_iso()
    self._save_state(state)
    self._log_line(state, f"Worktree created: {snapshot.path}")
    worktree_root = snapshot.path
    if state.get("mode") == "issue" and state.get("issue"):
        # Re-link the issue inside the new worktree so its local state
        # carries the association too.
        engine = self._load_engine(worktree_root)
        gh = GitHubService(worktree_root, raw_config=engine.config.raw)
        gh.link_issue(str(state.get("issue")))
def _ensure_pr_head_available(
    self,
    base_repo_path: Path,
    *,
    pr_number: int,
    branch: str,
) -> None:
    """Fetch the PR head into the base repo, wrapping any failure in a
    PrFlowError so callers surface a uniform error type."""
    engine = self._load_engine(base_repo_path)
    gh = GitHubService(base_repo_path, raw_config=engine.config.raw)
    try:
        gh.ensure_pr_head(number=int(pr_number), branch=branch, cwd=base_repo_path)
    except Exception as exc:
        raise PrFlowError(f"Unable to fetch PR head: {exc}") from exc
def _generate_spec(self, state: dict[str, Any]) -> None:
    """Ask the doc-chat backend to draft a SPEC from the linked issue.

    Requires the app-server supervisor; runs the async chat call to
    completion on a private event loop and raises PrFlowError on a
    non-ok result.
    """
    if self._app_server_supervisor is None:
        raise PrFlowError("App-server backend is not configured")
    worktree_root = self._require_worktree_root(state)
    engine = self._load_engine(worktree_root)
    gh = GitHubService(worktree_root, raw_config=engine.config.raw)
    prompt, _link_state = gh.build_spec_prompt_from_issue(str(state.get("issue")))
    doc_chat = DocChatService(
        engine,
        app_server_supervisor=self._app_server_supervisor,
        app_server_events=None,
        opencode_supervisor=self._opencode_supervisor,
    )

    async def _run() -> dict:
        request = doc_chat.parse_request(
            {"message": prompt, "stream": False}, kind="spec"
        )
        # Serialize against other doc mutations.
        async with doc_chat.doc_lock():
            return await doc_chat.execute(request)

    outcome = asyncio.run(_run())
    if outcome.get("status") != "ok":
        raise PrFlowError(outcome.get("detail") or "SPEC generation failed")
    self._log_line(state, "SPEC generated from issue.")
def _ingest_spec(self, state: dict[str, Any]) -> None:
    """Ingest the generated SPEC into TODO/PROGRESS/OPINIONS and apply it.

    SpecIngestError is rewrapped as PrFlowError for uniform handling.
    """
    if self._app_server_supervisor is None:
        raise PrFlowError("App-server backend is not configured")
    worktree_root = self._require_worktree_root(state)
    engine = self._load_engine(worktree_root)
    ingest = SpecIngestService(
        engine,
        app_server_supervisor=self._app_server_supervisor,
        opencode_supervisor=self._opencode_supervisor,
    )

    async def _run() -> dict:
        # force=True: overwrite any previous ingest artifacts.
        await ingest.execute(force=True, spec_path=None, message=None)
        return ingest.apply_patch()

    try:
        asyncio.run(_run())
    except SpecIngestError as exc:
        raise PrFlowError(str(exc)) from exc
    self._log_line(state, "SPEC ingested into TODO/PROGRESS/OPINIONS.")
def _run_implementation(self, state: dict[str, Any]) -> None:
    """Run the autorunner implementation loop inside the worktree.

    Limits come from state first, then flow config; non-positive or
    unparseable run limits mean "unlimited".  The engine lock is held for
    the duration and its wallclock setting is restored afterwards.
    """
    worktree_root = self._require_worktree_root(state)
    engine = self._load_engine(worktree_root)

    max_runs = state.get("max_implementation_runs")
    if max_runs is None:
        max_runs = self._config.get("max_implementation_runs")
    try:
        if max_runs is not None and int(max_runs) <= 0:
            max_runs = None
    except (TypeError, ValueError):
        max_runs = None

    max_wallclock = state.get("max_wallclock_seconds")
    if max_wallclock is None:
        max_wallclock = self._config.get("max_wallclock_seconds")

    try:
        engine.acquire_lock(force=False)
    except LockError as exc:
        raise PrFlowError(str(exc)) from exc
    # Temporarily override the engine's wallclock cap; always restore it.
    prev_wallclock = engine.config.runner_max_wallclock_seconds
    if max_wallclock is not None:
        engine.config.runner_max_wallclock_seconds = int(max_wallclock)
    try:
        engine.clear_stop_request()
        engine.run_loop(
            stop_after_runs=int(max_runs) if max_runs is not None else None,
            external_stop_flag=self._stop_event,
        )
    finally:
        engine.config.runner_max_wallclock_seconds = prev_wallclock
        engine.release_lock()
    self._log_line(state, "Implementation loop completed.")
def _sync_pr(self, state: dict[str, Any]) -> None:
    """Create/update the PR for the worktree branch and record its number/URL.

    Draft-ness: explicit state value wins, otherwise the repo's
    ``github.pr_draft_default`` config (default True).
    """
    worktree_root = self._require_worktree_root(state)
    engine = self._load_engine(worktree_root)
    gh = GitHubService(worktree_root, raw_config=engine.config.raw)
    draft = state.get("draft")
    if draft is None:
        github_cfg = engine.config.raw.get("github") or {}
        draft = bool(github_cfg.get("pr_draft_default", True))
    result = gh.sync_pr(draft=bool(draft))
    pr = result.get("pr") if isinstance(result, dict) else None
    if isinstance(pr, dict):
        state["pr_number"] = pr.get("number")
        state["pr_url"] = pr.get("url")
        state["updated_at"] = now_iso()
        self._save_state(state)
    self._log_line(state, "PR synced.")
def _review_loop(self, state: dict[str, Any]) -> None:
    """Iterate review cycles: collect feedback, stop or fold it into TODO,
    re-run implementation, resync the PR, and wait for fresh feedback.

    Exit conditions: stop requested, no issues, ``minor_only`` stop
    condition satisfied, or the cycle budget exhausted.
    """
    max_cycles = state.get("max_cycles")
    if max_cycles is None:
        max_cycles = self._config.get("max_cycles", 1)
    try:
        max_cycles = max(1, int(max_cycles))
    except (TypeError, ValueError):
        max_cycles = 1
    stop_condition = _normalize_stop_condition(
        state.get("stop_condition")
        or self._config.get("stop_condition", "no_issues")
    )
    cycle = int(state.get("cycle") or 0)
    while cycle < int(max_cycles):
        if self._should_stop():
            self._mark_stopped(state)
            return
        cycle += 1
        state["cycle"] = cycle
        state["updated_at"] = now_iso()
        self._save_state(state)
        summary, bundle_path, review_data = self._collect_reviews(state)
        state["review_summary"] = _format_review_summary(summary)
        state["review_bundle_path"] = bundle_path
        state["updated_at"] = now_iso()
        self._save_state(state)
        self._emit_event(
            "review_summary",
            state=state,
            payload=_format_review_summary(summary) or {},
        )
        if summary.total == 0:
            self._log_line(state, "No review issues found.")
            return
        if stop_condition == "minor_only" and summary.major == 0:
            self._log_line(state, "Only minor issues remain; stopping.")
            return
        if cycle >= int(max_cycles):
            # Don't start a fix cycle we can't finish within the budget.
            self._log_line(state, "Max review cycles reached.")
            return
        self._apply_review_to_todo(state, bundle_path, summary, review_data)
        self._run_implementation(state)
        self._sync_pr(state)
        self._wait_for_review_feedback(state)
def _collect_reviews(
    self, state: dict[str, Any]
) -> tuple[PrFlowReviewSummary, Optional[str], dict[str, Any]]:
    """Gather review signals (GitHub threads, CI checks, Codex review),
    write markdown/JSON bundle snapshots, and return
    ``(summary, worktree_bundle_path, review_data)``.

    Each source can be disabled via the ``review`` config section.
    Raises PrFlowError when no PR number is recorded yet.
    """
    worktree_root = self._require_worktree_root(state)
    engine = self._load_engine(worktree_root)
    gh = GitHubService(worktree_root, raw_config=engine.config.raw)
    repo = gh.repo_info()
    owner, repo_name = repo.name_with_owner.split("/", 1)
    pr_number = state.get("pr_number")
    if not pr_number:
        raise PrFlowError("PR number not available for review collection")
    review_cfg = self._config.get("review", {})
    threads = []
    if review_cfg.get("include_github", True):
        threads = gh.pr_review_threads(
            owner=owner, repo=repo_name, number=int(pr_number)
        )
    checks = []
    if review_cfg.get("include_checks", True):
        checks = gh.pr_checks(number=int(pr_number))
    codex_review = None
    if review_cfg.get("include_codex", True):
        codex_review = self._run_codex_review(worktree_root, state)
    summary, lines = self._format_review_bundle(
        state, threads=threads, checks=checks, codex_review=codex_review
    )
    # Track the newest thread timestamp so later polling can detect new feedback.
    last_seen_at = _latest_review_timestamp(threads)
    review_snapshot_index = int(state.get("review_snapshot_index") or 0) + 1
    state["review_snapshot_index"] = review_snapshot_index
    if last_seen_at:
        state["review_last_seen_at"] = last_seen_at
    state["updated_at"] = now_iso()
    self._save_state(state)
    workflow_dir = self._workflow_dir(state)
    workflow_dir.mkdir(parents=True, exist_ok=True)
    bundle_path = workflow_dir / f"review_bundle_snapshot_{review_snapshot_index}.md"
    atomic_write(bundle_path, "\n".join(lines).rstrip() + "\n")
    self._log_line(state, f"Review bundle written: {bundle_path}")
    bundle_json_path = (
        workflow_dir / f"review_bundle_snapshot_{review_snapshot_index}.json"
    )
    # Mirror the bundle into the worktree so the agent can read it as context.
    worktree_context_dir = worktree_root / ".codex-autorunner" / "contexts"
    worktree_context_dir.mkdir(parents=True, exist_ok=True)
    # BUGFIX: the previous filename was a placeholder-free f-string
    # ("pr_(unknown)"), so every snapshot clobbered one oddly-named file.
    # NOTE(review): reconstructed as a snapshot-indexed name — confirm
    # against any consumer that globs .codex-autorunner/contexts/.
    worktree_bundle_path = (
        worktree_context_dir
        / f"pr_review_bundle_snapshot_{review_snapshot_index}.md"
    )
    atomic_write(worktree_bundle_path, "\n".join(lines).rstrip() + "\n")
    self._log_line(
        state, f"Review bundle written to worktree: {worktree_bundle_path}"
    )
    review_data = {
        "threads": threads,
        "checks": checks,
        "codex_review": codex_review,
        "summary": _format_review_summary(summary),
    }
    try:
        atomic_write(
            bundle_json_path,
            json.dumps(review_data, indent=2, sort_keys=True) + "\n",
        )
        state["review_bundle_json_path"] = bundle_json_path.as_posix()
        self._save_state(state)
        self._log_line(state, f"Review bundle JSON written: {bundle_json_path}")
    except Exception:
        # JSON snapshot is best-effort observability; keep the flow going.
        pass
    return summary, worktree_bundle_path.as_posix(), review_data
def _format_review_bundle(
    self,
    state: dict[str, Any],
    *,
    threads: list[dict[str, Any]],
    checks: list[dict[str, Any]],
    codex_review: Optional[str],
) -> tuple[PrFlowReviewSummary, list[str]]:
    """Render review inputs into a markdown bundle and tally severities.

    Only unresolved thread comments count toward major/minor; failing CI
    conclusions always count as major; every non-empty Codex review line
    is classified and counted.  Returns ``(summary, markdown_lines)``.
    """
    failing_conclusions = ("failure", "cancelled", "timed_out", "action_required")
    major = 0
    minor = 0
    resolved = 0
    sections: list[str] = []

    if threads:
        sections.append("## GitHub Review Threads")
        thread_idx = 0
        for thread in threads:
            if not isinstance(thread, dict):
                continue
            comments = thread.get("comments")
            if not isinstance(comments, list):
                continue
            thread_idx += 1
            status = "resolved" if thread.get("isResolved") else "unresolved"
            sections.append(f"- Thread {thread_idx} ({status})")
            if status == "resolved":
                resolved += 1
            for comment in comments:
                if not isinstance(comment, dict):
                    continue
                body = comment.get("body") or ""
                severity = _classify_review_text(body)
                # Resolved threads are listed but not counted as open work.
                if status != "resolved":
                    if severity == "minor":
                        minor += 1
                    else:
                        major += 1
                author = comment.get("author") or {}
                author_name = (
                    author.get("login")
                    if isinstance(author, dict)
                    else str(author or "unknown")
                )
                location = comment.get("path") or "(unknown file)"
                line_no = comment.get("line")
                if isinstance(line_no, int):
                    location = f"{location}:{line_no}"
                snippet = _safe_text(body, 200)
                sections.append(
                    f"  - [{severity}] {location} by {author_name}: {snippet}"
                )
        sections.append("")

    if checks:
        sections.append("## CI Checks")
        for check in checks:
            name = check.get("name") or "check"
            status = check.get("status") or "unknown"
            conclusion = check.get("conclusion") or "unknown"
            entry = f"- {name}: {status} ({conclusion})"
            url = check.get("details_url")
            if url:
                entry = f"{entry} - {url}"
            sections.append(entry)
            if conclusion in failing_conclusions:
                major += 1
        sections.append("")

    if codex_review:
        sections.append("## Codex Review")
        for raw_line in codex_review.splitlines():
            text = raw_line.strip()
            if not text:
                continue
            severity = _classify_review_text(text)
            if severity == "minor":
                minor += 1
            else:
                major += 1
            sections.append(f"- [{severity}] {text}")
        sections.append("")

    summary = PrFlowReviewSummary(
        total=major + minor, major=major, minor=minor, resolved=resolved
    )
    lines = [
        "# PR Flow Review Bundle",
        f"Workflow: {state.get('id')}",
        f"Cycle: {state.get('cycle')}",
        f"PR: {state.get('pr_url') or state.get('pr_number')}",
        "",
        "## Summary",
        f"- Total issues: {summary.total}",
        f"- Major: {summary.major}",
        f"- Minor: {summary.minor}",
        f"- Resolved threads: {summary.resolved}",
        "",
    ]
    lines.extend(sections)
    return summary, lines
def _apply_review_to_todo(
    self,
    state: dict[str, Any],
    bundle_path: Optional[str],
    summary: PrFlowReviewSummary,
    review_data: dict[str, Any],
) -> None:
    """Prepend a "Review Feedback Cycle N" checklist to the worktree TODO.

    Items come from unresolved GitHub comments, failing CI checks, and
    Codex review lines, filtered by the configured severity threshold
    (``minor`` keeps everything, ``major`` drops minor items).
    """
    worktree_root = self._require_worktree_root(state)
    engine = self._load_engine(worktree_root)
    todo_path = engine.config.doc_path("todo")
    existing = todo_path.read_text(encoding="utf-8") if todo_path.exists() else ""

    severity_threshold = self._config.get("review", {}).get(
        "severity_threshold", "minor"
    )

    items: list[str] = []

    # Unresolved GitHub review thread comments.
    threads = review_data.get("threads", [])
    for thread in threads:
        if not isinstance(thread, dict):
            continue
        if thread.get("isResolved"):
            continue
        comments = thread.get("comments")
        if not isinstance(comments, list):
            continue
        for comment in comments:
            if not isinstance(comment, dict):
                continue
            body = comment.get("body") or ""
            severity = _classify_review_text(body)

            if severity_threshold == "major" and severity == "minor":
                continue

            author = comment.get("author") or {}
            author_name = (
                author.get("login")
                if isinstance(author, dict)
                else str(author or "unknown")
            )
            location = comment.get("path") or "(unknown file)"
            line = comment.get("line")
            if isinstance(line, int):
                location = f"{location}:{line}"
            snippet = _normalize_review_snippet(body, 100)
            items.append(
                f"- [ ] Address review: {location} {snippet} ({author_name})"
            )

    # Failing CI checks are always major, so they are never filtered by the
    # severity threshold.  (BUGFIX: removed a dead per-check severity
    # comparison — severity was hard-coded to "major", so the "skip minor"
    # branch could never execute.)
    checks = review_data.get("checks", [])
    for check in checks:
        if not isinstance(check, dict):
            continue
        name = check.get("name") or "check"
        conclusion = check.get("conclusion") or "unknown"

        if conclusion not in (
            "failure",
            "cancelled",
            "timed_out",
            "action_required",
        ):
            continue

        details_url = check.get("details_url") or ""
        url_suffix = f" {details_url}" if details_url else ""
        items.append(f"- [ ] Fix failing check: {name} ({conclusion}){url_suffix}")

    # Codex review findings, one item per non-empty line.
    codex_review = review_data.get("codex_review")
    if codex_review:
        for raw_line in codex_review.splitlines():
            text = raw_line.strip()
            if not text:
                continue
            severity = _classify_review_text(text)

            if severity_threshold == "major" and severity == "minor":
                continue

            items.append(f"- [ ] Address Codex review: {text}")

    header = f"## Review Feedback Cycle {state.get('cycle')}"
    note = f"- Summary: {summary.total} issues ({summary.major} major, {summary.minor} minor)"
    bundle_line = (
        f"- Review bundle: {bundle_path}"
        if bundle_path
        else "- Review bundle: (missing)"
    )
    lines = [header, note, bundle_line]
    if items:
        lines.extend(items)

    # The new block is prepended so the freshest feedback sits on top.
    block = "\n".join(lines) + "\n"
    new_text = f"{block}{existing}" if existing else block
    atomic_write(todo_path, new_text)
    self._log_line(state, "Appended review feedback to TODO.")
def _run_codex_review(
    self, worktree_root: Path, state: dict[str, Any]
) -> Optional[str]:
    """Run an inline Codex review of the worktree against the base branch.

    Returns the joined agent messages, or None when the app-server is
    unavailable, the review produced no text, or anything failed (failures
    are logged at WARNING and swallowed — the flow continues without Codex
    input).
    """
    if self._app_server_supervisor is None:
        return None
    try:
        base_branch = state.get("base_branch") or "main"
        target = {"type": "baseBranch", "branch": base_branch}

        async def _run() -> str:
            client = await self._app_server_supervisor.get_client(worktree_root)
            turn = await client.review_start(
                thread_id=uuid.uuid4().hex,
                target=target,
                delivery="inline",
                cwd=str(worktree_root),
            )
            result = await turn.wait()
            if not result.agent_messages:
                return ""
            return "\n\n".join(result.agent_messages).strip()

        review_text = asyncio.run(_run())
        if review_text:
            self._log_line(state, "Codex review completed.")
        return review_text or None
    except Exception as exc:
        log_event(
            self._logger,
            logging.WARNING,
            "pr_flow.codex_review.failed",
            exc=exc,
        )
        return None
def _wait_for_review_feedback(self, state: dict[str, Any]) -> None:
    """Poll GitHub until fresh review feedback lands on the tracked PR.

    Returns as soon as new feedback is detected, the stop flag is raised,
    an error occurs while polling, or the configured wait budget elapses.
    """
    root = self._require_worktree_root(state)
    engine = self._load_engine(root)
    github = GitHubService(root, raw_config=engine.config.raw)
    pr_number = state.get("pr_number")
    if not pr_number:
        return
    wait_budget = int(self._config.get("review_wait_seconds", 1800) or 0)
    # Never poll faster than every 5 seconds, whatever the config says.
    interval = max(
        5, int(self._config.get("review_poll_interval_seconds", 60) or 60)
    )
    if wait_budget <= 0:
        return
    try:
        owner, repo_name = github.repo_info().name_with_owner.split("/", 1)
    except Exception as exc:
        self._log_line(state, f"Review wait skipped: {exc}")
        return
    last_seen = state.get("review_last_seen_at")
    started = time.monotonic()
    while time.monotonic() - started < wait_budget:
        if self._should_stop():
            self._mark_stopped(state)
            return
        try:
            threads = github.pr_review_threads(
                owner=owner, repo=repo_name, number=int(pr_number)
            )
        except Exception as exc:
            self._log_line(state, f"Review wait error: {exc}")
            return
        if _has_new_review_feedback(threads, last_seen):
            self._log_line(state, "New GitHub review feedback detected.")
            return
        time.sleep(interval)
    self._log_line(state, "Review wait timeout reached.")
|
|
1264
|
-
|
|
1265
|
-
def _write_final_report(self, state: dict[str, Any]) -> None:
|
|
1266
|
-
report_path = Path(state.get("final_report_path") or "")
|
|
1267
|
-
if not report_path:
|
|
1268
|
-
return
|
|
1269
|
-
try:
|
|
1270
|
-
report_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1271
|
-
lines = [
|
|
1272
|
-
"# PR Flow Final Report",
|
|
1273
|
-
"",
|
|
1274
|
-
f"- Workflow: {state.get('id')}",
|
|
1275
|
-
f"- Status: {state.get('status')}",
|
|
1276
|
-
f"- Mode: {state.get('mode')}",
|
|
1277
|
-
f"- Issue: {state.get('issue') or '–'}",
|
|
1278
|
-
f"- PR: {state.get('pr_url') or state.get('pr') or '–'}",
|
|
1279
|
-
f"- Started: {state.get('started_at') or '–'}",
|
|
1280
|
-
f"- Finished: {state.get('finished_at') or '–'}",
|
|
1281
|
-
"",
|
|
1282
|
-
"## Artifacts",
|
|
1283
|
-
f"- Workflow log: {state.get('workflow_log_path') or '–'}",
|
|
1284
|
-
f"- Review bundle: {state.get('review_bundle_path') or '–'}",
|
|
1285
|
-
f"- Review bundle JSON: {state.get('review_bundle_json_path') or '–'}",
|
|
1286
|
-
"",
|
|
1287
|
-
]
|
|
1288
|
-
summary = state.get("review_summary") or {}
|
|
1289
|
-
if isinstance(summary, dict) and summary:
|
|
1290
|
-
lines.extend(
|
|
1291
|
-
[
|
|
1292
|
-
"## Review Summary",
|
|
1293
|
-
f"- Total: {summary.get('total', 0)}",
|
|
1294
|
-
f"- Major: {summary.get('major', 0)}",
|
|
1295
|
-
f"- Minor: {summary.get('minor', 0)}",
|
|
1296
|
-
f"- Resolved: {summary.get('resolved', 0)}",
|
|
1297
|
-
"",
|
|
1298
|
-
]
|
|
1299
|
-
)
|
|
1300
|
-
if state.get("last_error"):
|
|
1301
|
-
lines.extend(["## Last Error", str(state.get("last_error")), ""])
|
|
1302
|
-
atomic_write(report_path, "\n".join(lines).rstrip() + "\n")
|
|
1303
|
-
except Exception:
|
|
1304
|
-
return
|
|
1305
|
-
|
|
1306
|
-
def _require_worktree_root(self, state: dict[str, Any]) -> Path:
|
|
1307
|
-
worktree_path = state.get("worktree_path")
|
|
1308
|
-
if not worktree_path:
|
|
1309
|
-
raise PrFlowError("Worktree not available")
|
|
1310
|
-
hub = self._ensure_hub()
|
|
1311
|
-
root = (hub.hub_config.root / worktree_path).resolve()
|
|
1312
|
-
if not root.exists():
|
|
1313
|
-
raise PrFlowError(f"Worktree path missing: {root}")
|
|
1314
|
-
return root
|