codex-autorunner 0.1.0 (codex_autorunner-0.1.0-py3-none-any.whl)
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- codex_autorunner/__init__.py +3 -0
- codex_autorunner/bootstrap.py +151 -0
- codex_autorunner/cli.py +886 -0
- codex_autorunner/codex_cli.py +79 -0
- codex_autorunner/codex_runner.py +17 -0
- codex_autorunner/core/__init__.py +1 -0
- codex_autorunner/core/about_car.py +125 -0
- codex_autorunner/core/codex_runner.py +100 -0
- codex_autorunner/core/config.py +1465 -0
- codex_autorunner/core/doc_chat.py +547 -0
- codex_autorunner/core/docs.py +37 -0
- codex_autorunner/core/engine.py +720 -0
- codex_autorunner/core/git_utils.py +206 -0
- codex_autorunner/core/hub.py +756 -0
- codex_autorunner/core/injected_context.py +9 -0
- codex_autorunner/core/locks.py +57 -0
- codex_autorunner/core/logging_utils.py +158 -0
- codex_autorunner/core/notifications.py +465 -0
- codex_autorunner/core/optional_dependencies.py +41 -0
- codex_autorunner/core/prompt.py +107 -0
- codex_autorunner/core/prompts.py +275 -0
- codex_autorunner/core/request_context.py +21 -0
- codex_autorunner/core/runner_controller.py +116 -0
- codex_autorunner/core/runner_process.py +29 -0
- codex_autorunner/core/snapshot.py +576 -0
- codex_autorunner/core/state.py +156 -0
- codex_autorunner/core/update.py +567 -0
- codex_autorunner/core/update_runner.py +44 -0
- codex_autorunner/core/usage.py +1221 -0
- codex_autorunner/core/utils.py +108 -0
- codex_autorunner/discovery.py +102 -0
- codex_autorunner/housekeeping.py +423 -0
- codex_autorunner/integrations/__init__.py +1 -0
- codex_autorunner/integrations/app_server/__init__.py +6 -0
- codex_autorunner/integrations/app_server/client.py +1386 -0
- codex_autorunner/integrations/app_server/supervisor.py +206 -0
- codex_autorunner/integrations/github/__init__.py +10 -0
- codex_autorunner/integrations/github/service.py +889 -0
- codex_autorunner/integrations/telegram/__init__.py +1 -0
- codex_autorunner/integrations/telegram/adapter.py +1401 -0
- codex_autorunner/integrations/telegram/commands_registry.py +104 -0
- codex_autorunner/integrations/telegram/config.py +450 -0
- codex_autorunner/integrations/telegram/constants.py +154 -0
- codex_autorunner/integrations/telegram/dispatch.py +162 -0
- codex_autorunner/integrations/telegram/handlers/__init__.py +0 -0
- codex_autorunner/integrations/telegram/handlers/approvals.py +241 -0
- codex_autorunner/integrations/telegram/handlers/callbacks.py +72 -0
- codex_autorunner/integrations/telegram/handlers/commands.py +160 -0
- codex_autorunner/integrations/telegram/handlers/commands_runtime.py +5262 -0
- codex_autorunner/integrations/telegram/handlers/messages.py +477 -0
- codex_autorunner/integrations/telegram/handlers/selections.py +545 -0
- codex_autorunner/integrations/telegram/helpers.py +2084 -0
- codex_autorunner/integrations/telegram/notifications.py +164 -0
- codex_autorunner/integrations/telegram/outbox.py +174 -0
- codex_autorunner/integrations/telegram/rendering.py +102 -0
- codex_autorunner/integrations/telegram/retry.py +37 -0
- codex_autorunner/integrations/telegram/runtime.py +270 -0
- codex_autorunner/integrations/telegram/service.py +921 -0
- codex_autorunner/integrations/telegram/state.py +1223 -0
- codex_autorunner/integrations/telegram/transport.py +318 -0
- codex_autorunner/integrations/telegram/types.py +57 -0
- codex_autorunner/integrations/telegram/voice.py +413 -0
- codex_autorunner/manifest.py +150 -0
- codex_autorunner/routes/__init__.py +53 -0
- codex_autorunner/routes/base.py +470 -0
- codex_autorunner/routes/docs.py +275 -0
- codex_autorunner/routes/github.py +197 -0
- codex_autorunner/routes/repos.py +121 -0
- codex_autorunner/routes/sessions.py +137 -0
- codex_autorunner/routes/shared.py +137 -0
- codex_autorunner/routes/system.py +175 -0
- codex_autorunner/routes/terminal_images.py +107 -0
- codex_autorunner/routes/voice.py +128 -0
- codex_autorunner/server.py +23 -0
- codex_autorunner/spec_ingest.py +113 -0
- codex_autorunner/static/app.js +95 -0
- codex_autorunner/static/autoRefresh.js +209 -0
- codex_autorunner/static/bootstrap.js +105 -0
- codex_autorunner/static/bus.js +23 -0
- codex_autorunner/static/cache.js +52 -0
- codex_autorunner/static/constants.js +48 -0
- codex_autorunner/static/dashboard.js +795 -0
- codex_autorunner/static/docs.js +1514 -0
- codex_autorunner/static/env.js +99 -0
- codex_autorunner/static/github.js +168 -0
- codex_autorunner/static/hub.js +1511 -0
- codex_autorunner/static/index.html +622 -0
- codex_autorunner/static/loader.js +28 -0
- codex_autorunner/static/logs.js +690 -0
- codex_autorunner/static/mobileCompact.js +300 -0
- codex_autorunner/static/snapshot.js +116 -0
- codex_autorunner/static/state.js +87 -0
- codex_autorunner/static/styles.css +4966 -0
- codex_autorunner/static/tabs.js +50 -0
- codex_autorunner/static/terminal.js +21 -0
- codex_autorunner/static/terminalManager.js +3535 -0
- codex_autorunner/static/todoPreview.js +25 -0
- codex_autorunner/static/types.d.ts +8 -0
- codex_autorunner/static/utils.js +597 -0
- codex_autorunner/static/vendor/LICENSE.xterm +24 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-400-cyrillic-ext.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-400-cyrillic.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-400-greek.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-400-latin-ext.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-400-latin.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-400-vietnamese.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-500-cyrillic-ext.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-500-cyrillic.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-500-greek.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-500-latin-ext.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-500-latin.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-500-vietnamese.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-600-cyrillic-ext.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-600-cyrillic.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-600-greek.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-600-latin-ext.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-600-latin.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/JetBrainsMono-600-vietnamese.woff2 +0 -0
- codex_autorunner/static/vendor/fonts/jetbrains-mono/OFL.txt +93 -0
- codex_autorunner/static/vendor/xterm-addon-fit.js +2 -0
- codex_autorunner/static/vendor/xterm.css +209 -0
- codex_autorunner/static/vendor/xterm.js +2 -0
- codex_autorunner/static/voice.js +591 -0
- codex_autorunner/voice/__init__.py +39 -0
- codex_autorunner/voice/capture.py +349 -0
- codex_autorunner/voice/config.py +167 -0
- codex_autorunner/voice/provider.py +66 -0
- codex_autorunner/voice/providers/__init__.py +7 -0
- codex_autorunner/voice/providers/openai_whisper.py +345 -0
- codex_autorunner/voice/resolver.py +36 -0
- codex_autorunner/voice/service.py +210 -0
- codex_autorunner/web/__init__.py +1 -0
- codex_autorunner/web/app.py +1037 -0
- codex_autorunner/web/hub_jobs.py +181 -0
- codex_autorunner/web/middleware.py +552 -0
- codex_autorunner/web/pty_session.py +357 -0
- codex_autorunner/web/runner_manager.py +25 -0
- codex_autorunner/web/schemas.py +253 -0
- codex_autorunner/web/static_assets.py +430 -0
- codex_autorunner/web/terminal_sessions.py +78 -0
- codex_autorunner/workspace.py +16 -0
- codex_autorunner-0.1.0.dist-info/METADATA +240 -0
- codex_autorunner-0.1.0.dist-info/RECORD +147 -0
- codex_autorunner-0.1.0.dist-info/WHEEL +5 -0
- codex_autorunner-0.1.0.dist-info/entry_points.txt +3 -0
- codex_autorunner-0.1.0.dist-info/licenses/LICENSE +21 -0
- codex_autorunner-0.1.0.dist-info/top_level.txt +1 -0
codex_autorunner/integrations/github/service.py
@@ -0,0 +1,889 @@
import json
import re
import subprocess
import time
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Optional, Tuple

from ...core.git_utils import (
    git_branch,
    git_is_clean,
)
from ...core.injected_context import wrap_injected_context
from ...core.prompts import build_github_issue_to_spec_prompt, build_sync_agent_prompt
from ...core.utils import (
    atomic_write,
    read_json,
    resolve_executable,
    subprocess_env,
)


class GitHubError(Exception):
    def __init__(self, message: str, *, status_code: int = 400):
        super().__init__(message)
        self.status_code = status_code


def _now_ms() -> int:
    return int(time.time() * 1000)


def _json_dumps(obj: object) -> str:
    return json.dumps(obj, indent=2, sort_keys=True) + "\n"


def _run(
    args: list[str],
    *,
    cwd: Path,
    timeout_seconds: int = 30,
    check: bool = True,
    env: Optional[dict[str, str]] = None,
) -> subprocess.CompletedProcess[str]:
    try:
        proc = subprocess.run(
            args,
            cwd=str(cwd),
            text=True,
            capture_output=True,
            timeout=timeout_seconds,
            env=env or subprocess_env(),
            check=False,
        )
    except FileNotFoundError as exc:
        raise GitHubError(f"Missing binary: {args[0]}", status_code=500) from exc
    except subprocess.TimeoutExpired as exc:
        raise GitHubError(
            f"Command timed out: {' '.join(args)}", status_code=504
        ) from exc

    if check and proc.returncode != 0:
        stderr = (proc.stderr or "").strip()
        stdout = (proc.stdout or "").strip()
        detail = stderr or stdout or f"exit {proc.returncode}"
        raise GitHubError(
            f"Command failed: {' '.join(args)}: {detail}", status_code=400
        )
    return proc


def _tail_lines(text: str, *, max_lines: int = 60, max_chars: int = 6000) -> str:
    raw = (text or "").strip()
    if not raw:
        return ""
    lines = raw.splitlines()
    tail = "\n".join(lines[-max_lines:])
    if len(tail) > max_chars:
        return tail[-max_chars:]
    return tail


def _sanitize_cmd(args: list[str]) -> str:
    # Best-effort sanitization: redact obvious tokens if ever present.
    redacted: list[str] = []
    for a in args:
        if any(
            k in a.lower() for k in ("token", "apikey", "api_key", "password", "secret")
        ):
            redacted.append("<redacted>")
        else:
            redacted.append(a)
    return " ".join(redacted)


def _get_nested(d: Any, *keys: str, default: Any = None) -> Any:
    cur: Any = d
    for k in keys:
        if not isinstance(cur, dict):
            return default
        cur = cur.get(k)
    return cur if cur is not None else default


def _run_codex_sync_agent(
    *,
    repo_root: Path,
    raw_config: dict,
    prompt: str,
) -> None:
    codex_cfg = raw_config.get("codex") if isinstance(raw_config, dict) else None
    codex_cfg = codex_cfg if isinstance(codex_cfg, dict) else {}
    binary = str(codex_cfg.get("binary") or "codex")
    base_args_raw = codex_cfg.get("args")
    base_args = base_args_raw if isinstance(base_args_raw, list) else []

    # Strip any existing --model flags from base args to avoid ambiguity; this flow
    # deliberately uses the configured "small" model (or no model when unset).
    cleaned_args: list[str] = []
    skip_next = False
    for a in [str(x) for x in base_args]:
        if skip_next:
            skip_next = False
            continue
        if a == "--model":
            skip_next = True
            continue
        cleaned_args.append(a)

    # Use the "small" model for this use-case when configured; if unset/null, omit --model.
    models = _get_nested(raw_config, "codex", "models", default=None)
    if isinstance(models, dict) and "small" in models:
        model_small = models.get("small")
    else:
        model_small = "gpt-5.1-codex-mini"
    model_flag: list[str] = ["--model", str(model_small)] if model_small else []

    cmd = [binary, *model_flag, *cleaned_args, prompt]

    github_cfg = raw_config.get("github") if isinstance(raw_config, dict) else None
    github_cfg = github_cfg if isinstance(github_cfg, dict) else {}
    timeout_seconds = int(github_cfg.get("sync_agent_timeout_seconds", 1800))

    try:
        proc = subprocess.run(
            cmd,
            cwd=str(repo_root),
            text=True,
            capture_output=True,
            timeout=timeout_seconds,
            env=subprocess_env(),
            check=False,
        )
    except FileNotFoundError as exc:
        raise GitHubError(f"Missing binary: {binary}", status_code=500) from exc
    except subprocess.TimeoutExpired as exc:
        raise GitHubError(
            f"Codex sync agent timed out after {timeout_seconds}s: {_sanitize_cmd(cmd[:-1])}",
            status_code=504,
        ) from exc

    if proc.returncode != 0:
        stdout_tail = _tail_lines(proc.stdout or "")
        stderr_tail = _tail_lines(proc.stderr or "")
        detail = stderr_tail or stdout_tail or f"exit {proc.returncode}"
        raise GitHubError(
            "Codex sync agent failed.\n"
            f"cmd: {_sanitize_cmd(cmd[:-1])}\n"
            f"detail:\n{detail}",
            status_code=400,
        )


@dataclass
class RepoInfo:
    name_with_owner: str
    url: str
    default_branch: Optional[str] = None


def _parse_repo_info(payload: dict) -> RepoInfo:
    name = payload.get("nameWithOwner") or ""
    url = payload.get("url") or ""
    default_ref = payload.get("defaultBranchRef") or {}
    default_branch = default_ref.get("name") if isinstance(default_ref, dict) else None
    if not name or not url:
        raise GitHubError("Unable to determine GitHub repo (missing nameWithOwner/url)")
    return RepoInfo(
        name_with_owner=str(name), url=str(url), default_branch=default_branch
    )


ISSUE_URL_RE = re.compile(
    r"^https?://github\.com/(?P<owner>[^/]+)/(?P<repo>[^/]+)/issues/(?P<num>\d+)(?:[/?#].*)?$"
)
PR_URL_RE = re.compile(
    r"^https?://github\.com/(?P<owner>[^/]+)/(?P<repo>[^/]+)/pull/(?P<num>\d+)(?:[/?#].*)?$"
)
GITHUB_LINK_RE = re.compile(
    r"https?://github\.com/[^/\s]+/[^/\s]+/(?:issues|pull)/\d+(?:[/?#][^\s]*)?"
)


def parse_issue_input(issue: str) -> Tuple[Optional[str], int]:
    """
    Returns (repo_slug_or_none, issue_number).
    Accepts:
    - "123"
    - "https://github.com/org/repo/issues/123"
    """
    raw = (issue or "").strip()
    if not raw:
        raise GitHubError("issue is required", status_code=400)
    if raw.isdigit():
        return None, int(raw)
    m = ISSUE_URL_RE.match(raw)
    if not m:
        raise GitHubError(
            "Invalid issue reference (expected issue number or GitHub issue URL)"
        )
    slug = f"{m.group('owner')}/{m.group('repo')}"
    return slug, int(m.group("num"))


def parse_github_url(url: str) -> Optional[tuple[str, str, int]]:
    raw = (url or "").strip()
    if not raw:
        return None
    m = ISSUE_URL_RE.match(raw)
    if m:
        slug = f"{m.group('owner')}/{m.group('repo')}"
        return slug, "issue", int(m.group("num"))
    m = PR_URL_RE.match(raw)
    if m:
        slug = f"{m.group('owner')}/{m.group('repo')}"
        return slug, "pr", int(m.group("num"))
    return None


def find_github_links(text: str) -> list[str]:
    raw = text or ""
    return [m.group(0) for m in GITHUB_LINK_RE.finditer(raw)]


class GitHubService:
    def __init__(self, repo_root: Path, raw_config: Optional[dict] = None):
        self.repo_root = repo_root
        self.raw_config = raw_config or {}
        self.github_path = repo_root / ".codex-autorunner" / "github.json"
        self.gh_path, self.gh_override = self._load_gh_path()

    def _load_gh_path(self) -> tuple[str, bool]:
        cfg = self.raw_config if isinstance(self.raw_config, dict) else {}
        github_cfg_raw = cfg.get("github")
        github_cfg: dict[str, Any] = (
            github_cfg_raw if isinstance(github_cfg_raw, dict) else {}
        )
        gh_path = github_cfg.get("gh_path")
        override = str(gh_path).strip() if isinstance(gh_path, str) and gh_path else ""
        return override or "gh", bool(override)

    def _gh(
        self,
        args: list[str],
        *,
        cwd: Optional[Path] = None,
        timeout_seconds: int = 30,
        check: bool = True,
    ) -> subprocess.CompletedProcess[str]:
        try:
            return _run(
                [self.gh_path] + args,
                cwd=cwd or self.repo_root,
                timeout_seconds=timeout_seconds,
                check=check,
            )
        except GitHubError as exc:
            if "Missing binary:" in str(exc):
                raise GitHubError(
                    "GitHub CLI (gh) not available", status_code=500
                ) from exc
            raise

    # ── persistence ────────────────────────────────────────────────────────────
    def read_link_state(self) -> dict:
        return read_json(self.github_path) or {}

    def write_link_state(self, data: dict) -> dict:
        payload = dict(data)
        payload.setdefault("updatedAtMs", _now_ms())
        atomic_write(self.github_path, _json_dumps(payload))
        return payload

    # ── capability/status ──────────────────────────────────────────────────────
    def gh_available(self) -> bool:
        return resolve_executable(self.gh_path) is not None

    def gh_authenticated(self) -> bool:
        if not self.gh_available():
            return False
        proc = self._gh(["auth", "status"], check=False, timeout_seconds=10)
        return proc.returncode == 0

    def repo_info(self) -> RepoInfo:
        proc = self._gh(
            ["repo", "view", "--json", "nameWithOwner,url,defaultBranchRef"],
            timeout_seconds=15,
            check=True,
        )
        try:
            payload = json.loads(proc.stdout or "{}")
        except json.JSONDecodeError as exc:
            raise GitHubError(
                "Unable to parse gh repo view output", status_code=500
            ) from exc
        return _parse_repo_info(payload)

    def current_branch(self, *, cwd: Optional[Path] = None) -> str:
        branch = git_branch(cwd or self.repo_root)
        return branch or "HEAD"

    def is_clean(self, *, cwd: Optional[Path] = None) -> bool:
        return git_is_clean(cwd or self.repo_root)

    def pr_for_branch(
        self, *, branch: str, cwd: Optional[Path] = None
    ) -> Optional[dict]:
        cwd = cwd or self.repo_root
        proc = self._gh(
            [
                "pr",
                "view",
                "--json",
                "number,url,state,isDraft,title,headRefName,baseRefName",
            ],
            cwd=cwd,
            check=False,
            timeout_seconds=15,
        )
        if proc.returncode == 0:
            try:
                return json.loads(proc.stdout or "{}") or None
            except json.JSONDecodeError:
                return None
        proc2 = self._gh(
            [
                "pr",
                "list",
                "--head",
                branch,
                "--limit",
                "1",
                "--json",
                "number,url,state,isDraft,title,headRefName,baseRefName",
            ],
            cwd=cwd,
            check=False,
            timeout_seconds=15,
        )
        if proc2.returncode != 0:
            return None
        try:
            arr = json.loads(proc2.stdout or "[]") or []
        except json.JSONDecodeError:
            return None
        return arr[0] if arr else None

    def issue_view(self, *, number: int, cwd: Optional[Path] = None) -> dict:
        proc = self._gh(
            [
                "issue",
                "view",
                str(number),
                "--json",
                "number,url,title,body,state,author,labels,comments",
            ],
            cwd=cwd or self.repo_root,
            check=True,
            timeout_seconds=20,
        )
        try:
            payload = json.loads(proc.stdout or "{}")
        except json.JSONDecodeError as exc:
            raise GitHubError(
                "Unable to parse gh issue view output", status_code=500
            ) from exc
        return payload if isinstance(payload, dict) else {}

    def validate_issue_same_repo(self, issue_ref: str) -> int:
        repo = self.repo_info()
        slug_from_input, num = parse_issue_input(issue_ref)
        if slug_from_input and slug_from_input.lower() != repo.name_with_owner.lower():
            raise GitHubError(
                f"Issue must be in this repo ({repo.name_with_owner}); got {slug_from_input}",
                status_code=400,
            )
        return num

    def pr_view(self, *, number: int, cwd: Optional[Path] = None) -> dict:
        proc = self._gh(
            [
                "pr",
                "view",
                str(number),
                "--json",
                "number,url,title,body,state,author,labels,files,additions,deletions,changedFiles",
            ],
            cwd=cwd or self.repo_root,
            check=True,
            timeout_seconds=30,
        )
        try:
            payload = json.loads(proc.stdout or "{}")
        except json.JSONDecodeError as exc:
            raise GitHubError(
                "Unable to parse gh pr view output", status_code=500
            ) from exc
        return payload if isinstance(payload, dict) else {}

    def pr_review_threads(
        self,
        *,
        owner: str,
        repo: str,
        number: int,
        cwd: Optional[Path] = None,
    ) -> list[dict[str, Any]]:
        query = (
            "query($owner:String!,$repo:String!,$number:Int!){"
            "repository(owner:$owner,name:$repo){"
            "pullRequest(number:$number){"
            "reviewThreads(first:50){"
            "nodes{isResolved comments(first:20){nodes{author{login} body path line createdAt}}}"
            "}"
            "}"
            "}"
            "}"
        )
        proc = self._gh(
            [
                "api",
                "graphql",
                "-f",
                f"query={query}",
                "-F",
                f"owner={owner}",
                "-F",
                f"repo={repo}",
                "-F",
                f"number={int(number)}",
            ],
            cwd=cwd or self.repo_root,
            check=False,
            timeout_seconds=30,
        )
        if proc.returncode != 0:
            return []
        try:
            payload = json.loads(proc.stdout or "{}")
        except json.JSONDecodeError:
            return []
        nodes = _get_nested(
            payload, "data", "repository", "pullRequest", "reviewThreads", "nodes"
        )
        if not isinstance(nodes, list):
            return []
        threads: list[dict[str, Any]] = []
        for node in nodes:
            if not isinstance(node, dict):
                continue
            comments_nodes = _get_nested(node, "comments", "nodes")
            comments: list[dict[str, Any]] = []
            if isinstance(comments_nodes, list):
                for comment in comments_nodes:
                    if not isinstance(comment, dict):
                        continue
                    comments.append(
                        {
                            "author": comment.get("author"),
                            "body": comment.get("body"),
                            "path": comment.get("path"),
                            "line": comment.get("line"),
                            "createdAt": comment.get("createdAt"),
                        }
                    )
            threads.append({"isResolved": node.get("isResolved"), "comments": comments})
        return threads

    def build_context_file_from_url(self, url: str) -> Optional[dict]:
        parsed = parse_github_url(url)
        if not parsed:
            return None
        if not self.gh_available():
            return None
        if not self.gh_authenticated():
            return None
        slug, kind, number = parsed
        repo = self.repo_info()
        if slug.lower() != repo.name_with_owner.lower():
            return None

        if kind == "issue":
            issue_obj = self.issue_view(number=number)
            lines = _format_issue_context(issue_obj, repo=repo.name_with_owner)
        else:
            pr_obj = self.pr_view(number=number)
            owner, repo_name = repo.name_with_owner.split("/", 1)
            review_threads = self.pr_review_threads(
                owner=owner, repo=repo_name, number=number
            )
            lines = _format_pr_context(
                pr_obj, repo=repo.name_with_owner, review_threads=review_threads
            )

        rel_dir = Path(".codex-autorunner") / "github_context"
        abs_dir = self.repo_root / rel_dir
        abs_dir.mkdir(parents=True, exist_ok=True)
        filename = f"{kind}-{int(number)}.md"
        rel_path = rel_dir / filename
        abs_path = self.repo_root / rel_path
        atomic_write(abs_path, "\n".join(lines).rstrip() + "\n")

        hint = wrap_injected_context(
            "Context: see "
            f"{rel_path.as_posix()} "
            "(gh available: true; use gh CLI for updates if asked)."
        )
        return {"path": rel_path.as_posix(), "hint": hint, "kind": kind}

    # ── high-level operations ──────────────────────────────────────────────
    def status_payload(self) -> dict:
        link = self.read_link_state()
        gh_ok = self.gh_available()
        authed = self.gh_authenticated() if gh_ok else False
        repo: Optional[RepoInfo] = None
        if authed:
            try:
                repo = self.repo_info()
            except Exception:
                repo = None
        branch = self.current_branch()
        clean = self.is_clean()
        is_worktree = (self.repo_root / ".git").is_file()
        pr = None
        if authed and branch != "HEAD":
            pr = self.pr_for_branch(branch=branch) or None
        payload = {
            "gh": {"available": gh_ok, "authenticated": authed},
            "repo": (
                {
                    "nameWithOwner": repo.name_with_owner,
                    "url": repo.url,
                    "defaultBranch": repo.default_branch,
                }
                if repo
                else None
            ),
            "git": {"branch": branch, "clean": clean, "is_worktree": is_worktree},
            "link": link or {},
            "pr": pr,
        }
        if pr and pr.get("url"):
            url = pr["url"]
            payload["pr_links"] = {
                "url": url,
                "files": f"{url}/files",
                "checks": f"{url}/checks",
            }
        return payload

    def link_issue(self, issue_ref: str) -> dict:
        state, _issue_obj = self._fetch_and_link_issue(issue_ref)
        return state

    def _fetch_and_link_issue(self, issue_ref: str) -> tuple[dict, dict]:
        number = self.validate_issue_same_repo(issue_ref)
        issue_obj = self.issue_view(number=number)
        repo = self.repo_info()
        state = self.read_link_state()
        state["repo"] = {"nameWithOwner": repo.name_with_owner, "url": repo.url}
        state["issue"] = {
            "number": issue_obj.get("number"),
            "url": issue_obj.get("url"),
            "title": issue_obj.get("title"),
            "state": issue_obj.get("state"),
        }
        state["updatedAtMs"] = _now_ms()
        return self.write_link_state(state), issue_obj

    def build_spec_prompt_from_issue(self, issue_ref: str) -> tuple[str, dict]:
        """
        Fetch issue details, persist link state, and build the prompt used to
        create/update SPEC based on the issue.

        Returns (prompt, link_state).
        """
        link_state, issue_obj = self._fetch_and_link_issue(issue_ref)
        issue_num = ((link_state.get("issue") or {}) or {}).get("number")
        issue_title = ((link_state.get("issue") or {}) or {}).get("title") or ""
        body = (issue_obj.get("body") or "").strip()
        prompt = build_github_issue_to_spec_prompt(
            issue_num=int(issue_num or issue_obj.get("number") or 0),
            issue_title=str(issue_title or ""),
            issue_url=str(issue_obj.get("url") or ""),
            issue_body=str(body or ""),
        )
        return prompt, link_state

    def sync_pr(
        self,
        *,
        draft: bool = True,
        title: Optional[str] = None,
        body: Optional[str] = None,
    ) -> dict:
        if not self.gh_authenticated():
            raise GitHubError(
                "GitHub CLI not authenticated (run `gh auth login`)", status_code=401
            )

        repo = self.repo_info()
        base = repo.default_branch or "main"
        state = self.read_link_state() or {}
        issue_num = ((state.get("issue") or {}) or {}).get("number")
        head_branch = self.current_branch()
        if head_branch == "HEAD":
            raise GitHubError(
                "Unable to determine current git branch (repo may have no commits). Create an initial commit and try again.",
                status_code=409,
            )
        cwd = self.repo_root
        meta = {"mode": "current"}
        # Decide commit behavior
        github_cfg = (
            (self.raw_config.get("github") or {})
            if isinstance(self.raw_config, dict)
            else {}
        )
        commit_mode = str(github_cfg.get("sync_commit_mode", "auto")).lower()
        if commit_mode not in ("none", "auto", "always"):
            commit_mode = "auto"

        dirty = not self.is_clean(cwd=cwd)
        if commit_mode in ("always", "auto") and dirty:
            # Commit/push is handled by the sync agent below.
            pass
        if commit_mode == "none" and dirty:
            raise GitHubError(
                "Uncommitted changes present; commit them before syncing PR.",
                status_code=409,
            )

        # Agentic sync (format/lint/test, commit if needed, push; resolve rebase conflicts if any)
        prompt = build_sync_agent_prompt(
            repo_root=str(self.repo_root), branch=head_branch, issue_num=issue_num
        )
        _run_codex_sync_agent(
            repo_root=self.repo_root, raw_config=self.raw_config, prompt=prompt
        )

        # Find/create PR
        pr = self.pr_for_branch(branch=head_branch, cwd=cwd)
        if not pr:
            args = ["pr", "create", "--base", base]
            if draft:
                args.append("--draft")
            if title:
                args += ["--title", title]
            if body:
                args += ["--body", body]
            else:
                args.append("--fill")
            proc = self._gh(args, cwd=cwd, check=True, timeout_seconds=60)
            # gh pr create returns URL on stdout typically
            url = (
                (proc.stdout or "").strip().splitlines()[-1].strip()
                if proc.stdout
                else ""
            )
            pr = {
                "url": url,
                "state": "OPEN",
                "isDraft": bool(draft),
                "headRefName": head_branch,
                "baseRefName": base,
            }
        pr_url = pr.get("url") if isinstance(pr, dict) else None

        state["repo"] = {"nameWithOwner": repo.name_with_owner, "url": repo.url}
        state["baseBranch"] = base
        state["headBranch"] = head_branch
        if pr_url:
            state["pr"] = {
                "number": pr.get("number"),
                "url": pr_url,
                "state": pr.get("state"),
                "isDraft": pr.get("isDraft"),
                "title": pr.get("title"),
                "headRefName": pr.get("headRefName") or head_branch,
                "baseRefName": pr.get("baseRefName") or base,
            }
        state["updatedAtMs"] = _now_ms()
        self.write_link_state(state)

        out = {
            "status": "ok",
            "repo": repo.name_with_owner,
            "mode": "current",
            "meta": meta,
            "pr": pr,
        }
        if pr_url:
            out["links"] = {
                "url": pr_url,
                "files": f"{pr_url}/files",
                "checks": f"{pr_url}/checks",
            }
        return out


def _safe_text(value: Any, *, max_chars: int = 8000) -> str:
    text = str(value or "").strip()
    if len(text) <= max_chars:
        return text
    return text[: max_chars - 3] + "..."


def _format_labels(labels: Any) -> str:
    if not isinstance(labels, list):
        return "none"
    names = []
    for label in labels:
        if isinstance(label, dict):
            name = label.get("name")
        else:
            name = label
        if name:
            names.append(str(name))
    return ", ".join(names) if names else "none"


def _format_author(author: Any) -> str:
    if isinstance(author, dict):
        return str(author.get("login") or author.get("name") or "unknown")
    return str(author or "unknown")


def _format_issue_context(issue: dict, *, repo: str) -> list[str]:
    number = issue.get("number") or ""
    title = issue.get("title") or ""
    url = issue.get("url") or ""
    state = issue.get("state") or ""
    body = _safe_text(issue.get("body") or "")
    labels = _format_labels(issue.get("labels"))
    author = _format_author(issue.get("author"))
    comments = issue.get("comments")
    comment_count = 0
    if isinstance(comments, dict):
        total = comments.get("totalCount")
        if isinstance(total, int):
            comment_count = total
        else:
            nodes = comments.get("nodes")
            edges = comments.get("edges")
            if isinstance(nodes, list):
                comment_count = len(nodes)
            elif isinstance(edges, list):
                comment_count = len(edges)
    elif isinstance(comments, list):
        comment_count = len(comments)

    lines = [
        "# GitHub Issue Context",
        f"Repo: {repo}",
        f"Issue: #{number} {title}".strip(),
        f"URL: {url}",
        f"State: {state}",
        f"Author: {author}",
        f"Labels: {labels}",
        f"Comments: {comment_count}",
        "",
        "Body:",
        body or "(no body)",
    ]
    return lines


def _format_review_location(path: Any, line: Any) -> str:
    path_val = str(path).strip() if path else ""
    if path_val and isinstance(line, int):
        return f"{path_val}:{line}"
    if path_val:
        return path_val
    if isinstance(line, int):
        return f"(unknown file):{line}"
    return "(unknown file)"


def _format_review_threads(review_threads: list[dict[str, Any]]) -> list[str]:
    lines: list[str] = []
    thread_index = 0
    for thread in review_threads:
        if not isinstance(thread, dict):
            continue
        comments = thread.get("comments")
        if not isinstance(comments, list) or not comments:
            continue
        thread_index += 1
        status = "resolved" if thread.get("isResolved") else "unresolved"
        lines.append(f"- Thread {thread_index} ({status})")
        for comment in comments:
            if not isinstance(comment, dict):
                continue
            author = _format_author(comment.get("author"))
            created_at = comment.get("createdAt") or ""
            location = _format_review_location(comment.get("path"), comment.get("line"))
            header = f" - {location} {author}".strip()
            if created_at:
                header = f"{header} ({created_at})"
            lines.append(header)
            body = _safe_text(comment.get("body") or "")
            if not body:
                lines.append(" (no body)")
            else:
                for line in body.splitlines():
                    lines.append(f" {line}")
    return lines


def _format_pr_context(
    pr: dict, *, repo: str, review_threads: Optional[list[dict[str, Any]]] = None
) -> list[str]:
    number = pr.get("number") or ""
    title = pr.get("title") or ""
    url = pr.get("url") or ""
    state = pr.get("state") or ""
    body = _safe_text(pr.get("body") or "")
    labels = _format_labels(pr.get("labels"))
    author = _format_author(pr.get("author"))
    additions = pr.get("additions") or 0
    deletions = pr.get("deletions") or 0
    changed_files = pr.get("changedFiles") or 0
    files_raw = pr.get("files")
    files = (
        [entry for entry in files_raw if isinstance(entry, dict)]
        if isinstance(files_raw, list)
        else []
    )
    file_lines = []
    for entry in files[:200]:
        if not isinstance(entry, dict):
            continue
        path = entry.get("path") or entry.get("name") or ""
        if not path:
            continue
        add = entry.get("additions")
        dele = entry.get("deletions")
        if isinstance(add, int) and isinstance(dele, int):
            file_lines.append(f"- {path} (+{add}/-{dele})")
        else:
            file_lines.append(f"- {path}")
    if len(files) > 200:
        file_lines.append(f"... ({len(files) - 200} more)")

    lines = [
        "# GitHub PR Context",
        f"Repo: {repo}",
        f"PR: #{number} {title}".strip(),
        f"URL: {url}",
        f"State: {state}",
        f"Author: {author}",
        f"Labels: {labels}",
        f"Stats: +{additions} -{deletions}; changed files: {changed_files}",
        "",
        "Body:",
        body or "(no body)",
        "",
        "Files:",
    ]
    lines.extend(file_lines or ["(no files)"])
    review_lines = (
        _format_review_threads(review_threads)
        if isinstance(review_threads, list)
        else []
    )
    if review_lines:
        lines.extend(["", "Review Threads:"])
        lines.extend(review_lines)
    return lines
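
For orientation only, here is a minimal sketch (not part of the published wheel) of how the GitHubService defined in this file might be driven from application code. The repository path, config values, and issue URL below are hypothetical placeholders; the config keys shown are the ones this module actually reads.

# Hypothetical usage sketch; not shipped in codex-autorunner 0.1.0.
from pathlib import Path

from codex_autorunner.integrations.github.service import GitHubError, GitHubService

repo_root = Path("/path/to/checkout")  # hypothetical git clone with `gh` logged in
raw_config = {
    "github": {"gh_path": "gh", "sync_commit_mode": "auto"},  # keys read by this module
}

service = GitHubService(repo_root, raw_config)
print(service.status_payload())  # gh availability/auth, branch, link state, PR links

try:
    # Link a same-repo issue and build the SPEC-generation prompt from it.
    prompt, link_state = service.build_spec_prompt_from_issue(
        "https://github.com/org/repo/issues/123"
    )
    # Run the codex sync agent, then open (or reuse) a draft PR for the current branch.
    result = service.sync_pr(draft=True)
    print(result.get("links"))
except GitHubError as exc:
    print(f"GitHub operation failed ({exc.status_code}): {exc}")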