ai-push-hooks 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.ai-push-hooks.toml +73 -0
- package/LICENSE +21 -0
- package/README.md +234 -0
- package/bin/ai-push-hooks.js +35 -0
- package/package.json +24 -0
- package/pyproject.toml +38 -0
- package/run.sh +29 -0
- package/src/ai_push_hooks/__init__.py +6 -0
- package/src/ai_push_hooks/__main__.py +3 -0
- package/src/ai_push_hooks/artifacts.py +86 -0
- package/src/ai_push_hooks/cli.py +49 -0
- package/src/ai_push_hooks/config.py +356 -0
- package/src/ai_push_hooks/engine.py +172 -0
- package/src/ai_push_hooks/executors/__init__.py +1 -0
- package/src/ai_push_hooks/executors/apply.py +55 -0
- package/src/ai_push_hooks/executors/assertions.py +44 -0
- package/src/ai_push_hooks/executors/exec.py +413 -0
- package/src/ai_push_hooks/executors/llm.py +308 -0
- package/src/ai_push_hooks/hook.py +130 -0
- package/src/ai_push_hooks/modules/__init__.py +11 -0
- package/src/ai_push_hooks/modules/beads.py +46 -0
- package/src/ai_push_hooks/modules/docs.py +159 -0
- package/src/ai_push_hooks/modules/pr.py +73 -0
- package/src/ai_push_hooks/prompts_builtin.py +135 -0
- package/src/ai_push_hooks/types.py +236 -0
|
@@ -0,0 +1,413 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import fnmatch
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import pathlib
|
|
7
|
+
import re
|
|
8
|
+
import shlex
|
|
9
|
+
import shutil
|
|
10
|
+
import subprocess
|
|
11
|
+
from pathlib import PurePosixPath
|
|
12
|
+
from typing import Any
|
|
13
|
+
|
|
14
|
+
from ..types import FEATURE_BRANCH_PREFIXES, HookError, ModuleRuntimeState, RuntimeContext, StepConfig
|
|
15
|
+
|
|
16
|
+
ZERO_OID = "0000000000000000000000000000000000000000"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def env_bool(name: str) -> bool | None:
|
|
20
|
+
value = os.getenv(name)
|
|
21
|
+
if value is None:
|
|
22
|
+
return None
|
|
23
|
+
value = value.strip().lower()
|
|
24
|
+
if value in {"1", "true", "yes", "y", "on"}:
|
|
25
|
+
return True
|
|
26
|
+
if value in {"0", "false", "no", "n", "off"}:
|
|
27
|
+
return False
|
|
28
|
+
return None
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def run_command(
|
|
32
|
+
args: list[str],
|
|
33
|
+
cwd: pathlib.Path,
|
|
34
|
+
input_text: str | None = None,
|
|
35
|
+
timeout: int | None = None,
|
|
36
|
+
check: bool = False,
|
|
37
|
+
env: dict[str, str | None] | None = None,
|
|
38
|
+
) -> subprocess.CompletedProcess[str]:
|
|
39
|
+
merged_env = None
|
|
40
|
+
if env is not None:
|
|
41
|
+
merged_env = os.environ.copy()
|
|
42
|
+
for key, value in env.items():
|
|
43
|
+
if value is None:
|
|
44
|
+
merged_env.pop(key, None)
|
|
45
|
+
else:
|
|
46
|
+
merged_env[key] = value
|
|
47
|
+
completed = subprocess.run(
|
|
48
|
+
args,
|
|
49
|
+
cwd=cwd,
|
|
50
|
+
input=input_text,
|
|
51
|
+
text=True,
|
|
52
|
+
capture_output=True,
|
|
53
|
+
timeout=timeout,
|
|
54
|
+
env=merged_env,
|
|
55
|
+
)
|
|
56
|
+
if check and completed.returncode != 0:
|
|
57
|
+
stderr = (completed.stderr or "").strip()
|
|
58
|
+
stdout = (completed.stdout or "").strip()
|
|
59
|
+
details = stderr or stdout or f"exit code {completed.returncode}"
|
|
60
|
+
raise HookError(f"Command failed: {' '.join(args)} :: {details}")
|
|
61
|
+
return completed
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def git(cwd: pathlib.Path, args: list[str], check: bool = True) -> str:
    """Run a git subcommand in *cwd* and return its stripped stdout."""
    result = run_command(["git", *args], cwd=cwd, check=check)
    return result.stdout.strip()
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def resolve_repo_root(cwd: pathlib.Path) -> pathlib.Path:
    """Return the absolute path of the git worktree containing *cwd*."""
    toplevel = git(cwd, ["rev-parse", "--show-toplevel"])
    return pathlib.Path(toplevel).resolve()
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def resolve_git_dir(repo_root: pathlib.Path) -> pathlib.Path:
    """Return the .git directory for *repo_root*.

    ``git rev-parse --git-dir`` may answer with a path relative to the
    repository root; that case is resolved to an absolute path.
    """
    answer = pathlib.Path(git(repo_root, ["rev-parse", "--git-dir"]))
    if answer.is_absolute():
        return answer
    return (repo_root / answer).resolve()
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def resolve_storage_path(repo_root: pathlib.Path, git_dir: pathlib.Path, raw: str) -> pathlib.Path:
    """Map a configured storage location *raw* onto a concrete path.

    Absolute paths are used verbatim.  ``.git`` (and anything below it) is
    redirected into the real git dir, which may live outside the worktree
    (e.g. for linked worktrees).  Anything else is taken relative to the
    repository root.
    """
    candidate = pathlib.Path(raw)
    if candidate.is_absolute():
        return candidate
    # Normalize Windows separators so the .git prefix check is uniform.
    normalized = raw.replace("\\", "/")
    if normalized == ".git":
        return git_dir
    prefix = ".git/"
    if normalized.startswith(prefix):
        return git_dir / normalized[len(prefix):]
    return repo_root / candidate
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def ensure_dir(path: pathlib.Path) -> pathlib.Path | None:
|
|
94
|
+
try:
|
|
95
|
+
path.mkdir(parents=True, exist_ok=True)
|
|
96
|
+
return path
|
|
97
|
+
except Exception: # noqa: BLE001
|
|
98
|
+
return None
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def current_branch(repo_root: pathlib.Path) -> str:
    """Return the short name of the checked-out ref, best effort ('' on error)."""
    output = git(repo_root, ["rev-parse", "--abbrev-ref", "HEAD"], check=False)
    return output.strip()
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def is_feature_branch(branch_name: str) -> bool:
    """True when *branch_name* is non-empty and carries a feature prefix."""
    if not branch_name:
        return False
    return branch_name.startswith(FEATURE_BRANCH_PREFIXES)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def should_skip_for_sync_branch(repo_root: pathlib.Path) -> tuple[bool, str]:
    """Decide whether hooks should be skipped for beads sync machinery.

    Returns ``(skip, reason)``.  Skips when the worktree lives under the
    dedicated ``.beads-sync-worktrees`` directory, or when the checked-out
    branch equals the configured sync branch (env BEADS_SYNC_BRANCH,
    defaulting to 'beads-sync').
    """
    sync_branch = os.getenv("BEADS_SYNC_BRANCH", "beads-sync")
    if "/.beads-sync-worktrees/" in repo_root.as_posix():
        return True, "worktree is inside .beads-sync-worktrees"
    if current_branch(repo_root) == sync_branch:
        return True, f"current branch is {sync_branch}"
    return False, ""
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def path_matches(path: str, pattern: str) -> bool:
    """True when *path* matches *pattern* via pathlib or fnmatch semantics.

    ``PurePosixPath.match`` anchors the pattern at the right-hand components;
    ``fnmatch`` treats the whole string as one glob.  Either match counts.
    """
    if PurePosixPath(path).match(pattern):
        return True
    return fnmatch.fnmatch(path, pattern)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def list_repo_changes(repo_root: pathlib.Path) -> set[str]:
    """Collect pathnames reported by `git status --short`.

    Short-status lines carry two status columns plus a separator before the
    path, so the payload starts at column 3.
    """
    seen: set[str] = set()
    for status_line in git(repo_root, ["status", "--short"], check=False).splitlines():
        entry = status_line[3:].strip()
        if entry:
            seen.add(entry)
    return seen
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def collect_ranges_from_stdin(
    repo_root: pathlib.Path,
    remote_name: str,
    stdin_lines: list[str],
) -> list[str]:
    """Derive commit ranges to inspect from pre-push hook stdin.

    Each stdin line is '<local-ref> <local-sha> <remote-ref> <remote-sha>'.
    Deletions (local sha all zeros) are skipped.  When the remote sha is a
    commit known locally, the range remote..local is used; when the remote
    sha is absent/zero we fall back to the merge base with <remote>/main,
    then to the single newest commit.  A remote sha that is *not* known
    locally yields no range for that line (mirrors the original behavior).
    With no usable stdin, the upstream merge base (or HEAD~1) seeds one
    range; an unborn history yields [].
    """
    collected: set[str] = set()
    for raw_line in stdin_lines:
        fields = raw_line.strip().split()
        if len(fields) < 4:
            continue
        local_sha = fields[1]
        remote_sha = fields[3]
        if local_sha == ZERO_OID:
            continue  # branch deletion: nothing to examine
        if remote_sha and remote_sha != ZERO_OID:
            probe = run_command(
                ["git", "cat-file", "-e", f"{remote_sha}^{{commit}}"],
                cwd=repo_root,
            )
            if probe.returncode == 0:
                collected.add(f"{remote_sha}..{local_sha}")
        else:
            base = git(repo_root, ["merge-base", local_sha, f"{remote_name}/main"], check=False)
            collected.add(f"{base}..{local_sha}" if base else f"{local_sha}~1..{local_sha}")
    if collected:
        return sorted(collected)

    # Fallback when stdin gave us nothing: compare against the upstream.
    upstream = git(repo_root, ["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{upstream}"], check=False)
    if upstream:
        base = git(repo_root, ["merge-base", "HEAD", upstream], check=False)
        if base:
            return [f"{base}..HEAD"]
    parent = git(repo_root, ["rev-parse", "HEAD~1"], check=False)
    if parent:
        return [f"{parent}..HEAD"]
    return []
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def collect_changed_files(repo_root: pathlib.Path, ranges: list[str]) -> list[str]:
    """Union the added/copied/modified/renamed files across *ranges*, sorted."""
    discovered: set[str] = set()
    for expr in ranges:
        names = git(repo_root, ["diff", "--name-only", "--diff-filter=ACMR", expr], check=True)
        discovered.update(name.strip() for name in names.splitlines() if name.strip())
    return sorted(discovered)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def collect_diff(repo_root: pathlib.Path, ranges: list[str], max_bytes: int) -> str:
    """Concatenate unified diffs for *ranges*, truncated to *max_bytes*.

    NOTE(review): truncation slices characters, not encoded bytes — for
    non-ASCII diffs the UTF-8 size may slightly exceed max_bytes.
    """
    sections: list[str] = []
    for expr in ranges:
        diff_text = git(repo_root, ["diff", "--unified=3", expr], check=True)
        sections.append(f"### RANGE {expr}\n{diff_text}\n")
    return "\n".join(sections)[:max_bytes]
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def collect_commit_messages_for_ranges(repo_root: pathlib.Path, ranges: list[str]) -> list[dict[str, str]]:
    """Return {hash, subject, body} dicts for every commit in *ranges*.

    Uses unit-separator (0x1f) field delimiters and record-separator (0x1e)
    terminators in the log format so multi-line bodies survive parsing.
    Malformed records are skipped.
    """
    collected: list[dict[str, str]] = []
    for expr in ranges:
        log_output = git(repo_root, ["log", "--format=%H%x1f%s%x1f%b%x1e", expr], check=True)
        for record in log_output.split("\x1e"):
            record = record.strip()
            if not record:
                continue
            fields = record.split("\x1f", 2)
            if len(fields) != 3:
                continue
            sha, subject, body = fields
            collected.append(
                {
                    "hash": sha.strip(),
                    "subject": subject.strip(),
                    "body": body.strip(),
                }
            )
    return collected
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def write_text_file(path: pathlib.Path, content: str) -> bool:
    """Write *content* to *path* as UTF-8, creating parent dirs; True on success.

    Best-effort: all errors are swallowed and reported as False.
    """
    try:
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(content, encoding="utf-8")
    except Exception:  # noqa: BLE001
        return False
    return True
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def parse_key_value_text(text: str) -> dict[str, str]:
    """Parse 'key=value' lines into a dict.

    Lines without '=' are ignored; only the first '=' splits; keys and
    values are stripped; later duplicates win.
    """
    parsed: dict[str, str] = {}
    for raw_line in text.splitlines():
        key, sep, value = raw_line.partition("=")
        if sep:
            parsed[key.strip()] = value.strip()
    return parsed
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def lookup_open_pr_url(repo_root: pathlib.Path, branch_name: str) -> str:
    """Return the URL of the open PR whose head is *branch_name*, else ''.

    Raises HookError when `gh pr list` fails or emits unparseable JSON.
    """
    result = run_command(
        ["gh", "pr", "list", "--head", branch_name, "--state", "open", "--limit", "1", "--json", "url"],
        cwd=repo_root,
        check=False,
    )
    if result.returncode != 0:
        message = (result.stderr or "").strip() or (result.stdout or "").strip()
        raise HookError(message or "`gh pr list` failed")
    try:
        entries = json.loads((result.stdout or "").strip() or "[]")
    except json.JSONDecodeError as exc:
        raise HookError("Failed to parse `gh pr list` JSON output") from exc
    if isinstance(entries, list) and entries and isinstance(entries[0], dict):
        return str(entries[0].get("url", "")).strip()
    return ""
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def extract_pr_url(text: str) -> str:
    """Return the first GitHub pull-request URL found in *text*, else ''."""
    found = re.search(r"https://github\.com/[^\s]+/pull/\d+", text)
    if found is None:
        return ""
    return found.group(0).strip()
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def sanitize_pr_title(raw_title: str, branch_name: str) -> str:
    """Collapse whitespace in *raw_title*, falling back to the branch name.

    The result is capped at 240 characters.
    """
    collapsed = re.sub(r"\s+", " ", raw_title).strip()
    return (collapsed or branch_name)[:240]
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
def build_fallback_pr_body(
    branch_name: str,
    ranges: list[str],
    changed_files: list[str],
    commits: list[dict[str, str]],
) -> str:
    """Compose a markdown PR body when no generated body is available.

    Includes the push range (when known), up to 8 commit subjects, and up
    to 15 changed files with an overflow note for the rest.  Always ends
    with a trailing newline.
    """
    sections = [
        "## Summary",
        f"- Auto-created by `ai-push-hooks` for branch `{branch_name}`.",
    ]
    if ranges:
        sections.append(f"- Push range: `{', '.join(ranges)}`.")
    if commits:
        sections.extend(["", "## Commits"])
        for entry in commits[:8]:
            subject_text = str(entry.get("subject", "")).strip()
            if subject_text:
                sections.append(f"- {subject_text}")
    if changed_files:
        sections.extend(["", "## Changed Files"])
        sections.extend(f"- `{name}`" for name in changed_files[:15])
        overflow = len(changed_files) - 15
        if overflow > 0:
            sections.append(f"- and {overflow} more")
    return "\n".join(sections).strip() + "\n"
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def attempt_pr_creation_fallback(
    repo_root: pathlib.Path,
    branch_name: str,
    base_branch: str,
    ranges: list[str],
    changed_files: list[str],
    commits: list[dict[str, str]],
) -> str:
    """Create a PR with a synthesized title/body and return its URL.

    The title is the latest commit subject (sanitized); the body comes from
    build_fallback_pr_body.  When `gh pr create` does not yield a URL but
    an open PR for the branch already exists, that PR's URL is returned.
    Raises HookError when no URL can be determined.
    """
    fallback_title = sanitize_pr_title(git(repo_root, ["log", "-1", "--pretty=%s"], check=False), branch_name)
    fallback_body = build_fallback_pr_body(branch_name, ranges, changed_files, commits)
    result = run_command(
        [
            "gh", "pr", "create",
            "--head", branch_name,
            "--base", base_branch,
            "--title", fallback_title,
            "--body", fallback_body,
        ],
        cwd=repo_root,
        check=False,
    )
    output = "\n".join([(result.stdout or "").strip(), (result.stderr or "").strip()])
    if result.returncode == 0:
        url = extract_pr_url(output)
        if url:
            return url
    # Either creation failed or succeeded without printing a URL; check for
    # an already-open PR before giving up.
    already_open = lookup_open_pr_url(repo_root, branch_name)
    if already_open:
        return already_open
    raise HookError(output.strip() or f"gh pr create failed with exit code {result.returncode}")
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def remote_branch_exists(repo_root: pathlib.Path, remote_name: str, branch_name: str) -> bool:
    """True when *branch_name* exists as a head on *remote_name*.

    Performs a network call via `git ls-remote`; any failure counts as
    'does not exist'.
    """
    probe = run_command(["git", "ls-remote", "--heads", remote_name, branch_name], cwd=repo_root, check=False)
    if probe.returncode != 0:
        return False
    return bool((probe.stdout or "").strip())
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
def _report_file_path(context: RuntimeContext, state: ModuleRuntimeState) -> pathlib.Path:
    """Resolve where the beads status report should live.

    The collect step may record a custom 'report_file' key in its
    branch-context artifact; otherwise BEADS_STATUS_ACTION_REQUIRED.md at
    the repository root is used.
    """
    default_name = "BEADS_STATUS_ACTION_REQUIRED.md"
    artifact = state.artifacts.get("collect/branch-context.txt")
    if artifact and artifact.exists():
        values = parse_key_value_text(artifact.read_text(encoding="utf-8"))
        return (context.repo_root / values.get("report_file", default_name)).resolve()
    return (context.repo_root / default_name).resolve()
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def beads_alignment_executor(
    context: RuntimeContext,
    state: ModuleRuntimeState,
    step: StepConfig,
    inputs: list[pathlib.Path],
) -> dict[str, Any]:
    """Apply a beads-alignment payload: run its commands and manage the report file.

    ``inputs[0]`` is a JSON artifact (presumably produced by an earlier
    pipeline step — TODO confirm provenance) with optional keys:
      - commands: list of command strings to execute in the repo root
      - report_markdown: markdown content for the status report file
      - unresolved: flag indicating alignment issues remain
    Returns a summary dict; raises HookError when 'commands' is not a list
    or when any command exits non-zero.
    """
    # Honor an upstream decision (recorded in module metadata) to skip everything.
    if state.metadata.get("skip_module"):
        return {"skipped": True, "commands_run": [], "report_written": False, "unresolved": False}
    payload = json.loads(inputs[0].read_text(encoding="utf-8"))
    commands = payload.get("commands", [])
    if not isinstance(commands, list):
        raise HookError("beads_alignment commands must be an array")
    report_path = _report_file_path(context, state)
    commands_run: list[str] = []
    for command in commands:
        # Skip non-string or blank entries rather than failing.
        if not isinstance(command, str) or not command.strip():
            continue
        # SECURITY(review): this executes arbitrary commands taken from the
        # JSON payload (no shell, but the command text itself is untrusted
        # input) — verify where the payload comes from before relying on it.
        run_command(shlex.split(command), cwd=context.repo_root, check=True)
        commands_run.append(command)

    report_markdown = str(payload.get("report_markdown", "")).strip()
    unresolved = bool(payload.get("unresolved", False))
    report_written = False
    if report_markdown:
        # Normalize to a trailing newline before writing.
        if not report_markdown.endswith("\n"):
            report_markdown += "\n"
        write_text_file(report_path, report_markdown)
        report_written = True
    elif report_path.exists() and not unresolved:
        # No report content and nothing unresolved: remove a stale report.
        report_path.unlink()

    return {
        "skipped": False,
        "commands_run": commands_run,
        "report_written": report_written,
        "unresolved": unresolved,
        "report_file": report_path.relative_to(context.repo_root).as_posix(),
    }
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
def gh_pr_create_executor(
    context: RuntimeContext,
    state: ModuleRuntimeState,
    step: StepConfig,
    inputs: list[pathlib.Path],
) -> dict[str, Any]:
    """Create (or find) a GitHub pull request for the current branch via `gh`.

    ``inputs[0]`` is a JSON artifact with optional keys: base_branch,
    head_branch, title, body, draft.  Returns a dict carrying 'pr_url' plus
    flags ('skipped', 'already_exists', 'deferred_until_remote').  Raises
    HookError when `gh` is not installed, or when creation fails while the
    branch already exists on the remote.
    """
    # Module-level skip decided upstream; reuse any recorded PR URL.
    if state.metadata.get("skip_module"):
        return {"skipped": True, "pr_url": state.metadata.get("existing_pr_url", "")}
    if shutil.which("gh") is None:
        raise HookError("`gh` is required for PR creation but is not installed")
    payload = json.loads(inputs[0].read_text(encoding="utf-8"))
    branch_name = current_branch(context.repo_root)
    # Reuse an already-open PR instead of creating a duplicate.
    existing_pr = lookup_open_pr_url(context.repo_root, branch_name)
    if existing_pr:
        return {"skipped": False, "pr_url": existing_pr, "already_exists": True}

    base_branch = str(payload.get("base_branch", "main")).strip() or "main"
    head_branch = str(payload.get("head_branch", branch_name)).strip() or branch_name
    title = sanitize_pr_title(str(payload.get("title", "")).strip(), branch_name)
    body = str(payload.get("body", "")).strip()
    if not body:
        # No generated body: synthesize one from the cached push ranges.
        commits = collect_commit_messages_for_ranges(context.repo_root, context.cache.get("ranges", []))
        body = build_fallback_pr_body(
            branch_name,
            context.cache.get("ranges", []),
            context.cache.get("changed_files", []),
            commits,
        )
    args = ["gh", "pr", "create", "--head", head_branch, "--base", base_branch, "--title", title, "--body", body]
    if bool(payload.get("draft", False)):
        args.append("--draft")
    created = run_command(args, cwd=context.repo_root, check=False)
    combined_output = "\n".join([(created.stdout or "").strip(), (created.stderr or "").strip()])
    pr_url = extract_pr_url(combined_output)
    if created.returncode != 0 and not pr_url:
        # Creation failed: another process may have opened one concurrently.
        pr_url = lookup_open_pr_url(context.repo_root, branch_name)
        if not pr_url:
            if remote_branch_exists(context.repo_root, context.remote_name or "origin", branch_name):
                raise HookError(combined_output.strip() or f"gh pr create failed with exit code {created.returncode}")
            # Branch not on the remote yet: defer PR creation until it is pushed.
            return {"skipped": False, "pr_url": "", "deferred_until_remote": True}
    return {"skipped": False, "pr_url": pr_url, "already_exists": False}
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
# Dispatch table mapping executor identifiers (referenced from step
# configuration) to their implementations in this module.
EXEC_HANDLERS = {
    "beads_alignment": beads_alignment_executor,
    "gh_pr_create": gh_pr_create_executor,
}
|