jleechanorg-pr-automation 0.1.1__py3-none-any.whl → 0.2.45__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. jleechanorg_pr_automation/STORAGE_STATE_TESTING_PROTOCOL.md +326 -0
  2. jleechanorg_pr_automation/__init__.py +64 -9
  3. jleechanorg_pr_automation/automation_safety_manager.py +306 -95
  4. jleechanorg_pr_automation/automation_safety_wrapper.py +13 -19
  5. jleechanorg_pr_automation/automation_utils.py +87 -65
  6. jleechanorg_pr_automation/check_codex_comment.py +7 -1
  7. jleechanorg_pr_automation/codex_branch_updater.py +21 -9
  8. jleechanorg_pr_automation/codex_config.py +70 -3
  9. jleechanorg_pr_automation/jleechanorg_pr_monitor.py +1954 -234
  10. jleechanorg_pr_automation/logging_utils.py +86 -0
  11. jleechanorg_pr_automation/openai_automation/__init__.py +3 -0
  12. jleechanorg_pr_automation/openai_automation/codex_github_mentions.py +1111 -0
  13. jleechanorg_pr_automation/openai_automation/debug_page_content.py +88 -0
  14. jleechanorg_pr_automation/openai_automation/oracle_cli.py +364 -0
  15. jleechanorg_pr_automation/openai_automation/test_auth_restoration.py +244 -0
  16. jleechanorg_pr_automation/openai_automation/test_codex_comprehensive.py +355 -0
  17. jleechanorg_pr_automation/openai_automation/test_codex_integration.py +254 -0
  18. jleechanorg_pr_automation/orchestrated_pr_runner.py +516 -0
  19. jleechanorg_pr_automation/tests/__init__.py +0 -0
  20. jleechanorg_pr_automation/tests/test_actionable_counting_matrix.py +84 -86
  21. jleechanorg_pr_automation/tests/test_attempt_limit_logic.py +124 -0
  22. jleechanorg_pr_automation/tests/test_automation_marker_functions.py +175 -0
  23. jleechanorg_pr_automation/tests/test_automation_over_running_reproduction.py +9 -11
  24. jleechanorg_pr_automation/tests/test_automation_safety_limits.py +91 -79
  25. jleechanorg_pr_automation/tests/test_automation_safety_manager_comprehensive.py +53 -53
  26. jleechanorg_pr_automation/tests/test_codex_actor_matching.py +1 -1
  27. jleechanorg_pr_automation/tests/test_fixpr_prompt.py +54 -0
  28. jleechanorg_pr_automation/tests/test_fixpr_return_value.py +140 -0
  29. jleechanorg_pr_automation/tests/test_graphql_error_handling.py +26 -26
  30. jleechanorg_pr_automation/tests/test_model_parameter.py +317 -0
  31. jleechanorg_pr_automation/tests/test_orchestrated_pr_runner.py +697 -0
  32. jleechanorg_pr_automation/tests/test_packaging_integration.py +127 -0
  33. jleechanorg_pr_automation/tests/test_pr_filtering_matrix.py +246 -193
  34. jleechanorg_pr_automation/tests/test_pr_monitor_eligibility.py +354 -0
  35. jleechanorg_pr_automation/tests/test_pr_targeting.py +102 -7
  36. jleechanorg_pr_automation/tests/test_version_consistency.py +51 -0
  37. jleechanorg_pr_automation/tests/test_workflow_specific_limits.py +202 -0
  38. jleechanorg_pr_automation/tests/test_workspace_dispatch_missing_dir.py +119 -0
  39. jleechanorg_pr_automation/utils.py +81 -56
  40. jleechanorg_pr_automation-0.2.45.dist-info/METADATA +864 -0
  41. jleechanorg_pr_automation-0.2.45.dist-info/RECORD +45 -0
  42. jleechanorg_pr_automation-0.1.1.dist-info/METADATA +0 -222
  43. jleechanorg_pr_automation-0.1.1.dist-info/RECORD +0 -23
  44. {jleechanorg_pr_automation-0.1.1.dist-info → jleechanorg_pr_automation-0.2.45.dist-info}/WHEEL +0 -0
  45. {jleechanorg_pr_automation-0.1.1.dist-info → jleechanorg_pr_automation-0.2.45.dist-info}/entry_points.txt +0 -0
  46. {jleechanorg_pr_automation-0.1.1.dist-info → jleechanorg_pr_automation-0.2.45.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,516 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Orchestrated PR runner that uses TaskDispatcher (Claude agents) to run /fixpr and /copilot
4
+ for recent PRs. Workspaces live under /tmp/{repo}/{branch}.
5
+ """
6
+
7
+ import argparse
8
+ import json
9
+ import os
10
+ import re
11
+ import shutil
12
+ import subprocess
13
+ import sys
14
+ from contextlib import contextmanager
15
+ from datetime import datetime, timedelta, timezone
16
+ from pathlib import Path
17
+ from typing import Dict, List, Optional
18
+
19
+ from orchestration.task_dispatcher import TaskDispatcher
20
+
21
+ ORG = "jleechanorg"
22
+ BASE_CLONE_ROOT = Path("/tmp/pr-orch-bases")
23
+ WORKSPACE_ROOT_BASE = Path("/tmp")
24
+ DEFAULT_CUTOFF_HOURS = 24
25
+ DEFAULT_MAX_PRS = 5
26
+ DEFAULT_TIMEOUT = 30 # baseline timeout per security guideline
27
+ CLONE_TIMEOUT = 300
28
+ FETCH_TIMEOUT = 120
29
+ API_TIMEOUT = 60
30
+ WORKTREE_TIMEOUT = 60
31
+ LOG_PREFIX = "[orchestrated_pr_runner]"
32
+
33
+
34
def log(msg: str) -> None:
    """Emit *msg* on stdout, prefixed with the runner's log tag."""
    line = " ".join((LOG_PREFIX, msg))
    print(line)
36
+
37
+
38
def display_log_viewing_command(session_name: str) -> None:
    """Display formatted log viewing commands for the given session."""
    # Prefer resolving stream_logs.sh next to the orchestration module
    # (matches task_dispatcher's own path resolution); fall back to a
    # path relative to this file when the module is unavailable.
    try:
        import orchestration.task_dispatcher
        module_dir = Path(orchestration.task_dispatcher.__file__).resolve().parent
        script_path = module_dir / "stream_logs.sh"
    except (ImportError, AttributeError):
        repo_root = Path(__file__).resolve().parent.parent.parent
        script_path = repo_root / "orchestration" / "stream_logs.sh"

    if not script_path.exists():
        return

    log("")
    log("📺 View formatted logs:")
    log(f" {script_path} {session_name}")
    log("")
    log(" Or use the shorter command:")
    log(f" ./orchestration/stream_logs.sh {session_name}")
    log("")
56
+
57
+
58
def run_cmd(
    cmd: List[str],
    cwd: Optional[Path] = None,
    check: bool = True,
    timeout: Optional[int] = None,
) -> subprocess.CompletedProcess:
    """Run *cmd* with captured text output.

    Falls back to DEFAULT_TIMEOUT when no timeout is supplied; *cwd* may be
    a Path and is converted to str for subprocess.
    """
    working_dir = str(cwd) if cwd else None
    effective_timeout = timeout or DEFAULT_TIMEOUT
    return subprocess.run(
        cmd,
        cwd=working_dir,
        text=True,
        capture_output=True,
        check=check,
        timeout=effective_timeout,
    )
72
+
73
+
74
def query_recent_prs(cutoff_hours: int) -> List[Dict]:
    """Return open, non-draft PRs in the org updated within *cutoff_hours*.

    Pages through the GitHub GraphQL search API via ``gh api graphql`` and
    returns a list of PR dicts sorted newest-first by ``updatedAt``.

    Raises:
        RuntimeError: if the GraphQL call fails or returns invalid JSON.
    """
    cutoff = datetime.now(timezone.utc) - timedelta(hours=cutoff_hours)
    search_query = f"org:{ORG} is:pr is:open updated:>={cutoff.strftime('%Y-%m-%dT%H:%M:%SZ')}"
    graphql_query = """
    query($searchQuery: String!, $cursor: String) {
      search(type: ISSUE, query: $searchQuery, first: 100, after: $cursor) {
        nodes {
          __typename
          ... on PullRequest {
            number
            title
            headRefName
            headRefOid
            updatedAt
            isDraft
            mergeable
            url
            repository { name nameWithOwner }
          }
        }
        pageInfo { hasNextPage endCursor }
      }
    }
    """

    prs: List[Dict] = []
    cursor = None
    while True:
        # The cursor is only passed on follow-up pages.
        cmd = [
            "gh",
            "api",
            "graphql",
            "-f",
            f"query={graphql_query}",
            "-f",
            f"searchQuery={search_query}",
        ]
        if cursor:
            cmd += ["-f", f"cursor={cursor}"]
        result = run_cmd(cmd, check=False, timeout=API_TIMEOUT)
        if result.returncode != 0:
            raise RuntimeError(f"GraphQL search failed: {result.stderr.strip()}")
        try:
            data = json.loads(result.stdout)
        except json.JSONDecodeError as e:
            raise RuntimeError(f"Invalid JSON response from GitHub API: {e}") from e
        search = data.get("data", {}).get("search") or {}
        for node in search.get("nodes", []):
            # The search endpoint can also return issue nodes; keep only PRs.
            if node.get("__typename") != "PullRequest":
                continue
            repo_info = node.get("repository") or {}
            repo_full = repo_info.get("nameWithOwner")
            repo_name = repo_info.get("name")
            branch = node.get("headRefName")
            pr_number = node.get("number")
            # Skip nodes missing any field that downstream dispatch requires.
            if not repo_full or not repo_name or branch is None or pr_number is None:
                log(f"Skipping PR with incomplete data: {node.get('url') or node.get('number')}")
                continue
            prs.append(
                {
                    "repo_full": repo_full,
                    "repo": repo_name,
                    "number": pr_number,
                    "title": node.get("title"),
                    "branch": branch,
                    "head_oid": node.get("headRefOid"),
                    "updatedAt": node.get("updatedAt"),
                    "isDraft": node.get("isDraft"),
                    "mergeable": node.get("mergeable"),
                    "url": node.get("url"),
                }
            )
        page = search.get("pageInfo") or {}
        if not page.get("hasNextPage"):
            break
        cursor = page.get("endCursor")
        if not cursor:
            break
    # Newest activity first; drafts are filtered out of the final result.
    prs.sort(key=lambda pr: pr.get("updatedAt", ""), reverse=True)
    return [pr for pr in prs if not pr.get("isDraft")]
154
+
155
+
156
def has_failing_checks(repo_full: str, pr_number: int) -> bool:
    """Return True if PR has any failing checks.

    Best-effort: any error while querying or parsing check data is logged
    and treated as "no failing checks" (returns False).
    """
    failing_states = {"FAILED", "FAILURE", "CANCELLED", "TIMED_OUT", "ACTION_REQUIRED"}
    try:
        result = run_cmd(
            ["gh", "pr", "checks", str(pr_number), "--repo", repo_full, "--json", "name,state,workflow"],
            check=False,
            timeout=API_TIMEOUT,
        )
        if result.returncode != 0:
            log(f"Failed to fetch checks for {repo_full}#{pr_number}: {result.stderr.strip()}")
            return False
        checks = json.loads(result.stdout or "[]")
        return any((check.get("state") or "").upper() in failing_states for check in checks)
    except Exception as exc:
        log(f"Error checking PR checks for {repo_full}#{pr_number}: {exc}")
        return False
177
+
178
+
179
def ensure_base_clone(repo_full: str) -> Path:
    """Clone (or refresh) the shared base clone for *repo_full*; return its path.

    The clone lives at BASE_CLONE_ROOT/<repo-name> and is hard-reset to
    origin/main so worktrees created from it start clean.

    Raises:
        ValueError: if *repo_full* is not in "owner/name" form.
        RuntimeError: if the clone cannot be reset onto origin/main.
    """
    # Validate repo_full format defensively
    if not re.match(r"^[\w.-]+/[\w.-]+$", repo_full):
        raise ValueError(f"Invalid repository format: {repo_full}")

    github_host = os.environ.get('GH_HOST', 'github.com')
    repo_name = repo_full.split("/")[-1]
    base_dir = BASE_CLONE_ROOT / repo_name
    BASE_CLONE_ROOT.mkdir(parents=True, exist_ok=True)
    if not base_dir.exists():
        log(f"Cloning base repo for {repo_full} into {base_dir}")
        run_cmd(
            ["git", "clone", f"https://{github_host}/{repo_full}.git", str(base_dir)],
            timeout=CLONE_TIMEOUT,
        )
    else:
        log(f"Refreshing base repo for {repo_full}")
        try:
            run_cmd(["git", "fetch", "origin", "--prune"], cwd=base_dir, timeout=FETCH_TIMEOUT)
        except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as exc:
            # A broken/stale clone is cheaper to replace than to repair.
            stderr_msg = getattr(exc, "stderr", "") or str(exc) or "No stderr available"
            log(
                f"Fetch failed for {repo_full} ({exc.__class__.__name__}): {stderr_msg}. "
                "Re-cloning base repo."
            )
            shutil.rmtree(base_dir, ignore_errors=True)
            run_cmd(
                ["git", "clone", f"https://{github_host}/{repo_full}.git", str(base_dir)],
                timeout=CLONE_TIMEOUT,
            )
    # Reset base clone to main to ensure clean worktrees
    try:
        # Ensure we're on main branch (create if doesn't exist locally)
        result = run_cmd(["git", "rev-parse", "--verify", "main"], cwd=base_dir, check=False, timeout=DEFAULT_TIMEOUT)
        if result.returncode != 0:
            # Local main doesn't exist - checkout from remote tracking branch
            run_cmd(["git", "checkout", "-B", "main", "origin/main"], cwd=base_dir, timeout=FETCH_TIMEOUT)
        else:
            # Local main exists - switch to it and reset
            run_cmd(["git", "checkout", "main"], cwd=base_dir, timeout=FETCH_TIMEOUT)
        run_cmd(["git", "reset", "--hard", "origin/main"], cwd=base_dir, timeout=FETCH_TIMEOUT)
        # -fdx also removes ignored files so stale build artifacts don't leak in.
        run_cmd(["git", "clean", "-fdx"], cwd=base_dir, timeout=FETCH_TIMEOUT)
    except subprocess.CalledProcessError as exc:
        stderr_msg = exc.stderr if exc.stderr else "No stderr available"
        raise RuntimeError(f"Failed to reset base clone for {repo_full}: {stderr_msg}") from exc
    return base_dir
225
+
226
+
227
def sanitize_workspace_name(name: str, pr_number: int) -> str:
    """Build a filesystem-safe workspace name of the form ``pr-<n>[-<slug>]``.

    Runs of characters outside [A-Za-z0-9._-] collapse into single dashes;
    a name that sanitizes to nothing yields just ``pr-<n>``.
    """
    slug = re.sub(r"[^A-Za-z0-9._-]+", "-", name).strip("-")
    prefix = f"pr-{pr_number}"
    if not slug:
        return prefix
    return f"{prefix}-{slug}"
230
+
231
+
232
def prepare_workspace_dir(repo: str, workspace_name: str) -> Path:
    """Return a clean workspace path /tmp/<repo>/<workspace_name>.

    Any pre-existing directory at the target is removed first; if it was a
    git worktree, it is detached from its base repository so git's worktree
    metadata does not go stale.

    Raises:
        ValueError: if the resolved target escapes WORKSPACE_ROOT_BASE.
        OSError: if an existing workspace cannot be removed.
    """
    target = WORKSPACE_ROOT_BASE / repo / workspace_name
    # Safety: ensure target stays within the configured root
    try:
        target.resolve().relative_to(WORKSPACE_ROOT_BASE.resolve())
    except ValueError:
        raise ValueError(f"Workspace path escapes root: {target}")

    if target.exists():
        git_file = target / ".git"
        if git_file.exists():
            try:
                # A worktree's .git is a plain file of the form
                # "gitdir: <base>/.git/worktrees/<name>".
                git_content = git_file.read_text().strip()
                if git_content.startswith("gitdir: "):
                    git_dir_path = Path(git_content.split("gitdir: ", 1)[1].strip())
                    # Walk up from .git/worktrees/<name> to the base repo root.
                    base_repo = git_dir_path.parents[1].parent
                    run_cmd(
                        ["git", "worktree", "remove", str(target), "--force"],
                        cwd=base_repo,
                        check=False,
                        timeout=WORKTREE_TIMEOUT,
                    )
                    run_cmd(
                        ["git", "worktree", "prune"],
                        cwd=base_repo,
                        check=False,
                        timeout=WORKTREE_TIMEOUT,
                    )
            except Exception as exc:
                # Best-effort: stale worktree metadata is logged, not fatal.
                log(f"Warning: Failed to clean worktree metadata for {target}: {exc}")
        try:
            shutil.rmtree(target)
        except FileNotFoundError:
            # Workspace already gone (external cleanup, reboot, etc.) - nothing to do
            pass
        except OSError as e:
            # Other OS errors (permissions, locks, etc.) are serious
            log(f"Failed to remove existing workspace {target}: {e}")
            raise
    target.parent.mkdir(parents=True, exist_ok=True)
    return target
273
+
274
+
275
@contextmanager
def chdir(path: Path):
    """Temporarily change the working directory to *path*; restore on exit."""
    original_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # Restore even when the body raises.
        os.chdir(original_cwd)
283
+
284
+
285
def kill_tmux_session_if_exists(name: str) -> None:
    """Ensure tmux session name is free; kill existing session if present.

    Two passes: direct ``has-session`` probes on the name and common
    variants, then a scan of ``tmux ls`` for sessions containing the base
    or ``pr-<n>`` token. Any failure is logged and swallowed (best-effort).
    """
    try:
        base = name.rstrip(".")
        candidates = [name, f"{name}_", base, f"{base}_"]  # cover trailing dot and suffixed variants
        # First try direct has-session matches
        for candidate in candidates:
            check = run_cmd(["tmux", "has-session", "-t", candidate], check=False, timeout=30)
            if check.returncode == 0:
                log(f"Existing tmux session {candidate} detected; killing to allow reuse")
                run_cmd(["tmux", "kill-session", "-t", candidate], check=False, timeout=30)
        # Fallback: scan tmux ls and kill any sessions containing the base token
        ls = run_cmd(["tmux", "ls"], check=False, timeout=30)
        if ls.returncode == 0:
            base_token = name.rstrip(".")
            pr_token = None
            match = re.search(r"pr-(\d+)", base_token)
            if match:
                pr_token = match.group(0)
            for line in (ls.stdout or "").splitlines():
                # `tmux ls` lines look like "<session-name>: ...".
                session_name = line.split(":", 1)[0]
                # Use word boundary regex to prevent pr-1 from matching pr-10, pr-11, pr-100
                matched = False
                if base_token:
                    # Match base_token with word boundaries
                    base_pattern = rf"\b{re.escape(base_token)}\b"
                    if re.search(base_pattern, session_name):
                        matched = True
                if not matched and pr_token:
                    # Match pr_token with word boundaries (pr-1 should not match pr-10)
                    pr_pattern = rf"\b{re.escape(pr_token)}\b"
                    if re.search(pr_pattern, session_name):
                        matched = True
                if matched:
                    log(f"Killing tmux session {session_name} matched on token {base_token or pr_token}")
                    run_cmd(["tmux", "kill-session", "-t", session_name], check=False, timeout=30)
    except Exception as exc:
        # Session cleanup must never block agent dispatch.
        log(f"Warning: unable to check/kill tmux session {name}: {exc}")
323
+
324
+
325
def dispatch_agent_for_pr_with_task(
    dispatcher: TaskDispatcher,
    pr: Dict,
    task_description: str,
    agent_cli: str = "claude",
    model: Optional[str] = None,
) -> bool:
    """Dispatch an agent for a PR using a custom task description.

    Prepares a workspace under /tmp/<repo>/<workspace>, asks *dispatcher*
    to turn *task_description* into agent specs, and spawns each one.
    Returns True when at least one agent was spawned successfully.
    """
    repo_full = pr.get("repo_full")
    repo = pr.get("repo")
    pr_number = pr.get("number")
    branch = pr.get("branch")

    if not task_description:
        log("Skipping PR dispatch: missing task description")
        return False

    if not all([repo_full, repo, pr_number is not None, branch]):
        log(f"Skipping PR with missing required fields: {pr}")
        return False
    assert branch is not None and pr_number is not None

    # Validate the model name once, up front (consistent with
    # dispatch_agent_for_pr) instead of re-validating per agent spec.
    normalized_model: Optional[str] = None
    if model:
        raw_model = str(model).strip()
        if not re.fullmatch(r"[A-Za-z0-9_.-]+", raw_model):
            log(f"❌ Invalid model name requested: {raw_model!r}")
            return False
        normalized_model = raw_model

    workspace_name = sanitize_workspace_name(branch or f"pr-{pr_number}", pr_number)
    workspace_root = WORKSPACE_ROOT_BASE / repo
    prepare_workspace_dir(repo, workspace_name)

    cli = agent_cli.strip().lower() if isinstance(agent_cli, str) and agent_cli.strip() else "claude"
    agent_specs = dispatcher.analyze_task_and_create_agents(task_description, forced_cli=cli)
    success = False
    for spec in agent_specs:
        agent_spec = {**spec}
        session_name = agent_spec.get("name") or workspace_name
        kill_tmux_session_if_exists(session_name)
        agent_spec.setdefault(
            "workspace_config",
            {
                "workspace_root": str(workspace_root),
                "workspace_name": workspace_name,
            },
        )
        # Inject model parameter for all CLIs in the chain (Gemini, Claude, Cursor, etc.)
        # Bug fix: the model was previously validated here but never attached
        # to the spec, so the requested model was silently ignored.
        if normalized_model:
            agent_spec["model"] = normalized_model
        ok = dispatcher.create_dynamic_agent(agent_spec)
        if ok:
            log(f"Spawned agent for {repo_full}#{pr_number} at /tmp/{repo}/{workspace_name}")
            display_log_viewing_command(session_name)
            success = True
        else:
            log(f"Failed to spawn agent for {repo_full}#{pr_number}")
    return success
379
+
380
+
381
def dispatch_agent_for_pr(
    dispatcher: TaskDispatcher,
    pr: Dict,
    agent_cli: str = "claude",
    model: Optional[str] = None,
) -> bool:
    """Spawn agent(s) to fix one PR (merge conflicts / failing checks).

    Builds a self-contained /fixpr task description, prepares a workspace
    under /tmp/<repo>/<workspace>, and dispatches via *dispatcher*.
    Returns True when at least one agent was spawned successfully.
    """
    repo_full = pr.get("repo_full")
    repo = pr.get("repo")
    pr_number = pr.get("number")
    branch = pr.get("branch")

    if not all([repo_full, repo, pr_number is not None, branch]):
        log(f"Skipping PR with missing required fields: {pr}")
        return False
    assert branch is not None and pr_number is not None
    workspace_name = sanitize_workspace_name(branch or f"pr-{pr_number}", pr_number)
    workspace_root = WORKSPACE_ROOT_BASE / repo
    prepare_workspace_dir(repo, workspace_name)

    # The first CLI in a comma-separated chain names the commit marker below.
    cli_chain_parts = [part.strip().lower() for part in str(agent_cli).split(",") if part.strip()]
    commit_marker_cli = cli_chain_parts[0] if cli_chain_parts else str(agent_cli).strip().lower() or "claude"

    # Reject model names containing anything outside [A-Za-z0-9_.-].
    normalized_model: Optional[str] = None
    if model:
        raw_model = str(model).strip()
        if not re.fullmatch(r"[A-Za-z0-9_.-]+", raw_model):
            log(f"❌ Invalid model name requested: {raw_model!r}")
            return False
        normalized_model = raw_model

    # Self-contained prompt: includes a manual fallback recipe in case the
    # /fixpr command is unavailable in the agent's environment.
    task_description = (
        f"FIXPR TASK (SELF-CONTAINED): Update PR #{pr_number} in {repo_full} (branch {branch}). "
        "Goal: resolve merge conflicts and failing checks. Also review and address any reviewer feedback that is blocking CI or mergeability. "
        f"CLI chain: {agent_cli}. DO NOT wait for additional input—start immediately.\n\n"
        "If /fixpr is unavailable, follow these steps explicitly (fallback for all CLIs including Claude):\n"
        f"1) gh pr checkout {pr_number}\n"
        "2) git status && git branch --show-current\n"
        "3) If checkout fails because the branch exists elsewhere, create worktree:\n"
        f" git worktree add {workspace_root}/pr-{pr_number}-rerun {pr_number} && cd {workspace_root}/pr-{pr_number}-rerun\n"
        "4) Fetch PR feedback from ALL sources (pagination-safe):\n"
        " - Issue comments: gh api \"/repos/{owner}/{repo}/issues/{pr}/comments\" --paginate -F per_page=100\n"
        " - Review summaries: gh api \"/repos/{owner}/{repo}/pulls/{pr}/reviews\" --paginate -F per_page=100\n"
        " - Inline review comments: gh api \"/repos/{owner}/{repo}/pulls/{pr}/comments\" --paginate -F per_page=100\n"
        " Ensure you cover all feedback; do not assume `gh pr view --json comments` includes inline review comments.\n"
        "5) Identify failing checks (gh pr view --json statusCheckRollup) and reproduce locally (tests/linters as needed)\n"
        "6) Apply fixes (prefer a deterministic branch sync strategy: merge base into branch; avoid rebases unless required by policy)\n"
        f'7) git add -A && git commit -m "[fixpr {commit_marker_cli}-automation-commit] fix PR #{pr_number}" && git push\n'
        f"8) gh pr view {pr_number} --json mergeable,mergeStateStatus,statusCheckRollup\n"
        f"9) Write completion report to {workspace_root}/{workspace_name}/orchestration_results.json summarizing actions and test results\n\n"
        f"Workspace: --workspace-root {workspace_root} --workspace-name {workspace_name}. "
        "Do not create new PRs or branches. Skip /copilot. Use only the requested CLI chain (in order)."
    )

    agent_specs = dispatcher.analyze_task_and_create_agents(task_description, forced_cli=agent_cli)
    success = False
    for spec in agent_specs:
        # Preserve the CLI chain emitted by orchestration. Do not overwrite with a single CLI string.
        agent_spec = {**spec}
        # Ensure tmux session name is available for reuse
        session_name = agent_spec.get("name") or workspace_name
        kill_tmux_session_if_exists(session_name)
        agent_spec.setdefault(
            "workspace_config",
            {
                "workspace_root": str(workspace_root),
                "workspace_name": workspace_name,
            },
        )
        if normalized_model:
            agent_spec["model"] = normalized_model
        ok = dispatcher.create_dynamic_agent(agent_spec)
        if ok:
            log(f"Spawned agent for {repo_full}#{pr_number} at /tmp/{repo}/{workspace_name}")
            display_log_viewing_command(session_name)
            success = True
        else:
            log(f"Failed to spawn agent for {repo_full}#{pr_number}")
    return success
459
+
460
+
461
def run_fixpr_batch(cutoff_hours: int = DEFAULT_CUTOFF_HOURS, max_prs: int = DEFAULT_MAX_PRS, agent_cli: str = "claude") -> None:
    """Discover recent actionable PRs and dispatch fix agents for up to *max_prs*.

    A PR is actionable when it has merge conflicts (mergeable == CONFLICTING)
    or at least one failing check. Calls sys.exit(1) when PR discovery fails.
    """
    log(f"Discovering open PRs updated in last {cutoff_hours}h for org {ORG}")
    try:
        prs = query_recent_prs(cutoff_hours)
    except Exception as exc:
        log(f"Failed to discover PRs: {exc}")
        sys.exit(1)

    if not prs:
        log("No recent PRs found")
        return

    # Filter to PRs that need action (merge conflicts or failing checks)
    actionable = []
    for pr in prs:
        repo_full = pr["repo_full"]
        pr_number = pr["number"]
        mergeable = pr.get("mergeable")
        if mergeable == "CONFLICTING":
            actionable.append(pr)
            continue
        if has_failing_checks(repo_full, pr_number):
            actionable.append(pr)

    if not actionable:
        log("No PRs with conflicts or failing checks; skipping run")
        return

    processed = 0
    for pr in actionable:
        if processed >= max_prs:
            break
        repo_full = pr.get("repo_full")
        if not repo_full:
            log(f"Skipping PR with missing repo_full: {pr}")
            continue
        try:
            base_dir = ensure_base_clone(repo_full)
            # Dispatch from inside the base clone so TaskDispatcher resolves
            # repository-relative paths correctly.
            with chdir(base_dir):
                dispatcher = TaskDispatcher()
                success = dispatch_agent_for_pr(dispatcher, pr, agent_cli=agent_cli)
                if success:
                    processed += 1
        except Exception as exc:
            # One bad PR must not abort the rest of the batch.
            pr_number = pr.get("number", "unknown")
            log(f"Error processing {repo_full}#{pr_number}: {exc}")

    log(f"Completed orchestration dispatch for {processed} PR(s)")
509
+
510
+
511
+ if __name__ == "__main__":
512
+ parser = argparse.ArgumentParser(description="Orchestrated PR batch runner (/fixpr-only)")
513
+ parser.add_argument("--cutoff-hours", type=int, default=DEFAULT_CUTOFF_HOURS, help="Lookback window for PR updates")
514
+ parser.add_argument("--max-prs", type=int, default=DEFAULT_MAX_PRS, help="Maximum PRs to process per run")
515
+ args = parser.parse_args()
516
+ run_fixpr_batch(args.cutoff_hours, args.max_prs)
File without changes