agent-control-plane 0.1.16 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/README.md +93 -14
  2. package/bin/pr-risk.sh +28 -6
  3. package/hooks/heartbeat-hooks.sh +62 -22
  4. package/npm/bin/agent-control-plane.js +322 -9
  5. package/package.json +1 -1
  6. package/references/architecture.md +8 -0
  7. package/references/control-plane-map.md +6 -2
  8. package/references/release-checklist.md +0 -2
  9. package/tools/bin/agent-github-update-labels +6 -1
  10. package/tools/bin/agent-project-catch-up-issue-pr-links +118 -0
  11. package/tools/bin/agent-project-catch-up-merged-prs +77 -21
  12. package/tools/bin/agent-project-catch-up-scheduled-issue-retries +123 -0
  13. package/tools/bin/agent-project-cleanup-session +84 -0
  14. package/tools/bin/agent-project-heartbeat-loop +10 -3
  15. package/tools/bin/agent-project-reconcile-issue-session +24 -12
  16. package/tools/bin/agent-project-run-claude-session +2 -2
  17. package/tools/bin/agent-project-run-kilo-session +346 -14
  18. package/tools/bin/agent-project-run-ollama-session +658 -0
  19. package/tools/bin/agent-project-run-openclaw-session +27 -25
  20. package/tools/bin/agent-project-run-opencode-session +354 -14
  21. package/tools/bin/agent-project-run-pi-session +479 -0
  22. package/tools/bin/agent-project-worker-status +1 -1
  23. package/tools/bin/flow-config-lib.sh +116 -3
  24. package/tools/bin/flow-resident-worker-lib.sh +1 -1
  25. package/tools/bin/flow-shell-lib.sh +5 -2
  26. package/tools/bin/heartbeat-recovery-preflight.sh +1 -0
  27. package/tools/bin/heartbeat-safe-auto.sh +105 -17
  28. package/tools/bin/install-project-launchd.sh +19 -2
  29. package/tools/bin/prepare-worktree.sh +4 -4
  30. package/tools/bin/profile-activate.sh +2 -2
  31. package/tools/bin/profile-adopt.sh +2 -2
  32. package/tools/bin/project-init.sh +1 -1
  33. package/tools/bin/project-runtimectl.sh +90 -7
  34. package/tools/bin/provider-cooldown-state.sh +14 -14
  35. package/tools/bin/render-flow-config.sh +30 -33
  36. package/tools/bin/run-codex-task.sh +53 -4
  37. package/tools/bin/scaffold-profile.sh +18 -3
  38. package/tools/bin/start-issue-worker.sh +1 -1
  39. package/tools/bin/start-pr-fix-worker.sh +30 -0
  40. package/tools/bin/start-pr-review-worker.sh +31 -0
  41. package/tools/bin/start-resident-issue-loop.sh +4 -4
  42. package/tools/bin/sync-agent-repo.sh +2 -2
  43. package/tools/bin/sync-dependency-baseline.sh +3 -3
  44. package/tools/bin/sync-shared-agent-home.sh +4 -1
  45. package/tools/templates/pr-fix-template.md +3 -7
  46. package/tools/templates/pr-merge-repair-template.md +3 -7
  47. package/tools/templates/pr-review-template.md +2 -1
@@ -54,8 +54,9 @@ for hook_name in "${optional_hooks[@]}"; do
54
54
  fi
55
55
  done
56
56
 
57
- ledger_dir="${state_root}/merged-pr-catchup"
58
- mkdir -p "$ledger_dir"
57
+ merged_ledger_dir="${state_root}/merged-pr-catchup"
58
+ closed_ledger_dir="${state_root}/closed-pr-catchup"
59
+ mkdir -p "$merged_ledger_dir" "$closed_ledger_dir"
59
60
 
60
61
  get_pr_risk_json() {
61
62
  local pr_number="${1:?pr number required}"
@@ -92,16 +93,68 @@ close_issue_if_needed() {
92
93
  fi
93
94
  }
94
95
 
95
- merged_prs_json="$(flow_github_pr_list_json "$repo_slug" merged "$limit")"
96
-
97
- while IFS= read -r pr_number; do
98
- [[ -n "$pr_number" ]] || continue
96
+ process_terminal_pr() {
97
+ local pr_number="${1:?pr number required}"
98
+ local linked_issue_id="${2:-}"
99
+ local merged_at="${3:-}"
100
+ local processed_state="${4:?processed state required}"
101
+ local ledger_dir=""
102
+ local ledger_file=""
103
+
104
+ case "$processed_state" in
105
+ merged) ledger_dir="$merged_ledger_dir" ;;
106
+ closed) ledger_dir="$closed_ledger_dir" ;;
107
+ *)
108
+ echo "unsupported terminal PR state: ${processed_state}" >&2
109
+ return 1
110
+ ;;
111
+ esac
99
112
 
100
113
  ledger_file="${ledger_dir}/${pr_number}.env"
101
114
  if [[ -f "$ledger_file" ]]; then
102
- continue
115
+ return 0
116
+ fi
117
+
118
+ PR_NUMBER="$pr_number" pr_clear_retry || true
119
+ if [[ "$processed_state" == "merged" ]]; then
120
+ close_issue_if_needed "$pr_number" "$linked_issue_id"
121
+ if ! PR_NUMBER="$pr_number" pr_after_merged "$pr_number"; then
122
+ printf 'CATCHUP_FAILED_PR=%s\n' "$pr_number" >&2
123
+ return 1
124
+ fi
125
+ else
126
+ if ! PR_NUMBER="$pr_number" pr_after_closed "$pr_number"; then
127
+ printf 'CATCHUP_FAILED_CLOSED_PR=%s\n' "$pr_number" >&2
128
+ return 1
129
+ fi
130
+ fi
131
+
132
+ if ! PR_NUMBER="$pr_number" pr_cleanup_merged_residue "$pr_number"; then
133
+ printf 'CATCHUP_FAILED_RESIDUE=%s\n' "$pr_number" >&2
134
+ return 1
103
135
  fi
104
136
 
137
+ {
138
+ printf 'PR_NUMBER=%s\n' "$pr_number"
139
+ printf 'ISSUE_ID=%s\n' "$linked_issue_id"
140
+ printf 'PR_STATE=%s\n' "$processed_state"
141
+ printf 'MERGED_AT=%s\n' "$merged_at"
142
+ printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
143
+ } >"$ledger_file"
144
+
145
+ if [[ "$processed_state" == "merged" ]]; then
146
+ printf 'CATCHUP_MERGED_PR=%s\n' "$pr_number"
147
+ else
148
+ printf 'CATCHUP_CLOSED_PR=%s\n' "$pr_number"
149
+ fi
150
+ }
151
+
152
+ merged_prs_json="$(flow_github_pr_list_json "$repo_slug" merged "$limit")"
153
+ closed_prs_json="$(flow_github_pr_list_json "$repo_slug" closed "$limit")"
154
+
155
+ while IFS= read -r pr_number; do
156
+ [[ -n "$pr_number" ]] || continue
157
+
105
158
  risk_json="$(get_pr_risk_json "$pr_number" 2>/dev/null || true)"
106
159
  if [[ -z "$risk_json" ]]; then
107
160
  continue
@@ -114,24 +167,27 @@ while IFS= read -r pr_number; do
114
167
 
115
168
  linked_issue_id="$(jq -r '.linkedIssueId // empty' <<<"$risk_json" 2>/dev/null || true)"
116
169
  merged_at="$(jq -r --arg pr "$pr_number" 'map(select((.number | tostring) == $pr)) | .[0].mergedAt // ""' <<<"$merged_prs_json")"
170
+ process_terminal_pr "$pr_number" "$linked_issue_id" "$merged_at" merged || true
171
+ done < <(jq -r 'sort_by(.mergedAt) | reverse | .[].number' <<<"$merged_prs_json")
117
172
 
118
- PR_NUMBER="$pr_number" pr_clear_retry || true
119
- close_issue_if_needed "$pr_number" "$linked_issue_id"
120
- if ! PR_NUMBER="$pr_number" pr_after_merged "$pr_number"; then
121
- printf 'CATCHUP_FAILED_PR=%s\n' "$pr_number" >&2
173
+ while IFS= read -r pr_number; do
174
+ [[ -n "$pr_number" ]] || continue
175
+
176
+ merged_at="$(jq -r --arg pr "$pr_number" 'map(select((.number | tostring) == $pr)) | .[0].mergedAt // ""' <<<"$closed_prs_json")"
177
+ if [[ -n "$merged_at" ]]; then
122
178
  continue
123
179
  fi
124
- if ! PR_NUMBER="$pr_number" pr_cleanup_merged_residue "$pr_number"; then
125
- printf 'CATCHUP_FAILED_RESIDUE=%s\n' "$pr_number" >&2
180
+
181
+ risk_json="$(get_pr_risk_json "$pr_number" 2>/dev/null || true)"
182
+ if [[ -z "$risk_json" ]]; then
126
183
  continue
127
184
  fi
128
185
 
129
- {
130
- printf 'PR_NUMBER=%s\n' "$pr_number"
131
- printf 'ISSUE_ID=%s\n' "$linked_issue_id"
132
- printf 'MERGED_AT=%s\n' "$merged_at"
133
- printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
134
- } >"$ledger_file"
186
+ is_managed="$(jq -r '.isManagedByAgent // .isAgentBranch // false' <<<"$risk_json" 2>/dev/null || printf 'false\n')"
187
+ if [[ "$is_managed" != "true" ]]; then
188
+ continue
189
+ fi
135
190
 
136
- printf 'CATCHUP_MERGED_PR=%s\n' "$pr_number"
137
- done < <(jq -r 'sort_by(.mergedAt) | reverse | .[].number' <<<"$merged_prs_json")
191
+ linked_issue_id="$(jq -r '.linkedIssueId // empty' <<<"$risk_json" 2>/dev/null || true)"
192
+ process_terminal_pr "$pr_number" "$linked_issue_id" "" closed || true
193
+ done < <(jq -r 'sort_by(.createdAt) | reverse | .[].number' <<<"$closed_prs_json")
@@ -0,0 +1,123 @@
1
+ #!/usr/bin/env bash
2
+ set -euo pipefail
3
+
4
+ usage() {
5
+ cat <<'EOF'
6
+ Usage:
7
+ agent-project-catch-up-scheduled-issue-retries --repo-slug <owner/repo> --state-root <path> --hook-file <path> [--limit <n>]
8
+
9
+ Clear stale retry state for recurring scheduled reporting issues once GitHub
10
+ already reflects the latest terminal status via labels and the issue is not
11
+ currently running.
12
+ EOF
13
+ }
14
+
15
+ repo_slug=""
16
+ state_root=""
17
+ hook_file=""
18
+ limit="100"
19
+
20
+ while [[ $# -gt 0 ]]; do
21
+ case "$1" in
22
+ --repo-slug) repo_slug="${2:-}"; shift 2 ;;
23
+ --state-root) state_root="${2:-}"; shift 2 ;;
24
+ --hook-file) hook_file="${2:-}"; shift 2 ;;
25
+ --limit) limit="${2:-}"; shift 2 ;;
26
+ --help|-h) usage; exit 0 ;;
27
+ *) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
28
+ esac
29
+ done
30
+
31
+ if [[ -z "$repo_slug" || -z "$state_root" || -z "$hook_file" ]]; then
32
+ usage >&2
33
+ exit 1
34
+ fi
35
+
36
+ if [[ ! -f "$hook_file" ]]; then
37
+ echo "missing hook file: $hook_file" >&2
38
+ exit 1
39
+ fi
40
+
41
+ # shellcheck source=/dev/null
42
+ source "$hook_file"
43
+
44
+ if ! declare -F issue_clear_retry >/dev/null 2>&1; then
45
+ issue_clear_retry() { :; }
46
+ fi
47
+
48
+ ledger_dir="${state_root}/scheduled-issue-retry-catchup"
49
+ retry_dir="${state_root}/retries/issues"
50
+ mkdir -p "$ledger_dir" "$retry_dir"
51
+
52
+ issue_has_terminal_scheduled_status() {
53
+ local issue_json="${1:-}"
54
+ ISSUE_JSON="$issue_json" python3 - <<'PY'
55
+ import json
56
+ import os
57
+ import re
58
+ import sys
59
+
60
+ issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
61
+ state = (issue.get("state") or "").upper()
62
+ labels = {str(item.get("name") or "") for item in (issue.get("labels") or [])}
63
+ body = issue.get("body") or ""
64
+
65
+ has_schedule = bool(re.search(r'^\s*(?:Agent schedule|Schedule|Cadence)\s*:\s*(?:every\s+)?\d+\s*[mhd]\s*$', body, re.I | re.M))
66
+ if not has_schedule and "agent-scheduled" not in labels:
67
+ sys.exit(1)
68
+
69
+ if state != "OPEN":
70
+ sys.exit(1)
71
+
72
+ if "agent-running" in labels:
73
+ sys.exit(1)
74
+
75
+ terminal_status_labels = {
76
+ "health-ok",
77
+ "health-not-ok",
78
+ "checks-ok",
79
+ "checks-not-ok",
80
+ "smoke-ok",
81
+ "smoke-not-ok",
82
+ }
83
+ if not (labels & terminal_status_labels):
84
+ sys.exit(1)
85
+
86
+ sys.exit(0)
87
+ PY
88
+ }
89
+
90
+ processed=0
91
+ for retry_file in "$retry_dir"/*.env; do
92
+ [[ -f "$retry_file" ]] || continue
93
+ if [[ "$processed" -ge "$limit" ]]; then
94
+ break
95
+ fi
96
+
97
+ issue_id="$(basename "${retry_file%.env}")"
98
+ [[ -n "$issue_id" ]] || continue
99
+
100
+ ledger_file="${ledger_dir}/${issue_id}.env"
101
+ if [[ -f "$ledger_file" ]]; then
102
+ continue
103
+ fi
104
+
105
+ issue_json="$(flow_github_issue_view_json "$repo_slug" "$issue_id" 2>/dev/null || true)"
106
+ [[ -n "$issue_json" && "$issue_json" != "{}" ]] || continue
107
+
108
+ if ! issue_has_terminal_scheduled_status "$issue_json"; then
109
+ continue
110
+ fi
111
+
112
+ retry_reason="$(awk -F= '/^LAST_REASON=/{print $2; exit}' "$retry_file" 2>/dev/null | tr -d '\r' || true)"
113
+ ISSUE_ID="$issue_id" issue_clear_retry || true
114
+
115
+ {
116
+ printf 'ISSUE_ID=%s\n' "$issue_id"
117
+ printf 'LAST_REASON=%s\n' "$retry_reason"
118
+ printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
119
+ } >"$ledger_file"
120
+
121
+ printf 'CATCHUP_SCHEDULED_ISSUE=%s\n' "$issue_id"
122
+ processed=$((processed + 1))
123
+ done
@@ -131,6 +131,24 @@ canonicalize_existing_dir() {
131
131
  )
132
132
  }
133
133
 
134
+ canonicalize_dir_or_parent_join() {
135
+ local target="${1:-}"
136
+ local resolved=""
137
+
138
+ [[ -n "${target}" ]] || return 1
139
+
140
+ resolved="$(canonicalize_existing_dir "${target}" || true)"
141
+ if [[ -n "${resolved}" ]]; then
142
+ printf '%s\n' "${resolved}"
143
+ return 0
144
+ fi
145
+
146
+ (
147
+ cd "$(dirname "${target}")" 2>/dev/null
148
+ printf '%s/%s\n' "$(pwd -P)" "$(basename "${target}")"
149
+ ) || return 1
150
+ }
151
+
134
152
  path_is_within_root() {
135
153
  local target="${1:-}"
136
154
  local root="${2:-}"
@@ -169,6 +187,8 @@ resident_worktree_protected() {
169
187
  local resident_worktree=""
170
188
  local resident_realpath=""
171
189
  local resolved_resident=""
190
+ local resident_lane_kind=""
191
+ local resident_last_status=""
172
192
 
173
193
  [[ -n "${candidate_path}" && -d "${candidate_path}" ]] || return 1
174
194
  resolved_candidate="$(canonicalize_existing_dir "${candidate_path}" || true)"
@@ -181,12 +201,19 @@ resident_worktree_protected() {
181
201
  [[ -f "${metadata_file}" ]] || continue
182
202
  resident_worktree=""
183
203
  resident_realpath=""
204
+ resident_lane_kind=""
205
+ resident_last_status=""
184
206
  set +u
185
207
  set -a
186
208
  # shellcheck source=/dev/null
187
209
  source "${metadata_file}"
188
210
  set +a
189
211
  set -u
212
+ resident_lane_kind="${RESIDENT_LANE_KIND:-}"
213
+ resident_last_status="${LAST_STATUS:-}"
214
+ if [[ "${resident_lane_kind}" != "recurring" && "${resident_last_status}" != "running" ]]; then
215
+ continue
216
+ fi
190
217
  resident_worktree="${WORKTREE_REALPATH:-${WORKTREE:-}}"
191
218
  [[ -n "${resident_worktree}" && -d "${resident_worktree}" ]] || continue
192
219
  resolved_resident="$(canonicalize_existing_dir "${resident_worktree}" || true)"
@@ -268,6 +295,59 @@ active_resident_run_worktree_protected() {
268
295
  return 1
269
296
  }
270
297
 
298
+ clear_resident_worktree_realpath_references() {
299
+ local removed_path="${1:-}"
300
+ local resolved_removed=""
301
+ local resolved_state_root=""
302
+ local metadata_file=""
303
+ local resident_realpath=""
304
+ local tmp_file=""
305
+ local metadata_worktree_realpath=""
306
+
307
+ [[ -n "${removed_path}" ]] || return 0
308
+
309
+ resolved_removed="$(canonicalize_dir_or_parent_join "${removed_path}" || true)"
310
+ [[ -n "${resolved_removed}" ]] || return 0
311
+
312
+ resolved_state_root="$(derive_state_root || true)"
313
+ [[ -n "${resolved_state_root}" && -d "${resolved_state_root}/resident-workers/issues" ]] || return 0
314
+
315
+ for metadata_file in "${resolved_state_root}"/resident-workers/issues/*/metadata.env; do
316
+ [[ -f "${metadata_file}" ]] || continue
317
+ metadata_worktree_realpath=""
318
+ set +u
319
+ set -a
320
+ # shellcheck source=/dev/null
321
+ source "${metadata_file}"
322
+ set +a
323
+ set -u
324
+ resident_realpath="${WORKTREE_REALPATH:-${metadata_worktree_realpath:-}}"
325
+ [[ -n "${resident_realpath}" ]] || continue
326
+ resident_realpath="$(canonicalize_dir_or_parent_join "${resident_realpath}" || true)"
327
+ [[ -n "${resident_realpath}" ]] || continue
328
+ if [[ "${resident_realpath}" != "${resolved_removed}" ]]; then
329
+ continue
330
+ fi
331
+
332
+ tmp_file="${metadata_file}.tmp.$$"
333
+ awk '
334
+ BEGIN { replaced = 0 }
335
+ /^WORKTREE_REALPATH=/ {
336
+ print "WORKTREE_REALPATH='\'''\''"
337
+ replaced = 1
338
+ next
339
+ }
340
+ { print }
341
+ END {
342
+ if (replaced == 0) {
343
+ print "WORKTREE_REALPATH='\'''\''"
344
+ }
345
+ }
346
+ ' "${metadata_file}" >"${tmp_file}"
347
+ mv "${tmp_file}" "${metadata_file}"
348
+ done
349
+ }
350
+
271
351
  cleanup_with_branch_tool() {
272
352
  local include_path="${1:-yes}"
273
353
  local -a cleanup_args
@@ -355,6 +435,10 @@ if [[ -n "$session" && "$active_tmux_session" != "true" ]]; then
355
435
  )"
356
436
  fi
357
437
 
438
+ if [[ "$skip_worktree_cleanup" != "true" && -n "$worktree_path" && ! -d "$worktree_path" ]]; then
439
+ clear_resident_worktree_realpath_references "$worktree_path"
440
+ fi
441
+
358
442
  if [[ "$cleanup_status" != "0" && -z "$session" ]]; then
359
443
  [[ -n "$cleanup_error" ]] && printf '%s\n' "$cleanup_error" >&2
360
444
  exit "$cleanup_status"
@@ -558,6 +558,9 @@ cleanup_scheduler_caches() {
558
558
  if [[ -n "${pr_risk_cache_dir:-}" && -d "${pr_risk_cache_dir}" ]]; then
559
559
  rm -rf "${pr_risk_cache_dir}" || true
560
560
  fi
561
+ if declare -F heartbeat_invalidate_snapshot_cache >/dev/null 2>&1; then
562
+ heartbeat_invalidate_snapshot_cache
563
+ fi
561
564
  }
562
565
 
563
566
  stage_issue_launch() {
@@ -1482,9 +1485,13 @@ sync_open_agent_issues() {
1482
1485
  --session "${issue_prefix}${issue_id}"
1483
1486
  )"
1484
1487
  status="$(awk -F= '/^STATUS=/{print $2}' <<<"$status_out")"
1485
- if [[ "$status" == "UNKNOWN" ]]; then
1486
- heartbeat_sync_issue_labels "$issue_id" >/dev/null || true
1487
- fi
1488
+ case "$status" in
1489
+ RUNNING)
1490
+ ;;
1491
+ *)
1492
+ heartbeat_sync_issue_labels "$issue_id" >/dev/null || true
1493
+ ;;
1494
+ esac
1488
1495
  done <<<"$running_issue_ids_cache"
1489
1496
  }
1490
1497
 
@@ -640,6 +640,9 @@ try {
640
640
 
641
641
  const changedFilesLower = gitChangedFiles.map((file) => file.toLowerCase());
642
642
  const repoHasScript = (scriptName) => Boolean(packageJson?.scripts && Object.prototype.hasOwnProperty.call(packageJson.scripts, scriptName));
643
+ const rootTestScript = String(packageJson?.scripts?.test || '').trim();
644
+ const rootTestScriptUsesNodeTest = /^node\s+--test(?:\s|$)/.test(rootTestScript);
645
+ const rootTestScriptLooksWatchMode = /\B--watch(?:All)?(?:[=\s]|$)|(?:^|\s)vitest\s+watch(?:\s|$)/.test(rootTestScript);
643
646
  const commandLooksRunnable = (command) => {
644
647
  if (/^npm test(?:\s|$)?/.test(command)) return repoHasScript('test');
645
648
  if (/^pnpm test(?:\s|$)?/.test(command)) return repoHasScript('test');
@@ -648,6 +651,14 @@ const commandLooksRunnable = (command) => {
648
651
  if (/^node\s+--test(?:\s|$)/.test(command)) return true;
649
652
  return true;
650
653
  };
654
+ const rootTestFallbackCommand = () => {
655
+ if (!rootTestScript) return '';
656
+ if (rootTestScriptUsesNodeTest) return 'npm test';
657
+ if (!rootTestScriptLooksWatchMode) return 'npm test';
658
+ if (/\bjest\b/i.test(rootTestScript)) return 'npx jest --runInBand --watchAll=false';
659
+ if (/\bvitest\b/i.test(rootTestScript)) return 'npx vitest run';
660
+ return '';
661
+ };
651
662
 
652
663
  if (promptFile && fs.existsSync(promptFile)) {
653
664
  const lines = fs.readFileSync(promptFile, 'utf8').split(/\r?\n/).slice(0, 40);
@@ -678,15 +689,17 @@ if (promptFile && fs.existsSync(promptFile)) {
678
689
  }
679
690
 
680
691
  const changedTestFiles = [...new Set(gitChangedFiles.filter((file) => /\.(?:spec|test)\.[cm]?[jt]sx?$/.test(file)))];
681
- const rootTestScript = String(packageJson?.scripts?.test || '').trim();
682
- if (/^node\s+--test(?:\s|$)/.test(rootTestScript)) {
692
+ if (rootTestScriptUsesNodeTest) {
683
693
  for (const file of changedTestFiles) {
684
694
  addCommand(`node --test ${file}`);
685
695
  }
686
696
  }
687
697
 
688
- if (commands.length === 0 && rootTestScript) {
689
- addCommand('npm test');
698
+ if (commands.length === 0) {
699
+ const fallbackCommand = rootTestFallbackCommand();
700
+ if (fallbackCommand) {
701
+ addCommand(fallbackCommand);
702
+ }
690
703
  }
691
704
 
692
705
  const filtered = commands.filter((command) => !recordedPassCommands.has(command));
@@ -1382,19 +1395,18 @@ case "$status" in
1382
1395
  failure_reason="$(normalize_issue_failure_reason "${failure_reason:-worker-exit-failed}")"
1383
1396
  schedule_provider_quota_cooldown "${failure_reason}"
1384
1397
  normalize_issue_runner_state "failed" "${LAST_EXIT_CODE:-}" "${failure_reason}"
1385
- if [[ "${result_outcome:-}" == "blocked" && "${result_action:-}" == "host-comment-blocker" ]] \
1386
- || [[ "${failure_reason}" == "provider-quota-limit" ]]; then
1387
- if [[ -z "${result_outcome:-}" ]]; then
1388
- result_outcome="blocked"
1389
- fi
1390
- if [[ -z "${result_action:-}" ]]; then
1391
- result_action="host-comment-blocker"
1392
- fi
1398
+ if [[ "${result_outcome:-}" == "blocked" && "${result_action:-}" == "host-comment-blocker" ]]; then
1393
1399
  if [[ ! -s "${run_dir}/issue-comment.md" ]]; then
1394
1400
  write_issue_comment_artifact "$(build_issue_runtime_blocker_comment "${failure_reason}")" || true
1395
1401
  fi
1396
1402
  post_issue_comment_if_present
1397
1403
  issue_set_reconcile_summary "$status" "$result_outcome" "$result_action" "$failure_reason"
1404
+ elif [[ "${failure_reason}" == "provider-quota-limit" ]]; then
1405
+ if [[ ! -s "${run_dir}/issue-comment.md" ]]; then
1406
+ write_issue_comment_artifact "$(build_issue_runtime_blocker_comment "${failure_reason}")" || true
1407
+ fi
1408
+ post_issue_comment_if_present
1409
+ issue_set_reconcile_summary "$status" "" "" "$failure_reason"
1398
1410
  else
1399
1411
  issue_set_reconcile_summary "$status" "" "" "$failure_reason"
1400
1412
  fi
@@ -774,12 +774,12 @@ fi
774
774
 
775
775
  ${collect_copy_snippet}
776
776
  if [[ "\${status}" -eq 0 ]]; then
777
- write_state completed "\${status}" '' "\${attempt}" "\$((attempt - 1))"
777
+ write_state succeeded "\${status}" '' "\${attempt}" "\$((attempt - 1))"
778
778
  else
779
779
  write_state failed "\${status}" "\${failure_reason}" "\${attempt}" "\$((attempt - 1))"
780
780
  fi
781
781
  ${reconcile_snippet}
782
- printf '\n__CLAUDE_EXIT__:%s\n' "\${status}" | tee -a "\${output_file}"
782
+ printf '\n__CODEX_EXIT__:%s\n' "\${status}" | tee -a "\${output_file}"
783
783
  exit "\${status}"
784
784
  EOF
785
785