agent-control-plane 0.1.16 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/README.md +93 -14
  2. package/bin/pr-risk.sh +28 -6
  3. package/hooks/heartbeat-hooks.sh +62 -22
  4. package/npm/bin/agent-control-plane.js +360 -10
  5. package/package.json +6 -3
  6. package/references/architecture.md +8 -0
  7. package/references/control-plane-map.md +6 -2
  8. package/references/release-checklist.md +0 -2
  9. package/tools/bin/agent-github-update-labels +6 -1
  10. package/tools/bin/agent-project-catch-up-issue-pr-links +118 -0
  11. package/tools/bin/agent-project-catch-up-merged-prs +78 -21
  12. package/tools/bin/agent-project-catch-up-scheduled-issue-retries +123 -0
  13. package/tools/bin/agent-project-cleanup-session +132 -4
  14. package/tools/bin/agent-project-heartbeat-loop +116 -1461
  15. package/tools/bin/agent-project-reconcile-issue-session +90 -117
  16. package/tools/bin/agent-project-reconcile-pr-session +76 -111
  17. package/tools/bin/agent-project-run-claude-session +12 -2
  18. package/tools/bin/agent-project-run-codex-resilient +86 -9
  19. package/tools/bin/agent-project-run-codex-session +16 -5
  20. package/tools/bin/agent-project-run-kilo-session +356 -14
  21. package/tools/bin/agent-project-run-ollama-session +658 -0
  22. package/tools/bin/agent-project-run-openclaw-session +37 -25
  23. package/tools/bin/agent-project-run-opencode-session +364 -14
  24. package/tools/bin/agent-project-run-pi-session +479 -0
  25. package/tools/bin/agent-project-worker-status +11 -8
  26. package/tools/bin/cleanup-worktree.sh +6 -1
  27. package/tools/bin/flow-config-lib.sh +196 -3
  28. package/tools/bin/flow-resident-worker-lib.sh +120 -2
  29. package/tools/bin/flow-shell-lib.sh +29 -2
  30. package/tools/bin/heartbeat-loop-cache-lib.sh +164 -0
  31. package/tools/bin/heartbeat-loop-counting-lib.sh +306 -0
  32. package/tools/bin/heartbeat-loop-pr-strategy-lib.sh +199 -0
  33. package/tools/bin/heartbeat-loop-scheduling-lib.sh +506 -0
  34. package/tools/bin/heartbeat-loop-worker-lib.sh +319 -0
  35. package/tools/bin/heartbeat-recovery-preflight.sh +13 -1
  36. package/tools/bin/heartbeat-safe-auto.sh +119 -20
  37. package/tools/bin/install-project-launchd.sh +19 -2
  38. package/tools/bin/prepare-worktree.sh +4 -4
  39. package/tools/bin/profile-activate.sh +2 -2
  40. package/tools/bin/profile-adopt.sh +2 -2
  41. package/tools/bin/project-init.sh +1 -1
  42. package/tools/bin/project-launchd-bootstrap.sh +11 -8
  43. package/tools/bin/project-runtimectl.sh +90 -7
  44. package/tools/bin/provider-cooldown-state.sh +14 -14
  45. package/tools/bin/reconcile-bootstrap-lib.sh +113 -0
  46. package/tools/bin/render-flow-config.sh +30 -33
  47. package/tools/bin/resident-issue-controller-lib.sh +448 -0
  48. package/tools/bin/resident-issue-queue-status.py +35 -0
  49. package/tools/bin/run-codex-task.sh +53 -4
  50. package/tools/bin/scaffold-profile.sh +18 -3
  51. package/tools/bin/start-issue-worker.sh +1 -1
  52. package/tools/bin/start-pr-fix-worker.sh +30 -0
  53. package/tools/bin/start-pr-review-worker.sh +31 -0
  54. package/tools/bin/start-resident-issue-loop.sh +27 -438
  55. package/tools/bin/sync-agent-repo.sh +2 -2
  56. package/tools/bin/sync-dependency-baseline.sh +3 -3
  57. package/tools/bin/sync-shared-agent-home.sh +4 -1
  58. package/tools/dashboard/app.js +7 -0
  59. package/tools/dashboard/dashboard_snapshot.py +13 -29
  60. package/tools/templates/pr-fix-template.md +3 -7
  61. package/tools/templates/pr-merge-repair-template.md +3 -7
  62. package/tools/templates/pr-review-template.md +2 -1
  63. package/SKILL.md +0 -149
#!/usr/bin/env bash
# Clear stale issue retry state when an issue already has a linked PR comment
# and that PR still exists (open, closed, or merged).
set -euo pipefail

usage() {
  cat <<'EOF'
Usage:
  agent-project-catch-up-issue-pr-links --repo-slug <owner/repo> --state-root <path> --hook-file <path> [--limit <n>]

Clear stale issue retry state when an issue already has a linked PR comment and
that PR still exists (open, closed, or merged).
EOF
}

repo_slug=""
state_root=""
hook_file=""
limit="100"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --repo-slug) repo_slug="${2:-}"; shift 2 ;;
    --state-root) state_root="${2:-}"; shift 2 ;;
    --hook-file) hook_file="${2:-}"; shift 2 ;;
    --limit) limit="${2:-}"; shift 2 ;;
    --help|-h) usage; exit 0 ;;
    *) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
  esac
done

if [[ -z "$repo_slug" || -z "$state_root" || -z "$hook_file" ]]; then
  usage >&2
  exit 1
fi

if [[ ! -f "$hook_file" ]]; then
  echo "missing hook file: $hook_file" >&2
  exit 1
fi

# shellcheck source=/dev/null
source "$hook_file"

# The hook file may not define issue_clear_retry; fall back to a no-op.
if ! declare -F issue_clear_retry >/dev/null 2>&1; then
  issue_clear_retry() { :; }
fi

ledger_dir="${state_root}/linked-pr-issue-catchup"
retry_dir="${state_root}/retries/issues"
mkdir -p "$ledger_dir" "$retry_dir"

# Print the PR number from the newest "Opened PR #<n>" comment on the issue
# (empty when no such comment exists). The last match in each comment body
# wins; across comments the latest createdAt wins.
extract_latest_linked_pr() {
  local issue_json="${1:-}"
  ISSUE_JSON="$issue_json" python3 - <<'PY'
import json
import os
import re

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
latest = ""
latest_at = ""
for comment in issue.get("comments", []) or []:
    body = comment.get("body") or ""
    match = None
    for candidate in re.finditer(r"Opened PR #(\d+)", body):
        match = candidate
    if not match:
        continue
    created = comment.get("createdAt") or ""
    pr_number = match.group(1)
    if created >= latest_at:
        latest_at = created
        latest = pr_number

print(latest)
PY
}

# True when the PR still resolves via the GitHub API (any state).
pr_exists() {
  local pr_number="${1:?pr number required}"
  local pr_json=""
  pr_json="$(flow_github_pr_view_json "$repo_slug" "$pr_number" 2>/dev/null || true)"
  [[ -n "$pr_json" && "$pr_json" != "{}" ]]
}

processed=0
for retry_file in "$retry_dir"/*.env; do
  [[ -f "$retry_file" ]] || continue
  # FIX: --limit was accepted but never enforced; cap the number of issues
  # cleared per run, matching the sibling catch-up scripts.
  if [[ "$processed" -ge "$limit" ]]; then
    break
  fi

  issue_id="$(basename "${retry_file%.env}")"
  [[ -n "$issue_id" ]] || continue

  # FIX: use substr/index (not $2) so values containing '=' are not truncated.
  retry_reason="$(awk -F= '/^LAST_REASON=/{print substr($0, index($0, "=") + 1); exit}' "$retry_file" 2>/dev/null | tr -d '\r' || true)"
  if [[ "$retry_reason" != "host-publish-failed" ]]; then
    continue
  fi

  # Ledger entry present => this issue was already reconciled on a prior run.
  ledger_file="${ledger_dir}/${issue_id}.env"
  if [[ -f "$ledger_file" ]]; then
    continue
  fi

  issue_json="$(flow_github_issue_view_json "$repo_slug" "$issue_id" 2>/dev/null || true)"
  [[ -n "$issue_json" && "$issue_json" != "{}" ]] || continue

  linked_pr="$(extract_latest_linked_pr "$issue_json")"
  [[ -n "$linked_pr" ]] || continue
  if ! pr_exists "$linked_pr"; then
    continue
  fi

  ISSUE_ID="$issue_id" issue_clear_retry || true

  {
    printf 'ISSUE_ID=%s\n' "$issue_id"
    printf 'LINKED_PR=%s\n' "$linked_pr"
    printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  } >"$ledger_file"

  printf 'CATCHUP_LINKED_PR_ISSUE=%s\n' "$issue_id"
  processed=$((processed + 1))
done
@@ -46,6 +46,7 @@ optional_hooks=(
46
46
  pr_cleanup_merged_residue
47
47
  pr_linked_issue_should_close
48
48
  pr_after_merged
49
+ pr_after_closed
49
50
  )
50
51
 
51
52
  for hook_name in "${optional_hooks[@]}"; do
@@ -54,8 +55,9 @@ for hook_name in "${optional_hooks[@]}"; do
54
55
  fi
55
56
  done
56
57
 
57
- ledger_dir="${state_root}/merged-pr-catchup"
58
- mkdir -p "$ledger_dir"
58
+ merged_ledger_dir="${state_root}/merged-pr-catchup"
59
+ closed_ledger_dir="${state_root}/closed-pr-catchup"
60
+ mkdir -p "$merged_ledger_dir" "$closed_ledger_dir"
59
61
 
60
62
  get_pr_risk_json() {
61
63
  local pr_number="${1:?pr number required}"
@@ -92,16 +94,68 @@ close_issue_if_needed() {
92
94
  fi
93
95
  }
94
96
 
95
- merged_prs_json="$(flow_github_pr_list_json "$repo_slug" merged "$limit")"
96
-
97
- while IFS= read -r pr_number; do
98
- [[ -n "$pr_number" ]] || continue
97
# Idempotently finalize a PR that reached a terminal state ("merged" or
# "closed"): clear retry state, run the matching after-hook, clean residue,
# and record a ledger entry so the PR is never processed twice.
process_terminal_pr() {
  local pr_number="${1:?pr number required}"
  local linked_issue_id="${2:-}"
  local merged_at="${3:-}"
  local processed_state="${4:?processed state required}"
  local target_ledger_dir=""
  local target_ledger_file=""

  case "$processed_state" in
    merged) target_ledger_dir="$merged_ledger_dir" ;;
    closed) target_ledger_dir="$closed_ledger_dir" ;;
    *)
      echo "unsupported terminal PR state: ${processed_state}" >&2
      return 1
      ;;
  esac

  # A ledger entry means this PR was already handled on a previous pass.
  target_ledger_file="${target_ledger_dir}/${pr_number}.env"
  if [[ -f "$target_ledger_file" ]]; then
    return 0
  fi

  PR_NUMBER="$pr_number" pr_clear_retry || true

  case "$processed_state" in
    merged)
      close_issue_if_needed "$pr_number" "$linked_issue_id"
      if ! PR_NUMBER="$pr_number" pr_after_merged "$pr_number"; then
        printf 'CATCHUP_FAILED_PR=%s\n' "$pr_number" >&2
        return 1
      fi
      ;;
    closed)
      if ! PR_NUMBER="$pr_number" pr_after_closed "$pr_number"; then
        printf 'CATCHUP_FAILED_CLOSED_PR=%s\n' "$pr_number" >&2
        return 1
      fi
      ;;
  esac

  if ! PR_NUMBER="$pr_number" pr_cleanup_merged_residue "$pr_number"; then
    printf 'CATCHUP_FAILED_RESIDUE=%s\n' "$pr_number" >&2
    return 1
  fi

  {
    printf 'PR_NUMBER=%s\n' "$pr_number"
    printf 'ISSUE_ID=%s\n' "$linked_issue_id"
    printf 'PR_STATE=%s\n' "$processed_state"
    printf 'MERGED_AT=%s\n' "$merged_at"
    printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  } >"$target_ledger_file"

  if [[ "$processed_state" == "merged" ]]; then
    printf 'CATCHUP_MERGED_PR=%s\n' "$pr_number"
  else
    printf 'CATCHUP_CLOSED_PR=%s\n' "$pr_number"
  fi
}
152
+
153
+ merged_prs_json="$(flow_github_pr_list_json "$repo_slug" merged "$limit")"
154
+ closed_prs_json="$(flow_github_pr_list_json "$repo_slug" closed "$limit")"
155
+
156
+ while IFS= read -r pr_number; do
157
+ [[ -n "$pr_number" ]] || continue
158
+
105
159
  risk_json="$(get_pr_risk_json "$pr_number" 2>/dev/null || true)"
106
160
  if [[ -z "$risk_json" ]]; then
107
161
  continue
@@ -114,24 +168,27 @@ while IFS= read -r pr_number; do
114
168
 
115
169
  linked_issue_id="$(jq -r '.linkedIssueId // empty' <<<"$risk_json" 2>/dev/null || true)"
116
170
  merged_at="$(jq -r --arg pr "$pr_number" 'map(select((.number | tostring) == $pr)) | .[0].mergedAt // ""' <<<"$merged_prs_json")"
171
+ process_terminal_pr "$pr_number" "$linked_issue_id" "$merged_at" merged || true
172
+ done < <(jq -r 'sort_by(.mergedAt) | reverse | .[].number' <<<"$merged_prs_json")
117
173
 
118
- PR_NUMBER="$pr_number" pr_clear_retry || true
119
- close_issue_if_needed "$pr_number" "$linked_issue_id"
120
- if ! PR_NUMBER="$pr_number" pr_after_merged "$pr_number"; then
121
- printf 'CATCHUP_FAILED_PR=%s\n' "$pr_number" >&2
174
+ while IFS= read -r pr_number; do
175
+ [[ -n "$pr_number" ]] || continue
176
+
177
+ merged_at="$(jq -r --arg pr "$pr_number" 'map(select((.number | tostring) == $pr)) | .[0].mergedAt // ""' <<<"$closed_prs_json")"
178
+ if [[ -n "$merged_at" ]]; then
122
179
  continue
123
180
  fi
124
- if ! PR_NUMBER="$pr_number" pr_cleanup_merged_residue "$pr_number"; then
125
- printf 'CATCHUP_FAILED_RESIDUE=%s\n' "$pr_number" >&2
181
+
182
+ risk_json="$(get_pr_risk_json "$pr_number" 2>/dev/null || true)"
183
+ if [[ -z "$risk_json" ]]; then
126
184
  continue
127
185
  fi
128
186
 
129
- {
130
- printf 'PR_NUMBER=%s\n' "$pr_number"
131
- printf 'ISSUE_ID=%s\n' "$linked_issue_id"
132
- printf 'MERGED_AT=%s\n' "$merged_at"
133
- printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
134
- } >"$ledger_file"
187
+ is_managed="$(jq -r '.isManagedByAgent // .isAgentBranch // false' <<<"$risk_json" 2>/dev/null || printf 'false\n')"
188
+ if [[ "$is_managed" != "true" ]]; then
189
+ continue
190
+ fi
135
191
 
136
- printf 'CATCHUP_MERGED_PR=%s\n' "$pr_number"
137
- done < <(jq -r 'sort_by(.mergedAt) | reverse | .[].number' <<<"$merged_prs_json")
192
+ linked_issue_id="$(jq -r '.linkedIssueId // empty' <<<"$risk_json" 2>/dev/null || true)"
193
+ process_terminal_pr "$pr_number" "$linked_issue_id" "" closed || true
194
+ done < <(jq -r 'sort_by(.createdAt) | reverse | .[].number' <<<"$closed_prs_json")
#!/usr/bin/env bash
# Clear stale retry state for recurring scheduled reporting issues once GitHub
# already reflects the latest terminal status via labels and the issue is not
# currently running.
set -euo pipefail

usage() {
  cat <<'EOF'
Usage:
  agent-project-catch-up-scheduled-issue-retries --repo-slug <owner/repo> --state-root <path> --hook-file <path> [--limit <n>]

Clear stale retry state for recurring scheduled reporting issues once GitHub
already reflects the latest terminal status via labels and the issue is not
currently running.
EOF
}

repo_slug=""
state_root=""
hook_file=""
limit="100"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --repo-slug) repo_slug="${2:-}"; shift 2 ;;
    --state-root) state_root="${2:-}"; shift 2 ;;
    --hook-file) hook_file="${2:-}"; shift 2 ;;
    --limit) limit="${2:-}"; shift 2 ;;
    --help|-h) usage; exit 0 ;;
    *) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
  esac
done

if [[ -z "$repo_slug" || -z "$state_root" || -z "$hook_file" ]]; then
  usage >&2
  exit 1
fi

if [[ ! -f "$hook_file" ]]; then
  echo "missing hook file: $hook_file" >&2
  exit 1
fi

# shellcheck source=/dev/null
source "$hook_file"

# The hook file may not define issue_clear_retry; fall back to a no-op.
if ! declare -F issue_clear_retry >/dev/null 2>&1; then
  issue_clear_retry() { :; }
fi

ledger_dir="${state_root}/scheduled-issue-retry-catchup"
retry_dir="${state_root}/retries/issues"
mkdir -p "$ledger_dir" "$retry_dir"

# Exit 0 when the issue looks like an open scheduled issue that is not
# currently running and carries at least one terminal status label; exit 1
# otherwise.
issue_has_terminal_scheduled_status() {
  local issue_json="${1:-}"
  ISSUE_JSON="$issue_json" python3 - <<'PY'
import json
import os
import re
import sys

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
state = (issue.get("state") or "").upper()
labels = {str(item.get("name") or "") for item in (issue.get("labels") or [])}
body = issue.get("body") or ""

has_schedule = bool(re.search(r'^\s*(?:Agent schedule|Schedule|Cadence)\s*:\s*(?:every\s+)?\d+\s*[mhd]\s*$', body, re.I | re.M))
if not has_schedule and "agent-scheduled" not in labels:
    sys.exit(1)

if state != "OPEN":
    sys.exit(1)

if "agent-running" in labels:
    sys.exit(1)

terminal_status_labels = {
    "health-ok",
    "health-not-ok",
    "checks-ok",
    "checks-not-ok",
    "smoke-ok",
    "smoke-not-ok",
}
if not (labels & terminal_status_labels):
    sys.exit(1)

sys.exit(0)
PY
}

processed=0
for retry_file in "$retry_dir"/*.env; do
  [[ -f "$retry_file" ]] || continue
  if [[ "$processed" -ge "$limit" ]]; then
    break
  fi

  issue_id="$(basename "${retry_file%.env}")"
  [[ -n "$issue_id" ]] || continue

  # Ledger entry present => this issue was already reconciled on a prior run.
  ledger_file="${ledger_dir}/${issue_id}.env"
  if [[ -f "$ledger_file" ]]; then
    continue
  fi

  issue_json="$(flow_github_issue_view_json "$repo_slug" "$issue_id" 2>/dev/null || true)"
  [[ -n "$issue_json" && "$issue_json" != "{}" ]] || continue

  if ! issue_has_terminal_scheduled_status "$issue_json"; then
    continue
  fi

  # FIX: use substr/index (not $2) so LAST_REASON values containing '=' are
  # carried into the ledger untruncated.
  retry_reason="$(awk -F= '/^LAST_REASON=/{print substr($0, index($0, "=") + 1); exit}' "$retry_file" 2>/dev/null | tr -d '\r' || true)"
  ISSUE_ID="$issue_id" issue_clear_retry || true

  {
    printf 'ISSUE_ID=%s\n' "$issue_id"
    printf 'LAST_REASON=%s\n' "$retry_reason"
    printf 'PROCESSED_AT=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  } >"$ledger_file"

  printf 'CATCHUP_SCHEDULED_ISSUE=%s\n' "$issue_id"
  processed=$((processed + 1))
done
@@ -95,6 +95,7 @@ cleanup_error=""
95
95
  cleanup_mode="noop"
96
96
  orphan_fallback_used="false"
97
97
  active_tmux_session="false"
98
+ archived_dir=""
98
99
 
99
100
  if [[ -n "$session" ]]; then
100
101
  meta_file="${runs_root}/${session}/run.env"
@@ -131,6 +132,24 @@ canonicalize_existing_dir() {
131
132
  )
132
133
  }
133
134
 
135
# Resolve a path to a physical (symlink-free) absolute path. When the target
# itself no longer exists (e.g. it was just removed), fall back to
# canonicalizing its parent directory and re-joining the basename.
canonicalize_dir_or_parent_join() {
  local target="${1:-}"
  local resolved=""

  [[ -n "${target}" ]] || return 1

  resolved="$(canonicalize_existing_dir "${target}" || true)"
  if [[ -n "${resolved}" ]]; then
    printf '%s\n' "${resolved}"
    return 0
  fi

  # FIX: abort the subshell when the parent cannot be entered. Previously the
  # cd failure was silenced (2>/dev/null) and — because callers invoke this
  # inside "$( ... || true )", where set -e is suppressed — pwd -P then
  # reported the caller's current directory, producing a bogus path.
  (
    cd "$(dirname "${target}")" 2>/dev/null || exit 1
    printf '%s/%s\n' "$(pwd -P)" "$(basename "${target}")"
  ) || return 1
}
152
+
134
153
  path_is_within_root() {
135
154
  local target="${1:-}"
136
155
  local root="${2:-}"
@@ -169,6 +188,8 @@ resident_worktree_protected() {
169
188
  local resident_worktree=""
170
189
  local resident_realpath=""
171
190
  local resolved_resident=""
191
+ local resident_lane_kind=""
192
+ local resident_last_status=""
172
193
 
173
194
  [[ -n "${candidate_path}" && -d "${candidate_path}" ]] || return 1
174
195
  resolved_candidate="$(canonicalize_existing_dir "${candidate_path}" || true)"
@@ -181,12 +202,19 @@ resident_worktree_protected() {
181
202
  [[ -f "${metadata_file}" ]] || continue
182
203
  resident_worktree=""
183
204
  resident_realpath=""
205
+ resident_lane_kind=""
206
+ resident_last_status=""
184
207
  set +u
185
208
  set -a
186
209
  # shellcheck source=/dev/null
187
210
  source "${metadata_file}"
188
211
  set +a
189
212
  set -u
213
+ resident_lane_kind="${RESIDENT_LANE_KIND:-}"
214
+ resident_last_status="${LAST_STATUS:-}"
215
+ if [[ "${resident_lane_kind}" != "recurring" && "${resident_last_status}" != "running" ]]; then
216
+ continue
217
+ fi
190
218
  resident_worktree="${WORKTREE_REALPATH:-${WORKTREE:-}}"
191
219
  [[ -n "${resident_worktree}" && -d "${resident_worktree}" ]] || continue
192
220
  resolved_resident="$(canonicalize_existing_dir "${resident_worktree}" || true)"
@@ -268,6 +296,59 @@ active_resident_run_worktree_protected() {
268
296
  return 1
269
297
  }
270
298
 
299
# Blank out WORKTREE_REALPATH in any resident-worker metadata file that still
# points at a worktree directory that was just removed, so later passes do not
# treat the stale path as an active worktree.
clear_resident_worktree_realpath_references() {
  local removed_path="${1:-}"
  local resolved_removed=""
  local resolved_state_root=""
  local metadata_file=""
  local resident_realpath=""
  local tmp_file=""

  [[ -n "${removed_path}" ]] || return 0

  resolved_removed="$(canonicalize_dir_or_parent_join "${removed_path}" || true)"
  [[ -n "${resolved_removed}" ]] || return 0

  resolved_state_root="$(derive_state_root || true)"
  [[ -n "${resolved_state_root}" && -d "${resolved_state_root}/resident-workers/issues" ]] || return 0

  for metadata_file in "${resolved_state_root}"/resident-workers/issues/*/metadata.env; do
    [[ -f "${metadata_file}" ]] || continue
    # FIX: reset WORKTREE_REALPATH before sourcing each metadata file. A file
    # that does not define the key would otherwise inherit the value sourced
    # in a previous iteration and could be rewritten by mistake. (Also drops
    # the old metadata_worktree_realpath fallback, which was never assigned.)
    WORKTREE_REALPATH=""
    set +u
    set -a
    # shellcheck source=/dev/null
    source "${metadata_file}"
    set +a
    set -u
    resident_realpath="${WORKTREE_REALPATH:-}"
    [[ -n "${resident_realpath}" ]] || continue
    resident_realpath="$(canonicalize_dir_or_parent_join "${resident_realpath}" || true)"
    [[ -n "${resident_realpath}" ]] || continue
    if [[ "${resident_realpath}" != "${resolved_removed}" ]]; then
      continue
    fi

    # Rewrite atomically via a temp file: blank the existing assignment, or
    # append one if the file never had it.
    tmp_file="${metadata_file}.tmp.$$"
    awk '
      BEGIN { replaced = 0 }
      /^WORKTREE_REALPATH=/ {
        print "WORKTREE_REALPATH='\'''\''"
        replaced = 1
        next
      }
      { print }
      END {
        if (replaced == 0) {
          print "WORKTREE_REALPATH='\'''\''"
        }
      }
    ' "${metadata_file}" >"${tmp_file}"
    mv "${tmp_file}" "${metadata_file}"
  done
}
351
+
271
352
  cleanup_with_branch_tool() {
272
353
  local include_path="${1:-yes}"
273
354
  local -a cleanup_args
@@ -310,6 +391,47 @@ cleanup_orphan_worktree_dir() {
310
391
  git -C "$repo_root" worktree prune >/dev/null 2>&1 || true
311
392
  }
312
393
 
394
# True when git already tracks this exact path as a registered worktree.
worktree_path_is_registered() {
  local wt_path="${1:-}"
  local listing=""

  [[ -n "${wt_path}" ]] || return 1

  # Exact-line, fixed-string match against the porcelain listing.
  listing="$(git -C "$repo_root" worktree list --porcelain 2>/dev/null || true)"
  grep -F -x -q -- "worktree ${wt_path}" <<<"${listing}"
}
401
+
402
# When cleanup failed, drop a cleanup-warning.txt artifact next to the run's
# archived (preferred) or live session directory so operators can see what
# went wrong. No-op when cleanup succeeded or no directory exists to hold it.
write_cleanup_warning_artifact() {
  local target_dir=""
  local notice_file=""
  local cleanup_error_line=""
  local recorded_at=""

  # Nothing to record on success.
  [[ "${cleanup_status}" != "0" ]] || return 0

  if [[ -n "${archived_dir}" && -d "${archived_dir}" ]]; then
    target_dir="${archived_dir}"
  elif [[ -n "${session}" && -n "${runs_root}" && -d "${runs_root}/${session}" ]]; then
    target_dir="${runs_root}/${session}"
  fi

  [[ -n "${target_dir}" && -d "${target_dir}" ]] || return 0

  # FIX: squeeze runs of spaces with 's/  */ /g' (two spaces then star). The
  # previous 's/ */ /g' also matched the empty string, which inserts a space
  # between every character of the flattened error message.
  cleanup_error_line="$(printf '%s' "${cleanup_error}" | tr '\n' ' ' | sed 's/  */ /g')"
  recorded_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  notice_file="${target_dir}/cleanup-warning.txt"
  {
    printf 'recorded_at=%s\n' "${recorded_at}"
    printf 'session=%s\n' "${session}"
    printf 'mode=%s\n' "${mode}"
    printf 'worktree=%s\n' "${worktree_path}"
    printf 'branch=%s\n' "${branch_name}"
    printf 'cleanup_mode=%s\n' "${cleanup_mode}"
    printf 'cleanup_status=%s\n' "${cleanup_status}"
    if [[ -n "${cleanup_error_line}" ]]; then
      printf 'cleanup_error=%s\n' "${cleanup_error_line}"
    fi
  } >"${notice_file}"
}
434
+
313
435
  if [[ "$active_tmux_session" == "true" ]]; then
314
436
  cleanup_mode="deferred-active-session"
315
437
  elif [[ "$skip_worktree_cleanup" != "true" && -n "${worktree_path}" ]] \
@@ -333,7 +455,7 @@ elif [[ "$skip_worktree_cleanup" != "true" && -n "$branch_name" ]]; then
333
455
  fi
334
456
  fi
335
457
  fi
336
- elif [[ "$skip_worktree_cleanup" != "true" && -n "$worktree_path" ]] && git -C "$repo_root" worktree list --porcelain | rg -F -q "worktree $worktree_path"; then
458
+ elif [[ "$skip_worktree_cleanup" != "true" && -n "$worktree_path" ]] && worktree_path_is_registered "$worktree_path"; then
337
459
  git -C "$repo_root" worktree remove "$worktree_path" --force || true
338
460
  git -C "$repo_root" worktree prune
339
461
  cleanup_mode="worktree"
@@ -353,13 +475,15 @@ if [[ -n "$session" && "$active_tmux_session" != "true" ]]; then
353
475
  --session "$session" \
354
476
  --remove-file "${remove_file:-}"
355
477
  )"
478
+ archived_dir="$(awk -F= '/^ARCHIVED_DIR=/{print substr($0, index($0, "=") + 1); exit}' <<<"${archive_output}")"
356
479
  fi
357
480
 
358
- if [[ "$cleanup_status" != "0" && -z "$session" ]]; then
359
- [[ -n "$cleanup_error" ]] && printf '%s\n' "$cleanup_error" >&2
360
- exit "$cleanup_status"
481
+ if [[ "$skip_worktree_cleanup" != "true" && -n "$worktree_path" && ! -d "$worktree_path" ]]; then
482
+ clear_resident_worktree_realpath_references "$worktree_path"
361
483
  fi
362
484
 
485
+ write_cleanup_warning_artifact
486
+
363
487
  printf 'SESSION=%s\n' "$session"
364
488
  printf 'MODE=%s\n' "$mode"
365
489
  printf 'WORKTREE=%s\n' "$worktree_path"
@@ -383,3 +507,7 @@ fi
383
507
  if [[ "$cleanup_status" != "0" && -n "$cleanup_error" ]]; then
384
508
  printf 'CLEANUP_ERROR=%s\n' "$(printf '%s' "$cleanup_error" | tr '\n' ' ' | sed 's/ */ /g')"
385
509
  fi
510
+
511
+ if [[ "$cleanup_status" != "0" ]]; then
512
+ exit "$cleanup_status"
513
+ fi