agent-control-plane 0.2.0 → 0.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -19
- package/assets/workflow-catalog.json +1 -1
- package/bin/pr-risk.sh +22 -7
- package/bin/sync-pr-labels.sh +1 -1
- package/hooks/heartbeat-hooks.sh +125 -12
- package/hooks/issue-reconcile-hooks.sh +1 -1
- package/hooks/pr-reconcile-hooks.sh +1 -1
- package/npm/bin/agent-control-plane.js +296 -61
- package/package.json +11 -7
- package/tools/bin/agent-github-update-labels +36 -2
- package/tools/bin/agent-project-catch-up-merged-prs +4 -2
- package/tools/bin/agent-project-cleanup-session +49 -5
- package/tools/bin/agent-project-heartbeat-loop +119 -1471
- package/tools/bin/agent-project-publish-issue-pr +6 -3
- package/tools/bin/agent-project-reconcile-issue-session +78 -106
- package/tools/bin/agent-project-reconcile-pr-session +166 -143
- package/tools/bin/agent-project-retry-state +18 -7
- package/tools/bin/agent-project-run-claude-session +10 -0
- package/tools/bin/agent-project-run-codex-resilient +99 -14
- package/tools/bin/agent-project-run-codex-session +16 -5
- package/tools/bin/agent-project-run-kilo-session +10 -0
- package/tools/bin/agent-project-run-openclaw-session +10 -0
- package/tools/bin/agent-project-run-opencode-session +10 -0
- package/tools/bin/agent-project-sync-source-repo-main +163 -0
- package/tools/bin/agent-project-worker-status +10 -7
- package/tools/bin/cleanup-worktree.sh +6 -1
- package/tools/bin/flow-config-lib.sh +1257 -34
- package/tools/bin/flow-resident-worker-lib.sh +119 -1
- package/tools/bin/flow-shell-lib.sh +56 -0
- package/tools/bin/github-core-rate-limit-state.sh +77 -0
- package/tools/bin/github-write-outbox.sh +470 -0
- package/tools/bin/heartbeat-loop-cache-lib.sh +164 -0
- package/tools/bin/heartbeat-loop-counting-lib.sh +306 -0
- package/tools/bin/heartbeat-loop-pr-strategy-lib.sh +199 -0
- package/tools/bin/heartbeat-loop-scheduling-lib.sh +506 -0
- package/tools/bin/heartbeat-loop-worker-lib.sh +319 -0
- package/tools/bin/heartbeat-recovery-preflight.sh +12 -1
- package/tools/bin/heartbeat-safe-auto.sh +56 -3
- package/tools/bin/install-project-launchd.sh +17 -2
- package/tools/bin/project-init.sh +21 -1
- package/tools/bin/project-launchd-bootstrap.sh +16 -9
- package/tools/bin/project-runtimectl.sh +46 -2
- package/tools/bin/reconcile-bootstrap-lib.sh +113 -0
- package/tools/bin/resident-issue-controller-lib.sh +448 -0
- package/tools/bin/scaffold-profile.sh +61 -3
- package/tools/bin/start-pr-fix-worker.sh +47 -10
- package/tools/bin/start-resident-issue-loop.sh +28 -439
- package/tools/dashboard/app.js +37 -1
- package/tools/dashboard/dashboard_snapshot.py +65 -26
- package/tools/templates/pr-fix-template.md +3 -1
- package/tools/templates/pr-merge-repair-template.md +2 -1
- package/SKILL.md +0 -149
- package/references/architecture.md +0 -217
- package/references/commands.md +0 -128
- package/references/control-plane-map.md +0 -124
- package/references/docs-map.md +0 -73
- package/references/release-checklist.md +0 -65
- package/references/repo-map.md +0 -36
- package/tools/bin/split-retained-slice.sh +0 -124
|
@@ -0,0 +1,470 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# github-write-outbox.sh — durable local queue ("outbox") for GitHub write
# operations (labels, comments, PR approvals). Intents are enqueued as JSON
# files and flushed later, so ACP keeps working while GitHub is down or
# rate-limited. Requires: jq, and the sibling flow-config-lib.sh helpers.
set -euo pipefail

# Directory containing this script; used to locate sibling libraries/tools.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-config-lib.sh"
|
|
7
|
+
|
|
8
|
+
# Print CLI usage for all subcommands to stdout.
usage() {
  cat <<'EOF'
Usage:
  github-write-outbox.sh enqueue-labels --repo-slug <owner/repo> --number <id> [--add LABEL]... [--remove LABEL]...
  github-write-outbox.sh enqueue-comment --repo-slug <owner/repo> --number <id> --kind issue|pr --body-file <path>
  github-write-outbox.sh enqueue-approval --repo-slug <owner/repo> --number <id> [--body <text>]
  github-write-outbox.sh flush [--limit <n>]

Persist GitHub write intents locally so ACP can continue operating while GitHub
is unavailable or rate-limited.
EOF
}
|
|
20
|
+
|
|
21
|
+
# Resolve configuration and the outbox directory layout.
CONFIG_YAML="$(resolve_flow_config_yaml "${BASH_SOURCE[0]}")"
STATE_ROOT="$(flow_resolve_state_root "${CONFIG_YAML}")"
# Outbox root is overridable via env (useful for tests / alternate state dirs).
OUTBOX_ROOT="${ACP_GITHUB_OUTBOX_ROOT:-${STATE_ROOT}/github-outbox}"
# Intent lifecycle: pending/ -> sent/ on success, or failed/ on permanent error.
PENDING_DIR="${OUTBOX_ROOT}/pending"
SENT_DIR="${OUTBOX_ROOT}/sent"
FAILED_DIR="${OUTBOX_ROOT}/failed"
# Optional python interpreter; last-resort fallback for hashing in json_hash().
PYTHON_BIN="$(flow_resolve_python_bin || true)"
# First CLI argument selects the subcommand (see usage()).
ACTION="${1:-}"
DEFAULT_APPROVAL_BODY="Automated final review passed. Safe low-risk scope, green checks, and host-side merge approved."

mkdir -p "${PENDING_DIR}" "${SENT_DIR}" "${FAILED_DIR}"
|
|
32
|
+
|
|
33
|
+
json_hash() {
|
|
34
|
+
local payload="${1:-}"
|
|
35
|
+
|
|
36
|
+
if command -v shasum >/dev/null 2>&1; then
|
|
37
|
+
printf '%s' "${payload}" | shasum -a 256 | awk '{print $1}'
|
|
38
|
+
return 0
|
|
39
|
+
fi
|
|
40
|
+
|
|
41
|
+
if command -v sha256sum >/dev/null 2>&1; then
|
|
42
|
+
printf '%s' "${payload}" | sha256sum | awk '{print $1}'
|
|
43
|
+
return 0
|
|
44
|
+
fi
|
|
45
|
+
|
|
46
|
+
if [[ -n "${PYTHON_BIN:-}" ]]; then
|
|
47
|
+
PAYLOAD="${payload}" "${PYTHON_BIN}" - <<'PY'
|
|
48
|
+
import hashlib
|
|
49
|
+
import os
|
|
50
|
+
|
|
51
|
+
print(hashlib.sha256((os.environ.get("PAYLOAD", "")).encode("utf-8")).hexdigest())
|
|
52
|
+
PY
|
|
53
|
+
return 0
|
|
54
|
+
fi
|
|
55
|
+
|
|
56
|
+
return 1
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
# Archive a delivered intent file into the sent/ directory.
outbox_move_sent() {
  local intent_file="${1:?intent file required}"
  local target_name
  target_name="$(basename "${intent_file}")"
  mv "${intent_file}" "${SENT_DIR}/${target_name}"
}
|
|
63
|
+
|
|
64
|
+
# Park a permanently-failed intent file in the failed/ directory for triage.
outbox_move_failed() {
  local intent_file="${1:?intent file required}"
  local target_name
  target_name="$(basename "${intent_file}")"
  mv "${intent_file}" "${FAILED_DIR}/${target_name}"
}
|
|
68
|
+
|
|
69
|
+
# Persist a "labels" write intent (labels to add/remove on an issue or PR).
# The intent filename is content-addressed (number + payload digest), so
# enqueueing the same intent twice is idempotent. Prints OUTBOX_FILE=<path>.
enqueue_labels() {
  local repo_slug=""
  local number=""
  local add_file=""
  local remove_file=""
  local add_json="[]"
  local remove_json="[]"
  local created_at=""
  local payload=""
  local digest=""
  local intent_file=""

  # Labels accumulate in temp files so repeated --add/--remove flags can be
  # deduplicated with jq afterwards.
  add_file="$(mktemp)"
  remove_file="$(mktemp)"
  # BUGFIX: a RETURN trap does not fire on `exit`, so every exiting path
  # below must remove the temp files explicitly or they leak.
  trap 'rm -f "${add_file}" "${remove_file}"' RETURN

  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --repo-slug) repo_slug="${2:-}"; shift 2 ;;
      --number) number="${2:-}"; shift 2 ;;
      --add)
        printf '%s\n' "${2:?missing label after --add}" >>"${add_file}"
        shift 2
        ;;
      --remove)
        printf '%s\n' "${2:?missing label after --remove}" >>"${remove_file}"
        shift 2
        ;;
      --help|-h)
        usage
        rm -f "${add_file}" "${remove_file}"
        exit 0
        ;;
      *)
        echo "unknown argument: $1" >&2
        rm -f "${add_file}" "${remove_file}"
        exit 1
        ;;
    esac
  done

  [[ -n "${repo_slug}" && -n "${number}" ]] || {
    usage >&2
    rm -f "${add_file}" "${remove_file}"
    exit 1
  }

  # Convert the label lists to sorted, deduplicated JSON arrays.
  add_json="$(jq -R . <"${add_file}" | jq -s 'map(select(length > 0)) | unique')"
  remove_json="$(jq -R . <"${remove_file}" | jq -s 'map(select(length > 0)) | unique')"
  created_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  payload="$(
    jq -cn \
      --arg type "labels" \
      --arg repo_slug "${repo_slug}" \
      --arg number "${number}" \
      --arg created_at "${created_at}" \
      --argjson add "${add_json}" \
      --argjson remove "${remove_json}" \
      '{
        type: $type,
        repo_slug: $repo_slug,
        number: $number,
        created_at: $created_at,
        add: $add,
        remove: $remove
      }'
  )"
  # Content-addressed filename: identical intents map to the same file and
  # are written only once.
  digest="$(json_hash "${payload}")"
  intent_file="${PENDING_DIR}/labels-${number}-${digest}.json"
  if [[ ! -f "${intent_file}" ]]; then
    printf '%s\n' "${payload}" >"${intent_file}"
  fi
  printf 'OUTBOX_FILE=%s\n' "${intent_file}"
}
|
|
138
|
+
|
|
139
|
+
# Persist a "comment" write intent for an issue or PR. The comment body is
# read from --body-file; the intent filename embeds the body's SHA-256 so
# identical comments enqueue idempotently. Prints OUTBOX_FILE=<path>.
enqueue_comment() {
  local repo_slug="" number="" kind="" body_file=""
  local body="" body_sha="" created_at="" payload="" intent_file=""

  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --repo-slug) repo_slug="${2:-}"; shift 2 ;;
      --number) number="${2:-}"; shift 2 ;;
      --kind) kind="${2:-}"; shift 2 ;;
      --body-file) body_file="${2:-}"; shift 2 ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  # Validate required arguments with guard clauses.
  if [[ -z "${repo_slug}" || -z "${number}" || -z "${kind}" || -z "${body_file}" ]]; then
    usage >&2
    exit 1
  fi
  if [[ "${kind}" != "issue" && "${kind}" != "pr" ]]; then
    echo "unsupported comment kind: ${kind}" >&2
    exit 1
  fi
  if [[ ! -f "${body_file}" ]]; then
    echo "missing comment body file: ${body_file}" >&2
    exit 1
  fi

  body="$(cat "${body_file}")"
  if [[ -z "${body}" ]]; then
    echo "empty comment body" >&2
    exit 1
  fi

  body_sha="$(json_hash "${body}")"
  created_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  payload="$(
    jq -cn \
      --arg type "comment" \
      --arg repo_slug "${repo_slug}" \
      --arg number "${number}" \
      --arg kind "${kind}" \
      --arg body "${body}" \
      --arg body_sha "${body_sha}" \
      --arg created_at "${created_at}" \
      '{type: $type, repo_slug: $repo_slug, number: $number, kind: $kind, body: $body, body_sha: $body_sha, created_at: $created_at}'
  )"
  intent_file="${PENDING_DIR}/comment-${kind}-${number}-${body_sha}.json"
  # Idempotent write: skip if the identical intent is already queued.
  [[ -f "${intent_file}" ]] || printf '%s\n' "${payload}" >"${intent_file}"
  printf 'OUTBOX_FILE=%s\n' "${intent_file}"
}
|
|
211
|
+
|
|
212
|
+
# Persist an "approval" (PR review APPROVE) write intent. The review body
# defaults to DEFAULT_APPROVAL_BODY; the intent filename embeds the body's
# SHA-256 for idempotency. Prints OUTBOX_FILE=<path>.
enqueue_approval() {
  local repo_slug="" number=""
  local body="${DEFAULT_APPROVAL_BODY}"
  local body_sha="" created_at="" payload="" intent_file=""

  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --repo-slug) repo_slug="${2:-}"; shift 2 ;;
      --number) number="${2:-}"; shift 2 ;;
      --body) body="${2:-}"; shift 2 ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  if [[ -z "${repo_slug}" || -z "${number}" || -z "${body}" ]]; then
    usage >&2
    exit 1
  fi

  body_sha="$(json_hash "${body}")"
  created_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  payload="$(
    jq -cn \
      --arg type "approval" \
      --arg repo_slug "${repo_slug}" \
      --arg number "${number}" \
      --arg body "${body}" \
      --arg body_sha "${body_sha}" \
      --arg created_at "${created_at}" \
      '{type: $type, repo_slug: $repo_slug, number: $number, body: $body, body_sha: $body_sha, created_at: $created_at}'
  )"
  intent_file="${PENDING_DIR}/approval-${number}-${body_sha}.json"
  # Idempotent write: skip if the identical intent is already queued.
  [[ -f "${intent_file}" ]] || printf '%s\n' "${payload}" >"${intent_file}"
  printf 'OUTBOX_FILE=%s\n' "${intent_file}"
}
|
|
265
|
+
|
|
266
|
+
# Deliver one "comment" intent to GitHub.
# Returns: 0 delivered (or an identical comment already exists — dedupe),
#          65 malformed intent (permanent failure, caller moves to failed/),
#          75 transient failure while core-API rate limited (caller stops),
#          1  other transient failure.
flush_comment_intent() {
  local intent_file="${1:?intent file required}"
  local repo_slug=""
  local number=""
  local kind=""
  local body=""
  local existing_json=""
  local post_payload=""

  repo_slug="$(jq -r '.repo_slug // ""' "${intent_file}")"
  number="$(jq -r '.number // ""' "${intent_file}")"
  kind="$(jq -r '.kind // ""' "${intent_file}")"
  body="$(jq -r '.body // ""' "${intent_file}")"

  # Any missing field makes the intent undeliverable — permanent failure.
  [[ -n "${repo_slug}" && -n "${number}" && -n "${kind}" && -n "${body}" ]] || return 65

  # Fetch the target issue/PR (best-effort) to dedupe against existing comments.
  if [[ "${kind}" == "pr" ]]; then
    existing_json="$(flow_github_pr_view_json "${repo_slug}" "${number}" 2>/dev/null || true)"
  else
    existing_json="$(flow_github_issue_view_json "${repo_slug}" "${number}" 2>/dev/null || true)"
  fi

  # If an identical comment body is already posted, treat as delivered.
  if [[ -n "${existing_json}" ]] && jq -e --arg body "${body}" 'any(.comments[]?; .body == $body)' >/dev/null <<<"${existing_json}" 2>/dev/null; then
    return 0
  fi

  # PR comments also go through the issues comments endpoint.
  post_payload="$(jq -cn --arg body "${body}" '{body: $body}')"
  if printf '%s' "${post_payload}" | flow_github_api_repo "${repo_slug}" "issues/${number}/comments" --method POST --input - >/dev/null 2>&1; then
    return 0
  fi

  # POST failed: distinguish rate limiting (stop flushing) from other errors.
  if flow_github_core_rate_limit_active; then
    return 75
  fi

  return 1
}
|
|
303
|
+
|
|
304
|
+
# Deliver one "labels" intent by invoking agent-github-update-labels with the
# recorded add/remove sets. ACP_GITHUB_OUTBOX_DISABLE_ENQUEUE=1 prevents that
# tool from re-enqueueing into this outbox (no recursion).
# Returns: 0 delivered, 65 malformed intent, 75 rate-limited, 1 other failure.
flush_labels_intent() {
  local intent_file="${1:?intent file required}"
  local repo_slug=""
  local number=""
  local -a args=()
  local label=""

  repo_slug="$(jq -r '.repo_slug // ""' "${intent_file}")"
  number="$(jq -r '.number // ""' "${intent_file}")"
  [[ -n "${repo_slug}" && -n "${number}" ]] || return 65

  # Rebuild the CLI argument vector from the stored JSON arrays.
  args=(--repo-slug "${repo_slug}" --number "${number}")
  while IFS= read -r label; do
    [[ -n "${label}" ]] || continue
    args+=(--add "${label}")
  done < <(jq -r '.add[]? // empty' "${intent_file}")
  while IFS= read -r label; do
    [[ -n "${label}" ]] || continue
    args+=(--remove "${label}")
  done < <(jq -r '.remove[]? // empty' "${intent_file}")

  if ACP_GITHUB_OUTBOX_DISABLE_ENQUEUE=1 bash "${SCRIPT_DIR}/agent-github-update-labels" "${args[@]}" >/dev/null 2>&1; then
    return 0
  fi

  # Failure: distinguish rate limiting (stop flushing) from other errors.
  if flow_github_core_rate_limit_active; then
    return 75
  fi

  return 1
}
|
|
335
|
+
|
|
336
|
+
# Deliver one "approval" intent: submit an APPROVE review on the PR, unless an
# identical approval (same state and body) already exists.
# Returns: 0 delivered/deduped, 65 malformed intent, 75 rate-limited, 1 other failure.
flush_approval_intent() {
  local intent_file="${1:?intent file required}"
  local repo_slug=""
  local number=""
  local body=""
  local reviews_json="[]"
  local post_payload=""

  repo_slug="$(jq -r '.repo_slug // ""' "${intent_file}")"
  number="$(jq -r '.number // ""' "${intent_file}")"
  body="$(jq -r '.body // ""' "${intent_file}")"

  [[ -n "${repo_slug}" && -n "${number}" && -n "${body}" ]] || return 65

  # Dedupe: if an APPROVED review with the same body exists, we're done.
  # When the listing call fails AND the core API is rate-limited, stop flushing.
  if reviews_json="$(flow_github_api_repo "${repo_slug}" "pulls/${number}/reviews?per_page=100" 2>/dev/null)"; then
    reviews_json="$(flow_json_or_default "${reviews_json}" '[]')"
    if jq -e --arg body "${body}" 'any(.[]?; (.state // "") == "APPROVED" and (.body // "") == $body)' >/dev/null <<<"${reviews_json}" 2>/dev/null; then
      return 0
    fi
  elif flow_github_core_rate_limit_active; then
    return 75
  fi

  post_payload="$(jq -cn --arg event "APPROVE" --arg body "${body}" '{event: $event, body: $body}')"
  if printf '%s' "${post_payload}" | flow_github_api_repo "${repo_slug}" "pulls/${number}/reviews" --method POST --input - >/dev/null 2>&1; then
    return 0
  fi

  if flow_github_core_rate_limit_active; then
    return 75
  fi

  return 1
}
|
|
370
|
+
|
|
371
|
+
# Drain up to --limit pending intents, oldest filename first.
# Per-intent status protocol (from flush_*_intent):
#   0  -> delivered, move to sent/
#   65 -> malformed/unknown type, move to failed/
#   75 -> rate-limited, stop flushing (intent stays pending)
#   *  -> other transient failure, stop flushing (intent stays pending)
# Prints OUTBOX_FLUSHED=<count> of intents processed this run.
flush_outbox() {
  local limit="25"
  local processed="0"
  local intent_file=""
  local intent_type=""
  local status="0"

  # Drop the leading "flush" action token, then parse flags.
  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --limit) limit="${2:-25}"; shift 2 ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  # Nothing to do, or already rate-limited: exit the script quietly.
  [[ -d "${PENDING_DIR}" ]] || exit 0
  flow_github_core_rate_limit_active && exit 0

  while IFS= read -r intent_file; do
    [[ -n "${intent_file}" ]] || continue
    if (( processed >= limit )); then
      break
    fi

    # Dispatch on the intent's type field; unreadable files fall to "*".
    intent_type="$(jq -r '.type // ""' "${intent_file}" 2>/dev/null || true)"
    case "${intent_type}" in
      labels)
        if flush_labels_intent "${intent_file}"; then
          status="0"
        else
          status="$?"
        fi
        ;;
      comment)
        if flush_comment_intent "${intent_file}"; then
          status="0"
        else
          status="$?"
        fi
        ;;
      approval)
        if flush_approval_intent "${intent_file}"; then
          status="0"
        else
          status="$?"
        fi
        ;;
      *)
        # Unknown intent type: permanent failure.
        status="65"
        ;;
    esac

    case "${status}" in
      0)
        outbox_move_sent "${intent_file}"
        ;;
      65)
        outbox_move_failed "${intent_file}"
        ;;
      75)
        # Rate limited: leave remaining intents pending for the next run.
        break
        ;;
      *)
        # Transient failure: stop and retry this intent on the next run.
        break
        ;;
    esac

    processed=$((processed + 1))
  done < <(find "${PENDING_DIR}" -mindepth 1 -maxdepth 1 -type f -name '*.json' 2>/dev/null | sort)

  printf 'OUTBOX_FLUSHED=%s\n' "${processed}"
}
|
|
447
|
+
|
|
448
|
+
# Subcommand dispatch. Each handler receives the full "$@" (including the
# action token itself) and shifts it off internally.
case "${ACTION}" in
  enqueue-labels)
    enqueue_labels "$@"
    ;;
  enqueue-comment)
    enqueue_comment "$@"
    ;;
  enqueue-approval)
    enqueue_approval "$@"
    ;;
  flush)
    flush_outbox "$@"
    ;;
  --help|-h|"")
    usage
    exit 0
    ;;
  *)
    echo "unknown action: ${ACTION}" >&2
    usage >&2
    exit 1
    ;;
esac
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# heartbeat-loop-cache-lib.sh — scheduler cache management and attribute caching
|
|
3
|
+
|
|
4
|
+
# Reset every per-tick scheduler memo: blank all <name>_cache variables, mark
# their <name>_cache_loaded flags "no", remove the on-disk attribute cache
# dirs, and invalidate the snapshot cache when that hook is defined.
cleanup_scheduler_caches() {
  local cache_name
  local cache_dir

  # Table-driven reset of the paired <name>_cache / <name>_cache_loaded globals.
  for cache_name in \
    tmux_sessions \
    all_running_workers \
    running_issue_workers \
    running_pr_workers \
    completed_workers \
    ready_issue_ids \
    open_agent_pr_ids \
    running_issue_ids \
    exclusive_issue_ids \
    exclusive_pr_ids \
    blocked_recovery_issue_ids \
    ordered_ready_issue_ids \
    due_scheduled_issue_ids \
    due_blocked_recovery_issue_ids; do
    printf -v "${cache_name}_cache" '%s' ""
    printf -v "${cache_name}_cache_loaded" '%s' "no"
  done

  # Best-effort removal of the on-disk attribute/risk cache directories.
  for cache_dir in "${issue_attr_cache_dir:-}" "${pr_attr_cache_dir:-}" "${pr_risk_cache_dir:-}"; do
    if [[ -n "${cache_dir}" && -d "${cache_dir}" ]]; then
      rm -rf "${cache_dir}" || true
    fi
  done

  if declare -F heartbeat_invalidate_snapshot_cache >/dev/null 2>&1; then
    heartbeat_invalidate_snapshot_cache
  fi
}
|
|
46
|
+
|
|
47
|
+
# Derive a filesystem-safe prefix for cache temp dirs from the configured
# issue/PR prefix, falling back to "agent-control-plane".
cache_prefix() {
  local candidate="${issue_prefix:-${pr_prefix:-agent-control-plane}}"
  local safe

  # Map '/' and whitespace to '-', then keep only [[:alnum:]_.-].
  safe="$(printf '%s' "${candidate}" | tr '/[:space:]' '-' | tr -cd '[:alnum:]_.-')"
  [[ -n "${safe}" ]] || safe="agent-control-plane"

  printf '%s\n' "${safe}"
}
|
|
58
|
+
|
|
59
|
+
# Lazily create the temp dir used to memoize per-issue attributes; reuse it
# when the global already points at an existing directory.
ensure_issue_attr_cache_dir() {
  if [[ -n "${issue_attr_cache_dir:-}" && -d "${issue_attr_cache_dir:-}" ]]; then
    return 0
  fi
  issue_attr_cache_dir="$(mktemp -d "${TMPDIR:-/tmp}/$(cache_prefix)-issue-attrs.XXXXXX")"
}
|
|
64
|
+
|
|
65
|
+
# Lazily create the temp dir used to memoize per-PR attributes; reuse it
# when the global already points at an existing directory.
ensure_pr_attr_cache_dir() {
  if [[ -n "${pr_attr_cache_dir:-}" && -d "${pr_attr_cache_dir:-}" ]]; then
    return 0
  fi
  pr_attr_cache_dir="$(mktemp -d "${TMPDIR:-/tmp}/$(cache_prefix)-pr-attrs.XXXXXX")"
}
|
|
70
|
+
|
|
71
|
+
# Lazily create the temp dir used to memoize per-PR risk JSON; reuse it
# when the global already points at an existing directory.
ensure_pr_risk_cache_dir() {
  if [[ -n "${pr_risk_cache_dir:-}" && -d "${pr_risk_cache_dir:-}" ]]; then
    return 0
  fi
  pr_risk_cache_dir="$(mktemp -d "${TMPDIR:-/tmp}/$(cache_prefix)-pr-risk.XXXXXX")"
}
|
|
76
|
+
|
|
77
|
+
pr_risk_runtime_cache_fresh() {
|
|
78
|
+
local cache_file="${1:?cache file required}"
|
|
79
|
+
local modified_at now age
|
|
80
|
+
[[ -f "$cache_file" ]] || return 1
|
|
81
|
+
modified_at="$(stat -f '%m' "$cache_file" 2>/dev/null || true)"
|
|
82
|
+
[[ "$modified_at" =~ ^[0-9]+$ ]] || return 1
|
|
83
|
+
now="$(date +%s)"
|
|
84
|
+
age=$((now - modified_at))
|
|
85
|
+
(( age >= 0 && age <= pr_risk_runtime_cache_ttl_seconds ))
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
# Memoized lookup of a per-issue scheduling attribute. On a cache miss the
# value is computed via the matching heartbeat_issue_* helper and stored as
# <issue_id>.<attr_name> in the attribute cache dir. Unknown attribute names
# return 1 with an error on stderr.
cached_issue_attr() {
  local attr_name="${1:?attr name required}"
  local issue_id="${2:?issue id required}"
  local cache_file attr_value

  ensure_issue_attr_cache_dir
  cache_file="${issue_attr_cache_dir}/${issue_id}.${attr_name}"

  # Cache hit: serve the memoized value.
  if [[ -f "${cache_file}" ]]; then
    cat "${cache_file}"
    return 0
  fi

  case "${attr_name}" in
    heavy)                     attr_value="$(heartbeat_issue_is_heavy "${issue_id}")" ;;
    recurring)                 attr_value="$(heartbeat_issue_is_recurring "${issue_id}")" ;;
    scheduled)                 attr_value="$(heartbeat_issue_is_scheduled "${issue_id}")" ;;
    schedule_interval_seconds) attr_value="$(heartbeat_issue_schedule_interval_seconds "${issue_id}")" ;;
    exclusive)                 attr_value="$(heartbeat_issue_is_exclusive "${issue_id}")" ;;
    *)
      echo "unsupported issue cache attr: ${attr_name}" >&2
      return 1
      ;;
  esac

  printf '%s\n' "${attr_value}" >"${cache_file}"
  printf '%s\n' "${attr_value}"
}
|
|
125
|
+
|
|
126
|
+
# Memoized wrapper around heartbeat_pr_is_exclusive, keyed by PR number in
# the PR attribute cache dir as <pr_number>.exclusive.
cached_pr_is_exclusive() {
  local pr_number="${1:?pr number required}"
  local cache_file attr_value

  ensure_pr_attr_cache_dir
  cache_file="${pr_attr_cache_dir}/${pr_number}.exclusive"

  # Cache hit: serve the memoized value.
  if [[ -f "${cache_file}" ]]; then
    cat "${cache_file}"
    return 0
  fi

  attr_value="$(heartbeat_pr_is_exclusive "${pr_number}")"
  printf '%s\n' "${attr_value}" >"${cache_file}"
  printf '%s\n' "${attr_value}"
}
|
|
141
|
+
|
|
142
|
+
# Memoized PR risk JSON with a two-tier cache: the per-tick cache dir
# (pr_risk_cache_dir) and a TTL-bounded runtime cache shared across ticks
# (pr_risk_runtime_cache_dir). On a full miss, compute via
# heartbeat_pr_risk_json and populate both tiers.
cached_pr_risk_json() {
  local pr_number="${1:?pr number required}"
  local cache_file runtime_cache_file risk_json

  ensure_pr_risk_cache_dir
  cache_file="${pr_risk_cache_dir}/${pr_number}.json"
  # BUGFIX: pr_risk_runtime_cache_dir may be unset or missing on disk; treat
  # the runtime tier as strictly best-effort instead of failing the lookup
  # (the unconditional write used to abort under set -e / set -u).
  runtime_cache_file="${pr_risk_runtime_cache_dir:-}/${pr_number}.json"

  # Tier 1: per-tick cache hit.
  if [[ -f "${cache_file}" ]]; then
    cat "${cache_file}"
    return 0
  fi

  # Tier 2: fresh runtime cache entry — promote it into the per-tick cache.
  if [[ -n "${pr_risk_runtime_cache_dir:-}" ]] && pr_risk_runtime_cache_fresh "${runtime_cache_file}"; then
    cp "${runtime_cache_file}" "${cache_file}"
    cat "${cache_file}"
    return 0
  fi

  # Full miss: compute, then populate both tiers (runtime tier best-effort).
  risk_json="$(heartbeat_pr_risk_json "${pr_number}")"
  printf '%s\n' "${risk_json}" >"${cache_file}"
  if [[ -d "${pr_risk_runtime_cache_dir:-}" ]]; then
    printf '%s\n' "${risk_json}" >"${runtime_cache_file}" || true
  fi
  printf '%s\n' "${risk_json}"
}