agent-control-plane 0.3.0 → 0.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -19
- package/assets/workflow-catalog.json +1 -1
- package/bin/pr-risk.sh +22 -7
- package/bin/sync-pr-labels.sh +1 -1
- package/hooks/heartbeat-hooks.sh +125 -12
- package/hooks/issue-reconcile-hooks.sh +1 -1
- package/hooks/pr-reconcile-hooks.sh +1 -1
- package/npm/bin/agent-control-plane.js +256 -58
- package/package.json +7 -6
- package/tools/bin/agent-github-update-labels +36 -2
- package/tools/bin/agent-project-catch-up-merged-prs +3 -2
- package/tools/bin/agent-project-publish-issue-pr +6 -3
- package/tools/bin/agent-project-reconcile-issue-session +12 -1
- package/tools/bin/agent-project-reconcile-pr-session +90 -32
- package/tools/bin/agent-project-retry-state +18 -7
- package/tools/bin/agent-project-run-codex-resilient +13 -5
- package/tools/bin/agent-project-sync-source-repo-main +163 -0
- package/tools/bin/flow-config-lib.sh +1203 -60
- package/tools/bin/flow-shell-lib.sh +32 -0
- package/tools/bin/github-core-rate-limit-state.sh +77 -0
- package/tools/bin/github-write-outbox.sh +470 -0
- package/tools/bin/heartbeat-loop-scheduling-lib.sh +7 -7
- package/tools/bin/heartbeat-safe-auto.sh +42 -0
- package/tools/bin/install-project-launchd.sh +17 -2
- package/tools/bin/project-init.sh +21 -1
- package/tools/bin/project-launchd-bootstrap.sh +5 -1
- package/tools/bin/project-runtimectl.sh +46 -2
- package/tools/bin/resident-issue-controller-lib.sh +2 -2
- package/tools/bin/scaffold-profile.sh +61 -3
- package/tools/bin/start-pr-fix-worker.sh +47 -10
- package/tools/bin/start-resident-issue-loop.sh +2 -2
- package/tools/dashboard/app.js +30 -1
- package/tools/dashboard/dashboard_snapshot.py +55 -0
- package/tools/templates/pr-fix-template.md +3 -1
- package/tools/templates/pr-merge-repair-template.md +2 -1
- package/references/architecture.md +0 -217
- package/references/commands.md +0 -128
- package/references/control-plane-map.md +0 -124
- package/references/docs-map.md +0 -73
- package/references/release-checklist.md +0 -65
- package/references/repo-map.md +0 -36
- package/tools/bin/resident-issue-queue-status.py +0 -35
- package/tools/bin/split-retained-slice.sh +0 -124
|
@@ -29,6 +29,38 @@ flow_resolve_python_bin() {
|
|
|
29
29
|
return 1
|
|
30
30
|
}
|
|
31
31
|
|
|
32
|
+
# Format a Unix epoch (seconds) as an ISO-8601 UTC timestamp (YYYY-MM-DDTHH:MM:SSZ).
# $1 - epoch seconds; must be a positive integer ("0" is rejected as a sentinel).
# Prints the formatted timestamp on stdout; returns 1 when the input is invalid
# or no formatting backend (GNU date, BSD date, python) is available.
flow_format_epoch_utc() {
  local epoch="${1:-}"
  local python_bin=""
  local formatted=""

  # Reject non-numeric input and the "0" sentinel used for "never/unset".
  if ! [[ "${epoch}" =~ ^[0-9]+$ ]] || [[ "${epoch}" == "0" ]]; then
    return 1
  fi

  # GNU date: -d "@epoch". Try this BEFORE BSD's -r: on GNU, -r means
  # "reference file", so a file in $PWD named like the epoch would silently
  # yield that file's mtime instead of the requested timestamp. Capturing the
  # output once also avoids the double exec the probe-then-print pattern had.
  if formatted="$(date -u -d "@${epoch}" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null)"; then
    printf '%s\n' "${formatted}"
    return 0
  fi

  # BSD/macOS date: -r takes epoch seconds directly.
  if formatted="$(date -u -r "${epoch}" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null)"; then
    printf '%s\n' "${formatted}"
    return 0
  fi

  # Last resort (e.g. busybox date): python resolved via the project helper.
  python_bin="$(flow_resolve_python_bin 2>/dev/null || true)"
  if [[ -n "${python_bin}" ]]; then
    # Propagate python's status instead of unconditionally returning 0.
    if formatted="$("${python_bin}" - "${epoch}" <<'PY'
import datetime
import sys

print(datetime.datetime.fromtimestamp(int(sys.argv[1]), datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"))
PY
    )"; then
      printf '%s\n' "${formatted}"
      return 0
    fi
  fi

  return 1
}
|
|
63
|
+
|
|
32
64
|
# Print the configured compat skill alias (an empty line when unset).
flow_compat_skill_alias() {
  local compat_alias="${AGENT_CONTROL_PLANE_COMPAT_ALIAS:-}"
  printf '%s\n' "${compat_alias}"
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# Thin wrapper around agent-project-retry-state for the shared GitHub core-API
# rate-limit cooldown slot (kind=github, item-id=core-api).
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-config-lib.sh"

usage() {
  cat <<'EOF'
Usage:
  github-core-rate-limit-state.sh get
  github-core-rate-limit-state.sh schedule [reason] [--next-at-epoch <unix-seconds>]
  github-core-rate-limit-state.sh clear
EOF
}

action="${1:-}"
reason="github-api-rate-limit"
next_at_epoch=""

if [[ $# -lt 1 ]]; then
  usage >&2
  exit 1
fi

shift || true
# The positional reason is optional. Only consume $1 as the reason when it
# does not look like a flag, so "schedule --next-at-epoch N" works without an
# explicit reason (previously the flag itself was swallowed as the reason and
# its value then failed as an unknown argument).
if [[ $# -gt 0 && "${1}" != -* ]]; then
  reason="${1}"
  shift
fi

while [[ $# -gt 0 ]]; do
  case "$1" in
    --next-at-epoch)
      next_at_epoch="${2:-}"
      shift 2
      ;;
    --help|-h)
      usage
      exit 0
      ;;
    *)
      echo "unknown argument: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done

case "${action}" in
  get|schedule|clear) ;;
  *)
    usage >&2
    exit 1
    ;;
esac

# Resolve project configuration for the underlying retry-state tool.
CONFIG_YAML="$(resolve_flow_config_yaml "${BASH_SOURCE[0]}")"
STATE_ROOT="$(flow_resolve_state_root "${CONFIG_YAML}")"
COOLDOWNS="$(flow_resolve_retry_cooldowns "${CONFIG_YAML}")"

exec_args=(
  --state-root "${STATE_ROOT}"
  --kind github
  --item-id core-api
  --action "${action}"
  --reason "${reason}"
  --cooldowns "${COOLDOWNS}"
)

# Forward the override only for "schedule" and only when it is a plain epoch.
if [[ "${action}" == "schedule" && "${next_at_epoch}" =~ ^[0-9]+$ ]]; then
  exec_args+=(--next-at-epoch "${next_at_epoch}")
fi

ACP_STATE_ROOT="${STATE_ROOT}" \
ACP_RETRY_COOLDOWNS="${COOLDOWNS}" \
exec bash "${SCRIPT_DIR}/agent-project-retry-state" "${exec_args[@]}"
|
|
@@ -0,0 +1,470 @@
|
|
|
1
|
+
#!/usr/bin/env bash
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-config-lib.sh"

usage() {
  cat <<'EOF'
Usage:
  github-write-outbox.sh enqueue-labels --repo-slug <owner/repo> --number <id> [--add LABEL]... [--remove LABEL]...
  github-write-outbox.sh enqueue-comment --repo-slug <owner/repo> --number <id> --kind issue|pr --body-file <path>
  github-write-outbox.sh enqueue-approval --repo-slug <owner/repo> --number <id> [--body <text>]
  github-write-outbox.sh flush [--limit <n>]

Persist GitHub write intents locally so ACP can continue operating while GitHub
is unavailable or rate-limited.
EOF
}

# Resolve state locations from project config; the outbox root itself can be
# overridden via ACP_GITHUB_OUTBOX_ROOT.
CONFIG_YAML="$(resolve_flow_config_yaml "${BASH_SOURCE[0]}")"
STATE_ROOT="$(flow_resolve_state_root "${CONFIG_YAML}")"
OUTBOX_ROOT="${ACP_GITHUB_OUTBOX_ROOT:-${STATE_ROOT}/github-outbox}"
PENDING_DIR="${OUTBOX_ROOT}/pending"  # intents waiting to be flushed
SENT_DIR="${OUTBOX_ROOT}/sent"        # successfully delivered intents
FAILED_DIR="${OUTBOX_ROOT}/failed"    # malformed/rejected intents, kept for inspection
PYTHON_BIN="$(flow_resolve_python_bin || true)"  # may be empty; json_hash guards it
ACTION="${1:-}"
DEFAULT_APPROVAL_BODY="Automated final review passed. Safe low-risk scope, green checks, and host-side merge approved."

mkdir -p "${PENDING_DIR}" "${SENT_DIR}" "${FAILED_DIR}"
|
|
32
|
+
|
|
33
|
+
json_hash() {
|
|
34
|
+
local payload="${1:-}"
|
|
35
|
+
|
|
36
|
+
if command -v shasum >/dev/null 2>&1; then
|
|
37
|
+
printf '%s' "${payload}" | shasum -a 256 | awk '{print $1}'
|
|
38
|
+
return 0
|
|
39
|
+
fi
|
|
40
|
+
|
|
41
|
+
if command -v sha256sum >/dev/null 2>&1; then
|
|
42
|
+
printf '%s' "${payload}" | sha256sum | awk '{print $1}'
|
|
43
|
+
return 0
|
|
44
|
+
fi
|
|
45
|
+
|
|
46
|
+
if [[ -n "${PYTHON_BIN:-}" ]]; then
|
|
47
|
+
PAYLOAD="${payload}" "${PYTHON_BIN}" - <<'PY'
|
|
48
|
+
import hashlib
|
|
49
|
+
import os
|
|
50
|
+
|
|
51
|
+
print(hashlib.sha256((os.environ.get("PAYLOAD", "")).encode("utf-8")).hexdigest())
|
|
52
|
+
PY
|
|
53
|
+
return 0
|
|
54
|
+
fi
|
|
55
|
+
|
|
56
|
+
return 1
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
# Archive a delivered intent file into sent/.
outbox_move_sent() {
  local src="${1:?intent file required}"
  mv "${src}" "${SENT_DIR}/${src##*/}"
}
|
|
63
|
+
|
|
64
|
+
# Park a rejected/malformed intent file in failed/ for later inspection.
outbox_move_failed() {
  local src="${1:?intent file required}"
  mv "${src}" "${FAILED_DIR}/${src##*/}"
}
|
|
68
|
+
|
|
69
|
+
# Queue a label add/remove intent for an issue/PR as a JSON file under
# pending/, named by payload digest. Prints OUTBOX_FILE=<path>.
enqueue_labels() {
  local repo_slug=""
  local number=""
  local add_file=""
  local remove_file=""
  local add_json="[]"
  local remove_json="[]"
  local created_at=""
  local payload=""
  local digest=""
  local intent_file=""

  # Collect labels in temp files so repeated --add/--remove flags accumulate;
  # the RETURN trap cleans them up on any exit path from this function.
  add_file="$(mktemp)"
  remove_file="$(mktemp)"
  trap 'rm -f "${add_file}" "${remove_file}"' RETURN

  # Drop the action word ("enqueue-labels") the dispatcher forwarded.
  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --repo-slug) repo_slug="${2:-}"; shift 2 ;;
      --number) number="${2:-}"; shift 2 ;;
      --add)
        printf '%s\n' "${2:?missing label after --add}" >>"${add_file}"
        shift 2
        ;;
      --remove)
        printf '%s\n' "${2:?missing label after --remove}" >>"${remove_file}"
        shift 2
        ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  [[ -n "${repo_slug}" && -n "${number}" ]] || {
    usage >&2
    exit 1
  }

  # One label per line -> JSON array, dropping blanks and duplicates.
  add_json="$(jq -R . <"${add_file}" | jq -s 'map(select(length > 0)) | unique')"
  remove_json="$(jq -R . <"${remove_file}" | jq -s 'map(select(length > 0)) | unique')"
  created_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  payload="$(
    jq -cn \
      --arg type "labels" \
      --arg repo_slug "${repo_slug}" \
      --arg number "${number}" \
      --arg created_at "${created_at}" \
      --argjson add "${add_json}" \
      --argjson remove "${remove_json}" \
      '{
        type: $type,
        repo_slug: $repo_slug,
        number: $number,
        created_at: $created_at,
        add: $add,
        remove: $remove
      }'
  )"
  # Content-addressed filename: writing an identical payload twice is a no-op.
  # NOTE(review): created_at is part of the hashed payload, so the same label
  # change enqueued at a different second creates a distinct intent file —
  # confirm this duplication is intended.
  digest="$(json_hash "${payload}")"
  intent_file="${PENDING_DIR}/labels-${number}-${digest}.json"
  if [[ ! -f "${intent_file}" ]]; then
    printf '%s\n' "${payload}" >"${intent_file}"
  fi
  printf 'OUTBOX_FILE=%s\n' "${intent_file}"
}
|
|
138
|
+
|
|
139
|
+
# Queue an issue/PR comment intent from a body file. The intent filename is
# keyed by the body's SHA-256 so re-enqueueing identical text is a no-op.
# Prints OUTBOX_FILE=<path>.
enqueue_comment() {
  local repo_slug=""
  local number=""
  local kind=""
  local body_file=""
  local body=""
  local body_sha=""
  local created_at=""
  local payload=""
  local intent_file=""

  # Drop the action word ("enqueue-comment") the dispatcher forwarded.
  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --repo-slug) repo_slug="${2:-}"; shift 2 ;;
      --number) number="${2:-}"; shift 2 ;;
      --kind) kind="${2:-}"; shift 2 ;;
      --body-file) body_file="${2:-}"; shift 2 ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  [[ -n "${repo_slug}" && -n "${number}" && -n "${kind}" && -n "${body_file}" ]] || {
    usage >&2
    exit 1
  }
  [[ "${kind}" == "issue" || "${kind}" == "pr" ]] || {
    echo "unsupported comment kind: ${kind}" >&2
    exit 1
  }
  [[ -f "${body_file}" ]] || {
    echo "missing comment body file: ${body_file}" >&2
    exit 1
  }

  # NOTE(review): command substitution strips trailing newlines from the body.
  body="$(cat "${body_file}")"
  [[ -n "${body}" ]] || {
    echo "empty comment body" >&2
    exit 1
  }

  body_sha="$(json_hash "${body}")"
  created_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  payload="$(
    jq -cn \
      --arg type "comment" \
      --arg repo_slug "${repo_slug}" \
      --arg number "${number}" \
      --arg kind "${kind}" \
      --arg body "${body}" \
      --arg body_sha "${body_sha}" \
      --arg created_at "${created_at}" \
      '{
        type: $type,
        repo_slug: $repo_slug,
        number: $number,
        kind: $kind,
        body: $body,
        body_sha: $body_sha,
        created_at: $created_at
      }'
  )"
  intent_file="${PENDING_DIR}/comment-${kind}-${number}-${body_sha}.json"
  if [[ ! -f "${intent_file}" ]]; then
    printf '%s\n' "${payload}" >"${intent_file}"
  fi
  printf 'OUTBOX_FILE=%s\n' "${intent_file}"
}
|
|
211
|
+
|
|
212
|
+
# Queue a PR approval intent (DEFAULT_APPROVAL_BODY unless --body is given).
# The intent filename is keyed by the body's SHA-256, deduplicating identical
# approvals. Prints OUTBOX_FILE=<path>.
enqueue_approval() {
  local repo_slug=""
  local number=""
  local body="${DEFAULT_APPROVAL_BODY}"
  local body_sha=""
  local created_at=""
  local payload=""
  local intent_file=""

  # Drop the action word ("enqueue-approval") the dispatcher forwarded.
  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --repo-slug) repo_slug="${2:-}"; shift 2 ;;
      --number) number="${2:-}"; shift 2 ;;
      --body) body="${2:-}"; shift 2 ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  [[ -n "${repo_slug}" && -n "${number}" && -n "${body}" ]] || {
    usage >&2
    exit 1
  }

  body_sha="$(json_hash "${body}")"
  created_at="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
  payload="$(
    jq -cn \
      --arg type "approval" \
      --arg repo_slug "${repo_slug}" \
      --arg number "${number}" \
      --arg body "${body}" \
      --arg body_sha "${body_sha}" \
      --arg created_at "${created_at}" \
      '{
        type: $type,
        repo_slug: $repo_slug,
        number: $number,
        body: $body,
        body_sha: $body_sha,
        created_at: $created_at
      }'
  )"
  intent_file="${PENDING_DIR}/approval-${number}-${body_sha}.json"
  if [[ ! -f "${intent_file}" ]]; then
    printf '%s\n' "${payload}" >"${intent_file}"
  fi
  printf 'OUTBOX_FILE=%s\n' "${intent_file}"
}
|
|
265
|
+
|
|
266
|
+
# Deliver one queued comment intent. Posting is skipped when an identical
# comment already exists on the target (idempotent replay).
# Returns: 0 delivered or duplicate, 65 malformed intent (caller moves it to
# failed/), 75 rate-limited (caller stops the pass), 1 transient failure.
flush_comment_intent() {
  local intent_file="${1:?intent file required}"
  local repo_slug=""
  local number=""
  local kind=""
  local body=""
  local existing_json=""
  local post_payload=""

  repo_slug="$(jq -r '.repo_slug // ""' "${intent_file}")"
  number="$(jq -r '.number // ""' "${intent_file}")"
  kind="$(jq -r '.kind // ""' "${intent_file}")"
  body="$(jq -r '.body // ""' "${intent_file}")"

  [[ -n "${repo_slug}" && -n "${number}" && -n "${kind}" && -n "${body}" ]] || return 65

  # Fetch the current comments; best-effort, empty on API failure.
  if [[ "${kind}" == "pr" ]]; then
    existing_json="$(flow_github_pr_view_json "${repo_slug}" "${number}" 2>/dev/null || true)"
  else
    existing_json="$(flow_github_issue_view_json "${repo_slug}" "${number}" 2>/dev/null || true)"
  fi

  # Idempotence: treat an already-present identical body as delivered.
  if [[ -n "${existing_json}" ]] && jq -e --arg body "${body}" 'any(.comments[]?; .body == $body)' >/dev/null <<<"${existing_json}" 2>/dev/null; then
    return 0
  fi

  # GitHub uses the issues comments endpoint for both issues and PRs.
  post_payload="$(jq -cn --arg body "${body}" '{body: $body}')"
  if printf '%s' "${post_payload}" | flow_github_api_repo "${repo_slug}" "issues/${number}/comments" --method POST --input - >/dev/null 2>&1; then
    return 0
  fi

  if flow_github_core_rate_limit_active; then
    return 75
  fi

  return 1
}
|
|
303
|
+
|
|
304
|
+
# Deliver one queued label intent by replaying it through
# agent-github-update-labels, with re-enqueueing disabled so a failure there
# cannot loop back into this outbox.
# Returns: 0 applied, 65 malformed intent, 75 rate-limited, 1 failure.
flush_labels_intent() {
  local intent_file="${1:?intent file required}"
  local repo_slug=""
  local number=""
  local -a args=()
  local label=""

  repo_slug="$(jq -r '.repo_slug // ""' "${intent_file}")"
  number="$(jq -r '.number // ""' "${intent_file}")"
  [[ -n "${repo_slug}" && -n "${number}" ]] || return 65

  # Rebuild the original CLI invocation from the stored add/remove arrays.
  args=(--repo-slug "${repo_slug}" --number "${number}")
  while IFS= read -r label; do
    [[ -n "${label}" ]] || continue
    args+=(--add "${label}")
  done < <(jq -r '.add[]? // empty' "${intent_file}")
  while IFS= read -r label; do
    [[ -n "${label}" ]] || continue
    args+=(--remove "${label}")
  done < <(jq -r '.remove[]? // empty' "${intent_file}")

  if ACP_GITHUB_OUTBOX_DISABLE_ENQUEUE=1 bash "${SCRIPT_DIR}/agent-github-update-labels" "${args[@]}" >/dev/null 2>&1; then
    return 0
  fi

  if flow_github_core_rate_limit_active; then
    return 75
  fi

  return 1
}
|
|
335
|
+
|
|
336
|
+
# Deliver one queued PR approval. Skipped when an APPROVED review with the
# same body already exists (idempotent replay).
# Returns: 0 approved or duplicate, 65 malformed intent, 75 rate-limited,
# 1 transient failure.
flush_approval_intent() {
  local intent_file="${1:?intent file required}"
  local repo_slug=""
  local number=""
  local body=""
  local reviews_json="[]"
  local post_payload=""

  repo_slug="$(jq -r '.repo_slug // ""' "${intent_file}")"
  number="$(jq -r '.number // ""' "${intent_file}")"
  body="$(jq -r '.body // ""' "${intent_file}")"

  [[ -n "${repo_slug}" && -n "${number}" && -n "${body}" ]] || return 65

  # Dedupe against existing reviews; on API failure fall through to the POST
  # unless the failure was the rate limit.
  if reviews_json="$(flow_github_api_repo "${repo_slug}" "pulls/${number}/reviews?per_page=100" 2>/dev/null)"; then
    reviews_json="$(flow_json_or_default "${reviews_json}" '[]')"
    if jq -e --arg body "${body}" 'any(.[]?; (.state // "") == "APPROVED" and (.body // "") == $body)' >/dev/null <<<"${reviews_json}" 2>/dev/null; then
      return 0
    fi
  elif flow_github_core_rate_limit_active; then
    return 75
  fi

  post_payload="$(jq -cn --arg event "APPROVE" --arg body "${body}" '{event: $event, body: $body}')"
  if printf '%s' "${post_payload}" | flow_github_api_repo "${repo_slug}" "pulls/${number}/reviews" --method POST --input - >/dev/null 2>&1; then
    return 0
  fi

  if flow_github_core_rate_limit_active; then
    return 75
  fi

  return 1
}
|
|
370
|
+
|
|
371
|
+
# Flush up to --limit pending intents, oldest-first (lexicographic filename
# order). Per-intent handling: status 0 -> sent/, 65 -> failed/, 75 (rate
# limited) or anything else -> stop this pass and retry later.
# Prints OUTBOX_FLUSHED=<count of intents moved this pass>.
flush_outbox() {
  local limit="25"
  local processed="0"
  local intent_file=""
  local intent_type=""
  local status="0"

  # Drop the action word ("flush") the dispatcher forwarded.
  shift
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --limit) limit="${2:-25}"; shift 2 ;;
      --help|-h) usage; exit 0 ;;
      *)
        echo "unknown argument: $1" >&2
        exit 1
        ;;
    esac
  done

  # Guard the arithmetic comparison below: a non-numeric --limit value would
  # otherwise abort the whole pass with an arithmetic error under set -e.
  [[ "${limit}" =~ ^[0-9]+$ ]] || limit="25"

  [[ -d "${PENDING_DIR}" ]] || exit 0
  # Nothing to do while the shared core-API cooldown is active.
  flow_github_core_rate_limit_active && exit 0

  while IFS= read -r intent_file; do
    [[ -n "${intent_file}" ]] || continue
    if (( processed >= limit )); then
      break
    fi

    intent_type="$(jq -r '.type // ""' "${intent_file}" 2>/dev/null || true)"
    case "${intent_type}" in
      labels)
        if flush_labels_intent "${intent_file}"; then
          status="0"
        else
          status="$?"
        fi
        ;;
      comment)
        if flush_comment_intent "${intent_file}"; then
          status="0"
        else
          status="$?"
        fi
        ;;
      approval)
        if flush_approval_intent "${intent_file}"; then
          status="0"
        else
          status="$?"
        fi
        ;;
      *)
        # Unknown or unreadable intent: park it in failed/ for inspection.
        status="65"
        ;;
    esac

    case "${status}" in
      0)
        outbox_move_sent "${intent_file}"
        ;;
      65)
        outbox_move_failed "${intent_file}"
        ;;
      75)
        break
        ;;
      *)
        break
        ;;
    esac

    processed=$((processed + 1))
  done < <(find "${PENDING_DIR}" -mindepth 1 -maxdepth 1 -type f -name '*.json' 2>/dev/null | sort)

  printf 'OUTBOX_FLUSHED=%s\n' "${processed}"
}
|
|
447
|
+
|
|
448
|
+
# ── Action dispatch ──────────────────────────────────────────────────────────
# Each handler receives the full original argv (including the action word)
# and performs its own leading shift.
case "${ACTION}" in
  enqueue-labels)
    enqueue_labels "$@"
    ;;
  enqueue-comment)
    enqueue_comment "$@"
    ;;
  enqueue-approval)
    enqueue_approval "$@"
    ;;
  flush)
    flush_outbox "$@"
    ;;
  --help|-h|"")
    usage
    exit 0
    ;;
  *)
    echo "unknown action: ${ACTION}" >&2
    usage >&2
    exit 1
    ;;
esac
|
|
@@ -268,9 +268,9 @@ record_scheduled_issue_launch() {
|
|
|
268
268
|
cat >"$state_file" <<EOF
|
|
269
269
|
INTERVAL_SECONDS=${interval_seconds}
|
|
270
270
|
LAST_STARTED_EPOCH=${now_epoch}
|
|
271
|
-
LAST_STARTED_AT=$(
|
|
271
|
+
LAST_STARTED_AT=$(flow_format_epoch_utc "$now_epoch")
|
|
272
272
|
NEXT_DUE_EPOCH=${next_due_epoch}
|
|
273
|
-
NEXT_DUE_AT=$(
|
|
273
|
+
NEXT_DUE_AT=$(flow_format_epoch_utc "$next_due_epoch")
|
|
274
274
|
UPDATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
|
275
275
|
EOF
|
|
276
276
|
}
|
|
@@ -299,12 +299,12 @@ record_scheduled_issue_result() {
|
|
|
299
299
|
cat >"$state_file" <<EOF
|
|
300
300
|
INTERVAL_SECONDS=${interval_seconds}
|
|
301
301
|
LAST_STARTED_EPOCH=${last_started_epoch}
|
|
302
|
-
LAST_STARTED_AT=$(if [[ "$last_started_epoch" =~ ^[0-9]+$ ]] && (( last_started_epoch > 0 )); then
|
|
302
|
+
LAST_STARTED_AT=$(if [[ "$last_started_epoch" =~ ^[0-9]+$ ]] && (( last_started_epoch > 0 )); then flow_format_epoch_utc "$last_started_epoch"; fi)
|
|
303
303
|
LAST_RESULT_STATUS=${result_status}
|
|
304
304
|
LAST_RESULT_EPOCH=${now_epoch}
|
|
305
|
-
LAST_RESULT_AT=$(
|
|
305
|
+
LAST_RESULT_AT=$(flow_format_epoch_utc "$now_epoch")
|
|
306
306
|
NEXT_DUE_EPOCH=${next_due_epoch}
|
|
307
|
-
NEXT_DUE_AT=$(if [[ "$next_due_epoch" =~ ^[0-9]+$ ]] && (( next_due_epoch > 0 )); then
|
|
307
|
+
NEXT_DUE_AT=$(if [[ "$next_due_epoch" =~ ^[0-9]+$ ]] && (( next_due_epoch > 0 )); then flow_format_epoch_utc "$next_due_epoch"; fi)
|
|
308
308
|
UPDATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
|
309
309
|
EOF
|
|
310
310
|
}
|
|
@@ -361,14 +361,14 @@ record_blocked_recovery_issue_launch() {
|
|
|
361
361
|
next_due_at=""
|
|
362
362
|
if [[ "${blocked_recovery_cooldown_seconds:-}" =~ ^[1-9][0-9]*$ ]]; then
|
|
363
363
|
next_due_epoch=$((now_epoch + blocked_recovery_cooldown_seconds))
|
|
364
|
-
next_due_at="$(
|
|
364
|
+
next_due_at="$(flow_format_epoch_utc "$next_due_epoch")"
|
|
365
365
|
fi
|
|
366
366
|
|
|
367
367
|
state_file="$(blocked_recovery_state_file "$issue_id")"
|
|
368
368
|
cat >"$state_file" <<EOF
|
|
369
369
|
LANE=blocked-recovery
|
|
370
370
|
LAST_STARTED_EPOCH=${now_epoch}
|
|
371
|
-
LAST_STARTED_AT=$(
|
|
371
|
+
LAST_STARTED_AT=$(flow_format_epoch_utc "$now_epoch")
|
|
372
372
|
NEXT_DUE_EPOCH=${next_due_epoch}
|
|
373
373
|
NEXT_DUE_AT=${next_due_at}
|
|
374
374
|
UPDATED_AT=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
|
@@ -615,12 +615,35 @@ else
|
|
|
615
615
|
exit "${loop_status}"
|
|
616
616
|
fi
|
|
617
617
|
|
|
618
|
+
# ── Flush local GitHub write outbox ────────────────────────────────────────────
|
|
619
|
+
GITHUB_OUTBOX_FLUSH_LIMIT="${ACP_GITHUB_OUTBOX_FLUSH_LIMIT:-${F_LOSNING_GITHUB_OUTBOX_FLUSH_LIMIT:-25}}"
|
|
620
|
+
GITHUB_OUTBOX_FLUSH_TIMEOUT_SECONDS="${ACP_GITHUB_OUTBOX_FLUSH_TIMEOUT_SECONDS:-${F_LOSNING_GITHUB_OUTBOX_FLUSH_TIMEOUT_SECONDS:-30}}"
|
|
621
|
+
if [[ -x "${FLOW_TOOLS_DIR}/github-write-outbox.sh" ]]; then
|
|
622
|
+
printf '[%s] github-outbox flush start\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
|
623
|
+
if run_with_timeout "${GITHUB_OUTBOX_FLUSH_TIMEOUT_SECONDS}" \
|
|
624
|
+
env \
|
|
625
|
+
ACP_STATE_ROOT="$STATE_ROOT" \
|
|
626
|
+
F_LOSNING_STATE_ROOT="$STATE_ROOT" \
|
|
627
|
+
ACP_RUNS_ROOT="$RUNS_ROOT" \
|
|
628
|
+
F_LOSNING_RUNS_ROOT="$RUNS_ROOT" \
|
|
629
|
+
bash "${FLOW_TOOLS_DIR}/github-write-outbox.sh" flush --limit "${GITHUB_OUTBOX_FLUSH_LIMIT}"; then
|
|
630
|
+
printf '[%s] github-outbox flush end status=0\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
|
631
|
+
else
|
|
632
|
+
github_outbox_status=$?
|
|
633
|
+
if [[ "${github_outbox_status}" -eq 124 ]]; then
|
|
634
|
+
printf 'GITHUB_OUTBOX_FLUSH_TIMEOUT=yes\n'
|
|
635
|
+
fi
|
|
636
|
+
printf '[%s] github-outbox flush end status=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" "${github_outbox_status}"
|
|
637
|
+
fi
|
|
638
|
+
fi
|
|
639
|
+
|
|
618
640
|
# ── Throttled catch-up passes ──────────────────────────────────────────────────
|
|
619
641
|
# These scripts fetch merged/closed PRs and linked issues which change rarely.
|
|
620
642
|
# Run them at most once every CATCHUP_INTERVAL_SECONDS (default 300 = 5 min)
|
|
621
643
|
# to avoid burning API quota on every heartbeat cycle.
|
|
622
644
|
CATCHUP_INTERVAL_SECONDS="${ACP_CATCHUP_INTERVAL_SECONDS:-${F_LOSNING_CATCHUP_INTERVAL_SECONDS:-300}}"
|
|
623
645
|
CATCHUP_STAMP_FILE="${STATE_ROOT}/last-catchup-timestamp"
|
|
646
|
+
SOURCE_REPO_SYNC_TIMEOUT_SECONDS="${ACP_SOURCE_REPO_SYNC_TIMEOUT_SECONDS:-${F_LOSNING_SOURCE_REPO_SYNC_TIMEOUT_SECONDS:-45}}"
|
|
624
647
|
_catchup_now="$(date +%s)"
|
|
625
648
|
_catchup_last="0"
|
|
626
649
|
if [[ -f "${CATCHUP_STAMP_FILE}" ]]; then
|
|
@@ -648,6 +671,25 @@ if [[ "${_catchup_age}" -ge "${CATCHUP_INTERVAL_SECONDS}" ]]; then
|
|
|
648
671
|
printf '[%s] merged-pr catchup end status=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" "${catchup_status}"
|
|
649
672
|
fi
|
|
650
673
|
|
|
674
|
+
if [[ -x "${FLOW_TOOLS_DIR}/agent-project-sync-source-repo-main" ]]; then
|
|
675
|
+
printf '[%s] source-repo main sync start\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
|
676
|
+
if run_with_timeout "${SOURCE_REPO_SYNC_TIMEOUT_SECONDS}" \
|
|
677
|
+
env \
|
|
678
|
+
ACP_RUNS_ROOT="$RUNS_ROOT" \
|
|
679
|
+
F_LOSNING_RUNS_ROOT="$RUNS_ROOT" \
|
|
680
|
+
ACP_STATE_ROOT="$STATE_ROOT" \
|
|
681
|
+
F_LOSNING_STATE_ROOT="$STATE_ROOT" \
|
|
682
|
+
bash "${FLOW_TOOLS_DIR}/agent-project-sync-source-repo-main"; then
|
|
683
|
+
printf '[%s] source-repo main sync end status=0\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
|
684
|
+
else
|
|
685
|
+
source_repo_sync_status=$?
|
|
686
|
+
if [[ "${source_repo_sync_status}" -eq 124 ]]; then
|
|
687
|
+
printf 'SOURCE_REPO_SYNC_TIMEOUT=yes\n'
|
|
688
|
+
fi
|
|
689
|
+
printf '[%s] source-repo main sync end status=%s\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" "${source_repo_sync_status}"
|
|
690
|
+
fi
|
|
691
|
+
fi
|
|
692
|
+
|
|
651
693
|
printf '[%s] linked-pr issue catchup start\n' "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
|
|
652
694
|
if run_with_timeout "${CATCHUP_TIMEOUT_SECONDS}" \
|
|
653
695
|
env \
|