agent-control-plane 0.4.9 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +109 -13
- package/npm/bin/agent-control-plane.js +1 -1
- package/package.json +39 -33
- package/tools/bin/debug-session.sh +106 -0
- package/tools/bin/flow-config-lib.sh +13 -3508
- package/tools/bin/flow-execution-lib.sh +243 -0
- package/tools/bin/flow-forge-lib.sh +1770 -0
- package/tools/bin/flow-profile-lib.sh +335 -0
- package/tools/bin/flow-provider-lib.sh +981 -0
- package/tools/bin/flow-runtime-doctor-linux.sh +136 -0
- package/tools/bin/flow-runtime-doctor.sh +5 -1
- package/tools/bin/flow-session-lib.sh +317 -0
- package/tools/bin/install-project-systemd.sh +255 -0
- package/tools/bin/project-runtimectl.sh +45 -0
- package/tools/bin/project-systemd-bootstrap.sh +74 -0
- package/tools/bin/uninstall-project-systemd.sh +87 -0
- package/tools/dashboard/app.js +238 -8
- package/tools/dashboard/issue_queue_state.py +101 -0
- package/tools/dashboard/requirements.txt +3 -0
- package/tools/dashboard/server.py +250 -30
- package/tools/dashboard/styles.css +526 -455
- package/tools/bin/agent-cleanup-worktree +0 -247
- package/tools/bin/agent-github-update-labels +0 -105
- package/tools/bin/agent-init-worktree +0 -216
- package/tools/bin/agent-project-archive-run +0 -52
- package/tools/bin/agent-project-capture-worker +0 -46
- package/tools/bin/agent-project-catch-up-issue-pr-links +0 -118
- package/tools/bin/agent-project-catch-up-merged-prs +0 -195
- package/tools/bin/agent-project-catch-up-scheduled-issue-retries +0 -123
- package/tools/bin/agent-project-cleanup-session +0 -513
- package/tools/bin/agent-project-detached-launch +0 -127
- package/tools/bin/agent-project-heartbeat-loop +0 -1029
- package/tools/bin/agent-project-open-issue-worktree +0 -89
- package/tools/bin/agent-project-open-pr-worktree +0 -80
- package/tools/bin/agent-project-publish-issue-pr +0 -468
- package/tools/bin/agent-project-reconcile-issue-session +0 -1409
- package/tools/bin/agent-project-reconcile-pr-session +0 -1288
- package/tools/bin/agent-project-retry-state +0 -158
- package/tools/bin/agent-project-run-claude-session +0 -805
- package/tools/bin/agent-project-run-codex-resilient +0 -963
- package/tools/bin/agent-project-run-codex-session +0 -435
- package/tools/bin/agent-project-run-kilo-session +0 -369
- package/tools/bin/agent-project-run-ollama-session +0 -658
- package/tools/bin/agent-project-run-openclaw-session +0 -1309
- package/tools/bin/agent-project-run-opencode-session +0 -377
- package/tools/bin/agent-project-run-pi-session +0 -479
- package/tools/bin/agent-project-sync-anchor-repo +0 -139
- package/tools/bin/agent-project-sync-source-repo-main +0 -163
- package/tools/bin/agent-project-worker-status +0 -188
- package/tools/bin/branch-verification-guard.sh +0 -364
- package/tools/bin/capture-worker.sh +0 -18
- package/tools/bin/cleanup-worktree.sh +0 -52
- package/tools/bin/codex-quota +0 -31
- package/tools/bin/create-follow-up-issue.sh +0 -114
- package/tools/bin/dashboard-launchd-bootstrap.sh +0 -50
- package/tools/bin/issue-publish-localization-guard.sh +0 -142
- package/tools/bin/issue-publish-scope-guard.sh +0 -242
- package/tools/bin/issue-requires-local-workspace-install.sh +0 -31
- package/tools/bin/issue-resource-class.sh +0 -12
- package/tools/bin/kick-scheduler.sh +0 -75
- package/tools/bin/label-follow-up-issues.sh +0 -14
- package/tools/bin/new-pr-worktree.sh +0 -50
- package/tools/bin/new-worktree.sh +0 -49
- package/tools/bin/pr-risk.sh +0 -12
- package/tools/bin/prepare-worktree.sh +0 -142
- package/tools/bin/provider-cooldown-state.sh +0 -204
- package/tools/bin/publish-issue-worker.sh +0 -31
- package/tools/bin/reconcile-bootstrap-lib.sh +0 -113
- package/tools/bin/reconcile-issue-worker.sh +0 -34
- package/tools/bin/reconcile-pr-worker.sh +0 -34
- package/tools/bin/record-verification.sh +0 -71
- package/tools/bin/render-flow-config.sh +0 -98
- package/tools/bin/resident-issue-controller-lib.sh +0 -448
- package/tools/bin/retry-state.sh +0 -31
- package/tools/bin/reuse-issue-worktree.sh +0 -121
- package/tools/bin/run-codex-bypass.sh +0 -3
- package/tools/bin/run-codex-safe.sh +0 -3
- package/tools/bin/run-codex-task.sh +0 -280
- package/tools/bin/serve-dashboard.sh +0 -5
- package/tools/bin/start-issue-worker.sh +0 -943
- package/tools/bin/start-pr-fix-worker.sh +0 -528
- package/tools/bin/start-pr-merge-repair-worker.sh +0 -8
- package/tools/bin/start-pr-review-worker.sh +0 -261
- package/tools/bin/start-resident-issue-loop.sh +0 -499
- package/tools/bin/update-github-labels.sh +0 -14
- package/tools/bin/worker-status.sh +0 -19
- package/tools/bin/workflow-catalog.sh +0 -77
|
@@ -2,3515 +2,20 @@
|
|
|
2
2
|
set -euo pipefail
|
|
3
3
|
|
|
4
4
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
5
|
+
|
|
6
|
+
# Core shell library (required by all modules)
|
|
5
7
|
# shellcheck source=/dev/null
|
|
6
8
|
source "${SCRIPT_DIR}/flow-shell-lib.sh"
|
|
7
9
|
flow_export_project_env_aliases
|
|
8
10
|
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
flow_explicit_profile_id() {
|
|
23
|
-
printf '%s\n' "${ACP_PROJECT_ID:-${AGENT_PROJECT_ID:-}}"
|
|
24
|
-
}
|
|
25
|
-
|
|
26
|
-
resolve_flow_profile_registry_root() {
|
|
27
|
-
local platform_home="${AGENT_PLATFORM_HOME:-${HOME}/.agent-runtime}"
|
|
28
|
-
printf '%s\n' "${AGENT_CONTROL_PLANE_PROFILE_ROOT:-${ACP_PROFILE_REGISTRY_ROOT:-${platform_home}/control-plane/profiles}}"
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
flow_list_profiles_in_root() {
|
|
32
|
-
local profiles_root="${1:-}"
|
|
33
|
-
local profile_file=""
|
|
34
|
-
local profile_id=""
|
|
35
|
-
|
|
36
|
-
[[ -d "${profiles_root}" ]] || return 0
|
|
37
|
-
|
|
38
|
-
while IFS= read -r profile_file; do
|
|
39
|
-
[[ -n "${profile_file}" ]] || continue
|
|
40
|
-
profile_id="$(basename "$(dirname "${profile_file}")")"
|
|
41
|
-
[[ -n "${profile_id}" ]] || continue
|
|
42
|
-
printf '%s\n' "${profile_id}"
|
|
43
|
-
done < <(find "${profiles_root}" -mindepth 2 -maxdepth 2 -type f -name 'control-plane.yaml' 2>/dev/null | sort)
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
flow_list_installed_profile_ids() {
|
|
47
|
-
flow_list_profiles_in_root "$(resolve_flow_profile_registry_root)"
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
flow_find_profile_dir_by_id() {
|
|
51
|
-
local flow_root="${1:-}"
|
|
52
|
-
local profile_id="${2:?profile id required}"
|
|
53
|
-
local registry_root=""
|
|
54
|
-
local candidate=""
|
|
55
|
-
|
|
56
|
-
if [[ -z "${flow_root}" ]]; then
|
|
57
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
58
|
-
fi
|
|
59
|
-
|
|
60
|
-
registry_root="$(resolve_flow_profile_registry_root)"
|
|
61
|
-
candidate="${registry_root}/${profile_id}"
|
|
62
|
-
if [[ -f "${candidate}/control-plane.yaml" ]]; then
|
|
63
|
-
printf '%s\n' "${candidate}"
|
|
64
|
-
return 0
|
|
65
|
-
fi
|
|
66
|
-
|
|
67
|
-
printf '%s/%s\n' "${registry_root}" "${profile_id}"
|
|
68
|
-
}
|
|
69
|
-
|
|
70
|
-
flow_profile_count() {
|
|
71
|
-
local flow_root="${1:-}"
|
|
72
|
-
|
|
73
|
-
if [[ -z "${flow_root}" ]]; then
|
|
74
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
75
|
-
fi
|
|
76
|
-
|
|
77
|
-
flow_list_profile_ids "${flow_root}" | awk 'NF { count += 1 } END { print count + 0 }'
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
flow_default_profile_id() {
|
|
81
|
-
local flow_root="${1:-}"
|
|
82
|
-
local preferred_profile="${AGENT_CONTROL_PLANE_DEFAULT_PROFILE_ID:-${ACP_DEFAULT_PROFILE_ID:-${AGENT_PROJECT_DEFAULT_PROFILE_ID:-}}}"
|
|
83
|
-
local candidate=""
|
|
84
|
-
|
|
85
|
-
if [[ -z "${flow_root}" ]]; then
|
|
86
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
87
|
-
fi
|
|
88
|
-
|
|
89
|
-
for candidate in "${preferred_profile}" "default"; do
|
|
90
|
-
[[ -n "${candidate}" ]] || continue
|
|
91
|
-
if [[ -f "$(flow_find_profile_dir_by_id "${flow_root}" "${candidate}")/control-plane.yaml" ]]; then
|
|
92
|
-
printf '%s\n' "${candidate}"
|
|
93
|
-
return 0
|
|
94
|
-
fi
|
|
95
|
-
done
|
|
96
|
-
|
|
97
|
-
candidate="$(flow_list_profile_ids "${flow_root}" | grep -v '^demo$' | head -n 1 || true)"
|
|
98
|
-
if [[ -n "${candidate}" ]]; then
|
|
99
|
-
printf '%s\n' "${candidate}"
|
|
100
|
-
return 0
|
|
101
|
-
fi
|
|
102
|
-
|
|
103
|
-
candidate="$(flow_list_profile_ids "${flow_root}" | head -n 1 || true)"
|
|
104
|
-
if [[ -n "${candidate}" ]]; then
|
|
105
|
-
printf '%s\n' "${candidate}"
|
|
106
|
-
return 0
|
|
107
|
-
fi
|
|
108
|
-
|
|
109
|
-
printf 'default\n'
|
|
110
|
-
}
|
|
111
|
-
|
|
112
|
-
flow_profile_selection_mode() {
|
|
113
|
-
local flow_root="${1:-}"
|
|
114
|
-
local explicit_profile=""
|
|
115
|
-
local profile_count="0"
|
|
116
|
-
|
|
117
|
-
if [[ -z "${flow_root}" ]]; then
|
|
118
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
119
|
-
fi
|
|
120
|
-
|
|
121
|
-
explicit_profile="$(flow_explicit_profile_id)"
|
|
122
|
-
if [[ -n "${explicit_profile}" ]]; then
|
|
123
|
-
printf 'explicit\n'
|
|
124
|
-
return 0
|
|
125
|
-
fi
|
|
126
|
-
|
|
127
|
-
profile_count="$(flow_profile_count "${flow_root}")"
|
|
128
|
-
if [[ "${profile_count}" -gt 1 ]]; then
|
|
129
|
-
printf 'implicit-default\n'
|
|
130
|
-
return 0
|
|
131
|
-
fi
|
|
132
|
-
|
|
133
|
-
printf 'single-profile-default\n'
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
flow_profile_selection_hint() {
|
|
137
|
-
local flow_root="${1:-}"
|
|
138
|
-
local mode=""
|
|
139
|
-
|
|
140
|
-
if [[ -z "${flow_root}" ]]; then
|
|
141
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
142
|
-
fi
|
|
143
|
-
|
|
144
|
-
mode="$(flow_profile_selection_mode "${flow_root}")"
|
|
145
|
-
if [[ "${mode}" == "implicit-default" ]]; then
|
|
146
|
-
printf 'Set ACP_PROJECT_ID=<id> or AGENT_PROJECT_ID=<id> when multiple available profiles exist.\n'
|
|
147
|
-
fi
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
flow_profile_guard_message() {
|
|
151
|
-
local flow_root="${1:-}"
|
|
152
|
-
local command_name="${2:-this command}"
|
|
153
|
-
local hint=""
|
|
154
|
-
|
|
155
|
-
if [[ -z "${flow_root}" ]]; then
|
|
156
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
157
|
-
fi
|
|
158
|
-
|
|
159
|
-
hint="$(flow_profile_selection_hint "${flow_root}")"
|
|
160
|
-
printf 'explicit profile selection required for %s when multiple available profiles exist.\n' "${command_name}"
|
|
161
|
-
if [[ -n "${hint}" ]]; then
|
|
162
|
-
printf '%s\n' "${hint}"
|
|
163
|
-
fi
|
|
164
|
-
}
|
|
165
|
-
|
|
166
|
-
flow_require_explicit_profile_selection() {
|
|
167
|
-
local flow_root="${1:-}"
|
|
168
|
-
local command_name="${2:-this command}"
|
|
169
|
-
|
|
170
|
-
if [[ -z "${flow_root}" ]]; then
|
|
171
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
172
|
-
fi
|
|
173
|
-
|
|
174
|
-
if [[ "${ACP_ALLOW_IMPLICIT_PROFILE_SELECTION:-0}" == "1" ]]; then
|
|
175
|
-
return 0
|
|
176
|
-
fi
|
|
177
|
-
|
|
178
|
-
if [[ "$(flow_profile_selection_mode "${flow_root}")" == "implicit-default" ]]; then
|
|
179
|
-
flow_profile_guard_message "${flow_root}" "${command_name}" >&2
|
|
180
|
-
return 1
|
|
181
|
-
fi
|
|
182
|
-
|
|
183
|
-
return 0
|
|
184
|
-
}
|
|
185
|
-
|
|
186
|
-
resolve_flow_config_yaml() {
|
|
187
|
-
local script_path="${1:-${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}}"
|
|
188
|
-
local flow_root
|
|
189
|
-
local profile_id=""
|
|
190
|
-
local candidate=""
|
|
191
|
-
flow_root="$(resolve_flow_skill_dir "${script_path}")"
|
|
192
|
-
profile_id="${ACP_PROJECT_ID:-${AGENT_PROJECT_ID:-$(flow_default_profile_id "${flow_root}")}}"
|
|
193
|
-
|
|
194
|
-
for candidate in \
|
|
195
|
-
"${AGENT_CONTROL_PLANE_CONFIG:-}" \
|
|
196
|
-
"${ACP_CONFIG:-}" \
|
|
197
|
-
"${AGENT_PROJECT_CONFIG_PATH:-}" \
|
|
198
|
-
"${F_LOSNING_FLOW_CONFIG:-}"; do
|
|
199
|
-
if [[ -n "${candidate}" && -f "${candidate}" ]]; then
|
|
200
|
-
printf '%s\n' "${candidate}"
|
|
201
|
-
return 0
|
|
202
|
-
fi
|
|
203
|
-
done
|
|
204
|
-
|
|
205
|
-
candidate="$(flow_find_profile_dir_by_id "${flow_root}" "${profile_id}")/control-plane.yaml"
|
|
206
|
-
if [[ -f "${candidate}" ]]; then
|
|
207
|
-
printf '%s\n' "${candidate}"
|
|
208
|
-
return 0
|
|
209
|
-
fi
|
|
210
|
-
|
|
211
|
-
printf '%s\n' "${candidate}"
|
|
212
|
-
}
|
|
213
|
-
|
|
214
|
-
flow_list_profile_ids() {
|
|
215
|
-
local flow_root="${1:-}"
|
|
216
|
-
local found_any=""
|
|
217
|
-
|
|
218
|
-
if [[ -z "${flow_root}" ]]; then
|
|
219
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
220
|
-
fi
|
|
221
|
-
|
|
222
|
-
found_any="$(
|
|
223
|
-
{
|
|
224
|
-
flow_list_installed_profile_ids
|
|
225
|
-
} | awk 'NF { print }' | sort -u
|
|
226
|
-
)"
|
|
227
|
-
|
|
228
|
-
if [[ -z "${found_any}" ]]; then
|
|
229
|
-
return 0
|
|
230
|
-
fi
|
|
231
|
-
|
|
232
|
-
printf '%s\n' "${found_any}"
|
|
233
|
-
}
|
|
234
|
-
|
|
235
|
-
flow_git_remote_repo_slug() {
|
|
236
|
-
local repo_root="${1:-}"
|
|
237
|
-
local remote_name="${2:-origin}"
|
|
238
|
-
local remote_url=""
|
|
239
|
-
local normalized=""
|
|
240
|
-
|
|
241
|
-
[[ -n "${repo_root}" && -d "${repo_root}" ]] || return 1
|
|
242
|
-
remote_url="$(git -C "${repo_root}" remote get-url "${remote_name}" 2>/dev/null || true)"
|
|
243
|
-
[[ -n "${remote_url}" ]] || return 1
|
|
244
|
-
|
|
245
|
-
normalized="${remote_url%.git}"
|
|
246
|
-
case "${normalized}" in
|
|
247
|
-
ssh://*@*/*)
|
|
248
|
-
normalized="${normalized#ssh://}"
|
|
249
|
-
normalized="${normalized#*@}"
|
|
250
|
-
normalized="${normalized#*/}"
|
|
251
|
-
;;
|
|
252
|
-
*@*:*/*)
|
|
253
|
-
normalized="${normalized#*@}"
|
|
254
|
-
normalized="${normalized#*:}"
|
|
255
|
-
;;
|
|
256
|
-
https://*/*|http://*/*)
|
|
257
|
-
normalized="${normalized#http://}"
|
|
258
|
-
normalized="${normalized#https://}"
|
|
259
|
-
normalized="${normalized#*/}"
|
|
260
|
-
;;
|
|
261
|
-
*)
|
|
262
|
-
return 1
|
|
263
|
-
;;
|
|
264
|
-
esac
|
|
265
|
-
|
|
266
|
-
if [[ "${normalized}" == */*/* ]]; then
|
|
267
|
-
normalized="${normalized#*/}"
|
|
268
|
-
fi
|
|
269
|
-
|
|
270
|
-
if [[ "${normalized}" =~ ^[^/]+/[^/]+$ ]]; then
|
|
271
|
-
printf '%s\n' "${normalized}"
|
|
272
|
-
return 0
|
|
273
|
-
fi
|
|
274
|
-
|
|
275
|
-
return 1
|
|
276
|
-
}
|
|
277
|
-
|
|
278
|
-
flow_git_has_remote() {
|
|
279
|
-
local repo_root="${1:-}"
|
|
280
|
-
local remote_name="${2:-}"
|
|
281
|
-
|
|
282
|
-
[[ -n "${repo_root}" && -d "${repo_root}" && -n "${remote_name}" ]] || return 1
|
|
283
|
-
git -C "${repo_root}" remote get-url "${remote_name}" >/dev/null 2>&1
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
flow_resolve_forge_primary_remote() {
|
|
287
|
-
local repo_root="${1:-}"
|
|
288
|
-
local repo_slug="${2:-}"
|
|
289
|
-
local remote_name=""
|
|
290
|
-
local override="${ACP_SOURCE_SYNC_REMOTE:-${F_LOSNING_SOURCE_SYNC_REMOTE:-}}"
|
|
291
|
-
local forge_provider=""
|
|
292
|
-
|
|
293
|
-
[[ -n "${repo_root}" && -d "${repo_root}" ]] || return 1
|
|
294
|
-
|
|
295
|
-
if [[ -n "${override}" ]] && flow_git_has_remote "${repo_root}" "${override}"; then
|
|
296
|
-
printf '%s\n' "${override}"
|
|
297
|
-
return 0
|
|
298
|
-
fi
|
|
299
|
-
|
|
300
|
-
forge_provider="$(flow_forge_provider)"
|
|
301
|
-
case "${forge_provider}" in
|
|
302
|
-
gitea)
|
|
303
|
-
if flow_git_has_remote "${repo_root}" "gitea"; then
|
|
304
|
-
printf 'gitea\n'
|
|
305
|
-
return 0
|
|
306
|
-
fi
|
|
307
|
-
;;
|
|
308
|
-
github)
|
|
309
|
-
if flow_git_has_remote "${repo_root}" "origin"; then
|
|
310
|
-
printf 'origin\n'
|
|
311
|
-
return 0
|
|
312
|
-
fi
|
|
313
|
-
;;
|
|
314
|
-
esac
|
|
315
|
-
|
|
316
|
-
if [[ -n "${repo_slug}" ]]; then
|
|
317
|
-
while IFS= read -r remote_name; do
|
|
318
|
-
[[ -n "${remote_name}" ]] || continue
|
|
319
|
-
if [[ "$(flow_git_remote_repo_slug "${repo_root}" "${remote_name}" 2>/dev/null || true)" == "${repo_slug}" ]]; then
|
|
320
|
-
printf '%s\n' "${remote_name}"
|
|
321
|
-
return 0
|
|
322
|
-
fi
|
|
323
|
-
done < <(git -C "${repo_root}" remote)
|
|
324
|
-
fi
|
|
325
|
-
|
|
326
|
-
for remote_name in origin gitea; do
|
|
327
|
-
if flow_git_has_remote "${repo_root}" "${remote_name}"; then
|
|
328
|
-
printf '%s\n' "${remote_name}"
|
|
329
|
-
return 0
|
|
330
|
-
fi
|
|
331
|
-
done
|
|
332
|
-
|
|
333
|
-
return 1
|
|
334
|
-
}
|
|
335
|
-
|
|
336
|
-
flow_git_credential_token_for_repo_slug() {
|
|
337
|
-
local repo_slug="${1:-}"
|
|
338
|
-
local host="${2:-github.com}"
|
|
339
|
-
local path_suffix="${3:-${repo_slug}.git}"
|
|
340
|
-
local credential_payload=""
|
|
341
|
-
local token=""
|
|
342
|
-
|
|
343
|
-
[[ -n "${repo_slug}" && -n "${host}" && -n "${path_suffix}" ]] || return 1
|
|
344
|
-
command -v git >/dev/null 2>&1 || return 1
|
|
345
|
-
|
|
346
|
-
credential_payload="$(
|
|
347
|
-
printf 'protocol=https\nhost=%s\npath=%s\n\n' "${host}" "${path_suffix}" \
|
|
348
|
-
| git credential fill 2>/dev/null || true
|
|
349
|
-
)"
|
|
350
|
-
token="$(awk -F= '/^password=/{print $2; exit}' <<<"${credential_payload}")"
|
|
351
|
-
[[ -n "${token}" ]] || return 1
|
|
352
|
-
|
|
353
|
-
printf '%s\n' "${token}"
|
|
354
|
-
}
|
|
355
|
-
|
|
356
|
-
flow_export_github_cli_auth_env() {
|
|
357
|
-
local repo_slug="${1:-}"
|
|
358
|
-
local token=""
|
|
359
|
-
|
|
360
|
-
if flow_using_gitea; then
|
|
361
|
-
return 0
|
|
362
|
-
fi
|
|
363
|
-
|
|
364
|
-
if [[ -n "${GH_TOKEN:-}" ]]; then
|
|
365
|
-
return 0
|
|
366
|
-
fi
|
|
367
|
-
|
|
368
|
-
if [[ -n "${GITHUB_TOKEN:-}" ]]; then
|
|
369
|
-
export GH_TOKEN="${GITHUB_TOKEN}"
|
|
370
|
-
return 0
|
|
371
|
-
fi
|
|
372
|
-
|
|
373
|
-
if command -v gh >/dev/null 2>&1; then
|
|
374
|
-
if env -u GH_TOKEN -u GITHUB_TOKEN gh auth status >/dev/null 2>&1 \
|
|
375
|
-
|| env -u GH_TOKEN -u GITHUB_TOKEN gh api user --jq .login >/dev/null 2>&1; then
|
|
376
|
-
return 0
|
|
377
|
-
fi
|
|
378
|
-
fi
|
|
379
|
-
|
|
380
|
-
token="$(flow_git_credential_token_for_repo_slug "${repo_slug}" || true)"
|
|
381
|
-
if [[ -n "${token}" ]]; then
|
|
382
|
-
export GH_TOKEN="${token}"
|
|
383
|
-
return 0
|
|
384
|
-
fi
|
|
385
|
-
|
|
386
|
-
if [[ -n "${GITHUB_PERSONAL_ACCESS_TOKEN:-}" ]]; then
|
|
387
|
-
export GH_TOKEN="${GITHUB_PERSONAL_ACCESS_TOKEN}"
|
|
388
|
-
fi
|
|
389
|
-
}
|
|
390
|
-
|
|
391
|
-
flow_forge_provider() {
|
|
392
|
-
local provider="${ACP_FORGE_PROVIDER:-${F_LOSNING_FORGE_PROVIDER:-github}}"
|
|
393
|
-
provider="$(printf '%s' "${provider}" | tr '[:upper:]' '[:lower:]')"
|
|
394
|
-
case "${provider}" in
|
|
395
|
-
github|gitea)
|
|
396
|
-
printf '%s\n' "${provider}"
|
|
397
|
-
;;
|
|
398
|
-
*)
|
|
399
|
-
printf 'github\n'
|
|
400
|
-
;;
|
|
401
|
-
esac
|
|
402
|
-
}
|
|
403
|
-
|
|
404
|
-
flow_using_gitea() {
|
|
405
|
-
[[ "$(flow_forge_provider)" == "gitea" ]]
|
|
406
|
-
}
|
|
407
|
-
|
|
408
|
-
flow_gitea_base_url() {
|
|
409
|
-
local base_url="${ACP_GITEA_BASE_URL:-${GITEA_BASE_URL:-}}"
|
|
410
|
-
[[ -n "${base_url}" ]] || return 1
|
|
411
|
-
printf '%s\n' "${base_url%/}"
|
|
412
|
-
}
|
|
413
|
-
|
|
414
|
-
flow_gitea_base_host() {
|
|
415
|
-
local base_url=""
|
|
416
|
-
base_url="$(flow_gitea_base_url)" || return 1
|
|
417
|
-
base_url="${base_url#http://}"
|
|
418
|
-
base_url="${base_url#https://}"
|
|
419
|
-
printf '%s\n' "${base_url%%/*}"
|
|
420
|
-
}
|
|
421
|
-
|
|
422
|
-
flow_gitea_api_url_for_repo() {
|
|
423
|
-
local repo_slug="${1:?repo slug required}"
|
|
424
|
-
local route="${2:-}"
|
|
425
|
-
local base_url=""
|
|
426
|
-
|
|
427
|
-
base_url="$(flow_gitea_base_url)" || return 1
|
|
428
|
-
route="${route#/}"
|
|
429
|
-
if [[ -n "${route}" ]]; then
|
|
430
|
-
printf '%s/api/v1/repos/%s/%s\n' "${base_url}" "${repo_slug}" "${route}"
|
|
431
|
-
return 0
|
|
432
|
-
fi
|
|
433
|
-
printf '%s/api/v1/repos/%s\n' "${base_url}" "${repo_slug}"
|
|
434
|
-
}
|
|
435
|
-
|
|
436
|
-
flow_gitea_auth_curl_args() {
|
|
437
|
-
local repo_slug="${1:-}"
|
|
438
|
-
local credential_token=""
|
|
439
|
-
|
|
440
|
-
if [[ -n "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" ]]; then
|
|
441
|
-
printf -- "-H\0Authorization: token %s\0" "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}"
|
|
442
|
-
return 0
|
|
443
|
-
fi
|
|
444
|
-
if [[ -n "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" && -n "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" ]]; then
|
|
445
|
-
printf -- "-u\0%s:%s\0" "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}"
|
|
446
|
-
return 0
|
|
447
|
-
fi
|
|
448
|
-
if [[ -n "${repo_slug}" ]]; then
|
|
449
|
-
credential_token="$(flow_git_credential_token_for_repo_slug "${repo_slug}" "$(flow_gitea_base_host)" "${repo_slug}.git" || true)"
|
|
450
|
-
if [[ -n "${credential_token}" ]]; then
|
|
451
|
-
printf -- "-H\0Authorization: token %s\0" "${credential_token}"
|
|
452
|
-
return 0
|
|
453
|
-
fi
|
|
454
|
-
fi
|
|
455
|
-
return 1
|
|
456
|
-
}
|
|
457
|
-
|
|
458
|
-
flow_gitea_api_repo() {
|
|
459
|
-
local repo_slug="${1:?repo slug required}"
|
|
460
|
-
local route="${2:-}"
|
|
461
|
-
local method="GET"
|
|
462
|
-
local paginate="no"
|
|
463
|
-
local slurp="no"
|
|
464
|
-
local jq_filter=""
|
|
465
|
-
local expect_input="no"
|
|
466
|
-
local arg=""
|
|
467
|
-
local url=""
|
|
468
|
-
local input_file=""
|
|
469
|
-
local output=""
|
|
470
|
-
local page="1"
|
|
471
|
-
local per_page="100"
|
|
472
|
-
local response=""
|
|
473
|
-
local body=""
|
|
474
|
-
local link_header=""
|
|
475
|
-
local header_file=""
|
|
476
|
-
local stdout_file=""
|
|
477
|
-
local stderr_file=""
|
|
478
|
-
local curl_status="0"
|
|
479
|
-
local response_status="0"
|
|
480
|
-
local -a curl_args=()
|
|
481
|
-
local -a auth_args=()
|
|
482
|
-
local -a extra_headers=()
|
|
483
|
-
local -a form_fields=()
|
|
484
|
-
local -a pages=()
|
|
485
|
-
|
|
486
|
-
shift 2
|
|
487
|
-
while [[ $# -gt 0 ]]; do
|
|
488
|
-
arg="${1:-}"
|
|
489
|
-
case "${arg}" in
|
|
490
|
-
--method)
|
|
491
|
-
method="${2:-GET}"
|
|
492
|
-
shift 2
|
|
493
|
-
;;
|
|
494
|
-
--paginate)
|
|
495
|
-
paginate="yes"
|
|
496
|
-
shift
|
|
497
|
-
;;
|
|
498
|
-
--slurp)
|
|
499
|
-
slurp="yes"
|
|
500
|
-
shift
|
|
501
|
-
;;
|
|
502
|
-
--jq)
|
|
503
|
-
jq_filter="${2:-}"
|
|
504
|
-
shift 2
|
|
505
|
-
;;
|
|
506
|
-
--input)
|
|
507
|
-
expect_input="yes"
|
|
508
|
-
if [[ "${2:-}" == "-" ]]; then
|
|
509
|
-
input_file="$(mktemp)"
|
|
510
|
-
cat >"${input_file}"
|
|
511
|
-
shift 2
|
|
512
|
-
else
|
|
513
|
-
input_file="${2:-}"
|
|
514
|
-
shift 2
|
|
515
|
-
fi
|
|
516
|
-
;;
|
|
517
|
-
-f|--field)
|
|
518
|
-
form_fields+=("${2:-}")
|
|
519
|
-
shift 2
|
|
520
|
-
;;
|
|
521
|
-
*)
|
|
522
|
-
shift
|
|
523
|
-
;;
|
|
524
|
-
esac
|
|
525
|
-
done
|
|
526
|
-
|
|
527
|
-
url="$(flow_gitea_api_url_for_repo "${repo_slug}" "${route}")" || {
|
|
528
|
-
rm -f "${input_file}"
|
|
529
|
-
return 1
|
|
530
|
-
}
|
|
531
|
-
while IFS= read -r -d '' arg; do
|
|
532
|
-
auth_args+=("${arg}")
|
|
533
|
-
done < <(flow_gitea_auth_curl_args "${repo_slug}") || true
|
|
534
|
-
if [[ "${#auth_args[@]}" -eq 0 && "${method}" != "GET" ]]; then
|
|
535
|
-
rm -f "${input_file}"
|
|
536
|
-
return 1
|
|
537
|
-
fi
|
|
538
|
-
|
|
539
|
-
if [[ "${expect_input}" == "yes" && -n "${input_file}" ]]; then
|
|
540
|
-
extra_headers+=(-H "Content-Type: application/json")
|
|
541
|
-
fi
|
|
542
|
-
if [[ "${#form_fields[@]}" -gt 0 ]]; then
|
|
543
|
-
extra_headers+=(-H "Content-Type: application/json")
|
|
544
|
-
body="$(
|
|
545
|
-
FORM_FIELDS="$(printf '%s\n' "${form_fields[@]}")" python3 - <<'PY'
|
|
546
|
-
import json
|
|
547
|
-
import os
|
|
548
|
-
|
|
549
|
-
payload = {}
|
|
550
|
-
for line in os.environ.get("FORM_FIELDS", "").splitlines():
|
|
551
|
-
line = line.rstrip("\n")
|
|
552
|
-
if "=" not in line:
|
|
553
|
-
continue
|
|
554
|
-
key, value = line.split("=", 1)
|
|
555
|
-
payload[key] = value
|
|
556
|
-
print(json.dumps(payload))
|
|
557
|
-
PY
|
|
558
|
-
)"
|
|
559
|
-
input_file="$(mktemp)"
|
|
560
|
-
printf '%s' "${body}" >"${input_file}"
|
|
561
|
-
fi
|
|
562
|
-
|
|
563
|
-
if [[ "${paginate}" != "yes" ]]; then
|
|
564
|
-
stdout_file="$(mktemp)"
|
|
565
|
-
stderr_file="$(mktemp)"
|
|
566
|
-
header_file="$(mktemp)"
|
|
567
|
-
curl_args=(-sS -D "${header_file}" -X "${method}")
|
|
568
|
-
if [[ "${#auth_args[@]}" -gt 0 ]]; then
|
|
569
|
-
curl_args+=("${auth_args[@]}")
|
|
570
|
-
fi
|
|
571
|
-
if [[ "${#extra_headers[@]}" -gt 0 ]]; then
|
|
572
|
-
curl_args+=("${extra_headers[@]}")
|
|
573
|
-
fi
|
|
574
|
-
if [[ -n "${input_file}" ]]; then
|
|
575
|
-
curl_args+=(--data-binary "@${input_file}")
|
|
576
|
-
fi
|
|
577
|
-
if curl "${curl_args[@]}" "${url}" >"${stdout_file}" 2>"${stderr_file}"; then
|
|
578
|
-
output="$(cat "${stdout_file}" 2>/dev/null || true)"
|
|
579
|
-
if [[ -n "${jq_filter}" ]]; then
|
|
580
|
-
jq -r "${jq_filter}" <<<"${output}"
|
|
581
|
-
else
|
|
582
|
-
printf '%s' "${output}"
|
|
583
|
-
fi
|
|
584
|
-
rm -f "${input_file}" "${stdout_file}" "${stderr_file}" "${header_file}"
|
|
585
|
-
return 0
|
|
586
|
-
fi
|
|
587
|
-
rm -f "${input_file}" "${stdout_file}" "${stderr_file}" "${header_file}"
|
|
588
|
-
return 1
|
|
589
|
-
fi
|
|
590
|
-
|
|
591
|
-
while :; do
|
|
592
|
-
stdout_file="$(mktemp)"
|
|
593
|
-
stderr_file="$(mktemp)"
|
|
594
|
-
header_file="$(mktemp)"
|
|
595
|
-
curl_args=(-sS -D "${header_file}" -X "${method}")
|
|
596
|
-
if [[ "${#auth_args[@]}" -gt 0 ]]; then
|
|
597
|
-
curl_args+=("${auth_args[@]}")
|
|
598
|
-
fi
|
|
599
|
-
if [[ "${#extra_headers[@]}" -gt 0 ]]; then
|
|
600
|
-
curl_args+=("${extra_headers[@]}")
|
|
601
|
-
fi
|
|
602
|
-
if curl "${curl_args[@]}" "${url}$([[ "${url}" == *\?* ]] && printf '&' || printf '?')page=${page}&limit=${per_page}" >"${stdout_file}" 2>"${stderr_file}"; then
|
|
603
|
-
response="$(cat "${stdout_file}" 2>/dev/null || true)"
|
|
604
|
-
pages+=("${response}")
|
|
605
|
-
link_header="$(tr -d '\r' <"${header_file}" | awk 'BEGIN{IGNORECASE=1}/^link:/{sub(/^link:[[:space:]]*/,""); print; exit}')"
|
|
606
|
-
rm -f "${stdout_file}" "${stderr_file}" "${header_file}"
|
|
607
|
-
if [[ "${link_header}" != *'rel="next"'* ]]; then
|
|
608
|
-
break
|
|
609
|
-
fi
|
|
610
|
-
page="$((page + 1))"
|
|
611
|
-
else
|
|
612
|
-
response_status="1"
|
|
613
|
-
rm -f "${stdout_file}" "${stderr_file}" "${header_file}" "${input_file}"
|
|
614
|
-
return "${response_status}"
|
|
615
|
-
fi
|
|
616
|
-
done
|
|
617
|
-
|
|
618
|
-
rm -f "${input_file}"
|
|
619
|
-
if [[ "${slurp}" == "yes" ]]; then
|
|
620
|
-
printf '%s\n' "${pages[@]}" | jq -s '.'
|
|
621
|
-
return 0
|
|
622
|
-
fi
|
|
623
|
-
printf '%s' "${pages[0]:-[]}"
|
|
624
|
-
}
|
|
625
|
-
|
|
626
|
-
flow_gitea_issue_view_json() {
|
|
627
|
-
local repo_slug="${1:?repo slug required}"
|
|
628
|
-
local issue_id="${2:?issue id required}"
|
|
629
|
-
local issue_json=""
|
|
630
|
-
local comments_json=""
|
|
631
|
-
|
|
632
|
-
issue_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null || true)"
|
|
633
|
-
issue_json="$(flow_json_or_default "${issue_json}" '{}')"
|
|
634
|
-
comments_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${issue_id}/comments" --paginate --slurp 2>/dev/null || true)"
|
|
635
|
-
comments_json="$(flow_json_or_default "${comments_json}" '[]')"
|
|
636
|
-
|
|
637
|
-
ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comments_json}" python3 - <<'PY'
|
|
638
|
-
import json
|
|
639
|
-
import os
|
|
640
|
-
|
|
641
|
-
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
|
|
642
|
-
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
|
|
643
|
-
comments = []
|
|
644
|
-
for page in comment_pages:
|
|
645
|
-
if isinstance(page, list):
|
|
646
|
-
comments.extend(page)
|
|
647
|
-
elif isinstance(page, dict):
|
|
648
|
-
comments.append(page)
|
|
649
|
-
|
|
650
|
-
result = {
|
|
651
|
-
"number": issue.get("number"),
|
|
652
|
-
"state": str(issue.get("state", "")).upper(),
|
|
653
|
-
"title": issue.get("title") or "",
|
|
654
|
-
"body": issue.get("body") or "",
|
|
655
|
-
"url": issue.get("html_url") or issue.get("url") or "",
|
|
656
|
-
"labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
|
|
657
|
-
"comments": [
|
|
658
|
-
{
|
|
659
|
-
"body": comment.get("body") or "",
|
|
660
|
-
"createdAt": comment.get("created_at") or "",
|
|
661
|
-
"updatedAt": comment.get("updated_at") or "",
|
|
662
|
-
"url": comment.get("html_url") or "",
|
|
663
|
-
}
|
|
664
|
-
for comment in comments
|
|
665
|
-
if isinstance(comment, dict)
|
|
666
|
-
],
|
|
667
|
-
"createdAt": issue.get("created_at") or "",
|
|
668
|
-
"updatedAt": issue.get("updated_at") or "",
|
|
669
|
-
}
|
|
670
|
-
|
|
671
|
-
print(json.dumps(result))
|
|
672
|
-
PY
|
|
673
|
-
}
|
|
674
|
-
|
|
675
|
-
flow_gitea_issue_list_json() {
|
|
676
|
-
local repo_slug="${1:?repo slug required}"
|
|
677
|
-
local state="${2:-open}"
|
|
678
|
-
local limit="${3:-100}"
|
|
679
|
-
local issues_json=""
|
|
680
|
-
|
|
681
|
-
issues_json="$(flow_gitea_api_repo "${repo_slug}" "issues?state=${state}" --paginate --slurp 2>/dev/null || true)"
|
|
682
|
-
issues_json="$(flow_json_or_default "${issues_json}" '[]')"
|
|
683
|
-
|
|
684
|
-
ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
|
|
685
|
-
import json
|
|
686
|
-
import os
|
|
687
|
-
|
|
688
|
-
pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
|
|
689
|
-
limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
|
|
690
|
-
issues = []
|
|
691
|
-
|
|
692
|
-
for page in pages:
|
|
693
|
-
if isinstance(page, list):
|
|
694
|
-
issues.extend(page)
|
|
695
|
-
elif isinstance(page, dict):
|
|
696
|
-
issues.append(page)
|
|
697
|
-
|
|
698
|
-
result = []
|
|
699
|
-
for issue in issues:
|
|
700
|
-
if not isinstance(issue, dict):
|
|
701
|
-
continue
|
|
702
|
-
if issue.get("pull_request"):
|
|
703
|
-
continue
|
|
704
|
-
result.append({
|
|
705
|
-
"number": issue.get("number"),
|
|
706
|
-
"createdAt": issue.get("created_at") or "",
|
|
707
|
-
"updatedAt": issue.get("updated_at") or "",
|
|
708
|
-
"title": issue.get("title") or "",
|
|
709
|
-
"url": issue.get("html_url") or issue.get("url") or "",
|
|
710
|
-
"labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
|
|
711
|
-
})
|
|
712
|
-
|
|
713
|
-
print(json.dumps(result[:limit]))
|
|
714
|
-
PY
|
|
715
|
-
}
|
|
716
|
-
|
|
717
|
-
# Emit a gh-CLI-compatible JSON view of a Gitea pull request.
# Arguments: $1 - repo slug ("owner/name"), $2 - PR number.
# Outputs:   one JSON object on stdout whose keys mirror `gh pr view --json`.
# All Gitea API calls are best-effort: a failed or unparseable response
# degrades to an empty JSON default instead of aborting.
flow_gitea_pr_view_json() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local pr_json=""
  local comment_pages_json=""
  local files_json=""
  local reviews_json=""

  # Fetch the PR itself plus its comments, changed files and reviews.
  pr_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}" 2>/dev/null || true)"
  pr_json="$(flow_json_or_default "${pr_json}" '{}')"
  comment_pages_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${pr_number}/comments" --paginate --slurp 2>/dev/null || true)"
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
  files_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}/files" --paginate --slurp 2>/dev/null || true)"
  files_json="$(flow_json_or_default "${files_json}" '[]')"
  reviews_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}/reviews" --paginate --slurp 2>/dev/null || true)"
  reviews_json="$(flow_json_or_default "${reviews_json}" '[]')"

  # Payloads travel via the environment (not argv) so arbitrary-size JSON
  # avoids argv length limits and shell quoting issues.
  PR_JSON="${pr_json}" COMMENT_PAGES_JSON="${comment_pages_json}" FILES_JSON="${files_json}" REVIEWS_JSON="${reviews_json}" python3 - <<'PY'
import json
import os

pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
file_pages = json.loads(os.environ.get("FILES_JSON", "[]") or "[]")
review_pages = json.loads(os.environ.get("REVIEWS_JSON", "[]") or "[]")
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

files = []
for page in file_pages:
    if isinstance(page, list):
        files.extend(page)
    elif isinstance(page, dict):
        files.append(page)

reviews = []
for page in review_pages:
    if isinstance(page, list):
        reviews.extend(page)
    elif isinstance(page, dict):
        reviews.append(page)

pr_state = str(pr.get("state", "")).upper()
if pr.get("merged") or pr.get("merged_at"):
    pr_state = "MERGED"

review_states = [
    str(review.get("state") or "").upper()
    for review in reviews
    if isinstance(review, dict)
]
# NOTE(review): any APPROVED review wins even if a later review requested
# changes — simpler than GitHub's latest-per-reviewer semantics; confirm
# this is acceptable for callers.
review_decision = ""
if any(state == "APPROVED" for state in review_states):
    review_decision = "APPROVED"
elif any(state in {"CHANGES_REQUESTED", "REQUEST_CHANGES"} for state in review_states):
    review_decision = "CHANGES_REQUESTED"

result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or "",
    "url": pr.get("html_url") or pr.get("url") or "",
    "headRefName": ((pr.get("head") or {}).get("ref")) or "",
    "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
    "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
    "mergeStateStatus": "CLEAN" if pr.get("mergeable") else "UNKNOWN",
    "statusCheckRollup": [],
    "labels": [{"name": label.get("name", "")} for label in pr.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "state": pr_state,
    "isDraft": bool(pr.get("draft")),
    "createdAt": pr.get("created_at") or "",
    "updatedAt": pr.get("updated_at") or "",
    "mergedAt": pr.get("merged_at") or "",
    "authorLogin": ((pr.get("user") or {}).get("login")) or "",
    "files": [
        {"path": file.get("filename") or ""}
        for file in files
        if isinstance(file, dict) and (file.get("filename") or "")
    ],
    "reviewRequests": [
        {"login": reviewer.get("login") or ""}
        for reviewer in (pr.get("requested_reviewers") or [])
        if isinstance(reviewer, dict)
    ],
    "reviewDecision": review_decision,
}

print(json.dumps(result))
PY
}
|
|
821
|
-
|
|
822
|
-
# List Gitea pull requests as gh-CLI-compatible JSON.
# Arguments: $1 - repo slug, $2 - state filter (open|closed|merged; default
#            open), $3 - max results (default 100).
# Outputs:   a JSON array on stdout; API failures degrade to an empty list.
flow_gitea_pr_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local pulls_state="${state}"
  local pr_pages_json=""

  # Gitea's pulls endpoint has no "merged" list state; fetch closed PRs and
  # filter on merged/merged_at in the python stage below.
  if [[ "${state}" == "merged" ]]; then
    pulls_state="closed"
  fi

  pr_pages_json="$(flow_gitea_api_repo "${repo_slug}" "pulls?state=${pulls_state}" --paginate --slurp 2>/dev/null || true)"
  pr_pages_json="$(flow_json_or_default "${pr_pages_json}" '[]')"

  PR_PAGES_JSON="${pr_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("PR_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
state_filter = os.environ.get("PR_STATE_FILTER", "open")
prs = []
for page in pages:
    if isinstance(page, list):
        prs.extend(page)
    elif isinstance(page, dict):
        prs.append(page)

result = []
for pr in prs:
    if not isinstance(pr, dict):
        continue
    merged = bool(pr.get("merged") or pr.get("merged_at"))
    state = str(pr.get("state", "")).lower()
    if state_filter == "open" and state != "open":
        continue
    if state_filter == "closed" and state != "closed":
        continue
    if state_filter == "merged" and not merged:
        continue
    normalized_state = "MERGED" if merged else state.upper()
    result.append({
        "number": pr.get("number"),
        "title": pr.get("title") or "",
        "body": pr.get("body") or "",
        "url": pr.get("html_url") or pr.get("url") or "",
        "headRefName": ((pr.get("head") or {}).get("ref")) or "",
        "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
        "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
        "createdAt": pr.get("created_at") or "",
        "mergedAt": pr.get("merged_at") or "",
        "state": normalized_state,
        "isDraft": bool(pr.get("draft")),
        "labels": [{"name": label.get("name", "")} for label in pr.get("labels", []) if isinstance(label, dict)],
        "comments": [],
        "authorLogin": ((pr.get("user") or {}).get("login")) or "",
    })
    if len(result) >= limit:
        break

print(json.dumps(result))
PY
}
|
|
885
|
-
|
|
886
|
-
# Succeed when the given gh/API error text looks like a rate-limit rejection.
# Arguments: $1 - captured stdout/stderr text (may be empty).
flow_github_output_indicates_rate_limit() {
  local probe_text="${1:-}"
  printf '%s\n' "${probe_text}" | grep -Eiq 'API rate limit exceeded|secondary rate limit|rate limit exceeded|HTTP 403'
}
|
|
889
|
-
|
|
890
|
-
# Locate the executable rate-limit state helper under the flow skill dir.
# Outputs: helper path on stdout; returns 1 when the skill dir cannot be
# resolved or the helper is missing/non-executable.
flow_github_core_rate_limit_state_bin() {
  local skill_root=""
  local helper_path=""

  # BASH_SOURCE[1] is the caller's file, so the skill dir resolves relative
  # to whoever invoked this function, falling back to this library file.
  skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")" || return 1
  helper_path="${skill_root}/tools/bin/github-core-rate-limit-state.sh"
  [[ -x "${helper_path}" ]] || return 1
  printf '%s\n' "${helper_path}"
}
|
|
899
|
-
|
|
900
|
-
# Print the persisted core-rate-limit state ("KEY=VALUE" lines).
# Returns 1 only when the state helper cannot be located; a failing `get`
# is swallowed (best effort) and yields empty output.
flow_github_core_rate_limit_state_output() {
  local helper=""

  helper="$(flow_github_core_rate_limit_state_bin)" || return 1
  "${helper}" get 2>/dev/null || true
}
|
|
906
|
-
|
|
907
|
-
# Succeed (exit 0) when persisted state says the core API is NOT ready,
# i.e. a rate-limit backoff is currently in effect.
flow_github_core_rate_limit_active() {
  local state_snapshot=""
  local ready_flag=""

  state_snapshot="$(flow_github_core_rate_limit_state_output)" || return 1
  # State is KEY=VALUE lines; take the first READY value.
  ready_flag="$(grep -m1 '^READY=' <<<"${state_snapshot}" | cut -d= -f2)"
  [[ "${ready_flag}" == "no" ]]
}
|
|
915
|
-
|
|
916
|
-
# Record a rate-limit backoff via the state helper (best effort).
# Arguments: $1 - reason label (default "github-api-rate-limit"),
#            $2 - optional reset epoch seconds.
# A missing helper binary is not an error: scheduling is advisory only.
flow_github_core_rate_limit_schedule() {
  local reason="${1:-github-api-rate-limit}"
  local reset_epoch="${2:-}"
  local helper=""
  local current_epoch=""

  helper="$(flow_github_core_rate_limit_state_bin)" || return 0
  current_epoch="$(date +%s)"

  # Only forward the reset time when it is a numeric epoch in the future.
  if [[ "${reset_epoch}" =~ ^[0-9]+$ ]] && (( reset_epoch > current_epoch )); then
    "${helper}" schedule "${reason}" --next-at-epoch "${reset_epoch}" >/dev/null 2>&1 || true
  else
    "${helper}" schedule "${reason}" >/dev/null 2>&1 || true
  fi
}
|
|
931
|
-
|
|
932
|
-
# Drop any recorded core-rate-limit backoff state (best effort; never fails).
flow_github_core_rate_limit_clear() {
  local helper=""

  helper="$(flow_github_core_rate_limit_state_bin)" || return 0
  "${helper}" clear >/dev/null 2>&1 || true
}
|
|
938
|
-
|
|
939
|
-
# Probe whether GraphQL-backed gh commands are usable right now.
# Arguments: $1 - optional repo slug (used only to select auth credentials).
# Returns:   0 when the GraphQL rate-limit bucket has remaining quota.
# The verdict is cached for the life of this process in
# FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE ("yes"/"no"); as a side effect the
# shared core-rate-limit backoff state is recorded or cleared based on what
# the probe observes.
flow_github_graphql_available() {
  local repo_slug="${1:-}"
  local rate_limit_json=""
  local graphql_remaining=""
  local core_remaining=""
  local core_reset=""
  local stderr_file=""
  local stderr_output=""

  # Serve a previously computed answer without touching the API.
  if [[ "${FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE:-}" == "yes" ]]; then
    return 0
  fi
  if [[ "${FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE:-}" == "no" ]]; then
    return 1
  fi

  # An active core-API backoff implies GraphQL should not be attempted.
  if flow_github_core_rate_limit_active; then
    FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
    return 1
  fi

  flow_export_github_cli_auth_env "${repo_slug}"
  stderr_file="$(mktemp)"
  if rate_limit_json="$(gh api rate_limit 2>"${stderr_file}")"; then
    graphql_remaining="$(jq -r '.resources.graphql.remaining // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
    core_remaining="$(jq -r '.resources.core.remaining // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
    core_reset="$(jq -r '.resources.core.reset // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
    # Keep the shared core-rate-limit state in sync with the API's answer.
    if [[ "${core_remaining}" =~ ^[0-9]+$ ]]; then
      if (( core_remaining > 0 )); then
        flow_github_core_rate_limit_clear
      else
        flow_github_core_rate_limit_schedule "github-api-rate-limit" "${core_reset}"
        FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
        rm -f "${stderr_file}"
        return 1
      fi
    fi
  else
    # The probe itself failed: schedule a backoff when stderr looks like a
    # rate-limit rejection, then report GraphQL as unavailable.
    stderr_output="$(cat "${stderr_file}" 2>/dev/null || true)"
    if flow_github_output_indicates_rate_limit "${stderr_output}"; then
      flow_github_core_rate_limit_schedule "github-api-rate-limit"
    fi
    FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
    rm -f "${stderr_file}"
    return 1
  fi
  rm -f "${stderr_file}"

  if [[ "${graphql_remaining}" =~ ^[0-9]+$ ]] && (( graphql_remaining > 0 )); then
    FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="yes"
    return 0
  fi

  FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
  return 1
}
|
|
995
|
-
|
|
996
|
-
# Map a repo slug to a bash-safe cache variable name by replacing every
# non-alphanumeric character with '_'.
flow_github_repo_id_cache_var() {
  local slug="${1:-}"
  local safe_slug
  safe_slug="${slug//[^A-Za-z0-9]/_}"
  printf '%s%s\n' 'FLOW_GITHUB_REPO_ID_CACHE_' "${safe_slug}"
}
|
|
1001
|
-
|
|
1002
|
-
# Resolve a GitHub repository's numeric id from its "owner/name" slug.
# Resolution order: process-local cache -> explicitly configured id
# (flow_explicit_github_repo_id) -> paginated `gh api user/repos` lookup.
# Outputs the id on stdout; returns 1 when it cannot be determined.
# Side effect: caches the result in a dynamically named global variable.
flow_github_repo_id_for_repo_slug() {
  local repo_slug="${1:-}"
  local cache_var=""
  local cached_value=""
  local repos_pages_json=""
  local repo_id=""
  local stderr_file=""
  local stderr_output=""

  [[ -n "${repo_slug}" ]] || return 1
  command -v gh >/dev/null 2>&1 || return 1

  # Check the per-slug cache variable via indirect expansion.
  cache_var="$(flow_github_repo_id_cache_var "${repo_slug}")"
  cached_value="${!cache_var:-}"
  if [[ -n "${cached_value}" ]]; then
    printf '%s\n' "${cached_value}"
    return 0
  fi

  # Prefer an explicitly configured id — no API quota spent.
  repo_id="$(flow_explicit_github_repo_id "${repo_slug}" || true)"
  if [[ -n "${repo_id}" ]]; then
    printf -v "${cache_var}" '%s' "${repo_id}"
    printf '%s\n' "${repo_id}"
    return 0
  fi

  # Do not burn quota while a backoff is recorded.
  if flow_github_core_rate_limit_active; then
    return 1
  fi

  flow_export_github_cli_auth_env "${repo_slug}"
  stderr_file="$(mktemp)"
  # `|| true` keeps the substitution's status zero so the [[ -n ]] check is
  # what decides success; stderr is kept for rate-limit classification.
  if repos_pages_json="$(
    gh api 'user/repos?per_page=100&visibility=all&affiliation=owner,collaborator,organization_member' \
      --paginate \
      --slurp 2>"${stderr_file}" || true
  )" && [[ -n "${repos_pages_json}" ]]; then
    flow_github_core_rate_limit_clear
  else
    stderr_output="$(cat "${stderr_file}" 2>/dev/null || true)"
    if flow_github_output_indicates_rate_limit "${stderr_output}"; then
      flow_github_core_rate_limit_schedule "github-api-rate-limit"
    fi
  fi
  rm -f "${stderr_file}"
  [[ -n "${repos_pages_json}" ]] || return 1

  # Scan the slurped pages (list-of-lists or bare objects) for the slug.
  repo_id="$(
    REPOS_PAGES_JSON="${repos_pages_json}" TARGET_REPO_SLUG="${repo_slug}" python3 - <<'PY'
import json
import os
import sys

pages = json.loads(os.environ.get("REPOS_PAGES_JSON", "[]") or "[]")
target = os.environ.get("TARGET_REPO_SLUG", "")

for page in pages:
    if isinstance(page, list):
        for repo in page:
            if isinstance(repo, dict) and repo.get("full_name") == target:
                value = repo.get("id")
                if value is not None:
                    print(value)
                    sys.exit(0)
    elif isinstance(page, dict) and page.get("full_name") == target:
        value = page.get("id")
        if value is not None:
            print(value)
            sys.exit(0)
PY
  )"
  [[ -n "${repo_id}" ]] || return 1

  printf -v "${cache_var}" '%s' "${repo_id}"
  printf '%s\n' "${repo_id}"
}
|
|
1078
|
-
|
|
1079
|
-
# Build the id-based REST prefix ("repositories/<id>") for a repo slug.
# Fails when the numeric repository id cannot be resolved.
flow_github_repo_api_prefix() {
  local repo_slug="${1:-}"
  local resolved_id

  resolved_id="$(flow_github_repo_id_for_repo_slug "${repo_slug}")" || return 1
  printf '%s\n' "repositories/${resolved_id}"
}
|
|
1086
|
-
|
|
1087
|
-
# Run `gh api` against a repository route with rate-limit bookkeeping and an
# id-based fallback: try "repos/<slug>/<route>" first, and on a non-rate-limit
# failure retry as "repositories/<id>/<route>" (survives slug renames).
# Arguments: $1 - repo slug, $2 - route under the repo (may be empty);
#            any remaining args are forwarded to `gh api`.
# Outputs:   the successful response body on stdout; on failure, returns the
#            last gh exit status (or 1 when blocked by an active backoff).
flow_github_api_repo() {
  local repo_slug="${1:?repo slug required}"
  local route="${2:-}"
  local repo_prefix=""
  local direct_route="repos/${repo_slug}"
  local fallback_route=""
  local output=""
  local stdin_file=""
  local request_status=0
  local expect_input_value="false"
  local arg=""
  local index=0
  local gh_arg_count=0
  local stdout_file=""
  local stderr_file=""
  local error_output=""
  local -a gh_args=()

  # Gitea backends route to the Gitea implementation untouched.
  if flow_using_gitea; then
    flow_gitea_api_repo "$@"
    return $?
  fi

  route="${route#/}"
  if [[ -n "${route}" ]]; then
    direct_route="${direct_route}/${route}"
  fi

  if [[ $# -gt 2 ]]; then
    gh_args=("${@:3}")
    gh_arg_count="${#gh_args[@]}"
  fi
  # `--input -` would consume stdin, but we may invoke gh twice (direct
  # route, then id fallback). Buffer stdin once into a temp file and rewrite
  # the "-" argument so both attempts can read the same payload.
  for ((index = 0; index < ${#gh_args[@]}; index++)); do
    arg="${gh_args[${index}]}"
    if [[ "${expect_input_value}" == "true" ]]; then
      if [[ "${arg}" == "-" ]]; then
        if [[ -z "${stdin_file}" ]]; then
          stdin_file="$(mktemp)"
          cat >"${stdin_file}"
        fi
        gh_args[${index}]="${stdin_file}"
      fi
      expect_input_value="false"
    elif [[ "${arg}" == "--input" ]]; then
      expect_input_value="true"
    fi
  done

  # Honor an active backoff before spending any quota.
  if flow_github_core_rate_limit_active; then
    rm -f "${stdin_file}"
    return 1
  fi

  flow_export_github_cli_auth_env "${repo_slug}"
  stdout_file="$(mktemp)"
  stderr_file="$(mktemp)"
  # First attempt: slug-based route. The no-extra-args case is split out —
  # presumably to avoid expanding an empty "${gh_args[@]}" (errors under
  # `set -u` on bash < 4.4) — TODO confirm.
  if [[ "${gh_arg_count}" -gt 0 ]]; then
    if gh api "${direct_route}" "${gh_args[@]}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    fi
  else
    if gh api "${direct_route}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    fi
  fi
  # Direct route failed: a rate-limit failure aborts immediately (the
  # fallback would burn more quota for the same answer).
  error_output="$(cat "${stderr_file}" 2>/dev/null || true)"
  if flow_github_output_indicates_rate_limit "${error_output}"; then
    flow_github_core_rate_limit_schedule "github-api-rate-limit"
    rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
    return 1
  fi

  # Second attempt: numeric-id route, immune to slug changes.
  if ! repo_prefix="$(flow_github_repo_api_prefix "${repo_slug}")"; then
    rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
    return 1
  fi
  fallback_route="${repo_prefix}"
  if [[ -n "${route}" ]]; then
    fallback_route="${fallback_route}/${route}"
  fi
  if [[ "${gh_arg_count}" -gt 0 ]]; then
    if gh api "${fallback_route}" "${gh_args[@]}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    else
      request_status=$?
    fi
  else
    if gh api "${fallback_route}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    else
      request_status=$?
    fi
  fi
  error_output="$(cat "${stderr_file}" 2>/dev/null || true)"
  if flow_github_output_indicates_rate_limit "${error_output}"; then
    flow_github_core_rate_limit_schedule "github-api-rate-limit"
  fi
  rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
  return "${request_status}"
}
|
|
1203
|
-
|
|
1204
|
-
# Echo $1 when it is non-empty and validates under `jq -e .`, otherwise echo
# the default ($2, "null" when omitted). Note: `jq -e` exits non-zero when
# the last output is null or false, so a literal "null"/"false" input also
# falls back to the default.
flow_json_or_default() {
  local candidate="${1-}"
  local fallback="${2:-null}"

  if [[ -n "${candidate}" ]] && jq -e . >/dev/null 2>&1 <<<"${candidate}"; then
    printf '%s\n' "${candidate}"
    return 0
  fi
  printf '%s\n' "${fallback}"
}
|
|
1219
|
-
|
|
1220
|
-
# Percent-encode a string for use in a URL (no characters treated as safe).
flow_github_urlencode() {
  local input_value="${1:-}"

  # python3 reads the program from stdin ("-"), so the value rides in as
  # argv[1] and needs no shell quoting gymnastics.
  python3 - "${input_value}" <<'PY'
import sys
from urllib.parse import quote

print(quote(sys.argv[1] if len(sys.argv) > 1 else "", safe=""))
PY
}
|
|
1230
|
-
|
|
1231
|
-
# Fetch a GitHub issue (with comments) as gh-CLI-shaped JSON, failing hard.
# Arguments: $1 - repo slug, $2 - issue number.
# Returns non-zero when any underlying REST call fails — contrast with
# flow_github_issue_view_json, which degrades to empty defaults instead.
flow_github_issue_view_json_live() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local issue_json=""
  local comment_pages_json=""

  # Gitea backends use the dedicated Gitea implementation.
  if flow_using_gitea; then
    flow_gitea_issue_view_json "${repo_slug}" "${issue_id}"
    return $?
  fi

  # Fast path: GraphQL-backed `gh issue view` already emits the target shape.
  if flow_github_graphql_available "${repo_slug}" \
    && issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
    printf '%s\n' "${issue_json}"
    return 0
  fi

  # REST fallback; failures propagate as non-zero returns.
  if ! issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null)"; then
    return 1
  fi
  issue_json="$(flow_json_or_default "${issue_json}" '{}')"
  if ! comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments?per_page=100" --paginate --slurp 2>/dev/null)"; then
    return 1
  fi
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"

  # Normalize the REST payloads into the gh-CLI JSON shape.
  ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY'
import json
import os

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": issue.get("number"),
    "state": str(issue.get("state", "")).upper(),
    "title": issue.get("title") or "",
    "body": issue.get("body") or "",
    "url": issue.get("html_url") or issue.get("url") or "",
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "createdAt": issue.get("created_at") or "",
    "updatedAt": issue.get("updated_at") or "",
}

print(json.dumps(result))
PY
}
|
|
1294
|
-
|
|
1295
|
-
# Best-effort variant of flow_github_issue_view_json_live: same output shape,
# but REST failures degrade to empty defaults instead of a non-zero return.
# Arguments: $1 - repo slug, $2 - issue number.
flow_github_issue_view_json() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local issue_json=""
  local comment_pages_json=""

  # Gitea backends use the dedicated Gitea implementation.
  if flow_using_gitea; then
    flow_gitea_issue_view_json "${repo_slug}" "${issue_id}"
    return $?
  fi

  # Fast path: GraphQL-backed `gh issue view` already emits the target shape.
  if flow_github_graphql_available "${repo_slug}" \
    && issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
    printf '%s\n' "${issue_json}"
    return 0
  fi

  # REST fallback; `|| true` plus JSON defaults mean a failed call yields an
  # issue object with empty fields rather than an error.
  issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null || true)"
  issue_json="$(flow_json_or_default "${issue_json}" '{}')"
  comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments?per_page=100" --paginate --slurp 2>/dev/null || true)"
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"

  # Normalize the REST payloads into the gh-CLI JSON shape.
  ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY'
import json
import os

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": issue.get("number"),
    "state": str(issue.get("state", "")).upper(),
    "title": issue.get("title") or "",
    "body": issue.get("body") or "",
    "url": issue.get("html_url") or issue.get("url") or "",
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "createdAt": issue.get("created_at") or "",
    "updatedAt": issue.get("updated_at") or "",
}

print(json.dumps(result))
PY
}
|
|
1354
|
-
|
|
1355
|
-
# List GitHub issues as gh-CLI-shaped JSON, failing hard on REST errors.
# Arguments: $1 - repo slug, $2 - state (default open), $3 - max results
#            (default 100).
# Contrast with flow_github_issue_list_json, which degrades to an empty list.
flow_github_issue_list_json_live() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local issues_json=""
  local per_page="100"

  if flow_using_gitea; then
    flow_gitea_issue_list_json "${repo_slug}" "${state}" "${limit}"
    return $?
  fi

  # Fast path: GraphQL-backed `gh issue list` already emits the target shape.
  if flow_github_graphql_available "${repo_slug}" \
    && issues_json="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
    printf '%s\n' "${issues_json}"
    return 0
  fi

  # Ask for exactly `limit` items per page when smaller than the REST max.
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  if ! issues_json="$(flow_github_api_repo "${repo_slug}" "issues?state=${state}&per_page=${per_page}" --paginate --slurp 2>/dev/null)"; then
    return 1
  fi
  issues_json="$(flow_json_or_default "${issues_json}" '[]')"

  ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
issues = []

for page in pages:
    if isinstance(page, list):
        issues.extend(page)
    elif isinstance(page, dict):
        issues.append(page)

result = []
for issue in issues:
    if not isinstance(issue, dict):
        continue
    if issue.get("pull_request"):
        continue
    result.append({
        "number": issue.get("number"),
        "createdAt": issue.get("created_at") or "",
        "updatedAt": issue.get("updated_at") or "",
        "title": issue.get("title") or "",
        "url": issue.get("html_url") or issue.get("url") or "",
        "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    })

print(json.dumps(result[:limit]))
PY
}
|
|
1414
|
-
|
|
1415
|
-
# Best-effort GitHub issue listing in gh-CLI JSON shape.
# Arguments: $1 - repo slug, $2 - state (default open), $3 - max results
# (default 100). Prefers GraphQL via `gh issue list`; otherwise pages the
# REST issues endpoint and normalizes fields, treating API errors as an
# empty list.
flow_github_issue_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local pages_payload=""
  local page_size="100"

  if flow_using_gitea; then
    flow_gitea_issue_list_json "${repo_slug}" "${state}" "${limit}"
    return $?
  fi

  if flow_github_graphql_available "${repo_slug}" \
    && pages_payload="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
    printf '%s\n' "${pages_payload}"
    return 0
  fi

  # Request exactly `limit` items per page when below the REST max of 100.
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    page_size="${limit}"
  fi

  pages_payload="$(flow_github_api_repo "${repo_slug}" "issues?state=${state}&per_page=${page_size}" --paginate --slurp 2>/dev/null || true)"
  pages_payload="$(flow_json_or_default "${pages_payload}" '[]')"

  ISSUE_PAGES_JSON="${pages_payload}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
import json
import os

raw_pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
max_items = int(os.environ.get("ISSUE_LIMIT", "100") or "100")

flattened = []
for chunk in raw_pages:
    if isinstance(chunk, list):
        flattened += chunk
    elif isinstance(chunk, dict):
        flattened += [chunk]

# PRs also surface on the issues endpoint; drop them via the
# pull_request key.
normalized = [
    {
        "number": entry.get("number"),
        "createdAt": entry.get("created_at") or "",
        "updatedAt": entry.get("updated_at") or "",
        "title": entry.get("title") or "",
        "url": entry.get("html_url") or entry.get("url") or "",
        "labels": [{"name": label.get("name", "")} for label in entry.get("labels", []) if isinstance(label, dict)],
    }
    for entry in flattened
    if isinstance(entry, dict) and not entry.get("pull_request")
]

print(json.dumps(normalized[:max_items]))
PY
}
|
|
1472
|
-
|
|
1473
|
-
flow_github_pr_view_json() {
|
|
1474
|
-
local repo_slug="${1:?repo slug required}"
|
|
1475
|
-
local pr_number="${2:?pr number required}"
|
|
1476
|
-
local pr_json=""
|
|
1477
|
-
local issue_json=""
|
|
1478
|
-
local comment_pages_json=""
|
|
1479
|
-
local head_sha=""
|
|
1480
|
-
local check_runs_json="{}"
|
|
1481
|
-
local status_json="{}"
|
|
1482
|
-
|
|
1483
|
-
if flow_using_gitea; then
|
|
1484
|
-
flow_gitea_pr_view_json "${repo_slug}" "${pr_number}"
|
|
1485
|
-
return $?
|
|
1486
|
-
fi
|
|
1487
|
-
|
|
1488
|
-
if flow_github_graphql_available "${repo_slug}" \
|
|
1489
|
-
&& pr_json="$(gh pr view "${pr_number}" -R "${repo_slug}" --json number,title,body,url,headRefName,baseRefName,mergeStateStatus,statusCheckRollup,labels,comments,state,isDraft 2>/dev/null)"; then
|
|
1490
|
-
printf '%s\n' "${pr_json}"
|
|
1491
|
-
return 0
|
|
1492
|
-
fi
|
|
1493
|
-
|
|
1494
|
-
pr_json="$(flow_github_api_repo "${repo_slug}" "pulls/${pr_number}" 2>/dev/null || true)"
|
|
1495
|
-
pr_json="$(flow_json_or_default "${pr_json}" '{}')"
|
|
1496
|
-
issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null || true)"
|
|
1497
|
-
issue_json="$(flow_json_or_default "${issue_json}" '{}')"
|
|
1498
|
-
comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null || true)"
|
|
1499
|
-
comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
|
|
1500
|
-
head_sha="$(
|
|
1501
|
-
PR_JSON="${pr_json}" python3 - <<'PY'
|
|
1502
|
-
import json
|
|
1503
|
-
import os
|
|
1504
|
-
|
|
1505
|
-
payload = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
|
|
1506
|
-
head = payload.get("head") or {}
|
|
1507
|
-
print(head.get("sha") or "")
|
|
1508
|
-
PY
|
|
1509
|
-
)"
|
|
1510
|
-
if [[ -n "${head_sha}" ]]; then
|
|
1511
|
-
check_runs_json="$(flow_github_api_repo "${repo_slug}" "commits/${head_sha}/check-runs?per_page=100" 2>/dev/null || true)"
|
|
1512
|
-
check_runs_json="$(flow_json_or_default "${check_runs_json}" '{}')"
|
|
1513
|
-
status_json="$(flow_github_api_repo "${repo_slug}" "commits/${head_sha}/status" 2>/dev/null || true)"
|
|
1514
|
-
status_json="$(flow_json_or_default "${status_json}" '{}')"
|
|
1515
|
-
fi
|
|
1516
|
-
|
|
1517
|
-
PR_JSON="${pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" CHECK_RUNS_JSON="${check_runs_json}" STATUS_JSON="${status_json}" python3 - <<'PY'
|
|
1518
|
-
import json
|
|
1519
|
-
import os
|
|
1520
|
-
|
|
1521
|
-
pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
|
|
1522
|
-
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
|
|
1523
|
-
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
|
|
1524
|
-
check_runs_payload = json.loads(os.environ.get("CHECK_RUNS_JSON", "{}") or "{}")
|
|
1525
|
-
status_payload = json.loads(os.environ.get("STATUS_JSON", "{}") or "{}")
|
|
1526
|
-
|
|
1527
|
-
comments = []
|
|
1528
|
-
for page in comment_pages:
|
|
1529
|
-
if isinstance(page, list):
|
|
1530
|
-
comments.extend(page)
|
|
1531
|
-
elif isinstance(page, dict):
|
|
1532
|
-
comments.append(page)
|
|
1533
|
-
|
|
1534
|
-
status_check_rollup = []
|
|
1535
|
-
for run in check_runs_payload.get("check_runs", []) or []:
|
|
1536
|
-
if not isinstance(run, dict):
|
|
1537
|
-
continue
|
|
1538
|
-
status_check_rollup.append({
|
|
1539
|
-
"name": run.get("name") or "",
|
|
1540
|
-
"status": run.get("status") or "",
|
|
1541
|
-
"conclusion": run.get("conclusion") or "",
|
|
1542
|
-
})
|
|
1543
|
-
for item in status_payload.get("statuses", []) or []:
|
|
1544
|
-
if not isinstance(item, dict):
|
|
1545
|
-
continue
|
|
1546
|
-
state = item.get("state") or ""
|
|
1547
|
-
status_check_rollup.append({
|
|
1548
|
-
"context": item.get("context") or "",
|
|
1549
|
-
"status": state,
|
|
1550
|
-
"conclusion": state,
|
|
1551
|
-
})
|
|
1552
|
-
|
|
1553
|
-
pr_state = str(pr.get("state", "")).upper()
|
|
1554
|
-
if pr.get("merged_at"):
|
|
1555
|
-
pr_state = "MERGED"
|
|
1556
|
-
|
|
1557
|
-
result = {
|
|
1558
|
-
"number": pr.get("number"),
|
|
1559
|
-
"title": pr.get("title") or "",
|
|
1560
|
-
"body": pr.get("body") or issue.get("body") or "",
|
|
1561
|
-
"url": pr.get("html_url") or pr.get("url") or "",
|
|
1562
|
-
"headRefName": ((pr.get("head") or {}).get("ref")) or "",
|
|
1563
|
-
"headRefOid": ((pr.get("head") or {}).get("sha")) or "",
|
|
1564
|
-
"baseRefName": ((pr.get("base") or {}).get("ref")) or "",
|
|
1565
|
-
"mergeStateStatus": str(pr.get("mergeable_state") or "UNKNOWN").upper(),
|
|
1566
|
-
"statusCheckRollup": status_check_rollup,
|
|
1567
|
-
"labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
|
|
1568
|
-
"comments": [
|
|
1569
|
-
{
|
|
1570
|
-
"body": comment.get("body") or "",
|
|
1571
|
-
"createdAt": comment.get("created_at") or "",
|
|
1572
|
-
"updatedAt": comment.get("updated_at") or "",
|
|
1573
|
-
"url": comment.get("html_url") or "",
|
|
1574
|
-
}
|
|
1575
|
-
for comment in comments
|
|
1576
|
-
if isinstance(comment, dict)
|
|
1577
|
-
],
|
|
1578
|
-
"state": pr_state,
|
|
1579
|
-
"isDraft": bool(pr.get("draft")),
|
|
1580
|
-
"createdAt": pr.get("created_at") or "",
|
|
1581
|
-
"updatedAt": pr.get("updated_at") or "",
|
|
1582
|
-
"mergedAt": pr.get("merged_at") or "",
|
|
1583
|
-
"authorLogin": ((pr.get("user") or {}).get("login")) or "",
|
|
1584
|
-
}
|
|
1585
|
-
|
|
1586
|
-
print(json.dumps(result))
|
|
1587
|
-
PY
|
|
1588
|
-
}
|
|
1589
|
-
|
|
1590
|
-
flow_github_pr_list_json_live() {
|
|
1591
|
-
local repo_slug="${1:?repo slug required}"
|
|
1592
|
-
local state="${2:-open}"
|
|
1593
|
-
local limit="${3:-100}"
|
|
1594
|
-
local pr_json=""
|
|
1595
|
-
local per_page="100"
|
|
1596
|
-
local pulls_state="${state}"
|
|
1597
|
-
local pull_pages_json=""
|
|
1598
|
-
local selected_prs_json=""
|
|
1599
|
-
local item_jsonl_file=""
|
|
1600
|
-
local current_pr_json=""
|
|
1601
|
-
local issue_json=""
|
|
1602
|
-
local comment_pages_json=""
|
|
1603
|
-
local pr_number=""
|
|
1604
|
-
|
|
1605
|
-
if flow_using_gitea; then
|
|
1606
|
-
flow_gitea_pr_list_json "${repo_slug}" "${state}" "${limit}"
|
|
1607
|
-
return $?
|
|
1608
|
-
fi
|
|
1609
|
-
|
|
1610
|
-
if flow_github_graphql_available "${repo_slug}" \
|
|
1611
|
-
&& pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
|
|
1612
|
-
printf '%s\n' "${pr_json}"
|
|
1613
|
-
return 0
|
|
1614
|
-
fi
|
|
1615
|
-
|
|
1616
|
-
if [[ "${state}" == "merged" ]]; then
|
|
1617
|
-
pulls_state="closed"
|
|
1618
|
-
fi
|
|
1619
|
-
if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
|
|
1620
|
-
per_page="${limit}"
|
|
1621
|
-
fi
|
|
1622
|
-
|
|
1623
|
-
if ! pull_pages_json="$(flow_github_api_repo "${repo_slug}" "pulls?state=${pulls_state}&per_page=${per_page}" --paginate --slurp 2>/dev/null)"; then
|
|
1624
|
-
return 1
|
|
1625
|
-
fi
|
|
1626
|
-
pull_pages_json="$(flow_json_or_default "${pull_pages_json}" '[]')"
|
|
1627
|
-
|
|
1628
|
-
if ! selected_prs_json="$(
|
|
1629
|
-
PULL_PAGES_JSON="${pull_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
|
|
1630
|
-
import json
|
|
1631
|
-
import os
|
|
1632
|
-
|
|
1633
|
-
pages = json.loads(os.environ.get("PULL_PAGES_JSON", "[]") or "[]")
|
|
1634
|
-
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
|
|
1635
|
-
state_filter = os.environ.get("PR_STATE_FILTER", "open")
|
|
1636
|
-
pulls = []
|
|
1637
|
-
|
|
1638
|
-
for page in pages:
|
|
1639
|
-
if isinstance(page, list):
|
|
1640
|
-
pulls.extend(page)
|
|
1641
|
-
elif isinstance(page, dict):
|
|
1642
|
-
pulls.append(page)
|
|
1643
|
-
|
|
1644
|
-
result = []
|
|
1645
|
-
for pr in pulls:
|
|
1646
|
-
if not isinstance(pr, dict):
|
|
1647
|
-
continue
|
|
1648
|
-
if state_filter == "merged" and not pr.get("merged_at"):
|
|
1649
|
-
continue
|
|
1650
|
-
result.append({
|
|
1651
|
-
"number": pr.get("number"),
|
|
1652
|
-
"title": pr.get("title") or "",
|
|
1653
|
-
"body": pr.get("body") or "",
|
|
1654
|
-
"url": pr.get("html_url") or pr.get("url") or "",
|
|
1655
|
-
"headRefName": ((pr.get("head") or {}).get("ref")) or "",
|
|
1656
|
-
"createdAt": pr.get("created_at") or "",
|
|
1657
|
-
"mergedAt": pr.get("merged_at") or "",
|
|
1658
|
-
"isDraft": bool(pr.get("draft")),
|
|
1659
|
-
})
|
|
1660
|
-
if len(result) >= limit:
|
|
1661
|
-
break
|
|
1662
|
-
|
|
1663
|
-
print(json.dumps(result))
|
|
1664
|
-
PY
|
|
1665
|
-
)"; then
|
|
1666
|
-
return 1
|
|
1667
|
-
fi
|
|
1668
|
-
|
|
1669
|
-
item_jsonl_file="$(mktemp)"
|
|
1670
|
-
|
|
1671
|
-
while IFS= read -r current_pr_json; do
|
|
1672
|
-
[[ -n "${current_pr_json}" ]] || continue
|
|
1673
|
-
pr_number="$(jq -r '.number // ""' <<<"${current_pr_json}")"
|
|
1674
|
-
[[ -n "${pr_number}" ]] || continue
|
|
1675
|
-
if ! issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null)"; then
|
|
1676
|
-
rm -f "${item_jsonl_file}"
|
|
1677
|
-
return 1
|
|
1678
|
-
fi
|
|
1679
|
-
issue_json="$(flow_json_or_default "${issue_json}" '{}')"
|
|
1680
|
-
if ! comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null)"; then
|
|
1681
|
-
rm -f "${item_jsonl_file}"
|
|
1682
|
-
return 1
|
|
1683
|
-
fi
|
|
1684
|
-
comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
|
|
1685
|
-
PR_JSON="${current_pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY' >>"${item_jsonl_file}"
|
|
1686
|
-
import json
|
|
1687
|
-
import os
|
|
1688
|
-
|
|
1689
|
-
pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
|
|
1690
|
-
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
|
|
1691
|
-
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
|
|
1692
|
-
comments = []
|
|
1693
|
-
for page in comment_pages:
|
|
1694
|
-
if isinstance(page, list):
|
|
1695
|
-
comments.extend(page)
|
|
1696
|
-
elif isinstance(page, dict):
|
|
1697
|
-
comments.append(page)
|
|
1698
|
-
|
|
1699
|
-
result = {
|
|
1700
|
-
"number": pr.get("number"),
|
|
1701
|
-
"title": pr.get("title") or "",
|
|
1702
|
-
"body": pr.get("body") or issue.get("body") or "",
|
|
1703
|
-
"url": pr.get("url") or issue.get("html_url") or issue.get("url") or "",
|
|
1704
|
-
"headRefName": pr.get("headRefName") or "",
|
|
1705
|
-
"createdAt": pr.get("createdAt") or "",
|
|
1706
|
-
"mergedAt": pr.get("mergedAt") or "",
|
|
1707
|
-
"isDraft": bool(pr.get("isDraft")),
|
|
1708
|
-
"labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
|
|
1709
|
-
"comments": [
|
|
1710
|
-
{
|
|
1711
|
-
"body": comment.get("body") or "",
|
|
1712
|
-
"createdAt": comment.get("created_at") or "",
|
|
1713
|
-
"updatedAt": comment.get("updated_at") or "",
|
|
1714
|
-
"url": comment.get("html_url") or "",
|
|
1715
|
-
}
|
|
1716
|
-
for comment in comments
|
|
1717
|
-
if isinstance(comment, dict)
|
|
1718
|
-
],
|
|
1719
|
-
}
|
|
1720
|
-
|
|
1721
|
-
print(json.dumps(result))
|
|
1722
|
-
PY
|
|
1723
|
-
done < <(jq -c '.[]' <<<"${selected_prs_json}" 2>/dev/null || true)
|
|
1724
|
-
|
|
1725
|
-
if ! jq -s '.' "${item_jsonl_file}" 2>/dev/null; then
|
|
1726
|
-
rm -f "${item_jsonl_file}"
|
|
1727
|
-
return 1
|
|
1728
|
-
fi
|
|
1729
|
-
|
|
1730
|
-
rm -f "${item_jsonl_file}"
|
|
1731
|
-
}
|
|
1732
|
-
|
|
1733
|
-
flow_github_pr_list_json() {
|
|
1734
|
-
local repo_slug="${1:?repo slug required}"
|
|
1735
|
-
local state="${2:-open}"
|
|
1736
|
-
local limit="${3:-100}"
|
|
1737
|
-
local pr_json=""
|
|
1738
|
-
local per_page="100"
|
|
1739
|
-
local pulls_state="${state}"
|
|
1740
|
-
local pull_pages_json=""
|
|
1741
|
-
local selected_prs_json=""
|
|
1742
|
-
local item_jsonl_file=""
|
|
1743
|
-
local current_pr_json=""
|
|
1744
|
-
local issue_json=""
|
|
1745
|
-
local comment_pages_json=""
|
|
1746
|
-
local pr_number=""
|
|
1747
|
-
|
|
1748
|
-
if flow_using_gitea; then
|
|
1749
|
-
flow_gitea_pr_list_json "${repo_slug}" "${state}" "${limit}"
|
|
1750
|
-
return $?
|
|
1751
|
-
fi
|
|
1752
|
-
|
|
1753
|
-
if flow_github_graphql_available "${repo_slug}" \
|
|
1754
|
-
&& pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
|
|
1755
|
-
printf '%s\n' "${pr_json}"
|
|
1756
|
-
return 0
|
|
1757
|
-
fi
|
|
1758
|
-
|
|
1759
|
-
if [[ "${state}" == "merged" ]]; then
|
|
1760
|
-
pulls_state="closed"
|
|
1761
|
-
fi
|
|
1762
|
-
if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
|
|
1763
|
-
per_page="${limit}"
|
|
1764
|
-
fi
|
|
1765
|
-
|
|
1766
|
-
pull_pages_json="$(flow_github_api_repo "${repo_slug}" "pulls?state=${pulls_state}&per_page=${per_page}" --paginate --slurp 2>/dev/null || true)"
|
|
1767
|
-
pull_pages_json="$(flow_json_or_default "${pull_pages_json}" '[]')"
|
|
1768
|
-
|
|
1769
|
-
selected_prs_json="$(
|
|
1770
|
-
PULL_PAGES_JSON="${pull_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
|
|
1771
|
-
import json
|
|
1772
|
-
import os
|
|
1773
|
-
|
|
1774
|
-
pages = json.loads(os.environ.get("PULL_PAGES_JSON", "[]") or "[]")
|
|
1775
|
-
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
|
|
1776
|
-
state_filter = os.environ.get("PR_STATE_FILTER", "open")
|
|
1777
|
-
pulls = []
|
|
1778
|
-
|
|
1779
|
-
for page in pages:
|
|
1780
|
-
if isinstance(page, list):
|
|
1781
|
-
pulls.extend(page)
|
|
1782
|
-
elif isinstance(page, dict):
|
|
1783
|
-
pulls.append(page)
|
|
1784
|
-
|
|
1785
|
-
result = []
|
|
1786
|
-
for pr in pulls:
|
|
1787
|
-
if not isinstance(pr, dict):
|
|
1788
|
-
continue
|
|
1789
|
-
if state_filter == "merged" and not pr.get("merged_at"):
|
|
1790
|
-
continue
|
|
1791
|
-
result.append({
|
|
1792
|
-
"number": pr.get("number"),
|
|
1793
|
-
"title": pr.get("title") or "",
|
|
1794
|
-
"body": pr.get("body") or "",
|
|
1795
|
-
"url": pr.get("html_url") or pr.get("url") or "",
|
|
1796
|
-
"headRefName": ((pr.get("head") or {}).get("ref")) or "",
|
|
1797
|
-
"createdAt": pr.get("created_at") or "",
|
|
1798
|
-
"mergedAt": pr.get("merged_at") or "",
|
|
1799
|
-
"isDraft": bool(pr.get("draft")),
|
|
1800
|
-
})
|
|
1801
|
-
if len(result) >= limit:
|
|
1802
|
-
break
|
|
1803
|
-
|
|
1804
|
-
print(json.dumps(result))
|
|
1805
|
-
PY
|
|
1806
|
-
)" || selected_prs_json='[]'
|
|
1807
|
-
|
|
1808
|
-
item_jsonl_file="$(mktemp)"
|
|
1809
|
-
trap 'rm -f "${item_jsonl_file}"' RETURN
|
|
1810
|
-
|
|
1811
|
-
while IFS= read -r current_pr_json; do
|
|
1812
|
-
[[ -n "${current_pr_json}" ]] || continue
|
|
1813
|
-
pr_number="$(jq -r '.number // ""' <<<"${current_pr_json}")"
|
|
1814
|
-
[[ -n "${pr_number}" ]] || continue
|
|
1815
|
-
issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null || true)"
|
|
1816
|
-
issue_json="$(flow_json_or_default "${issue_json}" '{}')"
|
|
1817
|
-
comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null || true)"
|
|
1818
|
-
comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
|
|
1819
|
-
PR_JSON="${current_pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY' >>"${item_jsonl_file}"
|
|
1820
|
-
import json
|
|
1821
|
-
import os
|
|
1822
|
-
|
|
1823
|
-
pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
|
|
1824
|
-
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
|
|
1825
|
-
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
|
|
1826
|
-
comments = []
|
|
1827
|
-
for page in comment_pages:
|
|
1828
|
-
if isinstance(page, list):
|
|
1829
|
-
comments.extend(page)
|
|
1830
|
-
elif isinstance(page, dict):
|
|
1831
|
-
comments.append(page)
|
|
1832
|
-
|
|
1833
|
-
result = {
|
|
1834
|
-
"number": pr.get("number"),
|
|
1835
|
-
"title": pr.get("title") or "",
|
|
1836
|
-
"body": pr.get("body") or issue.get("body") or "",
|
|
1837
|
-
"url": pr.get("url") or issue.get("html_url") or issue.get("url") or "",
|
|
1838
|
-
"headRefName": pr.get("headRefName") or "",
|
|
1839
|
-
"createdAt": pr.get("createdAt") or "",
|
|
1840
|
-
"mergedAt": pr.get("mergedAt") or "",
|
|
1841
|
-
"isDraft": bool(pr.get("isDraft")),
|
|
1842
|
-
"labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
|
|
1843
|
-
"comments": [
|
|
1844
|
-
{
|
|
1845
|
-
"body": comment.get("body") or "",
|
|
1846
|
-
"createdAt": comment.get("created_at") or "",
|
|
1847
|
-
"updatedAt": comment.get("updated_at") or "",
|
|
1848
|
-
"url": comment.get("html_url") or "",
|
|
1849
|
-
}
|
|
1850
|
-
for comment in comments
|
|
1851
|
-
if isinstance(comment, dict)
|
|
1852
|
-
],
|
|
1853
|
-
}
|
|
1854
|
-
|
|
1855
|
-
print(json.dumps(result))
|
|
1856
|
-
PY
|
|
1857
|
-
done < <(jq -c '.[]' <<<"${selected_prs_json}")
|
|
1858
|
-
|
|
1859
|
-
ITEM_JSONL_FILE="${item_jsonl_file}" python3 - <<'PY'
|
|
1860
|
-
import json
|
|
1861
|
-
import os
|
|
1862
|
-
|
|
1863
|
-
path = os.environ.get("ITEM_JSONL_FILE", "")
|
|
1864
|
-
items = []
|
|
1865
|
-
if path:
|
|
1866
|
-
with open(path, "r", encoding="utf-8") as fh:
|
|
1867
|
-
for line in fh:
|
|
1868
|
-
line = line.strip()
|
|
1869
|
-
if not line:
|
|
1870
|
-
continue
|
|
1871
|
-
items.append(json.loads(line))
|
|
1872
|
-
|
|
1873
|
-
print(json.dumps(items))
|
|
1874
|
-
PY
|
|
1875
|
-
}
|
|
1876
|
-
|
|
1877
|
-
flow_github_issue_close() {
|
|
1878
|
-
local repo_slug="${1:?repo slug required}"
|
|
1879
|
-
local issue_id="${2:?issue id required}"
|
|
1880
|
-
local comment_body="${3:-}"
|
|
1881
|
-
local payload=""
|
|
1882
|
-
|
|
1883
|
-
if flow_using_gitea; then
|
|
1884
|
-
if [[ -n "${comment_body}" ]]; then
|
|
1885
|
-
flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments" --method POST -f body="${comment_body}" >/dev/null || return 1
|
|
1886
|
-
fi
|
|
1887
|
-
payload='{"state":"closed"}'
|
|
1888
|
-
printf '%s' "${payload}" | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
|
|
1889
|
-
return $?
|
|
1890
|
-
fi
|
|
1891
|
-
|
|
1892
|
-
if [[ -n "${comment_body}" ]]; then
|
|
1893
|
-
if gh issue close "${issue_id}" -R "${repo_slug}" --comment "${comment_body}" >/dev/null 2>&1; then
|
|
1894
|
-
return 0
|
|
1895
|
-
fi
|
|
1896
|
-
flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments" --method POST -f body="${comment_body}" >/dev/null
|
|
1897
|
-
else
|
|
1898
|
-
if gh issue close "${issue_id}" -R "${repo_slug}" >/dev/null 2>&1; then
|
|
1899
|
-
return 0
|
|
1900
|
-
fi
|
|
1901
|
-
fi
|
|
1902
|
-
|
|
1903
|
-
payload='{"state":"closed"}'
|
|
1904
|
-
printf '%s' "${payload}" | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
|
|
1905
|
-
}
|
|
1906
|
-
|
|
1907
|
-
flow_github_issue_update_body() {
|
|
1908
|
-
local repo_slug="${1:?repo slug required}"
|
|
1909
|
-
local issue_id="${2:?issue id required}"
|
|
1910
|
-
local body_text="${3:?body text required}"
|
|
1911
|
-
local payload=""
|
|
1912
|
-
|
|
1913
|
-
payload="$(
|
|
1914
|
-
ISSUE_BODY="${body_text}" python3 - <<'PY'
|
|
1915
|
-
import json
|
|
1916
|
-
import os
|
|
1917
|
-
|
|
1918
|
-
print(json.dumps({"body": os.environ.get("ISSUE_BODY", "")}))
|
|
1919
|
-
PY
|
|
1920
|
-
)"
|
|
1921
|
-
|
|
1922
|
-
printf '%s' "${payload}" | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
|
|
1923
|
-
}
|
|
1924
|
-
|
|
1925
|
-
flow_github_label_create() {
|
|
1926
|
-
local repo_slug="${1:?repo slug required}"
|
|
1927
|
-
local label_name="${2:?label name required}"
|
|
1928
|
-
local label_description="${3:-}"
|
|
1929
|
-
local label_color="${4:-1D76DB}"
|
|
1930
|
-
local encoded_label=""
|
|
1931
|
-
|
|
1932
|
-
if gh label create "${label_name}" -R "${repo_slug}" --description "${label_description}" --color "${label_color}" --force >/dev/null 2>&1; then
|
|
1933
|
-
return 0
|
|
1934
|
-
fi
|
|
1935
|
-
|
|
1936
|
-
if flow_github_api_repo "${repo_slug}" "labels" --method POST -f name="${label_name}" -f description="${label_description}" -f color="${label_color}" >/dev/null 2>&1; then
|
|
1937
|
-
return 0
|
|
1938
|
-
fi
|
|
1939
|
-
|
|
1940
|
-
encoded_label="$(flow_github_urlencode "${label_name}")"
|
|
1941
|
-
flow_github_api_repo "${repo_slug}" "labels/${encoded_label}" --method PATCH -f new_name="${label_name}" -f description="${label_description}" -f color="${label_color}" >/dev/null 2>&1 || true
|
|
1942
|
-
}
|
|
1943
|
-
|
|
1944
|
-
flow_github_issue_create() {
|
|
1945
|
-
local repo_slug="${1:?repo slug required}"
|
|
1946
|
-
local title="${2:?title required}"
|
|
1947
|
-
local body_file="${3:?body file required}"
|
|
1948
|
-
local issue_url=""
|
|
1949
|
-
local body_text=""
|
|
1950
|
-
|
|
1951
|
-
if flow_using_gitea; then
|
|
1952
|
-
body_text="$(cat "${body_file}")"
|
|
1953
|
-
issue_url="$(
|
|
1954
|
-
ISSUE_TITLE="${title}" ISSUE_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "issues" --method POST --input - | jq -r '.html_url // ""'
|
|
1955
|
-
import json
|
|
1956
|
-
import os
|
|
1957
|
-
|
|
1958
|
-
payload = {
|
|
1959
|
-
"title": os.environ.get("ISSUE_TITLE", ""),
|
|
1960
|
-
"body": os.environ.get("ISSUE_BODY", ""),
|
|
1961
|
-
}
|
|
1962
|
-
print(json.dumps(payload))
|
|
1963
|
-
PY
|
|
1964
|
-
)"
|
|
1965
|
-
[[ -n "${issue_url}" ]] || return 1
|
|
1966
|
-
printf '%s\n' "${issue_url}"
|
|
1967
|
-
return 0
|
|
1968
|
-
fi
|
|
1969
|
-
|
|
1970
|
-
if issue_url="$(gh issue create -R "${repo_slug}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
|
|
1971
|
-
printf '%s\n' "${issue_url}"
|
|
1972
|
-
return 0
|
|
1973
|
-
fi
|
|
1974
|
-
|
|
1975
|
-
body_text="$(cat "${body_file}")"
|
|
1976
|
-
issue_url="$(
|
|
1977
|
-
ISSUE_TITLE="${title}" ISSUE_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "issues" --method POST --input - | jq -r '.html_url // ""'
|
|
1978
|
-
import json
|
|
1979
|
-
import os
|
|
1980
|
-
|
|
1981
|
-
payload = {
|
|
1982
|
-
"title": os.environ.get("ISSUE_TITLE", ""),
|
|
1983
|
-
"body": os.environ.get("ISSUE_BODY", ""),
|
|
1984
|
-
}
|
|
1985
|
-
print(json.dumps(payload))
|
|
1986
|
-
PY
|
|
1987
|
-
)"
|
|
1988
|
-
[[ -n "${issue_url}" ]] || return 1
|
|
1989
|
-
printf '%s\n' "${issue_url}"
|
|
1990
|
-
}
|
|
1991
|
-
|
|
1992
|
-
flow_github_current_login() {
|
|
1993
|
-
if flow_using_gitea; then
|
|
1994
|
-
local user_json=""
|
|
1995
|
-
local auth_header=""
|
|
1996
|
-
local base_url=""
|
|
1997
|
-
|
|
1998
|
-
base_url="$(flow_gitea_base_url)" || return 1
|
|
1999
|
-
if [[ -n "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" ]]; then
|
|
2000
|
-
user_json="$(curl -sS -H "Authorization: token ${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" "${base_url}/api/v1/user" 2>/dev/null || true)"
|
|
2001
|
-
elif [[ -n "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" && -n "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" ]]; then
|
|
2002
|
-
user_json="$(curl -sS -u "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}:${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" "${base_url}/api/v1/user" 2>/dev/null || true)"
|
|
2003
|
-
fi
|
|
2004
|
-
jq -r '.login // ""' <<<"${user_json:-{}}" 2>/dev/null || true
|
|
2005
|
-
return 0
|
|
2006
|
-
fi
|
|
2007
|
-
|
|
2008
|
-
gh api user --jq '.login // ""' 2>/dev/null || true
|
|
2009
|
-
}
|
|
2010
|
-
|
|
2011
|
-
flow_github_pr_author_login() {
|
|
2012
|
-
local repo_slug="${1:?repo slug required}"
|
|
2013
|
-
local pr_number="${2:?pr number required}"
|
|
2014
|
-
|
|
2015
|
-
flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null | jq -r '.authorLogin // ""' 2>/dev/null || true
|
|
2016
|
-
}
|
|
2017
|
-
|
|
2018
|
-
flow_github_pr_head_oid() {
|
|
2019
|
-
local repo_slug="${1:?repo slug required}"
|
|
2020
|
-
local pr_number="${2:?pr number required}"
|
|
2021
|
-
|
|
2022
|
-
flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null | jq -r '.headRefOid // ""' 2>/dev/null || true
|
|
2023
|
-
}
|
|
2024
|
-
|
|
2025
|
-
flow_github_pr_review_approve() {
|
|
2026
|
-
local repo_slug="${1:?repo slug required}"
|
|
2027
|
-
local pr_number="${2:?pr number required}"
|
|
2028
|
-
local body_text="${3:-Automated final review passed.}"
|
|
2029
|
-
local output=""
|
|
2030
|
-
|
|
2031
|
-
if flow_using_gitea; then
|
|
2032
|
-
if output="$(
|
|
2033
|
-
REVIEW_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/reviews" --method POST --input - 2>&1
|
|
2034
|
-
import json
|
|
2035
|
-
import os
|
|
2036
|
-
|
|
2037
|
-
print(json.dumps({"event": "APPROVED", "body": os.environ.get("REVIEW_BODY", "")}))
|
|
2038
|
-
PY
|
|
2039
|
-
)"; then
|
|
2040
|
-
return 0
|
|
2041
|
-
fi
|
|
2042
|
-
if grep -q "approve your own pull is not allowed" <<<"${output}"; then
|
|
2043
|
-
return 0
|
|
2044
|
-
fi
|
|
2045
|
-
printf '%s\n' "${output}" >&2
|
|
2046
|
-
return 1
|
|
2047
|
-
fi
|
|
2048
|
-
|
|
2049
|
-
gh api "repos/${repo_slug}/pulls/${pr_number}/reviews" --method POST -f event=APPROVE -f body="${body_text}" >/dev/null
|
|
2050
|
-
}
|
|
2051
|
-
|
|
2052
|
-
flow_github_pr_create() {
|
|
2053
|
-
local repo_slug="${1:?repo slug required}"
|
|
2054
|
-
local base_branch="${2:?base branch required}"
|
|
2055
|
-
local head_branch="${3:?head branch required}"
|
|
2056
|
-
local title="${4:?title required}"
|
|
2057
|
-
local body_file="${5:?body file required}"
|
|
2058
|
-
local pr_url=""
|
|
2059
|
-
local body_text=""
|
|
2060
|
-
|
|
2061
|
-
if flow_using_gitea; then
|
|
2062
|
-
body_text="$(cat "${body_file}")"
|
|
2063
|
-
pr_url="$(
|
|
2064
|
-
BASE_BRANCH="${base_branch}" HEAD_BRANCH="${head_branch}" PR_TITLE="${title}" PR_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls" --method POST --input - | jq -r '.html_url // ""'
|
|
2065
|
-
import json
|
|
2066
|
-
import os
|
|
2067
|
-
|
|
2068
|
-
payload = {
|
|
2069
|
-
"title": os.environ.get("PR_TITLE", ""),
|
|
2070
|
-
"head": os.environ.get("HEAD_BRANCH", ""),
|
|
2071
|
-
"base": os.environ.get("BASE_BRANCH", ""),
|
|
2072
|
-
"body": os.environ.get("PR_BODY", ""),
|
|
2073
|
-
}
|
|
2074
|
-
print(json.dumps(payload))
|
|
2075
|
-
PY
|
|
2076
|
-
)"
|
|
2077
|
-
[[ -n "${pr_url}" ]] || return 1
|
|
2078
|
-
printf '%s\n' "${pr_url}"
|
|
2079
|
-
return 0
|
|
2080
|
-
fi
|
|
2081
|
-
|
|
2082
|
-
if pr_url="$(gh pr create -R "${repo_slug}" --base "${base_branch}" --head "${head_branch}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
|
|
2083
|
-
printf '%s\n' "${pr_url}"
|
|
2084
|
-
return 0
|
|
2085
|
-
fi
|
|
2086
|
-
|
|
2087
|
-
body_text="$(cat "${body_file}")"
|
|
2088
|
-
pr_url="$(
|
|
2089
|
-
BASE_BRANCH="${base_branch}" HEAD_BRANCH="${head_branch}" PR_TITLE="${title}" PR_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls" --method POST --input - | jq -r '.html_url // ""'
|
|
2090
|
-
import json
|
|
2091
|
-
import os
|
|
2092
|
-
|
|
2093
|
-
payload = {
|
|
2094
|
-
"title": os.environ.get("PR_TITLE", ""),
|
|
2095
|
-
"head": os.environ.get("HEAD_BRANCH", ""),
|
|
2096
|
-
"base": os.environ.get("BASE_BRANCH", ""),
|
|
2097
|
-
"body": os.environ.get("PR_BODY", ""),
|
|
2098
|
-
}
|
|
2099
|
-
print(json.dumps(payload))
|
|
2100
|
-
PY
|
|
2101
|
-
)"
|
|
2102
|
-
[[ -n "${pr_url}" ]] || return 1
|
|
2103
|
-
printf '%s\n' "${pr_url}"
|
|
2104
|
-
}
|
|
2105
|
-
|
|
2106
|
-
flow_github_pr_merge() {
|
|
2107
|
-
local repo_slug="${1:?repo slug required}"
|
|
2108
|
-
local pr_number="${2:?pr number required}"
|
|
2109
|
-
local merge_method="${3:-squash}"
|
|
2110
|
-
local delete_branch="${4:-no}"
|
|
2111
|
-
local pr_json=""
|
|
2112
|
-
local head_ref=""
|
|
2113
|
-
local encoded_ref=""
|
|
2114
|
-
|
|
2115
|
-
if flow_using_gitea; then
|
|
2116
|
-
printf '%s' "$(
|
|
2117
|
-
MERGE_METHOD="${merge_method}" DELETE_BRANCH="${delete_branch}" python3 - <<'PY'
|
|
2118
|
-
import json
|
|
2119
|
-
import os
|
|
2120
|
-
|
|
2121
|
-
method = os.environ.get("MERGE_METHOD", "squash")
|
|
2122
|
-
delete_branch = os.environ.get("DELETE_BRANCH", "no") == "yes"
|
|
2123
|
-
method_map = {"merge": "merge", "squash": "squash", "rebase": "rebase"}
|
|
2124
|
-
print(json.dumps({
|
|
2125
|
-
"Do": method_map.get(method, "squash"),
|
|
2126
|
-
"delete_branch_after_merge": delete_branch,
|
|
2127
|
-
}))
|
|
2128
|
-
PY
|
|
2129
|
-
)" | flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/merge" --method POST --input - >/dev/null
|
|
2130
|
-
return $?
|
|
2131
|
-
fi
|
|
2132
|
-
|
|
2133
|
-
if gh pr merge "${pr_number}" -R "${repo_slug}" "--${merge_method}" $([[ "${delete_branch}" == "yes" ]] && printf '%s' '--delete-branch') --admin >/dev/null 2>&1; then
|
|
2134
|
-
return 0
|
|
2135
|
-
fi
|
|
2136
|
-
|
|
2137
|
-
printf '{"merge_method":"%s"}' "${merge_method}" \
|
|
2138
|
-
| flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/merge" --method PUT --input - >/dev/null
|
|
2139
|
-
|
|
2140
|
-
if [[ "${delete_branch}" == "yes" ]]; then
|
|
2141
|
-
pr_json="$(flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null || printf '{}\n')"
|
|
2142
|
-
head_ref="$(jq -r '.headRefName // ""' <<<"${pr_json}")"
|
|
2143
|
-
if [[ -n "${head_ref}" ]]; then
|
|
2144
|
-
encoded_ref="$(flow_github_urlencode "heads/${head_ref}")"
|
|
2145
|
-
flow_github_api_repo "${repo_slug}" "git/refs/${encoded_ref}" --method DELETE >/dev/null 2>&1 || true
|
|
2146
|
-
fi
|
|
2147
|
-
fi
|
|
2148
|
-
}
|
|
2149
|
-
|
|
2150
|
-
flow_config_get() {
|
|
2151
|
-
local config_file="${1:?config file required}"
|
|
2152
|
-
local target_path="${2:?target path required}"
|
|
2153
|
-
|
|
2154
|
-
python3 - "$config_file" "$target_path" <<'PY'
|
|
2155
|
-
import sys
|
|
2156
|
-
|
|
2157
|
-
config_file = sys.argv[1]
|
|
2158
|
-
target_path = sys.argv[2]
|
|
2159
|
-
|
|
2160
|
-
stack = []
|
|
2161
|
-
found = False
|
|
2162
|
-
|
|
2163
|
-
with open(config_file, "r", encoding="utf-8") as fh:
|
|
2164
|
-
for raw_line in fh:
|
|
2165
|
-
stripped = raw_line.strip()
|
|
2166
|
-
if not stripped or stripped.startswith("#") or stripped.startswith("- "):
|
|
2167
|
-
continue
|
|
2168
|
-
if ":" not in raw_line:
|
|
2169
|
-
continue
|
|
2170
|
-
|
|
2171
|
-
indent = len(raw_line) - len(raw_line.lstrip())
|
|
2172
|
-
key, value = stripped.split(":", 1)
|
|
2173
|
-
key = key.strip()
|
|
2174
|
-
value = value.strip().strip("\"'")
|
|
2175
|
-
|
|
2176
|
-
while stack and indent <= stack[-1][0]:
|
|
2177
|
-
stack.pop()
|
|
2178
|
-
|
|
2179
|
-
stack.append((indent, key))
|
|
2180
|
-
current_path = ".".join(part for _, part in stack)
|
|
2181
|
-
|
|
2182
|
-
if current_path == target_path and value:
|
|
2183
|
-
print(value)
|
|
2184
|
-
found = True
|
|
2185
|
-
break
|
|
2186
|
-
|
|
2187
|
-
if not found:
|
|
2188
|
-
print("")
|
|
2189
|
-
PY
|
|
2190
|
-
}
|
|
2191
|
-
|
|
2192
|
-
flow_kv_get() {
|
|
2193
|
-
local payload="${1:-}"
|
|
2194
|
-
local key="${2:?key required}"
|
|
2195
|
-
|
|
2196
|
-
awk -F= -v key="${key}" '$1 == key { print substr($0, length(key) + 2); exit }' <<<"${payload}"
|
|
2197
|
-
}
|
|
2198
|
-
|
|
2199
|
-
flow_env_or_config() {
|
|
2200
|
-
local config_file="${1:?config file required}"
|
|
2201
|
-
local env_names="${2:?env names required}"
|
|
2202
|
-
local config_key="${3:?config key required}"
|
|
2203
|
-
local default_value="${4:-}"
|
|
2204
|
-
local env_name=""
|
|
2205
|
-
local value=""
|
|
2206
|
-
|
|
2207
|
-
for env_name in ${env_names}; do
|
|
2208
|
-
value="${!env_name:-}"
|
|
2209
|
-
if [[ -n "${value}" ]]; then
|
|
2210
|
-
printf '%s\n' "${value}"
|
|
2211
|
-
return 0
|
|
2212
|
-
fi
|
|
2213
|
-
done
|
|
2214
|
-
|
|
2215
|
-
if [[ -f "${config_file}" ]]; then
|
|
2216
|
-
value="$(flow_config_get "${config_file}" "${config_key}")"
|
|
2217
|
-
if [[ -n "${value}" ]]; then
|
|
2218
|
-
printf '%s\n' "${value}"
|
|
2219
|
-
return 0
|
|
2220
|
-
fi
|
|
2221
|
-
fi
|
|
2222
|
-
|
|
2223
|
-
printf '%s\n' "${default_value}"
|
|
2224
|
-
}
|
|
2225
|
-
|
|
2226
|
-
flow_resolve_adapter_id() {
|
|
2227
|
-
local config_file="${1:-}"
|
|
2228
|
-
local default_value=""
|
|
2229
|
-
if [[ -z "${config_file}" ]]; then
|
|
2230
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2231
|
-
fi
|
|
2232
|
-
default_value="$(flow_default_profile_id)"
|
|
2233
|
-
flow_env_or_config "${config_file}" "ACP_PROJECT_ID AGENT_PROJECT_ID" "id" "${default_value}"
|
|
2234
|
-
}
|
|
2235
|
-
|
|
2236
|
-
flow_resolve_profile_notes_file() {
|
|
2237
|
-
local config_file="${1:-}"
|
|
2238
|
-
local config_dir=""
|
|
2239
|
-
|
|
2240
|
-
if [[ -z "${config_file}" ]]; then
|
|
2241
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2242
|
-
fi
|
|
2243
|
-
|
|
2244
|
-
config_dir="$(cd "$(dirname "${config_file}")" 2>/dev/null && pwd -P || dirname "${config_file}")"
|
|
2245
|
-
printf '%s/README.md
|
|
2246
|
-
' "${config_dir}"
|
|
2247
|
-
}
|
|
2248
|
-
|
|
2249
|
-
flow_default_issue_session_prefix() {
|
|
2250
|
-
local config_file="${1:-}"
|
|
2251
|
-
local adapter_id=""
|
|
2252
|
-
|
|
2253
|
-
if [[ -z "${config_file}" ]]; then
|
|
2254
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2255
|
-
fi
|
|
2256
|
-
|
|
2257
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2258
|
-
printf '%s-issue-\n' "${adapter_id}"
|
|
2259
|
-
}
|
|
2260
|
-
|
|
2261
|
-
flow_default_pr_session_prefix() {
|
|
2262
|
-
local config_file="${1:-}"
|
|
2263
|
-
local adapter_id=""
|
|
2264
|
-
|
|
2265
|
-
if [[ -z "${config_file}" ]]; then
|
|
2266
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2267
|
-
fi
|
|
2268
|
-
|
|
2269
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2270
|
-
printf '%s-pr-\n' "${adapter_id}"
|
|
2271
|
-
}
|
|
2272
|
-
|
|
2273
|
-
flow_default_issue_branch_prefix() {
|
|
2274
|
-
local config_file="${1:-}"
|
|
2275
|
-
local adapter_id=""
|
|
2276
|
-
|
|
2277
|
-
if [[ -z "${config_file}" ]]; then
|
|
2278
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2279
|
-
fi
|
|
2280
|
-
|
|
2281
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2282
|
-
printf 'agent/%s/issue\n' "${adapter_id}"
|
|
2283
|
-
}
|
|
2284
|
-
|
|
2285
|
-
flow_default_pr_worktree_branch_prefix() {
|
|
2286
|
-
local config_file="${1:-}"
|
|
2287
|
-
local adapter_id=""
|
|
2288
|
-
|
|
2289
|
-
if [[ -z "${config_file}" ]]; then
|
|
2290
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2291
|
-
fi
|
|
2292
|
-
|
|
2293
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2294
|
-
printf 'agent/%s/pr\n' "${adapter_id}"
|
|
2295
|
-
}
|
|
2296
|
-
|
|
2297
|
-
flow_default_managed_pr_branch_globs() {
|
|
2298
|
-
local config_file="${1:-}"
|
|
2299
|
-
local adapter_id=""
|
|
2300
|
-
|
|
2301
|
-
if [[ -z "${config_file}" ]]; then
|
|
2302
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2303
|
-
fi
|
|
2304
|
-
|
|
2305
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2306
|
-
printf 'agent/%s/* codex/* openclaw/*\n' "${adapter_id}"
|
|
2307
|
-
}
|
|
2308
|
-
|
|
2309
|
-
flow_default_agent_root() {
|
|
2310
|
-
local config_file="${1:-}"
|
|
2311
|
-
local adapter_id=""
|
|
2312
|
-
local platform_home="${AGENT_PLATFORM_HOME:-${HOME}/.agent-runtime}"
|
|
2313
|
-
|
|
2314
|
-
if [[ -z "${config_file}" ]]; then
|
|
2315
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2316
|
-
fi
|
|
2317
|
-
|
|
2318
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2319
|
-
printf '%s/projects/%s
|
|
2320
|
-
' "${platform_home}" "${adapter_id}"
|
|
2321
|
-
}
|
|
2322
|
-
|
|
2323
|
-
flow_default_repo_slug() {
|
|
2324
|
-
local config_file="${1:-}"
|
|
2325
|
-
local adapter_id=""
|
|
2326
|
-
|
|
2327
|
-
if [[ -z "${config_file}" ]]; then
|
|
2328
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2329
|
-
fi
|
|
2330
|
-
|
|
2331
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2332
|
-
printf 'example/%s
|
|
2333
|
-
' "${adapter_id}"
|
|
2334
|
-
}
|
|
2335
|
-
|
|
2336
|
-
flow_default_repo_id() {
|
|
2337
|
-
printf '\n'
|
|
2338
|
-
}
|
|
2339
|
-
|
|
2340
|
-
flow_default_repo_root() {
|
|
2341
|
-
local config_file="${1:-}"
|
|
2342
|
-
local agent_root=""
|
|
2343
|
-
|
|
2344
|
-
if [[ -z "${config_file}" ]]; then
|
|
2345
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2346
|
-
fi
|
|
2347
|
-
|
|
2348
|
-
agent_root="$(flow_default_agent_root "${config_file}")"
|
|
2349
|
-
printf '%s/repo
|
|
2350
|
-
' "${agent_root}"
|
|
2351
|
-
}
|
|
2352
|
-
|
|
2353
|
-
flow_default_worktree_root() {
|
|
2354
|
-
local config_file="${1:-}"
|
|
2355
|
-
local agent_root=""
|
|
2356
|
-
|
|
2357
|
-
if [[ -z "${config_file}" ]]; then
|
|
2358
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2359
|
-
fi
|
|
2360
|
-
|
|
2361
|
-
agent_root="$(flow_default_agent_root "${config_file}")"
|
|
2362
|
-
printf '%s/worktrees
|
|
2363
|
-
' "${agent_root}"
|
|
2364
|
-
}
|
|
2365
|
-
|
|
2366
|
-
flow_default_retained_repo_root() {
|
|
2367
|
-
local config_file="${1:-}"
|
|
2368
|
-
local agent_root=""
|
|
2369
|
-
|
|
2370
|
-
if [[ -z "${config_file}" ]]; then
|
|
2371
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2372
|
-
fi
|
|
2373
|
-
|
|
2374
|
-
agent_root="$(flow_default_agent_root "${config_file}")"
|
|
2375
|
-
printf '%s/retained
|
|
2376
|
-
' "${agent_root}"
|
|
2377
|
-
}
|
|
2378
|
-
|
|
2379
|
-
flow_default_vscode_workspace_file() {
|
|
2380
|
-
local config_file="${1:-}"
|
|
2381
|
-
local agent_root=""
|
|
2382
|
-
|
|
2383
|
-
if [[ -z "${config_file}" ]]; then
|
|
2384
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2385
|
-
fi
|
|
2386
|
-
|
|
2387
|
-
agent_root="$(flow_default_agent_root "${config_file}")"
|
|
2388
|
-
printf '%s/workspace.code-workspace
|
|
2389
|
-
' "${agent_root}"
|
|
2390
|
-
}
|
|
2391
|
-
flow_resolve_repo_slug() {
|
|
2392
|
-
local config_file="${1:-}"
|
|
2393
|
-
local default_value=""
|
|
2394
|
-
if [[ -z "${config_file}" ]]; then
|
|
2395
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2396
|
-
fi
|
|
2397
|
-
default_value="$(flow_default_repo_slug "${config_file}")"
|
|
2398
|
-
flow_env_or_config "${config_file}" "ACP_REPO_SLUG F_LOSNING_REPO_SLUG" "repo.slug" "${default_value}"
|
|
2399
|
-
}
|
|
2400
|
-
|
|
2401
|
-
flow_resolve_repo_id() {
|
|
2402
|
-
local config_file="${1:-}"
|
|
2403
|
-
local default_value=""
|
|
2404
|
-
if [[ -z "${config_file}" ]]; then
|
|
2405
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2406
|
-
fi
|
|
2407
|
-
default_value="$(flow_default_repo_id)"
|
|
2408
|
-
flow_env_or_config "${config_file}" "ACP_REPO_ID F_LOSNING_REPO_ID ACP_GITHUB_REPOSITORY_ID F_LOSNING_GITHUB_REPOSITORY_ID" "repo.id" "${default_value}"
|
|
2409
|
-
}
|
|
2410
|
-
|
|
2411
|
-
flow_resolve_default_branch() {
|
|
2412
|
-
local config_file="${1:-}"
|
|
2413
|
-
if [[ -z "${config_file}" ]]; then
|
|
2414
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2415
|
-
fi
|
|
2416
|
-
flow_env_or_config "${config_file}" "ACP_DEFAULT_BRANCH F_LOSNING_DEFAULT_BRANCH" "repo.default_branch" "main"
|
|
2417
|
-
}
|
|
2418
|
-
|
|
2419
|
-
flow_resolve_project_label() {
|
|
2420
|
-
local config_file="${1:-}"
|
|
2421
|
-
local repo_slug=""
|
|
2422
|
-
local adapter_id=""
|
|
2423
|
-
local label=""
|
|
2424
|
-
|
|
2425
|
-
if [[ -z "${config_file}" ]]; then
|
|
2426
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2427
|
-
fi
|
|
2428
|
-
|
|
2429
|
-
repo_slug="$(flow_resolve_repo_slug "${config_file}")"
|
|
2430
|
-
adapter_id="$(flow_resolve_adapter_id "${config_file}")"
|
|
2431
|
-
label="${repo_slug##*/}"
|
|
2432
|
-
if [[ -n "${label}" ]]; then
|
|
2433
|
-
printf '%s\n' "${label}"
|
|
2434
|
-
else
|
|
2435
|
-
printf '%s\n' "${adapter_id}"
|
|
2436
|
-
fi
|
|
2437
|
-
}
|
|
2438
|
-
|
|
2439
|
-
flow_resolve_repo_root() {
|
|
2440
|
-
local config_file="${1:-}"
|
|
2441
|
-
local default_value=""
|
|
2442
|
-
if [[ -z "${config_file}" ]]; then
|
|
2443
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2444
|
-
fi
|
|
2445
|
-
default_value="$(flow_default_repo_root "${config_file}")"
|
|
2446
|
-
flow_env_or_config "${config_file}" "ACP_REPO_ROOT F_LOSNING_REPO_ROOT" "repo.root" "${default_value}"
|
|
2447
|
-
}
|
|
2448
|
-
|
|
2449
|
-
flow_resolve_agent_root() {
|
|
2450
|
-
local config_file="${1:-}"
|
|
2451
|
-
local default_value=""
|
|
2452
|
-
|
|
2453
|
-
if [[ -z "${config_file}" ]]; then
|
|
2454
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2455
|
-
fi
|
|
2456
|
-
|
|
2457
|
-
default_value="$(flow_default_agent_root "${config_file}")"
|
|
2458
|
-
flow_env_or_config "${config_file}" "ACP_AGENT_ROOT F_LOSNING_AGENT_ROOT" "runtime.orchestrator_agent_root" "${default_value}"
|
|
2459
|
-
}
|
|
2460
|
-
|
|
2461
|
-
flow_resolve_agent_repo_root() {
|
|
2462
|
-
local config_file="${1:-}"
|
|
2463
|
-
local default_value=""
|
|
2464
|
-
|
|
2465
|
-
if [[ -z "${config_file}" ]]; then
|
|
2466
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2467
|
-
fi
|
|
2468
|
-
|
|
2469
|
-
default_value="$(flow_resolve_repo_root "${config_file}")"
|
|
2470
|
-
flow_env_or_config "${config_file}" "ACP_AGENT_REPO_ROOT F_LOSNING_AGENT_REPO_ROOT" "runtime.agent_repo_root" "${default_value}"
|
|
2471
|
-
}
|
|
2472
|
-
|
|
2473
|
-
flow_resolve_worktree_root() {
|
|
2474
|
-
local config_file="${1:-}"
|
|
2475
|
-
local default_value=""
|
|
2476
|
-
if [[ -z "${config_file}" ]]; then
|
|
2477
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2478
|
-
fi
|
|
2479
|
-
default_value="$(flow_default_worktree_root "${config_file}")"
|
|
2480
|
-
flow_env_or_config "${config_file}" "ACP_WORKTREE_ROOT F_LOSNING_WORKTREE_ROOT" "runtime.worktree_root" "${default_value}"
|
|
2481
|
-
}
|
|
2482
|
-
|
|
2483
|
-
flow_resolve_runs_root() {
|
|
2484
|
-
local config_file="${1:-}"
|
|
2485
|
-
local default_value=""
|
|
2486
|
-
local explicit_root="${ACP_RUNS_ROOT:-${F_LOSNING_RUNS_ROOT:-}}"
|
|
2487
|
-
local umbrella_root="${ACP_AGENT_ROOT:-${F_LOSNING_AGENT_ROOT:-}}"
|
|
2488
|
-
|
|
2489
|
-
if [[ -z "${config_file}" ]]; then
|
|
2490
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2491
|
-
fi
|
|
2492
|
-
|
|
2493
|
-
if [[ -n "${explicit_root}" ]]; then
|
|
2494
|
-
printf '%s\n' "${explicit_root}"
|
|
2495
|
-
return 0
|
|
2496
|
-
fi
|
|
2497
|
-
|
|
2498
|
-
default_value="$(flow_resolve_agent_root "${config_file}")/runs"
|
|
2499
|
-
if [[ -n "${umbrella_root}" ]]; then
|
|
2500
|
-
printf '%s\n' "${default_value}"
|
|
2501
|
-
return 0
|
|
2502
|
-
fi
|
|
2503
|
-
|
|
2504
|
-
flow_env_or_config "${config_file}" "ACP_RUNS_ROOT F_LOSNING_RUNS_ROOT" "runtime.runs_root" "${default_value}"
|
|
2505
|
-
}
|
|
2506
|
-
|
|
2507
|
-
flow_resolve_state_root() {
|
|
2508
|
-
local config_file="${1:-}"
|
|
2509
|
-
local default_value=""
|
|
2510
|
-
local explicit_root="${ACP_STATE_ROOT:-${F_LOSNING_STATE_ROOT:-}}"
|
|
2511
|
-
local umbrella_root="${ACP_AGENT_ROOT:-${F_LOSNING_AGENT_ROOT:-}}"
|
|
2512
|
-
|
|
2513
|
-
if [[ -z "${config_file}" ]]; then
|
|
2514
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2515
|
-
fi
|
|
2516
|
-
|
|
2517
|
-
if [[ -n "${explicit_root}" ]]; then
|
|
2518
|
-
printf '%s\n' "${explicit_root}"
|
|
2519
|
-
return 0
|
|
2520
|
-
fi
|
|
2521
|
-
|
|
2522
|
-
default_value="$(flow_resolve_agent_root "${config_file}")/state"
|
|
2523
|
-
if [[ -n "${umbrella_root}" ]]; then
|
|
2524
|
-
printf '%s\n' "${default_value}"
|
|
2525
|
-
return 0
|
|
2526
|
-
fi
|
|
2527
|
-
|
|
2528
|
-
flow_env_or_config "${config_file}" "ACP_STATE_ROOT F_LOSNING_STATE_ROOT" "runtime.state_root" "${default_value}"
|
|
2529
|
-
}
|
|
2530
|
-
|
|
2531
|
-
flow_resolve_history_root() {
|
|
2532
|
-
local config_file="${1:-}"
|
|
2533
|
-
local default_value=""
|
|
2534
|
-
local explicit_root="${ACP_HISTORY_ROOT:-${F_LOSNING_HISTORY_ROOT:-}}"
|
|
2535
|
-
local umbrella_root="${ACP_AGENT_ROOT:-${F_LOSNING_AGENT_ROOT:-}}"
|
|
2536
|
-
|
|
2537
|
-
if [[ -z "${config_file}" ]]; then
|
|
2538
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2539
|
-
fi
|
|
2540
|
-
|
|
2541
|
-
if [[ -n "${explicit_root}" ]]; then
|
|
2542
|
-
printf '%s\n' "${explicit_root}"
|
|
2543
|
-
return 0
|
|
2544
|
-
fi
|
|
2545
|
-
|
|
2546
|
-
default_value="$(flow_resolve_agent_root "${config_file}")/history"
|
|
2547
|
-
if [[ -n "${umbrella_root}" ]]; then
|
|
2548
|
-
printf '%s\n' "${default_value}"
|
|
2549
|
-
return 0
|
|
2550
|
-
fi
|
|
2551
|
-
|
|
2552
|
-
flow_env_or_config "${config_file}" "ACP_HISTORY_ROOT F_LOSNING_HISTORY_ROOT" "runtime.history_root" "${default_value}"
|
|
2553
|
-
}
|
|
2554
|
-
|
|
2555
|
-
flow_resolve_retained_repo_root() {
|
|
2556
|
-
local config_file="${1:-}"
|
|
2557
|
-
local default_value=""
|
|
2558
|
-
if [[ -z "${config_file}" ]]; then
|
|
2559
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2560
|
-
fi
|
|
2561
|
-
default_value="$(flow_default_retained_repo_root "${config_file}")"
|
|
2562
|
-
flow_env_or_config "${config_file}" "ACP_RETAINED_REPO_ROOT F_LOSNING_RETAINED_REPO_ROOT" "runtime.retained_repo_root" "${default_value}"
|
|
2563
|
-
}
|
|
2564
|
-
|
|
2565
|
-
flow_resolve_source_repo_root() {
|
|
2566
|
-
local config_file="${1:-}"
|
|
2567
|
-
local default_value=""
|
|
2568
|
-
if [[ -z "${config_file}" ]]; then
|
|
2569
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2570
|
-
fi
|
|
2571
|
-
default_value="$(flow_resolve_retained_repo_root "${config_file}")"
|
|
2572
|
-
flow_env_or_config "${config_file}" "ACP_SOURCE_REPO_ROOT F_LOSNING_SOURCE_REPO_ROOT" "runtime.source_repo_root" "${default_value}"
|
|
2573
|
-
}
|
|
2574
|
-
|
|
2575
|
-
flow_resolve_vscode_workspace_file() {
|
|
2576
|
-
local config_file="${1:-}"
|
|
2577
|
-
local default_value=""
|
|
2578
|
-
if [[ -z "${config_file}" ]]; then
|
|
2579
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2580
|
-
fi
|
|
2581
|
-
default_value="$(flow_default_vscode_workspace_file "${config_file}")"
|
|
2582
|
-
flow_env_or_config "${config_file}" "ACP_VSCODE_WORKSPACE_FILE F_LOSNING_VSCODE_WORKSPACE_FILE" "runtime.vscode_workspace_file" "${default_value}"
|
|
2583
|
-
}
|
|
2584
|
-
|
|
2585
|
-
flow_resolve_web_playwright_command() {
|
|
2586
|
-
local config_file="${1:-}"
|
|
2587
|
-
if [[ -z "${config_file}" ]]; then
|
|
2588
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2589
|
-
fi
|
|
2590
|
-
flow_env_or_config "${config_file}" "ACP_WEB_PLAYWRIGHT_COMMAND F_LOSNING_WEB_PLAYWRIGHT_COMMAND" "execution.verification.web_playwright_command" "pnpm exec playwright test"
|
|
2591
|
-
}
|
|
2592
|
-
|
|
2593
|
-
flow_resolve_codex_quota_bin() {
|
|
2594
|
-
local flow_root="${1:-}"
|
|
2595
|
-
local shared_home=""
|
|
2596
|
-
local explicit_bin="${ACP_CODEX_QUOTA_BIN:-${F_LOSNING_CODEX_QUOTA_BIN:-}}"
|
|
2597
|
-
local candidate=""
|
|
2598
|
-
|
|
2599
|
-
if [[ -n "${explicit_bin}" ]]; then
|
|
2600
|
-
printf '%s\n' "${explicit_bin}"
|
|
2601
|
-
return 0
|
|
2602
|
-
fi
|
|
2603
|
-
|
|
2604
|
-
if [[ -z "${flow_root}" ]]; then
|
|
2605
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2606
|
-
fi
|
|
2607
|
-
shared_home="${SHARED_AGENT_HOME:-$(resolve_shared_agent_home "${flow_root}")}"
|
|
2608
|
-
|
|
2609
|
-
for candidate in \
|
|
2610
|
-
"${flow_root}/tools/bin/codex-quota" \
|
|
2611
|
-
"${shared_home}/tools/bin/codex-quota"; do
|
|
2612
|
-
if [[ -x "${candidate}" ]]; then
|
|
2613
|
-
printf '%s\n' "${candidate}"
|
|
2614
|
-
return 0
|
|
2615
|
-
fi
|
|
2616
|
-
done
|
|
2617
|
-
|
|
2618
|
-
candidate="$(command -v codex-quota 2>/dev/null || true)"
|
|
2619
|
-
if [[ -n "${candidate}" ]]; then
|
|
2620
|
-
printf '%s\n' "${candidate}"
|
|
2621
|
-
return 0
|
|
2622
|
-
fi
|
|
2623
|
-
|
|
2624
|
-
printf '%s\n' "${flow_root}/tools/bin/codex-quota"
|
|
2625
|
-
}
|
|
2626
|
-
|
|
2627
|
-
flow_resolve_codex_quota_manager_script() {
|
|
2628
|
-
local flow_root="${1:-}"
|
|
2629
|
-
local shared_home=""
|
|
2630
|
-
local explicit_script="${ACP_CODEX_QUOTA_MANAGER_SCRIPT:-${F_LOSNING_CODEX_QUOTA_MANAGER_SCRIPT:-}}"
|
|
2631
|
-
local candidate=""
|
|
2632
|
-
|
|
2633
|
-
if [[ -n "${explicit_script}" ]]; then
|
|
2634
|
-
printf '%s\n' "${explicit_script}"
|
|
2635
|
-
return 0
|
|
2636
|
-
fi
|
|
2637
|
-
|
|
2638
|
-
if [[ -z "${flow_root}" ]]; then
|
|
2639
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2640
|
-
fi
|
|
2641
|
-
shared_home="${SHARED_AGENT_HOME:-$(resolve_shared_agent_home "${flow_root}")}"
|
|
2642
|
-
|
|
2643
|
-
for candidate in \
|
|
2644
|
-
"${flow_root}/tools/vendor/codex-quota-manager/scripts/auto-switch.sh" \
|
|
2645
|
-
"${shared_home}/tools/vendor/codex-quota-manager/scripts/auto-switch.sh" \
|
|
2646
|
-
"${shared_home}/skills/openclaw/codex-quota-manager/scripts/auto-switch.sh"; do
|
|
2647
|
-
if [[ -x "${candidate}" ]]; then
|
|
2648
|
-
printf '%s\n' "${candidate}"
|
|
2649
|
-
return 0
|
|
2650
|
-
fi
|
|
2651
|
-
done
|
|
2652
|
-
|
|
2653
|
-
printf '%s\n' "${flow_root}/tools/vendor/codex-quota-manager/scripts/auto-switch.sh"
|
|
2654
|
-
}
|
|
2655
|
-
|
|
2656
|
-
flow_resolve_template_file() {
|
|
2657
|
-
local template_name="${1:?template name required}"
|
|
2658
|
-
local workspace_dir="${2:-}"
|
|
2659
|
-
local config_file="${3:-}"
|
|
2660
|
-
local flow_root=""
|
|
2661
|
-
local profile_id=""
|
|
2662
|
-
local config_dir=""
|
|
2663
|
-
local template_dir=""
|
|
2664
|
-
local candidate=""
|
|
2665
|
-
local workspace_real=""
|
|
2666
|
-
local canonical_tools_real=""
|
|
2667
|
-
|
|
2668
|
-
if [[ -z "${config_file}" ]]; then
|
|
2669
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2670
|
-
fi
|
|
2671
|
-
|
|
2672
|
-
flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[0]}")"
|
|
2673
|
-
config_dir="$(cd "$(dirname "${config_file}")" 2>/dev/null && pwd -P || dirname "${config_file}")"
|
|
2674
|
-
|
|
2675
|
-
for template_dir in \
|
|
2676
|
-
"${AGENT_CONTROL_PLANE_TEMPLATE_DIR:-}" \
|
|
2677
|
-
"${ACP_TEMPLATE_DIR:-}" \
|
|
2678
|
-
"${F_LOSNING_TEMPLATE_DIR:-}"; do
|
|
2679
|
-
if [[ -n "${template_dir}" && -f "${template_dir}/${template_name}" ]]; then
|
|
2680
|
-
printf '%s\n' "${template_dir}/${template_name}"
|
|
2681
|
-
return 0
|
|
2682
|
-
fi
|
|
2683
|
-
done
|
|
2684
|
-
|
|
2685
|
-
if [[ -n "${workspace_dir}" && -f "${workspace_dir}/templates/${template_name}" ]]; then
|
|
2686
|
-
workspace_real="$(cd "${workspace_dir}" && pwd -P)"
|
|
2687
|
-
canonical_tools_real="$(cd "${flow_root}/tools" && pwd -P)"
|
|
2688
|
-
if [[ "${workspace_real}" != "${canonical_tools_real}" ]]; then
|
|
2689
|
-
printf '%s\n' "${workspace_dir}/templates/${template_name}"
|
|
2690
|
-
return 0
|
|
2691
|
-
fi
|
|
2692
|
-
fi
|
|
2693
|
-
|
|
2694
|
-
candidate="${config_dir}/templates/${template_name}"
|
|
2695
|
-
if [[ -f "${candidate}" ]]; then
|
|
2696
|
-
printf '%s\n' "${candidate}"
|
|
2697
|
-
return 0
|
|
2698
|
-
fi
|
|
2699
|
-
|
|
2700
|
-
if [[ -n "${workspace_dir}" && -f "${workspace_dir}/templates/${template_name}" ]]; then
|
|
2701
|
-
printf '%s\n' "${workspace_dir}/templates/${template_name}"
|
|
2702
|
-
return 0
|
|
2703
|
-
fi
|
|
2704
|
-
|
|
2705
|
-
printf '%s\n' "${flow_root}/tools/templates/${template_name}"
|
|
2706
|
-
}
|
|
2707
|
-
|
|
2708
|
-
flow_resolve_retry_cooldowns() {
|
|
2709
|
-
local config_file="${1:-}"
|
|
2710
|
-
if [[ -z "${config_file}" ]]; then
|
|
2711
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2712
|
-
fi
|
|
2713
|
-
flow_env_or_config "${config_file}" "ACP_RETRY_COOLDOWNS F_LOSNING_RETRY_COOLDOWNS" "execution.retry.cooldowns" "300,900,1800,3600"
|
|
2714
|
-
}
|
|
2715
|
-
|
|
2716
|
-
flow_resolve_provider_quota_cooldowns() {
|
|
2717
|
-
local config_file="${1:-}"
|
|
2718
|
-
if [[ -z "${config_file}" ]]; then
|
|
2719
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2720
|
-
fi
|
|
2721
|
-
flow_env_or_config "${config_file}" "ACP_PROVIDER_QUOTA_COOLDOWNS F_LOSNING_PROVIDER_QUOTA_COOLDOWNS" "execution.provider_quota.cooldowns" "300,900,1800,3600"
|
|
2722
|
-
}
|
|
2723
|
-
|
|
2724
|
-
flow_resolve_provider_pool_order() {
|
|
2725
|
-
local config_file="${1:-}"
|
|
2726
|
-
if [[ -z "${config_file}" ]]; then
|
|
2727
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2728
|
-
fi
|
|
2729
|
-
flow_env_or_config "${config_file}" "ACP_PROVIDER_POOL_ORDER F_LOSNING_PROVIDER_POOL_ORDER" "execution.provider_pool_order" ""
|
|
2730
|
-
}
|
|
2731
|
-
|
|
2732
|
-
flow_provider_pool_names() {
|
|
2733
|
-
local config_file="${1:-}"
|
|
2734
|
-
local order=""
|
|
2735
|
-
local pool_name=""
|
|
2736
|
-
|
|
2737
|
-
if [[ -z "${config_file}" ]]; then
|
|
2738
|
-
config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
|
|
2739
|
-
fi
|
|
2740
|
-
|
|
2741
|
-
order="$(flow_resolve_provider_pool_order "${config_file}")"
|
|
2742
|
-
for pool_name in ${order}; do
|
|
2743
|
-
[[ -n "${pool_name}" ]] || continue
|
|
2744
|
-
printf '%s\n' "${pool_name}"
|
|
2745
|
-
done
|
|
2746
|
-
}
|
|
2747
|
-
|
|
2748
|
-
flow_provider_pools_enabled() {
|
|
2749
|
-
local config_file="${1:-}"
|
|
2750
|
-
[[ -n "$(flow_resolve_provider_pool_order "${config_file}")" ]]
|
|
2751
|
-
}
|
|
2752
|
-
|
|
2753
|
-
flow_provider_pool_value() {
|
|
2754
|
-
local config_file="${1:?config file required}"
|
|
2755
|
-
local pool_name="${2:?pool name required}"
|
|
2756
|
-
local relative_path="${3:?relative path required}"
|
|
2757
|
-
|
|
2758
|
-
flow_config_get "${config_file}" "execution.provider_pools.${pool_name}.${relative_path}"
|
|
2759
|
-
}
|
|
2760
|
-
|
|
2761
|
-
flow_provider_pool_backend() {
|
|
2762
|
-
local config_file="${1:?config file required}"
|
|
2763
|
-
local pool_name="${2:?pool name required}"
|
|
2764
|
-
|
|
2765
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "coding_worker"
|
|
2766
|
-
}
|
|
2767
|
-
|
|
2768
|
-
flow_provider_pool_safe_profile() {
|
|
2769
|
-
local config_file="${1:?config file required}"
|
|
2770
|
-
local pool_name="${2:?pool name required}"
|
|
2771
|
-
|
|
2772
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "safe_profile"
|
|
2773
|
-
}
|
|
2774
|
-
|
|
2775
|
-
flow_provider_pool_bypass_profile() {
|
|
2776
|
-
local config_file="${1:?config file required}"
|
|
2777
|
-
local pool_name="${2:?pool name required}"
|
|
2778
|
-
|
|
2779
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "bypass_profile"
|
|
2780
|
-
}
|
|
2781
|
-
|
|
2782
|
-
flow_provider_pool_claude_model() {
|
|
2783
|
-
local config_file="${1:?config file required}"
|
|
2784
|
-
local pool_name="${2:?pool name required}"
|
|
2785
|
-
|
|
2786
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "claude.model"
|
|
2787
|
-
}
|
|
2788
|
-
|
|
2789
|
-
flow_provider_pool_claude_permission_mode() {
|
|
2790
|
-
local config_file="${1:?config file required}"
|
|
2791
|
-
local pool_name="${2:?pool name required}"
|
|
2792
|
-
|
|
2793
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "claude.permission_mode"
|
|
2794
|
-
}
|
|
2795
|
-
|
|
2796
|
-
flow_provider_pool_claude_effort() {
|
|
2797
|
-
local config_file="${1:?config file required}"
|
|
2798
|
-
local pool_name="${2:?pool name required}"
|
|
2799
|
-
|
|
2800
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "claude.effort"
|
|
2801
|
-
}
|
|
2802
|
-
|
|
2803
|
-
flow_provider_pool_claude_timeout_seconds() {
|
|
2804
|
-
local config_file="${1:?config file required}"
|
|
2805
|
-
local pool_name="${2:?pool name required}"
|
|
2806
|
-
|
|
2807
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "claude.timeout_seconds"
|
|
2808
|
-
}
|
|
2809
|
-
|
|
2810
|
-
flow_provider_pool_claude_max_attempts() {
|
|
2811
|
-
local config_file="${1:?config file required}"
|
|
2812
|
-
local pool_name="${2:?pool name required}"
|
|
2813
|
-
|
|
2814
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "claude.max_attempts"
|
|
2815
|
-
}
|
|
2816
|
-
|
|
2817
|
-
flow_provider_pool_claude_retry_backoff_seconds() {
|
|
2818
|
-
local config_file="${1:?config file required}"
|
|
2819
|
-
local pool_name="${2:?pool name required}"
|
|
2820
|
-
|
|
2821
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "claude.retry_backoff_seconds"
|
|
2822
|
-
}
|
|
2823
|
-
|
|
2824
|
-
flow_provider_pool_openclaw_model() {
|
|
2825
|
-
local config_file="${1:?config file required}"
|
|
2826
|
-
local pool_name="${2:?pool name required}"
|
|
2827
|
-
|
|
2828
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "openclaw.model"
|
|
2829
|
-
}
|
|
2830
|
-
|
|
2831
|
-
flow_provider_pool_openclaw_thinking() {
|
|
2832
|
-
local config_file="${1:?config file required}"
|
|
2833
|
-
local pool_name="${2:?pool name required}"
|
|
2834
|
-
|
|
2835
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "openclaw.thinking"
|
|
2836
|
-
}
|
|
2837
|
-
|
|
2838
|
-
flow_provider_pool_openclaw_timeout_seconds() {
|
|
2839
|
-
local config_file="${1:?config file required}"
|
|
2840
|
-
local pool_name="${2:?pool name required}"
|
|
2841
|
-
|
|
2842
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "openclaw.timeout_seconds"
|
|
2843
|
-
}
|
|
2844
|
-
|
|
2845
|
-
flow_provider_pool_ollama_model() {
|
|
2846
|
-
local config_file="${1:?config file required}"
|
|
2847
|
-
local pool_name="${2:?pool name required}"
|
|
2848
|
-
|
|
2849
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "ollama.model"
|
|
2850
|
-
}
|
|
2851
|
-
|
|
2852
|
-
flow_provider_pool_ollama_base_url() {
|
|
2853
|
-
local config_file="${1:?config file required}"
|
|
2854
|
-
local pool_name="${2:?pool name required}"
|
|
2855
|
-
|
|
2856
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "ollama.base_url"
|
|
2857
|
-
}
|
|
2858
|
-
|
|
2859
|
-
flow_provider_pool_ollama_timeout_seconds() {
|
|
2860
|
-
local config_file="${1:?config file required}"
|
|
2861
|
-
local pool_name="${2:?pool name required}"
|
|
2862
|
-
|
|
2863
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "ollama.timeout_seconds"
|
|
2864
|
-
}
|
|
2865
|
-
|
|
2866
|
-
flow_provider_pool_pi_model() {
|
|
2867
|
-
local config_file="${1:?config file required}"
|
|
2868
|
-
local pool_name="${2:?pool name required}"
|
|
2869
|
-
|
|
2870
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "pi.model"
|
|
2871
|
-
}
|
|
2872
|
-
|
|
2873
|
-
flow_provider_pool_pi_thinking() {
|
|
2874
|
-
local config_file="${1:?config file required}"
|
|
2875
|
-
local pool_name="${2:?pool name required}"
|
|
2876
|
-
|
|
2877
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "pi.thinking"
|
|
2878
|
-
}
|
|
2879
|
-
|
|
2880
|
-
flow_provider_pool_pi_timeout_seconds() {
|
|
2881
|
-
local config_file="${1:?config file required}"
|
|
2882
|
-
local pool_name="${2:?pool name required}"
|
|
2883
|
-
|
|
2884
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "pi.timeout_seconds"
|
|
2885
|
-
}
|
|
2886
|
-
|
|
2887
|
-
flow_provider_pool_opencode_model() {
|
|
2888
|
-
local config_file="${1:?config file required}"
|
|
2889
|
-
local pool_name="${2:?pool name required}"
|
|
2890
|
-
|
|
2891
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "opencode.model"
|
|
2892
|
-
}
|
|
2893
|
-
|
|
2894
|
-
flow_provider_pool_opencode_timeout_seconds() {
|
|
2895
|
-
local config_file="${1:?config file required}"
|
|
2896
|
-
local pool_name="${2:?pool name required}"
|
|
2897
|
-
|
|
2898
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "opencode.timeout_seconds"
|
|
2899
|
-
}
|
|
2900
|
-
|
|
2901
|
-
flow_provider_pool_kilo_model() {
|
|
2902
|
-
local config_file="${1:?config file required}"
|
|
2903
|
-
local pool_name="${2:?pool name required}"
|
|
2904
|
-
|
|
2905
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "kilo.model"
|
|
2906
|
-
}
|
|
2907
|
-
|
|
2908
|
-
flow_provider_pool_kilo_timeout_seconds() {
|
|
2909
|
-
local config_file="${1:?config file required}"
|
|
2910
|
-
local pool_name="${2:?pool name required}"
|
|
2911
|
-
|
|
2912
|
-
flow_provider_pool_value "${config_file}" "${pool_name}" "kilo.timeout_seconds"
|
|
2913
|
-
}
|
|
2914
|
-
|
|
2915
|
-
flow_sanitize_provider_key() {
|
|
2916
|
-
local raw_key="${1:?raw key required}"
|
|
2917
|
-
|
|
2918
|
-
printf '%s' "${raw_key}" \
|
|
2919
|
-
| tr '[:upper:]' '[:lower:]' \
|
|
2920
|
-
| sed -E 's/[^a-z0-9._-]+/-/g; s/^-+//; s/-+$//; s/-+/-/g'
|
|
2921
|
-
}
|
|
2922
|
-
|
|
2923
|
-
# Resolve the identity string that names a pool's underlying model.
# codex pools are identified by their safe profile; every other known
# backend by its configured model. Unknown backends print an empty line.
# Arguments: $1 - config file path, $2 - pool name.
flow_provider_pool_model_identity() {
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"
  local pool_backend=""

  pool_backend="$(flow_provider_pool_backend "${cfg}" "${pool}")"
  case "${pool_backend}" in
    codex)    flow_provider_pool_safe_profile "${cfg}" "${pool}" ;;
    claude)   flow_provider_pool_claude_model "${cfg}" "${pool}" ;;
    openclaw) flow_provider_pool_openclaw_model "${cfg}" "${pool}" ;;
    ollama)   flow_provider_pool_ollama_model "${cfg}" "${pool}" ;;
    pi)       flow_provider_pool_pi_model "${cfg}" "${pool}" ;;
    opencode) flow_provider_pool_opencode_model "${cfg}" "${pool}" ;;
    kilo)     flow_provider_pool_kilo_model "${cfg}" "${pool}" ;;
    *)        printf '\n' ;;
  esac
}
|
|
2956
|
-
|
|
2957
|
-
# Compute the full runtime state of one provider pool and print it as
# KEY=value lines on stdout (one key per line; consumed downstream via
# awk/flow_kv_get in flow_selected_provider_pool_env).
# Arguments: $1 - config file path, $2 - pool name.
# Emits VALID=no when the backend is unknown or its required config
# fields are empty; READY=no when invalid, when no model identity
# resolves, or when persisted retry state schedules the next attempt in
# the future.
flow_provider_pool_state_get() {
  local config_file="${1:?config file required}"
  local pool_name="${2:?pool name required}"
  local backend=""
  local model=""
  local state_root=""
  local provider_key=""
  local state_file=""
  # Retry-state defaults used when no persisted state file exists.
  local attempts="0"
  local next_attempt_epoch="0"
  local next_attempt_at=""
  local last_reason=""
  local updated_at=""
  local ready="yes"
  local valid="yes"
  local now_epoch=""
  local safe_profile=""
  local bypass_profile=""
  local claude_model=""
  local claude_permission_mode=""
  local claude_effort=""
  local claude_timeout_seconds=""
  local claude_max_attempts=""
  local claude_retry_backoff_seconds=""
  local openclaw_model=""
  local openclaw_thinking=""
  local openclaw_timeout_seconds=""
  local ollama_model=""
  local ollama_base_url=""
  local ollama_timeout_seconds=""
  local pi_model=""
  local pi_thinking=""
  local pi_timeout_seconds=""
  local opencode_model=""
  local opencode_timeout_seconds=""
  local kilo_model=""
  local kilo_timeout_seconds=""

  # Read every backend's settings up front; only the active backend's
  # values participate in validation, but all are echoed at the end.
  backend="$(flow_provider_pool_backend "${config_file}" "${pool_name}")"
  safe_profile="$(flow_provider_pool_safe_profile "${config_file}" "${pool_name}")"
  bypass_profile="$(flow_provider_pool_bypass_profile "${config_file}" "${pool_name}")"
  claude_model="$(flow_provider_pool_claude_model "${config_file}" "${pool_name}")"
  claude_permission_mode="$(flow_provider_pool_claude_permission_mode "${config_file}" "${pool_name}")"
  claude_effort="$(flow_provider_pool_claude_effort "${config_file}" "${pool_name}")"
  claude_timeout_seconds="$(flow_provider_pool_claude_timeout_seconds "${config_file}" "${pool_name}")"
  claude_max_attempts="$(flow_provider_pool_claude_max_attempts "${config_file}" "${pool_name}")"
  claude_retry_backoff_seconds="$(flow_provider_pool_claude_retry_backoff_seconds "${config_file}" "${pool_name}")"
  openclaw_model="$(flow_provider_pool_openclaw_model "${config_file}" "${pool_name}")"
  openclaw_thinking="$(flow_provider_pool_openclaw_thinking "${config_file}" "${pool_name}")"
  openclaw_timeout_seconds="$(flow_provider_pool_openclaw_timeout_seconds "${config_file}" "${pool_name}")"
  ollama_model="$(flow_provider_pool_ollama_model "${config_file}" "${pool_name}")"
  ollama_base_url="$(flow_provider_pool_ollama_base_url "${config_file}" "${pool_name}")"
  ollama_timeout_seconds="$(flow_provider_pool_ollama_timeout_seconds "${config_file}" "${pool_name}")"
  pi_model="$(flow_provider_pool_pi_model "${config_file}" "${pool_name}")"
  pi_thinking="$(flow_provider_pool_pi_thinking "${config_file}" "${pool_name}")"
  pi_timeout_seconds="$(flow_provider_pool_pi_timeout_seconds "${config_file}" "${pool_name}")"
  opencode_model="$(flow_provider_pool_opencode_model "${config_file}" "${pool_name}")"
  opencode_timeout_seconds="$(flow_provider_pool_opencode_timeout_seconds "${config_file}" "${pool_name}")"
  kilo_model="$(flow_provider_pool_kilo_model "${config_file}" "${pool_name}")"
  kilo_timeout_seconds="$(flow_provider_pool_kilo_timeout_seconds "${config_file}" "${pool_name}")"
  model="$(flow_provider_pool_model_identity "${config_file}" "${pool_name}")"

  # Per-backend validation: each backend requires its own set of
  # non-empty config fields; anything else marks the pool invalid.
  case "${backend}" in
    codex)
      [[ -n "${safe_profile}" && -n "${bypass_profile}" ]] || valid="no"
      ;;
    claude)
      [[ -n "${claude_model}" && -n "${claude_permission_mode}" && -n "${claude_effort}" && -n "${claude_timeout_seconds}" && -n "${claude_max_attempts}" && -n "${claude_retry_backoff_seconds}" ]] || valid="no"
      ;;
    openclaw)
      [[ -n "${openclaw_model}" && -n "${openclaw_thinking}" && -n "${openclaw_timeout_seconds}" ]] || valid="no"
      ;;
    ollama)
      [[ -n "${ollama_model}" ]] || valid="no"
      ;;
    pi)
      [[ -n "${pi_model}" ]] || valid="no"
      ;;
    opencode)
      [[ -n "${opencode_model}" && -n "${opencode_timeout_seconds}" ]] || valid="no"
      ;;
    kilo)
      [[ -n "${kilo_model}" && -n "${kilo_timeout_seconds}" ]] || valid="no"
      ;;
    *)
      valid="no"
      ;;
  esac

  if [[ "${valid}" == "yes" && -n "${model}" ]]; then
    # Retry state is persisted per "<backend>-<model>" key under the
    # state root; absence of the file means "never attempted".
    state_root="$(flow_resolve_state_root "${config_file}")"
    provider_key="$(flow_sanitize_provider_key "${backend}-${model}")"
    state_file="${state_root}/retries/providers/${provider_key}.env"

    if [[ -f "${state_file}" ]]; then
      # NOTE(review): 'set -a' exports every variable the state file
      # defines into this process's environment, and the sourced values
      # (ATTEMPTS, etc.) remain set afterwards — presumably intentional,
      # but verify no stale values leak into later calls.
      set -a
      # shellcheck source=/dev/null
      source "${state_file}"
      set +a
      attempts="${ATTEMPTS:-0}"
      next_attempt_epoch="${NEXT_ATTEMPT_EPOCH:-0}"
      next_attempt_at="${NEXT_ATTEMPT_AT:-}"
      last_reason="${LAST_REASON:-}"
      updated_at="${UPDATED_AT:-}"
    fi

    # The pool is in cooldown while the persisted next-attempt epoch is
    # still in the future (non-numeric epochs are treated as ready).
    now_epoch="$(date +%s)"
    if [[ "${next_attempt_epoch}" =~ ^[0-9]+$ ]] && (( next_attempt_epoch > now_epoch )); then
      ready="no"
    fi
  else
    ready="no"
  fi

  # Emit the complete state as KEY=value lines, invalid pools included
  # (callers filter on VALID/READY).
  printf 'POOL_NAME=%s\n' "${pool_name}"
  printf 'VALID=%s\n' "${valid}"
  printf 'BACKEND=%s\n' "${backend}"
  printf 'MODEL=%s\n' "${model}"
  printf 'PROVIDER_KEY=%s\n' "${provider_key}"
  printf 'ATTEMPTS=%s\n' "${attempts}"
  printf 'NEXT_ATTEMPT_EPOCH=%s\n' "${next_attempt_epoch}"
  printf 'NEXT_ATTEMPT_AT=%s\n' "${next_attempt_at}"
  printf 'READY=%s\n' "${ready}"
  printf 'LAST_REASON=%s\n' "${last_reason}"
  printf 'UPDATED_AT=%s\n' "${updated_at}"
  printf 'SAFE_PROFILE=%s\n' "${safe_profile}"
  printf 'BYPASS_PROFILE=%s\n' "${bypass_profile}"
  printf 'CLAUDE_MODEL=%s\n' "${claude_model}"
  printf 'CLAUDE_PERMISSION_MODE=%s\n' "${claude_permission_mode}"
  printf 'CLAUDE_EFFORT=%s\n' "${claude_effort}"
  printf 'CLAUDE_TIMEOUT_SECONDS=%s\n' "${claude_timeout_seconds}"
  printf 'CLAUDE_MAX_ATTEMPTS=%s\n' "${claude_max_attempts}"
  printf 'CLAUDE_RETRY_BACKOFF_SECONDS=%s\n' "${claude_retry_backoff_seconds}"
  printf 'OPENCLAW_MODEL=%s\n' "${openclaw_model}"
  printf 'OPENCLAW_THINKING=%s\n' "${openclaw_thinking}"
  printf 'OPENCLAW_TIMEOUT_SECONDS=%s\n' "${openclaw_timeout_seconds}"
  printf 'OLLAMA_MODEL=%s\n' "${ollama_model}"
  printf 'OLLAMA_BASE_URL=%s\n' "${ollama_base_url}"
  printf 'OLLAMA_TIMEOUT_SECONDS=%s\n' "${ollama_timeout_seconds}"
  printf 'PI_MODEL=%s\n' "${pi_model}"
  printf 'PI_THINKING=%s\n' "${pi_thinking}"
  printf 'PI_TIMEOUT_SECONDS=%s\n' "${pi_timeout_seconds}"
  printf 'OPENCODE_MODEL=%s\n' "${opencode_model}"
  printf 'OPENCODE_TIMEOUT_SECONDS=%s\n' "${opencode_timeout_seconds}"
  printf 'KILO_MODEL=%s\n' "${kilo_model}"
  printf 'KILO_TIMEOUT_SECONDS=%s\n' "${kilo_timeout_seconds}"
}
|
|
3104
|
-
|
|
3105
|
-
# Pick the provider pool to use and print its state (the KEY=value block
# from flow_provider_pool_state_get) plus POOLS_EXHAUSTED and
# SELECTION_REASON lines.
# Selection: first VALID pool with READY=yes, in flow_provider_pool_names
# order; if none is ready, the valid pool whose NEXT_ATTEMPT_EPOCH is
# soonest, marked POOLS_EXHAUSTED=yes / SELECTION_REASON=all-cooldown.
# Arguments: $1 - config file path (optional; resolved from the calling
# script's location when empty).
# Returns: 1 when pools are disabled or no valid pool exists.
flow_selected_provider_pool_env() {
  local config_file="${1:-}"
  local pool_name=""
  local candidate=""
  local candidate_valid=""
  local candidate_ready=""
  local candidate_next_epoch="0"
  local exhausted_candidate=""
  local exhausted_epoch=""

  if [[ -z "${config_file}" ]]; then
    # BASH_SOURCE[1] points at the caller's script so config resolution
    # is relative to it, not to this library file.
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  if ! flow_provider_pools_enabled "${config_file}"; then
    return 1
  fi

  while IFS= read -r pool_name; do
    [[ -n "${pool_name}" ]] || continue
    candidate="$(flow_provider_pool_state_get "${config_file}" "${pool_name}")"
    # Extract single fields from the KEY=value block; values containing
    # '=' would be truncated by -F=, but these fields are plain tokens.
    candidate_valid="$(awk -F= '/^VALID=/{print $2}' <<<"${candidate}")"
    [[ "${candidate_valid}" == "yes" ]] || continue

    candidate_ready="$(awk -F= '/^READY=/{print $2}' <<<"${candidate}")"
    if [[ "${candidate_ready}" == "yes" ]]; then
      # First ready pool wins; emit its state and stop scanning.
      printf '%s\n' "${candidate}"
      printf 'POOLS_EXHAUSTED=no\n'
      printf 'SELECTION_REASON=ready\n'
      return 0
    fi

    # Not ready: track the cooling-down pool with the earliest retry time
    # as the fallback choice.
    candidate_next_epoch="$(awk -F= '/^NEXT_ATTEMPT_EPOCH=/{print $2}' <<<"${candidate}")"
    if [[ -z "${exhausted_candidate}" ]]; then
      exhausted_candidate="${candidate}"
      exhausted_epoch="${candidate_next_epoch}"
      continue
    fi

    if [[ "${candidate_next_epoch}" =~ ^[0-9]+$ && "${exhausted_epoch}" =~ ^[0-9]+$ ]] && (( candidate_next_epoch < exhausted_epoch )); then
      exhausted_candidate="${candidate}"
      exhausted_epoch="${candidate_next_epoch}"
    fi
  done < <(flow_provider_pool_names "${config_file}")

  # No valid pool at all: signal failure to the caller.
  [[ -n "${exhausted_candidate}" ]] || return 1

  printf '%s\n' "${exhausted_candidate}"
  printf 'POOLS_EXHAUSTED=yes\n'
  printf 'SELECTION_REASON=all-cooldown\n'
}
|
|
3156
|
-
|
|
3157
|
-
# Resolve the session-name prefix for issue sessions.
# Precedence: ACP_ISSUE_SESSION_PREFIX / F_LOSNING_ISSUE_SESSION_PREFIX
# env vars, then session_naming.issue_prefix in config, then the default.
# Arguments: $1 - config file path (optional; resolved from caller when empty).
flow_resolve_issue_session_prefix() {
  local cfg="${1:-}"
  local fallback=""

  if [[ -z "${cfg}" ]]; then
    cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  fallback="$(flow_default_issue_session_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_ISSUE_SESSION_PREFIX F_LOSNING_ISSUE_SESSION_PREFIX" "session_naming.issue_prefix" "${fallback}"
}
|
|
3166
|
-
|
|
3167
|
-
# Resolve the session-name prefix for PR sessions.
# Precedence: ACP_PR_SESSION_PREFIX / F_LOSNING_PR_SESSION_PREFIX env
# vars, then session_naming.pr_prefix in config, then the default.
# Arguments: $1 - config file path (optional; resolved from caller when empty).
flow_resolve_pr_session_prefix() {
  local cfg="${1:-}"
  local fallback=""

  if [[ -z "${cfg}" ]]; then
    cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  fallback="$(flow_default_pr_session_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_PR_SESSION_PREFIX F_LOSNING_PR_SESSION_PREFIX" "session_naming.pr_prefix" "${fallback}"
}
|
|
3176
|
-
|
|
3177
|
-
# Resolve the git branch prefix for issue work branches.
# Precedence: ACP_ISSUE_BRANCH_PREFIX / F_LOSNING_ISSUE_BRANCH_PREFIX env
# vars, then session_naming.issue_branch_prefix in config, then the default.
# Arguments: $1 - config file path (optional; resolved from caller when empty).
flow_resolve_issue_branch_prefix() {
  local cfg="${1:-}"
  local fallback=""

  if [[ -z "${cfg}" ]]; then
    cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  fallback="$(flow_default_issue_branch_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_ISSUE_BRANCH_PREFIX F_LOSNING_ISSUE_BRANCH_PREFIX" "session_naming.issue_branch_prefix" "${fallback}"
}
|
|
3186
|
-
|
|
3187
|
-
# Resolve the git branch prefix for PR worktree branches.
# Precedence: ACP_PR_WORKTREE_BRANCH_PREFIX / F_LOSNING_PR_WORKTREE_BRANCH_PREFIX
# env vars, then session_naming.pr_worktree_branch_prefix in config, then
# the default.
# Arguments: $1 - config file path (optional; resolved from caller when empty).
flow_resolve_pr_worktree_branch_prefix() {
  local cfg="${1:-}"
  local fallback=""

  if [[ -z "${cfg}" ]]; then
    cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  fallback="$(flow_default_pr_worktree_branch_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_PR_WORKTREE_BRANCH_PREFIX F_LOSNING_PR_WORKTREE_BRANCH_PREFIX" "session_naming.pr_worktree_branch_prefix" "${fallback}"
}
|
|
3196
|
-
|
|
3197
|
-
# Resolve the whitespace-separated list of branch globs that mark a PR
# branch as managed by the agent.
# Precedence: ACP_MANAGED_PR_BRANCH_GLOBS / F_LOSNING_MANAGED_PR_BRANCH_GLOBS
# env vars, then session_naming.managed_pr_branch_globs in config, then
# the default.
# Arguments: $1 - config file path (optional; resolved from caller when empty).
flow_resolve_managed_pr_branch_globs() {
  local cfg="${1:-}"
  local fallback=""

  if [[ -z "${cfg}" ]]; then
    cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  fallback="$(flow_default_managed_pr_branch_globs "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_MANAGED_PR_BRANCH_GLOBS F_LOSNING_MANAGED_PR_BRANCH_GLOBS" "session_naming.managed_pr_branch_globs" "${fallback}"
}
|
|
3206
|
-
|
|
3207
|
-
# Print $1 with all regex metacharacters backslash-escaped, delegating to
# Python's re.escape for exact, well-tested escaping semantics.
# Arguments: $1 - literal text to escape (optional; empty allowed).
flow_escape_regex() {
  local text="${1:-}"

  python3 - "${text}" <<'PY'
import re, sys

print(re.escape(sys.argv[1]))
PY
}
|
|
3216
|
-
|
|
3217
|
-
# Print one managed PR branch prefix per line, derived from the configured
# branch globs by stripping a trailing '*'. Globs that reduce to an empty
# string are skipped.
# Arguments: $1 - config file path (optional, forwarded to the resolver).
flow_managed_pr_prefixes() {
  local cfg="${1:-}"
  local glob_list=""
  local glob=""
  local stem=""

  glob_list="$(flow_resolve_managed_pr_branch_globs "${cfg}")"
  # Intentional word-splitting: the globs are a whitespace-separated list.
  for glob in ${glob_list}; do
    stem="${glob%\*}"
    [[ -n "${stem}" ]] || continue
    printf '%s\n' "${stem}"
  done
}
|
|
3230
|
-
|
|
3231
|
-
# Emit the managed PR branch prefixes as a JSON array of strings (e.g.
# ["agent/", "bot/"]); an empty prefix list yields [].
# Arguments: $1 - config file path (optional, forwarded to
# flow_managed_pr_prefixes).
flow_managed_pr_prefixes_json() {
  local config_file="${1:-}"
  local prefixes=()
  local prefix=""

  while IFS= read -r prefix; do
    [[ -n "${prefix}" ]] || continue
    prefixes+=("${prefix}")
  done < <(flow_managed_pr_prefixes "${config_file}")

  # ${prefixes[@]+...} guards the empty-array expansion: under `set -u`
  # on bash < 4.4, "${prefixes[@]}" on an empty array is an "unbound
  # variable" error and would abort the caller.
  python3 - ${prefixes[@]+"${prefixes[@]}"} <<'PY'
import json
import sys

print(json.dumps(sys.argv[1:]))
PY
}
|
|
3248
|
-
|
|
3249
|
-
# Build a PCRE-style regex matching managed issue branches, capturing the
# issue number in a named group <id>. The alternation is assembled from the
# managed PR prefixes (trailing '/' trimmed, regex-escaped); when no
# prefixes are configured, "agent/<adapter-id>" is used as the fallback.
# Arguments: $1 - config file path (optional, forwarded to helpers).
flow_managed_issue_branch_regex() {
  local cfg="${1:-}"
  local raw_prefix=""
  local trimmed=""
  local escaped=""
  local alternation=""

  while IFS= read -r raw_prefix; do
    [[ -n "${raw_prefix}" ]] || continue
    trimmed="${raw_prefix%/}"
    escaped="$(flow_escape_regex "${trimmed}")"
    # Append with '|' separator; first entry goes in bare.
    alternation="${alternation:+${alternation}|}${escaped}"
  done < <(flow_managed_pr_prefixes "${cfg}")

  if [[ -z "${alternation}" ]]; then
    alternation="$(flow_escape_regex "agent/$(flow_resolve_adapter_id "${cfg}")")"
  fi

  printf '^(?:%s)/issue-(?<id>[0-9]+)(?:-|$)\n' "${alternation}"
}
|
|
3273
|
-
|
|
3274
|
-
# Resolve the execution configuration (coding worker backend plus every
# backend's settings) and export it as F_LOSNING_* / ACP_* environment
# variable pairs for child processes.
# Two sources, in priority order:
#   1. provider-pool selection (only when ACP_CODING_WORKER is not already
#      set in the environment and a pool order is configured);
#   2. legacy per-key env/config lookups via flow_env_or_config.
# Arguments: $1 - config file path (optional; resolved from the calling
# script's location when empty). Returns 0 silently if the config file
# does not exist.
flow_export_execution_env() {
  local config_file="${1:-}"

  if [[ -z "${config_file}" ]]; then
    # BASH_SOURCE[1] points at the caller's script so config resolution
    # is relative to it, not to this library file.
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  [[ -f "${config_file}" ]] || return 0

  local repo_id=""
  local coding_worker=""
  local provider_quota_cooldowns=""
  local provider_pool_order=""
  local provider_pool_selection=""
  local explicit_coding_worker=""
  local active_provider_pool_name=""
  local active_provider_backend=""
  local active_provider_model=""
  local active_provider_key=""
  local active_provider_next_attempt_epoch=""
  local active_provider_next_attempt_at=""
  local active_provider_last_reason=""
  local active_provider_pools_exhausted="no"
  local active_provider_selection_reason="legacy-config"
  local safe_profile=""
  local bypass_profile=""
  local claude_model=""
  local claude_permission_mode=""
  local claude_effort=""
  local claude_timeout=""
  local claude_max_attempts=""
  local claude_retry_backoff_seconds=""
  local openclaw_model=""
  local openclaw_thinking=""
  local openclaw_timeout=""
  local openclaw_stall=""
  local ollama_model=""
  local ollama_base_url=""
  local ollama_timeout=""
  local pi_model=""
  local pi_thinking=""
  local pi_timeout=""
  local opencode_model=""
  local opencode_timeout=""
  local kilo_model=""
  local kilo_timeout=""

  repo_id="$(flow_resolve_repo_id "${config_file}")"
  provider_quota_cooldowns="$(flow_resolve_provider_quota_cooldowns "${config_file}")"
  provider_pool_order="$(flow_resolve_provider_pool_order "${config_file}")"
  # An explicit ACP_CODING_WORKER in the environment disables pool
  # selection entirely (env override wins).
  explicit_coding_worker="${ACP_CODING_WORKER:-}"
  if [[ -z "${explicit_coding_worker}" && -n "${provider_pool_order}" ]]; then
    # '|| true' keeps a failed selection (no valid pool) from aborting
    # under errexit; the legacy branch below then takes over.
    provider_pool_selection="$(flow_selected_provider_pool_env "${config_file}" || true)"
  fi

  if [[ -n "${provider_pool_selection}" ]]; then
    # Pool path: unpack the KEY=value block emitted by
    # flow_selected_provider_pool_env into locals.
    active_provider_pool_name="$(flow_kv_get "${provider_pool_selection}" "POOL_NAME")"
    active_provider_backend="$(flow_kv_get "${provider_pool_selection}" "BACKEND")"
    active_provider_model="$(flow_kv_get "${provider_pool_selection}" "MODEL")"
    active_provider_key="$(flow_kv_get "${provider_pool_selection}" "PROVIDER_KEY")"
    active_provider_next_attempt_epoch="$(flow_kv_get "${provider_pool_selection}" "NEXT_ATTEMPT_EPOCH")"
    active_provider_next_attempt_at="$(flow_kv_get "${provider_pool_selection}" "NEXT_ATTEMPT_AT")"
    active_provider_last_reason="$(flow_kv_get "${provider_pool_selection}" "LAST_REASON")"
    active_provider_pools_exhausted="$(flow_kv_get "${provider_pool_selection}" "POOLS_EXHAUSTED")"
    active_provider_selection_reason="$(flow_kv_get "${provider_pool_selection}" "SELECTION_REASON")"

    # The selected pool's backend becomes the coding worker.
    coding_worker="${active_provider_backend}"
    safe_profile="$(flow_kv_get "${provider_pool_selection}" "SAFE_PROFILE")"
    bypass_profile="$(flow_kv_get "${provider_pool_selection}" "BYPASS_PROFILE")"
    claude_model="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_MODEL")"
    claude_permission_mode="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_PERMISSION_MODE")"
    claude_effort="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_EFFORT")"
    claude_timeout="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_TIMEOUT_SECONDS")"
    claude_max_attempts="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_MAX_ATTEMPTS")"
    claude_retry_backoff_seconds="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_RETRY_BACKOFF_SECONDS")"
    openclaw_model="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_MODEL")"
    openclaw_thinking="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_THINKING")"
    openclaw_timeout="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_TIMEOUT_SECONDS")"
    # NOTE(review): flow_provider_pool_state_get never prints an
    # OPENCLAW_STALL_SECONDS line, so this lookup appears to always come
    # back empty on the pool path — confirm against the state emitter.
    openclaw_stall="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_STALL_SECONDS")"
    ollama_model="$(flow_kv_get "${provider_pool_selection}" "OLLAMA_MODEL")"
    ollama_base_url="$(flow_kv_get "${provider_pool_selection}" "OLLAMA_BASE_URL")"
    ollama_timeout="$(flow_kv_get "${provider_pool_selection}" "OLLAMA_TIMEOUT_SECONDS")"
    pi_model="$(flow_kv_get "${provider_pool_selection}" "PI_MODEL")"
    pi_thinking="$(flow_kv_get "${provider_pool_selection}" "PI_THINKING")"
    pi_timeout="$(flow_kv_get "${provider_pool_selection}" "PI_TIMEOUT_SECONDS")"
    opencode_model="$(flow_kv_get "${provider_pool_selection}" "OPENCODE_MODEL")"
    opencode_timeout="$(flow_kv_get "${provider_pool_selection}" "OPENCODE_TIMEOUT_SECONDS")"
    kilo_model="$(flow_kv_get "${provider_pool_selection}" "KILO_MODEL")"
    kilo_timeout="$(flow_kv_get "${provider_pool_selection}" "KILO_TIMEOUT_SECONDS")"
  else
    # Legacy path: per-key env-var / config-file resolution.
    if [[ -n "${explicit_coding_worker}" ]]; then
      active_provider_selection_reason="env-override"
    fi
    coding_worker="$(flow_env_or_config "${config_file}" "ACP_CODING_WORKER" "execution.coding_worker" "")"
    safe_profile="$(flow_env_or_config "${config_file}" "ACP_CODEX_PROFILE_SAFE F_LOSNING_CODEX_PROFILE_SAFE" "execution.safe_profile" "")"
    bypass_profile="$(flow_env_or_config "${config_file}" "ACP_CODEX_PROFILE_BYPASS F_LOSNING_CODEX_PROFILE_BYPASS" "execution.bypass_profile" "")"
    claude_model="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_MODEL F_LOSNING_CLAUDE_MODEL" "execution.claude.model" "")"
    claude_permission_mode="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_PERMISSION_MODE F_LOSNING_CLAUDE_PERMISSION_MODE" "execution.claude.permission_mode" "")"
    claude_effort="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_EFFORT F_LOSNING_CLAUDE_EFFORT" "execution.claude.effort" "")"
    claude_timeout="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_TIMEOUT_SECONDS F_LOSNING_CLAUDE_TIMEOUT_SECONDS" "execution.claude.timeout_seconds" "")"
    claude_max_attempts="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_MAX_ATTEMPTS F_LOSNING_CLAUDE_MAX_ATTEMPTS" "execution.claude.max_attempts" "")"
    claude_retry_backoff_seconds="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_RETRY_BACKOFF_SECONDS F_LOSNING_CLAUDE_RETRY_BACKOFF_SECONDS" "execution.claude.retry_backoff_seconds" "")"
    openclaw_model="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_MODEL F_LOSNING_OPENCLAW_MODEL" "execution.openclaw.model" "")"
    openclaw_thinking="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_THINKING F_LOSNING_OPENCLAW_THINKING" "execution.openclaw.thinking" "")"
    openclaw_timeout="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_TIMEOUT_SECONDS F_LOSNING_OPENCLAW_TIMEOUT_SECONDS" "execution.openclaw.timeout_seconds" "")"
    openclaw_stall="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_STALL_SECONDS F_LOSNING_OPENCLAW_STALL_SECONDS" "execution.openclaw.stall_seconds" "")"
    ollama_model="$(flow_env_or_config "${config_file}" "ACP_OLLAMA_MODEL F_LOSNING_OLLAMA_MODEL" "execution.ollama.model" "")"
    ollama_base_url="$(flow_env_or_config "${config_file}" "ACP_OLLAMA_BASE_URL F_LOSNING_OLLAMA_BASE_URL" "execution.ollama.base_url" "")"
    ollama_timeout="$(flow_env_or_config "${config_file}" "ACP_OLLAMA_TIMEOUT_SECONDS F_LOSNING_OLLAMA_TIMEOUT_SECONDS" "execution.ollama.timeout_seconds" "")"
    pi_model="$(flow_env_or_config "${config_file}" "ACP_PI_MODEL F_LOSNING_PI_MODEL" "execution.pi.model" "")"
    pi_thinking="$(flow_env_or_config "${config_file}" "ACP_PI_THINKING F_LOSNING_PI_THINKING" "execution.pi.thinking" "")"
    pi_timeout="$(flow_env_or_config "${config_file}" "ACP_PI_TIMEOUT_SECONDS F_LOSNING_PI_TIMEOUT_SECONDS" "execution.pi.timeout_seconds" "")"
    opencode_model="$(flow_env_or_config "${config_file}" "ACP_OPENCODE_MODEL F_LOSNING_OPENCODE_MODEL" "execution.opencode.model" "")"
    opencode_timeout="$(flow_env_or_config "${config_file}" "ACP_OPENCODE_TIMEOUT_SECONDS F_LOSNING_OPENCODE_TIMEOUT_SECONDS" "execution.opencode.timeout_seconds" "")"
    kilo_model="$(flow_env_or_config "${config_file}" "ACP_KILO_MODEL F_LOSNING_KILO_MODEL" "execution.kilo.model" "")"
    kilo_timeout="$(flow_env_or_config "${config_file}" "ACP_KILO_TIMEOUT_SECONDS F_LOSNING_KILO_TIMEOUT_SECONDS" "execution.kilo.timeout_seconds" "")"
  fi

  # Export phase. Repo/provider-pool metadata is exported unconditionally
  # (possibly empty); per-backend settings only when non-empty, so
  # pre-existing environment values are not clobbered with blanks.
  if [[ -n "${coding_worker}" ]]; then
    export ACP_CODING_WORKER="${coding_worker}"
  fi
  if [[ -n "${repo_id}" ]]; then
    export F_LOSNING_REPO_ID="${repo_id}"
    export ACP_REPO_ID="${repo_id}"
    export F_LOSNING_GITHUB_REPOSITORY_ID="${repo_id}"
    export ACP_GITHUB_REPOSITORY_ID="${repo_id}"
  fi
  if [[ -n "${provider_quota_cooldowns}" ]]; then
    export F_LOSNING_PROVIDER_QUOTA_COOLDOWNS="${provider_quota_cooldowns}"
    export ACP_PROVIDER_QUOTA_COOLDOWNS="${provider_quota_cooldowns}"
  fi
  export F_LOSNING_PROVIDER_POOL_ORDER="${provider_pool_order}"
  export ACP_PROVIDER_POOL_ORDER="${provider_pool_order}"
  export F_LOSNING_ACTIVE_PROVIDER_POOL_NAME="${active_provider_pool_name}"
  export ACP_ACTIVE_PROVIDER_POOL_NAME="${active_provider_pool_name}"
  export F_LOSNING_ACTIVE_PROVIDER_BACKEND="${active_provider_backend}"
  export ACP_ACTIVE_PROVIDER_BACKEND="${active_provider_backend}"
  export F_LOSNING_ACTIVE_PROVIDER_MODEL="${active_provider_model}"
  export ACP_ACTIVE_PROVIDER_MODEL="${active_provider_model}"
  export F_LOSNING_ACTIVE_PROVIDER_KEY="${active_provider_key}"
  export ACP_ACTIVE_PROVIDER_KEY="${active_provider_key}"
  export F_LOSNING_PROVIDER_POOLS_EXHAUSTED="${active_provider_pools_exhausted}"
  export ACP_PROVIDER_POOLS_EXHAUSTED="${active_provider_pools_exhausted}"
  export F_LOSNING_PROVIDER_POOL_SELECTION_REASON="${active_provider_selection_reason}"
  export ACP_PROVIDER_POOL_SELECTION_REASON="${active_provider_selection_reason}"
  export F_LOSNING_PROVIDER_POOL_NEXT_ATTEMPT_EPOCH="${active_provider_next_attempt_epoch}"
  export ACP_PROVIDER_POOL_NEXT_ATTEMPT_EPOCH="${active_provider_next_attempt_epoch}"
  export F_LOSNING_PROVIDER_POOL_NEXT_ATTEMPT_AT="${active_provider_next_attempt_at}"
  export ACP_PROVIDER_POOL_NEXT_ATTEMPT_AT="${active_provider_next_attempt_at}"
  export F_LOSNING_PROVIDER_POOL_LAST_REASON="${active_provider_last_reason}"
  export ACP_PROVIDER_POOL_LAST_REASON="${active_provider_last_reason}"
  if [[ -n "${safe_profile}" ]]; then
    export F_LOSNING_CODEX_PROFILE_SAFE="${safe_profile}"
    export ACP_CODEX_PROFILE_SAFE="${safe_profile}"
  fi
  if [[ -n "${bypass_profile}" ]]; then
    export F_LOSNING_CODEX_PROFILE_BYPASS="${bypass_profile}"
    export ACP_CODEX_PROFILE_BYPASS="${bypass_profile}"
  fi
  if [[ -n "${claude_model}" ]]; then
    export F_LOSNING_CLAUDE_MODEL="${claude_model}"
    export ACP_CLAUDE_MODEL="${claude_model}"
  fi
  if [[ -n "${claude_permission_mode}" ]]; then
    export F_LOSNING_CLAUDE_PERMISSION_MODE="${claude_permission_mode}"
    export ACP_CLAUDE_PERMISSION_MODE="${claude_permission_mode}"
  fi
  if [[ -n "${claude_effort}" ]]; then
    export F_LOSNING_CLAUDE_EFFORT="${claude_effort}"
    export ACP_CLAUDE_EFFORT="${claude_effort}"
  fi
  if [[ -n "${claude_timeout}" ]]; then
    export F_LOSNING_CLAUDE_TIMEOUT_SECONDS="${claude_timeout}"
    export ACP_CLAUDE_TIMEOUT_SECONDS="${claude_timeout}"
  fi
  if [[ -n "${claude_max_attempts}" ]]; then
    export F_LOSNING_CLAUDE_MAX_ATTEMPTS="${claude_max_attempts}"
    export ACP_CLAUDE_MAX_ATTEMPTS="${claude_max_attempts}"
  fi
  if [[ -n "${claude_retry_backoff_seconds}" ]]; then
    export F_LOSNING_CLAUDE_RETRY_BACKOFF_SECONDS="${claude_retry_backoff_seconds}"
    export ACP_CLAUDE_RETRY_BACKOFF_SECONDS="${claude_retry_backoff_seconds}"
  fi
  if [[ -n "${openclaw_model}" ]]; then
    export F_LOSNING_OPENCLAW_MODEL="${openclaw_model}"
    export ACP_OPENCLAW_MODEL="${openclaw_model}"
  fi
  if [[ -n "${openclaw_thinking}" ]]; then
    export F_LOSNING_OPENCLAW_THINKING="${openclaw_thinking}"
    export ACP_OPENCLAW_THINKING="${openclaw_thinking}"
  fi
  if [[ -n "${openclaw_timeout}" ]]; then
    export F_LOSNING_OPENCLAW_TIMEOUT_SECONDS="${openclaw_timeout}"
    export ACP_OPENCLAW_TIMEOUT_SECONDS="${openclaw_timeout}"
  fi
  if [[ -n "${openclaw_stall}" ]]; then
    export F_LOSNING_OPENCLAW_STALL_SECONDS="${openclaw_stall}"
    export ACP_OPENCLAW_STALL_SECONDS="${openclaw_stall}"
  fi
  if [[ -n "${ollama_model}" ]]; then
    export F_LOSNING_OLLAMA_MODEL="${ollama_model}"
    export ACP_OLLAMA_MODEL="${ollama_model}"
  fi
  if [[ -n "${ollama_base_url}" ]]; then
    export F_LOSNING_OLLAMA_BASE_URL="${ollama_base_url}"
    export ACP_OLLAMA_BASE_URL="${ollama_base_url}"
  fi
  if [[ -n "${ollama_timeout}" ]]; then
    export F_LOSNING_OLLAMA_TIMEOUT_SECONDS="${ollama_timeout}"
    export ACP_OLLAMA_TIMEOUT_SECONDS="${ollama_timeout}"
  fi
  if [[ -n "${pi_model}" ]]; then
    export F_LOSNING_PI_MODEL="${pi_model}"
    export ACP_PI_MODEL="${pi_model}"
  fi
  if [[ -n "${pi_thinking}" ]]; then
    export F_LOSNING_PI_THINKING="${pi_thinking}"
    export ACP_PI_THINKING="${pi_thinking}"
  fi
  if [[ -n "${pi_timeout}" ]]; then
    export F_LOSNING_PI_TIMEOUT_SECONDS="${pi_timeout}"
    export ACP_PI_TIMEOUT_SECONDS="${pi_timeout}"
  fi
  if [[ -n "${opencode_model}" ]]; then
    export F_LOSNING_OPENCODE_MODEL="${opencode_model}"
    export ACP_OPENCODE_MODEL="${opencode_model}"
  fi
  if [[ -n "${opencode_timeout}" ]]; then
    export F_LOSNING_OPENCODE_TIMEOUT_SECONDS="${opencode_timeout}"
    export ACP_OPENCODE_TIMEOUT_SECONDS="${opencode_timeout}"
  fi
  if [[ -n "${kilo_model}" ]]; then
    export F_LOSNING_KILO_MODEL="${kilo_model}"
    export ACP_KILO_MODEL="${kilo_model}"
  fi
  if [[ -n "${kilo_timeout}" ]]; then
    export F_LOSNING_KILO_TIMEOUT_SECONDS="${kilo_timeout}"
    export ACP_KILO_TIMEOUT_SECONDS="${kilo_timeout}"
  fi

  # Finally export forge auth and project-wide env aliases.
  flow_export_github_cli_auth_env "$(flow_resolve_repo_slug "${config_file}")"
  flow_export_project_env_aliases
}
|
|
11
|
+
# Load the split library modules in dependency order (earlier files must
# not depend on later ones). Assumes SCRIPT_DIR was set earlier in this
# file — TODO confirm it points at the tools/bin directory.
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-profile-lib.sh"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-forge-lib.sh"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-provider-lib.sh"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-session-lib.sh"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-execution-lib.sh"
|