agent-control-plane 0.6.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1770 @@
1
# Ask the local git credential helper chain for a token (the "password"
# credential field) for https://<host>/<path_suffix>.
#   $1 - repo slug (owner/name); required
#   $2 - host (default: github.com)
#   $3 - URL path (default: "<slug>.git")
# Outputs: the token on stdout. Returns 1 when arguments are missing, git is
# unavailable, or no helper produced a password.
flow_git_credential_token_for_repo_slug() {
  local repo_slug="${1:-}"
  local host="${2:-github.com}"
  local path_suffix="${3:-${repo_slug}.git}"
  local credential_payload=""
  local token=""

  [[ -n "${repo_slug}" && -n "${host}" && -n "${path_suffix}" ]] || return 1
  command -v git >/dev/null 2>&1 || return 1

  # GIT_TERMINAL_PROMPT=0 keeps `git credential fill` from blocking on an
  # interactive username/password prompt when no configured helper has a
  # credential for this URL; the lookup then fails fast instead of hanging.
  credential_payload="$(
    printf 'protocol=https\nhost=%s\npath=%s\n\n' "${host}" "${path_suffix}" \
      | GIT_TERMINAL_PROMPT=0 git credential fill 2>/dev/null || true
  )"
  token="$(awk -F= '/^password=/{print $2; exit}' <<<"${credential_payload}")"
  [[ -n "${token}" ]] || return 1

  printf '%s\n' "${token}"
}
20
+
21
# Ensure the GitHub CLI has credentials available, exporting GH_TOKEN when a
# source is found. Resolution order (first hit wins):
#   1. Gitea mode            -> no-op (gh is not used)
#   2. GH_TOKEN already set  -> keep it
#   3. GITHUB_TOKEN          -> mirrored into GH_TOKEN
#   4. gh's own stored login -> keep (nothing exported)
#   5. git credential helper token for the repo slug
#   6. GITHUB_PERSONAL_ACCESS_TOKEN
# Always returns 0; the only side effect is a possible `export GH_TOKEN`.
flow_export_github_cli_auth_env() {
  local repo_slug="${1:-}"
  local token=""

  if flow_using_gitea; then
    return 0
  fi

  if [[ -n "${GH_TOKEN:-}" ]]; then
    return 0
  fi

  if [[ -n "${GITHUB_TOKEN:-}" ]]; then
    export GH_TOKEN="${GITHUB_TOKEN}"
    return 0
  fi

  # `env -u` strips the token variables so this probes gh's keyring/stored
  # login only, not the (absent) env tokens we just checked above.
  if command -v gh >/dev/null 2>&1; then
    if env -u GH_TOKEN -u GITHUB_TOKEN gh auth status >/dev/null 2>&1 \
      || env -u GH_TOKEN -u GITHUB_TOKEN gh api user --jq .login >/dev/null 2>&1; then
      return 0
    fi
  fi

  token="$(flow_git_credential_token_for_repo_slug "${repo_slug}" || true)"
  if [[ -n "${token}" ]]; then
    export GH_TOKEN="${token}"
    return 0
  fi

  # Last resort; note no explicit return here, so the function's status is
  # that of the final test (still effectively success for callers using `if`).
  if [[ -n "${GITHUB_PERSONAL_ACCESS_TOKEN:-}" ]]; then
    export GH_TOKEN="${GITHUB_PERSONAL_ACCESS_TOKEN}"
  fi
}
55
+
56
flow_forge_provider() {
  # Resolve the configured forge provider name ("github" or "gitea").
  # ACP_FORGE_PROVIDER takes precedence over F_LOSNING_FORGE_PROVIDER;
  # comparison is case-insensitive, and anything unrecognized falls back
  # to "github".
  local raw="${ACP_FORGE_PROVIDER:-${F_LOSNING_FORGE_PROVIDER:-github}}"
  local normalized=""

  normalized="$(printf '%s' "${raw}" | tr '[:upper:]' '[:lower:]')"
  if [[ "${normalized}" == "github" || "${normalized}" == "gitea" ]]; then
    printf '%s\n' "${normalized}"
  else
    printf 'github\n'
  fi
}
68
+
69
flow_using_gitea() {
  # Succeed (status 0) only when the active forge provider is Gitea.
  case "$(flow_forge_provider)" in
    gitea) return 0 ;;
    *) return 1 ;;
  esac
}
72
+
73
flow_gitea_base_url() {
  # Print the configured Gitea base URL with any trailing slash removed.
  # ACP_GITEA_BASE_URL wins over GITEA_BASE_URL; fails (1) when neither is set.
  local configured="${ACP_GITEA_BASE_URL:-${GITEA_BASE_URL:-}}"

  if [[ -z "${configured}" ]]; then
    return 1
  fi
  printf '%s\n' "${configured%/}"
}
78
+
79
flow_gitea_base_host() {
  # Extract the bare host[:port] portion of the configured Gitea base URL.
  # Fails (1) when no base URL is configured.
  local stripped=""

  stripped="$(flow_gitea_base_url)" || return 1
  stripped="${stripped#http://}"
  stripped="${stripped#https://}"
  printf '%s\n' "${stripped%%/*}"
}
86
+
87
flow_gitea_api_url_for_repo() {
  # Build "<base>/api/v1/repos/<slug>[/<route>]" for the configured Gitea
  # host. A leading "/" on the route is tolerated. Fails (1) when no base
  # URL is configured.
  local repo_slug="${1:?repo slug required}"
  local route="${2:-}"
  local base=""

  base="$(flow_gitea_base_url)" || return 1
  route="${route#/}"
  if [[ -z "${route}" ]]; then
    printf '%s/api/v1/repos/%s\n' "${base}" "${repo_slug}"
  else
    printf '%s/api/v1/repos/%s/%s\n' "${base}" "${repo_slug}" "${route}"
  fi
}
100
+
101
flow_gitea_auth_curl_args() {
  # Emit NUL-delimited curl arguments that authenticate against Gitea, in
  # preference order: API token env vars, basic-auth env vars, then a token
  # obtained from the git credential helper for the given repo slug.
  # Returns 1 when no credential source is available.
  local repo_slug="${1:-}"
  local token="${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}"
  local user="${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}"
  local pass="${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}"
  local helper_token=""

  if [[ -n "${token}" ]]; then
    printf -- "-H\0Authorization: token %s\0" "${token}"
    return 0
  fi

  if [[ -n "${user}" && -n "${pass}" ]]; then
    printf -- "-u\0%s:%s\0" "${user}" "${pass}"
    return 0
  fi

  if [[ -n "${repo_slug}" ]]; then
    helper_token="$(flow_git_credential_token_for_repo_slug "${repo_slug}" "$(flow_gitea_base_host)" "${repo_slug}.git" || true)"
    if [[ -n "${helper_token}" ]]; then
      printf -- "-H\0Authorization: token %s\0" "${helper_token}"
      return 0
    fi
  fi

  return 1
}
122
+
123
# Perform a repo-scoped Gitea API request, emulating the subset of `gh api`
# flags used elsewhere in this file:
#   flow_gitea_api_repo <slug> <route> [--method M] [--paginate] [--slurp]
#                       [--jq FILTER] [--input FILE|-] [-f|--field k=v]...
# Unrecognized flags are silently dropped. Prints the response body
# (optionally jq-filtered); with --paginate, follows Link rel="next" headers
# and with --slurp wraps all pages in a JSON array.
flow_gitea_api_repo() {
  local repo_slug="${1:?repo slug required}"
  local route="${2:-}"
  local method="GET"
  local paginate="no"
  local slurp="no"
  local jq_filter=""
  local expect_input="no"
  local arg=""
  local url=""
  local input_file=""
  local output=""
  local page="1"
  local per_page="100"
  local response=""
  local body=""
  local link_header=""
  local header_file=""
  local stdout_file=""
  local stderr_file=""
  local curl_status="0"   # (declared but unused in the current flow)
  local response_status="0"
  local -a curl_args=()
  local -a auth_args=()
  local -a extra_headers=()
  local -a form_fields=()
  local -a pages=()

  # Consume <slug> <route>, then parse the gh-style option tail.
  shift 2
  while [[ $# -gt 0 ]]; do
    arg="${1:-}"
    case "${arg}" in
      --method)
        method="${2:-GET}"
        shift 2
        ;;
      --paginate)
        paginate="yes"
        shift
        ;;
      --slurp)
        slurp="yes"
        shift
        ;;
      --jq)
        jq_filter="${2:-}"
        shift 2
        ;;
      --input)
        expect_input="yes"
        # "--input -" spools stdin into a temp file so curl can replay it.
        if [[ "${2:-}" == "-" ]]; then
          input_file="$(mktemp)"
          cat >"${input_file}"
          shift 2
        else
          input_file="${2:-}"
          shift 2
        fi
        ;;
      -f|--field)
        form_fields+=("${2:-}")
        shift 2
        ;;
      *)
        shift
        ;;
    esac
  done

  url="$(flow_gitea_api_url_for_repo "${repo_slug}" "${route}")" || {
    rm -f "${input_file}"
    return 1
  }
  # Auth args arrive NUL-delimited; read them into an array. The trailing
  # `|| true` tolerates the no-credentials case for GET requests.
  while IFS= read -r -d '' arg; do
    auth_args+=("${arg}")
  done < <(flow_gitea_auth_curl_args "${repo_slug}") || true
  # Mutating requests without credentials are refused up front.
  if [[ "${#auth_args[@]}" -eq 0 && "${method}" != "GET" ]]; then
    rm -f "${input_file}"
    return 1
  fi

  if [[ "${expect_input}" == "yes" && -n "${input_file}" ]]; then
    extra_headers+=(-H "Content-Type: application/json")
  fi
  # -f/--field key=value pairs are folded into a JSON object body, mirroring
  # gh's behavior of sending fields as JSON.
  if [[ "${#form_fields[@]}" -gt 0 ]]; then
    extra_headers+=(-H "Content-Type: application/json")
    body="$(
      FORM_FIELDS="$(printf '%s\n' "${form_fields[@]}")" python3 - <<'PY'
import json
import os

payload = {}
for line in os.environ.get("FORM_FIELDS", "").splitlines():
    line = line.rstrip("\n")
    if "=" not in line:
        continue
    key, value = line.split("=", 1)
    payload[key] = value
print(json.dumps(payload))
PY
    )"
    input_file="$(mktemp)"
    printf '%s' "${body}" >"${input_file}"
  fi

  # --- Single-request path -------------------------------------------------
  if [[ "${paginate}" != "yes" ]]; then
    stdout_file="$(mktemp)"
    stderr_file="$(mktemp)"
    header_file="$(mktemp)"
    curl_args=(-sS -D "${header_file}" -X "${method}")
    if [[ "${#auth_args[@]}" -gt 0 ]]; then
      curl_args+=("${auth_args[@]}")
    fi
    if [[ "${#extra_headers[@]}" -gt 0 ]]; then
      curl_args+=("${extra_headers[@]}")
    fi
    if [[ -n "${input_file}" ]]; then
      curl_args+=(--data-binary "@${input_file}")
    fi
    if curl "${curl_args[@]}" "${url}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      if [[ -n "${jq_filter}" ]]; then
        jq -r "${jq_filter}" <<<"${output}"
      else
        printf '%s' "${output}"
      fi
      rm -f "${input_file}" "${stdout_file}" "${stderr_file}" "${header_file}"
      return 0
    fi
    rm -f "${input_file}" "${stdout_file}" "${stderr_file}" "${header_file}"
    return 1
  fi

  # --- Paginated path ------------------------------------------------------
  # NOTE(review): the request body (input_file) and --jq filter are NOT
  # applied on this path; pagination is presumably GET-only — confirm with
  # callers before relying on --paginate plus --input/--jq.
  while :; do
    stdout_file="$(mktemp)"
    stderr_file="$(mktemp)"
    header_file="$(mktemp)"
    curl_args=(-sS -D "${header_file}" -X "${method}")
    if [[ "${#auth_args[@]}" -gt 0 ]]; then
      curl_args+=("${auth_args[@]}")
    fi
    if [[ "${#extra_headers[@]}" -gt 0 ]]; then
      curl_args+=("${extra_headers[@]}")
    fi
    # Append page/limit with '&' or '?' depending on whether the route
    # already carries a query string.
    if curl "${curl_args[@]}" "${url}$([[ "${url}" == *\?* ]] && printf '&' || printf '?')page=${page}&limit=${per_page}" >"${stdout_file}" 2>"${stderr_file}"; then
      response="$(cat "${stdout_file}" 2>/dev/null || true)"
      pages+=("${response}")
      # First Link header, CR-stripped; rel="next" signals more pages.
      link_header="$(tr -d '\r' <"${header_file}" | awk 'BEGIN{IGNORECASE=1}/^link:/{sub(/^link:[[:space:]]*/,""); print; exit}')"
      rm -f "${stdout_file}" "${stderr_file}" "${header_file}"
      if [[ "${link_header}" != *'rel="next"'* ]]; then
        break
      fi
      page="$((page + 1))"
    else
      response_status="1"
      rm -f "${stdout_file}" "${stderr_file}" "${header_file}" "${input_file}"
      return "${response_status}"
    fi
  done

  rm -f "${input_file}"
  if [[ "${slurp}" == "yes" ]]; then
    # One JSON document per page, combined into a single top-level array.
    printf '%s\n' "${pages[@]}" | jq -s '.'
    return 0
  fi
  # Without --slurp only the first page is emitted (callers wanting all
  # pages must pass --slurp).
  printf '%s' "${pages[0]:-[]}"
}
290
+
291
# Render a Gitea issue plus all of its comments as a single JSON object
# shaped like `gh issue view --json ...` output (camelCase keys, upper-case
# state), so downstream consumers stay forge-agnostic.
flow_gitea_issue_view_json() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local issue_json=""
  local comments_json=""

  # Best-effort fetches: a failed or malformed response degrades to {} / [].
  issue_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null || true)"
  issue_json="$(flow_json_or_default "${issue_json}" '{}')"
  comments_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${issue_id}/comments" --paginate --slurp 2>/dev/null || true)"
  comments_json="$(flow_json_or_default "${comments_json}" '[]')"

  # JSON payloads are handed to python via the environment to avoid any
  # shell-quoting pitfalls with arbitrary issue text.
  ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comments_json}" python3 - <<'PY'
import json
import os

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")

# --slurp yields a list of pages; flatten lists and accept stray dicts.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": issue.get("number"),
    "state": str(issue.get("state", "")).upper(),
    "title": issue.get("title") or "",
    "body": issue.get("body") or "",
    "url": issue.get("html_url") or issue.get("url") or "",
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "createdAt": issue.get("created_at") or "",
    "updatedAt": issue.get("updated_at") or "",
}

print(json.dumps(result))
PY
}
339
+
340
# List Gitea issues as a JSON array shaped like `gh issue list --json ...`.
#   $1 - repo slug, $2 - state filter (default "open"), $3 - max results
#        (default 100; all pages are fetched, then truncated client-side).
# Pull requests returned by the issues endpoint are filtered out.
flow_gitea_issue_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local issues_json=""

  issues_json="$(flow_gitea_api_repo "${repo_slug}" "issues?state=${state}" --paginate --slurp 2>/dev/null || true)"
  issues_json="$(flow_json_or_default "${issues_json}" '[]')"

  ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
issues = []

# Flatten --slurp page lists; accept stray dicts defensively.
for page in pages:
    if isinstance(page, list):
        issues.extend(page)
    elif isinstance(page, dict):
        issues.append(page)

result = []
for issue in issues:
    if not isinstance(issue, dict):
        continue
    # Gitea's issues endpoint can include PRs; skip them here.
    if issue.get("pull_request"):
        continue
    result.append({
        "number": issue.get("number"),
        "createdAt": issue.get("created_at") or "",
        "updatedAt": issue.get("updated_at") or "",
        "title": issue.get("title") or "",
        "url": issue.get("html_url") or issue.get("url") or "",
        "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    })

print(json.dumps(result[:limit]))
PY
}
381
+
382
# Render a Gitea pull request (comments, changed files, reviews) as a JSON
# object shaped like `gh pr view --json ...` output.
# Review decision: APPROVED wins over CHANGES_REQUESTED; otherwise empty.
flow_gitea_pr_view_json() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local pr_json=""
  local comment_pages_json=""
  local files_json=""
  local reviews_json=""

  # Best-effort fetches; each degrades to {} or [] on failure.
  pr_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}" 2>/dev/null || true)"
  pr_json="$(flow_json_or_default "${pr_json}" '{}')"
  # PR discussion comments live on the issues endpoint in Gitea.
  comment_pages_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${pr_number}/comments" --paginate --slurp 2>/dev/null || true)"
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
  files_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}/files" --paginate --slurp 2>/dev/null || true)"
  files_json="$(flow_json_or_default "${files_json}" '[]')"
  reviews_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}/reviews" --paginate --slurp 2>/dev/null || true)"
  reviews_json="$(flow_json_or_default "${reviews_json}" '[]')"

  PR_JSON="${pr_json}" COMMENT_PAGES_JSON="${comment_pages_json}" FILES_JSON="${files_json}" REVIEWS_JSON="${reviews_json}" python3 - <<'PY'
import json
import os

pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
file_pages = json.loads(os.environ.get("FILES_JSON", "[]") or "[]")
review_pages = json.loads(os.environ.get("REVIEWS_JSON", "[]") or "[]")

# Flatten --slurp page lists for each collection.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

files = []
for page in file_pages:
    if isinstance(page, list):
        files.extend(page)
    elif isinstance(page, dict):
        files.append(page)

reviews = []
for page in review_pages:
    if isinstance(page, list):
        reviews.extend(page)
    elif isinstance(page, dict):
        reviews.append(page)

# gh reports merged PRs as MERGED rather than CLOSED.
pr_state = str(pr.get("state", "")).upper()
if pr.get("merged") or pr.get("merged_at"):
    pr_state = "MERGED"

review_states = [
    str(review.get("state") or "").upper()
    for review in reviews
    if isinstance(review, dict)
]
review_decision = ""
if any(state == "APPROVED" for state in review_states):
    review_decision = "APPROVED"
elif any(state in {"CHANGES_REQUESTED", "REQUEST_CHANGES"} for state in review_states):
    review_decision = "CHANGES_REQUESTED"

result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or "",
    "url": pr.get("html_url") or pr.get("url") or "",
    "headRefName": ((pr.get("head") or {}).get("ref")) or "",
    "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
    "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
    # Only a coarse mapping is available from Gitea's "mergeable" flag.
    "mergeStateStatus": "CLEAN" if pr.get("mergeable") else "UNKNOWN",
    "statusCheckRollup": [],
    "labels": [{"name": label.get("name", "")} for label in pr.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "state": pr_state,
    "isDraft": bool(pr.get("draft")),
    "createdAt": pr.get("created_at") or "",
    "updatedAt": pr.get("updated_at") or "",
    "mergedAt": pr.get("merged_at") or "",
    "authorLogin": ((pr.get("user") or {}).get("login")) or "",
    "files": [
        {"path": file.get("filename") or ""}
        for file in files
        if isinstance(file, dict) and (file.get("filename") or "")
    ],
    "reviewRequests": [
        {"login": reviewer.get("login") or ""}
        for reviewer in (pr.get("requested_reviewers") or [])
        if isinstance(reviewer, dict)
    ],
    "reviewDecision": review_decision,
}

print(json.dumps(result))
PY
}
486
+
487
# List Gitea pull requests as a JSON array shaped like `gh pr list --json`.
#   $1 - repo slug, $2 - state filter: open|closed|merged (default "open"),
#   $3 - max results (default 100).
# "merged" is queried as state=closed server-side, then filtered client-side
# on merged/merged_at.
flow_gitea_pr_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local pulls_state="${state}"
  local pr_pages_json=""

  # Gitea has no "merged" query state; fetch closed PRs and filter below.
  if [[ "${state}" == "merged" ]]; then
    pulls_state="closed"
  fi

  pr_pages_json="$(flow_gitea_api_repo "${repo_slug}" "pulls?state=${pulls_state}" --paginate --slurp 2>/dev/null || true)"
  pr_pages_json="$(flow_json_or_default "${pr_pages_json}" '[]')"

  PR_PAGES_JSON="${pr_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("PR_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
state_filter = os.environ.get("PR_STATE_FILTER", "open")

# Flatten --slurp page lists; accept stray dicts defensively.
prs = []
for page in pages:
    if isinstance(page, list):
        prs.extend(page)
    elif isinstance(page, dict):
        prs.append(page)

result = []
for pr in prs:
    if not isinstance(pr, dict):
        continue
    merged = bool(pr.get("merged") or pr.get("merged_at"))
    state = str(pr.get("state", "")).lower()
    if state_filter == "open" and state != "open":
        continue
    if state_filter == "closed" and state != "closed":
        continue
    if state_filter == "merged" and not merged:
        continue
    # Merged PRs report state "closed" upstream; normalize like gh does.
    normalized_state = "MERGED" if merged else state.upper()
    result.append({
        "number": pr.get("number"),
        "title": pr.get("title") or "",
        "body": pr.get("body") or "",
        "url": pr.get("html_url") or pr.get("url") or "",
        "headRefName": ((pr.get("head") or {}).get("ref")) or "",
        "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
        "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
        "createdAt": pr.get("created_at") or "",
        "mergedAt": pr.get("merged_at") or "",
        "state": normalized_state,
        "isDraft": bool(pr.get("draft")),
        "labels": [{"name": label.get("name", "")} for label in pr.get("labels", []) if isinstance(label, dict)],
        # Comments are intentionally not fetched for list views.
        "comments": [],
        "authorLogin": ((pr.get("user") or {}).get("login")) or "",
    })
    if len(result) >= limit:
        break

print(json.dumps(result))
PY
}
550
+
551
flow_github_output_indicates_rate_limit() {
  # Heuristic: does the given command output look like a GitHub rate-limit
  # (or 403) error? Case-insensitive match over known error phrases.
  local output="${1:-}"
  printf '%s\n' "${output}" \
    | grep -Eiq 'API rate limit exceeded|secondary rate limit|rate limit exceeded|HTTP 403'
}
554
+
555
flow_github_core_rate_limit_state_bin() {
  # Locate the executable rate-limit state helper under the flow skill dir.
  # BASH_SOURCE[1] (the caller's source file) anchors the lookup.
  # NOTE(review): depends on resolve_flow_skill_dir, defined elsewhere.
  local skill_dir=""
  local helper=""

  skill_dir="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")" || return 1
  helper="${skill_dir}/tools/bin/github-core-rate-limit-state.sh"
  if [[ ! -x "${helper}" ]]; then
    return 1
  fi
  printf '%s\n' "${helper}"
}
564
+
565
flow_github_core_rate_limit_state_output() {
  # Dump the persisted core rate-limit state via the helper's `get` command.
  # Fails (1) only when the helper cannot be located; the `get` call itself
  # is best-effort.
  local helper=""

  helper="$(flow_github_core_rate_limit_state_bin)" || return 1
  "${helper}" get 2>/dev/null || true
}
571
+
572
flow_github_core_rate_limit_active() {
  # True when the persisted state reports READY=no, i.e. a GitHub core API
  # backoff is currently in effect.
  local state=""
  local ready_flag=""

  state="$(flow_github_core_rate_limit_state_output)" || return 1
  ready_flag="$(awk -F= '/^READY=/{print $2; exit}' <<<"${state}")"
  [[ "${ready_flag}" == "no" ]]
}
580
+
581
flow_github_core_rate_limit_schedule() {
  # Record a rate-limit backoff in the persisted state. When a valid reset
  # epoch in the future is supplied, schedule the retry for that exact time;
  # otherwise let the helper pick its default. Always best-effort: returns 0
  # even when the helper is missing or the scheduling call fails.
  local reason="${1:-github-api-rate-limit}"
  local reset_epoch="${2:-}"
  local helper=""
  local now=""

  helper="$(flow_github_core_rate_limit_state_bin)" || return 0
  now="$(date +%s)"
  if [[ "${reset_epoch}" =~ ^[0-9]+$ ]] && (( reset_epoch > now )); then
    "${helper}" schedule "${reason}" --next-at-epoch "${reset_epoch}" >/dev/null 2>&1 || true
  else
    "${helper}" schedule "${reason}" >/dev/null 2>&1 || true
  fi
}
596
+
597
flow_github_core_rate_limit_clear() {
  # Best-effort reset of the persisted rate-limit state; silently succeeds
  # when the helper is missing.
  local helper=""

  helper="$(flow_github_core_rate_limit_state_bin)" || return 0
  "${helper}" clear >/dev/null 2>&1 || true
}
603
+
604
# Decide — once per process — whether GitHub GraphQL calls are worth trying.
# The verdict is memoized in FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE ("yes"/"no").
# Side effects: may export GH_TOKEN (via the auth helper) and update the
# persisted core rate-limit backoff state. Returns 0 when GraphQL quota
# remains, 1 otherwise.
flow_github_graphql_available() {
  local repo_slug="${1:-}"
  local rate_limit_json=""
  local graphql_remaining=""
  local core_remaining=""
  local core_reset=""
  local stderr_file=""
  local stderr_output=""

  # Memoized fast paths.
  if [[ "${FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE:-}" == "yes" ]]; then
    return 0
  fi
  if [[ "${FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE:-}" == "no" ]]; then
    return 1
  fi

  # An active core backoff means we should not even probe the API.
  if flow_github_core_rate_limit_active; then
    FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
    return 1
  fi

  flow_export_github_cli_auth_env "${repo_slug}"
  stderr_file="$(mktemp)"
  if rate_limit_json="$(gh api rate_limit 2>"${stderr_file}")"; then
    graphql_remaining="$(jq -r '.resources.graphql.remaining // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
    core_remaining="$(jq -r '.resources.core.remaining // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
    core_reset="$(jq -r '.resources.core.reset // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
    # Side effect: also maintain the persisted core-limit state while we
    # have fresh quota numbers in hand.
    if [[ "${core_remaining}" =~ ^[0-9]+$ ]]; then
      if (( core_remaining > 0 )); then
        flow_github_core_rate_limit_clear
      else
        flow_github_core_rate_limit_schedule "github-api-rate-limit" "${core_reset}"
        FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
        rm -f "${stderr_file}"
        return 1
      fi
    fi
  else
    # Probe failed: schedule a backoff if it looks rate-limit related.
    stderr_output="$(cat "${stderr_file}" 2>/dev/null || true)"
    if flow_github_output_indicates_rate_limit "${stderr_output}"; then
      flow_github_core_rate_limit_schedule "github-api-rate-limit"
    fi
    FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
    rm -f "${stderr_file}"
    return 1
  fi
  rm -f "${stderr_file}"

  if [[ "${graphql_remaining}" =~ ^[0-9]+$ ]] && (( graphql_remaining > 0 )); then
    FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="yes"
    return 0
  fi

  FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
  return 1
}
660
+
661
flow_github_repo_id_cache_var() {
  # Map a repo slug to a valid shell variable name used as its per-slug
  # repo-ID cache slot (every non-alphanumeric character becomes "_").
  local slug="${1:-}"
  printf 'FLOW_GITHUB_REPO_ID_CACHE_%s\n' "${slug//[^A-Za-z0-9]/_}"
}
666
+
667
# Resolve a repo slug ("owner/name") to its numeric GitHub repository ID.
# Resolution order: per-process cache variable, an explicitly configured ID
# (flow_explicit_github_repo_id — defined elsewhere, presumably env-driven;
# confirm), then a paginated `gh api user/repos` scan matched on full_name.
# Successful lookups are cached. Prints the ID; returns 1 on any failure.
flow_github_repo_id_for_repo_slug() {
  local repo_slug="${1:-}"
  local cache_var=""
  local cached_value=""
  local repos_pages_json=""
  local repo_id=""
  local stderr_file=""
  local stderr_output=""

  [[ -n "${repo_slug}" ]] || return 1
  command -v gh >/dev/null 2>&1 || return 1

  # Indirect expansion reads the dynamically named cache variable.
  cache_var="$(flow_github_repo_id_cache_var "${repo_slug}")"
  cached_value="${!cache_var:-}"
  if [[ -n "${cached_value}" ]]; then
    printf '%s\n' "${cached_value}"
    return 0
  fi

  repo_id="$(flow_explicit_github_repo_id "${repo_slug}" || true)"
  if [[ -n "${repo_id}" ]]; then
    printf -v "${cache_var}" '%s' "${repo_id}"
    printf '%s\n' "${repo_id}"
    return 0
  fi

  if flow_github_core_rate_limit_active; then
    return 1
  fi

  flow_export_github_cli_auth_env "${repo_slug}"
  stderr_file="$(mktemp)"
  # `|| true` inside the substitution keeps partial output usable; the
  # non-empty check decides success.
  if repos_pages_json="$(
    gh api 'user/repos?per_page=100&visibility=all&affiliation=owner,collaborator,organization_member' \
      --paginate \
      --slurp 2>"${stderr_file}" || true
  )" && [[ -n "${repos_pages_json}" ]]; then
    flow_github_core_rate_limit_clear
  else
    stderr_output="$(cat "${stderr_file}" 2>/dev/null || true)"
    if flow_github_output_indicates_rate_limit "${stderr_output}"; then
      flow_github_core_rate_limit_schedule "github-api-rate-limit"
    fi
  fi
  rm -f "${stderr_file}"
  [[ -n "${repos_pages_json}" ]] || return 1

  # Scan the slurped pages for a repo whose full_name matches the slug.
  repo_id="$(
    REPOS_PAGES_JSON="${repos_pages_json}" TARGET_REPO_SLUG="${repo_slug}" python3 - <<'PY'
import json
import os
import sys

pages = json.loads(os.environ.get("REPOS_PAGES_JSON", "[]") or "[]")
target = os.environ.get("TARGET_REPO_SLUG", "")

for page in pages:
    if isinstance(page, list):
        for repo in page:
            if isinstance(repo, dict) and repo.get("full_name") == target:
                value = repo.get("id")
                if value is not None:
                    print(value)
                    sys.exit(0)
    elif isinstance(page, dict) and page.get("full_name") == target:
        value = page.get("id")
        if value is not None:
            print(value)
            sys.exit(0)
PY
  )"
  [[ -n "${repo_id}" ]] || return 1

  printf -v "${cache_var}" '%s' "${repo_id}"
  printf '%s\n' "${repo_id}"
}
743
+
744
flow_github_repo_api_prefix() {
  # Print the ID-based REST route prefix "repositories/<numeric id>" for the
  # given slug (used as a fallback route — presumably because ID routes
  # survive repo renames; confirm with callers).
  local repo_slug="${1:-}"
  local numeric_id=""

  numeric_id="$(flow_github_repo_id_for_repo_slug "${repo_slug}")" || return 1
  printf 'repositories/%s\n' "${numeric_id}"
}
751
+
752
# Forge-agnostic repo-scoped API call. Dispatches to Gitea when configured;
# otherwise wraps `gh api`, first via "repos/<slug>[/<route>]" and, when
# that fails for a non-rate-limit reason, via the numeric
# "repositories/<id>[/<route>]" fallback prefix.
# Extra arguments after <slug> <route> are passed straight to `gh api`;
# "--input -" stdin is spooled to a temp file so it can be replayed on the
# fallback attempt. Prints the response body; non-zero on failure.
flow_github_api_repo() {
  local repo_slug="${1:?repo slug required}"
  local route="${2:-}"
  local repo_prefix=""
  local direct_route="repos/${repo_slug}"
  local fallback_route=""
  local output=""
  local stdin_file=""
  local request_status=0
  local expect_input_value="false"
  local arg=""
  local index=0
  local gh_arg_count=0
  local stdout_file=""
  local stderr_file=""
  local error_output=""
  local -a gh_args=()

  if flow_using_gitea; then
    flow_gitea_api_repo "$@"
    return $?
  fi

  route="${route#/}"
  if [[ -n "${route}" ]]; then
    direct_route="${direct_route}/${route}"
  fi

  if [[ $# -gt 2 ]]; then
    gh_args=("${@:3}")
    gh_arg_count="${#gh_args[@]}"
  fi
  # Rewrite "--input -" to "--input <tempfile>": stdin can only be read
  # once, but we may need to send the body twice (direct + fallback route).
  for ((index = 0; index < ${#gh_args[@]}; index++)); do
    arg="${gh_args[${index}]}"
    if [[ "${expect_input_value}" == "true" ]]; then
      if [[ "${arg}" == "-" ]]; then
        if [[ -z "${stdin_file}" ]]; then
          stdin_file="$(mktemp)"
          cat >"${stdin_file}"
        fi
        gh_args[${index}]="${stdin_file}"
      fi
      expect_input_value="false"
    elif [[ "${arg}" == "--input" ]]; then
      expect_input_value="true"
    fi
  done

  # Honor an active backoff before touching the network.
  if flow_github_core_rate_limit_active; then
    rm -f "${stdin_file}"
    return 1
  fi

  flow_export_github_cli_auth_env "${repo_slug}"
  stdout_file="$(mktemp)"
  stderr_file="$(mktemp)"
  # The gh_arg_count branches avoid expanding an empty array under set -u
  # (an error on bash < 4.4).
  if [[ "${gh_arg_count}" -gt 0 ]]; then
    if gh api "${direct_route}" "${gh_args[@]}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    fi
  else
    if gh api "${direct_route}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    fi
  fi
  # Direct route failed: a rate-limit failure aborts immediately (the
  # fallback would hit the same limit); anything else tries the ID route.
  error_output="$(cat "${stderr_file}" 2>/dev/null || true)"
  if flow_github_output_indicates_rate_limit "${error_output}"; then
    flow_github_core_rate_limit_schedule "github-api-rate-limit"
    rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
    return 1
  fi

  if ! repo_prefix="$(flow_github_repo_api_prefix "${repo_slug}")"; then
    rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
    return 1
  fi
  fallback_route="${repo_prefix}"
  if [[ -n "${route}" ]]; then
    fallback_route="${fallback_route}/${route}"
  fi
  if [[ "${gh_arg_count}" -gt 0 ]]; then
    if gh api "${fallback_route}" "${gh_args[@]}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    else
      request_status=$?
    fi
  else
    if gh api "${fallback_route}" >"${stdout_file}" 2>"${stderr_file}"; then
      output="$(cat "${stdout_file}" 2>/dev/null || true)"
      flow_github_core_rate_limit_clear
      printf '%s' "${output}"
      rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
      return 0
    else
      request_status=$?
    fi
  fi
  error_output="$(cat "${stderr_file}" 2>/dev/null || true)"
  if flow_github_output_indicates_rate_limit "${error_output}"; then
    flow_github_core_rate_limit_schedule "github-api-rate-limit"
  fi
  rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
  return "${request_status}"
}
868
+
869
flow_json_or_default() {
  # Echo raw_value when it parses as JSON (per `jq -e .`), otherwise the
  # default (which itself defaults to "null").
  # NB: `jq -e` exits non-zero when the parsed value is `false` or `null`,
  # so those scalar inputs also yield the default — existing behavior.
  local raw_value="${1-}"
  local default_value="${2:-null}"

  if [[ -z "${raw_value}" ]] || ! jq -e . >/dev/null 2>&1 <<<"${raw_value}"; then
    printf '%s\n' "${default_value}"
    return 0
  fi

  printf '%s\n' "${raw_value}"
}
884
+
885
flow_github_urlencode() {
  # Percent-encode a single value for safe embedding in a URL; no characters
  # are exempted (safe=""). The value travels via the environment so no
  # shell/python quoting issues arise.
  local value="${1:-}"

  RAW_VALUE="${value}" python3 -c 'import os; from urllib.parse import quote; print(quote(os.environ.get("RAW_VALUE", ""), safe=""))'
}
895
+
896
# Fetch a GitHub (or Gitea) issue with comments as gh-shaped JSON, failing
# hard (status 1) when any REST fetch fails — unlike the softer
# flow_github_issue_view_json variant, which degrades to empty documents.
# Prefers `gh issue view` (GraphQL) when quota allows; otherwise falls back
# to REST plus local reshaping.
flow_github_issue_view_json_live() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local issue_json=""
  local comment_pages_json=""

  if flow_using_gitea; then
    flow_gitea_issue_view_json "${repo_slug}" "${issue_id}"
    return $?
  fi

  # GraphQL path: gh already emits the desired shape.
  if flow_github_graphql_available "${repo_slug}" \
    && issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
    printf '%s\n' "${issue_json}"
    return 0
  fi

  # REST path: both fetches must succeed ("live" semantics).
  if ! issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null)"; then
    return 1
  fi
  issue_json="$(flow_json_or_default "${issue_json}" '{}')"
  if ! comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments?per_page=100" --paginate --slurp 2>/dev/null)"; then
    return 1
  fi
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"

  # Reshape REST payloads into the gh --json structure (camelCase keys,
  # upper-case state). JSON travels via the environment to avoid quoting
  # pitfalls.
  ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY'
import json
import os

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")

# Flatten --slurp page lists; accept stray dicts defensively.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": issue.get("number"),
    "state": str(issue.get("state", "")).upper(),
    "title": issue.get("title") or "",
    "body": issue.get("body") or "",
    "url": issue.get("html_url") or issue.get("url") or "",
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "createdAt": issue.get("created_at") or "",
    "updatedAt": issue.get("updated_at") or "",
}

print(json.dumps(result))
PY
}
959
+
960
# Print a normalized JSON view of one issue (gh `issue view --json` shape).
# Arguments: $1 - repo slug (owner/name), $2 - issue id/number.
# Outputs:   single-line JSON object on stdout.
# Fallback order: Gitea helper -> gh GraphQL -> raw REST + python3 normalization.
# NOTE: the REST path is best-effort — API errors are swallowed (`|| true`)
# and replaced with empty defaults, so it still emits a (possibly empty) object.
flow_github_issue_view_json() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local issue_json=""
  local comment_pages_json=""

  # Gitea forges are handled by their own helper entirely.
  if flow_using_gitea; then
    flow_gitea_issue_view_json "${repo_slug}" "${issue_id}"
    return $?
  fi

  # Preferred path: gh CLI already returns the target JSON shape.
  if flow_github_graphql_available "${repo_slug}" \
    && issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
    printf '%s\n' "${issue_json}"
    return 0
  fi

  # REST fallback; flow_json_or_default guards against empty/invalid payloads.
  issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null || true)"
  issue_json="$(flow_json_or_default "${issue_json}" '{}')"
  comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments?per_page=100" --paginate --slurp 2>/dev/null || true)"
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"

  # Pass payloads via env (not argv) to avoid quoting/size issues, then
  # rename REST snake_case fields to the gh-CLI camelCase contract.
  ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY'
import json
import os

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
# --slurp can yield a list of pages or a single page object; flatten both.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": issue.get("number"),
    # REST returns lowercase state ("open"); GraphQL-style output is uppercase.
    "state": str(issue.get("state", "")).upper(),
    "title": issue.get("title") or "",
    "body": issue.get("body") or "",
    "url": issue.get("html_url") or issue.get("url") or "",
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "createdAt": issue.get("created_at") or "",
    "updatedAt": issue.get("updated_at") or "",
}

print(json.dumps(result))
PY
}
1019
+
1020
# List issues as a normalized JSON array (gh `issue list --json` shape).
# Arguments: $1 - repo slug, $2 - state (default "open"), $3 - limit (default 100).
# Outputs:   JSON array on stdout.
# Unlike flow_github_issue_list_json, this "live" variant FAILS (return 1)
# when the REST call errors instead of degrading to an empty list.
flow_github_issue_list_json_live() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local issues_json=""
  local per_page="100"

  if flow_using_gitea; then
    flow_gitea_issue_list_json "${repo_slug}" "${state}" "${limit}"
    return $?
  fi

  # Preferred path: gh CLI produces the target shape directly.
  if flow_github_graphql_available "${repo_slug}" \
    && issues_json="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
    printf '%s\n' "${issues_json}"
    return 0
  fi

  # Shrink page size when a small numeric limit was requested.
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  # Strict REST fetch: any API failure aborts the function.
  if ! issues_json="$(flow_github_api_repo "${repo_slug}" "issues?state=${state}&per_page=${per_page}" --paginate --slurp 2>/dev/null)"; then
    return 1
  fi
  issues_json="$(flow_json_or_default "${issues_json}" '[]')"

  # Flatten paginated pages, drop PRs (REST /issues includes them), map to
  # camelCase, and truncate to the requested limit.
  ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
issues = []

for page in pages:
    if isinstance(page, list):
        issues.extend(page)
    elif isinstance(page, dict):
        issues.append(page)

result = []
for issue in issues:
    if not isinstance(issue, dict):
        continue
    # The REST issues endpoint also returns pull requests; skip them.
    if issue.get("pull_request"):
        continue
    result.append({
        "number": issue.get("number"),
        "createdAt": issue.get("created_at") or "",
        "updatedAt": issue.get("updated_at") or "",
        "title": issue.get("title") or "",
        "url": issue.get("html_url") or issue.get("url") or "",
        "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    })

print(json.dumps(result[:limit]))
PY
}
1079
+
1080
# List issues as a normalized JSON array (gh `issue list --json` shape).
# Arguments: $1 - repo slug, $2 - state (default "open"), $3 - limit (default 100).
# Best-effort sibling of flow_github_issue_list_json_live: REST errors are
# swallowed (`|| true`) and normalized to an empty array instead of failing.
flow_github_issue_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local issues_json=""
  local per_page="100"

  if flow_using_gitea; then
    flow_gitea_issue_list_json "${repo_slug}" "${state}" "${limit}"
    return $?
  fi

  # Preferred path: gh CLI produces the target shape directly.
  if flow_github_graphql_available "${repo_slug}" \
    && issues_json="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
    printf '%s\n' "${issues_json}"
    return 0
  fi

  # Shrink page size when a small numeric limit was requested.
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  # Lenient REST fetch: failures degrade to '[]' rather than aborting.
  issues_json="$(flow_github_api_repo "${repo_slug}" "issues?state=${state}&per_page=${per_page}" --paginate --slurp 2>/dev/null || true)"
  issues_json="$(flow_json_or_default "${issues_json}" '[]')"

  # Flatten pages, drop PRs, map snake_case -> camelCase, truncate to limit.
  ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
issues = []

for page in pages:
    if isinstance(page, list):
        issues.extend(page)
    elif isinstance(page, dict):
        issues.append(page)

result = []
for issue in issues:
    if not isinstance(issue, dict):
        continue
    # The REST issues endpoint also returns pull requests; skip them.
    if issue.get("pull_request"):
        continue
    result.append({
        "number": issue.get("number"),
        "createdAt": issue.get("created_at") or "",
        "updatedAt": issue.get("updated_at") or "",
        "title": issue.get("title") or "",
        "url": issue.get("html_url") or issue.get("url") or "",
        "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    })

print(json.dumps(result[:limit]))
PY
}
1137
+
1138
# Print a normalized JSON view of one pull request (gh `pr view --json` shape).
# Arguments: $1 - repo slug, $2 - PR number.
# Outputs:   single-line JSON object on stdout.
# Fallback order: Gitea helper -> gh GraphQL -> REST composition from the
# pulls, issues, comments, check-runs and commit-status endpoints.
flow_github_pr_view_json() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local pr_json=""
  local issue_json=""
  local comment_pages_json=""
  local head_sha=""
  local check_runs_json="{}"
  local status_json="{}"

  if flow_using_gitea; then
    flow_gitea_pr_view_json "${repo_slug}" "${pr_number}"
    return $?
  fi

  # Preferred path: gh CLI already returns the target JSON shape.
  if flow_github_graphql_available "${repo_slug}" \
    && pr_json="$(gh pr view "${pr_number}" -R "${repo_slug}" --json number,title,body,url,headRefName,baseRefName,mergeStateStatus,statusCheckRollup,labels,comments,state,isDraft 2>/dev/null)"; then
    printf '%s\n' "${pr_json}"
    return 0
  fi

  # Best-effort REST fetches; each payload is normalized to valid JSON.
  # labels/comments live on the companion /issues/<n> resource, not /pulls/<n>.
  pr_json="$(flow_github_api_repo "${repo_slug}" "pulls/${pr_number}" 2>/dev/null || true)"
  pr_json="$(flow_json_or_default "${pr_json}" '{}')"
  issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null || true)"
  issue_json="$(flow_json_or_default "${issue_json}" '{}')"
  comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null || true)"
  comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
  # Extract head commit SHA so CI state can be looked up for that commit.
  head_sha="$(
    PR_JSON="${pr_json}" python3 - <<'PY'
import json
import os

payload = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
head = payload.get("head") or {}
print(head.get("sha") or "")
PY
  )"
  if [[ -n "${head_sha}" ]]; then
    check_runs_json="$(flow_github_api_repo "${repo_slug}" "commits/${head_sha}/check-runs?per_page=100" 2>/dev/null || true)"
    check_runs_json="$(flow_json_or_default "${check_runs_json}" '{}')"
    status_json="$(flow_github_api_repo "${repo_slug}" "commits/${head_sha}/status" 2>/dev/null || true)"
    status_json="$(flow_json_or_default "${status_json}" '{}')"
  fi

  # Merge all five payloads into the gh-CLI camelCase contract.
  PR_JSON="${pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" CHECK_RUNS_JSON="${check_runs_json}" STATUS_JSON="${status_json}" python3 - <<'PY'
import json
import os

pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
check_runs_payload = json.loads(os.environ.get("CHECK_RUNS_JSON", "{}") or "{}")
status_payload = json.loads(os.environ.get("STATUS_JSON", "{}") or "{}")

# --slurp can yield a list of pages or a single page object; flatten both.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

# Combine check-runs and legacy commit statuses into one rollup list.
status_check_rollup = []
for run in check_runs_payload.get("check_runs", []) or []:
    if not isinstance(run, dict):
        continue
    status_check_rollup.append({
        "name": run.get("name") or "",
        "status": run.get("status") or "",
        "conclusion": run.get("conclusion") or "",
    })
for item in status_payload.get("statuses", []) or []:
    if not isinstance(item, dict):
        continue
    state = item.get("state") or ""
    status_check_rollup.append({
        "context": item.get("context") or "",
        "status": state,
        "conclusion": state,
    })

# REST reports merged PRs as "closed" with merged_at set; surface "MERGED".
pr_state = str(pr.get("state", "")).upper()
if pr.get("merged_at"):
    pr_state = "MERGED"

result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or issue.get("body") or "",
    "url": pr.get("html_url") or pr.get("url") or "",
    "headRefName": ((pr.get("head") or {}).get("ref")) or "",
    "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
    "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
    "mergeStateStatus": str(pr.get("mergeable_state") or "UNKNOWN").upper(),
    "statusCheckRollup": status_check_rollup,
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "state": pr_state,
    "isDraft": bool(pr.get("draft")),
    "createdAt": pr.get("created_at") or "",
    "updatedAt": pr.get("updated_at") or "",
    "mergedAt": pr.get("merged_at") or "",
    "authorLogin": ((pr.get("user") or {}).get("login")) or "",
}

print(json.dumps(result))
PY
}
1254
+
1255
# List pull requests as a normalized JSON array (gh `pr list --json` shape),
# enriching each PR with labels and comments from the companion issue resource.
# Arguments: $1 - repo slug, $2 - state (open/closed/merged/all; default open),
#            $3 - limit (default 100).
# "Live" variant: any REST failure aborts with return 1 (the non-live sibling
# degrades to empty results instead).
flow_github_pr_list_json_live() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local pr_json=""
  local per_page="100"
  local pulls_state="${state}"
  local pull_pages_json=""
  local selected_prs_json=""
  local item_jsonl_file=""
  local current_pr_json=""
  local issue_json=""
  local comment_pages_json=""
  local pr_number=""

  if flow_using_gitea; then
    flow_gitea_pr_list_json "${repo_slug}" "${state}" "${limit}"
    return $?
  fi

  # Preferred path: gh CLI produces the target shape directly.
  if flow_github_graphql_available "${repo_slug}" \
    && pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
    printf '%s\n' "${pr_json}"
    return 0
  fi

  # The REST pulls endpoint has no "merged" state; query "closed" and
  # filter on merged_at in the python step below.
  if [[ "${state}" == "merged" ]]; then
    pulls_state="closed"
  fi
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  # Strict REST fetch of the paginated PR list.
  if ! pull_pages_json="$(flow_github_api_repo "${repo_slug}" "pulls?state=${pulls_state}&per_page=${per_page}" --paginate --slurp 2>/dev/null)"; then
    return 1
  fi
  pull_pages_json="$(flow_json_or_default "${pull_pages_json}" '[]')"

  # Flatten pages, apply the merged filter and the limit, and map each PR
  # to the camelCase contract (labels/comments are filled in per-PR below).
  if ! selected_prs_json="$(
    PULL_PAGES_JSON="${pull_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("PULL_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
state_filter = os.environ.get("PR_STATE_FILTER", "open")
pulls = []

for page in pages:
    if isinstance(page, list):
        pulls.extend(page)
    elif isinstance(page, dict):
        pulls.append(page)

result = []
for pr in pulls:
    if not isinstance(pr, dict):
        continue
    if state_filter == "merged" and not pr.get("merged_at"):
        continue
    result.append({
        "number": pr.get("number"),
        "title": pr.get("title") or "",
        "body": pr.get("body") or "",
        "url": pr.get("html_url") or pr.get("url") or "",
        "headRefName": ((pr.get("head") or {}).get("ref")) or "",
        "createdAt": pr.get("created_at") or "",
        "mergedAt": pr.get("merged_at") or "",
        "isDraft": bool(pr.get("draft")),
    })
    if len(result) >= limit:
        break

print(json.dumps(result))
PY
  )"; then
    return 1
  fi

  # Accumulate one enriched JSON object per line (JSONL), then slurp at the end.
  item_jsonl_file="$(mktemp)"

  # Process substitution (not a pipe) keeps the loop in the current shell,
  # so `return 1` inside it aborts the whole function.
  while IFS= read -r current_pr_json; do
    [[ -n "${current_pr_json}" ]] || continue
    pr_number="$(jq -r '.number // ""' <<<"${current_pr_json}")"
    [[ -n "${pr_number}" ]] || continue
    # labels/comments live on the companion /issues/<n> resource.
    if ! issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null)"; then
      rm -f "${item_jsonl_file}"
      return 1
    fi
    issue_json="$(flow_json_or_default "${issue_json}" '{}')"
    if ! comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null)"; then
      rm -f "${item_jsonl_file}"
      return 1
    fi
    comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
    PR_JSON="${current_pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY' >>"${item_jsonl_file}"
import json
import os

# PR_JSON here is the already-camelCased selection from the step above;
# only the issue/comments payloads still carry REST snake_case fields.
pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or issue.get("body") or "",
    "url": pr.get("url") or issue.get("html_url") or issue.get("url") or "",
    "headRefName": pr.get("headRefName") or "",
    "createdAt": pr.get("createdAt") or "",
    "mergedAt": pr.get("mergedAt") or "",
    "isDraft": bool(pr.get("isDraft")),
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
}

print(json.dumps(result))
PY
  done < <(jq -c '.[]' <<<"${selected_prs_json}" 2>/dev/null || true)

  # Slurp the JSONL into a single array; clean up the temp file on both paths.
  if ! jq -s '.' "${item_jsonl_file}" 2>/dev/null; then
    rm -f "${item_jsonl_file}"
    return 1
  fi

  rm -f "${item_jsonl_file}"
}
1397
+
1398
# List pull requests as a normalized JSON array (gh `pr list --json` shape),
# enriching each PR with labels and comments from the companion issue resource.
# Arguments: $1 - repo slug, $2 - state (open/closed/merged/all; default open),
#            $3 - limit (default 100).
# Best-effort sibling of flow_github_pr_list_json_live: REST failures degrade
# to empty results instead of returning 1.
flow_github_pr_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local pr_json=""
  local per_page="100"
  local pulls_state="${state}"
  local pull_pages_json=""
  local selected_prs_json=""
  local item_jsonl_file=""
  local current_pr_json=""
  local issue_json=""
  local comment_pages_json=""
  local pr_number=""

  if flow_using_gitea; then
    flow_gitea_pr_list_json "${repo_slug}" "${state}" "${limit}"
    return $?
  fi

  # Preferred path: gh CLI produces the target shape directly.
  if flow_github_graphql_available "${repo_slug}" \
    && pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
    printf '%s\n' "${pr_json}"
    return 0
  fi

  # The REST pulls endpoint has no "merged" state; query "closed" and
  # filter on merged_at in the python step below.
  if [[ "${state}" == "merged" ]]; then
    pulls_state="closed"
  fi
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  # Lenient REST fetch: failures degrade to '[]'.
  pull_pages_json="$(flow_github_api_repo "${repo_slug}" "pulls?state=${pulls_state}&per_page=${per_page}" --paginate --slurp 2>/dev/null || true)"
  pull_pages_json="$(flow_json_or_default "${pull_pages_json}" '[]')"

  # Flatten pages, apply the merged filter and the limit, and map each PR
  # to the camelCase contract; on failure fall back to an empty selection.
  selected_prs_json="$(
    PULL_PAGES_JSON="${pull_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("PULL_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
state_filter = os.environ.get("PR_STATE_FILTER", "open")
pulls = []

for page in pages:
    if isinstance(page, list):
        pulls.extend(page)
    elif isinstance(page, dict):
        pulls.append(page)

result = []
for pr in pulls:
    if not isinstance(pr, dict):
        continue
    if state_filter == "merged" and not pr.get("merged_at"):
        continue
    result.append({
        "number": pr.get("number"),
        "title": pr.get("title") or "",
        "body": pr.get("body") or "",
        "url": pr.get("html_url") or pr.get("url") or "",
        "headRefName": ((pr.get("head") or {}).get("ref")) or "",
        "createdAt": pr.get("created_at") or "",
        "mergedAt": pr.get("merged_at") or "",
        "isDraft": bool(pr.get("draft")),
    })
    if len(result) >= limit:
        break

print(json.dumps(result))
PY
  )" || selected_prs_json='[]'

  # Accumulate one enriched JSON object per line (JSONL); the RETURN trap
  # removes the temp file on every exit path of this function.
  item_jsonl_file="$(mktemp)"
  trap 'rm -f "${item_jsonl_file}"' RETURN

  while IFS= read -r current_pr_json; do
    [[ -n "${current_pr_json}" ]] || continue
    pr_number="$(jq -r '.number // ""' <<<"${current_pr_json}")"
    [[ -n "${pr_number}" ]] || continue
    # labels/comments live on the companion /issues/<n> resource; best-effort.
    issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null || true)"
    issue_json="$(flow_json_or_default "${issue_json}" '{}')"
    comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null || true)"
    comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
    PR_JSON="${current_pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY' >>"${item_jsonl_file}"
import json
import os

# PR_JSON here is the already-camelCased selection from the step above;
# only the issue/comments payloads still carry REST snake_case fields.
pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or issue.get("body") or "",
    "url": pr.get("url") or issue.get("html_url") or issue.get("url") or "",
    "headRefName": pr.get("headRefName") or "",
    "createdAt": pr.get("createdAt") or "",
    "mergedAt": pr.get("mergedAt") or "",
    "isDraft": bool(pr.get("isDraft")),
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
}

print(json.dumps(result))
PY
  done < <(jq -c '.[]' <<<"${selected_prs_json}")

  # Aggregate the JSONL lines into a single JSON array.
  ITEM_JSONL_FILE="${item_jsonl_file}" python3 - <<'PY'
import json
import os

path = os.environ.get("ITEM_JSONL_FILE", "")
items = []
if path:
    with open(path, "r", encoding="utf-8") as fh:
        for line in fh:
            line = line.strip()
            if not line:
                continue
            items.append(json.loads(line))

print(json.dumps(items))
PY
}
1541
+
1542
# Close an issue, optionally posting a comment first.
# Arguments: $1 - repo slug, $2 - issue id/number, $3 - optional comment body.
# Returns:   status of the close operation; non-zero if the comment post fails.
# Fallback order (non-Gitea): gh CLI -> REST comment POST + state PATCH.
flow_github_issue_close() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local comment_body="${3:-}"
  local payload=""

  if flow_using_gitea; then
    # Comment first; a failed comment aborts before the issue is closed.
    if [[ -n "${comment_body}" ]]; then
      flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments" --method POST -f body="${comment_body}" >/dev/null || return 1
    fi
    payload='{"state":"closed"}'
    printf '%s' "${payload}" | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
    return $?
  fi

  if [[ -n "${comment_body}" ]]; then
    # gh can comment and close atomically.
    if gh issue close "${issue_id}" -R "${repo_slug}" --comment "${comment_body}" >/dev/null 2>&1; then
      return 0
    fi
    # REST fallback: fail fast if the comment cannot be posted, matching the
    # Gitea branch above (previously this result was silently ignored).
    flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments" --method POST -f body="${comment_body}" >/dev/null || return 1
  else
    if gh issue close "${issue_id}" -R "${repo_slug}" >/dev/null 2>&1; then
      return 0
    fi
  fi

  # Final REST fallback: flip the issue state to closed.
  payload='{"state":"closed"}'
  printf '%s' "${payload}" | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
}
1571
+
1572
# Replace an issue's body text.
# Arguments: $1 - repo slug, $2 - issue id/number, $3 - new body text.
# The body is passed to python3 via the environment and JSON-encoded there,
# so arbitrary text (quotes, newlines, unicode) survives intact.
flow_github_issue_update_body() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local body_text="${3:?body text required}"

  ISSUE_BODY="${body_text}" python3 -c 'import json, os; print(json.dumps({"body": os.environ.get("ISSUE_BODY", "")}))' \
    | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
}
1589
+
1590
# Create (or update) a repository label; best-effort — always returns 0.
# Arguments: $1 - repo slug, $2 - label name, $3 - optional description,
#            $4 - optional hex color (default 1D76DB).
flow_github_label_create() {
  local repo_slug="${1:?repo slug required}"
  local label_name="${2:?label name required}"
  local label_description="${3:-}"
  local label_color="${4:-1D76DB}"
  local encoded_label=""

  # Preferred path: gh CLI with --force (create-or-update in one call).
  gh label create "${label_name}" -R "${repo_slug}" --description "${label_description}" --color "${label_color}" --force >/dev/null 2>&1 \
    && return 0

  # REST fallback: attempt a plain create.
  flow_github_api_repo "${repo_slug}" "labels" --method POST -f name="${label_name}" -f description="${label_description}" -f color="${label_color}" >/dev/null 2>&1 \
    && return 0

  # Create failed (likely the label exists): update it in place instead.
  encoded_label="$(flow_github_urlencode "${label_name}")"
  flow_github_api_repo "${repo_slug}" "labels/${encoded_label}" --method PATCH -f new_name="${label_name}" -f description="${label_description}" -f color="${label_color}" >/dev/null 2>&1 || true
}
1608
+
1609
# REST helper shared by flow_github_issue_create's Gitea path and its
# gh-CLI fallback path (they were previously duplicated verbatim).
# Arguments: $1 - repo slug, $2 - title, $3 - body file.
# Outputs:   the created issue's URL on stdout; returns 1 if no URL came back.
flow_github_issue_create_via_api() {
  local repo_slug="${1:?repo slug required}"
  local title="${2:?title required}"
  local body_file="${3:?body file required}"
  local issue_url=""
  local body_text=""

  body_text="$(cat "${body_file}")"
  # Build the JSON payload in python3 (env-passed, so the text survives
  # quoting), POST it, and pull the issue URL out of the response.
  issue_url="$(
    ISSUE_TITLE="${title}" ISSUE_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "issues" --method POST --input - | jq -r '.html_url // ""'
import json
import os

payload = {
    "title": os.environ.get("ISSUE_TITLE", ""),
    "body": os.environ.get("ISSUE_BODY", ""),
}
print(json.dumps(payload))
PY
  )"
  [[ -n "${issue_url}" ]] || return 1
  printf '%s\n' "${issue_url}"
}

# Create an issue and print its URL.
# Arguments: $1 - repo slug, $2 - title, $3 - body file.
# Fallback order: Gitea/REST helper directly, else gh CLI, else REST helper.
flow_github_issue_create() {
  local repo_slug="${1:?repo slug required}"
  local title="${2:?title required}"
  local body_file="${3:?body file required}"
  local issue_url=""

  if flow_using_gitea; then
    flow_github_issue_create_via_api "${repo_slug}" "${title}" "${body_file}"
    return $?
  fi

  if issue_url="$(gh issue create -R "${repo_slug}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
    printf '%s\n' "${issue_url}"
    return 0
  fi

  flow_github_issue_create_via_api "${repo_slug}" "${title}" "${body_file}"
}
1656
+
1657
# Print the login name of the currently authenticated forge user.
# Best-effort: prints an empty string (and returns 0) when lookup fails.
# Gitea path: queries /api/v1/user with a token, else basic auth; with no
# credentials configured it prints nothing.
flow_github_current_login() {
  if flow_using_gitea; then
    local user_json=""
    local base_url=""

    base_url="$(flow_gitea_base_url)" || return 1
    # Prefer token auth; fall back to username/password basic auth.
    # ACP_-prefixed variables win over the plain GITEA_ ones.
    if [[ -n "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" ]]; then
      user_json="$(curl -sS -H "Authorization: token ${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" "${base_url}/api/v1/user" 2>/dev/null || true)"
    elif [[ -n "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" && -n "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" ]]; then
      user_json="$(curl -sS -u "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}:${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" "${base_url}/api/v1/user" 2>/dev/null || true)"
    fi
    # Empty/invalid responses degrade to '{}' so jq emits "".
    jq -r '.login // ""' <<<"${user_json:-{}}" 2>/dev/null || true
    return 0
  fi

  gh api user --jq '.login // ""' 2>/dev/null || true
}
1675
+
1676
# Print the author login of a PR (empty string on any failure).
# Arguments: $1 - repo slug, $2 - PR number.
flow_github_pr_author_login() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local pr_payload=""

  pr_payload="$(flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null || true)"
  jq -r '.authorLogin // ""' <<<"${pr_payload}" 2>/dev/null || true
}
1682
+
1683
# Print the head commit SHA of a PR (empty string on any failure).
# Arguments: $1 - repo slug, $2 - PR number.
flow_github_pr_head_oid() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local pr_payload=""

  pr_payload="$(flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null || true)"
  jq -r '.headRefOid // ""' <<<"${pr_payload}" 2>/dev/null || true
}
1689
+
1690
# Submit an approving review on a pull request.
# Arguments: $1 - repo slug, $2 - PR number,
#            $3 - review body (default "Automated final review passed.").
# Gitea: self-approval rejections ("approve your own pull is not allowed")
# are treated as success so automated flows don't fail on their own PRs.
flow_github_pr_review_approve() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local body_text="${3:-Automated final review passed.}"
  local output=""

  if flow_using_gitea; then
    # Capture stdout+stderr together so the error text can be inspected below.
    if output="$(
      REVIEW_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/reviews" --method POST --input - 2>&1
import json
import os

print(json.dumps({"event": "APPROVED", "body": os.environ.get("REVIEW_BODY", "")}))
PY
    )"; then
      return 0
    fi
    # Gitea rejects self-approval; treat that specific error as a no-op success.
    if grep -q "approve your own pull is not allowed" <<<"${output}"; then
      return 0
    fi
    printf '%s\n' "${output}" >&2
    return 1
  fi

  # GitHub path: note the REST event name is APPROVE (Gitea uses APPROVED).
  gh api "repos/${repo_slug}/pulls/${pr_number}/reviews" --method POST -f event=APPROVE -f body="${body_text}" >/dev/null
}
1716
+
1717
# REST helper shared by flow_github_pr_create's Gitea path and its gh-CLI
# fallback path (they were previously duplicated verbatim).
# Arguments: $1 - repo slug, $2 - base branch, $3 - head branch,
#            $4 - title, $5 - body file.
# Outputs:   the created PR's URL on stdout; returns 1 if no URL came back.
flow_github_pr_create_via_api() {
  local repo_slug="${1:?repo slug required}"
  local base_branch="${2:?base branch required}"
  local head_branch="${3:?head branch required}"
  local title="${4:?title required}"
  local body_file="${5:?body file required}"
  local pr_url=""
  local body_text=""

  body_text="$(cat "${body_file}")"
  # Build the JSON payload in python3 (env-passed, so the text survives
  # quoting), POST it, and pull the PR URL out of the response.
  pr_url="$(
    BASE_BRANCH="${base_branch}" HEAD_BRANCH="${head_branch}" PR_TITLE="${title}" PR_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls" --method POST --input - | jq -r '.html_url // ""'
import json
import os

payload = {
    "title": os.environ.get("PR_TITLE", ""),
    "head": os.environ.get("HEAD_BRANCH", ""),
    "base": os.environ.get("BASE_BRANCH", ""),
    "body": os.environ.get("PR_BODY", ""),
}
print(json.dumps(payload))
PY
  )"
  [[ -n "${pr_url}" ]] || return 1
  printf '%s\n' "${pr_url}"
}

# Create a pull request and print its URL.
# Arguments: $1 - repo slug, $2 - base branch, $3 - head branch,
#            $4 - title, $5 - body file.
# Fallback order: Gitea/REST helper directly, else gh CLI, else REST helper.
flow_github_pr_create() {
  local repo_slug="${1:?repo slug required}"
  local base_branch="${2:?base branch required}"
  local head_branch="${3:?head branch required}"
  local title="${4:?title required}"
  local body_file="${5:?body file required}"
  local pr_url=""

  if flow_using_gitea; then
    flow_github_pr_create_via_api "${repo_slug}" "${base_branch}" "${head_branch}" "${title}" "${body_file}"
    return $?
  fi

  if pr_url="$(gh pr create -R "${repo_slug}" --base "${base_branch}" --head "${head_branch}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
    printf '%s\n' "${pr_url}"
    return 0
  fi

  flow_github_pr_create_via_api "${repo_slug}" "${base_branch}" "${head_branch}" "${title}" "${body_file}"
}
1770
+