agent-control-plane 0.3.0 → 0.4.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +69 -19
  2. package/assets/workflow-catalog.json +1 -1
  3. package/bin/pr-risk.sh +22 -7
  4. package/bin/sync-pr-labels.sh +1 -1
  5. package/hooks/heartbeat-hooks.sh +125 -12
  6. package/hooks/issue-reconcile-hooks.sh +1 -1
  7. package/hooks/pr-reconcile-hooks.sh +1 -1
  8. package/npm/bin/agent-control-plane.js +256 -58
  9. package/package.json +7 -6
  10. package/tools/bin/agent-github-update-labels +36 -2
  11. package/tools/bin/agent-project-catch-up-merged-prs +3 -2
  12. package/tools/bin/agent-project-publish-issue-pr +6 -3
  13. package/tools/bin/agent-project-reconcile-issue-session +12 -1
  14. package/tools/bin/agent-project-reconcile-pr-session +90 -32
  15. package/tools/bin/agent-project-retry-state +18 -7
  16. package/tools/bin/agent-project-run-codex-resilient +13 -5
  17. package/tools/bin/agent-project-sync-source-repo-main +163 -0
  18. package/tools/bin/flow-config-lib.sh +1203 -60
  19. package/tools/bin/flow-shell-lib.sh +32 -0
  20. package/tools/bin/github-core-rate-limit-state.sh +77 -0
  21. package/tools/bin/github-write-outbox.sh +470 -0
  22. package/tools/bin/heartbeat-loop-scheduling-lib.sh +7 -7
  23. package/tools/bin/heartbeat-safe-auto.sh +42 -0
  24. package/tools/bin/install-project-launchd.sh +17 -2
  25. package/tools/bin/project-init.sh +21 -1
  26. package/tools/bin/project-launchd-bootstrap.sh +5 -1
  27. package/tools/bin/project-runtimectl.sh +46 -2
  28. package/tools/bin/resident-issue-controller-lib.sh +2 -2
  29. package/tools/bin/scaffold-profile.sh +61 -3
  30. package/tools/bin/start-pr-fix-worker.sh +47 -10
  31. package/tools/bin/start-resident-issue-loop.sh +2 -2
  32. package/tools/dashboard/app.js +30 -1
  33. package/tools/dashboard/dashboard_snapshot.py +55 -0
  34. package/tools/templates/pr-fix-template.md +3 -1
  35. package/tools/templates/pr-merge-repair-template.md +2 -1
  36. package/references/architecture.md +0 -217
  37. package/references/commands.md +0 -128
  38. package/references/control-plane-map.md +0 -124
  39. package/references/docs-map.md +0 -73
  40. package/references/release-checklist.md +0 -65
  41. package/references/repo-map.md +0 -36
  42. package/tools/bin/resident-issue-queue-status.py +0 -35
  43. package/tools/bin/split-retained-slice.sh +0 -124
@@ -243,13 +243,28 @@ flow_git_remote_repo_slug() {
243
243
  [[ -n "${remote_url}" ]] || return 1
244
244
 
245
245
  normalized="${remote_url%.git}"
246
- normalized="${normalized#ssh://git@github.com/}"
247
- normalized="${normalized#git@github.com:}"
248
- normalized="${normalized#https://github.com/}"
249
- normalized="${normalized#http://github.com/}"
246
+ case "${normalized}" in
247
+ ssh://*@*/*)
248
+ normalized="${normalized#ssh://}"
249
+ normalized="${normalized#*@}"
250
+ normalized="${normalized#*/}"
251
+ ;;
252
+ *@*:*/*)
253
+ normalized="${normalized#*@}"
254
+ normalized="${normalized#*:}"
255
+ ;;
256
+ https://*/*|http://*/*)
257
+ normalized="${normalized#http://}"
258
+ normalized="${normalized#https://}"
259
+ normalized="${normalized#*/}"
260
+ ;;
261
+ *)
262
+ return 1
263
+ ;;
264
+ esac
250
265
 
251
- if [[ "${normalized}" == "${remote_url%.git}" ]]; then
252
- return 1
266
+ if [[ "${normalized}" == */*/* ]]; then
267
+ normalized="${normalized#*/}"
253
268
  fi
254
269
 
255
270
  if [[ "${normalized}" =~ ^[^/]+/[^/]+$ ]]; then
@@ -260,16 +275,76 @@ flow_git_remote_repo_slug() {
260
275
  return 1
261
276
  }
262
277
 
278
+ flow_git_has_remote() {
279
+ local repo_root="${1:-}"
280
+ local remote_name="${2:-}"
281
+
282
+ [[ -n "${repo_root}" && -d "${repo_root}" && -n "${remote_name}" ]] || return 1
283
+ git -C "${repo_root}" remote get-url "${remote_name}" >/dev/null 2>&1
284
+ }
285
+
286
+ flow_resolve_forge_primary_remote() {
287
+ local repo_root="${1:-}"
288
+ local repo_slug="${2:-}"
289
+ local remote_name=""
290
+ local override="${ACP_SOURCE_SYNC_REMOTE:-${F_LOSNING_SOURCE_SYNC_REMOTE:-}}"
291
+ local forge_provider=""
292
+
293
+ [[ -n "${repo_root}" && -d "${repo_root}" ]] || return 1
294
+
295
+ if [[ -n "${override}" ]] && flow_git_has_remote "${repo_root}" "${override}"; then
296
+ printf '%s\n' "${override}"
297
+ return 0
298
+ fi
299
+
300
+ forge_provider="$(flow_forge_provider)"
301
+ case "${forge_provider}" in
302
+ gitea)
303
+ if flow_git_has_remote "${repo_root}" "gitea"; then
304
+ printf 'gitea\n'
305
+ return 0
306
+ fi
307
+ ;;
308
+ github)
309
+ if flow_git_has_remote "${repo_root}" "origin"; then
310
+ printf 'origin\n'
311
+ return 0
312
+ fi
313
+ ;;
314
+ esac
315
+
316
+ if [[ -n "${repo_slug}" ]]; then
317
+ while IFS= read -r remote_name; do
318
+ [[ -n "${remote_name}" ]] || continue
319
+ if [[ "$(flow_git_remote_repo_slug "${repo_root}" "${remote_name}" 2>/dev/null || true)" == "${repo_slug}" ]]; then
320
+ printf '%s\n' "${remote_name}"
321
+ return 0
322
+ fi
323
+ done < <(git -C "${repo_root}" remote)
324
+ fi
325
+
326
+ for remote_name in origin gitea; do
327
+ if flow_git_has_remote "${repo_root}" "${remote_name}"; then
328
+ printf '%s\n' "${remote_name}"
329
+ return 0
330
+ fi
331
+ done
332
+
333
+ return 1
334
+ }
335
+
263
336
  flow_git_credential_token_for_repo_slug() {
264
337
  local repo_slug="${1:-}"
338
+ local host="${2:-github.com}"
339
+ local path_suffix="${3:-${repo_slug}.git}"
265
340
  local credential_payload=""
266
341
  local token=""
267
342
 
268
- [[ -n "${repo_slug}" ]] || return 1
343
+ [[ -n "${repo_slug}" && -n "${host}" && -n "${path_suffix}" ]] || return 1
269
344
  command -v git >/dev/null 2>&1 || return 1
270
345
 
271
346
  credential_payload="$(
272
- printf 'protocol=https\nhost=github.com\npath=%s.git\n\n' "${repo_slug}" \
347
+ printf 'protocol=https\nhost=%s\npath=%s\n\n' "${host}" "${path_suffix}" \
273
348
  | git credential fill 2>/dev/null || true
274
349
  )"
275
350
  token="$(awk -F= '/^password=/{print $2; exit}' <<<"${credential_payload}")"
@@ -282,6 +357,10 @@ flow_export_github_cli_auth_env() {
282
357
  local repo_slug="${1:-}"
283
358
  local token=""
284
359
 
360
+ if flow_using_gitea; then
361
+ return 0
362
+ fi
363
+
285
364
  if [[ -n "${GH_TOKEN:-}" ]]; then
286
365
  return 0
287
366
  fi
@@ -309,9 +388,562 @@ flow_export_github_cli_auth_env() {
309
388
  fi
310
389
  }
311
390
 
391
+ flow_forge_provider() {
392
+ local provider="${ACP_FORGE_PROVIDER:-${F_LOSNING_FORGE_PROVIDER:-github}}"
393
+ provider="$(printf '%s' "${provider}" | tr '[:upper:]' '[:lower:]')"
394
+ case "${provider}" in
395
+ github|gitea)
396
+ printf '%s\n' "${provider}"
397
+ ;;
398
+ *)
399
+ printf 'github\n'
400
+ ;;
401
+ esac
402
+ }
403
+
404
+ flow_using_gitea() {
405
+ [[ "$(flow_forge_provider)" == "gitea" ]]
406
+ }
407
+
408
+ flow_gitea_base_url() {
409
+ local base_url="${ACP_GITEA_BASE_URL:-${GITEA_BASE_URL:-}}"
410
+ [[ -n "${base_url}" ]] || return 1
411
+ printf '%s\n' "${base_url%/}"
412
+ }
413
+
414
+ flow_gitea_base_host() {
415
+ local base_url=""
416
+ base_url="$(flow_gitea_base_url)" || return 1
417
+ base_url="${base_url#http://}"
418
+ base_url="${base_url#https://}"
419
+ printf '%s\n' "${base_url%%/*}"
420
+ }
421
+
422
+ flow_gitea_api_url_for_repo() {
423
+ local repo_slug="${1:?repo slug required}"
424
+ local route="${2:-}"
425
+ local base_url=""
426
+
427
+ base_url="$(flow_gitea_base_url)" || return 1
428
+ route="${route#/}"
429
+ if [[ -n "${route}" ]]; then
430
+ printf '%s/api/v1/repos/%s/%s\n' "${base_url}" "${repo_slug}" "${route}"
431
+ return 0
432
+ fi
433
+ printf '%s/api/v1/repos/%s\n' "${base_url}" "${repo_slug}"
434
+ }
435
+
436
+ flow_gitea_auth_curl_args() {
437
+ local repo_slug="${1:-}"
438
+ local credential_token=""
439
+
440
+ if [[ -n "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" ]]; then
441
+ printf -- "-H\0Authorization: token %s\0" "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}"
442
+ return 0
443
+ fi
444
+ if [[ -n "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" && -n "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" ]]; then
445
+ printf -- "-u\0%s:%s\0" "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}"
446
+ return 0
447
+ fi
448
+ if [[ -n "${repo_slug}" ]]; then
449
+ credential_token="$(flow_git_credential_token_for_repo_slug "${repo_slug}" "$(flow_gitea_base_host)" "${repo_slug}.git" || true)"
450
+ if [[ -n "${credential_token}" ]]; then
451
+ printf -- "-H\0Authorization: token %s\0" "${credential_token}"
452
+ return 0
453
+ fi
454
+ fi
455
+ return 1
456
+ }
457
+
458
+ flow_gitea_api_repo() {
459
+ local repo_slug="${1:?repo slug required}"
460
+ local route="${2:-}"
461
+ local method="GET"
462
+ local paginate="no"
463
+ local slurp="no"
464
+ local jq_filter=""
465
+ local expect_input="no"
466
+ local arg=""
467
+ local url=""
468
+ local input_file=""
469
+ local output=""
470
+ local page="1"
471
+ local per_page="100"
472
+ local response=""
473
+ local body=""
474
+ local link_header=""
475
+ local header_file=""
476
+ local stdout_file=""
477
+ local stderr_file=""
478
+ local curl_status="0"
479
+ local response_status="0"
480
+ local -a curl_args=()
481
+ local -a auth_args=()
482
+ local -a extra_headers=()
483
+ local -a form_fields=()
484
+ local -a pages=()
485
+
486
+ shift 2
487
+ while [[ $# -gt 0 ]]; do
488
+ arg="${1:-}"
489
+ case "${arg}" in
490
+ --method)
491
+ method="${2:-GET}"
492
+ shift 2
493
+ ;;
494
+ --paginate)
495
+ paginate="yes"
496
+ shift
497
+ ;;
498
+ --slurp)
499
+ slurp="yes"
500
+ shift
501
+ ;;
502
+ --jq)
503
+ jq_filter="${2:-}"
504
+ shift 2
505
+ ;;
506
+ --input)
507
+ expect_input="yes"
508
+ if [[ "${2:-}" == "-" ]]; then
509
+ input_file="$(mktemp)"
510
+ cat >"${input_file}"
511
+ shift 2
512
+ else
513
+ input_file="${2:-}"
514
+ shift 2
515
+ fi
516
+ ;;
517
+ -f|--field)
518
+ form_fields+=("${2:-}")
519
+ shift 2
520
+ ;;
521
+ *)
522
+ shift
523
+ ;;
524
+ esac
525
+ done
526
+
527
+ url="$(flow_gitea_api_url_for_repo "${repo_slug}" "${route}")" || {
528
+ rm -f "${input_file}"
529
+ return 1
530
+ }
531
+ while IFS= read -r -d '' arg; do
532
+ auth_args+=("${arg}")
533
+ done < <(flow_gitea_auth_curl_args "${repo_slug}") || true
534
+ if [[ "${#auth_args[@]}" -eq 0 && "${method}" != "GET" ]]; then
535
+ rm -f "${input_file}"
536
+ return 1
537
+ fi
538
+
539
+ if [[ "${expect_input}" == "yes" && -n "${input_file}" ]]; then
540
+ extra_headers+=(-H "Content-Type: application/json")
541
+ fi
542
+ if [[ "${#form_fields[@]}" -gt 0 ]]; then
543
+ extra_headers+=(-H "Content-Type: application/json")
544
+ body="$(
545
+ FORM_FIELDS="$(printf '%s\n' "${form_fields[@]}")" python3 - <<'PY'
546
+ import json
547
+ import os
548
+
549
+ payload = {}
550
+ for line in os.environ.get("FORM_FIELDS", "").splitlines():
551
+ line = line.rstrip("\n")
552
+ if "=" not in line:
553
+ continue
554
+ key, value = line.split("=", 1)
555
+ payload[key] = value
556
+ print(json.dumps(payload))
557
+ PY
558
+ )"
559
+ input_file="$(mktemp)"
560
+ printf '%s' "${body}" >"${input_file}"
561
+ fi
562
+
563
+ if [[ "${paginate}" != "yes" ]]; then
564
+ stdout_file="$(mktemp)"
565
+ stderr_file="$(mktemp)"
566
+ header_file="$(mktemp)"
567
+ curl_args=(-sS -D "${header_file}" -X "${method}")
568
+ if [[ "${#auth_args[@]}" -gt 0 ]]; then
569
+ curl_args+=("${auth_args[@]}")
570
+ fi
571
+ if [[ "${#extra_headers[@]}" -gt 0 ]]; then
572
+ curl_args+=("${extra_headers[@]}")
573
+ fi
574
+ if [[ -n "${input_file}" ]]; then
575
+ curl_args+=(--data-binary "@${input_file}")
576
+ fi
577
+ if curl "${curl_args[@]}" "${url}" >"${stdout_file}" 2>"${stderr_file}"; then
578
+ output="$(cat "${stdout_file}" 2>/dev/null || true)"
579
+ if [[ -n "${jq_filter}" ]]; then
580
+ jq -r "${jq_filter}" <<<"${output}"
581
+ else
582
+ printf '%s' "${output}"
583
+ fi
584
+ rm -f "${input_file}" "${stdout_file}" "${stderr_file}" "${header_file}"
585
+ return 0
586
+ fi
587
+ rm -f "${input_file}" "${stdout_file}" "${stderr_file}" "${header_file}"
588
+ return 1
589
+ fi
590
+
591
+ while :; do
592
+ stdout_file="$(mktemp)"
593
+ stderr_file="$(mktemp)"
594
+ header_file="$(mktemp)"
595
+ curl_args=(-sS -D "${header_file}" -X "${method}")
596
+ if [[ "${#auth_args[@]}" -gt 0 ]]; then
597
+ curl_args+=("${auth_args[@]}")
598
+ fi
599
+ if [[ "${#extra_headers[@]}" -gt 0 ]]; then
600
+ curl_args+=("${extra_headers[@]}")
601
+ fi
602
+ if curl "${curl_args[@]}" "${url}$([[ "${url}" == *\?* ]] && printf '&' || printf '?')page=${page}&limit=${per_page}" >"${stdout_file}" 2>"${stderr_file}"; then
603
+ response="$(cat "${stdout_file}" 2>/dev/null || true)"
604
+ pages+=("${response}")
605
+ link_header="$(tr -d '\r' <"${header_file}" | awk 'BEGIN{IGNORECASE=1}/^link:/{sub(/^link:[[:space:]]*/,""); print; exit}')"
606
+ rm -f "${stdout_file}" "${stderr_file}" "${header_file}"
607
+ if [[ "${link_header}" != *'rel="next"'* ]]; then
608
+ break
609
+ fi
610
+ page="$((page + 1))"
611
+ else
612
+ response_status="1"
613
+ rm -f "${stdout_file}" "${stderr_file}" "${header_file}" "${input_file}"
614
+ return "${response_status}"
615
+ fi
616
+ done
617
+
618
+ rm -f "${input_file}"
619
+ if [[ "${slurp}" == "yes" ]]; then
620
+ printf '%s\n' "${pages[@]}" | jq -s '.'
621
+ return 0
622
+ fi
623
+ printf '%s' "${pages[0]:-[]}"
624
+ }
625
+
626
+ flow_gitea_issue_view_json() {
627
+ local repo_slug="${1:?repo slug required}"
628
+ local issue_id="${2:?issue id required}"
629
+ local issue_json=""
630
+ local comments_json=""
631
+
632
+ issue_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null || true)"
633
+ issue_json="$(flow_json_or_default "${issue_json}" '{}')"
634
+ comments_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${issue_id}/comments" --paginate --slurp 2>/dev/null || true)"
635
+ comments_json="$(flow_json_or_default "${comments_json}" '[]')"
636
+
637
+ ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comments_json}" python3 - <<'PY'
638
+ import json
639
+ import os
640
+
641
+ issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
642
+ comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
643
+ comments = []
644
+ for page in comment_pages:
645
+ if isinstance(page, list):
646
+ comments.extend(page)
647
+ elif isinstance(page, dict):
648
+ comments.append(page)
649
+
650
+ result = {
651
+ "number": issue.get("number"),
652
+ "state": str(issue.get("state", "")).upper(),
653
+ "title": issue.get("title") or "",
654
+ "body": issue.get("body") or "",
655
+ "url": issue.get("html_url") or issue.get("url") or "",
656
+ "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
657
+ "comments": [
658
+ {
659
+ "body": comment.get("body") or "",
660
+ "createdAt": comment.get("created_at") or "",
661
+ "updatedAt": comment.get("updated_at") or "",
662
+ "url": comment.get("html_url") or "",
663
+ }
664
+ for comment in comments
665
+ if isinstance(comment, dict)
666
+ ],
667
+ "createdAt": issue.get("created_at") or "",
668
+ "updatedAt": issue.get("updated_at") or "",
669
+ }
670
+
671
+ print(json.dumps(result))
672
+ PY
673
+ }
674
+
675
+ flow_gitea_issue_list_json() {
676
+ local repo_slug="${1:?repo slug required}"
677
+ local state="${2:-open}"
678
+ local limit="${3:-100}"
679
+ local issues_json=""
680
+
681
+ issues_json="$(flow_gitea_api_repo "${repo_slug}" "issues?state=${state}" --paginate --slurp 2>/dev/null || true)"
682
+ issues_json="$(flow_json_or_default "${issues_json}" '[]')"
683
+
684
+ ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
685
+ import json
686
+ import os
687
+
688
+ pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
689
+ limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
690
+ issues = []
691
+
692
+ for page in pages:
693
+ if isinstance(page, list):
694
+ issues.extend(page)
695
+ elif isinstance(page, dict):
696
+ issues.append(page)
697
+
698
+ result = []
699
+ for issue in issues:
700
+ if not isinstance(issue, dict):
701
+ continue
702
+ if issue.get("pull_request"):
703
+ continue
704
+ result.append({
705
+ "number": issue.get("number"),
706
+ "createdAt": issue.get("created_at") or "",
707
+ "updatedAt": issue.get("updated_at") or "",
708
+ "title": issue.get("title") or "",
709
+ "url": issue.get("html_url") or issue.get("url") or "",
710
+ "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
711
+ })
712
+
713
+ print(json.dumps(result[:limit]))
714
+ PY
715
+ }
716
+
717
+ flow_gitea_pr_view_json() {
718
+ local repo_slug="${1:?repo slug required}"
719
+ local pr_number="${2:?pr number required}"
720
+ local pr_json=""
721
+ local comment_pages_json=""
722
+ local files_json=""
723
+ local reviews_json=""
724
+
725
+ pr_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}" 2>/dev/null || true)"
726
+ pr_json="$(flow_json_or_default "${pr_json}" '{}')"
727
+ comment_pages_json="$(flow_gitea_api_repo "${repo_slug}" "issues/${pr_number}/comments" --paginate --slurp 2>/dev/null || true)"
728
+ comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
729
+ files_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}/files" --paginate --slurp 2>/dev/null || true)"
730
+ files_json="$(flow_json_or_default "${files_json}" '[]')"
731
+ reviews_json="$(flow_gitea_api_repo "${repo_slug}" "pulls/${pr_number}/reviews" --paginate --slurp 2>/dev/null || true)"
732
+ reviews_json="$(flow_json_or_default "${reviews_json}" '[]')"
733
+
734
+ PR_JSON="${pr_json}" COMMENT_PAGES_JSON="${comment_pages_json}" FILES_JSON="${files_json}" REVIEWS_JSON="${reviews_json}" python3 - <<'PY'
735
+ import json
736
+ import os
737
+
738
+ pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
739
+ comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
740
+ file_pages = json.loads(os.environ.get("FILES_JSON", "[]") or "[]")
741
+ review_pages = json.loads(os.environ.get("REVIEWS_JSON", "[]") or "[]")
742
+ comments = []
743
+ for page in comment_pages:
744
+ if isinstance(page, list):
745
+ comments.extend(page)
746
+ elif isinstance(page, dict):
747
+ comments.append(page)
748
+
749
+ files = []
750
+ for page in file_pages:
751
+ if isinstance(page, list):
752
+ files.extend(page)
753
+ elif isinstance(page, dict):
754
+ files.append(page)
755
+
756
+ reviews = []
757
+ for page in review_pages:
758
+ if isinstance(page, list):
759
+ reviews.extend(page)
760
+ elif isinstance(page, dict):
761
+ reviews.append(page)
762
+
763
+ pr_state = str(pr.get("state", "")).upper()
764
+ if pr.get("merged") or pr.get("merged_at"):
765
+ pr_state = "MERGED"
766
+
767
+ review_states = [
768
+ str(review.get("state") or "").upper()
769
+ for review in reviews
770
+ if isinstance(review, dict)
771
+ ]
772
+ review_decision = ""
773
+ if any(state == "APPROVED" for state in review_states):
774
+ review_decision = "APPROVED"
775
+ elif any(state in {"CHANGES_REQUESTED", "REQUEST_CHANGES"} for state in review_states):
776
+ review_decision = "CHANGES_REQUESTED"
777
+
778
+ result = {
779
+ "number": pr.get("number"),
780
+ "title": pr.get("title") or "",
781
+ "body": pr.get("body") or "",
782
+ "url": pr.get("html_url") or pr.get("url") or "",
783
+ "headRefName": ((pr.get("head") or {}).get("ref")) or "",
784
+ "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
785
+ "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
786
+ "mergeStateStatus": "CLEAN" if pr.get("mergeable") else "UNKNOWN",
787
+ "statusCheckRollup": [],
788
+ "labels": [{"name": label.get("name", "")} for label in pr.get("labels", []) if isinstance(label, dict)],
789
+ "comments": [
790
+ {
791
+ "body": comment.get("body") or "",
792
+ "createdAt": comment.get("created_at") or "",
793
+ "updatedAt": comment.get("updated_at") or "",
794
+ "url": comment.get("html_url") or "",
795
+ }
796
+ for comment in comments
797
+ if isinstance(comment, dict)
798
+ ],
799
+ "state": pr_state,
800
+ "isDraft": bool(pr.get("draft")),
801
+ "createdAt": pr.get("created_at") or "",
802
+ "updatedAt": pr.get("updated_at") or "",
803
+ "mergedAt": pr.get("merged_at") or "",
804
+ "authorLogin": ((pr.get("user") or {}).get("login")) or "",
805
+ "files": [
806
+ {"path": file.get("filename") or ""}
807
+ for file in files
808
+ if isinstance(file, dict) and (file.get("filename") or "")
809
+ ],
810
+ "reviewRequests": [
811
+ {"login": reviewer.get("login") or ""}
812
+ for reviewer in (pr.get("requested_reviewers") or [])
813
+ if isinstance(reviewer, dict)
814
+ ],
815
+ "reviewDecision": review_decision,
816
+ }
817
+
818
+ print(json.dumps(result))
819
+ PY
820
+ }
821
+
822
+ flow_gitea_pr_list_json() {
823
+ local repo_slug="${1:?repo slug required}"
824
+ local state="${2:-open}"
825
+ local limit="${3:-100}"
826
+ local pulls_state="${state}"
827
+ local pr_pages_json=""
828
+
829
+ if [[ "${state}" == "merged" ]]; then
830
+ pulls_state="closed"
831
+ fi
832
+
833
+ pr_pages_json="$(flow_gitea_api_repo "${repo_slug}" "pulls?state=${pulls_state}" --paginate --slurp 2>/dev/null || true)"
834
+ pr_pages_json="$(flow_json_or_default "${pr_pages_json}" '[]')"
835
+
836
+ PR_PAGES_JSON="${pr_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
837
+ import json
838
+ import os
839
+
840
+ pages = json.loads(os.environ.get("PR_PAGES_JSON", "[]") or "[]")
841
+ limit = int(os.environ.get("PR_LIMIT", "100") or "100")
842
+ state_filter = os.environ.get("PR_STATE_FILTER", "open")
843
+ prs = []
844
+ for page in pages:
845
+ if isinstance(page, list):
846
+ prs.extend(page)
847
+ elif isinstance(page, dict):
848
+ prs.append(page)
849
+
850
+ result = []
851
+ for pr in prs:
852
+ if not isinstance(pr, dict):
853
+ continue
854
+ merged = bool(pr.get("merged") or pr.get("merged_at"))
855
+ state = str(pr.get("state", "")).lower()
856
+ if state_filter == "open" and state != "open":
857
+ continue
858
+ if state_filter == "closed" and state != "closed":
859
+ continue
860
+ if state_filter == "merged" and not merged:
861
+ continue
862
+ normalized_state = "MERGED" if merged else state.upper()
863
+ result.append({
864
+ "number": pr.get("number"),
865
+ "title": pr.get("title") or "",
866
+ "body": pr.get("body") or "",
867
+ "url": pr.get("html_url") or pr.get("url") or "",
868
+ "headRefName": ((pr.get("head") or {}).get("ref")) or "",
869
+ "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
870
+ "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
871
+ "createdAt": pr.get("created_at") or "",
872
+ "mergedAt": pr.get("merged_at") or "",
873
+ "state": normalized_state,
874
+ "isDraft": bool(pr.get("draft")),
875
+ "labels": [{"name": label.get("name", "")} for label in pr.get("labels", []) if isinstance(label, dict)],
876
+ "comments": [],
877
+ "authorLogin": ((pr.get("user") or {}).get("login")) or "",
878
+ })
879
+ if len(result) >= limit:
880
+ break
881
+
882
+ print(json.dumps(result))
883
+ PY
884
+ }
885
+
886
+ flow_github_output_indicates_rate_limit() {
887
+ grep -Eiq 'API rate limit exceeded|secondary rate limit|rate limit exceeded|HTTP 403' <<<"${1:-}"
888
+ }
889
+
890
+ flow_github_core_rate_limit_state_bin() {
891
+ local flow_root=""
892
+ local candidate=""
893
+
894
+ flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")" || return 1
895
+ candidate="${flow_root}/tools/bin/github-core-rate-limit-state.sh"
896
+ [[ -x "${candidate}" ]] || return 1
897
+ printf '%s\n' "${candidate}"
898
+ }
899
+
900
+ flow_github_core_rate_limit_state_output() {
901
+ local state_bin=""
902
+
903
+ state_bin="$(flow_github_core_rate_limit_state_bin)" || return 1
904
+ "${state_bin}" get 2>/dev/null || true
905
+ }
906
+
907
+ flow_github_core_rate_limit_active() {
908
+ local state_out=""
909
+ local ready=""
910
+
911
+ state_out="$(flow_github_core_rate_limit_state_output)" || return 1
912
+ ready="$(awk -F= '/^READY=/{print $2; exit}' <<<"${state_out}")"
913
+ [[ "${ready}" == "no" ]]
914
+ }
915
+
916
+ flow_github_core_rate_limit_schedule() {
917
+ local reason="${1:-github-api-rate-limit}"
918
+ local reset_epoch="${2:-}"
919
+ local state_bin=""
920
+ local now_epoch=""
921
+
922
+ state_bin="$(flow_github_core_rate_limit_state_bin)" || return 0
923
+ now_epoch="$(date +%s)"
924
+ if [[ "${reset_epoch}" =~ ^[0-9]+$ ]] && (( reset_epoch > now_epoch )); then
925
+ "${state_bin}" schedule "${reason}" --next-at-epoch "${reset_epoch}" >/dev/null 2>&1 || true
926
+ return 0
927
+ fi
928
+
929
+ "${state_bin}" schedule "${reason}" >/dev/null 2>&1 || true
930
+ }
931
+
932
+ flow_github_core_rate_limit_clear() {
933
+ local state_bin=""
934
+
935
+ state_bin="$(flow_github_core_rate_limit_state_bin)" || return 0
936
+ "${state_bin}" clear >/dev/null 2>&1 || true
937
+ }
938
+
312
939
  flow_github_graphql_available() {
313
940
  local repo_slug="${1:-}"
314
- local remaining=""
941
+ local rate_limit_json=""
942
+ local graphql_remaining=""
943
+ local core_remaining=""
944
+ local core_reset=""
945
+ local stderr_file=""
946
+ local stderr_output=""
315
947
 
316
948
  if [[ "${FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE:-}" == "yes" ]]; then
317
949
  return 0
@@ -320,9 +952,39 @@ flow_github_graphql_available() {
320
952
  return 1
321
953
  fi
322
954
 
955
+ if flow_github_core_rate_limit_active; then
956
+ FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
957
+ return 1
958
+ fi
959
+
323
960
  flow_export_github_cli_auth_env "${repo_slug}"
324
- remaining="$(gh api rate_limit --jq '.resources.graphql.remaining' 2>/dev/null || true)"
325
- if [[ "${remaining}" =~ ^[0-9]+$ ]] && (( remaining > 0 )); then
961
+ stderr_file="$(mktemp)"
962
+ if rate_limit_json="$(gh api rate_limit 2>"${stderr_file}")"; then
963
+ graphql_remaining="$(jq -r '.resources.graphql.remaining // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
964
+ core_remaining="$(jq -r '.resources.core.remaining // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
965
+ core_reset="$(jq -r '.resources.core.reset // empty' <<<"${rate_limit_json}" 2>/dev/null || true)"
966
+ if [[ "${core_remaining}" =~ ^[0-9]+$ ]]; then
967
+ if (( core_remaining > 0 )); then
968
+ flow_github_core_rate_limit_clear
969
+ else
970
+ flow_github_core_rate_limit_schedule "github-api-rate-limit" "${core_reset}"
971
+ FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
972
+ rm -f "${stderr_file}"
973
+ return 1
974
+ fi
975
+ fi
976
+ else
977
+ stderr_output="$(cat "${stderr_file}" 2>/dev/null || true)"
978
+ if flow_github_output_indicates_rate_limit "${stderr_output}"; then
979
+ flow_github_core_rate_limit_schedule "github-api-rate-limit"
980
+ fi
981
+ FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="no"
982
+ rm -f "${stderr_file}"
983
+ return 1
984
+ fi
985
+ rm -f "${stderr_file}"
986
+
987
+ if [[ "${graphql_remaining}" =~ ^[0-9]+$ ]] && (( graphql_remaining > 0 )); then
326
988
  FLOW_GITHUB_GRAPHQL_AVAILABLE_CACHE="yes"
327
989
  return 0
328
990
  fi
@@ -343,6 +1005,8 @@ flow_github_repo_id_for_repo_slug() {
343
1005
  local cached_value=""
344
1006
  local repos_pages_json=""
345
1007
  local repo_id=""
1008
+ local stderr_file=""
1009
+ local stderr_output=""
346
1010
 
347
1011
  [[ -n "${repo_slug}" ]] || return 1
348
1012
  command -v gh >/dev/null 2>&1 || return 1
@@ -361,12 +1025,25 @@ flow_github_repo_id_for_repo_slug() {
361
1025
  return 0
362
1026
  fi
363
1027
 
1028
+ if flow_github_core_rate_limit_active; then
1029
+ return 1
1030
+ fi
1031
+
364
1032
  flow_export_github_cli_auth_env "${repo_slug}"
365
- repos_pages_json="$(
1033
+ stderr_file="$(mktemp)"
1034
+ if repos_pages_json="$(
366
1035
  gh api 'user/repos?per_page=100&visibility=all&affiliation=owner,collaborator,organization_member' \
367
1036
  --paginate \
368
- --slurp 2>/dev/null || true
369
- )"
1037
+ --slurp 2>"${stderr_file}" || true
1038
+ )" && [[ -n "${repos_pages_json}" ]]; then
1039
+ flow_github_core_rate_limit_clear
1040
+ else
1041
+ stderr_output="$(cat "${stderr_file}" 2>/dev/null || true)"
1042
+ if flow_github_output_indicates_rate_limit "${stderr_output}"; then
1043
+ flow_github_core_rate_limit_schedule "github-api-rate-limit"
1044
+ fi
1045
+ fi
1046
+ rm -f "${stderr_file}"
370
1047
  [[ -n "${repos_pages_json}" ]] || return 1
371
1048
 
372
1049
  repo_id="$(
@@ -415,13 +1092,21 @@ flow_github_api_repo() {
415
1092
  local fallback_route=""
416
1093
  local output=""
417
1094
  local stdin_file=""
418
- local status=0
1095
+ local request_status=0
419
1096
  local expect_input_value="false"
420
1097
  local arg=""
421
1098
  local index=0
422
1099
  local gh_arg_count=0
1100
+ local stdout_file=""
1101
+ local stderr_file=""
1102
+ local error_output=""
423
1103
  local -a gh_args=()
424
1104
 
1105
+ if flow_using_gitea; then
1106
+ flow_gitea_api_repo "$@"
1107
+ return $?
1108
+ fi
1109
+
425
1110
  route="${route#/}"
426
1111
  if [[ -n "${route}" ]]; then
427
1112
  direct_route="${direct_route}/${route}"
@@ -447,21 +1132,40 @@ flow_github_api_repo() {
447
1132
  fi
448
1133
  done
449
1134
 
1135
+ if flow_github_core_rate_limit_active; then
1136
+ rm -f "${stdin_file}"
1137
+ return 1
1138
+ fi
1139
+
450
1140
  flow_export_github_cli_auth_env "${repo_slug}"
1141
+ stdout_file="$(mktemp)"
1142
+ stderr_file="$(mktemp)"
451
1143
  if [[ "${gh_arg_count}" -gt 0 ]]; then
452
- output="$(gh api "${direct_route}" "${gh_args[@]}" 2>/dev/null)" && {
1144
+ if gh api "${direct_route}" "${gh_args[@]}" >"${stdout_file}" 2>"${stderr_file}"; then
1145
+ output="$(cat "${stdout_file}" 2>/dev/null || true)"
1146
+ flow_github_core_rate_limit_clear
453
1147
  printf '%s' "${output}"
454
- rm -f "${stdin_file}"
1148
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
455
1149
  return 0
456
- }
457
- elif output="$(gh api "${direct_route}" 2>/dev/null)"; then
458
- printf '%s' "${output}"
459
- rm -f "${stdin_file}"
460
- return 0
1150
+ fi
1151
+ else
1152
+ if gh api "${direct_route}" >"${stdout_file}" 2>"${stderr_file}"; then
1153
+ output="$(cat "${stdout_file}" 2>/dev/null || true)"
1154
+ flow_github_core_rate_limit_clear
1155
+ printf '%s' "${output}"
1156
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
1157
+ return 0
1158
+ fi
1159
+ fi
1160
+ error_output="$(cat "${stderr_file}" 2>/dev/null || true)"
1161
+ if flow_github_output_indicates_rate_limit "${error_output}"; then
1162
+ flow_github_core_rate_limit_schedule "github-api-rate-limit"
1163
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
1164
+ return 1
461
1165
  fi
462
1166
 
463
1167
  if ! repo_prefix="$(flow_github_repo_api_prefix "${repo_slug}")"; then
464
- rm -f "${stdin_file}"
1168
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
465
1169
  return 1
466
1170
  fi
467
1171
  fallback_route="${repo_prefix}"
@@ -469,19 +1173,32 @@ flow_github_api_repo() {
469
1173
  fallback_route="${fallback_route}/${route}"
470
1174
  fi
471
1175
  if [[ "${gh_arg_count}" -gt 0 ]]; then
472
- output="$(gh api "${fallback_route}" "${gh_args[@]}")" && {
1176
+ if gh api "${fallback_route}" "${gh_args[@]}" >"${stdout_file}" 2>"${stderr_file}"; then
1177
+ output="$(cat "${stdout_file}" 2>/dev/null || true)"
1178
+ flow_github_core_rate_limit_clear
473
1179
  printf '%s' "${output}"
474
- rm -f "${stdin_file}"
1180
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
475
1181
  return 0
476
- }
477
- elif output="$(gh api "${fallback_route}")"; then
478
- printf '%s' "${output}"
479
- rm -f "${stdin_file}"
480
- return 0
1182
+ else
1183
+ request_status=$?
1184
+ fi
1185
+ else
1186
+ if gh api "${fallback_route}" >"${stdout_file}" 2>"${stderr_file}"; then
1187
+ output="$(cat "${stdout_file}" 2>/dev/null || true)"
1188
+ flow_github_core_rate_limit_clear
1189
+ printf '%s' "${output}"
1190
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
1191
+ return 0
1192
+ else
1193
+ request_status=$?
1194
+ fi
481
1195
  fi
482
- status=$?
483
- rm -f "${stdin_file}"
484
- return "${status}"
1196
+ error_output="$(cat "${stderr_file}" 2>/dev/null || true)"
1197
+ if flow_github_output_indicates_rate_limit "${error_output}"; then
1198
+ flow_github_core_rate_limit_schedule "github-api-rate-limit"
1199
+ fi
1200
+ rm -f "${stdin_file}" "${stdout_file}" "${stderr_file}"
1201
+ return "${request_status}"
485
1202
  }
486
1203
 
487
1204
  flow_json_or_default() {
@@ -511,12 +1228,81 @@ print(quote(os.environ.get("RAW_VALUE", ""), safe=""))
511
1228
  PY
512
1229
  }
513
1230
 
1231
+ flow_github_issue_view_json_live() {
1232
+ local repo_slug="${1:?repo slug required}"
1233
+ local issue_id="${2:?issue id required}"
1234
+ local issue_json=""
1235
+ local comment_pages_json=""
1236
+
1237
+ if flow_using_gitea; then
1238
+ flow_gitea_issue_view_json "${repo_slug}" "${issue_id}"
1239
+ return $?
1240
+ fi
1241
+
1242
+ if flow_github_graphql_available "${repo_slug}" \
1243
+ && issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
1244
+ printf '%s\n' "${issue_json}"
1245
+ return 0
1246
+ fi
1247
+
1248
+ if ! issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}" 2>/dev/null)"; then
1249
+ return 1
1250
+ fi
1251
+ issue_json="$(flow_json_or_default "${issue_json}" '{}')"
1252
+ if ! comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments?per_page=100" --paginate --slurp 2>/dev/null)"; then
1253
+ return 1
1254
+ fi
1255
+ comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
1256
+
1257
+ ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY'
1258
+ import json
1259
+ import os
1260
+
1261
+ issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
1262
+ comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
1263
+ comments = []
1264
+ for page in comment_pages:
1265
+ if isinstance(page, list):
1266
+ comments.extend(page)
1267
+ elif isinstance(page, dict):
1268
+ comments.append(page)
1269
+
1270
+ result = {
1271
+ "number": issue.get("number"),
1272
+ "state": str(issue.get("state", "")).upper(),
1273
+ "title": issue.get("title") or "",
1274
+ "body": issue.get("body") or "",
1275
+ "url": issue.get("html_url") or issue.get("url") or "",
1276
+ "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
1277
+ "comments": [
1278
+ {
1279
+ "body": comment.get("body") or "",
1280
+ "createdAt": comment.get("created_at") or "",
1281
+ "updatedAt": comment.get("updated_at") or "",
1282
+ "url": comment.get("html_url") or "",
1283
+ }
1284
+ for comment in comments
1285
+ if isinstance(comment, dict)
1286
+ ],
1287
+ "createdAt": issue.get("created_at") or "",
1288
+ "updatedAt": issue.get("updated_at") or "",
1289
+ }
1290
+
1291
+ print(json.dumps(result))
1292
+ PY
1293
+ }
1294
+
514
1295
  flow_github_issue_view_json() {
515
1296
  local repo_slug="${1:?repo slug required}"
516
1297
  local issue_id="${2:?issue id required}"
517
1298
  local issue_json=""
518
1299
  local comment_pages_json=""
519
1300
 
1301
+ if flow_using_gitea; then
1302
+ flow_gitea_issue_view_json "${repo_slug}" "${issue_id}"
1303
+ return $?
1304
+ fi
1305
+
520
1306
  if flow_github_graphql_available "${repo_slug}" \
521
1307
  && issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
522
1308
  printf '%s\n' "${issue_json}"
@@ -532,37 +1318,97 @@ flow_github_issue_view_json() {
532
1318
  import json
533
1319
  import os
534
1320
 
535
- issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
536
- comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
537
- comments = []
538
- for page in comment_pages:
1321
+ issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
1322
+ comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
1323
+ comments = []
1324
+ for page in comment_pages:
1325
+ if isinstance(page, list):
1326
+ comments.extend(page)
1327
+ elif isinstance(page, dict):
1328
+ comments.append(page)
1329
+
1330
+ result = {
1331
+ "number": issue.get("number"),
1332
+ "state": str(issue.get("state", "")).upper(),
1333
+ "title": issue.get("title") or "",
1334
+ "body": issue.get("body") or "",
1335
+ "url": issue.get("html_url") or issue.get("url") or "",
1336
+ "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
1337
+ "comments": [
1338
+ {
1339
+ "body": comment.get("body") or "",
1340
+ "createdAt": comment.get("created_at") or "",
1341
+ "updatedAt": comment.get("updated_at") or "",
1342
+ "url": comment.get("html_url") or "",
1343
+ }
1344
+ for comment in comments
1345
+ if isinstance(comment, dict)
1346
+ ],
1347
+ "createdAt": issue.get("created_at") or "",
1348
+ "updatedAt": issue.get("updated_at") or "",
1349
+ }
1350
+
1351
+ print(json.dumps(result))
1352
+ PY
1353
+ }
1354
+
1355
+ flow_github_issue_list_json_live() {
1356
+ local repo_slug="${1:?repo slug required}"
1357
+ local state="${2:-open}"
1358
+ local limit="${3:-100}"
1359
+ local issues_json=""
1360
+ local per_page="100"
1361
+
1362
+ if flow_using_gitea; then
1363
+ flow_gitea_issue_list_json "${repo_slug}" "${state}" "${limit}"
1364
+ return $?
1365
+ fi
1366
+
1367
+ if flow_github_graphql_available "${repo_slug}" \
1368
+ && issues_json="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
1369
+ printf '%s\n' "${issues_json}"
1370
+ return 0
1371
+ fi
1372
+
1373
+ if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
1374
+ per_page="${limit}"
1375
+ fi
1376
+
1377
+ if ! issues_json="$(flow_github_api_repo "${repo_slug}" "issues?state=${state}&per_page=${per_page}" --paginate --slurp 2>/dev/null)"; then
1378
+ return 1
1379
+ fi
1380
+ issues_json="$(flow_json_or_default "${issues_json}" '[]')"
1381
+
1382
+ ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
1383
+ import json
1384
+ import os
1385
+
1386
+ pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
1387
+ limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
1388
+ issues = []
1389
+
1390
+ for page in pages:
539
1391
  if isinstance(page, list):
540
- comments.extend(page)
1392
+ issues.extend(page)
541
1393
  elif isinstance(page, dict):
542
- comments.append(page)
1394
+ issues.append(page)
543
1395
 
544
- result = {
545
- "number": issue.get("number"),
546
- "state": str(issue.get("state", "")).upper(),
547
- "title": issue.get("title") or "",
548
- "body": issue.get("body") or "",
549
- "url": issue.get("html_url") or issue.get("url") or "",
550
- "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
551
- "comments": [
552
- {
553
- "body": comment.get("body") or "",
554
- "createdAt": comment.get("created_at") or "",
555
- "updatedAt": comment.get("updated_at") or "",
556
- "url": comment.get("html_url") or "",
557
- }
558
- for comment in comments
559
- if isinstance(comment, dict)
560
- ],
561
- "createdAt": issue.get("created_at") or "",
562
- "updatedAt": issue.get("updated_at") or "",
563
- }
1396
+ result = []
1397
+ for issue in issues:
1398
+ if not isinstance(issue, dict):
1399
+ continue
1400
+ if issue.get("pull_request"):
1401
+ continue
1402
+ result.append({
1403
+ "number": issue.get("number"),
1404
+ "createdAt": issue.get("created_at") or "",
1405
+ "updatedAt": issue.get("updated_at") or "",
1406
+ "title": issue.get("title") or "",
1407
+ "url": issue.get("html_url") or issue.get("url") or "",
1408
+ "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
1409
+ })
564
1410
 
565
- print(json.dumps(result))
1411
+ print(json.dumps(result[:limit]))
566
1412
  PY
567
1413
  }
568
1414
 
@@ -573,6 +1419,11 @@ flow_github_issue_list_json() {
573
1419
  local issues_json=""
574
1420
  local per_page="100"
575
1421
 
1422
+ if flow_using_gitea; then
1423
+ flow_gitea_issue_list_json "${repo_slug}" "${state}" "${limit}"
1424
+ return $?
1425
+ fi
1426
+
576
1427
  if flow_github_graphql_available "${repo_slug}" \
577
1428
  && issues_json="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
578
1429
  printf '%s\n' "${issues_json}"
@@ -629,6 +1480,11 @@ flow_github_pr_view_json() {
629
1480
  local check_runs_json="{}"
630
1481
  local status_json="{}"
631
1482
 
1483
+ if flow_using_gitea; then
1484
+ flow_gitea_pr_view_json "${repo_slug}" "${pr_number}"
1485
+ return $?
1486
+ fi
1487
+
632
1488
  if flow_github_graphql_available "${repo_slug}" \
633
1489
  && pr_json="$(gh pr view "${pr_number}" -R "${repo_slug}" --json number,title,body,url,headRefName,baseRefName,mergeStateStatus,statusCheckRollup,labels,comments,state,isDraft 2>/dev/null)"; then
634
1490
  printf '%s\n' "${pr_json}"
@@ -704,6 +1560,7 @@ result = {
704
1560
  "body": pr.get("body") or issue.get("body") or "",
705
1561
  "url": pr.get("html_url") or pr.get("url") or "",
706
1562
  "headRefName": ((pr.get("head") or {}).get("ref")) or "",
1563
+ "headRefOid": ((pr.get("head") or {}).get("sha")) or "",
707
1564
  "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
708
1565
  "mergeStateStatus": str(pr.get("mergeable_state") or "UNKNOWN").upper(),
709
1566
  "statusCheckRollup": status_check_rollup,
@@ -723,10 +1580,154 @@ result = {
723
1580
  "createdAt": pr.get("created_at") or "",
724
1581
  "updatedAt": pr.get("updated_at") or "",
725
1582
  "mergedAt": pr.get("merged_at") or "",
1583
+ "authorLogin": ((pr.get("user") or {}).get("login")) or "",
1584
+ }
1585
+
1586
+ print(json.dumps(result))
1587
+ PY
1588
+ }
1589
+
1590
+ flow_github_pr_list_json_live() {
1591
+ local repo_slug="${1:?repo slug required}"
1592
+ local state="${2:-open}"
1593
+ local limit="${3:-100}"
1594
+ local pr_json=""
1595
+ local per_page="100"
1596
+ local pulls_state="${state}"
1597
+ local pull_pages_json=""
1598
+ local selected_prs_json=""
1599
+ local item_jsonl_file=""
1600
+ local current_pr_json=""
1601
+ local issue_json=""
1602
+ local comment_pages_json=""
1603
+ local pr_number=""
1604
+
1605
+ if flow_using_gitea; then
1606
+ flow_gitea_pr_list_json "${repo_slug}" "${state}" "${limit}"
1607
+ return $?
1608
+ fi
1609
+
1610
+ if flow_github_graphql_available "${repo_slug}" \
1611
+ && pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
1612
+ printf '%s\n' "${pr_json}"
1613
+ return 0
1614
+ fi
1615
+
1616
+ if [[ "${state}" == "merged" ]]; then
1617
+ pulls_state="closed"
1618
+ fi
1619
+ if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
1620
+ per_page="${limit}"
1621
+ fi
1622
+
1623
+ if ! pull_pages_json="$(flow_github_api_repo "${repo_slug}" "pulls?state=${pulls_state}&per_page=${per_page}" --paginate --slurp 2>/dev/null)"; then
1624
+ return 1
1625
+ fi
1626
+ pull_pages_json="$(flow_json_or_default "${pull_pages_json}" '[]')"
1627
+
1628
+ if ! selected_prs_json="$(
1629
+ PULL_PAGES_JSON="${pull_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
1630
+ import json
1631
+ import os
1632
+
1633
+ pages = json.loads(os.environ.get("PULL_PAGES_JSON", "[]") or "[]")
1634
+ limit = int(os.environ.get("PR_LIMIT", "100") or "100")
1635
+ state_filter = os.environ.get("PR_STATE_FILTER", "open")
1636
+ pulls = []
1637
+
1638
+ for page in pages:
1639
+ if isinstance(page, list):
1640
+ pulls.extend(page)
1641
+ elif isinstance(page, dict):
1642
+ pulls.append(page)
1643
+
1644
+ result = []
1645
+ for pr in pulls:
1646
+ if not isinstance(pr, dict):
1647
+ continue
1648
+ if state_filter == "merged" and not pr.get("merged_at"):
1649
+ continue
1650
+ result.append({
1651
+ "number": pr.get("number"),
1652
+ "title": pr.get("title") or "",
1653
+ "body": pr.get("body") or "",
1654
+ "url": pr.get("html_url") or pr.get("url") or "",
1655
+ "headRefName": ((pr.get("head") or {}).get("ref")) or "",
1656
+ "createdAt": pr.get("created_at") or "",
1657
+ "mergedAt": pr.get("merged_at") or "",
1658
+ "isDraft": bool(pr.get("draft")),
1659
+ })
1660
+ if len(result) >= limit:
1661
+ break
1662
+
1663
+ print(json.dumps(result))
1664
+ PY
1665
+ )"; then
1666
+ return 1
1667
+ fi
1668
+
1669
+ item_jsonl_file="$(mktemp)"
1670
+
1671
+ while IFS= read -r current_pr_json; do
1672
+ [[ -n "${current_pr_json}" ]] || continue
1673
+ pr_number="$(jq -r '.number // ""' <<<"${current_pr_json}")"
1674
+ [[ -n "${pr_number}" ]] || continue
1675
+ if ! issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null)"; then
1676
+ rm -f "${item_jsonl_file}"
1677
+ return 1
1678
+ fi
1679
+ issue_json="$(flow_json_or_default "${issue_json}" '{}')"
1680
+ if ! comment_pages_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null)"; then
1681
+ rm -f "${item_jsonl_file}"
1682
+ return 1
1683
+ fi
1684
+ comment_pages_json="$(flow_json_or_default "${comment_pages_json}" '[]')"
1685
+ PR_JSON="${current_pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY' >>"${item_jsonl_file}"
1686
+ import json
1687
+ import os
1688
+
1689
+ pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
1690
+ issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
1691
+ comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
1692
+ comments = []
1693
+ for page in comment_pages:
1694
+ if isinstance(page, list):
1695
+ comments.extend(page)
1696
+ elif isinstance(page, dict):
1697
+ comments.append(page)
1698
+
1699
+ result = {
1700
+ "number": pr.get("number"),
1701
+ "title": pr.get("title") or "",
1702
+ "body": pr.get("body") or issue.get("body") or "",
1703
+ "url": pr.get("url") or issue.get("html_url") or issue.get("url") or "",
1704
+ "headRefName": pr.get("headRefName") or "",
1705
+ "createdAt": pr.get("createdAt") or "",
1706
+ "mergedAt": pr.get("mergedAt") or "",
1707
+ "isDraft": bool(pr.get("isDraft")),
1708
+ "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
1709
+ "comments": [
1710
+ {
1711
+ "body": comment.get("body") or "",
1712
+ "createdAt": comment.get("created_at") or "",
1713
+ "updatedAt": comment.get("updated_at") or "",
1714
+ "url": comment.get("html_url") or "",
1715
+ }
1716
+ for comment in comments
1717
+ if isinstance(comment, dict)
1718
+ ],
726
1719
  }
727
1720
 
728
1721
  print(json.dumps(result))
729
1722
  PY
1723
+ done < <(jq -c '.[]' <<<"${selected_prs_json}" 2>/dev/null || true)
1724
+
1725
+ if ! jq -s '.' "${item_jsonl_file}" 2>/dev/null; then
1726
+ rm -f "${item_jsonl_file}"
1727
+ return 1
1728
+ fi
1729
+
1730
+ rm -f "${item_jsonl_file}"
730
1731
  }
731
1732
 
732
1733
  flow_github_pr_list_json() {
@@ -744,6 +1745,11 @@ flow_github_pr_list_json() {
744
1745
  local comment_pages_json=""
745
1746
  local pr_number=""
746
1747
 
1748
+ if flow_using_gitea; then
1749
+ flow_gitea_pr_list_json "${repo_slug}" "${state}" "${limit}"
1750
+ return $?
1751
+ fi
1752
+
747
1753
  if flow_github_graphql_available "${repo_slug}" \
748
1754
  && pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
749
1755
  printf '%s\n' "${pr_json}"
@@ -874,6 +1880,15 @@ flow_github_issue_close() {
874
1880
  local comment_body="${3:-}"
875
1881
  local payload=""
876
1882
 
1883
+ if flow_using_gitea; then
1884
+ if [[ -n "${comment_body}" ]]; then
1885
+ flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments" --method POST -f body="${comment_body}" >/dev/null || return 1
1886
+ fi
1887
+ payload='{"state":"closed"}'
1888
+ printf '%s' "${payload}" | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
1889
+ return $?
1890
+ fi
1891
+
877
1892
  if [[ -n "${comment_body}" ]]; then
878
1893
  if gh issue close "${issue_id}" -R "${repo_slug}" --comment "${comment_body}" >/dev/null 2>&1; then
879
1894
  return 0
@@ -933,6 +1948,25 @@ flow_github_issue_create() {
933
1948
  local issue_url=""
934
1949
  local body_text=""
935
1950
 
1951
+ if flow_using_gitea; then
1952
+ body_text="$(cat "${body_file}")"
1953
+ issue_url="$(
1954
+ ISSUE_TITLE="${title}" ISSUE_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "issues" --method POST --input - | jq -r '.html_url // ""'
1955
+ import json
1956
+ import os
1957
+
1958
+ payload = {
1959
+ "title": os.environ.get("ISSUE_TITLE", ""),
1960
+ "body": os.environ.get("ISSUE_BODY", ""),
1961
+ }
1962
+ print(json.dumps(payload))
1963
+ PY
1964
+ )"
1965
+ [[ -n "${issue_url}" ]] || return 1
1966
+ printf '%s\n' "${issue_url}"
1967
+ return 0
1968
+ fi
1969
+
936
1970
  if issue_url="$(gh issue create -R "${repo_slug}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
937
1971
  printf '%s\n' "${issue_url}"
938
1972
  return 0
@@ -955,6 +1989,66 @@ PY
955
1989
  printf '%s\n' "${issue_url}"
956
1990
  }
957
1991
 
1992
+ flow_github_current_login() {
1993
+ if flow_using_gitea; then
1994
+ local user_json=""
1995
+ local auth_header=""
1996
+ local base_url=""
1997
+
1998
+ base_url="$(flow_gitea_base_url)" || return 1
1999
+ if [[ -n "${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" ]]; then
2000
+ user_json="$(curl -sS -H "Authorization: token ${ACP_GITEA_TOKEN:-${GITEA_TOKEN:-}}" "${base_url}/api/v1/user" 2>/dev/null || true)"
2001
+ elif [[ -n "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}" && -n "${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" ]]; then
2002
+ user_json="$(curl -sS -u "${ACP_GITEA_USERNAME:-${GITEA_USERNAME:-}}:${ACP_GITEA_PASSWORD:-${GITEA_PASSWORD:-}}" "${base_url}/api/v1/user" 2>/dev/null || true)"
2003
+ fi
2004
+ jq -r '.login // ""' <<<"${user_json:-{}}" 2>/dev/null || true
2005
+ return 0
2006
+ fi
2007
+
2008
+ gh api user --jq '.login // ""' 2>/dev/null || true
2009
+ }
2010
+
2011
+ flow_github_pr_author_login() {
2012
+ local repo_slug="${1:?repo slug required}"
2013
+ local pr_number="${2:?pr number required}"
2014
+
2015
+ flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null | jq -r '.authorLogin // ""' 2>/dev/null || true
2016
+ }
2017
+
2018
+ flow_github_pr_head_oid() {
2019
+ local repo_slug="${1:?repo slug required}"
2020
+ local pr_number="${2:?pr number required}"
2021
+
2022
+ flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null | jq -r '.headRefOid // ""' 2>/dev/null || true
2023
+ }
2024
+
2025
+ flow_github_pr_review_approve() {
2026
+ local repo_slug="${1:?repo slug required}"
2027
+ local pr_number="${2:?pr number required}"
2028
+ local body_text="${3:-Automated final review passed.}"
2029
+ local output=""
2030
+
2031
+ if flow_using_gitea; then
2032
+ if output="$(
2033
+ REVIEW_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/reviews" --method POST --input - 2>&1
2034
+ import json
2035
+ import os
2036
+
2037
+ print(json.dumps({"event": "APPROVED", "body": os.environ.get("REVIEW_BODY", "")}))
2038
+ PY
2039
+ )"; then
2040
+ return 0
2041
+ fi
2042
+ if grep -q "approve your own pull is not allowed" <<<"${output}"; then
2043
+ return 0
2044
+ fi
2045
+ printf '%s\n' "${output}" >&2
2046
+ return 1
2047
+ fi
2048
+
2049
+ gh api "repos/${repo_slug}/pulls/${pr_number}/reviews" --method POST -f event=APPROVE -f body="${body_text}" >/dev/null
2050
+ }
2051
+
958
2052
  flow_github_pr_create() {
959
2053
  local repo_slug="${1:?repo slug required}"
960
2054
  local base_branch="${2:?base branch required}"
@@ -964,6 +2058,27 @@ flow_github_pr_create() {
964
2058
  local pr_url=""
965
2059
  local body_text=""
966
2060
 
2061
+ if flow_using_gitea; then
2062
+ body_text="$(cat "${body_file}")"
2063
+ pr_url="$(
2064
+ BASE_BRANCH="${base_branch}" HEAD_BRANCH="${head_branch}" PR_TITLE="${title}" PR_BODY="${body_text}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls" --method POST --input - | jq -r '.html_url // ""'
2065
+ import json
2066
+ import os
2067
+
2068
+ payload = {
2069
+ "title": os.environ.get("PR_TITLE", ""),
2070
+ "head": os.environ.get("HEAD_BRANCH", ""),
2071
+ "base": os.environ.get("BASE_BRANCH", ""),
2072
+ "body": os.environ.get("PR_BODY", ""),
2073
+ }
2074
+ print(json.dumps(payload))
2075
+ PY
2076
+ )"
2077
+ [[ -n "${pr_url}" ]] || return 1
2078
+ printf '%s\n' "${pr_url}"
2079
+ return 0
2080
+ fi
2081
+
967
2082
  if pr_url="$(gh pr create -R "${repo_slug}" --base "${base_branch}" --head "${head_branch}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
968
2083
  printf '%s\n' "${pr_url}"
969
2084
  return 0
@@ -997,6 +2112,24 @@ flow_github_pr_merge() {
997
2112
  local head_ref=""
998
2113
  local encoded_ref=""
999
2114
 
2115
+ if flow_using_gitea; then
2116
+ printf '%s' "$(
2117
+ MERGE_METHOD="${merge_method}" DELETE_BRANCH="${delete_branch}" python3 - <<'PY'
2118
+ import json
2119
+ import os
2120
+
2121
+ method = os.environ.get("MERGE_METHOD", "squash")
2122
+ delete_branch = os.environ.get("DELETE_BRANCH", "no") == "yes"
2123
+ method_map = {"merge": "merge", "squash": "squash", "rebase": "rebase"}
2124
+ print(json.dumps({
2125
+ "Do": method_map.get(method, "squash"),
2126
+ "delete_branch_after_merge": delete_branch,
2127
+ }))
2128
+ PY
2129
+ )" | flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/merge" --method POST --input - >/dev/null
2130
+ return $?
2131
+ fi
2132
+
1000
2133
  if gh pr merge "${pr_number}" -R "${repo_slug}" "--${merge_method}" $([[ "${delete_branch}" == "yes" ]] && printf '%s' '--delete-branch') --admin >/dev/null 2>&1; then
1001
2134
  return 0
1002
2135
  fi
@@ -1429,6 +2562,16 @@ flow_resolve_retained_repo_root() {
1429
2562
  flow_env_or_config "${config_file}" "ACP_RETAINED_REPO_ROOT F_LOSNING_RETAINED_REPO_ROOT" "runtime.retained_repo_root" "${default_value}"
1430
2563
  }
1431
2564
 
2565
+ flow_resolve_source_repo_root() {
2566
+ local config_file="${1:-}"
2567
+ local default_value=""
2568
+ if [[ -z "${config_file}" ]]; then
2569
+ config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
2570
+ fi
2571
+ default_value="$(flow_resolve_retained_repo_root "${config_file}")"
2572
+ flow_env_or_config "${config_file}" "ACP_SOURCE_REPO_ROOT F_LOSNING_SOURCE_REPO_ROOT" "runtime.source_repo_root" "${default_value}"
2573
+ }
2574
+
1432
2575
  flow_resolve_vscode_workspace_file() {
1433
2576
  local config_file="${1:-}"
1434
2577
  local default_value=""