agent-control-plane 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +589 -0
- package/SKILL.md +149 -0
- package/assets/workflow-catalog.json +57 -0
- package/bin/audit-issue-routing.sh +74 -0
- package/bin/issue-resource-class.sh +58 -0
- package/bin/label-follow-up-issues.sh +114 -0
- package/bin/pr-risk.sh +532 -0
- package/bin/sync-pr-labels.sh +112 -0
- package/hooks/heartbeat-hooks.sh +573 -0
- package/hooks/issue-reconcile-hooks.sh +217 -0
- package/hooks/pr-reconcile-hooks.sh +225 -0
- package/npm/bin/agent-control-plane.js +1984 -0
- package/npm/public-bin/agent-control-plane +3 -0
- package/package.json +61 -0
- package/tools/bin/agent-cleanup-worktree +247 -0
- package/tools/bin/agent-github-update-labels +66 -0
- package/tools/bin/agent-init-worktree +216 -0
- package/tools/bin/agent-project-archive-run +52 -0
- package/tools/bin/agent-project-capture-worker +46 -0
- package/tools/bin/agent-project-catch-up-merged-prs +137 -0
- package/tools/bin/agent-project-cleanup-session +244 -0
- package/tools/bin/agent-project-detached-launch +107 -0
- package/tools/bin/agent-project-heartbeat-loop +2347 -0
- package/tools/bin/agent-project-open-issue-worktree +89 -0
- package/tools/bin/agent-project-open-pr-worktree +80 -0
- package/tools/bin/agent-project-publish-issue-pr +349 -0
- package/tools/bin/agent-project-reconcile-issue-session +1128 -0
- package/tools/bin/agent-project-reconcile-pr-session +1005 -0
- package/tools/bin/agent-project-retry-state +147 -0
- package/tools/bin/agent-project-run-claude-session +657 -0
- package/tools/bin/agent-project-run-codex-resilient +718 -0
- package/tools/bin/agent-project-run-codex-session +316 -0
- package/tools/bin/agent-project-run-kilo-session +27 -0
- package/tools/bin/agent-project-run-openclaw-session +984 -0
- package/tools/bin/agent-project-run-opencode-session +27 -0
- package/tools/bin/agent-project-sync-anchor-repo +128 -0
- package/tools/bin/agent-project-worker-status +143 -0
- package/tools/bin/audit-agent-worktrees.sh +310 -0
- package/tools/bin/audit-issue-routing.sh +11 -0
- package/tools/bin/audit-retained-layout.sh +58 -0
- package/tools/bin/audit-retained-overlap.sh +135 -0
- package/tools/bin/audit-retained-worktrees.sh +228 -0
- package/tools/bin/branch-verification-guard.sh +351 -0
- package/tools/bin/capture-worker.sh +18 -0
- package/tools/bin/check-skill-contracts.sh +324 -0
- package/tools/bin/cleanup-worktree.sh +44 -0
- package/tools/bin/codex-quota +31 -0
- package/tools/bin/create-follow-up-issue.sh +114 -0
- package/tools/bin/dashboard-launchd-bootstrap.sh +38 -0
- package/tools/bin/flow-config-lib.sh +2127 -0
- package/tools/bin/flow-resident-worker-lib.sh +683 -0
- package/tools/bin/flow-runtime-doctor.sh +97 -0
- package/tools/bin/flow-shell-lib.sh +266 -0
- package/tools/bin/heartbeat-recovery-preflight.sh +106 -0
- package/tools/bin/heartbeat-safe-auto.sh +551 -0
- package/tools/bin/install-dashboard-launchd.sh +152 -0
- package/tools/bin/install-project-launchd.sh +219 -0
- package/tools/bin/issue-publish-scope-guard.sh +242 -0
- package/tools/bin/issue-requires-local-workspace-install.sh +31 -0
- package/tools/bin/issue-resource-class.sh +12 -0
- package/tools/bin/kick-scheduler.sh +75 -0
- package/tools/bin/label-follow-up-issues.sh +14 -0
- package/tools/bin/new-pr-worktree.sh +50 -0
- package/tools/bin/new-worktree.sh +49 -0
- package/tools/bin/pr-risk.sh +12 -0
- package/tools/bin/prepare-worktree.sh +140 -0
- package/tools/bin/profile-activate.sh +109 -0
- package/tools/bin/profile-adopt.sh +219 -0
- package/tools/bin/profile-smoke.sh +461 -0
- package/tools/bin/project-init.sh +189 -0
- package/tools/bin/project-launchd-bootstrap.sh +54 -0
- package/tools/bin/project-remove.sh +155 -0
- package/tools/bin/project-runtime-supervisor.sh +56 -0
- package/tools/bin/project-runtimectl.sh +586 -0
- package/tools/bin/provider-cooldown-state.sh +166 -0
- package/tools/bin/publish-issue-worker.sh +31 -0
- package/tools/bin/reconcile-issue-worker.sh +34 -0
- package/tools/bin/reconcile-pr-worker.sh +34 -0
- package/tools/bin/record-verification.sh +71 -0
- package/tools/bin/render-architecture-infographics.sh +110 -0
- package/tools/bin/render-dashboard-demo-media.sh +333 -0
- package/tools/bin/render-dashboard-snapshot.py +16 -0
- package/tools/bin/render-flow-config.sh +86 -0
- package/tools/bin/retry-state.sh +31 -0
- package/tools/bin/reuse-issue-worktree.sh +75 -0
- package/tools/bin/run-codex-bypass.sh +3 -0
- package/tools/bin/run-codex-safe.sh +3 -0
- package/tools/bin/run-codex-task.sh +231 -0
- package/tools/bin/scaffold-profile.sh +374 -0
- package/tools/bin/serve-dashboard.sh +5 -0
- package/tools/bin/split-retained-slice.sh +124 -0
- package/tools/bin/start-issue-worker.sh +796 -0
- package/tools/bin/start-pr-fix-worker.sh +458 -0
- package/tools/bin/start-pr-merge-repair-worker.sh +8 -0
- package/tools/bin/start-pr-review-worker.sh +227 -0
- package/tools/bin/start-resident-issue-loop.sh +908 -0
- package/tools/bin/sync-agent-repo.sh +52 -0
- package/tools/bin/sync-dependency-baseline.sh +247 -0
- package/tools/bin/sync-pr-labels.sh +12 -0
- package/tools/bin/sync-recurring-issue-checklist.sh +274 -0
- package/tools/bin/sync-shared-agent-home.sh +214 -0
- package/tools/bin/sync-vscode-workspace.sh +157 -0
- package/tools/bin/test-smoke.sh +63 -0
- package/tools/bin/uninstall-project-launchd.sh +55 -0
- package/tools/bin/update-github-labels.sh +14 -0
- package/tools/bin/worker-status.sh +19 -0
- package/tools/bin/workflow-catalog.sh +77 -0
- package/tools/dashboard/app.js +286 -0
- package/tools/dashboard/dashboard_snapshot.py +466 -0
- package/tools/dashboard/index.html +41 -0
- package/tools/dashboard/server.py +64 -0
- package/tools/dashboard/styles.css +351 -0
- package/tools/templates/issue-prompt-template.md +109 -0
- package/tools/templates/pr-fix-template.md +120 -0
- package/tools/templates/pr-merge-repair-template.md +91 -0
- package/tools/templates/pr-review-template.md +62 -0
- package/tools/templates/scheduled-issue-prompt-template.md +62 -0
- package/tools/tests/test-agent-control-plane-npm-cli.sh +279 -0
- package/tools/tests/test-agent-github-update-labels-falls-back-to-repository-id.sh +56 -0
- package/tools/tests/test-agent-project-claude-session-wrapper-clears-stale-sandbox-artifacts.sh +89 -0
- package/tools/tests/test-agent-project-claude-session-wrapper-does-not-retry-provider-quota.sh +82 -0
- package/tools/tests/test-agent-project-claude-session-wrapper-retries-transient-failures.sh +90 -0
- package/tools/tests/test-agent-project-claude-session-wrapper-times-out.sh +73 -0
- package/tools/tests/test-agent-project-claude-session-wrapper.sh +103 -0
- package/tools/tests/test-agent-project-cleanup-session-orphan-fallback.sh +90 -0
- package/tools/tests/test-agent-project-cleanup-session-skip-worktree-cleanup.sh +90 -0
- package/tools/tests/test-agent-project-codex-live-thread-persist.sh +76 -0
- package/tools/tests/test-agent-project-codex-recovery.sh +731 -0
- package/tools/tests/test-agent-project-codex-session-wrapper-clears-stale-sandbox-artifacts.sh +105 -0
- package/tools/tests/test-agent-project-codex-session-wrapper.sh +97 -0
- package/tools/tests/test-agent-project-open-pr-worktree-config-prefix.sh +81 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-clears-stale-sandbox-artifacts.sh +109 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-infers-blocked-result-contract.sh +89 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-recovers-literal-env-artifacts.sh +113 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-recovers-version-mismatch.sh +135 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-resident.sh +179 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-reuses-existing-agent-after-add-race.sh +119 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper-terminates-rate-limit-hang.sh +91 -0
- package/tools/tests/test-agent-project-openclaw-session-wrapper.sh +117 -0
- package/tools/tests/test-agent-project-publish-issue-pr-prunes-stale-worktree-entry.sh +148 -0
- package/tools/tests/test-agent-project-publish-issue-pr-reads-archived-session.sh +146 -0
- package/tools/tests/test-agent-project-publish-issue-pr-recovers-final-head.sh +145 -0
- package/tools/tests/test-agent-project-publish-issue-pr-reuses-existing-worktree.sh +147 -0
- package/tools/tests/test-agent-project-reconcile-failure-reason.sh +456 -0
- package/tools/tests/test-agent-project-reconcile-issue-archived-session-fallback.sh +96 -0
- package/tools/tests/test-agent-project-reconcile-issue-before-blocked.sh +90 -0
- package/tools/tests/test-agent-project-reconcile-issue-host-verification-recovery-uses-recovered-worktree.sh +212 -0
- package/tools/tests/test-agent-project-reconcile-issue-host-verification-recovery.sh +207 -0
- package/tools/tests/test-agent-project-reconcile-issue-provider-quota-schedules-provider-cooldown.sh +101 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-backfills-lane-metadata-from-worker-key.sh +113 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-clears-stale-failed-summary.sh +117 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-initializes-shared-agent-home.sh +55 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-normalizes-runner-state.sh +125 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-records-invalid-contract-summary.sh +118 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-skips-duplicate-blocked-comment.sh +144 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-standardizes-no-commits-blocker.sh +145 -0
- package/tools/tests/test-agent-project-reconcile-issue-session-synthesizes-blocked-comment.sh +139 -0
- package/tools/tests/test-agent-project-reconcile-pr-blocked-host-recovery.sh +242 -0
- package/tools/tests/test-agent-project-reconcile-pr-guard-blocked-no-commit.sh +142 -0
- package/tools/tests/test-agent-project-reconcile-pr-provider-quota-schedules-provider-cooldown.sh +106 -0
- package/tools/tests/test-agent-project-reconcile-pr-session-initializes-shared-agent-home.sh +66 -0
- package/tools/tests/test-agent-project-reconcile-pr-updated-branch-noop.sh +129 -0
- package/tools/tests/test-audit-agent-worktrees-active-launch-skips-git-inspection.sh +69 -0
- package/tools/tests/test-audit-agent-worktrees-broken-worktree.sh +43 -0
- package/tools/tests/test-audit-agent-worktrees-pending-launch-owner.sh +46 -0
- package/tools/tests/test-audit-agent-worktrees-unreconciled-owner.sh +79 -0
- package/tools/tests/test-audit-issue-routing-managed-branch-globs.sh +56 -0
- package/tools/tests/test-branch-verification-guard-generated-artifacts.sh +72 -0
- package/tools/tests/test-branch-verification-guard-targeted-coverage.sh +125 -0
- package/tools/tests/test-codex-quota-manager-failure-driven-rotation.sh +178 -0
- package/tools/tests/test-codex-quota-wrapper.sh +37 -0
- package/tools/tests/test-contribution-docs.sh +18 -0
- package/tools/tests/test-control-plane-dashboard-runtime-smoke.sh +343 -0
- package/tools/tests/test-create-follow-up-issue.sh +73 -0
- package/tools/tests/test-dashboard-launchd-bootstrap.sh +55 -0
- package/tools/tests/test-flow-export-execution-env-exports-repo-id.sh +30 -0
- package/tools/tests/test-flow-export-github-cli-auth-env-prefers-git-credential.sh +48 -0
- package/tools/tests/test-flow-github-api-repo-fallback-preserves-input.sh +85 -0
- package/tools/tests/test-flow-github-api-repo-prefers-explicit-repository-id.sh +60 -0
- package/tools/tests/test-flow-github-issue-list-falls-back-to-repository-id.sh +64 -0
- package/tools/tests/test-flow-github-pr-list-falls-back-to-repository-id.sh +77 -0
- package/tools/tests/test-flow-resident-can-reuse-does-not-leak-metadata.sh +52 -0
- package/tools/tests/test-flow-resident-reap-stale-controllers.sh +63 -0
- package/tools/tests/test-flow-resolve-codex-quota-tools.sh +104 -0
- package/tools/tests/test-flow-runtime-doctor-profile-selection.sh +27 -0
- package/tools/tests/test-heartbeat-codex-pr-linked-issue-exclusion.sh +79 -0
- package/tools/tests/test-heartbeat-hooks-enqueue-resident-issue-for-idle-controller.sh +115 -0
- package/tools/tests/test-heartbeat-hooks-enqueue-resident-issue-for-live-lane-controller.sh +117 -0
- package/tools/tests/test-heartbeat-hooks-start-resident-issue-loop-claude.sh +96 -0
- package/tools/tests/test-heartbeat-hooks-start-resident-issue-loop-codex.sh +96 -0
- package/tools/tests/test-heartbeat-hooks-start-resident-issue-loop.sh +96 -0
- package/tools/tests/test-heartbeat-loop-auth-wait-does-not-consume-capacity.sh +170 -0
- package/tools/tests/test-heartbeat-loop-blocked-recovery-lane.sh +201 -0
- package/tools/tests/test-heartbeat-loop-blocked-recovery-vs-pr-reservation.sh +201 -0
- package/tools/tests/test-heartbeat-loop-idle-resident-controller-does-not-block-launches.sh +160 -0
- package/tools/tests/test-heartbeat-loop-pr-launch-dedup.sh +133 -0
- package/tools/tests/test-heartbeat-loop-provider-cooldown-suppresses-launches.sh +157 -0
- package/tools/tests/test-heartbeat-loop-reaps-stale-resident-controller.sh +181 -0
- package/tools/tests/test-heartbeat-loop-waiting-provider-resident-controller-does-not-block-launches.sh +160 -0
- package/tools/tests/test-heartbeat-ready-issues-blocked-recovery.sh +134 -0
- package/tools/tests/test-heartbeat-safe-auto-dynamic-concurrency.sh +162 -0
- package/tools/tests/test-heartbeat-safe-auto-no-tmux-sessions.sh +136 -0
- package/tools/tests/test-heartbeat-safe-auto-openclaw-skips-codex-quota.sh +139 -0
- package/tools/tests/test-heartbeat-safe-auto-quota-health-signal.sh +119 -0
- package/tools/tests/test-heartbeat-safe-auto-stale-shared-loop-pid-does-not-skip.sh +140 -0
- package/tools/tests/test-heartbeat-safe-auto-static-capacity-without-quota-cache.sh +142 -0
- package/tools/tests/test-heartbeat-safe-auto-zero-healthy-pools.sh +141 -0
- package/tools/tests/test-heartbeat-sync-issue-labels-empty-schedule.sh +65 -0
- package/tools/tests/test-heartbeat-sync-open-agent-prs-terminal-clears-running.sh +179 -0
- package/tools/tests/test-install-dashboard-launchd.sh +78 -0
- package/tools/tests/test-install-project-launchd-adds-tool-paths.sh +87 -0
- package/tools/tests/test-install-project-launchd.sh +110 -0
- package/tools/tests/test-issue-local-workspace-install-policy.sh +81 -0
- package/tools/tests/test-issue-publish-scope-guard-docs-signal.sh +70 -0
- package/tools/tests/test-issue-reconcile-hooks-success-clears-blocked.sh +36 -0
- package/tools/tests/test-kick-scheduler-requires-explicit-profile.sh +47 -0
- package/tools/tests/test-label-follow-up-issues-falls-back-to-repository-id.sh +132 -0
- package/tools/tests/test-manual-operator-entrypoints-require-explicit-profile.sh +64 -0
- package/tools/tests/test-package-funding-metadata.sh +21 -0
- package/tools/tests/test-package-public-metadata.sh +62 -0
- package/tools/tests/test-placeholder-worker-adapters.sh +38 -0
- package/tools/tests/test-pr-reconcile-hooks-refreshes-recurring-issue-checklist.sh +110 -0
- package/tools/tests/test-pr-risk-cohesive-mobile-locale-scope.sh +70 -0
- package/tools/tests/test-pr-risk-fix-label-semantics.sh +114 -0
- package/tools/tests/test-pr-risk-local-first-no-checks.sh +70 -0
- package/tools/tests/test-prepare-worktree-simple-repo-baseline.sh +67 -0
- package/tools/tests/test-profile-activate.sh +33 -0
- package/tools/tests/test-profile-adopt-allow-missing-repo.sh +68 -0
- package/tools/tests/test-profile-adopt-skip-workspace-sync-missing-file.sh +61 -0
- package/tools/tests/test-profile-adopt-syncs-anchor-and-workspace.sh +90 -0
- package/tools/tests/test-profile-smoke-collision.sh +44 -0
- package/tools/tests/test-profile-smoke-invalid-claude-config.sh +31 -0
- package/tools/tests/test-profile-smoke-invalid-provider-pool.sh +68 -0
- package/tools/tests/test-profile-smoke-repo-slug-mismatch.sh +36 -0
- package/tools/tests/test-profile-smoke.sh +45 -0
- package/tools/tests/test-project-init-force-and-skip-sync.sh +61 -0
- package/tools/tests/test-project-init-repo-slug-mismatch.sh +29 -0
- package/tools/tests/test-project-init.sh +66 -0
- package/tools/tests/test-project-launchd-bootstrap.sh +66 -0
- package/tools/tests/test-project-remove.sh +150 -0
- package/tools/tests/test-project-runtime-supervisor.sh +47 -0
- package/tools/tests/test-project-runtimectl-launchd.sh +115 -0
- package/tools/tests/test-project-runtimectl-missing-profile.sh +54 -0
- package/tools/tests/test-project-runtimectl-start-falls-back-to-bootstrap.sh +108 -0
- package/tools/tests/test-project-runtimectl-status-reports-supervisor-as-heartbeat-parent.sh +95 -0
- package/tools/tests/test-project-runtimectl-status-supervisor-running.sh +59 -0
- package/tools/tests/test-project-runtimectl-stop-cancels-pending-kick.sh +85 -0
- package/tools/tests/test-project-runtimectl-stop-clears-running-labels.sh +78 -0
- package/tools/tests/test-project-runtimectl.sh +212 -0
- package/tools/tests/test-provider-cooldown-state-prefers-runtime-worker-context.sh +39 -0
- package/tools/tests/test-provider-cooldown-state.sh +59 -0
- package/tools/tests/test-public-repo-docs.sh +159 -0
- package/tools/tests/test-reconcile-pr-worker-acp-config-routing.sh +75 -0
- package/tools/tests/test-render-dashboard-snapshot.sh +149 -0
- package/tools/tests/test-render-flow-config-demo-profile.sh +36 -0
- package/tools/tests/test-render-flow-config-provider-pool-fallback.sh +81 -0
- package/tools/tests/test-render-flow-config.sh +52 -0
- package/tools/tests/test-run-codex-task-claude-routing.sh +125 -0
- package/tools/tests/test-run-codex-task-codex-resident-routing.sh +108 -0
- package/tools/tests/test-run-codex-task-kilo-routing.sh +98 -0
- package/tools/tests/test-run-codex-task-openclaw-resident-routing.sh +117 -0
- package/tools/tests/test-run-codex-task-openclaw-routing.sh +113 -0
- package/tools/tests/test-run-codex-task-opencode-routing.sh +98 -0
- package/tools/tests/test-run-codex-task-provider-pool-fallback-routing.sh +146 -0
- package/tools/tests/test-scaffold-profile.sh +108 -0
- package/tools/tests/test-serve-dashboard.sh +93 -0
- package/tools/tests/test-start-issue-worker-blocked-context.sh +129 -0
- package/tools/tests/test-start-issue-worker-blocks-complete-recurring-checklist.sh +189 -0
- package/tools/tests/test-start-issue-worker-local-install-routing.sh +157 -0
- package/tools/tests/test-start-issue-worker-profile-template-routing.sh +149 -0
- package/tools/tests/test-start-issue-worker-recurring-resident-reuse-codex.sh +212 -0
- package/tools/tests/test-start-issue-worker-recurring-resident-reuse.sh +219 -0
- package/tools/tests/test-start-issue-worker-renders-verification-snippet.sh +155 -0
- package/tools/tests/test-start-issue-worker-resident-reuse-falls-back-to-new-worktree.sh +199 -0
- package/tools/tests/test-start-pr-fix-worker-host-blocker-context.sh +275 -0
- package/tools/tests/test-start-resident-issue-loop-adopts-next-recurring-issue.sh +185 -0
- package/tools/tests/test-start-resident-issue-loop-clears-pending-while-waiting-due.sh +152 -0
- package/tools/tests/test-start-resident-issue-loop-consumes-queued-lease.sh +186 -0
- package/tools/tests/test-start-resident-issue-loop-fails-over-provider-pool.sh +212 -0
- package/tools/tests/test-start-resident-issue-loop-immediate-cycles.sh +148 -0
- package/tools/tests/test-start-resident-issue-loop-waits-for-provider.sh +194 -0
- package/tools/tests/test-start-resident-issue-loop-waits-for-terminal-reconcile-status.sh +198 -0
- package/tools/tests/test-start-resident-issue-loop-yields-to-live-lane-controller.sh +145 -0
- package/tools/tests/test-sync-pr-labels-fix-lane-uses-repair-queued.sh +67 -0
- package/tools/tests/test-sync-recurring-issue-checklist-backfills-workflow-complete-blocker.sh +70 -0
- package/tools/tests/test-sync-recurring-issue-checklist.sh +95 -0
- package/tools/tests/test-sync-shared-agent-home-local-source-root.sh +66 -0
- package/tools/tests/test-sync-shared-agent-home-preserves-unrelated-workflow-catalog-skill.sh +47 -0
- package/tools/tests/test-test-smoke.sh +86 -0
- package/tools/tests/test-uninstall-project-launchd.sh +37 -0
- package/tools/tests/test-update-github-labels-prefers-sibling-helper.sh +49 -0
- package/tools/tests/test-workflow-catalog.sh +43 -0
- package/tools/vendor/codex-quota/LICENSE +21 -0
- package/tools/vendor/codex-quota/README.md +459 -0
- package/tools/vendor/codex-quota/codex-quota.js +261 -0
- package/tools/vendor/codex-quota/lib/claude-accounts.js +226 -0
- package/tools/vendor/codex-quota/lib/claude-oauth.js +174 -0
- package/tools/vendor/codex-quota/lib/claude-tokens.js +471 -0
- package/tools/vendor/codex-quota/lib/claude-usage.js +929 -0
- package/tools/vendor/codex-quota/lib/codex-accounts.js +205 -0
- package/tools/vendor/codex-quota/lib/codex-tokens.js +326 -0
- package/tools/vendor/codex-quota/lib/codex-usage.js +32 -0
- package/tools/vendor/codex-quota/lib/color.js +72 -0
- package/tools/vendor/codex-quota/lib/constants.js +57 -0
- package/tools/vendor/codex-quota/lib/container.js +143 -0
- package/tools/vendor/codex-quota/lib/display.js +1111 -0
- package/tools/vendor/codex-quota/lib/fs.js +63 -0
- package/tools/vendor/codex-quota/lib/handlers.js +2060 -0
- package/tools/vendor/codex-quota/lib/jwt.js +33 -0
- package/tools/vendor/codex-quota/lib/oauth.js +486 -0
- package/tools/vendor/codex-quota/lib/paths.js +34 -0
- package/tools/vendor/codex-quota/lib/prompts.js +44 -0
- package/tools/vendor/codex-quota/lib/sync.js +1438 -0
- package/tools/vendor/codex-quota/lib/token-match.js +96 -0
- package/tools/vendor/codex-quota-manager/scripts/auto-switch.sh +500 -0
- package/tools/vendor/codex-quota-manager/scripts/batch-add.sh +123 -0
|
@@ -0,0 +1,2127 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# flow-config-lib.sh — profile/config resolution helpers for the agent
# control plane. This file is a library: it is sourced by the tools in
# tools/bin, and its functions assume flow-shell-lib.sh is loaded first.
set -euo pipefail

# Absolute directory of this file, used to locate the sibling shell library.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/flow-shell-lib.sh"
# Provided by flow-shell-lib.sh; presumably mirrors legacy F_LOSNING_* env
# vars into their ACP_* aliases read below — TODO(review): confirm in
# flow-shell-lib.sh.
flow_export_project_env_aliases
# Print an explicitly configured GitHub repository id, if one is set in the
# environment. Fails (status 1) when no id is configured, or when the caller
# asked about a slug that differs from the configured repo slug.
# Arguments: $1 - optional repo slug the id must correspond to.
flow_explicit_github_repo_id() {
  local slug_arg="${1:-}"
  local slug_env="${ACP_REPO_SLUG:-${F_LOSNING_REPO_SLUG:-}}"
  local repo_id="${ACP_REPO_ID:-${F_LOSNING_REPO_ID:-${ACP_GITHUB_REPOSITORY_ID:-${F_LOSNING_GITHUB_REPOSITORY_ID:-}}}}"

  if [[ -z "${repo_id}" ]]; then
    return 1
  fi

  # The configured id only applies to the configured slug; refuse to hand it
  # out for a different repository.
  if [[ -n "${slug_arg}" && -n "${slug_env}" ]] && [[ "${slug_env}" != "${slug_arg}" ]]; then
    return 1
  fi

  printf '%s\n' "${repo_id}"
}
# Print the explicitly selected profile id from the environment, or an
# empty line when neither ACP_PROJECT_ID nor AGENT_PROJECT_ID is set.
flow_explicit_profile_id() {
  local profile="${ACP_PROJECT_ID:-${AGENT_PROJECT_ID:-}}"
  printf '%s\n' "${profile}"
}
# Print the directory that holds installed control-plane profiles.
# Precedence: AGENT_CONTROL_PLANE_PROFILE_ROOT, then ACP_PROFILE_REGISTRY_ROOT,
# then <platform home>/control-plane/profiles.
resolve_flow_profile_registry_root() {
  local runtime_home="${AGENT_PLATFORM_HOME:-${HOME}/.agent-runtime}"
  local registry_root="${AGENT_CONTROL_PLANE_PROFILE_ROOT:-${ACP_PROFILE_REGISTRY_ROOT:-${runtime_home}/control-plane/profiles}}"
  printf '%s\n' "${registry_root}"
}
# Emit one profile id per line for every <root>/<id>/control-plane.yaml
# found directly under the given registry root, in sorted path order.
# A missing or empty root produces no output and still succeeds.
flow_list_profiles_in_root() {
  local root="${1:-}"
  local manifest=""
  local id=""

  if [[ ! -d "${root}" ]]; then
    return 0
  fi

  # Each profile is a directory whose id is the directory name holding the
  # control-plane.yaml manifest.
  while IFS= read -r manifest; do
    if [[ -z "${manifest}" ]]; then
      continue
    fi
    id="$(basename "$(dirname "${manifest}")")"
    if [[ -n "${id}" ]]; then
      printf '%s\n' "${id}"
    fi
  done < <(find "${root}" -mindepth 2 -maxdepth 2 -type f -name 'control-plane.yaml' 2>/dev/null | sort)
}
# List the profile ids installed under the resolved registry root.
flow_list_installed_profile_ids() {
  local registry_root
  registry_root="$(resolve_flow_profile_registry_root)"
  flow_list_profiles_in_root "${registry_root}"
}
# Print the directory for a profile id. When the registry contains a
# control-plane.yaml for that id, the real directory is printed; otherwise
# the would-be registry path is printed so the caller can surface a useful
# missing-file error.
# Arguments: $1 - flow/skill root (kept for caller compatibility),
#            $2 - profile id (required).
flow_find_profile_dir_by_id() {
  local skill_root="${1:-}"
  local id="${2:?profile id required}"
  local registry=""
  local dir=""

  # Resolve a skill root even though it is no longer used below, so the
  # argument contract matches older callers.
  if [[ -z "${skill_root}" ]]; then
    skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  registry="$(resolve_flow_profile_registry_root)"
  dir="${registry}/${id}"
  if [[ -f "${dir}/control-plane.yaml" ]]; then
    printf '%s\n' "${dir}"
  else
    printf '%s/%s\n' "${registry}" "${id}"
  fi
}
# Count the non-empty profile ids visible to flow_list_profile_ids.
# Always prints a number (0 when no profiles exist).
flow_profile_count() {
  local root_dir="${1:-}"

  [[ -n "${root_dir}" ]] || root_dir="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_list_profile_ids "${root_dir}" | awk 'NF { count += 1 } END { print count + 0 }'
}
# Resolve the profile id to use when the caller did not pick one.
# Preference order:
#   1. the operator-pinned default (env), if its manifest exists;
#   2. the literal profile "default", if its manifest exists;
#   3. the first installed profile that is not "demo";
#   4. any installed profile;
#   5. the literal string "default" as a last resort.
flow_default_profile_id() {
  local skill_root="${1:-}"
  local pinned="${AGENT_CONTROL_PLANE_DEFAULT_PROFILE_ID:-${ACP_DEFAULT_PROFILE_ID:-${AGENT_PROJECT_DEFAULT_PROFILE_ID:-}}}"
  local pick=""

  [[ -n "${skill_root}" ]] || skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  # Prefer explicitly pinned profiles that actually have a manifest on disk.
  for pick in "${pinned}" "default"; do
    if [[ -n "${pick}" && -f "$(flow_find_profile_dir_by_id "${skill_root}" "${pick}")/control-plane.yaml" ]]; then
      printf '%s\n' "${pick}"
      return 0
    fi
  done

  # Fall back to installed profiles, skipping the demo profile when any
  # other profile is available.
  pick="$(flow_list_profile_ids "${skill_root}" | grep -v '^demo$' | head -n 1 || true)"
  if [[ -z "${pick}" ]]; then
    pick="$(flow_list_profile_ids "${skill_root}" | head -n 1 || true)"
  fi
  if [[ -n "${pick}" ]]; then
    printf '%s\n' "${pick}"
    return 0
  fi

  printf 'default\n'
}
# Classify how the current profile was (or would be) selected:
#   explicit               - an env var names the profile;
#   implicit-default       - several profiles exist but none was named;
#   single-profile-default - at most one profile exists, default is safe.
flow_profile_selection_mode() {
  local skill_root="${1:-}"
  local chosen=""
  local installed_count="0"

  [[ -n "${skill_root}" ]] || skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  chosen="$(flow_explicit_profile_id)"
  if [[ -n "${chosen}" ]]; then
    printf 'explicit\n'
    return 0
  fi

  installed_count="$(flow_profile_count "${skill_root}")"
  if [[ "${installed_count}" -gt 1 ]]; then
    printf 'implicit-default\n'
  else
    printf 'single-profile-default\n'
  fi
}
# Print an operator hint when profile selection would fall back implicitly;
# prints nothing in the other selection modes.
flow_profile_selection_hint() {
  local skill_root="${1:-}"

  [[ -n "${skill_root}" ]] || skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  case "$(flow_profile_selection_mode "${skill_root}")" in
    implicit-default)
      printf 'Set ACP_PROJECT_ID=<id> or AGENT_PROJECT_ID=<id> when multiple available profiles exist.\n'
      ;;
  esac
}
# Print the guard error shown when a command requires explicit profile
# selection, followed by the selection hint when one applies.
# Arguments: $1 - flow/skill root, $2 - command name for the message.
flow_profile_guard_message() {
  local skill_root="${1:-}"
  local cmd_label="${2:-this command}"
  local extra_hint=""

  [[ -n "${skill_root}" ]] || skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  extra_hint="$(flow_profile_selection_hint "${skill_root}")"
  printf 'explicit profile selection required for %s when multiple available profiles exist.\n' "${cmd_label}"
  [[ -z "${extra_hint}" ]] || printf '%s\n' "${extra_hint}"
}
# Fail (status 1, message on stderr) when profile selection would be
# implicit while several profiles exist. Succeeds otherwise, or when the
# operator opts into implicit selection via ACP_ALLOW_IMPLICIT_PROFILE_SELECTION=1.
flow_require_explicit_profile_selection() {
  local skill_root="${1:-}"
  local cmd_label="${2:-this command}"

  [[ -n "${skill_root}" ]] || skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  # Operator escape hatch.
  [[ "${ACP_ALLOW_IMPLICIT_PROFILE_SELECTION:-0}" != "1" ]] || return 0

  if [[ "$(flow_profile_selection_mode "${skill_root}")" != "implicit-default" ]]; then
    return 0
  fi

  flow_profile_guard_message "${skill_root}" "${cmd_label}" >&2
  return 1
}
# Print the path of the control-plane config to use. Explicit env overrides
# win when they point at an existing file; otherwise the selected profile's
# control-plane.yaml path is printed (even if the file does not exist, so
# the caller can report a precise missing-file error).
resolve_flow_config_yaml() {
  local entry_script="${1:-${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}}"
  local skill_root
  local profile=""
  local override=""
  local config_path=""

  skill_root="$(resolve_flow_skill_dir "${entry_script}")"
  profile="${ACP_PROJECT_ID:-${AGENT_PROJECT_ID:-$(flow_default_profile_id "${skill_root}")}}"

  # Env overrides, in precedence order; only existing files are honored.
  for override in \
    "${AGENT_CONTROL_PLANE_CONFIG:-}" \
    "${ACP_CONFIG:-}" \
    "${AGENT_PROJECT_CONFIG_PATH:-}" \
    "${F_LOSNING_FLOW_CONFIG:-}"; do
    if [[ -n "${override}" && -f "${override}" ]]; then
      printf '%s\n' "${override}"
      return 0
    fi
  done

  config_path="$(flow_find_profile_dir_by_id "${skill_root}" "${profile}")/control-plane.yaml"
  printf '%s\n' "${config_path}"
}
# List all installed profile ids, one per line, blank lines removed,
# de-duplicated and sorted. Always succeeds, even with no profiles.
flow_list_profile_ids() {
  local skill_root="${1:-}"
  local ids=""

  [[ -n "${skill_root}" ]] || skill_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  ids="$(flow_list_installed_profile_ids | awk 'NF { print }' | sort -u)"

  if [[ -n "${ids}" ]]; then
    printf '%s\n' "${ids}"
  fi
}
# Derive the "owner/name" slug from a git remote that points at github.com.
# Arguments: $1 - repository work tree, $2 - remote name (default: origin).
# Returns 1 when the directory, remote, or URL shape does not match a
# github.com remote.
flow_git_remote_repo_slug() {
  local work_tree="${1:-}"
  local remote="${2:-origin}"
  local url=""
  local slug=""

  if [[ -z "${work_tree}" || ! -d "${work_tree}" ]]; then
    return 1
  fi

  url="$(git -C "${work_tree}" remote get-url "${remote}" 2>/dev/null || true)"
  if [[ -z "${url}" ]]; then
    return 1
  fi

  # Strip an optional .git suffix, then any supported github.com prefix.
  slug="${url%.git}"
  slug="${slug#ssh://git@github.com/}"
  slug="${slug#git@github.com:}"
  slug="${slug#https://github.com/}"
  slug="${slug#http://github.com/}"

  # If nothing was stripped, this is not a github.com remote.
  [[ "${slug}" != "${url%.git}" ]] || return 1

  # Accept only a plain owner/name pair.
  [[ "${slug}" =~ ^[^/]+/[^/]+$ ]] || return 1
  printf '%s\n' "${slug}"
}
# Ask the configured git credential helper for an HTTPS token for the given
# "owner/name" slug on github.com and print it.
# Arguments: $1 - repository slug (required).
# Returns 1 when the slug is empty, git is unavailable, or no password is
# produced by the helper.
flow_git_credential_token_for_repo_slug() {
  local repo_slug="${1:-}"
  local credential_payload=""
  local token=""

  [[ -n "${repo_slug}" ]] || return 1
  command -v git >/dev/null 2>&1 || return 1

  credential_payload="$(
    printf 'protocol=https\nhost=github.com\npath=%s.git\n\n' "${repo_slug}" \
      | git credential fill 2>/dev/null || true
  )"
  # Strip only the leading "password=" so tokens that themselves contain
  # '=' are preserved intact (awk -F= 'print $2' would truncate them).
  token="$(awk '/^password=/{ sub(/^password=/, ""); print; exit }' <<<"${credential_payload}")"
  [[ -n "${token}" ]] || return 1

  printf '%s\n' "${token}"
}
# Ensure GH_TOKEN is exported for the GitHub CLI, trying in order: an
# existing GH_TOKEN, GITHUB_TOKEN, the git credential helper for the given
# slug, and finally GITHUB_PERSONAL_ACCESS_TOKEN. Always returns 0.
# Arguments: $1 - repository slug passed to the credential helper.
flow_export_github_cli_auth_env() {
  local slug="${1:-}"
  local credential_token=""

  # Respect an already-populated GH_TOKEN.
  [[ -z "${GH_TOKEN:-}" ]] || return 0

  if [[ -n "${GITHUB_TOKEN:-}" ]]; then
    export GH_TOKEN="${GITHUB_TOKEN}"
    return 0
  fi

  credential_token="$(flow_git_credential_token_for_repo_slug "${slug}" || true)"
  if [[ -n "${credential_token}" ]]; then
    export GH_TOKEN="${credential_token}"
    return 0
  fi

  if [[ -n "${GITHUB_PERSONAL_ACCESS_TOKEN:-}" ]]; then
    export GH_TOKEN="${GITHUB_PERSONAL_ACCESS_TOKEN}"
  fi
}
# Map a repo slug to the name of the global shell variable that caches its
# numeric repository id (non-alphanumerics become underscores).
flow_github_repo_id_cache_var() {
  local slug="${1:-}"
  local safe_suffix
  safe_suffix="${slug//[^A-Za-z0-9]/_}"
  printf '%s%s\n' 'FLOW_GITHUB_REPO_ID_CACHE_' "${safe_suffix}"
}
|
|
310
|
+
|
|
311
|
+
# Resolve the numeric GitHub repository id for an "owner/repo" slug.
# Resolution order: in-process cache, an explicitly supplied id
# (flow_explicit_github_repo_id -- defined elsewhere in this file;
# presumably reads configuration, TODO confirm), then a paginated scan of
# the authenticated user's repositories via `gh api`.
# The id is memoised in a dynamically named global variable so repeated
# lookups in one shell process hit the API at most once.
# Returns 1 when the slug is empty, gh is missing, or no match is found.
flow_github_repo_id_for_repo_slug() {
  local repo_slug="${1:-}"
  local cache_var=""
  local cached_value=""
  local repos_pages_json=""
  local repo_id=""

  [[ -n "${repo_slug}" ]] || return 1
  command -v gh >/dev/null 2>&1 || return 1

  # Fast path: a previous call already cached the id for this slug.
  cache_var="$(flow_github_repo_id_cache_var "${repo_slug}")"
  cached_value="${!cache_var:-}"
  if [[ -n "${cached_value}" ]]; then
    printf '%s\n' "${cached_value}"
    return 0
  fi

  repo_id="$(flow_explicit_github_repo_id "${repo_slug}" || true)"
  if [[ -n "${repo_id}" ]]; then
    # printf -v writes into the dynamically named cache variable.
    printf -v "${cache_var}" '%s' "${repo_id}"
    printf '%s\n' "${repo_id}"
    return 0
  fi

  # Slow path: list every repo visible to the token (--slurp collects all
  # --paginate pages into one JSON array of pages).
  flow_export_github_cli_auth_env "${repo_slug}"
  repos_pages_json="$(
    gh api 'user/repos?per_page=100&visibility=all&affiliation=owner,collaborator,organization_member' \
      --paginate \
      --slurp 2>/dev/null || true
  )"
  [[ -n "${repos_pages_json}" ]] || return 1

  repo_id="$(
    REPOS_PAGES_JSON="${repos_pages_json}" TARGET_REPO_SLUG="${repo_slug}" python3 - <<'PY'
import json
import os
import sys

pages = json.loads(os.environ.get("REPOS_PAGES_JSON", "[]") or "[]")
target = os.environ.get("TARGET_REPO_SLUG", "")

# Each element may be a page (list of repos) or a bare repo object;
# print the id of the first repo whose full_name matches the slug.
for page in pages:
    if isinstance(page, list):
        for repo in page:
            if isinstance(repo, dict) and repo.get("full_name") == target:
                value = repo.get("id")
                if value is not None:
                    print(value)
                    sys.exit(0)
    elif isinstance(page, dict) and page.get("full_name") == target:
        value = page.get("id")
        if value is not None:
            print(value)
            sys.exit(0)
PY
  )"
  [[ -n "${repo_id}" ]] || return 1

  printf -v "${cache_var}" '%s' "${repo_id}"
  printf '%s\n' "${repo_id}"
}
|
|
372
|
+
|
|
373
|
+
# Emit the id-based REST route prefix ("repositories/<id>") for a slug.
# Fails when the numeric repository id cannot be resolved.
flow_github_repo_api_prefix() {
  local slug="${1:-}"
  local numeric_id=""

  numeric_id="$(flow_github_repo_id_for_repo_slug "${slug}")" || return 1
  printf '%s\n' "repositories/${numeric_id}"
}
|
|
380
|
+
|
|
381
|
+
# Call the GitHub REST API for a repository with a resilience fallback:
# first via the slug route "repos/<owner>/<repo>/<route>", then -- if that
# request fails -- via the id-based route "repositories/<id>/<route>"
# resolved through flow_github_repo_api_prefix.
#   $1   - repo slug ("owner/repo")
#   $2   - route below the repository (leading "/" tolerated; may be empty)
#   $3.. - extra arguments passed through to `gh api`
# Because the same request may be issued twice, a `--input -` payload is
# spooled from stdin into a temp file once, and both attempts read the file.
# NOTE(review): cleanup relies on explicit `rm -f` on every exit path
# rather than a trap; an early signal could leak the temp file -- confirm
# whether a RETURN/EXIT trap is acceptable in this codebase.
flow_github_api_repo() {
  local repo_slug="${1:?repo slug required}"
  local route="${2:-}"
  local repo_prefix=""
  local direct_route="repos/${repo_slug}"
  local fallback_route=""
  local output=""
  local stdin_file=""
  local status=0
  local expect_input_value="false"
  local arg=""
  local index=0
  local gh_arg_count=0
  local -a gh_args=()

  route="${route#/}"
  if [[ -n "${route}" ]]; then
    direct_route="${direct_route}/${route}"
  fi

  if [[ $# -gt 2 ]]; then
    gh_args=("${@:3}")
    gh_arg_count="${#gh_args[@]}"
  fi
  # Rewrite `--input -` to `--input <tempfile>`: capture stdin exactly once
  # so the payload survives a retry against the fallback route.
  for ((index = 0; index < ${#gh_args[@]}; index++)); do
    arg="${gh_args[${index}]}"
    if [[ "${expect_input_value}" == "true" ]]; then
      if [[ "${arg}" == "-" ]]; then
        if [[ -z "${stdin_file}" ]]; then
          stdin_file="$(mktemp)"
          cat >"${stdin_file}"
        fi
        gh_args[${index}]="${stdin_file}"
      fi
      expect_input_value="false"
    elif [[ "${arg}" == "--input" ]]; then
      expect_input_value="true"
    fi
  done

  # First attempt: human-readable slug route (errors silenced; the
  # fallback below reports failures).
  flow_export_github_cli_auth_env "${repo_slug}"
  if [[ "${gh_arg_count}" -gt 0 ]]; then
    output="$(gh api "${direct_route}" "${gh_args[@]}" 2>/dev/null)" && {
      printf '%s' "${output}"
      rm -f "${stdin_file}"
      return 0
    }
  elif output="$(gh api "${direct_route}" 2>/dev/null)"; then
    printf '%s' "${output}"
    rm -f "${stdin_file}"
    return 0
  fi

  # Second attempt: id-based route, which keeps working across renames.
  if ! repo_prefix="$(flow_github_repo_api_prefix "${repo_slug}")"; then
    rm -f "${stdin_file}"
    return 1
  fi
  fallback_route="${repo_prefix}"
  if [[ -n "${route}" ]]; then
    fallback_route="${fallback_route}/${route}"
  fi
  if [[ "${gh_arg_count}" -gt 0 ]]; then
    output="$(gh api "${fallback_route}" "${gh_args[@]}")" && {
      printf '%s' "${output}"
      rm -f "${stdin_file}"
      return 0
    }
  elif output="$(gh api "${fallback_route}")"; then
    printf '%s' "${output}"
    rm -f "${stdin_file}"
    return 0
  fi
  # Capture gh's exit status before rm can overwrite $?.
  status=$?
  rm -f "${stdin_file}"
  return "${status}"
}
|
|
457
|
+
|
|
458
|
+
# Percent-encode a value (no characters exempt) for use in a REST route.
flow_github_urlencode() {
  local value="${1:-}"

  RAW_VALUE="${value}" python3 - <<'PY'
import os
import urllib.parse

raw = os.environ.get("RAW_VALUE", "")
print(urllib.parse.quote(raw, safe=""))
PY
}
|
|
468
|
+
|
|
469
|
+
# Fetch one issue as JSON in the `gh issue view --json` field shape.
# Fast path calls gh directly; otherwise the raw REST payloads for the
# issue and its (paginated) comments are normalised with python3 so that
# snake_case REST keys become gh's camelCase keys.
#   $1 - repo slug, $2 - issue number
flow_github_issue_view_json() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local issue_json=""
  local comment_pages_json=""

  if issue_json="$(gh issue view "${issue_id}" -R "${repo_slug}" --json number,state,title,body,url,labels,comments,createdAt,updatedAt 2>/dev/null)"; then
    printf '%s\n' "${issue_json}"
    return 0
  fi

  # REST fallback; comment fetch is best-effort and degrades to [].
  issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${issue_id}")" || return 1
  comment_pages_json="$(
    flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments?per_page=100" --paginate --slurp 2>/dev/null || printf '[]\n'
  )"

  ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY'
import json
import os

issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
# Flatten --slurp output: elements may be pages (lists) or bare objects.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

# Mirror gh's JSON shape: camelCase keys, upper-case state, "" defaults.
result = {
    "number": issue.get("number"),
    "state": str(issue.get("state", "")).upper(),
    "title": issue.get("title") or "",
    "body": issue.get("body") or "",
    "url": issue.get("html_url") or issue.get("url") or "",
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "createdAt": issue.get("created_at") or "",
    "updatedAt": issue.get("updated_at") or "",
}

print(json.dumps(result))
PY
}
|
|
522
|
+
|
|
523
|
+
# List issues as JSON in the `gh issue list --json` field shape.
#   $1 - repo slug, $2 - state (default open), $3 - max entries (default 100)
# REST fallback paginates /issues, drops pull requests (the REST issues
# endpoint includes PRs), normalises keys, and truncates to the limit.
flow_github_issue_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local issues_json=""
  local per_page="100"

  if issues_json="$(gh issue list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,createdAt,updatedAt,title,url,labels 2>/dev/null)"; then
    printf '%s\n' "${issues_json}"
    return 0
  fi

  # Fetch fewer per page when the caller wants a small numeric limit.
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  issues_json="$(
    flow_github_api_repo "${repo_slug}" "issues?state=${state}&per_page=${per_page}" --paginate --slurp
  )" || return 1

  ISSUE_PAGES_JSON="${issues_json}" ISSUE_LIMIT="${limit}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("ISSUE_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("ISSUE_LIMIT", "100") or "100")
issues = []

# Flatten --slurp output: elements may be pages (lists) or bare objects.
for page in pages:
    if isinstance(page, list):
        issues.extend(page)
    elif isinstance(page, dict):
        issues.append(page)

result = []
for issue in issues:
    if not isinstance(issue, dict):
        continue
    # The REST issues endpoint also returns PRs; gh's issue list does not.
    if issue.get("pull_request"):
        continue
    result.append({
        "number": issue.get("number"),
        "createdAt": issue.get("created_at") or "",
        "updatedAt": issue.get("updated_at") or "",
        "title": issue.get("title") or "",
        "url": issue.get("html_url") or issue.get("url") or "",
        "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    })

print(json.dumps(result[:limit]))
PY
}
|
|
575
|
+
|
|
576
|
+
# Fetch one pull request as JSON in the `gh pr view --json` field shape.
# Fast path calls gh directly. The REST fallback stitches together the
# pull payload, its issue-side labels/comments, and the head commit's
# check-runs plus legacy commit statuses, then normalises everything to
# gh's camelCase field names.
#   $1 - repo slug, $2 - PR number
flow_github_pr_view_json() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local pr_json=""
  local issue_json=""
  local comment_pages_json=""
  local head_sha=""
  local check_runs_json="{}"
  local status_json="{}"

  if pr_json="$(gh pr view "${pr_number}" -R "${repo_slug}" --json number,title,body,url,headRefName,baseRefName,mergeStateStatus,statusCheckRollup,labels,comments,state,isDraft 2>/dev/null)"; then
    printf '%s\n' "${pr_json}"
    return 0
  fi

  # REST fallback: labels and comments live on the issue side of a PR.
  pr_json="$(flow_github_api_repo "${repo_slug}" "pulls/${pr_number}")" || return 1
  issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}")" || return 1
  comment_pages_json="$(
    flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null || printf '[]\n'
  )"
  # Extract head.sha so check results can be fetched for the head commit.
  head_sha="$(
    PR_JSON="${pr_json}" python3 - <<'PY'
import json
import os

payload = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
head = payload.get("head") or {}
print(head.get("sha") or "")
PY
  )"
  if [[ -n "${head_sha}" ]]; then
    # Best effort: missing checks degrade to empty objects.
    check_runs_json="$(
      flow_github_api_repo "${repo_slug}" "commits/${head_sha}/check-runs?per_page=100" 2>/dev/null || printf '{}\n'
    )"
    status_json="$(
      flow_github_api_repo "${repo_slug}" "commits/${head_sha}/status" 2>/dev/null || printf '{}\n'
    )"
  fi

  PR_JSON="${pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" CHECK_RUNS_JSON="${check_runs_json}" STATUS_JSON="${status_json}" python3 - <<'PY'
import json
import os

pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
check_runs_payload = json.loads(os.environ.get("CHECK_RUNS_JSON", "{}") or "{}")
status_payload = json.loads(os.environ.get("STATUS_JSON", "{}") or "{}")

# Flatten --slurp output: elements may be pages (lists) or bare objects.
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

# Merge modern check-runs and legacy commit statuses into one rollup list.
status_check_rollup = []
for run in check_runs_payload.get("check_runs", []) or []:
    if not isinstance(run, dict):
        continue
    status_check_rollup.append({
        "name": run.get("name") or "",
        "status": run.get("status") or "",
        "conclusion": run.get("conclusion") or "",
    })
for item in status_payload.get("statuses", []) or []:
    if not isinstance(item, dict):
        continue
    state = item.get("state") or ""
    status_check_rollup.append({
        "context": item.get("context") or "",
        "status": state,
        "conclusion": state,
    })

# REST reports merged PRs as state "closed"; gh reports "MERGED".
pr_state = str(pr.get("state", "")).upper()
if pr.get("merged_at"):
    pr_state = "MERGED"

result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or issue.get("body") or "",
    "url": pr.get("html_url") or pr.get("url") or "",
    "headRefName": ((pr.get("head") or {}).get("ref")) or "",
    "baseRefName": ((pr.get("base") or {}).get("ref")) or "",
    "mergeStateStatus": str(pr.get("mergeable_state") or "UNKNOWN").upper(),
    "statusCheckRollup": status_check_rollup,
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
    "state": pr_state,
    "isDraft": bool(pr.get("draft")),
    "createdAt": pr.get("created_at") or "",
    "updatedAt": pr.get("updated_at") or "",
    "mergedAt": pr.get("merged_at") or "",
}

print(json.dumps(result))
PY
}
|
|
685
|
+
|
|
686
|
+
# List pull requests as JSON in the `gh pr list --json` field shape.
#   $1 - repo slug, $2 - state (open|closed|merged|all; default open),
#   $3 - max entries (default 100)
# REST fallback: page through /pulls, filter/truncate in python, then
# enrich each PR with its issue-side labels and comments, writing one JSON
# object per line to a temp file that is finally folded into an array.
flow_github_pr_list_json() {
  local repo_slug="${1:?repo slug required}"
  local state="${2:-open}"
  local limit="${3:-100}"
  local pr_json=""
  local per_page="100"
  local pulls_state="${state}"
  local pull_pages_json=""
  local selected_prs_json=""
  local item_jsonl_file=""
  local current_pr_json=""
  local issue_json=""
  local comment_pages_json=""
  local pr_number=""

  if pr_json="$(gh pr list -R "${repo_slug}" --state "${state}" --limit "${limit}" --json number,title,body,url,headRefName,labels,comments,createdAt,mergedAt,isDraft 2>/dev/null)"; then
    printf '%s\n' "${pr_json}"
    return 0
  fi

  # The REST pulls endpoint has no "merged" state; query closed PRs and
  # filter on merged_at below.
  if [[ "${state}" == "merged" ]]; then
    pulls_state="closed"
  fi
  if [[ "${limit}" =~ ^[0-9]+$ ]] && (( limit > 0 && limit < 100 )); then
    per_page="${limit}"
  fi

  pull_pages_json="$(
    flow_github_api_repo "${repo_slug}" "pulls?state=${pulls_state}&per_page=${per_page}" --paginate --slurp
  )" || return 1

  # Pass 1: flatten pages, apply the merged filter, truncate, normalise keys.
  selected_prs_json="$(
    PULL_PAGES_JSON="${pull_pages_json}" PR_LIMIT="${limit}" PR_STATE_FILTER="${state}" python3 - <<'PY'
import json
import os

pages = json.loads(os.environ.get("PULL_PAGES_JSON", "[]") or "[]")
limit = int(os.environ.get("PR_LIMIT", "100") or "100")
state_filter = os.environ.get("PR_STATE_FILTER", "open")
pulls = []

for page in pages:
    if isinstance(page, list):
        pulls.extend(page)
    elif isinstance(page, dict):
        pulls.append(page)

result = []
for pr in pulls:
    if not isinstance(pr, dict):
        continue
    if state_filter == "merged" and not pr.get("merged_at"):
        continue
    result.append({
        "number": pr.get("number"),
        "title": pr.get("title") or "",
        "body": pr.get("body") or "",
        "url": pr.get("html_url") or pr.get("url") or "",
        "headRefName": ((pr.get("head") or {}).get("ref")) or "",
        "createdAt": pr.get("created_at") or "",
        "mergedAt": pr.get("merged_at") or "",
        "isDraft": bool(pr.get("draft")),
    })
    if len(result) >= limit:
        break

print(json.dumps(result))
PY
  )"

  # Accumulate enriched PRs as JSONL; the RETURN trap cleans the file up.
  item_jsonl_file="$(mktemp)"
  trap 'rm -f "${item_jsonl_file}"' RETURN

  # Pass 2: fetch labels/comments per PR (both live on the issue side).
  while IFS= read -r current_pr_json; do
    [[ -n "${current_pr_json}" ]] || continue
    pr_number="$(jq -r '.number // ""' <<<"${current_pr_json}")"
    [[ -n "${pr_number}" ]] || continue
    issue_json="$(flow_github_api_repo "${repo_slug}" "issues/${pr_number}" 2>/dev/null || printf '{}\n')"
    comment_pages_json="$(
      flow_github_api_repo "${repo_slug}" "issues/${pr_number}/comments?per_page=100" --paginate --slurp 2>/dev/null || printf '[]\n'
    )"
    PR_JSON="${current_pr_json}" ISSUE_JSON="${issue_json}" COMMENT_PAGES_JSON="${comment_pages_json}" python3 - <<'PY' >>"${item_jsonl_file}"
import json
import os

pr = json.loads(os.environ.get("PR_JSON", "{}") or "{}")
issue = json.loads(os.environ.get("ISSUE_JSON", "{}") or "{}")
comment_pages = json.loads(os.environ.get("COMMENT_PAGES_JSON", "[]") or "[]")
comments = []
for page in comment_pages:
    if isinstance(page, list):
        comments.extend(page)
    elif isinstance(page, dict):
        comments.append(page)

# PR_JSON already carries camelCase keys from pass 1; only the issue and
# comment payloads still use the REST snake_case names.
result = {
    "number": pr.get("number"),
    "title": pr.get("title") or "",
    "body": pr.get("body") or issue.get("body") or "",
    "url": pr.get("url") or issue.get("html_url") or issue.get("url") or "",
    "headRefName": pr.get("headRefName") or "",
    "createdAt": pr.get("createdAt") or "",
    "mergedAt": pr.get("mergedAt") or "",
    "isDraft": bool(pr.get("isDraft")),
    "labels": [{"name": label.get("name", "")} for label in issue.get("labels", []) if isinstance(label, dict)],
    "comments": [
        {
            "body": comment.get("body") or "",
            "createdAt": comment.get("created_at") or "",
            "updatedAt": comment.get("updated_at") or "",
            "url": comment.get("html_url") or "",
        }
        for comment in comments
        if isinstance(comment, dict)
    ],
}

print(json.dumps(result))
PY
  done < <(jq -c '.[]' <<<"${selected_prs_json}")

  # Fold the JSONL lines into one JSON array on stdout.
  ITEM_JSONL_FILE="${item_jsonl_file}" python3 - <<'PY'
import json
import os

path = os.environ.get("ITEM_JSONL_FILE", "")
items = []
if path:
    with open(path, "r", encoding="utf-8") as fh:
        for line in fh:
            line = line.strip()
            if not line:
                continue
            items.append(json.loads(line))

print(json.dumps(items))
PY
}
|
|
824
|
+
|
|
825
|
+
# Close an issue, preferring gh(1) and falling back to the REST API.
#   $1 - repo slug, $2 - issue number, $3 - optional closing comment
flow_github_issue_close() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local closing_comment="${3:-}"
  local -a close_args=("${issue_id}" -R "${repo_slug}")

  if [[ -n "${closing_comment}" ]]; then
    close_args+=(--comment "${closing_comment}")
  fi
  if gh issue close "${close_args[@]}" >/dev/null 2>&1; then
    return 0
  fi

  # gh failed: replay the comment (when present) and close via raw REST.
  if [[ -n "${closing_comment}" ]]; then
    flow_github_api_repo "${repo_slug}" "issues/${issue_id}/comments" --method POST -f body="${closing_comment}" >/dev/null
  fi
  printf '%s' '{"state":"closed"}' \
    | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
}
|
|
845
|
+
|
|
846
|
+
# Replace an issue's body text via the REST API.
# JSON encoding is delegated to python3 so arbitrary text is escaped safely.
flow_github_issue_update_body() {
  local repo_slug="${1:?repo slug required}"
  local issue_id="${2:?issue id required}"
  local new_body="${3:?body text required}"
  local patch_payload=""

  patch_payload="$(
    ISSUE_BODY="${new_body}" python3 - <<'PY'
import json
import os

payload = {"body": os.environ.get("ISSUE_BODY", "")}
print(json.dumps(payload))
PY
  )"

  printf '%s' "${patch_payload}" \
    | flow_github_api_repo "${repo_slug}" "issues/${issue_id}" --method PATCH --input - >/dev/null
}
|
|
863
|
+
|
|
864
|
+
# Create (or refresh) a repository label, best-effort and idempotent.
#   $1 - repo slug, $2 - label name, $3 - description, $4 - color (hex,
#   default 1D76DB). Never hard-fails the caller on the final PATCH.
flow_github_label_create() {
  local repo_slug="${1:?repo slug required}"
  local name="${2:?label name required}"
  local description="${3:-}"
  local color="${4:-1D76DB}"
  local name_in_url=""

  # Preferred path: gh handles create-or-update in one call via --force.
  if gh label create "${name}" -R "${repo_slug}" --description "${description}" --color "${color}" --force >/dev/null 2>&1; then
    return 0
  fi

  # REST fallback: try to create; when the label already exists, PATCH it.
  if flow_github_api_repo "${repo_slug}" "labels" --method POST -f name="${name}" -f description="${description}" -f color="${color}" >/dev/null 2>&1; then
    return 0
  fi

  name_in_url="$(flow_github_urlencode "${name}")"
  flow_github_api_repo "${repo_slug}" "labels/${name_in_url}" --method PATCH -f new_name="${name}" -f description="${description}" -f color="${color}" >/dev/null 2>&1 || true
}
|
|
882
|
+
|
|
883
|
+
# Open a new issue and print its URL.
#   $1 - repo slug, $2 - title, $3 - file containing the body text
# Prefers gh(1); otherwise POSTs a JSON payload built by python3 and pulls
# html_url out of the response. Returns 1 when no URL could be obtained.
flow_github_issue_create() {
  local repo_slug="${1:?repo slug required}"
  local title="${2:?title required}"
  local body_file="${3:?body file required}"
  local created_url=""
  local issue_body=""

  if created_url="$(gh issue create -R "${repo_slug}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
    printf '%s\n' "${created_url}"
    return 0
  fi

  issue_body="$(cat "${body_file}")"
  created_url="$(
    ISSUE_TITLE="${title}" ISSUE_BODY="${issue_body}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "issues" --method POST --input - | jq -r '.html_url // ""'
import json
import os

env = os.environ.get
print(json.dumps({
    "title": env("ISSUE_TITLE", ""),
    "body": env("ISSUE_BODY", ""),
}))
PY
  )"
  [[ -n "${created_url}" ]] || return 1
  printf '%s\n' "${created_url}"
}
|
|
911
|
+
|
|
912
|
+
# Open a pull request and print its URL.
#   $1 - repo slug, $2 - base branch, $3 - head branch, $4 - title,
#   $5 - file containing the body text
# Prefers gh(1); otherwise POSTs a JSON payload built by python3 and pulls
# html_url out of the response. Returns 1 when no URL could be obtained.
flow_github_pr_create() {
  local repo_slug="${1:?repo slug required}"
  local base_branch="${2:?base branch required}"
  local head_branch="${3:?head branch required}"
  local title="${4:?title required}"
  local body_file="${5:?body file required}"
  local created_url=""
  local pr_body=""

  if created_url="$(gh pr create -R "${repo_slug}" --base "${base_branch}" --head "${head_branch}" --title "${title}" --body-file "${body_file}" 2>/dev/null)"; then
    printf '%s\n' "${created_url}"
    return 0
  fi

  pr_body="$(cat "${body_file}")"
  created_url="$(
    BASE_BRANCH="${base_branch}" HEAD_BRANCH="${head_branch}" PR_TITLE="${title}" PR_BODY="${pr_body}" python3 - <<'PY' | flow_github_api_repo "${repo_slug}" "pulls" --method POST --input - | jq -r '.html_url // ""'
import json
import os

env = os.environ.get
print(json.dumps({
    "title": env("PR_TITLE", ""),
    "head": env("HEAD_BRANCH", ""),
    "base": env("BASE_BRANCH", ""),
    "body": env("PR_BODY", ""),
}))
PY
  )"
  [[ -n "${created_url}" ]] || return 1
  printf '%s\n' "${created_url}"
}
|
|
944
|
+
|
|
945
|
+
# Merge a pull request, preferring gh(1) (with --admin to bypass branch
# protection where permitted) and falling back to the REST merge endpoint.
#   $1 - repo slug, $2 - PR number,
#   $3 - merge method (merge|squash|rebase; default squash),
#   $4 - "yes" to delete the head branch afterwards (default "no")
flow_github_pr_merge() {
  local repo_slug="${1:?repo slug required}"
  local pr_number="${2:?pr number required}"
  local merge_method="${3:-squash}"
  local delete_branch="${4:-no}"
  local -a gh_merge_args=("${pr_number}" -R "${repo_slug}" "--${merge_method}")
  local pr_json=""
  local head_ref=""
  local encoded_ref=""

  # Build the optional flag with an array rather than the previous unquoted
  # inline command substitution, which relied on word-splitting (SC2046)
  # and broke under a non-default IFS.
  if [[ "${delete_branch}" == "yes" ]]; then
    gh_merge_args+=(--delete-branch)
  fi
  if gh pr merge "${gh_merge_args[@]}" --admin >/dev/null 2>&1; then
    return 0
  fi

  # REST fallback merge (merge_method values are fixed tokens, safe to
  # interpolate into the JSON template).
  printf '{"merge_method":"%s"}' "${merge_method}" \
    | flow_github_api_repo "${repo_slug}" "pulls/${pr_number}/merge" --method PUT --input - >/dev/null

  if [[ "${delete_branch}" == "yes" ]]; then
    # gh did not run, so delete the head ref ourselves (best effort).
    pr_json="$(flow_github_pr_view_json "${repo_slug}" "${pr_number}" 2>/dev/null || printf '{}\n')"
    head_ref="$(jq -r '.headRefName // ""' <<<"${pr_json}")"
    if [[ -n "${head_ref}" ]]; then
      encoded_ref="$(flow_github_urlencode "heads/${head_ref}")"
      flow_github_api_repo "${repo_slug}" "git/refs/${encoded_ref}" --method DELETE >/dev/null 2>&1 || true
    fi
  fi
}
|
|
970
|
+
|
|
971
|
+
# Look up a dotted key path (e.g. "github.repo") in a simple YAML-like
# config file using an indentation-based scan. Prints the value (surrounding
# single/double quotes stripped), or an empty line when the path is absent.
# Comments, blank lines, and "- " list items are ignored.
flow_config_get() {
  local config_file="${1:?config file required}"
  local target_path="${2:?target path required}"

  python3 - "$config_file" "$target_path" <<'PY'
import sys

path_to_file = sys.argv[1]
wanted = sys.argv[2]

open_scopes = []  # (indent, key) pairs leading to the current line
matched = False

with open(path_to_file, "r", encoding="utf-8") as handle:
    for line in handle:
        text = line.strip()
        skip = not text or text.startswith("#") or text.startswith("- ")
        if skip or ":" not in line:
            continue

        depth = len(line) - len(line.lstrip())
        name, _, rest = text.partition(":")
        name = name.strip()
        rest = rest.strip().strip("\"'")

        # Close any scopes at the same or deeper indentation.
        while open_scopes and depth <= open_scopes[-1][0]:
            open_scopes.pop()
        open_scopes.append((depth, name))

        if wanted == ".".join(k for _, k in open_scopes) and rest:
            print(rest)
            matched = True
            break

if not matched:
    print("")
PY
}
|
|
1012
|
+
|
|
1013
|
+
flow_kv_get() {
  # Look up "key" in a newline-delimited KEY=VALUE payload. Prints the
  # first match's value (everything after the first "="); prints nothing
  # when the key is absent.
  local payload="${1:-}"
  local wanted="${2:?key required}"

  awk -F '=' -v wanted="${wanted}" '
    $1 == wanted {
      print substr($0, length(wanted) + 2)
      exit
    }
  ' <<<"${payload}"
}
|
|
1019
|
+
|
|
1020
|
+
flow_env_or_config() {
  # Resolve a setting with precedence: first non-empty environment variable
  # from the whitespace-separated name list, then the config-file key, then
  # the supplied default. Always prints exactly one line.
  local config_file="${1:?config file required}"
  local env_names="${2:?env names required}"
  local config_key="${3:?config key required}"
  local default_value="${4:-}"
  local candidate=""
  local resolved=""

  # Environment overrides win; names are checked in the order given.
  for candidate in ${env_names}; do
    resolved="${!candidate:-}"
    if [[ -n "${resolved}" ]]; then
      printf '%s\n' "${resolved}"
      return 0
    fi
  done

  if [[ -f "${config_file}" ]]; then
    resolved="$(flow_config_get "${config_file}" "${config_key}")"
    if [[ -n "${resolved}" ]]; then
      printf '%s\n' "${resolved}"
      return 0
    fi
  fi

  printf '%s\n' "${default_value}"
}
|
|
1046
|
+
|
|
1047
|
+
flow_resolve_adapter_id() {
  # Resolve the adapter/project id: env override, then the config "id" key,
  # then the built-in default profile id.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_profile_id)"
  flow_env_or_config "${config_file}" "ACP_PROJECT_ID AGENT_PROJECT_ID" "id" "${fallback}"
}
|
|
1056
|
+
|
|
1057
|
+
flow_resolve_profile_notes_file() {
  # Print the profile notes file path: README.md next to the resolved
  # config file.
  # Arguments: $1 - config file path (optional; auto-resolved when empty)
  local config_file="${1:-}"
  local config_dir=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  # Prefer the physical directory; fall back to the lexical dirname when
  # the directory cannot be entered.
  config_dir="$(cd "$(dirname "${config_file}")" 2>/dev/null && pwd -P || dirname "${config_file}")"
  # Explicit \n instead of a format string split across two source lines,
  # matching the printf convention used elsewhere in this file.
  printf '%s/README.md\n' "${config_dir}"
}
|
|
1069
|
+
|
|
1070
|
+
flow_default_issue_session_prefix() {
  # Default session-name prefix for issue work: "<adapter-id>-issue-".
  local config_file="${1:-}"
  local adapter_id=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  printf '%s-issue-\n' "${adapter_id}"
}

flow_default_pr_session_prefix() {
  # Default session-name prefix for PR work: "<adapter-id>-pr-".
  local config_file="${1:-}"
  local adapter_id=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  printf '%s-pr-\n' "${adapter_id}"
}

flow_default_issue_branch_prefix() {
  # Default git branch prefix for issue work: "agent/<adapter-id>/issue".
  local config_file="${1:-}"
  local adapter_id=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  printf 'agent/%s/issue\n' "${adapter_id}"
}

flow_default_pr_worktree_branch_prefix() {
  # Default git branch prefix for PR worktrees: "agent/<adapter-id>/pr".
  local config_file="${1:-}"
  local adapter_id=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  printf 'agent/%s/pr\n' "${adapter_id}"
}

flow_default_managed_pr_branch_globs() {
  # Space-separated branch globs considered "managed" by this agent.
  local config_file="${1:-}"
  local adapter_id=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  printf 'agent/%s/* codex/* openclaw/*\n' "${adapter_id}"
}
|
|
1129
|
+
|
|
1130
|
+
flow_default_agent_root() {
  # Default per-project agent root: <platform home>/projects/<adapter-id>.
  # AGENT_PLATFORM_HOME overrides the platform home (default ~/.agent-runtime).
  local config_file="${1:-}"
  local adapter_id=""
  local platform_home="${AGENT_PLATFORM_HOME:-${HOME}/.agent-runtime}"

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  # Explicit \n instead of a literal newline embedded in the format string,
  # consistent with the rest of this file.
  printf '%s/projects/%s\n' "${platform_home}" "${adapter_id}"
}
|
|
1143
|
+
|
|
1144
|
+
flow_default_repo_slug() {
  # Default GitHub repo slug placeholder: "example/<adapter-id>".
  local config_file="${1:-}"
  local adapter_id=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  adapter_id="$(flow_resolve_adapter_id "${config_file}")"
  # Explicit \n instead of a format string split across two source lines.
  printf 'example/%s\n' "${adapter_id}"
}

flow_default_repo_id() {
  # No built-in default repository id: print an empty line.
  printf '\n'
}
|
|
1160
|
+
|
|
1161
|
+
flow_default_repo_root() {
  # Default repo checkout path: <agent root>/repo.
  local config_file="${1:-}"
  local agent_root=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  agent_root="$(flow_default_agent_root "${config_file}")"
  # Explicit \n instead of a literal newline embedded in the format string,
  # consistent with the rest of this file (also fixed in the three helpers
  # below).
  printf '%s/repo\n' "${agent_root}"
}

flow_default_worktree_root() {
  # Default worktree parent directory: <agent root>/worktrees.
  local config_file="${1:-}"
  local agent_root=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  agent_root="$(flow_default_agent_root "${config_file}")"
  printf '%s/worktrees\n' "${agent_root}"
}

flow_default_retained_repo_root() {
  # Default retained-repo directory: <agent root>/retained.
  local config_file="${1:-}"
  local agent_root=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  agent_root="$(flow_default_agent_root "${config_file}")"
  printf '%s/retained\n' "${agent_root}"
}

flow_default_vscode_workspace_file() {
  # Default VS Code workspace file: <agent root>/workspace.code-workspace.
  local config_file="${1:-}"
  local agent_root=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  agent_root="$(flow_default_agent_root "${config_file}")"
  printf '%s/workspace.code-workspace\n' "${agent_root}"
}
|
|
1212
|
+
flow_resolve_repo_slug() {
  # "owner/name" slug: env override, then repo.slug, then the default slug.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_repo_slug "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_REPO_SLUG F_LOSNING_REPO_SLUG" "repo.slug" "${fallback}"
}

flow_resolve_repo_id() {
  # Numeric repository id: env overrides, then repo.id, then the default
  # (empty) id.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_repo_id)"
  flow_env_or_config "${config_file}" "ACP_REPO_ID F_LOSNING_REPO_ID ACP_GITHUB_REPOSITORY_ID F_LOSNING_GITHUB_REPOSITORY_ID" "repo.id" "${fallback}"
}

flow_resolve_default_branch() {
  # Default branch name: env override, then repo.default_branch, else "main".
  local config_file="${1:-}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_env_or_config "${config_file}" "ACP_DEFAULT_BRANCH F_LOSNING_DEFAULT_BRANCH" "repo.default_branch" "main"
}
|
|
1239
|
+
|
|
1240
|
+
flow_resolve_project_label() {
  # Human-readable project label: the repo-name portion of the slug, falling
  # back to the adapter id when the slug is empty.
  local config_file="${1:-}"
  local repo_slug=""
  local label=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  repo_slug="$(flow_resolve_repo_slug "${config_file}")"
  label="${repo_slug##*/}"
  if [[ -n "${label}" ]]; then
    printf '%s\n' "${label}"
  else
    # Resolve the adapter id lazily: it re-reads the config in a subshell
    # and was previously computed even on the common (slug present) path.
    # flow_resolve_adapter_id already prints with a trailing newline.
    flow_resolve_adapter_id "${config_file}"
  fi
}
|
|
1259
|
+
|
|
1260
|
+
flow_resolve_repo_root() {
  # Repo checkout path: env override, then repo.root, then the default.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_repo_root "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_REPO_ROOT F_LOSNING_REPO_ROOT" "repo.root" "${fallback}"
}

flow_resolve_agent_root() {
  # Orchestrator agent root: env override, then
  # runtime.orchestrator_agent_root, then the default agent root.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_agent_root "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_AGENT_ROOT F_LOSNING_AGENT_ROOT" "runtime.orchestrator_agent_root" "${fallback}"
}

flow_resolve_agent_repo_root() {
  # Repo root used by the agent: env override, then runtime.agent_repo_root,
  # then the resolved (not merely default) repo root.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_resolve_repo_root "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_AGENT_REPO_ROOT F_LOSNING_AGENT_REPO_ROOT" "runtime.agent_repo_root" "${fallback}"
}

flow_resolve_worktree_root() {
  # Worktree parent path: env override, then runtime.worktree_root, then
  # the default.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_worktree_root "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_WORKTREE_ROOT F_LOSNING_WORKTREE_ROOT" "runtime.worktree_root" "${fallback}"
}
|
|
1303
|
+
|
|
1304
|
+
flow_resolve_runs_root() {
  # Runs directory. Precedence:
  #   1. ACP_RUNS_ROOT / F_LOSNING_RUNS_ROOT (explicit override)
  #   2. <agent root>/runs, when an umbrella agent-root env var is set
  #   3. runtime.runs_root from config, else <agent root>/runs
  local config_file="${1:-}"
  local derived=""
  local explicit_root="${ACP_RUNS_ROOT:-${F_LOSNING_RUNS_ROOT:-}}"
  local umbrella_root="${ACP_AGENT_ROOT:-${F_LOSNING_AGENT_ROOT:-}}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  if [[ -n "${explicit_root}" ]]; then
    printf '%s\n' "${explicit_root}"
    return 0
  fi

  derived="$(flow_resolve_agent_root "${config_file}")/runs"
  if [[ -n "${umbrella_root}" ]]; then
    printf '%s\n' "${derived}"
    return 0
  fi

  flow_env_or_config "${config_file}" "ACP_RUNS_ROOT F_LOSNING_RUNS_ROOT" "runtime.runs_root" "${derived}"
}

flow_resolve_state_root() {
  # State directory; same precedence scheme as flow_resolve_runs_root but
  # for ACP_STATE_ROOT / runtime.state_root / <agent root>/state.
  local config_file="${1:-}"
  local derived=""
  local explicit_root="${ACP_STATE_ROOT:-${F_LOSNING_STATE_ROOT:-}}"
  local umbrella_root="${ACP_AGENT_ROOT:-${F_LOSNING_AGENT_ROOT:-}}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  if [[ -n "${explicit_root}" ]]; then
    printf '%s\n' "${explicit_root}"
    return 0
  fi

  derived="$(flow_resolve_agent_root "${config_file}")/state"
  if [[ -n "${umbrella_root}" ]]; then
    printf '%s\n' "${derived}"
    return 0
  fi

  flow_env_or_config "${config_file}" "ACP_STATE_ROOT F_LOSNING_STATE_ROOT" "runtime.state_root" "${derived}"
}

flow_resolve_history_root() {
  # History directory; same precedence scheme as flow_resolve_runs_root but
  # for ACP_HISTORY_ROOT / runtime.history_root / <agent root>/history.
  local config_file="${1:-}"
  local derived=""
  local explicit_root="${ACP_HISTORY_ROOT:-${F_LOSNING_HISTORY_ROOT:-}}"
  local umbrella_root="${ACP_AGENT_ROOT:-${F_LOSNING_AGENT_ROOT:-}}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  if [[ -n "${explicit_root}" ]]; then
    printf '%s\n' "${explicit_root}"
    return 0
  fi

  derived="$(flow_resolve_agent_root "${config_file}")/history"
  if [[ -n "${umbrella_root}" ]]; then
    printf '%s\n' "${derived}"
    return 0
  fi

  flow_env_or_config "${config_file}" "ACP_HISTORY_ROOT F_LOSNING_HISTORY_ROOT" "runtime.history_root" "${derived}"
}
|
|
1375
|
+
|
|
1376
|
+
flow_resolve_retained_repo_root() {
  # Retained-repo path: env override, then runtime.retained_repo_root, then
  # the default.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_retained_repo_root "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_RETAINED_REPO_ROOT F_LOSNING_RETAINED_REPO_ROOT" "runtime.retained_repo_root" "${fallback}"
}

flow_resolve_vscode_workspace_file() {
  # VS Code workspace file path: env override, then
  # runtime.vscode_workspace_file, then the default.
  local config_file="${1:-}"
  local fallback=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  fallback="$(flow_default_vscode_workspace_file "${config_file}")"
  flow_env_or_config "${config_file}" "ACP_VSCODE_WORKSPACE_FILE F_LOSNING_VSCODE_WORKSPACE_FILE" "runtime.vscode_workspace_file" "${fallback}"
}

flow_resolve_web_playwright_command() {
  # Playwright verification command: env override, then config key, else
  # "pnpm exec playwright test".
  local config_file="${1:-}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_env_or_config "${config_file}" "ACP_WEB_PLAYWRIGHT_COMMAND F_LOSNING_WEB_PLAYWRIGHT_COMMAND" "execution.verification.web_playwright_command" "pnpm exec playwright test"
}
|
|
1403
|
+
|
|
1404
|
+
flow_resolve_codex_quota_bin() {
  # Locate the codex-quota executable. Precedence: explicit env override,
  # project-local tools/bin, shared-home tools/bin, PATH lookup, and finally
  # the project-local path (even if it does not exist) so callers always get
  # a stable value.
  local flow_root="${1:-}"
  local shared_home=""
  local explicit_bin="${ACP_CODEX_QUOTA_BIN:-${F_LOSNING_CODEX_QUOTA_BIN:-}}"
  local probe=""

  if [[ -n "${explicit_bin}" ]]; then
    printf '%s\n' "${explicit_bin}"
    return 0
  fi

  [[ -n "${flow_root}" ]] || flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  shared_home="${SHARED_AGENT_HOME:-$(resolve_shared_agent_home "${flow_root}")}"

  for probe in \
    "${flow_root}/tools/bin/codex-quota" \
    "${shared_home}/tools/bin/codex-quota"; do
    if [[ -x "${probe}" ]]; then
      printf '%s\n' "${probe}"
      return 0
    fi
  done

  probe="$(command -v codex-quota 2>/dev/null || true)"
  if [[ -n "${probe}" ]]; then
    printf '%s\n' "${probe}"
    return 0
  fi

  printf '%s\n' "${flow_root}/tools/bin/codex-quota"
}
|
|
1437
|
+
|
|
1438
|
+
flow_resolve_codex_quota_manager_script() {
  # Locate the codex-quota-manager auto-switch script. Precedence: explicit
  # env override, project-local vendor copy, shared-home vendor copy,
  # shared-home skill copy, then the project-local path as a stable fallback.
  local flow_root="${1:-}"
  local shared_home=""
  local explicit_script="${ACP_CODEX_QUOTA_MANAGER_SCRIPT:-${F_LOSNING_CODEX_QUOTA_MANAGER_SCRIPT:-}}"
  local probe=""

  if [[ -n "${explicit_script}" ]]; then
    printf '%s\n' "${explicit_script}"
    return 0
  fi

  [[ -n "${flow_root}" ]] || flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  shared_home="${SHARED_AGENT_HOME:-$(resolve_shared_agent_home "${flow_root}")}"

  for probe in \
    "${flow_root}/tools/vendor/codex-quota-manager/scripts/auto-switch.sh" \
    "${shared_home}/tools/vendor/codex-quota-manager/scripts/auto-switch.sh" \
    "${shared_home}/skills/openclaw/codex-quota-manager/scripts/auto-switch.sh"; do
    if [[ -x "${probe}" ]]; then
      printf '%s\n' "${probe}"
      return 0
    fi
  done

  printf '%s\n' "${flow_root}/tools/vendor/codex-quota-manager/scripts/auto-switch.sh"
}
|
|
1466
|
+
|
|
1467
|
+
flow_resolve_template_file() {
  # Locate a template file by name. Search order:
  #   1. explicit template-dir env overrides
  #   2. workspace templates dir (skipped when it IS the canonical tools dir)
  #   3. templates dir next to the config file
  #   4. workspace templates dir (unconditional)
  #   5. canonical tools/templates under the skill root (even if absent)
  # Removed the unused local `profile_id` from the original.
  local template_name="${1:?template name required}"
  local workspace_dir="${2:-}"
  local config_file="${3:-}"
  local flow_root=""
  local config_dir=""
  local template_dir=""
  local candidate=""
  local workspace_real=""
  local canonical_tools_real=""

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  flow_root="$(resolve_flow_skill_dir "${BASH_SOURCE[0]}")"
  config_dir="$(cd "$(dirname "${config_file}")" 2>/dev/null && pwd -P || dirname "${config_file}")"

  # 1. Environment overrides, highest priority first.
  for template_dir in \
    "${AGENT_CONTROL_PLANE_TEMPLATE_DIR:-}" \
    "${ACP_TEMPLATE_DIR:-}" \
    "${F_LOSNING_TEMPLATE_DIR:-}"; do
    if [[ -n "${template_dir}" && -f "${template_dir}/${template_name}" ]]; then
      printf '%s\n' "${template_dir}/${template_name}"
      return 0
    fi
  done

  # 2. Workspace templates, but only when the workspace is not the canonical
  #    tools directory (compared via physical paths).
  if [[ -n "${workspace_dir}" && -f "${workspace_dir}/templates/${template_name}" ]]; then
    workspace_real="$(cd "${workspace_dir}" && pwd -P)"
    canonical_tools_real="$(cd "${flow_root}/tools" && pwd -P)"
    if [[ "${workspace_real}" != "${canonical_tools_real}" ]]; then
      printf '%s\n' "${workspace_dir}/templates/${template_name}"
      return 0
    fi
  fi

  # 3. Templates next to the config file.
  candidate="${config_dir}/templates/${template_name}"
  if [[ -f "${candidate}" ]]; then
    printf '%s\n' "${candidate}"
    return 0
  fi

  # 4. Workspace templates again, this time without the canonical-dir guard.
  #    NOTE(review): when the workspace IS the canonical tools dir this path
  #    equals step 5's result, so the outcome matches the final fallback.
  if [[ -n "${workspace_dir}" && -f "${workspace_dir}/templates/${template_name}" ]]; then
    printf '%s\n' "${workspace_dir}/templates/${template_name}"
    return 0
  fi

  # 5. Canonical fallback, printed even when the file does not exist.
  printf '%s\n' "${flow_root}/tools/templates/${template_name}"
}
|
|
1518
|
+
|
|
1519
|
+
flow_resolve_retry_cooldowns() {
  # Comma-separated retry cooldowns (seconds): env override, then config,
  # else "300,900,1800,3600".
  local config_file="${1:-}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_env_or_config "${config_file}" "ACP_RETRY_COOLDOWNS F_LOSNING_RETRY_COOLDOWNS" "execution.retry.cooldowns" "300,900,1800,3600"
}

flow_resolve_provider_quota_cooldowns() {
  # Comma-separated provider-quota cooldowns (seconds): env override, then
  # config, else "300,900,1800,3600".
  local config_file="${1:-}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_env_or_config "${config_file}" "ACP_PROVIDER_QUOTA_COOLDOWNS F_LOSNING_PROVIDER_QUOTA_COOLDOWNS" "execution.provider_quota.cooldowns" "300,900,1800,3600"
}

flow_resolve_provider_pool_order() {
  # Whitespace-separated provider-pool order: env override, then config,
  # else empty (pools disabled).
  local config_file="${1:-}"

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_env_or_config "${config_file}" "ACP_PROVIDER_POOL_ORDER F_LOSNING_PROVIDER_POOL_ORDER" "execution.provider_pool_order" ""
}
|
|
1542
|
+
|
|
1543
|
+
flow_provider_pool_names() {
  # Print each configured provider-pool name on its own line, in the order
  # given by the resolved pool order.
  local config_file="${1:-}"
  local pool=""

  [[ -n "${config_file}" ]] || config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  for pool in $(flow_resolve_provider_pool_order "${config_file}"); do
    [[ -n "${pool}" ]] || continue
    printf '%s\n' "${pool}"
  done
}

flow_provider_pools_enabled() {
  # Succeeds (status 0) when at least one provider pool is configured.
  local config_file="${1:-}"
  local order=""

  order="$(flow_resolve_provider_pool_order "${config_file}")"
  [[ -n "${order}" ]]
}
|
|
1563
|
+
|
|
1564
|
+
flow_provider_pool_value() {
  # Read one setting for a named provider pool from the config file:
  # execution.provider_pools.<pool>.<relative path>.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"
  local subkey="${3:?relative path required}"

  flow_config_get "${cfg}" "execution.provider_pools.${pool}.${subkey}"
}
|
|
1571
|
+
|
|
1572
|
+
flow_provider_pool_backend() {
  # The pool's coding_worker setting (backend name).
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "coding_worker"
}

flow_provider_pool_safe_profile() {
  # The pool's safe_profile setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "safe_profile"
}

flow_provider_pool_bypass_profile() {
  # The pool's bypass_profile setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "bypass_profile"
}
|
|
1592
|
+
|
|
1593
|
+
flow_provider_pool_claude_model() {
  # The pool's claude.model setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "claude.model"
}

flow_provider_pool_claude_permission_mode() {
  # The pool's claude.permission_mode setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "claude.permission_mode"
}

flow_provider_pool_claude_effort() {
  # The pool's claude.effort setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "claude.effort"
}

flow_provider_pool_claude_timeout_seconds() {
  # The pool's claude.timeout_seconds setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "claude.timeout_seconds"
}

flow_provider_pool_claude_max_attempts() {
  # The pool's claude.max_attempts setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "claude.max_attempts"
}

flow_provider_pool_claude_retry_backoff_seconds() {
  # The pool's claude.retry_backoff_seconds setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "claude.retry_backoff_seconds"
}
|
|
1634
|
+
|
|
1635
|
+
flow_provider_pool_openclaw_model() {
  # The pool's openclaw.model setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "openclaw.model"
}

flow_provider_pool_openclaw_thinking() {
  # The pool's openclaw.thinking setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "openclaw.thinking"
}

flow_provider_pool_openclaw_timeout_seconds() {
  # The pool's openclaw.timeout_seconds setting.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"

  flow_provider_pool_value "${cfg}" "${pool}" "openclaw.timeout_seconds"
}
|
|
1655
|
+
|
|
1656
|
+
flow_sanitize_provider_key() {
  # Normalize an arbitrary string into a filename-safe provider key:
  # lowercase, runs of characters outside [a-z0-9._-] collapsed to "-",
  # leading/trailing dashes stripped, repeated dashes squeezed to one.
  local raw_key="${1:?raw key required}"
  local lowered=""

  lowered="$(printf '%s' "${raw_key}" | tr '[:upper:]' '[:lower:]')"
  printf '%s' "${lowered}" | sed -E \
    -e 's/[^a-z0-9._-]+/-/g' \
    -e 's/^-+//' \
    -e 's/-+$//' \
    -e 's/-+/-/g'
}
|
|
1663
|
+
|
|
1664
|
+
flow_provider_pool_model_identity() {
  # Print the value identifying this pool's model: the safe profile for
  # codex pools, the backend's model setting for claude/openclaw pools,
  # and an empty line for unknown backends.
  local cfg="${1:?config file required}"
  local pool="${2:?pool name required}"
  local worker=""

  worker="$(flow_provider_pool_backend "${cfg}" "${pool}")"
  case "${worker}" in
    codex)
      flow_provider_pool_safe_profile "${cfg}" "${pool}"
      ;;
    claude)
      flow_provider_pool_claude_model "${cfg}" "${pool}"
      ;;
    openclaw)
      flow_provider_pool_openclaw_model "${cfg}" "${pool}"
      ;;
    *)
      printf '\n'
      ;;
  esac
}
|
|
1685
|
+
|
|
1686
|
+
flow_provider_pool_state_get() {
|
|
1687
|
+
local config_file="${1:?config file required}"
|
|
1688
|
+
local pool_name="${2:?pool name required}"
|
|
1689
|
+
local backend=""
|
|
1690
|
+
local model=""
|
|
1691
|
+
local state_root=""
|
|
1692
|
+
local provider_key=""
|
|
1693
|
+
local state_file=""
|
|
1694
|
+
local attempts="0"
|
|
1695
|
+
local next_attempt_epoch="0"
|
|
1696
|
+
local next_attempt_at=""
|
|
1697
|
+
local last_reason=""
|
|
1698
|
+
local updated_at=""
|
|
1699
|
+
local ready="yes"
|
|
1700
|
+
local valid="yes"
|
|
1701
|
+
local now_epoch=""
|
|
1702
|
+
local safe_profile=""
|
|
1703
|
+
local bypass_profile=""
|
|
1704
|
+
local claude_model=""
|
|
1705
|
+
local claude_permission_mode=""
|
|
1706
|
+
local claude_effort=""
|
|
1707
|
+
local claude_timeout_seconds=""
|
|
1708
|
+
local claude_max_attempts=""
|
|
1709
|
+
local claude_retry_backoff_seconds=""
|
|
1710
|
+
local openclaw_model=""
|
|
1711
|
+
local openclaw_thinking=""
|
|
1712
|
+
local openclaw_timeout_seconds=""
|
|
1713
|
+
|
|
1714
|
+
backend="$(flow_provider_pool_backend "${config_file}" "${pool_name}")"
|
|
1715
|
+
safe_profile="$(flow_provider_pool_safe_profile "${config_file}" "${pool_name}")"
|
|
1716
|
+
bypass_profile="$(flow_provider_pool_bypass_profile "${config_file}" "${pool_name}")"
|
|
1717
|
+
claude_model="$(flow_provider_pool_claude_model "${config_file}" "${pool_name}")"
|
|
1718
|
+
claude_permission_mode="$(flow_provider_pool_claude_permission_mode "${config_file}" "${pool_name}")"
|
|
1719
|
+
claude_effort="$(flow_provider_pool_claude_effort "${config_file}" "${pool_name}")"
|
|
1720
|
+
claude_timeout_seconds="$(flow_provider_pool_claude_timeout_seconds "${config_file}" "${pool_name}")"
|
|
1721
|
+
claude_max_attempts="$(flow_provider_pool_claude_max_attempts "${config_file}" "${pool_name}")"
|
|
1722
|
+
claude_retry_backoff_seconds="$(flow_provider_pool_claude_retry_backoff_seconds "${config_file}" "${pool_name}")"
|
|
1723
|
+
openclaw_model="$(flow_provider_pool_openclaw_model "${config_file}" "${pool_name}")"
|
|
1724
|
+
openclaw_thinking="$(flow_provider_pool_openclaw_thinking "${config_file}" "${pool_name}")"
|
|
1725
|
+
openclaw_timeout_seconds="$(flow_provider_pool_openclaw_timeout_seconds "${config_file}" "${pool_name}")"
|
|
1726
|
+
model="$(flow_provider_pool_model_identity "${config_file}" "${pool_name}")"
|
|
1727
|
+
|
|
1728
|
+
case "${backend}" in
|
|
1729
|
+
codex)
|
|
1730
|
+
[[ -n "${safe_profile}" && -n "${bypass_profile}" ]] || valid="no"
|
|
1731
|
+
;;
|
|
1732
|
+
claude)
|
|
1733
|
+
[[ -n "${claude_model}" && -n "${claude_permission_mode}" && -n "${claude_effort}" && -n "${claude_timeout_seconds}" && -n "${claude_max_attempts}" && -n "${claude_retry_backoff_seconds}" ]] || valid="no"
|
|
1734
|
+
;;
|
|
1735
|
+
openclaw)
|
|
1736
|
+
[[ -n "${openclaw_model}" && -n "${openclaw_thinking}" && -n "${openclaw_timeout_seconds}" ]] || valid="no"
|
|
1737
|
+
;;
|
|
1738
|
+
*)
|
|
1739
|
+
valid="no"
|
|
1740
|
+
;;
|
|
1741
|
+
esac
|
|
1742
|
+
|
|
1743
|
+
if [[ "${valid}" == "yes" && -n "${model}" ]]; then
|
|
1744
|
+
state_root="$(flow_resolve_state_root "${config_file}")"
|
|
1745
|
+
provider_key="$(flow_sanitize_provider_key "${backend}-${model}")"
|
|
1746
|
+
state_file="${state_root}/retries/providers/${provider_key}.env"
|
|
1747
|
+
|
|
1748
|
+
if [[ -f "${state_file}" ]]; then
|
|
1749
|
+
set -a
|
|
1750
|
+
# shellcheck source=/dev/null
|
|
1751
|
+
source "${state_file}"
|
|
1752
|
+
set +a
|
|
1753
|
+
attempts="${ATTEMPTS:-0}"
|
|
1754
|
+
next_attempt_epoch="${NEXT_ATTEMPT_EPOCH:-0}"
|
|
1755
|
+
next_attempt_at="${NEXT_ATTEMPT_AT:-}"
|
|
1756
|
+
last_reason="${LAST_REASON:-}"
|
|
1757
|
+
updated_at="${UPDATED_AT:-}"
|
|
1758
|
+
fi
|
|
1759
|
+
|
|
1760
|
+
now_epoch="$(date +%s)"
|
|
1761
|
+
if [[ "${next_attempt_epoch}" =~ ^[0-9]+$ ]] && (( next_attempt_epoch > now_epoch )); then
|
|
1762
|
+
ready="no"
|
|
1763
|
+
fi
|
|
1764
|
+
else
|
|
1765
|
+
ready="no"
|
|
1766
|
+
fi
|
|
1767
|
+
|
|
1768
|
+
printf 'POOL_NAME=%s\n' "${pool_name}"
|
|
1769
|
+
printf 'VALID=%s\n' "${valid}"
|
|
1770
|
+
printf 'BACKEND=%s\n' "${backend}"
|
|
1771
|
+
printf 'MODEL=%s\n' "${model}"
|
|
1772
|
+
printf 'PROVIDER_KEY=%s\n' "${provider_key}"
|
|
1773
|
+
printf 'ATTEMPTS=%s\n' "${attempts}"
|
|
1774
|
+
printf 'NEXT_ATTEMPT_EPOCH=%s\n' "${next_attempt_epoch}"
|
|
1775
|
+
printf 'NEXT_ATTEMPT_AT=%s\n' "${next_attempt_at}"
|
|
1776
|
+
printf 'READY=%s\n' "${ready}"
|
|
1777
|
+
printf 'LAST_REASON=%s\n' "${last_reason}"
|
|
1778
|
+
printf 'UPDATED_AT=%s\n' "${updated_at}"
|
|
1779
|
+
printf 'SAFE_PROFILE=%s\n' "${safe_profile}"
|
|
1780
|
+
printf 'BYPASS_PROFILE=%s\n' "${bypass_profile}"
|
|
1781
|
+
printf 'CLAUDE_MODEL=%s\n' "${claude_model}"
|
|
1782
|
+
printf 'CLAUDE_PERMISSION_MODE=%s\n' "${claude_permission_mode}"
|
|
1783
|
+
printf 'CLAUDE_EFFORT=%s\n' "${claude_effort}"
|
|
1784
|
+
printf 'CLAUDE_TIMEOUT_SECONDS=%s\n' "${claude_timeout_seconds}"
|
|
1785
|
+
printf 'CLAUDE_MAX_ATTEMPTS=%s\n' "${claude_max_attempts}"
|
|
1786
|
+
printf 'CLAUDE_RETRY_BACKOFF_SECONDS=%s\n' "${claude_retry_backoff_seconds}"
|
|
1787
|
+
printf 'OPENCLAW_MODEL=%s\n' "${openclaw_model}"
|
|
1788
|
+
printf 'OPENCLAW_THINKING=%s\n' "${openclaw_thinking}"
|
|
1789
|
+
printf 'OPENCLAW_TIMEOUT_SECONDS=%s\n' "${openclaw_timeout_seconds}"
|
|
1790
|
+
}
|
|
1791
|
+
|
|
1792
|
+
flow_selected_provider_pool_env() {
  # Pick a provider pool and print its state as KEY=VALUE lines.
  #
  # Selection policy: the first pool that is both VALID=yes and READY=yes
  # wins (SELECTION_REASON=ready). If every valid pool is cooling down,
  # the one with the smallest NEXT_ATTEMPT_EPOCH is printed instead with
  # POOLS_EXHAUSTED=yes / SELECTION_REASON=all-cooldown.
  #
  # $1 - optional config file path (resolved from the caller's location
  #      when omitted).
  # Returns 1 when pools are disabled or no valid pool exists.
  local cfg="${1:-}"
  local name=""
  local snapshot=""
  local is_valid=""
  local is_ready=""
  local epoch="0"
  local best_waiting=""
  local best_epoch=""

  [[ -n "${cfg}" ]] || cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"

  flow_provider_pools_enabled "${cfg}" || return 1

  while IFS= read -r name; do
    [[ -n "${name}" ]] || continue
    snapshot="$(flow_provider_pool_state_get "${cfg}" "${name}")"
    is_valid="$(awk -F= '/^VALID=/{print $2}' <<<"${snapshot}")"
    [[ "${is_valid}" == "yes" ]] || continue

    is_ready="$(awk -F= '/^READY=/{print $2}' <<<"${snapshot}")"
    if [[ "${is_ready}" == "yes" ]]; then
      printf '%s\n' "${snapshot}"
      printf 'POOLS_EXHAUSTED=no\n'
      printf 'SELECTION_REASON=ready\n'
      return 0
    fi

    # Not ready: remember the pool whose cooldown expires soonest. The
    # first valid pool seeds the record unconditionally; later pools only
    # replace it when both epochs are numeric and strictly smaller.
    epoch="$(awk -F= '/^NEXT_ATTEMPT_EPOCH=/{print $2}' <<<"${snapshot}")"
    if [[ -z "${best_waiting}" ]]; then
      best_waiting="${snapshot}"
      best_epoch="${epoch}"
    elif [[ "${epoch}" =~ ^[0-9]+$ && "${best_epoch}" =~ ^[0-9]+$ ]] && (( epoch < best_epoch )); then
      best_waiting="${snapshot}"
      best_epoch="${epoch}"
    fi
  done < <(flow_provider_pool_names "${cfg}")

  [[ -n "${best_waiting}" ]] || return 1

  printf '%s\n' "${best_waiting}"
  printf 'POOLS_EXHAUSTED=yes\n'
  printf 'SELECTION_REASON=all-cooldown\n'
}
|
|
1843
|
+
|
|
1844
|
+
flow_resolve_issue_session_prefix() {
  # Resolve the tmux/session prefix used for issue-driven sessions:
  # env vars win, then the session_naming.issue_prefix config key, then
  # the computed default.
  # $1 - optional config file path.
  local cfg="${1:-}"
  local fallback=""
  [[ -n "${cfg}" ]] || cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fallback="$(flow_default_issue_session_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_ISSUE_SESSION_PREFIX F_LOSNING_ISSUE_SESSION_PREFIX" "session_naming.issue_prefix" "${fallback}"
}
|
|
1853
|
+
|
|
1854
|
+
flow_resolve_pr_session_prefix() {
  # Resolve the session prefix used for PR-driven sessions: env vars win,
  # then the session_naming.pr_prefix config key, then the default.
  # $1 - optional config file path.
  local cfg="${1:-}"
  local fallback=""
  [[ -n "${cfg}" ]] || cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fallback="$(flow_default_pr_session_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_PR_SESSION_PREFIX F_LOSNING_PR_SESSION_PREFIX" "session_naming.pr_prefix" "${fallback}"
}
|
|
1863
|
+
|
|
1864
|
+
flow_resolve_issue_branch_prefix() {
  # Resolve the git branch prefix for issue work branches: env vars win,
  # then session_naming.issue_branch_prefix, then the computed default.
  # $1 - optional config file path.
  local cfg="${1:-}"
  local fallback=""
  [[ -n "${cfg}" ]] || cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fallback="$(flow_default_issue_branch_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_ISSUE_BRANCH_PREFIX F_LOSNING_ISSUE_BRANCH_PREFIX" "session_naming.issue_branch_prefix" "${fallback}"
}
|
|
1873
|
+
|
|
1874
|
+
flow_resolve_pr_worktree_branch_prefix() {
  # Resolve the branch prefix used for PR review worktrees: env vars win,
  # then session_naming.pr_worktree_branch_prefix, then the default.
  # $1 - optional config file path.
  local cfg="${1:-}"
  local fallback=""
  [[ -n "${cfg}" ]] || cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fallback="$(flow_default_pr_worktree_branch_prefix "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_PR_WORKTREE_BRANCH_PREFIX F_LOSNING_PR_WORKTREE_BRANCH_PREFIX" "session_naming.pr_worktree_branch_prefix" "${fallback}"
}
|
|
1883
|
+
|
|
1884
|
+
flow_resolve_managed_pr_branch_globs() {
  # Resolve the whitespace-separated list of branch globs that mark a PR
  # branch as managed: env vars win, then
  # session_naming.managed_pr_branch_globs, then the computed default.
  # $1 - optional config file path.
  local cfg="${1:-}"
  local fallback=""
  [[ -n "${cfg}" ]] || cfg="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fallback="$(flow_default_managed_pr_branch_globs "${cfg}")"
  flow_env_or_config "${cfg}" "ACP_MANAGED_PR_BRANCH_GLOBS F_LOSNING_MANAGED_PR_BRANCH_GLOBS" "session_naming.managed_pr_branch_globs" "${fallback}"
}
|
|
1893
|
+
|
|
1894
|
+
flow_escape_regex() {
  # Print $1 with every regex metacharacter backslash-escaped, delegating
  # to Python's re.escape so the result is safe inside a larger pattern.
  local raw_value="${1:-}"
  python3 -c 'import re, sys; print(re.escape(sys.argv[1]))' "${raw_value}"
}
|
|
1903
|
+
|
|
1904
|
+
flow_managed_pr_prefixes() {
  # Emit one managed-PR branch prefix per line, derived from the configured
  # branch globs by stripping a trailing '*' (e.g. "agent/*" -> "agent/").
  #
  # $1 - optional config file path, forwarded to the glob resolver.
  # Outputs: newline-separated prefixes on stdout; empty results skipped.
  local config_file="${1:-}"
  local managed_globs=""
  local branch_glob=""
  local prefix=""
  local had_noglob=0

  managed_globs="$(flow_resolve_managed_pr_branch_globs "${config_file}")"

  # Bug fix: the unquoted ${managed_globs} expansion is intentional word
  # splitting, but it also performed pathname expansion, so a glob such as
  # "agent/*" could silently expand against matching files in the current
  # directory and emit bogus prefixes. Disable globbing around the split,
  # restoring the shell's prior noglob state afterwards.
  [[ $- == *f* ]] && had_noglob=1
  set -f
  for branch_glob in ${managed_globs}; do
    prefix="${branch_glob%\*}"
    [[ -n "${prefix}" ]] || continue
    printf '%s\n' "${prefix}"
  done
  (( had_noglob )) || set +f
}
|
|
1917
|
+
|
|
1918
|
+
flow_managed_pr_prefixes_json() {
  # Render the managed-PR branch prefixes as a JSON array on stdout,
  # e.g. ["agent/", "bot/"]; an empty prefix list yields [].
  #
  # $1 - optional config file path, forwarded to flow_managed_pr_prefixes.
  local config_file="${1:-}"
  local prefixes=()
  local prefix=""

  while IFS= read -r prefix; do
    [[ -n "${prefix}" ]] || continue
    prefixes+=("${prefix}")
  done < <(flow_managed_pr_prefixes "${config_file}")

  # Robustness fix: expand the array only when it is non-empty so an empty
  # prefix list does not trip "unbound variable" under `set -u` on
  # bash < 4.4 (where "${prefixes[@]}" of an empty array errors out).
  python3 - ${prefixes[@]+"${prefixes[@]}"} <<'PY'
import json
import sys

print(json.dumps(sys.argv[1:]))
PY
}
|
|
1935
|
+
|
|
1936
|
+
flow_managed_issue_branch_regex() {
  # Print a PCRE matching managed issue branches such as
  # "agent/foo/issue-123-topic", with the issue number captured in the
  # named group (?<id>...).
  # $1 - optional config file path.
  local cfg="${1:-}"
  local raw=""
  local trimmed=""
  local escaped=""
  local alternation=""

  # Escape each managed prefix (minus any trailing slash) and join the
  # pieces into a regex alternation.
  while IFS= read -r raw; do
    [[ -n "${raw}" ]] || continue
    trimmed="${raw%/}"
    escaped="$(flow_escape_regex "${trimmed}")"
    if [[ -z "${alternation}" ]]; then
      alternation="${escaped}"
    else
      alternation="${alternation}|${escaped}"
    fi
  done < <(flow_managed_pr_prefixes "${cfg}")

  # No configured prefixes: fall back to the adapter's default namespace.
  if [[ -z "${alternation}" ]]; then
    alternation="$(flow_escape_regex "agent/$(flow_resolve_adapter_id "${cfg}")")"
  fi

  printf '^(?:%s)/issue-(?<id>[0-9]+)(?:-|$)\n' "${alternation}"
}
|
|
1960
|
+
|
|
1961
|
+
flow_export_execution_env() {
  # Resolve the execution configuration (coding worker backend, codex
  # profiles, claude/openclaw settings, repo identity, provider-pool
  # selection) and export each value under both the F_LOSNING_* and ACP_*
  # names so downstream tools can read either alias.
  #
  # $1 - optional config file path; resolved from the caller's source
  #      location when omitted. A missing config file is a silent no-op.
  local config_file="${1:-}"

  if [[ -z "${config_file}" ]]; then
    config_file="$(resolve_flow_config_yaml "${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}")"
  fi

  # Nothing to export without a config file; succeed quietly.
  [[ -f "${config_file}" ]] || return 0

  local repo_id=""
  local coding_worker=""
  local provider_quota_cooldowns=""
  local provider_pool_order=""
  local provider_pool_selection=""
  local explicit_coding_worker=""
  local active_provider_pool_name=""
  local active_provider_backend=""
  local active_provider_model=""
  local active_provider_key=""
  local active_provider_next_attempt_epoch=""
  local active_provider_next_attempt_at=""
  local active_provider_last_reason=""
  # Defaults used when no provider pool is consulted at all.
  local active_provider_pools_exhausted="no"
  local active_provider_selection_reason="legacy-config"
  local safe_profile=""
  local bypass_profile=""
  local claude_model=""
  local claude_permission_mode=""
  local claude_effort=""
  local claude_timeout=""
  local claude_max_attempts=""
  local claude_retry_backoff_seconds=""
  local openclaw_model=""
  local openclaw_thinking=""
  local openclaw_timeout=""

  repo_id="$(flow_resolve_repo_id "${config_file}")"
  provider_quota_cooldowns="$(flow_resolve_provider_quota_cooldowns "${config_file}")"
  provider_pool_order="$(flow_resolve_provider_pool_order "${config_file}")"
  # An explicit worker from the environment disables pool selection below.
  explicit_coding_worker="${ACP_CODING_WORKER:-${F_LOSNING_CODING_WORKER:-}}"
  if [[ -z "${explicit_coding_worker}" && -n "${provider_pool_order}" ]]; then
    # `|| true`: a failed selection (pools disabled / none valid) simply
    # leaves provider_pool_selection empty and we fall through to the
    # legacy single-provider config path.
    provider_pool_selection="$(flow_selected_provider_pool_env "${config_file}" || true)"
  fi

  if [[ -n "${provider_pool_selection}" ]]; then
    # Pool path: unpack every field from the selection's KEY=VALUE blob.
    active_provider_pool_name="$(flow_kv_get "${provider_pool_selection}" "POOL_NAME")"
    active_provider_backend="$(flow_kv_get "${provider_pool_selection}" "BACKEND")"
    active_provider_model="$(flow_kv_get "${provider_pool_selection}" "MODEL")"
    active_provider_key="$(flow_kv_get "${provider_pool_selection}" "PROVIDER_KEY")"
    active_provider_next_attempt_epoch="$(flow_kv_get "${provider_pool_selection}" "NEXT_ATTEMPT_EPOCH")"
    active_provider_next_attempt_at="$(flow_kv_get "${provider_pool_selection}" "NEXT_ATTEMPT_AT")"
    active_provider_last_reason="$(flow_kv_get "${provider_pool_selection}" "LAST_REASON")"
    active_provider_pools_exhausted="$(flow_kv_get "${provider_pool_selection}" "POOLS_EXHAUSTED")"
    active_provider_selection_reason="$(flow_kv_get "${provider_pool_selection}" "SELECTION_REASON")"

    # The selected pool's backend becomes the coding worker.
    coding_worker="${active_provider_backend}"
    safe_profile="$(flow_kv_get "${provider_pool_selection}" "SAFE_PROFILE")"
    bypass_profile="$(flow_kv_get "${provider_pool_selection}" "BYPASS_PROFILE")"
    claude_model="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_MODEL")"
    claude_permission_mode="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_PERMISSION_MODE")"
    claude_effort="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_EFFORT")"
    claude_timeout="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_TIMEOUT_SECONDS")"
    claude_max_attempts="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_MAX_ATTEMPTS")"
    claude_retry_backoff_seconds="$(flow_kv_get "${provider_pool_selection}" "CLAUDE_RETRY_BACKOFF_SECONDS")"
    openclaw_model="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_MODEL")"
    openclaw_thinking="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_THINKING")"
    openclaw_timeout="$(flow_kv_get "${provider_pool_selection}" "OPENCLAW_TIMEOUT_SECONDS")"
  else
    # Legacy path: per-setting env/config lookups. Record when the worker
    # choice came from the environment rather than config.
    if [[ -n "${explicit_coding_worker}" ]]; then
      active_provider_selection_reason="env-override"
    fi
    coding_worker="$(flow_env_or_config "${config_file}" "ACP_CODING_WORKER F_LOSNING_CODING_WORKER" "execution.coding_worker" "")"
    safe_profile="$(flow_env_or_config "${config_file}" "ACP_CODEX_PROFILE_SAFE F_LOSNING_CODEX_PROFILE_SAFE" "execution.safe_profile" "")"
    bypass_profile="$(flow_env_or_config "${config_file}" "ACP_CODEX_PROFILE_BYPASS F_LOSNING_CODEX_PROFILE_BYPASS" "execution.bypass_profile" "")"
    claude_model="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_MODEL F_LOSNING_CLAUDE_MODEL" "execution.claude.model" "")"
    claude_permission_mode="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_PERMISSION_MODE F_LOSNING_CLAUDE_PERMISSION_MODE" "execution.claude.permission_mode" "")"
    claude_effort="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_EFFORT F_LOSNING_CLAUDE_EFFORT" "execution.claude.effort" "")"
    claude_timeout="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_TIMEOUT_SECONDS F_LOSNING_CLAUDE_TIMEOUT_SECONDS" "execution.claude.timeout_seconds" "")"
    claude_max_attempts="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_MAX_ATTEMPTS F_LOSNING_CLAUDE_MAX_ATTEMPTS" "execution.claude.max_attempts" "")"
    claude_retry_backoff_seconds="$(flow_env_or_config "${config_file}" "ACP_CLAUDE_RETRY_BACKOFF_SECONDS F_LOSNING_CLAUDE_RETRY_BACKOFF_SECONDS" "execution.claude.retry_backoff_seconds" "")"
    openclaw_model="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_MODEL F_LOSNING_OPENCLAW_MODEL" "execution.openclaw.model" "")"
    openclaw_thinking="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_THINKING F_LOSNING_OPENCLAW_THINKING" "execution.openclaw.thinking" "")"
    openclaw_timeout="$(flow_env_or_config "${config_file}" "ACP_OPENCLAW_TIMEOUT_SECONDS F_LOSNING_OPENCLAW_TIMEOUT_SECONDS" "execution.openclaw.timeout_seconds" "")"
  fi

  # Export phase. Values that may legitimately be empty are only exported
  # when set; provider-pool bookkeeping is always exported (possibly empty)
  # so consumers see a consistent set of keys.
  if [[ -n "${coding_worker}" ]]; then
    export F_LOSNING_CODING_WORKER="${coding_worker}"
    export ACP_CODING_WORKER="${coding_worker}"
  fi
  if [[ -n "${repo_id}" ]]; then
    export F_LOSNING_REPO_ID="${repo_id}"
    export ACP_REPO_ID="${repo_id}"
    export F_LOSNING_GITHUB_REPOSITORY_ID="${repo_id}"
    export ACP_GITHUB_REPOSITORY_ID="${repo_id}"
  fi
  if [[ -n "${provider_quota_cooldowns}" ]]; then
    export F_LOSNING_PROVIDER_QUOTA_COOLDOWNS="${provider_quota_cooldowns}"
    export ACP_PROVIDER_QUOTA_COOLDOWNS="${provider_quota_cooldowns}"
  fi
  export F_LOSNING_PROVIDER_POOL_ORDER="${provider_pool_order}"
  export ACP_PROVIDER_POOL_ORDER="${provider_pool_order}"
  export F_LOSNING_ACTIVE_PROVIDER_POOL_NAME="${active_provider_pool_name}"
  export ACP_ACTIVE_PROVIDER_POOL_NAME="${active_provider_pool_name}"
  export F_LOSNING_ACTIVE_PROVIDER_BACKEND="${active_provider_backend}"
  export ACP_ACTIVE_PROVIDER_BACKEND="${active_provider_backend}"
  export F_LOSNING_ACTIVE_PROVIDER_MODEL="${active_provider_model}"
  export ACP_ACTIVE_PROVIDER_MODEL="${active_provider_model}"
  export F_LOSNING_ACTIVE_PROVIDER_KEY="${active_provider_key}"
  export ACP_ACTIVE_PROVIDER_KEY="${active_provider_key}"
  export F_LOSNING_PROVIDER_POOLS_EXHAUSTED="${active_provider_pools_exhausted}"
  export ACP_PROVIDER_POOLS_EXHAUSTED="${active_provider_pools_exhausted}"
  export F_LOSNING_PROVIDER_POOL_SELECTION_REASON="${active_provider_selection_reason}"
  export ACP_PROVIDER_POOL_SELECTION_REASON="${active_provider_selection_reason}"
  export F_LOSNING_PROVIDER_POOL_NEXT_ATTEMPT_EPOCH="${active_provider_next_attempt_epoch}"
  export ACP_PROVIDER_POOL_NEXT_ATTEMPT_EPOCH="${active_provider_next_attempt_epoch}"
  export F_LOSNING_PROVIDER_POOL_NEXT_ATTEMPT_AT="${active_provider_next_attempt_at}"
  export ACP_PROVIDER_POOL_NEXT_ATTEMPT_AT="${active_provider_next_attempt_at}"
  export F_LOSNING_PROVIDER_POOL_LAST_REASON="${active_provider_last_reason}"
  export ACP_PROVIDER_POOL_LAST_REASON="${active_provider_last_reason}"
  if [[ -n "${safe_profile}" ]]; then
    export F_LOSNING_CODEX_PROFILE_SAFE="${safe_profile}"
    export ACP_CODEX_PROFILE_SAFE="${safe_profile}"
  fi
  if [[ -n "${bypass_profile}" ]]; then
    export F_LOSNING_CODEX_PROFILE_BYPASS="${bypass_profile}"
    export ACP_CODEX_PROFILE_BYPASS="${bypass_profile}"
  fi
  if [[ -n "${claude_model}" ]]; then
    export F_LOSNING_CLAUDE_MODEL="${claude_model}"
    export ACP_CLAUDE_MODEL="${claude_model}"
  fi
  if [[ -n "${claude_permission_mode}" ]]; then
    export F_LOSNING_CLAUDE_PERMISSION_MODE="${claude_permission_mode}"
    export ACP_CLAUDE_PERMISSION_MODE="${claude_permission_mode}"
  fi
  if [[ -n "${claude_effort}" ]]; then
    export F_LOSNING_CLAUDE_EFFORT="${claude_effort}"
    export ACP_CLAUDE_EFFORT="${claude_effort}"
  fi
  if [[ -n "${claude_timeout}" ]]; then
    export F_LOSNING_CLAUDE_TIMEOUT_SECONDS="${claude_timeout}"
    export ACP_CLAUDE_TIMEOUT_SECONDS="${claude_timeout}"
  fi
  if [[ -n "${claude_max_attempts}" ]]; then
    export F_LOSNING_CLAUDE_MAX_ATTEMPTS="${claude_max_attempts}"
    export ACP_CLAUDE_MAX_ATTEMPTS="${claude_max_attempts}"
  fi
  if [[ -n "${claude_retry_backoff_seconds}" ]]; then
    export F_LOSNING_CLAUDE_RETRY_BACKOFF_SECONDS="${claude_retry_backoff_seconds}"
    export ACP_CLAUDE_RETRY_BACKOFF_SECONDS="${claude_retry_backoff_seconds}"
  fi
  if [[ -n "${openclaw_model}" ]]; then
    export F_LOSNING_OPENCLAW_MODEL="${openclaw_model}"
    export ACP_OPENCLAW_MODEL="${openclaw_model}"
  fi
  if [[ -n "${openclaw_thinking}" ]]; then
    export F_LOSNING_OPENCLAW_THINKING="${openclaw_thinking}"
    export ACP_OPENCLAW_THINKING="${openclaw_thinking}"
  fi
  if [[ -n "${openclaw_timeout}" ]]; then
    export F_LOSNING_OPENCLAW_TIMEOUT_SECONDS="${openclaw_timeout}"
    export ACP_OPENCLAW_TIMEOUT_SECONDS="${openclaw_timeout}"
  fi

  # Finally wire up GitHub CLI auth for the resolved repo and project-level
  # env aliases. NOTE(review): side effects of these helpers are not
  # visible here — presumably they export further variables; confirm in
  # their definitions.
  flow_export_github_cli_auth_env "$(flow_resolve_repo_slug "${config_file}")"
  flow_export_project_env_aliases
}
|