shipwright-cli 3.2.0 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (279)
  1. package/.claude/agents/code-reviewer.md +2 -0
  2. package/.claude/agents/devops-engineer.md +2 -0
  3. package/.claude/agents/doc-fleet-agent.md +2 -0
  4. package/.claude/agents/pipeline-agent.md +2 -0
  5. package/.claude/agents/shell-script-specialist.md +2 -0
  6. package/.claude/agents/test-specialist.md +2 -0
  7. package/.claude/hooks/agent-crash-capture.sh +32 -0
  8. package/.claude/hooks/post-tool-use.sh +3 -2
  9. package/.claude/hooks/pre-tool-use.sh +35 -3
  10. package/README.md +4 -4
  11. package/claude-code/hooks/config-change.sh +18 -0
  12. package/claude-code/hooks/instructions-reloaded.sh +7 -0
  13. package/claude-code/hooks/worktree-create.sh +25 -0
  14. package/claude-code/hooks/worktree-remove.sh +20 -0
  15. package/config/code-constitution.json +130 -0
  16. package/dashboard/middleware/auth.ts +134 -0
  17. package/dashboard/middleware/constants.ts +21 -0
  18. package/dashboard/public/index.html +2 -6
  19. package/dashboard/public/styles.css +100 -97
  20. package/dashboard/routes/auth.ts +38 -0
  21. package/dashboard/server.ts +66 -25
  22. package/dashboard/services/config.ts +26 -0
  23. package/dashboard/services/db.ts +118 -0
  24. package/dashboard/src/canvas/pixel-agent.ts +298 -0
  25. package/dashboard/src/canvas/pixel-sprites.ts +440 -0
  26. package/dashboard/src/canvas/shipyard-effects.ts +367 -0
  27. package/dashboard/src/canvas/shipyard-scene.ts +616 -0
  28. package/dashboard/src/canvas/submarine-layout.ts +267 -0
  29. package/dashboard/src/components/header.ts +8 -7
  30. package/dashboard/src/core/router.ts +1 -0
  31. package/dashboard/src/design/submarine-theme.ts +253 -0
  32. package/dashboard/src/main.ts +2 -0
  33. package/dashboard/src/types/api.ts +2 -1
  34. package/dashboard/src/views/activity.ts +2 -1
  35. package/dashboard/src/views/shipyard.ts +39 -0
  36. package/dashboard/types/index.ts +166 -0
  37. package/docs/plans/2026-02-28-compound-audit-and-shipyard-design.md +186 -0
  38. package/docs/plans/2026-02-28-skipper-shipwright-implementation-plan.md +1182 -0
  39. package/docs/plans/2026-02-28-skipper-shipwright-integration-design.md +531 -0
  40. package/docs/plans/2026-03-01-ai-powered-skill-injection-design.md +298 -0
  41. package/docs/plans/2026-03-01-ai-powered-skill-injection-plan.md +1109 -0
  42. package/docs/plans/2026-03-01-capabilities-cleanup-plan.md +658 -0
  43. package/docs/plans/2026-03-01-clean-architecture-plan.md +924 -0
  44. package/docs/plans/2026-03-01-compound-audit-cascade-design.md +191 -0
  45. package/docs/plans/2026-03-01-compound-audit-cascade-plan.md +921 -0
  46. package/docs/plans/2026-03-01-deep-integration-plan.md +851 -0
  47. package/docs/plans/2026-03-01-pipeline-audit-trail-design.md +145 -0
  48. package/docs/plans/2026-03-01-pipeline-audit-trail-plan.md +770 -0
  49. package/docs/plans/2026-03-01-refined-depths-brand-design.md +382 -0
  50. package/docs/plans/2026-03-01-refined-depths-implementation.md +599 -0
  51. package/docs/plans/2026-03-01-skipper-kernel-integration-design.md +203 -0
  52. package/docs/plans/2026-03-01-unified-platform-design.md +272 -0
  53. package/docs/plans/2026-03-07-claude-code-feature-integration-design.md +189 -0
  54. package/docs/plans/2026-03-07-claude-code-feature-integration-plan.md +1165 -0
  55. package/docs/research/BACKLOG_QUICK_REFERENCE.md +352 -0
  56. package/docs/research/CUTTING_EDGE_RESEARCH_2026.md +546 -0
  57. package/docs/research/RESEARCH_INDEX.md +439 -0
  58. package/docs/research/RESEARCH_SOURCES.md +440 -0
  59. package/docs/research/RESEARCH_SUMMARY.txt +275 -0
  60. package/docs/superpowers/specs/2026-03-10-pipeline-quality-revolution-design.md +341 -0
  61. package/package.json +2 -2
  62. package/scripts/lib/adaptive-model.sh +427 -0
  63. package/scripts/lib/adaptive-timeout.sh +316 -0
  64. package/scripts/lib/audit-trail.sh +309 -0
  65. package/scripts/lib/auto-recovery.sh +471 -0
  66. package/scripts/lib/bandit-selector.sh +431 -0
  67. package/scripts/lib/bootstrap.sh +104 -2
  68. package/scripts/lib/causal-graph.sh +455 -0
  69. package/scripts/lib/compat.sh +126 -0
  70. package/scripts/lib/compound-audit.sh +337 -0
  71. package/scripts/lib/constitutional.sh +454 -0
  72. package/scripts/lib/context-budget.sh +359 -0
  73. package/scripts/lib/convergence.sh +594 -0
  74. package/scripts/lib/cost-optimizer.sh +634 -0
  75. package/scripts/lib/daemon-adaptive.sh +10 -0
  76. package/scripts/lib/daemon-dispatch.sh +106 -17
  77. package/scripts/lib/daemon-failure.sh +34 -4
  78. package/scripts/lib/daemon-patrol.sh +23 -2
  79. package/scripts/lib/daemon-poll-github.sh +361 -0
  80. package/scripts/lib/daemon-poll-health.sh +299 -0
  81. package/scripts/lib/daemon-poll.sh +27 -611
  82. package/scripts/lib/daemon-state.sh +112 -66
  83. package/scripts/lib/daemon-triage.sh +10 -0
  84. package/scripts/lib/dod-scorecard.sh +442 -0
  85. package/scripts/lib/error-actionability.sh +300 -0
  86. package/scripts/lib/formal-spec.sh +461 -0
  87. package/scripts/lib/helpers.sh +177 -4
  88. package/scripts/lib/intent-analysis.sh +409 -0
  89. package/scripts/lib/loop-convergence.sh +350 -0
  90. package/scripts/lib/loop-iteration.sh +682 -0
  91. package/scripts/lib/loop-progress.sh +48 -0
  92. package/scripts/lib/loop-restart.sh +185 -0
  93. package/scripts/lib/memory-effectiveness.sh +506 -0
  94. package/scripts/lib/mutation-executor.sh +352 -0
  95. package/scripts/lib/outcome-feedback.sh +521 -0
  96. package/scripts/lib/pipeline-cli.sh +336 -0
  97. package/scripts/lib/pipeline-commands.sh +1216 -0
  98. package/scripts/lib/pipeline-detection.sh +100 -2
  99. package/scripts/lib/pipeline-execution.sh +897 -0
  100. package/scripts/lib/pipeline-github.sh +28 -3
  101. package/scripts/lib/pipeline-intelligence-compound.sh +431 -0
  102. package/scripts/lib/pipeline-intelligence-scoring.sh +407 -0
  103. package/scripts/lib/pipeline-intelligence-skip.sh +181 -0
  104. package/scripts/lib/pipeline-intelligence.sh +100 -1136
  105. package/scripts/lib/pipeline-quality-bash-compat.sh +182 -0
  106. package/scripts/lib/pipeline-quality-checks.sh +17 -715
  107. package/scripts/lib/pipeline-quality-gates.sh +563 -0
  108. package/scripts/lib/pipeline-stages-build.sh +730 -0
  109. package/scripts/lib/pipeline-stages-delivery.sh +965 -0
  110. package/scripts/lib/pipeline-stages-intake.sh +1133 -0
  111. package/scripts/lib/pipeline-stages-monitor.sh +407 -0
  112. package/scripts/lib/pipeline-stages-review.sh +1022 -0
  113. package/scripts/lib/pipeline-stages.sh +59 -2929
  114. package/scripts/lib/pipeline-state.sh +36 -5
  115. package/scripts/lib/pipeline-util.sh +487 -0
  116. package/scripts/lib/policy-learner.sh +438 -0
  117. package/scripts/lib/process-reward.sh +493 -0
  118. package/scripts/lib/project-detect.sh +649 -0
  119. package/scripts/lib/quality-profile.sh +334 -0
  120. package/scripts/lib/recruit-commands.sh +885 -0
  121. package/scripts/lib/recruit-learning.sh +739 -0
  122. package/scripts/lib/recruit-roles.sh +648 -0
  123. package/scripts/lib/reward-aggregator.sh +458 -0
  124. package/scripts/lib/rl-optimizer.sh +362 -0
  125. package/scripts/lib/root-cause.sh +427 -0
  126. package/scripts/lib/scope-enforcement.sh +445 -0
  127. package/scripts/lib/session-restart.sh +493 -0
  128. package/scripts/lib/skill-memory.sh +300 -0
  129. package/scripts/lib/skill-registry.sh +775 -0
  130. package/scripts/lib/spec-driven.sh +476 -0
  131. package/scripts/lib/test-helpers.sh +18 -7
  132. package/scripts/lib/test-holdout.sh +429 -0
  133. package/scripts/lib/test-optimizer.sh +511 -0
  134. package/scripts/shipwright-file-suggest.sh +45 -0
  135. package/scripts/skills/adversarial-quality.md +61 -0
  136. package/scripts/skills/api-design.md +44 -0
  137. package/scripts/skills/architecture-design.md +50 -0
  138. package/scripts/skills/brainstorming.md +43 -0
  139. package/scripts/skills/data-pipeline.md +44 -0
  140. package/scripts/skills/deploy-safety.md +64 -0
  141. package/scripts/skills/documentation.md +38 -0
  142. package/scripts/skills/frontend-design.md +45 -0
  143. package/scripts/skills/generated/.gitkeep +0 -0
  144. package/scripts/skills/generated/_refinements/.gitkeep +0 -0
  145. package/scripts/skills/generated/_refinements/adversarial-quality.patch.md +3 -0
  146. package/scripts/skills/generated/_refinements/architecture-design.patch.md +3 -0
  147. package/scripts/skills/generated/_refinements/brainstorming.patch.md +3 -0
  148. package/scripts/skills/generated/cli-version-management.md +29 -0
  149. package/scripts/skills/generated/collection-system-validation.md +99 -0
  150. package/scripts/skills/generated/large-scale-c-refactoring-coordination.md +97 -0
  151. package/scripts/skills/generated/pattern-matching-similarity-scoring.md +195 -0
  152. package/scripts/skills/generated/test-parallelization-detection.md +65 -0
  153. package/scripts/skills/observability.md +79 -0
  154. package/scripts/skills/performance.md +48 -0
  155. package/scripts/skills/pr-quality.md +49 -0
  156. package/scripts/skills/product-thinking.md +43 -0
  157. package/scripts/skills/security-audit.md +49 -0
  158. package/scripts/skills/systematic-debugging.md +40 -0
  159. package/scripts/skills/testing-strategy.md +47 -0
  160. package/scripts/skills/two-stage-review.md +52 -0
  161. package/scripts/skills/validation-thoroughness.md +55 -0
  162. package/scripts/sw +9 -3
  163. package/scripts/sw-activity.sh +9 -2
  164. package/scripts/sw-adaptive.sh +2 -1
  165. package/scripts/sw-adversarial.sh +2 -1
  166. package/scripts/sw-architecture-enforcer.sh +3 -1
  167. package/scripts/sw-auth.sh +12 -2
  168. package/scripts/sw-autonomous.sh +5 -1
  169. package/scripts/sw-changelog.sh +4 -1
  170. package/scripts/sw-checkpoint.sh +2 -1
  171. package/scripts/sw-ci.sh +5 -1
  172. package/scripts/sw-cleanup.sh +4 -26
  173. package/scripts/sw-code-review.sh +10 -4
  174. package/scripts/sw-connect.sh +2 -1
  175. package/scripts/sw-context.sh +2 -1
  176. package/scripts/sw-cost.sh +48 -3
  177. package/scripts/sw-daemon.sh +66 -9
  178. package/scripts/sw-dashboard.sh +3 -1
  179. package/scripts/sw-db.sh +59 -16
  180. package/scripts/sw-decide.sh +8 -2
  181. package/scripts/sw-decompose.sh +360 -17
  182. package/scripts/sw-deps.sh +4 -1
  183. package/scripts/sw-developer-simulation.sh +4 -1
  184. package/scripts/sw-discovery.sh +325 -2
  185. package/scripts/sw-doc-fleet.sh +4 -1
  186. package/scripts/sw-docs-agent.sh +3 -1
  187. package/scripts/sw-docs.sh +2 -1
  188. package/scripts/sw-doctor.sh +453 -2
  189. package/scripts/sw-dora.sh +4 -1
  190. package/scripts/sw-durable.sh +4 -3
  191. package/scripts/sw-e2e-orchestrator.sh +17 -16
  192. package/scripts/sw-eventbus.sh +7 -1
  193. package/scripts/sw-evidence.sh +364 -12
  194. package/scripts/sw-feedback.sh +550 -9
  195. package/scripts/sw-fix.sh +20 -1
  196. package/scripts/sw-fleet-discover.sh +6 -2
  197. package/scripts/sw-fleet-viz.sh +4 -1
  198. package/scripts/sw-fleet.sh +5 -1
  199. package/scripts/sw-github-app.sh +16 -3
  200. package/scripts/sw-github-checks.sh +3 -2
  201. package/scripts/sw-github-deploy.sh +3 -2
  202. package/scripts/sw-github-graphql.sh +18 -7
  203. package/scripts/sw-guild.sh +5 -1
  204. package/scripts/sw-heartbeat.sh +5 -30
  205. package/scripts/sw-hello.sh +67 -0
  206. package/scripts/sw-hygiene.sh +6 -1
  207. package/scripts/sw-incident.sh +265 -1
  208. package/scripts/sw-init.sh +18 -2
  209. package/scripts/sw-instrument.sh +10 -2
  210. package/scripts/sw-intelligence.sh +42 -6
  211. package/scripts/sw-jira.sh +5 -1
  212. package/scripts/sw-launchd.sh +2 -1
  213. package/scripts/sw-linear.sh +4 -1
  214. package/scripts/sw-logs.sh +4 -1
  215. package/scripts/sw-loop.sh +432 -1128
  216. package/scripts/sw-memory.sh +356 -2
  217. package/scripts/sw-mission-control.sh +6 -1
  218. package/scripts/sw-model-router.sh +481 -26
  219. package/scripts/sw-otel.sh +13 -4
  220. package/scripts/sw-oversight.sh +14 -5
  221. package/scripts/sw-patrol-meta.sh +334 -0
  222. package/scripts/sw-pipeline-composer.sh +5 -1
  223. package/scripts/sw-pipeline-vitals.sh +2 -1
  224. package/scripts/sw-pipeline.sh +53 -2664
  225. package/scripts/sw-pm.sh +12 -5
  226. package/scripts/sw-pr-lifecycle.sh +2 -1
  227. package/scripts/sw-predictive.sh +7 -1
  228. package/scripts/sw-prep.sh +185 -2
  229. package/scripts/sw-ps.sh +5 -25
  230. package/scripts/sw-public-dashboard.sh +15 -3
  231. package/scripts/sw-quality.sh +2 -1
  232. package/scripts/sw-reaper.sh +8 -25
  233. package/scripts/sw-recruit.sh +156 -2303
  234. package/scripts/sw-regression.sh +19 -12
  235. package/scripts/sw-release-manager.sh +3 -1
  236. package/scripts/sw-release.sh +4 -1
  237. package/scripts/sw-remote.sh +3 -1
  238. package/scripts/sw-replay.sh +7 -1
  239. package/scripts/sw-retro.sh +158 -1
  240. package/scripts/sw-review-rerun.sh +3 -1
  241. package/scripts/sw-scale.sh +10 -3
  242. package/scripts/sw-security-audit.sh +6 -1
  243. package/scripts/sw-self-optimize.sh +6 -3
  244. package/scripts/sw-session.sh +9 -3
  245. package/scripts/sw-setup.sh +3 -1
  246. package/scripts/sw-stall-detector.sh +406 -0
  247. package/scripts/sw-standup.sh +15 -7
  248. package/scripts/sw-status.sh +3 -1
  249. package/scripts/sw-strategic.sh +4 -1
  250. package/scripts/sw-stream.sh +7 -1
  251. package/scripts/sw-swarm.sh +18 -6
  252. package/scripts/sw-team-stages.sh +13 -6
  253. package/scripts/sw-templates.sh +5 -29
  254. package/scripts/sw-testgen.sh +7 -1
  255. package/scripts/sw-tmux-pipeline.sh +4 -1
  256. package/scripts/sw-tmux-role-color.sh +2 -0
  257. package/scripts/sw-tmux-status.sh +1 -1
  258. package/scripts/sw-tmux.sh +3 -1
  259. package/scripts/sw-trace.sh +3 -1
  260. package/scripts/sw-tracker-github.sh +3 -0
  261. package/scripts/sw-tracker-jira.sh +3 -0
  262. package/scripts/sw-tracker-linear.sh +3 -0
  263. package/scripts/sw-tracker.sh +3 -1
  264. package/scripts/sw-triage.sh +2 -1
  265. package/scripts/sw-upgrade.sh +3 -1
  266. package/scripts/sw-ux.sh +5 -2
  267. package/scripts/sw-webhook.sh +3 -1
  268. package/scripts/sw-widgets.sh +3 -1
  269. package/scripts/sw-worktree.sh +15 -3
  270. package/scripts/test-skill-injection.sh +1233 -0
  271. package/templates/pipelines/autonomous.json +27 -3
  272. package/templates/pipelines/cost-aware.json +34 -8
  273. package/templates/pipelines/deployed.json +12 -0
  274. package/templates/pipelines/enterprise.json +12 -0
  275. package/templates/pipelines/fast.json +6 -0
  276. package/templates/pipelines/full.json +27 -3
  277. package/templates/pipelines/hotfix.json +6 -0
  278. package/templates/pipelines/standard.json +12 -0
  279. package/templates/pipelines/tdd.json +12 -0
@@ -0,0 +1,965 @@
1
+ # pipeline-stages-delivery.sh — pr, merge, deploy stages
2
+ # Source from pipeline-stages.sh. Requires all pipeline globals and dependencies.
3
+ [[ -n "${_PIPELINE_STAGES_DELIVERY_LOADED:-}" ]] && return 0
4
+ _PIPELINE_STAGES_DELIVERY_LOADED=1
5
+
6
+ stage_pr() {
7
+ CURRENT_STAGE_ID="pr"
8
+ # Consume retry context if this is a retry attempt
9
+ local _retry_ctx="${ARTIFACTS_DIR}/.retry-context-pr.md"
10
+ if [[ -s "$_retry_ctx" ]]; then
11
+ local _pr_retry_hints
12
+ _pr_retry_hints=$(cat "$_retry_ctx" 2>/dev/null || true)
13
+ rm -f "$_retry_ctx"
14
+ fi
15
+ # Load PR quality skills (used as guidance for hygiene checks)
16
+ local _pr_skills=""
17
+ if type skill_load_prompts >/dev/null 2>&1; then
18
+ _pr_skills=$(skill_load_prompts "${INTELLIGENCE_ISSUE_TYPE:-backend}" "pr" 2>/dev/null || true)
19
+ if [[ -n "$_pr_skills" ]]; then
20
+ echo "$_pr_skills" > "${ARTIFACTS_DIR}/.pr-quality-skills.md" 2>/dev/null || true
21
+ fi
22
+ fi
23
+ local plan_file="$ARTIFACTS_DIR/plan.md"
24
+ local test_log="$ARTIFACTS_DIR/test-results.log"
25
+ local review_file="$ARTIFACTS_DIR/review.md"
26
+
27
+ # ── Skip PR in local/no-github mode ──
28
+ if [[ "${NO_GITHUB:-false}" == "true" || "${SHIPWRIGHT_LOCAL:-}" == "1" || "${LOCAL_MODE:-false}" == "true" ]]; then
29
+ info "Skipping PR stage — running in local/no-github mode"
30
+ # Save a PR draft locally for reference
31
+ local branch_name
32
+ branch_name=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown")
33
+ local commit_count
34
+ commit_count=$(_safe_base_log --oneline | wc -l | xargs)
35
+ {
36
+ echo "# PR Draft (local mode)"
37
+ echo ""
38
+ echo "**Branch:** ${branch_name}"
39
+ echo "**Commits:** ${commit_count:-0}"
40
+ echo "**Goal:** ${GOAL:-N/A}"
41
+ echo ""
42
+ echo "## Changes"
43
+ _safe_base_diff --stat || true
44
+ } > ".claude/pr-draft.md" 2>/dev/null || true
45
+ emit_event "pr.skipped" "issue=${ISSUE_NUMBER:-0}" "reason=local_mode"
46
+ return 0
47
+ fi
48
+
49
+ # ── PR Hygiene Checks (informational) ──
50
+ local hygiene_commit_count
51
+ hygiene_commit_count=$(_safe_base_log --oneline | wc -l | xargs)
52
+ hygiene_commit_count="${hygiene_commit_count:-0}"
53
+
54
+ if [[ "$hygiene_commit_count" -gt 20 ]]; then
55
+ warn "PR has ${hygiene_commit_count} commits — consider squashing before merge"
56
+ fi
57
+
58
+ # Check for WIP/fixup/squash commits (expanded patterns)
59
+ local wip_commits
60
+ wip_commits=$(_safe_base_log --oneline | grep -ciE '^[0-9a-f]+ (WIP|fixup!|squash!|TODO|HACK|TEMP|BROKEN|wip[:-]|temp[:-]|broken[:-]|do not merge)' || true)
61
+ wip_commits="${wip_commits:-0}"
62
+ if [[ "$wip_commits" -gt 0 ]]; then
63
+ warn "Branch has ${wip_commits} WIP/fixup/squash/temp commit(s) — consider cleaning up"
64
+ fi
65
+
66
+ # ── PR Quality Gate: reject PRs with no real code changes ──
67
+ local real_files
68
+ real_files=$(_safe_base_diff --name-only | grep -v '^\.claude/' | grep -v '^\.github/' || true)
69
+ if [[ -z "$real_files" ]]; then
70
+ error "No real code changes detected — only pipeline artifacts (.claude/ logs)."
71
+ error "The build agent did not produce meaningful changes. Skipping PR creation."
72
+ emit_event "pr.rejected" "issue=${ISSUE_NUMBER:-0}" "reason=no_real_changes"
73
+ # Mark issue so auto-retry knows not to retry empty builds
74
+ if [[ -n "${ISSUE_NUMBER:-}" && "${ISSUE_NUMBER:-0}" != "0" ]]; then
75
+ gh issue comment "$ISSUE_NUMBER" --body "<!-- SHIPWRIGHT-NO-CHANGES: true -->" 2>/dev/null || true
76
+ fi
77
+ return 1
78
+ fi
79
+ local real_file_count
80
+ real_file_count=$(echo "$real_files" | wc -l | xargs)
81
+ info "PR quality gate: ${real_file_count} real file(s) changed"
82
+
83
+ # Commit any uncommitted changes left by the build agent
84
+ if ! git diff --quiet 2>/dev/null || ! git diff --cached --quiet 2>/dev/null; then
85
+ info "Committing remaining uncommitted changes..."
86
+ git add -A 2>/dev/null || true
87
+ git commit -m "chore: pipeline cleanup — commit remaining build changes" --no-verify 2>/dev/null || true
88
+ fi
89
+
90
+ # Auto-rebase onto latest base branch before PR
91
+ auto_rebase || {
92
+ warn "Rebase/merge failed — pushing as-is"
93
+ }
94
+
95
+ # Push branch
96
+ info "Pushing branch: $GIT_BRANCH"
97
+ git push -u origin "$GIT_BRANCH" --force-with-lease 2>/dev/null || {
98
+ # Retry with regular push if force-with-lease fails (first push)
99
+ git push -u origin "$GIT_BRANCH" 2>/dev/null || {
100
+ error "Failed to push branch"
101
+ return 1
102
+ }
103
+ }
104
+
105
+ # ── Developer Simulation (pre-PR review) ──
106
+ local simulation_summary=""
107
+ if type simulation_review >/dev/null 2>&1; then
108
+ local sim_enabled
109
+ sim_enabled=$(jq -r '.intelligence.simulation_enabled // false' "$PIPELINE_CONFIG" 2>/dev/null || echo "false")
110
+ # Also check daemon-config
111
+ local daemon_cfg=".claude/daemon-config.json"
112
+ if [[ "$sim_enabled" != "true" && -f "$daemon_cfg" ]]; then
113
+ sim_enabled=$(jq -r '.intelligence.simulation_enabled // false' "$daemon_cfg" 2>/dev/null || echo "false")
114
+ fi
115
+ if [[ "$sim_enabled" == "true" ]]; then
116
+ info "Running developer simulation review..."
117
+ local diff_for_sim
118
+ diff_for_sim=$(_safe_base_diff || true)
119
+ if [[ -n "$diff_for_sim" ]]; then
120
+ local sim_result
121
+ sim_result=$(simulation_review "$diff_for_sim" "${GOAL:-}" 2>/dev/null || echo "")
122
+ if [[ -n "$sim_result" && "$sim_result" != *'"error"'* ]]; then
123
+ echo "$sim_result" > "$ARTIFACTS_DIR/simulation-review.json"
124
+ local sim_count
125
+ sim_count=$(echo "$sim_result" | jq 'length' 2>/dev/null || echo "0")
126
+ simulation_summary="**Developer simulation:** ${sim_count} reviewer concerns pre-addressed"
127
+ success "Simulation complete: ${sim_count} concerns found and addressed"
128
+ emit_event "simulation.complete" "issue=${ISSUE_NUMBER:-0}" "concerns=${sim_count}"
129
+ else
130
+ info "Simulation returned no actionable concerns"
131
+ fi
132
+ fi
133
+ fi
134
+ fi
135
+
136
+ # ── Architecture Validation (pre-PR check) ──
137
+ local arch_summary=""
138
+ if type architecture_validate_changes >/dev/null 2>&1; then
139
+ local arch_enabled
140
+ arch_enabled=$(jq -r '.intelligence.architecture_enabled // false' "$PIPELINE_CONFIG" 2>/dev/null || echo "false")
141
+ local daemon_cfg=".claude/daemon-config.json"
142
+ if [[ "$arch_enabled" != "true" && -f "$daemon_cfg" ]]; then
143
+ arch_enabled=$(jq -r '.intelligence.architecture_enabled // false' "$daemon_cfg" 2>/dev/null || echo "false")
144
+ fi
145
+ if [[ "$arch_enabled" == "true" ]]; then
146
+ info "Validating architecture..."
147
+ local diff_for_arch
148
+ diff_for_arch=$(_safe_base_diff || true)
149
+ if [[ -n "$diff_for_arch" ]]; then
150
+ local arch_result
151
+ arch_result=$(architecture_validate_changes "$diff_for_arch" "" 2>/dev/null || echo "")
152
+ if [[ -n "$arch_result" && "$arch_result" != *'"error"'* ]]; then
153
+ echo "$arch_result" > "$ARTIFACTS_DIR/architecture-validation.json"
154
+ local violation_count
155
+ violation_count=$(echo "$arch_result" | jq '[.violations[]? | select(.severity == "critical" or .severity == "high")] | length' 2>/dev/null || echo "0")
156
+ arch_summary="**Architecture validation:** ${violation_count} violations"
157
+ if [[ "$violation_count" -gt 0 ]]; then
158
+ warn "Architecture: ${violation_count} high/critical violations found"
159
+ else
160
+ success "Architecture validation passed"
161
+ fi
162
+ emit_event "architecture.validated" "issue=${ISSUE_NUMBER:-0}" "violations=${violation_count}"
163
+ else
164
+ info "Architecture validation returned no results"
165
+ fi
166
+ fi
167
+ fi
168
+ fi
169
+
170
+ # Pre-PR diff gate — verify meaningful code changes exist (not just bookkeeping)
171
+ local real_changes
172
+ real_changes=$(_safe_base_diff --name-only \
173
+ -- . ':!.claude/loop-state.md' ':!.claude/pipeline-state.md' \
174
+ ':!.claude/pipeline-artifacts/*' ':!**/progress.md' \
175
+ ':!**/error-summary.json' | wc -l | xargs || true)
176
+ real_changes="${real_changes:-0}"
177
+ if [[ "${real_changes:-0}" -eq 0 ]]; then
178
+ error "No meaningful code changes detected — only bookkeeping files modified"
179
+ error "Refusing to create PR with zero real changes"
180
+ return 1
181
+ fi
182
+ info "Pre-PR diff check: ${real_changes} real files changed"
183
+
184
+ # Build PR title — prefer GOAL over plan file first line
185
+ # (plan file first line often contains Claude analysis text, not a clean title)
186
+ local pr_title=""
187
+ if [[ -n "${GOAL:-}" ]]; then
188
+ pr_title=$(echo "$GOAL" | cut -c1-70)
189
+ fi
190
+ if [[ -z "$pr_title" ]] && [[ -s "$plan_file" ]]; then
191
+ pr_title=$(head -1 "$plan_file" 2>/dev/null | sed 's/^#* *//' | cut -c1-70)
192
+ fi
193
+ [[ -z "$pr_title" ]] && pr_title="Pipeline changes for issue ${ISSUE_NUMBER:-unknown}"
194
+
195
+ # Sanitize: reject PR titles that look like error messages
196
+ if echo "$pr_title" | grep -qiE 'Invalid API|API key|authentication_error|rate_limit|CLI error|no useful output'; then
197
+ warn "PR title looks like an error message: $pr_title"
198
+ pr_title="Pipeline changes for issue ${ISSUE_NUMBER:-unknown}"
199
+ fi
200
+
201
+ # Build comprehensive PR body
202
+ local plan_summary=""
203
+ if [[ -s "$plan_file" ]]; then
204
+ plan_summary=$(head -20 "$plan_file" 2>/dev/null | tail -15)
205
+ fi
206
+
207
+ local test_summary=""
208
+ if [[ -s "$test_log" ]]; then
209
+ test_summary=$(tail -10 "$test_log" | sed 's/\x1b\[[0-9;]*m//g')
210
+ fi
211
+
212
+ local review_summary=""
213
+ if [[ -s "$review_file" ]]; then
214
+ local total_issues=0
215
+ # Try JSON structured output first
216
+ if head -1 "$review_file" 2>/dev/null | grep -q '^{' 2>/dev/null; then
217
+ total_issues=$(jq -r '.issues | length' "$review_file" 2>/dev/null || echo "0")
218
+ fi
219
+ # Grep fallback for markdown
220
+ if [[ "${total_issues:-0}" -eq 0 ]]; then
221
+ total_issues=$(grep -ciE '\*\*\[?(Critical|Bug|Security|Warning|Suggestion)\]?\*\*' "$review_file" 2>/dev/null || true)
222
+ total_issues="${total_issues:-0}"
223
+ fi
224
+ review_summary="**Code review:** $total_issues issues found"
225
+ fi
226
+
227
+ local closes_line=""
228
+ [[ -n "${GITHUB_ISSUE:-}" ]] && closes_line="Closes ${GITHUB_ISSUE}"
229
+
230
+ local diff_stats
231
+ diff_stats=$(_safe_base_diff --stat | tail -1 || echo "")
232
+
233
+ local commit_count
234
+ commit_count=$(_safe_base_log --oneline | wc -l | xargs)
235
+
236
+ local total_dur=""
237
+ if [[ -n "$PIPELINE_START_EPOCH" ]]; then
238
+ total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
239
+ fi
240
+
241
+ local pr_body
242
+ pr_body="$(cat <<EOF
243
+ ## Summary
244
+ ${plan_summary:-$GOAL}
245
+
246
+ ## Changes
247
+ ${diff_stats}
248
+ ${commit_count} commit(s) via \`shipwright pipeline\` (${PIPELINE_NAME})
249
+
250
+ ## Test Results
251
+ \`\`\`
252
+ ${test_summary:-No test output}
253
+ \`\`\`
254
+
255
+ ${review_summary}
256
+ ${simulation_summary}
257
+ ${arch_summary}
258
+
259
+ ${closes_line}
260
+
261
+ ---
262
+
263
+ | Metric | Value |
264
+ |--------|-------|
265
+ | Pipeline | \`${PIPELINE_NAME}\` |
266
+ | Duration | ${total_dur:-—} |
267
+ | Model | ${MODEL:-opus} |
268
+ | Agents | ${AGENTS:-1} |
269
+
270
+ Generated by \`shipwright pipeline\`
271
+ EOF
272
+ )"
273
+
274
+ # Verify required evidence before PR (merge policy enforcement)
275
+ local risk_tier
276
+ risk_tier="low"
277
+ if [[ -f "$REPO_DIR/config/policy.json" ]]; then
278
+ local changed_files
279
+ changed_files=$(_safe_base_diff --name-only || true)
280
+ if [[ -n "$changed_files" ]]; then
281
+ local policy_file="$REPO_DIR/config/policy.json"
282
+ check_tier_match() {
283
+ local tier="$1"
284
+ local patterns
285
+ patterns=$(jq -r ".riskTierRules.${tier}[]? // empty" "$policy_file" 2>/dev/null)
286
+ [[ -z "$patterns" ]] && return 1
287
+ while IFS= read -r pattern; do
288
+ [[ -z "$pattern" ]] && continue
289
+ local regex
290
+ regex=$(echo "$pattern" | sed 's/\./\\./g; s/\*\*/DOUBLESTAR/g; s/\*/[^\/]*/g; s/DOUBLESTAR/.*/g')
291
+ while IFS= read -r file; do
292
+ [[ -z "$file" ]] && continue
293
+ if echo "$file" | grep -qE "^${regex}$"; then
294
+ return 0
295
+ fi
296
+ done <<< "$changed_files"
297
+ done <<< "$patterns"
298
+ return 1
299
+ }
300
+ check_tier_match "critical" && risk_tier="critical"
301
+ check_tier_match "high" && [[ "$risk_tier" != "critical" ]] && risk_tier="high"
302
+ check_tier_match "medium" && [[ "$risk_tier" != "critical" && "$risk_tier" != "high" ]] && risk_tier="medium"
303
+ fi
304
+ fi
305
+
306
+ local required_evidence
307
+ required_evidence=$(jq -r ".mergePolicy.\"$risk_tier\".requiredEvidence // [] | .[]" "$REPO_DIR/config/policy.json" 2>/dev/null)
308
+
309
+ if [[ -n "$required_evidence" ]]; then
310
+ local evidence_dir="$REPO_DIR/.claude/evidence"
311
+ local missing_evidence=()
312
+ while IFS= read -r etype; do
313
+ [[ -z "$etype" ]] && continue
314
+ local has_evidence=false
315
+ for f in "$evidence_dir"/*"$etype"*; do
316
+ [[ -f "$f" ]] && has_evidence=true && break
317
+ done
318
+ [[ "$has_evidence" != "true" ]] && missing_evidence+=("$etype")
319
+ done <<< "$required_evidence"
320
+
321
+ if [[ ${#missing_evidence[@]} -gt 0 ]]; then
322
+ warn "Missing required evidence for $risk_tier tier: ${missing_evidence[*]}"
323
+ emit_event "evidence.missing" "{\"tier\":\"$risk_tier\",\"missing\":\"${missing_evidence[*]}\"}"
324
+ # Collect missing evidence
325
+ if [[ -x "$SCRIPT_DIR/sw-evidence.sh" ]]; then
326
+ for etype in "${missing_evidence[@]}"; do
327
+ (cd "$REPO_DIR" && bash "$SCRIPT_DIR/sw-evidence.sh" capture "$etype" 2>/dev/null) || warn "Failed to collect $etype evidence"
328
+ done
329
+ fi
330
+ fi
331
+ fi
332
+
333
+ # ── PR Size Gate: Check against limits from quality profile ──
334
+ if type check_pr_size >/dev/null 2>&1; then
335
+ local quality_profile=".claude/quality-profile.json"
336
+ local max_pr_lines=500
337
+
338
+ if [[ -f "$quality_profile" ]]; then
339
+ max_pr_lines=$(jq -r '.quality.max_pr_lines // 500' "$quality_profile" 2>/dev/null || echo "500")
340
+ fi
341
+
342
+ if ! check_pr_size "origin/$BASE_BRANCH" "$max_pr_lines"; then
343
+ # Get actual PR stats for error message
344
+ local stats pr_stats insertions deletions files_changed total_lines
345
+ stats=$(get_pr_stats "origin/$BASE_BRANCH" 2>/dev/null || echo "{}")
346
+ insertions=$(echo "$stats" | jq '.insertions // 0' 2>/dev/null || echo "0")
347
+ deletions=$(echo "$stats" | jq '.deletions // 0' 2>/dev/null || echo "0")
348
+ files_changed=$(echo "$stats" | jq '.files_changed // 0' 2>/dev/null || echo "0")
349
+ total_lines=$((insertions + deletions))
350
+
351
+ error "PR size gate failed: ${total_lines} lines of change (max: ${max_pr_lines})"
352
+ error "Files changed: ${files_changed} | Insertions: +${insertions} | Deletions: -${deletions}"
353
+
354
+ # Check if scope enforcement is blocking
355
+ local scope_enforcement_blocks=false
356
+ if [[ -f "$quality_profile" ]]; then
357
+ scope_enforcement_blocks=$(jq -r '.scope.unplanned_files_block // false' "$quality_profile" 2>/dev/null)
358
+ fi
359
+
360
+ if [[ "$scope_enforcement_blocks" == "true" ]]; then
361
+ error "Scope enforcement is enabled — decompose into smaller PRs"
362
+ emit_event "pr.rejected" "issue=${ISSUE_NUMBER:-0}" "reason=oversized_pr" "lines=${total_lines}" "max=${max_pr_lines}"
363
+ return 1
364
+ else
365
+ warn "PR size exceeds limit (enforcing disabled) — consider splitting into smaller PRs"
366
+ emit_event "pr.warning" "issue=${ISSUE_NUMBER:-0}" "reason=oversized_pr" "lines=${total_lines}" "max=${max_pr_lines}"
367
+ fi
368
+ fi
369
+ fi
370
+
371
+ # Build gh pr create args
372
+ local pr_args=(--title "$pr_title" --body "$pr_body" --base "$BASE_BRANCH")
373
+
374
+ # Propagate labels from issue + CLI
375
+ local all_labels="${LABELS}"
376
+ if [[ -n "$ISSUE_LABELS" ]]; then
377
+ if [[ -n "$all_labels" ]]; then
378
+ all_labels="${all_labels},${ISSUE_LABELS}"
379
+ else
380
+ all_labels="$ISSUE_LABELS"
381
+ fi
382
+ fi
383
+ if [[ -n "$all_labels" ]]; then
384
+ pr_args+=(--label "$all_labels")
385
+ fi
386
+
387
+ # Auto-detect or use provided reviewers
388
+ local reviewers="${REVIEWERS}"
389
+ if [[ -z "$reviewers" ]]; then
390
+ reviewers=$(detect_reviewers)
391
+ fi
392
+ if [[ -n "$reviewers" ]]; then
393
+ pr_args+=(--reviewer "$reviewers")
394
+ info "Reviewers: ${DIM}$reviewers${RESET}"
395
+ fi
396
+
397
+ # Propagate milestone
398
+ if [[ -n "$ISSUE_MILESTONE" ]]; then
399
+ pr_args+=(--milestone "$ISSUE_MILESTONE")
400
+ info "Milestone: ${DIM}$ISSUE_MILESTONE${RESET}"
401
+ fi
402
+
403
+ # Check for existing open PR on this branch to avoid duplicates (issue #12)
404
+ local pr_url=""
405
+ local existing_pr
406
+ existing_pr=$(gh pr list --head "$GIT_BRANCH" --state open --json number,url --jq '.[0]' 2>/dev/null || echo "")
407
+ if [[ -n "$existing_pr" && "$existing_pr" != "null" ]]; then
408
+ local existing_pr_number existing_pr_url
409
+ existing_pr_number=$(echo "$existing_pr" | jq -r '.number' 2>/dev/null || echo "")
410
+ existing_pr_url=$(echo "$existing_pr" | jq -r '.url' 2>/dev/null || echo "")
411
+ info "Updating existing PR #$existing_pr_number instead of creating duplicate"
412
+ gh pr edit "$existing_pr_number" --title "$pr_title" --body "$pr_body" 2>/dev/null || true
413
+ pr_url="$existing_pr_url"
414
+ else
415
+ info "Creating PR..."
416
+ local pr_stderr pr_exit=0
417
+ pr_url=$(gh pr create "${pr_args[@]}" 2>/tmp/shipwright-pr-stderr.txt) || pr_exit=$?
418
+ pr_stderr=$(cat /tmp/shipwright-pr-stderr.txt 2>/dev/null || true)
419
+ rm -f /tmp/shipwright-pr-stderr.txt
420
+
421
+ # gh pr create may return non-zero for reviewer issues but still create the PR
422
+ if [[ "$pr_exit" -ne 0 ]]; then
423
+ if [[ "$pr_url" == *"github.com"* ]]; then
424
+ # PR was created but something non-fatal failed (e.g., reviewer not found)
425
+ warn "PR created with warnings: ${pr_stderr:-unknown}"
426
+ else
427
+ error "PR creation failed: ${pr_stderr:-$pr_url}"
428
+ return 1
429
+ fi
430
+ fi
431
+ fi
432
+
433
+ success "PR created: ${BOLD}$pr_url${RESET}"
434
+ echo "$pr_url" > "$ARTIFACTS_DIR/pr-url.txt"
435
+
436
+ # Extract PR number
437
+ PR_NUMBER=$(echo "$pr_url" | grep -oE '[0-9]+$' || true)
438
+
439
+ # ── Intelligent Reviewer Selection (GraphQL-enhanced) ──
440
+ if [[ "${NO_GITHUB:-false}" != "true" && -n "$PR_NUMBER" && -z "$reviewers" ]]; then
441
+ local reviewer_assigned=false
442
+
443
+ # Try CODEOWNERS-based routing via GraphQL API
444
+ if type gh_codeowners >/dev/null 2>&1 && [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
445
+ local codeowners_json
446
+ codeowners_json=$(gh_codeowners "$REPO_OWNER" "$REPO_NAME" 2>/dev/null || echo "[]")
447
+ if [[ "$codeowners_json" != "[]" && -n "$codeowners_json" ]]; then
448
+ local changed_files
449
+ changed_files=$(_safe_base_diff --name-only || true)
450
+ if [[ -n "$changed_files" ]]; then
451
+ local co_reviewers
452
+ co_reviewers=$(echo "$codeowners_json" | jq -r '.[].owners[]' 2>/dev/null | sort -u | head -3 || true)
453
+ if [[ -n "$co_reviewers" ]]; then
454
+ local rev
455
+ while IFS= read -r rev; do
456
+ rev="${rev#@}"
457
+ [[ -n "$rev" ]] && gh pr edit "$PR_NUMBER" --add-reviewer "$rev" 2>/dev/null || true
458
+ done <<< "$co_reviewers"
459
+ info "Requested review from CODEOWNERS: $(echo "$co_reviewers" | tr '\n' ',' | sed 's/,$//')"
460
+ reviewer_assigned=true
461
+ fi
462
+ fi
463
+ fi
464
+ fi
465
+
466
+ # Fallback: contributor-based routing via GraphQL API
467
+ if [[ "$reviewer_assigned" != "true" ]] && type gh_contributors >/dev/null 2>&1 && [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
468
+ local contributors_json
469
+ contributors_json=$(gh_contributors "$REPO_OWNER" "$REPO_NAME" 2>/dev/null || echo "[]")
470
+ local top_contributor
471
+ top_contributor=$(echo "$contributors_json" | jq -r '.[0].login // ""' 2>/dev/null || echo "")
472
+ local current_user
473
+ current_user=$(gh api user --jq '.login' 2>/dev/null || echo "")
474
+ if [[ -n "$top_contributor" && "$top_contributor" != "$current_user" ]]; then
475
+ gh pr edit "$PR_NUMBER" --add-reviewer "$top_contributor" 2>/dev/null || true
476
+ info "Requested review from top contributor: $top_contributor"
477
+ reviewer_assigned=true
478
+ fi
479
+ fi
480
+
481
+ # Final fallback: auto-approve if no reviewers assigned
482
+ if [[ "$reviewer_assigned" != "true" ]]; then
483
+ gh pr review "$PR_NUMBER" --approve 2>/dev/null || warn "Could not auto-approve PR"
484
+ fi
485
+ fi
486
+
487
+ # Update issue with PR link
488
+ if [[ -n "$ISSUE_NUMBER" ]]; then
489
+ gh_remove_label "$ISSUE_NUMBER" "pipeline/in-progress"
490
+ gh_add_labels "$ISSUE_NUMBER" "pipeline/pr-created"
491
+ gh_comment_issue "$ISSUE_NUMBER" "🎉 **PR created:** ${pr_url}
492
+
493
+ Pipeline duration so far: ${total_dur:-unknown}"
494
+
495
+ # Notify tracker of review/PR creation
496
+ "$SCRIPT_DIR/sw-tracker.sh" notify "review" "$ISSUE_NUMBER" "$pr_url" 2>/dev/null || true
497
+ fi
498
+
499
+ # Wait for CI if configured
500
+ local wait_ci
501
+ wait_ci=$(jq -r --arg id "pr" '(.stages[] | select(.id == $id) | .config.wait_ci) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
502
+ if [[ "$wait_ci" == "true" ]]; then
503
+ info "Waiting for CI checks..."
504
+ gh pr checks --watch 2>/dev/null || warn "CI checks did not all pass"
505
+ fi
506
+
507
+ log_stage "pr" "PR created: $pr_url (${reviewers:+reviewers: $reviewers})"
508
+ }
509
+
510
#######################################
# stage_merge — merge the open PR for the current branch.
# Gate order: oversight verdict (diff + review criticals), human approval
# gate, branch-protection review count. Then waits for CI with an adaptive
# timeout (p90 of recorded history × 1.5, clamped to [120, 1800]s),
# optionally auto-approves, and merges via gh (auto-merge or direct).
# Globals (read): NO_GITHUB, SCRIPT_DIR, SKIP_GATES, ARTIFACTS_DIR, GOAL,
#   ISSUE_NUMBER, HOME, REPO_OWNER, REPO_NAME, BASE_BRANCH,
#   PIPELINE_CONFIG, PROJECT_ROOT, GIT_BRANCH
# Globals (written): CURRENT_STAGE_ID
# Returns: 0 on merge or deliberate skip, 1 when a gate blocks or merge fails.
#######################################
stage_merge() {
  CURRENT_STAGE_ID="merge"

  if [[ "$NO_GITHUB" == "true" ]]; then
    info "Merge stage skipped (--no-github)"
    return 0
  fi

  # ── Oversight gate: merge block on verdict (diff + review criticals + goal) ──
  if [[ -x "$SCRIPT_DIR/sw-oversight.sh" ]] && [[ "${SKIP_GATES:-false}" != "true" ]]; then
    local merge_diff_file="${ARTIFACTS_DIR}/review-diff.patch"
    local merge_review_file="${ARTIFACTS_DIR}/review.md"
    if [[ ! -s "$merge_diff_file" ]]; then
      _safe_base_diff > "$merge_diff_file" 2>/dev/null || true
    fi
    if [[ -s "$merge_diff_file" ]]; then
      local _merge_critical _merge_sec _merge_blocking _merge_reject
      # Count blocking findings in the review artifact; grep -c emits a
      # number even on no match, and a missing file falls back to 0.
      _merge_critical=$(grep -ciE '\*\*\[?Critical\]?\*\*' "$merge_review_file" 2>/dev/null || true)
      _merge_critical="${_merge_critical:-0}"
      _merge_sec=$(grep -ciE '\*\*\[?Security\]?\*\*' "$merge_review_file" 2>/dev/null || true)
      _merge_sec="${_merge_sec:-0}"
      _merge_blocking=$((${_merge_critical:-0} + ${_merge_sec:-0}))
      [[ "$_merge_blocking" -gt 0 ]] && _merge_reject="Review found ${_merge_blocking} critical/security issue(s)"
      if ! bash "$SCRIPT_DIR/sw-oversight.sh" gate --diff "$merge_diff_file" --description "${GOAL:-Pipeline merge}" --reject-if "${_merge_reject:-}" >/dev/null 2>&1; then
        error "Oversight gate rejected — blocking merge"
        emit_event "merge.oversight_blocked" "issue=${ISSUE_NUMBER:-0}"
        log_stage "merge" "BLOCKED: oversight gate rejected"
        return 1
      fi
    fi
  fi

  # ── Approval gates: block if merge requires approval and pending for this issue ──
  local ag_file="${HOME}/.shipwright/approval-gates.json"
  if [[ -f "$ag_file" ]] && [[ "${SKIP_GATES:-false}" != "true" ]]; then
    local ag_enabled ag_stages ag_pending_merge ag_issue_num
    ag_enabled=$(jq -r '.enabled // false' "$ag_file" 2>/dev/null || echo "false")
    ag_stages=$(jq -r '.stages // [] | if type == "array" then .[] else empty end' "$ag_file" 2>/dev/null || true)
    # Coerce the issue number to a plain integer (empty/garbage → 0) so it
    # can be passed safely via jq --argjson.
    ag_issue_num=$(echo "${ISSUE_NUMBER:-0}" | awk '{print $1+0}')
    if [[ "$ag_enabled" == "true" ]] && echo "$ag_stages" | grep -qx "merge" 2>/dev/null; then
      local ha_file="${ARTIFACTS_DIR}/human-approval.txt"
      local ha_approved="false"
      if [[ -f "$ha_file" ]]; then
        ha_approved=$(jq -r --arg stage "merge" 'select(.stage == $stage) | .approved // false' "$ha_file" 2>/dev/null || echo "false")
      fi
      if [[ "$ha_approved" != "true" ]]; then
        ag_pending_merge=$(jq -r --argjson issue "$ag_issue_num" --arg stage "merge" \
          '[.pending[]? | select(.issue == $issue and .stage == $stage)] | length' "$ag_file" 2>/dev/null || echo "0")
        # Register a pending approval entry exactly once per issue+stage.
        if [[ "${ag_pending_merge:-0}" -eq 0 ]]; then
          local req_at tmp_ag
          req_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || true)
          tmp_ag=$(mktemp "${HOME}/.shipwright/approval-gates.json.XXXXXX" 2>/dev/null || mktemp)
          jq --argjson issue "$ag_issue_num" --arg stage "merge" --arg requested "${req_at}" \
            '.pending += [{"issue": $issue, "stage": $stage, "requested_at": $requested}]' "$ag_file" > "$tmp_ag" 2>/dev/null && mv "$tmp_ag" "$ag_file" || rm -f "$tmp_ag"
        fi
        info "Merge requires approval — awaiting human approval via dashboard"
        emit_event "merge.approval_pending" "issue=${ISSUE_NUMBER:-0}"
        log_stage "merge" "BLOCKED: approval gate pending"
        return 1
      fi
    fi
  fi

  # ── Branch Protection Check ──
  if type gh_branch_protection >/dev/null 2>&1 && [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
    local protection_json
    protection_json=$(gh_branch_protection "$REPO_OWNER" "$REPO_NAME" "${BASE_BRANCH:-main}" 2>/dev/null || echo '{"protected": false}')
    local is_protected
    is_protected=$(echo "$protection_json" | jq -r '.protected // false' 2>/dev/null || echo "false")
    if [[ "$is_protected" == "true" ]]; then
      local required_reviews
      required_reviews=$(echo "$protection_json" | jq -r '.required_pull_request_reviews.required_approving_review_count // 0' 2>/dev/null || echo "0")
      local required_checks
      required_checks=$(echo "$protection_json" | jq -r '[.required_status_checks.contexts // [] | .[]] | length' 2>/dev/null || echo "0")

      info "Branch protection: ${required_reviews} required review(s), ${required_checks} required check(s)"

      if [[ "$required_reviews" -gt 0 ]]; then
        # If the PR lacks the required approvals, leave it for manual merge
        # (return 0 — this is a graceful skip, not a failure).
        local prot_pr_number
        prot_pr_number=$(gh pr list --head "$GIT_BRANCH" --json number --jq '.[0].number' 2>/dev/null || echo "")
        if [[ -n "$prot_pr_number" ]]; then
          local approvals
          approvals=$(gh pr view "$prot_pr_number" --json reviews --jq '[.reviews[] | select(.state == "APPROVED")] | length' 2>/dev/null || echo "0")
          if [[ "$approvals" -lt "$required_reviews" ]]; then
            warn "PR has $approvals approval(s), needs $required_reviews — skipping auto-merge"
            info "PR is ready for manual merge after required reviews"
            emit_event "merge.blocked" "issue=${ISSUE_NUMBER:-0}" "reason=insufficient_reviews" "have=$approvals" "need=$required_reviews"
            return 0
          fi
        fi
      fi
    fi
  fi

  local merge_method wait_ci_timeout auto_delete_branch auto_merge auto_approve merge_strategy
  merge_method=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.merge_method) // "squash"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$merge_method" || "$merge_method" == "null" ]] && merge_method="squash"
  wait_ci_timeout=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.wait_ci_timeout_s) // 0' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$wait_ci_timeout" || "$wait_ci_timeout" == "null" ]] && wait_ci_timeout=0

  # Adaptive CI timeout: 90th percentile of historical times × 1.5 safety margin.
  # The trailing 2>/dev/null guards the integer test against non-numeric config.
  if [[ "$wait_ci_timeout" -eq 0 ]] 2>/dev/null; then
    local repo_hash_ci
    repo_hash_ci=$(echo -n "$PROJECT_ROOT" | shasum -a 256 2>/dev/null | cut -c1-12 || echo "unknown")
    local ci_times_file="${HOME}/.shipwright/baselines/${repo_hash_ci}/ci-times.json"
    if [[ -f "$ci_times_file" ]]; then
      local p90_time
      p90_time=$(jq '
        .times | sort |
        (length * 0.9 | floor) as $idx |
        .[$idx] // 600
      ' "$ci_times_file" 2>/dev/null || echo "0")
      if [[ -n "$p90_time" ]] && awk -v t="$p90_time" 'BEGIN{exit !(t > 0)}' 2>/dev/null; then
        # 1.5x safety margin, clamped to [120, 1800]
        wait_ci_timeout=$(awk -v p90="$p90_time" 'BEGIN{
          t = p90 * 1.5;
          if (t < 120) t = 120;
          if (t > 1800) t = 1800;
          printf "%d", t
        }')
      fi
    fi
    # Default fallback if no history
    [[ "$wait_ci_timeout" -eq 0 ]] && wait_ci_timeout=600
  fi
  auto_delete_branch=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.auto_delete_branch) // "true"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_delete_branch" || "$auto_delete_branch" == "null" ]] && auto_delete_branch="true"
  auto_merge=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.auto_merge) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_merge" || "$auto_merge" == "null" ]] && auto_merge="false"
  auto_approve=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.auto_approve) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_approve" || "$auto_approve" == "null" ]] && auto_approve="false"
  merge_strategy=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.merge_strategy) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$merge_strategy" || "$merge_strategy" == "null" ]] && merge_strategy=""
  # merge_strategy overrides merge_method if set (squash/merge/rebase)
  if [[ -n "$merge_strategy" ]]; then
    merge_method="$merge_strategy"
  fi

  # Find PR for current branch
  local pr_number
  pr_number=$(gh pr list --head "$GIT_BRANCH" --json number --jq '.[0].number' 2>/dev/null || echo "")

  if [[ -z "$pr_number" ]]; then
    warn "No PR found for branch $GIT_BRANCH — skipping merge"
    return 0
  fi

  info "Found PR #${pr_number} for branch ${GIT_BRANCH}"

  # Wait for CI checks to pass
  info "Waiting for CI checks (timeout: ${wait_ci_timeout}s)..."
  local elapsed=0
  local check_interval=15

  while [[ "$elapsed" -lt "$wait_ci_timeout" ]]; do
    local check_status
    check_status=$(gh pr checks "$pr_number" --json 'bucket,name' --jq '[.[] | .bucket] | unique | sort' 2>/dev/null || echo '["pending"]')

    # If all checks passed (only "pass" in buckets)
    if echo "$check_status" | jq -e '. == ["pass"]' >/dev/null 2>&1; then
      success "All CI checks passed"
      break
    fi

    # If any check failed
    if echo "$check_status" | jq -e 'any(. == "fail")' >/dev/null 2>&1; then
      error "CI checks failed — aborting merge"
      return 1
    fi

    sleep "$check_interval"
    elapsed=$((elapsed + check_interval))
  done

  # Record CI wait time for adaptive timeout calculation (keep last 20 samples).
  if [[ "$elapsed" -gt 0 ]]; then
    local repo_hash_ci_rec
    repo_hash_ci_rec=$(echo -n "$PROJECT_ROOT" | shasum -a 256 2>/dev/null | cut -c1-12 || echo "unknown")
    local ci_times_dir="${HOME}/.shipwright/baselines/${repo_hash_ci_rec}"
    local ci_times_rec_file="${ci_times_dir}/ci-times.json"
    mkdir -p "$ci_times_dir"
    local ci_history="[]"
    if [[ -f "$ci_times_rec_file" ]]; then
      ci_history=$(jq '.times // []' "$ci_times_rec_file" 2>/dev/null || echo "[]")
    fi
    local updated_ci
    updated_ci=$(echo "$ci_history" | jq --arg t "$elapsed" '. + [($t | tonumber)] | .[-20:]' 2>/dev/null || echo "[$elapsed]")
    local tmp_ci
    tmp_ci=$(mktemp "${ci_times_dir}/ci-times.json.XXXXXX")
    # Only replace the history file when jq actually produced output;
    # previously a jq failure left an EMPTY temp file that clobbered the
    # history, and a failed mv leaked the temp file.
    if jq -n --argjson times "$updated_ci" --arg updated "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
        '{times: $times, updated: $updated}' > "$tmp_ci" 2>/dev/null; then
      mv "$tmp_ci" "$ci_times_rec_file" 2>/dev/null || rm -f "$tmp_ci"
    else
      rm -f "$tmp_ci"
    fi
  fi

  if [[ "$elapsed" -ge "$wait_ci_timeout" ]]; then
    warn "CI check timeout (${wait_ci_timeout}s) — proceeding with merge anyway"
  fi

  # Auto-approve if configured (for branch protection requiring reviews)
  if [[ "$auto_approve" == "true" ]]; then
    info "Auto-approving PR #${pr_number}..."
    gh pr review "$pr_number" --approve 2>/dev/null || warn "Auto-approve failed (may need different permissions)"
  fi

  # Merge the PR: prefer GitHub auto-merge when configured, falling back to
  # a direct merge if the repository does not allow auto-merge.
  if [[ "$auto_merge" == "true" ]]; then
    info "Enabling auto-merge for PR #${pr_number} (strategy: ${merge_method})..."
    local auto_merge_args=("pr" "merge" "$pr_number" "--auto" "--${merge_method}")
    if [[ "$auto_delete_branch" == "true" ]]; then
      auto_merge_args+=("--delete-branch")
    fi

    if gh "${auto_merge_args[@]}" 2>/dev/null; then
      success "Auto-merge enabled for PR #${pr_number} (strategy: ${merge_method})"
      emit_event "merge.auto_enabled" \
        "issue=${ISSUE_NUMBER:-0}" \
        "pr=$pr_number" \
        "strategy=$merge_method"
    else
      warn "Auto-merge not available — falling back to direct merge"
      # Fall through to direct merge below
      auto_merge="false"
    fi
  fi

  if [[ "$auto_merge" != "true" ]]; then
    info "Merging PR #${pr_number} (method: ${merge_method})..."
    local merge_args=("pr" "merge" "$pr_number" "--${merge_method}")
    if [[ "$auto_delete_branch" == "true" ]]; then
      merge_args+=("--delete-branch")
    fi

    if gh "${merge_args[@]}" 2>/dev/null; then
      success "PR #${pr_number} merged successfully"
    else
      error "Failed to merge PR #${pr_number}"
      return 1
    fi
  fi

  log_stage "merge" "PR #${pr_number} merged (strategy: ${merge_method}, auto_merge: ${auto_merge})"
}
753
+
754
#######################################
# stage_deploy — run configured deployments (canary / blue-green / direct).
# Reads per-stage config from $PIPELINE_CONFIG, enforces pre-deploy gates
# (CI status, minimum coverage), tracks the deployment on GitHub when the
# gh_deploy_pipeline_* helpers exist, and attempts rollback_cmd when a
# production deploy fails.
# Globals (read): ARTIFACTS_DIR, PIPELINE_CONFIG, ISSUE_NUMBER, NO_GITHUB,
#   REPO_OWNER, REPO_NAME, GIT_BRANCH, INTELLIGENCE_ISSUE_TYPE
# Globals (written): CURRENT_STAGE_ID
# Returns: 0 on success/skip, 1 when a gate or deploy step fails.
#######################################
stage_deploy() {
  CURRENT_STAGE_ID="deploy"
  # Consume retry context if this is a retry attempt
  local _retry_ctx="${ARTIFACTS_DIR}/.retry-context-deploy.md"
  if [[ -s "$_retry_ctx" ]]; then
    local _deploy_retry_hints
    # NOTE(review): hints are read but not injected anywhere visible here;
    # the context file is deleted so each retry starts clean.
    _deploy_retry_hints=$(cat "$_retry_ctx" 2>/dev/null || true)
    rm -f "$_retry_ctx"
  fi
  # Load deploy safety skills
  if type skill_load_prompts >/dev/null 2>&1; then
    local _deploy_skills
    _deploy_skills=$(skill_load_prompts "${INTELLIGENCE_ISSUE_TYPE:-backend}" "deploy" 2>/dev/null || true)
    if [[ -n "$_deploy_skills" ]]; then
      echo "$_deploy_skills" > "${ARTIFACTS_DIR}/.deploy-safety-skills.md" 2>/dev/null || true
    fi
  fi
  local staging_cmd
  staging_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.staging_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$staging_cmd" == "null" ]] && staging_cmd=""

  local prod_cmd
  prod_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.production_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$prod_cmd" == "null" ]] && prod_cmd=""

  local rollback_cmd
  rollback_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.rollback_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$rollback_cmd" == "null" ]] && rollback_cmd=""

  if [[ -z "$staging_cmd" && -z "$prod_cmd" ]]; then
    warn "No deploy commands configured — skipping"
    return 0
  fi

  # Create GitHub deployment tracking; environment is "staging" only when
  # staging is the sole configured target.
  local gh_deploy_env="production"
  [[ -n "$staging_cmd" && -z "$prod_cmd" ]] && gh_deploy_env="staging"
  if [[ "${NO_GITHUB:-false}" != "true" ]] && type gh_deploy_pipeline_start >/dev/null 2>&1; then
    if [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
      gh_deploy_pipeline_start "$REPO_OWNER" "$REPO_NAME" "${GIT_BRANCH:-HEAD}" "$gh_deploy_env" 2>/dev/null || true
      info "GitHub Deployment: tracking as $gh_deploy_env"
    fi
  fi

  # ── Pre-deploy gates ──
  local pre_deploy_ci
  pre_deploy_ci=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.pre_deploy_ci_status) // "true"' "$PIPELINE_CONFIG" 2>/dev/null) || true

  if [[ "${pre_deploy_ci:-true}" == "true" && "${NO_GITHUB:-false}" != "true" && -n "${REPO_OWNER:-}" && -n "${REPO_NAME:-}" ]]; then
    info "Pre-deploy gate: checking CI status..."
    local ci_failures
    # Count concluded check-runs that neither succeeded nor were skipped.
    ci_failures=$(gh api "repos/${REPO_OWNER}/${REPO_NAME}/commits/${GIT_BRANCH:-HEAD}/check-runs" \
      --jq '[.check_runs[] | select(.conclusion != null and .conclusion != "success" and .conclusion != "skipped")] | length' 2>/dev/null || echo "0")
    if [[ "${ci_failures:-0}" -gt 0 ]]; then
      error "Pre-deploy gate FAILED: ${ci_failures} CI check(s) not passing"
      [[ -n "$ISSUE_NUMBER" ]] && gh_comment_issue "$ISSUE_NUMBER" "Pre-deploy gate: ${ci_failures} CI checks failing" 2>/dev/null || true
      return 1
    fi
    success "Pre-deploy gate: all CI checks passing"
  fi

  local pre_deploy_min_cov
  pre_deploy_min_cov=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.pre_deploy_min_coverage) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  if [[ -n "${pre_deploy_min_cov:-}" && "${pre_deploy_min_cov}" != "null" && -f "$ARTIFACTS_DIR/test-coverage.json" ]]; then
    local actual_cov
    actual_cov=$(jq -r '.coverage_pct // 0' "$ARTIFACTS_DIR/test-coverage.json" 2>/dev/null || echo "0")
    # Float-safe comparison: coverage_pct is frequently fractional (e.g.
    # 85.5), which broke the previous bash integer-only [[ -lt ]] test and
    # let the gate silently pass.
    if awk -v a="${actual_cov:-0}" -v m="$pre_deploy_min_cov" 'BEGIN{exit !(a < m)}' 2>/dev/null; then
      error "Pre-deploy gate FAILED: coverage ${actual_cov}% < required ${pre_deploy_min_cov}%"
      [[ -n "$ISSUE_NUMBER" ]] && gh_comment_issue "$ISSUE_NUMBER" "Pre-deploy gate: coverage ${actual_cov}% below minimum ${pre_deploy_min_cov}%" 2>/dev/null || true
      return 1
    fi
    success "Pre-deploy gate: coverage ${actual_cov}% >= ${pre_deploy_min_cov}%"
  fi

  # Post deploy start to GitHub
  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "Deploy started"
  fi

  # ── Deploy strategy ──
  local deploy_strategy
  deploy_strategy=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.deploy_strategy) // "direct"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$deploy_strategy" == "null" ]] && deploy_strategy="direct"

  local canary_cmd promote_cmd switch_cmd health_url deploy_log
  canary_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.canary_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$canary_cmd" == "null" ]] && canary_cmd=""
  promote_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.promote_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$promote_cmd" == "null" ]] && promote_cmd=""
  switch_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.switch_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$switch_cmd" == "null" ]] && switch_cmd=""
  health_url=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.health_url) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$health_url" == "null" ]] && health_url=""
  deploy_log="$ARTIFACTS_DIR/deploy.log"

  # A strategy that lacks its required commands downgrades itself to
  # "direct" and falls through to the direct branch below.
  case "$deploy_strategy" in
    canary)
      info "Canary deployment strategy..."
      if [[ -z "$canary_cmd" ]]; then
        warn "No canary_cmd configured — falling back to direct"
        deploy_strategy="direct"
      else
        info "Deploying canary..."
        bash -c "$canary_cmd" >> "$deploy_log" 2>&1 || { error "Canary deploy failed"; return 1; }

        if [[ -n "$health_url" ]]; then
          # Require 2 of 3 health probes (HTTP 2xx/3xx) with backoff between.
          local canary_healthy=0
          local _chk
          for _chk in 1 2 3; do
            sleep "$(_exponential_backoff "$_chk" 5 30)"
            local _status
            _status=$(curl -s -o /dev/null -w "%{http_code}" "$health_url" 2>/dev/null || echo "0")
            if [[ "$_status" -ge 200 && "$_status" -lt 400 ]]; then
              canary_healthy=$((canary_healthy + 1))
            fi
          done
          if [[ "$canary_healthy" -lt 2 ]]; then
            error "Canary health check failed ($canary_healthy/3 passed) — rolling back"
            [[ -n "$rollback_cmd" ]] && bash -c "$rollback_cmd" 2>/dev/null || true
            return 1
          fi
          success "Canary healthy ($canary_healthy/3 checks passed)"
        fi

        info "Promoting canary to full deployment..."
        if [[ -n "$promote_cmd" ]]; then
          bash -c "$promote_cmd" >> "$deploy_log" 2>&1 || { error "Promote failed"; return 1; }
        fi
        success "Canary promoted"
      fi
      ;;
    blue-green)
      info "Blue-green deployment strategy..."
      if [[ -z "$staging_cmd" || -z "$switch_cmd" ]]; then
        warn "Blue-green requires staging_cmd + switch_cmd — falling back to direct"
        deploy_strategy="direct"
      else
        info "Deploying to inactive environment..."
        bash -c "$staging_cmd" >> "$deploy_log" 2>&1 || { error "Blue-green staging failed"; return 1; }

        if [[ -n "$health_url" ]]; then
          # Same 2-of-3 probe rule before switching traffic.
          local bg_healthy=0
          local _chk
          for _chk in 1 2 3; do
            sleep "$(_exponential_backoff "$_chk" 3 20)"
            local _status
            _status=$(curl -s -o /dev/null -w "%{http_code}" "$health_url" 2>/dev/null || echo "0")
            [[ "$_status" -ge 200 && "$_status" -lt 400 ]] && bg_healthy=$((bg_healthy + 1))
          done
          if [[ "$bg_healthy" -lt 2 ]]; then
            error "Blue-green health check failed — not switching"
            return 1
          fi
        fi

        info "Switching traffic..."
        bash -c "$switch_cmd" >> "$deploy_log" 2>&1 || { error "Traffic switch failed"; return 1; }
        success "Blue-green switch complete"
      fi
      ;;
  esac

  # ── Direct deployment (default or fallback) ──
  if [[ "$deploy_strategy" == "direct" ]]; then
    if [[ -n "$staging_cmd" ]]; then
      info "Deploying to staging..."
      bash -c "$staging_cmd" > "$ARTIFACTS_DIR/deploy-staging.log" 2>&1 || {
        error "Staging deploy failed"
        [[ -n "$ISSUE_NUMBER" ]] && gh_comment_issue "$ISSUE_NUMBER" "Staging deploy failed"
        # Mark GitHub deployment as failed
        if [[ "${NO_GITHUB:-false}" != "true" ]] && type gh_deploy_pipeline_complete >/dev/null 2>&1; then
          gh_deploy_pipeline_complete "$REPO_OWNER" "$REPO_NAME" "$gh_deploy_env" false "Staging deploy failed" 2>/dev/null || true
        fi
        return 1
      }
      success "Staging deploy complete"
    fi

    if [[ -n "$prod_cmd" ]]; then
      info "Deploying to production..."
      bash -c "$prod_cmd" > "$ARTIFACTS_DIR/deploy-prod.log" 2>&1 || {
        error "Production deploy failed"
        if [[ -n "$rollback_cmd" ]]; then
          warn "Rolling back..."
          bash -c "$rollback_cmd" 2>&1 || error "Rollback also failed!"
        fi
        [[ -n "$ISSUE_NUMBER" ]] && gh_comment_issue "$ISSUE_NUMBER" "Production deploy failed — rollback ${rollback_cmd:+attempted}"
        # Mark GitHub deployment as failed
        if [[ "${NO_GITHUB:-false}" != "true" ]] && type gh_deploy_pipeline_complete >/dev/null 2>&1; then
          gh_deploy_pipeline_complete "$REPO_OWNER" "$REPO_NAME" "$gh_deploy_env" false "Production deploy failed" 2>/dev/null || true
        fi
        return 1
      }
      success "Production deploy complete"
    fi
  fi

  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "✅ **Deploy complete**"
    gh_add_labels "$ISSUE_NUMBER" "deployed"
  fi

  # Mark GitHub deployment as successful
  if [[ "${NO_GITHUB:-false}" != "true" ]] && type gh_deploy_pipeline_complete >/dev/null 2>&1; then
    if [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
      gh_deploy_pipeline_complete "$REPO_OWNER" "$REPO_NAME" "$gh_deploy_env" true "" 2>/dev/null || true
    fi
  fi

  log_stage "deploy" "Deploy complete"
}
965
+