shipwright-cli 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +926 -0
- package/claude-code/CLAUDE.md.shipwright +125 -0
- package/claude-code/hooks/notify-idle.sh +35 -0
- package/claude-code/hooks/pre-compact-save.sh +57 -0
- package/claude-code/hooks/task-completed.sh +170 -0
- package/claude-code/hooks/teammate-idle.sh +68 -0
- package/claude-code/settings.json.template +184 -0
- package/completions/_shipwright +140 -0
- package/completions/shipwright.bash +89 -0
- package/completions/shipwright.fish +107 -0
- package/docs/KNOWN-ISSUES.md +199 -0
- package/docs/TIPS.md +331 -0
- package/docs/definition-of-done.example.md +16 -0
- package/docs/patterns/README.md +139 -0
- package/docs/patterns/audit-loop.md +149 -0
- package/docs/patterns/bug-hunt.md +183 -0
- package/docs/patterns/feature-implementation.md +159 -0
- package/docs/patterns/refactoring.md +183 -0
- package/docs/patterns/research-exploration.md +144 -0
- package/docs/patterns/test-generation.md +173 -0
- package/package.json +49 -0
- package/scripts/adapters/docker-deploy.sh +50 -0
- package/scripts/adapters/fly-deploy.sh +41 -0
- package/scripts/adapters/iterm2-adapter.sh +122 -0
- package/scripts/adapters/railway-deploy.sh +34 -0
- package/scripts/adapters/tmux-adapter.sh +87 -0
- package/scripts/adapters/vercel-deploy.sh +35 -0
- package/scripts/adapters/wezterm-adapter.sh +103 -0
- package/scripts/cct +242 -0
- package/scripts/cct-cleanup.sh +172 -0
- package/scripts/cct-cost.sh +590 -0
- package/scripts/cct-daemon.sh +3189 -0
- package/scripts/cct-doctor.sh +328 -0
- package/scripts/cct-fix.sh +478 -0
- package/scripts/cct-fleet.sh +904 -0
- package/scripts/cct-init.sh +282 -0
- package/scripts/cct-logs.sh +273 -0
- package/scripts/cct-loop.sh +1332 -0
- package/scripts/cct-memory.sh +1148 -0
- package/scripts/cct-pipeline.sh +3844 -0
- package/scripts/cct-prep.sh +1352 -0
- package/scripts/cct-ps.sh +168 -0
- package/scripts/cct-reaper.sh +390 -0
- package/scripts/cct-session.sh +284 -0
- package/scripts/cct-status.sh +169 -0
- package/scripts/cct-templates.sh +242 -0
- package/scripts/cct-upgrade.sh +422 -0
- package/scripts/cct-worktree.sh +405 -0
- package/scripts/postinstall.mjs +96 -0
- package/templates/pipelines/autonomous.json +71 -0
- package/templates/pipelines/cost-aware.json +95 -0
- package/templates/pipelines/deployed.json +79 -0
- package/templates/pipelines/enterprise.json +114 -0
- package/templates/pipelines/fast.json +63 -0
- package/templates/pipelines/full.json +104 -0
- package/templates/pipelines/hotfix.json +63 -0
- package/templates/pipelines/standard.json +91 -0
- package/tmux/claude-teams-overlay.conf +109 -0
- package/tmux/templates/architecture.json +19 -0
- package/tmux/templates/bug-fix.json +24 -0
- package/tmux/templates/code-review.json +24 -0
- package/tmux/templates/devops.json +19 -0
- package/tmux/templates/documentation.json +19 -0
- package/tmux/templates/exploration.json +19 -0
- package/tmux/templates/feature-dev.json +24 -0
- package/tmux/templates/full-stack.json +24 -0
- package/tmux/templates/migration.json +24 -0
- package/tmux/templates/refactor.json +19 -0
- package/tmux/templates/security-audit.json +24 -0
- package/tmux/templates/testing.json +24 -0
- package/tmux/tmux.conf +167 -0
|
@@ -0,0 +1,3844 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║ shipwright pipeline — Autonomous Feature Delivery (Idea → Production) ║
# ║ Full GitHub integration · Auto-detection · Task tracking · Metrics ║
# ╚═══════════════════════════════════════════════════════════════════════════╝
set -euo pipefail

VERSION="1.7.0"
# Absolute directory of this script, resolved via BASH_SOURCE so it is correct
# regardless of the caller's working directory or how the script was invoked.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Package root: one level above scripts/ (templates/ etc. live here).
REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
|
11
|
+
|
|
12
|
+
# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# 24-bit ANSI sequences stored as LITERAL '\033…' strings; they are only
# rendered when passed through `echo -e` (see the output helpers below).
CYAN='\033[38;2;0;212;255m'    # #00d4ff — primary accent
PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE='\033[38;2;0;102;255m'    # #0066ff — tertiary
GREEN='\033[38;2;74;222;128m'  # success
YELLOW='\033[38;2;250;204;21m' # warning
RED='\033[38;2;248;113;113m'   # error
DIM='\033[2m'
BOLD='\033[1m'
RESET='\033[0m'
|
|
22
|
+
|
|
23
|
+
# ─── Output Helpers ─────────────────────────────────────────────────────────
# Colored one-line status messages; error() goes to stderr. printf '%b'
# interprets the backslash escapes stored in the color variables, exactly
# like `echo -e` does.
info()    { printf '%b\n' "${CYAN}${BOLD}▸${RESET} $*"; }
success() { printf '%b\n' "${GREEN}${BOLD}✓${RESET} $*"; }
warn()    { printf '%b\n' "${YELLOW}${BOLD}⚠${RESET} $*"; }
error()   { printf '%b\n' "${RED}${BOLD}✗${RESET} $*" >&2; }

# UTC ISO-8601 timestamp and unix epoch seconds.
now_iso()   { date -u '+%Y-%m-%dT%H:%M:%SZ'; }
now_epoch() { date '+%s'; }
|
|
31
|
+
|
|
32
|
+
# Render a second count as a human-readable duration:
#   >= 1h → "Xh Ym Zs", >= 1m → "Ym Zs", otherwise "Zs".
format_duration() {
  local total=$1
  if (( total >= 3600 )); then
    printf '%dh %dm %ds' $(( total / 3600 )) $(( (total % 3600) / 60 )) $(( total % 60 ))
  elif (( total >= 60 )); then
    printf '%dm %ds' $(( total / 60 )) $(( total % 60 ))
  else
    printf '%ds' "$total"
  fi
}
|
|
42
|
+
|
|
43
|
+
# ─── Structured Event Log ──────────────────────────────────────────────────
# Appends JSON events to ~/.claude-teams/events.jsonl for metrics/traceability

EVENTS_DIR="${HOME}/.claude-teams"
EVENTS_FILE="${EVENTS_DIR}/events.jsonl"

# Append one JSON event line to $EVENTS_FILE.
#   $1    — event type
#   $2... — key=value pairs; purely numeric values are emitted unquoted,
#           everything else is emitted as a JSON string.
emit_event() {
  local event_type="$1"
  shift
  local json_fields=""
  local kv key val
  for kv in "$@"; do
    key="${kv%%=*}"
    val="${kv#*=}"
    # Numbers: don't quote; strings: quote
    if [[ "$val" =~ ^-?[0-9]+\.?[0-9]*$ ]]; then
      json_fields="${json_fields},\"${key}\":${val}"
    else
      # Escape backslashes FIRST, then double quotes, so the emitted line is
      # valid JSON. (The original escaped quotes only, which produced broken
      # JSON for any value containing a backslash.)
      val="${val//\\/\\\\}"
      val="${val//\"/\\\"}"
      json_fields="${json_fields},\"${key}\":\"${val}\""
    fi
  done
  mkdir -p "$EVENTS_DIR"
  echo "{\"ts\":\"$(now_iso)\",\"ts_epoch\":$(now_epoch),\"type\":\"${event_type}\"${json_fields}}" >> "$EVENTS_FILE"
}
|
|
69
|
+
|
|
70
|
+
# ─── Token / Cost Parsing ─────────────────────────────────────────────────
# Scrape the LAST reported input/output token counts out of a Claude log file
# (patterns like "input_tokens: 1,234") and accumulate them into the
# TOTAL_INPUT_TOKENS / TOTAL_OUTPUT_TOKENS globals. Missing counts add 0.
parse_claude_tokens() {
  local log_file="$1"
  local in_count out_count
  in_count=$(grep -oE 'input[_ ]tokens?[: ]+[0-9,]+' "$log_file" 2>/dev/null | tail -1 | grep -oE '[0-9,]+' | tr -d ',' || echo "0")
  out_count=$(grep -oE 'output[_ ]tokens?[: ]+[0-9,]+' "$log_file" 2>/dev/null | tail -1 | grep -oE '[0-9,]+' | tr -d ',' || echo "0")
  TOTAL_INPUT_TOKENS=$(( TOTAL_INPUT_TOKENS + ${in_count:-0} ))
  TOTAL_OUTPUT_TOKENS=$(( TOTAL_OUTPUT_TOKENS + ${out_count:-0} ))
}
|
|
80
|
+
|
|
81
|
+
# ─── Defaults ───────────────────────────────────────────────────────────────
|
|
82
|
+
GOAL=""
|
|
83
|
+
ISSUE_NUMBER=""
|
|
84
|
+
PIPELINE_NAME="standard"
|
|
85
|
+
PIPELINE_CONFIG=""
|
|
86
|
+
TEST_CMD=""
|
|
87
|
+
MODEL=""
|
|
88
|
+
AGENTS=""
|
|
89
|
+
SKIP_GATES=false
|
|
90
|
+
GIT_BRANCH=""
|
|
91
|
+
GITHUB_ISSUE=""
|
|
92
|
+
TASK_TYPE=""
|
|
93
|
+
REVIEWERS=""
|
|
94
|
+
LABELS=""
|
|
95
|
+
BASE_BRANCH="main"
|
|
96
|
+
NO_GITHUB=false
|
|
97
|
+
DRY_RUN=false
|
|
98
|
+
IGNORE_BUDGET=false
|
|
99
|
+
PR_NUMBER=""
|
|
100
|
+
AUTO_WORKTREE=false
|
|
101
|
+
WORKTREE_NAME=""
|
|
102
|
+
CLEANUP_WORKTREE=false
|
|
103
|
+
ORIGINAL_REPO_DIR=""
|
|
104
|
+
|
|
105
|
+
# GitHub metadata (populated during intake)
|
|
106
|
+
ISSUE_LABELS=""
|
|
107
|
+
ISSUE_MILESTONE=""
|
|
108
|
+
ISSUE_ASSIGNEES=""
|
|
109
|
+
ISSUE_BODY=""
|
|
110
|
+
PROGRESS_COMMENT_ID=""
|
|
111
|
+
REPO_OWNER=""
|
|
112
|
+
REPO_NAME=""
|
|
113
|
+
GH_AVAILABLE=false
|
|
114
|
+
|
|
115
|
+
# Timing
|
|
116
|
+
PIPELINE_START_EPOCH=""
|
|
117
|
+
STAGE_TIMINGS=""
|
|
118
|
+
|
|
119
|
+
PROJECT_ROOT=""
|
|
120
|
+
STATE_DIR=""
|
|
121
|
+
STATE_FILE=""
|
|
122
|
+
ARTIFACTS_DIR=""
|
|
123
|
+
TASKS_FILE=""
|
|
124
|
+
|
|
125
|
+
# ─── Help ───────────────────────────────────────────────────────────────────

# Print the full usage screen to stdout.
# NOTE(review): spacing inside these strings reflects the published rendering;
# the original column alignment may have been wider — confirm against a
# pristine copy of this file.
show_help() {
  echo -e "${CYAN}${BOLD}shipwright pipeline${RESET} — Autonomous Feature Delivery"
  echo ""
  echo -e "${BOLD}USAGE${RESET}"
  echo -e " ${CYAN}shipwright pipeline${RESET} <command> [options]"
  echo ""
  echo -e "${BOLD}COMMANDS${RESET}"
  echo -e " ${CYAN}start${RESET} --goal \"...\" Start a new pipeline"
  echo -e " ${CYAN}resume${RESET} Continue from last completed stage"
  echo -e " ${CYAN}status${RESET} Show pipeline progress dashboard"
  echo -e " ${CYAN}abort${RESET} Stop pipeline and mark aborted"
  echo -e " ${CYAN}list${RESET} Show available pipeline templates"
  echo -e " ${CYAN}show${RESET} <name> Display pipeline stages"
  echo ""
  echo -e "${BOLD}START OPTIONS${RESET}"
  echo -e " ${DIM}--goal \"description\"${RESET} What to build (required unless --issue)"
  echo -e " ${DIM}--issue <number>${RESET} Fetch goal from GitHub issue"
  echo -e " ${DIM}--pipeline <name>${RESET} Pipeline template (default: standard)"
  echo -e " ${DIM}--test-cmd \"command\"${RESET} Override test command (auto-detected if omitted)"
  echo -e " ${DIM}--model <model>${RESET} Override AI model (opus, sonnet, haiku)"
  echo -e " ${DIM}--agents <n>${RESET} Override agent count"
  echo -e " ${DIM}--skip-gates${RESET} Auto-approve all gates (fully autonomous)"
  echo -e " ${DIM}--base <branch>${RESET} Base branch for PR (default: main)"
  echo -e " ${DIM}--reviewers \"a,b\"${RESET} Request PR reviewers (auto-detected if omitted)"
  echo -e " ${DIM}--labels \"a,b\"${RESET} Add labels to PR (inherited from issue if omitted)"
  echo -e " ${DIM}--no-github${RESET} Disable GitHub integration"
  echo -e " ${DIM}--ignore-budget${RESET} Skip budget enforcement checks"
  echo -e " ${DIM}--worktree [=name]${RESET} Run in isolated git worktree (parallel-safe)"
  echo -e " ${DIM}--dry-run${RESET} Show what would happen without executing"
  echo -e " ${DIM}--slack-webhook <url>${RESET} Send notifications to Slack"
  echo -e " ${DIM}--self-heal <n>${RESET} Build→test retry cycles on failure (default: 2)"
  echo ""
  echo -e "${BOLD}STAGES${RESET} ${DIM}(configurable per pipeline template)${RESET}"
  echo -e " intake → plan → design → build → test → review → pr → deploy → validate → monitor"
  echo ""
  echo -e "${BOLD}GITHUB INTEGRATION${RESET} ${DIM}(automatic when gh CLI available)${RESET}"
  echo -e " • Issue intake: fetch metadata, labels, milestone, self-assign"
  echo -e " • Progress tracking: live updates posted as issue comments"
  echo -e " • Task checklist: plan posted as checkbox list on issue"
  echo -e " • PR creation: labels, milestone, reviewers auto-propagated"
  echo -e " • Issue lifecycle: labeled in-progress → closed on completion"
  echo ""
  echo -e "${BOLD}SELF-HEALING${RESET} ${DIM}(autonomous error recovery)${RESET}"
  echo -e " • Build→test feedback loop: failures feed back as build context"
  echo -e " • Configurable retry cycles (--self-heal N, default: 2)"
  echo -e " • Auto-rebase before PR: handles base branch drift"
  echo -e " • Signal-safe: Ctrl+C saves state for clean resume"
  echo -e " • Git stash/restore: protects uncommitted work"
  echo ""
  echo -e "${BOLD}AUTO-DETECTION${RESET} ${DIM}(zero-config for common setups)${RESET}"
  echo -e " • Test command: package.json, Makefile, Cargo.toml, go.mod, etc."
  echo -e " • Branch prefix: feat/, fix/, refactor/ based on task type"
  echo -e " • Reviewers: from CODEOWNERS or recent git contributors"
  echo -e " • Project type: language and framework detection"
  echo ""
  echo -e "${BOLD}NOTIFICATIONS${RESET} ${DIM}(team awareness)${RESET}"
  echo -e " • Slack: --slack-webhook <url>"
  echo -e " • Custom webhook: set SHIPWRIGHT_WEBHOOK_URL env var"
  echo -e " • Events: start, stage complete, failure, self-heal, done"
  echo ""
  echo -e "${BOLD}EXAMPLES${RESET}"
  echo -e " ${DIM}# From GitHub issue (fully autonomous)${RESET}"
  echo -e " ${DIM}shipwright pipeline start --issue 123 --skip-gates${RESET}"
  echo ""
  echo -e " ${DIM}# From inline goal${RESET}"
  echo -e " ${DIM}shipwright pipeline start --goal \"Add JWT authentication\"${RESET}"
  echo ""
  echo -e " ${DIM}# Hotfix with custom test command${RESET}"
  echo -e " ${DIM}shipwright pipeline start --issue 456 --pipeline hotfix --test-cmd \"pytest\"${RESET}"
  echo ""
  echo -e " ${DIM}# Full deployment pipeline with 3 agents${RESET}"
  echo -e " ${DIM}shipwright pipeline start --goal \"Build payment flow\" --pipeline full --agents 3${RESET}"
  echo ""
  echo -e " ${DIM}# Parallel pipeline in isolated worktree${RESET}"
  echo -e " ${DIM}shipwright pipeline start --issue 42 --worktree${RESET}"
  echo ""
  echo -e " ${DIM}# Resume / monitor / abort${RESET}"
  echo -e " ${DIM}shipwright pipeline resume${RESET}"
  echo -e " ${DIM}shipwright pipeline status${RESET}"
  echo -e " ${DIM}shipwright pipeline abort${RESET}"
  echo ""
}
|
|
209
|
+
|
|
210
|
+
# ─── Argument Parsing ───────────────────────────────────────────────────────

# First positional argument selects the subcommand; default to the help screen.
SUBCOMMAND="${1:-help}"
# Drop it so parse_args only sees flags. $# may already be 0 — silence the
# shift error and keep going.
shift 2>/dev/null || true
|
|
214
|
+
|
|
215
|
+
# Parse start-options into globals. The first unrecognized token is kept as
# PIPELINE_NAME_ARG (used by `show <name>`); later strays are ignored.
parse_args() {
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --goal) GOAL="$2"; shift 2 ;;
      --issue) ISSUE_NUMBER="$2"; shift 2 ;;
      --pipeline) PIPELINE_NAME="$2"; shift 2 ;;
      --test-cmd) TEST_CMD="$2"; shift 2 ;;
      --model) MODEL="$2"; shift 2 ;;
      --agents) AGENTS="$2"; shift 2 ;;
      --skip-gates) SKIP_GATES=true; shift ;;
      --base) BASE_BRANCH="$2"; shift 2 ;;
      --reviewers) REVIEWERS="$2"; shift 2 ;;
      --labels) LABELS="$2"; shift 2 ;;
      --no-github) NO_GITHUB=true; shift ;;
      --ignore-budget) IGNORE_BUDGET=true; shift ;;
      --worktree=*)
        AUTO_WORKTREE=true
        WORKTREE_NAME="${1#--worktree=}"
        # Sanitize: strip anything outside [a-zA-Z0-9_-].
        WORKTREE_NAME="${WORKTREE_NAME//[^a-zA-Z0-9_-]/}"
        if [[ -z "$WORKTREE_NAME" ]]; then
          error "Invalid worktree name (alphanumeric, hyphens, underscores only)"
          exit 1
        fi
        shift ;;
      --worktree) AUTO_WORKTREE=true; shift ;;
      --dry-run) DRY_RUN=true; shift ;;
      --slack-webhook) SLACK_WEBHOOK="$2"; shift 2 ;;
      # Default to 2 when the count is omitted, matching --help (the original
      # fell back to 3). `shift 2 || shift` keeps a trailing bare --self-heal
      # from killing the script under set -e (or looping forever without it).
      --self-heal) BUILD_TEST_RETRIES="${2:-2}"; shift 2 || shift ;;
      --help|-h) show_help; exit 0 ;;
      *)
        if [[ -z "$PIPELINE_NAME_ARG" ]]; then
          PIPELINE_NAME_ARG="$1"
        fi
        shift ;;
    esac
  done
}

PIPELINE_NAME_ARG=""
parse_args "$@"
|
|
247
|
+
|
|
248
|
+
# ─── Directory Setup ────────────────────────────────────────────────────────

# Resolve the project root (git toplevel, falling back to the current
# directory) and lay out the .claude state/artifact paths, creating the
# directories if needed.
setup_dirs() {
  PROJECT_ROOT="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
  STATE_DIR="${PROJECT_ROOT}/.claude"
  STATE_FILE="${STATE_DIR}/pipeline-state.md"
  ARTIFACTS_DIR="${STATE_DIR}/pipeline-artifacts"
  TASKS_FILE="${STATE_DIR}/pipeline-tasks.md"
  mkdir -p "$STATE_DIR" "$ARTIFACTS_DIR"
}
|
|
258
|
+
|
|
259
|
+
# ─── Pipeline Config Loading ───────────────────────────────────────────────

# Locate a pipeline template by name. Search order: packaged templates first,
# then the user's ~/.claude-teams overrides. Prints the path on stdout and
# returns 0; returns 1 when no template matches.
find_pipeline_config() {
  local name="$1"
  local candidate
  for candidate in \
    "$REPO_DIR/templates/pipelines/${name}.json" \
    "$HOME/.claude-teams/pipelines/${name}.json"; do
    if [[ -f "$candidate" ]]; then
      echo "$candidate"
      return 0
    fi
  done
  return 1
}
|
|
275
|
+
|
|
276
|
+
# Resolve PIPELINE_NAME into PIPELINE_CONFIG (absolute template path).
# Exits 1 with a hint when the named template does not exist.
load_pipeline_config() {
  PIPELINE_CONFIG=$(find_pipeline_config "$PIPELINE_NAME") || {
    error "Pipeline template not found: $PIPELINE_NAME"
    echo -e " Available templates: ${DIM}shipwright pipeline list${RESET}"
    exit 1
  }
  info "Pipeline: ${BOLD}$PIPELINE_NAME${RESET} ${DIM}($PIPELINE_CONFIG)${RESET}"
}
|
|
284
|
+
|
|
285
|
+
CURRENT_STAGE_ID=""

# Notification / webhook
# NOTE: parse_args has ALREADY run by this point (it is invoked right after
# its definition), so these must be default-if-unset expansions — the
# original's plain assignments silently clobbered --slack-webhook and
# --self-heal values captured during argument parsing.
SLACK_WEBHOOK="${SLACK_WEBHOOK:-}"
NOTIFICATION_ENABLED=false

# Self-healing
BUILD_TEST_RETRIES="${BUILD_TEST_RETRIES:-2}"
STASHED_CHANGES=false
SELF_HEAL_COUNT=0

# ─── Cost Tracking ───────────────────────────────────────────────────────
TOTAL_INPUT_TOKENS=0
TOTAL_OUTPUT_TOKENS=0
# Per-model $/Mtoken rates (input/output), keyed by model family.
COST_MODEL_RATES='{"opus":{"input":15,"output":75},"sonnet":{"input":3,"output":15},"haiku":{"input":0.25,"output":1.25}}'
|
|
300
|
+
|
|
301
|
+
# ─── Signal Handling ───────────────────────────────────────────────────────

# SIGINT/SIGTERM handler: persist pipeline state for `resume`, restore any
# auto-stashed work, and leave a breadcrumb comment on the tracked issue.
# All globals are read through ${VAR:-} guards: the trap can fire before the
# pipeline initializes them, and under `set -u` an unguarded read would kill
# the handler (the original read $PIPELINE_STATUS and $STASHED_CHANGES bare).
cleanup_on_exit() {
  local exit_code=$?

  # Save state if we were running
  if [[ "${PIPELINE_STATUS:-}" == "running" && -n "${STATE_FILE:-}" ]]; then
    PIPELINE_STATUS="interrupted"
    UPDATED_AT="$(now_iso)"
    write_state 2>/dev/null || true
    echo ""
    warn "Pipeline interrupted — state saved."
    echo -e " Resume: ${DIM}shipwright pipeline resume${RESET}"
  fi

  # Restore stashed changes
  if [[ "${STASHED_CHANGES:-false}" == "true" ]]; then
    git stash pop --quiet 2>/dev/null || true
  fi

  # Update GitHub
  if [[ -n "${ISSUE_NUMBER:-}" && "${GH_AVAILABLE:-false}" == "true" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "⏸️ **Pipeline interrupted** at stage: ${CURRENT_STAGE_ID:-unknown}" 2>/dev/null || true
  fi

  exit "$exit_code"
}

trap cleanup_on_exit SIGINT SIGTERM
|
|
330
|
+
|
|
331
|
+
# ─── Pre-flight Validation ─────────────────────────────────────────────────

# Verify the environment before starting a pipeline: required/optional tools,
# git repo state (with optional auto-stash), base branch, GitHub auth, the
# Claude CLI, the cct-loop companion script, and free disk space.
# Returns 0 when no hard errors were found, 1 otherwise.
# NOTE(review): reads $PROJECT_ROOT for the disk check — assumes setup_dirs
# has already run; confirm call order at the call site.
preflight_checks() {
  local errors=0

  echo -e "${PURPLE}${BOLD}━━━ Pre-flight Checks ━━━${RESET}"
  echo ""

  # 1. Required tools — each missing one counts as a hard error.
  local required_tools=("git" "jq")
  local optional_tools=("gh" "claude" "bc" "curl")

  for tool in "${required_tools[@]}"; do
    if command -v "$tool" &>/dev/null; then
      echo -e " ${GREEN}✓${RESET} $tool"
    else
      echo -e " ${RED}✗${RESET} $tool ${RED}(required)${RESET}"
      errors=$((errors + 1))
    fi
  done

  # Optional tools only degrade features; never fatal.
  for tool in "${optional_tools[@]}"; do
    if command -v "$tool" &>/dev/null; then
      echo -e " ${GREEN}✓${RESET} $tool"
    else
      echo -e " ${DIM}○${RESET} $tool ${DIM}(optional — some features disabled)${RESET}"
    fi
  done

  # 2. Git state
  echo ""
  if git rev-parse --is-inside-work-tree &>/dev/null; then
    echo -e " ${GREEN}✓${RESET} Inside git repo"
  else
    echo -e " ${RED}✗${RESET} Not inside a git repository"
    errors=$((errors + 1))
  fi

  # Check for uncommitted changes — offer to stash
  local dirty_files
  dirty_files=$(git status --porcelain 2>/dev/null | wc -l | xargs)
  if [[ "$dirty_files" -gt 0 ]]; then
    echo -e " ${YELLOW}⚠${RESET} $dirty_files uncommitted change(s)"
    if [[ "$SKIP_GATES" == "true" ]]; then
      # Fully autonomous mode: stash automatically; cleanup_on_exit pops it.
      info "Auto-stashing uncommitted changes..."
      git stash push -m "cct-pipeline: auto-stash before pipeline" --quiet 2>/dev/null && STASHED_CHANGES=true
      if [[ "$STASHED_CHANGES" == "true" ]]; then
        echo -e " ${GREEN}✓${RESET} Changes stashed (will restore on exit)"
      fi
    else
      echo -e " ${DIM}Tip: Use --skip-gates to auto-stash, or commit/stash manually${RESET}"
    fi
  else
    echo -e " ${GREEN}✓${RESET} Working tree clean"
  fi

  # Check if base branch exists
  if git rev-parse --verify "$BASE_BRANCH" &>/dev/null; then
    echo -e " ${GREEN}✓${RESET} Base branch: $BASE_BRANCH"
  else
    echo -e " ${RED}✗${RESET} Base branch not found: $BASE_BRANCH"
    errors=$((errors + 1))
  fi

  # 3. GitHub auth (if gh available and not disabled) — warn-only.
  if [[ "$NO_GITHUB" != "true" ]] && command -v gh &>/dev/null; then
    if gh auth status &>/dev/null 2>&1; then
      echo -e " ${GREEN}✓${RESET} GitHub authenticated"
    else
      echo -e " ${YELLOW}⚠${RESET} GitHub not authenticated (features disabled)"
    fi
  fi

  # 4. Claude CLI — hard requirement for plan/build stages.
  if command -v claude &>/dev/null; then
    echo -e " ${GREEN}✓${RESET} Claude CLI available"
  else
    echo -e " ${RED}✗${RESET} Claude CLI not found — plan/build stages will fail"
    errors=$((errors + 1))
  fi

  # 5. cct loop (needed for build stage)
  if [[ -x "$SCRIPT_DIR/cct-loop.sh" ]]; then
    echo -e " ${GREEN}✓${RESET} shipwright loop available"
  else
    echo -e " ${RED}✗${RESET} cct-loop.sh not found at $SCRIPT_DIR"
    errors=$((errors + 1))
  fi

  # 6. Disk space check (warn if < 1GB free) — warn-only, never fatal.
  local free_space_kb
  free_space_kb=$(df -k "$PROJECT_ROOT" 2>/dev/null | tail -1 | awk '{print $4}')
  if [[ -n "$free_space_kb" ]] && [[ "$free_space_kb" -lt 1048576 ]] 2>/dev/null; then
    echo -e " ${YELLOW}⚠${RESET} Low disk space: $(( free_space_kb / 1024 ))MB free"
  fi

  echo ""

  if [[ "$errors" -gt 0 ]]; then
    error "Pre-flight failed: $errors error(s)"
    return 1
  fi

  success "Pre-flight passed"
  echo ""
  return 0
}
|
|
438
|
+
|
|
439
|
+
# ─── Notification Helpers ──────────────────────────────────────────────────

# Broadcast an event to Slack (when SLACK_WEBHOOK is set) and/or a generic
# JSON webhook (SHIPWRIGHT_WEBHOOK_URL, falling back to CCT_WEBHOOK_URL).
#   $1 — title, $2 — message, $3 — level (success|error|warn|info; default info)
# Delivery is best-effort: curl failures are swallowed.
notify() {
  local title="$1" message="$2" level="${3:-info}"
  local emoji="🔔"
  case "$level" in
    success) emoji="✅" ;;
    error)   emoji="❌" ;;
    warn)    emoji="⚠️" ;;
  esac

  # Slack webhook
  if [[ -n "${SLACK_WEBHOOK:-}" ]]; then
    local slack_payload
    slack_payload=$(jq -n \
      --arg text "${emoji} *${title}*\n${message}" \
      '{text: $text}')
    curl -sf -X POST -H 'Content-Type: application/json' \
      -d "$slack_payload" "$SLACK_WEBHOOK" >/dev/null 2>&1 || true
  fi

  # Custom webhook (env var SHIPWRIGHT_WEBHOOK_URL, with CCT_WEBHOOK_URL fallback)
  local hook_url="${SHIPWRIGHT_WEBHOOK_URL:-${CCT_WEBHOOK_URL:-}}"
  if [[ -n "$hook_url" ]]; then
    local hook_payload
    hook_payload=$(jq -n \
      --arg title "$title" --arg message "$message" \
      --arg level "$level" --arg pipeline "${PIPELINE_NAME:-}" \
      --arg goal "${GOAL:-}" --arg stage "${CURRENT_STAGE_ID:-}" \
      '{title:$title, message:$message, level:$level, pipeline:$pipeline, goal:$goal, stage:$stage}')
    curl -sf -X POST -H 'Content-Type: application/json' \
      -d "$hook_payload" "$hook_url" >/dev/null 2>&1 || true
  fi
}
|
|
474
|
+
|
|
475
|
+
# ─── GitHub Integration Helpers ─────────────────────────────────────────────

# Probe for a usable `gh` CLI with an authenticated session, then derive
# REPO_OWNER / REPO_NAME from the origin remote (SSH or HTTPS form).
# Sets GH_AVAILABLE=true only when every prerequisite holds; on any failure
# GitHub features are disabled with a warning.
gh_init() {
  GH_AVAILABLE=false

  if [[ "$NO_GITHUB" == "true" ]]; then
    return
  fi

  if ! command -v gh &>/dev/null; then
    warn "gh CLI not found — GitHub integration disabled"
    return
  fi

  if ! gh auth status &>/dev/null; then
    warn "gh not authenticated — GitHub integration disabled"
    return
  fi

  # Handle SSH:   git@github.com:owner/repo.git
  # Handle HTTPS: https://github.com/owner/repo.git
  local origin_url
  origin_url=$(git remote get-url origin 2>/dev/null || true)
  if [[ -n "$origin_url" ]]; then
    REPO_OWNER=$(echo "$origin_url" | sed -E 's#(.*github\.com[:/])([^/]+)/.*#\2#')
    REPO_NAME=$(echo "$origin_url" | sed -E 's#.*/([^/]+)(\.git)?$#\1#' | sed 's/\.git$//')
  fi

  if [[ -n "$REPO_OWNER" && -n "$REPO_NAME" ]]; then
    GH_AVAILABLE=true
    info "GitHub: ${DIM}${REPO_OWNER}/${REPO_NAME}${RESET}"
  else
    warn "Could not detect GitHub repo — GitHub integration disabled"
  fi
}
|
|
514
|
+
|
|
515
|
+
# Post or update a comment on a GitHub issue. No-op unless GH_AVAILABLE;
# all gh failures are swallowed (best-effort).
# Usage: gh_comment_issue <issue_number> <body>
gh_comment_issue() {
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  local issue_num="$1" body="$2"
  gh issue comment "$issue_num" --body "$body" 2>/dev/null || true
}

# Post a progress-tracking comment and save its ID for later updates.
# On success the comment's numeric ID lands in PROGRESS_COMMENT_ID.
# Usage: gh_post_progress <issue_number> <body>
gh_post_progress() {
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  local issue_num="$1" body="$2"
  local result
  result=$(gh api "repos/${REPO_OWNER}/${REPO_NAME}/issues/${issue_num}/comments" \
    -f body="$body" --jq '.id' 2>/dev/null) || true
  if [[ -n "$result" && "$result" != "null" ]]; then
    PROGRESS_COMMENT_ID="$result"
  fi
}

# Update the existing progress comment (no-op until gh_post_progress has
# recorded PROGRESS_COMMENT_ID).
# Usage: gh_update_progress <body>
gh_update_progress() {
  [[ "$GH_AVAILABLE" != "true" || -z "$PROGRESS_COMMENT_ID" ]] && return 0
  local body="$1"
  gh api "repos/${REPO_OWNER}/${REPO_NAME}/issues/comments/${PROGRESS_COMMENT_ID}" \
    -X PATCH -f body="$body" 2>/dev/null || true
}

# Add labels to an issue or PR; empty label list is a no-op.
# Usage: gh_add_labels <issue_number> <label1,label2,...>
gh_add_labels() {
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  local issue_num="$1" labels="$2"
  [[ -z "$labels" ]] && return 0
  gh issue edit "$issue_num" --add-label "$labels" 2>/dev/null || true
}

# Remove a label from an issue.
# Usage: gh_remove_label <issue_number> <label>
gh_remove_label() {
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  local issue_num="$1" label="$2"
  gh issue edit "$issue_num" --remove-label "$label" 2>/dev/null || true
}

# Self-assign an issue (assignee "@me" = the authenticated gh user).
# Usage: gh_assign_self <issue_number>
gh_assign_self() {
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  local issue_num="$1"
  gh issue edit "$issue_num" --add-assignee "@me" 2>/dev/null || true
}

# Get full issue metadata as JSON on stdout (empty on failure).
# Usage: gh_get_issue_meta <issue_number>
gh_get_issue_meta() {
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  local issue_num="$1"
  gh issue view "$issue_num" --json title,body,labels,milestone,assignees,comments,number,state 2>/dev/null || true
}
|
|
577
|
+
|
|
578
|
+
# Build a progress table for GitHub comment: a markdown table with one row
# per pipeline stage (from $PIPELINE_CONFIG), followed by goal/branch/issue
# metadata and elapsed time. Result is printed on stdout. Continuation lines
# inside the assignments below are intentional — they embed real newlines
# into the markdown body.
# Usage: gh_build_progress_body
gh_build_progress_body() {
  local body="## 🤖 Pipeline Progress — \`${PIPELINE_NAME}\`

| Stage | Status | Duration |
|-------|--------|----------|"

  local stages
  stages=$(jq -c '.stages[]' "$PIPELINE_CONFIG" 2>/dev/null)
  while IFS= read -r stage; do
    local id enabled
    id=$(echo "$stage" | jq -r '.id')
    enabled=$(echo "$stage" | jq -r '.enabled')

    # Disabled stages are listed but marked skipped.
    if [[ "$enabled" != "true" ]]; then
      body="${body}
| ${id} | ⏭️ skipped | — |"
      continue
    fi

    local sstatus
    sstatus=$(get_stage_status "$id")
    local duration
    duration=$(get_stage_timing "$id")

    local icon
    case "$sstatus" in
      complete) icon="✅" ;;
      running) icon="🔄" ;;
      failed) icon="❌" ;;
      *) icon="⬜" ;;
    esac

    body="${body}
| ${id} | ${icon} ${sstatus:-pending} | ${duration:-—} |"
  done <<< "$stages"

  body="${body}

**Goal:** ${GOAL}
**Branch:** \`${GIT_BRANCH}\`"

  [[ -n "${GITHUB_ISSUE:-}" ]] && body="${body}
**Issue:** ${GITHUB_ISSUE}"

  # Elapsed time since pipeline start, when known.
  local total_dur=""
  if [[ -n "$PIPELINE_START_EPOCH" ]]; then
    total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
    body="${body}
**Elapsed:** ${total_dur}"
  fi

  body="${body}

---
_Updated: $(now_iso) · Generated by \`shipwright pipeline\`_"
  echo "$body"
}
|
|
637
|
+
|
|
638
|
+
# Push a page to the GitHub wiki (best-effort; skipped when GitHub is
# disabled or the wiki repo has never been initialized).
# Usage: gh_wiki_page <title> <content>
gh_wiki_page() {
  local title="$1" content="$2"
  # Guard with [[ ]] tests like the sibling gh_* helpers. The original
  # executed "$GH_AVAILABLE" / "$NO_GITHUB" as commands, which only works
  # while those variables hold exactly "true"/"false" — an empty value fell
  # straight through and attempted the clone.
  [[ "$GH_AVAILABLE" != "true" ]] && return 0
  [[ "$NO_GITHUB" == "true" ]] && return 0
  local wiki_dir="$ARTIFACTS_DIR/wiki"
  if [[ ! -d "$wiki_dir" ]]; then
    git clone "https://github.com/${REPO_OWNER}/${REPO_NAME}.wiki.git" "$wiki_dir" 2>/dev/null || {
      info "Wiki not initialized — skipping wiki update"
      return 0
    }
  fi
  echo "$content" > "$wiki_dir/${title}.md"
  ( cd "$wiki_dir" && git add -A && git commit -m "Pipeline: update $title" && git push ) 2>/dev/null || true
}
|
|
654
|
+
|
|
655
|
+
# ─── Auto-Detection ─────────────────────────────────────────────────────────
|
|
656
|
+
|
|
657
|
+
# Detect the test command from project files.
# Prints the command on stdout; prints an empty string when nothing is
# recognized. Checks ecosystems in priority order: Node, Python, Rust, Go,
# Ruby, Maven, Gradle, Makefile.
detect_test_cmd() {
  local dir="$PROJECT_ROOT"

  # Node.js — only when package.json declares a real "test" script
  if [[ -f "$dir/package.json" ]]; then
    local npm_test
    npm_test=$(jq -r '.scripts.test // ""' "$dir/package.json" 2>/dev/null)
    if [[ -n "$npm_test" && "$npm_test" != "null" && "$npm_test" != *"no test specified"* ]]; then
      # The lockfile present determines the package manager
      if [[ -f "$dir/pnpm-lock.yaml" ]]; then
        echo "pnpm test"
      elif [[ -f "$dir/yarn.lock" ]]; then
        echo "yarn test"
      elif [[ -f "$dir/bun.lockb" ]]; then
        echo "bun test"
      else
        echo "npm test"
      fi
      return
    fi
  fi

  # Python — pytest when configured in pyproject.toml, or when a tests/ dir exists
  if [[ -f "$dir/pytest.ini" || -f "$dir/pyproject.toml" || -f "$dir/setup.py" ]]; then
    if [[ -f "$dir/pyproject.toml" ]] && grep -q "pytest" "$dir/pyproject.toml" 2>/dev/null; then
      echo "pytest"
      return
    fi
    if [[ -d "$dir/tests" ]]; then
      echo "pytest"
      return
    fi
  fi

  # Rust
  if [[ -f "$dir/Cargo.toml" ]]; then
    echo "cargo test"
    return
  fi

  # Go
  if [[ -f "$dir/go.mod" ]]; then
    echo "go test ./..."
    return
  fi

  # Ruby — rspec when the Gemfile mentions it, rake otherwise
  if [[ -f "$dir/Gemfile" ]]; then
    if grep -q "rspec" "$dir/Gemfile" 2>/dev/null; then
      echo "bundle exec rspec"
    else
      echo "bundle exec rake test"
    fi
    return
  fi

  # Java/Kotlin (Maven)
  if [[ -f "$dir/pom.xml" ]]; then
    echo "mvn test"
    return
  fi

  # Java/Kotlin (Gradle)
  if [[ -f "$dir/build.gradle" || -f "$dir/build.gradle.kts" ]]; then
    echo "./gradlew test"
    return
  fi

  # Makefile with an explicit test target
  if [[ -f "$dir/Makefile" ]] && grep -q "^test:" "$dir/Makefile" 2>/dev/null; then
    echo "make test"
    return
  fi

  # Nothing recognized
  echo ""
}
|
|
724
|
+
|
|
725
|
+
# Detect project language/framework from marker files in $PROJECT_ROOT.
# Prints one of: typescript, nextjs, react, nodejs, rust, go, python, ruby,
# java, unknown.
detect_project_lang() {
  local dir="$PROJECT_ROOT"

  if [[ -f "$dir/package.json" ]]; then
    local pkg="$dir/package.json"
    # Most specific marker wins: typescript > next > react > generic node
    if grep -q "typescript" "$pkg" 2>/dev/null; then
      echo "typescript"
    elif grep -q "\"next\"" "$pkg" 2>/dev/null; then
      echo "nextjs"
    elif grep -q "\"react\"" "$pkg" 2>/dev/null; then
      echo "react"
    else
      echo "nodejs"
    fi
    return
  fi

  if [[ -f "$dir/Cargo.toml" ]]; then
    echo "rust"
  elif [[ -f "$dir/go.mod" ]]; then
    echo "go"
  elif [[ -f "$dir/pyproject.toml" || -f "$dir/setup.py" || -f "$dir/requirements.txt" ]]; then
    echo "python"
  elif [[ -f "$dir/Gemfile" ]]; then
    echo "ruby"
  elif [[ -f "$dir/pom.xml" || -f "$dir/build.gradle" ]]; then
    echo "java"
  else
    echo "unknown"
  fi
}
|
|
752
|
+
|
|
753
|
+
# Detect likely reviewers from CODEOWNERS or git log.
# Prints a comma-separated list of GitHub usernames (may be empty).
detect_reviewers() {
  local dir="$PROJECT_ROOT"
  local candidate owners_file=""

  # First CODEOWNERS file found wins
  for candidate in "$dir/.github/CODEOWNERS" "$dir/CODEOWNERS" "$dir/docs/CODEOWNERS"; do
    if [[ -f "$candidate" ]]; then
      owners_file="$candidate"
      break
    fi
  done

  if [[ -n "$owners_file" ]]; then
    # Pull @handles out of CODEOWNERS (lines like: * @user1 @user2),
    # dedupe, and keep at most three.
    local handles
    handles=$(grep -oE '@[a-zA-Z0-9_-]+' "$owners_file" 2>/dev/null | sed 's/@//' | sort -u | head -3 | tr '\n' ',')
    handles="${handles%,}" # drop trailing comma left by tr
    if [[ -n "$handles" ]]; then
      echo "$handles"
      return
    fi
  fi

  # Fallback: two most frequent recent committers, excluding ourselves.
  # NOTE(review): awk '{print $NF}' keeps only the last word of an author
  # name, so multi-word git names yield just the surname — confirm intended.
  local me
  me=$(gh api user --jq '.login' 2>/dev/null || git config user.name 2>/dev/null || true)
  local frequent
  frequent=$(git log --format='%aN' -100 2>/dev/null | \
    sort | uniq -c | sort -rn | \
    awk '{print $NF}' | \
    grep -v "^${me}$" 2>/dev/null | \
    head -2 | tr '\n' ',')
  echo "${frequent%,}"
}
|
|
789
|
+
|
|
790
|
+
# Get branch prefix from task type (defaults to "feat").
branch_prefix_for_type() {
  local prefix
  case "$1" in
    bug)          prefix="fix" ;;
    refactor)     prefix="refactor" ;;
    testing)      prefix="test" ;;
    security)     prefix="security" ;;
    docs)         prefix="docs" ;;
    devops)       prefix="ci" ;;
    migration)    prefix="migrate" ;;
    architecture) prefix="arch" ;;
    *)            prefix="feat" ;;
  esac
  echo "$prefix"
}
|
|
804
|
+
|
|
805
|
+
# ─── State Management ──────────────────────────────────────────────────────

# In-memory pipeline state. write_state serializes these into $STATE_FILE
# and resume_state reads them back.
PIPELINE_STATUS="pending"   # pipeline-level status; values seen in this file:
                            # pending, running, complete, aborted, interrupted
CURRENT_STAGE=""            # id of the stage currently executing
STARTED_AT=""               # ISO timestamp set when the pipeline starts
UPDATED_AT=""               # ISO timestamp of the most recent status update
STAGE_STATUSES=""           # newline-separated "<stage_id>:<status>" records
                            # (maintained by set_stage_status / get_stage_status)
LOG_ENTRIES=""              # accumulated markdown log appended by log_stage
|
|
813
|
+
|
|
814
|
+
# Persist a named artifact under $ARTIFACTS_DIR.
# Usage: save_artifact <name> <content>
save_artifact() {
  local name="$1" content="$2"
  # printf instead of echo: content that begins with "-n"/"-e" (or is exactly
  # an echo option) would otherwise be mangled or swallowed.
  printf '%s\n' "$content" > "$ARTIFACTS_DIR/$name"
}
|
|
818
|
+
|
|
819
|
+
# Look up the most recently recorded status for a stage id; prints an empty
# string when the stage has no record.
get_stage_status() {
  local id="$1"
  printf '%s\n' "$STAGE_STATUSES" | grep "^${id}:" | cut -d: -f2 | tail -1 || true
}
|
|
823
|
+
|
|
824
|
+
# Record the status for a stage id, replacing any prior record for it.
set_stage_status() {
  local id="$1" new_status="$2"
  # Drop the stage's existing line(s), then append the fresh record.
  STAGE_STATUSES=$(printf '%s\n' "$STAGE_STATUSES" | grep -v "^${id}:" || true)
  STAGE_STATUSES+="
${id}:${new_status}"
}
|
|
830
|
+
|
|
831
|
+
# Per-stage timing
# Append an "<id>_start:<epoch seconds>" marker to STAGE_TIMINGS.
record_stage_start() {
  local id="$1"
  STAGE_TIMINGS+="
${id}_start:$(now_epoch)"
}
|
|
837
|
+
|
|
838
|
+
# Append an "<id>_end:<epoch seconds>" marker to STAGE_TIMINGS.
record_stage_end() {
  local id="$1"
  STAGE_TIMINGS+="
${id}_end:$(now_epoch)"
}
|
|
843
|
+
|
|
844
|
+
# Human-readable duration for a stage: end-start when finished, elapsed time
# so far when only started, empty string when never started.
get_stage_timing() {
  local id="$1" begun finished
  begun=$(printf '%s\n' "$STAGE_TIMINGS" | grep "^${id}_start:" | cut -d: -f2 | tail -1 || true)
  finished=$(printf '%s\n' "$STAGE_TIMINGS" | grep "^${id}_end:" | cut -d: -f2 | tail -1 || true)
  if [[ -z "$begun" ]]; then
    echo ""
  elif [[ -z "$finished" ]]; then
    format_duration $(( $(now_epoch) - begun ))
  else
    format_duration $(( finished - begun ))
  fi
}
|
|
857
|
+
|
|
858
|
+
# Set the pipeline-level status and current stage, stamp the update time,
# and persist everything to disk.
# Usage: update_status <status> <stage>
update_status() {
  PIPELINE_STATUS="$1"
  CURRENT_STAGE="$2"
  UPDATED_AT="$(now_iso)"
  write_state
}
|
|
865
|
+
|
|
866
|
+
# Mark a stage as successfully finished: stop its timer, record the status,
# log the duration, persist state, and refresh the GitHub progress comment.
mark_stage_complete() {
  local stage_id="$1"
  record_stage_end "$stage_id"
  set_stage_status "$stage_id" "complete"
  local timing
  timing=$(get_stage_timing "$stage_id")
  log_stage "$stage_id" "complete (${timing})"
  write_state

  # Update GitHub progress comment (only when running against an issue)
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local body
    body=$(gh_build_progress_body)
    gh_update_progress "$body"
  fi
}
|
|
882
|
+
|
|
883
|
+
# Mark a stage as failed: stop its timer, record the status, persist state,
# refresh the GitHub progress comment, and post a failure comment that
# includes the tail of the stage's log file(s).
mark_stage_failed() {
  local stage_id="$1"
  record_stage_end "$stage_id"
  set_stage_status "$stage_id" "failed"
  local timing
  timing=$(get_stage_timing "$stage_id")
  log_stage "$stage_id" "failed (${timing})"
  write_state

  # Update GitHub progress + comment failure
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local body
    body=$(gh_build_progress_body)
    gh_update_progress "$body"
    # The glob "${stage_id}"*.log may match multiple log files; tail -5 then
    # shows the last lines of the concatenation.
    gh_comment_issue "$ISSUE_NUMBER" "❌ Pipeline failed at stage **${stage_id}** after ${timing}.

\`\`\`
$(tail -5 "$ARTIFACTS_DIR/${stage_id}"*.log 2>/dev/null || echo 'No log available')
\`\`\`"
  fi
}
|
|
904
|
+
|
|
905
|
+
# Append a timestamped markdown entry for a stage to the in-memory log
# (LOG_ENTRIES), which write_state later serializes under "## Log".
log_stage() {
  local id="$1" msg="$2"
  local ts
  ts=$(date +"%H:%M:%S")
  LOG_ENTRIES+="
### ${id} (${ts})
${msg}
"
}
|
|
914
|
+
|
|
915
|
+
# Reset all in-memory pipeline state for a fresh run and persist it.
initialize_state() {
  PIPELINE_STATUS="running"
  PIPELINE_START_EPOCH="$(now_epoch)"   # baseline for elapsed-time reporting
  STARTED_AT="$(now_iso)"
  UPDATED_AT="$(now_iso)"
  STAGE_STATUSES=""
  STAGE_TIMINGS=""
  LOG_ENTRIES=""
  write_state
}
|
|
925
|
+
|
|
926
|
+
# Serialize the in-memory pipeline state to $STATE_FILE: a YAML-style
# frontmatter block between "---" markers followed by a "## Log" section.
# resume_state parses this exact layout back in, so the format here and the
# parser there must stay in sync.
write_state() {
  # Fold "<id>:<status>" records into indented "id: status" lines for the
  # frontmatter's "stages:" key; blank records are skipped.
  local stages_yaml=""
  while IFS=: read -r sid sstatus; do
    [[ -z "$sid" ]] && continue
    stages_yaml="${stages_yaml} ${sid}: ${sstatus}
"
  done <<< "$STAGE_STATUSES"

  # Total wall-clock time since the pipeline started, when known
  local total_dur=""
  if [[ -n "$PIPELINE_START_EPOCH" ]]; then
    total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
  fi

  cat > "$STATE_FILE" <<EOF
---
pipeline: $PIPELINE_NAME
goal: "$GOAL"
status: $PIPELINE_STATUS
issue: "${GITHUB_ISSUE:-}"
branch: "${GIT_BRANCH:-}"
template: "${TASK_TYPE:+$(template_for_type "$TASK_TYPE")}"
current_stage: $CURRENT_STAGE
started_at: ${STARTED_AT:-$(now_iso)}
updated_at: $(now_iso)
elapsed: ${total_dur:-0s}
pr_number: ${PR_NUMBER:-}
progress_comment_id: ${PROGRESS_COMMENT_ID:-}
stages:
${stages_yaml}---

## Log
$LOG_ENTRIES
EOF
}
|
|
960
|
+
|
|
961
|
+
# Rehydrate pipeline state from $STATE_FILE (written by write_state) and
# prepare to continue: parse the frontmatter, restore the log, re-checkout
# the branch, and re-run GitHub/config initialization. Exits when there is
# nothing to resume (missing file, completed or aborted pipeline).
resume_state() {
  if [[ ! -f "$STATE_FILE" ]]; then
    error "No pipeline state found at $STATE_FILE"
    echo -e " Start a new pipeline: ${DIM}shipwright pipeline start --goal \"...\"${RESET}"
    exit 1
  fi

  info "Resuming pipeline from $STATE_FILE"

  # Parse only the frontmatter: the text between the first and second "---".
  local in_frontmatter=false
  while IFS= read -r line; do
    if [[ "$line" == "---" ]]; then
      if $in_frontmatter; then break; else in_frontmatter=true; continue; fi
    fi
    if $in_frontmatter; then
      case "$line" in
        # Scalar keys: strip the key prefix, then trim (xargs) or unquote (sed)
        pipeline:*) PIPELINE_NAME="$(echo "${line#pipeline:}" | xargs)" ;;
        goal:*) GOAL="$(echo "${line#goal:}" | sed 's/^ *"//;s/" *$//')" ;;
        status:*) PIPELINE_STATUS="$(echo "${line#status:}" | xargs)" ;;
        issue:*) GITHUB_ISSUE="$(echo "${line#issue:}" | sed 's/^ *"//;s/" *$//')" ;;
        branch:*) GIT_BRANCH="$(echo "${line#branch:}" | sed 's/^ *"//;s/" *$//')" ;;
        current_stage:*) CURRENT_STAGE="$(echo "${line#current_stage:}" | xargs)" ;;
        started_at:*) STARTED_AT="$(echo "${line#started_at:}" | xargs)" ;;
        pr_number:*) PR_NUMBER="$(echo "${line#pr_number:}" | xargs)" ;;
        progress_comment_id:*) PROGRESS_COMMENT_ID="$(echo "${line#progress_comment_id:}" | xargs)" ;;
        # Indented lines are per-stage entries under "stages:"; rebuild the
        # in-memory "<id>:<status>" records from them.
        " "*)
          local trimmed
          trimmed="$(echo "$line" | xargs)"
          if [[ "$trimmed" == *":"* ]]; then
            local sid="${trimmed%%:*}"
            local sst="${trimmed#*: }"
            [[ -n "$sid" && "$sid" != "stages" ]] && STAGE_STATUSES="${STAGE_STATUSES}
${sid}:${sst}"
          fi
          ;;
      esac
    fi
  done < "$STATE_FILE"

  # Everything after the "## Log" heading is the accumulated log.
  LOG_ENTRIES="$(sed -n '/^## Log$/,$ { /^## Log$/d; p; }' "$STATE_FILE" 2>/dev/null || true)"

  # Derive the bare issue number from the "#123" form.
  if [[ -n "$GITHUB_ISSUE" && "$GITHUB_ISSUE" =~ ^#([0-9]+)$ ]]; then
    ISSUE_NUMBER="${BASH_REMATCH[1]}"
  fi

  if [[ -z "$GOAL" ]]; then
    error "Could not parse goal from state file."
    exit 1
  fi

  if [[ "$PIPELINE_STATUS" == "complete" ]]; then
    warn "Pipeline already completed. Start a new one."
    exit 0
  fi

  if [[ "$PIPELINE_STATUS" == "aborted" ]]; then
    warn "Pipeline was aborted. Start a new one or edit the state file."
    exit 0
  fi

  if [[ "$PIPELINE_STATUS" == "interrupted" ]]; then
    info "Resuming from interruption..."
  fi

  # Best-effort checkout of the pipeline branch; failure is tolerated.
  if [[ -n "$GIT_BRANCH" ]]; then
    git checkout "$GIT_BRANCH" 2>/dev/null || true
  fi

  # NOTE(review): the epoch restarts at resume time, so "elapsed" reflects
  # time since resume, not since the original start — confirm intended.
  PIPELINE_START_EPOCH="$(now_epoch)"
  gh_init
  load_pipeline_config
  PIPELINE_STATUS="running"
  success "Resumed pipeline: ${BOLD}$PIPELINE_NAME${RESET} — stage: $CURRENT_STAGE"
}
|
|
1035
|
+
|
|
1036
|
+
# ─── Task Type Detection ───────────────────────────────────────────────────

# Classify a goal string into a task type by keyword matching on the
# lowercased text. Arms are checked in order — first match wins — and
# anything unrecognized falls back to "feature".
detect_task_type() {
  local needle
  needle=$(echo "$1" | tr '[:upper:]' '[:lower:]')
  case "$needle" in
    *fix*|*bug*|*broken*|*error*|*crash*)       echo "bug" ;;
    *refactor*|*clean*|*reorganize*|*extract*)  echo "refactor" ;;
    *test*|*coverage*|*spec*)                   echo "testing" ;;
    *security*|*audit*|*vuln*|*cve*)            echo "security" ;;
    *doc*|*readme*|*guide*)                     echo "docs" ;;
    *deploy*|*ci*|*pipeline*|*docker*|*infra*)  echo "devops" ;;
    *migrate*|*migration*|*schema*)             echo "migration" ;;
    *architect*|*design*|*rfc*|*adr*)           echo "architecture" ;;
    *)                                          echo "feature" ;;
  esac
}
|
|
1054
|
+
|
|
1055
|
+
# Map a task type (from detect_task_type) to its team template name;
# unknown types get the generic "feature-dev" template.
template_for_type() {
  local tmpl
  case "$1" in
    bug)          tmpl="bug-fix" ;;
    refactor)     tmpl="refactor" ;;
    testing)      tmpl="testing" ;;
    security)     tmpl="security-audit" ;;
    docs)         tmpl="documentation" ;;
    devops)       tmpl="devops" ;;
    migration)    tmpl="migration" ;;
    architecture) tmpl="architecture" ;;
    *)            tmpl="feature-dev" ;;
  esac
  echo "$tmpl"
}
|
|
1068
|
+
|
|
1069
|
+
# ─── Stage Preview ──────────────────────────────────────────────────────────

# Print a one-line human-readable preview of what a stage will do, framed by
# a colored header. Unknown stage ids get the header but no description.
show_stage_preview() {
  local stage="$1"
  local desc=""
  case "$stage" in
    intake)   desc="Fetch issue, detect task type, create branch, self-assign" ;;
    plan)     desc="Generate plan via Claude, post task checklist to issue" ;;
    design)   desc="Generate Architecture Decision Record (ADR), evaluate alternatives" ;;
    build)    desc="Delegate to ${CYAN}shipwright loop${RESET} for autonomous building" ;;
    test)     desc="Run test suite and check coverage" ;;
    review)   desc="AI code review on the diff, post findings" ;;
    pr)       desc="Create GitHub PR with labels, reviewers, milestone" ;;
    merge)    desc="Wait for CI checks, merge PR, optionally delete branch" ;;
    deploy)   desc="Deploy to staging/production with rollback" ;;
    validate) desc="Smoke tests, health checks, close issue" ;;
    monitor)  desc="Post-deploy monitoring, health checks, auto-rollback" ;;
  esac
  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Stage: ${stage} ━━━${RESET}"
  [[ -n "$desc" ]] && echo -e " ${desc}"
  echo ""
}
|
|
1090
|
+
|
|
1091
|
+
# ─── Stage Functions ────────────────────────────────────────────────────────

# Stage: intake — gather everything later stages need: issue metadata from
# GitHub (when --issue was given), the detected task type/template, an
# auto-detected test command, a freshly created work branch, an initial
# progress comment, and an intake.json artifact.
stage_intake() {
  local project_lang
  project_lang=$(detect_project_lang)
  info "Project: ${BOLD}$project_lang${RESET}"

  # 1. Fetch issue metadata if --issue provided
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local meta
    meta=$(gh_get_issue_meta "$ISSUE_NUMBER")

    if [[ -n "$meta" ]]; then
      # meta is JSON; pull title/body/labels/milestone/assignees out of it.
      # jq failures fall back to empty via `|| true`; literal "null" strings
      # are normalized to empty below.
      GOAL=$(echo "$meta" | jq -r '.title // ""')
      ISSUE_BODY=$(echo "$meta" | jq -r '.body // ""')
      ISSUE_LABELS=$(echo "$meta" | jq -r '[.labels[].name] | join(",")' 2>/dev/null || true)
      ISSUE_MILESTONE=$(echo "$meta" | jq -r '.milestone.title // ""' 2>/dev/null || true)
      ISSUE_ASSIGNEES=$(echo "$meta" | jq -r '[.assignees[].login] | join(",")' 2>/dev/null || true)
      [[ "$ISSUE_MILESTONE" == "null" ]] && ISSUE_MILESTONE=""
      [[ "$ISSUE_LABELS" == "null" ]] && ISSUE_LABELS=""
    else
      # Fallback: just get title
      GOAL=$(gh issue view "$ISSUE_NUMBER" --json title -q .title 2>/dev/null) || {
        error "Failed to fetch issue #$ISSUE_NUMBER"
        return 1
      }
    fi

    GITHUB_ISSUE="#$ISSUE_NUMBER"
    info "Issue #$ISSUE_NUMBER: ${BOLD}$GOAL${RESET}"

    if [[ -n "$ISSUE_LABELS" ]]; then
      info "Labels: ${DIM}$ISSUE_LABELS${RESET}"
    fi
    if [[ -n "$ISSUE_MILESTONE" ]]; then
      info "Milestone: ${DIM}$ISSUE_MILESTONE${RESET}"
    fi

    # Self-assign
    gh_assign_self "$ISSUE_NUMBER"

    # Add in-progress label
    gh_add_labels "$ISSUE_NUMBER" "pipeline/in-progress"
  fi

  # 2. Detect task type
  TASK_TYPE=$(detect_task_type "$GOAL")
  local suggested_template
  suggested_template=$(template_for_type "$TASK_TYPE")
  info "Detected: ${BOLD}$TASK_TYPE${RESET} → team template: ${CYAN}$suggested_template${RESET}"

  # 3. Auto-detect test command if not provided
  if [[ -z "$TEST_CMD" ]]; then
    TEST_CMD=$(detect_test_cmd)
    if [[ -n "$TEST_CMD" ]]; then
      info "Auto-detected test: ${DIM}$TEST_CMD${RESET}"
    fi
  fi

  # 4. Create branch with smart prefix: "<prefix>/<slugified-goal>[-<issue>]",
  # slug limited to 40 chars of [a-z0-9-]
  local prefix
  prefix=$(branch_prefix_for_type "$TASK_TYPE")
  local slug
  slug=$(echo "$GOAL" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g' | sed 's/--*/-/g' | cut -c1-40)
  slug="${slug%-}"
  [[ -n "$ISSUE_NUMBER" ]] && slug="${slug}-${ISSUE_NUMBER}"
  GIT_BRANCH="${prefix}/${slug}"

  git checkout -b "$GIT_BRANCH" 2>/dev/null || {
    info "Branch $GIT_BRANCH exists, checking out"
    git checkout "$GIT_BRANCH" 2>/dev/null || true
  }
  success "Branch: ${BOLD}$GIT_BRANCH${RESET}"

  # 5. Post initial progress comment on GitHub issue
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local body
    body=$(gh_build_progress_body)
    gh_post_progress "$ISSUE_NUMBER" "$body"
  fi

  # 6. Save artifacts
  save_artifact "intake.json" "$(jq -n \
    --arg goal "$GOAL" --arg type "$TASK_TYPE" \
    --arg template "$suggested_template" --arg branch "$GIT_BRANCH" \
    --arg issue "${GITHUB_ISSUE:-}" --arg lang "$project_lang" \
    --arg test_cmd "${TEST_CMD:-}" --arg labels "${ISSUE_LABELS:-}" \
    --arg milestone "${ISSUE_MILESTONE:-}" --arg body "${ISSUE_BODY:-}" \
    '{goal:$goal, type:$type, template:$template, branch:$branch,
      issue:$issue, language:$lang, test_cmd:$test_cmd,
      labels:$labels, milestone:$milestone, body:$body}')"

  log_stage "intake" "Goal: $GOAL
Type: $TASK_TYPE → template: $suggested_template
Branch: $GIT_BRANCH
Language: $project_lang
Test cmd: ${TEST_CMD:-none detected}"
}
|
|
1189
|
+
|
|
1190
|
+
# Stage: plan — ask the Claude CLI for an implementation plan, extract a
# task checklist from it, write local task files for the build stage, and
# publish the plan to the GitHub issue and wiki.
stage_plan() {
  local plan_file="$ARTIFACTS_DIR/plan.md"

  if ! command -v claude &>/dev/null; then
    error "Claude CLI not found — cannot generate plan"
    return 1
  fi

  info "Generating implementation plan..."

  # Build rich prompt with all available context
  local plan_prompt="You are an autonomous development agent. Analyze this codebase and create a detailed implementation plan.

## Goal
${GOAL}
"

  # Add issue context
  if [[ -n "$ISSUE_BODY" ]]; then
    plan_prompt="${plan_prompt}
## Issue Description
${ISSUE_BODY}
"
  fi

  # Add project context
  local project_lang
  project_lang=$(detect_project_lang)
  plan_prompt="${plan_prompt}
## Project Context
- Language: ${project_lang}
- Test command: ${TEST_CMD:-not configured}
- Task type: ${TASK_TYPE:-feature}

## Required Output
Create a Markdown plan with these sections:

### Files to Modify
List every file to create or modify with full paths.

### Implementation Steps
Numbered steps in order of execution. Be specific about what code to write.

### Task Checklist
A checkbox list of discrete tasks that can be tracked:
- [ ] Task 1: Description
- [ ] Task 2: Description
(Include 5-15 tasks covering the full implementation)

### Testing Approach
How to verify the implementation works.

### Definition of Done
Checklist of completion criteria.
"

  # Model resolution: per-stage config > pipeline default > "opus",
  # with an explicit --model flag overriding everything.
  local plan_model
  plan_model=$(jq -r --arg id "plan" '(.stages[] | select(.id == $id) | .config.model) // .defaults.model // "opus"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -n "$MODEL" ]] && plan_model="$MODEL"
  [[ -z "$plan_model" || "$plan_model" == "null" ]] && plan_model="opus"

  # Claude's stderr (token accounting) is captured separately and parsed.
  local _token_log="${ARTIFACTS_DIR}/.claude-tokens-plan.log"
  claude --print --model "$plan_model" --max-turns 10 \
    "$plan_prompt" > "$plan_file" 2>"$_token_log" || true
  parse_claude_tokens "$_token_log"

  if [[ ! -s "$plan_file" ]]; then
    error "Plan generation failed"
    return 1
  fi

  local line_count
  line_count=$(wc -l < "$plan_file" | xargs)
  info "Plan saved: ${DIM}$plan_file${RESET} (${line_count} lines)"

  # Extract task checklist for GitHub issue and task tracking
  local checklist
  checklist=$(sed -n '/### Task Checklist/,/^###/p' "$plan_file" 2>/dev/null | \
    grep '^\s*- \[' | head -20)

  if [[ -z "$checklist" ]]; then
    # Fallback: extract any checkbox lines
    checklist=$(grep '^\s*- \[' "$plan_file" 2>/dev/null | head -20)
  fi

  # Write local task file for Claude Code build stage
  if [[ -n "$checklist" ]]; then
    cat > "$TASKS_FILE" <<TASKS_EOF
# Pipeline Tasks — ${GOAL}

## Implementation Checklist
${checklist}

## Context
- Pipeline: ${PIPELINE_NAME}
- Branch: ${GIT_BRANCH}
- Issue: ${GITHUB_ISSUE:-none}
- Generated: $(now_iso)
TASKS_EOF
    info "Task list: ${DIM}$TASKS_FILE${RESET} ($(echo "$checklist" | wc -l | xargs) tasks)"
  fi

  # Post plan + task checklist to GitHub issue
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local plan_summary
    plan_summary=$(head -50 "$plan_file")
    local gh_body="## 📋 Implementation Plan

<details>
<summary>Click to expand full plan (${line_count} lines)</summary>

${plan_summary}

</details>
"
    if [[ -n "$checklist" ]]; then
      gh_body="${gh_body}
## ✅ Task Checklist
${checklist}
"
    fi

    gh_body="${gh_body}
---
_Generated by \`shipwright pipeline\` at $(now_iso)_"

    gh_comment_issue "$ISSUE_NUMBER" "$gh_body"
    info "Plan posted to issue #$ISSUE_NUMBER"
  fi

  # Push plan to wiki
  gh_wiki_page "Pipeline-Plan-${ISSUE_NUMBER:-inline}" "$(<"$plan_file")"

  # Generate Claude Code task list
  local cc_tasks_file="$PROJECT_ROOT/.claude/tasks.md"
  if [[ -n "$checklist" ]]; then
    cat > "$cc_tasks_file" <<CC_TASKS_EOF
# Tasks — ${GOAL}

## Status: In Progress
Pipeline: ${PIPELINE_NAME} | Branch: ${GIT_BRANCH}

## Checklist
${checklist}

## Notes
- Generated from pipeline plan at $(now_iso)
- Pipeline will update status as tasks complete
CC_TASKS_EOF
    info "Claude Code tasks: ${DIM}$cc_tasks_file${RESET}"
  fi

  # Extract definition of done for quality gates
  sed -n '/[Dd]efinition [Oo]f [Dd]one/,/^#/p' "$plan_file" | head -20 > "$ARTIFACTS_DIR/dod.md" 2>/dev/null || true

  log_stage "plan" "Generated plan.md (${line_count} lines, $(echo "$checklist" | wc -l | xargs) tasks)"
}
|
|
1347
|
+
|
|
1348
|
+
# Stage: design — feed the plan back to Claude to produce an Architecture
# Decision Record (ADR), then publish it to the GitHub issue and wiki.
# Skips cleanly when no plan exists; fails when the Claude CLI is missing.
stage_design() {
  CURRENT_STAGE_ID="design"
  local plan_file="$ARTIFACTS_DIR/plan.md"
  local design_file="$ARTIFACTS_DIR/design.md"

  if [[ ! -s "$plan_file" ]]; then
    warn "No plan found — skipping design stage"
    return 0
  fi

  if ! command -v claude &>/dev/null; then
    error "Claude CLI not found — cannot generate design"
    return 1
  fi

  info "Generating Architecture Decision Record..."

  # Memory integration — inject context if memory system available
  local memory_context=""
  if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
    memory_context=$(bash "$SCRIPT_DIR/cct-memory.sh" inject "design" 2>/dev/null) || true
  fi

  # Build design prompt with plan + project context
  local project_lang
  project_lang=$(detect_project_lang)

  # Note: the ${memory_context:+...} expansion below splices the optional
  # "Historical Context" section into the prompt only when memory_context
  # is non-empty.
  local design_prompt="You are a senior software architect. Review the implementation plan below and produce an Architecture Decision Record (ADR).

## Goal
${GOAL}

## Implementation Plan
$(cat "$plan_file")

## Project Context
- Language: ${project_lang}
- Test command: ${TEST_CMD:-not configured}
- Task type: ${TASK_TYPE:-feature}
${memory_context:+
## Historical Context (from memory)
${memory_context}
}
## Required Output — Architecture Decision Record

Produce this EXACT format:

# Design: ${GOAL}

## Context
[What problem we're solving, constraints from the codebase]

## Decision
[The chosen approach — be specific about patterns, data flow, error handling]

## Alternatives Considered
1. [Alternative A] — Pros: ... / Cons: ...
2. [Alternative B] — Pros: ... / Cons: ...

## Implementation Plan
- Files to create: [list with full paths]
- Files to modify: [list with full paths]
- Dependencies: [new deps if any]
- Risk areas: [fragile code, performance concerns]

## Validation Criteria
- [ ] [How we'll know the design is correct — testable criteria]
- [ ] [Additional validation items]

Be concrete and specific. Reference actual file paths in the codebase. Consider edge cases and failure modes."

  # Model resolution mirrors stage_plan: stage config > defaults > "opus",
  # overridden by an explicit --model flag.
  local design_model
  design_model=$(jq -r --arg id "design" '(.stages[] | select(.id == $id) | .config.model) // .defaults.model // "opus"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -n "$MODEL" ]] && design_model="$MODEL"
  [[ -z "$design_model" || "$design_model" == "null" ]] && design_model="opus"

  local _token_log="${ARTIFACTS_DIR}/.claude-tokens-design.log"
  claude --print --model "$design_model" --max-turns 10 \
    "$design_prompt" > "$design_file" 2>"$_token_log" || true
  parse_claude_tokens "$_token_log"

  if [[ ! -s "$design_file" ]]; then
    error "Design generation failed"
    return 1
  fi

  local line_count
  line_count=$(wc -l < "$design_file" | xargs)
  info "Design saved: ${DIM}$design_file${RESET} (${line_count} lines)"

  # Extract file lists for build stage awareness
  local files_to_create files_to_modify
  files_to_create=$(sed -n '/Files to create/,/^-\|^#\|^$/p' "$design_file" 2>/dev/null | grep -E '^\s*-' | head -20 || true)
  files_to_modify=$(sed -n '/Files to modify/,/^-\|^#\|^$/p' "$design_file" 2>/dev/null | grep -E '^\s*-' | head -20 || true)

  if [[ -n "$files_to_create" || -n "$files_to_modify" ]]; then
    info "Design scope: ${DIM}$(echo "$files_to_create $files_to_modify" | grep -c '^\s*-' || echo 0) file(s)${RESET}"
  fi

  # Post design to GitHub issue
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local design_summary
    design_summary=$(head -60 "$design_file")
    gh_comment_issue "$ISSUE_NUMBER" "## 📐 Architecture Decision Record

<details>
<summary>Click to expand ADR (${line_count} lines)</summary>

${design_summary}

</details>

---
_Generated by \`shipwright pipeline\` design stage at $(now_iso)_"
  fi

  # Push design to wiki
  gh_wiki_page "Pipeline-Design-${ISSUE_NUMBER:-inline}" "$(<"$design_file")"

  log_stage "design" "Generated design.md (${line_count} lines)"
}
|
|
1469
|
+
|
|
1470
|
+
# stage_build — drive the implementation phase via `cct loop`.
#
# Assembles an "enriched goal" (GOAL + plan.md + design.md + memory context +
# task list), resolves per-stage settings from $PIPELINE_CONFIG (CLI globals
# TEST_CMD / AGENTS / MODEL take precedence over config values), then invokes
# `cct loop` with the collected arguments.
# Globals read: GOAL, TEST_CMD, AGENTS, MODEL, PIPELINE_CONFIG, ARTIFACTS_DIR,
#   SCRIPT_DIR, TASKS_FILE, ISSUE_NUMBER, BASE_BRANCH.
# Globals written: CURRENT_STAGE_ID.
# Returns: 0 on success, 1 when the build loop fails.
stage_build() {
  local plan_file="$ARTIFACTS_DIR/plan.md"
  local design_file="$ARTIFACTS_DIR/design.md"
  local dod_file="$ARTIFACTS_DIR/dod.md"
  local loop_args=()

  # Memory integration — inject context if memory system available
  local memory_context=""
  if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
    memory_context=$(bash "$SCRIPT_DIR/cct-memory.sh" inject "build" 2>/dev/null) || true
  fi

  # Build enriched goal with full context.
  # Each artifact below is appended only when it exists and is non-empty (-s).
  local enriched_goal="$GOAL"
  if [[ -s "$plan_file" ]]; then
    enriched_goal="$GOAL

Implementation plan (follow this exactly):
$(cat "$plan_file")"
  fi

  # Inject approved design document
  if [[ -s "$design_file" ]]; then
    enriched_goal="${enriched_goal}

Follow the approved design document:
$(cat "$design_file")"
  fi

  # Inject memory context
  if [[ -n "$memory_context" ]]; then
    enriched_goal="${enriched_goal}

Historical context (lessons from previous pipelines):
${memory_context}"
  fi

  # Add task list context
  if [[ -s "$TASKS_FILE" ]]; then
    enriched_goal="${enriched_goal}

Task tracking (check off items as you complete them):
$(cat "$TASKS_FILE")"
  fi

  # The goal is the first positional argument to `cct loop`; flags follow.
  loop_args+=("$enriched_goal")

  # Build loop args from pipeline config + CLI overrides
  CURRENT_STAGE_ID="build"

  # Test command: CLI override → stage config → pipeline defaults → auto-detect.
  local test_cmd="${TEST_CMD}"
  if [[ -z "$test_cmd" ]]; then
    test_cmd=$(jq -r --arg id "build" '(.stages[] | select(.id == $id) | .config.test_cmd) // .defaults.test_cmd // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
    [[ "$test_cmd" == "null" ]] && test_cmd=""
  fi
  # Auto-detect if still empty
  if [[ -z "$test_cmd" ]]; then
    test_cmd=$(detect_test_cmd)
  fi

  local max_iter
  max_iter=$(jq -r --arg id "build" '(.stages[] | select(.id == $id) | .config.max_iterations) // 20' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$max_iter" || "$max_iter" == "null" ]] && max_iter=20

  local agents="${AGENTS}"
  if [[ -z "$agents" ]]; then
    agents=$(jq -r --arg id "build" '(.stages[] | select(.id == $id) | .config.agents) // .defaults.agents // 1' "$PIPELINE_CONFIG" 2>/dev/null) || true
    [[ -z "$agents" || "$agents" == "null" ]] && agents=1
  fi

  local audit
  audit=$(jq -r --arg id "build" '(.stages[] | select(.id == $id) | .config.audit) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  local quality
  quality=$(jq -r --arg id "build" '(.stages[] | select(.id == $id) | .config.quality_gates) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true

  local build_model="${MODEL}"
  if [[ -z "$build_model" ]]; then
    build_model=$(jq -r '.defaults.model // "opus"' "$PIPELINE_CONFIG" 2>/dev/null) || true
    [[ -z "$build_model" || "$build_model" == "null" ]] && build_model="opus"
  fi

  [[ -n "$test_cmd" && "$test_cmd" != "null" ]] && loop_args+=(--test-cmd "$test_cmd")
  loop_args+=(--max-iterations "$max_iter")
  loop_args+=(--model "$build_model")
  # Numeric test; stderr suppressed so a non-numeric agents value just skips the flag.
  [[ "$agents" -gt 1 ]] 2>/dev/null && loop_args+=(--agents "$agents")
  [[ "$audit" == "true" ]] && loop_args+=(--audit --audit-agent)
  [[ "$quality" == "true" ]] && loop_args+=(--quality-gates)
  [[ -s "$dod_file" ]] && loop_args+=(--definition-of-done "$dod_file")

  info "Starting build loop: ${DIM}shipwright loop${RESET} (max ${max_iter} iterations, ${agents} agent(s))"

  # Post build start to GitHub
  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "🔨 **Build started** — \`shipwright loop\` with ${max_iter} max iterations, ${agents} agent(s), model: ${build_model}"
  fi

  # stderr is captured so parse_claude_tokens can extract token accounting
  # from it; it is parsed on both the failure and success paths.
  local _token_log="${ARTIFACTS_DIR}/.claude-tokens-build.log"
  cct loop "${loop_args[@]}" 2>"$_token_log" || {
    parse_claude_tokens "$_token_log"
    error "Build loop failed"
    return 1
  }
  parse_claude_tokens "$_token_log"

  # Count commits made during build
  local commit_count
  commit_count=$(git log --oneline "${BASE_BRANCH}..HEAD" 2>/dev/null | wc -l | xargs)
  info "Build produced ${BOLD}$commit_count${RESET} commit(s)"

  log_stage "build" "Build loop completed ($commit_count commits)"
}
|
|
1581
|
+
|
|
1582
|
+
# stage_test — run the configured (or auto-detected) test command and enforce
# an optional minimum coverage threshold.
#
# Command resolution order: $TEST_CMD (CLI) → stage/default config in
# $PIPELINE_CONFIG → detect_test_cmd. Skips cleanly when nothing is found.
# Test output is captured to $ARTIFACTS_DIR/test-results.log; pass/fail is
# posted to the GitHub issue when $ISSUE_NUMBER is set.
# Returns: 0 on pass (or skip), 1 on test failure or coverage shortfall.
stage_test() {
  CURRENT_STAGE_ID="test"
  local test_cmd="${TEST_CMD}"
  if [[ -z "$test_cmd" ]]; then
    test_cmd=$(jq -r --arg id "test" '(.stages[] | select(.id == $id) | .config.test_cmd) // .defaults.test_cmd // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
    [[ -z "$test_cmd" || "$test_cmd" == "null" ]] && test_cmd=""
  fi
  # Auto-detect from the project layout if config gave us nothing
  if [[ -z "$test_cmd" ]]; then
    test_cmd=$(detect_test_cmd)
  fi
  if [[ -z "$test_cmd" ]]; then
    warn "No test command found — skipping test stage"
    return 0
  fi

  local coverage_min
  coverage_min=$(jq -r --arg id "test" '(.stages[] | select(.id == $id) | .config.coverage_min) // 0' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$coverage_min" || "$coverage_min" == "null" ]] && coverage_min=0

  local test_log="$ARTIFACTS_DIR/test-results.log"

  info "Running tests: ${DIM}$test_cmd${RESET}"
  local test_exit=0
  # test_cmd comes from trusted pipeline config/CLI; eval is intentional here.
  eval "$test_cmd" > "$test_log" 2>&1 || test_exit=$?

  if [[ "$test_exit" -eq 0 ]]; then
    success "Tests passed"
  else
    error "Tests failed (exit code: $test_exit)"
    tail -20 "$test_log"

    # Post failure to GitHub
    if [[ -n "$ISSUE_NUMBER" ]]; then
      gh_comment_issue "$ISSUE_NUMBER" "❌ **Tests failed**
\`\`\`
$(tail -20 "$test_log")
\`\`\`"
    fi
    return 1
  fi

  # Coverage check.
  # awk comparison so fractional thresholds (e.g. 80.5) work — the previous
  # integer-only `[[ -gt ]]` test errored (silently) on floats and skipped
  # the whole gate.
  local coverage=""
  if awk -v min="$coverage_min" 'BEGIN{exit !(min > 0)}' 2>/dev/null; then
    # [[:space:]] instead of \s: \s in ERE is a GNU extension and does not
    # work on BSD/macOS grep. head -1 guards against multiple matching lines
    # in the log, which previously produced a multi-line value and broke the
    # awk threshold comparison below.
    coverage=$(grep -oE 'Statements[[:space:]]*:[[:space:]]*[0-9.]+' "$test_log" 2>/dev/null | grep -oE '[0-9.]+$' | head -1 || true)
    if [[ -z "$coverage" ]]; then
      # Fallback: istanbul/jest "All files | NN.N" table format.
      coverage=$(grep -oE 'All files[[:space:]]*\|[[:space:]]*[0-9.]+' "$test_log" 2>/dev/null | grep -oE '[0-9.]+$' | head -1 || true)
    fi
    [[ -z "$coverage" ]] && coverage="0"
    if awk -v cov="$coverage" -v min="$coverage_min" 'BEGIN{exit !(cov < min)}' 2>/dev/null; then
      warn "Coverage ${coverage}% below minimum ${coverage_min}%"
      return 1
    fi
    info "Coverage: ${coverage}% (min: ${coverage_min}%)"
  fi

  # Post test results to GitHub
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local test_summary
    test_summary=$(tail -10 "$test_log")
    local cov_line=""
    [[ -n "$coverage" ]] && cov_line="
**Coverage:** ${coverage}%"
    gh_comment_issue "$ISSUE_NUMBER" "✅ **Tests passed**${cov_line}
<details>
<summary>Test output</summary>

\`\`\`
${test_summary}
\`\`\`
</details>"
  fi

  log_stage "test" "Tests passed${coverage:+ (coverage: ${coverage}%)}"
}
|
|
1655
|
+
|
|
1656
|
+
# stage_review — run a non-blocking AI code review over the branch diff.
#
# Diffs $BASE_BRANCH...$GIT_BRANCH (falling back to HEAD~5), feeds the patch
# to the `claude` CLI with a reviewer prompt, then counts severity markers in
# the output. Findings are reported and posted to the GitHub issue but never
# fail the stage — only a missing/empty diff or CLI short-circuits (return 0).
# Globals read: ARTIFACTS_DIR, BASE_BRANCH, GIT_BRANCH, MODEL, ISSUE_NUMBER.
# Globals written: CURRENT_STAGE_ID.
stage_review() {
  CURRENT_STAGE_ID="review"
  local diff_file="$ARTIFACTS_DIR/review-diff.patch"
  local review_file="$ARTIFACTS_DIR/review.md"

  # Prefer the merge-base diff; fall back to the last 5 commits if the base
  # branch ref is unavailable.
  git diff "${BASE_BRANCH}...${GIT_BRANCH}" > "$diff_file" 2>/dev/null || \
    git diff HEAD~5 > "$diff_file" 2>/dev/null || true

  if [[ ! -s "$diff_file" ]]; then
    warn "No diff found — skipping review"
    return 0
  fi

  if ! command -v claude &>/dev/null; then
    warn "Claude CLI not found — skipping AI review"
    return 0
  fi

  local diff_stats
  diff_stats=$(git diff --stat "${BASE_BRANCH}...${GIT_BRANCH}" 2>/dev/null | tail -1 || echo "")
  info "Running AI code review... ${DIM}($diff_stats)${RESET}"

  local review_model="${MODEL:-opus}"

  # The full diff is embedded into the prompt argument; stderr is captured for
  # token accounting via parse_claude_tokens. `|| true`: review is best-effort.
  claude --print --model "$review_model" --max-turns 15 \
    "You are a senior code reviewer. Review this git diff thoroughly.

For each issue found, use this format:
- **[SEVERITY]** file:line — description

Severity levels: Critical, Bug, Security, Warning, Suggestion

Focus on:
1. Logic bugs and edge cases
2. Security vulnerabilities (injection, XSS, auth bypass, etc.)
3. Error handling gaps
4. Performance issues
5. Missing validation

Be specific. Reference exact file paths and line numbers. Only flag genuine issues.

$(cat "$diff_file")" > "$review_file" 2>"${ARTIFACTS_DIR}/.claude-tokens-review.log" || true
  parse_claude_tokens "${ARTIFACTS_DIR}/.claude-tokens-review.log"

  if [[ ! -s "$review_file" ]]; then
    warn "Review produced no output"
    return 0
  fi

  # Count findings by matching the **[SEVERITY]** markers the prompt asked for
  # (brackets optional, case-insensitive). grep -c counts matching LINES, so
  # one finding per line is assumed. `|| true` keeps set -e-style flows alive
  # when nothing matches; the :-0 defaults cover an empty capture.
  local critical_count bug_count warning_count
  critical_count=$(grep -ciE '\*\*\[?Critical\]?\*\*' "$review_file" 2>/dev/null || true)
  critical_count="${critical_count:-0}"
  bug_count=$(grep -ciE '\*\*\[?(Bug|Security)\]?\*\*' "$review_file" 2>/dev/null || true)
  bug_count="${bug_count:-0}"
  warning_count=$(grep -ciE '\*\*\[?(Warning|Suggestion)\]?\*\*' "$review_file" 2>/dev/null || true)
  warning_count="${warning_count:-0}"
  local total_issues=$((critical_count + bug_count + warning_count))

  # Report severity tiers; note even critical findings do not fail the stage.
  if [[ "$critical_count" -gt 0 ]]; then
    error "Review found ${BOLD}$critical_count critical${RESET} issue(s) — see $review_file"
  elif [[ "$bug_count" -gt 0 ]]; then
    warn "Review found $bug_count bug/security issue(s) — see ${DIM}$review_file${RESET}"
  elif [[ "$total_issues" -gt 0 ]]; then
    info "Review found $total_issues suggestion(s)"
  else
    success "Review clean"
  fi

  # Post review to GitHub issue
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local review_summary
    review_summary=$(head -40 "$review_file")
    gh_comment_issue "$ISSUE_NUMBER" "## 🔍 Code Review

**Stats:** $diff_stats
**Issues found:** $total_issues (${critical_count} critical, ${bug_count} bugs, ${warning_count} suggestions)

<details>
<summary>Review details</summary>

${review_summary}

</details>"
  fi

  log_stage "review" "AI review complete ($total_issues issues: $critical_count critical, $bug_count bugs, $warning_count suggestions)"
}
|
|
1743
|
+
|
|
1744
|
+
# stage_pr — push the work branch and open a GitHub pull request.
#
# Rebases (best-effort), pushes with --force-with-lease (plain push fallback
# for the first push), assembles a PR body from the plan/test/review
# artifacts, propagates labels/reviewers/milestone, creates the PR via `gh`,
# and links it back to the originating issue. Optionally waits on CI.
# Globals read: ARTIFACTS_DIR, GIT_BRANCH, BASE_BRANCH, GOAL, PIPELINE_NAME,
#   PIPELINE_CONFIG, PIPELINE_START_EPOCH, MODEL, AGENTS, LABELS, REVIEWERS,
#   GITHUB_ISSUE, ISSUE_NUMBER, ISSUE_LABELS, ISSUE_MILESTONE.
# Globals written: CURRENT_STAGE_ID, PR_NUMBER.
# Returns: 0 on success, 1 if the push or PR creation fails.
stage_pr() {
  CURRENT_STAGE_ID="pr"
  local plan_file="$ARTIFACTS_DIR/plan.md"
  local test_log="$ARTIFACTS_DIR/test-results.log"
  local review_file="$ARTIFACTS_DIR/review.md"

  # Auto-rebase onto latest base branch before PR
  auto_rebase || {
    warn "Rebase/merge failed — pushing as-is"
  }

  # Push branch
  info "Pushing branch: $GIT_BRANCH"
  git push -u origin "$GIT_BRANCH" --force-with-lease 2>/dev/null || {
    # Retry with regular push if force-with-lease fails (first push)
    git push -u origin "$GIT_BRANCH" 2>/dev/null || {
      error "Failed to push branch"
      return 1
    }
  }

  # Build PR title: first heading of the plan, markdown #s stripped, capped
  # at 70 chars; fall back to the raw goal.
  local pr_title
  pr_title=$(head -1 "$plan_file" 2>/dev/null | sed 's/^#* *//' | cut -c1-70)
  [[ -z "$pr_title" ]] && pr_title="$GOAL"

  # Build comprehensive PR body
  local plan_summary=""
  if [[ -s "$plan_file" ]]; then
    # Lines 6-20 of the plan (skip the title block).
    plan_summary=$(head -20 "$plan_file" 2>/dev/null | tail -15)
  fi

  local test_summary=""
  if [[ -s "$test_log" ]]; then
    test_summary=$(tail -10 "$test_log")
  fi

  local review_summary=""
  if [[ -s "$review_file" ]]; then
    local total_issues
    total_issues=$(grep -ciE '\*\*\[?(Critical|Bug|Security|Warning|Suggestion)\]?\*\*' "$review_file" 2>/dev/null || true)
    total_issues="${total_issues:-0}"
    review_summary="**Code review:** $total_issues issues found"
  fi

  local closes_line=""
  [[ -n "${GITHUB_ISSUE:-}" ]] && closes_line="Closes ${GITHUB_ISSUE}"

  local diff_stats
  diff_stats=$(git diff --stat "${BASE_BRANCH}...${GIT_BRANCH}" 2>/dev/null | tail -1 || echo "")

  local commit_count
  commit_count=$(git log --oneline "${BASE_BRANCH}..HEAD" 2>/dev/null | wc -l | xargs)

  local total_dur=""
  if [[ -n "$PIPELINE_START_EPOCH" ]]; then
    total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
  fi

  # Unquoted heredoc delimiter: ${...} and $(...) inside the body expand here.
  local pr_body
  pr_body="$(cat <<EOF
## Summary
${plan_summary:-$GOAL}

## Changes
${diff_stats}
${commit_count} commit(s) via \`shipwright pipeline\` (${PIPELINE_NAME})

## Test Results
\`\`\`
${test_summary:-No test output}
\`\`\`

${review_summary}

${closes_line}

---

| Metric | Value |
|--------|-------|
| Pipeline | \`${PIPELINE_NAME}\` |
| Duration | ${total_dur:-—} |
| Model | ${MODEL:-opus} |
| Agents | ${AGENTS:-1} |

Generated by \`shipwright pipeline\`
EOF
)"

  # Build gh pr create args
  local pr_args=(--title "$pr_title" --body "$pr_body" --base "$BASE_BRANCH")

  # Propagate labels from issue + CLI (comma-joined into one --label value)
  local all_labels="${LABELS}"
  if [[ -n "$ISSUE_LABELS" ]]; then
    if [[ -n "$all_labels" ]]; then
      all_labels="${all_labels},${ISSUE_LABELS}"
    else
      all_labels="$ISSUE_LABELS"
    fi
  fi
  if [[ -n "$all_labels" ]]; then
    pr_args+=(--label "$all_labels")
  fi

  # Auto-detect or use provided reviewers
  local reviewers="${REVIEWERS}"
  if [[ -z "$reviewers" ]]; then
    reviewers=$(detect_reviewers)
  fi
  if [[ -n "$reviewers" ]]; then
    pr_args+=(--reviewer "$reviewers")
    info "Reviewers: ${DIM}$reviewers${RESET}"
  fi

  # Propagate milestone
  if [[ -n "$ISSUE_MILESTONE" ]]; then
    pr_args+=(--milestone "$ISSUE_MILESTONE")
    info "Milestone: ${DIM}$ISSUE_MILESTONE${RESET}"
  fi

  info "Creating PR..."
  # 2>&1 so a failure message from gh lands in pr_url for the error report.
  local pr_url
  pr_url=$(gh pr create "${pr_args[@]}" 2>&1) || {
    error "PR creation failed: $pr_url"
    return 1
  }

  success "PR created: ${BOLD}$pr_url${RESET}"
  echo "$pr_url" > "$ARTIFACTS_DIR/pr-url.txt"

  # Extract PR number (trailing digits of the PR URL)
  PR_NUMBER=$(echo "$pr_url" | grep -oE '[0-9]+$' || true)

  # Update issue with PR link
  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_remove_label "$ISSUE_NUMBER" "pipeline/in-progress"
    gh_add_labels "$ISSUE_NUMBER" "pipeline/pr-created"
    gh_comment_issue "$ISSUE_NUMBER" "🎉 **PR created:** ${pr_url}

Pipeline duration so far: ${total_dur:-unknown}"
  fi

  # Wait for CI if configured
  local wait_ci
  wait_ci=$(jq -r --arg id "pr" '(.stages[] | select(.id == $id) | .config.wait_ci) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  if [[ "$wait_ci" == "true" ]]; then
    info "Waiting for CI checks..."
    # No PR number given: gh resolves the PR from the current branch.
    gh pr checks --watch 2>/dev/null || warn "CI checks did not all pass"
  fi

  log_stage "pr" "PR created: $pr_url (${reviewers:+reviewers: $reviewers})"
}
|
|
1898
|
+
|
|
1899
|
+
# stage_merge — merge the PR for the current branch after CI passes.
#
# Reads merge settings from $PIPELINE_CONFIG (merge_method / merge_strategy,
# wait_ci_timeout_s, auto_delete_branch, auto_merge, auto_approve), polls
# `gh pr checks` until all buckets are "pass", any is "fail", or the timeout
# elapses (timeout proceeds with a warning), optionally auto-approves, then
# either enables GitHub auto-merge or merges directly.
# Globals read: NO_GITHUB, PIPELINE_CONFIG, GIT_BRANCH, ISSUE_NUMBER.
# Globals written: CURRENT_STAGE_ID.
# Returns: 0 on merge/skip, 1 on CI failure or merge failure.
stage_merge() {
  CURRENT_STAGE_ID="merge"

  if [[ "$NO_GITHUB" == "true" ]]; then
    info "Merge stage skipped (--no-github)"
    return 0
  fi

  local merge_method wait_ci_timeout auto_delete_branch auto_merge auto_approve merge_strategy
  merge_method=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.merge_method) // "squash"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$merge_method" || "$merge_method" == "null" ]] && merge_method="squash"
  wait_ci_timeout=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.wait_ci_timeout_s) // 600' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$wait_ci_timeout" || "$wait_ci_timeout" == "null" ]] && wait_ci_timeout=600
  auto_delete_branch=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.auto_delete_branch) // "true"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_delete_branch" || "$auto_delete_branch" == "null" ]] && auto_delete_branch="true"
  auto_merge=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.auto_merge) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_merge" || "$auto_merge" == "null" ]] && auto_merge="false"
  auto_approve=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.auto_approve) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_approve" || "$auto_approve" == "null" ]] && auto_approve="false"
  merge_strategy=$(jq -r --arg id "merge" '(.stages[] | select(.id == $id) | .config.merge_strategy) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$merge_strategy" || "$merge_strategy" == "null" ]] && merge_strategy=""
  # merge_strategy overrides merge_method if set (squash/merge/rebase)
  if [[ -n "$merge_strategy" ]]; then
    merge_method="$merge_strategy"
  fi

  # Find PR for current branch
  local pr_number
  pr_number=$(gh pr list --head "$GIT_BRANCH" --json number --jq '.[0].number' 2>/dev/null || echo "")
  # FIX: when no PR matches, `.[0].number` evaluates to null and --jq prints
  # the literal string "null" — which passed the -z check and sent the stage
  # on to merge PR "null". Normalize it to empty so the skip path triggers.
  [[ "$pr_number" == "null" ]] && pr_number=""

  if [[ -z "$pr_number" ]]; then
    warn "No PR found for branch $GIT_BRANCH — skipping merge"
    return 0
  fi

  info "Found PR #${pr_number} for branch ${GIT_BRANCH}"

  # Wait for CI checks to pass
  info "Waiting for CI checks (timeout: ${wait_ci_timeout}s)..."
  local elapsed=0
  local check_interval=15

  while [[ "$elapsed" -lt "$wait_ci_timeout" ]]; do
    # Collect the distinct check buckets; a gh failure counts as pending.
    local check_status
    check_status=$(gh pr checks "$pr_number" --json 'bucket,name' --jq '[.[] | .bucket] | unique | sort' 2>/dev/null || echo '["pending"]')

    # If all checks passed (only "pass" in buckets)
    if echo "$check_status" | jq -e '. == ["pass"]' &>/dev/null; then
      success "All CI checks passed"
      break
    fi

    # If any check failed
    if echo "$check_status" | jq -e 'any(. == "fail")' &>/dev/null; then
      error "CI checks failed — aborting merge"
      return 1
    fi

    sleep "$check_interval"
    elapsed=$((elapsed + check_interval))
  done

  if [[ "$elapsed" -ge "$wait_ci_timeout" ]]; then
    warn "CI check timeout (${wait_ci_timeout}s) — proceeding with merge anyway"
  fi

  # Auto-approve if configured (for branch protection requiring reviews)
  if [[ "$auto_approve" == "true" ]]; then
    info "Auto-approving PR #${pr_number}..."
    gh pr review "$pr_number" --approve 2>/dev/null || warn "Auto-approve failed (may need different permissions)"
  fi

  # Merge the PR
  if [[ "$auto_merge" == "true" ]]; then
    info "Enabling auto-merge for PR #${pr_number} (strategy: ${merge_method})..."
    local auto_merge_args=("pr" "merge" "$pr_number" "--auto" "--${merge_method}")
    if [[ "$auto_delete_branch" == "true" ]]; then
      auto_merge_args+=("--delete-branch")
    fi

    if gh "${auto_merge_args[@]}" 2>/dev/null; then
      success "Auto-merge enabled for PR #${pr_number} (strategy: ${merge_method})"
      emit_event "merge.auto_enabled" \
        "issue=${ISSUE_NUMBER:-0}" \
        "pr=$pr_number" \
        "strategy=$merge_method"
    else
      warn "Auto-merge not available — falling back to direct merge"
      # Fall through to direct merge below
      auto_merge="false"
    fi
  fi

  if [[ "$auto_merge" != "true" ]]; then
    info "Merging PR #${pr_number} (method: ${merge_method})..."
    local merge_args=("pr" "merge" "$pr_number" "--${merge_method}")
    if [[ "$auto_delete_branch" == "true" ]]; then
      merge_args+=("--delete-branch")
    fi

    if gh "${merge_args[@]}" 2>/dev/null; then
      success "PR #${pr_number} merged successfully"
    else
      error "Failed to merge PR #${pr_number}"
      return 1
    fi
  fi

  log_stage "merge" "PR #${pr_number} merged (strategy: ${merge_method}, auto_merge: ${auto_merge})"
}
|
|
2009
|
+
|
|
2010
|
+
# stage_deploy — run the configured staging and/or production deploy commands.
#
# Commands come from the "deploy" stage config in $PIPELINE_CONFIG
# (staging_cmd, production_cmd, rollback_cmd). A failed production deploy
# triggers the rollback command when one is configured. Progress and failures
# are mirrored to the GitHub issue when $ISSUE_NUMBER is set.
# Returns: 0 on success/skip, 1 when a deploy command fails.
stage_deploy() {
  CURRENT_STAGE_ID="deploy"

  local stage_deploy_cmd prod_deploy_cmd undo_cmd
  stage_deploy_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.staging_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$stage_deploy_cmd" == "null" ]] && stage_deploy_cmd=""
  prod_deploy_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.production_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$prod_deploy_cmd" == "null" ]] && prod_deploy_cmd=""
  undo_cmd=$(jq -r --arg id "deploy" '(.stages[] | select(.id == $id) | .config.rollback_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$undo_cmd" == "null" ]] && undo_cmd=""

  # Nothing configured for either environment → nothing to do.
  if [[ -z "$stage_deploy_cmd" && -z "$prod_deploy_cmd" ]]; then
    warn "No deploy commands configured — skipping"
    return 0
  fi

  # Announce the deploy on the tracking issue, if any.
  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "🚀 **Deploy started**"
  fi

  # Staging first; a failure here aborts before production is touched.
  if [[ -n "$stage_deploy_cmd" ]]; then
    info "Deploying to staging..."
    if ! eval "$stage_deploy_cmd" > "$ARTIFACTS_DIR/deploy-staging.log" 2>&1; then
      error "Staging deploy failed"
      [[ -n "$ISSUE_NUMBER" ]] && gh_comment_issue "$ISSUE_NUMBER" "❌ Staging deploy failed"
      return 1
    fi
    success "Staging deploy complete"
  fi

  # Production deploy, with best-effort rollback on failure.
  if [[ -n "$prod_deploy_cmd" ]]; then
    info "Deploying to production..."
    if ! eval "$prod_deploy_cmd" > "$ARTIFACTS_DIR/deploy-prod.log" 2>&1; then
      error "Production deploy failed"
      if [[ -n "$undo_cmd" ]]; then
        warn "Rolling back..."
        eval "$undo_cmd" 2>&1 || error "Rollback also failed!"
      fi
      [[ -n "$ISSUE_NUMBER" ]] && gh_comment_issue "$ISSUE_NUMBER" "❌ Production deploy failed — rollback ${undo_cmd:+attempted}"
      return 1
    fi
    success "Production deploy complete"
  fi

  # Finalize: comment + label on the tracking issue.
  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "✅ **Deploy complete**"
    gh_add_labels "$ISSUE_NUMBER" "deployed"
  fi

  log_stage "deploy" "Deploy complete"
}
|
|
2065
|
+
|
|
2066
|
+
# stage_validate — post-deploy validation: smoke tests, health check, then
# issue close-out and wiki report.
#
# Config (from the "validate" stage in $PIPELINE_CONFIG): smoke_cmd,
# health_url, close_issue. A smoke-test failure files an "incident" issue;
# the health check retries up to 5 times with 10s pauses. On success the
# originating issue may be closed and a pipeline report pushed to the wiki.
# Globals read: PIPELINE_CONFIG, ARTIFACTS_DIR, GOAL, GITHUB_ISSUE,
#   GIT_BRANCH, ISSUE_NUMBER, PIPELINE_NAME, PIPELINE_START_EPOCH,
#   STAGE_TIMINGS.
# Globals written: CURRENT_STAGE_ID.
# Returns: 0 on success, 1 on smoke-test or health-check failure.
stage_validate() {
  CURRENT_STAGE_ID="validate"
  local smoke_cmd
  smoke_cmd=$(jq -r --arg id "validate" '(.stages[] | select(.id == $id) | .config.smoke_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$smoke_cmd" == "null" ]] && smoke_cmd=""

  local health_url
  health_url=$(jq -r --arg id "validate" '(.stages[] | select(.id == $id) | .config.health_url) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$health_url" == "null" ]] && health_url=""

  local close_issue
  close_issue=$(jq -r --arg id "validate" '(.stages[] | select(.id == $id) | .config.close_issue) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true

  # Smoke tests
  if [[ -n "$smoke_cmd" ]]; then
    info "Running smoke tests..."
    eval "$smoke_cmd" > "$ARTIFACTS_DIR/smoke.log" 2>&1 || {
      error "Smoke tests failed"
      # File a separate incident issue (best-effort) before failing the stage.
      if [[ -n "$ISSUE_NUMBER" ]]; then
        gh issue create --title "Deploy validation failed: $GOAL" \
          --label "incident" --body "Pipeline smoke tests failed after deploy.

Related issue: ${GITHUB_ISSUE}
Branch: ${GIT_BRANCH}
PR: $(cat "$ARTIFACTS_DIR/pr-url.txt" 2>/dev/null || echo 'unknown')" 2>/dev/null || true
      fi
      return 1
    }
    success "Smoke tests passed"
  fi

  # Health check with retry: up to 5 attempts, 10s apart; curl -sf treats
  # HTTP errors (4xx/5xx) as failure.
  if [[ -n "$health_url" ]]; then
    info "Health check: $health_url"
    local attempts=0
    while [[ $attempts -lt 5 ]]; do
      if curl -sf "$health_url" >/dev/null 2>&1; then
        success "Health check passed"
        break
      fi
      attempts=$((attempts + 1))
      [[ $attempts -lt 5 ]] && { info "Retry ${attempts}/5..."; sleep 10; }
    done
    # attempts only reaches 5 when every try failed (success breaks early).
    if [[ $attempts -ge 5 ]]; then
      error "Health check failed after 5 attempts"
      return 1
    fi
  fi

  # Compute total duration once for both issue close and wiki report
  local total_dur=""
  if [[ -n "$PIPELINE_START_EPOCH" ]]; then
    total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
  fi

  # Close original issue with comprehensive summary
  if [[ "$close_issue" == "true" && -n "$ISSUE_NUMBER" ]]; then
    gh issue close "$ISSUE_NUMBER" --comment "## ✅ Complete — Deployed & Validated

| Metric | Value |
|--------|-------|
| Pipeline | \`${PIPELINE_NAME}\` |
| Branch | \`${GIT_BRANCH}\` |
| PR | $(cat "$ARTIFACTS_DIR/pr-url.txt" 2>/dev/null || echo 'N/A') |
| Duration | ${total_dur:-unknown} |

_Closed automatically by \`shipwright pipeline\`_" 2>/dev/null || true

    gh_remove_label "$ISSUE_NUMBER" "pipeline/pr-created"
    gh_add_labels "$ISSUE_NUMBER" "pipeline/complete"
    success "Issue #$ISSUE_NUMBER closed"
  fi

  # Push pipeline report to wiki.
  # STAGE_TIMINGS is a '|'-separated list; tr splits it to one entry per line.
  local report="# Pipeline Report — ${GOAL}

| Metric | Value |
|--------|-------|
| Pipeline | \`${PIPELINE_NAME}\` |
| Branch | \`${GIT_BRANCH}\` |
| PR | $(cat "$ARTIFACTS_DIR/pr-url.txt" 2>/dev/null || echo 'N/A') |
| Duration | ${total_dur:-unknown} |
| Stages | $(echo "$STAGE_TIMINGS" | tr '|' '\n' | wc -l | xargs) completed |

## Stage Timings
$(echo "$STAGE_TIMINGS" | tr '|' '\n' | sed 's/^/- /')

## Artifacts
$(ls -1 "$ARTIFACTS_DIR" 2>/dev/null | sed 's/^/- /')

---
_Generated by \`shipwright pipeline\` at $(now_iso)_"
  gh_wiki_page "Pipeline-Report-${ISSUE_NUMBER:-inline}" "$report"

  log_stage "validate" "Validation complete"
}
|
|
2162
|
+
|
|
2163
|
+
# Post-deploy monitoring stage: polls a health URL and/or a log command for a
# configured duration, counts failures, and (optionally) auto-rolls-back and
# opens a hotfix issue when the error threshold is exceeded.
# Globals read:  PIPELINE_CONFIG, ARTIFACTS_DIR, ISSUE_NUMBER, GH_AVAILABLE,
#                GOAL, GITHUB_ISSUE, GIT_BRANCH
# Globals set:   CURRENT_STAGE_ID
# Returns: 0 when monitoring completes under the threshold (or is skipped),
#          1 when the error threshold is exceeded.
stage_monitor() {
  CURRENT_STAGE_ID="monitor"

  # Read config from pipeline template
  # Each jq read is best-effort (`|| true`); empty/"null" results fall back
  # to a hard-coded default on the following line.
  local duration_minutes health_url error_threshold log_pattern log_cmd rollback_cmd auto_rollback
  duration_minutes=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.duration_minutes) // 5' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$duration_minutes" || "$duration_minutes" == "null" ]] && duration_minutes=5
  health_url=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.health_url) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$health_url" == "null" ]] && health_url=""
  error_threshold=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.error_threshold) // 5' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$error_threshold" || "$error_threshold" == "null" ]] && error_threshold=5
  log_pattern=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.log_pattern) // "ERROR|FATAL|PANIC"' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$log_pattern" || "$log_pattern" == "null" ]] && log_pattern="ERROR|FATAL|PANIC"
  log_cmd=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.log_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$log_cmd" == "null" ]] && log_cmd=""
  rollback_cmd=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.rollback_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ "$rollback_cmd" == "null" ]] && rollback_cmd=""
  auto_rollback=$(jq -r --arg id "monitor" '(.stages[] | select(.id == $id) | .config.auto_rollback) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$auto_rollback" || "$auto_rollback" == "null" ]] && auto_rollback="false"

  # Nothing to observe — skip instead of failing.
  if [[ -z "$health_url" && -z "$log_cmd" ]]; then
    warn "No health_url or log_cmd configured — skipping monitor stage"
    log_stage "monitor" "Skipped (no monitoring configured)"
    return 0
  fi

  local report_file="$ARTIFACTS_DIR/monitor-report.md"
  local total_errors=0
  local poll_interval=30 # seconds between polls
  local total_polls=$(( (duration_minutes * 60) / poll_interval ))
  [[ "$total_polls" -lt 1 ]] && total_polls=1

  info "Post-deploy monitoring: ${duration_minutes}m (${total_polls} polls, threshold: ${error_threshold} errors)"

  emit_event "monitor.started" \
    "issue=${ISSUE_NUMBER:-0}" \
    "duration_minutes=$duration_minutes" \
    "error_threshold=$error_threshold"

  # Report header (overwrites any previous report).
  {
    echo "# Post-Deploy Monitor Report"
    echo ""
    echo "- Duration: ${duration_minutes} minutes"
    echo "- Health URL: ${health_url:-none}"
    echo "- Log command: ${log_cmd:-none}"
    echo "- Error threshold: ${error_threshold}"
    echo "- Auto-rollback: ${auto_rollback}"
    echo ""
    echo "## Poll Results"
    echo ""
  } > "$report_file"

  local poll=0
  local health_failures=0
  local log_errors=0
  while [[ "$poll" -lt "$total_polls" ]]; do
    poll=$((poll + 1))
    local poll_time
    poll_time=$(now_iso)

    # Health URL check
    # curl failure (timeout, DNS, refused) maps to "000", which fails the
    # 200..399 range test below and counts as one error.
    if [[ -n "$health_url" ]]; then
      local http_status
      http_status=$(curl -sf -o /dev/null -w "%{http_code}" "$health_url" 2>/dev/null || echo "000")
      if [[ "$http_status" -ge 200 && "$http_status" -lt 400 ]]; then
        echo "- [${poll_time}] Health: ✅ (HTTP ${http_status})" >> "$report_file"
      else
        health_failures=$((health_failures + 1))
        total_errors=$((total_errors + 1))
        echo "- [${poll_time}] Health: ❌ (HTTP ${http_status})" >> "$report_file"
        warn "Health check failed: HTTP ${http_status}"
      fi
    fi

    # Log command check
    # NOTE: log_cmd comes from the pipeline config and is eval'd as-is —
    # the config file is trusted input here.
    if [[ -n "$log_cmd" ]]; then
      local log_output
      log_output=$(eval "$log_cmd" 2>/dev/null || true)
      local error_count=0
      if [[ -n "$log_output" ]]; then
        # Count lines matching the configured error pattern; each matching
        # line counts toward the shared error threshold.
        error_count=$(echo "$log_output" | grep -cE "$log_pattern" 2>/dev/null || true)
        error_count="${error_count:-0}"
      fi
      if [[ "$error_count" -gt 0 ]]; then
        log_errors=$((log_errors + error_count))
        total_errors=$((total_errors + error_count))
        echo "- [${poll_time}] Logs: ⚠️ ${error_count} error(s) matching '${log_pattern}'" >> "$report_file"
        warn "Log errors detected: ${error_count}"
      else
        echo "- [${poll_time}] Logs: ✅ clean" >> "$report_file"
      fi
    fi

    emit_event "monitor.check" \
      "issue=${ISSUE_NUMBER:-0}" \
      "poll=$poll" \
      "total_errors=$total_errors" \
      "health_failures=$health_failures"

    # Check threshold
    if [[ "$total_errors" -ge "$error_threshold" ]]; then
      error "Error threshold exceeded: ${total_errors} >= ${error_threshold}"

      echo "" >> "$report_file"
      echo "## ❌ THRESHOLD EXCEEDED" >> "$report_file"
      echo "Total errors: ${total_errors} (threshold: ${error_threshold})" >> "$report_file"

      emit_event "monitor.alert" \
        "issue=${ISSUE_NUMBER:-0}" \
        "total_errors=$total_errors" \
        "threshold=$error_threshold"

      # Auto-rollback if configured
      if [[ "$auto_rollback" == "true" && -n "$rollback_cmd" ]]; then
        warn "Auto-rolling back..."
        echo "" >> "$report_file"
        echo "## Rollback" >> "$report_file"

        if eval "$rollback_cmd" >> "$report_file" 2>&1; then
          success "Rollback executed"
          echo "Rollback: ✅ success" >> "$report_file"

          # Post-rollback smoke test verification
          # Reuses the validate stage's smoke_cmd to confirm the rollback
          # actually restored a working deployment.
          local smoke_cmd
          smoke_cmd=$(jq -r --arg id "validate" '(.stages[] | select(.id == $id) | .config.smoke_cmd) // ""' "$PIPELINE_CONFIG" 2>/dev/null) || true
          [[ "$smoke_cmd" == "null" ]] && smoke_cmd=""

          if [[ -n "$smoke_cmd" ]]; then
            info "Verifying rollback with smoke tests..."
            if eval "$smoke_cmd" > "$ARTIFACTS_DIR/rollback-smoke.log" 2>&1; then
              success "Rollback verified — smoke tests pass"
              echo "Rollback verification: ✅ smoke tests pass" >> "$report_file"
              emit_event "monitor.rollback_verified" \
                "issue=${ISSUE_NUMBER:-0}" \
                "status=pass"
            else
              error "Rollback verification FAILED — smoke tests still failing"
              echo "Rollback verification: ❌ smoke tests FAILED — manual intervention required" >> "$report_file"
              emit_event "monitor.rollback_verified" \
                "issue=${ISSUE_NUMBER:-0}" \
                "status=fail"
              if [[ -n "$ISSUE_NUMBER" ]]; then
                gh_comment_issue "$ISSUE_NUMBER" "🚨 **Rollback executed but verification failed** — smoke tests still failing after rollback. Manual intervention required.

Smoke command: \`${smoke_cmd}\`
Log: see \`pipeline-artifacts/rollback-smoke.log\`" 2>/dev/null || true
              fi
            fi
          fi
        else
          error "Rollback failed!"
          echo "Rollback: ❌ failed" >> "$report_file"
        fi

        emit_event "monitor.rollback" \
          "issue=${ISSUE_NUMBER:-0}" \
          "total_errors=$total_errors"

        # Post to GitHub
        if [[ -n "$ISSUE_NUMBER" ]]; then
          gh_comment_issue "$ISSUE_NUMBER" "🚨 **Auto-rollback triggered** — ${total_errors} errors exceeded threshold (${error_threshold})

Rollback command: \`${rollback_cmd}\`" 2>/dev/null || true

          # Create hotfix issue
          if [[ "$GH_AVAILABLE" == "true" ]]; then
            gh issue create \
              --title "Hotfix: Deploy regression for ${GOAL}" \
              --label "hotfix,incident" \
              --body "Auto-rollback triggered during post-deploy monitoring.

**Original issue:** ${GITHUB_ISSUE:-N/A}
**Errors detected:** ${total_errors}
**Threshold:** ${error_threshold}
**Branch:** ${GIT_BRANCH}

## Monitor Report
$(cat "$report_file")

---
_Created automatically by \`shipwright pipeline\` monitor stage_" 2>/dev/null || true
          fi
        fi
      fi

      log_stage "monitor" "Failed — ${total_errors} errors (threshold: ${error_threshold})"
      return 1
    fi

    # Sleep between polls (skip on last poll)
    if [[ "$poll" -lt "$total_polls" ]]; then
      sleep "$poll_interval"
    fi
  done

  # Monitoring complete — all clear
  echo "" >> "$report_file"
  echo "## ✅ Monitoring Complete" >> "$report_file"
  echo "Total errors: ${total_errors} (threshold: ${error_threshold})" >> "$report_file"
  echo "Health failures: ${health_failures}" >> "$report_file"
  echo "Log errors: ${log_errors}" >> "$report_file"

  success "Post-deploy monitoring clean (${total_errors} errors in ${duration_minutes}m)"

  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "✅ **Post-deploy monitoring passed** — ${duration_minutes}m, ${total_errors} errors" 2>/dev/null || true
  fi

  log_stage "monitor" "Clean — ${total_errors} errors in ${duration_minutes}m"
}
|
|
2373
|
+
|
|
2374
|
+
# ─── Multi-Dimensional Quality Checks ─────────────────────────────────────
|
|
2375
|
+
# Beyond tests: security, bundle size, perf regression, API compat, coverage
|
|
2376
|
+
|
|
2377
|
+
# Run a dependency security audit using whichever ecosystem tool applies
# (npm audit, pip-audit, or cargo audit) and fail on critical findings.
# Globals read: ARTIFACTS_DIR, ISSUE_NUMBER
# Outputs: raw audit output in $ARTIFACTS_DIR/security-audit.log
# Returns: 0 when clean or no tool available, 1 when critical findings exist.
quality_check_security() {
  info "Security audit..."
  local audit_log="$ARTIFACTS_DIR/security-audit.log"
  local tool_found=false

  # Pick the first matching ecosystem. Audit exit codes are deliberately
  # ignored (`|| true`) — severity is judged from the captured output below.
  # Try npm audit
  if [[ -f "package.json" ]] && command -v npm &>/dev/null; then
    tool_found=true
    npm audit --production 2>&1 | tee "$audit_log" || true
  # Try pip-audit
  elif [[ -f "requirements.txt" || -f "pyproject.toml" ]] && command -v pip-audit &>/dev/null; then
    tool_found=true
    pip-audit 2>&1 | tee "$audit_log" || true
  # Try cargo audit
  elif [[ -f "Cargo.toml" ]] && command -v cargo-audit &>/dev/null; then
    tool_found=true
    cargo audit 2>&1 | tee "$audit_log" || true
  fi

  if [[ "$tool_found" != "true" ]]; then
    info "No security audit tool found — skipping"
    echo "No audit tool available" > "$audit_log"
    return 0
  fi

  # Parse results for critical/high severity.
  # Word boundaries avoid counting lines that merely contain "highlight",
  # "higher", etc. This counts LINES mentioning the severity, not individual
  # findings — a heuristic, good enough for a gate.
  local critical_count high_count
  critical_count=$(grep -ciE '\bcritical\b' "$audit_log" 2>/dev/null || true)
  critical_count="${critical_count:-0}"
  high_count=$(grep -ciE '\bhigh\b' "$audit_log" 2>/dev/null || true)
  high_count="${high_count:-0}"

  emit_event "quality.security" \
    "issue=${ISSUE_NUMBER:-0}" \
    "critical=$critical_count" \
    "high=$high_count"

  # Only critical findings block the pipeline; high severities are reported
  # in the event/warning but do not fail the check.
  if [[ "$critical_count" -gt 0 ]]; then
    warn "Security audit: ${critical_count} critical, ${high_count} high"
    return 1
  fi

  success "Security audit: clean"
  return 0
}
|
|
2423
|
+
|
|
2424
|
+
# Measure the build output size and compare against a remembered baseline.
# Fails when the bundle grew more than 20% versus the stored baseline.
# Globals read: ARTIFACTS_DIR, ISSUE_NUMBER, SCRIPT_DIR
# Outputs: $ARTIFACTS_DIR/bundle-metrics.log
# Returns: 0 on pass/skip, 1 when growth exceeds 20%.
quality_check_bundle_size() {
  info "Bundle size check..."
  local metrics_log="$ARTIFACTS_DIR/bundle-metrics.log"
  local bundle_size=0
  local bundle_dir=""

  # Find build output directory (first match wins)
  for dir in dist build out .next; do
    if [[ -d "$dir" ]]; then
      bundle_dir="$dir"
      break
    fi
  done

  if [[ -z "$bundle_dir" ]]; then
    info "No build output directory found — skipping bundle check"
    echo "No build directory" > "$metrics_log"
    return 0
  fi

  # BUGFIX: the previous `du … | cut -f1 || echo "0"` attached `||` to `cut`,
  # which succeeds even when `du` fails — leaving bundle_size EMPTY instead
  # of "0". Default explicitly after the pipeline instead.
  bundle_size=$(du -sk "$bundle_dir" 2>/dev/null | cut -f1) || true
  bundle_size="${bundle_size:-0}"
  local bundle_size_human
  bundle_size_human=$(du -sh "$bundle_dir" 2>/dev/null | cut -f1) || true
  bundle_size_human="${bundle_size_human:-unknown}"

  echo "Bundle directory: $bundle_dir" > "$metrics_log"
  echo "Size: ${bundle_size}KB (${bundle_size_human})" >> "$metrics_log"

  emit_event "quality.bundle" \
    "issue=${ISSUE_NUMBER:-0}" \
    "size_kb=$bundle_size" \
    "directory=$bundle_dir"

  # Check against memory baseline if available
  local baseline_size=""
  if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
    baseline_size=$(bash "$SCRIPT_DIR/cct-memory.sh" get "bundle_size_kb" 2>/dev/null) || true
  fi

  # The `2>/dev/null` on the test silently skips the comparison when the
  # baseline is non-numeric (e.g. garbage from the memory store).
  if [[ -n "$baseline_size" && "$baseline_size" -gt 0 ]] 2>/dev/null; then
    local growth_pct
    growth_pct=$(awk -v cur="$bundle_size" -v base="$baseline_size" 'BEGIN{printf "%d", ((cur - base) / base) * 100}')
    echo "Baseline: ${baseline_size}KB | Growth: ${growth_pct}%" >> "$metrics_log"
    if [[ "$growth_pct" -gt 20 ]]; then
      warn "Bundle size grew ${growth_pct}% (${baseline_size}KB → ${bundle_size}KB)"
      return 1
    fi
  fi

  info "Bundle size: ${bundle_size_human}"
  return 0
}
|
|
2475
|
+
|
|
2476
|
+
# Detect test-suite performance regressions by extracting the suite duration
# from the test log and comparing against a remembered baseline.
# Globals read: ARTIFACTS_DIR, ISSUE_NUMBER, SCRIPT_DIR
# Outputs: $ARTIFACTS_DIR/perf-metrics.log
# Returns: 0 on pass/skip, 1 when tests are >30% slower than baseline.
quality_check_perf_regression() {
  info "Performance regression check..."
  local metrics_log="$ARTIFACTS_DIR/perf-metrics.log"
  local test_log="$ARTIFACTS_DIR/test-results.log"

  if [[ ! -f "$test_log" ]]; then
    info "No test results — skipping perf check"
    echo "No test results available" > "$metrics_log"
    return 0
  fi

  # Extract test suite duration (common patterns).
  # FIX: the variable was previously named `duration_ms` but always held
  # SECONDS (it is logged as "…s" and emitted as duration_s) — renamed to
  # match the unit. The value may be fractional, hence awk comparisons below.
  local duration_s=""
  duration_s=$(grep -oE 'Time:\s*[0-9.]+\s*s' "$test_log" 2>/dev/null | grep -oE '[0-9.]+' | tail -1 || true)
  # Fallback: any trailing "<number> s/seconds" token in the log.
  [[ -z "$duration_s" ]] && duration_s=$(grep -oE '[0-9.]+ ?s(econds?)?' "$test_log" 2>/dev/null | grep -oE '[0-9.]+' | tail -1 || true)

  if [[ -z "$duration_s" ]]; then
    info "Could not extract test duration — skipping perf check"
    echo "Duration not parseable" > "$metrics_log"
    return 0
  fi

  echo "Test duration: ${duration_s}s" > "$metrics_log"

  emit_event "quality.perf" \
    "issue=${ISSUE_NUMBER:-0}" \
    "duration_s=$duration_s"

  # Check against memory baseline if available
  local baseline_dur=""
  if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
    baseline_dur=$(bash "$SCRIPT_DIR/cct-memory.sh" get "test_duration_s" 2>/dev/null) || true
  fi

  # awk handles fractional values; the first awk also guards against a
  # zero/non-positive baseline (division by zero below).
  if [[ -n "$baseline_dur" ]] && awk -v cur="$duration_s" -v base="$baseline_dur" 'BEGIN{exit !(base > 0)}' 2>/dev/null; then
    local slowdown_pct
    slowdown_pct=$(awk -v cur="$duration_s" -v base="$baseline_dur" 'BEGIN{printf "%d", ((cur - base) / base) * 100}')
    echo "Baseline: ${baseline_dur}s | Slowdown: ${slowdown_pct}%" >> "$metrics_log"
    if [[ "$slowdown_pct" -gt 30 ]]; then
      warn "Tests ${slowdown_pct}% slower (${baseline_dur}s → ${duration_s}s)"
      return 1
    fi
  fi

  info "Test duration: ${duration_s}s"
  return 0
}
|
|
2523
|
+
|
|
2524
|
+
# Check for breaking API changes: locate an OpenAPI/Swagger spec, and if it
# changed on this branch, diff its path list against the base branch to
# detect removed endpoints.
# Globals read: BASE_BRANCH, ARTIFACTS_DIR
# Outputs: $ARTIFACTS_DIR/api-compat.log
# Returns: 0 on pass/skip, 1 when endpoints were removed.
quality_check_api_compat() {
  info "API compatibility check..."
  local compat_log="$ARTIFACTS_DIR/api-compat.log"

  # Look for OpenAPI/Swagger specs (first match wins)
  local spec_file=""
  for candidate in openapi.json openapi.yaml swagger.json swagger.yaml api/openapi.json docs/openapi.yaml; do
    if [[ -f "$candidate" ]]; then
      spec_file="$candidate"
      break
    fi
  done

  if [[ -z "$spec_file" ]]; then
    info "No OpenAPI/Swagger spec found — skipping API compat check"
    echo "No API spec found" > "$compat_log"
    return 0
  fi

  # Check if spec was modified in this branch.
  # BUGFIX: previously matched `basename "$spec_file"` as an unanchored
  # REGEX ("openapi.json" dots match any char, and substrings of unrelated
  # paths matched). Match the exact path as a fixed string instead.
  local spec_changed
  spec_changed=$(git diff --name-only "${BASE_BRANCH}...HEAD" 2>/dev/null | grep -cxF -- "$spec_file" || true)
  spec_changed="${spec_changed:-0}"

  if [[ "$spec_changed" -eq 0 ]]; then
    info "API spec unchanged"
    echo "Spec unchanged" > "$compat_log"
    return 0
  fi

  # Diff the spec against base branch
  local old_spec new_spec
  old_spec=$(git show "${BASE_BRANCH}:${spec_file}" 2>/dev/null || true)
  new_spec=$(cat "$spec_file" 2>/dev/null || true)

  if [[ -z "$old_spec" ]]; then
    info "New API spec — no baseline to compare"
    echo "New spec, no baseline" > "$compat_log"
    return 0
  fi

  # Check for breaking changes: removed endpoints, changed methods.
  # Only JSON specs are structurally diffed (requires jq); YAML specs fall
  # through with an empty removed_endpoints list.
  local removed_endpoints=""
  if command -v jq &>/dev/null && [[ "$spec_file" == *.json ]]; then
    local old_paths new_paths
    old_paths=$(echo "$old_spec" | jq -r '.paths | keys[]' 2>/dev/null | sort || true)
    new_paths=$(jq -r '.paths | keys[]' "$spec_file" 2>/dev/null | sort || true)
    # comm -23: paths present in old but absent from new = removed.
    removed_endpoints=$(comm -23 <(echo "$old_paths") <(echo "$new_paths") 2>/dev/null || true)
  fi

  {
    echo "Spec: $spec_file"
    echo "Changed: yes"
    if [[ -n "$removed_endpoints" ]]; then
      echo "BREAKING — Removed endpoints:"
      echo "$removed_endpoints"
    else
      echo "No breaking changes detected"
    fi
  } > "$compat_log"

  if [[ -n "$removed_endpoints" ]]; then
    local removed_count
    removed_count=$(echo "$removed_endpoints" | wc -l | xargs)
    warn "API breaking changes: ${removed_count} endpoint(s) removed"
    return 1
  fi

  success "API compatibility: no breaking changes"
  return 0
}
|
|
2595
|
+
|
|
2596
|
+
# Extract the coverage percentage from the test log and enforce both the
# pipeline-configured minimum and no-regression versus the stored baseline.
# Globals read: ARTIFACTS_DIR, ISSUE_NUMBER, PIPELINE_CONFIG, SCRIPT_DIR
# Returns: 0 on pass/skip, 1 when below minimum or below baseline.
quality_check_coverage() {
  info "Coverage analysis..."
  local test_log="$ARTIFACTS_DIR/test-results.log"

  if [[ ! -f "$test_log" ]]; then
    info "No test results — skipping coverage check"
    return 0
  fi

  # Extract coverage percentage — tries Istanbul "Statements :", Jest
  # "All files |", then pytest-cov "TOTAL … N%" formats in order.
  local coverage=""
  coverage=$(grep -oE 'Statements\s*:\s*[0-9.]+' "$test_log" 2>/dev/null | grep -oE '[0-9.]+$' || \
             grep -oE 'All files\s*\|\s*[0-9.]+' "$test_log" 2>/dev/null | grep -oE '[0-9.]+$' || \
             grep -oE 'TOTAL\s+[0-9]+\s+[0-9]+\s+([0-9]+)%' "$test_log" 2>/dev/null | grep -oE '[0-9]+%' | tr -d '%' || echo "")

  if [[ -z "$coverage" ]]; then
    info "Could not extract coverage — skipping"
    return 0
  fi

  emit_event "quality.coverage" \
    "issue=${ISSUE_NUMBER:-0}" \
    "coverage=$coverage"

  # Check against pipeline config minimum
  local coverage_min
  coverage_min=$(jq -r --arg id "test" '(.stages[] | select(.id == $id) | .config.coverage_min) // 0' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$coverage_min" || "$coverage_min" == "null" ]] && coverage_min=0

  # Check against memory baseline (detect coverage drops)
  local baseline_coverage=""
  if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
    baseline_coverage=$(bash "$SCRIPT_DIR/cct-memory.sh" get "coverage_pct" 2>/dev/null) || true
  fi

  local dropped=false
  if [[ -n "$baseline_coverage" ]] && awk -v cur="$coverage" -v base="$baseline_coverage" 'BEGIN{exit !(cur < base)}' 2>/dev/null; then
    warn "Coverage dropped: ${baseline_coverage}% → ${coverage}%"
    dropped=true
  fi

  # BUGFIX: `[[ "$coverage_min" -gt 0 ]]` is an integer test and errors out
  # on fractional config values like "80.5"; the suppressed error silently
  # skipped the minimum check. Compare numerically with awk instead, matching
  # the baseline comparison above.
  if awk -v min="$coverage_min" 'BEGIN{exit !(min > 0)}' 2>/dev/null && \
     awk -v cov="$coverage" -v min="$coverage_min" 'BEGIN{exit !(cov < min)}' 2>/dev/null; then
    warn "Coverage ${coverage}% below minimum ${coverage_min}%"
    return 1
  fi

  # A drop below baseline fails the check even when the configured minimum
  # is still satisfied.
  if $dropped; then
    return 1
  fi

  info "Coverage: ${coverage}%"
  return 0
}
|
|
2649
|
+
|
|
2650
|
+
# ─── Compound Quality Checks ──────────────────────────────────────────────
|
|
2651
|
+
# Adversarial review, negative prompting, E2E validation, and DoD audit.
|
|
2652
|
+
# Feeds findings back into a self-healing rebuild loop for automatic fixes.
|
|
2653
|
+
|
|
2654
|
+
# Hostile-review pass: feed the branch diff to Claude with an adversarial
# prompt and fail when any Critical or Bug findings come back.
# Globals read: BASE_BRANCH, ARTIFACTS_DIR
# Outputs: $ARTIFACTS_DIR/adversarial-review.md
# Returns: 0 when clean, 1 when critical findings or bugs were reported.
run_adversarial_review() {
  local branch_diff
  branch_diff=$(git diff "${BASE_BRANCH}...HEAD" 2>/dev/null || true)

  # Nothing changed → nothing to attack.
  if [[ -z "$branch_diff" ]]; then
    info "No diff to review"
    return 0
  fi

  local prompt="You are a hostile code reviewer. Your job is to find EVERY possible issue in this diff.
Look for:
- Bugs (logic errors, off-by-one, null/undefined access, race conditions)
- Security vulnerabilities (injection, XSS, CSRF, auth bypass, secrets in code)
- Edge cases that aren't handled
- Error handling gaps
- Performance issues (N+1 queries, memory leaks, blocking calls)
- API contract violations
- Data validation gaps

Be thorough and adversarial. List every issue with severity [Critical/Bug/Warning].
Format: **[Severity]** file:line — description

Diff:
$branch_diff"

  # Invoke Claude; stderr carries token accounting and is parsed afterwards.
  local review
  review=$(claude --print "$prompt" 2>"${ARTIFACTS_DIR}/.claude-tokens-adversarial.log" || true)
  parse_claude_tokens "${ARTIFACTS_DIR}/.claude-tokens-adversarial.log"

  echo "$review" > "$ARTIFACTS_DIR/adversarial-review.md"

  # Tally findings by severity marker (line counts, case-insensitive).
  local n_critical n_bugs
  n_critical=$(grep -ciE '\*\*\[?Critical\]?\*\*' "$ARTIFACTS_DIR/adversarial-review.md" 2>/dev/null || true)
  n_bugs=$(grep -ciE '\*\*\[?Bug\]?\*\*' "$ARTIFACTS_DIR/adversarial-review.md" 2>/dev/null || true)
  n_critical="${n_critical:-0}"
  n_bugs="${n_bugs:-0}"

  # Criticals take precedence in the failure message; bugs alone also fail.
  if [[ "$n_critical" -gt 0 ]]; then
    warn "Adversarial review: ${n_critical} critical, ${n_bugs} bugs"
    return 1
  fi
  if [[ "$n_bugs" -gt 0 ]]; then
    warn "Adversarial review: ${n_bugs} bugs found"
    return 1
  fi

  success "Adversarial review: clean"
  return 0
}
|
|
2703
|
+
|
|
2704
|
+
# Pessimistic-review pass: send the changed files (first 200 lines each) to
# Claude with a "what will break?" prompt and fail on [Critical] concerns.
# Globals read: BASE_BRANCH, ARTIFACTS_DIR
# Outputs: $ARTIFACTS_DIR/negative-review.md
# Returns: 0 when no critical concerns, 1 otherwise.
run_negative_prompting() {
  local changed_files
  changed_files=$(git diff --name-only "${BASE_BRANCH}...HEAD" 2>/dev/null || true)

  if [[ -z "$changed_files" ]]; then
    info "No changed files to analyze"
    return 0
  fi

  # Read contents of changed files
  # Builds one big string with "--- <file> ---" separators. Files deleted on
  # this branch are skipped by the -f test. Only the first 200 lines of each
  # file are included to bound the prompt size.
  local file_contents=""
  while IFS= read -r file; do
    if [[ -f "$file" ]]; then
      file_contents+="
--- $file ---
$(head -200 "$file" 2>/dev/null || true)
"
    fi
  done <<< "$changed_files"

  local prompt="You are a pessimistic engineer who assumes everything will break.
Review these changes and answer:
1. What could go wrong in production?
2. What did the developer miss?
3. What's fragile and will break when requirements change?
4. What assumptions are being made that might not hold?
5. What happens under load/stress?
6. What happens with malicious input?
7. Are there any implicit dependencies that could break?

Be specific. Reference actual code. Categorize each concern as [Critical/Concern/Minor].

Files changed: $changed_files

$file_contents"

  # Invoke Claude; stderr carries token accounting, parsed afterwards.
  local review_output
  review_output=$(claude --print "$prompt" 2>"${ARTIFACTS_DIR}/.claude-tokens-negative.log" || true)
  parse_claude_tokens "${ARTIFACTS_DIR}/.claude-tokens-negative.log"

  echo "$review_output" > "$ARTIFACTS_DIR/negative-review.md"

  # Count lines carrying the [Critical] marker (case-insensitive).
  local critical_count
  critical_count=$(grep -ciE '\[Critical\]' "$ARTIFACTS_DIR/negative-review.md" 2>/dev/null || true)
  critical_count="${critical_count:-0}"

  if [[ "$critical_count" -gt 0 ]]; then
    warn "Negative prompting: ${critical_count} critical concerns"
    return 1
  fi

  success "Negative prompting: no critical concerns"
  return 0
}
|
|
2758
|
+
|
|
2759
|
+
# End-to-end validation gate: run the configured (or auto-detected) test
# command and capture its output as an artifact.
# Globals read: TEST_CMD, ARTIFACTS_DIR
# Outputs: $ARTIFACTS_DIR/e2e-validation.log
# Returns: 0 on pass or when no test command exists, 1 on failure.
run_e2e_validation() {
  local cmd="${TEST_CMD}"
  # Fall back to auto-detection when no explicit command is configured.
  if [[ -z "$cmd" ]]; then
    cmd=$(detect_test_cmd)
  fi

  # Still nothing to run → skip rather than fail.
  if [[ -z "$cmd" ]]; then
    warn "No test command configured — skipping E2E validation"
    return 0
  fi

  info "Running E2E validation: $cmd"
  if ! eval "$cmd" > "$ARTIFACTS_DIR/e2e-validation.log" 2>&1; then
    error "E2E validation failed"
    return 1
  fi
  success "E2E validation passed"
  return 0
}
|
|
2779
|
+
|
|
2780
|
+
# Audit the project's Definition-of-Done checklist: parse unchecked
# "- [ ] …" items and auto-verify the ones we know how to check (tests,
# lint, leftover debug statements). Unverifiable items pass by default.
# Globals read: PROJECT_ROOT, HOME, ARTIFACTS_DIR, BASE_BRANCH
# Outputs: $ARTIFACTS_DIR/dod-audit.md with a per-item checklist and score
# Returns: 0 when all items pass (or no DoD file exists), 1 otherwise.
run_dod_audit() {
  local dod_file="$PROJECT_ROOT/.claude/DEFINITION-OF-DONE.md"

  if [[ ! -f "$dod_file" ]]; then
    # Check for alternative locations
    for alt in "$PROJECT_ROOT/DEFINITION-OF-DONE.md" "$HOME/.claude-teams/templates/definition-of-done.example.md"; do
      if [[ -f "$alt" ]]; then
        dod_file="$alt"
        break
      fi
    done
  fi

  if [[ ! -f "$dod_file" ]]; then
    info "No definition-of-done found — skipping DoD audit"
    return 0
  fi

  info "Auditing Definition of Done..."

  local total=0 passed=0 failed=0
  # audit_output accumulates literal "\n" sequences; they are expanded by
  # the `echo -e` when the report is written below.
  local audit_output="# DoD Audit Results\n\n"

  while IFS= read -r line; do
    # Match unchecked markdown checklist items: "- [ ] item text".
    if [[ "$line" =~ ^[[:space:]]*-[[:space:]]*\[[[:space:]]\] ]]; then
      total=$((total + 1))
      # Strip everything up to and including the first "] " → the item text.
      local item="${line#*] }"

      # Try to verify common items
      local item_passed=false
      case "$item" in
        *"tests pass"*|*"test pass"*)
          # NOTE(review): this grep also matches benign lines like
          # "0 failed" or "error handling", marking the item failed —
          # confirm against real test-runner output.
          if [[ -f "$ARTIFACTS_DIR/test-results.log" ]] && ! grep -qi "fail\|error" "$ARTIFACTS_DIR/test-results.log" 2>/dev/null; then
            item_passed=true
          fi
          ;;
        *"lint"*|*"Lint"*)
          if [[ -f "$ARTIFACTS_DIR/lint.log" ]] && ! grep -qi "error" "$ARTIFACTS_DIR/lint.log" 2>/dev/null; then
            item_passed=true
          fi
          ;;
        *"console.log"*|*"print("*)
          # Count ADDED lines in the branch diff that introduce debug output.
          local debug_count
          debug_count=$(git diff "${BASE_BRANCH}...HEAD" 2>/dev/null | grep -c "^+.*console\.log\|^+.*print(" 2>/dev/null || true)
          debug_count="${debug_count:-0}"
          if [[ "$debug_count" -eq 0 ]]; then
            item_passed=true
          fi
          ;;
        *"coverage"*)
          item_passed=true # Trust test stage coverage check
          ;;
        *)
          item_passed=true # Default pass for items we can't auto-verify
          ;;
      esac

      if $item_passed; then
        passed=$((passed + 1))
        audit_output+="- [x] $item\n"
      else
        failed=$((failed + 1))
        audit_output+="- [ ] $item ❌\n"
      fi
    fi
  done < "$dod_file"

  echo -e "$audit_output\n\n**Score: ${passed}/${total} passed**" > "$ARTIFACTS_DIR/dod-audit.md"

  if [[ "$failed" -gt 0 ]]; then
    warn "DoD audit: ${passed}/${total} passed, ${failed} failed"
    return 1
  fi

  success "DoD audit: ${passed}/${total} passed"
  return 0
}
|
|
2857
|
+
|
|
2858
|
+
# Self-healing rebuild: gather all quality findings (adversarial review,
# negative prompting, DoD failures, security audit, API breakage) into one
# feedback document, inject it into GOAL, and re-run the build/test loop.
# Globals read:    ARTIFACTS_DIR
# Globals mutated: GOAL (temporarily augmented, restored before returning)
# Returns: propagates self_healing_build_test's status; 1 if no feedback.
compound_rebuild_with_feedback() {
  local feedback_file="$ARTIFACTS_DIR/quality-feedback.md"

  # Collect all findings
  {
    echo "# Quality Feedback — Issues to Fix"
    echo ""
    if [[ -f "$ARTIFACTS_DIR/adversarial-review.md" ]]; then
      echo "## Adversarial Review Findings"
      cat "$ARTIFACTS_DIR/adversarial-review.md"
      echo ""
    fi
    if [[ -f "$ARTIFACTS_DIR/negative-review.md" ]]; then
      echo "## Negative Prompting Concerns"
      cat "$ARTIFACTS_DIR/negative-review.md"
      echo ""
    fi
    if [[ -f "$ARTIFACTS_DIR/dod-audit.md" ]]; then
      echo "## DoD Audit Failures"
      # Only the failed (❌-marked) checklist lines are carried over.
      grep "❌" "$ARTIFACTS_DIR/dod-audit.md" 2>/dev/null || true
      echo ""
    fi
    if [[ -f "$ARTIFACTS_DIR/security-audit.log" ]] && grep -qiE 'critical|high' "$ARTIFACTS_DIR/security-audit.log" 2>/dev/null; then
      echo "## Security Audit Findings"
      cat "$ARTIFACTS_DIR/security-audit.log"
      echo ""
    fi
    if [[ -f "$ARTIFACTS_DIR/api-compat.log" ]] && grep -qi 'BREAKING' "$ARTIFACTS_DIR/api-compat.log" 2>/dev/null; then
      echo "## API Breaking Changes"
      cat "$ARTIFACTS_DIR/api-compat.log"
      echo ""
    fi
  } > "$feedback_file"

  # Validate feedback file has actual content
  # NOTE(review): the header above is written unconditionally, so this file
  # is never empty and the guard never fires — verify whether it should test
  # for findings sections instead of mere non-emptiness.
  if [[ ! -s "$feedback_file" ]]; then
    warn "No quality feedback collected — skipping rebuild"
    return 1
  fi

  # Reset build/test stages
  set_stage_status "build" "pending"
  set_stage_status "test" "pending"
  set_stage_status "review" "pending"

  # Augment GOAL with quality feedback
  local original_goal="$GOAL"
  local feedback_content
  feedback_content=$(cat "$feedback_file")
  GOAL="$GOAL

IMPORTANT — Compound quality review found issues. Fix ALL of these:
$feedback_content

Fix every issue listed above while keeping all existing functionality working."

  # Re-run self-healing build→test
  # GOAL is restored on both paths so later stages see the original goal.
  info "Rebuilding with quality feedback..."
  if self_healing_build_test; then
    GOAL="$original_goal"
    return 0
  else
    GOAL="$original_goal"
    return 1
  fi
}
|
|
2924
|
+
|
|
2925
|
+
stage_compound_quality() {
  # Compound quality stage: run up to max_cycles rounds of quality checks
  # (adversarial review, negative prompting, E2E, DoD audit, plus a set of
  # multi-dimensional checks). Any failing round triggers a feedback-driven
  # rebuild via compound_rebuild_with_feedback before the next round.
  # Returns 0 on the first fully-clean cycle, 1 when cycles are exhausted
  # or a rebuild fails.
  CURRENT_STAGE_ID="compound_quality"

  # Read config
  local max_cycles adversarial_enabled negative_enabled e2e_enabled dod_enabled strict_quality
  max_cycles=$(jq -r --arg id "compound_quality" '(.stages[] | select(.id == $id) | .config.max_cycles) // 3' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$max_cycles" || "$max_cycles" == "null" ]] && max_cycles=3
  # NOTE(review): unlike max_cycles/strict_quality, these *_enabled values
  # are not defaulted when jq fails or returns empty — a missing jq leaves
  # them empty and silently disables the corresponding checks. Confirm that
  # is intended.
  adversarial_enabled=$(jq -r --arg id "compound_quality" '(.stages[] | select(.id == $id) | .config.adversarial) // true' "$PIPELINE_CONFIG" 2>/dev/null) || true
  negative_enabled=$(jq -r --arg id "compound_quality" '(.stages[] | select(.id == $id) | .config.negative) // true' "$PIPELINE_CONFIG" 2>/dev/null) || true
  e2e_enabled=$(jq -r --arg id "compound_quality" '(.stages[] | select(.id == $id) | .config.e2e) // true' "$PIPELINE_CONFIG" 2>/dev/null) || true
  dod_enabled=$(jq -r --arg id "compound_quality" '(.stages[] | select(.id == $id) | .config.dod_audit) // true' "$PIPELINE_CONFIG" 2>/dev/null) || true
  strict_quality=$(jq -r --arg id "compound_quality" '(.stages[] | select(.id == $id) | .config.strict_quality) // false' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$strict_quality" || "$strict_quality" == "null" ]] && strict_quality="false"

  local cycle=0
  while [[ "$cycle" -lt "$max_cycles" ]]; do
    cycle=$((cycle + 1))
    local all_passed=true

    echo ""
    echo -e "${PURPLE}${BOLD}━━━ Compound Quality — Cycle ${cycle}/${max_cycles} ━━━${RESET}"

    if [[ -n "$ISSUE_NUMBER" ]]; then
      gh_comment_issue "$ISSUE_NUMBER" "🔬 **Compound quality** — cycle ${cycle}/${max_cycles}" 2>/dev/null || true
    fi

    # 1. Adversarial Review
    if [[ "$adversarial_enabled" == "true" ]]; then
      echo ""
      info "Running adversarial review..."
      if ! run_adversarial_review; then
        all_passed=false
      fi
    fi

    # 2. Negative Prompting
    if [[ "$negative_enabled" == "true" ]]; then
      echo ""
      info "Running negative prompting..."
      if ! run_negative_prompting; then
        all_passed=false
      fi
    fi

    # 3. E2E Validation
    if [[ "$e2e_enabled" == "true" ]]; then
      echo ""
      info "Running E2E validation..."
      if ! run_e2e_validation; then
        all_passed=false
      fi
    fi

    # 4. DoD Audit
    if [[ "$dod_enabled" == "true" ]]; then
      echo ""
      info "Running Definition of Done audit..."
      if ! run_dod_audit; then
        all_passed=false
      fi
    fi

    # 5. Multi-dimensional quality checks — these only block the cycle when
    # strict_quality is enabled; otherwise failures are logged and ignored.
    echo ""
    info "Running multi-dimensional quality checks..."
    local quality_failures=0

    if ! quality_check_security; then
      quality_failures=$((quality_failures + 1))
    fi
    if ! quality_check_coverage; then
      quality_failures=$((quality_failures + 1))
    fi
    if ! quality_check_perf_regression; then
      quality_failures=$((quality_failures + 1))
    fi
    if ! quality_check_bundle_size; then
      quality_failures=$((quality_failures + 1))
    fi
    if ! quality_check_api_compat; then
      quality_failures=$((quality_failures + 1))
    fi

    if [[ "$quality_failures" -gt 0 ]]; then
      if [[ "$strict_quality" == "true" ]]; then
        warn "Multi-dimensional quality: ${quality_failures} check(s) failed (strict mode — blocking)"
        all_passed=false
      else
        warn "Multi-dimensional quality: ${quality_failures} check(s) failed (non-blocking)"
      fi
    else
      success "Multi-dimensional quality: all checks passed"
    fi

    emit_event "compound.cycle" \
      "issue=${ISSUE_NUMBER:-0}" \
      "cycle=$cycle" \
      "max_cycles=$max_cycles" \
      "passed=$all_passed" \
      "self_heal_count=$SELF_HEAL_COUNT"

    # all_passed holds the literal string "true"/"false", executed as the
    # shell builtin of the same name.
    if $all_passed; then
      success "Compound quality passed on cycle ${cycle}"

      if [[ -n "$ISSUE_NUMBER" ]]; then
        gh_comment_issue "$ISSUE_NUMBER" "✅ **Compound quality passed** — cycle ${cycle}/${max_cycles}

All quality checks clean:
- Adversarial review: ✅
- Negative prompting: ✅
- E2E validation: ✅
- DoD audit: ✅
- Security audit: ✅
- Coverage: ✅
- Performance: ✅
- Bundle size: ✅
- API compat: ✅" 2>/dev/null || true
      fi

      log_stage "compound_quality" "Passed on cycle ${cycle}/${max_cycles}"
      return 0
    fi

    # Not all passed — rebuild if we have cycles left
    if [[ "$cycle" -lt "$max_cycles" ]]; then
      warn "Quality checks failed — rebuilding with feedback (cycle $((cycle + 1))/${max_cycles})"

      if ! compound_rebuild_with_feedback; then
        error "Rebuild with feedback failed"
        log_stage "compound_quality" "Rebuild failed on cycle ${cycle}"
        return 1
      fi

      # Re-run review stage too (since code changed)
      info "Re-running review after rebuild..."
      stage_review 2>/dev/null || true
    fi
  done

  # Exhausted all cycles
  error "Compound quality exhausted after ${max_cycles} cycles"

  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_comment_issue "$ISSUE_NUMBER" "❌ **Compound quality failed** after ${max_cycles} cycles

Quality issues remain. Check artifacts for details." 2>/dev/null || true
  fi

  log_stage "compound_quality" "Failed after ${max_cycles} cycles"
  return 1
}
|
|
3076
|
+
|
|
3077
|
+
# ─── Stage Runner ───────────────────────────────────────────────────────────
|
|
3078
|
+
|
|
3079
|
+
run_stage_with_retry() {
  # Invoke the stage function stage_<id>, retrying on failure up to the
  # stage's configured `.config.retries` count (default 0 = one attempt).
  # Returns the final success/failure of the stage.
  local id="$1"
  local retries
  retries=$(jq -r --arg id "$id" '(.stages[] | select(.id == $id) | .config.retries) // 0' "$PIPELINE_CONFIG" 2>/dev/null) || true
  [[ -z "$retries" || "$retries" == "null" ]] && retries=0

  local tries=0
  until "stage_${id}"; do
    tries=$((tries + 1))
    # Give up once the configured retry budget is spent.
    [[ "$tries" -gt "$retries" ]] && return 1
    warn "Stage $id failed (attempt $tries/$((retries + 1))) — retrying..."
    sleep 2
  done
  return 0
}
|
|
3100
|
+
|
|
3101
|
+
# ─── Self-Healing Build→Test Feedback Loop ─────────────────────────────────
|
|
3102
|
+
# When tests fail after a build, this captures the error and re-runs the build
|
|
3103
|
+
# with the error context, so Claude can fix the issue automatically.
|
|
3104
|
+
|
|
3105
|
+
self_healing_build_test() {
  # Build→test feedback loop: run the build stage then the test stage; if
  # tests fail, capture the last 30 lines of the test log and re-run the
  # build with that error context appended to GOAL so the agent can fix it.
  # Up to BUILD_TEST_RETRIES extra cycles are attempted (cycle 1 is the
  # initial attempt).
  # Globals:  BUILD_TEST_RETRIES, SELF_HEAL_COUNT (incremented per retry),
  #           GOAL (temporarily augmented on retries, always restored),
  #           ARTIFACTS_DIR, ISSUE_NUMBER, CURRENT_STAGE_ID.
  # Returns:  0 once tests pass; 1 when the build fails or cycles run out.
  local cycle=0
  local max_cycles="$BUILD_TEST_RETRIES"
  local last_test_error=""

  while [[ "$cycle" -le "$max_cycles" ]]; do
    cycle=$((cycle + 1))

    if [[ "$cycle" -gt 1 ]]; then
      SELF_HEAL_COUNT=$((SELF_HEAL_COUNT + 1))
      echo ""
      echo -e "${YELLOW}${BOLD}━━━ Self-Healing Cycle ${cycle}/$((max_cycles + 1)) ━━━${RESET}"
      info "Feeding test failure back to build loop..."

      if [[ -n "$ISSUE_NUMBER" ]]; then
        gh_comment_issue "$ISSUE_NUMBER" "🔄 **Self-healing cycle ${cycle}** — rebuilding with error context" 2>/dev/null || true
      fi

      # Reset build/test stage statuses for retry
      set_stage_status "build" "retrying"
      set_stage_status "test" "pending"
    fi

    # ── Run Build Stage ──
    echo ""
    echo -e "${CYAN}${BOLD}▸ Stage: build${RESET} ${DIM}[cycle ${cycle}]${RESET}"
    CURRENT_STAGE_ID="build"

    # On retry cycles, temporarily augment GOAL with the captured test
    # failure. REFACTOR: the entire build invocation below used to be
    # duplicated in both branches of an if/else on this condition; it is
    # now a single path with a conditional GOAL augmentation. Behavior is
    # unchanged (GOAL is restored to the same value either way).
    local original_goal="$GOAL"
    if [[ "$cycle" -gt 1 && -n "$last_test_error" ]]; then
      GOAL="$GOAL

IMPORTANT — Previous build attempt failed tests. Fix these errors:
$last_test_error

Focus on fixing the failing tests while keeping all passing tests working."
    fi

    update_status "running" "build"
    record_stage_start "build"

    if run_stage_with_retry "build"; then
      mark_stage_complete "build"
      local timing
      timing=$(get_stage_timing "build")
      success "Stage ${BOLD}build${RESET} complete ${DIM}(${timing})${RESET}"
      GOAL="$original_goal"
    else
      mark_stage_failed "build"
      GOAL="$original_goal"
      return 1
    fi

    # ── Run Test Stage ──
    echo ""
    echo -e "${CYAN}${BOLD}▸ Stage: test${RESET} ${DIM}[cycle ${cycle}]${RESET}"
    CURRENT_STAGE_ID="test"
    update_status "running" "test"
    record_stage_start "test"

    if run_stage_with_retry "test"; then
      mark_stage_complete "test"
      local timing
      timing=$(get_stage_timing "test")
      success "Stage ${BOLD}test${RESET} complete ${DIM}(${timing})${RESET}"
      return 0 # Tests passed!
    fi

    # Tests failed — capture error for next cycle
    local test_log="$ARTIFACTS_DIR/test-results.log"
    last_test_error=$(tail -30 "$test_log" 2>/dev/null || echo "Test command failed with no output")
    mark_stage_failed "test"

    if [[ "$cycle" -le "$max_cycles" ]]; then
      warn "Tests failed — will attempt self-healing (cycle $((cycle + 1))/$((max_cycles + 1)))"
      notify "Self-Healing" "Tests failed on cycle ${cycle}, retrying..." "warn"
    fi
  done

  error "Self-healing exhausted after $((max_cycles + 1)) cycles"
  notify "Self-Healing Failed" "Tests still failing after $((max_cycles + 1)) build-test cycles" "error"
  return 1
}
|
|
3203
|
+
|
|
3204
|
+
# ─── Auto-Rebase ──────────────────────────────────────────────────────────
|
|
3205
|
+
|
|
3206
|
+
auto_rebase() {
  # Sync the current branch with origin/$BASE_BRANCH: fetch, then rebase,
  # falling back to a merge when the rebase hits conflicts. A failed fetch
  # is treated as non-fatal (returns 0).
  info "Syncing with ${BASE_BRANCH}..."

  # Fetch latest; bail out quietly if the remote is unreachable.
  if ! git fetch origin "$BASE_BRANCH" --quiet 2>/dev/null; then
    warn "Could not fetch origin/${BASE_BRANCH}"
    return 0
  fi

  # Nothing to do when we are not behind the base branch.
  local behind_count
  behind_count=$(git rev-list --count "HEAD..origin/${BASE_BRANCH}" 2>/dev/null || echo "0")
  if [[ "$behind_count" -eq 0 ]]; then
    success "Already up to date with ${BASE_BRANCH}"
    return 0
  fi

  info "Rebasing onto origin/${BASE_BRANCH} ($behind_count commits behind)..."
  if git rebase "origin/${BASE_BRANCH}" --quiet 2>/dev/null; then
    success "Rebase successful"
    return 0
  fi

  # Rebase hit conflicts — abort it and try a merge instead.
  warn "Rebase conflict detected — aborting rebase"
  git rebase --abort 2>/dev/null || true
  warn "Falling back to merge..."
  if git merge "origin/${BASE_BRANCH}" --no-edit --quiet 2>/dev/null; then
    success "Merge successful"
    return 0
  fi

  git merge --abort 2>/dev/null || true
  error "Both rebase and merge failed — manual intervention needed"
  return 1
}
|
|
3240
|
+
|
|
3241
|
+
run_pipeline() {
  # Execute every enabled stage from PIPELINE_CONFIG in order, honoring
  # approval gates, daily-budget pauses, and the self-healing build→test
  # loop (used in place of separate build/test stages when both are enabled
  # and BUILD_TEST_RETRIES > 0). Returns 0 on full completion, 1 on the
  # first failed stage; a gate refusal or budget pause returns 0 after
  # marking the pipeline paused.
  local stages
  stages=$(jq -c '.stages[]' "$PIPELINE_CONFIG")

  local stage_count enabled_count
  stage_count=$(jq '.stages | length' "$PIPELINE_CONFIG")
  enabled_count=$(jq '[.stages[] | select(.enabled == true)] | length' "$PIPELINE_CONFIG")
  local completed=0

  # Check which stages are enabled to determine if we use the self-healing loop
  local build_enabled test_enabled
  build_enabled=$(jq -r '.stages[] | select(.id == "build") | .enabled' "$PIPELINE_CONFIG" 2>/dev/null)
  test_enabled=$(jq -r '.stages[] | select(.id == "test") | .enabled' "$PIPELINE_CONFIG" 2>/dev/null)
  local use_self_healing=false
  if [[ "$build_enabled" == "true" && "$test_enabled" == "true" && "$BUILD_TEST_RETRIES" -gt 0 ]]; then
    use_self_healing=true
  fi

  while IFS= read -r stage; do
    local id enabled gate
    id=$(echo "$stage" | jq -r '.id')
    enabled=$(echo "$stage" | jq -r '.enabled')
    gate=$(echo "$stage" | jq -r '.gate')

    CURRENT_STAGE_ID="$id"

    if [[ "$enabled" != "true" ]]; then
      echo -e " ${DIM}○ ${id} — skipped (disabled)${RESET}"
      continue
    fi

    # Skip stages already finished in a previous (resumed) run.
    local stage_status
    stage_status=$(get_stage_status "$id")
    if [[ "$stage_status" == "complete" ]]; then
      echo -e " ${GREEN}✓ ${id}${RESET} ${DIM}— already complete${RESET}"
      completed=$((completed + 1))
      continue
    fi

    # Self-healing build→test loop: when we hit build, run both together
    if [[ "$id" == "build" && "$use_self_healing" == "true" ]]; then
      # Gate check for build
      local build_gate
      build_gate=$(echo "$stage" | jq -r '.gate')
      if [[ "$build_gate" == "approve" && "$SKIP_GATES" != "true" ]]; then
        show_stage_preview "build"
        local answer=""
        read -rp " Proceed with build+test (self-healing)? [Y/n] " answer
        if [[ "$answer" =~ ^[Nn] ]]; then
          update_status "paused" "build"
          info "Pipeline paused. Resume with: ${DIM}shipwright pipeline resume${RESET}"
          return 0
        fi
      fi

      if self_healing_build_test; then
        completed=$((completed + 2)) # Both build and test
      else
        update_status "failed" "test"
        error "Pipeline failed: build→test self-healing exhausted"
        return 1
      fi
      continue
    fi

    # Skip test if already handled by self-healing loop
    if [[ "$id" == "test" && "$use_self_healing" == "true" ]]; then
      stage_status=$(get_stage_status "test")
      if [[ "$stage_status" == "complete" ]]; then
        echo -e " ${GREEN}✓ test${RESET} ${DIM}— completed in build→test loop${RESET}"
      fi
      continue
    fi

    # Gate check
    if [[ "$gate" == "approve" && "$SKIP_GATES" != "true" ]]; then
      show_stage_preview "$id"
      local answer=""
      read -rp " Proceed with ${id}? [Y/n] " answer
      if [[ "$answer" =~ ^[Nn] ]]; then
        update_status "paused" "$id"
        info "Pipeline paused at ${BOLD}$id${RESET}. Resume with: ${DIM}shipwright pipeline resume${RESET}"
        return 0
      fi
    fi

    # Budget enforcement check (skip with --ignore-budget).
    # Exit code 2 from cct-cost.sh check-budget means "budget exceeded".
    if [[ "$IGNORE_BUDGET" != "true" ]] && [[ -x "$SCRIPT_DIR/cct-cost.sh" ]]; then
      local budget_rc=0
      bash "$SCRIPT_DIR/cct-cost.sh" check-budget 2>/dev/null || budget_rc=$?
      if [[ "$budget_rc" -eq 2 ]]; then
        warn "Daily budget exceeded — pausing pipeline before stage ${BOLD}$id${RESET}"
        warn "Resume with --ignore-budget to override, or wait until tomorrow"
        emit_event "pipeline.budget_paused" "issue=${ISSUE_NUMBER:-0}" "stage=$id"
        update_status "paused" "$id"
        return 0
      fi
    fi

    echo ""
    echo -e "${CYAN}${BOLD}▸ Stage: ${id}${RESET} ${DIM}[$((completed + 1))/${enabled_count}]${RESET}"
    update_status "running" "$id"
    record_stage_start "$id"
    local stage_start_epoch
    stage_start_epoch=$(now_epoch)
    emit_event "stage.started" "issue=${ISSUE_NUMBER:-0}" "stage=$id"

    if run_stage_with_retry "$id"; then
      mark_stage_complete "$id"
      completed=$((completed + 1))
      local timing stage_dur_s
      timing=$(get_stage_timing "$id")
      stage_dur_s=$(( $(now_epoch) - stage_start_epoch ))
      success "Stage ${BOLD}$id${RESET} complete ${DIM}(${timing})${RESET}"
      emit_event "stage.completed" "issue=${ISSUE_NUMBER:-0}" "stage=$id" "duration_s=$stage_dur_s"
    else
      mark_stage_failed "$id"
      local stage_dur_s
      stage_dur_s=$(( $(now_epoch) - stage_start_epoch ))
      error "Pipeline failed at stage: ${BOLD}$id${RESET}"
      update_status "failed" "$id"
      emit_event "stage.failed" "issue=${ISSUE_NUMBER:-0}" "stage=$id" "duration_s=$stage_dur_s"
      return 1
    fi
  done <<< "$stages"

  # Pipeline complete!
  update_status "complete" ""
  local total_dur=""
  if [[ -n "$PIPELINE_START_EPOCH" ]]; then
    total_dur=$(format_duration $(( $(now_epoch) - PIPELINE_START_EPOCH )))
  fi

  echo ""
  echo -e "${GREEN}${BOLD}═══════════════════════════════════════════════════════════════════${RESET}"
  success "Pipeline complete! ${completed}/${enabled_count} stages passed in ${total_dur:-unknown}"
  echo -e "${GREEN}${BOLD}═══════════════════════════════════════════════════════════════════${RESET}"

  # Show summary
  echo ""
  if [[ -f "$ARTIFACTS_DIR/pr-url.txt" ]]; then
    echo -e " ${BOLD}PR:${RESET} $(cat "$ARTIFACTS_DIR/pr-url.txt")"
  fi
  echo -e " ${BOLD}Branch:${RESET} $GIT_BRANCH"
  [[ -n "${GITHUB_ISSUE:-}" ]] && echo -e " ${BOLD}Issue:${RESET} $GITHUB_ISSUE"
  echo -e " ${BOLD}Duration:${RESET} $total_dur"
  echo -e " ${BOLD}Artifacts:${RESET} $ARTIFACTS_DIR/"
  echo ""

  # Capture learnings to memory (success or failure)
  if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
    bash "$SCRIPT_DIR/cct-memory.sh" capture "$STATE_FILE" "$ARTIFACTS_DIR" 2>/dev/null || true
  fi

  # Final GitHub progress update
  if [[ -n "$ISSUE_NUMBER" ]]; then
    local body
    body=$(gh_build_progress_body)
    gh_update_progress "$body"
  fi
}
|
|
3402
|
+
|
|
3403
|
+
# ─── Worktree Isolation ───────────────────────────────────────────────────
|
|
3404
|
+
# Creates a git worktree for parallel-safe pipeline execution
|
|
3405
|
+
|
|
3406
|
+
pipeline_setup_worktree() {
  # Create an isolated git worktree (with a dedicated pipeline/<name> branch
  # from HEAD) so parallel pipelines don't stomp on each other's checkout,
  # then cd into it. Sets ORIGINAL_REPO_DIR and CLEANUP_WORKTREE for
  # pipeline_cleanup_worktree.
  # Returns: 0 on success, 1 if the worktree cannot be created or entered.
  local worktree_base=".worktrees"
  local name="${WORKTREE_NAME}"

  # Auto-generate name from issue number or timestamp
  if [[ -z "$name" ]]; then
    if [[ -n "${ISSUE_NUMBER:-}" ]]; then
      name="pipeline-issue-${ISSUE_NUMBER}"
    else
      name="pipeline-$(date +%s)"
    fi
  fi

  local worktree_path="${worktree_base}/${name}"
  local branch_name="pipeline/${name}"

  info "Setting up worktree: ${DIM}${worktree_path}${RESET}"

  # Ensure worktree base exists
  mkdir -p "$worktree_base"

  # Remove stale worktree if it exists
  if [[ -d "$worktree_path" ]]; then
    warn "Worktree already exists — removing: ${worktree_path}"
    git worktree remove --force "$worktree_path" 2>/dev/null || rm -rf "$worktree_path"
  fi

  # Delete stale branch if it exists
  git branch -D "$branch_name" 2>/dev/null || true

  # Create worktree with new branch from current HEAD.
  # BUG FIX: this and the cd below were previously unchecked — on failure
  # the pipeline silently kept running un-isolated in the main checkout.
  if ! git worktree add -b "$branch_name" "$worktree_path" HEAD; then
    error "Failed to create worktree: ${worktree_path}"
    return 1
  fi

  # Store original dir for cleanup, then cd into worktree
  ORIGINAL_REPO_DIR="$(pwd)"
  if ! cd "$worktree_path"; then
    error "Failed to enter worktree: ${worktree_path}"
    return 1
  fi
  CLEANUP_WORKTREE=true

  success "Worktree ready: ${CYAN}${worktree_path}${RESET} (branch: ${branch_name})"
}
|
|
3446
|
+
|
|
3447
|
+
pipeline_cleanup_worktree() {
  # Tear down the worktree created by pipeline_setup_worktree.
  # No-op unless CLEANUP_WORKTREE was set to "true" by a successful setup.
  [[ "${CLEANUP_WORKTREE:-false}" == "true" ]] || return 0

  local wt_dir
  wt_dir="$(pwd)"

  # Only remove when we are actually inside a worktree, i.e. the original
  # repo dir is known and differs from the current directory.
  [[ -n "${ORIGINAL_REPO_DIR:-}" ]] || return 0
  [[ "$wt_dir" != "$ORIGINAL_REPO_DIR" ]] || return 0

  cd "$ORIGINAL_REPO_DIR" 2>/dev/null || cd /
  info "Cleaning up worktree: ${DIM}${wt_dir}${RESET}"
  git worktree remove --force "$wt_dir" 2>/dev/null || true
}
|
|
3461
|
+
|
|
3462
|
+
# ─── Subcommands ────────────────────────────────────────────────────────────
|
|
3463
|
+
|
|
3464
|
+
pipeline_start() {
  # Entry point for `shipwright pipeline start`: validate inputs, optionally
  # isolate into a git worktree, print the run summary, then execute the
  # pipeline and emit completion/cost events and notifications.
  if [[ -z "$GOAL" && -z "$ISSUE_NUMBER" ]]; then
    error "Must provide --goal or --issue"
    echo -e " Example: ${DIM}shipwright pipeline start --goal \"Add JWT auth\"${RESET}"
    echo -e " Example: ${DIM}shipwright pipeline start --issue 123${RESET}"
    exit 1
  fi

  if ! command -v jq &>/dev/null; then
    error "jq is required. Install it: brew install jq"
    exit 1
  fi

  # Set up worktree isolation if requested
  if [[ "$AUTO_WORKTREE" == "true" ]]; then
    pipeline_setup_worktree
  fi

  # Register worktree cleanup on exit (chain with existing cleanup).
  # NOTE(review): the EXIT trap runs only pipeline_cleanup_worktree and may
  # overwrite any EXIT trap installed earlier — confirm cleanup_on_exit is
  # not expected on the EXIT path too.
  if [[ "$CLEANUP_WORKTREE" == "true" ]]; then
    trap 'pipeline_cleanup_worktree; cleanup_on_exit' SIGINT SIGTERM
    trap 'pipeline_cleanup_worktree' EXIT
  fi

  setup_dirs

  # Check for existing pipeline — refuse to start over one that is still
  # running/paused/interrupted.
  if [[ -f "$STATE_FILE" ]]; then
    local existing_status
    existing_status=$(sed -n 's/^status: *//p' "$STATE_FILE" | head -1)
    if [[ "$existing_status" == "running" || "$existing_status" == "paused" || "$existing_status" == "interrupted" ]]; then
      warn "A pipeline is already in progress (status: $existing_status)"
      echo -e " Resume it: ${DIM}shipwright pipeline resume${RESET}"
      echo -e " Abort it: ${DIM}shipwright pipeline abort${RESET}"
      exit 1
    fi
  fi

  # Pre-flight checks
  preflight_checks || exit 1

  # Initialize GitHub integration
  gh_init

  load_pipeline_config
  initialize_state

  echo ""
  echo -e "${PURPLE}${BOLD}╔═══════════════════════════════════════════════════════════════════╗${RESET}"
  echo -e "${PURPLE}${BOLD}║ shipwright pipeline — Autonomous Feature Delivery ║${RESET}"
  echo -e "${PURPLE}${BOLD}╚═══════════════════════════════════════════════════════════════════╝${RESET}"
  echo ""

  # Comprehensive environment summary
  if [[ -n "$GOAL" ]]; then
    echo -e " ${BOLD}Goal:${RESET} $GOAL"
  fi
  if [[ -n "$ISSUE_NUMBER" ]]; then
    echo -e " ${BOLD}Issue:${RESET} #$ISSUE_NUMBER"
  fi

  echo -e " ${BOLD}Pipeline:${RESET} $PIPELINE_NAME"

  local enabled_stages
  enabled_stages=$(jq -r '.stages[] | select(.enabled == true) | .id' "$PIPELINE_CONFIG" | tr '\n' ' ')
  echo -e " ${BOLD}Stages:${RESET} $enabled_stages"

  local gate_count
  gate_count=$(jq '[.stages[] | select(.gate == "approve" and .enabled == true)] | length' "$PIPELINE_CONFIG")
  if [[ "$SKIP_GATES" == "true" ]]; then
    echo -e " ${BOLD}Gates:${RESET} ${YELLOW}all auto (--skip-gates)${RESET}"
  else
    echo -e " ${BOLD}Gates:${RESET} ${gate_count} approval gate(s)"
  fi

  echo -e " ${BOLD}Model:${RESET} ${MODEL:-$(jq -r '.defaults.model // "opus"' "$PIPELINE_CONFIG")}"
  echo -e " ${BOLD}Self-heal:${RESET} ${BUILD_TEST_RETRIES} retry cycle(s)"

  if [[ "$GH_AVAILABLE" == "true" ]]; then
    echo -e " ${BOLD}GitHub:${RESET} ${GREEN}✓${RESET} ${DIM}${REPO_OWNER}/${REPO_NAME}${RESET}"
  else
    echo -e " ${BOLD}GitHub:${RESET} ${DIM}disabled${RESET}"
  fi

  if [[ -n "$SLACK_WEBHOOK" ]]; then
    echo -e " ${BOLD}Slack:${RESET} ${GREEN}✓${RESET} notifications enabled"
  fi

  echo ""

  if [[ "$DRY_RUN" == "true" ]]; then
    info "Dry run — no stages will execute"
    return 0
  fi

  # Send start notification
  notify "Pipeline Started" "Goal: ${GOAL}\nPipeline: ${PIPELINE_NAME}" "info"

  emit_event "pipeline.started" \
    "issue=${ISSUE_NUMBER:-0}" \
    "pipeline=${PIPELINE_NAME}" \
    "model=${MODEL:-opus}" \
    "goal=${GOAL}"

  # NOTE(review): capturing $? after run_pipeline assumes a non-zero status
  # does not abort the script first — confirm `set -e` is not active here.
  run_pipeline
  local exit_code=$?

  # Send completion notification + event
  local total_dur_s=""
  [[ -n "$PIPELINE_START_EPOCH" ]] && total_dur_s=$(( $(now_epoch) - PIPELINE_START_EPOCH ))
  if [[ "$exit_code" -eq 0 ]]; then
    local total_dur=""
    [[ -n "$total_dur_s" ]] && total_dur=$(format_duration "$total_dur_s")
    local pr_url
    pr_url=$(cat "$ARTIFACTS_DIR/pr-url.txt" 2>/dev/null || echo "")
    notify "Pipeline Complete" "Goal: ${GOAL}\nDuration: ${total_dur:-unknown}\nPR: ${pr_url:-N/A}" "success"
    emit_event "pipeline.completed" \
      "issue=${ISSUE_NUMBER:-0}" \
      "result=success" \
      "duration_s=${total_dur_s:-0}" \
      "pr_url=${pr_url:-}" \
      "input_tokens=$TOTAL_INPUT_TOKENS" \
      "output_tokens=$TOTAL_OUTPUT_TOKENS" \
      "self_heal_count=$SELF_HEAL_COUNT"
  else
    notify "Pipeline Failed" "Goal: ${GOAL}\nFailed at: ${CURRENT_STAGE_ID:-unknown}" "error"
    emit_event "pipeline.completed" \
      "issue=${ISSUE_NUMBER:-0}" \
      "result=failure" \
      "duration_s=${total_dur_s:-0}" \
      "failed_stage=${CURRENT_STAGE_ID:-unknown}" \
      "input_tokens=$TOTAL_INPUT_TOKENS" \
      "output_tokens=$TOTAL_OUTPUT_TOKENS" \
      "self_heal_count=$SELF_HEAL_COUNT"

    # Capture failure learnings to memory
    if [[ -x "$SCRIPT_DIR/cct-memory.sh" ]]; then
      bash "$SCRIPT_DIR/cct-memory.sh" capture "$STATE_FILE" "$ARTIFACTS_DIR" 2>/dev/null || true
      bash "$SCRIPT_DIR/cct-memory.sh" analyze-failure "$ARTIFACTS_DIR/.claude-tokens-${CURRENT_STAGE_ID:-build}.log" "${CURRENT_STAGE_ID:-unknown}" 2>/dev/null || true
    fi
  fi

  # Emit cost event — USD estimate from token totals at per-model rates
  # looked up in COST_MODEL_RATES (defaults: 3 in / 15 out per 1M tokens).
  local model_key="${MODEL:-sonnet}"
  local input_cost output_cost total_cost
  input_cost=$(awk -v tokens="$TOTAL_INPUT_TOKENS" -v rate="$(echo "$COST_MODEL_RATES" | jq -r ".${model_key}.input // 3")" 'BEGIN{printf "%.4f", (tokens / 1000000) * rate}')
  output_cost=$(awk -v tokens="$TOTAL_OUTPUT_TOKENS" -v rate="$(echo "$COST_MODEL_RATES" | jq -r ".${model_key}.output // 15")" 'BEGIN{printf "%.4f", (tokens / 1000000) * rate}')
  total_cost=$(awk -v i="$input_cost" -v o="$output_cost" 'BEGIN{printf "%.4f", i + o}')

  emit_event "pipeline.cost" \
    "input_tokens=$TOTAL_INPUT_TOKENS" \
    "output_tokens=$TOTAL_OUTPUT_TOKENS" \
    "model=$model_key" \
    "estimated_cost_usd=$total_cost"

  return $exit_code
}
|
|
3621
|
+
|
|
3622
|
+
pipeline_resume() {
  # Resume a paused/interrupted pipeline: re-create directories, reload the
  # persisted state file, then continue executing the remaining stages
  # (run_pipeline skips stages already marked complete).
  setup_dirs
  resume_state
  echo ""
  run_pipeline
}
|
|
3628
|
+
|
|
3629
|
+
# Print a human-readable status report for the current pipeline.
# Parses the YAML-ish frontmatter and "stages:" section of $STATE_FILE by hand
# (no yq dependency) and summarizes artifacts on disk.
# Globals:  STATE_FILE, ARTIFACTS_DIR, color vars (PURPLE/BOLD/CYAN/GREEN/...)
# Outputs:  formatted status to stdout
# Returns:  0 always (early return when no pipeline is active)
pipeline_status() {
  setup_dirs

  if [[ ! -f "$STATE_FILE" ]]; then
    info "No active pipeline."
    echo -e " Start one: ${DIM}shipwright pipeline start --goal \"...\"${RESET}"
    return
  fi

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Pipeline Status ━━━${RESET}"
  echo ""

  # ── Frontmatter: the block between the first and second "---" lines ──
  local p_name="" p_goal="" p_status="" p_branch="" p_stage="" p_started="" p_issue="" p_elapsed="" p_pr=""
  local in_frontmatter=false
  while IFS= read -r line; do
    if [[ "$line" == "---" ]]; then
      # Second delimiter ends the frontmatter; first one opens it.
      if $in_frontmatter; then break; else in_frontmatter=true; continue; fi
    fi
    if $in_frontmatter; then
      # xargs trims surrounding whitespace; sed strips optional quoting.
      case "$line" in
        pipeline:*) p_name="$(echo "${line#pipeline:}" | xargs)" ;;
        goal:*) p_goal="$(echo "${line#goal:}" | sed 's/^ *"//;s/" *$//')" ;;
        status:*) p_status="$(echo "${line#status:}" | xargs)" ;;
        branch:*) p_branch="$(echo "${line#branch:}" | sed 's/^ *"//;s/" *$//')" ;;
        current_stage:*) p_stage="$(echo "${line#current_stage:}" | xargs)" ;;
        started_at:*) p_started="$(echo "${line#started_at:}" | xargs)" ;;
        issue:*) p_issue="$(echo "${line#issue:}" | sed 's/^ *"//;s/" *$//')" ;;
        elapsed:*) p_elapsed="$(echo "${line#elapsed:}" | xargs)" ;;
        pr_number:*) p_pr="$(echo "${line#pr_number:}" | xargs)" ;;
      esac
    fi
  done < "$STATE_FILE"

  # Map pipeline status to a colored glyph.
  local status_icon
  case "$p_status" in
    running) status_icon="${CYAN}●${RESET}" ;;
    complete) status_icon="${GREEN}✓${RESET}" ;;
    paused) status_icon="${YELLOW}⏸${RESET}" ;;
    interrupted) status_icon="${YELLOW}⚡${RESET}" ;;
    failed) status_icon="${RED}✗${RESET}" ;;
    aborted) status_icon="${RED}◼${RESET}" ;;
    *) status_icon="${DIM}○${RESET}" ;;
  esac

  echo -e " ${BOLD}Pipeline:${RESET} $p_name"
  echo -e " ${BOLD}Goal:${RESET} $p_goal"
  echo -e " ${BOLD}Status:${RESET} $status_icon $p_status"
  # Optional fields: only printed when present in the state file.
  [[ -n "$p_branch" ]] && echo -e " ${BOLD}Branch:${RESET} $p_branch"
  [[ -n "$p_issue" ]] && echo -e " ${BOLD}Issue:${RESET} $p_issue"
  [[ -n "$p_pr" ]] && echo -e " ${BOLD}PR:${RESET} #$p_pr"
  [[ -n "$p_stage" ]] && echo -e " ${BOLD}Stage:${RESET} $p_stage"
  [[ -n "$p_started" ]] && echo -e " ${BOLD}Started:${RESET} $p_started"
  [[ -n "$p_elapsed" ]] && echo -e " ${BOLD}Elapsed:${RESET} $p_elapsed"

  echo ""
  echo -e " ${BOLD}Stages:${RESET}"

  # ── Per-stage statuses: indented "id: status" lines under "stages:" ──
  local in_stages=false
  while IFS= read -r line; do
    if [[ "$line" == "stages:" ]]; then
      in_stages=true; continue
    fi
    if $in_stages; then
      # A "---" or any non-indented line ends the stages section.
      if [[ "$line" == "---" || ! "$line" =~ ^" " ]]; then break; fi
      local trimmed
      trimmed="$(echo "$line" | xargs)"
      if [[ "$trimmed" == *":"* ]]; then
        local sid="${trimmed%%:*}"
        local sst="${trimmed#*: }"
        local s_icon
        case "$sst" in
          complete) s_icon="${GREEN}✓${RESET}" ;;
          running) s_icon="${CYAN}●${RESET}" ;;
          failed) s_icon="${RED}✗${RESET}" ;;
          *) s_icon="${DIM}○${RESET}" ;;
        esac
        echo -e " $s_icon $sid"
      fi
    fi
  done < "$STATE_FILE"

  # ── Artifacts summary ──
  # Fix: the old code counted files with a recursive `find` but listed them by
  # parsing `ls` output (SC2012) — top-level only, dotfiles excluded — so the
  # count and the listing could disagree. Count and list the same set: direct
  # children of ARTIFACTS_DIR, via globbing instead of ls parsing.
  if [[ -d "$ARTIFACTS_DIR" ]]; then
    local artifacts=() entry
    for entry in "$ARTIFACTS_DIR"/*; do
      [[ -e "$entry" ]] || continue   # glob may not match anything
      artifacts+=("${entry##*/}")
    done
    if [[ "${#artifacts[@]}" -gt 0 ]]; then
      echo ""
      echo -e " ${BOLD}Artifacts:${RESET} (${#artifacts[@]} files)"
      printf ' %s\n' "${artifacts[@]}"
    fi
  fi
  echo ""
}
|
|
3722
|
+
|
|
3723
|
+
# Abort the active pipeline: mark its state "aborted", persist it, and notify
# the linked GitHub issue (label removal + comment) when one exists.
# Globals:  STATE_FILE, PIPELINE_STATUS (written), ISSUE_NUMBER, CURRENT_STAGE
# Outputs:  informational messages to stdout
# Returns:  0 always
pipeline_abort() {
  setup_dirs

  # Nothing to do without a state file.
  [[ -f "$STATE_FILE" ]] || {
    info "No active pipeline to abort."
    return
  }

  # Read the recorded status straight from the state file.
  local state_now
  state_now=$(sed -n 's/^status: *//p' "$STATE_FILE" | head -1)

  # Already in a terminal state — aborting again is a no-op.
  case "$state_now" in
    complete|aborted)
      info "Pipeline already $state_now."
      return
      ;;
  esac

  # Load what we can of the saved state (best-effort), then persist the abort.
  resume_state 2>/dev/null || true
  PIPELINE_STATUS="aborted"
  write_state

  # Reflect the abort on the linked GitHub issue, if any.
  if [[ -n "$ISSUE_NUMBER" ]]; then
    gh_init
    gh_remove_label "$ISSUE_NUMBER" "pipeline/in-progress"
    gh_comment_issue "$ISSUE_NUMBER" "⏹️ **Pipeline aborted** at stage: ${CURRENT_STAGE:-unknown}"
  fi

  warn "Pipeline aborted."
  echo -e " State saved at: ${DIM}$STATE_FILE${RESET}"
}
|
|
3753
|
+
|
|
3754
|
+
# List every pipeline template found in the known template directories,
# showing each template's name, description, enabled stage chain, and how
# many enabled stages require manual approval.
# Globals:  REPO_DIR, HOME, color vars
# Outputs:  formatted template listing to stdout
# Returns:  0 always
pipeline_list() {
  # Search order: repo-bundled templates first, then the user's own.
  local search_dirs=(
    "$REPO_DIR/templates/pipelines"
    "$HOME/.claude-teams/pipelines"
  )

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Pipeline Templates ━━━${RESET}"
  echo ""

  local any_found=false
  local tpl_dir tpl
  for tpl_dir in "${search_dirs[@]}"; do
    [[ -d "$tpl_dir" ]] || continue
    for tpl in "$tpl_dir"/*.json; do
      [[ -f "$tpl" ]] || continue   # glob may not match anything
      any_found=true
      # Pull display fields out of the template JSON.
      local name desc stages_enabled gate_count
      name=$(jq -r '.name' "$tpl" 2>/dev/null)
      desc=$(jq -r '.description' "$tpl" 2>/dev/null)
      stages_enabled=$(jq -r '[.stages[] | select(.enabled == true) | .id] | join(" → ")' "$tpl" 2>/dev/null)
      gate_count=$(jq '[.stages[] | select(.gate == "approve" and .enabled == true)] | length' "$tpl" 2>/dev/null)
      echo -e " ${CYAN}${BOLD}$name${RESET}"
      echo -e " $desc"
      echo -e " ${DIM}$stages_enabled${RESET}"
      echo -e " ${DIM}(${gate_count} approval gates)${RESET}"
      echo ""
    done
  done

  if [[ "$any_found" != "true" ]]; then
    warn "No pipeline templates found."
    echo -e " Expected at: ${DIM}templates/pipelines/*.json${RESET}"
  fi
}
|
|
3789
|
+
|
|
3790
|
+
# Show the full definition of one pipeline template: defaults, stage list
# (with enable/gate markers), and the GitHub integration summary.
# Globals:  PIPELINE_NAME_ARG, PIPELINE_NAME, color vars
# Outputs:  formatted template detail to stdout
# Returns:  0 on success; exits 1 when the template cannot be found
pipeline_show() {
  # Explicit argument wins over the configured default name.
  local name="${PIPELINE_NAME_ARG:-$PIPELINE_NAME}"

  local config_file
  config_file=$(find_pipeline_config "$name") || {
    error "Pipeline template not found: $name"
    echo -e " Available: ${DIM}shipwright pipeline list${RESET}"
    exit 1
  }

  # Hoist the header fields so jq runs once per field.
  local tpl_name tpl_desc
  tpl_name=$(jq -r '.name' "$config_file")
  tpl_desc=$(jq -r '.description' "$config_file")

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Pipeline: $tpl_name ━━━${RESET}"
  echo -e " $tpl_desc"
  echo ""

  # key: value listing of the template's default settings.
  echo -e "${BOLD} Defaults:${RESET}"
  jq -r '.defaults | to_entries[] | " \(.key): \(.value)"' "$config_file" 2>/dev/null
  echo ""

  # One line per stage: ✓/○ for enabled, plus any gate annotation.
  echo -e "${BOLD} Stages:${RESET}"
  jq -r '.stages[] |
    (if .enabled then " ✓" else " ○" end) +
    " \(.id)" +
    (if .gate == "approve" then " [gate: approve]" elif .gate == "skip" then " [skip]" else "" end)
  ' "$config_file" 2>/dev/null
  echo ""

  # Static summary of what the pipeline automates on GitHub.
  echo -e "${BOLD} GitHub Integration:${RESET}"
  echo -e " • Issue: self-assign, label lifecycle, progress comments"
  echo -e " • PR: labels, milestone, reviewers auto-propagated"
  echo -e " • Validation: auto-close issue on completion"
  echo ""
}
|
|
3823
|
+
|
|
3824
|
+
# ─── Main ───────────────────────────────────────────────────────────────────
|
|
3825
|
+
|
|
3826
|
+
# Dispatch the parsed subcommand to its handler.
case "$SUBCOMMAND" in
  start) pipeline_start ;;
  resume) pipeline_resume ;;
  status) pipeline_status ;;
  abort) pipeline_abort ;;
  list) pipeline_list ;;
  show) pipeline_show ;;
  test)
    # Delegate to the sibling test script, replacing this process.
    SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
    exec "$SCRIPT_DIR/cct-pipeline-test.sh" "$@"
    ;;
  help|--help|-h) show_help ;;
  *)
    # Unknown subcommand: report it, print usage, and fail.
    error "Unknown pipeline command: $SUBCOMMAND"
    echo ""
    show_help
    exit 1
    ;;
esac