shipwright-cli 1.7.1 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. package/.claude/agents/code-reviewer.md +90 -0
  2. package/.claude/agents/devops-engineer.md +142 -0
  3. package/.claude/agents/pipeline-agent.md +80 -0
  4. package/.claude/agents/shell-script-specialist.md +150 -0
  5. package/.claude/agents/test-specialist.md +196 -0
  6. package/.claude/hooks/post-tool-use.sh +45 -0
  7. package/.claude/hooks/pre-tool-use.sh +25 -0
  8. package/.claude/hooks/session-started.sh +37 -0
  9. package/README.md +212 -814
  10. package/claude-code/CLAUDE.md.shipwright +54 -0
  11. package/claude-code/hooks/notify-idle.sh +2 -2
  12. package/claude-code/hooks/session-start.sh +24 -0
  13. package/claude-code/hooks/task-completed.sh +6 -2
  14. package/claude-code/settings.json.template +12 -0
  15. package/dashboard/public/app.js +4422 -0
  16. package/dashboard/public/index.html +816 -0
  17. package/dashboard/public/styles.css +4755 -0
  18. package/dashboard/server.ts +4315 -0
  19. package/docs/KNOWN-ISSUES.md +18 -10
  20. package/docs/TIPS.md +38 -26
  21. package/docs/patterns/README.md +33 -23
  22. package/package.json +9 -5
  23. package/scripts/adapters/iterm2-adapter.sh +1 -1
  24. package/scripts/adapters/tmux-adapter.sh +52 -23
  25. package/scripts/adapters/wezterm-adapter.sh +26 -14
  26. package/scripts/lib/compat.sh +200 -0
  27. package/scripts/lib/helpers.sh +72 -0
  28. package/scripts/postinstall.mjs +72 -13
  29. package/scripts/{cct → sw} +118 -22
  30. package/scripts/sw-adversarial.sh +274 -0
  31. package/scripts/sw-architecture-enforcer.sh +330 -0
  32. package/scripts/sw-checkpoint.sh +468 -0
  33. package/scripts/sw-cleanup.sh +359 -0
  34. package/scripts/sw-connect.sh +619 -0
  35. package/scripts/{cct-cost.sh → sw-cost.sh} +368 -34
  36. package/scripts/sw-daemon.sh +5574 -0
  37. package/scripts/sw-dashboard.sh +477 -0
  38. package/scripts/sw-developer-simulation.sh +252 -0
  39. package/scripts/sw-docs.sh +635 -0
  40. package/scripts/sw-doctor.sh +907 -0
  41. package/scripts/{cct-fix.sh → sw-fix.sh} +10 -6
  42. package/scripts/{cct-fleet.sh → sw-fleet.sh} +498 -22
  43. package/scripts/sw-github-checks.sh +521 -0
  44. package/scripts/sw-github-deploy.sh +533 -0
  45. package/scripts/sw-github-graphql.sh +972 -0
  46. package/scripts/sw-heartbeat.sh +293 -0
  47. package/scripts/{cct-init.sh → sw-init.sh} +144 -11
  48. package/scripts/sw-intelligence.sh +1196 -0
  49. package/scripts/sw-jira.sh +643 -0
  50. package/scripts/sw-launchd.sh +364 -0
  51. package/scripts/sw-linear.sh +648 -0
  52. package/scripts/{cct-logs.sh → sw-logs.sh} +72 -2
  53. package/scripts/sw-loop.sh +2217 -0
  54. package/scripts/{cct-memory.sh → sw-memory.sh} +514 -36
  55. package/scripts/sw-patrol-meta.sh +417 -0
  56. package/scripts/sw-pipeline-composer.sh +455 -0
  57. package/scripts/sw-pipeline-vitals.sh +1096 -0
  58. package/scripts/sw-pipeline.sh +7593 -0
  59. package/scripts/sw-predictive.sh +820 -0
  60. package/scripts/{cct-prep.sh → sw-prep.sh} +339 -49
  61. package/scripts/{cct-ps.sh → sw-ps.sh} +9 -6
  62. package/scripts/{cct-reaper.sh → sw-reaper.sh} +10 -6
  63. package/scripts/sw-remote.sh +687 -0
  64. package/scripts/sw-self-optimize.sh +1048 -0
  65. package/scripts/sw-session.sh +541 -0
  66. package/scripts/sw-setup.sh +234 -0
  67. package/scripts/sw-status.sh +796 -0
  68. package/scripts/{cct-templates.sh → sw-templates.sh} +9 -4
  69. package/scripts/sw-tmux.sh +591 -0
  70. package/scripts/sw-tracker-jira.sh +277 -0
  71. package/scripts/sw-tracker-linear.sh +292 -0
  72. package/scripts/sw-tracker.sh +409 -0
  73. package/scripts/{cct-upgrade.sh → sw-upgrade.sh} +103 -46
  74. package/scripts/{cct-worktree.sh → sw-worktree.sh} +3 -0
  75. package/templates/pipelines/autonomous.json +35 -6
  76. package/templates/pipelines/cost-aware.json +21 -0
  77. package/templates/pipelines/deployed.json +40 -6
  78. package/templates/pipelines/enterprise.json +16 -2
  79. package/templates/pipelines/fast.json +19 -0
  80. package/templates/pipelines/full.json +28 -2
  81. package/templates/pipelines/hotfix.json +19 -0
  82. package/templates/pipelines/standard.json +31 -0
  83. package/tmux/{claude-teams-overlay.conf → shipwright-overlay.conf} +27 -9
  84. package/tmux/templates/accessibility.json +34 -0
  85. package/tmux/templates/api-design.json +35 -0
  86. package/tmux/templates/architecture.json +1 -0
  87. package/tmux/templates/bug-fix.json +9 -0
  88. package/tmux/templates/code-review.json +1 -0
  89. package/tmux/templates/compliance.json +36 -0
  90. package/tmux/templates/data-pipeline.json +36 -0
  91. package/tmux/templates/debt-paydown.json +34 -0
  92. package/tmux/templates/devops.json +1 -0
  93. package/tmux/templates/documentation.json +1 -0
  94. package/tmux/templates/exploration.json +1 -0
  95. package/tmux/templates/feature-dev.json +1 -0
  96. package/tmux/templates/full-stack.json +8 -0
  97. package/tmux/templates/i18n.json +34 -0
  98. package/tmux/templates/incident-response.json +36 -0
  99. package/tmux/templates/migration.json +1 -0
  100. package/tmux/templates/observability.json +35 -0
  101. package/tmux/templates/onboarding.json +33 -0
  102. package/tmux/templates/performance.json +35 -0
  103. package/tmux/templates/refactor.json +1 -0
  104. package/tmux/templates/release.json +35 -0
  105. package/tmux/templates/security-audit.json +8 -0
  106. package/tmux/templates/spike.json +34 -0
  107. package/tmux/templates/testing.json +1 -0
  108. package/tmux/tmux.conf +98 -9
  109. package/scripts/cct-cleanup.sh +0 -172
  110. package/scripts/cct-daemon.sh +0 -3189
  111. package/scripts/cct-doctor.sh +0 -414
  112. package/scripts/cct-loop.sh +0 -1332
  113. package/scripts/cct-pipeline.sh +0 -3844
  114. package/scripts/cct-session.sh +0 -284
  115. package/scripts/cct-status.sh +0 -169
@@ -0,0 +1,455 @@
1
+ #!/usr/bin/env bash
2
+ # ╔═══════════════════════════════════════════════════════════════════════════╗
3
+ # ║ shipwright pipeline composer — Dynamic Pipeline Composition ║
4
+ # ║ AI-driven stage selection · Conditional insertion · Model routing ║
5
+ # ╚═══════════════════════════════════════════════════════════════════════════╝
6
+ set -euo pipefail
7
+ trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
+
9
+ VERSION="1.10.0"
10
+ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
+ REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
+
13
+ # ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
14
+ CYAN='\033[38;2;0;212;255m' # #00d4ff — primary accent
15
+ PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
16
+ BLUE='\033[38;2;0;102;255m' # #0066ff — tertiary
17
+ GREEN='\033[38;2;74;222;128m' # success
18
+ YELLOW='\033[38;2;250;204;21m' # warning
19
+ RED='\033[38;2;248;113;113m' # error
20
+ DIM='\033[2m'
21
+ BOLD='\033[1m'
22
+ RESET='\033[0m'
23
+
24
+ # ─── Cross-platform compatibility ──────────────────────────────────────────
25
+ # shellcheck source=lib/compat.sh
26
+ [[ -f "$SCRIPT_DIR/lib/compat.sh" ]] && source "$SCRIPT_DIR/lib/compat.sh"
27
+
28
# Colored status-line helpers. %b interprets the escape sequences stored in
# the color variables (same semantics as `echo -e`). error() goes to stderr.
info()    { printf '%b\n' "${CYAN}${BOLD}▸${RESET} $*"; }
success() { printf '%b\n' "${GREEN}${BOLD}✓${RESET} $*"; }
warn()    { printf '%b\n' "${YELLOW}${BOLD}⚠${RESET} $*"; }
error()   { printf '%b\n' "${RED}${BOLD}✗${RESET} $*" >&2; }
32
+
33
# Timestamp helpers: ISO-8601 UTC for human-readable logs, epoch seconds for
# arithmetic.
now_iso()   { date -u '+%Y-%m-%dT%H:%M:%SZ'; }
now_epoch() { date '+%s'; }

# Shared append-only event log consumed by the dashboard/daemon.
EVENTS_FILE="$HOME/.shipwright/events.jsonl"
37
+
38
# Append one structured event to $EVENTS_FILE as a single JSON line.
# Args:    $1 event type; remaining args are key=value pairs.
# Values that look like numbers are emitted as JSON numbers, everything else
# as JSON strings. Creates ~/.shipwright on first use.
emit_event() {
  local event_type="$1"
  shift
  local json_fields=""
  for kv in "$@"; do
    local key="${kv%%=*}"
    local val="${kv#*=}"
    # Only well-formed numbers may go unquoted: the old pattern accepted a
    # trailing dot ("5."), which is not a valid JSON number.
    if [[ "$val" =~ ^-?[0-9]+(\.[0-9]+)?$ ]]; then
      json_fields="${json_fields},\"${key}\":${val}"
    else
      # Escape backslashes BEFORE quotes; escaping quotes alone produced
      # invalid JSON for any value containing a backslash.
      val="${val//\\/\\\\}"
      val="${val//\"/\\\"}"
      json_fields="${json_fields},\"${key}\":\"${val}\""
    fi
  done
  mkdir -p "${HOME}/.shipwright"
  echo "{\"ts\":\"$(now_iso)\",\"ts_epoch\":$(now_epoch),\"type\":\"${event_type}\"${json_fields}}" >> "$EVENTS_FILE"
}
55
+
56
# ─── Source intelligence engine ─────────────────────────────────────────────
# When sw-intelligence.sh ships next to this script, pull in its
# intelligence_* helpers. INTELLIGENCE_AVAILABLE gates every optional call
# below, so the composer degrades gracefully without it.
INTELLIGENCE_AVAILABLE=false
if [[ -f "$SCRIPT_DIR/sw-intelligence.sh" ]]; then
  source "$SCRIPT_DIR/sw-intelligence.sh"
  INTELLIGENCE_AVAILABLE=true
fi

# ─── Default template directory ─────────────────────────────────────────────
TEMPLATES_DIR="${REPO_DIR}/templates/pipelines" # static templates shipped with the repo
ARTIFACTS_DIR=".claude/pipeline-artifacts"      # output dir, relative to the caller's CWD
66
+
67
+ # ─── GitHub CI History ─────────────────────────────────────────────────────
68
+
69
# Summarize recent GitHub Actions history for the detected repo as JSON:
# {avg_ci_duration, p90_ci_duration} in seconds. Prints "{}" whenever the gh
# helper functions are missing or repo detection fails, so callers can always
# parse the output.
_composer_github_ci_history() {
  # Bail out early (with an empty object) if the gh helpers are not loaded.
  type _gh_detect_repo &>/dev/null || { echo "{}"; return 0; }
  _gh_detect_repo 2>/dev/null || { echo "{}"; return 0; }

  local owner="${GH_OWNER:-}" repo="${GH_REPO:-}"
  if [[ -z "$owner" || -z "$repo" ]]; then
    echo "{}"
    return 0
  fi

  if ! type gh_actions_runs &>/dev/null; then
    echo "{}"
    return 0
  fi

  # Last 20 runs; any jq failure degrades to 0 rather than aborting.
  local runs avg_secs p90_secs
  runs=$(gh_actions_runs "$owner" "$repo" "" 20 2>/dev/null || echo "[]")
  avg_secs=$(echo "$runs" | jq '[.[] | .duration_seconds // 0] | if length > 0 then add / length | floor else 0 end' 2>/dev/null || echo "0")
  p90_secs=$(echo "$runs" | jq '[.[] | .duration_seconds // 0] | sort | if length > 0 then .[length * 9 / 10 | floor] else 0 end' 2>/dev/null || echo "0")
  jq -n --argjson avg "${avg_secs:-0}" --argjson p90 "${p90_secs:-0}" \
    '{avg_ci_duration: $avg, p90_ci_duration: $p90}'
}
88
+
89
+ # ═══════════════════════════════════════════════════════════════════════════════
90
+ # PIPELINE COMPOSITION
91
+ # ═══════════════════════════════════════════════════════════════════════════════
92
+
93
# Create a composed pipeline from intelligence analysis.
# Args:    $1 issue_analysis_json  $2 repo_context  $3 budget_json (all optional)
# Outputs: the path of the written pipeline file on stdout; all status chatter
#          is sent to stderr so the path stays machine-readable.
# Returns: 0 on success (intelligence OR template fallback); 1 only when no
#          fallback template exists.
# Side effects: writes ${ARTIFACTS_DIR}/composed-pipeline.json atomically
#          (mktemp + mv) and appends a "composer.created" event.
composer_create_pipeline() {
  local issue_analysis="${1:-}"
  local repo_context="${2:-}"
  local budget_json="${3:-}"

  local output_dir="${ARTIFACTS_DIR}"
  local output_file="${output_dir}/composed-pipeline.json"

  mkdir -p "$output_dir"

  # Enrich with GitHub CI history if available (best-effort; "{}" on failure).
  local ci_history
  ci_history=$(_composer_github_ci_history 2>/dev/null || echo "{}")
  local p90_timeout=0
  p90_timeout=$(echo "$ci_history" | jq -r '.p90_ci_duration // 0' 2>/dev/null || echo "0")
  if [[ "${p90_timeout:-0}" -gt 0 ]]; then
    # Include CI history in repo context for the composer. If merging fails,
    # keep the original context unchanged.
    if [[ -n "$repo_context" ]]; then
      repo_context=$(echo "$repo_context" | jq --argjson ci "$ci_history" '. + {ci_history: $ci}' 2>/dev/null || echo "$repo_context")
    else
      repo_context="$ci_history"
    fi
    info "CI history: p90 duration=${p90_timeout}s — using for timeout tuning" >&2
  fi

  # Try intelligence-driven composition first; any failure along the way
  # (missing helper, empty output, bad JSON, validation error) falls through
  # to the static template below.
  if [[ "$INTELLIGENCE_AVAILABLE" == "true" ]] && \
     [[ -n "$issue_analysis" ]] && \
     type intelligence_compose_pipeline &>/dev/null; then

    info "Composing pipeline with intelligence engine..." >&2

    local composed=""
    composed=$(intelligence_compose_pipeline "$issue_analysis" "$repo_context" "$budget_json" 2>/dev/null) || true

    if [[ -n "$composed" ]] && echo "$composed" | jq -e '.stages' &>/dev/null; then
      # Validate the composed pipeline (structure + stage ordering).
      if echo "$composed" | composer_validate_pipeline; then
        # Atomic write: pretty-print into a temp file, then rename into place.
        local tmp_file
        tmp_file=$(mktemp "${output_file}.XXXXXX")
        echo "$composed" | jq '.' > "$tmp_file"
        mv "$tmp_file" "$output_file"

        local stage_count
        stage_count=$(echo "$composed" | jq '.stages | length')
        success "Composed pipeline: ${stage_count} stages" >&2

        emit_event "composer.created" \
          "stages=${stage_count}" \
          "source=intelligence" \
          "output=${output_file}"

        echo "$output_file"
        return 0
      else
        warn "Intelligence pipeline failed validation, falling back to template" >&2
      fi
    else
      warn "Intelligence composition returned invalid JSON, falling back to template" >&2
    fi
  fi

  # Fallback: copy the static "standard" template (same atomic-write dance).
  local fallback_template="${TEMPLATES_DIR}/standard.json"
  if [[ -f "$fallback_template" ]]; then
    info "Using fallback template: standard" >&2
    local tmp_file
    tmp_file=$(mktemp "${output_file}.XXXXXX")
    cp "$fallback_template" "$tmp_file"
    mv "$tmp_file" "$output_file"

    emit_event "composer.created" \
      "stages=$(jq '.stages | length' "$output_file")" \
      "source=fallback" \
      "output=${output_file}"

    echo "$output_file"
    return 0
  fi

  error "No templates available for fallback" >&2
  return 1
}
179
+
180
+ # ═══════════════════════════════════════════════════════════════════════════════
181
+ # CONDITIONAL STAGE INSERTION
182
+ # ═══════════════════════════════════════════════════════════════════════════════
183
+
184
# Insert a new stage into the pipeline immediately after an existing stage.
# Args:    $1 pipeline (file path or inline JSON)
#          $2 id of the stage to insert after
#          $3 JSON object for the new stage
# Outputs: the updated pipeline JSON on stdout; returns 1 on bad usage or
#          when the anchor stage is absent.
composer_insert_conditional_stage() {
  local pipeline_input="${1:-}"
  local anchor="${2:-}"
  local stage_json="${3:-}"

  if [[ -z "$pipeline_input" || -z "$anchor" || -z "$stage_json" ]]; then
    error "Usage: composer insert <pipeline_json> <after_stage> <new_stage_config>"
    return 1
  fi

  # Accept either a path to a pipeline file or the JSON document itself.
  local doc
  if [[ -f "$pipeline_input" ]]; then
    doc=$(cat "$pipeline_input")
  else
    doc="$pipeline_input"
  fi

  # Locate the anchor stage by id; -1 / null means it is not in the pipeline.
  local anchor_idx
  anchor_idx=$(echo "$doc" | jq --arg s "$anchor" \
    '[.stages[].id] | to_entries | map(select(.value == $s)) | .[0].key // -1')
  if [[ "$anchor_idx" == "-1" || "$anchor_idx" == "null" ]]; then
    error "Stage '${anchor}' not found in pipeline"
    return 1
  fi

  # Splice the new stage in right after the anchor.
  local slot=$((anchor_idx + 1))
  local updated
  updated=$(echo "$doc" | jq --argjson pos "$slot" --argjson stage "$stage_json" \
    '.stages = (.stages[:$pos] + [$stage] + .stages[$pos:])')

  local inserted_id
  inserted_id=$(echo "$stage_json" | jq -r '.id // "unknown"')

  emit_event "composer.stage_inserted" \
    "new_stage=${inserted_id}" \
    "after=${anchor}" \
    "position=${slot}"

  echo "$updated"
}
231
+
232
+ # ═══════════════════════════════════════════════════════════════════════════════
233
+ # MODEL DOWNGRADE
234
+ # ═══════════════════════════════════════════════════════════════════════════════
235
+
236
# Force every stage at or after <from_stage> — and the pipeline default —
# onto the cheaper "sonnet" model, to keep a running pipeline inside budget.
# Args:    $1 pipeline (file path or inline JSON)  $2 stage id to start from
# Outputs: the updated pipeline JSON on stdout; returns 1 on bad usage or
#          when the stage is absent.
composer_downgrade_models() {
  local pipeline_input="${1:-}"
  local pivot="${2:-}"

  if [[ -z "$pipeline_input" || -z "$pivot" ]]; then
    error "Usage: composer downgrade <pipeline_json> <from_stage>"
    return 1
  fi

  # Accept either a path to a pipeline file or the JSON document itself.
  local doc
  if [[ -f "$pipeline_input" ]]; then
    doc=$(cat "$pipeline_input")
  else
    doc="$pipeline_input"
  fi

  # Locate the pivot stage by id; -1 / null means it is not in the pipeline.
  local pivot_idx
  pivot_idx=$(echo "$doc" | jq --arg s "$pivot" \
    '[.stages[].id] | to_entries | map(select(.value == $s)) | .[0].key // -1')
  if [[ "$pivot_idx" == "-1" || "$pivot_idx" == "null" ]]; then
    error "Stage '${pivot}' not found in pipeline"
    return 1
  fi

  # Rewrite .config.model for every stage from the pivot onwards, then the
  # pipeline-level default if one is set.
  local updated
  updated=$(echo "$doc" | jq --argjson idx "$pivot_idx" '
    .stages = [.stages | to_entries[] |
      if .key >= $idx then
        .value.config.model = "sonnet"
      else . end | .value
    ] |
    if .defaults.model then .defaults.model = "sonnet" else . end
  ')

  local stage_total
  stage_total=$(echo "$doc" | jq '.stages | length')
  local touched=$((stage_total - pivot_idx))

  emit_event "composer.models_downgraded" \
    "from_stage=${pivot}" \
    "stages_affected=${touched}" \
    "target_model=sonnet"

  echo "$updated"
}
287
+
288
+ # ═══════════════════════════════════════════════════════════════════════════════
289
+ # ITERATION ESTIMATION
290
+ # ═══════════════════════════════════════════════════════════════════════════════
291
+
292
# Print the estimated number of build-loop iterations.
# Args:    $1 issue analysis JSON (optional)  $2 historical data (optional)
# Strategy: ask the intelligence engine first (accepting only integers in
# 1..50), then map the analysis complexity rating to a fixed budget, and
# finally fall back to 20.
composer_estimate_iterations() {
  local analysis="${1:-}"
  local history="${2:-}"

  # Intelligence-based estimation, when the engine and input are available.
  if [[ "$INTELLIGENCE_AVAILABLE" == "true" && -n "$analysis" ]] && \
     type intelligence_estimate_iterations &>/dev/null; then
    local guess=""
    guess=$(intelligence_estimate_iterations "$analysis" "$history" 2>/dev/null) || true
    if [[ "$guess" =~ ^[0-9]+$ ]] && (( guess >= 1 && guess <= 50 )); then
      echo "$guess"
      return 0
    fi
  fi

  # Complexity-table fallback, driven by the analysis JSON.
  if [[ -n "$analysis" ]]; then
    local complexity=""
    complexity=$(echo "$analysis" | jq -r '.complexity // empty' 2>/dev/null) || true
    case "${complexity}" in
      trivial)  echo 5;  return 0 ;;
      low)      echo 10; return 0 ;;
      medium)   echo 15; return 0 ;;
      high)     echo 25; return 0 ;;
      critical) echo 35; return 0 ;;
    esac
  fi

  # Last resort: a conservative default.
  echo 20
}
331
+
332
+ # ═══════════════════════════════════════════════════════════════════════════════
333
+ # PIPELINE VALIDATION
334
+ # ═══════════════════════════════════════════════════════════════════════════════
335
+
336
# Validate a pipeline JSON structure.
# Input:   $1 may be a file path or an inline JSON string; with no argument
#          the pipeline is read from stdin.
# Checks:  (1) a .stages array exists, (2) every stage has a non-empty id,
#          (3) enabled stages respect the canonical ordering
#          (intake→build→test→pr, plan→build, review→pr).
# Returns: 0 when valid, 1 with an error message on stderr otherwise.
composer_validate_pipeline() {
  local pipeline_input="${1:-}"
  local pipeline_json

  if [[ -n "$pipeline_input" ]]; then
    if [[ -f "$pipeline_input" ]]; then
      pipeline_json=$(cat "$pipeline_input")
    else
      pipeline_json="$pipeline_input"
    fi
  else
    pipeline_json=$(cat)
  fi

  # Check: stages array exists
  if ! echo "$pipeline_json" | jq -e '.stages' &>/dev/null; then
    error "Validation failed: missing 'stages' array"
    return 1
  fi

  # Check: each stage has an id field (null or empty both count as missing)
  local missing_ids
  missing_ids=$(echo "$pipeline_json" | jq '[.stages[] | select(.id == null or .id == "")] | length')
  if [[ "$missing_ids" -gt 0 ]]; then
    error "Validation failed: ${missing_ids} stage(s) missing 'id' field"
    return 1
  fi

  # Check: stage ordering constraints.
  # intake must come before build, build before test, test before pr.
  # Stages with .enabled == false are excluded from the ordering check.
  local stage_ids
  stage_ids=$(echo "$pipeline_json" | jq -r '[.stages[] | select(.enabled != false) | .id] | join(",")')

  # Helper: check ordering of two stages (only if both are present and enabled).
  # It reads $stage_ids via bash's dynamic scoping of locals.
  # NOTE(review): defining a function inside a function installs _check_order
  # globally as a side effect — harmless here, but worth knowing.
  _check_order() {
    local before="$1"
    local after="$2"
    local ids="$stage_ids"

    # Only check if both stages are in the enabled list
    local has_before=false
    local has_after=false
    local before_pos=-1
    local after_pos=-1
    local pos=0

    # Split the comma-joined id list without touching the caller's IFS.
    local IFS=","
    for sid in $ids; do
      if [[ "$sid" == "$before" ]]; then
        has_before=true
        before_pos=$pos
      fi
      if [[ "$sid" == "$after" ]]; then
        has_after=true
        after_pos=$pos
      fi
      pos=$((pos + 1))
    done

    # Constraint applies only when both stages are actually present.
    if [[ "$has_before" == "true" && "$has_after" == "true" ]]; then
      if [[ "$before_pos" -ge "$after_pos" ]]; then
        error "Validation failed: '${before}' must come before '${after}'"
        return 1
      fi
    fi
    return 0
  }

  _check_order "intake" "build" || return 1
  _check_order "build" "test" || return 1
  _check_order "test" "pr" || return 1
  _check_order "plan" "build" || return 1
  _check_order "review" "pr" || return 1

  return 0
}
414
+
415
+ # ═══════════════════════════════════════════════════════════════════════════════
416
+ # CLI
417
+ # ═══════════════════════════════════════════════════════════════════════════════
418
+
419
# Print CLI usage. %b renders the escape sequences held in the color vars
# (same effect as the `echo -e` idiom used elsewhere in this toolkit).
show_help() {
  printf '%b\n' "${CYAN}${BOLD}shipwright pipeline-composer${RESET} — Dynamic pipeline composition"
  printf '\n'
  printf '%b\n' "${BOLD}Usage:${RESET}"
  printf '%s\n' " sw-pipeline-composer.sh <command> [args...]"
  printf '\n'
  printf '%b\n' "${BOLD}Commands:${RESET}"
  printf '%s\n' " create <analysis> [repo_ctx] [budget] Compose pipeline from analysis"
  printf '%s\n' " insert <pipeline> <after> <stage> Insert stage after specified stage"
  printf '%s\n' " downgrade <pipeline> <from_stage> Downgrade models from stage onwards"
  printf '%s\n' " estimate <analysis> [history] Estimate build iterations"
  printf '%s\n' " validate <pipeline> Validate pipeline structure"
  printf '%s\n' " help Show this help"
  printf '\n'
}
434
+
435
# CLI entry point: dispatch the first argument as a subcommand, passing the
# rest through. No arguments means "help"; unknown commands exit 1.
main() {
  local cmd="${1:-help}"
  shift 2>/dev/null || true  # tolerate being called with zero arguments

  case "$cmd" in
    create)    composer_create_pipeline "$@" ;;
    insert)    composer_insert_conditional_stage "$@" ;;
    downgrade) composer_downgrade_models "$@" ;;
    estimate)  composer_estimate_iterations "$@" ;;
    validate)  composer_validate_pipeline "$@" ;;
    help | --help | -h)
      show_help
      ;;
    *)
      error "Unknown command: $cmd"
      show_help
      exit 1
      ;;
  esac
}
452
+
453
+ if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
454
+ main "$@"
455
+ fi