shipwright-cli 2.4.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. package/README.md +16 -11
  2. package/completions/_shipwright +1 -1
  3. package/completions/shipwright.bash +3 -8
  4. package/completions/shipwright.fish +1 -1
  5. package/config/defaults.json +111 -0
  6. package/config/event-schema.json +81 -0
  7. package/config/policy.json +13 -18
  8. package/dashboard/coverage/coverage-summary.json +14 -0
  9. package/dashboard/public/index.html +1 -1
  10. package/dashboard/server.ts +306 -17
  11. package/dashboard/src/components/charts/bar.test.ts +79 -0
  12. package/dashboard/src/components/charts/donut.test.ts +68 -0
  13. package/dashboard/src/components/charts/pipeline-rail.test.ts +117 -0
  14. package/dashboard/src/components/charts/sparkline.test.ts +125 -0
  15. package/dashboard/src/core/api.test.ts +309 -0
  16. package/dashboard/src/core/helpers.test.ts +301 -0
  17. package/dashboard/src/core/router.test.ts +307 -0
  18. package/dashboard/src/core/router.ts +7 -0
  19. package/dashboard/src/core/sse.test.ts +144 -0
  20. package/dashboard/src/views/metrics.test.ts +186 -0
  21. package/dashboard/src/views/overview.test.ts +173 -0
  22. package/dashboard/src/views/pipelines.test.ts +183 -0
  23. package/dashboard/src/views/team.test.ts +253 -0
  24. package/dashboard/vitest.config.ts +14 -5
  25. package/docs/TIPS.md +1 -1
  26. package/docs/patterns/README.md +1 -1
  27. package/package.json +5 -7
  28. package/scripts/adapters/docker-deploy.sh +1 -1
  29. package/scripts/adapters/tmux-adapter.sh +11 -1
  30. package/scripts/adapters/wezterm-adapter.sh +1 -1
  31. package/scripts/check-version-consistency.sh +1 -1
  32. package/scripts/lib/architecture.sh +126 -0
  33. package/scripts/lib/bootstrap.sh +75 -0
  34. package/scripts/lib/compat.sh +89 -6
  35. package/scripts/lib/config.sh +91 -0
  36. package/scripts/lib/daemon-adaptive.sh +3 -3
  37. package/scripts/lib/daemon-dispatch.sh +39 -16
  38. package/scripts/lib/daemon-health.sh +1 -1
  39. package/scripts/lib/daemon-patrol.sh +24 -12
  40. package/scripts/lib/daemon-poll.sh +37 -25
  41. package/scripts/lib/daemon-state.sh +115 -23
  42. package/scripts/lib/daemon-triage.sh +30 -8
  43. package/scripts/lib/fleet-failover.sh +63 -0
  44. package/scripts/lib/helpers.sh +30 -6
  45. package/scripts/lib/pipeline-detection.sh +2 -2
  46. package/scripts/lib/pipeline-github.sh +9 -9
  47. package/scripts/lib/pipeline-intelligence.sh +85 -35
  48. package/scripts/lib/pipeline-quality-checks.sh +16 -16
  49. package/scripts/lib/pipeline-quality.sh +1 -1
  50. package/scripts/lib/pipeline-stages.sh +242 -28
  51. package/scripts/lib/pipeline-state.sh +40 -4
  52. package/scripts/lib/test-helpers.sh +247 -0
  53. package/scripts/postinstall.mjs +3 -11
  54. package/scripts/sw +10 -4
  55. package/scripts/sw-activity.sh +1 -11
  56. package/scripts/sw-adaptive.sh +109 -85
  57. package/scripts/sw-adversarial.sh +4 -14
  58. package/scripts/sw-architecture-enforcer.sh +1 -11
  59. package/scripts/sw-auth.sh +8 -17
  60. package/scripts/sw-autonomous.sh +111 -49
  61. package/scripts/sw-changelog.sh +1 -11
  62. package/scripts/sw-checkpoint.sh +144 -20
  63. package/scripts/sw-ci.sh +2 -12
  64. package/scripts/sw-cleanup.sh +13 -17
  65. package/scripts/sw-code-review.sh +16 -36
  66. package/scripts/sw-connect.sh +5 -12
  67. package/scripts/sw-context.sh +9 -26
  68. package/scripts/sw-cost.sh +6 -16
  69. package/scripts/sw-daemon.sh +75 -70
  70. package/scripts/sw-dashboard.sh +57 -17
  71. package/scripts/sw-db.sh +506 -15
  72. package/scripts/sw-decompose.sh +1 -11
  73. package/scripts/sw-deps.sh +15 -25
  74. package/scripts/sw-developer-simulation.sh +1 -11
  75. package/scripts/sw-discovery.sh +112 -30
  76. package/scripts/sw-doc-fleet.sh +7 -17
  77. package/scripts/sw-docs-agent.sh +6 -16
  78. package/scripts/sw-docs.sh +4 -12
  79. package/scripts/sw-doctor.sh +134 -43
  80. package/scripts/sw-dora.sh +11 -19
  81. package/scripts/sw-durable.sh +35 -52
  82. package/scripts/sw-e2e-orchestrator.sh +11 -27
  83. package/scripts/sw-eventbus.sh +115 -115
  84. package/scripts/sw-evidence.sh +114 -30
  85. package/scripts/sw-feedback.sh +3 -13
  86. package/scripts/sw-fix.sh +2 -20
  87. package/scripts/sw-fleet-discover.sh +1 -11
  88. package/scripts/sw-fleet-viz.sh +10 -18
  89. package/scripts/sw-fleet.sh +13 -17
  90. package/scripts/sw-github-app.sh +6 -16
  91. package/scripts/sw-github-checks.sh +1 -11
  92. package/scripts/sw-github-deploy.sh +1 -11
  93. package/scripts/sw-github-graphql.sh +2 -12
  94. package/scripts/sw-guild.sh +1 -11
  95. package/scripts/sw-heartbeat.sh +49 -12
  96. package/scripts/sw-hygiene.sh +45 -43
  97. package/scripts/sw-incident.sh +48 -74
  98. package/scripts/sw-init.sh +35 -37
  99. package/scripts/sw-instrument.sh +1 -11
  100. package/scripts/sw-intelligence.sh +362 -51
  101. package/scripts/sw-jira.sh +5 -14
  102. package/scripts/sw-launchd.sh +2 -12
  103. package/scripts/sw-linear.sh +8 -17
  104. package/scripts/sw-logs.sh +4 -12
  105. package/scripts/sw-loop.sh +641 -90
  106. package/scripts/sw-memory.sh +243 -17
  107. package/scripts/sw-mission-control.sh +2 -12
  108. package/scripts/sw-model-router.sh +73 -34
  109. package/scripts/sw-otel.sh +11 -21
  110. package/scripts/sw-oversight.sh +1 -11
  111. package/scripts/sw-patrol-meta.sh +5 -11
  112. package/scripts/sw-pipeline-composer.sh +7 -17
  113. package/scripts/sw-pipeline-vitals.sh +1 -11
  114. package/scripts/sw-pipeline.sh +478 -122
  115. package/scripts/sw-pm.sh +2 -12
  116. package/scripts/sw-pr-lifecycle.sh +27 -25
  117. package/scripts/sw-predictive.sh +16 -22
  118. package/scripts/sw-prep.sh +6 -16
  119. package/scripts/sw-ps.sh +1 -11
  120. package/scripts/sw-public-dashboard.sh +2 -12
  121. package/scripts/sw-quality.sh +77 -10
  122. package/scripts/sw-reaper.sh +1 -11
  123. package/scripts/sw-recruit.sh +15 -25
  124. package/scripts/sw-regression.sh +11 -21
  125. package/scripts/sw-release-manager.sh +19 -28
  126. package/scripts/sw-release.sh +8 -16
  127. package/scripts/sw-remote.sh +1 -11
  128. package/scripts/sw-replay.sh +48 -44
  129. package/scripts/sw-retro.sh +70 -92
  130. package/scripts/sw-review-rerun.sh +1 -1
  131. package/scripts/sw-scale.sh +109 -32
  132. package/scripts/sw-security-audit.sh +12 -22
  133. package/scripts/sw-self-optimize.sh +239 -23
  134. package/scripts/sw-session.sh +3 -13
  135. package/scripts/sw-setup.sh +8 -18
  136. package/scripts/sw-standup.sh +5 -15
  137. package/scripts/sw-status.sh +32 -23
  138. package/scripts/sw-strategic.sh +129 -13
  139. package/scripts/sw-stream.sh +1 -11
  140. package/scripts/sw-swarm.sh +76 -36
  141. package/scripts/sw-team-stages.sh +10 -20
  142. package/scripts/sw-templates.sh +4 -14
  143. package/scripts/sw-testgen.sh +3 -13
  144. package/scripts/sw-tmux-pipeline.sh +1 -19
  145. package/scripts/sw-tmux-role-color.sh +0 -10
  146. package/scripts/sw-tmux-status.sh +3 -11
  147. package/scripts/sw-tmux.sh +2 -20
  148. package/scripts/sw-trace.sh +1 -19
  149. package/scripts/sw-tracker-github.sh +0 -10
  150. package/scripts/sw-tracker-jira.sh +1 -11
  151. package/scripts/sw-tracker-linear.sh +1 -11
  152. package/scripts/sw-tracker.sh +7 -24
  153. package/scripts/sw-triage.sh +24 -34
  154. package/scripts/sw-upgrade.sh +5 -23
  155. package/scripts/sw-ux.sh +1 -19
  156. package/scripts/sw-webhook.sh +18 -32
  157. package/scripts/sw-widgets.sh +3 -21
  158. package/scripts/sw-worktree.sh +11 -27
  159. package/scripts/update-homebrew-sha.sh +67 -0
  160. package/templates/pipelines/tdd.json +72 -0
  161. package/scripts/sw-pipeline.sh.mock +0 -7
@@ -6,11 +6,11 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.4.0"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
 
12
12
  # ─── Dependency check ─────────────────────────────────────────────────────────
13
- if ! command -v jq &>/dev/null; then
13
+ if ! command -v jq >/dev/null 2>&1; then
14
14
  echo "ERROR: sw-scale.sh requires 'jq'. Install with: brew install jq (macOS) or apt install jq (Linux)" >&2
15
15
  exit 1
16
16
  fi
@@ -39,16 +39,6 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
39
39
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
40
40
  }
41
41
  fi
42
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
43
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
44
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
45
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
46
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
47
- RED="${RED:-\033[38;2;248;113;113m}"
48
- DIM="${DIM:-\033[2m}"
49
- BOLD="${BOLD:-\033[1m}"
50
- RESET="${RESET:-\033[0m}"
51
-
52
42
  # ─── Constants ──────────────────────────────────────────────────────────────
53
43
  SCALE_RULES_FILE="${HOME}/.shipwright/scale-rules.json"
54
44
  SCALE_EVENTS_FILE="${HOME}/.shipwright/scale-events.jsonl"
@@ -148,13 +138,19 @@ emit_scale_event() {
148
138
  '{ts: $ts, action: $action, role: $role, reason: $reason, context: $context}')
149
139
 
150
140
  echo "$event" >> "$SCALE_EVENTS_FILE"
151
- type rotate_jsonl &>/dev/null 2>&1 && rotate_jsonl "$SCALE_EVENTS_FILE" 5000
141
+ type rotate_jsonl >/dev/null 2>&1 && rotate_jsonl "$SCALE_EVENTS_FILE" 5000
152
142
  }
153
143
 
154
144
  # ─── Scale Up: spawn new agent ───────────────────────────────────────────
155
145
  cmd_up() {
156
- local role="${1:-builder}"
157
- shift 2>/dev/null || true
146
+ local count="${1:-1}"
147
+ local role="${2:-builder}"
148
+
149
+ # Parse: "up builder" -> count=1 role=builder; "up 2 tester" -> count=2 role=tester
150
+ if ! [[ "$count" =~ ^[0-9]+$ ]]; then
151
+ role="$count"
152
+ count=1
153
+ fi
158
154
 
159
155
  ensure_dirs
160
156
  init_rules
@@ -178,43 +174,124 @@ cmd_up() {
178
174
  local max_size
179
175
  max_size=$(jq -r '.max_team_size // 8' "$SCALE_RULES_FILE")
180
176
 
181
- info "Scaling up team with ${role} agent"
177
+ info "Scaling up team with ${count} ${role} agent(s)"
182
178
  echo -e " Max team size: ${CYAN}${max_size}${RESET}"
183
179
  echo -e " Role: ${CYAN}${role}${RESET}"
184
180
  echo ""
185
181
 
186
- # TODO: Integrate with tmux/SendMessage to spawn agent
187
- # For now, emit event and log
188
- emit_scale_event "up" "$role" "manual" "$*"
182
+ local repo_root
183
+ repo_root=$(git rev-parse --show-toplevel 2>/dev/null) || repo_root="$(cd "$SCRIPT_DIR/.." 2>/dev/null && pwd)"
184
+
185
+ if ! command -v tmux &>/dev/null; then
186
+ warn "tmux not available - cannot spawn agents"
187
+ echo "Install tmux to enable agent scaling: brew install tmux"
188
+ emit_scale_event "up" "$role" "manual" "tmux_unavailable"
189
+ update_scale_state
190
+ success "Scale-up event recorded (role: ${role})"
191
+ echo ""
192
+ echo -e " ${DIM}Note: Actual agent spawn requires tmux (brew install tmux)${RESET}"
193
+ return 1
194
+ fi
195
+
196
+ for i in $(seq 1 "$count"); do
197
+ local agent_name="sw-agent-${role}-$(date +%s)-${i}"
198
+ local session_name="shipwright-${agent_name}"
199
+
200
+ # Spawn a real agent in a tmux session
201
+ tmux new-session -d -s "$session_name" \
202
+ "cd \"${repo_root}\" && SW_AGENT_ROLE=$role SW_AGENT_NAME=$agent_name bash scripts/sw-daemon.sh start --role $role 2>&1 | tee /tmp/sw-agent-${agent_name}.log" 2>/dev/null && {
203
+ emit_scale_event "up" "$role" "agent_started" "agent=$agent_name"
204
+ echo "Started agent $agent_name in tmux session $session_name"
205
+ } || {
206
+ warn "Failed to spawn agent $agent_name in tmux"
207
+ }
208
+ done
209
+
210
+ emit_scale_event "up" "$role" "manual" "count=$count"
189
211
  update_scale_state
190
212
 
191
- success "Scale-up event recorded (role: ${role})"
213
+ success "Scale-up event recorded (role: ${role}, count: ${count})"
192
214
  echo ""
193
- echo -e " ${DIM}Note: Actual agent spawn requires tmux/claude integration${RESET}"
194
215
  }
195
216
 
196
217
  # ─── Scale Down: send shutdown to agent ──────────────────────────────────
197
218
  cmd_down() {
198
- local agent_id="${1:-}"
199
- shift 2>/dev/null || true
219
+ local first_arg="${1:-}"
220
+ local second_arg="${2:-}"
200
221
 
201
- if [[ -z "$agent_id" ]]; then
202
- error "Usage: shipwright scale down <agent-id>"
222
+ # Require at least one argument for backward compat (down <agent-id> or down <count> [role])
223
+ if [[ -z "$first_arg" ]]; then
224
+ error "Usage: shipwright scale down <agent-id|count> [role]"
203
225
  return 1
204
226
  fi
205
227
 
228
+ local count="$first_arg"
229
+ local role="$second_arg"
230
+
231
+ # Backward compat: "down agent-42" -> treat as session/agent identifier
232
+ if [[ -n "$count" ]] && [[ "$count" != *[0-9]* ]] || [[ "$count" == agent-* ]]; then
233
+ # Specific agent/session id
234
+ local session_pattern="*${count}*"
235
+ local sessions
236
+ sessions=$(tmux list-sessions -F '#{session_name}' 2>/dev/null | grep -E "shipwright|swarm" | grep -i "$count" || true)
237
+ if [[ -z "$sessions" ]]; then
238
+ emit_scale_event "down" "unknown" "manual" "agent_id=$count"
239
+ update_scale_state
240
+ success "Scale-down event recorded (agent: ${count})"
241
+ return 0
242
+ fi
243
+ local session
244
+ session=$(echo "$sessions" | head -1)
245
+ tmux kill-session -t "$session" 2>/dev/null && {
246
+ emit_scale_event "down" "unknown" "agent_stopped" "session=$session"
247
+ echo "Stopped agent session: $session"
248
+ }
249
+ emit_scale_event "down" "unknown" "manual" "agent_id=$count"
250
+ update_scale_state
251
+ success "Scale-down event recorded (agent: ${count})"
252
+ return 0
253
+ fi
254
+
255
+ # Numeric count
256
+ if ! [[ "$count" =~ ^[0-9]+$ ]]; then
257
+ count=1
258
+ fi
259
+
206
260
  ensure_dirs
207
261
  init_rules
208
262
 
209
- info "Scaling down agent: ${agent_id}"
210
- echo ""
263
+ # Find running agent sessions
264
+ local sessions
265
+ sessions=$(tmux list-sessions -F '#{session_name}' 2>/dev/null | grep '^shipwright-sw-agent\|^swarm-' || true)
211
266
 
212
- # TODO: Integrate with SendMessage to shut down agent
213
- emit_scale_event "down" "unknown" "manual" "agent_id=$agent_id"
214
- update_scale_state
267
+ if [[ -z "$sessions" ]]; then
268
+ echo "No running agents to stop"
269
+ emit_scale_event "down" "unknown" "manual" "none_running"
270
+ update_scale_state
271
+ return 0
272
+ fi
215
273
 
216
- success "Scale-down event recorded (agent: ${agent_id})"
217
- echo -e " ${DIM}Note: Agent shutdown requires SendMessage integration${RESET}"
274
+ local stopped=0
275
+ while IFS= read -r session; do
276
+ [[ "$stopped" -ge "$count" ]] && break
277
+ [[ -z "$session" ]] && continue
278
+
279
+ # Filter by role if specified
280
+ if [[ -n "$role" ]] && ! echo "$session" | grep -q "$role"; then
281
+ continue
282
+ fi
283
+
284
+ if tmux kill-session -t "$session" 2>/dev/null; then
285
+ stopped=$((stopped + 1))
286
+ emit_scale_event "down" "unknown" "agent_stopped" "session=$session"
287
+ echo "Stopped agent session: $session"
288
+ fi
289
+ done <<< "$sessions"
290
+
291
+ emit_scale_event "down" "unknown" "manual" "count=$stopped"
292
+ update_scale_state
293
+ echo "Stopped $stopped agent(s)"
294
+ success "Scale-down event recorded"
218
295
  }
219
296
 
220
297
  # ─── Manage scaling rules ────────────────────────────────────────────────
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.4.0"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -33,16 +33,6 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
33
33
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
34
34
  }
35
35
  fi
36
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
37
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
38
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
39
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
40
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
41
- RED="${RED:-\033[38;2;248;113;113m}"
42
- DIM="${DIM:-\033[2m}"
43
- BOLD="${BOLD:-\033[1m}"
44
- RESET="${RESET:-\033[0m}"
45
-
46
36
  # ─── Audit State ───────────────────────────────────────────────────────────
47
37
  FINDINGS=()
48
38
  CRITICAL_COUNT=0
@@ -60,10 +50,10 @@ add_finding() {
60
50
 
61
51
  local color=""
62
52
  case "$priority" in
63
- CRITICAL) color="$RED"; ((CRITICAL_COUNT++)) ;;
64
- HIGH) color="$RED"; ((HIGH_COUNT++)) ;;
65
- MEDIUM) color="$YELLOW"; ((MEDIUM_COUNT++)) ;;
66
- LOW) color="$BLUE"; ((LOW_COUNT++)) ;;
53
+ CRITICAL) color="$RED"; CRITICAL_COUNT=$((CRITICAL_COUNT + 1)) ;;
54
+ HIGH) color="$RED"; HIGH_COUNT=$((HIGH_COUNT + 1)) ;;
55
+ MEDIUM) color="$YELLOW"; MEDIUM_COUNT=$((MEDIUM_COUNT + 1)) ;;
56
+ LOW) color="$BLUE"; LOW_COUNT=$((LOW_COUNT + 1)) ;;
67
57
  esac
68
58
 
69
59
  FINDINGS+=("${priority}|${category}|${title}|${description}|${remediation}")
@@ -133,7 +123,7 @@ scan_licenses() {
133
123
  [[ -f "$REPO_DIR/Cargo.toml" ]] && has_cargo=true
134
124
 
135
125
  # Check npm licenses
136
- if $has_npm && command -v npm &>/dev/null; then
126
+ if $has_npm && command -v npm >/dev/null 2>&1; then
137
127
  while IFS= read -r line; do
138
128
  [[ "$line" =~ GPL|AGPL ]] && [[ ! "$line" =~ MIT|Apache|BSD ]] && \
139
129
  add_finding "MEDIUM" "licenses" "GPL/AGPL dependency in npm project" \
@@ -173,10 +163,10 @@ scan_vulnerabilities() {
173
163
  local vuln_count=0
174
164
 
175
165
  # Check npm vulnerabilities
176
- if [[ -f "$REPO_DIR/package.json" ]] && command -v npm &>/dev/null; then
166
+ if [[ -f "$REPO_DIR/package.json" ]] && command -v npm >/dev/null 2>&1; then
177
167
  while IFS= read -r line; do
178
168
  [[ -z "$line" ]] && continue
179
- ((vuln_count++))
169
+ vuln_count=$((vuln_count + 1))
180
170
  add_finding "HIGH" "vulnerabilities" "npm security vulnerability" \
181
171
  "Found npm audit issue: $line" \
182
172
  "Run 'npm audit fix' to remediate. Update vulnerable dependencies. Re-test after updates."
@@ -184,11 +174,11 @@ scan_vulnerabilities() {
184
174
  fi
185
175
 
186
176
  # Check pip vulnerabilities
187
- if [[ -f "$REPO_DIR/requirements.txt" ]] && command -v pip &>/dev/null; then
188
- if command -v safety &>/dev/null; then
177
+ if [[ -f "$REPO_DIR/requirements.txt" ]] && command -v pip >/dev/null 2>&1; then
178
+ if command -v safety >/dev/null 2>&1; then
189
179
  while IFS= read -r line; do
190
180
  [[ -z "$line" ]] && continue
191
- ((vuln_count++))
181
+ vuln_count=$((vuln_count + 1))
192
182
  add_finding "HIGH" "vulnerabilities" "Python package vulnerability" \
193
183
  "Found via safety: $line" \
194
184
  "Update vulnerable package. Test compatibility. Run safety check after updates."
@@ -210,7 +200,7 @@ generate_sbom() {
210
200
  local sbom='{"bomFormat":"CycloneDX","specVersion":"1.4","version":1,"components":[]}'
211
201
 
212
202
  # Add npm packages
213
- if [[ -f "$REPO_DIR/package.json" ]] && command -v npm &>/dev/null; then
203
+ if [[ -f "$REPO_DIR/package.json" ]] && command -v npm >/dev/null 2>&1; then
214
204
  local npm_list
215
205
  npm_list=$(npm list --json 2>/dev/null || echo '{"dependencies":{}}')
216
206
  while IFS='=' read -r name version; do
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.4.0"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -34,19 +34,12 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
34
34
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
35
35
  }
36
36
  fi
37
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
38
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
39
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
40
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
41
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
42
- RED="${RED:-\033[38;2;248;113;113m}"
43
- DIM="${DIM:-\033[2m}"
44
- BOLD="${BOLD:-\033[1m}"
45
- RESET="${RESET:-\033[0m}"
46
-
47
37
  # ─── Structured Event Log ────────────────────────────────────────────────────
48
38
  EVENTS_FILE="${HOME}/.shipwright/events.jsonl"
49
39
 
40
+ # ─── DB for outcome-based learning ─────────────────────────────────────────────
41
+ [[ -f "$SCRIPT_DIR/sw-db.sh" ]] && source "$SCRIPT_DIR/sw-db.sh"
42
+
50
43
  # ─── Storage Paths ───────────────────────────────────────────────────────────
51
44
  OPTIMIZATION_DIR="${HOME}/.shipwright/optimization"
52
45
  OUTCOMES_FILE="${OPTIMIZATION_DIR}/outcomes.jsonl"
@@ -64,13 +57,13 @@ ensure_optimization_dir() {
64
57
  # ─── GitHub Metrics ──────────────────────────────────────────────────────
65
58
 
66
59
  _optimize_github_metrics() {
67
- type _gh_detect_repo &>/dev/null 2>&1 || { echo "{}"; return 0; }
60
+ type _gh_detect_repo >/dev/null 2>&1 || { echo "{}"; return 0; }
68
61
  _gh_detect_repo 2>/dev/null || { echo "{}"; return 0; }
69
62
 
70
63
  local owner="${GH_OWNER:-}" repo="${GH_REPO:-}"
71
64
  [[ -z "$owner" || -z "$repo" ]] && { echo "{}"; return 0; }
72
65
 
73
- if type gh_actions_runs &>/dev/null 2>&1; then
66
+ if type gh_actions_runs >/dev/null 2>&1; then
74
67
  local runs
75
68
  runs=$(gh_actions_runs "$owner" "$repo" "" 50 2>/dev/null || echo "[]")
76
69
  local success_rate avg_duration
@@ -171,7 +164,7 @@ optimize_analyze_outcome() {
171
164
  echo "$outcome_line" >> "$OUTCOMES_FILE"
172
165
 
173
166
  # Rotate outcomes file to prevent unbounded growth
174
- type rotate_jsonl &>/dev/null 2>&1 && rotate_jsonl "$OUTCOMES_FILE" 10000
167
+ type rotate_jsonl >/dev/null 2>&1 && rotate_jsonl "$OUTCOMES_FILE" 10000
175
168
 
176
169
  # Record GitHub CI metrics alongside outcome
177
170
  local gh_ci_metrics
@@ -223,7 +216,7 @@ optimize_ingest_retro() {
223
216
  latest_retro=$(ls -t "$retros_dir"/retro-*.json 2>/dev/null | head -1)
224
217
  [[ -z "$latest_retro" || ! -f "$latest_retro" ]] && return 0
225
218
 
226
- if ! command -v jq &>/dev/null; then
219
+ if ! command -v jq >/dev/null 2>&1; then
227
220
  warn "jq required for retro ingest — skipping"
228
221
  return 0
229
222
  fi
@@ -290,7 +283,7 @@ optimize_ingest_retro() {
290
283
  fi
291
284
  fi
292
285
 
293
- type rotate_jsonl &>/dev/null 2>&1 && rotate_jsonl "$OUTCOMES_FILE" 10000
286
+ type rotate_jsonl >/dev/null 2>&1 && rotate_jsonl "$OUTCOMES_FILE" 10000
294
287
 
295
288
  emit_event "optimize.retro_ingested" \
296
289
  "success_rate=${success_rate:-0}" \
@@ -680,7 +673,7 @@ _optimize_apply_prediction_bias() {
680
673
  fi
681
674
 
682
675
  # Rotate validation file
683
- type rotate_jsonl &>/dev/null 2>&1 && rotate_jsonl "$validation_file" 5000
676
+ type rotate_jsonl >/dev/null 2>&1 && rotate_jsonl "$validation_file" 5000
684
677
  }
685
678
 
686
679
  # ═════════════════════════════════════════════════════════════════════════════
@@ -739,10 +732,13 @@ optimize_route_models() {
739
732
  done
740
733
  done < "$outcomes_file"
741
734
 
742
- # Build routing recommendations
735
+ # Build routing recommendations; extract .routes from existing file when present
743
736
  local routing='{}'
744
- if [[ -f "$MODEL_ROUTING_FILE" ]]; then
745
- routing=$(cat "$MODEL_ROUTING_FILE")
737
+ if [[ -f "$MODEL_ROUTING_FILE" ]] && command -v jq >/dev/null 2>&1; then
738
+ local existing
739
+ existing=$(cat "$MODEL_ROUTING_FILE")
740
+ # Use .routes when present (self-optimize format), else flatten for merge
741
+ routing=$(echo "$existing" | jq -r 'if .routes then .routes else . end | if type == "object" then . else {} end' 2>/dev/null || echo '{}')
746
742
  fi
747
743
 
748
744
  if [[ -f "$tmp_stage_stats" && -s "$tmp_stage_stats" ]]; then
@@ -760,7 +756,7 @@ optimize_route_models() {
760
756
  sonnet_success="${sonnet_success:-0}"
761
757
 
762
758
  if [[ "$sonnet_total" -gt 0 ]]; then
763
- sonnet_rate=$(awk "BEGIN{printf \"%.1f\", ($sonnet_success/$sonnet_total)*100}")
759
+ sonnet_rate=$(awk "BEGIN{printf \"%.1f\", ($sonnet_success/$sonnet_total)*100}" | tr -d '\n')
764
760
  else
765
761
  sonnet_rate="0"
766
762
  fi
@@ -773,7 +769,7 @@ optimize_route_models() {
773
769
  opus_success="${opus_success:-0}"
774
770
 
775
771
  if [[ "$opus_total" -gt 0 ]]; then
776
- opus_rate=$(awk "BEGIN{printf \"%.1f\", ($opus_success/$opus_total)*100}")
772
+ opus_rate=$(awk "BEGIN{printf \"%.1f\", ($opus_success/$opus_total)*100}" | tr -d '\n')
777
773
  else
778
774
  opus_rate="0"
779
775
  fi
@@ -812,7 +808,7 @@ optimize_route_models() {
812
808
  routes: (. | to_entries | map({
813
809
  key: .key,
814
810
  value: {
815
- model: .value.recommended,
811
+ model: (.value.recommended // .value.model),
816
812
  confidence: (if .value.sonnet_samples + .value.opus_samples >= 10 then 0.9
817
813
  elif .value.sonnet_samples + .value.opus_samples >= 5 then 0.7
818
814
  else 0.5 end),
@@ -836,6 +832,129 @@ optimize_route_models() {
836
832
  success "Model routing updated"
837
833
  }
838
834
 
835
+ # ═════════════════════════════════════════════════════════════════════════════
836
+ # OUTCOME-BASED LEARNING: Thompson Sampling & UCB1
837
+ # ═════════════════════════════════════════════════════════════════════════════
838
+
839
+ # Thompson sampling: select template based on historical success rates
840
+ # Uses Beta distribution approximation: sample from Beta(successes+1, failures+1)
841
+ thompson_select_template() {
842
+ local complexity="${1:-medium}"
843
+
844
+ if ! db_available 2>/dev/null; then
845
+ _legacy_template_select "$complexity"
846
+ return
847
+ fi
848
+
849
+ local templates
850
+ templates=$(_db_query "SELECT template,
851
+ SUM(CASE WHEN success = 1 THEN 1 ELSE 0 END) as wins,
852
+ SUM(CASE WHEN success = 0 THEN 1 ELSE 0 END) as losses
853
+ FROM pipeline_outcomes
854
+ WHERE complexity = '$complexity' AND template IS NOT NULL AND template != ''
855
+ GROUP BY template;" 2>/dev/null || echo "")
856
+
857
+ if [[ -z "$templates" ]]; then
858
+ echo "standard"
859
+ return
860
+ fi
861
+
862
+ local best_template="standard"
863
+ local best_score=0
864
+
865
+ while IFS='|' read -r template wins losses; do
866
+ [[ -z "$template" ]] && continue
867
+ template=$(echo "$template" | xargs)
868
+ local alpha=$((wins + 1))
869
+ local beta_param=$((losses + 1))
870
+ local total=$((alpha + beta_param))
871
+ local mean_x1000=$(( (alpha * 1000) / total ))
872
+ local noise=$(( (RANDOM % 200) - 100 ))
873
+ local variance_factor=$(( 1000 / (total + 1) ))
874
+ local score=$(( mean_x1000 + (noise * variance_factor / 100) ))
875
+
876
+ if [[ $score -gt $best_score ]]; then
877
+ best_score=$score
878
+ best_template="$template"
879
+ fi
880
+ done <<< "$templates"
881
+
882
+ echo "$best_template"
883
+ }
884
+
885
+ # Fallback when DB unavailable: map complexity to template
886
+ _legacy_template_select() {
887
+ local complexity="${1:-medium}"
888
+ case "$complexity" in
889
+ low|fast) echo "fast" ;;
890
+ high|full) echo "full" ;;
891
+ *) echo "standard" ;;
892
+ esac
893
+ }
894
+
895
+ # UCB1: select best model for a given stage
896
+ # UCB1 = mean_reward + sqrt(2 * ln(total_trials) / trials_for_this_arm)
897
+ ucb1_select_model() {
898
+ local stage="${1:-build}"
899
+
900
+ if ! db_available 2>/dev/null; then
901
+ echo "sonnet"
902
+ return
903
+ fi
904
+
905
+ local total_trials
906
+ total_trials=$(_db_query "SELECT COUNT(*) FROM model_outcomes WHERE stage = '$stage';" 2>/dev/null || echo "0")
907
+
908
+ if [[ "${total_trials:-0}" -lt 5 ]]; then
909
+ echo ""
910
+ return
911
+ fi
912
+
913
+ local models
914
+ models=$(_db_query "SELECT model,
915
+ AVG(success) as mean_reward,
916
+ COUNT(*) as trials,
917
+ AVG(cost_usd) as avg_cost
918
+ FROM model_outcomes
919
+ WHERE stage = '$stage'
920
+ GROUP BY model;" 2>/dev/null || echo "")
921
+
922
+ if [[ -z "$models" ]]; then
923
+ echo "sonnet"
924
+ return
925
+ fi
926
+
927
+ local best_model="sonnet"
928
+ local best_ucb=0
929
+
930
+ while IFS='|' read -r model mean_reward trials avg_cost; do
931
+ [[ -z "$model" ]] && continue
932
+ model=$(echo "$model" | xargs)
933
+ local mean_x1000 exploration ucb
934
+ mean_x1000=$(echo "$mean_reward" | awk '{printf "%d", $1 * 1000}')
935
+ exploration=$(awk "BEGIN { printf \"%d\", 1000 * sqrt(2 * log($total_trials) / $trials) }" 2>/dev/null || echo "0")
936
+ ucb=$((mean_x1000 + exploration))
937
+
938
+ if [[ $ucb -gt $best_ucb ]]; then
939
+ best_ucb=$ucb
940
+ best_model="$model"
941
+ fi
942
+ done <<< "$models"
943
+
944
+ echo "$best_model"
945
+ }
946
+
947
+ # Record model outcome for UCB1 learning
948
+ record_model_outcome() {
949
+ local model="$1" stage="$2" success="${3:-1}" duration="${4:-0}" cost="${5:-0}"
950
+ if db_available 2>/dev/null; then
951
+ model="${model//\'/\'\'}"
952
+ stage="${stage//\'/\'\'}"
953
+ _db_exec "INSERT INTO model_outcomes (model, stage, success, duration_secs, cost_usd, created_at)
954
+ VALUES ('$model', '$stage', $success, $duration, $cost, '$(now_iso)');" 2>/dev/null || true
955
+ fi
956
+ }
957
+
839
958
  # ═════════════════════════════════════════════════════════════════════════════
840
959
  # RISK KEYWORD LEARNING
841
960
  # ═════════════════════════════════════════════════════════════════════════════
@@ -1062,6 +1181,100 @@ optimize_evolve_memory() {
1062
1181
  success "Memory evolved: pruned=$pruned, strengthened=$strengthened, promoted=$promoted"
1063
1182
  }
1064
1183
 
1184
+ # ═════════════════════════════════════════════════════════════════════════════
1185
+ # QUALITY INDEX (LONGITUDINAL TRACKING)
1186
+ # ═════════════════════════════════════════════════════════════════════════════
1187
+
1188
+ # optimize_track_quality_index
1189
+ # Compute composite quality metrics from last N pipeline outcomes and append to quality-index.jsonl
1190
+ optimize_track_quality_index() {
1191
+ local quality_file="${HOME}/.shipwright/optimization/quality-index.jsonl"
1192
+ mkdir -p "$(dirname "$quality_file")"
1193
+
1194
+ local outcomes_file="${HOME}/.shipwright/optimization/outcomes.jsonl"
1195
+ [[ ! -f "$outcomes_file" ]] && return 0
1196
+
1197
+ if ! command -v jq >/dev/null 2>&1; then
1198
+ return 0
1199
+ fi
1200
+
1201
+ # Get pipeline outcomes only (exclude retro_summary, ci_metrics)
1202
+ local window=20
1203
+ local recent
1204
+ recent=$(jq -c 'select((.type // "") != "retro_summary" and (.type // "") != "ci_metrics")' "$outcomes_file" 2>/dev/null | tail -"$window" || true)
1205
+
1206
+ [[ -z "$recent" ]] && return 0
1207
+
1208
+ local total success_count
1209
+ total=$(echo "$recent" | wc -l | tr -d ' ')
1210
+ [[ "$total" -lt 3 ]] && return 0
1211
+
1212
+ success_count=$(echo "$recent" | jq -c 'select(.result == "success" or .result == "completed")' 2>/dev/null | wc -l | tr -d ' ')
1213
+ success_count="${success_count:-0}"
1214
+
1215
+ local avg_iterations avg_quality
1216
+ avg_iterations=$(echo "$recent" | jq -s '[.[] | .iterations // 0 | tonumber] | if length > 0 then add / length else 0 end' 2>/dev/null || echo "0")
1217
+ # quality_score: use from record if present, else 100 for success/completed, 0 for failed
1218
+ avg_quality=$(echo "$recent" | jq -s '[.[] | .quality_score // (if (.result == "success" or .result == "completed") then 100 else 0 end) | tonumber] | if length > 0 then add / length else 0 end' 2>/dev/null || echo "0")
1219
+
1220
+ local success_rate=0
1221
+ [[ "$total" -gt 0 ]] && success_rate=$((success_count * 100 / total))
1222
+ [[ "$success_rate" -gt 100 ]] && success_rate=100
1223
+
1224
+ # Efficiency (lower iterations = more efficient)
1225
+ local efficiency
1226
+ efficiency=$(awk "BEGIN{if($avg_iterations > 0) printf \"%.1f\", 100 / $avg_iterations; else print 0}" 2>/dev/null || echo "0")
1227
+
1228
+ # Composite quality index (0-100): success_rate 40%, efficiency 30%, quality 30%
1229
+ local quality_index
1230
+ quality_index=$(awk "BEGIN{printf \"%.0f\", ($success_rate * 0.4) + ($efficiency * 0.3) + ($avg_quality * 0.3)}" 2>/dev/null || echo "0")
1231
+
1232
+ local entry="{\"timestamp\":\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\",\"window\":$window,\"total\":$total,\"success_rate\":$success_rate,\"avg_iterations\":$avg_iterations,\"avg_quality\":$avg_quality,\"efficiency\":$efficiency,\"quality_index\":$quality_index}"
1233
+ echo "$entry" >> "$quality_file"
1234
+
1235
+ # Detect trend
1236
+ if [[ -f "$quality_file" ]]; then
1237
+ local line_count
1238
+ line_count=$(wc -l < "$quality_file" 2>/dev/null | tr -d ' ' || echo "0")
1239
+ if [[ "$line_count" -ge 2 ]]; then
1240
+ local prev_index
1241
+ prev_index=$(tail -2 "$quality_file" | head -1 | jq -r '.quality_index // 0' 2>/dev/null || echo "0")
1242
+ local delta
1243
+ delta=$(awk "BEGIN{printf \"%.0f\", $quality_index - $prev_index}" 2>/dev/null || echo "0")
1244
+
1245
+ if [[ "$delta" -gt 5 ]]; then
1246
+ info "Quality index: $quality_index (+${delta}) - IMPROVING"
1247
+ elif [[ "$delta" -lt -5 ]]; then
1248
+ warn "Quality index: $quality_index (${delta}) - DECLINING"
1249
+ else
1250
+ info "Quality index: $quality_index (stable)"
1251
+ fi
1252
+ fi
1253
+ fi
1254
+
1255
+ emit_event "quality.index_updated" "quality_index=$quality_index" "success_rate=$success_rate" 2>/dev/null || true
1256
+ }
1257
+
1258
+ # cmd_quality_index — Show quality trend (last 10 snapshots)
1259
+ cmd_quality_index() {
1260
+ local quality_file="${HOME}/.shipwright/optimization/quality-index.jsonl"
1261
+ if [[ ! -f "$quality_file" ]]; then
1262
+ echo "No quality data yet. Run some pipelines first."
1263
+ return
1264
+ fi
1265
+
1266
+ echo "Quality Index Trend (last 10 snapshots):"
1267
+ echo "========================================="
1268
+ tail -10 "$quality_file" | while IFS= read -r line; do
1269
+ local ts qi sr ai
1270
+ ts=$(echo "$line" | jq -r '.timestamp' 2>/dev/null)
1271
+ qi=$(echo "$line" | jq -r '.quality_index' 2>/dev/null)
1272
+ sr=$(echo "$line" | jq -r '.success_rate' 2>/dev/null)
1273
+ ai=$(echo "$line" | jq -r '.avg_iterations' 2>/dev/null)
1274
+ printf " %s QI: %s Success: %s%% Avg Iters: %s\n" "$ts" "$qi" "$sr" "$ai"
1275
+ done
1276
+ }
1277
+
1065
1278
  # ═════════════════════════════════════════════════════════════════════════════
1066
1279
  # FULL ANALYSIS (DAILY)
1067
1280
  # ═════════════════════════════════════════════════════════════════════════════
@@ -1082,6 +1295,7 @@ optimize_full_analysis() {
1082
1295
  optimize_route_models
1083
1296
  optimize_learn_risk_keywords
1084
1297
  optimize_evolve_memory
1298
+ optimize_track_quality_index 2>/dev/null || true
1085
1299
  optimize_report >> "${OPTIMIZATION_DIR}/last-report.txt" 2>/dev/null || true
1086
1300
  optimize_adjust_audit_intensity 2>/dev/null || true
1087
1301
 
@@ -1272,6 +1486,7 @@ show_help() {
1272
1486
  echo " analyze-outcome <state-file> Analyze a completed pipeline outcome"
1273
1487
  echo " tune Run full optimization analysis"
1274
1488
  echo " report Show optimization report (last 7 days)"
1489
+ echo " quality-index Show quality trend (last 10 snapshots)"
1275
1490
  echo " ingest-retro Ingest most recent retro into optimization loop"
1276
1491
  echo " evolve-memory Prune/strengthen/promote memory patterns"
1277
1492
  echo " help Show this help"
@@ -1296,6 +1511,7 @@ main() {
1296
1511
  tune) optimize_full_analysis ;;
1297
1512
  ingest-retro) optimize_ingest_retro ;;
1298
1513
  report) optimize_report ;;
1514
+ quality-index) cmd_quality_index ;;
1299
1515
  evolve-memory) optimize_evolve_memory ;;
1300
1516
  help|--help|-h) show_help ;;
1301
1517
  *) error "Unknown command: $cmd"; exit 1 ;;