shipwright-cli 2.3.1 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (162)
  1. package/README.md +95 -28
  2. package/completions/_shipwright +1 -1
  3. package/completions/shipwright.bash +3 -8
  4. package/completions/shipwright.fish +1 -1
  5. package/config/defaults.json +111 -0
  6. package/config/event-schema.json +81 -0
  7. package/config/policy.json +155 -2
  8. package/config/policy.schema.json +162 -1
  9. package/dashboard/coverage/coverage-summary.json +14 -0
  10. package/dashboard/public/index.html +1 -1
  11. package/dashboard/server.ts +306 -17
  12. package/dashboard/src/components/charts/bar.test.ts +79 -0
  13. package/dashboard/src/components/charts/donut.test.ts +68 -0
  14. package/dashboard/src/components/charts/pipeline-rail.test.ts +117 -0
  15. package/dashboard/src/components/charts/sparkline.test.ts +125 -0
  16. package/dashboard/src/core/api.test.ts +309 -0
  17. package/dashboard/src/core/helpers.test.ts +301 -0
  18. package/dashboard/src/core/router.test.ts +307 -0
  19. package/dashboard/src/core/router.ts +7 -0
  20. package/dashboard/src/core/sse.test.ts +144 -0
  21. package/dashboard/src/views/metrics.test.ts +186 -0
  22. package/dashboard/src/views/overview.test.ts +173 -0
  23. package/dashboard/src/views/pipelines.test.ts +183 -0
  24. package/dashboard/src/views/team.test.ts +253 -0
  25. package/dashboard/vitest.config.ts +14 -5
  26. package/docs/TIPS.md +1 -1
  27. package/docs/patterns/README.md +1 -1
  28. package/package.json +15 -5
  29. package/scripts/adapters/docker-deploy.sh +1 -1
  30. package/scripts/adapters/tmux-adapter.sh +11 -1
  31. package/scripts/adapters/wezterm-adapter.sh +1 -1
  32. package/scripts/check-version-consistency.sh +1 -1
  33. package/scripts/lib/architecture.sh +126 -0
  34. package/scripts/lib/bootstrap.sh +75 -0
  35. package/scripts/lib/compat.sh +89 -6
  36. package/scripts/lib/config.sh +91 -0
  37. package/scripts/lib/daemon-adaptive.sh +3 -3
  38. package/scripts/lib/daemon-dispatch.sh +39 -16
  39. package/scripts/lib/daemon-health.sh +1 -1
  40. package/scripts/lib/daemon-patrol.sh +24 -12
  41. package/scripts/lib/daemon-poll.sh +37 -25
  42. package/scripts/lib/daemon-state.sh +115 -23
  43. package/scripts/lib/daemon-triage.sh +30 -8
  44. package/scripts/lib/fleet-failover.sh +63 -0
  45. package/scripts/lib/helpers.sh +30 -6
  46. package/scripts/lib/pipeline-detection.sh +2 -2
  47. package/scripts/lib/pipeline-github.sh +9 -9
  48. package/scripts/lib/pipeline-intelligence.sh +85 -35
  49. package/scripts/lib/pipeline-quality-checks.sh +16 -16
  50. package/scripts/lib/pipeline-quality.sh +1 -1
  51. package/scripts/lib/pipeline-stages.sh +242 -28
  52. package/scripts/lib/pipeline-state.sh +40 -4
  53. package/scripts/lib/test-helpers.sh +247 -0
  54. package/scripts/postinstall.mjs +3 -11
  55. package/scripts/sw +10 -4
  56. package/scripts/sw-activity.sh +1 -11
  57. package/scripts/sw-adaptive.sh +109 -85
  58. package/scripts/sw-adversarial.sh +4 -14
  59. package/scripts/sw-architecture-enforcer.sh +1 -11
  60. package/scripts/sw-auth.sh +8 -17
  61. package/scripts/sw-autonomous.sh +111 -49
  62. package/scripts/sw-changelog.sh +1 -11
  63. package/scripts/sw-checkpoint.sh +144 -20
  64. package/scripts/sw-ci.sh +2 -12
  65. package/scripts/sw-cleanup.sh +13 -17
  66. package/scripts/sw-code-review.sh +16 -36
  67. package/scripts/sw-connect.sh +5 -12
  68. package/scripts/sw-context.sh +9 -26
  69. package/scripts/sw-cost.sh +6 -16
  70. package/scripts/sw-daemon.sh +75 -70
  71. package/scripts/sw-dashboard.sh +57 -17
  72. package/scripts/sw-db.sh +506 -15
  73. package/scripts/sw-decompose.sh +1 -11
  74. package/scripts/sw-deps.sh +15 -25
  75. package/scripts/sw-developer-simulation.sh +1 -11
  76. package/scripts/sw-discovery.sh +112 -30
  77. package/scripts/sw-doc-fleet.sh +7 -17
  78. package/scripts/sw-docs-agent.sh +6 -16
  79. package/scripts/sw-docs.sh +4 -12
  80. package/scripts/sw-doctor.sh +134 -43
  81. package/scripts/sw-dora.sh +11 -19
  82. package/scripts/sw-durable.sh +35 -52
  83. package/scripts/sw-e2e-orchestrator.sh +11 -27
  84. package/scripts/sw-eventbus.sh +115 -115
  85. package/scripts/sw-evidence.sh +748 -0
  86. package/scripts/sw-feedback.sh +3 -13
  87. package/scripts/sw-fix.sh +2 -20
  88. package/scripts/sw-fleet-discover.sh +1 -11
  89. package/scripts/sw-fleet-viz.sh +10 -18
  90. package/scripts/sw-fleet.sh +13 -17
  91. package/scripts/sw-github-app.sh +6 -16
  92. package/scripts/sw-github-checks.sh +1 -11
  93. package/scripts/sw-github-deploy.sh +1 -11
  94. package/scripts/sw-github-graphql.sh +2 -12
  95. package/scripts/sw-guild.sh +1 -11
  96. package/scripts/sw-heartbeat.sh +49 -12
  97. package/scripts/sw-hygiene.sh +45 -43
  98. package/scripts/sw-incident.sh +284 -67
  99. package/scripts/sw-init.sh +35 -37
  100. package/scripts/sw-instrument.sh +1 -11
  101. package/scripts/sw-intelligence.sh +362 -51
  102. package/scripts/sw-jira.sh +5 -14
  103. package/scripts/sw-launchd.sh +2 -12
  104. package/scripts/sw-linear.sh +8 -17
  105. package/scripts/sw-logs.sh +4 -12
  106. package/scripts/sw-loop.sh +641 -90
  107. package/scripts/sw-memory.sh +243 -17
  108. package/scripts/sw-mission-control.sh +2 -12
  109. package/scripts/sw-model-router.sh +73 -34
  110. package/scripts/sw-otel.sh +11 -21
  111. package/scripts/sw-oversight.sh +1 -11
  112. package/scripts/sw-patrol-meta.sh +5 -11
  113. package/scripts/sw-pipeline-composer.sh +7 -17
  114. package/scripts/sw-pipeline-vitals.sh +1 -11
  115. package/scripts/sw-pipeline.sh +478 -122
  116. package/scripts/sw-pm.sh +2 -12
  117. package/scripts/sw-pr-lifecycle.sh +203 -29
  118. package/scripts/sw-predictive.sh +16 -22
  119. package/scripts/sw-prep.sh +6 -16
  120. package/scripts/sw-ps.sh +1 -11
  121. package/scripts/sw-public-dashboard.sh +2 -12
  122. package/scripts/sw-quality.sh +77 -10
  123. package/scripts/sw-reaper.sh +1 -11
  124. package/scripts/sw-recruit.sh +15 -25
  125. package/scripts/sw-regression.sh +11 -21
  126. package/scripts/sw-release-manager.sh +19 -28
  127. package/scripts/sw-release.sh +8 -16
  128. package/scripts/sw-remote.sh +1 -11
  129. package/scripts/sw-replay.sh +48 -44
  130. package/scripts/sw-retro.sh +70 -92
  131. package/scripts/sw-review-rerun.sh +220 -0
  132. package/scripts/sw-scale.sh +109 -32
  133. package/scripts/sw-security-audit.sh +12 -22
  134. package/scripts/sw-self-optimize.sh +239 -23
  135. package/scripts/sw-session.sh +3 -13
  136. package/scripts/sw-setup.sh +8 -18
  137. package/scripts/sw-standup.sh +5 -15
  138. package/scripts/sw-status.sh +32 -23
  139. package/scripts/sw-strategic.sh +129 -13
  140. package/scripts/sw-stream.sh +1 -11
  141. package/scripts/sw-swarm.sh +76 -36
  142. package/scripts/sw-team-stages.sh +10 -20
  143. package/scripts/sw-templates.sh +4 -14
  144. package/scripts/sw-testgen.sh +3 -13
  145. package/scripts/sw-tmux-pipeline.sh +1 -19
  146. package/scripts/sw-tmux-role-color.sh +0 -10
  147. package/scripts/sw-tmux-status.sh +3 -11
  148. package/scripts/sw-tmux.sh +2 -20
  149. package/scripts/sw-trace.sh +1 -19
  150. package/scripts/sw-tracker-github.sh +0 -10
  151. package/scripts/sw-tracker-jira.sh +1 -11
  152. package/scripts/sw-tracker-linear.sh +1 -11
  153. package/scripts/sw-tracker.sh +7 -24
  154. package/scripts/sw-triage.sh +24 -34
  155. package/scripts/sw-upgrade.sh +5 -23
  156. package/scripts/sw-ux.sh +1 -19
  157. package/scripts/sw-webhook.sh +18 -32
  158. package/scripts/sw-widgets.sh +3 -21
  159. package/scripts/sw-worktree.sh +11 -27
  160. package/scripts/update-homebrew-sha.sh +67 -0
  161. package/templates/pipelines/tdd.json +72 -0
  162. package/scripts/sw-pipeline.sh.mock +0 -7
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.3.1"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
 
12
12
  # ─── Cross-platform compatibility ──────────────────────────────────────────
@@ -16,6 +16,9 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
16
16
  # Canonical helpers (colors, output, events)
17
17
  # shellcheck source=lib/helpers.sh
18
18
  [[ -f "$SCRIPT_DIR/lib/helpers.sh" ]] && source "$SCRIPT_DIR/lib/helpers.sh"
19
+ # DB layer for dual-read (SQLite + JSONL fallback)
20
+ # shellcheck source=sw-db.sh
21
+ [[ -f "$SCRIPT_DIR/sw-db.sh" ]] && source "$SCRIPT_DIR/sw-db.sh"
19
22
  # Fallbacks when helpers not loaded (e.g. test env with overridden SCRIPT_DIR)
20
23
  [[ "$(type -t info 2>/dev/null)" == "function" ]] || info() { echo -e "\033[38;2;0;212;255m\033[1m▸\033[0m $*"; }
21
24
  [[ "$(type -t success 2>/dev/null)" == "function" ]] || success() { echo -e "\033[38;2;74;222;128m\033[1m✓\033[0m $*"; }
@@ -33,16 +36,6 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
33
36
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
34
37
  }
35
38
  fi
36
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
37
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
38
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
39
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
40
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
41
- RED="${RED:-\033[38;2;248;113;113m}"
42
- DIM="${DIM:-\033[2m}"
43
- BOLD="${BOLD:-\033[1m}"
44
- RESET="${RESET:-\033[0m}"
45
-
46
39
  # ─── Paths ─────────────────────────────────────────────────────────────────
47
40
  EVENTS_FILE="${HOME}/.shipwright/events.jsonl"
48
41
  MODELS_FILE="${HOME}/.shipwright/adaptive-models.json"
@@ -67,9 +60,9 @@ percentile() {
67
60
  local arr="$1"
68
61
  local p="$2"
69
62
  jq -n --arg arr "$arr" --arg p "$p" '
70
- ($arr | fromjson | sort) as $sorted |
63
+ ($arr | fromjson | map(tonumber?) | sort) as $sorted |
71
64
  ($sorted | length) as $len |
72
- (($p / 100) * ($len - 1) | floor) as $idx |
65
+ ((($p | tonumber) / 100) * ($len - 1) | floor) as $idx |
73
66
  if $len == 0 then null
74
67
  elif $idx >= $len - 1 then $sorted[-1]
75
68
  else
@@ -137,27 +130,24 @@ save_models() {
137
130
  query_events() {
138
131
  local field="$1"
139
132
  local value="$2"
140
- if [[ ! -f "$EVENTS_FILE" ]]; then
141
- echo "[]"
142
- return
143
- fi
144
- jq -s "
145
- map(select(.${field} == \"${value}\")) | map(.duration, .iterations, .model, .team_size, .template, .quality_score, .coverage // empty) | flatten
146
- " "$EVENTS_FILE" 2>/dev/null || echo "[]"
133
+ db_query_events "" 5000 | jq "
134
+ map(select(.${field} == \"${value}\")) | map(.duration_s, .iterations, .model, .template, .score, .coverage // empty) | flatten
135
+ " 2>/dev/null || echo "[]"
147
136
  }
148
137
 
149
138
  # ─── Get Timeout Recommendation ─────────────────────────────────────────────
139
+ # Uses stage.completed events (pipeline emits duration_s in seconds)
150
140
  get_timeout() {
151
141
  local stage="${1:-build}"
152
142
  local repo="${2:-.}"
153
143
  local default="${3:-1800}"
154
144
 
155
- # Query events for this stage
145
+ # Query events for this stage (pipeline emits stage.completed with duration_s)
156
146
  local durations
157
- durations=$(jq -s "
158
- map(select(.type == \"stage_complete\" and .stage == \"${stage}\") | .duration // empty) |
159
- map(select(. > 0)) | map(. / 1000) | sort
160
- " "$EVENTS_FILE" 2>/dev/null || echo "[]")
147
+ durations=$(db_query_events "" 5000 | jq "
148
+ map(select(.type == \"stage.completed\" and .stage == \"${stage}\") | .duration_s // empty) |
149
+ map(select(. > 0 and (. | type) == \"number\")) | sort
150
+ " 2>/dev/null || echo "[]")
161
151
 
162
152
  local samples
163
153
  samples=$(echo "$durations" | jq 'length')
@@ -181,17 +171,24 @@ get_timeout() {
181
171
  }
182
172
 
183
173
  # ─── Get Iterations Recommendation ─────────────────────────────────────────
174
+ # Uses pipeline.completed (iterations = self_heal_count + 1) or prediction.validated (actual_iterations)
184
175
  get_iterations() {
185
176
  local complexity="${1:-5}"
186
177
  local stage="${2:-build}"
187
178
  local default="${3:-10}"
188
179
 
189
- # Query events for this complexity band
180
+ # Query pipeline.completed iterations or prediction.validated actual_iterations
190
181
  local iterations_data
191
- iterations_data=$(jq -s "
192
- map(select(.type == \"build_complete\" and .stage == \"${stage}\") | .iterations // empty) |
182
+ iterations_data=$(db_query_events "" 5000 | jq "
183
+ (
184
+ map(select(.type == \"pipeline.completed\") | ((.self_heal_count // 0 | tonumber) + 1)) |
185
+ map(select(. > 0))
186
+ ) + (
187
+ map(select(.type == \"prediction.validated\" and .actual_iterations != null) | .actual_iterations | tonumber) |
188
+ map(select(. > 0))
189
+ ) |
193
190
  map(select(. > 0))
194
- " "$EVENTS_FILE" 2>/dev/null || echo "[]")
191
+ " 2>/dev/null || echo "[]")
195
192
 
196
193
  local samples
197
194
  samples=$(echo "$iterations_data" | jq 'length')
@@ -215,40 +212,43 @@ get_iterations() {
215
212
  }
216
213
 
217
214
  # ─── Get Model Recommendation ───────────────────────────────────────────────
215
+ # Uses model.outcome (pipeline emits .model, .success; no exit_code/token_cost)
218
216
  get_model() {
219
217
  local stage="${1:-build}"
220
218
  local default="${2:-opus}"
221
219
 
222
- # Query events for successful runs by model on this stage
220
+ # Query model.outcome by stage: success=true maps to exit_code=0; estimate cost from model name
223
221
  local model_success
224
- model_success=$(jq -s "
222
+ model_success=$(db_query_events "" 5000 | jq "
223
+ map(select(.type == \"model.outcome\" and (.stage == \"${stage}\" or .stage == null))) |
225
224
  group_by(.model) |
226
225
  map({
227
226
  model: .[0].model,
228
227
  total: length,
229
- success: map(select(.exit_code == 0)) | length,
230
- cost: (map(.token_cost // 0) | add)
228
+ success: map(select(.success == true or .success == \"true\")) | length,
229
+ cost: (.[0].model | if . == \"haiku\" then 1 elif . == \"sonnet\" then 2 else 3 end)
231
230
  }) |
232
231
  map(select(.total >= 5)) |
233
232
  map(select((.success / .total) > 0.9)) |
234
233
  sort_by(.cost) |
235
234
  .[0].model // \"$default\"
236
- " "$EVENTS_FILE" 2>/dev/null || echo "\"$default\"")
235
+ " 2>/dev/null || echo "\"$default\"")
237
236
 
238
237
  echo "$model_success" | tr -d '"'
239
238
  }
240
239
 
241
240
  # ─── Get Team Size Recommendation ───────────────────────────────────────────
241
+ # Pipeline does not emit team_size; use pipeline.started unique agents if available
242
242
  get_team_size() {
243
243
  local complexity="${1:-5}"
244
244
  local default="${2:-2}"
245
245
 
246
- # Query team sizes for similar complexity
246
+ # team_size not emitted by pipeline; return default when no data
247
247
  local team_data
248
- team_data=$(jq -s "
248
+ team_data=$(db_query_events "" 5000 | jq "
249
249
  map(select(.team_size != null) | .team_size) |
250
250
  map(select(. > 0))
251
- " "$EVENTS_FILE" 2>/dev/null || echo "[]")
251
+ " 2>/dev/null || echo "[]")
252
252
 
253
253
  local samples
254
254
  samples=$(echo "$team_data" | jq 'length')
@@ -271,36 +271,38 @@ get_team_size() {
271
271
  }
272
272
 
273
273
  # ─── Get Template Recommendation ────────────────────────────────────────────
274
+ # Uses template.outcome (pipeline emits .template, .success); .complexity if present
274
275
  get_template() {
275
276
  local complexity="${1:-5}"
276
277
  local default="${2:-standard}"
277
278
 
278
- # Find most successful template for similar complexity
279
+ # Find most successful template (template.outcome has .template, .success; .complexity optional)
279
280
  local template
280
- template=$(jq -s "
281
- map(select(.template != null and .complexity_score != null)) |
281
+ template=$(db_query_events "" 5000 | jq "
282
+ map(select(.type == \"template.outcome\" and .template != null)) |
282
283
  group_by(.template) |
283
284
  map({
284
285
  template: .[0].template,
285
- success_rate: (map(select(.exit_code == 0)) | length / length)
286
+ success_rate: (map(select(.success == true or .success == \"true\")) | length / (length | if . > 0 then . else 1 end))
286
287
  }) |
287
288
  sort_by(-.success_rate) |
288
289
  .[0].template // \"$default\"
289
- " "$EVENTS_FILE" 2>/dev/null || echo "\"$default\"")
290
+ " 2>/dev/null || echo "\"$default\"")
290
291
 
291
292
  echo "$template" | tr -d '"'
292
293
  }
293
294
 
294
295
  # ─── Get Poll Interval Recommendation ───────────────────────────────────────
296
+ # daemon.poll emits issues_found, active (no queue_depth); keep default when no data
295
297
  get_poll_interval() {
296
298
  local default="${1:-60}"
297
299
 
298
- # Query queue depths to estimate optimal poll interval
300
+ # queue_update with queue_depth not emitted by pipeline; daemon.poll has issues_found, active
299
301
  local queue_events
300
- queue_events=$(jq -s "
301
- map(select(.type == \"queue_update\") | .queue_depth // 0) |
302
+ queue_events=$(db_query_events "" 5000 | jq "
303
+ map(select(.type == \"daemon.poll\" and .issues_found != null) | .issues_found // 0 | tonumber) |
302
304
  map(select(. > 0))
303
- " "$EVENTS_FILE" 2>/dev/null || echo "[]")
305
+ " 2>/dev/null || echo "[]")
304
306
 
305
307
  local samples
306
308
  samples=$(echo "$queue_events" | jq 'length')
@@ -313,7 +315,7 @@ get_poll_interval() {
313
315
  local mean_queue
314
316
  mean_queue=$(mean "$queue_events")
315
317
 
316
- # Heuristic: deeper queue → shorter interval
318
+ # Heuristic: more issues found → shorter interval
317
319
  local interval
318
320
  interval=$(echo "60 - (${mean_queue} * 2)" | bc 2>/dev/null || echo "$default")
319
321
 
@@ -325,23 +327,24 @@ get_poll_interval() {
325
327
  }
326
328
 
327
329
  # ─── Get Retry Limit Recommendation ────────────────────────────────────────
330
+ # Uses retry.classified (pipeline emits .error_class); success inferred from subsequent stage.completed
328
331
  get_retry_limit() {
329
332
  local error_class="${1:-generic}"
330
333
  local default="${2:-2}"
331
334
 
332
- # Query retry success rate by error class
335
+ # retry.classified has error_class; retries/successes not directly emitted, keep default
333
336
  local retry_data
334
- retry_data=$(jq -s "
335
- map(select(.type == \"retry\" and .error_class != null)) |
337
+ retry_data=$(db_query_events "" 5000 | jq "
338
+ map(select(.type == \"retry.classified\" and .error_class != null)) |
336
339
  group_by(.error_class) |
337
340
  map({
338
341
  error_class: .[0].error_class,
339
- retries: (map(.attempt_count) | add // 0),
340
- successes: (map(select(.exit_code == 0)) | length)
342
+ retries: length,
343
+ successes: (length * 0.5)
341
344
  }) |
342
345
  map(select(.error_class == \"${error_class}\")) |
343
346
  .[0]
344
- " "$EVENTS_FILE" 2>/dev/null || echo "{}")
347
+ " 2>/dev/null || echo "{}")
345
348
 
346
349
  # Extract success rate with safe defaults for missing data
347
350
  local success_rate
@@ -359,17 +362,21 @@ get_retry_limit() {
359
362
  }
360
363
 
361
364
  # ─── Get Quality Threshold Recommendation ───────────────────────────────────
365
+ # Uses build.commit_quality (pipeline emits .score) or pipeline.quality_gate_failed; no exit_code on same event
362
366
  get_quality_threshold() {
363
367
  local default="${1:-70}"
364
368
 
365
- # Query quality score distribution on pass vs fail runs
369
+ # build.commit_quality has .score; pipeline.quality_gate_failed has .quality_score
366
370
  local quality_data
367
- quality_data=$(jq -s "
368
- map(select(.quality_score != null)) |
369
- map(select(.exit_code == 0)) |
370
- map(.quality_score) |
371
+ quality_data=$(db_query_events "" 5000 | jq "
372
+ (
373
+ map(select(.type == \"build.commit_quality\" and .score != null) | .score | tonumber)
374
+ ) + (
375
+ map(select(.type == \"pipeline.quality_gate_failed\" and .quality_score != null) | .quality_score | tonumber)
376
+ ) |
377
+ map(select(. > 0)) |
371
378
  sort
372
- " "$EVENTS_FILE" 2>/dev/null || echo "[]")
379
+ " 2>/dev/null || echo "[]")
373
380
 
374
381
  local samples
375
382
  samples=$(echo "$quality_data" | jq 'length')
@@ -393,16 +400,17 @@ get_quality_threshold() {
393
400
  }
394
401
 
395
402
  # ─── Get Coverage Min Recommendation ────────────────────────────────────────
403
+ # Uses quality.coverage or test.completed (pipeline emits .coverage)
396
404
  get_coverage_min() {
397
405
  local default="${1:-80}"
398
406
 
399
- # Query coverage data on successful vs failed runs
407
+ # quality.coverage and test.completed emit .coverage
400
408
  local coverage_data
401
- coverage_data=$(jq -s "
402
- map(select(.coverage != null and .exit_code == 0)) |
403
- map(.coverage) |
409
+ coverage_data=$(db_query_events "" 5000 | jq "
410
+ map(select((.type == \"quality.coverage\" or .type == \"test.completed\") and .coverage != null) | .coverage | tonumber) |
411
+ map(select(. > 0)) |
404
412
  sort
405
- " "$EVENTS_FILE" 2>/dev/null || echo "[]")
413
+ " 2>/dev/null || echo "[]")
406
414
 
407
415
  local samples
408
416
  samples=$(echo "$coverage_data" | jq 'length')
@@ -503,7 +511,7 @@ cmd_profile() {
503
511
  local timeout_val
504
512
  timeout_val=$(get_timeout "build" "$repo" "1800")
505
513
  local timeout_samples
506
- timeout_samples=$(jq -s "map(select(.type == \"stage_complete\" and .stage == \"build\") | .duration) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
514
+ timeout_samples=$(db_query_events "" 5000 | jq "map(select(.type == \"stage.completed\" and .stage == \"build\") | .duration_s) | length" 2>/dev/null || echo "0")
507
515
  local timeout_conf
508
516
  timeout_conf=$(confidence_level "$timeout_samples")
509
517
  printf "%-25s %-15s %-15s %-12s %-10s\n" "timeout (s)" "$timeout_val" "1800" "$timeout_samples" "$timeout_conf"
@@ -512,7 +520,12 @@ cmd_profile() {
512
520
  local iter_val
513
521
  iter_val=$(get_iterations 5 "build" "10")
514
522
  local iter_samples
515
- iter_samples=$(jq -s "map(select(.type == \"build_complete\" and .stage == \"build\") | .iterations) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
523
+ iter_samples=$(db_query_events "" 5000 | jq "
524
+ (
525
+ map(select(.type == \"pipeline.completed\")) +
526
+ map(select(.type == \"prediction.validated\" and .actual_iterations != null))
527
+ ) | length
528
+ " 2>/dev/null || echo "0")
516
529
  local iter_conf
517
530
  iter_conf=$(confidence_level "$iter_samples")
518
531
  printf "%-25s %-15s %-15s %-12s %-10s\n" "iterations" "$iter_val" "10" "$iter_samples" "$iter_conf"
@@ -521,7 +534,7 @@ cmd_profile() {
521
534
  local model_val
522
535
  model_val=$(get_model "build" "opus")
523
536
  local model_samples
524
- model_samples=$(jq -s "map(select(.model != null and .type == \"pipeline.completed\")) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
537
+ model_samples=$(db_query_events "" 5000 | jq "map(select(.type == \"model.outcome\" and .model != null)) | length" 2>/dev/null || echo "0")
525
538
  local model_conf
526
539
  model_conf=$(confidence_level "$model_samples")
527
540
  printf "%-25s %-15s %-15s %-12s %-10s\n" "model" "$model_val" "opus" "$model_samples" "$model_conf"
@@ -530,7 +543,7 @@ cmd_profile() {
530
543
  local team_val
531
544
  team_val=$(get_team_size 5 "2")
532
545
  local team_samples
533
- team_samples=$(jq -s "map(select(.team_size != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
546
+ team_samples=$(db_query_events "" 5000 | jq "map(select(.team_size != null)) | length" 2>/dev/null || echo "0")
534
547
  local team_conf
535
548
  team_conf=$(confidence_level "$team_samples")
536
549
  printf "%-25s %-15s %-15s %-12s %-10s\n" "team_size" "$team_val" "2" "$team_samples" "$team_conf"
@@ -539,7 +552,7 @@ cmd_profile() {
539
552
  local template_val
540
553
  template_val=$(get_template 5 "standard")
541
554
  local template_samples
542
- template_samples=$(jq -s "map(select(.template != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
555
+ template_samples=$(db_query_events "" 5000 | jq "map(select(.type == \"template.outcome\" and .template != null)) | length" 2>/dev/null || echo "0")
543
556
  local template_conf
544
557
  template_conf=$(confidence_level "$template_samples")
545
558
  printf "%-25s %-15s %-15s %-12s %-10s\n" "template" "$template_val" "standard" "$template_samples" "$template_conf"
@@ -547,7 +560,8 @@ cmd_profile() {
547
560
  # Poll interval
548
561
  local poll_val
549
562
  poll_val=$(get_poll_interval "60")
550
- local poll_samples=0
563
+ local poll_samples
564
+ poll_samples=$(db_query_events "" 5000 | jq "map(select(.type == \"daemon.poll\")) | length" 2>/dev/null || echo "0")
551
565
  local poll_conf
552
566
  poll_conf=$(confidence_level "$poll_samples")
553
567
  printf "%-25s %-15s %-15s %-12s %-10s\n" "poll_interval (s)" "$poll_val" "60" "$poll_samples" "$poll_conf"
@@ -556,7 +570,12 @@ cmd_profile() {
556
570
  local quality_val
557
571
  quality_val=$(get_quality_threshold "70")
558
572
  local quality_samples
559
- quality_samples=$(jq -s "map(select(.quality_score != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
573
+ quality_samples=$(db_query_events "" 5000 | jq "
574
+ (
575
+ map(select(.type == \"build.commit_quality\" and .score != null)) +
576
+ map(select(.type == \"pipeline.quality_gate_failed\" and .quality_score != null))
577
+ ) | length
578
+ " 2>/dev/null || echo "0")
560
579
  local quality_conf
561
580
  quality_conf=$(confidence_level "$quality_samples")
562
581
  printf "%-25s %-15s %-15s %-12s %-10s\n" "quality_threshold" "$quality_val" "70" "$quality_samples" "$quality_conf"
@@ -565,7 +584,9 @@ cmd_profile() {
565
584
  local coverage_val
566
585
  coverage_val=$(get_coverage_min "80")
567
586
  local coverage_samples
568
- coverage_samples=$(jq -s "map(select(.coverage != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo "0")
587
+ coverage_samples=$(db_query_events "" 5000 | jq "
588
+ map(select((.type == \"quality.coverage\" or .type == \"test.completed\") and .coverage != null)) | length
589
+ " 2>/dev/null || echo "0")
569
590
  local coverage_conf
570
591
  coverage_conf=$(confidence_level "$coverage_samples")
571
592
  printf "%-25s %-15s %-15s %-12s %-10s\n" "coverage_min (%)" "$coverage_val" "80" "$coverage_samples" "$coverage_conf"
@@ -584,41 +605,44 @@ cmd_train() {
584
605
  esac
585
606
  done
586
607
 
587
- if [[ ! -f "$EVENTS_FILE" ]]; then
588
- warn "No events file found: $EVENTS_FILE"
608
+ local event_count
609
+ event_count=$(db_query_events "" 5000 | jq 'length' 2>/dev/null || echo 0)
610
+ if [[ "${event_count:-0}" -eq 0 ]]; then
611
+ warn "No events found (checked DB and JSONL fallback)"
589
612
  return 1
590
613
  fi
591
614
 
592
- info "Training adaptive models from ${CYAN}${EVENTS_FILE}${RESET}"
593
-
594
- local event_count
595
- event_count=$(jq -s 'length' "$EVENTS_FILE" 2>/dev/null || echo 0)
615
+ info "Training adaptive models from events (DB + JSONL fallback)"
596
616
  info "Processing ${CYAN}${event_count}${RESET} events..."
597
617
 
598
618
  # Build comprehensive models JSON using jq directly
599
619
  local timeout_learned timeout_samples
600
620
  timeout_learned=$(get_timeout "build" "$repo" "1800")
601
- timeout_samples=$(jq -s "map(select(.type == \"stage_complete\" and .stage == \"build\") | .duration) | length" "$EVENTS_FILE" 2>/dev/null || echo 0)
621
+ timeout_samples=$(db_query_events "" 5000 | jq "map(select(.type == \"stage.completed\" and .stage == \"build\") | .duration_s) | length" 2>/dev/null || echo 0)
602
622
 
603
623
  local iterations_learned iterations_samples
604
624
  iterations_learned=$(get_iterations 5 "build" "10")
605
- iterations_samples=$(jq -s "map(select(.type == \"build_complete\") | .iterations) | length" "$EVENTS_FILE" 2>/dev/null || echo 0)
625
+ iterations_samples=$(db_query_events "" 5000 | jq "
626
+ (map(select(.type == \"pipeline.completed\")) + map(select(.type == \"prediction.validated\" and .actual_iterations != null))) | length
627
+ " 2>/dev/null || echo 0)
606
628
 
607
629
  local model_learned model_samples
608
630
  model_learned=$(get_model "build" "opus")
609
- model_samples=$(jq -s "map(select(.model != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo 0)
631
+ model_samples=$(db_query_events "" 5000 | jq "map(select(.type == \"model.outcome\" and .model != null)) | length" 2>/dev/null || echo 0)
610
632
 
611
633
  local team_learned team_samples
612
634
  team_learned=$(get_team_size 5 "2")
613
- team_samples=$(jq -s "map(select(.team_size != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo 0)
635
+ team_samples=$(db_query_events "" 5000 | jq "map(select(.team_size != null)) | length" 2>/dev/null || echo 0)
614
636
 
615
637
  local quality_learned quality_samples
616
638
  quality_learned=$(get_quality_threshold "70")
617
- quality_samples=$(jq -s "map(select(.quality_score != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo 0)
639
+ quality_samples=$(db_query_events "" 5000 | jq "
640
+ (map(select(.type == \"build.commit_quality\" and .score != null)) + map(select(.type == \"pipeline.quality_gate_failed\" and .quality_score != null))) | length
641
+ " 2>/dev/null || echo 0)
618
642
 
619
643
  local coverage_learned coverage_samples
620
644
  coverage_learned=$(get_coverage_min "80")
621
- coverage_samples=$(jq -s "map(select(.coverage != null)) | length" "$EVENTS_FILE" 2>/dev/null || echo 0)
645
+ coverage_samples=$(db_query_events "" 5000 | jq "map(select((.type == \"quality.coverage\" or .type == \"test.completed\") and .coverage != null)) | length" 2>/dev/null || echo 0)
622
646
 
623
647
  local trained_at
624
648
  trained_at=$(now_iso)
@@ -775,7 +799,7 @@ cmd_recommend() {
775
799
  poll_interval: $(get_poll_interval "60"),
776
800
  coverage_min: $(get_coverage_min "80"),
777
801
  confidence: \"high\",
778
- reasoning: \"Based on $(jq -s 'length' "$EVENTS_FILE" 2>/dev/null || echo 0) historical events\"
802
+ reasoning: \"Based on $(db_query_events "" 5000 | jq 'length' 2>/dev/null || echo 0) historical events\"
779
803
  }")
780
804
 
781
805
  echo "$recommendation" | jq .
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.3.1"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -34,16 +34,6 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
34
34
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
35
35
  }
36
36
  fi
37
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
38
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
39
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
40
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
41
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
42
- RED="${RED:-\033[38;2;248;113;113m}"
43
- DIM="${DIM:-\033[2m}"
44
- BOLD="${BOLD:-\033[1m}"
45
- RESET="${RESET:-\033[0m}"
46
-
47
37
  # ─── Source Intelligence Core ─────────────────────────────────────────────
48
38
  if [[ -f "$SCRIPT_DIR/sw-intelligence.sh" ]]; then
49
39
  source "$SCRIPT_DIR/sw-intelligence.sh"
@@ -69,14 +59,14 @@ _adversarial_security_context() {
69
59
  local diff_paths="$1"
70
60
  local context=""
71
61
 
72
- type _gh_detect_repo &>/dev/null 2>&1 || { echo ""; return 0; }
62
+ type _gh_detect_repo >/dev/null 2>&1 || { echo ""; return 0; }
73
63
  _gh_detect_repo 2>/dev/null || { echo ""; return 0; }
74
64
 
75
65
  local owner="${GH_OWNER:-}" repo="${GH_REPO:-}"
76
66
  [[ -z "$owner" || -z "$repo" ]] && { echo ""; return 0; }
77
67
 
78
68
  # Get CodeQL alerts for changed files
79
- if type gh_security_alerts &>/dev/null 2>&1; then
69
+ if type gh_security_alerts >/dev/null 2>&1; then
80
70
  local alerts
81
71
  alerts=$(gh_security_alerts "$owner" "$repo" 2>/dev/null || echo "[]")
82
72
  local relevant_alerts
@@ -94,7 +84,7 @@ ${alert_summary}
94
84
  fi
95
85
 
96
86
  # Get Dependabot alerts
97
- if type gh_dependabot_alerts &>/dev/null 2>&1; then
87
+ if type gh_dependabot_alerts >/dev/null 2>&1; then
98
88
  local dep_alerts
99
89
  dep_alerts=$(gh_dependabot_alerts "$owner" "$repo" 2>/dev/null || echo "[]")
100
90
  local dep_count
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.3.1"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -34,16 +34,6 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
34
34
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
35
35
  }
36
36
  fi
37
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
38
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
39
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
40
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
41
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
42
- RED="${RED:-\033[38;2;248;113;113m}"
43
- DIM="${DIM:-\033[2m}"
44
- BOLD="${BOLD:-\033[1m}"
45
- RESET="${RESET:-\033[0m}"
46
-
47
37
  # ─── Structured Event Log ────────────────────────────────────────────────
48
38
  EVENTS_FILE="${HOME}/.shipwright/events.jsonl"
49
39
 
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.3.1"
9
+ VERSION="3.0.0"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
  REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
12
12
 
@@ -17,6 +17,7 @@ REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
17
17
  # Canonical helpers (colors, output, events)
18
18
  # shellcheck source=lib/helpers.sh
19
19
  [[ -f "$SCRIPT_DIR/lib/helpers.sh" ]] && source "$SCRIPT_DIR/lib/helpers.sh"
20
+ [[ -f "$SCRIPT_DIR/lib/config.sh" ]] && source "$SCRIPT_DIR/lib/config.sh"
20
21
  # Fallbacks when helpers not loaded (e.g. test env with overridden SCRIPT_DIR)
21
22
  [[ "$(type -t info 2>/dev/null)" == "function" ]] || info() { echo -e "\033[38;2;0;212;255m\033[1m▸\033[0m $*"; }
22
23
  [[ "$(type -t success 2>/dev/null)" == "function" ]] || success() { echo -e "\033[38;2;74;222;128m\033[1m✓\033[0m $*"; }
@@ -34,20 +35,10 @@ if [[ "$(type -t emit_event 2>/dev/null)" != "function" ]]; then
34
35
  echo "${payload}}" >> "${HOME}/.shipwright/events.jsonl"
35
36
  }
36
37
  fi
37
- CYAN="${CYAN:-\033[38;2;0;212;255m}"
38
- PURPLE="${PURPLE:-\033[38;2;124;58;237m}"
39
- BLUE="${BLUE:-\033[38;2;0;102;255m}"
40
- GREEN="${GREEN:-\033[38;2;74;222;128m}"
41
- YELLOW="${YELLOW:-\033[38;2;250;204;21m}"
42
- RED="${RED:-\033[38;2;248;113;113m}"
43
- DIM="${DIM:-\033[2m}"
44
- BOLD="${BOLD:-\033[1m}"
45
- RESET="${RESET:-\033[0m}"
46
-
47
38
  # ─── Auth Storage ───────────────────────────────────────────────────────────
48
39
  AUTH_FILE="${HOME}/.shipwright/auth.json"
49
- DEVICE_FLOW_ENDPOINT="https://github.com/login/device"
50
- API_ENDPOINT="https://api.github.com"
40
+ DEVICE_FLOW_ENDPOINT="$(_config_get "urls.github_device_login" "https://github.com/login/device")"
41
+ API_ENDPOINT="$(_config_get "urls.github_api" "https://api.github.com")"
51
42
  OAUTH_CLIENT_ID="${GITHUB_OAUTH_CLIENT_ID:-Iv1.d3f6a7e8c9b2a1d4}" # Shipwright app ID
52
43
  OAUTH_TIMEOUT=900 # 15 minutes
53
44
 
@@ -65,7 +56,7 @@ ensure_auth_dir() {
65
56
  # Returns device_code, user_code, interval, expires_in
66
57
  initiate_device_flow() {
67
58
  local response
68
- response=$(curl -s -X POST \
59
+ response=$(curl -s --connect-timeout 10 --max-time 30 -X POST \
69
60
  -H "Accept: application/json" \
70
61
  "${API_ENDPOINT}/login/device/code" \
71
62
  -d "client_id=${OAUTH_CLIENT_ID}&scope=read:user%20user:email" 2>/dev/null) || {
@@ -113,7 +104,7 @@ poll_for_token() {
113
104
  fi
114
105
 
115
106
  local response
116
- response=$(curl -s -X POST \
107
+ response=$(curl -s --connect-timeout 10 --max-time 30 -X POST \
117
108
  -H "Accept: application/json" \
118
109
  "${API_ENDPOINT}/login/oauth/access_token" \
119
110
  -d "client_id=${OAUTH_CLIENT_ID}&device_code=${device_code}&grant_type=urn:ietf:params:oauth:grant-type:device_code" 2>/dev/null) || {
@@ -161,7 +152,7 @@ fetch_user_info() {
161
152
  local token="$1"
162
153
  local response
163
154
 
164
- response=$(curl -s -H "Authorization: Bearer ${token}" \
155
+ response=$(curl -s --connect-timeout 10 --max-time 30 -H "Authorization: Bearer ${token}" \
165
156
  -H "Accept: application/vnd.github.v3+json" \
166
157
  "${API_ENDPOINT}/user" 2>/dev/null) || {
167
158
  error "Failed to fetch user info"
@@ -182,7 +173,7 @@ fetch_user_info() {
182
173
  validate_token() {
183
174
  local token="$1"
184
175
 
185
- if ! curl -s -f \
176
+ if ! curl -s -f --connect-timeout 10 --max-time 30 \
186
177
  -H "Authorization: Bearer ${token}" \
187
178
  -H "Accept: application/vnd.github.v3+json" \
188
179
  "${API_ENDPOINT}/user" >/dev/null 2>&1; then