shipwright-cli 2.4.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169) hide show
  1. package/README.md +16 -11
  2. package/completions/_shipwright +248 -94
  3. package/completions/shipwright.bash +68 -19
  4. package/completions/shipwright.fish +310 -42
  5. package/config/decision-tiers.json +55 -0
  6. package/config/defaults.json +111 -0
  7. package/config/event-schema.json +218 -0
  8. package/config/policy.json +21 -18
  9. package/dashboard/coverage/coverage-summary.json +14 -0
  10. package/dashboard/public/index.html +1 -1
  11. package/dashboard/server.ts +306 -17
  12. package/dashboard/src/components/charts/bar.test.ts +79 -0
  13. package/dashboard/src/components/charts/donut.test.ts +68 -0
  14. package/dashboard/src/components/charts/pipeline-rail.test.ts +117 -0
  15. package/dashboard/src/components/charts/sparkline.test.ts +125 -0
  16. package/dashboard/src/core/api.test.ts +309 -0
  17. package/dashboard/src/core/helpers.test.ts +301 -0
  18. package/dashboard/src/core/router.test.ts +307 -0
  19. package/dashboard/src/core/router.ts +7 -0
  20. package/dashboard/src/core/sse.test.ts +144 -0
  21. package/dashboard/src/views/metrics.test.ts +186 -0
  22. package/dashboard/src/views/overview.test.ts +173 -0
  23. package/dashboard/src/views/pipelines.test.ts +183 -0
  24. package/dashboard/src/views/team.test.ts +253 -0
  25. package/dashboard/vitest.config.ts +14 -5
  26. package/docs/TIPS.md +1 -1
  27. package/docs/patterns/README.md +1 -1
  28. package/package.json +7 -9
  29. package/scripts/adapters/docker-deploy.sh +1 -1
  30. package/scripts/adapters/tmux-adapter.sh +11 -1
  31. package/scripts/adapters/wezterm-adapter.sh +1 -1
  32. package/scripts/check-version-consistency.sh +1 -1
  33. package/scripts/lib/architecture.sh +127 -0
  34. package/scripts/lib/bootstrap.sh +75 -0
  35. package/scripts/lib/compat.sh +89 -6
  36. package/scripts/lib/config.sh +91 -0
  37. package/scripts/lib/daemon-adaptive.sh +3 -3
  38. package/scripts/lib/daemon-dispatch.sh +63 -17
  39. package/scripts/lib/daemon-failure.sh +0 -0
  40. package/scripts/lib/daemon-health.sh +1 -1
  41. package/scripts/lib/daemon-patrol.sh +64 -17
  42. package/scripts/lib/daemon-poll.sh +54 -25
  43. package/scripts/lib/daemon-state.sh +125 -23
  44. package/scripts/lib/daemon-triage.sh +31 -9
  45. package/scripts/lib/decide-autonomy.sh +295 -0
  46. package/scripts/lib/decide-scoring.sh +228 -0
  47. package/scripts/lib/decide-signals.sh +462 -0
  48. package/scripts/lib/fleet-failover.sh +63 -0
  49. package/scripts/lib/helpers.sh +29 -6
  50. package/scripts/lib/pipeline-detection.sh +2 -2
  51. package/scripts/lib/pipeline-github.sh +9 -9
  52. package/scripts/lib/pipeline-intelligence.sh +105 -38
  53. package/scripts/lib/pipeline-quality-checks.sh +17 -16
  54. package/scripts/lib/pipeline-quality.sh +1 -1
  55. package/scripts/lib/pipeline-stages.sh +440 -59
  56. package/scripts/lib/pipeline-state.sh +54 -4
  57. package/scripts/lib/policy.sh +0 -0
  58. package/scripts/lib/test-helpers.sh +247 -0
  59. package/scripts/postinstall.mjs +78 -12
  60. package/scripts/signals/example-collector.sh +36 -0
  61. package/scripts/sw +17 -7
  62. package/scripts/sw-activity.sh +1 -11
  63. package/scripts/sw-adaptive.sh +109 -85
  64. package/scripts/sw-adversarial.sh +4 -14
  65. package/scripts/sw-architecture-enforcer.sh +1 -11
  66. package/scripts/sw-auth.sh +8 -17
  67. package/scripts/sw-autonomous.sh +111 -49
  68. package/scripts/sw-changelog.sh +1 -11
  69. package/scripts/sw-checkpoint.sh +144 -20
  70. package/scripts/sw-ci.sh +2 -12
  71. package/scripts/sw-cleanup.sh +13 -17
  72. package/scripts/sw-code-review.sh +16 -36
  73. package/scripts/sw-connect.sh +5 -12
  74. package/scripts/sw-context.sh +9 -26
  75. package/scripts/sw-cost.sh +17 -18
  76. package/scripts/sw-daemon.sh +76 -71
  77. package/scripts/sw-dashboard.sh +57 -17
  78. package/scripts/sw-db.sh +524 -26
  79. package/scripts/sw-decide.sh +685 -0
  80. package/scripts/sw-decompose.sh +1 -11
  81. package/scripts/sw-deps.sh +15 -25
  82. package/scripts/sw-developer-simulation.sh +1 -11
  83. package/scripts/sw-discovery.sh +138 -30
  84. package/scripts/sw-doc-fleet.sh +7 -17
  85. package/scripts/sw-docs-agent.sh +6 -16
  86. package/scripts/sw-docs.sh +4 -12
  87. package/scripts/sw-doctor.sh +134 -43
  88. package/scripts/sw-dora.sh +11 -19
  89. package/scripts/sw-durable.sh +35 -52
  90. package/scripts/sw-e2e-orchestrator.sh +11 -27
  91. package/scripts/sw-eventbus.sh +115 -115
  92. package/scripts/sw-evidence.sh +114 -30
  93. package/scripts/sw-feedback.sh +3 -13
  94. package/scripts/sw-fix.sh +2 -20
  95. package/scripts/sw-fleet-discover.sh +1 -11
  96. package/scripts/sw-fleet-viz.sh +10 -18
  97. package/scripts/sw-fleet.sh +13 -17
  98. package/scripts/sw-github-app.sh +6 -16
  99. package/scripts/sw-github-checks.sh +1 -11
  100. package/scripts/sw-github-deploy.sh +1 -11
  101. package/scripts/sw-github-graphql.sh +2 -12
  102. package/scripts/sw-guild.sh +1 -11
  103. package/scripts/sw-heartbeat.sh +49 -12
  104. package/scripts/sw-hygiene.sh +45 -43
  105. package/scripts/sw-incident.sh +48 -74
  106. package/scripts/sw-init.sh +35 -37
  107. package/scripts/sw-instrument.sh +1 -11
  108. package/scripts/sw-intelligence.sh +368 -53
  109. package/scripts/sw-jira.sh +5 -14
  110. package/scripts/sw-launchd.sh +2 -12
  111. package/scripts/sw-linear.sh +8 -17
  112. package/scripts/sw-logs.sh +4 -12
  113. package/scripts/sw-loop.sh +905 -104
  114. package/scripts/sw-memory.sh +263 -20
  115. package/scripts/sw-mission-control.sh +2 -12
  116. package/scripts/sw-model-router.sh +73 -34
  117. package/scripts/sw-otel.sh +15 -23
  118. package/scripts/sw-oversight.sh +1 -11
  119. package/scripts/sw-patrol-meta.sh +5 -11
  120. package/scripts/sw-pipeline-composer.sh +7 -17
  121. package/scripts/sw-pipeline-vitals.sh +1 -11
  122. package/scripts/sw-pipeline.sh +550 -122
  123. package/scripts/sw-pm.sh +2 -12
  124. package/scripts/sw-pr-lifecycle.sh +33 -28
  125. package/scripts/sw-predictive.sh +16 -22
  126. package/scripts/sw-prep.sh +6 -16
  127. package/scripts/sw-ps.sh +1 -11
  128. package/scripts/sw-public-dashboard.sh +2 -12
  129. package/scripts/sw-quality.sh +85 -14
  130. package/scripts/sw-reaper.sh +1 -11
  131. package/scripts/sw-recruit.sh +15 -25
  132. package/scripts/sw-regression.sh +11 -21
  133. package/scripts/sw-release-manager.sh +19 -28
  134. package/scripts/sw-release.sh +8 -16
  135. package/scripts/sw-remote.sh +1 -11
  136. package/scripts/sw-replay.sh +48 -44
  137. package/scripts/sw-retro.sh +70 -92
  138. package/scripts/sw-review-rerun.sh +1 -1
  139. package/scripts/sw-scale.sh +174 -41
  140. package/scripts/sw-security-audit.sh +12 -22
  141. package/scripts/sw-self-optimize.sh +239 -23
  142. package/scripts/sw-session.sh +5 -15
  143. package/scripts/sw-setup.sh +8 -18
  144. package/scripts/sw-standup.sh +5 -15
  145. package/scripts/sw-status.sh +32 -23
  146. package/scripts/sw-strategic.sh +129 -13
  147. package/scripts/sw-stream.sh +1 -11
  148. package/scripts/sw-swarm.sh +76 -36
  149. package/scripts/sw-team-stages.sh +10 -20
  150. package/scripts/sw-templates.sh +4 -14
  151. package/scripts/sw-testgen.sh +3 -13
  152. package/scripts/sw-tmux-pipeline.sh +1 -19
  153. package/scripts/sw-tmux-role-color.sh +0 -10
  154. package/scripts/sw-tmux-status.sh +3 -11
  155. package/scripts/sw-tmux.sh +2 -20
  156. package/scripts/sw-trace.sh +1 -19
  157. package/scripts/sw-tracker-github.sh +0 -10
  158. package/scripts/sw-tracker-jira.sh +1 -11
  159. package/scripts/sw-tracker-linear.sh +1 -11
  160. package/scripts/sw-tracker.sh +7 -24
  161. package/scripts/sw-triage.sh +29 -39
  162. package/scripts/sw-upgrade.sh +5 -23
  163. package/scripts/sw-ux.sh +1 -19
  164. package/scripts/sw-webhook.sh +18 -32
  165. package/scripts/sw-widgets.sh +3 -21
  166. package/scripts/sw-worktree.sh +11 -27
  167. package/scripts/update-homebrew-sha.sh +73 -0
  168. package/templates/pipelines/tdd.json +72 -0
  169. package/scripts/sw-pipeline.sh.mock +0 -7
@@ -9,11 +9,14 @@
9
9
  #
10
10
  # Provides:
11
11
  # - NO_COLOR / dumb terminal / non-tty detection (auto-blanks color vars)
12
+ # - _to_lower() / _to_upper() — bash 3.2 compat (${var,,}/${var^^} require bash 4+)
13
+ # - file_mtime() — cross-platform file modification time (epoch)
12
14
  # - sed_i() — cross-platform sed in-place editing
13
15
  # - open_url() — cross-platform browser open
14
16
  # - tmp_dir() — returns best temp directory for platform
15
17
  # - is_wsl() — detect WSL environment
16
18
  # - is_macos() / is_linux() — platform checks
19
+ # - _timeout() — run command with timeout (timeout/gtimeout or no-op on macOS)
17
20
 
18
21
  # ─── NO_COLOR support (https://no-color.org/) ─────────────────────────────
19
22
  # Blanks standard color variables when:
@@ -30,6 +33,11 @@ _COMPAT_UNAME="${_COMPAT_UNAME:-$(uname -s 2>/dev/null || echo "Unknown")}"
30
33
 
31
34
  is_macos() { [[ "$_COMPAT_UNAME" == "Darwin" ]]; }
32
35
  is_linux() { [[ "$_COMPAT_UNAME" == "Linux" ]]; }
36
+
37
+ # ─── Bash 3.2 compat (macOS ships bash 3.2) ───────────────────────────────
38
+ # Case conversion: ${var,,} and ${var^^} require bash 4+. Use these instead:
39
+ _to_lower() { echo "$1" | tr '[:upper:]' '[:lower:]'; }
40
+ _to_upper() { echo "$1" | tr '[:lower:]' '[:upper:]'; }
33
41
  is_wsl() { is_linux && [[ -n "${WSL_DISTRO_NAME:-}" || -f /proc/version ]] && grep -qi microsoft /proc/version 2>/dev/null; }
34
42
 
35
43
  # ─── sed -i (macOS vs GNU) ────────────────────────────────────────────────
@@ -49,14 +57,14 @@ open_url() {
49
57
  open "$url"
50
58
  elif is_wsl; then
51
59
  # WSL: use wslview (from wslu) or powershell
52
- if command -v wslview &>/dev/null; then
60
+ if command -v wslview >/dev/null 2>&1; then
53
61
  wslview "$url"
54
- elif command -v powershell.exe &>/dev/null; then
62
+ elif command -v powershell.exe >/dev/null 2>&1; then
55
63
  powershell.exe -Command "Start-Process '$url'" 2>/dev/null
56
64
  else
57
65
  return 1
58
66
  fi
59
- elif command -v xdg-open &>/dev/null; then
67
+ elif command -v xdg-open >/dev/null 2>&1; then
60
68
  xdg-open "$url"
61
69
  else
62
70
  return 1
@@ -83,7 +91,7 @@ sw_valid_error_category() {
83
91
  local category="${1:-}"
84
92
  local custom_file="$HOME/.shipwright/optimization/error-taxonomy.json"
85
93
  # Check custom taxonomy first
86
- if [[ -f "$custom_file" ]] && command -v jq &>/dev/null; then
94
+ if [[ -f "$custom_file" ]] && command -v jq >/dev/null 2>&1; then
87
95
  local custom_cats
88
96
  custom_cats=$(jq -r '.categories[]? // empty' "$custom_file" 2>/dev/null || true)
89
97
  if [[ -n "$custom_cats" ]]; then
@@ -113,7 +121,7 @@ complexity_bucket() {
113
121
  local config_file="$HOME/.shipwright/optimization/complexity-clusters.json"
114
122
  local low_boundary=3
115
123
  local high_boundary=6
116
- if [[ -f "$config_file" ]] && command -v jq &>/dev/null; then
124
+ if [[ -f "$config_file" ]] && command -v jq >/dev/null 2>&1; then
117
125
  local lb hb
118
126
  lb=$(jq -r '.low_boundary // 3' "$config_file" 2>/dev/null || echo "3")
119
127
  hb=$(jq -r '.high_boundary // 6' "$config_file" 2>/dev/null || echo "6")
@@ -156,7 +164,7 @@ detect_primary_language() {
156
164
 
157
165
  detect_test_framework() {
158
166
  local dir="${1:-.}"
159
- if [[ -f "$dir/package.json" ]] && command -v jq &>/dev/null; then
167
+ if [[ -f "$dir/package.json" ]] && command -v jq >/dev/null 2>&1; then
160
168
  local runner
161
169
  runner=$(jq -r '
162
170
  if .devDependencies.vitest then "vitest"
@@ -184,6 +192,81 @@ detect_test_framework() {
184
192
  fi
185
193
  }
186
194
 
195
# ─── Cross-platform file modification time (epoch) ────────────────────────
# GNU/Linux: stat -c '%Y'; macOS/BSD: stat -f %m.
# NOTE: the GNU form MUST be probed first. On GNU stat, `-f` means "file
# system status" and takes no positional format, so `stat -f %m FILE` treats
# "%m" as a file operand: it prints the filesystem status of FILE to stdout
# (corrupting the captured value) and only then exits non-zero. BSD stat
# rejects `-c` cleanly with no stdout, so this fallback order is safe on both.
file_mtime() {
  local file="$1"
  stat -c '%Y' "$file" 2>/dev/null || stat -f %m "$file" 2>/dev/null || echo "0"
}
201
+
202
# ─── Timeout command (macOS may lack timeout; gtimeout from coreutils) ─────
# Usage: _timeout <seconds> <command> [args...]
# Prefers GNU `timeout`, then Homebrew coreutils' `gtimeout`; when neither
# exists (e.g. stock macOS), degrades to running the command unbounded.
_timeout() {
  local limit="$1"
  shift
  local runner=""
  if command -v timeout >/dev/null 2>&1; then
    runner="timeout"
  elif command -v gtimeout >/dev/null 2>&1; then
    runner="gtimeout"
  fi
  if [[ -n "$runner" ]]; then
    "$runner" "$limit" "$@"
  else
    # Fallback: no timeout binary available — execute directly.
    "$@"
  fi
}
216
+
217
# ─── Cross-platform date helpers (GNU date -d vs BSD date -j/-v) ──────────
# date_to_epoch: convert date string to Unix epoch
# date_days_ago: YYYY-MM-DD for N days ago
# date_add_days: YYYY-MM-DD for base_date + N days
# epoch_to_iso:  convert epoch to ISO 8601
#
# date_to_epoch "<YYYY-MM-DD | YYYY-MM-DDTHH:MM:SSZ>" → epoch seconds, or "0"
# on failure/empty input.
date_to_epoch() {
  local datestr="$1"
  # Guard empty input: GNU `date -d ""` does NOT fail — it silently resolves
  # to today at midnight, which would return a bogus non-zero epoch.
  if [[ -z "$datestr" ]]; then
    echo "0"
    return
  fi
  # Pick the BSD parse format from the input shape.
  local fmt=""
  if [[ "$datestr" == *"T"* ]]; then
    fmt="%Y-%m-%dT%H:%M:%SZ"
  else
    fmt="%Y-%m-%d"
  fi
  # GNU date prints the epoch directly on success.
  if date -u -d "$datestr" +%s 2>/dev/null; then
    return
  fi
  # BSD date: -j = don't set the clock, -f = input format
  date -u -j -f "$fmt" "$datestr" +%s 2>/dev/null || echo "0"
}
236
+
237
# date_days_ago <N> → YYYY-MM-DD for N days before now (UTC).
# GNU date understands relative phrases; BSD date needs a -v offset.
date_days_ago() {
  local days="$1"
  local out
  if out=$(date -u -d "$days days ago" +%Y-%m-%d 2>/dev/null); then
    echo "$out"
    return
  fi
  date -u -v-"${days}"d +%Y-%m-%d 2>/dev/null || echo "1970-01-01"
}
244
+
245
# date_add_days <YYYY-MM-DD> <N> → base date plus N days, as YYYY-MM-DD (UTC).
# Falls back to epoch arithmetic where GNU relative syntax is unavailable.
date_add_days() {
  local start="$1"
  local offset="$2"
  local out
  # GNU date handles "<date> + N days" natively.
  if out=$(date -u -d "${start} + ${offset} days" +%Y-%m-%d 2>/dev/null); then
    echo "$out"
    return
  fi
  # BSD path: convert the base date to an epoch and add whole days.
  local start_epoch
  start_epoch=$(date_to_epoch "$start")
  if [[ -z "$start_epoch" || "$start_epoch" == "0" ]]; then
    echo "1970-01-01"
    return
  fi
  local target_epoch=$((start_epoch + (offset * 86400)))
  date -u -r "$target_epoch" +%Y-%m-%d 2>/dev/null || date -u -d "@$target_epoch" +%Y-%m-%d 2>/dev/null || echo "1970-01-01"
}
261
+
262
# epoch_to_iso <epoch> → ISO-8601 UTC timestamp (e.g. 86400 → 1970-01-02T00:00:00Z).
# Non-numeric input returns the epoch-zero sentinel instead of being
# interpolated into the python fallback (which would be an injection hazard
# and previously produced garbage/errors).
epoch_to_iso() {
  local epoch="$1"
  if [[ ! "$epoch" =~ ^-?[0-9]+$ ]]; then
    echo "1970-01-01T00:00:00Z"
    return
  fi
  # GNU form first: on GNU date, `-r` means "mtime of FILE" and could match a
  # file literally named "$epoch" in the cwd. BSD date rejects "@N" under -d
  # and falls through to its native `-r <seconds>`.
  date -u -d "@$epoch" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
    date -u -r "$epoch" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
    python3 -c "import datetime; print(datetime.datetime.fromtimestamp($epoch, datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ'))" 2>/dev/null || \
    echo "1970-01-01T00:00:00Z"
}
269
+
187
270
  # ─── Cross-platform MD5 ──────────────────────────────────────────────────
188
271
  # Usage:
189
272
  # compute_md5 --string "some text" → md5 hash of string
@@ -0,0 +1,91 @@
1
#!/usr/bin/env bash
# config.sh — Centralized configuration reader for Shipwright
# Precedence: SHIPWRIGHT_* env var > daemon-config.json > policy.json > defaults.json
# Usage: source "$SCRIPT_DIR/lib/config.sh"
#        val=$(_config_get "daemon.poll_interval")

# Source guard: make repeated `source` calls a no-op.
[[ -n "${_SW_CONFIG_LOADED:-}" ]] && return 0
_SW_CONFIG_LOADED=1

# Directory of this file; the repo root is two levels up (scripts/lib/ → repo).
# NOTE(review): if the cd fails, _CONFIG_REPO_DIR becomes "" and the *_FILE
# paths below degrade to "/config/*.json" — harmless, since _config_get only
# reads files that exist.
_CONFIG_SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
_CONFIG_REPO_DIR="$(cd "$_CONFIG_SCRIPT_DIR/../.." 2>/dev/null && pwd || echo "")"

_DEFAULTS_FILE="${_CONFIG_REPO_DIR}/config/defaults.json"
_POLICY_FILE="${_CONFIG_REPO_DIR}/config/policy.json"
_DAEMON_CONFIG_FILE=".claude/daemon-config.json"

# Resolve daemon config relative to git root or cwd.
# NOTE(review): `local_root` is a plain global despite its name — this runs at
# file scope, outside any function, so `local` would not apply here anyway.
if [[ ! -f "$_DAEMON_CONFIG_FILE" ]]; then
  local_root="$(git rev-parse --show-toplevel 2>/dev/null || echo ".")"
  _DAEMON_CONFIG_FILE="${local_root}/.claude/daemon-config.json"
fi
21
+
22
# _config_get "section.key" [default]
# Resolution order: SHIPWRIGHT_<SECTION>_<KEY> env var, then
# daemon-config.json, policy.json, defaults.json, then the supplied
# default (empty string if omitted). Prints the resolved value.
_config_get() {
  local dotpath="$1"
  local fallback="${2:-}"

  # 1. Env override: "daemon.poll_interval" -> SHIPWRIGHT_DAEMON_POLL_INTERVAL
  local env_name="SHIPWRIGHT_$(echo "$dotpath" | tr '[:lower:].' '[:upper:]_')"
  local env_val="${!env_name:-}"
  if [[ -n "$env_val" ]]; then
    echo "$env_val"
    return 0
  fi

  # Convert dotpath to a jq path: "daemon.poll_interval" -> ".daemon.poll_interval"
  local jq_path=".${dotpath}"

  # 2–4. Config files, highest precedence first.
  # `select(. != null) | tostring` (rather than the `// ""` idiom) preserves
  # legitimate `false` and `0` values — jq's `//` treats false like null, so
  # a config value of `false` would otherwise silently fall through to a
  # lower-precedence file or the default.
  local cfg val
  for cfg in "$_DAEMON_CONFIG_FILE" "$_POLICY_FILE" "$_DEFAULTS_FILE"; do
    [[ -f "$cfg" ]] || continue
    val=$(jq -r "${jq_path} | select(. != null) | tostring" "$cfg" 2>/dev/null || echo "")
    if [[ -n "$val" ]]; then
      echo "$val"
      return 0
    fi
  done

  # 5. Return fallback
  echo "$fallback"
}
72
+
73
# _config_get_int "section.key" [default]
# Same as _config_get but guarantees integer output: units are stripped
# ("30s" -> "30"), and a value with no digits at all ("true") falls back to
# the default instead of emitting an empty string, which would break
# arithmetic callers ($(( ... )) on "" is a syntax error).
_config_get_int() {
  local raw cleaned
  raw=$(_config_get "$1" "${2:-0}")
  # Strip everything that is not a digit or a minus sign.
  cleaned="${raw//[!0-9-]/}"
  if [[ "$cleaned" =~ ^-?[0-9]+$ ]]; then
    echo "$cleaned"
  else
    echo "${2:-0}"
  fi
}
81
+
82
# _config_get_bool "section.key" [default]
# Truthiness predicate for conditionals: exit status 0 when the resolved
# value is true/1/yes/on, 1 for anything else.
_config_get_bool() {
  local resolved
  resolved=$(_config_get "$1" "${2:-false}")
  case "$resolved" in
    true) return 0 ;;
    1)    return 0 ;;
    yes)  return 0 ;;
    on)   return 0 ;;
  esac
  return 1
}
@@ -85,9 +85,9 @@ get_adaptive_heartbeat_timeout() {
85
85
 
86
86
  # Stage-specific defaults (daemon-health.sh when sourced, else policy_get, else literal)
87
87
  local default_timeout="${HEALTH_HEARTBEAT_TIMEOUT:-120}"
88
- if type daemon_health_timeout_for_stage &>/dev/null 2>&1; then
88
+ if type daemon_health_timeout_for_stage >/dev/null 2>&1; then
89
89
  default_timeout=$(daemon_health_timeout_for_stage "$stage" "$default_timeout")
90
- elif type policy_get &>/dev/null 2>&1; then
90
+ elif type policy_get >/dev/null 2>&1; then
91
91
  local policy_stage
92
92
  policy_stage=$(policy_get ".daemon.stage_timeouts.$stage" "")
93
93
  [[ -n "$policy_stage" && "$policy_stage" =~ ^[0-9]+$ ]] && default_timeout="$policy_stage"
@@ -385,7 +385,7 @@ daemon_assess_progress() {
385
385
  ' "$progress_file" > "$tmp_progress" 2>/dev/null && mv "$tmp_progress" "$progress_file"
386
386
 
387
387
  # ── Vitals-based verdict (preferred over static thresholds) ──
388
- if type pipeline_compute_vitals &>/dev/null 2>&1 && type pipeline_health_verdict &>/dev/null 2>&1; then
388
+ if type pipeline_compute_vitals >/dev/null 2>&1 && type pipeline_health_verdict >/dev/null 2>&1; then
389
389
  # Compute vitals using the worktree's pipeline state if available
390
390
  local _worktree_state=""
391
391
  local _worktree_artifacts=""
@@ -37,6 +37,22 @@ daemon_spawn_pipeline() {
37
37
 
38
38
  daemon_log INFO "Spawning pipeline for issue #${issue_num}: ${issue_title}"
39
39
 
40
+ # ── Budget gate: hard-stop if daily budget exhausted ──
41
+ if [[ -x "${SCRIPT_DIR}/sw-cost.sh" ]]; then
42
+ local remaining
43
+ remaining=$("${SCRIPT_DIR}/sw-cost.sh" remaining-budget 2>/dev/null || echo "")
44
+ if [[ -n "$remaining" && "$remaining" != "unlimited" ]]; then
45
+ if awk -v r="$remaining" 'BEGIN { exit !(r <= 0) }' 2>/dev/null; then
46
+ daemon_log WARN "Budget exhausted (remaining: \$${remaining}) — skipping issue #${issue_num}"
47
+ emit_event "daemon.budget_exhausted" "remaining=$remaining" "issue=$issue_num"
48
+ return 1
49
+ fi
50
+ if awk -v r="$remaining" 'BEGIN { exit !(r < 1.0) }' 2>/dev/null; then
51
+ daemon_log WARN "Budget low: \$${remaining} remaining"
52
+ fi
53
+ fi
54
+ fi
55
+
40
56
  # ── Issue decomposition (if decomposer available) ──
41
57
  local decompose_script="${SCRIPT_DIR}/sw-decompose.sh"
42
58
  if [[ -x "$decompose_script" && "$NO_GITHUB" != "true" ]]; then
@@ -45,7 +61,7 @@ daemon_spawn_pipeline() {
45
61
  if [[ "$decompose_result" == *"decomposed"* ]]; then
46
62
  daemon_log INFO "Issue #${issue_num} decomposed into subtasks — skipping pipeline"
47
63
  # Remove the shipwright label so decomposed parent doesn't re-queue
48
- gh issue edit "$issue_num" --remove-label "shipwright" 2>/dev/null || true
64
+ _timeout 30 gh issue edit "$issue_num" --remove-label "shipwright" 2>/dev/null || true
49
65
  return 0
50
66
  fi
51
67
  fi
@@ -54,14 +70,14 @@ daemon_spawn_pipeline() {
54
70
  local issue_goal="$issue_title"
55
71
  if [[ "$NO_GITHUB" != "true" ]]; then
56
72
  local issue_body_first
57
- issue_body_first=$(gh issue view "$issue_num" --json body --jq '.body' 2>/dev/null | head -3 | tr '\n' ' ' | cut -c1-200 || true)
73
+ issue_body_first=$(_timeout 30 gh issue view "$issue_num" --json body --jq '.body' 2>/dev/null | head -3 | tr '\n' ' ' | cut -c1-200 || true)
58
74
  if [[ -n "$issue_body_first" ]]; then
59
75
  issue_goal="${issue_title}: ${issue_body_first}"
60
76
  fi
61
77
  fi
62
78
 
63
79
  # ── Predictive risk assessment (if enabled) ──
64
- if [[ "${PREDICTION_ENABLED:-false}" == "true" ]] && type predict_pipeline_risk &>/dev/null 2>&1; then
80
+ if [[ "${PREDICTION_ENABLED:-false}" == "true" ]] && type predict_pipeline_risk >/dev/null 2>&1; then
65
81
  local issue_json_for_pred=""
66
82
  if [[ "$NO_GITHUB" != "true" ]]; then
67
83
  issue_json_for_pred=$(gh issue view "$issue_num" --json number,title,body,labels 2>/dev/null || echo "")
@@ -214,7 +230,7 @@ daemon_track_job() {
214
230
  local issue_num="$1" pid="$2" worktree="$3" title="${4:-}" repo="${5:-}" goal="${6:-}"
215
231
 
216
232
  # Write to SQLite (non-blocking, best-effort)
217
- if type db_save_job &>/dev/null; then
233
+ if type db_save_job >/dev/null 2>&1; then
218
234
  local job_id="daemon-${issue_num}-$(now_epoch)"
219
235
  db_save_job "$job_id" "$issue_num" "$title" "$pid" "$worktree" "" "${PIPELINE_TEMPLATE:-autonomous}" "$goal" 2>/dev/null || true
220
236
  fi
@@ -266,7 +282,30 @@ daemon_reap_completed() {
266
282
 
267
283
  # Check if process is still running
268
284
  if kill -0 "$pid" 2>/dev/null; then
269
- continue
285
+ # Guard against PID reuse: if job has been running > 6 hours and
286
+ # the process tree doesn't contain sw-pipeline/sw-loop, it's stale
287
+ local _started_at _start_e _age_s
288
+ _started_at=$(echo "$job" | jq -r '.started_at // empty')
289
+ if [[ -n "$_started_at" ]]; then
290
+ _start_e=$(TZ=UTC date -j -f "%Y-%m-%dT%H:%M:%SZ" "$_started_at" +%s 2>/dev/null || date -d "$_started_at" +%s 2>/dev/null || echo "0")
291
+ _age_s=$(( $(now_epoch) - ${_start_e:-0} ))
292
+ if [[ "$_age_s" -gt 21600 ]]; then # 6 hours
293
+ # Verify this PID is actually our pipeline (not a reused PID)
294
+ local _proc_cmd
295
+ _proc_cmd=$(ps -p "$pid" -o command= 2>/dev/null || true)
296
+ if [[ -z "$_proc_cmd" ]] || ! echo "$_proc_cmd" | grep -qE 'sw-pipeline|sw-loop|claude' 2>/dev/null; then
297
+ daemon_log WARN "Stale job #${issue_num}: PID $pid running ${_age_s}s but not a pipeline process — force-reaping"
298
+ emit_event "daemon.stale_dead" "issue=$issue_num" "pid=$pid" "elapsed_s=$_age_s"
299
+ # Fall through to reap logic
300
+ else
301
+ continue
302
+ fi
303
+ else
304
+ continue
305
+ fi
306
+ else
307
+ continue
308
+ fi
270
309
  fi
271
310
 
272
311
  # Process is dead — determine exit code
@@ -309,7 +348,7 @@ daemon_reap_completed() {
309
348
  emit_event "daemon.reap" "issue=$issue_num" "result=$result_str" "duration_s=$dur_s"
310
349
 
311
350
  # Update SQLite (mark job complete/failed)
312
- if type db_complete_job &>/dev/null && type db_fail_job &>/dev/null; then
351
+ if type db_complete_job >/dev/null 2>&1 && type db_fail_job >/dev/null 2>&1; then
313
352
  local _db_job_id="daemon-${issue_num}-${start_epoch}"
314
353
  if [[ "$exit_code" -eq 0 ]]; then
315
354
  db_complete_job "$_db_job_id" "$result_str" 2>/dev/null || true
@@ -343,7 +382,7 @@ daemon_reap_completed() {
343
382
  --method PATCH \
344
383
  --field status=completed \
345
384
  --field conclusion=cancelled \
346
- --silent 2>/dev/null || true
385
+ --silent --timeout 30 2>/dev/null || true
347
386
  fi
348
387
  fi
349
388
  done < <(jq -r 'keys[]' "$check_ids_file" 2>/dev/null || true)
@@ -352,13 +391,18 @@ daemon_reap_completed() {
352
391
  fi
353
392
 
354
393
  # Finalize memory (capture failure patterns for future runs)
355
- if type memory_finalize_pipeline &>/dev/null 2>&1; then
394
+ if type memory_finalize_pipeline >/dev/null 2>&1; then
356
395
  local _job_state _job_artifacts
357
396
  _job_state="${worktree:-.}/.claude/pipeline-state.md"
358
397
  _job_artifacts="${worktree:-.}/.claude/pipeline-artifacts"
359
398
  memory_finalize_pipeline "$_job_state" "$_job_artifacts" 2>/dev/null || true
360
399
  fi
361
400
 
401
+ # Trigger learning after pipeline reap
402
+ if type optimize_full_analysis &>/dev/null; then
403
+ optimize_full_analysis &>/dev/null &
404
+ fi
405
+
362
406
  # Clean up progress tracking for this job
363
407
  daemon_clear_progress "$issue_num"
364
408
 
@@ -400,13 +444,15 @@ daemon_reap_completed() {
400
444
  local current_active
401
445
  current_active=$(locked_get_active_count)
402
446
  if [[ "$current_active" -lt "$MAX_PARALLEL" ]]; then
403
- local next_issue
404
- next_issue=$(dequeue_next)
405
- if [[ -n "$next_issue" ]]; then
447
+ local next_issue_key
448
+ next_issue_key=$(dequeue_next)
449
+ if [[ -n "$next_issue_key" ]]; then
450
+ local next_issue_num="$next_issue_key" next_repo=""
451
+ [[ "$next_issue_key" == *:* ]] && next_repo="${next_issue_key%%:*}" && next_issue_num="${next_issue_key##*:}"
406
452
  local next_title
407
- next_title=$(jq -r --arg n "$next_issue" '.titles[$n] // ""' "$STATE_FILE" 2>/dev/null || true)
408
- daemon_log INFO "Dequeuing issue #${next_issue}: ${next_title}"
409
- daemon_spawn_pipeline "$next_issue" "$next_title"
453
+ next_title=$(jq -r --arg n "$next_issue_key" '.titles[$n] // ""' "$STATE_FILE" 2>/dev/null || true)
454
+ daemon_log INFO "Dequeuing issue #${next_issue_num}${next_repo:+, repo=${next_repo}}: ${next_title}"
455
+ daemon_spawn_pipeline "$next_issue_num" "$next_title" "$next_repo"
410
456
  fi
411
457
  fi
412
458
  done <<< "$jobs"
@@ -453,12 +499,12 @@ daemon_on_success() {
453
499
 
454
500
  if [[ "$NO_GITHUB" != "true" ]]; then
455
501
  # Remove watch label, add success label
456
- gh issue edit "$issue_num" \
502
+ _timeout 30 gh issue edit "$issue_num" \
457
503
  --remove-label "$ON_SUCCESS_REMOVE_LABEL" \
458
504
  --add-label "$ON_SUCCESS_ADD_LABEL" 2>/dev/null || true
459
505
 
460
506
  # Comment on issue
461
- gh issue comment "$issue_num" --body "## ✅ Pipeline Complete
507
+ _timeout 30 gh issue comment "$issue_num" --body "## ✅ Pipeline Complete
462
508
 
463
509
  The autonomous pipeline finished successfully.
464
510
 
@@ -471,7 +517,7 @@ Check the associated PR for the implementation." 2>/dev/null || true
471
517
 
472
518
  # Optionally close the issue
473
519
  if [[ "$ON_SUCCESS_CLOSE_ISSUE" == "true" ]]; then
474
- gh issue close "$issue_num" 2>/dev/null || true
520
+ _timeout 30 gh issue close "$issue_num" 2>/dev/null || true
475
521
  fi
476
522
  fi
477
523
 
File without changes
@@ -11,7 +11,7 @@ _DAEMON_HEALTH_LOADED=1
11
11
  daemon_health_timeout_for_stage() {
12
12
  local stage="${1:-unknown}"
13
13
  local fallback="${2:-120}"
14
- if type policy_get &>/dev/null 2>&1; then
14
+ if type policy_get >/dev/null 2>&1; then
15
15
  local policy_val
16
16
  policy_val=$(policy_get ".daemon.stage_timeouts.$stage" "")
17
17
  if [[ -n "$policy_val" && "$policy_val" =~ ^[0-9]+$ ]]; then
@@ -3,6 +3,28 @@
3
3
  [[ -n "${_DAEMON_PATROL_LOADED:-}" ]] && return 0
4
4
  _DAEMON_PATROL_LOADED=1
5
5
 
6
# ─── Decision Engine Signal Mode ─────────────────────────────────────────────
# When DECISION_ENGINE_ENABLED=true, patrol writes candidates to the pending
# signals file instead of creating GitHub issues directly. The decision engine
# collects, scores, and acts on these signals with tiered autonomy.
SIGNALS_PENDING_FILE="${HOME}/.shipwright/signals/pending.jsonl"

# _patrol_emit_signal <id> <signal> <category> <title> <description> [risk] [confidence] [dedup_key]
# Appends one JSONL candidate record to $SIGNALS_PENDING_FILE.
#   risk       — risk score, default 50, emitted as a JSON number
#   confidence — default 0.80, emitted as a JSON number (must parse as JSON)
#   dedup_key  — stable key used to collapse duplicate signals
_patrol_emit_signal() {
  local id="$1" signal="$2" category="$3" title="$4" description="$5"
  # "${8:-}" (not "$8"): a 7-arg caller must not crash under `set -u`.
  local risk="${6:-50}" confidence="${7:-0.80}" dedup_key="${8:-}"
  mkdir -p "$(dirname "$SIGNALS_PENDING_FILE")"
  local ts
  ts=$(now_iso)
  local candidate
  # --argjson for BOTH risk and confidence so they land as JSON numbers.
  # The original passed confidence via --arg, producing a string ("0.80")
  # alongside a numeric risk_score — inconsistent typing for consumers.
  candidate=$(jq -n \
    --arg id "$id" --arg signal "$signal" --arg category "$category" \
    --arg title "$title" --arg desc "$description" \
    --argjson risk "$risk" --argjson conf "$confidence" \
    --arg dedup "$dedup_key" --arg ts "$ts" \
    '{id:$id, signal:$signal, category:$category, title:$title, description:$desc, evidence:{}, risk_score:$risk, confidence:$conf, dedup_key:$dedup, collected_at:$ts}')
  echo "$candidate" >> "$SIGNALS_PENDING_FILE"
}
27
+
6
28
  patrol_build_labels() {
7
29
  local check_label="$1"
8
30
  local labels="${PATROL_LABEL},${check_label}"
@@ -45,10 +67,22 @@ daemon_patrol() {
45
67
  local findings=0
46
68
 
47
69
  # npm audit
48
- if [[ -f "package.json" ]] && command -v npm &>/dev/null; then
70
+ if [[ -f "package.json" ]] && command -v npm >/dev/null 2>&1; then
49
71
  local audit_json
50
- audit_json=$(npm audit --json 2>/dev/null || true)
51
- if [[ -n "$audit_json" ]]; then
72
+ audit_json=$(npm audit --json 2>/dev/null || echo '{}')
73
+ local audit_version
74
+ audit_version=$(echo "$audit_json" | jq -r '.auditReportVersion // 1')
75
+
76
+ local vuln_list
77
+ if [[ "$audit_version" == "2" ]]; then
78
+ # npm 7+ format: .vulnerabilities is an object keyed by package name
79
+ vuln_list=$(echo "$audit_json" | jq -c '[.vulnerabilities | to_entries[] | .value | {name: .name, severity: .severity, url: (.via[0].url // "N/A"), title: (.via[0].title // .name)}]' 2>/dev/null || echo '[]')
80
+ else
81
+ # npm 6 format: .advisories is an object keyed by advisory ID
82
+ vuln_list=$(echo "$audit_json" | jq -c '[.advisories | to_entries[] | .value | {name: .module_name, severity: .severity, url: .url, title: .title}]' 2>/dev/null || echo '[]')
83
+ fi
84
+
85
+ if [[ -n "$vuln_list" && "$vuln_list" != "[]" ]]; then
52
86
  while IFS= read -r vuln; do
53
87
  local severity name advisory_url title
54
88
  severity=$(echo "$vuln" | jq -r '.severity // "unknown"')
@@ -64,8 +98,16 @@ daemon_patrol() {
64
98
  findings=$((findings + 1))
65
99
  emit_event "patrol.finding" "check=security" "severity=$severity" "package=$name"
66
100
 
67
- # Check if issue already exists
68
- if [[ "$NO_GITHUB" != "true" ]] && [[ "$dry_run" != "true" ]]; then
101
+ # Route to decision engine or create issue directly
102
+ if [[ "${DECISION_ENGINE_ENABLED:-false}" == "true" ]]; then
103
+ local _cat="security_patch"
104
+ [[ "$severity" == "critical" ]] && _cat="security_critical"
105
+ _patrol_emit_signal "sec-${name}" "security" "$_cat" \
106
+ "Security: ${title} in ${name}" \
107
+ "Fix ${severity} vulnerability in ${name}" \
108
+ "$([[ "$severity" == "critical" ]] && echo 80 || echo 50)" \
109
+ "0.95" "security:${name}:${title}"
110
+ elif [[ "$NO_GITHUB" != "true" ]] && [[ "$dry_run" != "true" ]]; then
69
111
  local existing
70
112
  existing=$(gh issue list --label "$PATROL_LABEL" --label "security" \
71
113
  --search "Security: $name" --json number -q 'length' 2>/dev/null || echo "0")
@@ -90,12 +132,12 @@ Auto-detected by \`shipwright daemon patrol\`." \
90
132
  else
91
133
  echo -e " ${RED}●${RESET} ${BOLD}${severity}${RESET}: ${title} in ${CYAN}${name}${RESET}"
92
134
  fi
93
- done < <(echo "$audit_json" | jq -c '.vulnerabilities | to_entries[] | .value' 2>/dev/null)
135
+ done < <(echo "$vuln_list" | jq -c '.[]' 2>/dev/null)
94
136
  fi
95
137
  fi
96
138
 
97
139
  # pip-audit
98
- if [[ -f "requirements.txt" ]] && command -v pip-audit &>/dev/null; then
140
+ if [[ -f "requirements.txt" ]] && command -v pip-audit >/dev/null 2>&1; then
99
141
  local pip_json
100
142
  pip_json=$(pip-audit --format=json 2>/dev/null || true)
101
143
  if [[ -n "$pip_json" ]]; then
@@ -106,7 +148,7 @@ Auto-detected by \`shipwright daemon patrol\`." \
106
148
  fi
107
149
 
108
150
  # cargo audit
109
- if [[ -f "Cargo.toml" ]] && command -v cargo-audit &>/dev/null; then
151
+ if [[ -f "Cargo.toml" ]] && command -v cargo-audit >/dev/null 2>&1; then
110
152
  local cargo_json
111
153
  cargo_json=$(cargo audit --json 2>/dev/null || true)
112
154
  if [[ -n "$cargo_json" ]]; then
@@ -117,8 +159,8 @@ Auto-detected by \`shipwright daemon patrol\`." \
117
159
  fi
118
160
 
119
161
  # Enrich with GitHub security alerts
120
- if type gh_security_alerts &>/dev/null 2>&1 && [[ "${NO_GITHUB:-false}" != "true" ]]; then
121
- if type _gh_detect_repo &>/dev/null 2>&1; then
162
+ if type gh_security_alerts >/dev/null 2>&1 && [[ "${NO_GITHUB:-false}" != "true" ]]; then
163
+ if type _gh_detect_repo >/dev/null 2>&1; then
122
164
  _gh_detect_repo 2>/dev/null || true
123
165
  fi
124
166
  local gh_owner="${GH_OWNER:-}" gh_repo="${GH_REPO:-}"
@@ -135,7 +177,7 @@ Auto-detected by \`shipwright daemon patrol\`." \
135
177
  fi
136
178
 
137
179
  # Enrich with GitHub Dependabot alerts
138
- if type gh_dependabot_alerts &>/dev/null 2>&1 && [[ "${NO_GITHUB:-false}" != "true" ]]; then
180
+ if type gh_dependabot_alerts >/dev/null 2>&1 && [[ "${NO_GITHUB:-false}" != "true" ]]; then
139
181
  local gh_owner="${GH_OWNER:-}" gh_repo="${GH_REPO:-}"
140
182
  if [[ -n "$gh_owner" && -n "$gh_repo" ]]; then
141
183
  local dep_alerts
@@ -162,7 +204,7 @@ Auto-detected by \`shipwright daemon patrol\`." \
162
204
  daemon_log INFO "Patrol: checking for stale dependencies"
163
205
  local findings=0
164
206
 
165
- if [[ -f "package.json" ]] && command -v npm &>/dev/null; then
207
+ if [[ -f "package.json" ]] && command -v npm >/dev/null 2>&1; then
166
208
  local outdated_json
167
209
  outdated_json=$(npm outdated --json 2>/dev/null || true)
168
210
  if [[ -n "$outdated_json" ]] && [[ "$outdated_json" != "{}" ]]; then
@@ -190,8 +232,13 @@ Auto-detected by \`shipwright daemon patrol\`." \
190
232
  fi
191
233
  done < <(echo "$outdated_json" | jq -c 'to_entries[]' 2>/dev/null)
192
234
 
193
- # Create a single issue for all stale deps
194
- if [[ "$findings" -gt 0 ]] && [[ "$NO_GITHUB" != "true" ]] && [[ "$dry_run" != "true" ]]; then
235
+ # Route to decision engine or create issue
236
+ if [[ "$findings" -gt 0 ]] && [[ "${DECISION_ENGINE_ENABLED:-false}" == "true" ]]; then
237
+ _patrol_emit_signal "deps-stale-${findings}" "deps" "deps_major" \
238
+ "Update ${findings} stale dependencies" \
239
+ "Packages 2+ major versions behind" \
240
+ 45 "0.90" "deps:stale:${findings}"
241
+ elif [[ "$findings" -gt 0 ]] && [[ "$NO_GITHUB" != "true" ]] && [[ "$dry_run" != "true" ]]; then
195
242
  local existing
196
243
  existing=$(gh issue list --label "$PATROL_LABEL" --label "dependencies" \
197
244
  --search "Stale dependencies" --json number -q 'length' 2>/dev/null || echo "0")
@@ -534,7 +581,7 @@ Auto-detected by \`shipwright daemon patrol\` on $(now_iso)." \
534
581
  failures_json=$(
535
582
  (
536
583
  source "$memory_script" > /dev/null 2>&1 || true
537
- if command -v memory_get_actionable_failures &>/dev/null; then
584
+ if command -v memory_get_actionable_failures >/dev/null 2>&1; then
538
585
  memory_get_actionable_failures "$PATROL_FAILURES_THRESHOLD"
539
586
  else
540
587
  echo "[]"
@@ -802,7 +849,7 @@ Auto-detected by \`shipwright daemon patrol\` on $(now_iso)." \
802
849
  if [[ ! -f "$scripts_dir/sw-${name}-test.sh" ]]; then
803
850
  # Count usage across other scripts
804
851
  local usage_count
805
- usage_count=$(grep -rl "sw-${name}" "$scripts_dir"/sw-*.sh 2>/dev/null | grep -cv "$basename" 2>/dev/null || echo "0")
852
+ usage_count=$(grep -rl "sw-${name}" "$scripts_dir"/sw-*.sh 2>/dev/null | grep -cv "$basename" 2>/dev/null || true)
806
853
  usage_count=${usage_count:-0}
807
854
 
808
855
  local line_count
@@ -1046,7 +1093,7 @@ Auto-detected by \`shipwright daemon patrol\` on $(now_iso)." \
1046
1093
  echo ""
1047
1094
 
1048
1095
  # ── Stage 2: AI-Powered Confirmation (if enabled) ──
1049
- if [[ "${PREDICTION_ENABLED:-false}" == "true" ]] && type patrol_ai_analyze &>/dev/null 2>&1; then
1096
+ if [[ "${PREDICTION_ENABLED:-false}" == "true" ]] && type patrol_ai_analyze >/dev/null 2>&1; then
1050
1097
  daemon_log INFO "Intelligence: using AI patrol analysis (prediction enabled)"
1051
1098
  echo -e " ${BOLD}AI Deep Analysis${RESET}"
1052
1099
  # Sample recent source files for AI analysis