shipwright-cli 2.4.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169) hide show
  1. package/README.md +16 -11
  2. package/completions/_shipwright +248 -94
  3. package/completions/shipwright.bash +68 -19
  4. package/completions/shipwright.fish +310 -42
  5. package/config/decision-tiers.json +55 -0
  6. package/config/defaults.json +111 -0
  7. package/config/event-schema.json +218 -0
  8. package/config/policy.json +21 -18
  9. package/dashboard/coverage/coverage-summary.json +14 -0
  10. package/dashboard/public/index.html +1 -1
  11. package/dashboard/server.ts +306 -17
  12. package/dashboard/src/components/charts/bar.test.ts +79 -0
  13. package/dashboard/src/components/charts/donut.test.ts +68 -0
  14. package/dashboard/src/components/charts/pipeline-rail.test.ts +117 -0
  15. package/dashboard/src/components/charts/sparkline.test.ts +125 -0
  16. package/dashboard/src/core/api.test.ts +309 -0
  17. package/dashboard/src/core/helpers.test.ts +301 -0
  18. package/dashboard/src/core/router.test.ts +307 -0
  19. package/dashboard/src/core/router.ts +7 -0
  20. package/dashboard/src/core/sse.test.ts +144 -0
  21. package/dashboard/src/views/metrics.test.ts +186 -0
  22. package/dashboard/src/views/overview.test.ts +173 -0
  23. package/dashboard/src/views/pipelines.test.ts +183 -0
  24. package/dashboard/src/views/team.test.ts +253 -0
  25. package/dashboard/vitest.config.ts +14 -5
  26. package/docs/TIPS.md +1 -1
  27. package/docs/patterns/README.md +1 -1
  28. package/package.json +7 -9
  29. package/scripts/adapters/docker-deploy.sh +1 -1
  30. package/scripts/adapters/tmux-adapter.sh +11 -1
  31. package/scripts/adapters/wezterm-adapter.sh +1 -1
  32. package/scripts/check-version-consistency.sh +1 -1
  33. package/scripts/lib/architecture.sh +127 -0
  34. package/scripts/lib/bootstrap.sh +75 -0
  35. package/scripts/lib/compat.sh +89 -6
  36. package/scripts/lib/config.sh +91 -0
  37. package/scripts/lib/daemon-adaptive.sh +3 -3
  38. package/scripts/lib/daemon-dispatch.sh +63 -17
  39. package/scripts/lib/daemon-failure.sh +0 -0
  40. package/scripts/lib/daemon-health.sh +1 -1
  41. package/scripts/lib/daemon-patrol.sh +64 -17
  42. package/scripts/lib/daemon-poll.sh +54 -25
  43. package/scripts/lib/daemon-state.sh +125 -23
  44. package/scripts/lib/daemon-triage.sh +31 -9
  45. package/scripts/lib/decide-autonomy.sh +295 -0
  46. package/scripts/lib/decide-scoring.sh +228 -0
  47. package/scripts/lib/decide-signals.sh +462 -0
  48. package/scripts/lib/fleet-failover.sh +63 -0
  49. package/scripts/lib/helpers.sh +29 -6
  50. package/scripts/lib/pipeline-detection.sh +2 -2
  51. package/scripts/lib/pipeline-github.sh +9 -9
  52. package/scripts/lib/pipeline-intelligence.sh +105 -38
  53. package/scripts/lib/pipeline-quality-checks.sh +17 -16
  54. package/scripts/lib/pipeline-quality.sh +1 -1
  55. package/scripts/lib/pipeline-stages.sh +440 -59
  56. package/scripts/lib/pipeline-state.sh +54 -4
  57. package/scripts/lib/policy.sh +0 -0
  58. package/scripts/lib/test-helpers.sh +247 -0
  59. package/scripts/postinstall.mjs +78 -12
  60. package/scripts/signals/example-collector.sh +36 -0
  61. package/scripts/sw +17 -7
  62. package/scripts/sw-activity.sh +1 -11
  63. package/scripts/sw-adaptive.sh +109 -85
  64. package/scripts/sw-adversarial.sh +4 -14
  65. package/scripts/sw-architecture-enforcer.sh +1 -11
  66. package/scripts/sw-auth.sh +8 -17
  67. package/scripts/sw-autonomous.sh +111 -49
  68. package/scripts/sw-changelog.sh +1 -11
  69. package/scripts/sw-checkpoint.sh +144 -20
  70. package/scripts/sw-ci.sh +2 -12
  71. package/scripts/sw-cleanup.sh +13 -17
  72. package/scripts/sw-code-review.sh +16 -36
  73. package/scripts/sw-connect.sh +5 -12
  74. package/scripts/sw-context.sh +9 -26
  75. package/scripts/sw-cost.sh +17 -18
  76. package/scripts/sw-daemon.sh +76 -71
  77. package/scripts/sw-dashboard.sh +57 -17
  78. package/scripts/sw-db.sh +524 -26
  79. package/scripts/sw-decide.sh +685 -0
  80. package/scripts/sw-decompose.sh +1 -11
  81. package/scripts/sw-deps.sh +15 -25
  82. package/scripts/sw-developer-simulation.sh +1 -11
  83. package/scripts/sw-discovery.sh +138 -30
  84. package/scripts/sw-doc-fleet.sh +7 -17
  85. package/scripts/sw-docs-agent.sh +6 -16
  86. package/scripts/sw-docs.sh +4 -12
  87. package/scripts/sw-doctor.sh +134 -43
  88. package/scripts/sw-dora.sh +11 -19
  89. package/scripts/sw-durable.sh +35 -52
  90. package/scripts/sw-e2e-orchestrator.sh +11 -27
  91. package/scripts/sw-eventbus.sh +115 -115
  92. package/scripts/sw-evidence.sh +114 -30
  93. package/scripts/sw-feedback.sh +3 -13
  94. package/scripts/sw-fix.sh +2 -20
  95. package/scripts/sw-fleet-discover.sh +1 -11
  96. package/scripts/sw-fleet-viz.sh +10 -18
  97. package/scripts/sw-fleet.sh +13 -17
  98. package/scripts/sw-github-app.sh +6 -16
  99. package/scripts/sw-github-checks.sh +1 -11
  100. package/scripts/sw-github-deploy.sh +1 -11
  101. package/scripts/sw-github-graphql.sh +2 -12
  102. package/scripts/sw-guild.sh +1 -11
  103. package/scripts/sw-heartbeat.sh +49 -12
  104. package/scripts/sw-hygiene.sh +45 -43
  105. package/scripts/sw-incident.sh +48 -74
  106. package/scripts/sw-init.sh +35 -37
  107. package/scripts/sw-instrument.sh +1 -11
  108. package/scripts/sw-intelligence.sh +368 -53
  109. package/scripts/sw-jira.sh +5 -14
  110. package/scripts/sw-launchd.sh +2 -12
  111. package/scripts/sw-linear.sh +8 -17
  112. package/scripts/sw-logs.sh +4 -12
  113. package/scripts/sw-loop.sh +905 -104
  114. package/scripts/sw-memory.sh +263 -20
  115. package/scripts/sw-mission-control.sh +2 -12
  116. package/scripts/sw-model-router.sh +73 -34
  117. package/scripts/sw-otel.sh +15 -23
  118. package/scripts/sw-oversight.sh +1 -11
  119. package/scripts/sw-patrol-meta.sh +5 -11
  120. package/scripts/sw-pipeline-composer.sh +7 -17
  121. package/scripts/sw-pipeline-vitals.sh +1 -11
  122. package/scripts/sw-pipeline.sh +550 -122
  123. package/scripts/sw-pm.sh +2 -12
  124. package/scripts/sw-pr-lifecycle.sh +33 -28
  125. package/scripts/sw-predictive.sh +16 -22
  126. package/scripts/sw-prep.sh +6 -16
  127. package/scripts/sw-ps.sh +1 -11
  128. package/scripts/sw-public-dashboard.sh +2 -12
  129. package/scripts/sw-quality.sh +85 -14
  130. package/scripts/sw-reaper.sh +1 -11
  131. package/scripts/sw-recruit.sh +15 -25
  132. package/scripts/sw-regression.sh +11 -21
  133. package/scripts/sw-release-manager.sh +19 -28
  134. package/scripts/sw-release.sh +8 -16
  135. package/scripts/sw-remote.sh +1 -11
  136. package/scripts/sw-replay.sh +48 -44
  137. package/scripts/sw-retro.sh +70 -92
  138. package/scripts/sw-review-rerun.sh +1 -1
  139. package/scripts/sw-scale.sh +174 -41
  140. package/scripts/sw-security-audit.sh +12 -22
  141. package/scripts/sw-self-optimize.sh +239 -23
  142. package/scripts/sw-session.sh +5 -15
  143. package/scripts/sw-setup.sh +8 -18
  144. package/scripts/sw-standup.sh +5 -15
  145. package/scripts/sw-status.sh +32 -23
  146. package/scripts/sw-strategic.sh +129 -13
  147. package/scripts/sw-stream.sh +1 -11
  148. package/scripts/sw-swarm.sh +76 -36
  149. package/scripts/sw-team-stages.sh +10 -20
  150. package/scripts/sw-templates.sh +4 -14
  151. package/scripts/sw-testgen.sh +3 -13
  152. package/scripts/sw-tmux-pipeline.sh +1 -19
  153. package/scripts/sw-tmux-role-color.sh +0 -10
  154. package/scripts/sw-tmux-status.sh +3 -11
  155. package/scripts/sw-tmux.sh +2 -20
  156. package/scripts/sw-trace.sh +1 -19
  157. package/scripts/sw-tracker-github.sh +0 -10
  158. package/scripts/sw-tracker-jira.sh +1 -11
  159. package/scripts/sw-tracker-linear.sh +1 -11
  160. package/scripts/sw-tracker.sh +7 -24
  161. package/scripts/sw-triage.sh +29 -39
  162. package/scripts/sw-upgrade.sh +5 -23
  163. package/scripts/sw-ux.sh +1 -19
  164. package/scripts/sw-webhook.sh +18 -32
  165. package/scripts/sw-widgets.sh +3 -21
  166. package/scripts/sw-worktree.sh +11 -27
  167. package/scripts/update-homebrew-sha.sh +73 -0
  168. package/templates/pipelines/tdd.json +72 -0
  169. package/scripts/sw-pipeline.sh.mock +0 -7
@@ -0,0 +1,462 @@
1
+ # decide-signals.sh — Signal collection for the decision engine
2
+ # Source from sw-decide.sh. Requires helpers.sh, policy.sh.
3
+ [[ -n "${_DECIDE_SIGNALS_LOADED:-}" ]] && return 0
4
+ _DECIDE_SIGNALS_LOADED=1
5
+
6
+ # ─── State ────────────────────────────────────────────────────────────────────
7
+ SIGNALS_DIR="${HOME}/.shipwright/signals"
8
+ SIGNALS_PENDING_FILE="${SIGNALS_DIR}/pending.jsonl"
9
+
10
# Create the signals state directory (and any missing parents); idempotent.
_ensure_signals_dir() {
  local dir="$SIGNALS_DIR"
  mkdir -p "$dir"
}
13
+
14
# ─── Candidate builder ───────────────────────────────────────────────────────
# Usage: _build_candidate "id" "signal" "category" "title" "description" "risk" "confidence" "dedup_key" [evidence_json]
# Emits a single candidate JSON object on stdout. `risk` and `evidence` are
# injected as real JSON (number/object) via --argjson; `confidence` is passed
# with --arg and therefore lands as a STRING in the output —
# NOTE(review): presumably downstream scoring coerces it with tonumber;
# confirm before changing the type.
# Timestamp comes from now_iso (defined in helpers.sh).
_build_candidate() {
  local id="$1" signal="$2" category="$3" title="$4" description="$5"
  # Defaults: medium risk, fairly confident, empty evidence object.
  local risk="${6:-50}" confidence="${7:-0.80}" dedup_key="$8" evidence="${9:-{}}"
  jq -n \
    --arg id "$id" \
    --arg signal "$signal" \
    --arg category "$category" \
    --arg title "$title" \
    --arg desc "$description" \
    --argjson risk "$risk" \
    --arg conf "$confidence" \
    --arg dedup "$dedup_key" \
    --argjson ev "$evidence" \
    --arg ts "$(now_iso)" \
    '{id:$id, signal:$signal, category:$category, title:$title, description:$desc, evidence:$ev, risk_score:$risk, confidence:$conf, dedup_key:$dedup, collected_at:$ts}'
}
32
+
33
# ─── Collectors ───────────────────────────────────────────────────────────────

# Scan for known vulnerabilities in whichever ecosystems the repo uses
# (npm, pip, cargo) and emit one candidate JSON object per finding.
# npm findings are filtered to critical/high; pip findings are per-package;
# cargo findings are aggregated into a single candidate.
# All audit tool failures degrade to "no findings" — never an error.
signals_collect_security() {
  # npm audit — critical/high only
  if [[ -f "package.json" ]] && command -v npm >/dev/null 2>&1; then
    local audit_json
    audit_json=$(npm audit --json 2>/dev/null || echo '{}')
    local audit_version
    # Report schema differs: v2 (npm 7+) nests under .vulnerabilities,
    # v1 (npm 6) under .advisories.
    audit_version=$(echo "$audit_json" | jq -r '.auditReportVersion // 1')

    local vuln_list
    if [[ "$audit_version" == "2" ]]; then
      vuln_list=$(echo "$audit_json" | jq -c '[.vulnerabilities | to_entries[] | .value | {name: .name, severity: .severity, url: (.via[0].url // "N/A"), title: (.via[0].title // .name)}]' 2>/dev/null || echo '[]')
    else
      vuln_list=$(echo "$audit_json" | jq -c '[.advisories | to_entries[] | .value | {name: .module_name, severity: .severity, url: .url, title: .title}]' 2>/dev/null || echo '[]')
    fi

    if [[ -n "$vuln_list" && "$vuln_list" != "[]" ]]; then
      while IFS= read -r vuln; do
        local severity name title adv_url
        severity=$(echo "$vuln" | jq -r '.severity // "unknown"')
        name=$(echo "$vuln" | jq -r '.name // "unknown"')
        title=$(echo "$vuln" | jq -r '.title // "vulnerability"')
        adv_url=$(echo "$vuln" | jq -r '.url // ""')

        # Moderate/low findings are ignored entirely.
        [[ "$severity" != "critical" && "$severity" != "high" ]] && continue

        local risk=50 category="security_patch"
        [[ "$severity" == "critical" ]] && risk=80 && category="security_critical"

        local evidence
        evidence=$(jq -n --arg sev "$severity" --arg pkg "$name" --arg url "$adv_url" \
          '{severity:$sev, package:$pkg, advisory_url:$url}')

        # Candidate id folds in a slug of the title (truncated to 30 chars)
        # so distinct advisories against one package stay distinct.
        _build_candidate \
          "sec-${name}-$(echo "$title" | tr ' ' '-' | cut -c1-30)" \
          "security" "$category" \
          "Security: ${title} in ${name}" \
          "Fix ${severity} vulnerability in ${name}. Advisory: ${adv_url}" \
          "$risk" "0.95" "security:${name}:${title}" "$evidence"
      done < <(echo "$vuln_list" | jq -c '.[]' 2>/dev/null)
    fi
  fi

  # pip-audit — one candidate per vulnerable dependency; only the first
  # vulnerability id per package is reported.
  if [[ -f "requirements.txt" ]] && command -v pip-audit >/dev/null 2>&1; then
    local pip_json
    pip_json=$(pip-audit --format=json 2>/dev/null || true)
    if [[ -n "$pip_json" ]]; then
      while IFS= read -r dep; do
        local pkg vuln_id
        pkg=$(echo "$dep" | jq -r '.name // "unknown"')
        vuln_id=$(echo "$dep" | jq -r '.vulns[0].id // "unknown"')
        _build_candidate \
          "sec-pip-${pkg}-${vuln_id}" "security" "security_patch" \
          "Security: ${vuln_id} in ${pkg}" \
          "Python dependency ${pkg} has vulnerability ${vuln_id}" \
          60 "0.90" "security:pip:${pkg}:${vuln_id}"
      done < <(echo "$pip_json" | jq -c '.dependencies[] | select(.vulns | length > 0)' 2>/dev/null)
    fi
  fi

  # cargo audit — aggregated into a single candidate.
  if [[ -f "Cargo.toml" ]] && command -v cargo-audit >/dev/null 2>&1; then
    local cargo_json
    cargo_json=$(cargo audit --json 2>/dev/null || true)
    local vuln_count
    # FIX: cargo-audit's JSON reports .vulnerabilities.found as a BOOLEAN;
    # the vulnerability total lives in .vulnerabilities.count. The previous
    # code read .found, making `[[ "true" -gt 0 ]]` always false, so the
    # cargo branch could never emit a candidate.
    vuln_count=$(echo "$cargo_json" | jq -r '.vulnerabilities.count // 0' 2>/dev/null || echo "0")
    # Defensive: only proceed with a plain non-negative integer.
    [[ "$vuln_count" =~ ^[0-9]+$ ]] || vuln_count=0
    if [[ "${vuln_count:-0}" -gt 0 ]]; then
      _build_candidate \
        "sec-cargo-vulns" "security" "security_patch" \
        "Security: ${vuln_count} Cargo vulnerability(ies)" \
        "cargo audit found ${vuln_count} vulnerability(ies)" \
        60 "0.90" "security:cargo:vulns"
    fi
  fi
}
110
+
111
# Emit dependency-update candidates for npm packages that are at least one
# major version behind. Silently does nothing without package.json/npm.
signals_collect_deps() {
  [[ ! -f "package.json" ]] && return 0
  command -v npm >/dev/null 2>&1 || return 0

  local outdated_json
  # npm outdated exits non-zero when anything is outdated; ignore the status.
  outdated_json=$(npm outdated --json 2>/dev/null || true)
  [[ -z "$outdated_json" || "$outdated_json" == "{}" ]] && return 0

  while IFS= read -r pkg; do
    local name current latest current_major latest_major
    name=$(echo "$pkg" | jq -r '.key')
    current=$(echo "$pkg" | jq -r '.value.current // "0.0.0"')
    latest=$(echo "$pkg" | jq -r '.value.latest // "0.0.0"')
    current_major="${current%%.*}"
    latest_major="${latest%%.*}"

    # Skip entries whose versions do not start with a plain integer
    # (git/workspace/linked specifiers).
    [[ ! "$latest_major" =~ ^[0-9]+$ ]] && continue
    [[ ! "$current_major" =~ ^[0-9]+$ ]] && continue

    local diff=$((latest_major - current_major))
    # Only packages >= 1 major version behind produce a candidate;
    # minor/patch-only drift is ignored. (Previously a "deps_patch" tier
    # was computed for diff < 1 and then unconditionally skipped — that
    # dead branch is removed; behavior is unchanged.)
    [[ "$diff" -lt 1 ]] && continue

    # 1 major behind → minor tier; 2+ majors behind → major tier.
    local category="deps_minor" risk=25
    if [[ "$diff" -ge 2 ]]; then
      category="deps_major"
      risk=45
    fi

    local evidence
    evidence=$(jq -n --arg pkg "$name" --arg cur "$current" --arg lat "$latest" --argjson diff "$diff" \
      '{package:$pkg, current:$cur, latest:$lat, major_versions_behind:$diff}')

    _build_candidate \
      "deps-${name}" "deps" "$category" \
      "Update ${name}: ${current} -> ${latest}" \
      "Package ${name} is ${diff} major version(s) behind (${current} -> ${latest})" \
      "$risk" "0.90" "deps:${name}" "$evidence"
  done < <(echo "$outdated_json" | jq -c 'to_entries[]' 2>/dev/null)
}
158
+
159
# Flag files whose line coverage is below 50%, reading the first
# coverage-summary.json found among the known locations.
signals_collect_coverage() {
  local coverage_file=""
  # Probe known coverage-summary locations; first existing file wins.
  for candidate in \
    ".claude/pipeline-artifacts/coverage/coverage-summary.json" \
    "coverage/coverage-summary.json" \
    ".coverage/coverage-summary.json"; do
    [[ -f "$candidate" ]] && coverage_file="$candidate" && break
  done
  [[ -z "$coverage_file" ]] && return 0

  local low_files=""
  local count=0
  while IFS= read -r entry; do
    local file_path line_pct
    file_path=$(echo "$entry" | jq -r '.key')
    line_pct=$(echo "$entry" | jq -r '.value.lines.pct // 100')
    # Skip the aggregate "total" row of the summary.
    [[ "$file_path" == "total" ]] && continue
    # awk exits 0 (shell-true) when coverage is adequate (>= 50) → skip.
    # awk is used because pct may be a float, which bash (( )) cannot compare.
    if awk "BEGIN{exit !($line_pct >= 50)}" 2>/dev/null; then continue; fi
    count=$((count + 1))
    low_files="${low_files}${file_path} (${line_pct}%), "
  done < <(jq -c 'to_entries[]' "$coverage_file" 2>/dev/null)

  [[ "$count" -eq 0 ]] && return 0

  # ${low_files%%, } strips the trailing ", " separator from the list.
  _build_candidate \
    "cov-gaps-${count}" "coverage" "test_coverage" \
    "Improve test coverage for ${count} file(s)" \
    "Files with < 50% line coverage: ${low_files%%, }" \
    20 "0.85" "coverage:gaps:${count}"
}
189
+
190
# Detect stale documentation: a README more than 30 days older than the most
# recent source change, and stale AUTO-generated sections per sw-docs.sh.
signals_collect_docs() {
  local findings=0
  local details=""

  if [[ -f "README.md" ]]; then
    local readme_epoch src_epoch
    # Commit timestamps (epoch seconds) of the last README change vs the
    # last change to any source file; 0 when git has no history for them.
    readme_epoch=$(git log -1 --format=%ct -- README.md 2>/dev/null || echo "0")
    src_epoch=$(git log -1 --format=%ct -- "*.ts" "*.js" "*.py" "*.go" "*.rs" "*.sh" 2>/dev/null || echo "0")
    if [[ "$src_epoch" -gt 0 && "$readme_epoch" -gt 0 ]]; then
      local drift=$((src_epoch - readme_epoch))
      # 2592000 s = 30 days.
      if [[ "$drift" -gt 2592000 ]]; then
        findings=$((findings + 1))
        local days=$((drift / 86400))
        details="${details}README.md: ${days} days behind; "
      fi
    fi
  fi

  # Check AUTO section freshness
  if [[ -x "${SCRIPT_DIR:-}/sw-docs.sh" ]]; then
    # A non-zero exit from `sw-docs.sh check` indicates stale AUTO sections.
    bash "${SCRIPT_DIR}/sw-docs.sh" check >/dev/null 2>&1 || {
      findings=$((findings + 1))
      details="${details}AUTO sections stale; "
    }
  fi

  [[ "$findings" -eq 0 ]] && return 0

  # ${details%%; } strips the trailing "; " separator.
  _build_candidate \
    "docs-stale-${findings}" "docs" "doc_sync" \
    "Sync stale documentation (${findings} item(s))" \
    "Documentation drift detected: ${details%%; }" \
    15 "0.85" "docs:stale"
}
224
+
225
# Heuristic dead-code detector for JS/TS projects: flags files under src/,
# lib/, app/ that no other file appears to import. False positives are
# possible (dynamic imports, CLI entry points), hence the 0.70 confidence.
signals_collect_dead_code() {
  [[ ! -f "package.json" && ! -f "tsconfig.json" ]] && return 0

  local count=0
  local dead_files=""
  local src_dirs=("src" "lib" "app")
  for dir in "${src_dirs[@]}"; do
    [[ -d "$dir" ]] || continue
    while IFS= read -r file; do
      # Strip directory and a single .ts/.js/.tsx/.jsx extension.
      # FIX: pure parameter expansion replaces the previous
      # `sed 's/\.\(ts\|js\|tsx\|jsx\)$//'` — the \| BRE alternation is a
      # GNU extension, so on BSD/macOS sed the extension was never stripped
      # and the import search silently matched nothing.
      local basename_no_ext="${file##*/}"
      case "$basename_no_ext" in
        *.ts | *.js | *.tsx | *.jsx) basename_no_ext="${basename_no_ext%.*}" ;;
      esac
      [[ "$basename_no_ext" == "index" ]] && continue
      [[ "$basename_no_ext" =~ \.(test|spec)$ ]] && continue

      # Count importers: files containing an import/require of this module
      # name, excluding the file itself.
      # NOTE(review): "$file" is used as a regex by grep -v, so dots match
      # any character — harmless here since over-matching only excludes lines.
      local import_count
      import_count=$(grep -rlE "(from|require).*['\"].*${basename_no_ext}['\"]" \
        --include="*.ts" --include="*.js" --include="*.tsx" --include="*.jsx" \
        . 2>/dev/null | grep -cv "$file" || true)
      import_count=${import_count:-0}

      if [[ "$import_count" -eq 0 ]]; then
        count=$((count + 1))
        dead_files="${dead_files}${file}, "
      fi
    done < <(find "$dir" -type f \( -name "*.ts" -o -name "*.js" -o -name "*.tsx" -o -name "*.jsx" \) \
      ! -name "*.test.*" ! -name "*.spec.*" ! -name "*.d.ts" 2>/dev/null)
  done

  [[ "$count" -eq 0 ]] && return 0

  _build_candidate \
    "dead-code-${count}" "dead_code" "dead_code" \
    "Dead code candidates (${count} files)" \
    "Files with no importers: ${dead_files%%, }" \
    25 "0.70" "dead_code:${count}"
}
261
+
262
# Compare the most recent "test" stage duration from the event log against the
# recorded patrol baseline and emit a candidate when it is > 30% slower.
signals_collect_performance() {
  local events_file="${EVENTS_FILE:-${HOME}/.shipwright/events.jsonl}"
  [[ ! -f "$events_file" ]] && return 0

  local baseline_file="${HOME}/.shipwright/patrol-perf-baseline.json"
  [[ ! -f "$baseline_file" ]] && return 0

  local recent_test_dur
  # Latest test-stage duration among the last 500 events ("null" if none).
  recent_test_dur=$(tail -500 "$events_file" | \
    jq -s '[.[] | select(.type == "stage.completed" and .stage == "test") | .duration_s] | if length > 0 then .[-1] else null end' \
    2>/dev/null || echo "null")
  [[ "$recent_test_dur" == "null" || -z "$recent_test_dur" ]] && return 0

  local baseline_dur
  baseline_dur=$(jq -r '.test_duration_s // 0' "$baseline_file" 2>/dev/null || echo "0")
  # NOTE(review): the comparisons and $(( )) below assume whole-second integer
  # durations; a float duration_s would make them error out — confirm the
  # event emitter always writes integers.
  [[ "$baseline_dur" -le 0 ]] && return 0

  # Regression threshold: 130% of the baseline (integer arithmetic).
  local threshold=$(( baseline_dur * 130 / 100 ))
  [[ "$recent_test_dur" -le "$threshold" ]] && return 0

  local pct_slower=$(( (recent_test_dur - baseline_dur) * 100 / baseline_dur ))

  local evidence
  evidence=$(jq -n --argjson base "$baseline_dur" --argjson cur "$recent_test_dur" --argjson pct "$pct_slower" \
    '{baseline_s:$base, current_s:$cur, regression_pct:$pct}')

  _build_candidate \
    "perf-test-regression" "performance" "performance_regression" \
    "Test suite performance regression (${pct_slower}% slower)" \
    "Test suite: ${baseline_dur}s -> ${recent_test_dur}s (${pct_slower}% regression)" \
    40 "0.85" "performance:test_suite" "$evidence"
}
294
+
295
# Turn recurring failure patterns recorded by sw-memory.sh into fix
# candidates. The memory library is sourced inside a subshell so none of its
# functions or variables leak into this process.
signals_collect_failures() {
  local memory_script="${SCRIPT_DIR:-}/sw-memory.sh"
  [[ ! -f "$memory_script" ]] && return 0

  local failures_json
  failures_json=$(
    (
      source "$memory_script" > /dev/null 2>&1 || true
      if command -v memory_get_actionable_failures >/dev/null 2>&1; then
        # NOTE(review): the argument 3 is presumably a minimum seen-count or
        # result limit — confirm against sw-memory.sh.
        memory_get_actionable_failures 3
      else
        echo "[]"
      fi
    )
  )

  local count
  count=$(echo "$failures_json" | jq 'length' 2>/dev/null || echo "0")
  [[ "${count:-0}" -eq 0 ]] && return 0

  while IFS= read -r failure; do
    local pattern stage seen_count
    pattern=$(echo "$failure" | jq -r '.pattern // "unknown"')
    stage=$(echo "$failure" | jq -r '.stage // "unknown"')
    seen_count=$(echo "$failure" | jq -r '.seen_count // 0')

    local short_pattern
    # Truncate the pattern for title use; the candidate id additionally
    # replaces spaces/slashes and truncates again to 30 chars.
    short_pattern=$(echo "$pattern" | cut -c1-60)

    _build_candidate \
      "fail-${stage}-$(echo "$short_pattern" | tr ' /' '-_' | cut -c1-30)" \
      "failures" "recurring_failure" \
      "Fix recurring: ${short_pattern}" \
      "Pattern in ${stage}: ${pattern} (seen ${seen_count}x)" \
      35 "0.80" "failure:${stage}:${short_pattern}"
  done < <(echo "$failures_json" | jq -c '.[]' 2>/dev/null)
}
332
+
333
# Detect week-over-week DORA regressions from the event log. Currently only
# change-failure-rate (CFR) is compared: the last 7 days vs the 7 days before.
signals_collect_dora() {
  local events_file="${EVENTS_FILE:-${HOME}/.shipwright/events.jsonl}"
  [[ ! -f "$events_file" ]] && return 0

  local now_e
  now_e=$(now_epoch)
  # Window boundaries: 604800 s = 7 days, 1209600 s = 14 days.
  local current_start=$((now_e - 604800))
  local prev_start=$((now_e - 1209600))

  local current_events prev_events
  current_events=$(jq -s --argjson start "$current_start" \
    '[.[] | select(.ts_epoch >= $start)]' "$events_file" 2>/dev/null || echo "[]")
  prev_events=$(jq -s --argjson start "$prev_start" --argjson end "$current_start" \
    '[.[] | select(.ts_epoch >= $start and .ts_epoch < $end)]' "$events_file" 2>/dev/null || echo "[]")

  local prev_total curr_total
  prev_total=$(echo "$prev_events" | jq '[.[] | select(.type == "pipeline.completed")] | length' 2>/dev/null || echo "0")
  curr_total=$(echo "$current_events" | jq '[.[] | select(.type == "pipeline.completed")] | length' 2>/dev/null || echo "0")

  # Require at least 3 completed pipelines per window for a meaningful rate.
  [[ "${prev_total:-0}" -lt 3 || "${curr_total:-0}" -lt 3 ]] && return 0

  # Compare CFR
  local prev_failures curr_failures
  prev_failures=$(echo "$prev_events" | jq '[.[] | select(.type == "pipeline.completed" and .result == "failure")] | length' 2>/dev/null || echo "0")
  curr_failures=$(echo "$current_events" | jq '[.[] | select(.type == "pipeline.completed" and .result == "failure")] | length' 2>/dev/null || echo "0")

  local prev_cfr=0 curr_cfr=0
  # Integer percentages; awk handles the division and rounds via %.0f.
  [[ "$prev_total" -gt 0 ]] && prev_cfr=$(echo "$prev_failures $prev_total" | awk '{printf "%.0f", ($1 / $2) * 100}')
  [[ "$curr_total" -gt 0 ]] && curr_cfr=$(echo "$curr_failures $curr_total" | awk '{printf "%.0f", ($1 / $2) * 100}')

  # Flag if CFR increased by > 5 percentage points
  local cfr_diff=$((curr_cfr - prev_cfr))
  if [[ "$cfr_diff" -gt 5 ]]; then
    local evidence
    evidence=$(jq -n --argjson prev "$prev_cfr" --argjson curr "$curr_cfr" --argjson diff "$cfr_diff" \
      '{prev_cfr_pct:$prev, curr_cfr_pct:$curr, increase_pct:$diff}')

    _build_candidate \
      "dora-cfr-regression" "dora" "dora_regression" \
      "DORA regression: CFR increased ${cfr_diff}pp" \
      "Change failure rate: ${prev_cfr}% -> ${curr_cfr}% (7-day window)" \
      45 "0.80" "dora:cfr_regression" "$evidence"
  fi
}
377
+
378
# Surface architecture-drift violations reported by sw-architecture-enforcer.
# Requires both the enforcer script and a learned architecture model on disk.
signals_collect_architecture() {
  local enforcer="${SCRIPT_DIR:-}/sw-architecture-enforcer.sh"
  [[ -f "$enforcer" ]] || return 0

  local model="${HOME}/.shipwright/memory/architecture.json"
  [[ -f "$model" ]] || return 0

  # Run the enforcer; any failure degrades to a zero-violation report.
  local report n
  report=$(bash "$enforcer" check --json 2>/dev/null || echo '{"violations":0}')
  n=$(echo "$report" | jq '.violations // 0' 2>/dev/null || echo "0")
  [[ "${n:-0}" -eq 0 ]] && return 0

  _build_candidate \
    "arch-drift-${n}" "architecture" "architecture_drift" \
    "Architecture drift: ${n} violation(s)" \
    "Architecture enforcer found ${n} violation(s)" \
    50 "0.75" "architecture:drift"
}
397
+
398
# Propose refactors for files the intelligence cache marks as high-churn
# hotspots (churn_score above 80).
signals_collect_intelligence() {
  local cache=".claude/intelligence-cache.json"
  [[ -f "$cache" ]] || return 0

  # Count hotspots above the churn threshold in a single jq pass;
  # a malformed cache degrades to a count of 0.
  local n
  n=$(jq '[.hotspots // [] | .[] | select(.churn_score > 80)] | length' "$cache" 2>/dev/null || echo "0")
  [[ "${n:-0}" -eq 0 ]] && return 0

  _build_candidate \
    "intel-hotspots-${n}" "intelligence" "refactor_hotspot" \
    "Refactor ${n} high-churn hotspot(s)" \
    "Intelligence cache shows ${n} file(s) with churn score > 80" \
    40 "0.70" "intelligence:hotspots"
}
415
+
416
# Run user-supplied collector scripts from <repo>/scripts/signals, in sorted
# order. Each script prints zero or more candidate JSON objects, one per
# line; lines that are not valid JSON are dropped.
signals_collect_external() {
  local repo_root="${_REPO_DIR:-$(git rev-parse --show-toplevel 2>/dev/null || echo '.')}"
  local collectors_dir="${repo_root}/scripts/signals"
  [[ -d "$collectors_dir" ]] || return 0

  local script line
  while IFS= read -r script; do
    # Only executable scripts participate.
    [[ -x "$script" ]] || continue
    while IFS= read -r line; do
      # Forward only lines that parse as JSON.
      echo "$line" | jq empty 2>/dev/null && echo "$line"
    done < <(bash "$script" 2>/dev/null || true)
  done < <(find "$collectors_dir" -maxdepth 1 -name "*.sh" -type f 2>/dev/null | sort)
}
431
+
432
# ─── Pending signal file (for patrol integration) ────────────────────────────

# Dump externally-queued pending signals (JSONL) to stdout, if any exist.
signals_read_pending() {
  local pending="$SIGNALS_PENDING_FILE"
  [[ -f "$pending" ]] || return 0
  cat "$pending"
}
438
+
439
# Truncate the pending-signals file. Returns non-zero when the file does not
# exist (the [[ -f ]] && short-circuit is the function's exit status).
signals_clear_pending() {
  local pending="$SIGNALS_PENDING_FILE"
  [[ -f "$pending" ]] && : > "$pending"
}
442
+
443
# ─── Orchestrator ─────────────────────────────────────────────────────────────

# Run every collector (order preserved), gather their per-line JSON candidates,
# and emit a single JSON array. Falls back to "[]" if jq fails.
signals_collect_all() {
  _ensure_signals_dir

  local collector
  {
    for collector in \
      signals_collect_security \
      signals_collect_deps \
      signals_collect_coverage \
      signals_collect_docs \
      signals_collect_dead_code \
      signals_collect_performance \
      signals_collect_failures \
      signals_collect_dora \
      signals_collect_architecture \
      signals_collect_intelligence \
      signals_collect_external \
      signals_read_pending; do
      "$collector"
    done
  } | jq -s '.' 2>/dev/null || echo '[]'
}
@@ -0,0 +1,63 @@
1
+ # fleet-failover.sh — Re-queue work from offline fleet machines
2
+ # When a machine goes offline, release its claimed issues so they can be picked up again.
3
+ # Source from daemon-poll or sw-fleet. Works standalone with gh + jq.
4
+ [[ -n "${_FLEET_FAILOVER_LOADED:-}" ]] && return 0
5
+ _FLEET_FAILOVER_LOADED=1
6
+
7
# Release GitHub issue claims held by fleet machines marked offline, so the
# work can be picked up again. Reads ~/.shipwright/machine-health.json
# (shape: .[machine_name] = {status, checked_at}). Best-effort throughout:
# every external call may fail silently.
fleet_failover_check() {
  local health_file="$HOME/.shipwright/machine-health.json"
  [[ ! -f "$health_file" ]] && return 0

  # Respect the global GitHub kill-switch and require gh + jq.
  [[ "${NO_GITHUB:-false}" == "true" ]] && return 0
  command -v gh >/dev/null 2>&1 || return 0
  command -v jq >/dev/null 2>&1 || return 0

  # Find offline machines (health file: .[machine_name] = {status, checked_at})
  local offline_machines
  offline_machines=$(jq -r 'to_entries[] | select(.value.status == "offline") | .key' "$health_file" 2>/dev/null)
  [[ -z "$offline_machines" ]] && return 0

  while IFS= read -r machine; do
    [[ -z "$machine" ]] && continue

    # Find issues claimed by this offline machine via GitHub label
    # Resulting lines look like "owner/repo:123".
    local orphaned_issues
    orphaned_issues=$(gh search issues \
      "label:claimed:${machine}" \
      is:open \
      --json number,repository \
      --limit 100 2>/dev/null | jq -r '.[] | "\(.repository.nameWithOwner):\(.number)"' 2>/dev/null)
    [[ -z "$orphaned_issues" ]] && continue

    while IFS= read -r issue_key; do
      [[ -z "$issue_key" ]] && continue

      # Split "owner/repo:123" into number and repo; if the key contains no
      # colon, %:* leaves it unchanged, so treat the repo as unknown.
      local issue_num="${issue_key##*:}"
      local repo="${issue_key%:*}"
      [[ "$repo" == "$issue_key" ]] && repo=""

      # Log and emit
      # Both helpers are optional — call only when defined as shell
      # functions in the sourcing context.
      if [[ "$(type -t info 2>/dev/null)" == "function" ]]; then
        info "Failover: re-queuing issue #${issue_num} from offline machine ${machine}"
      fi
      if [[ "$(type -t emit_event 2>/dev/null)" == "function" ]]; then
        emit_event "fleet.failover" "{\"issue\":\"$issue_num\",\"from_machine\":\"$machine\"}"
      fi

      # Release the claim (remove label) — idempotent
      if [[ -n "$repo" ]]; then
        gh issue edit "$issue_num" --repo "$repo" --remove-label "claimed:${machine}" 2>/dev/null || true
      else
        gh issue edit "$issue_num" --remove-label "claimed:${machine}" 2>/dev/null || true
      fi

      # When running in daemon context: enqueue so we pick it up if we watch this repo
      # In org mode WATCH_MODE=org, enqueue uses owner/repo:num; in repo mode just num
      if [[ -f "${STATE_FILE:-$HOME/.shipwright/daemon-state.json}" ]] && type enqueue_issue >/dev/null 2>&1; then
        local queue_key="$issue_num"
        [[ -n "$repo" ]] && queue_key="${repo}:${issue_num}"
        enqueue_issue "$queue_key" 2>/dev/null || true
      fi
    done <<< "$orphaned_issues"
  done <<< "$offline_machines"
}
@@ -64,7 +64,7 @@ emit_event() {
64
64
  shift
65
65
 
66
66
  # Try SQLite first (via sw-db.sh's db_add_event)
67
- if type db_add_event &>/dev/null; then
67
+ if type db_add_event >/dev/null 2>&1; then
68
68
  db_add_event "$event_type" "$@" 2>/dev/null || true
69
69
  fi
70
70
 
@@ -76,7 +76,10 @@ emit_event() {
76
76
  if [[ "$val" =~ ^-?[0-9]+\.?[0-9]*$ ]]; then
77
77
  json_fields="${json_fields},\"${key}\":${val}"
78
78
  else
79
- val="${val//\"/\\\"}"
79
+ val="${val//\\/\\\\}" # escape backslashes first
80
+ val="${val//\"/\\\"}" # then quotes
81
+ val="${val//$'\n'/\\n}" # then newlines
82
+ val="${val//$'\t'/\\t}" # then tabs
80
83
  json_fields="${json_fields},\"${key}\":\"${val}\""
81
84
  fi
82
85
  done
@@ -85,11 +88,32 @@ emit_event() {
85
88
  # Use flock to prevent concurrent write corruption
86
89
  local _lock_file="${EVENTS_FILE}.lock"
87
90
  (
88
- if command -v flock &>/dev/null; then
91
+ if command -v flock >/dev/null 2>&1; then
89
92
  flock -w 2 200 2>/dev/null || true
90
93
  fi
91
94
  echo "$_event_line" >> "$EVENTS_FILE"
92
95
  ) 200>"$_lock_file"
96
+
97
+ # Schema validation — auto-detect config repo from BASH_SOURCE location
98
+ local _schema_dir="${_CONFIG_REPO_DIR:-}"
99
+ if [[ -z "$_schema_dir" ]]; then
100
+ local _helpers_dir
101
+ _helpers_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)" || true
102
+ if [[ -n "$_helpers_dir" && -f "${_helpers_dir}/../../config/event-schema.json" ]]; then
103
+ _schema_dir="$(cd "${_helpers_dir}/../.." && pwd)"
104
+ fi
105
+ fi
106
+ if [[ -n "$_schema_dir" && -f "${_schema_dir}/config/event-schema.json" ]]; then
107
+ local known_types
108
+ known_types=$(jq -r '.event_types | keys[]' "${_schema_dir}/config/event-schema.json" 2>/dev/null || true)
109
+ if [[ -n "$known_types" ]] && ! echo "$known_types" | grep -qx "$event_type"; then
110
+ # Warn-only: never reject events, just log to stderr on first unknown type per session
111
+ if [[ -z "${_SW_SCHEMA_WARNED:-}" ]]; then
112
+ echo "WARN: Unknown event type '$event_type' — update config/event-schema.json" >&2
113
+ _SW_SCHEMA_WARNED=1
114
+ fi
115
+ fi
116
+ fi
93
117
  }
94
118
 
95
119
  # Rotate a JSONL file to keep it within max_lines.
@@ -103,9 +127,7 @@ with_retry() {
103
127
  local attempt=1
104
128
  local delay=1
105
129
  while [[ "$attempt" -le "$max_attempts" ]]; do
106
- if "$@"; then
107
- return 0
108
- fi
130
+ "$@" && return 0
109
131
  local exit_code=$?
110
132
  if [[ "$attempt" -lt "$max_attempts" ]]; then
111
133
  warn "Attempt $attempt/$max_attempts failed (exit $exit_code), retrying in ${delay}s..."
@@ -187,3 +209,4 @@ _sw_github_url() {
187
209
  repo="$(_sw_github_repo)"
188
210
  echo "https://github.com/${repo}"
189
211
  }
212
+
@@ -101,7 +101,7 @@ detect_project_lang() {
101
101
  fi
102
102
 
103
103
  # Intelligence: holistic analysis for polyglot/monorepo detection
104
- if [[ "$detected" == "unknown" ]] && type intelligence_search_memory &>/dev/null 2>&1 && command -v claude &>/dev/null; then
104
+ if [[ "$detected" == "unknown" ]] && type intelligence_search_memory >/dev/null 2>&1 && command -v claude >/dev/null 2>&1; then
105
105
  local config_files
106
106
  config_files=$(ls "$root" 2>/dev/null | grep -E '\.(json|toml|yaml|yml|xml|gradle|lock|mod)$' | head -15)
107
107
  if [[ -n "$config_files" ]]; then
@@ -221,7 +221,7 @@ detect_task_type() {
221
221
  local goal="$1"
222
222
 
223
223
  # Intelligence: Claude classification with confidence score
224
- if type intelligence_search_memory &>/dev/null 2>&1 && command -v claude &>/dev/null; then
224
+ if type intelligence_search_memory >/dev/null 2>&1 && command -v claude >/dev/null 2>&1; then
225
225
  local ai_result
226
226
  ai_result=$(claude --print --output-format text -p "Classify this task into exactly ONE category. Reply in format: CATEGORY|CONFIDENCE (0-100)
227
227