shipwright-cli 2.2.1 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115):
  1. package/README.md +12 -13
  2. package/docs/AGI-PLATFORM-PLAN.md +5 -5
  3. package/docs/AGI-WHATS-NEXT.md +11 -8
  4. package/docs/README.md +2 -0
  5. package/package.json +1 -1
  6. package/scripts/check-version-consistency.sh +72 -0
  7. package/scripts/lib/daemon-adaptive.sh +610 -0
  8. package/scripts/lib/daemon-dispatch.sh +489 -0
  9. package/scripts/lib/daemon-failure.sh +387 -0
  10. package/scripts/lib/daemon-patrol.sh +1113 -0
  11. package/scripts/lib/daemon-poll.sh +1202 -0
  12. package/scripts/lib/daemon-state.sh +550 -0
  13. package/scripts/lib/daemon-triage.sh +490 -0
  14. package/scripts/lib/helpers.sh +51 -0
  15. package/scripts/lib/pipeline-intelligence.sh +0 -6
  16. package/scripts/lib/pipeline-quality-checks.sh +2 -0
  17. package/scripts/lib/pipeline-stages.sh +20 -0
  18. package/scripts/sw +26 -4
  19. package/scripts/sw-activity.sh +1 -1
  20. package/scripts/sw-adaptive.sh +2 -2
  21. package/scripts/sw-adversarial.sh +1 -1
  22. package/scripts/sw-architecture-enforcer.sh +1 -1
  23. package/scripts/sw-auth.sh +1 -1
  24. package/scripts/sw-autonomous.sh +1 -1
  25. package/scripts/sw-changelog.sh +1 -1
  26. package/scripts/sw-checkpoint.sh +1 -1
  27. package/scripts/sw-ci.sh +1 -1
  28. package/scripts/sw-cleanup.sh +1 -1
  29. package/scripts/sw-code-review.sh +1 -1
  30. package/scripts/sw-connect.sh +1 -1
  31. package/scripts/sw-context.sh +1 -1
  32. package/scripts/sw-cost.sh +1 -1
  33. package/scripts/sw-daemon.sh +52 -4816
  34. package/scripts/sw-dashboard.sh +1 -1
  35. package/scripts/sw-db.sh +1 -1
  36. package/scripts/sw-decompose.sh +1 -1
  37. package/scripts/sw-deps.sh +1 -1
  38. package/scripts/sw-developer-simulation.sh +1 -1
  39. package/scripts/sw-discovery.sh +1 -1
  40. package/scripts/sw-doc-fleet.sh +1 -1
  41. package/scripts/sw-docs-agent.sh +1 -1
  42. package/scripts/sw-docs.sh +1 -1
  43. package/scripts/sw-doctor.sh +42 -1
  44. package/scripts/sw-dora.sh +1 -1
  45. package/scripts/sw-durable.sh +1 -1
  46. package/scripts/sw-e2e-orchestrator.sh +1 -1
  47. package/scripts/sw-eventbus.sh +1 -1
  48. package/scripts/sw-feedback.sh +1 -1
  49. package/scripts/sw-fix.sh +1 -1
  50. package/scripts/sw-fleet-discover.sh +1 -1
  51. package/scripts/sw-fleet-viz.sh +3 -3
  52. package/scripts/sw-fleet.sh +1 -1
  53. package/scripts/sw-github-app.sh +1 -1
  54. package/scripts/sw-github-checks.sh +1 -1
  55. package/scripts/sw-github-deploy.sh +1 -1
  56. package/scripts/sw-github-graphql.sh +1 -1
  57. package/scripts/sw-guild.sh +1 -1
  58. package/scripts/sw-heartbeat.sh +1 -1
  59. package/scripts/sw-hygiene.sh +1 -1
  60. package/scripts/sw-incident.sh +1 -1
  61. package/scripts/sw-init.sh +1 -1
  62. package/scripts/sw-instrument.sh +1 -1
  63. package/scripts/sw-intelligence.sh +1 -1
  64. package/scripts/sw-jira.sh +1 -1
  65. package/scripts/sw-launchd.sh +1 -1
  66. package/scripts/sw-linear.sh +1 -1
  67. package/scripts/sw-logs.sh +1 -1
  68. package/scripts/sw-loop.sh +1 -1
  69. package/scripts/sw-memory.sh +1 -1
  70. package/scripts/sw-mission-control.sh +1 -1
  71. package/scripts/sw-model-router.sh +1 -1
  72. package/scripts/sw-otel.sh +4 -4
  73. package/scripts/sw-oversight.sh +1 -1
  74. package/scripts/sw-pipeline-composer.sh +1 -1
  75. package/scripts/sw-pipeline-vitals.sh +1 -1
  76. package/scripts/sw-pipeline.sh +16 -56
  77. package/scripts/sw-pipeline.sh.mock +7 -0
  78. package/scripts/sw-pm.sh +1 -1
  79. package/scripts/sw-pr-lifecycle.sh +1 -1
  80. package/scripts/sw-predictive.sh +1 -1
  81. package/scripts/sw-prep.sh +1 -1
  82. package/scripts/sw-ps.sh +1 -1
  83. package/scripts/sw-public-dashboard.sh +1 -1
  84. package/scripts/sw-quality.sh +1 -1
  85. package/scripts/sw-reaper.sh +1 -1
  86. package/scripts/sw-recruit.sh +9 -1
  87. package/scripts/sw-regression.sh +1 -1
  88. package/scripts/sw-release-manager.sh +1 -1
  89. package/scripts/sw-release.sh +1 -1
  90. package/scripts/sw-remote.sh +1 -1
  91. package/scripts/sw-replay.sh +1 -1
  92. package/scripts/sw-retro.sh +1 -1
  93. package/scripts/sw-scale.sh +8 -5
  94. package/scripts/sw-security-audit.sh +1 -1
  95. package/scripts/sw-self-optimize.sh +158 -7
  96. package/scripts/sw-session.sh +1 -1
  97. package/scripts/sw-setup.sh +1 -1
  98. package/scripts/sw-standup.sh +3 -3
  99. package/scripts/sw-status.sh +1 -1
  100. package/scripts/sw-strategic.sh +1 -1
  101. package/scripts/sw-stream.sh +8 -2
  102. package/scripts/sw-swarm.sh +7 -10
  103. package/scripts/sw-team-stages.sh +1 -1
  104. package/scripts/sw-templates.sh +1 -1
  105. package/scripts/sw-testgen.sh +1 -1
  106. package/scripts/sw-tmux-pipeline.sh +1 -1
  107. package/scripts/sw-tmux.sh +1 -1
  108. package/scripts/sw-trace.sh +1 -1
  109. package/scripts/sw-tracker.sh +24 -6
  110. package/scripts/sw-triage.sh +1 -1
  111. package/scripts/sw-upgrade.sh +1 -1
  112. package/scripts/sw-ux.sh +1 -1
  113. package/scripts/sw-webhook.sh +1 -1
  114. package/scripts/sw-widgets.sh +1 -1
  115. package/scripts/sw-worktree.sh +1 -1
@@ -0,0 +1,490 @@
1
+ # daemon-triage.sh — Triage scoring and template selection (for sw-daemon.sh)
2
+ # Source from sw-daemon.sh. Requires state, helpers.
3
+ [[ -n "${_DAEMON_TRIAGE_LOADED:-}" ]] && return 0
4
+ _DAEMON_TRIAGE_LOADED=1
5
+
6
# Extract dependency issue numbers from issue text.
#
# Scans free-form text for the phrases "depends on #N", "blocked by #N",
# or "after #N" and prints the unique "#N" tokens, one per line.
#
# Arguments:
#   $1 - issue title/body text to scan
# Outputs: sorted, deduplicated "#<number>" refs on stdout (possibly empty)
# Returns: always 0 ("|| true" keeps a no-match grep from tripping
#          errexit/pipefail in callers)
extract_issue_dependencies() {
  local text="$1"

  # printf instead of echo: echo would swallow text that begins with
  # "-n"/"-e"/"-E" as its own options.
  printf '%s\n' "$text" \
    | grep -oE '(depends on|blocked by|after) #[0-9]+' \
    | grep -oE '#[0-9]+' \
    | sort -u || true
}
12
+
13
# Score an issue from 0-100 based on multiple signals for intelligent prioritization.
# Combines priority labels, age, complexity, dependencies, type, and memory signals.
# When intelligence engine is enabled, uses semantic AI analysis for richer scoring.
#
# Arguments:
#   $1 - issue JSON (gh shape: .number, .title, .body, .labels[].name, .createdAt)
# Globals read: INTELLIGENCE_ENABLED, NO_GITHUB, SCRIPT_DIR
# Globals exported (AI path only): INTELLIGENCE_ANALYSIS, INTELLIGENCE_COMPLEXITY
# Outputs: integer score 0-100 on stdout; daemon_log lines are forced to stderr
# Side effects: emits an "intelligence.triage" or "daemon.triage" event
triage_score_issue() {
  local issue_json="$1"
  local issue_num issue_title issue_body labels_csv created_at
  issue_num=$(echo "$issue_json" | jq -r '.number')
  issue_title=$(echo "$issue_json" | jq -r '.title // ""')
  issue_body=$(echo "$issue_json" | jq -r '.body // ""')

  # ── Intelligence-powered triage (if enabled) ──
  if [[ "${INTELLIGENCE_ENABLED:-false}" == "true" ]] && type intelligence_analyze_issue &>/dev/null 2>&1; then
    daemon_log INFO "Intelligence: using AI triage (intelligence enabled)" >&2
    local analysis
    analysis=$(intelligence_analyze_issue "$issue_json" 2>/dev/null || echo "")
    if [[ -n "$analysis" && "$analysis" != "{}" && "$analysis" != "null" ]]; then
      # Extract complexity (1-10) and convert to score (0-100)
      local ai_complexity ai_risk ai_success_prob
      ai_complexity=$(echo "$analysis" | jq -r '.complexity // 0' 2>/dev/null || echo "0")
      ai_risk=$(echo "$analysis" | jq -r '.risk_level // "medium"' 2>/dev/null || echo "medium")
      ai_success_prob=$(echo "$analysis" | jq -r '.success_probability // 50' 2>/dev/null || echo "50")

      # Store analysis for downstream use (composer, predictions)
      export INTELLIGENCE_ANALYSIS="$analysis"
      export INTELLIGENCE_COMPLEXITY="$ai_complexity"

      # Convert AI analysis to triage score:
      # Higher success probability + lower complexity = higher score (process sooner)
      local ai_score
      ai_score=$(( ai_success_prob - (ai_complexity * 3) ))
      # Risk adjustment
      case "$ai_risk" in
        critical) ai_score=$((ai_score + 15)) ;; # Critical = process urgently
        high) ai_score=$((ai_score + 10)) ;;
        low) ai_score=$((ai_score - 5)) ;;
      esac
      # Clamp
      [[ "$ai_score" -lt 0 ]] && ai_score=0
      [[ "$ai_score" -gt 100 ]] && ai_score=100

      emit_event "intelligence.triage" \
        "issue=$issue_num" \
        "complexity=$ai_complexity" \
        "risk=$ai_risk" \
        "success_prob=$ai_success_prob" \
        "score=$ai_score"

      echo "$ai_score"
      return
    fi
    # Fall through to heuristic scoring if intelligence call failed
    daemon_log INFO "Intelligence: AI triage failed, falling back to heuristic scoring" >&2
  else
    daemon_log INFO "Intelligence: using heuristic triage (intelligence disabled, enable with intelligence.enabled=true)" >&2
  fi
  labels_csv=$(echo "$issue_json" | jq -r '[.labels[].name] | join(",")')
  created_at=$(echo "$issue_json" | jq -r '.createdAt // ""')

  local score=0

  # ── 1. Priority labels (0-30 points) ──
  # high/low are anchored to whole CSV fields to avoid matching substrings
  # of longer label names; urgent/normal/p0-p3 match anywhere.
  local priority_score=0
  if echo "$labels_csv" | grep -qiE "urgent|p0"; then
    priority_score=30
  elif echo "$labels_csv" | grep -qiE "^high$|^high,|,high,|,high$|p1"; then
    priority_score=20
  elif echo "$labels_csv" | grep -qiE "normal|p2"; then
    priority_score=10
  elif echo "$labels_csv" | grep -qiE "^low$|^low,|,low,|,low$|p3"; then
    priority_score=5
  fi

  # ── 2. Issue age (0-15 points) — older issues boosted to prevent starvation ──
  local age_score=0
  if [[ -n "$created_at" ]]; then
    local created_epoch now_e age_secs
    # BSD/macOS "date -j -f" first, GNU "date -d" fallback, else 0.
    created_epoch=$(TZ=UTC date -j -f "%Y-%m-%dT%H:%M:%SZ" "$created_at" +%s 2>/dev/null || \
      date -d "$created_at" +%s 2>/dev/null || echo "0")
    now_e=$(now_epoch)
    if [[ "$created_epoch" -gt 0 ]]; then
      age_secs=$((now_e - created_epoch))
      if [[ "$age_secs" -gt 604800 ]]; then # > 7 days
        age_score=15
      elif [[ "$age_secs" -gt 259200 ]]; then # > 3 days
        age_score=10
      elif [[ "$age_secs" -gt 86400 ]]; then # > 1 day
        age_score=5
      fi
    fi
  fi

  # ── 3. Complexity estimate (0-20 points, INVERTED — simpler = higher) ──
  local complexity_score=0
  local body_len=${#issue_body}
  local file_refs
  # NOTE(review): with grep, -c counts matching LINES, so -o has no effect
  # here — several file refs on one line count once. Confirm intended.
  file_refs=$(echo "$issue_body" | grep -coE '[a-zA-Z0-9_/-]+\.(ts|js|py|go|rs|sh|json|yaml|yml|md)' || true)
  file_refs=${file_refs:-0}

  if [[ "$body_len" -lt 200 ]] && [[ "$file_refs" -lt 3 ]]; then
    complexity_score=20 # Short + few files = likely simple
  elif [[ "$body_len" -lt 1000 ]]; then
    complexity_score=10 # Medium
  elif [[ "$file_refs" -lt 5 ]]; then
    complexity_score=5 # Long but not many files
  fi
  # Long + many files = complex = 0 points (lower throughput)

  # ── 4. Dependencies (0-15 points / -15 for blocked) ──
  local dep_score=0
  local combined_text="${issue_title} ${issue_body}"

  # Check if this issue is blocked
  local blocked_refs
  blocked_refs=$(echo "$combined_text" | grep -oE '(blocked by|depends on) #[0-9]+' | grep -oE '#[0-9]+' || true)
  # NOTE(review): bare "$NO_GITHUB" here (and below) has no :-default unlike
  # other checks that use ${NO_GITHUB:-false}; under set -u this errors when
  # the variable is unset — confirm it is always set by the caller.
  if [[ -n "$blocked_refs" ]] && [[ "$NO_GITHUB" != "true" ]]; then
    local all_closed=true
    while IFS= read -r ref; do
      local ref_num="${ref#\#}"
      local ref_state
      ref_state=$(gh issue view "$ref_num" --json state -q '.state' 2>/dev/null || echo "UNKNOWN")
      # Any blocker not CLOSED (including UNKNOWN on gh failure) marks us blocked.
      if [[ "$ref_state" != "CLOSED" ]]; then
        all_closed=false
        break
      fi
    done <<< "$blocked_refs"
    if [[ "$all_closed" == "false" ]]; then
      dep_score=-15
    fi
  fi

  # Check if this issue blocks others (search issue references)
  if [[ "$NO_GITHUB" != "true" ]]; then
    local mentions
    # Counts cross-referencing issues whose body says they are blocked by /
    # depend on this issue number (case-insensitive jq test).
    mentions=$(gh api "repos/{owner}/{repo}/issues/${issue_num}/timeline" --paginate -q '
      [.[] | select(.event == "cross-referenced") | .source.issue.body // ""] |
      map(select(test("blocked by #'"${issue_num}"'|depends on #'"${issue_num}"'"; "i"))) | length
    ' 2>/dev/null || echo "0")
    mentions=${mentions:-0}
    if [[ "$mentions" -gt 0 ]]; then
      dep_score=15
    fi
  fi

  # ── 5. Type bonus (0-10 points) ──
  local type_score=0
  if echo "$labels_csv" | grep -qiE "security"; then
    type_score=10
  elif echo "$labels_csv" | grep -qiE "bug"; then
    type_score=10
  elif echo "$labels_csv" | grep -qiE "feature|enhancement"; then
    type_score=5
  fi

  # ── 6. Memory bonus (0-10 points / -5 for prior failures) ──
  local memory_score=0
  if [[ -x "$SCRIPT_DIR/sw-memory.sh" ]]; then
    local memory_result
    memory_result=$("$SCRIPT_DIR/sw-memory.sh" search --issue "$issue_num" --json 2>/dev/null || true)
    if [[ -n "$memory_result" ]]; then
      local prior_result
      prior_result=$(echo "$memory_result" | jq -r '.last_result // ""' 2>/dev/null || true)
      if [[ "$prior_result" == "success" ]]; then
        memory_score=10
      elif [[ "$prior_result" == "failure" ]]; then
        memory_score=-5
      fi
    fi
  fi

  # ── Total ──
  score=$((priority_score + age_score + complexity_score + dep_score + type_score + memory_score))
  # Clamp to 0-100
  [[ "$score" -lt 0 ]] && score=0
  [[ "$score" -gt 100 ]] && score=100

  emit_event "daemon.triage" \
    "issue=$issue_num" \
    "score=$score" \
    "priority=$priority_score" \
    "age=$age_score" \
    "complexity=$complexity_score" \
    "dependency=$dep_score" \
    "type=$type_score" \
    "memory=$memory_score"

  echo "$score"
}
200
+
201
# Auto-select pipeline template based on issue labels
# When intelligence/composer is enabled, composes a custom pipeline instead of static selection.
#
# Arguments:
#   $1 - comma-separated label string for the issue
#   $2 - triage score 0-100 (defaults to 50)
# Outputs: template name on stdout — "composed", "enterprise", "fast",
#          "cost-aware", "hotfix", a template_map value, a learned-weights
#          key, "standard", "full", or $PIPELINE_TEMPLATE when auto_template
#          is off. First matching stage wins (each returns immediately).
# Decision order: auto_template gate → AI composer → DORA metrics →
#   branch protection → label overrides → config template_map →
#   quality memory → learned weights → score thresholds.
select_pipeline_template() {
  local labels="$1"
  local score="${2:-50}"
  # NOTE(review): _selected_template is never referenced again in this
  # function — looks like a dead variable.
  local _selected_template=""

  # When auto_template is disabled, use default pipeline template
  if [[ "${AUTO_TEMPLATE:-false}" != "true" ]]; then
    echo "$PIPELINE_TEMPLATE"
    return
  fi

  # ── Intelligence-composed pipeline (if enabled) ──
  if [[ "${COMPOSER_ENABLED:-false}" == "true" ]] && type composer_create_pipeline &>/dev/null 2>&1; then
    daemon_log INFO "Intelligence: using AI pipeline composition (composer enabled)" >&2
    # INTELLIGENCE_ANALYSIS is exported by triage_score_issue on the AI path.
    local analysis="${INTELLIGENCE_ANALYSIS:-{}}"
    local repo_context=""
    if [[ -f "${REPO_DIR:-}/.claude/pipeline-state.md" ]]; then
      repo_context="has_pipeline_state"
    fi
    local budget_json="{}"
    if [[ -x "$SCRIPT_DIR/sw-cost.sh" ]]; then
      local remaining
      remaining=$(bash "$SCRIPT_DIR/sw-cost.sh" remaining-budget 2>/dev/null || echo "")
      if [[ -n "$remaining" ]]; then
        budget_json="{\"remaining_usd\": $remaining}"
      fi
    fi
    local composed_path
    composed_path=$(composer_create_pipeline "$analysis" "$repo_context" "$budget_json" 2>/dev/null || echo "")
    if [[ -n "$composed_path" && -f "$composed_path" ]]; then
      emit_event "daemon.composed_pipeline" "labels=$labels" "score=$score"
      echo "composed"
      return
    fi
    # Fall through to static selection if composition failed
    daemon_log INFO "Intelligence: AI pipeline composition failed, falling back to static template selection" >&2
  else
    daemon_log INFO "Intelligence: using static template selection (composer disabled, enable with intelligence.composer_enabled=true)" >&2
  fi

  # ── DORA-driven template escalation ──
  # Looks at the last 5 "pipeline.completed" events within the tail of the
  # events log; needs at least 3 samples before making any DORA decision.
  if [[ -f "${EVENTS_FILE:-$HOME/.shipwright/events.jsonl}" ]]; then
    local _dora_events _dora_total _dora_failures _dora_cfr
    _dora_events=$(tail -500 "${EVENTS_FILE:-$HOME/.shipwright/events.jsonl}" \
      | grep '"type":"pipeline.completed"' 2>/dev/null \
      | tail -5 || true)
    _dora_total=$(echo "$_dora_events" | grep -c '.' 2>/dev/null || echo "0")
    _dora_total="${_dora_total:-0}"
    if [[ "$_dora_total" -ge 3 ]]; then
      _dora_failures=$(echo "$_dora_events" | grep -c '"result":"failure"' 2>/dev/null || true)
      _dora_failures="${_dora_failures:-0}"
      # Change-failure rate as an integer percentage.
      _dora_cfr=$(( _dora_failures * 100 / _dora_total ))
      if [[ "$_dora_cfr" -gt 40 ]]; then
        daemon_log INFO "DORA escalation: CFR ${_dora_cfr}% > 40% — forcing enterprise template" >&2
        emit_event "daemon.dora_escalation" \
          "cfr=$_dora_cfr" \
          "total=$_dora_total" \
          "failures=$_dora_failures" \
          "template=enterprise"
        echo "enterprise"
        return
      fi
      if [[ "$_dora_cfr" -lt 10 && "$score" -ge 60 ]]; then
        daemon_log INFO "DORA: CFR ${_dora_cfr}% < 10% — fast template eligible" >&2
        # Fall through to allow other factors to also vote for fast
      fi

      # ── DORA multi-factor ──
      # Cycle time: if median > 120min, prefer faster templates
      local _dora_cycle_time=0
      # awk computes the median (in minutes) of the sorted duration_s values.
      _dora_cycle_time=$(echo "$_dora_events" | jq -r 'select(.duration_s) | .duration_s' 2>/dev/null \
        | sort -n | awk '{ a[NR]=$1 } END { if (NR>0) print int(a[int(NR/2)+1]/60); else print 0 }' 2>/dev/null) || _dora_cycle_time=0
      _dora_cycle_time="${_dora_cycle_time:-0}"
      if [[ "${_dora_cycle_time:-0}" -gt 120 ]]; then
        daemon_log INFO "DORA: cycle time ${_dora_cycle_time}min > 120 — preferring fast template" >&2
        if [[ "${score:-0}" -ge 60 ]]; then
          echo "fast"
          return
        fi
      fi

      # Deploy frequency: if < 1/week, use cost-aware
      local _dora_deploy_freq=0
      local _dora_first_epoch _dora_last_epoch _dora_span_days
      # NOTE(review): "date -j -f" is BSD/macOS-only with no GNU fallback
      # here (unlike the age parsing in triage) — on Linux these stay 0 and
      # the deploy-frequency branch is skipped. Confirm intended.
      _dora_first_epoch=$(echo "$_dora_events" | head -1 | jq -r '.timestamp // empty' 2>/dev/null | xargs -I{} date -j -f "%Y-%m-%dT%H:%M:%SZ" {} +%s 2>/dev/null || echo "0")
      _dora_last_epoch=$(echo "$_dora_events" | tail -1 | jq -r '.timestamp // empty' 2>/dev/null | xargs -I{} date -j -f "%Y-%m-%dT%H:%M:%SZ" {} +%s 2>/dev/null || echo "0")
      if [[ "${_dora_first_epoch:-0}" -gt 0 && "${_dora_last_epoch:-0}" -gt 0 ]]; then
        _dora_span_days=$(( (_dora_last_epoch - _dora_first_epoch) / 86400 ))
        if [[ "${_dora_span_days:-0}" -gt 0 ]]; then
          _dora_deploy_freq=$(awk -v t="$_dora_total" -v d="$_dora_span_days" 'BEGIN { printf "%.1f", t * 7 / d }' 2>/dev/null) || _dora_deploy_freq=0
        fi
      fi
      # awk exit-status trick: succeeds only when 0 < freq < 1 per week.
      if [[ -n "${_dora_deploy_freq:-}" ]] && awk -v f="${_dora_deploy_freq:-0}" 'BEGIN{exit !(f > 0 && f < 1)}' 2>/dev/null; then
        daemon_log INFO "DORA: deploy freq ${_dora_deploy_freq}/week — using cost-aware" >&2
        echo "cost-aware"
        return
      fi
    fi
  fi

  # ── Branch protection escalation (highest priority) ──
  if type gh_branch_protection &>/dev/null 2>&1 && [[ "${NO_GITHUB:-false}" != "true" ]]; then
    if type _gh_detect_repo &>/dev/null 2>&1; then
      _gh_detect_repo 2>/dev/null || true
    fi
    local gh_owner="${GH_OWNER:-}" gh_repo="${GH_REPO:-}"
    if [[ -n "$gh_owner" && -n "$gh_repo" ]]; then
      local protection
      protection=$(gh_branch_protection "$gh_owner" "$gh_repo" "${BASE_BRANCH:-main}" 2>/dev/null || echo '{"protected": false}')
      local strict_protection
      strict_protection=$(echo "$protection" | jq -r '.enforce_admins.enabled // false' 2>/dev/null || echo "false")
      local required_reviews
      required_reviews=$(echo "$protection" | jq -r '.required_pull_request_reviews.required_approving_review_count // 0' 2>/dev/null || echo "0")
      if [[ "$strict_protection" == "true" ]] || [[ "${required_reviews:-0}" -gt 1 ]]; then
        daemon_log INFO "Branch has strict protection — escalating to enterprise template" >&2
        echo "enterprise"
        return
      fi
    fi
  fi

  # ── Label-based overrides ──
  if echo "$labels" | grep -qi "hotfix\|incident"; then
    echo "hotfix"
    return
  fi
  if echo "$labels" | grep -qi "security"; then
    echo "enterprise"
    return
  fi

  # ── Config-driven template_map overrides ──
  # Default is the JSON *string* "{}" (quotes included) so the jq unwrap
  # below decodes it to {} and the whole block is skipped.
  local map="${TEMPLATE_MAP:-\"{}\"}"
  # Unwrap double-encoded JSON if needed
  local decoded_map
  decoded_map=$(echo "$map" | jq -r 'if type == "string" then . else tostring end' 2>/dev/null || echo "{}")
  if [[ "$decoded_map" != "{}" ]]; then
    local matched
    # Map keys are case-insensitive regexes tested against the label string;
    # first match wins.
    matched=$(echo "$decoded_map" | jq -r --arg labels "$labels" '
      to_entries[] |
      select($labels | test(.key; "i")) |
      .value' 2>/dev/null | head -1)
    if [[ -n "$matched" ]]; then
      echo "$matched"
      return
    fi
  fi

  # ── Quality memory-driven selection ──
  local quality_scores_file="${HOME}/.shipwright/optimization/quality-scores.jsonl"
  if [[ -f "$quality_scores_file" ]]; then
    local repo_hash
    # Repo identity = first 16 hex chars of sha256 of the worktree root path.
    repo_hash=$(cd "${REPO_DIR:-.}" && git rev-parse --show-toplevel 2>/dev/null | shasum -a 256 | cut -c1-16 || echo "unknown")
    # Get last 5 quality scores for this repo
    local recent_scores avg_quality has_critical
    recent_scores=$(grep "\"repo\":\"$repo_hash\"" "$quality_scores_file" 2>/dev/null | tail -5 || true)
    if [[ -n "$recent_scores" ]]; then
      avg_quality=$(echo "$recent_scores" | jq -r '.quality_score // 70' 2>/dev/null | awk '{ sum += $1; count++ } END { if (count > 0) printf "%.0f", sum/count; else print 70 }')
      has_critical=$(echo "$recent_scores" | jq -r '.findings.critical // 0' 2>/dev/null | awk '{ sum += $1 } END { print (sum > 0) ? "yes" : "no" }')

      # Critical findings in recent history → force enterprise
      if [[ "$has_critical" == "yes" ]]; then
        daemon_log INFO "Quality memory: critical findings in recent runs — using enterprise template" >&2
        echo "enterprise"
        return
      fi

      # Poor quality history → use full template
      if [[ "${avg_quality:-70}" -lt 60 ]]; then
        daemon_log INFO "Quality memory: avg score ${avg_quality}/100 in recent runs — using full template" >&2
        echo "full"
        return
      fi

      # Excellent quality history → allow faster template
      if [[ "${avg_quality:-70}" -gt 80 ]]; then
        daemon_log INFO "Quality memory: avg score ${avg_quality}/100 in recent runs — eligible for fast template" >&2
        # Only upgrade if score also suggests fast
        if [[ "$score" -ge 60 ]]; then
          echo "fast"
          return
        fi
      fi
    fi
  fi

  # ── Learned template weights ──
  local _tw_file="${HOME}/.shipwright/optimization/template-weights.json"
  if [[ -f "$_tw_file" ]]; then
    local _best_template _best_rate
    # Best = highest success_rate among templates with >= 3 samples.
    _best_template=$(jq -r '
      .weights // {} | to_entries
      | map(select(.value.sample_size >= 3))
      | sort_by(-.value.success_rate)
      | .[0].key // ""
    ' "$_tw_file" 2>/dev/null) || true
    if [[ -n "${_best_template:-}" && "${_best_template:-}" != "null" && "${_best_template:-}" != "" ]]; then
      # NOTE(review): the "|| _best_rate=0" fallback runs inside the $( )
      # subshell, so the assignment is lost — if jq fails, _best_rate is
      # empty here (only affects the log line). Confirm/fix upstream.
      _best_rate=$(jq -r --arg t "$_best_template" '.weights[$t].success_rate // 0' "$_tw_file" 2>/dev/null || _best_rate=0)
      daemon_log INFO "Template weights: ${_best_template} (${_best_rate} success rate)" >&2
      echo "$_best_template"
      return
    fi
  fi

  # ── Score-based selection ──
  if [[ "$score" -ge 70 ]]; then
    echo "fast"
  elif [[ "$score" -ge 40 ]]; then
    echo "standard"
  else
    echo "full"
  fi
}
416
+
417
# ─── Triage Display ──────────────────────────────────────────────────────────

# Render a sorted table of triage scores for every open issue carrying
# $WATCH_LABEL. Read-only: fetches via gh, then scores each issue locally
# with triage_score_issue / select_pipeline_template.
# Globals read: NO_GITHUB, WATCH_LABEL, PIPELINE_TEMPLATE, color vars.
# Exits 1 when GitHub access is disabled or the issue fetch fails.
daemon_triage_show() {
  if [[ "$NO_GITHUB" == "true" ]]; then
    error "Triage requires GitHub access (--no-github is set)"
    exit 1
  fi

  load_config

  echo -e "${PURPLE}${BOLD}━━━ Issue Triage Scores ━━━${RESET}"
  echo ""

  local issues_json
  issues_json=$(gh issue list \
    --label "$WATCH_LABEL" \
    --state open \
    --json number,title,labels,body,createdAt \
    --limit 50 2>/dev/null) || {
    error "Failed to fetch issues from GitHub"
    exit 1
  }

  local issue_count
  issue_count=$(echo "$issues_json" | jq 'length' 2>/dev/null || echo 0)

  if [[ "$issue_count" -eq 0 ]]; then
    echo -e " ${DIM}No open issues with label '${WATCH_LABEL}'${RESET}"
    return 0
  fi

  # Score each issue and collect results
  local scored_lines=()
  while IFS= read -r issue; do
    local num title labels_csv score template
    num=$(echo "$issue" | jq -r '.number')
    title=$(echo "$issue" | jq -r '.title // "—"')
    labels_csv=$(echo "$issue" | jq -r '[.labels[].name] | join(", ")')
    # Keep only the final stdout line, then strip to digits — the scorers
    # also write log lines, so this defends against stray output.
    score=$(triage_score_issue "$issue" 2>/dev/null | tail -1)
    score=$(printf '%s' "$score" | tr -cd '[:digit:]')
    [[ -z "$score" ]] && score=50
    template=$(select_pipeline_template "$labels_csv" "$score" 2>/dev/null | tail -1)
    # Strip ANSI escapes and any non-name characters from the template.
    template=$(printf '%s' "$template" | sed $'s/\x1b\\[[0-9;]*m//g' | tr -cd '[:alnum:]-_')
    [[ -z "$template" ]] && template="$PIPELINE_TEMPLATE"

    # NOTE(review): fields are joined with '|'; a title containing '|'
    # would shift columns in the sort/read below — confirm acceptable.
    scored_lines+=("${score}|${num}|${title}|${labels_csv}|${template}")
  done < <(echo "$issues_json" | jq -c '.[]')

  # Sort by score descending
  local sorted
  sorted=$(printf '%s\n' "${scored_lines[@]}" | sort -t'|' -k1 -rn)

  # Print header
  printf " ${BOLD}%-6s %-7s %-45s %-12s %s${RESET}\n" "Score" "Issue" "Title" "Template" "Labels"
  echo -e " ${DIM}$(printf '%.0s─' {1..90})${RESET}"

  while IFS='|' read -r score num title labels_csv template; do
    # Color score by tier
    local score_color="$RED"
    [[ "$score" -ge 20 ]] && score_color="$YELLOW"
    [[ "$score" -ge 40 ]] && score_color="$CYAN"
    [[ "$score" -ge 60 ]] && score_color="$GREEN"

    # Truncate title
    [[ ${#title} -gt 42 ]] && title="${title:0:39}..."

    printf " ${score_color}%-6s${RESET} ${CYAN}#%-6s${RESET} %-45s ${DIM}%-12s %s${RESET}\n" \
      "$score" "$num" "$title" "$template" "$labels_csv"
  done <<< "$sorted"

  echo ""
  echo -e " ${DIM}${issue_count} issue(s) scored | Higher score = higher processing priority${RESET}"
  echo ""
}
@@ -94,6 +94,57 @@ emit_event() {
94
94
 
95
95
  # Rotate a JSONL file to keep it within max_lines.
96
96
  # Usage: rotate_jsonl <file> <max_lines>
97
# ─── Retry Helper ─────────────────────────────────────────────────
# Runs a command up to <max_attempts> times, sleeping between failed
# attempts with exponential backoff (1s, 2s, 4s, ... capped at 30s).
# Usage: with_retry <max_attempts> <command> [args...]
# Returns: 0 on the first success; 1 after every attempt has failed.
with_retry() {
  local tries="${1:-3}"
  shift
  local i wait_s=1
  for (( i = 1; i <= tries; i++ )); do
    # Protected by the && list, so a failure here is safe under set -e.
    "$@" && return 0
    local rc=$?
    # Back off only when another attempt remains.
    if (( i < tries )); then
      warn "Attempt $i/$tries failed (exit $rc), retrying in ${wait_s}s..."
      sleep "$wait_s"
      wait_s=$(( wait_s * 2 ))
      if (( wait_s > 30 )); then
        wait_s=30
      fi
    fi
  done
  error "All $tries attempts failed"
  return 1
}
121
+
122
# ─── JSON Validation + Recovery ───────────────────────────────────
# Checks that <file> parses as JSON. A healthy file gets its backup
# copy refreshed at <file><backup_suffix>; a corrupt file is restored
# from that backup when the backup itself parses. A missing file is
# treated as valid (nothing to check).
# Usage: validate_json <file> [backup_suffix]   (suffix defaults to .bak)
# Returns: 0 when valid or recovered, 1 when corrupt with no usable backup.
validate_json() {
  local target="$1"
  local suffix="${2:-.bak}"
  local backup="${target}${suffix}"

  # No file — vacuously fine.
  if [[ ! -f "$target" ]]; then
    return 0
  fi

  if jq '.' "$target" >/dev/null 2>&1; then
    # Healthy: refresh the known-good backup (best-effort copy).
    cp "$target" "$backup" 2>/dev/null || true
    return 0
  fi

  warn "Corrupt JSON detected: $target"
  # Attempt recovery from the last known-good backup.
  if [[ -f "$backup" ]] && jq '.' "$backup" >/dev/null 2>&1; then
    cp "$backup" "$target"
    warn "Recovered from backup: $backup"
    return 0
  fi

  error "No valid backup for $target — manual intervention needed"
  return 1
}
147
+
97
148
  rotate_jsonl() {
98
149
  local file="$1"
99
150
  local max_lines="${2:-10000}"
@@ -1090,12 +1090,6 @@ ${route_instruction}"
1090
1090
  fi
1091
1091
  }
1092
1092
 
1093
- # ──────────────────────────────────────────────────────────────────────────────
1094
- # Bash 3.2 Compatibility Check
1095
-
1096
- echo "$untested_functions"
1097
- }
1098
-
1099
1093
  stage_compound_quality() {
1100
1094
  CURRENT_STAGE_ID="compound_quality"
1101
1095
 
@@ -1050,3 +1050,5 @@ run_new_function_test_check() {
1050
1050
  warn "New functions without tests: ${details}"
1051
1051
  fi
1052
1052
 
1053
+ echo "$untested_functions"
1054
+ }
@@ -3,6 +3,26 @@
3
3
  [[ -n "${_PIPELINE_STAGES_LOADED:-}" ]] && return 0
4
4
  _PIPELINE_STAGES_LOADED=1
5
5
 
6
# Print a short human-readable preview of what a pipeline stage will do.
# Unknown stage ids still get the banner and surrounding blank lines,
# just with no description line.
show_stage_preview() {
  local stage="$1"
  local blurb=""
  case "$stage" in
    intake)   blurb=" Fetch issue, detect task type, create branch, self-assign" ;;
    plan)     blurb=" Generate plan via Claude, post task checklist to issue" ;;
    design)   blurb=" Generate Architecture Decision Record (ADR), evaluate alternatives" ;;
    build)    blurb=" Delegate to ${CYAN}shipwright loop${RESET} for autonomous building" ;;
    test)     blurb=" Run test suite and check coverage" ;;
    review)   blurb=" AI code review on the diff, post findings" ;;
    pr)       blurb=" Create GitHub PR with labels, reviewers, milestone" ;;
    merge)    blurb=" Wait for CI checks, merge PR, optionally delete branch" ;;
    deploy)   blurb=" Deploy to staging/production with rollback" ;;
    validate) blurb=" Smoke tests, health checks, close issue" ;;
    monitor)  blurb=" Post-deploy monitoring, health checks, auto-rollback" ;;
  esac
  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Stage: ${stage} ━━━${RESET}"
  if [[ -n "$blurb" ]]; then
    echo -e "$blurb"
  fi
  echo ""
}
25
+
6
26
  stage_intake() {
7
27
  CURRENT_STAGE_ID="intake"
8
28
  local project_lang
package/scripts/sw CHANGED
@@ -5,7 +5,7 @@
5
5
  # ╚═══════════════════════════════════════════════════════════════════════════╝
6
6
  set -euo pipefail
7
7
 
8
- VERSION="2.2.1"
8
+ VERSION="2.2.2"
9
9
 
10
10
  # Resolve symlinks (required for npm global install where bin/ symlinks to node_modules/)
11
11
  SOURCE="${BASH_SOURCE[0]}"
@@ -173,7 +173,9 @@ show_help() {
173
173
  echo -e " ${CYAN}init${RESET} ${BOLD}Quick tmux setup${RESET} — one command, no prompts"
174
174
  echo -e " ${CYAN}setup${RESET} ${BOLD}Guided setup${RESET} — prerequisites, init, doctor, quick start"
175
175
  echo -e " ${CYAN}help${RESET} Show this help message"
176
- echo -e " ${CYAN}version${RESET} Show version"
176
+ echo -e " ${CYAN}version${RESET} [show] Show version"
177
+ echo -e " ${CYAN}version bump${RESET} <x.y.z> Bump version everywhere (scripts, README, package.json)"
178
+ echo -e " ${CYAN}version check${RESET} Verify version consistency (CI / before release)"
177
179
  echo -e " ${CYAN}hello${RESET} Say hello world"
178
180
  echo ""
179
181
  echo -e "${BOLD}CONTINUOUS LOOP${RESET} ${DIM}(autonomous agent operation)${RESET}"
@@ -303,7 +305,8 @@ route_release() {
303
305
  release-manager) exec "$SCRIPT_DIR/sw-release-manager.sh" "$@" ;;
304
306
  changelog) exec "$SCRIPT_DIR/sw-changelog.sh" "$@" ;;
305
307
  deploy|deploys) exec "$SCRIPT_DIR/sw-github-deploy.sh" "$@" ;;
306
- help|*) echo "Usage: shipwright release {release|release-manager|changelog|deploy}"; exit 1 ;;
308
+ build) exec "$SCRIPT_DIR/build-release.sh" "$@" ;;
309
+ help|*) echo "Usage: shipwright release {release|release-manager|changelog|deploy|build}"; exit 1 ;;
307
310
  esac
308
311
  }
309
312
 
@@ -645,7 +648,26 @@ main() {
645
648
  help|--help|-h)
646
649
  show_help
647
650
  ;;
648
- version|--version|-v)
651
+ version)
652
+ case "${1:-show}" in
653
+ bump)
654
+ shift
655
+ if [[ -z "${1:-}" ]]; then
656
+ error "Usage: shipwright version bump <x.y.z>"
657
+ echo -e " ${DIM}Example: shipwright version bump 2.3.0${RESET}"
658
+ exit 1
659
+ fi
660
+ exec "$SCRIPT_DIR/update-version.sh" "$@"
661
+ ;;
662
+ check)
663
+ exec "$SCRIPT_DIR/check-version-consistency.sh" "$@"
664
+ ;;
665
+ show|*)
666
+ show_version
667
+ ;;
668
+ esac
669
+ ;;
670
+ --version|-v)
649
671
  show_version
650
672
  ;;
651
673
  hello)
@@ -6,7 +6,7 @@
6
6
  set -euo pipefail
7
7
  trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
8
8
 
9
- VERSION="2.2.1"
9
+ VERSION="2.2.2"
10
10
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
11
11
 
12
12
  # ─── Cross-platform compatibility ──────────────────────────────────────────