shipwright-cli 1.10.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +221 -55
- package/completions/_shipwright +264 -32
- package/completions/shipwright.bash +118 -26
- package/completions/shipwright.fish +80 -2
- package/dashboard/server.ts +208 -0
- package/docs/strategy/01-market-research.md +619 -0
- package/docs/strategy/02-mission-and-brand.md +587 -0
- package/docs/strategy/03-gtm-and-roadmap.md +759 -0
- package/docs/strategy/QUICK-START.txt +289 -0
- package/docs/strategy/README.md +172 -0
- package/docs/tmux-research/TMUX-ARCHITECTURE.md +567 -0
- package/docs/tmux-research/TMUX-AUDIT.md +925 -0
- package/docs/tmux-research/TMUX-BEST-PRACTICES-2025-2026.md +829 -0
- package/docs/tmux-research/TMUX-QUICK-REFERENCE.md +543 -0
- package/docs/tmux-research/TMUX-RESEARCH-INDEX.md +438 -0
- package/package.json +4 -2
- package/scripts/lib/helpers.sh +7 -0
- package/scripts/sw +323 -2
- package/scripts/sw-activity.sh +500 -0
- package/scripts/sw-adaptive.sh +925 -0
- package/scripts/sw-adversarial.sh +1 -1
- package/scripts/sw-architecture-enforcer.sh +1 -1
- package/scripts/sw-auth.sh +613 -0
- package/scripts/sw-autonomous.sh +754 -0
- package/scripts/sw-changelog.sh +704 -0
- package/scripts/sw-checkpoint.sh +1 -1
- package/scripts/sw-ci.sh +602 -0
- package/scripts/sw-cleanup.sh +1 -1
- package/scripts/sw-code-review.sh +698 -0
- package/scripts/sw-connect.sh +1 -1
- package/scripts/sw-context.sh +605 -0
- package/scripts/sw-cost.sh +44 -3
- package/scripts/sw-daemon.sh +568 -138
- package/scripts/sw-dashboard.sh +1 -1
- package/scripts/sw-db.sh +1380 -0
- package/scripts/sw-decompose.sh +539 -0
- package/scripts/sw-deps.sh +551 -0
- package/scripts/sw-developer-simulation.sh +1 -1
- package/scripts/sw-discovery.sh +412 -0
- package/scripts/sw-docs-agent.sh +539 -0
- package/scripts/sw-docs.sh +1 -1
- package/scripts/sw-doctor.sh +107 -1
- package/scripts/sw-dora.sh +615 -0
- package/scripts/sw-durable.sh +710 -0
- package/scripts/sw-e2e-orchestrator.sh +535 -0
- package/scripts/sw-eventbus.sh +393 -0
- package/scripts/sw-feedback.sh +479 -0
- package/scripts/sw-fix.sh +1 -1
- package/scripts/sw-fleet-discover.sh +567 -0
- package/scripts/sw-fleet-viz.sh +404 -0
- package/scripts/sw-fleet.sh +8 -1
- package/scripts/sw-github-app.sh +596 -0
- package/scripts/sw-github-checks.sh +4 -4
- package/scripts/sw-github-deploy.sh +1 -1
- package/scripts/sw-github-graphql.sh +1 -1
- package/scripts/sw-guild.sh +569 -0
- package/scripts/sw-heartbeat.sh +1 -1
- package/scripts/sw-hygiene.sh +559 -0
- package/scripts/sw-incident.sh +656 -0
- package/scripts/sw-init.sh +237 -24
- package/scripts/sw-instrument.sh +699 -0
- package/scripts/sw-intelligence.sh +1 -1
- package/scripts/sw-jira.sh +1 -1
- package/scripts/sw-launchd.sh +363 -28
- package/scripts/sw-linear.sh +1 -1
- package/scripts/sw-logs.sh +1 -1
- package/scripts/sw-loop.sh +267 -21
- package/scripts/sw-memory.sh +18 -1
- package/scripts/sw-mission-control.sh +487 -0
- package/scripts/sw-model-router.sh +545 -0
- package/scripts/sw-otel.sh +596 -0
- package/scripts/sw-oversight.sh +764 -0
- package/scripts/sw-pipeline-composer.sh +1 -1
- package/scripts/sw-pipeline-vitals.sh +1 -1
- package/scripts/sw-pipeline.sh +947 -35
- package/scripts/sw-pm.sh +758 -0
- package/scripts/sw-pr-lifecycle.sh +522 -0
- package/scripts/sw-predictive.sh +8 -1
- package/scripts/sw-prep.sh +1 -1
- package/scripts/sw-ps.sh +1 -1
- package/scripts/sw-public-dashboard.sh +798 -0
- package/scripts/sw-quality.sh +595 -0
- package/scripts/sw-reaper.sh +1 -1
- package/scripts/sw-recruit.sh +2248 -0
- package/scripts/sw-regression.sh +642 -0
- package/scripts/sw-release-manager.sh +736 -0
- package/scripts/sw-release.sh +706 -0
- package/scripts/sw-remote.sh +1 -1
- package/scripts/sw-replay.sh +520 -0
- package/scripts/sw-retro.sh +691 -0
- package/scripts/sw-scale.sh +444 -0
- package/scripts/sw-security-audit.sh +505 -0
- package/scripts/sw-self-optimize.sh +1 -1
- package/scripts/sw-session.sh +1 -1
- package/scripts/sw-setup.sh +263 -127
- package/scripts/sw-standup.sh +712 -0
- package/scripts/sw-status.sh +44 -2
- package/scripts/sw-strategic.sh +806 -0
- package/scripts/sw-stream.sh +450 -0
- package/scripts/sw-swarm.sh +620 -0
- package/scripts/sw-team-stages.sh +511 -0
- package/scripts/sw-templates.sh +4 -4
- package/scripts/sw-testgen.sh +566 -0
- package/scripts/sw-tmux-pipeline.sh +554 -0
- package/scripts/sw-tmux-role-color.sh +58 -0
- package/scripts/sw-tmux-status.sh +128 -0
- package/scripts/sw-tmux.sh +1 -1
- package/scripts/sw-trace.sh +485 -0
- package/scripts/sw-tracker-github.sh +188 -0
- package/scripts/sw-tracker-jira.sh +172 -0
- package/scripts/sw-tracker-linear.sh +251 -0
- package/scripts/sw-tracker.sh +117 -2
- package/scripts/sw-triage.sh +627 -0
- package/scripts/sw-upgrade.sh +1 -1
- package/scripts/sw-ux.sh +677 -0
- package/scripts/sw-webhook.sh +627 -0
- package/scripts/sw-widgets.sh +530 -0
- package/scripts/sw-worktree.sh +1 -1
- package/templates/pipelines/autonomous.json +2 -2
- package/tmux/shipwright-overlay.conf +35 -17
- package/tmux/tmux.conf +23 -21
|
@@ -0,0 +1,691 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║ shipwright retro — Sprint Retrospective Engine ║
# ║ Analyze metrics · Identify improvements · Create action items ║
# ╚═══════════════════════════════════════════════════════════════════════════╝

# Fail fast: abort on errors, unset variables, and failed pipeline stages.
set -euo pipefail
# Report the file/line/status of whatever command trips `set -e`.
trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR

VERSION="2.1.0"
# Directory containing this script (resolved even when invoked via a relative path).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# 24-bit ANSI escapes consumed by the status helpers below via `echo -e`.
CYAN='\033[38;2;0;212;255m' # #00d4ff — primary accent
PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE='\033[38;2;0;102;255m' # #0066ff — tertiary
GREEN='\033[38;2;74;222;128m' # #4ade80 — success
YELLOW='\033[38;2;250;204;21m' # #faca15 — warning
RED='\033[38;2;248;113;113m' # #f87171 — error
DIM='\033[2m'
BOLD='\033[1m'
RESET='\033[0m'

# ─── Cross-platform compatibility ──────────────────────────────────────────
# Optional shim smoothing over BSD/GNU userland differences; its absence is
# deliberately not an error.
_COMPAT="$SCRIPT_DIR/lib/compat.sh"
[[ -f "$_COMPAT" ]] && source "$_COMPAT"
|
|
26
|
+
|
|
27
|
+
# ─── Helpers ────────────────────────────────────────────────────────────────
# Status-line printers: a colored glyph prefix followed by the message.
# All write to stdout except error(), which targets stderr.
info()    { printf '%b\n' "${CYAN}${BOLD}▸${RESET} $*"; }
success() { printf '%b\n' "${GREEN}${BOLD}✓${RESET} $*"; }
warn()    { printf '%b\n' "${YELLOW}${BOLD}⚠${RESET} $*"; }
error()   { printf '%b\n' "${RED}${BOLD}✗${RESET} $*" >&2; }

# Current time as ISO-8601 UTC and as Unix epoch seconds.
now_iso()   { date -u '+%Y-%m-%dT%H:%M:%SZ'; }
now_epoch() { date '+%s'; }
|
|
36
|
+
# Convert a Unix epoch to an ISO-8601 UTC timestamp.
# Tries BSD date (-r), then GNU date (-d @), then python3; if everything
# fails, falls back to the epoch origin rather than erroring out.
epoch_to_iso() {
  local ts="$1"
  local fmt="%Y-%m-%dT%H:%M:%SZ"
  date -u -r "$ts" +"$fmt" 2>/dev/null \
    || date -u -d "@$ts" +"$fmt" 2>/dev/null \
    || python3 -c "import datetime; print(datetime.datetime.utcfromtimestamp($ts).strftime('%Y-%m-%dT%H:%M:%SZ'))" 2>/dev/null \
    || echo "1970-01-01T00:00:00Z"
}
|
|
43
|
+
|
|
44
|
+
# Render a second count as a compact human duration:
# >= 1h → "Xh Ym Zs", >= 1m → "Ym Zs", otherwise "Zs".
format_duration() {
  local total="$1"
  if (( total >= 3600 )); then
    printf "%dh %dm %ds" $(( total / 3600 )) $(( total % 3600 / 60 )) $(( total % 60 ))
  elif (( total >= 60 )); then
    printf "%dm %ds" $(( total / 60 )) $(( total % 60 ))
  else
    printf "%ds" "$total"
  fi
}
|
|
54
|
+
|
|
55
|
+
#######################################
# Append one JSON event to ~/.shipwright/events.jsonl.
# Usage:    emit_event TYPE [key=value ...]
# Values that look like numbers are emitted as JSON numbers; everything
# else becomes a JSON string with backslashes and quotes escaped.
# Globals:  HOME (events file location)
#######################################
emit_event() {
  local event_type="$1"; shift
  local events_file="${HOME}/.shipwright/events.jsonl"
  mkdir -p "$(dirname "$events_file")"
  local payload="{\"ts\":\"$(now_iso)\",\"ts_epoch\":$(now_epoch),\"type\":\"$event_type\""
  local kv key val
  for kv in "$@"; do
    key="${kv%%=*}"
    val="${kv#*=}"
    # Strict numeric check: integer, or decimal with digits after the point.
    # (The previous pattern accepted values like "5.", which is invalid JSON.)
    if [[ "$val" =~ ^-?[0-9]+(\.[0-9]+)?$ ]]; then
      payload="${payload},\"${key}\":${val}"
    else
      # Escape backslashes BEFORE quotes so the line stays valid JSON
      # (previously a value containing a backslash produced broken output).
      val="${val//\\/\\\\}"
      val="${val//\"/\\\"}"
      payload="${payload},\"${key}\":\"${val}\""
    fi
  done
  payload="${payload}}"
  echo "$payload" >> "$events_file"
}
|
|
73
|
+
|
|
74
|
+
# ─── State Storage ──────────────────────────────────────────────────────────
# All retrospective artifacts are written beneath this directory.
RETRO_DIR="${HOME}/.shipwright/retros"

# Create the retro storage directory (and parents) if missing; idempotent.
ensure_retro_dir() {
  mkdir -p -- "$RETRO_DIR"
}
|
|
79
|
+
|
|
80
|
+
# ─── Sprint Date Calculation ────────────────────────────────────────────────
# Resolve a (from, to) date pair and echo them as "FROM TO" on one line.
# No args: the trailing 7 days ending today (UTC). Only FROM: FROM..today.
get_sprint_dates() {
  local start="${1:-}"
  local finish="${2:-}"

  if [[ -z "$start" ]]; then
    # Default: last 7 days. Try BSD date, then GNU date, then python3.
    finish=$(date -u +"%Y-%m-%d")
    start=$(date -u -v-7d +"%Y-%m-%d" 2>/dev/null || \
      date -u -d "7 days ago" +"%Y-%m-%d" 2>/dev/null || \
      python3 -c "from datetime import datetime, timedelta; print((datetime.utcnow() - timedelta(days=7)).strftime('%Y-%m-%d'))")
  elif [[ -z "$finish" ]]; then
    finish=$(date -u +"%Y-%m-%d")
  fi

  echo "${start} ${finish}"
}
|
|
97
|
+
|
|
98
|
+
# ─── Analyze Pipeline Events ────────────────────────────────────────────────
#######################################
# Aggregate pipeline events between two dates (inclusive, UTC) into one
# JSON metrics object on stdout.
# Globals:   HOME (reads ~/.shipwright/events.jsonl)
# Arguments: $1 - from date (YYYY-MM-DD), $2 - to date (YYYY-MM-DD)
# Outputs:   {pipelines,succeeded,failed,retries,avg_duration,avg_stages,
#            slowest_stage,quality_score}; a zeroed object when no data.
# Returns:   0, or 1 when jq is unavailable.
#######################################
analyze_sprint_data() {
  local from_date="$1"
  local to_date="$2"

  local events_file="${HOME}/.shipwright/events.jsonl"
  if [[ ! -f "$events_file" ]]; then
    echo '{"pipelines":0,"succeeded":0,"failed":0,"retries":0,"avg_duration":0,"avg_stages":0,"slowest_stage":"","quality_score":0}'
    return 0
  fi

  if ! command -v jq &>/dev/null; then
    error "jq is required for sprint analysis"
    return 1
  fi

  # Convert dates to epoch seconds: GNU date first, then BSD `date -j -f`.
  # (The old BSD fallback nested a GNU-only `date -d` call and always
  # collapsed to 0, disabling the lower bound on macOS.)
  local from_epoch to_epoch
  from_epoch=$(date -u -d "${from_date}T00:00:00Z" +%s 2>/dev/null || \
    date -u -j -f "%Y-%m-%dT%H:%M:%SZ" "${from_date}T00:00:00Z" +%s 2>/dev/null || echo 0)
  to_epoch=$(date -u -d "${to_date}T23:59:59Z" +%s 2>/dev/null || \
    date -u -j -f "%Y-%m-%dT%H:%M:%SZ" "${to_date}T23:59:59Z" +%s 2>/dev/null || echo 0)

  jq -s --argjson from "$from_epoch" --argjson to "$to_epoch" '
    [.[] | select(.ts_epoch >= $from and .ts_epoch <= $to)] as $events |
    [$events[] | select(.type == "pipeline.completed")] as $completed |
    ($completed | length) as $total_pipelines |
    [$completed[] | select(.result == "success")] as $successes |
    ($successes | length) as $succeeded |
    ($total_pipelines - $succeeded) as $failed |
    [$events[] | select(.type == "pipeline.retry")] as $retries |
    ($retries | length) as $retry_count |
    [$completed[].duration_s // 0] | (if length > 0 then (add / length) else 0 end) as $avg_duration |
    [$successes[] | (.stages_passed // 0)] | (if length > 0 then (add / length) else 0 end) as $avg_stages |
    # Fully parenthesized: the unparenthesized `.[0] // "unknown" as $x | …`
    # bound the whole remainder to the // alternative, so the program emitted
    # a bare string instead of the metrics object whenever .[0] was non-null.
    (([$completed[] | select(.slowest_stage) | .slowest_stage] | .[0]) // "unknown") as $slowest |
    (if $total_pipelines > 0 then ((($succeeded / $total_pipelines) * 100) | floor) else 0 end) as $quality |
    {
      pipelines: $total_pipelines,
      succeeded: $succeeded,
      failed: $failed,
      retries: $retry_count,
      avg_duration: ($avg_duration | floor),
      avg_stages: ($avg_stages * 10 | floor / 10),
      slowest_stage: $slowest,
      quality_score: $quality
    }
  ' "$events_file" 2>/dev/null || echo '{"pipelines":0,"succeeded":0,"failed":0,"retries":0,"avg_duration":0,"avg_stages":0,"slowest_stage":"","quality_score":0}'
}
|
|
146
|
+
|
|
147
|
+
# ─── Agent Performance Analysis ─────────────────────────────────────────────
# Per-agent completion statistics for a date window, emitted as
# {"agents":[{agent,completed,succeeded,failed,avg_duration},…]} sorted by
# completion volume (descending).
analyze_agent_performance() {
  local start="$1"
  local finish="$2"

  local src="${HOME}/.shipwright/events.jsonl"
  # Without event data or jq there is nothing to compute — empty result.
  if [[ ! -f "$src" ]] || ! command -v jq &>/dev/null; then
    echo '{"agents":[]}'
    return 0
  fi

  local lo hi
  lo=$(date -u -d "${start}T00:00:00Z" +%s 2>/dev/null || echo 0)
  hi=$(date -u -d "${finish}T23:59:59Z" +%s 2>/dev/null || echo 0)

  jq -s --argjson from "$lo" --argjson to "$hi" '
    [.[] | select(.ts_epoch >= $from and .ts_epoch <= $to)] as $events |
    [$events[] | select(.type == "pipeline.completed" and .agent)] as $completions |
    $completions | group_by(.agent) | map({
      agent: .[0].agent,
      completed: length,
      succeeded: ([.[] | select(.result == "success")] | length),
      failed: ([.[] | select(.result == "failure")] | length),
      avg_duration: (([.[].duration_s // 0] | add / length) | floor)
    }) | sort_by(-.completed) as $agent_stats |
    { agents: $agent_stats }
  ' "$src" 2>/dev/null || echo '{"agents":[]}'
}
|
|
180
|
+
|
|
181
|
+
# ─── Velocity & Trends ──────────────────────────────────────────────────────
#######################################
# Compare successful pipeline counts between the requested window and the
# equal-length window immediately before it.
# Arguments: $1 - from date, $2 - to date (YYYY-MM-DD)
# Outputs:   JSON {current, previous, trend} with trend ↑ / ↓ / →.
#######################################
analyze_velocity() {
  local from_date="$1"
  local to_date="$2"

  local events_file="${HOME}/.shipwright/events.jsonl"
  if [[ ! -f "$events_file" ]]; then
    echo '{"current":0,"previous":0,"trend":"→"}'
    return 0
  fi

  if ! command -v jq &>/dev/null; then
    echo '{"current":0,"previous":0,"trend":"→"}'
    return 0
  fi

  local from_epoch to_epoch prev_from_epoch prev_to_epoch
  from_epoch=$(date -u -d "${from_date}T00:00:00Z" +%s 2>/dev/null || echo 0)
  to_epoch=$(date -u -d "${to_date}T23:59:59Z" +%s 2>/dev/null || echo 0)

  # Previous period: same number of whole days, ending where this one starts.
  local duration_days
  duration_days=$(( (to_epoch - from_epoch) / 86400 ))
  prev_to_epoch=$from_epoch
  prev_from_epoch=$((from_epoch - (duration_days * 86400)))

  # Bind the input array once ($all). The previous program piped the
  # current-window array into the previous-window filter, so $previous was
  # computed over already-filtered events and was always 0 — which in turn
  # made the "↑"/"↓" trends unreachable. The trend comparison now matches
  # the `current > previous` logic used by the report.
  jq -s --argjson curr_from "$from_epoch" --argjson curr_to "$to_epoch" \
    --argjson prev_from "$prev_from_epoch" --argjson prev_to "$prev_to_epoch" '
    . as $all |
    ([$all[] | select(.ts_epoch >= $curr_from and .ts_epoch <= $curr_to and .type == "pipeline.completed" and .result == "success")] | length) as $current |
    ([$all[] | select(.ts_epoch >= $prev_from and .ts_epoch <= $prev_to and .type == "pipeline.completed" and .result == "success")] | length) as $previous |
    (if $current > $previous then "↑" elif $current < $previous then "↓" else "→" end) as $trend |
    {
      current: $current,
      previous: $previous,
      trend: $trend
    }
  ' "$events_file" 2>/dev/null || echo '{"current":0,"previous":0,"trend":"→"}'
}
|
|
220
|
+
|
|
221
|
+
# ─── Generate Insights & Actions ────────────────────────────────────────────
# Derive improvement action items from a sprint-analysis JSON object and
# emit them as {"actions":[…]}. The "process" action is always included.
generate_improvement_actions() {
  local metrics="$1"

  if ! command -v jq &>/dev/null; then
    echo '{"actions":[]}'
    return 0
  fi

  local score failures retry_count bottleneck
  score=$(echo "$metrics" | jq -r '.quality_score // 0')
  failures=$(echo "$metrics" | jq -r '.failed // 0')
  retry_count=$(echo "$metrics" | jq -r '.retries // 0')
  bottleneck=$(echo "$metrics" | jq -r '.slowest_stage // ""')

  local -a items=()

  # Quality improvement: triggered below an 80% success rate.
  if [[ "$score" -lt 80 ]]; then
    items+=("{\"priority\":\"high\",\"title\":\"Improve pipeline success rate to 85%+\",\"description\":\"Current: ${score}%. Investigate $failures failed pipelines and reduce quality gate failures.\",\"label\":\"improvement\"}")
  fi

  # Reliability: more than two retries signals instability.
  if [[ "$retry_count" -gt 2 ]]; then
    items+=("{\"priority\":\"high\",\"title\":\"Reduce retry count\",\"description\":\"${retry_count} retries detected. Analyze root causes and add early detection.\",\"label\":\"reliability\"}")
  fi

  # Performance: only when a concrete slowest stage was identified.
  if [[ -n "$bottleneck" && "$bottleneck" != "unknown" ]]; then
    items+=("{\"priority\":\"medium\",\"title\":\"Optimize ${bottleneck} stage performance\",\"description\":\"This is the slowest pipeline stage. Consider parallelization or caching.\",\"label\":\"performance\"}")
  fi

  # Process consistency: unconditional standing action.
  items+=("{\"priority\":\"medium\",\"title\":\"Stabilize pipeline execution time\",\"description\":\"Review variance in stage durations and standardize resource allocation.\",\"label\":\"process\"}")

  # Join the fragments with commas and wrap in the actions envelope.
  local joined
  joined=$(IFS=,; echo "${items[*]}")
  echo "{\"actions\":[${joined}]}"
}
|
|
260
|
+
|
|
261
|
+
# ─── Create GitHub Issues for Actions ────────────────────────────────────────
# File one GitHub issue per generated action item via the gh CLI.
# Returns 1 (after a warning) when gh or jq is missing.
create_action_issues() {
  local actions_json="$1"

  if ! command -v gh &>/dev/null; then
    warn "GitHub CLI (gh) not found. Skipping issue creation."
    return 1
  fi

  if ! command -v jq &>/dev/null; then
    warn "jq not found. Skipping issue creation."
    return 1
  fi

  local total idx
  total=$(echo "$actions_json" | jq '.actions | length')

  for ((idx = 0; idx < total; idx++)); do
    local item_title item_body item_label item_priority
    item_title=$(echo "$actions_json" | jq -r ".actions[$idx].title")
    item_body=$(echo "$actions_json" | jq -r ".actions[$idx].description")
    item_label=$(echo "$actions_json" | jq -r ".actions[$idx].label")
    item_priority=$(echo "$actions_json" | jq -r ".actions[$idx].priority")

    # Best-effort: a failed gh call (e.g. unknown label, no repo) is
    # silently skipped rather than aborting the retro.
    if gh issue create \
      --title "Retro: $item_title" \
      --body "$item_body" \
      --label "$item_label,retro" \
      --label "$item_priority" 2>/dev/null; then
      success "Created issue: $item_title"
    fi
  done
}
|
|
295
|
+
|
|
296
|
+
# ─── Report Generation ──────────────────────────────────────────────────────
#######################################
# Write the markdown and JSON retrospective reports into $RETRO_DIR and
# emit a retro.completed event.
# Globals:   RETRO_DIR (output location)
# Arguments: $1 from date, $2 to date, $3 analysis JSON, $4 agent JSON,
#            $5 velocity JSON
# Outputs:   success message; files retro-FROM-to-TO.{md,json}
#######################################
generate_retro_report() {
  local from_date="$1"
  local to_date="$2"
  local analysis_json="$3"
  local agent_json="$4"
  local velocity_json="$5"

  ensure_retro_dir

  local report_file="${RETRO_DIR}/retro-${from_date}-to-${to_date}.md"
  local report_json="${RETRO_DIR}/retro-${from_date}-to-${to_date}.json"

  # Extract headline metrics; `// 0` keeps the report renderable when the
  # analysis object is partial.
  local pipelines succeeded failed retries avg_duration quality_score
  pipelines=$(echo "$analysis_json" | jq -r '.pipelines // 0')
  succeeded=$(echo "$analysis_json" | jq -r '.succeeded // 0')
  failed=$(echo "$analysis_json" | jq -r '.failed // 0')
  retries=$(echo "$analysis_json" | jq -r '.retries // 0')
  avg_duration=$(echo "$analysis_json" | jq -r '.avg_duration // 0')
  quality_score=$(echo "$analysis_json" | jq -r '.quality_score // 0')

  local current_velocity previous_velocity trend
  current_velocity=$(echo "$velocity_json" | jq -r '.current // 0')
  previous_velocity=$(echo "$velocity_json" | jq -r '.previous // 0')
  trend=$(echo "$velocity_json" | jq -r '.trend // "→"')

  # Generate markdown report (one redirection for the whole header section).
  {
    echo "# Sprint Retrospective"
    echo ""
    echo "**Period**: ${from_date} to ${to_date}"
    echo "**Generated**: $(now_iso)"
    echo ""
    echo "## Summary"
    echo ""
    echo "| Metric | Value |"
    echo "|--------|-------|"
    echo "| Total Pipelines | $pipelines |"
    echo "| Succeeded | $succeeded |"
    echo "| Failed | $failed |"
    echo "| Success Rate | ${quality_score}% |"
    echo "| Retries | $retries |"
    echo "| Avg Duration | $(format_duration "$avg_duration") |"
    echo ""
    echo "## Velocity"
    echo ""
    echo "| Period | Successful Pipelines | Trend |"
    echo "|--------|----------------------|-------|"
    echo "| Current | $current_velocity | $trend |"
    echo "| Previous | $previous_velocity | |"
    echo ""
    echo "## What Went Well"
    echo ""
    if [[ "$quality_score" -ge 90 ]]; then
      echo "- **High quality**: ${quality_score}% success rate demonstrates strong pipeline stability"
    fi
    if [[ "$retries" -le 1 ]]; then
      echo "- **Low retry rate**: Minimal retries indicate reliable execution"
    fi
    if [[ "$current_velocity" -gt "$previous_velocity" ]]; then
      echo "- **Velocity increase**: $trend Successful deliveries increasing"
    fi
    echo ""
    echo "## What Went Wrong"
    echo ""
    if [[ "$quality_score" -lt 80 ]]; then
      echo "- **Quality concerns**: ${quality_score}% success rate needs improvement"
    fi
    if [[ "$failed" -gt 0 ]]; then
      echo "- **Pipeline failures**: $failed failed pipelines in this sprint"
    fi
    if [[ "$retries" -gt 2 ]]; then
      echo "- **High retry count**: $retries retries indicates instability"
    fi
    echo ""
    echo "## Agent Performance"
    echo ""
    echo "| Agent | Completed | Succeeded | Failed | Avg Duration |"
    echo "|-------|-----------|-----------|--------|--------------|"
  } > "$report_file"

  # Append one table row per agent.
  if command -v jq &>/dev/null; then
    local agent_count
    agent_count=$(echo "$agent_json" | jq '.agents | length' 2>/dev/null || echo 0)
    for ((i = 0; i < agent_count; i++)); do
      local agent completed succeeded_agent failed_agent avg_dur
      agent=$(echo "$agent_json" | jq -r ".agents[$i].agent" 2>/dev/null || echo "unknown")
      completed=$(echo "$agent_json" | jq -r ".agents[$i].completed // 0" 2>/dev/null || echo 0)
      succeeded_agent=$(echo "$agent_json" | jq -r ".agents[$i].succeeded // 0" 2>/dev/null || echo 0)
      failed_agent=$(echo "$agent_json" | jq -r ".agents[$i].failed // 0" 2>/dev/null || echo 0)
      avg_dur=$(echo "$agent_json" | jq -r ".agents[$i].avg_duration // 0" 2>/dev/null || echo 0)

      echo "| $agent | $completed | $succeeded_agent | $failed_agent | $(format_duration "$avg_dur") |" >> "$report_file"
    done
  fi

  # NOTE(review): this header is emitted but the action items themselves are
  # never appended here (cmd_run only prints/creates issues) — confirm intended.
  {
    echo ""
    echo "## Improvement Actions"
    echo ""
  } >> "$report_file"

  # Capture the full analysis to JSON. generated_at is passed via --arg
  # instead of being spliced into the program text with quote-breaking shell
  # interpolation ('"'$(now_iso)'"'), which was fragile and injection-prone.
  jq -n \
    --argjson analysis "$analysis_json" \
    --argjson agents "$agent_json" \
    --argjson velocity "$velocity_json" \
    --arg from_date "$from_date" \
    --arg to_date "$to_date" \
    --arg generated_at "$(now_iso)" \
    '{
      from_date: $from_date,
      to_date: $to_date,
      generated_at: $generated_at,
      analysis: $analysis,
      agents: $agents,
      velocity: $velocity
    }' > "$report_json"

  success "Report generated: $report_file"
  emit_event "retro.completed" "from_date=$from_date" "to_date=$to_date" "quality_score=$quality_score"
}
|
|
419
|
+
|
|
420
|
+
# ─── Subcommands ───────────────────────────────────────────────────────────
|
|
421
|
+
|
|
422
|
+
#######################################
# Run a full retrospective: analyze the window, print a summary, write the
# report, and optionally open GitHub issues for the improvement actions.
# Arguments: [--from DATE] [--to DATE] or positional FROM [TO]
#######################################
cmd_run() {
  local from_date="" to_date=""
  local -a positional=()

  # Separate flags from bare positional dates. (Previously the first two
  # raw argv words were pre-assigned to from/to BEFORE flag parsing, so
  # `run --from X` left to_date set to "X" and from_date briefly "--from".)
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --from)
        from_date="${2:-}"
        shift 2 2>/dev/null || shift
        ;;
      --to)
        to_date="${2:-}"
        shift 2 2>/dev/null || shift
        ;;
      *)
        positional+=("$1")
        shift
        ;;
    esac
  done
  if [[ -z "$from_date" && ${#positional[@]} -ge 1 ]]; then
    from_date="${positional[0]}"
  fi
  if [[ -z "$to_date" && ${#positional[@]} -ge 2 ]]; then
    to_date="${positional[1]}"
  fi

  # Fill in defaults (last 7 days) for anything still unset.
  read -r from_date to_date <<< "$(get_sprint_dates "$from_date" "$to_date")"

  info "Running sprint retrospective for ${from_date} to ${to_date}"
  echo ""

  # Analyze data
  local analysis agent_perf velocity
  analysis=$(analyze_sprint_data "$from_date" "$to_date")
  agent_perf=$(analyze_agent_performance "$from_date" "$to_date")
  velocity=$(analyze_velocity "$from_date" "$to_date")

  # Display summary
  echo -e "${BOLD}Sprint Summary${RESET}"
  if command -v jq &>/dev/null; then
    local pipelines succeeded failed quality_score
    pipelines=$(echo "$analysis" | jq -r '.pipelines')
    succeeded=$(echo "$analysis" | jq -r '.succeeded')
    failed=$(echo "$analysis" | jq -r '.failed')
    quality_score=$(echo "$analysis" | jq -r '.quality_score')

    echo "Pipelines: $pipelines total | ${GREEN}$succeeded succeeded${RESET} | ${RED}$failed failed${RESET}"
    echo "Success Rate: ${quality_score}%"
    echo ""
  fi

  # Generate improvements
  local improvements
  improvements=$(generate_improvement_actions "$analysis")

  # Generate report
  generate_retro_report "$from_date" "$to_date" "$analysis" "$agent_perf" "$velocity"

  # Offer to create issues (5-second timeout defaults to "no").
  if command -v gh &>/dev/null; then
    echo ""
    info "Create improvement issues? (y/n)"
    read -r -t 5 response || response="n"
    if [[ "$response" =~ ^[yY]$ ]]; then
      create_action_issues "$improvements"
    fi
  fi
}
|
|
487
|
+
|
|
488
|
+
# Print the raw sprint-metrics JSON for a date window (defaults: last 7 days).
cmd_summary() {
  local start="${1:-}"
  local finish="${2:-}"

  read -r start finish <<< "$(get_sprint_dates "$start" "$finish")"

  info "Sprint Summary for ${start} to ${finish}"
  echo ""

  local metrics
  metrics=$(analyze_sprint_data "$start" "$finish")

  # Pretty-print when jq is available; raw JSON otherwise.
  if command -v jq &>/dev/null; then
    echo "$metrics" | jq '.'
  else
    echo "$metrics"
  fi
}
|
|
507
|
+
|
|
508
|
+
# Show success-rate and volume across the last four consecutive 7-day windows.
cmd_trends() {
  info "Multi-Sprint Trend Analysis"
  echo ""

  local events_file="${HOME}/.shipwright/events.jsonl"
  if [[ ! -f "$events_file" ]]; then
    error "No event data found. Run pipelines first."
    return 1
  fi

  local today
  today=$(date -u +"%Y-%m-%d")

  local sprint
  for sprint in 0 1 2 3; do
    # Window N covers [N*7+7 .. N*7] days ago.
    local days_back_end days_back_start
    days_back_end=$((sprint * 7))
    days_back_start=$(((sprint + 1) * 7))

    # BSD date first, then GNU date; fall back to today on failure.
    local window_end window_start
    window_end=$(date -u -v-${days_back_end}d +"%Y-%m-%d" 2>/dev/null || \
      date -u -d "${days_back_end} days ago" +"%Y-%m-%d" 2>/dev/null || echo "$today")
    window_start=$(date -u -v-${days_back_start}d +"%Y-%m-%d" 2>/dev/null || \
      date -u -d "${days_back_start} days ago" +"%Y-%m-%d" 2>/dev/null || echo "$today")

    local metrics
    metrics=$(analyze_sprint_data "$window_start" "$window_end")

    if command -v jq &>/dev/null; then
      local rate volume
      rate=$(echo "$metrics" | jq -r '.quality_score')
      volume=$(echo "$metrics" | jq -r '.pipelines')
      echo "Sprint $((sprint + 1)) (${window_start} to ${window_end}): ${rate}% success, $volume pipelines"
    fi
  done
}
|
|
543
|
+
|
|
544
|
+
# Print a per-agent performance breakdown for a date window.
cmd_agents() {
  local start="${1:-}"
  local finish="${2:-}"

  read -r start finish <<< "$(get_sprint_dates "$start" "$finish")"

  info "Agent Performance for ${start} to ${finish}"
  echo ""

  local stats
  stats=$(analyze_agent_performance "$start" "$finish")

  # One human-readable line per agent when jq is available.
  if command -v jq &>/dev/null; then
    echo "$stats" | jq '.agents[] | "\(.agent): \(.completed) completed, \(.succeeded) succeeded, \(.failed) failed"' -r
  else
    echo "$stats"
  fi
}
|
|
563
|
+
|
|
564
|
+
# List the improvement actions derived from a date window's metrics.
cmd_actions() {
  local start="${1:-}"
  local finish="${2:-}"

  read -r start finish <<< "$(get_sprint_dates "$start" "$finish")"

  info "Improvement Actions for ${start} to ${finish}"
  echo ""

  local metrics suggestions
  metrics=$(analyze_sprint_data "$start" "$finish")
  suggestions=$(generate_improvement_actions "$metrics")

  # PRIORITY: title / indented description per action when jq is available.
  if command -v jq &>/dev/null; then
    echo "$suggestions" | jq '.actions[] | "\(.priority | ascii_upcase): \(.title)\n  \(.description)"' -r
  else
    echo "$suggestions"
  fi
}
|
|
584
|
+
|
|
585
|
+
#######################################
# Compare two 7-day sprint windows that start on the given dates.
# Arguments: $1 - first window start, $2 - second window start (YYYY-MM-DD)
# Returns:   1 on missing arguments.
#######################################
cmd_compare() {
  local period1="${1:-}"
  local period2="${2:-}"

  if [[ -z "$period1" || -z "$period2" ]]; then
    error "Usage: sw retro compare <from-date1> <from-date2>"
    return 1
  fi

  info "Comparing sprints starting ${period1} vs ${period2}"
  echo ""

  # Compute each window's end date. GNU date first, then BSD date — the
  # original only used GNU `date -d`, which left an empty end date on macOS
  # even though the rest of this script carries BSD fallbacks.
  local end1 end2
  end1=$(date -u -d "${period1} + 7 days" +"%Y-%m-%d" 2>/dev/null || \
    date -u -j -v+7d -f "%Y-%m-%d" "$period1" +"%Y-%m-%d" 2>/dev/null || echo "$period1")
  end2=$(date -u -d "${period2} + 7 days" +"%Y-%m-%d" 2>/dev/null || \
    date -u -j -v+7d -f "%Y-%m-%d" "$period2" +"%Y-%m-%d" 2>/dev/null || echo "$period2")

  local analysis1 analysis2
  analysis1=$(analyze_sprint_data "$period1" "$end1")
  analysis2=$(analyze_sprint_data "$period2" "$end2")

  if command -v jq &>/dev/null; then
    echo "Sprint 1 (${period1}):"
    echo "$analysis1" | jq '.'
    echo ""
    echo "Sprint 2 (${period2}):"
    echo "$analysis2" | jq '.'
  fi
}
|
|
609
|
+
|
|
610
|
+
# List saved retrospective reports, newest first, one basename per line.
cmd_history() {
  info "Sprint Retrospective History"
  echo ""

  ensure_retro_dir
  if [[ ! -d "$RETRO_DIR" || -z "$(ls -A "$RETRO_DIR" 2>/dev/null)" ]]; then
    warn "No retrospectives found. Run 'sw retro run' first."
    return 0
  fi

  # ls -1t sorts by mtime (newest first); strip the .md suffix for display.
  local report
  while IFS= read -r report; do
    basename "$report" .md
  done < <(ls -1t "$RETRO_DIR"/retro-*.md 2>/dev/null)
}
|
|
624
|
+
|
|
625
|
+
# Print CLI usage. The quoted 'EOF' delimiter keeps the heredoc literal
# (no variable or command expansion).
cmd_help() {
  cat << 'EOF'
Usage: shipwright retro <subcommand> [options]

Subcommands:
  run [--from DATE] [--to DATE]   Run retrospective for sprint (default: last 7 days)
  summary [DATE1] [DATE2]         Quick sprint summary stats
  trends                          Multi-sprint trend analysis (last 4 sprints)
  agents [DATE1] [DATE2]          Agent performance breakdown
  actions [DATE1] [DATE2]         List generated improvement actions
  compare DATE1 DATE2             Compare two sprint periods
  history                         Show past retrospective reports
  help                            Show this help message

Options:
  --from DATE    Start date (YYYY-MM-DD)
  --to DATE      End date (YYYY-MM-DD)

Examples:
  shipwright retro run                  # Last 7 days
  shipwright retro run --from 2025-02-01 --to 2025-02-08
  shipwright retro summary
  shipwright retro trends
  shipwright retro agents
  shipwright retro compare 2025-02-01 2025-01-25

EOF
}
|
|
653
|
+
|
|
654
|
+
# ─── Main ──────────────────────────────────────────────────────────────────

# Dispatch only when executed directly (not when sourced).
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
  subcommand="${1:-help}"
  shift 2>/dev/null || true

  case "$subcommand" in
    run)      cmd_run "$@" ;;
    summary)  cmd_summary "$@" ;;
    trends)   cmd_trends "$@" ;;
    agents)   cmd_agents "$@" ;;
    actions)  cmd_actions "$@" ;;
    compare)  cmd_compare "$@" ;;
    history)  cmd_history "$@" ;;
    help | --help | -h)
      cmd_help
      ;;
    *)
      error "Unknown subcommand: $subcommand"
      cmd_help
      exit 1
      ;;
  esac
fi