shipwright-cli 1.10.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -36
- package/completions/_shipwright +212 -32
- package/completions/shipwright.bash +97 -25
- package/docs/strategy/01-market-research.md +619 -0
- package/docs/strategy/02-mission-and-brand.md +587 -0
- package/docs/strategy/03-gtm-and-roadmap.md +759 -0
- package/docs/strategy/QUICK-START.txt +289 -0
- package/docs/strategy/README.md +172 -0
- package/package.json +4 -2
- package/scripts/sw +208 -1
- package/scripts/sw-activity.sh +500 -0
- package/scripts/sw-adaptive.sh +925 -0
- package/scripts/sw-adversarial.sh +1 -1
- package/scripts/sw-architecture-enforcer.sh +1 -1
- package/scripts/sw-auth.sh +613 -0
- package/scripts/sw-autonomous.sh +664 -0
- package/scripts/sw-changelog.sh +704 -0
- package/scripts/sw-checkpoint.sh +1 -1
- package/scripts/sw-ci.sh +602 -0
- package/scripts/sw-cleanup.sh +1 -1
- package/scripts/sw-code-review.sh +637 -0
- package/scripts/sw-connect.sh +1 -1
- package/scripts/sw-context.sh +605 -0
- package/scripts/sw-cost.sh +1 -1
- package/scripts/sw-daemon.sh +432 -130
- package/scripts/sw-dashboard.sh +1 -1
- package/scripts/sw-db.sh +540 -0
- package/scripts/sw-decompose.sh +539 -0
- package/scripts/sw-deps.sh +551 -0
- package/scripts/sw-developer-simulation.sh +1 -1
- package/scripts/sw-discovery.sh +412 -0
- package/scripts/sw-docs-agent.sh +539 -0
- package/scripts/sw-docs.sh +1 -1
- package/scripts/sw-doctor.sh +59 -1
- package/scripts/sw-dora.sh +615 -0
- package/scripts/sw-durable.sh +710 -0
- package/scripts/sw-e2e-orchestrator.sh +535 -0
- package/scripts/sw-eventbus.sh +393 -0
- package/scripts/sw-feedback.sh +471 -0
- package/scripts/sw-fix.sh +1 -1
- package/scripts/sw-fleet-discover.sh +567 -0
- package/scripts/sw-fleet-viz.sh +404 -0
- package/scripts/sw-fleet.sh +8 -1
- package/scripts/sw-github-app.sh +596 -0
- package/scripts/sw-github-checks.sh +1 -1
- package/scripts/sw-github-deploy.sh +1 -1
- package/scripts/sw-github-graphql.sh +1 -1
- package/scripts/sw-guild.sh +569 -0
- package/scripts/sw-heartbeat.sh +1 -1
- package/scripts/sw-hygiene.sh +559 -0
- package/scripts/sw-incident.sh +617 -0
- package/scripts/sw-init.sh +88 -1
- package/scripts/sw-instrument.sh +699 -0
- package/scripts/sw-intelligence.sh +1 -1
- package/scripts/sw-jira.sh +1 -1
- package/scripts/sw-launchd.sh +363 -28
- package/scripts/sw-linear.sh +1 -1
- package/scripts/sw-logs.sh +1 -1
- package/scripts/sw-loop.sh +64 -3
- package/scripts/sw-memory.sh +1 -1
- package/scripts/sw-mission-control.sh +487 -0
- package/scripts/sw-model-router.sh +545 -0
- package/scripts/sw-otel.sh +596 -0
- package/scripts/sw-oversight.sh +689 -0
- package/scripts/sw-pipeline-composer.sh +1 -1
- package/scripts/sw-pipeline-vitals.sh +1 -1
- package/scripts/sw-pipeline.sh +687 -24
- package/scripts/sw-pm.sh +693 -0
- package/scripts/sw-pr-lifecycle.sh +522 -0
- package/scripts/sw-predictive.sh +1 -1
- package/scripts/sw-prep.sh +1 -1
- package/scripts/sw-ps.sh +1 -1
- package/scripts/sw-public-dashboard.sh +798 -0
- package/scripts/sw-quality.sh +595 -0
- package/scripts/sw-reaper.sh +1 -1
- package/scripts/sw-recruit.sh +573 -0
- package/scripts/sw-regression.sh +642 -0
- package/scripts/sw-release-manager.sh +736 -0
- package/scripts/sw-release.sh +706 -0
- package/scripts/sw-remote.sh +1 -1
- package/scripts/sw-replay.sh +520 -0
- package/scripts/sw-retro.sh +691 -0
- package/scripts/sw-scale.sh +444 -0
- package/scripts/sw-security-audit.sh +505 -0
- package/scripts/sw-self-optimize.sh +1 -1
- package/scripts/sw-session.sh +1 -1
- package/scripts/sw-setup.sh +1 -1
- package/scripts/sw-standup.sh +712 -0
- package/scripts/sw-status.sh +1 -1
- package/scripts/sw-strategic.sh +658 -0
- package/scripts/sw-stream.sh +450 -0
- package/scripts/sw-swarm.sh +583 -0
- package/scripts/sw-team-stages.sh +511 -0
- package/scripts/sw-templates.sh +1 -1
- package/scripts/sw-testgen.sh +515 -0
- package/scripts/sw-tmux-pipeline.sh +554 -0
- package/scripts/sw-tmux.sh +1 -1
- package/scripts/sw-trace.sh +485 -0
- package/scripts/sw-tracker-github.sh +188 -0
- package/scripts/sw-tracker-jira.sh +172 -0
- package/scripts/sw-tracker-linear.sh +251 -0
- package/scripts/sw-tracker.sh +117 -2
- package/scripts/sw-triage.sh +603 -0
- package/scripts/sw-upgrade.sh +1 -1
- package/scripts/sw-ux.sh +677 -0
- package/scripts/sw-webhook.sh +627 -0
- package/scripts/sw-widgets.sh +530 -0
- package/scripts/sw-worktree.sh +1 -1
|
@@ -0,0 +1,567 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║  shipwright fleet-discover — Auto-Discovery from GitHub Orgs              ║
# ║  Scan GitHub org for eligible repos · Filter by language/activity/topic   ║
# ╚═══════════════════════════════════════════════════════════════════════════╝
set -euo pipefail
trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR

VERSION="2.0.0"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# ANSI-C quoting ($'…') stores the real ESC byte, which `echo -e` passes
# through untouched — output is byte-identical to the literal-"\033" form.
CYAN=$'\033[38;2;0;212;255m'    # #00d4ff — primary accent
PURPLE=$'\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE=$'\033[38;2;0;102;255m'    # #0066ff — tertiary
GREEN=$'\033[38;2;74;222;128m'  # success
YELLOW=$'\033[38;2;250;204;21m' # warning
RED=$'\033[38;2;248;113;113m'   # error
DIM=$'\033[2m'
BOLD=$'\033[1m'
RESET=$'\033[0m'

# ─── Cross-platform compatibility ──────────────────────────────────────────
# shellcheck source=lib/compat.sh
[[ -f "$SCRIPT_DIR/lib/compat.sh" ]] && source "$SCRIPT_DIR/lib/compat.sh"

# ─── Output Helpers ─────────────────────────────────────────────────────────
# Status messages: info/success to stdout, warn/error to stderr.
info()    { echo -e "${CYAN}${BOLD}▸${RESET} $*"; }
success() { echo -e "${GREEN}${BOLD}✓${RESET} $*"; }
warn()    { echo -e "${YELLOW}${BOLD}⚠${RESET} $*" >&2; }
error()   { echo -e "${RED}${BOLD}✗${RESET} $*" >&2; }

# Timestamps: ISO-8601 UTC for humans/JSON, epoch seconds for arithmetic.
now_iso()   { date -u +"%Y-%m-%dT%H:%M:%SZ"; }
now_epoch() { date +%s; }

# ─── Structured Event Log ──────────────────────────────────────────────────
EVENTS_FILE="${HOME}/.shipwright/events.jsonl"
# Append one structured JSON event to the shared shipwright event log.
#   $1      — event type (e.g. "fleet.discover.completed")
#   $2..$n  — key=value pairs; numeric-looking values become JSON numbers,
#             everything else a JSON string.
# Relies on now_iso/now_epoch and EVENTS_FILE defined in this script's
# preamble. Keys are code-controlled and are not escaped.
emit_event() {
  local event_type="$1"
  shift
  local json_fields=""
  local kv
  for kv in "$@"; do
    local key="${kv%%=*}"
    local val="${kv#*=}"
    # Integers and decimals pass through unquoted. Require at least one
    # digit after the decimal point: the old pattern accepted "1.", which
    # is not valid JSON.
    if [[ "$val" =~ ^-?[0-9]+(\.[0-9]+)?$ ]]; then
      json_fields="${json_fields},\"${key}\":${val}"
    else
      # BUG FIX: escape backslashes first, then quotes — previously a value
      # containing a backslash produced a broken JSON line.
      val="${val//\\/\\\\}"
      val="${val//\"/\\\"}"
      json_fields="${json_fields},\"${key}\":\"${val}\""
    fi
  done
  mkdir -p "${HOME}/.shipwright"
  echo "{\"ts\":\"$(now_iso)\",\"ts_epoch\":$(now_epoch),\"type\":\"${event_type}\"${json_fields}}" >> "$EVENTS_FILE"
}
# ─── Help ───────────────────────────────────────────────────────────────────

# Print the usage screen for `shipwright fleet discover`.
# printf '%b' interprets backslash escapes exactly as `echo -e` did, so the
# colour variables render identically.
show_help() {
  local lines=(
    ""
    "${PURPLE}${BOLD}━━━ shipwright fleet discover v${VERSION} ━━━${RESET}"
    ""
    "${BOLD}USAGE${RESET}"
    "  ${CYAN}shipwright fleet discover${RESET} --org <name> [options]"
    ""
    "${BOLD}OPTIONS${RESET}"
    "  ${CYAN}--org${RESET} <name> GitHub organization to scan (required)"
    "  ${CYAN}--config${RESET} <path> Fleet config path ${DIM}(default: .claude/fleet-config.json)${RESET}"
    "  ${CYAN}--min-activity-days${RESET} <N> Only repos pushed to within N days ${DIM}(default: 90)${RESET}"
    "  ${CYAN}--language${RESET} <lang> Filter by primary language ${DIM}(e.g. Go, TypeScript, Python)${RESET}"
    "  ${CYAN}--topic${RESET} <tag> Only repos with this topic"
    "  ${CYAN}--exclude-topic${RESET} <tag> Skip repos with this topic ${DIM}(e.g. 'no-shipwright')${RESET}"
    "  ${CYAN}--dry-run${RESET} Show repos that would be added without modifying config"
    "  ${CYAN}--json${RESET} Output results as JSON"
    ""
    "${BOLD}EXAMPLES${RESET}"
    "  ${DIM}# Discover all repos in org, active within 90 days${RESET}"
    "  ${DIM}shipwright fleet discover --org myorg${RESET}"
    ""
    "  ${DIM}# Filter by language and recent activity${RESET}"
    "  ${DIM}shipwright fleet discover --org myorg --language Go --min-activity-days 30${RESET}"
    ""
    "  ${DIM}# Dry-run: show what would be added${RESET}"
    "  ${DIM}shipwright fleet discover --org myorg --dry-run${RESET}"
    ""
    "  ${DIM}# Skip repos with 'no-shipwright' topic${RESET}"
    "  ${DIM}shipwright fleet discover --org myorg --exclude-topic no-shipwright${RESET}"
    ""
  )
  printf '%b\n' "${lines[@]}"
}
# ─── GitHub API Checks ───────────────────────────────────────────────────────

# Verify GitHub API access is possible: not disabled via NO_GITHUB, gh CLI
# installed, and gh authenticated. Returns 0 when API calls may proceed,
# 1 otherwise (with an error message on stderr).
check_gh_auth() {
  if [[ "${NO_GITHUB:-false}" == "true" ]]; then
    error "GitHub API disabled via NO_GITHUB"
    return 1
  fi
  command -v gh >/dev/null 2>&1 || { error "gh CLI not found"; return 1; }
  gh auth status >/dev/null 2>&1 || { error "Not authenticated to GitHub"; return 1; }
  return 0
}
# ─── Discover Repos from GitHub Org ─────────────────────────────────────────
# Queries /orgs/{org}/repos (paginated) and applies filters.

# Scan a GitHub organization and report eligible repos.
#   $1 org               — organization to scan
#   $2 min_activity_days — 0 disables the recent-activity filter
#   $3 language_filter   — exact match on primary language ("" = any)
#   $4 topic_filter      — repo must carry this topic ("" = any)
#   $5 exclude_topic     — repos carrying this topic are treated as opted out
#   $6 dry_run           — accepted for call compatibility (not used here)
#   $7 json_output       — "true" → emit a JSON summary; otherwise a
#                          human-readable report followed by one eligible
#                          full_name per line (machine-readable tail).
discover_repos() {
  local org="$1"
  local min_activity_days="$2"
  local language_filter="$3"
  local topic_filter="$4"
  local exclude_topic="$5"
  # shellcheck disable=SC2034 — dry_run kept so the call signature is stable
  local dry_run="$6"
  local json_output="$7"

  info "Discovering repos in GitHub organization: ${CYAN}${org}${RESET}"

  if ! check_gh_auth; then
    error "Cannot authenticate to GitHub API"
    return 1
  fi

  local discovered_repos=()
  local skipped_repos=()
  local opted_out_repos=()

  # Cutoff epoch for the activity filter (0 = filter disabled).
  local cutoff_epoch=0
  if [[ "$min_activity_days" -gt 0 ]]; then
    cutoff_epoch=$(($(now_epoch) - (min_activity_days * 86400)))
  fi

  info "Fetching repositories (paginated)..."

  # --paginate already walks every page, so no manual page loop is needed.
  # BUG FIX: the previous revision passed both --jq and -q — they are the
  # same gh flag; only one filter is passed now.
  local repos_json
  repos_json=$(gh api "/orgs/${org}/repos" \
    --paginate \
    --jq '.[] | {name, full_name, url, archived, disabled, topics, language, pushed_at, has_issues}' \
    2>/dev/null) || {
    error "Failed to fetch repos from GitHub org: $org"
    return 1
  }

  local total_found=0
  local repo_line
  while IFS= read -r repo_line; do
    [[ -z "$repo_line" ]] && continue

    local name full_name archived disabled topics language pushed_at has_issues
    name=$(echo "$repo_line" | jq -r '.name // ""')
    full_name=$(echo "$repo_line" | jq -r '.full_name // ""')
    archived=$(echo "$repo_line" | jq -r '.archived // false')
    disabled=$(echo "$repo_line" | jq -r '.disabled // false')
    # BUG FIX: '(.topics // []) | join(",")' — the old '.topics | join(",") // ""'
    # made jq error on a null topics field instead of defaulting.
    topics=$(echo "$repo_line" | jq -r '(.topics // []) | join(",")')
    language=$(echo "$repo_line" | jq -r '.language // ""')
    pushed_at=$(echo "$repo_line" | jq -r '.pushed_at // ""')
    has_issues=$(echo "$repo_line" | jq -r '.has_issues // false')

    total_found=$((total_found + 1))

    # Archived/disabled repos cannot take automated work.
    if [[ "$archived" == "true" || "$disabled" == "true" ]]; then
      skipped_repos+=("$name:archived_or_disabled")
      continue
    fi

    # Issues must be enabled so the fleet can file work items.
    if [[ "$has_issues" != "true" ]]; then
      skipped_repos+=("$name:no_issues")
      continue
    fi

    if [[ -n "$language_filter" && "$language" != "$language_filter" ]]; then
      skipped_repos+=("$name:language")
      continue
    fi

    # Exact topic membership: wrap the comma-joined list in commas so e.g.
    # --topic go no longer matches a repo tagged "golang" (old substring grep).
    if [[ -n "$topic_filter" && ",${topics}," != *",${topic_filter},"* ]]; then
      skipped_repos+=("$name:topic_filter")
      continue
    fi

    if [[ -n "$exclude_topic" && ",${topics}," == *",${exclude_topic},"* ]]; then
      opted_out_repos+=("$name")
      continue
    fi

    # Activity filter: parse pushed_at — GNU date first, BSD date fallback.
    if [[ "$min_activity_days" -gt 0 && -n "$pushed_at" ]]; then
      local pushed_epoch
      pushed_epoch=$(date -d "$pushed_at" +%s 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%SZ" "$pushed_at" +%s 2>/dev/null || echo 0)
      if [[ "$pushed_epoch" -lt "$cutoff_epoch" ]]; then
        skipped_repos+=("$name:inactive")
        continue
      fi
    fi

    # Per-repo opt-out: a .shipwright-ignore file at the repo root.
    if gh api "/repos/${full_name}/contents/.shipwright-ignore" >/dev/null 2>&1; then
      opted_out_repos+=("$name")
      continue
    fi

    discovered_repos+=("$full_name")
  done <<< "$repos_json"

  # ─── JSON output ──────────────────────────────────────────────────────────
  if [[ "$json_output" == "true" ]]; then
    local discovered_json="[]" skipped_json="{}" opted_out_json="[]"
    local repo skip opted
    # ${arr[@]+...} guards the empty-array case under `set -u` on bash < 4.4
    # (e.g. macOS /bin/bash 3.2).
    for repo in ${discovered_repos[@]+"${discovered_repos[@]}"}; do
      discovered_json=$(echo "$discovered_json" | jq --arg r "$repo" '. += [$r]')
    done
    for skip in ${skipped_repos[@]+"${skipped_repos[@]}"}; do
      skipped_json=$(echo "$skipped_json" | jq --arg r "${skip%%:*}" --arg reason "${skip#*:}" '.[$r] = $reason')
    done
    for opted in ${opted_out_repos[@]+"${opted_out_repos[@]}"}; do
      opted_out_json=$(echo "$opted_out_json" | jq --arg r "$opted" '. += [$r]')
    done

    # BUG FIX: counts use plain ${#arr[@]} — the old $((${#arr[@]:-0}))
    # mixed length and default expansions into a malformed form.
    jq -n \
      --argjson discovered "$discovered_json" \
      --argjson skipped "$skipped_json" \
      --argjson opted_out "$opted_out_json" \
      --arg org "$org" \
      --argjson total_found "$total_found" \
      --argjson total_added "${#discovered_repos[@]}" \
      --argjson total_skipped "${#skipped_repos[@]}" \
      --argjson total_opted_out "${#opted_out_repos[@]}" \
      '{
        org: $org,
        discovered: $discovered,
        skipped: $skipped,
        opted_out: $opted_out,
        summary: {
          total_found: $total_found,
          total_eligible: $total_added,
          total_skipped: $total_skipped,
          total_opted_out: $total_opted_out
        }
      }'
    return 0
  fi

  # ─── Human-readable output ────────────────────────────────────────────────
  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Discovery Results ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo -e " Organization: ${CYAN}${org}${RESET}"
  echo ""

  if [[ ${#discovered_repos[@]} -gt 0 ]]; then
    echo -e "${BOLD}Eligible Repos (${#discovered_repos[@]}):${RESET}"
    local repo
    for repo in "${discovered_repos[@]}"; do
      echo -e " ${GREEN}✓${RESET} ${repo}"
    done
    echo ""
  else
    echo -e "${YELLOW}No eligible repos found${RESET}"
    echo ""
  fi

  if [[ ${#skipped_repos[@]} -gt 0 ]]; then
    echo -e "${BOLD}Skipped Repos (${#skipped_repos[@]}):${RESET}"
    local skip
    for skip in "${skipped_repos[@]}"; do
      echo -e " ${DIM}•${RESET} ${skip%%:*} ${DIM}(${skip#*:})${RESET}"
    done
    echo ""
  fi

  if [[ ${#opted_out_repos[@]} -gt 0 ]]; then
    echo -e "${BOLD}Opted Out Repos (${#opted_out_repos[@]}):${RESET}"
    local opted
    for opted in "${opted_out_repos[@]}"; do
      echo -e " ${YELLOW}⊘${RESET} ${opted} ${DIM}(has .shipwright-ignore or no-shipwright topic)${RESET}"
    done
    echo ""
  fi

  echo -e "${BOLD}Summary:${RESET}"
  echo -e " Total scanned: ${CYAN}${total_found}${RESET}"
  echo -e " Eligible to add: ${GREEN}${#discovered_repos[@]}${RESET}"
  echo -e " Skipped (filters): ${YELLOW}${#skipped_repos[@]}${RESET}"
  echo -e " Opted out: ${RED}${#opted_out_repos[@]}${RESET}"
  echo ""

  # Machine-readable tail: one full_name per line, consumed by main().
  if [[ ${#discovered_repos[@]} -gt 0 ]]; then
    printf '%s\n' "${discovered_repos[@]}"
  fi
}
# ─── Merge Discovered Repos into Fleet Config ────────────────────────────────
# Adds new repos without overwriting existing manual entries.

# Merge discovered repo paths into the config's .repos array.
#   $1      — path to fleet-config.json
#   $2..$n  — repo paths (owner/name) to add; duplicates are skipped
# Writes atomically (tmp file + mv). Returns 1 on missing or invalid config.
merge_into_config() {
  local config_path="$1"
  shift
  local discovered_repos=("$@")

  if [[ ! -f "$config_path" ]]; then
    error "Config file not found: $config_path"
    return 1
  fi

  # Refuse to touch a config that isn't valid JSON.
  if ! jq empty "$config_path" 2>/dev/null; then
    error "Invalid JSON in config: $config_path"
    return 1
  fi

  local current_repos
  current_repos=$(jq -r '.repos[].path // empty' "$config_path")

  # Keep only repos not already present in the config.
  local repos_to_add=()
  local new_repo existing_repo
  for new_repo in ${discovered_repos[@]+"${discovered_repos[@]}"}; do
    local repo_exists=false
    while IFS= read -r existing_repo; do
      if [[ "$existing_repo" == "$new_repo" ]]; then
        repo_exists=true
        break
      fi
    done <<< "$current_repos"
    [[ "$repo_exists" == "true" ]] || repos_to_add+=("$new_repo")
  done

  if [[ ${#repos_to_add[@]} -eq 0 ]]; then
    success "No new repos to add to config"
    return 0
  fi

  # Declaration split from assignment so a read failure isn't masked by
  # `local` always returning 0.
  local updated_config
  updated_config=$(cat "$config_path")

  local repo_path
  for repo_path in "${repos_to_add[@]}"; do
    updated_config=$(echo "$updated_config" | jq \
      --arg path "$repo_path" \
      '.repos += [{"path": $path}]')
  done

  # Atomic write: never leave a half-written config behind.
  local tmp_config="${config_path}.tmp.$$"
  echo "$updated_config" > "$tmp_config"
  mv "$tmp_config" "$config_path"

  success "Added ${#repos_to_add[@]} new repo(s) to config"
  info "Config saved: ${DIM}${config_path}${RESET}"

  # BUG FIX: this event previously logged "org=$1", but after the `shift`
  # above $1 is a repo path, not the org — log the config path instead.
  emit_event "fleet.discover.merged" \
    "config=$config_path" \
    "repos_added=${#repos_to_add[@]}" \
    "total_repos=$(echo "$updated_config" | jq '.repos | length')"

  return 0
}
# ═══════════════════════════════════════════════════════════════════════════════
# MAIN
# ═══════════════════════════════════════════════════════════════════════════════

# Entry point: parse CLI flags, run discovery, then — unless --dry-run or
# --json — merge the discovered repos into the fleet config.
main() {
  local org=""
  local config_path=".claude/fleet-config.json"
  local min_activity_days=90
  local language_filter=""
  local topic_filter=""
  local exclude_topic=""
  local dry_run=false
  local json_output=false

  # Each value-taking flag accepts both "--flag value" and "--flag=value".
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --org)
        org="${2:-}"
        [[ -z "$org" ]] && { error "Missing value for --org"; return 1; }
        shift 2
        ;;
      --org=*)
        org="${1#--org=}"
        shift
        ;;
      --config)
        config_path="${2:-}"
        [[ -z "$config_path" ]] && { error "Missing value for --config"; return 1; }
        shift 2
        ;;
      --config=*)
        config_path="${1#--config=}"
        shift
        ;;
      --min-activity-days)
        min_activity_days="${2:-90}"
        shift 2
        ;;
      --min-activity-days=*)
        min_activity_days="${1#--min-activity-days=}"
        shift
        ;;
      --language)
        language_filter="${2:-}"
        [[ -z "$language_filter" ]] && { error "Missing value for --language"; return 1; }
        shift 2
        ;;
      --language=*)
        language_filter="${1#--language=}"
        shift
        ;;
      --topic)
        topic_filter="${2:-}"
        [[ -z "$topic_filter" ]] && { error "Missing value for --topic"; return 1; }
        shift 2
        ;;
      --topic=*)
        topic_filter="${1#--topic=}"
        shift
        ;;
      --exclude-topic)
        exclude_topic="${2:-}"
        [[ -z "$exclude_topic" ]] && { error "Missing value for --exclude-topic"; return 1; }
        shift 2
        ;;
      --exclude-topic=*)
        exclude_topic="${1#--exclude-topic=}"
        shift
        ;;
      --dry-run)
        dry_run=true
        shift
        ;;
      --json)
        json_output=true
        shift
        ;;
      --help|-h)
        show_help
        return 0
        ;;
      *)
        error "Unknown option: $1"
        show_help
        return 1
        ;;
    esac
  done

  if [[ -z "$org" ]]; then
    error "Missing required argument: --org"
    show_help
    return 1
  fi

  if ! [[ "$min_activity_days" =~ ^[0-9]+$ ]]; then
    error "Invalid value for --min-activity-days: must be a number"
    return 1
  fi

  local discovered_repos_output
  discovered_repos_output=$(discover_repos "$org" "$min_activity_days" \
    "$language_filter" "$topic_filter" "$exclude_topic" "$dry_run" "$json_output") || return 1

  if [[ "$json_output" == "true" ]]; then
    echo "$discovered_repos_output"
    return 0
  fi

  # BUG FIX: the command substitution above swallows discover_repos' report;
  # replay it so the user actually sees the results (previously text mode —
  # including --dry-run — printed nothing).
  printf '%s\n' "$discovered_repos_output"

  if [[ "$dry_run" == "true" ]]; then
    info "Dry-run mode — no changes made to config"
    return 0
  fi

  # Extract the machine-readable tail: repo lines are flush-left and contain
  # a slash, unlike the indented/labelled report lines.
  # NOTE(review): assumes no report line is flush-left with a "/" — holds
  # for the current report format; verify if the report changes.
  local discovered_repos=()
  local line
  while IFS= read -r line; do
    if [[ -n "$line" && "$line" != " "* && "$line" == *"/"* ]]; then
      discovered_repos+=("$line")
    fi
  done <<< "$discovered_repos_output"

  if [[ ${#discovered_repos[@]} -gt 0 ]]; then
    merge_into_config "$config_path" "${discovered_repos[@]}"
    emit_event "fleet.discover.completed" \
      "org=$org" \
      "repos_discovered=${#discovered_repos[@]}"
  fi
}
# ─── Source Guard ───────────────────────────────────────────────────────────
# Run main only when this file is executed directly; `source`-ing it just
# loads the function definitions for reuse by other scripts.
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
  main "$@"
fi