shipwright-cli 1.10.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -36
- package/completions/_shipwright +212 -32
- package/completions/shipwright.bash +97 -25
- package/docs/strategy/01-market-research.md +619 -0
- package/docs/strategy/02-mission-and-brand.md +587 -0
- package/docs/strategy/03-gtm-and-roadmap.md +759 -0
- package/docs/strategy/QUICK-START.txt +289 -0
- package/docs/strategy/README.md +172 -0
- package/package.json +4 -2
- package/scripts/sw +208 -1
- package/scripts/sw-activity.sh +500 -0
- package/scripts/sw-adaptive.sh +925 -0
- package/scripts/sw-adversarial.sh +1 -1
- package/scripts/sw-architecture-enforcer.sh +1 -1
- package/scripts/sw-auth.sh +613 -0
- package/scripts/sw-autonomous.sh +664 -0
- package/scripts/sw-changelog.sh +704 -0
- package/scripts/sw-checkpoint.sh +1 -1
- package/scripts/sw-ci.sh +602 -0
- package/scripts/sw-cleanup.sh +1 -1
- package/scripts/sw-code-review.sh +637 -0
- package/scripts/sw-connect.sh +1 -1
- package/scripts/sw-context.sh +605 -0
- package/scripts/sw-cost.sh +1 -1
- package/scripts/sw-daemon.sh +432 -130
- package/scripts/sw-dashboard.sh +1 -1
- package/scripts/sw-db.sh +540 -0
- package/scripts/sw-decompose.sh +539 -0
- package/scripts/sw-deps.sh +551 -0
- package/scripts/sw-developer-simulation.sh +1 -1
- package/scripts/sw-discovery.sh +412 -0
- package/scripts/sw-docs-agent.sh +539 -0
- package/scripts/sw-docs.sh +1 -1
- package/scripts/sw-doctor.sh +59 -1
- package/scripts/sw-dora.sh +615 -0
- package/scripts/sw-durable.sh +710 -0
- package/scripts/sw-e2e-orchestrator.sh +535 -0
- package/scripts/sw-eventbus.sh +393 -0
- package/scripts/sw-feedback.sh +471 -0
- package/scripts/sw-fix.sh +1 -1
- package/scripts/sw-fleet-discover.sh +567 -0
- package/scripts/sw-fleet-viz.sh +404 -0
- package/scripts/sw-fleet.sh +8 -1
- package/scripts/sw-github-app.sh +596 -0
- package/scripts/sw-github-checks.sh +1 -1
- package/scripts/sw-github-deploy.sh +1 -1
- package/scripts/sw-github-graphql.sh +1 -1
- package/scripts/sw-guild.sh +569 -0
- package/scripts/sw-heartbeat.sh +1 -1
- package/scripts/sw-hygiene.sh +559 -0
- package/scripts/sw-incident.sh +617 -0
- package/scripts/sw-init.sh +88 -1
- package/scripts/sw-instrument.sh +699 -0
- package/scripts/sw-intelligence.sh +1 -1
- package/scripts/sw-jira.sh +1 -1
- package/scripts/sw-launchd.sh +363 -28
- package/scripts/sw-linear.sh +1 -1
- package/scripts/sw-logs.sh +1 -1
- package/scripts/sw-loop.sh +64 -3
- package/scripts/sw-memory.sh +1 -1
- package/scripts/sw-mission-control.sh +487 -0
- package/scripts/sw-model-router.sh +545 -0
- package/scripts/sw-otel.sh +596 -0
- package/scripts/sw-oversight.sh +689 -0
- package/scripts/sw-pipeline-composer.sh +1 -1
- package/scripts/sw-pipeline-vitals.sh +1 -1
- package/scripts/sw-pipeline.sh +687 -24
- package/scripts/sw-pm.sh +693 -0
- package/scripts/sw-pr-lifecycle.sh +522 -0
- package/scripts/sw-predictive.sh +1 -1
- package/scripts/sw-prep.sh +1 -1
- package/scripts/sw-ps.sh +1 -1
- package/scripts/sw-public-dashboard.sh +798 -0
- package/scripts/sw-quality.sh +595 -0
- package/scripts/sw-reaper.sh +1 -1
- package/scripts/sw-recruit.sh +573 -0
- package/scripts/sw-regression.sh +642 -0
- package/scripts/sw-release-manager.sh +736 -0
- package/scripts/sw-release.sh +706 -0
- package/scripts/sw-remote.sh +1 -1
- package/scripts/sw-replay.sh +520 -0
- package/scripts/sw-retro.sh +691 -0
- package/scripts/sw-scale.sh +444 -0
- package/scripts/sw-security-audit.sh +505 -0
- package/scripts/sw-self-optimize.sh +1 -1
- package/scripts/sw-session.sh +1 -1
- package/scripts/sw-setup.sh +1 -1
- package/scripts/sw-standup.sh +712 -0
- package/scripts/sw-status.sh +1 -1
- package/scripts/sw-strategic.sh +658 -0
- package/scripts/sw-stream.sh +450 -0
- package/scripts/sw-swarm.sh +583 -0
- package/scripts/sw-team-stages.sh +511 -0
- package/scripts/sw-templates.sh +1 -1
- package/scripts/sw-testgen.sh +515 -0
- package/scripts/sw-tmux-pipeline.sh +554 -0
- package/scripts/sw-tmux.sh +1 -1
- package/scripts/sw-trace.sh +485 -0
- package/scripts/sw-tracker-github.sh +188 -0
- package/scripts/sw-tracker-jira.sh +172 -0
- package/scripts/sw-tracker-linear.sh +251 -0
- package/scripts/sw-tracker.sh +117 -2
- package/scripts/sw-triage.sh +603 -0
- package/scripts/sw-upgrade.sh +1 -1
- package/scripts/sw-ux.sh +677 -0
- package/scripts/sw-webhook.sh +627 -0
- package/scripts/sw-widgets.sh +530 -0
- package/scripts/sw-worktree.sh +1 -1
|
@@ -0,0 +1,412 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║ shipwright discovery — Cross-Pipeline Real-Time Learning                  ║
# ║ Enables knowledge sharing between concurrent pipelines via discovery      ║
# ║ channel: broadcast, query, inject, clean, status                          ║
# ╚═══════════════════════════════════════════════════════════════════════════╝
set -euo pipefail
# -E (errtrace) makes the ERR trap below fire inside functions, command
# substitutions, and subshells; without it the trap only reports the
# top-level call site, not the actual failing line.
set -E
trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR

# NOTE(review): VERSION and REPO_DIR are not referenced elsewhere in this
# file — presumably consumed by scripts that source this one; confirm
# before removing.
VERSION="2.0.0"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# Literal backslash sequences; rendered by `echo -e` in the helpers below.
CYAN='\033[38;2;0;212;255m'    # #00d4ff — primary accent
PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE='\033[38;2;0;102;255m'    # #0066ff — tertiary
GREEN='\033[38;2;74;222;128m'  # success
YELLOW='\033[38;2;250;204;21m' # warning
RED='\033[38;2;248;113;113m'   # error
DIM='\033[2m'
BOLD='\033[1m'
RESET='\033[0m'

# ─── Cross-platform compatibility ──────────────────────────────────────────
# shellcheck source=lib/compat.sh
[[ -f "$SCRIPT_DIR/lib/compat.sh" ]] && source "$SCRIPT_DIR/lib/compat.sh"

# ─── Output Helpers ─────────────────────────────────────────────────────────
info() { echo -e "${CYAN}${BOLD}▸${RESET} $*"; }
success() { echo -e "${GREEN}${BOLD}✓${RESET} $*"; }
warn() { echo -e "${YELLOW}${BOLD}⚠${RESET} $*"; }
error() { echo -e "${RED}${BOLD}✗${RESET} $*" >&2; }

# UTC ISO-8601 timestamp, e.g. 2024-01-01T00:00:00Z
now_iso() { date -u +"%Y-%m-%dT%H:%M:%SZ"; }
# Seconds since the Unix epoch
now_epoch() { date +%s; }

# ─── Discovery Storage ──────────────────────────────────────────────────────
# Append-only JSONL event log shared by all pipelines on this machine.
DISCOVERIES_FILE="${HOME}/.shipwright/discoveries.jsonl"
# Per-pipeline "seen" bookkeeping files live here (seen-<pipeline>.json).
DISCOVERIES_DIR="${HOME}/.shipwright/discoveries"
DISCOVERY_TTL_SECS=$((24 * 60 * 60)) # 24 hours default
|
|
42
|
+
|
|
43
|
+
# Create the shared discoveries directory if it does not exist yet.
# Globals: DISCOVERIES_DIR (read)
ensure_discoveries_dir() {
  # '--' stops option parsing in case the path ever begins with a dash
  mkdir -p -- "$DISCOVERIES_DIR"
}
|
|
46
|
+
|
|
47
|
+
# Print the path of the per-pipeline "seen" bookkeeping file.
# Arguments: $1 - pipeline identifier
# Globals:   DISCOVERIES_DIR (read)
get_seen_file() {
  local id="$1"
  printf '%s\n' "${DISCOVERIES_DIR}/seen-${id}.json"
}
|
|
51
|
+
|
|
52
|
+
# ─── Discovery Functions ───────────────────────────────────────────────────
|
|
53
|
+
|
|
54
|
+
# broadcast: write a new discovery event
|
|
55
|
+
# Append one discovery event to the shared JSONL channel.
# Arguments: $1 - category
#            $2 - comma-separated file patterns the discovery applies to
#            $3 - discovery text
#            $4 - resolution (optional)
# Globals:   DISCOVERIES_FILE (appended), SHIPWRIGHT_PIPELINE_ID (read)
broadcast_discovery() {
  local category="$1"
  local patterns="$2"
  local note="$3"
  local remedy="${4:-}"
  local origin="${SHIPWRIGHT_PIPELINE_ID:-unknown}"
  local record

  ensure_discoveries_dir

  # jq -cn emits a compact single-line object so the file stays valid JSONL.
  record=$(
    jq -cn \
      --arg ts "$(now_iso)" \
      --argjson ts_epoch "$(now_epoch)" \
      --arg pipeline_id "$origin" \
      --arg category "$category" \
      --arg file_patterns "$patterns" \
      --arg discovery "$note" \
      --arg resolution "$remedy" \
      '{ts: $ts, ts_epoch: $ts_epoch, pipeline_id: $pipeline_id, category: $category, file_patterns: $file_patterns, discovery: $discovery, resolution: $resolution}'
  )

  printf '%s\n' "$record" >> "$DISCOVERIES_FILE"
  success "Broadcast discovery: ${category} (${patterns})"
}
|
|
80
|
+
|
|
81
|
+
# query: find relevant discoveries for given file patterns
|
|
82
|
+
# List discoveries whose file patterns overlap the given ones.
# Arguments: $1 - comma-separated file patterns
#            $2 - maximum number of results (default 10)
# Globals:   DISCOVERIES_FILE (read)
query_discoveries() {
  local file_patterns="$1"
  local limit="${2:-10}"

  ensure_discoveries_dir

  [[ ! -f "$DISCOVERIES_FILE" ]] && {
    info "No discoveries yet"
    return 0
  }

  local count=0
  local found=false

  while IFS= read -r line; do
    [[ -z "$line" ]] && continue

    local disc_patterns
    disc_patterns=$(echo "$line" | jq -r '.file_patterns // ""' 2>/dev/null || echo "")

    # Check if patterns overlap
    if patterns_overlap "$file_patterns" "$disc_patterns"; then
      if [[ "$found" == "false" ]]; then
        success "Found relevant discoveries:"
        found=true
      fi

      local category discovery
      category=$(echo "$line" | jq -r '.category' 2>/dev/null || echo "?")
      discovery=$(echo "$line" | jq -r '.discovery' 2>/dev/null || echo "?")

      echo -e " ${DIM}→${RESET} [${category}] ${discovery} [${disc_patterns}]"

      # BUG FIX: ((count++)) returns status 1 when count is 0 (post-increment
      # evaluates to 0), which aborts the whole script under `set -e` right
      # after the first match. Plain arithmetic expansion always succeeds.
      count=$((count + 1))
      [[ "$count" -ge "$limit" ]] && break
    fi
  done < "$DISCOVERIES_FILE"

  if [[ "$found" == "false" ]]; then
    info "No relevant discoveries found for patterns: ${file_patterns}"
  fi
}
|
|
124
|
+
|
|
125
|
+
# inject: return discoveries for current pipeline that haven't been seen
|
|
126
|
+
# Emit discoveries relevant to the given patterns that this pipeline has not
# seen yet, and persist their ts_epoch values into the pipeline's "seen" set
# so repeated calls do not re-inject them.
# Arguments: $1 - comma-separated file patterns
# Globals:   DISCOVERIES_FILE (read), SHIPWRIGHT_PIPELINE_ID (read)
# Outputs:   one "[category] discovery — Resolution: …" line per new entry
inject_discoveries() {
  local file_patterns="$1"
  local pipeline_id="${SHIPWRIGHT_PIPELINE_ID:-unknown}"

  ensure_discoveries_dir

  [[ ! -f "$DISCOVERIES_FILE" ]] && {
    info "No discoveries available"
    return 0
  }

  local seen_file
  seen_file=$(get_seen_file "$pipeline_id")

  # Collect relevant discoveries whose ts_epoch is not already in the seen set
  local new_count=0
  local injected_entries=()

  while IFS= read -r line; do
    [[ -z "$line" ]] && continue

    local ts_epoch
    ts_epoch=$(echo "$line" | jq -r '.ts_epoch' 2>/dev/null || echo "0")

    # Skip if already seen by this pipeline
    if [[ -f "$seen_file" ]]; then
      if jq -e ".seen | contains([${ts_epoch}])" "$seen_file" 2>/dev/null | grep -q "true"; then
        continue
      fi
    fi

    # Check if relevant to current file patterns
    local disc_patterns
    disc_patterns=$(echo "$line" | jq -r '.file_patterns // ""' 2>/dev/null || echo "")

    if [[ -n "$disc_patterns" ]] && patterns_overlap "$file_patterns" "$disc_patterns"; then
      injected_entries+=("$line")
      # BUG FIX: ((new_count++)) returns status 1 on the first increment
      # (post-increment of 0) and kills the script under `set -e`.
      new_count=$((new_count + 1))
    fi
  done < "$DISCOVERIES_FILE"

  if [[ "$new_count" -eq 0 ]]; then
    info "No new discoveries to inject"
    return 0
  fi

  # Rebuild the seen set: previously seen epochs plus the newly injected ones
  local new_seen="{\"seen\":["
  local first=true

  if [[ -f "$seen_file" ]]; then
    while IFS= read -r ts; do
      [[ -z "$ts" ]] && continue
      [[ "$first" == "false" ]] && new_seen="${new_seen},"
      new_seen="${new_seen}${ts}"
      first=false
    done < <(jq -r '.seen[]? // empty' "$seen_file" 2>/dev/null || true)
  fi

  local entry ts_epoch
  for entry in "${injected_entries[@]}"; do
    ts_epoch=$(echo "$entry" | jq -r '.ts_epoch' 2>/dev/null || echo "0")
    [[ "$first" == "false" ]] && new_seen="${new_seen},"
    new_seen="${new_seen}${ts_epoch}"
    first=false
  done

  new_seen="${new_seen}]}"

  # Atomic write: build in a temp file, then rename into place
  local tmp_seen
  tmp_seen=$(mktemp)
  echo "$new_seen" > "$tmp_seen"
  mv "$tmp_seen" "$seen_file"

  success "Injected ${new_count} new discoveries"

  # Output for injection into pipeline
  for entry in "${injected_entries[@]}"; do
    echo "$entry" | jq -r '"[\(.category)] \(.discovery) — Resolution: \(.resolution)"' 2>/dev/null || true
  done
}
|
|
212
|
+
|
|
213
|
+
# patterns_overlap: check if two comma-separated patterns overlap
|
|
214
|
+
# Heuristically decide whether two comma-separated pattern lists overlap.
# Glob characters are compared textually (not expanded): two patterns
# overlap when they share a directory prefix or match exactly.
# Arguments: $1 - first pattern list, $2 - second pattern list
# Returns:   0 on overlap, 1 otherwise
patterns_overlap() {
  local patterns1="$1"
  local patterns2="$2"

  local p1 p2
  # BUG FIX: the split arrays were not declared local and leaked
  # p1_arr/p2_arr into the caller's scope.
  local -a p1_arr p2_arr

  IFS=',' read -ra p1_arr <<< "$patterns1"
  IFS=',' read -ra p2_arr <<< "$patterns2"

  for p1 in "${p1_arr[@]}"; do
    p1="${p1// /}" # trim spaces
    [[ -z "$p1" ]] && continue

    for p2 in "${p2_arr[@]}"; do
      p2="${p2// /}" # trim spaces
      [[ -z "$p2" ]] && continue

      # Directory part of each pattern; when a pattern has no slash,
      # ${x%/*} leaves it unchanged, so bare patterns compare whole.
      local p1_base="${p1%/*}"
      local p2_base="${p2%/*}"

      # Same directory, or one pattern's directory falls under the other
      if [[ "$p1_base" == "$p2_base" ]] || [[ "$p1_base" == "$p2"* ]] || [[ "$p2_base" == "$p1"* ]]; then
        return 0
      fi

      # Also check if exact patterns match
      if [[ "$p1" == "$p2" ]]; then
        return 0
      fi
    done
  done

  return 1
}
|
|
251
|
+
|
|
252
|
+
# clean: remove stale discoveries (older than TTL)
|
|
253
|
+
# Drop discoveries older than the TTL from the shared JSONL file.
# Arguments: $1 - TTL in seconds (default: DISCOVERY_TTL_SECS)
# Globals:   DISCOVERIES_FILE (rewritten atomically), DISCOVERY_TTL_SECS (read)
clean_discoveries() {
  local ttl="${1:-$DISCOVERY_TTL_SECS}"

  [[ ! -f "$DISCOVERIES_FILE" ]] && {
    info "No discoveries to clean"
    return 0
  }

  local now
  now=$(now_epoch)
  local cutoff=$((now - ttl))

  local tmp_file
  tmp_file=$(mktemp)
  local removed_count=0

  while IFS= read -r line; do
    [[ -z "$line" ]] && continue

    local ts_epoch
    ts_epoch=$(echo "$line" | jq -r '.ts_epoch // 0' 2>/dev/null || echo "0")

    if [[ "$ts_epoch" -ge "$cutoff" ]]; then
      echo "$line" >> "$tmp_file"
    else
      # BUG FIX: ((removed_count++)) returns status 1 on the first increment
      # and would abort the script under `set -e`.
      removed_count=$((removed_count + 1))
    fi
  done < "$DISCOVERIES_FILE"

  # Atomic replace.
  # BUG FIX: the original `[[ -s tmp ]] && mv … || rm -f tmp` skipped the
  # replace whenever the surviving set was empty, so if EVERY entry was
  # stale the old file (with all its stale entries) was left on disk.
  # Always move the temp file into place, even when it is empty.
  mv "$tmp_file" "$DISCOVERIES_FILE"

  if [[ "$removed_count" -gt 0 ]]; then
    success "Cleaned ${removed_count} stale discoveries (older than ${ttl}s)"
  else
    info "No stale discoveries to clean"
  fi
}
|
|
291
|
+
|
|
292
|
+
# status: show discovery channel stats
|
|
293
|
+
# Print discovery channel statistics: totals, age range, per-category
# counts, and storage details.
# Globals: DISCOVERIES_FILE (read)
# Returns: 0 always. (The original ended with `[[ -f … ]] && echo`, which
#          made the function return 1 when the file was missing and killed
#          callers running under `set -e`.)
show_status() {
  info "Discovery Channel Status"
  echo ""

  local total=0
  local oldest=""
  local newest=""

  if [[ -f "$DISCOVERIES_FILE" ]]; then
    total=$(wc -l < "$DISCOVERIES_FILE")

    # jq -s slurps the JSONL stream into one array so min_by/max_by work
    oldest=$(jq -s 'min_by(.ts_epoch) | .ts' "$DISCOVERIES_FILE" 2>/dev/null || echo "N/A")
    newest=$(jq -s 'max_by(.ts_epoch) | .ts' "$DISCOVERIES_FILE" 2>/dev/null || echo "N/A")
  fi

  echo -e " ${CYAN}Total discoveries:${RESET} ${total}"
  echo -e " ${CYAN}Oldest:${RESET} ${oldest}"
  echo -e " ${CYAN}Newest:${RESET} ${newest}"
  echo ""

  # Count by category
  if [[ -f "$DISCOVERIES_FILE" ]]; then
    echo -e " ${CYAN}By category:${RESET}"
    jq -s 'group_by(.category) | map({category: .[0].category, count: length}) | .[]' \
      "$DISCOVERIES_FILE" 2>/dev/null | \
      jq -r '" \(.category): \(.count)"' 2>/dev/null | sort || true
  fi

  echo ""
  echo -e " ${CYAN}Storage:${RESET} ${DISCOVERIES_FILE}"
  # BUG FIX: full if/fi instead of `[[ -f … ]] && echo` so a missing file
  # does not leave the function returning 1 (fatal under `set -e`).
  if [[ -f "$DISCOVERIES_FILE" ]]; then
    echo -e " ${CYAN}Size:${RESET} $(du -h "$DISCOVERIES_FILE" | cut -f1)"
  fi
}
|
|
326
|
+
|
|
327
|
+
# show_help: display usage
|
|
328
|
+
# Print CLI usage: commands, environment variables, and examples.
# Uses printf '%b\n', which interprets backslash escapes (the \033 color
# sequences) exactly like `echo -e` did.
show_help() {
  printf '%b\n' "${CYAN}${BOLD}shipwright discovery${RESET} — Cross-Pipeline Real-Time Learning"
  printf '\n'
  printf '%b\n' "${BOLD}USAGE${RESET}"
  printf '%b\n' " ${CYAN}shipwright discovery${RESET} <command> [options]"
  printf '\n'
  printf '%b\n' "${BOLD}COMMANDS${RESET}"
  printf '%b\n' " ${CYAN}broadcast${RESET} <category> <patterns> <text> [resolution]"
  printf '%b\n' " Write a new discovery event to the shared channel"
  printf '\n'
  printf '%b\n' " ${CYAN}query${RESET} <patterns> [limit]"
  printf '%b\n' " Find discoveries relevant to file patterns"
  printf '\n'
  printf '%b\n' " ${CYAN}inject${RESET} <patterns>"
  printf '%b\n' " Inject new discoveries for this pipeline (tracks seen set)"
  printf '\n'
  printf '%b\n' " ${CYAN}clean${RESET} [ttl-seconds]"
  printf '%b\n' " Remove stale discoveries (default: 86400s = 24h)"
  printf '\n'
  printf '%b\n' " ${CYAN}status${RESET}"
  printf '%b\n' " Show discovery channel statistics and health"
  printf '\n'
  printf '%b\n' " ${CYAN}help${RESET}"
  printf '%b\n' " Show this help message"
  printf '\n'
  printf '%b\n' "${BOLD}ENVIRONMENT${RESET}"
  printf '%b\n' " ${DIM}SHIPWRIGHT_PIPELINE_ID${RESET} Current pipeline ID (auto-tracked in seen set)"
  printf '\n'
  printf '%b\n' "${BOLD}EXAMPLES${RESET}"
  printf '%b\n' " ${DIM}shipwright discovery broadcast \"auth-fix\" \"src/auth/*.ts\" \"JWT validation failure resolved\" \"Added claim verification\"${RESET}"
  printf '%b\n' " ${DIM}shipwright discovery query \"src/**/*.js,src/**/*.ts\" 5${RESET}"
  printf '%b\n' " ${DIM}shipwright discovery inject \"src/api/**\" 2>&1 | xargs -I {} echo \"Learning: {}\"${RESET}"
  printf '%b\n' " ${DIM}shipwright discovery clean 172800${RESET} # Remove discoveries older than 48h"
  printf '%b\n' " ${DIM}shipwright discovery status${RESET}"
}
|
|
363
|
+
|
|
364
|
+
# ─── Main ────────────────────────────────────────────────────────────────
|
|
365
|
+
|
|
366
|
+
# ─── Main ────────────────────────────────────────────────────────────────

# Dispatch CLI subcommands; with no arguments, shows help.
# Arguments: $1 - subcommand, remaining args passed to the handler
main() {
  local subcommand="${1:-help}"
  shift 2>/dev/null || true

  case "$subcommand" in
    broadcast)
      if [[ $# -lt 3 ]]; then
        error "broadcast requires: category, patterns, text [, resolution]"
        exit 1
      fi
      broadcast_discovery "$1" "$2" "$3" "${4:-}"
      ;;
    query)
      if [[ $# -lt 1 ]]; then
        error "query requires: patterns [limit]"
        exit 1
      fi
      query_discoveries "$1" "${2:-10}"
      ;;
    inject)
      if [[ $# -lt 1 ]]; then
        error "inject requires: patterns"
        exit 1
      fi
      inject_discoveries "$1"
      ;;
    clean)
      clean_discoveries "${1:-$DISCOVERY_TTL_SECS}"
      ;;
    status)
      show_status
      ;;
    help | --help | -h)
      show_help
      ;;
    *)
      error "Unknown command: ${subcommand}"
      echo ""
      show_help
      exit 1
      ;;
  esac
}
|
|
409
|
+
|
|
410
|
+
# Run the CLI entry point only when executed directly (not when sourced).
[[ "${BASH_SOURCE[0]}" != "$0" ]] || main "$@"
|