shipwright-cli 1.7.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/code-reviewer.md +90 -0
- package/.claude/agents/devops-engineer.md +142 -0
- package/.claude/agents/pipeline-agent.md +80 -0
- package/.claude/agents/shell-script-specialist.md +150 -0
- package/.claude/agents/test-specialist.md +196 -0
- package/.claude/hooks/post-tool-use.sh +38 -0
- package/.claude/hooks/pre-tool-use.sh +25 -0
- package/.claude/hooks/session-started.sh +37 -0
- package/README.md +212 -814
- package/claude-code/CLAUDE.md.shipwright +54 -0
- package/claude-code/hooks/notify-idle.sh +2 -2
- package/claude-code/hooks/session-start.sh +24 -0
- package/claude-code/hooks/task-completed.sh +6 -2
- package/claude-code/settings.json.template +12 -0
- package/dashboard/public/app.js +4422 -0
- package/dashboard/public/index.html +816 -0
- package/dashboard/public/styles.css +4755 -0
- package/dashboard/server.ts +4315 -0
- package/docs/KNOWN-ISSUES.md +18 -10
- package/docs/TIPS.md +38 -26
- package/docs/patterns/README.md +33 -23
- package/package.json +9 -5
- package/scripts/adapters/iterm2-adapter.sh +1 -1
- package/scripts/adapters/tmux-adapter.sh +52 -23
- package/scripts/adapters/wezterm-adapter.sh +26 -14
- package/scripts/lib/compat.sh +200 -0
- package/scripts/lib/helpers.sh +72 -0
- package/scripts/postinstall.mjs +72 -13
- package/scripts/{cct → sw} +109 -21
- package/scripts/sw-adversarial.sh +274 -0
- package/scripts/sw-architecture-enforcer.sh +330 -0
- package/scripts/sw-checkpoint.sh +390 -0
- package/scripts/{cct-cleanup.sh → sw-cleanup.sh} +3 -1
- package/scripts/sw-connect.sh +619 -0
- package/scripts/{cct-cost.sh → sw-cost.sh} +368 -34
- package/scripts/{cct-daemon.sh → sw-daemon.sh} +2217 -204
- package/scripts/sw-dashboard.sh +477 -0
- package/scripts/sw-developer-simulation.sh +252 -0
- package/scripts/sw-docs.sh +635 -0
- package/scripts/sw-doctor.sh +907 -0
- package/scripts/{cct-fix.sh → sw-fix.sh} +10 -6
- package/scripts/{cct-fleet.sh → sw-fleet.sh} +498 -22
- package/scripts/sw-github-checks.sh +521 -0
- package/scripts/sw-github-deploy.sh +533 -0
- package/scripts/sw-github-graphql.sh +972 -0
- package/scripts/sw-heartbeat.sh +293 -0
- package/scripts/sw-init.sh +522 -0
- package/scripts/sw-intelligence.sh +1196 -0
- package/scripts/sw-jira.sh +643 -0
- package/scripts/sw-launchd.sh +364 -0
- package/scripts/sw-linear.sh +648 -0
- package/scripts/{cct-logs.sh → sw-logs.sh} +72 -2
- package/scripts/{cct-loop.sh → sw-loop.sh} +534 -44
- package/scripts/{cct-memory.sh → sw-memory.sh} +321 -38
- package/scripts/sw-patrol-meta.sh +417 -0
- package/scripts/sw-pipeline-composer.sh +455 -0
- package/scripts/{cct-pipeline.sh → sw-pipeline.sh} +2319 -178
- package/scripts/sw-predictive.sh +820 -0
- package/scripts/{cct-prep.sh → sw-prep.sh} +339 -49
- package/scripts/{cct-ps.sh → sw-ps.sh} +6 -4
- package/scripts/{cct-reaper.sh → sw-reaper.sh} +6 -4
- package/scripts/sw-remote.sh +687 -0
- package/scripts/sw-self-optimize.sh +947 -0
- package/scripts/sw-session.sh +519 -0
- package/scripts/sw-setup.sh +234 -0
- package/scripts/sw-status.sh +605 -0
- package/scripts/{cct-templates.sh → sw-templates.sh} +9 -4
- package/scripts/sw-tmux.sh +591 -0
- package/scripts/sw-tracker-jira.sh +277 -0
- package/scripts/sw-tracker-linear.sh +292 -0
- package/scripts/sw-tracker.sh +409 -0
- package/scripts/{cct-upgrade.sh → sw-upgrade.sh} +103 -46
- package/scripts/{cct-worktree.sh → sw-worktree.sh} +3 -0
- package/templates/pipelines/autonomous.json +27 -5
- package/templates/pipelines/full.json +12 -0
- package/templates/pipelines/standard.json +12 -0
- package/tmux/{claude-teams-overlay.conf → shipwright-overlay.conf} +27 -9
- package/tmux/templates/accessibility.json +34 -0
- package/tmux/templates/api-design.json +35 -0
- package/tmux/templates/architecture.json +1 -0
- package/tmux/templates/bug-fix.json +9 -0
- package/tmux/templates/code-review.json +1 -0
- package/tmux/templates/compliance.json +36 -0
- package/tmux/templates/data-pipeline.json +36 -0
- package/tmux/templates/debt-paydown.json +34 -0
- package/tmux/templates/devops.json +1 -0
- package/tmux/templates/documentation.json +1 -0
- package/tmux/templates/exploration.json +1 -0
- package/tmux/templates/feature-dev.json +1 -0
- package/tmux/templates/full-stack.json +8 -0
- package/tmux/templates/i18n.json +34 -0
- package/tmux/templates/incident-response.json +36 -0
- package/tmux/templates/migration.json +1 -0
- package/tmux/templates/observability.json +35 -0
- package/tmux/templates/onboarding.json +33 -0
- package/tmux/templates/performance.json +35 -0
- package/tmux/templates/refactor.json +1 -0
- package/tmux/templates/release.json +35 -0
- package/tmux/templates/security-audit.json +8 -0
- package/tmux/templates/spike.json +34 -0
- package/tmux/templates/testing.json +1 -0
- package/tmux/tmux.conf +98 -9
- package/scripts/cct-doctor.sh +0 -328
- package/scripts/cct-init.sh +0 -282
- package/scripts/cct-session.sh +0 -284
- package/scripts/cct-status.sh +0 -169
|
@@ -0,0 +1,687 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║ shipwright remote — Machine Registry & Remote Daemon Management           ║
# ║ Register machines · Deploy scripts · Monitor distributed workers          ║
# ╚═══════════════════════════════════════════════════════════════════════════╝
set -euo pipefail
# Report file:line of any unhandled failure (pairs with set -e above).
trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR

VERSION="1.9.0"
# Directory containing this script, and the repo root one level up —
# REPO_DIR is the rsync source for `remote deploy`.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# Truecolor ANSI escapes; expanded later by echo -e / printf '%b'.
CYAN='\033[38;2;0;212;255m'    # #00d4ff — primary accent
PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE='\033[38;2;0;102;255m'    # #0066ff — tertiary
GREEN='\033[38;2;74;222;128m'  # success
YELLOW='\033[38;2;250;204;21m' # warning
RED='\033[38;2;248;113;113m'   # error
DIM='\033[2m'
BOLD='\033[1m'
RESET='\033[0m'

# ─── Cross-platform compatibility ──────────────────────────────────────────
# Optional macOS/Linux shims; a missing compat.sh is deliberately not fatal.
# shellcheck source=lib/compat.sh
[[ -f "$SCRIPT_DIR/lib/compat.sh" ]] && source "$SCRIPT_DIR/lib/compat.sh"
|
|
27
|
+
# ─── Output Helpers ─────────────────────────────────────────────────────────
# Status-prefixed one-liners. '%b' expands the stored ANSI escape sequences
# exactly as `echo -e` would. error() goes to stderr; the rest to stdout.
info()    { printf '%b\n' "${CYAN}${BOLD}▸${RESET} $*"; }
success() { printf '%b\n' "${GREEN}${BOLD}✓${RESET} $*"; }
warn()    { printf '%b\n' "${YELLOW}${BOLD}⚠${RESET} $*"; }
error()   { printf '%b\n' "${RED}${BOLD}✗${RESET} $*" >&2; }

# Current time as UTC ISO-8601, e.g. 2024-01-01T00:00:00Z.
now_iso() { date -u +"%Y-%m-%dT%H:%M:%SZ"; }
|
|
34
|
+
|
|
35
|
+
# ─── Structured Event Log ──────────────────────────────────────────────────
EVENTS_FILE="${HOME}/.shipwright/events.jsonl"

# emit_event TYPE [key=value ...]
# Append one JSON object per line to $EVENTS_FILE. Values that look like
# numbers are written as JSON numbers; everything else as a JSON string.
# Fixes over the naive version:
#   - numeric regex requires digits after the decimal point, so "5." is
#     treated as a string rather than emitted as the invalid JSON number 5.
#   - backslashes are escaped before quotes, so values containing '\' still
#     produce valid JSON
#   - the log directory is derived from $EVENTS_FILE instead of hardcoding
#     ~/.shipwright, so overriding EVENTS_FILE keeps working
emit_event() {
  local event_type="$1"
  shift
  local json_fields=""
  local kv key val
  for kv in "$@"; do
    key="${kv%%=*}"
    val="${kv#*=}"
    if [[ "$val" =~ ^-?[0-9]+(\.[0-9]+)?$ ]]; then
      # Bare integer or decimal — emit unquoted.
      json_fields="${json_fields},\"${key}\":${val}"
    else
      val="${val//\\/\\\\}"   # escape backslashes first …
      val="${val//\"/\\\"}"   # … then double quotes
      json_fields="${json_fields},\"${key}\":\"${val}\""
    fi
  done
  mkdir -p "$(dirname "$EVENTS_FILE")"
  echo "{\"ts\":\"$(now_iso)\",\"type\":\"${event_type}\"${json_fields}}" >> "$EVENTS_FILE"
}
|
|
55
|
+
|
|
56
|
+
# ─── Defaults ───────────────────────────────────────────────────────────────
MACHINES_FILE="$HOME/.shipwright/machines.json"
SSH_OPTS="-o ConnectTimeout=5 -o BatchMode=yes -o StrictHostKeyChecking=accept-new"

# ─── CLI Argument Parsing ──────────────────────────────────────────────────
# First token selects the subcommand; everything after it is parsed below
# into OPT_* globals and POSITIONAL_ARGS.
SUBCOMMAND="${1:-help}"
shift 2>/dev/null || true

# Collect positional args and flags
POSITIONAL_ARGS=()
OPT_HOST=""
OPT_USER=""
OPT_PATH=""
OPT_MAX_WORKERS=""
OPT_ROLE=""
OPT_STOP_DAEMON=false
OPT_JSON=false

# require_value FLAG NARGS — fail with a clear message when a value-taking
# flag is the last token on the command line. Without this, `shift 2` would
# fail and `set -e` would abort via the ERR trap with a cryptic message.
require_value() {
  if [[ "$2" -lt 2 ]]; then
    error "Option $1 requires a value"
    exit 1
  fi
}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --host)
      require_value --host $#
      OPT_HOST="$2"
      shift 2
      ;;
    --host=*)
      OPT_HOST="${1#--host=}"
      shift
      ;;
    --user)
      require_value --user $#
      OPT_USER="$2"
      shift 2
      ;;
    --user=*)
      OPT_USER="${1#--user=}"
      shift
      ;;
    --path)
      require_value --path $#
      OPT_PATH="$2"
      shift 2
      ;;
    --path=*)
      OPT_PATH="${1#--path=}"
      shift
      ;;
    --max-workers)
      require_value --max-workers $#
      OPT_MAX_WORKERS="$2"
      shift 2
      ;;
    --max-workers=*)
      OPT_MAX_WORKERS="${1#--max-workers=}"
      shift
      ;;
    --role)
      require_value --role $#
      OPT_ROLE="$2"
      shift 2
      ;;
    --role=*)
      OPT_ROLE="${1#--role=}"
      shift
      ;;
    --stop-daemon)
      OPT_STOP_DAEMON=true
      shift
      ;;
    --json)
      OPT_JSON=true
      shift
      ;;
    --help|-h)
      SUBCOMMAND="help"
      shift
      ;;
    -*)
      error "Unknown option: $1"
      exit 1
      ;;
    *)
      POSITIONAL_ARGS+=("$1")
      shift
      ;;
  esac
done
|
|
138
|
+
|
|
139
|
+
# ─── Help ───────────────────────────────────────────────────────────────────

# Print usage, commands, options, and examples. The lines are collected in
# an array and emitted with a single printf; '%b' expands the ANSI escapes
# stored in the color variables, matching the old echo -e behavior.
show_help() {
  local help_lines=(
    ""
    "${PURPLE}${BOLD}━━━ shipwright remote v${VERSION} ━━━${RESET}"
    ""
    "${BOLD}USAGE${RESET}"
    "  ${CYAN}shipwright remote${RESET} <command> [options]"
    ""
    "${BOLD}COMMANDS${RESET}"
    "  ${CYAN}add${RESET} <name>       Register a machine"
    "  ${CYAN}remove${RESET} <name>    Remove a machine from registry"
    "  ${CYAN}list${RESET}             Show registered machines"
    "  ${CYAN}status${RESET}           Health check all machines"
    "  ${CYAN}deploy${RESET} <name>    Deploy shipwright to a remote machine"
    "  ${CYAN}help${RESET}             Show this help"
    ""
    "${BOLD}ADD OPTIONS${RESET}"
    "  ${CYAN}--host${RESET} <host>            Hostname or IP ${DIM}(required)${RESET}"
    "  ${CYAN}--user${RESET} <user>            SSH user for remote machines"
    "  ${CYAN}--path${RESET} <path>            Shipwright install path on machine ${DIM}(required)${RESET}"
    "  ${CYAN}--max-workers${RESET} <N>        Maximum worker count ${DIM}(default: 4)${RESET}"
    "  ${CYAN}--role${RESET} <primary|worker>  Machine role ${DIM}(default: worker)${RESET}"
    ""
    "${BOLD}REMOVE OPTIONS${RESET}"
    "  ${CYAN}--stop-daemon${RESET}    Stop remote daemon before removing"
    ""
    "${BOLD}EXAMPLES${RESET}"
    "  ${DIM}shipwright remote add dev-laptop --host localhost --path /Users/seth/shipwright --role primary${RESET}"
    "  ${DIM}shipwright remote add build-srv --host 192.168.1.100 --user seth --path /home/seth/shipwright --max-workers 8${RESET}"
    "  ${DIM}shipwright remote list${RESET}"
    "  ${DIM}shipwright remote status${RESET}"
    "  ${DIM}shipwright remote deploy build-srv${RESET}"
    "  ${DIM}shipwright remote remove build-srv --stop-daemon${RESET}"
    ""
  )
  printf '%b\n' "${help_lines[@]}"
}
|
|
175
|
+
|
|
176
|
+
# ─── Machine Registry Helpers ──────────────────────────────────────────────

# Create the machine registry on first use and refuse to run on a corrupt
# one. Fix: the parent directory is derived from $MACHINES_FILE rather than
# hardcoding ~/.shipwright, so a relocated registry path still works.
ensure_machines_file() {
  mkdir -p "$(dirname "$MACHINES_FILE")"
  if [[ ! -f "$MACHINES_FILE" ]]; then
    echo '{"machines":[]}' > "$MACHINES_FILE"
  fi
  # Validate JSON — better to stop here than to feed partial jq output to
  # every caller downstream.
  if ! jq empty "$MACHINES_FILE" 2>/dev/null; then
    error "Corrupted machines file: $MACHINES_FILE"
    exit 1
  fi
}
|
|
189
|
+
|
|
190
|
+
# Print the full JSON object of the named machine; prints nothing when the
# name is not registered.
get_machine() {
  local wanted="$1"
  jq -r --arg want "$wanted" \
    '.machines[] | select(.name == $want)' "$MACHINES_FILE"
}
|
|
194
|
+
|
|
195
|
+
# Exit 0 when a machine with the given name is registered, 1 otherwise
# (including when the registry file is missing or unreadable).
machine_exists() {
  local hit=""
  if hit=$(jq -r --arg want "$1" \
      '.machines[] | select(.name == $want) | .name' "$MACHINES_FILE" 2>/dev/null); then
    [[ -n "$hit" ]]
  else
    return 1
  fi
}
|
|
201
|
+
|
|
202
|
+
# True when HOST names the local machine: the literal "localhost" or the
# IPv4/IPv6 loopback addresses.
is_localhost() {
  case "$1" in
    localhost|127.0.0.1|::1) return 0 ;;
    *) return 1 ;;
  esac
}
|
|
206
|
+
|
|
207
|
+
# Run a command on a machine (local or remote).
# Local hosts execute via a bash subshell; remote hosts via ssh with the
# shared $SSH_OPTS. SSH_USER may be empty or the literal "null" (jq's
# rendering of a missing field) — both mean "no explicit user".
run_on_machine() {
  local host="$1" ssh_user="$2" cmd="$3"

  if is_localhost "$host"; then
    bash -c "$cmd"
    return
  fi

  local dest="$host"
  case "$ssh_user" in
    ""|null) : ;;
    *) dest="${ssh_user}@${host}" ;;
  esac
  # shellcheck disable=SC2086 — SSH_OPTS is intentionally word-split
  ssh $SSH_OPTS "$dest" "$cmd"
}
|
|
224
|
+
|
|
225
|
+
# ─── Add Machine ───────────────────────────────────────────────────────────

# Register a machine in the registry.
# Reads globals: POSITIONAL_ARGS (name), OPT_HOST, OPT_USER, OPT_PATH,
#   OPT_MAX_WORKERS (default 4), OPT_ROLE (default worker), MACHINES_FILE,
#   SSH_OPTS.
# Side effects: verifies SSH reachability for non-local hosts, probes for an
#   existing shipwright install (non-fatal), appends the entry to
#   $MACHINES_FILE via a temp-file swap, and emits a remote.add event.
# Exits 1 on validation failure, duplicate name, or unreachable host.
remote_add() {
  local name="${POSITIONAL_ARGS[0]:-}"
  local host="$OPT_HOST"
  local ssh_user="$OPT_USER"
  local sw_path="$OPT_PATH"
  local max_workers="${OPT_MAX_WORKERS:-4}"
  local role="${OPT_ROLE:-worker}"

  # Validate required fields
  if [[ -z "$name" ]]; then
    error "Machine name is required"
    echo ""
    echo -e "  Usage: ${CYAN}shipwright remote add <name> --host <host> --path <path>${RESET}"
    exit 1
  fi
  if [[ -z "$host" ]]; then
    error "Host is required (--host)"
    exit 1
  fi
  if [[ -z "$sw_path" ]]; then
    error "Shipwright path is required (--path)"
    exit 1
  fi

  # Validate role
  if [[ "$role" != "primary" && "$role" != "worker" ]]; then
    error "Role must be 'primary' or 'worker', got: $role"
    exit 1
  fi

  # Validate max_workers is numeric
  if ! [[ "$max_workers" =~ ^[0-9]+$ ]]; then
    error "max-workers must be a positive integer, got: $max_workers"
    exit 1
  fi

  ensure_machines_file

  # Check for duplicate
  if machine_exists "$name"; then
    error "Machine '$name' already registered"
    info "Use ${CYAN}shipwright remote remove $name${RESET} first"
    exit 1
  fi

  # Test SSH connectivity for remote machines — fail fast before touching
  # the registry so a typo'd host is never persisted.
  if ! is_localhost "$host"; then
    info "Testing SSH connectivity to ${BOLD}$host${RESET}..."
    local target="$host"
    if [[ -n "$ssh_user" ]]; then
      target="${ssh_user}@${host}"
    fi
    # shellcheck disable=SC2086
    if ! ssh $SSH_OPTS "$target" "echo ok" >/dev/null 2>&1; then
      error "Cannot connect to $target via SSH"
      echo ""
      echo -e "  Ensure SSH access is configured:"
      echo -e "  ${DIM}ssh-copy-id ${target}${RESET}"
      echo -e "  ${DIM}ssh ${target} echo ok${RESET}"
      exit 1
    fi
    success "SSH connection verified"
  fi

  # Check shipwright is installed at the given path. This is advisory only:
  # a missing install just points the user at `remote deploy`.
  info "Checking shipwright installation at ${DIM}${sw_path}${RESET}..."
  local check_cmd="test -f '${sw_path}/scripts/sw' && echo 'found' || echo 'missing'"
  local result
  result=$(run_on_machine "$host" "$ssh_user" "$check_cmd" 2>/dev/null || echo "error")

  if [[ "$result" == "missing" ]]; then
    warn "Shipwright not found at $sw_path on $host"
    info "Use ${CYAN}shipwright remote deploy $name${RESET} after registering to install"
  elif [[ "$result" == "error" ]]; then
    warn "Could not verify shipwright installation on $host"
  else
    success "Shipwright found at $sw_path"
  fi

  # Build the new machine entry and add to registry atomically
  # (write to a PID-suffixed temp file, then mv over the original).
  local tmp_file="${MACHINES_FILE}.tmp.$$"
  jq --arg name "$name" \
    --arg host "$host" \
    --arg role "$role" \
    --arg ssh_user "$ssh_user" \
    --arg sw_path "$sw_path" \
    --argjson max_workers "$max_workers" \
    --arg ts "$(now_iso)" \
    '.machines += [{
      name: $name,
      host: $host,
      role: $role,
      ssh_user: (if $ssh_user == "" then null else $ssh_user end),
      shipwright_path: $sw_path,
      max_workers: $max_workers,
      registered_at: $ts
    }]' "$MACHINES_FILE" > "$tmp_file" && mv "$tmp_file" "$MACHINES_FILE"

  emit_event "remote.add" "machine=$name" "host=$host" "role=$role" "max_workers=$max_workers"
  success "Registered machine: ${BOLD}$name${RESET} ($host, $role, ${max_workers} workers)"
}
|
|
328
|
+
|
|
329
|
+
# ─── Remove Machine ───────────────────────────────────────────────────────

# Remove a machine from the registry.
# Reads globals: POSITIONAL_ARGS (name), OPT_STOP_DAEMON, MACHINES_FILE.
# With --stop-daemon, best-effort stops the remote daemon first; a failure
# there only warns — the registry entry is removed regardless.
# Exits 1 when the name is missing or not registered.
remote_remove() {
  local name="${POSITIONAL_ARGS[0]:-}"

  if [[ -z "$name" ]]; then
    error "Machine name is required"
    echo ""
    echo -e "  Usage: ${CYAN}shipwright remote remove <name>${RESET}"
    exit 1
  fi

  ensure_machines_file

  if ! machine_exists "$name"; then
    error "Machine '$name' not found in registry"
    exit 1
  fi

  # Optionally stop remote daemon before dropping the entry
  if [[ "$OPT_STOP_DAEMON" == true ]]; then
    info "Stopping daemon on ${BOLD}$name${RESET}..."
    local host ssh_user sw_path
    host=$(jq -r --arg n "$name" '.machines[] | select(.name == $n) | .host' "$MACHINES_FILE")
    ssh_user=$(jq -r --arg n "$name" '.machines[] | select(.name == $n) | .ssh_user // ""' "$MACHINES_FILE")
    sw_path=$(jq -r --arg n "$name" '.machines[] | select(.name == $n) | .shipwright_path' "$MACHINES_FILE")

    # `|| true` inside the remote command: stopping an already-stopped
    # daemon is not an error worth failing the removal over.
    local stop_cmd="cd '${sw_path}' && ./scripts/sw daemon stop 2>/dev/null || true"
    if run_on_machine "$host" "$ssh_user" "$stop_cmd" 2>/dev/null; then
      success "Daemon stopped on $name"
    else
      warn "Could not stop daemon on $name (may not be running)"
    fi
  fi

  # Remove from registry atomically (temp file + mv, same as remote_add)
  local tmp_file="${MACHINES_FILE}.tmp.$$"
  jq --arg name "$name" '.machines = [.machines[] | select(.name != $name)]' "$MACHINES_FILE" > "$tmp_file" \
    && mv "$tmp_file" "$MACHINES_FILE"

  emit_event "remote.remove" "machine=$name"
  success "Removed machine: ${BOLD}$name${RESET}"
}
|
|
372
|
+
|
|
373
|
+
# ─── List Machines ─────────────────────────────────────────────────────────

# Print the machine registry, either as raw JSON (--json) or as an aligned
# table. Reads globals: MACHINES_FILE, OPT_JSON.
remote_list() {
  ensure_machines_file

  local count
  count=$(jq '.machines | length' "$MACHINES_FILE")

  if [[ "$count" -eq 0 ]]; then
    echo ""
    echo -e "  ${DIM}No machines registered.${RESET}"
    echo -e "  ${DIM}Register one with: ${CYAN}shipwright remote add <name> --host <host> --path <path>${RESET}"
    echo ""
    return
  fi

  # JSON output mode — dump the registry verbatim and stop
  if [[ "$OPT_JSON" == true ]]; then
    jq '.' "$MACHINES_FILE"
    return
  fi

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Registered Machines ━━━${RESET}"
  echo ""

  # Table header
  printf "  ${BOLD}%-16s %-20s %-8s %-8s %-22s${RESET}\n" "NAME" "HOST" "ROLE" "WORKERS" "REGISTERED"
  echo -e "  ${DIM}$(printf '─%.0s' {1..76})${RESET}"

  # Table rows — one jq call per field; fine at registry-sized counts
  local i
  for i in $(seq 0 $((count - 1))); do
    local name host role max_workers registered_at
    name=$(jq -r --argjson i "$i" '.machines[$i].name' "$MACHINES_FILE")
    host=$(jq -r --argjson i "$i" '.machines[$i].host' "$MACHINES_FILE")
    role=$(jq -r --argjson i "$i" '.machines[$i].role' "$MACHINES_FILE")
    max_workers=$(jq -r --argjson i "$i" '.machines[$i].max_workers' "$MACHINES_FILE")
    registered_at=$(jq -r --argjson i "$i" '.machines[$i].registered_at' "$MACHINES_FILE")

    # Trim timestamp for display (drop the Z, replace T with a space)
    local display_ts
    display_ts=$(echo "$registered_at" | cut -c1-19 | tr 'T' ' ')

    # Color role — primary gets the accent color, workers are dimmed
    local role_display
    if [[ "$role" == "primary" ]]; then
      role_display="${CYAN}${role}${RESET}"
    else
      role_display="${DIM}${role}${RESET}"
    fi

    printf "  %-16s %-20s " "$name" "$host"
    echo -ne "$role_display"
    # Pad after colored role manually: printf %-8s would count the invisible
    # ANSI escape bytes and misalign the column (role is max 7 chars).
    local pad=$((8 - ${#role}))
    printf "%${pad}s" ""
    printf "%-8s %s\n" "$max_workers" "$display_ts"
  done

  echo ""
  echo -e "  ${DIM}${count} machine(s) registered${RESET}"
  echo ""
}
|
|
437
|
+
|
|
438
|
+
# ─── Status / Health Check ─────────────────────────────────────────────────

# Probe every registered machine and print a health table plus a summary
# line. Each machine is probed with a small shell snippet (run locally or
# over ssh) that reports "daemon_pid|heartbeat_count|active_jobs".
# Status mapping: probe failure → offline; no daemon on a remote host →
# offline; no daemon on localhost → no-daemon (degraded); otherwise online.
remote_status() {
  ensure_machines_file

  local count
  count=$(jq '.machines | length' "$MACHINES_FILE")

  if [[ "$count" -eq 0 ]]; then
    echo ""
    echo -e "  ${DIM}No machines registered.${RESET}"
    echo ""
    return
  fi

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Machine Health Status ━━━${RESET}"
  echo -e "${DIM}  $(date '+%Y-%m-%d %H:%M:%S')${RESET}"
  echo ""

  # Table header
  printf "  ${BOLD}%-16s %-20s %-10s %-10s %-12s${RESET}\n" "NAME" "HOST" "STATUS" "WORKERS" "HEARTBEATS"
  echo -e "  ${DIM}$(printf '─%.0s' {1..70})${RESET}"

  local online_count=0
  local offline_count=0
  local degraded_count=0

  local i
  for i in $(seq 0 $((count - 1))); do
    local name host ssh_user sw_path max_workers
    name=$(jq -r --argjson i "$i" '.machines[$i].name' "$MACHINES_FILE")
    host=$(jq -r --argjson i "$i" '.machines[$i].host' "$MACHINES_FILE")
    ssh_user=$(jq -r --argjson i "$i" '.machines[$i].ssh_user // ""' "$MACHINES_FILE")
    sw_path=$(jq -r --argjson i "$i" '.machines[$i].shipwright_path' "$MACHINES_FILE")
    max_workers=$(jq -r --argjson i "$i" '.machines[$i].max_workers' "$MACHINES_FILE")

    local status_label status_icon active_workers heartbeat_count
    active_workers=0
    heartbeat_count=0

    # Build health check command. The quoted HEALTHEOF delimiter keeps all
    # $HOME references literal here — they expand on the TARGET machine.
    # POSIX [ ] (not [[ ]]) is used so the probe survives non-bash remote
    # shells.
    local health_cmd
    health_cmd=$(cat <<'HEALTHEOF'
daemon_pid=""
hb_count=0
active=0
# Check for daemon PID (stale PID files are treated as "no daemon")
if [ -f "$HOME/.shipwright/daemon.pid" ]; then
  daemon_pid=$(cat "$HOME/.shipwright/daemon.pid" 2>/dev/null || true)
  if [ -n "$daemon_pid" ] && kill -0 "$daemon_pid" 2>/dev/null; then
    daemon_pid="$daemon_pid"
  else
    daemon_pid=""
  fi
fi
# Count heartbeat files
if [ -d "$HOME/.shipwright/heartbeats" ]; then
  hb_count=$(ls -1 "$HOME/.shipwright/heartbeats/" 2>/dev/null | wc -l | tr -d ' ')
fi
# Count active jobs from daemon state
if [ -f "$HOME/.shipwright/daemon-state.json" ]; then
  active=$(python3 -c "import json; d=json.load(open('$HOME/.shipwright/daemon-state.json')); print(len(d.get('active_jobs',{})))" 2>/dev/null || echo 0)
fi
echo "${daemon_pid:-none}|${hb_count}|${active}"
HEALTHEOF
)

    # "error|0|0" sentinel marks an unreachable machine / failed probe.
    local result
    result=$(run_on_machine "$host" "$ssh_user" "$health_cmd" 2>/dev/null || echo "error|0|0")

    local daemon_pid hb_val active_val
    daemon_pid=$(echo "$result" | cut -d'|' -f1)
    hb_val=$(echo "$result" | cut -d'|' -f2)
    active_val=$(echo "$result" | cut -d'|' -f3)

    # Sanitize numeric values — garbage from a partial ssh read becomes 0
    [[ ! "$hb_val" =~ ^[0-9]+$ ]] && hb_val=0
    [[ ! "$active_val" =~ ^[0-9]+$ ]] && active_val=0
    heartbeat_count="$hb_val"
    active_workers="$active_val"

    if [[ "$daemon_pid" == "error" ]]; then
      status_label="${RED}offline${RESET}"
      status_icon="${RED}●${RESET}"
      offline_count=$((offline_count + 1))
    elif [[ "$daemon_pid" == "none" ]]; then
      if is_localhost "$host"; then
        status_label="${YELLOW}no-daemon${RESET}"
        status_icon="${YELLOW}●${RESET}"
        degraded_count=$((degraded_count + 1))
      else
        status_label="${RED}offline${RESET}"
        status_icon="${RED}●${RESET}"
        offline_count=$((offline_count + 1))
      fi
    else
      status_label="${GREEN}online${RESET}"
      status_icon="${GREEN}●${RESET}"
      online_count=$((online_count + 1))
    fi

    local worker_display="${active_val}/${max_workers}"

    printf "  %-16s %-20s " "$name" "$host"
    echo -ne "${status_icon} "
    echo -ne "$status_label"
    # Pad after colored status manually — printf width specifiers would
    # count the invisible ANSI escape bytes. status_text re-derives the
    # plain-text label so its visible length is known.
    local status_text
    if [[ "$daemon_pid" == "error" ]]; then
      status_text="offline"
    elif [[ "$daemon_pid" == "none" ]]; then
      if is_localhost "$host"; then
        status_text="no-daemon"
      else
        status_text="offline"
      fi
    else
      status_text="online"
    fi
    # -2 accounts for the icon and trailing space already printed
    local spad=$((10 - ${#status_text} - 2))
    [[ "$spad" -lt 0 ]] && spad=0
    printf "%${spad}s" ""
    printf "%-10s %s\n" "$worker_display" "$heartbeat_count"
  done

  echo ""
  echo -e "  ${GREEN}●${RESET} ${online_count} online ${YELLOW}●${RESET} ${degraded_count} degraded ${RED}●${RESET} ${offline_count} offline"
  echo ""
}
|
|
568
|
+
|
|
569
|
+
# ─── Deploy ────────────────────────────────────────────────────────────────

# Push the local shipwright checkout to a registered remote machine:
#   1. mkdir the target path    2. rsync scripts/templates/tmux/install files
#   3. run install.sh remotely  4. verify `scripts/sw --version` responds
# Reads globals: POSITIONAL_ARGS (name), MACHINES_FILE, SSH_OPTS, REPO_DIR.
# Exits 1 on missing/unknown name, localhost target, mkdir or rsync failure;
# install/verify problems only warn.
remote_deploy() {
  local name="${POSITIONAL_ARGS[0]:-}"

  if [[ -z "$name" ]]; then
    error "Machine name is required"
    echo ""
    echo -e "  Usage: ${CYAN}shipwright remote deploy <name>${RESET}"
    exit 1
  fi

  ensure_machines_file

  if ! machine_exists "$name"; then
    error "Machine '$name' not found in registry"
    exit 1
  fi

  local host ssh_user sw_path
  host=$(jq -r --arg n "$name" '.machines[] | select(.name == $n) | .host' "$MACHINES_FILE")
  ssh_user=$(jq -r --arg n "$name" '.machines[] | select(.name == $n) | .ssh_user // ""' "$MACHINES_FILE")
  sw_path=$(jq -r --arg n "$name" '.machines[] | select(.name == $n) | .shipwright_path' "$MACHINES_FILE")

  if is_localhost "$host"; then
    error "Cannot deploy to localhost — shipwright is already local"
    info "Use ${CYAN}shipwright upgrade --apply${RESET} to update the local installation"
    exit 1
  fi

  local target="$host"
  if [[ -n "$ssh_user" && "$ssh_user" != "null" ]]; then
    target="${ssh_user}@${host}"
  fi

  info "Deploying shipwright to ${BOLD}$name${RESET} ($host)..."
  echo ""

  # Step 1: Ensure target directory exists
  info "Creating target directory..."
  # shellcheck disable=SC2086
  ssh $SSH_OPTS "$target" "mkdir -p '${sw_path}'" || {
    error "Failed to create directory $sw_path on $host"
    exit 1
  }

  # Step 2: rsync scripts and essential files. The include/exclude chain
  # whitelists only the listed trees ('dir/***' = dir and everything under
  # it); --exclude='*' drops the rest; --delete prunes stale remote files.
  info "Syncing scripts..."
  local rsync_src="${REPO_DIR}/"
  local rsync_dst="${target}:${sw_path}/"

  rsync -avz --delete \
    --include='scripts/***' \
    --include='templates/***' \
    --include='tmux/***' \
    --include='install.sh' \
    --include='package.json' \
    --exclude='*' \
    -e "ssh $SSH_OPTS" \
    "$rsync_src" "$rsync_dst" || {
    error "rsync failed"
    exit 1
  }
  success "Scripts synced"

  # Step 3: Run install.sh remotely (non-fatal — some environments need
  # manual follow-up, so only warn)
  info "Running install.sh on remote..."
  # shellcheck disable=SC2086
  ssh $SSH_OPTS "$target" "cd '${sw_path}' && bash install.sh --non-interactive" || {
    warn "install.sh returned non-zero (may need manual intervention)"
  }

  # Step 4: Verify the deployed CLI actually runs
  info "Verifying installation..."
  local verify_cmd="test -x '${sw_path}/scripts/sw' && '${sw_path}/scripts/sw' --version 2>/dev/null || echo 'verify-failed'"
  local verify_result
  # shellcheck disable=SC2086
  verify_result=$(ssh $SSH_OPTS "$target" "$verify_cmd" 2>/dev/null || echo "verify-failed")

  if [[ "$verify_result" == "verify-failed" ]]; then
    warn "Could not verify installation — check manually"
  else
    success "Verified: $verify_result"
  fi

  emit_event "remote.deploy" "machine=$name" "host=$host"
  echo ""
  success "Deployment complete for ${BOLD}$name${RESET}"
  echo ""
}
|
|
659
|
+
|
|
660
|
+
# ─── Command Router ─────────────────────────────────────────────────────────
# SUBCOMMAND was captured from $1 during argument parsing; each arm delegates
# to the matching remote_* function (with short aliases rm/ls). Unknown
# commands print the help text and exit non-zero.
case "$SUBCOMMAND" in
  add)
    remote_add
    ;;
  remove|rm)
    remote_remove
    ;;
  list|ls)
    remote_list
    ;;
  status)
    remote_status
    ;;
  deploy)
    remote_deploy
    ;;
  help|--help|-h)
    show_help
    ;;
  *)
    error "Unknown command: ${SUBCOMMAND}"
    echo ""
    show_help
    exit 1
    ;;
esac