shipwright-cli 1.10.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -36
- package/completions/_shipwright +212 -32
- package/completions/shipwright.bash +97 -25
- package/docs/strategy/01-market-research.md +619 -0
- package/docs/strategy/02-mission-and-brand.md +587 -0
- package/docs/strategy/03-gtm-and-roadmap.md +759 -0
- package/docs/strategy/QUICK-START.txt +289 -0
- package/docs/strategy/README.md +172 -0
- package/package.json +4 -2
- package/scripts/sw +208 -1
- package/scripts/sw-activity.sh +500 -0
- package/scripts/sw-adaptive.sh +925 -0
- package/scripts/sw-adversarial.sh +1 -1
- package/scripts/sw-architecture-enforcer.sh +1 -1
- package/scripts/sw-auth.sh +613 -0
- package/scripts/sw-autonomous.sh +664 -0
- package/scripts/sw-changelog.sh +704 -0
- package/scripts/sw-checkpoint.sh +1 -1
- package/scripts/sw-ci.sh +602 -0
- package/scripts/sw-cleanup.sh +1 -1
- package/scripts/sw-code-review.sh +637 -0
- package/scripts/sw-connect.sh +1 -1
- package/scripts/sw-context.sh +605 -0
- package/scripts/sw-cost.sh +1 -1
- package/scripts/sw-daemon.sh +432 -130
- package/scripts/sw-dashboard.sh +1 -1
- package/scripts/sw-db.sh +540 -0
- package/scripts/sw-decompose.sh +539 -0
- package/scripts/sw-deps.sh +551 -0
- package/scripts/sw-developer-simulation.sh +1 -1
- package/scripts/sw-discovery.sh +412 -0
- package/scripts/sw-docs-agent.sh +539 -0
- package/scripts/sw-docs.sh +1 -1
- package/scripts/sw-doctor.sh +59 -1
- package/scripts/sw-dora.sh +615 -0
- package/scripts/sw-durable.sh +710 -0
- package/scripts/sw-e2e-orchestrator.sh +535 -0
- package/scripts/sw-eventbus.sh +393 -0
- package/scripts/sw-feedback.sh +471 -0
- package/scripts/sw-fix.sh +1 -1
- package/scripts/sw-fleet-discover.sh +567 -0
- package/scripts/sw-fleet-viz.sh +404 -0
- package/scripts/sw-fleet.sh +8 -1
- package/scripts/sw-github-app.sh +596 -0
- package/scripts/sw-github-checks.sh +1 -1
- package/scripts/sw-github-deploy.sh +1 -1
- package/scripts/sw-github-graphql.sh +1 -1
- package/scripts/sw-guild.sh +569 -0
- package/scripts/sw-heartbeat.sh +1 -1
- package/scripts/sw-hygiene.sh +559 -0
- package/scripts/sw-incident.sh +617 -0
- package/scripts/sw-init.sh +88 -1
- package/scripts/sw-instrument.sh +699 -0
- package/scripts/sw-intelligence.sh +1 -1
- package/scripts/sw-jira.sh +1 -1
- package/scripts/sw-launchd.sh +363 -28
- package/scripts/sw-linear.sh +1 -1
- package/scripts/sw-logs.sh +1 -1
- package/scripts/sw-loop.sh +64 -3
- package/scripts/sw-memory.sh +1 -1
- package/scripts/sw-mission-control.sh +487 -0
- package/scripts/sw-model-router.sh +545 -0
- package/scripts/sw-otel.sh +596 -0
- package/scripts/sw-oversight.sh +689 -0
- package/scripts/sw-pipeline-composer.sh +1 -1
- package/scripts/sw-pipeline-vitals.sh +1 -1
- package/scripts/sw-pipeline.sh +687 -24
- package/scripts/sw-pm.sh +693 -0
- package/scripts/sw-pr-lifecycle.sh +522 -0
- package/scripts/sw-predictive.sh +1 -1
- package/scripts/sw-prep.sh +1 -1
- package/scripts/sw-ps.sh +1 -1
- package/scripts/sw-public-dashboard.sh +798 -0
- package/scripts/sw-quality.sh +595 -0
- package/scripts/sw-reaper.sh +1 -1
- package/scripts/sw-recruit.sh +573 -0
- package/scripts/sw-regression.sh +642 -0
- package/scripts/sw-release-manager.sh +736 -0
- package/scripts/sw-release.sh +706 -0
- package/scripts/sw-remote.sh +1 -1
- package/scripts/sw-replay.sh +520 -0
- package/scripts/sw-retro.sh +691 -0
- package/scripts/sw-scale.sh +444 -0
- package/scripts/sw-security-audit.sh +505 -0
- package/scripts/sw-self-optimize.sh +1 -1
- package/scripts/sw-session.sh +1 -1
- package/scripts/sw-setup.sh +1 -1
- package/scripts/sw-standup.sh +712 -0
- package/scripts/sw-status.sh +1 -1
- package/scripts/sw-strategic.sh +658 -0
- package/scripts/sw-stream.sh +450 -0
- package/scripts/sw-swarm.sh +583 -0
- package/scripts/sw-team-stages.sh +511 -0
- package/scripts/sw-templates.sh +1 -1
- package/scripts/sw-testgen.sh +515 -0
- package/scripts/sw-tmux-pipeline.sh +554 -0
- package/scripts/sw-tmux.sh +1 -1
- package/scripts/sw-trace.sh +485 -0
- package/scripts/sw-tracker-github.sh +188 -0
- package/scripts/sw-tracker-jira.sh +172 -0
- package/scripts/sw-tracker-linear.sh +251 -0
- package/scripts/sw-tracker.sh +117 -2
- package/scripts/sw-triage.sh +603 -0
- package/scripts/sw-upgrade.sh +1 -1
- package/scripts/sw-ux.sh +677 -0
- package/scripts/sw-webhook.sh +627 -0
- package/scripts/sw-widgets.sh +530 -0
- package/scripts/sw-worktree.sh +1 -1
package/scripts/sw-dashboard.sh
CHANGED
|
@@ -6,7 +6,7 @@
|
|
|
6
6
|
set -euo pipefail
|
|
7
7
|
trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR
|
|
8
8
|
|
|
9
|
-
VERSION="
|
|
9
|
+
VERSION="2.0.0"
|
|
10
10
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
11
11
|
|
|
12
12
|
# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
|
package/scripts/sw-db.sh
ADDED
|
@@ -0,0 +1,540 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║ shipwright db — SQLite Persistence Layer                                  ║
# ║ Store events, runs, developers, sessions, and metrics in SQLite           ║
# ║ Backward compatible: reads JSON if SQLite unavailable                     ║
# ╚═══════════════════════════════════════════════════════════════════════════╝
# Strict mode: abort on errors/unset vars/pipe failures, and report the
# failing location via the ERR trap.
set -euo pipefail
trap 'echo "ERROR: $BASH_SOURCE:$LINENO exited with status $?" >&2' ERR

VERSION="2.0.0"
# Directory containing this script, resolved even when invoked via a relative path.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Package root (one level above scripts/).
REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# Literal ANSI escape strings; interpreted at print time by echo -e / printf %b.
CYAN='\033[38;2;0;212;255m'    # #00d4ff — primary accent
PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE='\033[38;2;0;102;255m'    # #0066ff — tertiary
GREEN='\033[38;2;74;222;128m'  # success
YELLOW='\033[38;2;250;204;21m' # warning
RED='\033[38;2;248;113;113m'   # error
DIM='\033[2m'
BOLD='\033[1m'
RESET='\033[0m'

# ─── Cross-platform compatibility ──────────────────────────────────────────
# Optional shim for GNU/BSD tool differences; absence is not an error.
# shellcheck source=lib/compat.sh
[[ -f "$SCRIPT_DIR/lib/compat.sh" ]] && source "$SCRIPT_DIR/lib/compat.sh"
|
|
29
|
+
# ─── Output Helpers ─────────────────────────────────────────────────────────
# Color-coded one-line status messages. error() writes to stderr; the rest
# write to stdout. %b expands the escape sequences stored in the color vars,
# matching the original echo -e behavior.
info() {
  printf '%b\n' "${CYAN}${BOLD}▸${RESET} $*"
}
success() {
  printf '%b\n' "${GREEN}${BOLD}✓${RESET} $*"
}
warn() {
  printf '%b\n' "${YELLOW}${BOLD}⚠${RESET} $*"
}
error() {
  printf '%b\n' "${RED}${BOLD}✗${RESET} $*" >&2
}

# UTC ISO-8601 timestamp (e.g. 2024-01-01T00:00:00Z) and Unix epoch seconds.
now_iso() {
  date -u +"%Y-%m-%dT%H:%M:%SZ"
}
now_epoch() {
  date +%s
}
|
|
38
|
+
# ─── Database Configuration ──────────────────────────────────────────────────
# All persistent state lives under ~/.shipwright.
DB_DIR="${HOME}/.shipwright"
DB_FILE="${DB_DIR}/shipwright.db"
SCHEMA_VERSION=1  # bump when the SQL schema changes; checked by migrate_schema

# JSON fallback paths — used when sqlite3 is unavailable; events are always
# mirrored to the JSONL file for backward compatibility with older tooling.
EVENTS_FILE="${DB_DIR}/events.jsonl"
DAEMON_STATE_FILE="${DB_DIR}/daemon-state.json"
DEVELOPER_REGISTRY_FILE="${DB_DIR}/developer-registry.json"
|
|
48
|
+
# ─── Check Prerequisites ─────────────────────────────────────────────────────
# Returns 0 when the sqlite3 CLI is on PATH; otherwise warns with install
# hints and returns 1 (callers then fall back to the JSON files).
check_sqlite3() {
  command -v sqlite3 &>/dev/null && return 0
  warn "sqlite3 not found. Install with: brew install sqlite (macOS) or apt install sqlite3 (Ubuntu)"
  return 1
}
+
|
|
57
|
+
# ─── Ensure Database Directory ──────────────────────────────────────────────
# Creates $DB_DIR (and parents) if it does not already exist.
ensure_db_dir() {
  [[ -d "$DB_DIR" ]] || mkdir -p "$DB_DIR"
}
|
62
|
+
# ─── Initialize Database Schema ──────────────────────────────────────────────
# Creates every table and index (idempotent — all statements use IF NOT
# EXISTS). No-ops successfully when sqlite3 is missing so callers can rely
# on the JSONL fallback instead.
init_schema() {
  ensure_db_dir

  if ! check_sqlite3; then
    warn "Skipping SQLite initialization — sqlite3 not available"
    return 0
  fi

  # Quoted 'EOF' delimiter: the SQL is passed to sqlite3 verbatim, with no
  # shell expansion.
  sqlite3 "$DB_FILE" <<'EOF'
-- Schema version tracking
CREATE TABLE IF NOT EXISTS _schema (
  version INTEGER PRIMARY KEY,
  created_at TEXT NOT NULL,
  applied_at TEXT NOT NULL
);

-- Events log (replaces events.jsonl)
CREATE TABLE IF NOT EXISTS events (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  ts TEXT NOT NULL,
  ts_epoch INTEGER NOT NULL,
  type TEXT NOT NULL,
  job_id TEXT,
  stage TEXT,
  status TEXT,
  repo TEXT,
  branch TEXT,
  error TEXT,
  duration_secs INTEGER,
  metadata TEXT,
  created_at TEXT NOT NULL,
  UNIQUE(ts_epoch, type, job_id)
);

-- Pipeline runs tracking
CREATE TABLE IF NOT EXISTS pipeline_runs (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT UNIQUE NOT NULL,
  issue_number INTEGER,
  goal TEXT,
  branch TEXT,
  status TEXT NOT NULL,
  template TEXT,
  started_at TEXT NOT NULL,
  completed_at TEXT,
  duration_secs INTEGER,
  stage_name TEXT,
  stage_status TEXT,
  error_message TEXT,
  commit_hash TEXT,
  pr_number INTEGER,
  metadata TEXT,
  created_at TEXT NOT NULL
);

-- Stage history per pipeline run
CREATE TABLE IF NOT EXISTS pipeline_stages (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT NOT NULL,
  stage_name TEXT NOT NULL,
  status TEXT NOT NULL,
  started_at TEXT,
  completed_at TEXT,
  duration_secs INTEGER,
  error_message TEXT,
  metadata TEXT,
  created_at TEXT NOT NULL,
  FOREIGN KEY (job_id) REFERENCES pipeline_runs(job_id)
);

-- Developer registry
CREATE TABLE IF NOT EXISTS developers (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT UNIQUE NOT NULL,
  github_login TEXT,
  email TEXT,
  role TEXT,
  avatar_url TEXT,
  bio TEXT,
  expertise TEXT,
  contributed_repos TEXT,
  last_active_at TEXT,
  created_at TEXT NOT NULL,
  updated_at TEXT NOT NULL
);

-- Sessions tracking (teams/agents)
CREATE TABLE IF NOT EXISTS sessions (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  session_id TEXT UNIQUE NOT NULL,
  name TEXT NOT NULL,
  template TEXT,
  status TEXT NOT NULL,
  team_members TEXT,
  started_at TEXT NOT NULL,
  completed_at TEXT,
  duration_secs INTEGER,
  goal TEXT,
  metadata TEXT,
  created_at TEXT NOT NULL
);

-- Metrics (DORA, cost, performance)
CREATE TABLE IF NOT EXISTS metrics (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  job_id TEXT,
  metric_type TEXT NOT NULL,
  metric_name TEXT NOT NULL,
  value REAL NOT NULL,
  period TEXT,
  unit TEXT,
  tags TEXT,
  created_at TEXT NOT NULL,
  FOREIGN KEY (job_id) REFERENCES pipeline_runs(job_id)
);

-- Create indexes for common queries
CREATE INDEX IF NOT EXISTS idx_events_type ON events(type);
CREATE INDEX IF NOT EXISTS idx_events_job_id ON events(job_id);
CREATE INDEX IF NOT EXISTS idx_events_ts_epoch ON events(ts_epoch DESC);
CREATE INDEX IF NOT EXISTS idx_pipeline_runs_job_id ON pipeline_runs(job_id);
CREATE INDEX IF NOT EXISTS idx_pipeline_runs_status ON pipeline_runs(status);
CREATE INDEX IF NOT EXISTS idx_pipeline_runs_created ON pipeline_runs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_pipeline_stages_job_id ON pipeline_stages(job_id);
CREATE INDEX IF NOT EXISTS idx_developers_name ON developers(name);
CREATE INDEX IF NOT EXISTS idx_sessions_status ON sessions(status);
CREATE INDEX IF NOT EXISTS idx_metrics_job_id ON metrics(job_id);
CREATE INDEX IF NOT EXISTS idx_metrics_type ON metrics(metric_type);
EOF
}
|
|
194
|
+
# ─── Migrate Database ────────────────────────────────────────────────────────
# First run (no recorded version): build the schema and stamp it with
# SCHEMA_VERSION. Otherwise just report the version already applied.
migrate_schema() {
  if ! check_sqlite3; then
    warn "Skipping migration — sqlite3 not available"
    return 0
  fi

  # Missing table / missing file both degrade to version 0.
  local db_version
  db_version=$(sqlite3 "$DB_FILE" "SELECT COALESCE(MAX(version), 0) FROM _schema;" 2>/dev/null || echo 0)

  if [[ "$db_version" -ne 0 ]]; then
    info "Database already at schema v${db_version}"
    return 0
  fi

  init_schema
  sqlite3 "$DB_FILE" "INSERT INTO _schema (version, created_at, applied_at) VALUES (${SCHEMA_VERSION}, '$(now_iso)', '$(now_iso)');"
  success "Database schema initialized (v${SCHEMA_VERSION})"
}
|
|
214
|
+
# ─── Add Event (SQLite + JSONL for backward compat) ──────────────────────────
# add_event TYPE [JOB_ID] [STAGE] [STATUS] [DURATION_SECS] [METADATA]
# Records an event in SQLite when available (best effort) and always appends
# a JSON record to events.jsonl so older consumers keep working.
add_event() {
  local event_type="$1"
  local job_id="${2:-}"
  local stage="${3:-}"
  local status="${4:-}"
  local duration_secs="${5:-0}"
  local metadata="${6:-}"

  local ts
  ts="$(now_iso)"
  local ts_epoch
  ts_epoch="$(now_epoch)"

  # Escape embedded single quotes (SQL escapes ' as '') so values containing
  # apostrophes can neither break nor inject into the INSERT statement.
  local q_type="${event_type//\'/\'\'}"
  local q_job="${job_id//\'/\'\'}"
  local q_stage="${stage//\'/\'\'}"
  local q_status="${status//\'/\'\'}"
  local q_meta="${metadata//\'/\'\'}"

  # Try SQLite first; failures are non-fatal (|| true) because the JSONL
  # mirror below remains available for older tooling.
  if check_sqlite3; then
    sqlite3 "$DB_FILE" <<EOF || true
INSERT OR IGNORE INTO events
  (ts, ts_epoch, type, job_id, stage, status, duration_secs, metadata, created_at)
VALUES
  ('${ts}', ${ts_epoch}, '${q_type}', '${q_job}', '${q_stage}', '${q_status}', ${duration_secs}, '${q_meta}', '${ts}');
EOF
  fi

  # Always write to JSONL for backward compat.
  # NOTE(review): this hand-built JSON assumes values contain no double
  # quotes or backslashes, and that metadata (when given) is already valid
  # JSON — confirm against callers.
  mkdir -p "$DB_DIR"
  local json_record
  json_record="{\"ts\":\"${ts}\",\"ts_epoch\":${ts_epoch},\"type\":\"${event_type}\""
  [[ -n "$job_id" ]] && json_record="${json_record},\"job_id\":\"${job_id}\""
  [[ -n "$stage" ]] && json_record="${json_record},\"stage\":\"${stage}\""
  [[ -n "$status" ]] && json_record="${json_record},\"status\":\"${status}\""
  [[ "$duration_secs" -gt 0 ]] && json_record="${json_record},\"duration_secs\":${duration_secs}"
  [[ -n "$metadata" ]] && json_record="${json_record},\"metadata\":${metadata}"
  json_record="${json_record}}"
  echo "$json_record" >> "$EVENTS_FILE"
}
|
|
251
|
+
# ─── Add Pipeline Run ────────────────────────────────────────────────────────
# add_pipeline_run JOB_ID [ISSUE_NUMBER] [GOAL] [BRANCH] [TEMPLATE]
# Inserts a new run with status 'pending'. Returns 1 when sqlite3 is missing
# or the insert fails (e.g. duplicate job_id).
add_pipeline_run() {
  local job_id="$1"
  local issue_number="${2:-0}"
  local goal="${3:-}"
  local branch="${4:-}"
  local template="${5:-standard}"

  if ! check_sqlite3; then
    warn "Skipping pipeline run insert — sqlite3 not available"
    return 1
  fi

  local ts
  ts="$(now_iso)"

  # Escape single quotes for SQL ('' is the SQL escape) — goals routinely
  # contain apostrophes and previously broke the statement.
  local q_job="${job_id//\'/\'\'}"
  local q_goal="${goal//\'/\'\'}"
  local q_branch="${branch//\'/\'\'}"
  local q_template="${template//\'/\'\'}"

  sqlite3 "$DB_FILE" <<EOF || return 1
INSERT INTO pipeline_runs
  (job_id, issue_number, goal, branch, status, template, started_at, created_at)
VALUES
  ('${q_job}', ${issue_number}, '${q_goal}', '${q_branch}', 'pending', '${q_template}', '${ts}', '${ts}');
EOF
}
|
|
275
|
+
# ─── Update Pipeline Run Status ──────────────────────────────────────────────
# update_pipeline_status JOB_ID STATUS [STAGE_NAME] [STAGE_STATUS] [DURATION]
# Updates the run row; sets completed_at when status is completed/failed.
update_pipeline_status() {
  local job_id="$1"
  local status="$2"
  local stage_name="${3:-}"
  local stage_status="${4:-}"
  local duration_secs="${5:-0}"

  if ! check_sqlite3; then
    return 1
  fi

  local ts
  ts="$(now_iso)"

  # Escape single quotes so free-form values cannot break/inject into SQL.
  local q_job="${job_id//\'/\'\'}"
  local q_status="${status//\'/\'\'}"
  local q_stage="${stage_name//\'/\'\'}"
  local q_stage_status="${stage_status//\'/\'\'}"

  sqlite3 "$DB_FILE" <<EOF || return 1
UPDATE pipeline_runs
SET
  status = '${q_status}',
  stage_name = '${q_stage}',
  stage_status = '${q_stage_status}',
  duration_secs = ${duration_secs},
  completed_at = CASE WHEN '${q_status}' = 'completed' OR '${q_status}' = 'failed' THEN '${ts}' ELSE completed_at END
WHERE job_id = '${q_job}';
EOF
}
|
|
302
|
+
# ─── Record Pipeline Stage ──────────────────────────────────────────────────
# record_stage JOB_ID STAGE_NAME STATUS [DURATION_SECS] [ERROR_MSG]
# Appends one row to pipeline_stages. Error messages frequently contain
# quotes, so all text values are SQL-escaped before interpolation.
record_stage() {
  local job_id="$1"
  local stage_name="$2"
  local status="$3"
  local duration_secs="${4:-0}"
  local error_msg="${5:-}"

  if ! check_sqlite3; then
    return 1
  fi

  local ts
  ts="$(now_iso)"

  # Escape single quotes for SQL ('' is the escape sequence).
  local q_job="${job_id//\'/\'\'}"
  local q_stage="${stage_name//\'/\'\'}"
  local q_status="${status//\'/\'\'}"
  local q_error="${error_msg//\'/\'\'}"

  sqlite3 "$DB_FILE" <<EOF || return 1
INSERT INTO pipeline_stages
  (job_id, stage_name, status, started_at, completed_at, duration_secs, error_message, created_at)
VALUES
  ('${q_job}', '${q_stage}', '${q_status}', '${ts}', '${ts}', ${duration_secs}, '${q_error}', '${ts}');
EOF
}
|
|
325
|
+
# ─── Query Pipeline Runs ─────────────────────────────────────────────────────
# query_runs [STATUS] [LIMIT] — print recent runs in column format,
# optionally filtered by status. LIMIT defaults to 50.
query_runs() {
  local status="${1:-}"
  local limit="${2:-50}"

  if ! check_sqlite3; then
    warn "Cannot query — sqlite3 not available"
    return 1
  fi

  # Guard against SQL injection / malformed queries: escape quotes in the
  # status filter and accept only a numeric LIMIT (else use the default).
  local q_status="${status//\'/\'\'}"
  [[ "$limit" =~ ^[0-9]+$ ]] || limit=50

  local query="SELECT job_id, goal, status, template, started_at, duration_secs FROM pipeline_runs"
  [[ -n "$status" ]] && query="${query} WHERE status = '${q_status}'"
  query="${query} ORDER BY created_at DESC LIMIT ${limit};"

  sqlite3 -header -column "$DB_FILE" "$query"
}
|
|
342
|
+
# ─── Export Database to JSON ─────────────────────────────────────────────────
# export_db [OUTPUT_FILE] — write recent events, pipeline runs and developers
# as a single JSON document (default: ~/.shipwright/shipwright-backup.json).
#
# BUG FIX: the original appended every sqlite3 result to a mktemp file that
# was never merged into the output (and never deleted), so the exported JSON
# always contained empty arrays. The queries now stream directly into the
# redirected group, and the temp file is gone.
export_db() {
  local output_file="${1:-${DB_DIR}/shipwright-backup.json}"

  if ! check_sqlite3; then
    warn "Cannot export — sqlite3 not available"
    return 1
  fi

  info "Exporting database to ${output_file}..."

  # sqlite3 -json emits "[ {...}, ... ]"; the sed pair strips the outer
  # brackets so the rows slot into the arrays we open/close with echo.
  # An empty table emits nothing, which still yields a valid empty array.
  {
    echo "{"
    echo "  \"exported_at\": \"$(now_iso)\","
    echo "  \"events\": ["
    sqlite3 -json "$DB_FILE" "SELECT * FROM events ORDER BY ts_epoch DESC LIMIT 1000;" | sed '1s/\[//' | sed '$s/\]//'
    echo "  ],"
    echo "  \"pipeline_runs\": ["
    sqlite3 -json "$DB_FILE" "SELECT * FROM pipeline_runs ORDER BY created_at DESC LIMIT 500;" | sed '1s/\[//' | sed '$s/\]//'
    echo "  ],"
    echo "  \"developers\": ["
    sqlite3 -json "$DB_FILE" "SELECT * FROM developers;" | sed '1s/\[//' | sed '$s/\]//'
    echo "  ]"
    echo "}"
  } > "$output_file"

  success "Database exported to ${output_file}"
}
|
|
380
|
+
# ─── Import Data from JSON ──────────────────────────────────────────────────
# import_db FILE — placeholder for restoring a JSON backup. Validates the
# arguments but the actual JSON parsing/inserting is not implemented yet.
import_db() {
  local input_file="$1"

  # Guard clauses: the file must exist and sqlite3 must be installed.
  if [[ ! -f "$input_file" ]]; then
    error "File not found: ${input_file}"
    return 1
  fi
  if ! check_sqlite3; then
    warn "Cannot import — sqlite3 not available"
    return 1
  fi

  info "Importing data from ${input_file}..."

  # This is a simplified import; a full implementation would parse JSON and insert each record
  warn "Full JSON import not yet implemented — copy database file manually or use CLI commands to rebuild"
}
|
|
400
|
+
# ─── Show Database Status ────────────────────────────────────────────────────
# Print per-table record counts and the five most recent pipeline runs.
# Without sqlite3, fall back to counting the JSON files; without an
# initialized database, tell the user how to create one.
show_status() {
  if ! check_sqlite3; then
    warn "sqlite3 not available"
    echo ""
    echo "Fallback: Reading from JSON files..."
    [[ -f "$EVENTS_FILE" ]] && echo "  Events: $(wc -l < "$EVENTS_FILE") records"
    [[ -f "$DAEMON_STATE_FILE" ]] && echo "  Pipeline state: $(jq '.active_jobs | length' "$DAEMON_STATE_FILE" 2>/dev/null || echo '?')"
    return 0
  fi

  if [[ ! -f "$DB_FILE" ]]; then
    warn "Database not initialized. Run: shipwright db init"
    return 1
  fi

  echo ""
  echo -e "${BOLD}SQLite Database Status${RESET}"
  echo -e "${DIM}Database: ${DB_FILE}${RESET}"
  echo ""

  # One COUNT(*) per table; any query error (e.g. missing table) degrades
  # to "0" instead of aborting under set -e.
  local event_count pipeline_count stage_count developer_count session_count metric_count
  event_count=$(sqlite3 "$DB_FILE" "SELECT COUNT(*) FROM events;" 2>/dev/null || echo "0")
  pipeline_count=$(sqlite3 "$DB_FILE" "SELECT COUNT(*) FROM pipeline_runs;" 2>/dev/null || echo "0")
  stage_count=$(sqlite3 "$DB_FILE" "SELECT COUNT(*) FROM pipeline_stages;" 2>/dev/null || echo "0")
  developer_count=$(sqlite3 "$DB_FILE" "SELECT COUNT(*) FROM developers;" 2>/dev/null || echo "0")
  session_count=$(sqlite3 "$DB_FILE" "SELECT COUNT(*) FROM sessions;" 2>/dev/null || echo "0")
  metric_count=$(sqlite3 "$DB_FILE" "SELECT COUNT(*) FROM metrics;" 2>/dev/null || echo "0")

  echo -e "${CYAN}Events${RESET} ${event_count} records"
  echo -e "${CYAN}Pipeline Runs${RESET} ${pipeline_count} records"
  echo -e "${CYAN}Pipeline Stages${RESET} ${stage_count} records"
  echo -e "${CYAN}Developers${RESET} ${developer_count} records"
  echo -e "${CYAN}Sessions${RESET} ${session_count} records"
  echo -e "${CYAN}Metrics${RESET} ${metric_count} records"

  echo ""
  echo -e "${BOLD}Recent Runs${RESET}"
  sqlite3 -header -column "$DB_FILE" "SELECT job_id, goal, status, template, datetime(started_at) as started FROM pipeline_runs ORDER BY created_at DESC LIMIT 5;"
}
|
|
441
|
+
# ─── Clean Old Records ──────────────────────────────────────────────────────
# cleanup_old_data [DAYS] — delete events and pipeline runs older than DAYS
# (default 30) and report how many rows each DELETE removed.
cleanup_old_data() {
  local days="${1:-30}"

  if ! check_sqlite3; then
    warn "Cannot cleanup — sqlite3 not available"
    return 1
  fi

  # GNU date first, then BSD date; if both fail, fall back to "now"
  # (which deletes everything older than this instant).
  local cutoff_date
  cutoff_date=$(date -u -d "-${days} days" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
    date -u -v-${days}d +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
    date -u +"%Y-%m-%dT%H:%M:%SZ")

  info "Cleaning records older than ${days} days (before ${cutoff_date})..."

  # BUG FIX: "DELETE ... RETURNING COUNT(*)" is invalid SQLite (aggregate
  # functions are not allowed in a RETURNING clause), so the counts were
  # always reported as 0. Run the DELETE and read changes() in the same
  # sqlite3 invocation instead.
  local deleted_events deleted_runs
  deleted_events=$(sqlite3 "$DB_FILE" "DELETE FROM events WHERE ts < '${cutoff_date}'; SELECT changes();" 2>/dev/null || echo "0")
  deleted_runs=$(sqlite3 "$DB_FILE" "DELETE FROM pipeline_runs WHERE created_at < '${cutoff_date}'; SELECT changes();" 2>/dev/null || echo "0")

  success "Deleted ${deleted_events} old events and ${deleted_runs} old pipeline runs"
}
|
|
464
|
+
# ─── Show Help ──────────────────────────────────────────────────────────────
# Print usage, the command list, and examples for the db subcommand.
show_help() {
  echo -e "${CYAN}${BOLD}shipwright db${RESET} — SQLite Persistence Layer"
  echo ""
  echo -e "${BOLD}USAGE${RESET}"
  echo -e "  shipwright db <command> [options]"
  echo ""
  echo -e "${BOLD}COMMANDS${RESET}"
  echo -e "  ${CYAN}init${RESET}           Initialize database schema"
  echo -e "  ${CYAN}migrate${RESET}        Apply schema migrations"
  echo -e "  ${CYAN}status${RESET}         Show database stats and recent runs"
  echo -e "  ${CYAN}query${RESET} [status] Query pipeline runs by status"
  echo -e "  ${CYAN}export${RESET} [file]  Export database to JSON backup"
  echo -e "  ${CYAN}import${RESET} <file>  Import data from JSON backup"
  echo -e "  ${CYAN}cleanup${RESET} [days] Delete records older than N days (default 30)"
  echo -e "  ${CYAN}help${RESET}           Show this help"
  echo ""
  echo -e "${DIM}Examples:${RESET}"
  echo -e "  shipwright db init"
  echo -e "  shipwright db status"
  echo -e "  shipwright db query failed"
  echo -e "  shipwright db export ~/backups/db-backup.json"
  echo -e "  shipwright db cleanup 60"
}
|
|
489
|
+
# ─── Main Router ────────────────────────────────────────────────────────────
# Dispatch the first CLI argument to the matching subcommand handler; the
# remaining arguments are forwarded as that handler's options.
main() {
  local action="${1:-help}"
  shift 2>/dev/null || true

  case "$action" in
    init)
      ensure_db_dir
      init_schema
      success "Database initialized at ${DB_FILE}"
      ;;
    migrate)
      migrate_schema
      ;;
    status)
      show_status
      ;;
    query)
      query_runs "${1:-}"
      ;;
    export)
      export_db "${1:-${DB_DIR}/shipwright-backup.json}"
      ;;
    import)
      if [[ -z "${1:-}" ]]; then
        error "Please provide a file to import"
        exit 1
      fi
      import_db "$1"
      ;;
    cleanup)
      cleanup_old_data "${1:-30}"
      ;;
    help|--help|-h)
      show_help
      ;;
    *)
      error "Unknown command: ${action}"
      echo ""
      show_help
      exit 1
      ;;
  esac
}

if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
  main "$@"
fi
|