shipwright-cli 2.3.1 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +95 -28
- package/completions/_shipwright +1 -1
- package/completions/shipwright.bash +3 -8
- package/completions/shipwright.fish +1 -1
- package/config/defaults.json +111 -0
- package/config/event-schema.json +81 -0
- package/config/policy.json +155 -2
- package/config/policy.schema.json +162 -1
- package/dashboard/coverage/coverage-summary.json +14 -0
- package/dashboard/public/index.html +1 -1
- package/dashboard/server.ts +306 -17
- package/dashboard/src/components/charts/bar.test.ts +79 -0
- package/dashboard/src/components/charts/donut.test.ts +68 -0
- package/dashboard/src/components/charts/pipeline-rail.test.ts +117 -0
- package/dashboard/src/components/charts/sparkline.test.ts +125 -0
- package/dashboard/src/core/api.test.ts +309 -0
- package/dashboard/src/core/helpers.test.ts +301 -0
- package/dashboard/src/core/router.test.ts +307 -0
- package/dashboard/src/core/router.ts +7 -0
- package/dashboard/src/core/sse.test.ts +144 -0
- package/dashboard/src/views/metrics.test.ts +186 -0
- package/dashboard/src/views/overview.test.ts +173 -0
- package/dashboard/src/views/pipelines.test.ts +183 -0
- package/dashboard/src/views/team.test.ts +253 -0
- package/dashboard/vitest.config.ts +14 -5
- package/docs/TIPS.md +1 -1
- package/docs/patterns/README.md +1 -1
- package/package.json +15 -5
- package/scripts/adapters/docker-deploy.sh +1 -1
- package/scripts/adapters/tmux-adapter.sh +11 -1
- package/scripts/adapters/wezterm-adapter.sh +1 -1
- package/scripts/check-version-consistency.sh +1 -1
- package/scripts/lib/architecture.sh +126 -0
- package/scripts/lib/bootstrap.sh +75 -0
- package/scripts/lib/compat.sh +89 -6
- package/scripts/lib/config.sh +91 -0
- package/scripts/lib/daemon-adaptive.sh +3 -3
- package/scripts/lib/daemon-dispatch.sh +39 -16
- package/scripts/lib/daemon-health.sh +1 -1
- package/scripts/lib/daemon-patrol.sh +24 -12
- package/scripts/lib/daemon-poll.sh +37 -25
- package/scripts/lib/daemon-state.sh +115 -23
- package/scripts/lib/daemon-triage.sh +30 -8
- package/scripts/lib/fleet-failover.sh +63 -0
- package/scripts/lib/helpers.sh +30 -6
- package/scripts/lib/pipeline-detection.sh +2 -2
- package/scripts/lib/pipeline-github.sh +9 -9
- package/scripts/lib/pipeline-intelligence.sh +85 -35
- package/scripts/lib/pipeline-quality-checks.sh +16 -16
- package/scripts/lib/pipeline-quality.sh +1 -1
- package/scripts/lib/pipeline-stages.sh +242 -28
- package/scripts/lib/pipeline-state.sh +40 -4
- package/scripts/lib/test-helpers.sh +247 -0
- package/scripts/postinstall.mjs +3 -11
- package/scripts/sw +10 -4
- package/scripts/sw-activity.sh +1 -11
- package/scripts/sw-adaptive.sh +109 -85
- package/scripts/sw-adversarial.sh +4 -14
- package/scripts/sw-architecture-enforcer.sh +1 -11
- package/scripts/sw-auth.sh +8 -17
- package/scripts/sw-autonomous.sh +111 -49
- package/scripts/sw-changelog.sh +1 -11
- package/scripts/sw-checkpoint.sh +144 -20
- package/scripts/sw-ci.sh +2 -12
- package/scripts/sw-cleanup.sh +13 -17
- package/scripts/sw-code-review.sh +16 -36
- package/scripts/sw-connect.sh +5 -12
- package/scripts/sw-context.sh +9 -26
- package/scripts/sw-cost.sh +6 -16
- package/scripts/sw-daemon.sh +75 -70
- package/scripts/sw-dashboard.sh +57 -17
- package/scripts/sw-db.sh +506 -15
- package/scripts/sw-decompose.sh +1 -11
- package/scripts/sw-deps.sh +15 -25
- package/scripts/sw-developer-simulation.sh +1 -11
- package/scripts/sw-discovery.sh +112 -30
- package/scripts/sw-doc-fleet.sh +7 -17
- package/scripts/sw-docs-agent.sh +6 -16
- package/scripts/sw-docs.sh +4 -12
- package/scripts/sw-doctor.sh +134 -43
- package/scripts/sw-dora.sh +11 -19
- package/scripts/sw-durable.sh +35 -52
- package/scripts/sw-e2e-orchestrator.sh +11 -27
- package/scripts/sw-eventbus.sh +115 -115
- package/scripts/sw-evidence.sh +748 -0
- package/scripts/sw-feedback.sh +3 -13
- package/scripts/sw-fix.sh +2 -20
- package/scripts/sw-fleet-discover.sh +1 -11
- package/scripts/sw-fleet-viz.sh +10 -18
- package/scripts/sw-fleet.sh +13 -17
- package/scripts/sw-github-app.sh +6 -16
- package/scripts/sw-github-checks.sh +1 -11
- package/scripts/sw-github-deploy.sh +1 -11
- package/scripts/sw-github-graphql.sh +2 -12
- package/scripts/sw-guild.sh +1 -11
- package/scripts/sw-heartbeat.sh +49 -12
- package/scripts/sw-hygiene.sh +45 -43
- package/scripts/sw-incident.sh +284 -67
- package/scripts/sw-init.sh +35 -37
- package/scripts/sw-instrument.sh +1 -11
- package/scripts/sw-intelligence.sh +362 -51
- package/scripts/sw-jira.sh +5 -14
- package/scripts/sw-launchd.sh +2 -12
- package/scripts/sw-linear.sh +8 -17
- package/scripts/sw-logs.sh +4 -12
- package/scripts/sw-loop.sh +641 -90
- package/scripts/sw-memory.sh +243 -17
- package/scripts/sw-mission-control.sh +2 -12
- package/scripts/sw-model-router.sh +73 -34
- package/scripts/sw-otel.sh +11 -21
- package/scripts/sw-oversight.sh +1 -11
- package/scripts/sw-patrol-meta.sh +5 -11
- package/scripts/sw-pipeline-composer.sh +7 -17
- package/scripts/sw-pipeline-vitals.sh +1 -11
- package/scripts/sw-pipeline.sh +478 -122
- package/scripts/sw-pm.sh +2 -12
- package/scripts/sw-pr-lifecycle.sh +203 -29
- package/scripts/sw-predictive.sh +16 -22
- package/scripts/sw-prep.sh +6 -16
- package/scripts/sw-ps.sh +1 -11
- package/scripts/sw-public-dashboard.sh +2 -12
- package/scripts/sw-quality.sh +77 -10
- package/scripts/sw-reaper.sh +1 -11
- package/scripts/sw-recruit.sh +15 -25
- package/scripts/sw-regression.sh +11 -21
- package/scripts/sw-release-manager.sh +19 -28
- package/scripts/sw-release.sh +8 -16
- package/scripts/sw-remote.sh +1 -11
- package/scripts/sw-replay.sh +48 -44
- package/scripts/sw-retro.sh +70 -92
- package/scripts/sw-review-rerun.sh +220 -0
- package/scripts/sw-scale.sh +109 -32
- package/scripts/sw-security-audit.sh +12 -22
- package/scripts/sw-self-optimize.sh +239 -23
- package/scripts/sw-session.sh +3 -13
- package/scripts/sw-setup.sh +8 -18
- package/scripts/sw-standup.sh +5 -15
- package/scripts/sw-status.sh +32 -23
- package/scripts/sw-strategic.sh +129 -13
- package/scripts/sw-stream.sh +1 -11
- package/scripts/sw-swarm.sh +76 -36
- package/scripts/sw-team-stages.sh +10 -20
- package/scripts/sw-templates.sh +4 -14
- package/scripts/sw-testgen.sh +3 -13
- package/scripts/sw-tmux-pipeline.sh +1 -19
- package/scripts/sw-tmux-role-color.sh +0 -10
- package/scripts/sw-tmux-status.sh +3 -11
- package/scripts/sw-tmux.sh +2 -20
- package/scripts/sw-trace.sh +1 -19
- package/scripts/sw-tracker-github.sh +0 -10
- package/scripts/sw-tracker-jira.sh +1 -11
- package/scripts/sw-tracker-linear.sh +1 -11
- package/scripts/sw-tracker.sh +7 -24
- package/scripts/sw-triage.sh +24 -34
- package/scripts/sw-upgrade.sh +5 -23
- package/scripts/sw-ux.sh +1 -19
- package/scripts/sw-webhook.sh +18 -32
- package/scripts/sw-widgets.sh +3 -21
- package/scripts/sw-worktree.sh +11 -27
- package/scripts/update-homebrew-sha.sh +67 -0
- package/templates/pipelines/tdd.json +72 -0
- package/scripts/sw-pipeline.sh.mock +0 -7
|
@@ -14,9 +14,14 @@
|
|
|
14
14
|
# Track spawned panes by agent name → pane ID (file-based for bash 3.2 compat)
|
|
15
15
|
_TMUX_PANE_MAP="${TMPDIR:-/tmp}/shipwright-tmux-pane-map.$$"
|
|
16
16
|
: > "$_TMUX_PANE_MAP"
|
|
17
|
-
trap '
|
|
17
|
+
trap '
|
|
18
|
+
if [[ -f "$_TMUX_PANE_MAP" ]] && [[ ! -s "$_TMUX_PANE_MAP" ]]; then
|
|
19
|
+
rm -f "$_TMUX_PANE_MAP"
|
|
20
|
+
fi
|
|
21
|
+
' EXIT
|
|
18
22
|
|
|
19
23
|
spawn_agent() {
|
|
24
|
+
[[ -z "${WINDOW_NAME:-}" ]] && { echo "ERROR: WINDOW_NAME not set" >&2; return 1; }
|
|
20
25
|
local name="$1"
|
|
21
26
|
local working_dir="${2:-#{pane_current_path}}"
|
|
22
27
|
local command="${3:-}"
|
|
@@ -32,6 +37,11 @@ spawn_agent() {
|
|
|
32
37
|
new_pane_id=$(tmux split-window -t "$WINDOW_NAME" -c "$working_dir" -P -F '#{pane_id}')
|
|
33
38
|
fi
|
|
34
39
|
|
|
40
|
+
if [[ -z "$new_pane_id" ]]; then
|
|
41
|
+
echo "ERROR: Failed to create tmux pane for agent '$name'" >&2
|
|
42
|
+
return 1
|
|
43
|
+
fi
|
|
44
|
+
|
|
35
45
|
# Record the mapping: name → pane_id
|
|
36
46
|
echo "${name}=${new_pane_id}" >> "$_TMUX_PANE_MAP"
|
|
37
47
|
|
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
# ╚═══════════════════════════════════════════════════════════════════════════╝
|
|
10
10
|
|
|
11
11
|
# Verify wezterm CLI is available
|
|
12
|
-
if ! command -v wezterm
|
|
12
|
+
if ! command -v wezterm >/dev/null 2>&1; then
|
|
13
13
|
echo -e "\033[38;2;248;113;113m\033[1m✗\033[0m wezterm CLI not found. Install WezTerm first." >&2
|
|
14
14
|
exit 1
|
|
15
15
|
fi
|
|
@@ -11,7 +11,7 @@ REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
|
|
11
11
|
|
|
12
12
|
CANONICAL=""
|
|
13
13
|
if [[ -f "$REPO_ROOT/package.json" ]]; then
|
|
14
|
-
if command -v jq
|
|
14
|
+
if command -v jq >/dev/null 2>&1; then
|
|
15
15
|
CANONICAL="$(jq -r .version "$REPO_ROOT/package.json")"
|
|
16
16
|
else
|
|
17
17
|
CANONICAL="$(grep -oE '"version":\s*"[^"]+"' "$REPO_ROOT/package.json" | head -1 | sed 's/.*"\([^"]*\)".*/\1/')"
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# architecture.sh — Gather call-graph and dependency context for plan/design stages
# Source from pipeline-stages. Requires compat (detect_primary_language).

# Source guard: sourcing this file twice is a no-op.
[[ -n "${_ARCHITECTURE_CONTEXT_LOADED:-}" ]] && return 0
_ARCHITECTURE_CONTEXT_LOADED=1

# Gather rich architecture context: structure, imports, modules, entry points, test map
#
# Arguments: $1 - repo root to scan (default ".")
# Outputs:   a markdown-formatted report on stdout with sections:
#            Project Structure, Import Graph / Source Dependencies,
#            Module Export Counts (JS/TS only), Module Boundaries,
#            Entry Points, Test Coverage Map.
# Returns:   0; underlying tool failures are swallowed (2>/dev/null),
#            producing empty sections rather than errors.
gather_architecture_context() {
  local repo_root="${1:-.}"
  local context=""

  # 1. File structure — first 100 source files, excluding node_modules/.git.
  # NOTE(review): 'grep -v .git' is an unescaped regex ('.' matches any char),
  # so it can also drop unrelated paths such as ones containing "digit" — confirm.
  context="## Project Structure
$(find "$repo_root" -type f \( -name '*.ts' -o -name '*.js' -o -name '*.py' -o -name '*.sh' -o -name '*.go' -o -name '*.rs' \) 2>/dev/null | grep -v node_modules | grep -v .git | head -100 | sort)

"

  # 2. Import/dependency graph (language-specific)
  local lang=""
  if type detect_primary_language >/dev/null 2>&1; then
    lang=$(detect_primary_language "$repo_root" 2>/dev/null || echo "unknown")
  else
    # compat.sh not sourced; fall through to the generic branch below.
    lang="unknown"
  fi

  case "$lang" in
    typescript|javascript|nodejs)
      context="${context}## Import Graph (Top Dependencies)
"
      local imports=""
      for dir in "$repo_root/src" "$repo_root/lib" "$repo_root/app"; do
        [[ -d "$dir" ]] || continue
        # Count import/require targets per directory, most-used first (top 20).
        imports=$(grep -rh "^import .* from\|require(" "$dir" 2>/dev/null | \
          grep -oE "from ['\"]([^'\"]+)['\"]|require\\(['\"]([^'\"]+)['\"]\\)" | \
          sed "s/from ['\"]//;s/['\"]//g;s/require(//;s/)//g" | \
          sort | uniq -c | sort -rn | head -20)
        [[ -n "$imports" ]] && context="${context}${imports}
"
      done
      # NOTE(review): only the LAST scanned directory decides this flag; if it
      # had no imports, "(none detected)" is appended even when earlier
      # directories contributed results — confirm that is intended.
      [[ -z "$imports" ]] && context="${context}(none detected)
"

      # Files exporting more than 2 symbols are listed as notable modules.
      context="${context}## Module Export Counts
"
      local f
      while IFS= read -r f; do
        [[ -f "$f" ]] || continue
        local exports=0
        exports=$(grep -c "^export " "$f" 2>/dev/null || echo "0")
        # The '2>/dev/null' on [[ ]] suppresses errors when $exports is non-numeric.
        [[ "$exports" -gt 2 ]] 2>/dev/null && context="${context} $(basename "$f"): $exports exports
"
      done < <(find "$repo_root/src" "$repo_root/lib" -name "*.ts" -o -name "*.js" 2>/dev/null | head -30)
      ;;

    python)
      context="${context}## Import Graph (Top Dependencies)
"
      local py_imports=""
      py_imports=$(find "$repo_root" -name "*.py" -type f 2>/dev/null | \
        xargs grep -h "^from \|^import " 2>/dev/null | \
        grep -v __pycache__ | sort | uniq -c | sort -rn | head -20)
      context="${context}${py_imports}
"
      ;;

    bash|shell)
      # For shell projects the "imports" are source statements under scripts/.
      context="${context}## Source Dependencies
"
      local sh_imports=""
      [[ -d "$repo_root/scripts" ]] && \
        sh_imports=$(grep -rh "^source \|^\. " "$repo_root/scripts" --include="*.sh" 2>/dev/null | \
          sort | uniq -c | sort -rn | head -20)
      context="${context}${sh_imports}
"
      ;;

    *)
      context="${context}## Dependencies
(Language: $lang — no specific import analysis)
"
      ;;
  esac

  # 3. Module boundaries (directories with >2 files = modules)
  context="${context}## Module Boundaries
"
  local dir
  while IFS= read -r dir; do
    [[ -d "$dir" ]] || continue
    local count=0
    count=$(find "$dir" -maxdepth 1 -type f \( -name "*.ts" -o -name "*.js" -o -name "*.py" -o -name "*.sh" \) 2>/dev/null | wc -l | tr -d ' ')
    [[ "$count" -gt 2 ]] 2>/dev/null && context="${context} $(basename "$dir")/: $count files
"
  done < <(find "$repo_root/src" "$repo_root/lib" "$repo_root/scripts" -maxdepth 2 -type d 2>/dev/null | head -30)

  # 4. Entry points — package.json main/bin and up to 10 Makefile targets.
  context="${context}## Entry Points
"
  if [[ -f "$repo_root/package.json" ]] && command -v jq >/dev/null 2>&1; then
    local main
    # .bin may be an object (multi-command packages); report its first key.
    main=$(jq -r '.main // .bin // "index.js" | if type == "object" then (. | keys[0]) else . end' "$repo_root/package.json" 2>/dev/null || echo "")
    [[ -n "$main" && "$main" != "null" ]] && context="${context} package.json: $main
"
  fi
  if [[ -f "$repo_root/Makefile" ]]; then
    local targets
    # NOTE(review): tr '\n' ', ' maps newline->comma only (extra SET2 chars are
    # ignored by tr), so the list is comma-joined with a trailing comma — confirm.
    targets=$(grep '^[a-zA-Z][a-zA-Z0-9_-]*:' "$repo_root/Makefile" 2>/dev/null | cut -d: -f1 | head -10 | tr '\n' ', ')
    [[ -n "$targets" ]] && context="${context} Makefile targets: $targets
"
  fi

  # 5. Test-to-source mapping — pair each test file with the first source file
  # sharing its base name (test/spec suffixes stripped, capped at 50 chars).
  context="${context}## Test Coverage Map
"
  local test_file
  while IFS= read -r test_file; do
    [[ -f "$test_file" ]] || continue
    local base
    base=$(basename "$test_file" | sed 's/[-.]test//;s/[-.]spec//;s/__tests__//;s/\..*$//' | head -c 50)
    [[ -z "$base" ]] && continue
    local source
    source=$(find "$repo_root/src" "$repo_root/lib" "$repo_root/scripts" -name "${base}.*" -type f 2>/dev/null | head -1)
    [[ -n "$source" ]] && context="${context} $test_file -> $source
"
  done < <(find "$repo_root" -path "*node_modules" -prune -o -path "*/.git" -prune -o \( -name "*test*" -o -name "*spec*" \) -type f -print 2>/dev/null | head -20)

  echo "$context"
}
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# ╔═══════════════════════════════════════════════════════════════════════════╗
|
|
3
|
+
# ║ shipwright bootstrap — Cold-start initialization for optimization data ║
|
|
4
|
+
# ║ Creates sensible defaults when no historical data exists (new installs) ║
|
|
5
|
+
# ╚═══════════════════════════════════════════════════════════════════════════╝
|
|
6
|
+
|
|
7
|
+
# bootstrap_optimization — create default iteration model, template weights, model routing
# Seeds ~/.shipwright/optimization with cold-start defaults. Existing files are
# never overwritten, so real learned data always wins on subsequent runs.
bootstrap_optimization() {
  local seed_dir="$HOME/.shipwright/optimization"
  mkdir -p "$seed_dir"

  # Iteration-count model per complexity bucket (samples=0 marks it as unlearned).
  [[ -f "$seed_dir/iteration-model.json" ]] || cat > "$seed_dir/iteration-model.json" <<'JSON'
{
  "low": {"mean": 5, "stddev": 2, "samples": 0, "source": "bootstrap"},
  "medium": {"mean": 12, "stddev": 4, "samples": 0, "source": "bootstrap"},
  "high": {"mean": 25, "stddev": 8, "samples": 0, "source": "bootstrap"}
}
JSON

  # Neutral template weights — every pipeline template starts equally likely.
  [[ -f "$seed_dir/template-weights.json" ]] || cat > "$seed_dir/template-weights.json" <<'JSON'
{
  "standard": 1.0,
  "hotfix": 1.0,
  "docs": 1.0,
  "refactor": 1.0,
  "source": "bootstrap"
}
JSON

  # Stage → model routing: heavier model for plan/design, lighter elsewhere.
  [[ -f "$seed_dir/model-routing.json" ]] || cat > "$seed_dir/model-routing.json" <<'JSON'
{
  "routes": {
    "plan": {"recommended": "opus", "source": "bootstrap"},
    "design": {"recommended": "opus", "source": "bootstrap"},
    "build": {"recommended": "sonnet", "source": "bootstrap"},
    "test": {"recommended": "sonnet", "source": "bootstrap"},
    "review": {"recommended": "sonnet", "source": "bootstrap"}
  },
  "default": "sonnet",
  "source": "bootstrap"
}
JSON
}
|
|
53
|
+
|
|
54
|
+
# bootstrap_memory — create initial memory patterns based on project type
# Detects the project type from marker files in the current directory and
# writes ~/.shipwright/memory/patterns.json once; an existing file is kept.
bootstrap_memory() {
  local store="$HOME/.shipwright/memory"
  mkdir -p "$store"
  [[ -f "$store/patterns.json" ]] && return 0

  # Detect project type; later checks deliberately override earlier ones.
  local kind="unknown"
  [[ -f "package.json" ]] && kind="nodejs"
  [[ -f "requirements.txt" || -f "pyproject.toml" ]] && kind="python"
  [[ -f "Cargo.toml" ]] && kind="rust"
  [[ -f "go.mod" ]] && kind="go"

  # Unquoted heredoc on purpose: $kind and $(date) must expand.
  cat > "$store/patterns.json" << JSON
{
  "project_type": "$kind",
  "detected_at": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "source": "bootstrap"
}
JSON
}
|
package/scripts/lib/compat.sh
CHANGED
|
@@ -9,11 +9,14 @@
|
|
|
9
9
|
#
|
|
10
10
|
# Provides:
|
|
11
11
|
# - NO_COLOR / dumb terminal / non-tty detection (auto-blanks color vars)
|
|
12
|
+
# - _to_lower() / _to_upper() — bash 3.2 compat (${var,,}/${var^^} require bash 4+)
|
|
13
|
+
# - file_mtime() — cross-platform file modification time (epoch)
|
|
12
14
|
# - sed_i() — cross-platform sed in-place editing
|
|
13
15
|
# - open_url() — cross-platform browser open
|
|
14
16
|
# - tmp_dir() — returns best temp directory for platform
|
|
15
17
|
# - is_wsl() — detect WSL environment
|
|
16
18
|
# - is_macos() / is_linux() — platform checks
|
|
19
|
+
# - _timeout() — run command with timeout (timeout/gtimeout or no-op on macOS)
|
|
17
20
|
|
|
18
21
|
# ─── NO_COLOR support (https://no-color.org/) ─────────────────────────────
|
|
19
22
|
# Blanks standard color variables when:
|
|
@@ -30,6 +33,11 @@ _COMPAT_UNAME="${_COMPAT_UNAME:-$(uname -s 2>/dev/null || echo "Unknown")}"
|
|
|
30
33
|
|
|
31
34
|
is_macos() { [[ "$_COMPAT_UNAME" == "Darwin" ]]; }
|
|
32
35
|
is_linux() { [[ "$_COMPAT_UNAME" == "Linux" ]]; }
|
|
36
|
+
|
|
37
|
+
# ─── Bash 3.2 compat (macOS ships bash 3.2) ───────────────────────────────
|
|
38
|
+
# Case conversion: ${var,,} and ${var^^} require bash 4+. Use these instead:
|
|
39
|
+
_to_lower() { echo "$1" | tr '[:upper:]' '[:lower:]'; }
|
|
40
|
+
_to_upper() { echo "$1" | tr '[:lower:]' '[:upper:]'; }
|
|
33
41
|
is_wsl() { is_linux && [[ -n "${WSL_DISTRO_NAME:-}" || -f /proc/version ]] && grep -qi microsoft /proc/version 2>/dev/null; }
|
|
34
42
|
|
|
35
43
|
# ─── sed -i (macOS vs GNU) ────────────────────────────────────────────────
|
|
@@ -49,14 +57,14 @@ open_url() {
|
|
|
49
57
|
open "$url"
|
|
50
58
|
elif is_wsl; then
|
|
51
59
|
# WSL: use wslview (from wslu) or powershell
|
|
52
|
-
if command -v wslview
|
|
60
|
+
if command -v wslview >/dev/null 2>&1; then
|
|
53
61
|
wslview "$url"
|
|
54
|
-
elif command -v powershell.exe
|
|
62
|
+
elif command -v powershell.exe >/dev/null 2>&1; then
|
|
55
63
|
powershell.exe -Command "Start-Process '$url'" 2>/dev/null
|
|
56
64
|
else
|
|
57
65
|
return 1
|
|
58
66
|
fi
|
|
59
|
-
elif command -v xdg-open
|
|
67
|
+
elif command -v xdg-open >/dev/null 2>&1; then
|
|
60
68
|
xdg-open "$url"
|
|
61
69
|
else
|
|
62
70
|
return 1
|
|
@@ -83,7 +91,7 @@ sw_valid_error_category() {
|
|
|
83
91
|
local category="${1:-}"
|
|
84
92
|
local custom_file="$HOME/.shipwright/optimization/error-taxonomy.json"
|
|
85
93
|
# Check custom taxonomy first
|
|
86
|
-
if [[ -f "$custom_file" ]] && command -v jq
|
|
94
|
+
if [[ -f "$custom_file" ]] && command -v jq >/dev/null 2>&1; then
|
|
87
95
|
local custom_cats
|
|
88
96
|
custom_cats=$(jq -r '.categories[]? // empty' "$custom_file" 2>/dev/null || true)
|
|
89
97
|
if [[ -n "$custom_cats" ]]; then
|
|
@@ -113,7 +121,7 @@ complexity_bucket() {
|
|
|
113
121
|
local config_file="$HOME/.shipwright/optimization/complexity-clusters.json"
|
|
114
122
|
local low_boundary=3
|
|
115
123
|
local high_boundary=6
|
|
116
|
-
if [[ -f "$config_file" ]] && command -v jq
|
|
124
|
+
if [[ -f "$config_file" ]] && command -v jq >/dev/null 2>&1; then
|
|
117
125
|
local lb hb
|
|
118
126
|
lb=$(jq -r '.low_boundary // 3' "$config_file" 2>/dev/null || echo "3")
|
|
119
127
|
hb=$(jq -r '.high_boundary // 6' "$config_file" 2>/dev/null || echo "6")
|
|
@@ -156,7 +164,7 @@ detect_primary_language() {
|
|
|
156
164
|
|
|
157
165
|
detect_test_framework() {
|
|
158
166
|
local dir="${1:-.}"
|
|
159
|
-
if [[ -f "$dir/package.json" ]] && command -v jq
|
|
167
|
+
if [[ -f "$dir/package.json" ]] && command -v jq >/dev/null 2>&1; then
|
|
160
168
|
local runner
|
|
161
169
|
runner=$(jq -r '
|
|
162
170
|
if .devDependencies.vitest then "vitest"
|
|
@@ -184,6 +192,81 @@ detect_test_framework() {
|
|
|
184
192
|
fi
|
|
185
193
|
}
|
|
186
194
|
|
|
195
|
+
# ─── Cross-platform file modification time (epoch) ────────────────────────
# macOS/BSD: stat -f %m; Linux: stat -c '%Y'. Prints "0" when neither works
# (e.g. the file does not exist).
file_mtime() {
  local target="$1"
  # Try the BSD flavor first; fall back to GNU, then to the "0" sentinel.
  if ! stat -f %m "$target" 2>/dev/null; then
    stat -c '%Y' "$target" 2>/dev/null || echo "0"
  fi
}
|
|
201
|
+
|
|
202
|
+
# ─── Timeout command (macOS may lack timeout; gtimeout from coreutils) ─────
# Usage: _timeout <seconds> <command> [args...]
# Runs the command under GNU timeout/gtimeout when available; otherwise runs
# it unbounded so callers still make progress on stock macOS.
_timeout() {
  local limit="$1"
  shift
  local runner=""
  if command -v timeout >/dev/null 2>&1; then
    runner="timeout"
  elif command -v gtimeout >/dev/null 2>&1; then
    runner="gtimeout"
  fi
  if [[ -n "$runner" ]]; then
    "$runner" "$limit" "$@"
  else
    # Fallback: no timeout binary found — execute directly.
    "$@"
  fi
}
|
|
216
|
+
|
|
217
|
+
# ─── Cross-platform date helpers (GNU date -d vs BSD date -j/-v) ──────────
# date_to_epoch: convert date string to Unix epoch
# date_days_ago: YYYY-MM-DD for N days ago
# date_add_days: YYYY-MM-DD for base_date + N days
# epoch_to_iso: convert epoch to ISO 8601
date_to_epoch() {
  local stamp="$1"
  # GNU date parses free-form strings via -d; this succeeds on Linux.
  if date -u -d "$stamp" +%s 2>/dev/null; then
    return
  fi
  # BSD/macOS date needs an explicit input format: -j (don't set the clock),
  # -f (parse format). Pick the format by whether a time component is present.
  local parse_fmt
  case "$stamp" in
    *T*) parse_fmt="%Y-%m-%dT%H:%M:%SZ" ;;
    *)   parse_fmt="%Y-%m-%d" ;;
  esac
  # Prints "0" when neither flavor can parse the string.
  date -u -j -f "$parse_fmt" "$stamp" +%s 2>/dev/null || echo "0"
}
|
|
236
|
+
|
|
237
|
+
# date_days_ago N — print YYYY-MM-DD for N days before today (UTC).
# GNU path uses natural-language offsets; BSD path uses -v adjustment.
# Prints the epoch origin date when both flavors fail.
date_days_ago() {
  local back="$1"
  if date -u -d "$back days ago" +%Y-%m-%d 2>/dev/null; then
    return
  fi
  date -u -v"-${back}d" +%Y-%m-%d 2>/dev/null || echo "1970-01-01"
}
|
|
244
|
+
|
|
245
|
+
# date_add_days BASE N — print YYYY-MM-DD for BASE + N days (UTC).
# GNU date does the arithmetic natively; the BSD fallback converts BASE to an
# epoch (via date_to_epoch) and adds whole days. Prints 1970-01-01 on failure.
date_add_days() {
  local start="$1"
  local offset="$2"
  if date -u -d "${start} + ${offset} days" +%Y-%m-%d 2>/dev/null; then
    return
  fi
  # BSD fallback: epoch arithmetic.
  local start_epoch
  start_epoch=$(date_to_epoch "$start")
  if [[ -z "$start_epoch" || "$start_epoch" == "0" ]]; then
    echo "1970-01-01"
    return
  fi
  local sum_epoch=$((start_epoch + (offset * 86400)))
  date -u -r "$sum_epoch" +%Y-%m-%d 2>/dev/null \
    || date -u -d "@$sum_epoch" +%Y-%m-%d 2>/dev/null \
    || echo "1970-01-01"
}
|
|
261
|
+
|
|
262
|
+
# epoch_to_iso EPOCH — convert a Unix epoch (seconds) to ISO-8601 UTC,
# e.g. 1700000000 -> "2023-11-14T22:13:20Z".
# Tries GNU date first (-d @epoch), then BSD date (-r epoch), then python3;
# prints the epoch origin if every converter fails.
epoch_to_iso() {
  local epoch="$1"
  # GNU attempt FIRST: on GNU coreutils, 'date -r' means "use this FILE's
  # mtime", so trying -r first silently returned the wrong time whenever the
  # cwd happened to contain a file named like the epoch value.
  date -u -d "@$epoch" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
    date -u -r "$epoch" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
    python3 -c 'import datetime, sys; print(datetime.datetime.fromtimestamp(int(sys.argv[1]), datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"))' "$epoch" 2>/dev/null || \
    echo "1970-01-01T00:00:00Z"
  # Python fallback passes the epoch via argv (no code injection) and uses the
  # tz-aware fromtimestamp — utcfromtimestamp is deprecated since Python 3.12.
}
|
|
269
|
+
|
|
187
270
|
# ─── Cross-platform MD5 ──────────────────────────────────────────────────
|
|
188
271
|
# Usage:
|
|
189
272
|
# compute_md5 --string "some text" → md5 hash of string
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# config.sh — Centralized configuration reader for Shipwright
# Precedence: SHIPWRIGHT_* env var > daemon-config.json > policy.json > defaults.json
# Usage: source "$SCRIPT_DIR/lib/config.sh"
#        val=$(_config_get "daemon.poll_interval")

# Source guard: make repeated sourcing a no-op.
[[ -n "${_SW_CONFIG_LOADED:-}" ]] && return 0
_SW_CONFIG_LOADED=1

# Locate this lib's directory; repo root is two levels up (scripts/lib -> root).
_CONFIG_SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
_CONFIG_REPO_DIR="$(cd "$_CONFIG_SCRIPT_DIR/../.." 2>/dev/null && pwd || echo "")"

# Shipped defaults and policy live under <repo>/config/.
_DEFAULTS_FILE="${_CONFIG_REPO_DIR}/config/defaults.json"
_POLICY_FILE="${_CONFIG_REPO_DIR}/config/policy.json"
# Per-project override; first probed relative to the current working directory.
_DAEMON_CONFIG_FILE=".claude/daemon-config.json"

# Resolve daemon config relative to git root or cwd
# NOTE(review): 'local_root' is assigned at file scope, so despite the name it
# is a global that leaks into the sourcing shell — confirm nothing depends on it.
if [[ ! -f "$_DAEMON_CONFIG_FILE" ]]; then
  local_root="$(git rev-parse --show-toplevel 2>/dev/null || echo ".")"
  _DAEMON_CONFIG_FILE="${local_root}/.claude/daemon-config.json"
fi
|
|
21
|
+
|
|
22
|
+
# _config_get "section.key" [default]
# Reads config with full precedence chain:
#   SHIPWRIGHT_* env var > daemon-config.json > policy.json > defaults.json > fallback
# Always prints a value (possibly empty) and returns 0.
_config_get() {
  local dotpath="$1"
  local fallback="${2:-}"

  # 1. Env override: daemon.poll_interval -> SHIPWRIGHT_DAEMON_POLL_INTERVAL
  local env_name="SHIPWRIGHT_$(echo "$dotpath" | tr '[:lower:].' '[:upper:]_')"
  local env_val="${!env_name:-}"
  if [[ -n "$env_val" ]]; then
    echo "$env_val"
    return 0
  fi

  # Convert dotpath to jq path: "daemon.poll_interval" -> ".daemon.poll_interval"
  local jq_path=".${dotpath}"

  # 2–4. File chain, highest precedence first. The three lookups were
  # previously copy-pasted stanzas; a loop keeps the precedence in one place.
  # jq failures (missing binary, malformed JSON) read as "no value" and fall
  # through to the next source.
  local cfg_file val
  for cfg_file in "$_DAEMON_CONFIG_FILE" "$_POLICY_FILE" "$_DEFAULTS_FILE"; do
    [[ -f "$cfg_file" ]] || continue
    val=$(jq -r "${jq_path} // \"\"" "$cfg_file" 2>/dev/null || echo "")
    if [[ -n "$val" && "$val" != "null" ]]; then
      echo "$val"
      return 0
    fi
  done

  # 5. Caller-supplied fallback
  echo "$fallback"
}
|
|
72
|
+
|
|
73
|
+
# _config_get_int "section.key" [default]
# Same as _config_get but guarantees integer output: units/whitespace are
# stripped (e.g. "30s" -> "30"); values with no digits at all (which the old
# blind strip turned into an empty string, breaking arithmetic callers) fall
# back to the default, or 0 when the default itself is non-numeric.
_config_get_int() {
  local raw cleaned
  raw=$(_config_get "$1" "${2:-0}")
  cleaned="${raw//[!0-9-]/}"
  if [[ "$cleaned" =~ ^-?[0-9]+$ ]]; then
    echo "$cleaned"
  else
    local def="${2:-0}"
    [[ "$def" =~ ^-?[0-9]+$ ]] || def=0
    echo "$def"
  fi
}
|
|
81
|
+
|
|
82
|
+
# _config_get_bool "section.key" [default]
# Returns 0 (true) or 1 (false) for use in conditionals; truthy values are
# exactly: true, 1, yes, on. Everything else (including empty) is false.
_config_get_bool() {
  local flag
  flag=$(_config_get "$1" "${2:-false}")
  if [[ "$flag" == "true" || "$flag" == "1" || "$flag" == "yes" || "$flag" == "on" ]]; then
    return 0
  fi
  return 1
}
|
|
@@ -85,9 +85,9 @@ get_adaptive_heartbeat_timeout() {
|
|
|
85
85
|
|
|
86
86
|
# Stage-specific defaults (daemon-health.sh when sourced, else policy_get, else literal)
|
|
87
87
|
local default_timeout="${HEALTH_HEARTBEAT_TIMEOUT:-120}"
|
|
88
|
-
if type daemon_health_timeout_for_stage
|
|
88
|
+
if type daemon_health_timeout_for_stage >/dev/null 2>&1; then
|
|
89
89
|
default_timeout=$(daemon_health_timeout_for_stage "$stage" "$default_timeout")
|
|
90
|
-
elif type policy_get
|
|
90
|
+
elif type policy_get >/dev/null 2>&1; then
|
|
91
91
|
local policy_stage
|
|
92
92
|
policy_stage=$(policy_get ".daemon.stage_timeouts.$stage" "")
|
|
93
93
|
[[ -n "$policy_stage" && "$policy_stage" =~ ^[0-9]+$ ]] && default_timeout="$policy_stage"
|
|
@@ -385,7 +385,7 @@ daemon_assess_progress() {
|
|
|
385
385
|
' "$progress_file" > "$tmp_progress" 2>/dev/null && mv "$tmp_progress" "$progress_file"
|
|
386
386
|
|
|
387
387
|
# ── Vitals-based verdict (preferred over static thresholds) ──
|
|
388
|
-
if type pipeline_compute_vitals
|
|
388
|
+
if type pipeline_compute_vitals >/dev/null 2>&1 && type pipeline_health_verdict >/dev/null 2>&1; then
|
|
389
389
|
# Compute vitals using the worktree's pipeline state if available
|
|
390
390
|
local _worktree_state=""
|
|
391
391
|
local _worktree_artifacts=""
|