shipwright-cli 3.1.0 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/code-reviewer.md +2 -0
- package/.claude/agents/devops-engineer.md +2 -0
- package/.claude/agents/doc-fleet-agent.md +2 -0
- package/.claude/agents/pipeline-agent.md +2 -0
- package/.claude/agents/shell-script-specialist.md +2 -0
- package/.claude/agents/test-specialist.md +2 -0
- package/.claude/hooks/agent-crash-capture.sh +32 -0
- package/.claude/hooks/post-tool-use.sh +3 -2
- package/.claude/hooks/pre-tool-use.sh +35 -3
- package/README.md +22 -8
- package/claude-code/hooks/config-change.sh +18 -0
- package/claude-code/hooks/instructions-reloaded.sh +7 -0
- package/claude-code/hooks/worktree-create.sh +25 -0
- package/claude-code/hooks/worktree-remove.sh +20 -0
- package/config/code-constitution.json +130 -0
- package/config/defaults.json +25 -2
- package/config/policy.json +1 -1
- package/dashboard/middleware/auth.ts +134 -0
- package/dashboard/middleware/constants.ts +21 -0
- package/dashboard/public/index.html +8 -6
- package/dashboard/public/styles.css +176 -97
- package/dashboard/routes/auth.ts +38 -0
- package/dashboard/server.ts +117 -25
- package/dashboard/services/config.ts +26 -0
- package/dashboard/services/db.ts +118 -0
- package/dashboard/src/canvas/pixel-agent.ts +298 -0
- package/dashboard/src/canvas/pixel-sprites.ts +440 -0
- package/dashboard/src/canvas/shipyard-effects.ts +367 -0
- package/dashboard/src/canvas/shipyard-scene.ts +616 -0
- package/dashboard/src/canvas/submarine-layout.ts +267 -0
- package/dashboard/src/components/header.ts +8 -7
- package/dashboard/src/core/api.ts +5 -0
- package/dashboard/src/core/router.ts +1 -0
- package/dashboard/src/design/submarine-theme.ts +253 -0
- package/dashboard/src/main.ts +2 -0
- package/dashboard/src/types/api.ts +12 -1
- package/dashboard/src/views/activity.ts +2 -1
- package/dashboard/src/views/metrics.ts +69 -1
- package/dashboard/src/views/shipyard.ts +39 -0
- package/dashboard/types/index.ts +166 -0
- package/docs/plans/2026-02-28-compound-audit-and-shipyard-design.md +186 -0
- package/docs/plans/2026-02-28-skipper-shipwright-implementation-plan.md +1182 -0
- package/docs/plans/2026-02-28-skipper-shipwright-integration-design.md +531 -0
- package/docs/plans/2026-03-01-ai-powered-skill-injection-design.md +298 -0
- package/docs/plans/2026-03-01-ai-powered-skill-injection-plan.md +1109 -0
- package/docs/plans/2026-03-01-capabilities-cleanup-plan.md +658 -0
- package/docs/plans/2026-03-01-clean-architecture-plan.md +924 -0
- package/docs/plans/2026-03-01-compound-audit-cascade-design.md +191 -0
- package/docs/plans/2026-03-01-compound-audit-cascade-plan.md +921 -0
- package/docs/plans/2026-03-01-deep-integration-plan.md +851 -0
- package/docs/plans/2026-03-01-pipeline-audit-trail-design.md +145 -0
- package/docs/plans/2026-03-01-pipeline-audit-trail-plan.md +770 -0
- package/docs/plans/2026-03-01-refined-depths-brand-design.md +382 -0
- package/docs/plans/2026-03-01-refined-depths-implementation.md +599 -0
- package/docs/plans/2026-03-01-skipper-kernel-integration-design.md +203 -0
- package/docs/plans/2026-03-01-unified-platform-design.md +272 -0
- package/docs/plans/2026-03-07-claude-code-feature-integration-design.md +189 -0
- package/docs/plans/2026-03-07-claude-code-feature-integration-plan.md +1165 -0
- package/docs/research/BACKLOG_QUICK_REFERENCE.md +352 -0
- package/docs/research/CUTTING_EDGE_RESEARCH_2026.md +546 -0
- package/docs/research/RESEARCH_INDEX.md +439 -0
- package/docs/research/RESEARCH_SOURCES.md +440 -0
- package/docs/research/RESEARCH_SUMMARY.txt +275 -0
- package/docs/superpowers/specs/2026-03-10-pipeline-quality-revolution-design.md +341 -0
- package/package.json +2 -2
- package/scripts/lib/adaptive-model.sh +427 -0
- package/scripts/lib/adaptive-timeout.sh +316 -0
- package/scripts/lib/audit-trail.sh +309 -0
- package/scripts/lib/auto-recovery.sh +471 -0
- package/scripts/lib/bandit-selector.sh +431 -0
- package/scripts/lib/bootstrap.sh +104 -2
- package/scripts/lib/causal-graph.sh +455 -0
- package/scripts/lib/compat.sh +126 -0
- package/scripts/lib/compound-audit.sh +337 -0
- package/scripts/lib/constitutional.sh +454 -0
- package/scripts/lib/context-budget.sh +359 -0
- package/scripts/lib/convergence.sh +594 -0
- package/scripts/lib/cost-optimizer.sh +634 -0
- package/scripts/lib/daemon-adaptive.sh +14 -2
- package/scripts/lib/daemon-dispatch.sh +106 -17
- package/scripts/lib/daemon-failure.sh +34 -4
- package/scripts/lib/daemon-patrol.sh +25 -4
- package/scripts/lib/daemon-poll-github.sh +361 -0
- package/scripts/lib/daemon-poll-health.sh +299 -0
- package/scripts/lib/daemon-poll.sh +27 -611
- package/scripts/lib/daemon-state.sh +119 -66
- package/scripts/lib/daemon-triage.sh +10 -0
- package/scripts/lib/dod-scorecard.sh +442 -0
- package/scripts/lib/error-actionability.sh +300 -0
- package/scripts/lib/formal-spec.sh +461 -0
- package/scripts/lib/helpers.sh +180 -5
- package/scripts/lib/intent-analysis.sh +409 -0
- package/scripts/lib/loop-convergence.sh +350 -0
- package/scripts/lib/loop-iteration.sh +682 -0
- package/scripts/lib/loop-progress.sh +48 -0
- package/scripts/lib/loop-restart.sh +185 -0
- package/scripts/lib/memory-effectiveness.sh +506 -0
- package/scripts/lib/mutation-executor.sh +352 -0
- package/scripts/lib/outcome-feedback.sh +521 -0
- package/scripts/lib/pipeline-cli.sh +336 -0
- package/scripts/lib/pipeline-commands.sh +1216 -0
- package/scripts/lib/pipeline-detection.sh +101 -3
- package/scripts/lib/pipeline-execution.sh +897 -0
- package/scripts/lib/pipeline-github.sh +28 -3
- package/scripts/lib/pipeline-intelligence-compound.sh +431 -0
- package/scripts/lib/pipeline-intelligence-scoring.sh +407 -0
- package/scripts/lib/pipeline-intelligence-skip.sh +181 -0
- package/scripts/lib/pipeline-intelligence.sh +104 -1138
- package/scripts/lib/pipeline-quality-bash-compat.sh +182 -0
- package/scripts/lib/pipeline-quality-checks.sh +17 -711
- package/scripts/lib/pipeline-quality-gates.sh +563 -0
- package/scripts/lib/pipeline-stages-build.sh +730 -0
- package/scripts/lib/pipeline-stages-delivery.sh +965 -0
- package/scripts/lib/pipeline-stages-intake.sh +1133 -0
- package/scripts/lib/pipeline-stages-monitor.sh +407 -0
- package/scripts/lib/pipeline-stages-review.sh +1022 -0
- package/scripts/lib/pipeline-stages.sh +161 -2901
- package/scripts/lib/pipeline-state.sh +36 -5
- package/scripts/lib/pipeline-util.sh +487 -0
- package/scripts/lib/policy-learner.sh +438 -0
- package/scripts/lib/process-reward.sh +493 -0
- package/scripts/lib/project-detect.sh +649 -0
- package/scripts/lib/quality-profile.sh +334 -0
- package/scripts/lib/recruit-commands.sh +885 -0
- package/scripts/lib/recruit-learning.sh +739 -0
- package/scripts/lib/recruit-roles.sh +648 -0
- package/scripts/lib/reward-aggregator.sh +458 -0
- package/scripts/lib/rl-optimizer.sh +362 -0
- package/scripts/lib/root-cause.sh +427 -0
- package/scripts/lib/scope-enforcement.sh +445 -0
- package/scripts/lib/session-restart.sh +493 -0
- package/scripts/lib/skill-memory.sh +300 -0
- package/scripts/lib/skill-registry.sh +775 -0
- package/scripts/lib/spec-driven.sh +476 -0
- package/scripts/lib/test-helpers.sh +18 -7
- package/scripts/lib/test-holdout.sh +429 -0
- package/scripts/lib/test-optimizer.sh +511 -0
- package/scripts/shipwright-file-suggest.sh +45 -0
- package/scripts/skills/adversarial-quality.md +61 -0
- package/scripts/skills/api-design.md +44 -0
- package/scripts/skills/architecture-design.md +50 -0
- package/scripts/skills/brainstorming.md +43 -0
- package/scripts/skills/data-pipeline.md +44 -0
- package/scripts/skills/deploy-safety.md +64 -0
- package/scripts/skills/documentation.md +38 -0
- package/scripts/skills/frontend-design.md +45 -0
- package/scripts/skills/generated/.gitkeep +0 -0
- package/scripts/skills/generated/_refinements/.gitkeep +0 -0
- package/scripts/skills/generated/_refinements/adversarial-quality.patch.md +3 -0
- package/scripts/skills/generated/_refinements/architecture-design.patch.md +3 -0
- package/scripts/skills/generated/_refinements/brainstorming.patch.md +3 -0
- package/scripts/skills/generated/cli-version-management.md +29 -0
- package/scripts/skills/generated/collection-system-validation.md +99 -0
- package/scripts/skills/generated/large-scale-c-refactoring-coordination.md +97 -0
- package/scripts/skills/generated/pattern-matching-similarity-scoring.md +195 -0
- package/scripts/skills/generated/test-parallelization-detection.md +65 -0
- package/scripts/skills/observability.md +79 -0
- package/scripts/skills/performance.md +48 -0
- package/scripts/skills/pr-quality.md +49 -0
- package/scripts/skills/product-thinking.md +43 -0
- package/scripts/skills/security-audit.md +49 -0
- package/scripts/skills/systematic-debugging.md +40 -0
- package/scripts/skills/testing-strategy.md +47 -0
- package/scripts/skills/two-stage-review.md +52 -0
- package/scripts/skills/validation-thoroughness.md +55 -0
- package/scripts/sw +9 -3
- package/scripts/sw-activity.sh +9 -8
- package/scripts/sw-adaptive.sh +8 -7
- package/scripts/sw-adversarial.sh +2 -1
- package/scripts/sw-architecture-enforcer.sh +3 -1
- package/scripts/sw-auth.sh +12 -2
- package/scripts/sw-autonomous.sh +5 -1
- package/scripts/sw-changelog.sh +4 -1
- package/scripts/sw-checkpoint.sh +2 -1
- package/scripts/sw-ci.sh +15 -6
- package/scripts/sw-cleanup.sh +4 -26
- package/scripts/sw-code-review.sh +45 -20
- package/scripts/sw-connect.sh +2 -1
- package/scripts/sw-context.sh +2 -1
- package/scripts/sw-cost.sh +107 -5
- package/scripts/sw-daemon.sh +71 -11
- package/scripts/sw-dashboard.sh +3 -1
- package/scripts/sw-db.sh +71 -20
- package/scripts/sw-decide.sh +8 -2
- package/scripts/sw-decompose.sh +360 -17
- package/scripts/sw-deps.sh +4 -1
- package/scripts/sw-developer-simulation.sh +4 -1
- package/scripts/sw-discovery.sh +378 -5
- package/scripts/sw-doc-fleet.sh +4 -1
- package/scripts/sw-docs-agent.sh +3 -1
- package/scripts/sw-docs.sh +2 -1
- package/scripts/sw-doctor.sh +453 -2
- package/scripts/sw-dora.sh +4 -1
- package/scripts/sw-durable.sh +12 -7
- package/scripts/sw-e2e-orchestrator.sh +17 -16
- package/scripts/sw-eventbus.sh +13 -4
- package/scripts/sw-evidence.sh +364 -12
- package/scripts/sw-feedback.sh +550 -9
- package/scripts/sw-fix.sh +20 -1
- package/scripts/sw-fleet-discover.sh +6 -2
- package/scripts/sw-fleet-viz.sh +9 -4
- package/scripts/sw-fleet.sh +5 -1
- package/scripts/sw-github-app.sh +18 -4
- package/scripts/sw-github-checks.sh +3 -2
- package/scripts/sw-github-deploy.sh +3 -2
- package/scripts/sw-github-graphql.sh +18 -7
- package/scripts/sw-guild.sh +5 -1
- package/scripts/sw-heartbeat.sh +5 -30
- package/scripts/sw-hello.sh +67 -0
- package/scripts/sw-hygiene.sh +10 -3
- package/scripts/sw-incident.sh +273 -5
- package/scripts/sw-init.sh +18 -2
- package/scripts/sw-instrument.sh +10 -2
- package/scripts/sw-intelligence.sh +44 -7
- package/scripts/sw-jira.sh +5 -1
- package/scripts/sw-launchd.sh +2 -1
- package/scripts/sw-linear.sh +4 -1
- package/scripts/sw-logs.sh +4 -1
- package/scripts/sw-loop.sh +436 -1076
- package/scripts/sw-memory.sh +357 -3
- package/scripts/sw-mission-control.sh +6 -1
- package/scripts/sw-model-router.sh +483 -27
- package/scripts/sw-otel.sh +15 -4
- package/scripts/sw-oversight.sh +14 -5
- package/scripts/sw-patrol-meta.sh +334 -0
- package/scripts/sw-pipeline-composer.sh +7 -1
- package/scripts/sw-pipeline-vitals.sh +12 -6
- package/scripts/sw-pipeline.sh +54 -2653
- package/scripts/sw-pm.sh +16 -8
- package/scripts/sw-pr-lifecycle.sh +2 -1
- package/scripts/sw-predictive.sh +17 -5
- package/scripts/sw-prep.sh +185 -2
- package/scripts/sw-ps.sh +5 -25
- package/scripts/sw-public-dashboard.sh +17 -4
- package/scripts/sw-quality.sh +14 -6
- package/scripts/sw-reaper.sh +8 -25
- package/scripts/sw-recruit.sh +156 -2303
- package/scripts/sw-regression.sh +19 -12
- package/scripts/sw-release-manager.sh +3 -1
- package/scripts/sw-release.sh +4 -1
- package/scripts/sw-remote.sh +3 -1
- package/scripts/sw-replay.sh +7 -1
- package/scripts/sw-retro.sh +158 -1
- package/scripts/sw-review-rerun.sh +3 -1
- package/scripts/sw-scale.sh +14 -5
- package/scripts/sw-security-audit.sh +6 -1
- package/scripts/sw-self-optimize.sh +173 -6
- package/scripts/sw-session.sh +9 -3
- package/scripts/sw-setup.sh +3 -1
- package/scripts/sw-stall-detector.sh +406 -0
- package/scripts/sw-standup.sh +15 -7
- package/scripts/sw-status.sh +3 -1
- package/scripts/sw-strategic.sh +14 -6
- package/scripts/sw-stream.sh +13 -4
- package/scripts/sw-swarm.sh +20 -7
- package/scripts/sw-team-stages.sh +13 -6
- package/scripts/sw-templates.sh +7 -31
- package/scripts/sw-testgen.sh +17 -6
- package/scripts/sw-tmux-pipeline.sh +4 -1
- package/scripts/sw-tmux-role-color.sh +2 -0
- package/scripts/sw-tmux-status.sh +1 -1
- package/scripts/sw-tmux.sh +37 -1
- package/scripts/sw-trace.sh +3 -1
- package/scripts/sw-tracker-github.sh +3 -0
- package/scripts/sw-tracker-jira.sh +3 -0
- package/scripts/sw-tracker-linear.sh +3 -0
- package/scripts/sw-tracker.sh +3 -1
- package/scripts/sw-triage.sh +3 -2
- package/scripts/sw-upgrade.sh +3 -1
- package/scripts/sw-ux.sh +5 -2
- package/scripts/sw-webhook.sh +5 -2
- package/scripts/sw-widgets.sh +9 -4
- package/scripts/sw-worktree.sh +15 -3
- package/scripts/test-skill-injection.sh +1233 -0
- package/templates/pipelines/autonomous.json +27 -3
- package/templates/pipelines/cost-aware.json +34 -8
- package/templates/pipelines/deployed.json +12 -0
- package/templates/pipelines/enterprise.json +12 -0
- package/templates/pipelines/fast.json +6 -0
- package/templates/pipelines/full.json +27 -3
- package/templates/pipelines/hotfix.json +6 -0
- package/templates/pipelines/standard.json +12 -0
- package/templates/pipelines/tdd.json +12 -0
|
@@ -3,6 +3,16 @@
|
|
|
3
3
|
[[ -n "${_PIPELINE_STATE_LOADED:-}" ]] && return 0
|
|
4
4
|
_PIPELINE_STATE_LOADED=1
|
|
5
5
|
|
|
6
|
+
# Defaults for variables normally set by sw-pipeline.sh (safe under set -u).
|
|
7
|
+
ARTIFACTS_DIR="${ARTIFACTS_DIR:-.claude/pipeline-artifacts}"
|
|
8
|
+
STAGE_STATUSES="${STAGE_STATUSES:-}"
|
|
9
|
+
STAGE_TIMINGS="${STAGE_TIMINGS:-}"
|
|
10
|
+
LOG_ENTRIES="${LOG_ENTRIES:-}"
|
|
11
|
+
ISSUE_NUMBER="${ISSUE_NUMBER:-}"
|
|
12
|
+
GOAL="${GOAL:-}"
|
|
13
|
+
PIPELINE_NAME="${PIPELINE_NAME:-pipeline}"
|
|
14
|
+
PIPELINE_STATUS="${PIPELINE_STATUS:-pending}"
|
|
15
|
+
|
|
6
16
|
save_artifact() {
|
|
7
17
|
local name="$1" content="$2"
|
|
8
18
|
mkdir -p "$ARTIFACTS_DIR" 2>/dev/null || true
|
|
@@ -143,8 +153,8 @@ build_stage_progress() {
|
|
|
143
153
|
stages=$(jq -c '.stages[]' "$PIPELINE_CONFIG" 2>/dev/null) || return 0
|
|
144
154
|
while IFS= read -r -u 3 stage; do
|
|
145
155
|
local id enabled
|
|
146
|
-
id=$(echo "$stage" | jq -r '.id')
|
|
147
|
-
enabled=$(echo "$stage" | jq -r '.enabled')
|
|
156
|
+
id=$(echo "$stage" | jq -r '.id' 2>/dev/null) || id=""
|
|
157
|
+
enabled=$(echo "$stage" | jq -r '.enabled' 2>/dev/null) || enabled="false"
|
|
148
158
|
[[ "$enabled" != "true" ]] && continue
|
|
149
159
|
local sstatus
|
|
150
160
|
sstatus=$(get_stage_status "$id")
|
|
@@ -183,6 +193,14 @@ mark_stage_complete() {
|
|
|
183
193
|
_stage_secs=$(get_stage_timing_seconds "$stage_id")
|
|
184
194
|
record_stage "${SHIPWRIGHT_PIPELINE_ID:-}" "$stage_id" "complete" "${_stage_secs:-0}" "" 2>/dev/null || true
|
|
185
195
|
fi
|
|
196
|
+
|
|
197
|
+
# Record skill outcome for learning system
|
|
198
|
+
if type skill_memory_record >/dev/null 2>&1; then
|
|
199
|
+
local _used_skills
|
|
200
|
+
_used_skills=$(skill_get_prompts "${INTELLIGENCE_ISSUE_TYPE:-backend}" "$stage_id" 2>/dev/null | xargs -I{} basename {} .md | tr '\n' ',' | sed 's/,$//')
|
|
201
|
+
[[ -n "$_used_skills" ]] && skill_memory_record "${INTELLIGENCE_ISSUE_TYPE:-backend}" "$stage_id" "$_used_skills" "success" "1" 2>/dev/null || true
|
|
202
|
+
fi
|
|
203
|
+
|
|
186
204
|
# Update memory baselines and predictive baselines for stage durations
|
|
187
205
|
if [[ "$stage_id" == "test" || "$stage_id" == "build" ]]; then
|
|
188
206
|
local secs
|
|
@@ -325,10 +343,10 @@ verify_stage_artifacts() {
|
|
|
325
343
|
STAGE_EFFECTIVENESS_FILE="${HOME}/.shipwright/stage-effectiveness.jsonl"
|
|
326
344
|
record_stage_effectiveness() {
|
|
327
345
|
local stage_id="$1" outcome="${2:-failed}"
|
|
328
|
-
mkdir -p "${HOME}/.shipwright"
|
|
329
|
-
echo "{\"stage\":\"$stage_id\",\"outcome\":\"$outcome\",\"ts\":\"$(now_iso)\"}" >> "${STAGE_EFFECTIVENESS_FILE}"
|
|
346
|
+
mkdir -p "${HOME}/.shipwright" 2>/dev/null || true
|
|
347
|
+
{ echo "{\"stage\":\"$stage_id\",\"outcome\":\"$outcome\",\"ts\":\"$(now_iso)\"}" >> "${STAGE_EFFECTIVENESS_FILE}"; } 2>/dev/null || true
|
|
330
348
|
# Keep last 100 entries
|
|
331
|
-
tail -100 "${STAGE_EFFECTIVENESS_FILE}" > "${STAGE_EFFECTIVENESS_FILE}.tmp"
|
|
349
|
+
{ tail -100 "${STAGE_EFFECTIVENESS_FILE}" > "${STAGE_EFFECTIVENESS_FILE}.tmp" && mv "${STAGE_EFFECTIVENESS_FILE}.tmp" "${STAGE_EFFECTIVENESS_FILE}"; } 2>/dev/null || true
|
|
332
350
|
}
|
|
333
351
|
get_stage_self_awareness_hint() {
|
|
334
352
|
local stage_id="$1"
|
|
@@ -368,6 +386,13 @@ mark_stage_failed() {
|
|
|
368
386
|
record_stage "${SHIPWRIGHT_PIPELINE_ID:-}" "$stage_id" "failed" "${_stage_secs:-0}" "" 2>/dev/null || true
|
|
369
387
|
fi
|
|
370
388
|
|
|
389
|
+
# Record skill failure for learning system
|
|
390
|
+
if type skill_memory_record >/dev/null 2>&1; then
|
|
391
|
+
local _used_skills
|
|
392
|
+
_used_skills=$(skill_get_prompts "${INTELLIGENCE_ISSUE_TYPE:-backend}" "$stage_id" 2>/dev/null | xargs -I{} basename {} .md | tr '\n' ',' | sed 's/,$//')
|
|
393
|
+
[[ -n "$_used_skills" ]] && skill_memory_record "${INTELLIGENCE_ISSUE_TYPE:-backend}" "$stage_id" "$_used_skills" "failure" "1" 2>/dev/null || true
|
|
394
|
+
fi
|
|
395
|
+
|
|
371
396
|
# Update GitHub progress + comment failure
|
|
372
397
|
if [[ -n "$ISSUE_NUMBER" ]]; then
|
|
373
398
|
local body
|
|
@@ -439,6 +464,12 @@ initialize_state() {
|
|
|
439
464
|
write_state() {
|
|
440
465
|
[[ -z "${STATE_FILE:-}" || -z "${ARTIFACTS_DIR:-}" ]] && return 0
|
|
441
466
|
mkdir -p "$(dirname "$STATE_FILE")" 2>/dev/null || true
|
|
467
|
+
|
|
468
|
+
# Check disk space before write (100MB minimum)
|
|
469
|
+
if ! check_disk_space "$(dirname "$STATE_FILE")" 100; then
|
|
470
|
+
error "Cannot write state: insufficient disk space"
|
|
471
|
+
return 1
|
|
472
|
+
fi
|
|
442
473
|
local stages_yaml=""
|
|
443
474
|
while IFS=: read -r sid sstatus; do
|
|
444
475
|
[[ -z "$sid" ]] && continue
|
|
@@ -0,0 +1,487 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# Module: pipeline-util
|
|
3
|
+
# Utility functions: coverage parsing, cost estimation, notifications, error classification
|
|
4
|
+
set -euo pipefail
|
|
5
|
+
|
|
6
|
+
# Module guard
|
|
7
|
+
[[ -n "${_MODULE_PIPELINE_UTIL_LOADED:-}" ]] && return 0; _MODULE_PIPELINE_UTIL_LOADED=1
|
|
8
|
+
|
|
9
|
+
# ─── Defaults (needed if sourced independently) ──────────────────────────────
|
|
10
|
+
SCRIPT_DIR="${SCRIPT_DIR:-$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)}"
|
|
11
|
+
REPO_DIR="${REPO_DIR:-$(cd "$SCRIPT_DIR/.." && pwd)}"
|
|
12
|
+
PROJECT_ROOT="${PROJECT_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}"
|
|
13
|
+
STATE_DIR="${STATE_DIR:-$PROJECT_ROOT/.claude}"
|
|
14
|
+
STATE_FILE="${STATE_FILE:-$STATE_DIR/pipeline-state.md}"
|
|
15
|
+
ARTIFACTS_DIR="${ARTIFACTS_DIR:-$STATE_DIR/pipeline-artifacts}"
|
|
16
|
+
EVENTS_FILE="${EVENTS_FILE:-$HOME/.shipwright/events.jsonl}"
|
|
17
|
+
|
|
18
|
+
# Variables referenced by util functions (set by sw-pipeline.sh, defaults here for safety)
|
|
19
|
+
HEARTBEAT_PID="${HEARTBEAT_PID:-}"
|
|
20
|
+
PIPELINE_STATUS="${PIPELINE_STATUS:-}"
|
|
21
|
+
STASHED_CHANGES="${STASHED_CHANGES:-false}"
|
|
22
|
+
SLACK_WEBHOOK="${SLACK_WEBHOOK:-}"
|
|
23
|
+
CURRENT_STAGE_ID="${CURRENT_STAGE_ID:-}"
|
|
24
|
+
|
|
25
|
+
# Ensure helpers are loaded
|
|
26
|
+
[[ -f "$SCRIPT_DIR/lib/helpers.sh" ]] && source "$SCRIPT_DIR/lib/helpers.sh" 2>/dev/null || true
|
|
27
|
+
[[ "$(type -t info 2>/dev/null)" == "function" ]] || info() { echo "$*"; }
|
|
28
|
+
[[ "$(type -t warn 2>/dev/null)" == "function" ]] || warn() { echo "$*"; }
|
|
29
|
+
[[ "$(type -t error 2>/dev/null)" == "function" ]] || error() { echo "$*" >&2; }
|
|
30
|
+
[[ "$(type -t emit_event 2>/dev/null)" == "function" ]] || emit_event() { true; }
|
|
31
|
+
[[ "$(type -t now_epoch 2>/dev/null)" == "function" ]] || now_epoch() { date +%s; }
|
|
32
|
+
|
|
33
|
+
# ─── Coverage Parsing ──────────────────────────────────────────────
|
|
34
|
+
parse_coverage_from_output() {
|
|
35
|
+
local log_file="$1"
|
|
36
|
+
[[ ! -f "$log_file" ]] && return
|
|
37
|
+
local cov=""
|
|
38
|
+
# Jest/Istanbul: "Statements : 85.5%"
|
|
39
|
+
cov=$(grep -oE 'Statements\s*:\s*[0-9.]+' "$log_file" 2>/dev/null | grep -oE '[0-9.]+$' || true)
|
|
40
|
+
# Istanbul table: "All files | 85.5"
|
|
41
|
+
[[ -z "$cov" ]] && cov=$(grep -oE 'All files\s*\|\s*[0-9.]+' "$log_file" 2>/dev/null | grep -oE '[0-9.]+$' || true)
|
|
42
|
+
# pytest-cov: "TOTAL 500 75 85%"
|
|
43
|
+
[[ -z "$cov" ]] && cov=$(grep -oE 'TOTAL\s+[0-9]+\s+[0-9]+\s+[0-9]+%' "$log_file" 2>/dev/null | grep -oE '[0-9]+%' | tr -d '%' | tail -1 || true)
|
|
44
|
+
# Vitest: "All files | 85.5 |"
|
|
45
|
+
[[ -z "$cov" ]] && cov=$(grep -oE 'All files\s*\|\s*[0-9.]+\s*\|' "$log_file" 2>/dev/null | grep -oE '[0-9.]+' | head -1 || true)
|
|
46
|
+
# Go coverage: "coverage: 85.5% of statements"
|
|
47
|
+
[[ -z "$cov" ]] && cov=$(grep -oE 'coverage:\s*[0-9.]+%' "$log_file" 2>/dev/null | grep -oE '[0-9.]+' | tail -1 || true)
|
|
48
|
+
# Cargo tarpaulin: "85.50% coverage"
|
|
49
|
+
[[ -z "$cov" ]] && cov=$(grep -oE '[0-9.]+%\s*coverage' "$log_file" 2>/dev/null | grep -oE '[0-9.]+' | head -1 || true)
|
|
50
|
+
# Generic: "Coverage: 85.5%"
|
|
51
|
+
[[ -z "$cov" ]] && cov=$(grep -oiE 'coverage:?\s*[0-9.]+%' "$log_file" 2>/dev/null | grep -oE '[0-9.]+' | tail -1 || true)
|
|
52
|
+
echo "$cov"
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
# ─── Duration Formatting ───────────────────────────────────────────
|
|
56
|
+
format_duration() {
|
|
57
|
+
local secs="$1"
|
|
58
|
+
if [[ "$secs" -ge 3600 ]]; then
|
|
59
|
+
printf "%dh %dm %ds" $((secs/3600)) $((secs%3600/60)) $((secs%60))
|
|
60
|
+
elif [[ "$secs" -ge 60 ]]; then
|
|
61
|
+
printf "%dm %ds" $((secs/60)) $((secs%60))
|
|
62
|
+
else
|
|
63
|
+
printf "%ds" "$secs"
|
|
64
|
+
fi
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
# ─── Event Log Rotation ────────────────────────────────────────────
|
|
68
|
+
rotate_event_log_if_needed() {
|
|
69
|
+
local events_file="${EVENTS_FILE:-$HOME/.shipwright/events.jsonl}"
|
|
70
|
+
local max_lines=10000
|
|
71
|
+
[[ ! -f "$events_file" ]] && return
|
|
72
|
+
local lines
|
|
73
|
+
lines=$(wc -l < "$events_file" 2>/dev/null || true)
|
|
74
|
+
lines="${lines:-0}"
|
|
75
|
+
if [[ "$lines" -gt "$max_lines" ]]; then
|
|
76
|
+
local tmp="${events_file}.rotating"
|
|
77
|
+
if tail -5000 "$events_file" > "$tmp" 2>/dev/null && mv "$tmp" "$events_file" 2>/dev/null; then
|
|
78
|
+
info "Rotated events.jsonl: ${lines} -> 5000 lines"
|
|
79
|
+
fi
|
|
80
|
+
fi
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
# ─── Goal Compaction for Context ───────────────────────────────────
|
|
84
|
+
_pipeline_compact_goal() {
|
|
85
|
+
local goal="$1"
|
|
86
|
+
local plan_file="${2:-}"
|
|
87
|
+
local design_file="${3:-}"
|
|
88
|
+
local compact="$goal"
|
|
89
|
+
|
|
90
|
+
# Include plan summary (first 20 lines only)
|
|
91
|
+
if [[ -n "$plan_file" && -f "$plan_file" ]]; then
|
|
92
|
+
compact="${compact}
|
|
93
|
+
|
|
94
|
+
## Plan Summary
|
|
95
|
+
$(head -20 "$plan_file" 2>/dev/null || true)
|
|
96
|
+
[... full plan in .claude/pipeline-artifacts/plan.md]"
|
|
97
|
+
fi
|
|
98
|
+
|
|
99
|
+
# Include design key decisions only (grep for headers)
|
|
100
|
+
if [[ -n "$design_file" && -f "$design_file" ]]; then
|
|
101
|
+
compact="${compact}
|
|
102
|
+
|
|
103
|
+
## Key Design Decisions
|
|
104
|
+
$(grep -E '^#{1,3} ' "$design_file" 2>/dev/null | head -10 || true)
|
|
105
|
+
[... full design in .claude/pipeline-artifacts/design.md]"
|
|
106
|
+
fi
|
|
107
|
+
|
|
108
|
+
echo "$compact"
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
# ─── Token & Cost Parsing ──────────────────────────────────────────
|
|
112
|
+
parse_claude_tokens() {
|
|
113
|
+
local log_file="$1"
|
|
114
|
+
local input_tok output_tok
|
|
115
|
+
input_tok=$(grep -oE 'input[_ ]tokens?[: ]+[0-9,]+' "$log_file" 2>/dev/null | tail -1 | grep -oE '[0-9,]+' | tr -d ',' || echo "0")
|
|
116
|
+
output_tok=$(grep -oE 'output[_ ]tokens?[: ]+[0-9,]+' "$log_file" 2>/dev/null | tail -1 | grep -oE '[0-9,]+' | tr -d ',' || echo "0")
|
|
117
|
+
|
|
118
|
+
TOTAL_INPUT_TOKENS=$(( TOTAL_INPUT_TOKENS + ${input_tok:-0} ))
|
|
119
|
+
TOTAL_OUTPUT_TOKENS=$(( TOTAL_OUTPUT_TOKENS + ${output_tok:-0} ))
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
# Estimate pipeline cost using historical averages from completed pipelines.
|
|
123
|
+
# Falls back to per-stage estimates when no history exists.
|
|
124
|
+
estimate_pipeline_cost() {
|
|
125
|
+
local stages="$1"
|
|
126
|
+
local stage_count
|
|
127
|
+
stage_count=$(echo "$stages" | jq 'length' 2>/dev/null || echo "6")
|
|
128
|
+
[[ ! "$stage_count" =~ ^[0-9]+$ ]] && stage_count=6
|
|
129
|
+
|
|
130
|
+
local events_file="${EVENTS_FILE:-$HOME/.shipwright/events.jsonl}"
|
|
131
|
+
local avg_input=0 avg_output=0
|
|
132
|
+
if [[ -f "$events_file" ]]; then
|
|
133
|
+
local hist
|
|
134
|
+
hist=$(grep '"type":"pipeline.completed"' "$events_file" 2>/dev/null | tail -10)
|
|
135
|
+
if [[ -n "$hist" ]]; then
|
|
136
|
+
avg_input=$(echo "$hist" | jq -s -r '[.[] | .input_tokens // 0 | tonumber] | if length > 0 then (add / length | floor | tostring) else "0" end' 2>/dev/null | head -1)
|
|
137
|
+
avg_output=$(echo "$hist" | jq -s -r '[.[] | .output_tokens // 0 | tonumber] | if length > 0 then (add / length | floor | tostring) else "0" end' 2>/dev/null | head -1)
|
|
138
|
+
fi
|
|
139
|
+
fi
|
|
140
|
+
[[ ! "$avg_input" =~ ^[0-9]+$ ]] && avg_input=0
|
|
141
|
+
[[ ! "$avg_output" =~ ^[0-9]+$ ]] && avg_output=0
|
|
142
|
+
|
|
143
|
+
# Fall back to reasonable per-stage estimates only if no history
|
|
144
|
+
if [[ "$avg_input" -eq 0 ]]; then
|
|
145
|
+
avg_input=$(( stage_count * 8000 )) # More realistic: ~8K input per stage
|
|
146
|
+
avg_output=$(( stage_count * 4000 )) # ~4K output per stage
|
|
147
|
+
fi
|
|
148
|
+
|
|
149
|
+
echo "{\"input_tokens\":${avg_input},\"output_tokens\":${avg_output}}"
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
# ─── Heartbeat Management ──────────────────────────────────────────
|
|
153
|
+
start_heartbeat() {
|
|
154
|
+
local job_id="${PIPELINE_NAME:-pipeline-$$}"
|
|
155
|
+
(
|
|
156
|
+
while true; do
|
|
157
|
+
"$SCRIPT_DIR/sw-heartbeat.sh" write "$job_id" \
|
|
158
|
+
--pid $$ \
|
|
159
|
+
--issue "${ISSUE_NUMBER:-0}" \
|
|
160
|
+
--stage "${CURRENT_STAGE_ID:-unknown}" \
|
|
161
|
+
--iteration "0" \
|
|
162
|
+
--activity "$(get_stage_description "${CURRENT_STAGE_ID:-}" 2>/dev/null || echo "Running pipeline")" 2>/dev/null || true
|
|
163
|
+
sleep "$(_config_get_int "pipeline.heartbeat_interval" 30 2>/dev/null || echo 30)"
|
|
164
|
+
done
|
|
165
|
+
) >/dev/null 2>&1 &
|
|
166
|
+
HEARTBEAT_PID=$!
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
stop_heartbeat() {
|
|
170
|
+
if [[ -n "${HEARTBEAT_PID:-}" ]]; then
|
|
171
|
+
kill "$HEARTBEAT_PID" 2>/dev/null || true
|
|
172
|
+
wait "$HEARTBEAT_PID" 2>/dev/null || true
|
|
173
|
+
"$SCRIPT_DIR/sw-heartbeat.sh" clear "${PIPELINE_NAME:-pipeline-$$}" 2>/dev/null || true
|
|
174
|
+
HEARTBEAT_PID=""
|
|
175
|
+
fi
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
# ─── CI Integration ────────────────────────────────────────────────
|
|
179
|
+
ci_push_partial_work() {
|
|
180
|
+
[[ "${CI_MODE:-false}" != "true" ]] && return 0
|
|
181
|
+
[[ -z "${ISSUE_NUMBER:-}" ]] && return 0
|
|
182
|
+
|
|
183
|
+
local branch="shipwright/issue-${ISSUE_NUMBER}"
|
|
184
|
+
|
|
185
|
+
# Only push if we have uncommitted changes
|
|
186
|
+
if ! git diff --quiet 2>/dev/null || ! git diff --cached --quiet 2>/dev/null; then
|
|
187
|
+
git add -A 2>/dev/null || true
|
|
188
|
+
git commit -m "WIP: partial pipeline progress for #${ISSUE_NUMBER}" --no-verify 2>/dev/null || true
|
|
189
|
+
fi
|
|
190
|
+
|
|
191
|
+
# Push branch (create if needed, force to overwrite previous WIP)
|
|
192
|
+
if ! git push origin "HEAD:refs/heads/$branch" --force 2>/dev/null; then
|
|
193
|
+
warn "git push failed for $branch — remote may be out of sync"
|
|
194
|
+
emit_event "pipeline.push_failed" "branch=$branch"
|
|
195
|
+
fi
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
ci_post_stage_event() {
|
|
199
|
+
[[ "${CI_MODE:-false}" != "true" ]] && return 0
|
|
200
|
+
[[ -z "${ISSUE_NUMBER:-}" ]] && return 0
|
|
201
|
+
[[ "${GH_AVAILABLE:-false}" != "true" ]] && return 0
|
|
202
|
+
|
|
203
|
+
local stage="$1" status="$2" elapsed="${3:-0s}"
|
|
204
|
+
local comment="<!-- SHIPWRIGHT-STAGE: ${stage}:${status}:${elapsed} -->"
|
|
205
|
+
_timeout "$(_config_get_int "network.gh_timeout" 30 2>/dev/null || echo 30)" gh issue comment "$ISSUE_NUMBER" --body "$comment" 2>/dev/null || true
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
# ─── Cleanup on Exit ───────────────────────────────────────────────
|
|
209
|
+
cleanup_on_exit() {
|
|
210
|
+
[[ "${_cleanup_done:-}" == "true" ]] && return 0
|
|
211
|
+
_cleanup_done=true
|
|
212
|
+
local exit_code=$?
|
|
213
|
+
|
|
214
|
+
# Stop heartbeat writer
|
|
215
|
+
stop_heartbeat
|
|
216
|
+
|
|
217
|
+
# Save state if we were running
|
|
218
|
+
if [[ "$PIPELINE_STATUS" == "running" && -n "$STATE_FILE" ]]; then
|
|
219
|
+
PIPELINE_STATUS="interrupted"
|
|
220
|
+
UPDATED_AT="$(now_iso)"
|
|
221
|
+
write_state 2>/dev/null || true
|
|
222
|
+
echo ""
|
|
223
|
+
warn "Pipeline interrupted — state saved."
|
|
224
|
+
echo -e " Resume: ${DIM}shipwright pipeline resume${RESET}"
|
|
225
|
+
|
|
226
|
+
# Push partial work in CI mode so retries can pick it up
|
|
227
|
+
ci_push_partial_work
|
|
228
|
+
fi
|
|
229
|
+
|
|
230
|
+
# Restore stashed changes
|
|
231
|
+
if [[ "$STASHED_CHANGES" == "true" ]]; then
|
|
232
|
+
git stash pop --quiet 2>/dev/null || true
|
|
233
|
+
fi
|
|
234
|
+
|
|
235
|
+
# Release durable pipeline lock
|
|
236
|
+
if [[ -n "${_PIPELINE_LOCK_ID:-}" ]] && type release_lock >/dev/null 2>&1; then
|
|
237
|
+
release_lock "$_PIPELINE_LOCK_ID" 2>/dev/null || true
|
|
238
|
+
fi
|
|
239
|
+
|
|
240
|
+
# Cancel lingering in_progress GitHub Check Runs
|
|
241
|
+
pipeline_cancel_check_runs 2>/dev/null || true
|
|
242
|
+
|
|
243
|
+
# Update GitHub
|
|
244
|
+
if [[ -n "${ISSUE_NUMBER:-}" && "${GH_AVAILABLE:-false}" == "true" ]]; then
|
|
245
|
+
if ! _timeout "$(_config_get_int "network.gh_timeout" 30 2>/dev/null || echo 30)" gh issue comment "$ISSUE_NUMBER" --body "⏸️ **Pipeline interrupted** at stage: ${CURRENT_STAGE_ID:-unknown}" 2>/dev/null; then
|
|
246
|
+
warn "gh issue comment failed — status update may not have been posted"
|
|
247
|
+
emit_event "pipeline.comment_failed" "issue=$ISSUE_NUMBER"
|
|
248
|
+
fi
|
|
249
|
+
fi
|
|
250
|
+
|
|
251
|
+
exit "$exit_code"
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
# ─── Preflight Checks ──────────────────────────────────────────────
# Verify the environment before running the pipeline: required/optional
# tools, git state (repo present, dirty tree, base branch), GitHub auth,
# the Claude CLI, the sw-loop helper, and free disk space.
# Globals read:  SKIP_GATES, BASE_BRANCH, NO_GITHUB, SCRIPT_DIR,
#                PROJECT_ROOT, color vars (GREEN/RED/YELLOW/DIM/PURPLE/BOLD/RESET)
# Globals set:   STASHED_CHANGES=true after a successful auto-stash
# Returns: 0 when no hard errors were found, 1 otherwise.
preflight_checks() {
  local errors=0
  local tool   # declared local so the loops below don't leak into global scope

  echo -e "${PURPLE}${BOLD}━━━ Pre-flight Checks ━━━${RESET}"
  echo ""

  # 1. Required tools — a missing one is a hard error; optional tools only
  #    disable features.
  local required_tools=("git" "jq")
  local optional_tools=("gh" "claude" "bc" "curl")

  for tool in "${required_tools[@]}"; do
    if command -v "$tool" >/dev/null 2>&1; then
      echo -e "  ${GREEN}✓${RESET} $tool"
    else
      echo -e "  ${RED}✗${RESET} $tool ${RED}(required)${RESET}"
      errors=$((errors + 1))
    fi
  done

  for tool in "${optional_tools[@]}"; do
    if command -v "$tool" >/dev/null 2>&1; then
      echo -e "  ${GREEN}✓${RESET} $tool"
    else
      echo -e "  ${DIM}○${RESET} $tool ${DIM}(optional — some features disabled)${RESET}"
    fi
  done

  # 2. Git state
  echo ""
  if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
    echo -e "  ${GREEN}✓${RESET} Inside git repo"
  else
    echo -e "  ${RED}✗${RESET} Not inside a git repository"
    errors=$((errors + 1))
  fi

  # Check for uncommitted changes — offer to stash
  local dirty_files
  dirty_files=$(git status --porcelain 2>/dev/null | wc -l | xargs)
  if [[ "$dirty_files" -gt 0 ]]; then
    echo -e "  ${YELLOW}⚠${RESET} $dirty_files uncommitted change(s)"
    if [[ "$SKIP_GATES" == "true" ]]; then
      info "Auto-stashing uncommitted changes..."
      git stash push -m "sw-pipeline: auto-stash before pipeline" --quiet 2>/dev/null && STASHED_CHANGES=true
      if [[ "$STASHED_CHANGES" == "true" ]]; then
        echo -e "  ${GREEN}✓${RESET} Changes stashed (will restore on exit)"
      fi
    else
      echo -e "  ${DIM}Tip: Use --skip-gates to auto-stash, or commit/stash manually${RESET}"
    fi
  else
    echo -e "  ${GREEN}✓${RESET} Working tree clean"
  fi

  # Check if base branch exists
  if git rev-parse --verify "$BASE_BRANCH" >/dev/null 2>&1; then
    echo -e "  ${GREEN}✓${RESET} Base branch: $BASE_BRANCH"
  else
    echo -e "  ${RED}✗${RESET} Base branch not found: $BASE_BRANCH"
    errors=$((errors + 1))
  fi

  # 3. GitHub auth (if gh available and not disabled)
  if [[ "$NO_GITHUB" != "true" ]] && command -v gh >/dev/null 2>&1; then
    if gh auth status >/dev/null 2>&1; then
      echo -e "  ${GREEN}✓${RESET} GitHub authenticated"
    else
      echo -e "  ${YELLOW}⚠${RESET} GitHub not authenticated (features disabled)"
    fi
  fi

  # 4. Claude CLI — plan/build stages cannot run without it.
  if command -v claude >/dev/null 2>&1; then
    echo -e "  ${GREEN}✓${RESET} Claude CLI available"
  else
    echo -e "  ${RED}✗${RESET} Claude CLI not found — plan/build stages will fail"
    errors=$((errors + 1))
  fi

  # 5. sw loop (needed for build stage)
  if [[ -x "$SCRIPT_DIR/sw-loop.sh" ]]; then
    echo -e "  ${GREEN}✓${RESET} shipwright loop available"
  else
    echo -e "  ${RED}✗${RESET} sw-loop.sh not found at $SCRIPT_DIR"
    errors=$((errors + 1))
  fi

  # 6. Disk space check (warn if < 1GB free).
  # Validate the value is a plain integer before comparing — `df` output
  # varies by platform and a non-numeric field would otherwise make the
  # arithmetic test itself error out (previously masked with 2>/dev/null).
  local free_space_kb
  free_space_kb=$(df -k "$PROJECT_ROOT" 2>/dev/null | tail -1 | awk '{print $4}')
  if [[ "$free_space_kb" =~ ^[0-9]+$ ]] && (( free_space_kb < 1048576 )); then
    echo -e "  ${YELLOW}⚠${RESET} Low disk space: $(( free_space_kb / 1024 ))MB free"
  fi

  echo ""

  if [[ "$errors" -gt 0 ]]; then
    error "Pre-flight failed: $errors error(s)"
    return 1
  fi

  success "Pre-flight passed"
  echo ""
  return 0
}
|
|
360
|
+
|
|
361
|
+
# ─── Notifications ─────────────────────────────────────────────────
# Send a notification to Slack (SLACK_WEBHOOK) and/or a custom webhook
# (SHIPWRIGHT_WEBHOOK_URL). Both deliveries are best-effort: curl failures
# are swallowed so notifications never break the pipeline.
# Arguments:
#   $1 - title
#   $2 - message body
#   $3 - level: success | error | warn | anything else = info (default: info)
# Globals read: SLACK_WEBHOOK, SHIPWRIGHT_WEBHOOK_URL, PIPELINE_NAME, GOAL,
#               CURRENT_STAGE_ID (all optional)
notify() {
  local title="$1" message="$2" level="${3:-info}"
  local emoji
  case "$level" in
    success) emoji="✅" ;;
    error) emoji="❌" ;;
    warn) emoji="⚠️" ;;
    *) emoji="🔔" ;;
  esac

  local payload

  # Slack webhook
  if [[ -n "${SLACK_WEBHOOK:-}" ]]; then
    # BUGFIX: use a real newline ($'\n') between title and message.
    # A literal "\n" inside double quotes is passed verbatim to jq --arg,
    # which JSON-escapes it, so Slack rendered a literal "\n" instead of
    # a line break.
    payload=$(jq -n \
      --arg text "${emoji} *${title}*"$'\n'"${message}" \
      '{text: $text}')
    curl -sf --connect-timeout "$(_config_get_int "network.connect_timeout" 10 2>/dev/null || echo 10)" --max-time "$(_config_get_int "network.max_time" 60 2>/dev/null || echo 60)" -X POST -H 'Content-Type: application/json' \
      -d "$payload" "$SLACK_WEBHOOK" >/dev/null 2>&1 || true
  fi

  # Custom webhook (env var SHIPWRIGHT_WEBHOOK_URL)
  local _webhook_url="${SHIPWRIGHT_WEBHOOK_URL:-}"
  if [[ -n "$_webhook_url" ]]; then
    payload=$(jq -n \
      --arg title "$title" --arg message "$message" \
      --arg level "$level" --arg pipeline "${PIPELINE_NAME:-}" \
      --arg goal "${GOAL:-}" --arg stage "${CURRENT_STAGE_ID:-}" \
      '{title:$title, message:$message, level:$level, pipeline:$pipeline, goal:$goal, stage:$stage}')
    # Consistent with the Slack call above: timeouts come from config with
    # the same fallbacks this branch used before (10s connect / 30s total).
    curl -sf --connect-timeout "$(_config_get_int "network.connect_timeout" 10 2>/dev/null || echo 10)" --max-time "$(_config_get_int "network.max_time" 30 2>/dev/null || echo 30)" -X POST -H 'Content-Type: application/json' \
      -d "$payload" "$_webhook_url" >/dev/null 2>&1 || true
  fi
}
|
|
395
|
+
|
|
396
|
+
# ─── Error Classification ──────────────────────────────────────────
# Classify a failed stage's log tail into one of:
#   infrastructure | configuration | logic | unknown
# and print the result on stdout (this function ALWAYS prints a category).
# Order of resolution:
#   1. cached classification keyed by an error signature (cksum of the
#      first error-ish lines) in ~/.shipwright/optimization/
#   2. regex heuristics over the last 50 log lines
#   3. optional Claude fallback (only when intelligence helpers + CLI exist)
# New classifications are recorded (best-effort) for future runs, together
# with a canonical category mapped onto the shared taxonomy.
# Arguments:
#   $1 - stage id (used to locate "<stage>-results.log" and to map
#        logic errors to test_failure vs build_error)
# Globals read: ARTIFACTS_DIR, HOME
classify_error() {
  local stage_id="$1"
  local log_file="${ARTIFACTS_DIR}/${stage_id}-results.log"
  [[ ! -f "$log_file" ]] && log_file="${ARTIFACTS_DIR}/test-results.log"
  [[ ! -f "$log_file" ]] && { echo "unknown"; return; }

  local log_tail
  log_tail=$(tail -50 "$log_file" 2>/dev/null || echo "")

  # Generate error signature for history lookup
  local error_sig
  error_sig=$(echo "$log_tail" | grep -iE 'error|fail|exception|fatal' 2>/dev/null | head -3 | cksum | awk '{print $1}' || echo "0")

  # Check classification history first (learned from previous runs)
  local class_history="${HOME}/.shipwright/optimization/error-classifications.json"
  if [[ -f "$class_history" ]]; then
    local cached_class
    cached_class=$(jq -r --arg sig "$error_sig" '.[$sig].classification // empty' "$class_history" 2>/dev/null || true)
    if [[ -n "$cached_class" && "$cached_class" != "null" ]]; then
      echo "$cached_class"
      return
    fi
  fi

  local classification="unknown"

  # Infrastructure errors: timeout, OOM, network — retry makes sense
  if echo "$log_tail" | grep -qiE 'timeout|timed out|ETIMEDOUT|ECONNREFUSED|ECONNRESET|network|socket hang up|OOM|out of memory|killed|signal 9|Cannot allocate memory'; then
    classification="infrastructure"
  # Configuration errors: missing env, wrong path — don't retry, escalate
  elif echo "$log_tail" | grep -qiE 'ENOENT|not found|No such file|command not found|MODULE_NOT_FOUND|Cannot find module|missing.*env|undefined variable|permission denied|EACCES'; then
    classification="configuration"
  # Logic errors: assertion failures, type errors — retry won't help without code change
  elif echo "$log_tail" | grep -qiE 'AssertionError|assert.*fail|Expected.*but.*got|TypeError|ReferenceError|SyntaxError|CompileError|type mismatch|cannot assign|incompatible type'; then
    classification="logic"
  # Build errors: compilation failures
  elif echo "$log_tail" | grep -qiE 'error\[E[0-9]+\]|error: aborting|FAILED.*compile|build failed|tsc.*error|eslint.*error'; then
    classification="logic"
  # Intelligence fallback: Claude classification for unknown errors
  elif [[ "$classification" == "unknown" ]] && type intelligence_search_memory >/dev/null 2>&1 && command -v claude >/dev/null 2>&1; then
    local ai_class
    ai_class=$(claude --print --output-format text -p "Classify this error as exactly one of: infrastructure, configuration, logic, unknown.

Error output:
$(echo "$log_tail" | tail -20)

Reply with ONLY the classification word, nothing else." --model "$(_smart_model classification haiku)" < /dev/null 2>/dev/null || true)
    ai_class=$(echo "$ai_class" | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]')
    case "$ai_class" in
      infrastructure|configuration|logic) classification="$ai_class" ;;
    esac
  fi

  # Map retry categories to shared taxonomy (from lib/compat.sh SW_ERROR_CATEGORIES)
  # Retry uses: infrastructure, configuration, logic, unknown
  # Shared uses: test_failure, build_error, lint_error, timeout, dependency, flaky, config, security, permission, unknown
  local canonical_category="unknown"
  case "$classification" in
    infrastructure) canonical_category="timeout" ;;
    configuration) canonical_category="config" ;;
    logic)
      case "$stage_id" in
        test) canonical_category="test_failure" ;;
        *) canonical_category="build_error" ;;
      esac
      ;;
  esac

  # Record classification for future runs (using both retry and canonical categories)
  if [[ -n "$error_sig" && "$error_sig" != "0" ]]; then
    local class_dir="${HOME}/.shipwright/optimization"
    mkdir -p "$class_dir" 2>/dev/null || true
    local tmp_class
    # BUGFIX: on mktemp failure, still print the classification we already
    # computed — previously callers using $(classify_error …) got an empty
    # string here, breaking the "always prints a category" contract.
    tmp_class="$(mktemp)" || { warn "mktemp failed"; echo "$classification"; return 1; }
    # shellcheck disable=SC2064 # intentional expansion at definition time
    trap "rm -f '$tmp_class'" RETURN
    if [[ -f "$class_history" ]]; then
      jq --arg sig "$error_sig" --arg cls "$classification" --arg canon "$canonical_category" --arg stage "$stage_id" \
        '.[$sig] = {"classification": $cls, "canonical": $canon, "stage": $stage, "recorded_at": now}' \
        "$class_history" > "$tmp_class" 2>/dev/null && \
        mv "$tmp_class" "$class_history" || rm -f "$tmp_class"
    else
      jq -n --arg sig "$error_sig" --arg cls "$classification" --arg canon "$canonical_category" --arg stage "$stage_id" \
        '{($sig): {"classification": $cls, "canonical": $canon, "stage": $stage, "recorded_at": now}}' \
        > "$tmp_class" 2>/dev/null && \
        mv "$tmp_class" "$class_history" || rm -f "$tmp_class"
    fi
  fi

  echo "$classification"
}
|