shipwright-cli 3.0.0 → 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +21 -7
- package/completions/_shipwright +247 -93
- package/completions/shipwright.bash +69 -15
- package/completions/shipwright.fish +309 -41
- package/config/decision-tiers.json +55 -0
- package/config/defaults.json +25 -2
- package/config/event-schema.json +142 -5
- package/config/policy.json +8 -0
- package/dashboard/public/index.html +6 -0
- package/dashboard/public/styles.css +76 -0
- package/dashboard/server.ts +51 -0
- package/dashboard/src/core/api.ts +5 -0
- package/dashboard/src/types/api.ts +10 -0
- package/dashboard/src/views/metrics.ts +69 -1
- package/package.json +3 -3
- package/scripts/lib/architecture.sh +2 -1
- package/scripts/lib/bootstrap.sh +0 -0
- package/scripts/lib/config.sh +0 -0
- package/scripts/lib/daemon-adaptive.sh +4 -2
- package/scripts/lib/daemon-dispatch.sh +24 -1
- package/scripts/lib/daemon-failure.sh +0 -0
- package/scripts/lib/daemon-health.sh +0 -0
- package/scripts/lib/daemon-patrol.sh +42 -7
- package/scripts/lib/daemon-poll.sh +17 -0
- package/scripts/lib/daemon-state.sh +17 -0
- package/scripts/lib/daemon-triage.sh +1 -1
- package/scripts/lib/decide-autonomy.sh +295 -0
- package/scripts/lib/decide-scoring.sh +228 -0
- package/scripts/lib/decide-signals.sh +462 -0
- package/scripts/lib/fleet-failover.sh +0 -0
- package/scripts/lib/helpers.sh +19 -18
- package/scripts/lib/pipeline-detection.sh +1 -1
- package/scripts/lib/pipeline-github.sh +0 -0
- package/scripts/lib/pipeline-intelligence.sh +23 -4
- package/scripts/lib/pipeline-quality-checks.sh +11 -6
- package/scripts/lib/pipeline-quality.sh +0 -0
- package/scripts/lib/pipeline-stages.sh +330 -33
- package/scripts/lib/pipeline-state.sh +14 -0
- package/scripts/lib/policy.sh +0 -0
- package/scripts/lib/test-helpers.sh +0 -0
- package/scripts/postinstall.mjs +75 -1
- package/scripts/signals/example-collector.sh +36 -0
- package/scripts/sw +8 -4
- package/scripts/sw-activity.sh +1 -7
- package/scripts/sw-adaptive.sh +7 -7
- package/scripts/sw-adversarial.sh +1 -1
- package/scripts/sw-architecture-enforcer.sh +1 -1
- package/scripts/sw-auth.sh +1 -1
- package/scripts/sw-autonomous.sh +1 -1
- package/scripts/sw-changelog.sh +1 -1
- package/scripts/sw-checkpoint.sh +1 -1
- package/scripts/sw-ci.sh +11 -6
- package/scripts/sw-cleanup.sh +1 -1
- package/scripts/sw-code-review.sh +36 -17
- package/scripts/sw-connect.sh +1 -1
- package/scripts/sw-context.sh +1 -1
- package/scripts/sw-cost.sh +71 -5
- package/scripts/sw-daemon.sh +6 -3
- package/scripts/sw-dashboard.sh +1 -1
- package/scripts/sw-db.sh +53 -38
- package/scripts/sw-decide.sh +685 -0
- package/scripts/sw-decompose.sh +1 -1
- package/scripts/sw-deps.sh +1 -1
- package/scripts/sw-developer-simulation.sh +1 -1
- package/scripts/sw-discovery.sh +80 -4
- package/scripts/sw-doc-fleet.sh +1 -1
- package/scripts/sw-docs-agent.sh +1 -1
- package/scripts/sw-docs.sh +1 -1
- package/scripts/sw-doctor.sh +1 -1
- package/scripts/sw-dora.sh +1 -1
- package/scripts/sw-durable.sh +9 -5
- package/scripts/sw-e2e-orchestrator.sh +1 -1
- package/scripts/sw-eventbus.sh +7 -4
- package/scripts/sw-evidence.sh +1 -1
- package/scripts/sw-feedback.sh +1 -1
- package/scripts/sw-fix.sh +1 -1
- package/scripts/sw-fleet-discover.sh +1 -1
- package/scripts/sw-fleet-viz.sh +6 -4
- package/scripts/sw-fleet.sh +1 -1
- package/scripts/sw-github-app.sh +3 -2
- package/scripts/sw-github-checks.sh +1 -1
- package/scripts/sw-github-deploy.sh +1 -1
- package/scripts/sw-github-graphql.sh +1 -1
- package/scripts/sw-guild.sh +1 -1
- package/scripts/sw-heartbeat.sh +1 -1
- package/scripts/sw-hygiene.sh +5 -3
- package/scripts/sw-incident.sh +9 -5
- package/scripts/sw-init.sh +1 -1
- package/scripts/sw-instrument.sh +1 -1
- package/scripts/sw-intelligence.sh +11 -6
- package/scripts/sw-jira.sh +1 -1
- package/scripts/sw-launchd.sh +1 -1
- package/scripts/sw-linear.sh +1 -1
- package/scripts/sw-logs.sh +1 -1
- package/scripts/sw-loop.sh +338 -32
- package/scripts/sw-memory.sh +23 -6
- package/scripts/sw-mission-control.sh +1 -1
- package/scripts/sw-model-router.sh +3 -2
- package/scripts/sw-otel.sh +8 -4
- package/scripts/sw-oversight.sh +1 -1
- package/scripts/sw-pipeline-composer.sh +3 -1
- package/scripts/sw-pipeline-vitals.sh +11 -6
- package/scripts/sw-pipeline.sh +92 -8
- package/scripts/sw-pm.sh +5 -4
- package/scripts/sw-pr-lifecycle.sh +7 -4
- package/scripts/sw-predictive.sh +11 -5
- package/scripts/sw-prep.sh +1 -1
- package/scripts/sw-ps.sh +1 -1
- package/scripts/sw-public-dashboard.sh +3 -2
- package/scripts/sw-quality.sh +21 -10
- package/scripts/sw-reaper.sh +1 -1
- package/scripts/sw-recruit.sh +1 -1
- package/scripts/sw-regression.sh +1 -1
- package/scripts/sw-release-manager.sh +1 -1
- package/scripts/sw-release.sh +1 -1
- package/scripts/sw-remote.sh +1 -1
- package/scripts/sw-replay.sh +1 -1
- package/scripts/sw-retro.sh +1 -1
- package/scripts/sw-review-rerun.sh +1 -1
- package/scripts/sw-scale.sh +69 -11
- package/scripts/sw-security-audit.sh +1 -1
- package/scripts/sw-self-optimize.sh +168 -4
- package/scripts/sw-session.sh +3 -3
- package/scripts/sw-setup.sh +1 -1
- package/scripts/sw-standup.sh +1 -1
- package/scripts/sw-status.sh +1 -1
- package/scripts/sw-strategic.sh +11 -6
- package/scripts/sw-stream.sh +7 -4
- package/scripts/sw-swarm.sh +3 -2
- package/scripts/sw-team-stages.sh +1 -1
- package/scripts/sw-templates.sh +3 -3
- package/scripts/sw-testgen.sh +11 -6
- package/scripts/sw-tmux-pipeline.sh +1 -1
- package/scripts/sw-tmux.sh +35 -1
- package/scripts/sw-trace.sh +1 -1
- package/scripts/sw-tracker.sh +1 -1
- package/scripts/sw-triage.sh +7 -7
- package/scripts/sw-upgrade.sh +1 -1
- package/scripts/sw-ux.sh +1 -1
- package/scripts/sw-webhook.sh +3 -2
- package/scripts/sw-widgets.sh +7 -4
- package/scripts/sw-worktree.sh +1 -1
- package/scripts/update-homebrew-sha.sh +21 -15
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
# decide-signals.sh — Signal collection for the decision engine
# Sourced from sw-decide.sh. Requires helpers.sh, policy.sh.

# Include guard: a second `source` of this library is a no-op.
if [[ -n "${_DECIDE_SIGNALS_LOADED:-}" ]]; then
  return 0
fi
_DECIDE_SIGNALS_LOADED=1

# ─── State ────────────────────────────────────────────────────────────────────
# Directory for signal state; pending.jsonl queues externally-submitted
# candidates (one JSON object per line) for the next collection run.
SIGNALS_DIR="${HOME}/.shipwright/signals"
SIGNALS_PENDING_FILE="${SIGNALS_DIR}/pending.jsonl"
|
|
9
|
+
|
|
10
|
+
# Create the signals state directory (and parents) if missing; idempotent.
_ensure_signals_dir() {
  mkdir -p "${SIGNALS_DIR}"
}
|
|
13
|
+
|
|
14
|
+
# ─── Candidate builder ───────────────────────────────────────────────────────
# Print one candidate JSON object on stdout.
# Usage: _build_candidate "id" "signal" "category" "title" "description" "risk" "confidence" "dedup_key" [evidence_json]
#   risk       — integer (default 50), emitted as a JSON number
#   confidence — string score (default "0.80"), emitted as a JSON string
#   evidence   — optional JSON object (default {}), embedded verbatim
_build_candidate() {
  local cand_id="$1" cand_signal="$2" cand_category="$3" cand_title="$4" cand_desc="$5"
  local cand_risk="${6:-50}" cand_conf="${7:-0.80}" cand_dedup="$8" cand_evidence="${9:-{}}"
  local cand_ts
  cand_ts="$(now_iso)"
  jq -n \
    --arg id "$cand_id" \
    --arg signal "$cand_signal" \
    --arg category "$cand_category" \
    --arg title "$cand_title" \
    --arg desc "$cand_desc" \
    --argjson risk "$cand_risk" \
    --arg conf "$cand_conf" \
    --arg dedup "$cand_dedup" \
    --argjson ev "$cand_evidence" \
    --arg ts "$cand_ts" \
    '{id:$id, signal:$signal, category:$category, title:$title, description:$desc, evidence:$ev, risk_score:$risk, confidence:$conf, dedup_key:$dedup, collected_at:$ts}'
}
|
|
32
|
+
|
|
33
|
+
# ─── Collectors ───────────────────────────────────────────────────────────────
|
|
34
|
+
|
|
35
|
+
# Collect security signals from npm audit, pip-audit, and cargo audit.
# Emits one candidate per critical/high npm vulnerability, one per vulnerable
# Python dependency, and a single rollup candidate for Cargo vulnerabilities.
signals_collect_security() {
  # npm audit — critical/high only
  if [[ -f "package.json" ]] && command -v npm >/dev/null 2>&1; then
    local audit_json
    audit_json=$(npm audit --json 2>/dev/null || echo '{}')
    local audit_version
    audit_version=$(echo "$audit_json" | jq -r '.auditReportVersion // 1')

    # v2 reports nest data under .vulnerabilities; v1 under .advisories.
    local vuln_list
    if [[ "$audit_version" == "2" ]]; then
      vuln_list=$(echo "$audit_json" | jq -c '[.vulnerabilities | to_entries[] | .value | {name: .name, severity: .severity, url: (.via[0].url // "N/A"), title: (.via[0].title // .name)}]' 2>/dev/null || echo '[]')
    else
      vuln_list=$(echo "$audit_json" | jq -c '[.advisories | to_entries[] | .value | {name: .module_name, severity: .severity, url: .url, title: .title}]' 2>/dev/null || echo '[]')
    fi

    if [[ -n "$vuln_list" && "$vuln_list" != "[]" ]]; then
      local vuln
      while IFS= read -r vuln; do
        local severity name title adv_url
        severity=$(echo "$vuln" | jq -r '.severity // "unknown"')
        name=$(echo "$vuln" | jq -r '.name // "unknown"')
        title=$(echo "$vuln" | jq -r '.title // "vulnerability"')
        adv_url=$(echo "$vuln" | jq -r '.url // ""')

        # Only critical/high severities become candidates.
        [[ "$severity" != "critical" && "$severity" != "high" ]] && continue

        local risk=50 category="security_patch"
        if [[ "$severity" == "critical" ]]; then
          risk=80
          category="security_critical"
        fi

        local evidence
        evidence=$(jq -n --arg sev "$severity" --arg pkg "$name" --arg url "$adv_url" \
          '{severity:$sev, package:$pkg, advisory_url:$url}')

        _build_candidate \
          "sec-${name}-$(echo "$title" | tr ' ' '-' | cut -c1-30)" \
          "security" "$category" \
          "Security: ${title} in ${name}" \
          "Fix ${severity} vulnerability in ${name}. Advisory: ${adv_url}" \
          "$risk" "0.95" "security:${name}:${title}" "$evidence"
      done < <(echo "$vuln_list" | jq -c '.[]' 2>/dev/null)
    fi
  fi

  # pip-audit — one candidate per dependency with at least one vulnerability
  if [[ -f "requirements.txt" ]] && command -v pip-audit >/dev/null 2>&1; then
    local pip_json
    pip_json=$(pip-audit --format=json 2>/dev/null || true)
    if [[ -n "$pip_json" ]]; then
      local dep
      while IFS= read -r dep; do
        local pkg vuln_id
        pkg=$(echo "$dep" | jq -r '.name // "unknown"')
        vuln_id=$(echo "$dep" | jq -r '.vulns[0].id // "unknown"')
        _build_candidate \
          "sec-pip-${pkg}-${vuln_id}" "security" "security_patch" \
          "Security: ${vuln_id} in ${pkg}" \
          "Python dependency ${pkg} has vulnerability ${vuln_id}" \
          60 "0.90" "security:pip:${pkg}:${vuln_id}"
      done < <(echo "$pip_json" | jq -c '.dependencies[] | select(.vulns | length > 0)' 2>/dev/null)
    fi
  fi

  # cargo audit — single rollup candidate when any vulnerabilities found.
  if [[ -f "Cargo.toml" ]] && command -v cargo-audit >/dev/null 2>&1; then
    local cargo_json
    cargo_json=$(cargo audit --json 2>/dev/null || true)
    local vuln_count
    # BUGFIX: the RustSec JSON report exposes the number of vulnerabilities at
    # .vulnerabilities.count; .vulnerabilities.found is a BOOLEAN, so the old
    # `-gt 0` test on "true"/"false" never fired. (TODO confirm against the
    # installed cargo-audit version.)
    vuln_count=$(echo "$cargo_json" | jq '.vulnerabilities.count // 0' 2>/dev/null || echo "0")
    [[ ! "$vuln_count" =~ ^[0-9]+$ ]] && vuln_count=0
    if [[ "${vuln_count:-0}" -gt 0 ]]; then
      _build_candidate \
        "sec-cargo-vulns" "security" "security_patch" \
        "Security: ${vuln_count} Cargo vulnerability(ies)" \
        "cargo audit found ${vuln_count} vulnerability(ies)" \
        60 "0.90" "security:cargo:vulns"
    fi
  fi
}
|
|
110
|
+
|
|
111
|
+
# Collect dependency-freshness signals from `npm outdated`.
# Emits one candidate per package that is >= 1 major version behind:
# deps_minor (risk 25) for 1 major, deps_major (risk 45) for 2+.
signals_collect_deps() {
  [[ ! -f "package.json" ]] && return 0
  command -v npm >/dev/null 2>&1 || return 0

  local outdated_json
  outdated_json=$(npm outdated --json 2>/dev/null || true)
  [[ -z "$outdated_json" || "$outdated_json" == "{}" ]] && return 0

  local pkg
  while IFS= read -r pkg; do
    local name current latest current_major latest_major
    name=$(echo "$pkg" | jq -r '.key')
    current=$(echo "$pkg" | jq -r '.value.current // "0.0.0"')
    latest=$(echo "$pkg" | jq -r '.value.latest // "0.0.0"')
    current_major="${current%%.*}"
    latest_major="${latest%%.*}"

    # Skip versions whose major component is not numeric (tags, ranges).
    [[ ! "$latest_major" =~ ^[0-9]+$ ]] && continue
    [[ ! "$current_major" =~ ^[0-9]+$ ]] && continue

    local diff=$((latest_major - current_major))
    # Only packages at least one major version behind become candidates.
    # (BUGFIX: the old code computed a "deps_patch"/risk=10 branch for
    # diff < 1 and then unconditionally skipped those packages anyway —
    # dead branch removed; emitted candidates are unchanged.)
    [[ "$diff" -lt 1 ]] && continue

    local category="deps_minor" risk=25
    if [[ "$diff" -ge 2 ]]; then
      category="deps_major"
      risk=45
    fi

    local evidence
    evidence=$(jq -n --arg pkg "$name" --arg cur "$current" --arg lat "$latest" --argjson diff "$diff" \
      '{package:$pkg, current:$cur, latest:$lat, major_versions_behind:$diff}')

    _build_candidate \
      "deps-${name}" "deps" "$category" \
      "Update ${name}: ${current} -> ${latest}" \
      "Package ${name} is ${diff} major version(s) behind (${current} -> ${latest})" \
      "$risk" "0.90" "deps:${name}" "$evidence"
  done < <(echo "$outdated_json" | jq -c 'to_entries[]' 2>/dev/null)
}
|
|
158
|
+
|
|
159
|
+
# Flag files whose line coverage is below 50%, reading the first
# coverage-summary.json found among the known artifact locations.
signals_collect_coverage() {
  local summary_file=""
  local loc
  for loc in \
    ".claude/pipeline-artifacts/coverage/coverage-summary.json" \
    "coverage/coverage-summary.json" \
    ".coverage/coverage-summary.json"; do
    if [[ -f "$loc" ]]; then
      summary_file="$loc"
      break
    fi
  done
  [[ -z "$summary_file" ]] && return 0

  local low_files=""
  local low_count=0
  local entry
  while IFS= read -r entry; do
    local fpath pct
    fpath=$(echo "$entry" | jq -r '.key')
    pct=$(echo "$entry" | jq -r '.value.lines.pct // 100')
    # The "total" entry is the aggregate, not a file.
    [[ "$fpath" == "total" ]] && continue
    # Float-safe threshold check via awk: skip files at >= 50% coverage.
    if awk "BEGIN{exit !($pct >= 50)}" 2>/dev/null; then continue; fi
    low_count=$((low_count + 1))
    low_files="${low_files}${fpath} (${pct}%), "
  done < <(jq -c 'to_entries[]' "$summary_file" 2>/dev/null)

  [[ "$low_count" -eq 0 ]] && return 0

  _build_candidate \
    "cov-gaps-${low_count}" "coverage" "test_coverage" \
    "Improve test coverage for ${low_count} file(s)" \
    "Files with < 50% line coverage: ${low_files%%, }" \
    20 "0.85" "coverage:gaps:${low_count}"
}
|
|
189
|
+
|
|
190
|
+
# Detect documentation drift: a README more than 30 days older than the most
# recent source change, and stale AUTO sections reported by sw-docs.sh.
signals_collect_docs() {
  local findings=0
  local details=""

  if [[ -f "README.md" ]]; then
    local readme_epoch src_epoch
    readme_epoch=$(git log -1 --format=%ct -- README.md 2>/dev/null || echo "0")
    src_epoch=$(git log -1 --format=%ct -- "*.ts" "*.js" "*.py" "*.go" "*.rs" "*.sh" 2>/dev/null || echo "0")
    if [[ "$src_epoch" -gt 0 && "$readme_epoch" -gt 0 ]]; then
      local drift=$((src_epoch - readme_epoch))
      # 2592000 s = 30 days
      if [[ "$drift" -gt 2592000 ]]; then
        findings=$((findings + 1))
        local days=$((drift / 86400))
        details="${details}README.md: ${days} days behind; "
      fi
    fi
  fi

  # Check AUTO section freshness via the docs helper, when installed.
  if [[ -x "${SCRIPT_DIR:-}/sw-docs.sh" ]]; then
    if ! bash "${SCRIPT_DIR}/sw-docs.sh" check >/dev/null 2>&1; then
      findings=$((findings + 1))
      details="${details}AUTO sections stale; "
    fi
  fi

  [[ "$findings" -eq 0 ]] && return 0

  _build_candidate \
    "docs-stale-${findings}" "docs" "doc_sync" \
    "Sync stale documentation (${findings} item(s))" \
    "Documentation drift detected: ${details%%; }" \
    15 "0.85" "docs:stale"
}
|
|
224
|
+
|
|
225
|
+
# Heuristic dead-code scan: list JS/TS source files that no other file
# appears to import. Import detection is a basename grep, so false
# positives are expected (dynamic imports, path aliases) — hence the
# low confidence (0.70) on the emitted candidate.
signals_collect_dead_code() {
  [[ ! -f "package.json" && ! -f "tsconfig.json" ]] && return 0

  local count=0
  local dead_files=""
  local src_dirs=("src" "lib" "app")
  local dir file
  for dir in "${src_dirs[@]}"; do
    [[ -d "$dir" ]] || continue
    while IFS= read -r file; do
      local basename_no_ext
      # BUGFIX: strip the extension with parameter expansion — the old
      # `sed 's/\.\(ts\|js\|tsx\|jsx\)$//'` used GNU-only \| alternation,
      # which silently does nothing on BSD/macOS sed. find already limits
      # matches to .ts/.js/.tsx/.jsx, so stripping the last suffix is
      # equivalent.
      basename_no_ext=$(basename "$file")
      basename_no_ext="${basename_no_ext%.*}"
      [[ "$basename_no_ext" == "index" ]] && continue
      [[ "$basename_no_ext" =~ \.(test|spec)$ ]] && continue

      local import_count
      # BUGFIX: filter the file itself out with -F — the path was being
      # treated as a regex, so the dots matched arbitrary characters.
      import_count=$(grep -rlE "(from|require).*['\"].*${basename_no_ext}['\"]" \
        --include="*.ts" --include="*.js" --include="*.tsx" --include="*.jsx" \
        . 2>/dev/null | grep -cvF "$file" || true)
      import_count=${import_count:-0}

      if [[ "$import_count" -eq 0 ]]; then
        count=$((count + 1))
        dead_files="${dead_files}${file}, "
      fi
    done < <(find "$dir" -type f \( -name "*.ts" -o -name "*.js" -o -name "*.tsx" -o -name "*.jsx" \) \
      ! -name "*.test.*" ! -name "*.spec.*" ! -name "*.d.ts" 2>/dev/null)
  done

  [[ "$count" -eq 0 ]] && return 0

  _build_candidate \
    "dead-code-${count}" "dead_code" "dead_code" \
    "Dead code candidates (${count} files)" \
    "Files with no importers: ${dead_files%%, }" \
    25 "0.70" "dead_code:${count}"
}
|
|
261
|
+
|
|
262
|
+
# Compare the most recent test-stage duration against the patrol baseline
# and emit a candidate when it regressed by more than 30%.
signals_collect_performance() {
  local events_file="${EVENTS_FILE:-${HOME}/.shipwright/events.jsonl}"
  [[ ! -f "$events_file" ]] && return 0

  local baseline_file="${HOME}/.shipwright/patrol-perf-baseline.json"
  [[ ! -f "$baseline_file" ]] && return 0

  # Last recorded duration of a completed "test" stage in the tail of the log.
  local recent_test_dur
  recent_test_dur=$(tail -500 "$events_file" | \
    jq -s '[.[] | select(.type == "stage.completed" and .stage == "test") | .duration_s] | if length > 0 then .[-1] else null end' \
    2>/dev/null || echo "null")
  [[ "$recent_test_dur" == "null" || -z "$recent_test_dur" ]] && return 0

  local baseline_dur
  baseline_dur=$(jq -r '.test_duration_s // 0' "$baseline_file" 2>/dev/null || echo "0")

  # BUGFIX: duration_s / test_duration_s are JSON numbers that may be floats
  # (e.g. "42.7"); bash integer tests and $(( )) abort on those. Truncate to
  # whole seconds and bail out on anything non-numeric.
  recent_test_dur="${recent_test_dur%%.*}"
  baseline_dur="${baseline_dur%%.*}"
  [[ ! "$recent_test_dur" =~ ^[0-9]+$ ]] && return 0
  [[ ! "$baseline_dur" =~ ^[0-9]+$ ]] && return 0
  [[ "$baseline_dur" -le 0 ]] && return 0

  # Regression threshold: 130% of baseline.
  local threshold=$(( baseline_dur * 130 / 100 ))
  [[ "$recent_test_dur" -le "$threshold" ]] && return 0

  local pct_slower=$(( (recent_test_dur - baseline_dur) * 100 / baseline_dur ))

  local evidence
  evidence=$(jq -n --argjson base "$baseline_dur" --argjson cur "$recent_test_dur" --argjson pct "$pct_slower" \
    '{baseline_s:$base, current_s:$cur, regression_pct:$pct}')

  _build_candidate \
    "perf-test-regression" "performance" "performance_regression" \
    "Test suite performance regression (${pct_slower}% slower)" \
    "Test suite: ${baseline_dur}s -> ${recent_test_dur}s (${pct_slower}% regression)" \
    40 "0.85" "performance:test_suite" "$evidence"
}
|
|
294
|
+
|
|
295
|
+
# Turn recurring failure patterns recorded by sw-memory.sh into candidates.
signals_collect_failures() {
  local memory_script="${SCRIPT_DIR:-}/sw-memory.sh"
  [[ ! -f "$memory_script" ]] && return 0

  # Source the memory library inside a subshell so its functions and state
  # cannot leak into this process; sourcing is best-effort (|| true).
  local failures_json
  failures_json=$(
    (
      source "$memory_script" > /dev/null 2>&1 || true
      if command -v memory_get_actionable_failures >/dev/null 2>&1; then
        # 3 = minimum seen_count for a failure to be "actionable" —
        # presumably; TODO confirm against sw-memory.sh's signature.
        memory_get_actionable_failures 3
      else
        echo "[]"
      fi
    )
  )

  local count
  count=$(echo "$failures_json" | jq 'length' 2>/dev/null || echo "0")
  [[ "${count:-0}" -eq 0 ]] && return 0

  # One candidate per recurring failure pattern.
  while IFS= read -r failure; do
    local pattern stage seen_count
    pattern=$(echo "$failure" | jq -r '.pattern // "unknown"')
    stage=$(echo "$failure" | jq -r '.stage // "unknown"')
    seen_count=$(echo "$failure" | jq -r '.seen_count // 0')

    # Truncate long patterns for use in titles and ids.
    local short_pattern
    short_pattern=$(echo "$pattern" | cut -c1-60)

    _build_candidate \
      "fail-${stage}-$(echo "$short_pattern" | tr ' /' '-_' | cut -c1-30)" \
      "failures" "recurring_failure" \
      "Fix recurring: ${short_pattern}" \
      "Pattern in ${stage}: ${pattern} (seen ${seen_count}x)" \
      35 "0.80" "failure:${stage}:${short_pattern}"
  done < <(echo "$failures_json" | jq -c '.[]' 2>/dev/null)
}
|
|
332
|
+
|
|
333
|
+
# DORA regression detection over the event log: compare the change failure
# rate (CFR) of the last 7 days against the 7 days before that, and flag an
# increase of more than 5 percentage points.
signals_collect_dora() {
  local events_file="${EVENTS_FILE:-${HOME}/.shipwright/events.jsonl}"
  [[ ! -f "$events_file" ]] && return 0

  local now_e
  now_e=$(now_epoch)
  local current_start=$((now_e - 604800))   # 7 days ago
  local prev_start=$((now_e - 1209600))     # 14 days ago

  # Partition events into the current and previous 7-day windows.
  local current_events prev_events
  current_events=$(jq -s --argjson start "$current_start" \
    '[.[] | select(.ts_epoch >= $start)]' "$events_file" 2>/dev/null || echo "[]")
  prev_events=$(jq -s --argjson start "$prev_start" --argjson end "$current_start" \
    '[.[] | select(.ts_epoch >= $start and .ts_epoch < $end)]' "$events_file" 2>/dev/null || echo "[]")

  local prev_total curr_total
  prev_total=$(echo "$prev_events" | jq '[.[] | select(.type == "pipeline.completed")] | length' 2>/dev/null || echo "0")
  curr_total=$(echo "$current_events" | jq '[.[] | select(.type == "pipeline.completed")] | length' 2>/dev/null || echo "0")

  # Require at least 3 completed pipelines per window for a meaningful rate.
  [[ "${prev_total:-0}" -lt 3 || "${curr_total:-0}" -lt 3 ]] && return 0

  # Compare CFR between the two windows.
  local prev_failures curr_failures
  prev_failures=$(echo "$prev_events" | jq '[.[] | select(.type == "pipeline.completed" and .result == "failure")] | length' 2>/dev/null || echo "0")
  curr_failures=$(echo "$current_events" | jq '[.[] | select(.type == "pipeline.completed" and .result == "failure")] | length' 2>/dev/null || echo "0")

  local prev_cfr=0 curr_cfr=0
  [[ "$prev_total" -gt 0 ]] && prev_cfr=$(echo "$prev_failures $prev_total" | awk '{printf "%.0f", ($1 / $2) * 100}')
  [[ "$curr_total" -gt 0 ]] && curr_cfr=$(echo "$curr_failures $curr_total" | awk '{printf "%.0f", ($1 / $2) * 100}')

  # Only a rise of > 5 percentage points is treated as a regression.
  local cfr_diff=$((curr_cfr - prev_cfr))
  [[ "$cfr_diff" -le 5 ]] && return 0

  local evidence
  evidence=$(jq -n --argjson prev "$prev_cfr" --argjson curr "$curr_cfr" --argjson diff "$cfr_diff" \
    '{prev_cfr_pct:$prev, curr_cfr_pct:$curr, increase_pct:$diff}')

  _build_candidate \
    "dora-cfr-regression" "dora" "dora_regression" \
    "DORA regression: CFR increased ${cfr_diff}pp" \
    "Change failure rate: ${prev_cfr}% -> ${curr_cfr}% (7-day window)" \
    45 "0.80" "dora:cfr_regression" "$evidence"
}
|
|
377
|
+
|
|
378
|
+
# Run the architecture enforcer (when the script and a learned model both
# exist) and emit one rollup candidate if it reports violations.
signals_collect_architecture() {
  local arch_script="${SCRIPT_DIR:-}/sw-architecture-enforcer.sh"
  [[ ! -f "$arch_script" ]] && return 0

  # Without a learned model on disk there is nothing to check against.
  local arch_model="${HOME}/.shipwright/memory/architecture.json"
  [[ ! -f "$arch_model" ]] && return 0

  local check_report
  check_report=$(bash "$arch_script" check --json 2>/dev/null || echo '{"violations":0}')
  local violation_count
  violation_count=$(echo "$check_report" | jq '.violations // 0' 2>/dev/null || echo "0")
  [[ "${violation_count:-0}" -eq 0 ]] && return 0

  _build_candidate \
    "arch-drift-${violation_count}" "architecture" "architecture_drift" \
    "Architecture drift: ${violation_count} violation(s)" \
    "Architecture enforcer found ${violation_count} violation(s)" \
    50 "0.75" "architecture:drift"
}
|
|
397
|
+
|
|
398
|
+
# Surface high-churn hotspots (churn_score > 80) from the repo-local
# intelligence cache as a single refactoring candidate.
signals_collect_intelligence() {
  local cache_file=".claude/intelligence-cache.json"
  [[ ! -f "$cache_file" ]] && return 0

  local hotspot_list hotspot_count
  hotspot_list=$(jq -c '.hotspots // [] | [.[] | select(.churn_score > 80)]' "$cache_file" 2>/dev/null || echo '[]')
  hotspot_count=$(echo "$hotspot_list" | jq 'length' 2>/dev/null || echo "0")
  [[ "${hotspot_count:-0}" -eq 0 ]] && return 0

  _build_candidate \
    "intel-hotspots-${hotspot_count}" "intelligence" "refactor_hotspot" \
    "Refactor ${hotspot_count} high-churn hotspot(s)" \
    "Intelligence cache shows ${hotspot_count} file(s) with churn score > 80" \
    40 "0.70" "intelligence:hotspots"
}
|
|
415
|
+
|
|
416
|
+
# Run repo-local collector scripts (scripts/signals/*.sh). Each executable
# collector may print newline-delimited JSON candidates; lines that fail to
# parse as JSON are silently dropped.
signals_collect_external() {
  local collectors_dir="${_REPO_DIR:-$(git rev-parse --show-toplevel 2>/dev/null || echo '.')}/scripts/signals"
  [[ ! -d "$collectors_dir" ]] && return 0

  local collector
  while IFS= read -r collector; do
    # Non-executable scripts are treated as disabled.
    [[ ! -x "$collector" ]] && continue
    local output
    output=$(bash "$collector" 2>/dev/null || true)
    [[ -z "$output" ]] && continue
    # Forward only lines that are valid JSON.
    local line
    while IFS= read -r line; do
      if echo "$line" | jq empty 2>/dev/null; then
        echo "$line"
      fi
    done <<< "$output"
  done < <(find "$collectors_dir" -maxdepth 1 -name "*.sh" -type f 2>/dev/null | sort)
}
|
|
431
|
+
|
|
432
|
+
# ─── Pending signal file (for patrol integration) ────────────────────────────
|
|
433
|
+
|
|
434
|
+
# Print externally queued candidates (one JSON object per line), if any.
signals_read_pending() {
  if [[ -f "$SIGNALS_PENDING_FILE" ]]; then
    cat "$SIGNALS_PENDING_FILE"
  fi
}
|
|
438
|
+
|
|
439
|
+
# Truncate the pending-signals queue. No-op when the file does not exist.
# BUGFIX: the old one-liner `[[ -f … ]] && : > …` returned 1 when the file
# was absent, which would abort `set -e` callers; always return 0 now.
signals_clear_pending() {
  if [[ -f "$SIGNALS_PENDING_FILE" ]]; then
    : > "$SIGNALS_PENDING_FILE"
  fi
  return 0
}
|
|
442
|
+
|
|
443
|
+
# ─── Orchestrator ─────────────────────────────────────────────────────────────
|
|
444
|
+
|
|
445
|
+
# Run every collector plus the externally queued pending signals, and print
# the merged result as a single JSON array on stdout. Each collector emits
# zero or more JSON objects; jq -s slurps them into one array. Any jq
# failure (e.g. a collector emitting malformed JSON) degrades to "[]".
signals_collect_all() {
  _ensure_signals_dir

  {
    signals_collect_security
    signals_collect_deps
    signals_collect_coverage
    signals_collect_docs
    signals_collect_dead_code
    signals_collect_performance
    signals_collect_failures
    signals_collect_dora
    signals_collect_architecture
    signals_collect_intelligence
    signals_collect_external
    signals_read_pending
  } | jq -s '.' 2>/dev/null || echo '[]'
}
|
|
File without changes
|
package/scripts/lib/helpers.sh
CHANGED
|
@@ -89,17 +89,31 @@ emit_event() {
|
|
|
89
89
|
local _lock_file="${EVENTS_FILE}.lock"
|
|
90
90
|
(
|
|
91
91
|
if command -v flock >/dev/null 2>&1; then
|
|
92
|
-
flock -w 2 200 2>/dev/null
|
|
92
|
+
if ! flock -w 2 200 2>/dev/null; then
|
|
93
|
+
echo "WARN: emit_event lock timeout — concurrent write possible" >&2
|
|
94
|
+
fi
|
|
93
95
|
fi
|
|
94
96
|
echo "$_event_line" >> "$EVENTS_FILE"
|
|
95
97
|
) 200>"$_lock_file"
|
|
96
98
|
|
|
97
|
-
#
|
|
98
|
-
|
|
99
|
+
# Schema validation — auto-detect config repo from BASH_SOURCE location
|
|
100
|
+
local _schema_dir="${_CONFIG_REPO_DIR:-}"
|
|
101
|
+
if [[ -z "$_schema_dir" ]]; then
|
|
102
|
+
local _helpers_dir
|
|
103
|
+
_helpers_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" 2>/dev/null && pwd)" || true
|
|
104
|
+
if [[ -n "$_helpers_dir" && -f "${_helpers_dir}/../../config/event-schema.json" ]]; then
|
|
105
|
+
_schema_dir="$(cd "${_helpers_dir}/../.." && pwd)"
|
|
106
|
+
fi
|
|
107
|
+
fi
|
|
108
|
+
if [[ -n "$_schema_dir" && -f "${_schema_dir}/config/event-schema.json" ]]; then
|
|
99
109
|
local known_types
|
|
100
|
-
known_types=$(jq -r '.event_types | keys[]' "${
|
|
110
|
+
known_types=$(jq -r '.event_types | keys[]' "${_schema_dir}/config/event-schema.json" 2>/dev/null || true)
|
|
101
111
|
if [[ -n "$known_types" ]] && ! echo "$known_types" | grep -qx "$event_type"; then
|
|
102
|
-
|
|
112
|
+
# Warn-only: never reject events, just log to stderr on first unknown type per session
|
|
113
|
+
if [[ -z "${_SW_SCHEMA_WARNED:-}" ]]; then
|
|
114
|
+
echo "WARN: Unknown event type '$event_type' — update config/event-schema.json" >&2
|
|
115
|
+
_SW_SCHEMA_WARNED=1
|
|
116
|
+
fi
|
|
103
117
|
fi
|
|
104
118
|
fi
|
|
105
119
|
}
|
|
@@ -198,16 +212,3 @@ _sw_github_url() {
|
|
|
198
212
|
echo "https://github.com/${repo}"
|
|
199
213
|
}
|
|
200
214
|
|
|
201
|
-
# ─── Network Safe Wrappers (config-aware timeouts) ─────────────────────────────
|
|
202
|
-
# Use SHIPWRIGHT_* env vars if set; otherwise _config_get_int when config.sh is loaded
|
|
203
|
-
# Usage: _curl_safe [curl args...] | _gh_safe [gh args...]
|
|
204
|
-
_curl_safe() {
|
|
205
|
-
local ct="${SHIPWRIGHT_CONNECT_TIMEOUT:-$(_config_get_int "network.connect_timeout" 10 2>/dev/null || echo 10)}"
|
|
206
|
-
local mt="${SHIPWRIGHT_MAX_TIME:-$(_config_get_int "network.max_time" 60 2>/dev/null || echo 60)}"
|
|
207
|
-
curl --connect-timeout "$ct" --max-time "$mt" "$@"
|
|
208
|
-
}
|
|
209
|
-
|
|
210
|
-
_gh_safe() {
|
|
211
|
-
local gh_timeout="${SHIPWRIGHT_GH_TIMEOUT:-$(_config_get_int "network.gh_timeout" 30 2>/dev/null || echo 30)}"
|
|
212
|
-
GH_HTTP_TIMEOUT="$gh_timeout" _timeout "$gh_timeout" gh "$@"
|
|
213
|
-
}
|
|
File without changes
|
|
@@ -305,7 +305,7 @@ $content"
|
|
|
305
305
|
|
|
306
306
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
307
307
|
# 3. Adaptive Cycle Limits
|
|
308
|
-
# Replaces
|
|
308
|
+
# Replaces default max_cycles with convergence-driven limits.
|
|
309
309
|
# Takes the base limit, returns an adjusted limit based on:
|
|
310
310
|
# - Learned iteration model
|
|
311
311
|
# - Convergence/divergence signals
|
|
@@ -1148,7 +1148,18 @@ stage_compound_quality() {
|
|
|
1148
1148
|
_cq_real_changes=$(git diff --name-only "origin/${BASE_BRANCH:-main}...HEAD" \
|
|
1149
1149
|
-- . ':!.claude/loop-state.md' ':!.claude/pipeline-state.md' \
|
|
1150
1150
|
':!.claude/pipeline-artifacts/*' ':!**/progress.md' \
|
|
1151
|
-
':!**/error-summary.json' 2>/dev/null | wc -l
|
|
1151
|
+
':!**/error-summary.json' 2>/dev/null | wc -l || true)
|
|
1152
|
+
_cq_real_changes="${_cq_real_changes:-0}"
|
|
1153
|
+
_cq_real_changes=$(echo "$_cq_real_changes" | tr -d '[:space:]')
|
|
1154
|
+
[[ -z "$_cq_real_changes" ]] && _cq_real_changes=0
|
|
1155
|
+
# Fallback: if no remote, compare against first commit
|
|
1156
|
+
if [[ "$_cq_real_changes" -eq 0 ]] 2>/dev/null; then
|
|
1157
|
+
_cq_real_changes=$(git diff --name-only "$(git rev-list --max-parents=0 HEAD 2>/dev/null)...HEAD" \
|
|
1158
|
+
-- . ':!.claude/*' ':!**/progress.md' ':!**/error-summary.json' 2>/dev/null | wc -l || true)
|
|
1159
|
+
_cq_real_changes="${_cq_real_changes:-0}"
|
|
1160
|
+
_cq_real_changes=$(echo "$_cq_real_changes" | tr -d '[:space:]')
|
|
1161
|
+
[[ -z "$_cq_real_changes" ]] && _cq_real_changes=0
|
|
1162
|
+
fi
|
|
1152
1163
|
if [[ "${_cq_real_changes:-0}" -eq 0 ]]; then
|
|
1153
1164
|
error "Compound quality: no meaningful code changes found — failing quality gate"
|
|
1154
1165
|
return 1
|
|
@@ -1207,8 +1218,11 @@ stage_compound_quality() {
|
|
|
1207
1218
|
|
|
1208
1219
|
# 2. Test coverage check
|
|
1209
1220
|
local coverage_pct=0
|
|
1210
|
-
coverage_pct=$(run_test_coverage_check 2>/dev/null) || coverage_pct=0
|
|
1221
|
+
coverage_pct=$(run_test_coverage_check 2>/dev/null | tr -d '[:space:][:cntrl:]') || coverage_pct=0
|
|
1211
1222
|
coverage_pct="${coverage_pct:-0}"
|
|
1223
|
+
# Sanitize: strip anything non-numeric (ANSI codes, whitespace, etc.)
|
|
1224
|
+
coverage_pct=$(echo "$coverage_pct" | sed 's/[^0-9]//g')
|
|
1225
|
+
[[ -z "$coverage_pct" ]] && coverage_pct=0
|
|
1212
1226
|
|
|
1213
1227
|
if [[ "$coverage_pct" != "skip" ]]; then
|
|
1214
1228
|
if [[ "$coverage_pct" -lt "${PIPELINE_COVERAGE_THRESHOLD:-60}" ]]; then
|
|
@@ -1254,7 +1268,9 @@ stage_compound_quality() {
|
|
|
1254
1268
|
fi
|
|
1255
1269
|
|
|
1256
1270
|
# Vitals-driven adaptive cycle limit (preferred)
|
|
1271
|
+
# Respect the template's max_cycles as a ceiling — vitals can only reduce, not inflate
|
|
1257
1272
|
local base_max_cycles="$max_cycles"
|
|
1273
|
+
local template_max_cycles="$max_cycles"
|
|
1258
1274
|
if type pipeline_adaptive_limit >/dev/null 2>&1; then
|
|
1259
1275
|
local _cq_vitals=""
|
|
1260
1276
|
if type pipeline_compute_vitals >/dev/null 2>&1; then
|
|
@@ -1263,7 +1279,10 @@ stage_compound_quality() {
|
|
|
1263
1279
|
local vitals_cq_limit
|
|
1264
1280
|
vitals_cq_limit=$(pipeline_adaptive_limit "compound_quality" "$_cq_vitals" 2>/dev/null) || true
|
|
1265
1281
|
if [[ -n "$vitals_cq_limit" && "$vitals_cq_limit" =~ ^[0-9]+$ && "$vitals_cq_limit" -gt 0 ]]; then
|
|
1266
|
-
|
|
1282
|
+
# Cap at template max — don't let vitals override the pipeline template's intent
|
|
1283
|
+
if [[ "$vitals_cq_limit" -le "$template_max_cycles" ]]; then
|
|
1284
|
+
max_cycles="$vitals_cq_limit"
|
|
1285
|
+
fi
|
|
1267
1286
|
if [[ "$max_cycles" != "$base_max_cycles" ]]; then
|
|
1268
1287
|
info "Vitals-driven cycles: ${base_max_cycles} → ${max_cycles} (compound_quality)"
|
|
1269
1288
|
fi
|
|
@@ -141,7 +141,9 @@ quality_check_bundle_size() {
|
|
|
141
141
|
return 1
|
|
142
142
|
fi
|
|
143
143
|
else
|
|
144
|
-
# Fallback: legacy memory baseline
|
|
144
|
+
# Fallback: legacy memory baseline (not enough history for statistical check)
|
|
145
|
+
local bundle_growth_limit
|
|
146
|
+
bundle_growth_limit=$(_config_get_int "quality.bundle_growth_legacy_pct" 20 2>/dev/null || echo 20)
|
|
145
147
|
local baseline_size=""
|
|
146
148
|
if [[ -x "$SCRIPT_DIR/sw-memory.sh" ]]; then
|
|
147
149
|
baseline_size=$(bash "$SCRIPT_DIR/sw-memory.sh" get "bundle_size_kb" 2>/dev/null) || true
|
|
@@ -150,7 +152,7 @@ quality_check_bundle_size() {
|
|
|
150
152
|
local growth_pct
|
|
151
153
|
growth_pct=$(awk -v cur="$bundle_size" -v base="$baseline_size" 'BEGIN{printf "%d", ((cur - base) / base) * 100}')
|
|
152
154
|
echo "Baseline: ${baseline_size}KB | Growth: ${growth_pct}%" >> "$metrics_log"
|
|
153
|
-
if [[ "$growth_pct" -gt
|
|
155
|
+
if [[ "$growth_pct" -gt "$bundle_growth_limit" ]]; then
|
|
154
156
|
warn "Bundle size grew ${growth_pct}% (${baseline_size}KB → ${bundle_size}KB)"
|
|
155
157
|
return 1
|
|
156
158
|
fi
|
|
@@ -299,7 +301,9 @@ $tail_output" < /dev/null 2>/dev/null | grep -oE '^[0-9.]+$' | head -1 || true)
|
|
|
299
301
|
return 1
|
|
300
302
|
fi
|
|
301
303
|
else
|
|
302
|
-
# Fallback: legacy memory baseline
|
|
304
|
+
# Fallback: legacy memory baseline (not enough history for statistical check)
|
|
305
|
+
local perf_regression_limit
|
|
306
|
+
perf_regression_limit=$(_config_get_int "quality.perf_regression_legacy_pct" 30 2>/dev/null || echo 30)
|
|
303
307
|
local baseline_dur=""
|
|
304
308
|
if [[ -x "$SCRIPT_DIR/sw-memory.sh" ]]; then
|
|
305
309
|
baseline_dur=$(bash "$SCRIPT_DIR/sw-memory.sh" get "test_duration_s" 2>/dev/null) || true
|
|
@@ -308,7 +312,7 @@ $tail_output" < /dev/null 2>/dev/null | grep -oE '^[0-9.]+$' | head -1 || true)
|
|
|
308
312
|
local slowdown_pct
|
|
309
313
|
slowdown_pct=$(awk -v cur="$duration_ms" -v base="$baseline_dur" 'BEGIN{printf "%d", ((cur - base) / base) * 100}')
|
|
310
314
|
echo "Baseline: ${baseline_dur}s | Slowdown: ${slowdown_pct}%" >> "$metrics_log"
|
|
311
|
-
if [[ "$slowdown_pct" -gt
|
|
315
|
+
if [[ "$slowdown_pct" -gt "$perf_regression_limit" ]]; then
|
|
312
316
|
warn "Tests ${slowdown_pct}% slower (${baseline_dur}s → ${duration_ms}s)"
|
|
313
317
|
return 1
|
|
314
318
|
fi
|
|
@@ -441,7 +445,7 @@ ${spec_git_diff}" --model haiku < /dev/null 2>/dev/null || true)
|
|
|
441
445
|
if [[ -n "$removed_endpoints" || -n "$param_changes" ]]; then
|
|
442
446
|
local issue_count=0
|
|
443
447
|
[[ -n "$removed_endpoints" ]] && issue_count=$((issue_count + $(echo "$removed_endpoints" | wc -l | xargs)))
|
|
444
|
-
[[ -n "$param_changes" ]] && issue_count=$((issue_count + $(echo "$param_changes" | grep -c '.' 2>/dev/null ||
|
|
448
|
+
[[ -n "$param_changes" ]] && issue_count=$((issue_count + $(echo "$param_changes" | grep -c '.' 2>/dev/null || true)))
|
|
445
449
|
warn "API breaking changes: ${issue_count} issue(s) found"
|
|
446
450
|
return 1
|
|
447
451
|
fi
|
|
@@ -990,7 +994,8 @@ run_atomic_write_check() {
|
|
|
990
994
|
|
|
991
995
|
# Check for direct redirection writes (> file) in state/config paths
|
|
992
996
|
local bad_writes
|
|
993
|
-
bad_writes=$(git show "HEAD:$filepath" 2>/dev/null | grep -c 'echo.*>' 2>/dev/null ||
|
|
997
|
+
bad_writes=$(git show "HEAD:$filepath" 2>/dev/null | grep -c 'echo.*>' 2>/dev/null || true)
|
|
998
|
+
bad_writes="${bad_writes:-0}"
|
|
994
999
|
|
|
995
1000
|
if [[ "$bad_writes" -gt 0 ]]; then
|
|
996
1001
|
violations=$((violations + bad_writes))
|
|
File without changes
|