all-for-claudecode 2.5.0 → 2.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +2 -2
- package/.claude-plugin/plugin.json +4 -2
- package/README.md +15 -3
- package/agents/afc-architect.md +1 -1
- package/agents/afc-security.md +1 -1
- package/commands/analyze.md +1 -1
- package/commands/architect.md +1 -1
- package/commands/auto.md +2 -2
- package/commands/checkpoint.md +1 -1
- package/commands/clarify.md +1 -1
- package/commands/clean.md +126 -0
- package/commands/consult.md +1 -1
- package/commands/debug.md +1 -1
- package/commands/doctor.md +64 -23
- package/commands/ideate.md +1 -1
- package/commands/implement.md +1 -1
- package/commands/init.md +10 -6
- package/commands/launch.md +1 -1
- package/commands/plan.md +1 -1
- package/commands/pr-comment.md +1 -1
- package/commands/principles.md +1 -1
- package/commands/qa.md +191 -0
- package/commands/release-notes.md +1 -1
- package/commands/research.md +1 -1
- package/commands/resume.md +2 -2
- package/commands/review.md +1 -1
- package/commands/security.md +1 -1
- package/commands/spec.md +1 -1
- package/commands/tasks.md +1 -1
- package/commands/test.md +1 -1
- package/commands/triage.md +1 -1
- package/commands/validate.md +1 -1
- package/docs/phase-gate-protocol.md +1 -1
- package/hooks/hooks.json +1 -0
- package/package.json +5 -3
- package/schemas/hooks.schema.json +4 -0
- package/schemas/plugin.schema.json +5 -1
- package/scripts/afc-bash-guard.sh +3 -3
- package/scripts/afc-config-change.sh +8 -0
- package/scripts/afc-consistency-check.sh +58 -19
- package/scripts/afc-dag-validate.sh +1 -1
- package/scripts/afc-doctor.sh +445 -0
- package/scripts/afc-failure-hint.sh +24 -2
- package/scripts/afc-qa-audit.sh +536 -0
- package/scripts/afc-state.sh +3 -3
- package/scripts/afc-sync-cache.sh +49 -0
- package/scripts/afc-triage.sh +14 -3
- package/scripts/afc-user-prompt-submit.sh +98 -13
- package/scripts/pre-compact-checkpoint.sh +2 -2
- package/scripts/session-start-context.sh +39 -10
- package/scripts/track-afc-changes.sh +3 -3
|
@@ -0,0 +1,536 @@
|
|
|
1
|
+
#!/bin/bash
set -euo pipefail

# afc-qa-audit.sh — QA audit: detect quality gaps between structure and runtime behavior
# Checks: hook I/O safety, test strength, UX completeness, build/deploy integrity
# Run as part of: npm run qa

# No-op cleanup keeps the EXIT trap slot reserved for future teardown.
# shellcheck disable=SC2329
cleanup() { :; }
trap cleanup EXIT

# Resolve the directory this script lives in, then default the audit target
# to the repository root (one level up) unless a path is passed as $1.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="${1:-$(cd "$SCRIPT_DIR/.." && pwd)}"

# Global tallies, updated by the fail/warn/ok helpers below.
ERRORS=0
WARNINGS=0
PASSES=0

# --- Helpers ---
|
|
21
|
+
|
|
22
|
+
# fail MSG — report a blocking finding on stderr and bump the error tally.
fail() {
  ERRORS=$((ERRORS + 1))
  printf " ✗ %s\n" "$1" >&2
}

# warn MSG — report a non-blocking finding on stdout and bump the warning tally.
warn() {
  WARNINGS=$((WARNINGS + 1))
  printf " ⚠ %s\n" "$1"
}

# ok MSG — report a passing check on stdout and bump the pass tally.
ok() {
  PASSES=$((PASSES + 1))
  printf " ✓ %s\n" "$1"
}
|
|
36
|
+
|
|
37
|
+
# --- Category A: Hook I/O Safety ---

# Run every Category A sub-check under a single banner.
check_a_hook_io_safety() {
  printf "\nCategory A: Hook I/O Safety\n"
  local sub
  for sub in check_a1_stdin_consumption check_a2_stdout_json_templates check_a3_failure_hint_coverage; do
    "$sub"
  done
}
|
|
45
|
+
|
|
46
|
+
# A1: stdin must be consumed before any early exit in command hooks
#
# Rationale (per the message below): a hook that exits before draining stdin
# risks SIGPIPE in the process feeding it. This is a heuristic, line-number
# based check: it compares the first stdin-consuming line against the first
# exit/return line in each hook script listed in hooks.json.
check_a1_stdin_consumption() {
  local hooks_file="$PROJECT_DIR/hooks/hooks.json"
  if [ ! -f "$hooks_file" ]; then
    warn "hooks.json not found, skipping A1"
    return
  fi

  # Extract command hook scripts from hooks.json
  # (simple text scrape; does not distinguish hook event types)
  local scripts
  scripts=$(grep -oE 'scripts/[^"]+\.sh' "$hooks_file" 2>/dev/null | sort -u || true)

  local count=0
  local issues=0
  # Intentionally unquoted: $scripts is a whitespace-separated list of
  # repo-relative paths (no spaces expected in script names).
  for script_path in $scripts; do
    local full_path="$PROJECT_DIR/$script_path"
    [ -f "$full_path" ] || continue
    count=$((count + 1))

    # Check: does the script consume stdin (cat, INPUT=$(cat), cat > /dev/null, read)?
    if ! grep -qE '^\s*(INPUT=\$\(cat|cat\b|cat >|read )' "$full_path" 2>/dev/null; then
      fail "stdin not consumed: $script_path (SIGPIPE risk)"
      issues=$((issues + 1))
      continue
    fi

    # Check: is there an exit/return before stdin consumption?
    # 999 is the "not found" sentinel so comparisons below stay false.
    local stdin_line exit_line
    stdin_line=$(grep -nE '^\s*(INPUT=\$\(cat|cat\b|cat >)' "$full_path" 2>/dev/null | head -1 | cut -d: -f1 || echo 999)
    exit_line=$(grep -nE '^\s*(exit [0-9]|return [0-9])' "$full_path" 2>/dev/null | head -1 | cut -d: -f1 || echo 999)

    # Skip if exit is in cleanup() function (lines before trap)
    # Heuristic: assumes the cleanup function is defined above its `trap`
    # registration — TODO confirm this holds for all hook scripts.
    local trap_line
    trap_line=$(grep -nE '^\s*trap ' "$full_path" 2>/dev/null | head -1 | cut -d: -f1 || echo 0)
    # `2>/dev/null` on the tests guards against non-numeric values leaking
    # out of the pipelines above; a malformed value makes the test false.
    if [ "$exit_line" -lt "$trap_line" ] 2>/dev/null; then
      # exit is inside cleanup function, not a real early exit
      exit_line=999
    fi

    if [ "$exit_line" -lt "$stdin_line" ] 2>/dev/null; then
      fail "exit before stdin consumption: $script_path (line $exit_line exits before stdin at line $stdin_line)"
      issues=$((issues + 1))
    fi
  done

  if [ "$issues" -eq 0 ]; then
    ok "stdin consumption: $count hook scripts, all consume before exit"
  fi
}
|
|
95
|
+
|
|
96
|
+
# A2: stdout JSON templates with hookSpecificOutput must be valid JSON
#
# Scrapes single-quoted printf format strings that mention hookSpecificOutput,
# substitutes dummy values for %s, and pipes the result through jq. Without jq
# installed the templates are counted but NOT validated (no error raised).
check_a2_stdout_json_templates() {
  local scripts_dir="$PROJECT_DIR/scripts"
  [ -d "$scripts_dir" ] || return

  local count=0
  local issues=0

  for script in "$scripts_dir"/*.sh; do
    [ -f "$script" ] || continue
    local scriptname
    scriptname=$(basename "$script")

    # Extract printf patterns with hookSpecificOutput
    # Only matches templates wholly inside one pair of single quotes;
    # multi-line or double-quoted templates are not picked up.
    local templates
    templates=$(grep -oE "printf '[^']*hookSpecificOutput[^']*'" "$script" 2>/dev/null || true)
    [ -z "$templates" ] && continue

    while IFS= read -r tmpl; do
      count=$((count + 1))
      # Extract the format string (between single quotes)
      local fmt
      fmt=$(printf '%s' "$tmpl" | sed "s/^printf '//;s/'$//" || true)
      # Replace %s with dummy string; strip literal \n escapes so the
      # template collapses to a single JSON line for validation.
      local json
      json=$(printf '%s' "$fmt" | sed 's/%s/dummy/g; s/\\n//g' || true)
      # Validate JSON
      if command -v jq >/dev/null 2>&1; then
        if ! printf '%s' "$json" | jq . >/dev/null 2>&1; then
          fail "invalid JSON template in $scriptname: $fmt"
          issues=$((issues + 1))
        fi
      fi
    done <<< "$templates"
  done

  if [ "$issues" -eq 0 ]; then
    ok "stdout JSON templates: $count valid"
  fi
}
|
|
136
|
+
|
|
137
|
+
# A3: failure-hint case patterns vs spec test coverage
#
# Compares the number of case-branch patterns in afc-failure-hint.sh against
# the number of Context blocks in its spec; warns when tests lag behind.
check_a3_failure_hint_coverage() {
  local hint_script="$PROJECT_DIR/scripts/afc-failure-hint.sh"
  local hint_spec="$PROJECT_DIR/spec/afc-failure-hint_spec.sh"

  if [ ! -f "$hint_script" ] || [ ! -f "$hint_spec" ]; then
    warn "failure-hint script or spec not found, skipping A3"
    return
  fi

  # Count case patterns (excluding *) and wildcard)
  # FIX: `grep -c` prints "0" AND exits 1 on zero matches, so the previous
  # `|| echo 0` fallback captured "0\n0" and the arithmetic below aborted
  # under `set -e`. Capture with `|| true` and default empty output instead.
  local pattern_count
  pattern_count=$(grep -cE '^\s+\*.*\*\)' "$hint_script" 2>/dev/null || true)
  pattern_count=${pattern_count:-0}
  # Subtract the catch-all *)
  local catchall
  catchall=$(grep -cE '^\s+\*\)' "$hint_script" 2>/dev/null || true)
  catchall=${catchall:-0}
  pattern_count=$((pattern_count - catchall))

  # Count test contexts in spec
  local test_count
  test_count=$(grep -cE '^\s+Context ' "$hint_spec" 2>/dev/null || true)
  test_count=${test_count:-0}

  if [ "$test_count" -ge "$pattern_count" ]; then
    ok "failure-hint patterns: $pattern_count patterns, $test_count tests"
  else
    warn "failure-hint coverage gap: $pattern_count patterns but only $test_count tests"
  fi
}
|
|
165
|
+
|
|
166
|
+
# --- Category B: Test Strength ---

# Run every Category B sub-check under a single banner.
check_b_test_strength() {
  printf "\nCategory B: Test Strength\n"
  local sub
  for sub in check_b1_assertion_density check_b2_state_mutation_tests check_b3_case_pattern_coverage check_b4_empty_stdin_edge; do
    "$sub"
  done
}
|
|
175
|
+
|
|
176
|
+
# B1: assertion density (The lines per It block)
#
# For each ShellSpec file, computes assertions-per-test (`The` lines / `It`
# lines) with one decimal of fixed-point precision. Ratio < 1.0 is an error,
# 1.0–1.4 is collected into one aggregate warning.
check_b1_assertion_density() {
  local spec_dir="$PROJECT_DIR/spec"
  [ -d "$spec_dir" ] || return

  local low_density=""
  local checked=0

  for spec in "$spec_dir"/*_spec.sh; do
    [ -f "$spec" ] || continue
    local specname
    specname=$(basename "$spec")
    checked=$((checked + 1))

    local it_count assertion_count
    # FIX: `grep -c` prints "0" AND exits 1 on zero matches, so the previous
    # `|| echo 0` fallback captured "0\n0" and broke both the -eq test and
    # the ratio arithmetic below under `set -e`. Capture with `|| true` and
    # default empty output instead.
    it_count=$(grep -cE '^\s+It ' "$spec" 2>/dev/null || true)
    it_count=${it_count:-0}
    assertion_count=$(grep -cE '^\s+The ' "$spec" 2>/dev/null || true)
    assertion_count=${assertion_count:-0}

    # A spec with no It blocks has no density to measure.
    [ "$it_count" -eq 0 ] && continue

    # Calculate ratio (integer arithmetic: multiply by 10 for one decimal)
    local ratio_x10
    ratio_x10=$(( (assertion_count * 10) / it_count ))

    if [ "$ratio_x10" -lt 10 ]; then
      fail "low assertion density: $specname (${assertion_count}/${it_count} = $(( ratio_x10 / 10 )).$(( ratio_x10 % 10 )))"
    elif [ "$ratio_x10" -lt 15 ]; then
      low_density="${low_density:+$low_density, }$specname ($(( ratio_x10 / 10 )).$(( ratio_x10 % 10 )))"
    fi
  done

  if [ -n "$low_density" ]; then
    warn "low assertion density: $low_density"
  elif [ "$checked" -gt 0 ]; then
    ok "assertion density: $checked specs checked, all >= 1.5"
  fi
}
|
|
213
|
+
|
|
214
|
+
# B2: state mutation tests must assert file contents
#
# Any spec that touches pipeline state (state writes, fixtures, or the state
# file itself) must also assert on resulting file contents, otherwise the
# mutation is untested.
check_b2_state_mutation_tests() {
  local spec_dir="$PROJECT_DIR/spec"
  [ -d "$spec_dir" ] || return

  local issues=0
  local checked=0
  local spec_file

  for spec_file in "$spec_dir"/*_spec.sh; do
    [ -f "$spec_file" ] || continue

    # Only specs that exercise state-changing behavior are in scope.
    grep -qE 'afc_state_write|setup_state_fixture|afc-state\.json' "$spec_file" 2>/dev/null || continue
    checked=$((checked + 1))

    # Passing specs carry at least one content-level assertion.
    grep -qE 'contents of file|should include|should eq' "$spec_file" 2>/dev/null && continue

    fail "state mutation without content assertion: $(basename "$spec_file")"
    issues=$((issues + 1))
  done

  if [ "$issues" -eq 0 ] && [ "$checked" -gt 0 ]; then
    ok "state mutation tests: $checked specs, all have content assertions"
  fi
}
|
|
240
|
+
|
|
241
|
+
# B3: case pattern test coverage in scripts
#
# For scripts that dispatch on hook variables via `case`, the matching spec
# should provide at least as many Data inputs as there are case branches.
check_b3_case_pattern_coverage() {
  local scripts_dir="$PROJECT_DIR/scripts"
  local spec_dir="$PROJECT_DIR/spec"
  [ -d "$scripts_dir" ] && [ -d "$spec_dir" ] || return

  local issues=0
  local checked=0

  for script in "$scripts_dir"/afc-*.sh; do
    [ -f "$script" ] || continue
    local scriptname
    scriptname=$(basename "$script" .sh)

    # Only check scripts with case "$ERROR"/"$TOOL_NAME"/"$NOTIFICATION_TYPE"
    # dispatch (hook error dispatch).
    # FIX: the previous pattern 'case "\$ERROR|\$TOOL_NAME|\$NOTIFICATION_TYPE"'
    # let the ERE alternation escape the `case "` prefix, so any script merely
    # mentioning $TOOL_NAME anywhere was audited. Group the alternation.
    if ! grep -qE 'case "\$(ERROR|TOOL_NAME|NOTIFICATION_TYPE)' "$script" 2>/dev/null; then
      continue
    fi

    # Count case branch patterns (exclude catch-all *)
    local case_count
    case_count=$(grep -cE '^\s+\*[^)]+\)' "$script" 2>/dev/null | tr -d '[:space:]' || true)
    case_count="${case_count:-0}"
    [ "$case_count" -lt 2 ] && continue # Skip trivial case blocks

    local spec_file="$spec_dir/${scriptname}_spec.sh"
    [ -f "$spec_file" ] || continue
    checked=$((checked + 1))

    # Count Data lines in spec (each represents a test case)
    local data_count
    data_count=$(grep -cE "^\s+Data " "$spec_file" 2>/dev/null | tr -d '[:space:]' || true)
    data_count="${data_count:-0}"

    if [ "$data_count" -lt "$case_count" ]; then
      warn "case coverage gap: $scriptname ($case_count branches, $data_count test inputs)"
      # FIX: issues was never incremented, so the "ok" summary below printed
      # even when coverage-gap warnings were emitted.
      issues=$((issues + 1))
    fi
  done

  if [ "$issues" -eq 0 ] && [ "$checked" -gt 0 ]; then
    ok "case pattern coverage: $checked scripts checked"
  fi
}
|
|
284
|
+
|
|
285
|
+
# B4: empty stdin edge case in hook specs
#
# Every hook script wired into hooks.json should have a spec that exercises
# empty stdin: either a ShellSpec `Data ''` fixture or a test mentioning
# "empty stdin" verbatim.
check_b4_empty_stdin_edge() {
  local hooks_file="$PROJECT_DIR/hooks/hooks.json"
  local spec_dir="$PROJECT_DIR/spec"
  [ -f "$hooks_file" ] && [ -d "$spec_dir" ] || return

  # Get hook scripts that receive stdin (command hooks)
  local scripts
  scripts=$(grep -oE 'scripts/[^"]+\.sh' "$hooks_file" 2>/dev/null | sort -u || true)

  local count=0
  local missing=""

  # Intentionally unquoted: $scripts is a whitespace-separated path list.
  for script_path in $scripts; do
    local scriptname
    scriptname=$(basename "$script_path" .sh)
    local spec_file="$spec_dir/${scriptname}_spec.sh"
    [ -f "$spec_file" ] || continue
    count=$((count + 1))

    # Three alternative markers of an empty-stdin test. NOTE(review): the
    # first pattern matches `Data '` or `Data "` followed by a literal ' —
    # presumably intended to catch quote-variant empty Data blocks; confirm
    # against the spec files, as the second pattern already covers `Data ''`.
    if ! grep -qE "Data ['\"]'" "$spec_file" 2>/dev/null && \
       ! grep -qE "Data ''" "$spec_file" 2>/dev/null && \
       ! grep -qE 'empty stdin' "$spec_file" 2>/dev/null; then
      missing="${missing:+$missing, }$scriptname"
    fi
  done

  if [ -n "$missing" ]; then
    warn "missing empty stdin test: $missing"
  elif [ "$count" -gt 0 ]; then
    ok "empty stdin edge cases: $count hook specs covered"
  fi
}
|
|
318
|
+
|
|
319
|
+
# --- Category C: UX Completeness ---

# Run every Category C sub-check under a single banner.
check_c_ux_completeness() {
  printf "\nCategory C: UX Completeness\n"
  local sub
  for sub in check_c1_error_pattern_coverage check_c2_hook_response_consistency; do
    "$sub"
  done
}
|
|
326
|
+
|
|
327
|
+
# C1: failure-hint covers common error classes
#
# Verifies afc-failure-hint.sh mentions each of a fixed list of common
# error-class markers; warns listing whichever are absent.
check_c1_error_pattern_coverage() {
  local hint_script="$PROJECT_DIR/scripts/afc-failure-hint.sh"
  [ -f "$hint_script" ] || return

  # Essential error classes that should be handled
  local -a required_patterns=("EACCES" "ENOENT" "ECONNREFUSED" "command not found" "ENOMEM" "ETIMEDOUT" "ENOSPC" "syntax error" "FAILED")
  local missing=""
  local covered=0
  local pattern

  for pattern in "${required_patterns[@]}"; do
    # -F: the markers are fixed strings, not regexes.
    if ! grep -qF "$pattern" "$hint_script" 2>/dev/null; then
      missing="${missing:+$missing, }$pattern"
      continue
    fi
    covered=$((covered + 1))
  done

  if [ -z "$missing" ]; then
    ok "error pattern coverage: ${#required_patterns[@]} common patterns covered"
  else
    warn "missing error patterns in failure-hint: $missing"
  fi
}
|
|
351
|
+
|
|
352
|
+
# C2: same hook event type scripts use consistent JSON output format
#
# PreToolUse hooks emitting hookSpecificOutput must use the
# permissionDecision field; PostToolUse/PostToolUseFailure hooks must use
# additionalContext.
check_c2_hook_response_consistency() {
  local hooks_file="$PROJECT_DIR/hooks/hooks.json"
  [ -f "$hooks_file" ] || return

  local issues=0

  # Helper: extract script paths for a given hook event using jq or grep fallback
  _scripts_for_event() {
    local event="$1"
    if command -v jq >/dev/null 2>&1; then
      jq -r ".hooks.\"$event\"[]?.hooks[]? | select(.type==\"command\") | .command" "$hooks_file" 2>/dev/null \
        | grep -oE 'scripts/[^"]+\.sh' || true
    else
      # Fallback: look for script paths near the event key
      # This is best-effort for jq-less environments
      # NOTE(review): this returns EVERY script path in hooks.json regardless
      # of event, so without jq a script may be checked against the wrong
      # format and produce false failures — confirm acceptable.
      grep -oE 'scripts/[^"]+\.sh' "$hooks_file" 2>/dev/null || true
    fi
  }

  # Check PreToolUse hooks all use permissionDecision format
  local pretool_scripts
  pretool_scripts=$(_scripts_for_event "PreToolUse")

  # Intentionally unquoted: word-split the newline-separated path list.
  for script_path in $pretool_scripts; do
    local full_path="$PROJECT_DIR/$script_path"
    [ -f "$full_path" ] || continue
    if grep -q 'hookSpecificOutput' "$full_path" 2>/dev/null; then
      if ! grep -q 'permissionDecision' "$full_path" 2>/dev/null; then
        fail "PreToolUse hook $(basename "$script_path") uses wrong response format (expected permissionDecision)"
        issues=$((issues + 1))
      fi
    fi
  done

  # Check PostToolUse/PostToolUseFailure hooks all use additionalContext format
  local posttool_scripts
  posttool_scripts=$(_scripts_for_event "PostToolUse")
  posttool_scripts="$posttool_scripts
$(_scripts_for_event "PostToolUseFailure")"

  for script_path in $posttool_scripts; do
    # The concatenation above can yield blank entries; skip them.
    [ -z "$script_path" ] && continue
    local full_path="$PROJECT_DIR/$script_path"
    [ -f "$full_path" ] || continue
    if grep -q 'hookSpecificOutput' "$full_path" 2>/dev/null; then
      if ! grep -q 'additionalContext' "$full_path" 2>/dev/null; then
        fail "PostToolUse hook $(basename "$script_path") uses wrong response format (expected additionalContext)"
        issues=$((issues + 1))
      fi
    fi
  done

  if [ "$issues" -eq 0 ]; then
    ok "hook response consistency: PreToolUse/PostToolUse formats correct"
  fi
}
|
|
409
|
+
|
|
410
|
+
# --- Category D: Build/Deploy Integrity ---

# Run every Category D sub-check under a single banner.
check_d_build_deploy() {
  printf "\nCategory D: Build/Deploy Integrity\n"
  local sub
  for sub in check_d1_cache_divergence check_d2_script_permissions check_d3_zombie_state; do
    "$sub"
  done
}
|
|
418
|
+
|
|
419
|
+
# D1: source/cache file divergence (dev mode only)
#
# Compares the working tree against the installed plugin cache under
# ~/.claude/plugins/cache, and warns if files differ. Every early-return
# path reports ok — this check never blocks.
check_d1_cache_divergence() {
  # Only check in dev mode (when package.json name is "all-for-claudecode")
  local pkg="$PROJECT_DIR/package.json"
  [ -f "$pkg" ] || return

  # Read the package name, preferring jq; grep/sed fallback otherwise.
  local pkg_name
  if command -v jq >/dev/null 2>&1; then
    pkg_name=$(jq -r '.name // empty' "$pkg" 2>/dev/null || true)
  else
    pkg_name=$(grep -o '"name"[[:space:]]*:[[:space:]]*"[^"]*"' "$pkg" | sed 's/.*"name"[[:space:]]*:[[:space:]]*"//;s/"$//' 2>/dev/null || true)
  fi

  if [ "$pkg_name" != "all-for-claudecode" ]; then
    ok "cache check: skipped (not in dev mode)"
    return
  fi

  local cache_dir
  cache_dir="$HOME/.claude/plugins/cache/all-for-claudecode/afc"
  if [ ! -d "$cache_dir" ]; then
    ok "cache check: no cache directory found"
    return
  fi

  # Find the versioned cache directory
  # NOTE(review): picks the first subdirectory found, which with multiple
  # installed versions may not be the current one — confirm intended.
  local cache_version_dir
  cache_version_dir=$(find "$cache_dir" -maxdepth 1 -mindepth 1 -type d 2>/dev/null | head -1 || true)
  if [ -z "$cache_version_dir" ]; then
    ok "cache check: no versioned cache found"
    return
  fi

  local diverged=0
  local checked=0

  for subdir in commands scripts hooks; do
    local src_dir="$PROJECT_DIR/$subdir"
    local cache_subdir="$cache_version_dir/$subdir"
    [ -d "$src_dir" ] && [ -d "$cache_subdir" ] || continue

    # -rq: recursive, report names only; one line per differing/extra file.
    local diff_output
    diff_output=$(diff -rq "$src_dir" "$cache_subdir" 2>/dev/null || true)
    if [ -n "$diff_output" ]; then
      local diff_count
      diff_count=$(printf '%s\n' "$diff_output" | wc -l | tr -d ' ')
      diverged=$((diverged + diff_count))
      checked=$((checked + 1))
    fi
  done

  if [ "$diverged" -gt 0 ]; then
    warn "cache divergence: $diverged files differ (run npm run sync:cache)"
  else
    ok "cache sync: source and cache match"
  fi
}
|
|
476
|
+
|
|
477
|
+
# D2: script execution permissions
#
# Every shipped .sh under scripts/ must carry the execute bit.
check_d2_script_permissions() {
  local scripts_dir="$PROJECT_DIR/scripts"
  [ -d "$scripts_dir" ] || return

  local missing=""
  local count=0
  local script

  for script in "$scripts_dir"/*.sh; do
    [ -f "$script" ] || continue
    count=$((count + 1))
    [ -x "$script" ] && continue
    missing="${missing:+$missing, }$(basename "$script")"
  done

  if [ -n "$missing" ]; then
    warn "missing execute permission: $missing"
  elif [ "$count" -gt 0 ]; then
    ok "script permissions: $count scripts, all executable"
  fi
}
|
|
499
|
+
|
|
500
|
+
# D3: zombie state file detection
#
# A present .afc-state.json whose feature field is empty or null indicates a
# pipeline that died without cleaning up.
check_d3_zombie_state() {
  local state_file="$PROJECT_DIR/.claude/.afc-state.json"

  # No state file at all simply means no pipeline is running.
  if [ ! -f "$state_file" ]; then
    ok "no zombie state: state file absent (pipeline inactive)"
    return
  fi

  # Pull the feature field, preferring jq; grep/sed fallback otherwise.
  local feature
  if command -v jq >/dev/null 2>&1; then
    feature=$(jq -r '.feature // empty' "$state_file" 2>/dev/null || true)
  else
    feature=$(grep -o '"feature"[[:space:]]*:[[:space:]]*"[^"]*"' "$state_file" | sed 's/.*"feature"[[:space:]]*:[[:space:]]*"//;s/"$//' 2>/dev/null || true)
  fi

  case "$feature" in
    ""|null)
      fail "zombie state: .afc-state.json exists but feature is empty/null"
      ;;
    *)
      ok "active state: feature=$feature"
      ;;
  esac
}
|
|
521
|
+
|
|
522
|
+
# --- Main ---

printf "[afc:qa] Running QA audit...\n"

# Run each audit category in order; all of them update the shared tallies.
for category_check in check_a_hook_io_safety check_b_test_strength check_c_ux_completeness check_d_build_deploy; do
  "$category_check"
done

printf "\n[afc:qa] Done: %d passed, %d warnings, %d errors\n" "$PASSES" "$WARNINGS" "$ERRORS"

# Errors are blocking for the qa pipeline; warnings are not.
if [ "$ERRORS" -gt 0 ]; then
  exit 1
fi
exit 0
|
package/scripts/afc-state.sh
CHANGED
|
@@ -37,11 +37,11 @@ afc_is_ci_exempt() {
|
|
|
37
37
|
# Returns: 0 if active, 1 if not
|
|
38
38
|
afc_state_is_active() {
|
|
39
39
|
[ -f "$_AFC_STATE_FILE" ] && [ -s "$_AFC_STATE_FILE" ] || return 1
|
|
40
|
-
# Validate JSON structure — reject corrupt/truncated files
|
|
40
|
+
# Validate JSON structure — reject corrupt/truncated files and zombie states (null/empty feature)
|
|
41
41
|
if command -v jq >/dev/null 2>&1; then
|
|
42
|
-
jq -e '.feature // empty' "$_AFC_STATE_FILE" >/dev/null 2>&1 || return 1
|
|
42
|
+
jq -e '.feature // empty | select(length > 0)' "$_AFC_STATE_FILE" >/dev/null 2>&1 || return 1
|
|
43
43
|
else
|
|
44
|
-
grep -
|
|
44
|
+
grep -qE '"feature"[[:space:]]*:[[:space:]]*"[^"]' "$_AFC_STATE_FILE" 2>/dev/null || return 1
|
|
45
45
|
fi
|
|
46
46
|
}
|
|
47
47
|
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
#!/bin/bash
set -euo pipefail

# afc-sync-cache.sh — Sync source files to plugin cache directory
# Used during development to keep the cache in sync with source changes.

# No-op cleanup keeps the EXIT trap slot reserved for future teardown.
cleanup() { :; }
trap cleanup EXIT

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# FIX: rsync is required below; fail early with a clear message instead of
# dying mid-loop with a bare "command not found" under set -e.
if ! command -v rsync >/dev/null 2>&1; then
  printf 'Error: rsync is required but not installed\n' >&2
  exit 1
fi

# Read version from package.json (jq preferred; grep/sed fallback).
# FIX: use '.version // empty' so a missing field yields "" (caught below)
# rather than the literal string "null".
if command -v jq >/dev/null 2>&1; then
  VERSION=$(jq -r '.version // empty' "$PROJECT_ROOT/package.json")
else
  VERSION=$(grep -o '"version"[[:space:]]*:[[:space:]]*"[^"]*"' "$PROJECT_ROOT/package.json" | head -1 | sed 's/.*: *"//;s/"//')
fi

if [ -z "$VERSION" ]; then
  printf 'Error: could not read version from package.json\n' >&2
  exit 1
fi

CACHE_DIR="$HOME/.claude/plugins/cache/all-for-claudecode/afc/$VERSION"

if [ ! -d "$CACHE_DIR" ]; then
  printf 'Cache directory not found: %s\n' "$CACHE_DIR" >&2
  printf 'Plugin may not be installed yet. Install first: claude plugin install afc@all-for-claudecode\n' >&2
  exit 1
fi

# Sync directories and files
DIRS_TO_SYNC="commands agents scripts hooks docs schemas templates"
FILES_TO_SYNC="package.json"

for dir in $DIRS_TO_SYNC; do
  if [ -d "$PROJECT_ROOT/$dir" ]; then
    # --delete keeps the cache an exact mirror (stale files are removed)
    rsync -a --delete "$PROJECT_ROOT/$dir/" "$CACHE_DIR/$dir/"
  fi
done

for file in $FILES_TO_SYNC; do
  if [ -f "$PROJECT_ROOT/$file" ]; then
    cp "$PROJECT_ROOT/$file" "$CACHE_DIR/$file"
  fi
done

printf 'Synced source to cache: %s\n' "$CACHE_DIR"
|
package/scripts/afc-triage.sh
CHANGED
|
@@ -78,8 +78,13 @@ if [ ${#SPECIFIC_NUMBERS[@]} -gt 0 ]; then
|
|
|
78
78
|
if command -v jq >/dev/null 2>&1; then
|
|
79
79
|
PR_ITEMS=$(printf '%s\n' "$PR_ITEMS" | jq --argjson item "$pr_data" '. + [$item]')
|
|
80
80
|
else
|
|
81
|
-
# Fallback:
|
|
82
|
-
PR_ITEMS="
|
|
81
|
+
# Fallback: accumulate newline-delimited JSON objects
|
|
82
|
+
if [ "$PR_ITEMS" = "[]" ]; then
|
|
83
|
+
PR_ITEMS="$pr_data"
|
|
84
|
+
else
|
|
85
|
+
PR_ITEMS="$PR_ITEMS
|
|
86
|
+
$pr_data"
|
|
87
|
+
fi
|
|
83
88
|
fi
|
|
84
89
|
else
|
|
85
90
|
# Try as issue
|
|
@@ -90,7 +95,13 @@ if [ ${#SPECIFIC_NUMBERS[@]} -gt 0 ]; then
|
|
|
90
95
|
if command -v jq >/dev/null 2>&1; then
|
|
91
96
|
ISSUE_ITEMS=$(printf '%s\n' "$ISSUE_ITEMS" | jq --argjson item "$issue_data" '. + [$item]')
|
|
92
97
|
else
|
|
93
|
-
|
|
98
|
+
# Fallback: accumulate newline-delimited JSON objects
|
|
99
|
+
if [ "$ISSUE_ITEMS" = "[]" ]; then
|
|
100
|
+
ISSUE_ITEMS="$issue_data"
|
|
101
|
+
else
|
|
102
|
+
ISSUE_ITEMS="$ISSUE_ITEMS
|
|
103
|
+
$issue_data"
|
|
104
|
+
fi
|
|
94
105
|
fi
|
|
95
106
|
else
|
|
96
107
|
printf '[afc:triage] Warning: #%s not found as PR or issue\n' "$num" >&2
|