create-qa-architect 5.12.0 → 5.13.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/dependabot.yml +10 -30
- package/.github/workflows/claude-md-validation.yml +5 -7
- package/.github/workflows/dependabot-auto-merge.yml +1 -0
- package/.github/workflows/quality.yml +26 -12
- package/.github/workflows/release.yml +2 -1
- package/.github/workflows/stale-prs.yml +42 -0
- package/.github/workflows/weekly-gitleaks-verification.yml +6 -4
- package/LICENSE +3 -3
- package/README.md +19 -20
- package/config/quality-config.schema.json +1 -1
- package/docs/CI-COST-ANALYSIS.md +8 -8
- package/docs/DEPLOYMENT.md +1 -1
- package/docs/DEVELOPMENT-WORKFLOW.md +2 -2
- package/docs/TURBOREPO-SUPPORT.md +3 -3
- package/docs/dev_guide/CONVENTIONS.md +132 -0
- package/eslint.config.cjs +25 -0
- package/lib/blob-storage.js +57 -0
- package/lib/commands/analyze-ci.js +267 -27
- package/lib/commands/deps.js +5 -5
- package/lib/commands/license-commands.js +2 -2
- package/lib/commands/maturity-check.js +20 -2
- package/lib/dependency-monitoring-basic.js +4 -4
- package/lib/dependency-monitoring-premium.js +5 -5
- package/lib/license-validator.js +1 -1
- package/lib/licensing.js +3 -3
- package/lib/smart-strategy-generator.js +1 -1
- package/lib/validation/documentation.js +2 -0
- package/lib/workflow-config.js +106 -61
- package/package.json +51 -21
- package/scripts/deploy-consumers.sh +369 -0
- package/scripts/pattern-check.sh +607 -0
- package/scripts/run-semgrep.sh +244 -0
- package/scripts/smart-test-strategy.sh +1 -1
- package/setup.js +62 -32
- package/templates/CLAUDE_WORKFLOW_POLICY.md +3 -3
- package/templates/scripts/smart-test-strategy.sh +1 -1
- package/.github/workflows/auto-release.yml +0 -39
|
@@ -0,0 +1,607 @@
|
|
|
1
|
+
#!/bin/bash
|
|
2
|
+
# =============================================================================
|
|
3
|
+
# Pattern Check - Pre-commit Defensive Pattern Analysis
|
|
4
|
+
# =============================================================================
|
|
5
|
+
# Fast static analysis (<5s) to catch defensive coding violations BEFORE commit.
|
|
6
|
+
# Uses grep-based pattern matching for speed.
|
|
7
|
+
#
|
|
8
|
+
# This script implements Step 1.8 patterns from /bs:quality for pre-commit use.
|
|
9
|
+
#
|
|
10
|
+
# INSTALLATION IN OTHER REPOS:
|
|
11
|
+
# 1. Copy this script to your repo: scripts/pattern-check.sh
|
|
12
|
+
# 2. Add to .husky/pre-commit:
|
|
13
|
+
# #!/bin/sh
|
|
14
|
+
# ./scripts/pattern-check.sh
|
|
15
|
+
# 3. Or install via:
|
|
16
|
+
# npm pkg set scripts.pattern-check="bash scripts/pattern-check.sh"
|
|
17
|
+
# npx husky add .husky/pre-commit "npm run pattern-check"
|
|
18
|
+
#
|
|
19
|
+
# CONFIGURATION:
|
|
20
|
+
# Create .defensive-patterns.json in your project root to customize:
|
|
21
|
+
# - authMiddleware: Custom auth function names (e.g., "protectedProcedure")
|
|
22
|
+
# - safeParseHelpers: Custom safe parsing functions
|
|
23
|
+
# - publicRoutes: Routes that don't require auth (glob patterns)
|
|
24
|
+
# - disabled: Checks to skip entirely
|
|
25
|
+
# See docs/defensive-patterns.md for full documentation.
|
|
26
|
+
#
|
|
27
|
+
# USAGE:
|
|
28
|
+
# ./scripts/pattern-check.sh # Check staged files
|
|
29
|
+
# ./scripts/pattern-check.sh --all # Check all tracked files
|
|
30
|
+
# ./scripts/pattern-check.sh --fix # Show violation locations for manual fix
|
|
31
|
+
# git commit --no-pattern-check ... # Emergency bypass (logged)
|
|
32
|
+
#
|
|
33
|
+
# EXIT CODES:
|
|
34
|
+
# 0 - No violations (or all low severity)
|
|
35
|
+
# 1 - Critical/High violations found - commit blocked
|
|
36
|
+
# 2 - Script error
|
|
37
|
+
# =============================================================================
|
|
38
|
+
|
|
39
|
+
# Abort on any command failure, unset-variable reference, or pipeline failure.
set -euo pipefail

# Colors for output (ANSI escapes, rendered via `echo -e`)
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration
# NOTE(review): BYPASS_FLAG is never referenced anywhere else in this script —
# git does not forward unknown `git commit` flags to hooks. The bypass that
# actually works is the GIT_COMMIT_NO_PATTERN_CHECK=1 environment variable.
BYPASS_FLAG="--no-pattern-check"
BYPASS_LOG=".claude/bypass-log.json"      # JSON log of bypass events (jq) or .txt fallback
CONFIG_FILE=".defensive-patterns.json"    # optional per-project overrides
CHECK_ALL=false                           # set by --all: scan all tracked files
SHOW_FIX=false                            # set by --fix (currently informational)

# Default patterns (used when no config file exists)
DEFAULT_AUTH_MIDDLEWARE="withAuth|requireAuth|authenticate|getSession|getServerSession"
DEFAULT_SAFE_PARSE="safe(Json)?Parse|\.safeParse"
DEFAULT_PUBLIC_ROUTES=""
DISABLED_CHECKS=""
EXCLUDE_PATHS_PATTERN=""

# Project-specific patterns (loaded from config by load_config)
AUTH_MIDDLEWARE_PATTERN=""
SAFE_PARSE_PATTERN=""
PUBLIC_ROUTES_PATTERN=""
|
|
66
|
+
|
|
67
|
+
# ---------------------------------------------------------------------------
# CLI argument handling: flags may appear in any order; unknown args ignored.
# ---------------------------------------------------------------------------
for opt in "$@"; do
  case "$opt" in
    --all) CHECK_ALL=true ;;
    --fix) SHOW_FIX=true ;;
    --help|-h)
      echo "Usage: $0 [--all] [--fix] [--help]"
      echo ""
      echo "Options:"
      echo " --all Check all tracked files (not just staged)"
      echo " --fix Show file locations for manual fixing"
      echo " --help Show this help message"
      echo ""
      echo "Environment:"
      echo " PATTERN_CHECK_SKIP=1 Skip pattern check (for CI or testing)"
      echo ""
      echo "Configuration:"
      echo " Create .defensive-patterns.json to customize patterns."
      echo " See docs/defensive-patterns.md for schema documentation."
      exit 0
      ;;
  esac
done
|
|
94
|
+
|
|
95
|
+
# Honor the explicit opt-out used by CI pipelines and test harnesses.
if [[ "${PATTERN_CHECK_SKIP:-0}" == "1" ]]; then
  echo -e "${BLUE}Pattern check skipped (PATTERN_CHECK_SKIP=1)${NC}"
  exit 0
fi
|
|
100
|
+
|
|
101
|
+
# Log a bypass event to BYPASS_LOG (JSON via jq, plain-text fallback).
#
# BUGFIX: this function must be defined BEFORE the bypass check below. Bash
# resolves function names at call time, and the original ordering (top-level
# check first, definition ~30 lines later) made the bypass path fail with
# "log_bypass: command not found" and, under `set -e`, abort the commit it
# was supposed to allow through.
#
# Globals:   BYPASS_LOG (path), YELLOW/NC (colors)
# Outputs:   warning line on stdout; writes/updates the bypass log
log_bypass() {
  local timestamp
  timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
  local user
  user=$(git config user.email 2>/dev/null || echo "unknown")
  local branch
  branch=$(git branch --show-current 2>/dev/null || echo "unknown")

  # Ensure .claude directory exists
  mkdir -p "$(dirname "$BYPASS_LOG")"

  # Seed an empty JSON log on first use
  if [[ ! -f "$BYPASS_LOG" ]]; then
    echo '{"bypasses":[]}' > "$BYPASS_LOG"
  fi

  # Append the new entry with jq when available; otherwise fall back to a
  # plain-text sidecar log so the event is never silently dropped.
  if command -v jq &> /dev/null; then
    local temp_file
    temp_file=$(mktemp)
    jq --arg ts "$timestamp" --arg user "$user" --arg branch "$branch" \
      '.bypasses += [{"timestamp": $ts, "user": $user, "branch": $branch}]' \
      "$BYPASS_LOG" > "$temp_file" && mv "$temp_file" "$BYPASS_LOG"
  else
    # Fallback: append to a simple text log
    echo "$timestamp | $user | $branch" >> "${BYPASS_LOG}.txt"
  fi

  echo -e "${YELLOW}Warning: Pattern check bypassed - logged to $BYPASS_LOG${NC}"
}

# Check for bypass flag in git command (via GIT_COMMIT_ARGS or similar)
if [[ "${GIT_COMMIT_NO_PATTERN_CHECK:-}" == "1" ]]; then
  log_bypass
  exit 0
fi
|
|
138
|
+
|
|
139
|
+
# Load project configuration
#
# Populates AUTH_MIDDLEWARE_PATTERN, SAFE_PARSE_PATTERN, PUBLIC_ROUTES_PATTERN,
# DISABLED_CHECKS and EXCLUDE_PATHS_PATTERN from CONFIG_FILE. Falls back to the
# DEFAULT_* values when the file or jq is missing, or when a key is absent.
# Each config key is a JSON array; arrays are collapsed into a single
# alternation regex by joining entries with "|".
load_config() {
  if [[ ! -f "$CONFIG_FILE" ]]; then
    # Use defaults
    AUTH_MIDDLEWARE_PATTERN="$DEFAULT_AUTH_MIDDLEWARE"
    SAFE_PARSE_PATTERN="$DEFAULT_SAFE_PARSE"
    PUBLIC_ROUTES_PATTERN=""
    DISABLED_CHECKS=""
    return
  fi

  echo -e "${BLUE}Loading config from $CONFIG_FILE${NC}"

  # Check if jq is available
  if ! command -v jq &> /dev/null; then
    echo -e "${YELLOW}Warning: jq not found, using default patterns${NC}"
    AUTH_MIDDLEWARE_PATTERN="$DEFAULT_AUTH_MIDDLEWARE"
    SAFE_PARSE_PATTERN="$DEFAULT_SAFE_PARSE"
    return
  fi

  # Load authMiddleware array and convert to regex pattern.
  # `// empty` makes jq emit nothing (not "null") when the key is absent.
  local auth_array
  auth_array=$(jq -r '.authMiddleware // empty | @json' "$CONFIG_FILE" 2>/dev/null || true)
  if [[ -n "$auth_array" && "$auth_array" != "null" ]]; then
    AUTH_MIDDLEWARE_PATTERN=$(echo "$auth_array" | jq -r 'join("|")' 2>/dev/null || echo "$DEFAULT_AUTH_MIDDLEWARE")
  else
    AUTH_MIDDLEWARE_PATTERN="$DEFAULT_AUTH_MIDDLEWARE"
  fi

  # Load safeParseHelpers array and convert to regex pattern
  local safe_array
  safe_array=$(jq -r '.safeParseHelpers // empty | @json' "$CONFIG_FILE" 2>/dev/null || true)
  if [[ -n "$safe_array" && "$safe_array" != "null" ]]; then
    # Escape dots in method patterns and join with |
    SAFE_PARSE_PATTERN=$(echo "$safe_array" | jq -r 'map(gsub("\\."; "\\.")) | join("|")' 2>/dev/null || echo "$DEFAULT_SAFE_PARSE")
  else
    SAFE_PARSE_PATTERN="$DEFAULT_SAFE_PARSE"
  fi

  # Load publicRoutes array
  local routes_array
  routes_array=$(jq -r '.publicRoutes // empty | @json' "$CONFIG_FILE" 2>/dev/null || true)
  if [[ -n "$routes_array" && "$routes_array" != "null" ]]; then
    PUBLIC_ROUTES_PATTERN=$(echo "$routes_array" | jq -r 'join("|")' 2>/dev/null || echo "")
    # Convert glob patterns to regex: * -> [^/]*, ** -> .*
    # The .__DOUBLE_STAR__. sentinel protects ** from the single-* rewrite;
    # the three sed passes must stay in this order.
    PUBLIC_ROUTES_PATTERN=$(echo "$PUBLIC_ROUTES_PATTERN" | sed 's/\*\*/.__DOUBLE_STAR__./g' | sed 's/\*/[^\/]*/g' | sed 's/\.__DOUBLE_STAR__\./.\*/g')
  else
    PUBLIC_ROUTES_PATTERN=""
  fi

  # Load disabled checks (check IDs to skip entirely)
  local disabled_array
  disabled_array=$(jq -r '.disabled // empty | @json' "$CONFIG_FILE" 2>/dev/null || true)
  if [[ -n "$disabled_array" && "$disabled_array" != "null" ]]; then
    DISABLED_CHECKS=$(echo "$disabled_array" | jq -r 'join("|")' 2>/dev/null || echo "")
  else
    DISABLED_CHECKS=""
  fi

  # Load exclude paths (glob patterns for files to skip entirely)
  local exclude_array
  exclude_array=$(jq -r '.excludePaths // empty | @json' "$CONFIG_FILE" 2>/dev/null || true)
  if [[ -n "$exclude_array" && "$exclude_array" != "null" ]]; then
    EXCLUDE_PATHS_PATTERN=$(echo "$exclude_array" | jq -r 'join("|")' 2>/dev/null || echo "")
    # Convert glob patterns to regex: * -> [^/]*, ** -> .* (same sentinel trick)
    EXCLUDE_PATHS_PATTERN=$(echo "$EXCLUDE_PATHS_PATTERN" | sed 's/\*\*/.__DOUBLE_STAR__./g' | sed 's/\*/[^\/]*/g' | sed 's/\.__DOUBLE_STAR__\./.\*/g')
  else
    EXCLUDE_PATHS_PATTERN=""
  fi
}
|
|
210
|
+
|
|
211
|
+
# is_check_disabled CHECK_ID
# Returns 0 when CHECK_ID appears in the DISABLED_CHECKS alternation
# (anchored, exact match), 1 otherwise.
is_check_disabled() {
  local id="$1"
  [[ -n "$DISABLED_CHECKS" ]] || return 1
  grep -qE "^($DISABLED_CHECKS)$" <<<"$id"
}
|
|
222
|
+
|
|
223
|
+
# is_public_route FILE
# Returns 0 when FILE matches the configured PUBLIC_ROUTES_PATTERN regex
# (unanchored substring match), 1 when no public routes are configured or
# the path does not match.
is_public_route() {
  local path="$1"
  [[ -n "$PUBLIC_ROUTES_PATTERN" ]] || return 1
  grep -qE "$PUBLIC_ROUTES_PATTERN" <<<"$path"
}
|
|
234
|
+
|
|
235
|
+
# is_excluded_path FILE
# Returns 0 when FILE matches the configured EXCLUDE_PATHS_PATTERN regex
# (unanchored substring match), 1 when nothing is excluded or no match.
is_excluded_path() {
  local path="$1"
  [[ -n "$EXCLUDE_PATHS_PATTERN" ]] || return 1
  grep -qE "$EXCLUDE_PATHS_PATTERN" <<<"$path"
}
|
|
246
|
+
|
|
247
|
+
# Emit the JS/TS files to analyze, one path per line, on stdout.
# Default mode looks only at staged additions/copies/modifications;
# --all (CHECK_ALL=true) scans every tracked JS/TS file instead.
# Empty output means there is nothing to do.
get_files_to_check() {
  local file_list

  if [[ "$CHECK_ALL" != "true" ]]; then
    # Staged changes only (A/C/M), filtered down to JS/TS extensions.
    file_list=$(git diff --cached --name-only --diff-filter=ACM | grep -E '\.(ts|tsx|js|jsx)$' || true)
  else
    # Every tracked JS/TS file in the repository.
    file_list=$(git ls-files '*.ts' '*.tsx' '*.js' '*.jsx' 2>/dev/null || true)
  fi

  echo "$file_list"
}
|
|
261
|
+
|
|
262
|
+
# Track violations
# Per-severity tallies, incremented by add_violation().
CRITICAL_COUNT=0
HIGH_COUNT=0
MEDIUM_COUNT=0
# Pipe-delimited violation records, one per line:
#   severity|pattern|file|line|message
VIOLATIONS_FILE=$(mktemp)

# Cleanup temp file on exit (fires on every exit path, including errors)
trap 'rm -f "$VIOLATIONS_FILE"' EXIT
|
|
270
|
+
|
|
271
|
+
# add_violation SEVERITY PATTERN FILE LINE MESSAGE
# Record one finding: bump the per-severity tally and append a
# pipe-delimited record to VIOLATIONS_FILE for later reporting.
add_violation() {
  local sev="$1"
  local pat="$2"
  local src="$3"
  local ln="$4"
  local msg="$5"

  # Plain arithmetic assignment sidesteps the ((x++)) exit-status pitfall
  # under `set -e`.
  case "$sev" in
    critical) CRITICAL_COUNT=$((CRITICAL_COUNT + 1)) ;;
    high)     HIGH_COUNT=$((HIGH_COUNT + 1)) ;;
    medium)   MEDIUM_COUNT=$((MEDIUM_COUNT + 1)) ;;
  esac

  printf '%s|%s|%s|%s|%s\n' "$sev" "$pat" "$src" "$ln" "$msg" >> "$VIOLATIONS_FILE"
}
|
|
287
|
+
|
|
288
|
+
# Check for unsafe JSON.parse without try/catch or Zod
#
# For every `JSON.parse(` occurrence in FILE that is not already covered by a
# configured safe-parse helper, flag a "high" violation unless the call sits
# inside a `try { ... }` block.
check_unsafe_parsing() {
  local file="$1"

  # Skip if check is disabled
  if is_check_disabled "UNSAFE_PARSING"; then
    return
  fi

  # Look for JSON.parse not wrapped in try or with safeParse
  while IFS=: read -r line_num line_content; do
    # Skip if using configured safe parse patterns
    if echo "$line_content" | grep -qiE "$SAFE_PARSE_PATTERN"; then
      continue
    fi

    # Check if this line is inside a try block by counting braces.
    # The awk program replays the file up to the target line, flipping into
    # "in_try" on each `try {` and tracking brace depth until it closes.
    # NOTE(review): heuristic only — it ignores braces inside strings and
    # comments, nested try blocks, and `try` with the brace on the next
    # line; re-runs the whole awk scan once per JSON.parse hit. Confirm
    # that is acceptable for a fast pre-commit pass.
    local in_try_block
    in_try_block=$(awk -v target="$line_num" '
      BEGIN { in_try = 0; brace_depth = 0 }
      NR <= target {
        # Match "try {" or "try{" with optional whitespace
        if (/try[ \t]*\{/) {
          in_try = 1
          brace_depth = 1
        } else if (in_try) {
          brace_depth += gsub(/{/, "{")
          brace_depth -= gsub(/}/, "}")
          if (brace_depth <= 0) {
            in_try = 0
          }
        }
      }
      END { print in_try }
    ' "$file" 2>/dev/null)

    if [[ "$in_try_block" != "1" ]]; then
      add_violation "high" "UNSAFE_PARSING" "$file" "$line_num" "JSON.parse without try/catch or Zod validation"
    fi
  done < <(grep -n 'JSON\.parse\s*(' "$file" 2>/dev/null || true)
}
|
|
330
|
+
|
|
331
|
+
# Check for empty catch blocks
|
|
332
|
+
check_empty_catches() {
|
|
333
|
+
local file="$1"
|
|
334
|
+
|
|
335
|
+
# Skip if check is disabled
|
|
336
|
+
if is_check_disabled "EMPTY_CATCH"; then
|
|
337
|
+
return
|
|
338
|
+
fi
|
|
339
|
+
|
|
340
|
+
# Pattern: catch blocks with only whitespace, console.log, or nothing
|
|
341
|
+
while IFS=: read -r line_num line_content; do
|
|
342
|
+
add_violation "high" "EMPTY_CATCH" "$file" "$line_num" "Empty or silent catch block (console.log only)"
|
|
343
|
+
done < <(grep -n -E 'catch\s*\([^)]*\)\s*\{\s*(\/\/.*)?(\s*console\.(log|warn|error)[^}]*)?\s*\}' "$file" 2>/dev/null || true)
|
|
344
|
+
|
|
345
|
+
# Also check multiline empty catches - look for catch followed by just }
|
|
346
|
+
# Using awk for multiline pattern matching
|
|
347
|
+
awk '
|
|
348
|
+
/catch\s*\([^)]*\)\s*\{/ {
|
|
349
|
+
catch_line = NR
|
|
350
|
+
in_catch = 1
|
|
351
|
+
brace_count = gsub(/{/, "{") - gsub(/}/, "}")
|
|
352
|
+
next
|
|
353
|
+
}
|
|
354
|
+
in_catch {
|
|
355
|
+
brace_count += gsub(/{/, "{") - gsub(/}/, "}")
|
|
356
|
+
if (brace_count <= 0) {
|
|
357
|
+
# Check if body is essentially empty
|
|
358
|
+
if (NR - catch_line <= 2) {
|
|
359
|
+
print catch_line
|
|
360
|
+
}
|
|
361
|
+
in_catch = 0
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
' "$file" 2>/dev/null | while read -r line_num; do
|
|
365
|
+
add_violation "high" "EMPTY_CATCH" "$file" "$line_num" "Catch block with no meaningful error handling"
|
|
366
|
+
done
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
# Flag exported HTTP handlers in API route files when the file shows no use
# of any configured auth middleware.
#
# Arguments: $1 - file to scan
# Records "critical" violations via add_violation.
check_missing_auth() {
  local file="$1"

  # Skip if check is disabled
  if is_check_disabled "MISSING_AUTH"; then
    return
  fi

  # Only check API route files
  if ! echo "$file" | grep -qE '(api|route)\.(ts|js)$|/api/'; then
    return
  fi

  # Skip if file matches configured public routes pattern
  if is_public_route "$file"; then
    return
  fi

  # Skip if file has PUBLIC ROUTE comment or is explicitly public
  if grep -q 'PUBLIC.ROUTE\|@public\|isPublic.*true' "$file" 2>/dev/null; then
    return
  fi

  # The auth-wrapper test is per-FILE, not per-line: hoist it out of the
  # handler loops (the original re-grepped the whole file once per exported
  # handler). A file that references any configured auth middleware is
  # considered covered, so we can return before scanning for handlers.
  if grep -qE "$AUTH_MIDDLEWARE_PATTERN" "$file" 2>/dev/null; then
    return
  fi

  # Classic handler exports: export [async] function GET/POST/...
  while IFS=: read -r line_num _; do
    add_violation "critical" "MISSING_AUTH" "$file" "$line_num" "API route without auth middleware"
  done < <(grep -n -E 'export\s+(async\s+)?function\s+(GET|POST|PUT|DELETE|PATCH)' "$file" 2>/dev/null || true)

  # Next.js 13+ const handler exports: export const GET = ...
  while IFS=: read -r line_num _; do
    add_violation "critical" "MISSING_AUTH" "$file" "$line_num" "API route handler without auth"
  done < <(grep -n -E 'export\s+const\s+(GET|POST|PUT|DELETE|PATCH)\s*=' "$file" 2>/dev/null || true)
}
|
|
408
|
+
|
|
409
|
+
# Flag React component files with more than three lines containing inline
# arrow functions in JSX event-handler props (useCallback candidates).
#
# Arguments: $1 - file to scan
# Records a single "medium" violation (at the first occurrence) per file.
check_inline_handlers() {
  local file="$1"

  # Skip if check is disabled
  if is_check_disabled "INLINE_HANDLER"; then
    return
  fi

  # Only check React component files
  if ! echo "$file" | grep -qE '\.(tsx|jsx)$'; then
    return
  fi

  # Count lines with inline arrow functions in event handlers.
  # BUGFIX: the original used `$(grep -c ... || echo "0")`. grep -c already
  # prints 0 (while exiting 1) when nothing matches, so the echo appended a
  # SECOND zero, producing the two-line value "0\n0" — and the numeric
  # [[ -gt ]] below then died with an expression error, which under `set -e`
  # aborted the whole script for any .tsx file without inline handlers.
  # Capture grep's own count and only default a truly empty capture.
  local inline_count
  inline_count=$(grep -c -E 'on[A-Z][a-zA-Z]*=\{[^}]*\(\s*\)\s*=>' "$file" 2>/dev/null || true)
  inline_count=${inline_count:-0}

  if [[ "$inline_count" -gt 3 ]]; then
    # Report first occurrence
    local first_line
    first_line=$(grep -n -E 'on[A-Z][a-zA-Z]*=\{[^}]*\(\s*\)\s*=>' "$file" 2>/dev/null | head -1 | cut -d: -f1)
    add_violation "medium" "INLINE_HANDLER" "$file" "${first_line:-1}" "$inline_count inline arrow handlers (use useCallback)"
  fi
}
|
|
434
|
+
|
|
435
|
+
# Flag division expressions ("<x> / identifier") with no visible zero guard
# on the same line or within the three preceding lines.
#
# Arguments: $1 - file to scan
# Records "medium" violations via add_violation.
check_division_guards() {
  local file="$1"

  # Skip if check is disabled
  if is_check_disabled "DIVISION_GUARD"; then
    return
  fi

  # Candidate lines: something followed by "/ identifier" ending in ; , or ).
  # Comment lines are filtered out below.
  # BUGFIX: the original filtered with `grep -v '/*'`, but in a basic regex
  # '/*' means "ZERO or more slashes", which matches every line — so ALL
  # candidates were discarded and this check never fired at all. The
  # block-comment filter needs the asterisk escaped: '/\*'.
  while IFS=: read -r line_num line_content; do
    # A same-line guard is good enough: ternary `x > 0 ?`, `x !== 0 ?`,
    # or a && short-circuit before the division.
    if ! echo "$line_content" | grep -qE '>\s*0\s*\?|!==?\s*0\s*\?|&&.*\/'; then
      # Otherwise look up to three preceding lines for an if-guard.
      local start_line=$((line_num > 3 ? line_num - 3 : 1))
      local context
      context=$(sed -n "${start_line},${line_num}p" "$file" 2>/dev/null || true)

      if ! echo "$context" | grep -qE 'if\s*\([^)]*>\s*0|if\s*\([^)]*!==?\s*0'; then
        add_violation "medium" "DIVISION_GUARD" "$file" "$line_num" "Division without zero check"
      fi
    fi
  done < <(grep -n -E '[^/]/\s*[a-zA-Z_][a-zA-Z0-9_]*\s*[;,)]' "$file" 2>/dev/null | grep -v '//' | grep -v '/\*' || true)
}
|
|
459
|
+
|
|
460
|
+
# Render the accumulated violations and decide the commit's fate.
# Reads the severity tallies plus VIOLATIONS_FILE.
# Returns 0 when the commit may proceed (clean, or medium-only findings)
# and 1 when critical/high findings must block it.
print_results() {
  # Fast path: nothing recorded at any severity.
  if [[ $CRITICAL_COUNT -eq 0 && $HIGH_COUNT -eq 0 && $MEDIUM_COUNT -eq 0 ]]; then
    echo -e "${GREEN}All defensive patterns verified${NC}"
    return 0
  fi

  echo ""
  echo -e "${RED}=== Pattern Analysis Violations ===${NC}"
  echo ""

  # One record per line: severity|pattern|file|line|message.
  local sev pat vfile vline vmsg tint
  while IFS='|' read -r sev pat vfile vline vmsg; do
    [[ -z "$sev" ]] && continue

    if [[ "$sev" == "critical" || "$sev" == "high" ]]; then
      tint="$RED"
    elif [[ "$sev" == "medium" ]]; then
      tint="$YELLOW"
    else
      tint="$NC"
    fi

    echo -e "${tint}[$sev]${NC} $pat"
    echo " File: $vfile:$vline"
    echo " Issue: $vmsg"
    echo ""
  done < "$VIOLATIONS_FILE"

  echo "==================================="
  echo -e "Summary: ${RED}$CRITICAL_COUNT critical${NC}, ${RED}$HIGH_COUNT high${NC}, ${YELLOW}$MEDIUM_COUNT medium${NC}"
  echo ""

  if [[ $CRITICAL_COUNT -gt 0 || $HIGH_COUNT -gt 0 ]]; then
    echo -e "${RED}Commit blocked: Fix critical/high violations first${NC}"
    echo ""
    echo "Options:"
    echo " 1. Fix the violations above"
    echo " 2. Emergency bypass: GIT_COMMIT_NO_PATTERN_CHECK=1 git commit ..."
    echo " (Bypasses are logged to $BYPASS_LOG)"
    return 1
  else
    echo -e "${YELLOW}Warning: Medium severity violations found (commit allowed)${NC}"
    return 0
  fi
}
|
|
506
|
+
|
|
507
|
+
# Orchestrate one analysis pass: load config, collect candidate files, run
# every enabled check on each, record metrics, then report and set the exit
# status (non-zero blocks the commit via print_results).
main() {
  local t_start
  t_start=$(date +%s)

  echo -e "${BLUE}Running defensive pattern analysis...${NC}"

  # Pull project-specific overrides (or fall back to built-in defaults).
  load_config

  # Candidate files: staged by default, everything tracked with --all.
  local candidates
  candidates=$(get_files_to_check)

  if [[ -z "$candidates" ]]; then
    echo -e "${GREEN}No JS/TS files to check${NC}"
    exit 0
  fi

  local total
  total=$(echo "$candidates" | wc -l | tr -d ' ')
  echo "Checking $total files..."

  # Run every check on each candidate file.
  while IFS= read -r candidate; do
    [[ -z "$candidate" ]] && continue
    [[ ! -f "$candidate" ]] && continue

    # Honor configured excludePaths.
    if is_excluded_path "$candidate"; then
      continue
    fi

    check_unsafe_parsing "$candidate"
    check_empty_catches "$candidate"
    check_missing_auth "$candidate"
    check_inline_handlers "$candidate"
    check_division_guards "$candidate"
  done <<< "$candidates"

  local t_end
  t_end=$(date +%s)
  local duration=$((t_end - t_start))

  echo "Analysis completed in ${duration}s"
  echo ""

  # Record metrics (CS-088)
  record_metrics

  # Print results and exit with appropriate code
  print_results
}
|
|
560
|
+
|
|
561
|
+
# Record violation metrics for tracking (CS-088).
# Best-effort: locates pattern-metrics.sh (next to this script, or in the
# claude-setup checkout), tallies VIOLATIONS_FILE per pattern id, and hands
# the counts to the recorder as JSON. Silently a no-op when the recorder is
# missing or fails.
record_metrics() {
  # Resolve the recorder relative to this script first.
  local here
  here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  local recorder="$here/pattern-metrics.sh"

  # Fall back to the claude-setup checkout location.
  if [[ ! -f "$recorder" ]]; then
    recorder="${HOME}/Projects/internal/claude-setup/scripts/pattern-metrics.sh"
  fi

  # Metrics are optional — skip silently when unavailable.
  [[ -f "$recorder" && -x "$recorder" ]] || return 0

  # Tally recorded violations per pattern id.
  local n_parse=0 n_catch=0 n_auth=0 n_inline=0 n_div=0
  local sev pat rest_file rest_line rest_msg
  while IFS='|' read -r sev pat rest_file rest_line rest_msg; do
    [[ -z "$sev" ]] && continue
    case "$pat" in
      UNSAFE_PARSING) n_parse=$((n_parse + 1)) ;;
      EMPTY_CATCH)    n_catch=$((n_catch + 1)) ;;
      MISSING_AUTH)   n_auth=$((n_auth + 1)) ;;
      INLINE_HANDLER) n_inline=$((n_inline + 1)) ;;
      DIVISION_GUARD) n_div=$((n_div + 1)) ;;
    esac
  done < "$VIOLATIONS_FILE"

  # Build the JSON payload and hand it to the recorder.
  local violations_json
  violations_json=$(printf '{\n"UNSAFE_PARSING": %s,\n"EMPTY_CATCH": %s,\n"MISSING_AUTH": %s,\n"INLINE_HANDLER": %s,\n"DIVISION_GUARD": %s\n}' \
    "$n_parse" "$n_catch" "$n_auth" "$n_inline" "$n_div")

  "$recorder" record "$violations_json" 2>/dev/null || true
}
|
|
606
|
+
|
|
607
|
+
main "$@"
|