ginskill-init 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +77 -0
- package/agents/developer.md +56 -0
- package/agents/frontend-design.md +69 -0
- package/agents/mobile-reviewer.md +36 -0
- package/agents/review-code.md +49 -0
- package/agents/security-scanner.md +50 -0
- package/agents/tester.md +72 -0
- package/bin/cli.js +226 -0
- package/package.json +20 -0
- package/skills/ai-asset-generator/SKILL.md +255 -0
- package/skills/ai-asset-generator/docs/gen-image.md +274 -0
- package/skills/ai-asset-generator/docs/genvideo.md +341 -0
- package/skills/ai-asset-generator/docs/remove-background.md +19 -0
- package/skills/ai-asset-generator/generate-credit-assets.mjs +180 -0
- package/skills/ai-asset-generator/generate-ginbrowser-assets.mjs +242 -0
- package/skills/ai-asset-generator/generate-sty-icon.mjs +149 -0
- package/skills/ai-asset-generator/lib/bg-remove.mjs +34 -0
- package/skills/ai-asset-generator/lib/env.mjs +38 -0
- package/skills/ai-asset-generator/lib/kie-client.mjs +88 -0
- package/skills/ai-asset-generator/scripts/scaffold-generator.mjs +203 -0
- package/skills/ai-build-ai/SKILL.md +124 -0
- package/skills/ai-build-ai/docs/agent-teams.md +293 -0
- package/skills/ai-build-ai/docs/checkpointing.md +161 -0
- package/skills/ai-build-ai/docs/create-agent.md +399 -0
- package/skills/ai-build-ai/docs/create-mcp.md +395 -0
- package/skills/ai-build-ai/docs/create-skill.md +299 -0
- package/skills/ai-build-ai/docs/headless-mode.md +614 -0
- package/skills/ai-build-ai/docs/hooks.md +578 -0
- package/skills/ai-build-ai/docs/memory-claude-md.md +375 -0
- package/skills/ai-build-ai/docs/output-styles.md +208 -0
- package/skills/ai-build-ai/docs/overview.md +162 -0
- package/skills/ai-build-ai/docs/permissions.md +391 -0
- package/skills/ai-build-ai/docs/plugins.md +396 -0
- package/skills/ai-build-ai/docs/sandbox.md +262 -0
- package/skills/ai-build-ai/scripts/load-tutorial.sh +54 -0
- package/skills/icon-generator/SKILL.md +270 -0
- package/skills/mobile-app-review/SKILL.md +321 -0
- package/skills/mobile-app-review/references/apple-review.md +132 -0
- package/skills/mobile-app-review/references/google-play-review.md +203 -0
- package/skills/mongodb/SKILL.md +667 -0
- package/skills/mongodb/references/mongoose-patterns.md +368 -0
- package/skills/nestjs-architecture/SKILL.md +1086 -0
- package/skills/nestjs-architecture/references/advanced-patterns.md +590 -0
- package/skills/performance/SKILL.md +509 -0
- package/skills/react-fsd-architecture/SKILL.md +693 -0
- package/skills/react-fsd-architecture/references/fsd-patterns.md +747 -0
- package/skills/react-query/SKILL.md +685 -0
- package/skills/react-query/references/query-patterns.md +365 -0
- package/skills/review-code/SKILL.md +321 -0
- package/skills/review-code/references/clean-code-principles.md +395 -0
- package/skills/review-code/references/frontend-patterns.md +136 -0
- package/skills/review-code/references/nestjs-patterns.md +184 -0
- package/skills/review-code/scripts/check-module.sh +201 -0
- package/skills/review-code/scripts/deep-scan.sh +604 -0
- package/skills/review-code/scripts/dep-check.sh +522 -0
- package/skills/review-code/scripts/detect-duplicates.sh +466 -0
- package/skills/review-code/scripts/format-check.sh +577 -0
- package/skills/review-code/scripts/run-review.sh +167 -0
- package/skills/review-code/scripts/scan-codebase.sh +152 -0
- package/skills/security-scanner/SKILL.md +327 -0
- package/skills/security-scanner/references/nestjs-security.md +260 -0
- package/skills/security-scanner/references/nextjs-security.md +201 -0
- package/skills/security-scanner/references/react-native-security.md +199 -0
- package/skills/security-scanner/scripts/security-scan.sh +478 -0
- package/skills/ui-ux-pro-max/SKILL.md +377 -0
- package/skills/ui-ux-pro-max/data/charts.csv +26 -0
- package/skills/ui-ux-pro-max/data/colors.csv +97 -0
- package/skills/ui-ux-pro-max/data/icons.csv +101 -0
- package/skills/ui-ux-pro-max/data/landing.csv +31 -0
- package/skills/ui-ux-pro-max/data/products.csv +97 -0
- package/skills/ui-ux-pro-max/data/react-performance.csv +45 -0
- package/skills/ui-ux-pro-max/data/stacks/astro.csv +54 -0
- package/skills/ui-ux-pro-max/data/stacks/flutter.csv +53 -0
- package/skills/ui-ux-pro-max/data/stacks/html-tailwind.csv +56 -0
- package/skills/ui-ux-pro-max/data/stacks/jetpack-compose.csv +53 -0
- package/skills/ui-ux-pro-max/data/stacks/nextjs.csv +53 -0
- package/skills/ui-ux-pro-max/data/stacks/nuxt-ui.csv +51 -0
- package/skills/ui-ux-pro-max/data/stacks/nuxtjs.csv +59 -0
- package/skills/ui-ux-pro-max/data/stacks/react-native.csv +52 -0
- package/skills/ui-ux-pro-max/data/stacks/react.csv +54 -0
- package/skills/ui-ux-pro-max/data/stacks/shadcn.csv +61 -0
- package/skills/ui-ux-pro-max/data/stacks/svelte.csv +54 -0
- package/skills/ui-ux-pro-max/data/stacks/swiftui.csv +51 -0
- package/skills/ui-ux-pro-max/data/stacks/vue.csv +50 -0
- package/skills/ui-ux-pro-max/data/styles.csv +68 -0
- package/skills/ui-ux-pro-max/data/typography.csv +58 -0
- package/skills/ui-ux-pro-max/data/ui-reasoning.csv +101 -0
- package/skills/ui-ux-pro-max/data/ux-guidelines.csv +100 -0
- package/skills/ui-ux-pro-max/data/web-interface.csv +31 -0
- package/skills/ui-ux-pro-max/scripts/core.py +253 -0
- package/skills/ui-ux-pro-max/scripts/design_system.py +1067 -0
- package/skills/ui-ux-pro-max/scripts/search.py +114 -0
|
@@ -0,0 +1,604 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# ─────────────────────────────────────────────────────────────
|
|
3
|
+
# Deep Code Scanner — Clean Code Violations
|
|
4
|
+
#
|
|
5
|
+
# Detects code smells, anti-patterns, SOLID violations, and
|
|
6
|
+
# bloaters using pure bash + awk. No npm dependencies needed.
|
|
7
|
+
#
|
|
8
|
+
# Usage:
|
|
9
|
+
# ./deep-scan.sh # scan all projects
|
|
10
|
+
# ./deep-scan.sh backend # scan backend only
|
|
11
|
+
# ./deep-scan.sh frontend # scan frontend only
|
|
12
|
+
# ./deep-scan.sh mobile # scan mobile only
|
|
13
|
+
# ./deep-scan.sh --category bloaters # only bloater checks
|
|
14
|
+
# ./deep-scan.sh --severity critical # only critical issues
|
|
15
|
+
# ./deep-scan.sh --json # pure JSON output (default)
|
|
16
|
+
# ./deep-scan.sh --summary # summary counts only
|
|
17
|
+
#
|
|
18
|
+
# Output: JSON to stdout
|
|
19
|
+
# Exit codes: 0 = clean, 1 = warnings found, 2 = critical issues
|
|
20
|
+
# ─────────────────────────────────────────────────────────────
|
|
21
|
+
|
|
22
|
+
# -u: treat unset variables as errors; pipefail: a pipeline fails if any
# stage fails. Note -e is not enabled, so commands that exit non-zero
# (e.g. greps with no matches) do not abort the scan.
set -uo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Five levels up from .../package/skills/review-code/scripts/ — assumes the
# skill is vendored one directory below the repo root; TODO confirm layout.
REPO_ROOT="$(cd "$SCRIPT_DIR/../../../../.." && pwd)"

# ─── Defaults ────────────────────────────────────────────────
TARGET="all"            # backend | frontend | mobile | auth | all
CATEGORY="all"          # bloaters | solid | smells | naming | all
SEVERITY_FILTER="all"   # critical | warning | info | all
SUMMARY_ONLY=false      # --summary: emit counts only, no findings array
MAX_FUNC_LINES=30       # long-function threshold (--max-func-lines)
MAX_FILE_LINES=300      # long-file threshold (--max-file-lines)
MAX_PARAMS=3            # parameter-count threshold (--max-params)
MAX_NESTING=3           # approximate indentation-depth threshold
MAX_METHODS=10          # methods-per-class threshold
MAX_DEPS=5              # injected-dependencies-per-service threshold
|
|
38
|
+
|
|
39
|
+
# ─── Auto-detect project directories ────────────────────────
|
|
40
|
+
#######################################
# Locate the source directory for one sub-project.
# Arguments: $1  - project label (unused; kept for call-site readability)
#            $@  - candidate directory names or globs relative to $REPO_ROOT
# Outputs:   first existing <dir>/src or <dir>/app, or "" when none found
# Returns:   0 always
#######################################
detect_src() {
  local kind="$1"; shift
  local candidate dir
  for candidate in "$@"; do
    # $candidate may be a glob (e.g. "be-*"): expand it against the repo
    # root. $REPO_ROOT itself is quoted so spaces or glob metacharacters
    # in the repo path cannot word-split or re-glob the expression.
    for dir in "$REPO_ROOT"/$candidate; do
      if [ -d "$dir/src" ]; then echo "$dir/src"; return; fi
      if [ -d "$dir/app" ]; then echo "$dir/app"; return; fi
    done
  done
  echo ""
}
|
|
50
|
+
|
|
51
|
+
# Resolve source directories for each sub-project. The glob patterns are
# quoted so they expand inside detect_src against $REPO_ROOT — unquoted,
# "be-*" / "web-*" would first glob against whatever the current working
# directory happens to contain.
BE_SRC="$(detect_src backend "be-*" backend server api)"
FE_SRC="$(detect_src frontend "web-*" frontend client)"
MB_SRC="$(detect_src mobile styai-mobile mobile)"
AUTH_SRC=""
[ -d "$REPO_ROOT/auth-package/src" ] && AUTH_SRC="$REPO_ROOT/auth-package/src"
|
|
56
|
+
|
|
57
|
+
# ─── Parse arguments ────────────────────────────────────────
|
|
58
|
+
# Positional words select the scan target; --flags tune filters/thresholds.
# Unrecognized arguments are ignored so wrappers can pass extra flags safely.
while [[ $# -gt 0 ]]; do
  arg="$1"
  case "$arg" in
    backend|frontend|mobile|auth)
      TARGET="$arg"
      shift
      ;;
    --category)       CATEGORY="$2";        shift 2 ;;
    --severity)       SEVERITY_FILTER="$2"; shift 2 ;;
    --max-func-lines) MAX_FUNC_LINES="$2";  shift 2 ;;
    --max-file-lines) MAX_FILE_LINES="$2";  shift 2 ;;
    --max-params)     MAX_PARAMS="$2";      shift 2 ;;
    --summary)
      SUMMARY_ONLY=true
      shift
      ;;
    *)
      # --json lands here too: JSON is already the default output mode.
      shift
      ;;
  esac
done
|
|
74
|
+
|
|
75
|
+
# ─── Build file list ────────────────────────────────────────
|
|
76
|
+
# Scratch workspace; removed automatically on any exit path via the trap.
TMPDIR_WORK=$(mktemp -d)
trap 'rm -rf "$TMPDIR_WORK"' EXIT

# files.txt: one candidate source file per line.
# findings.jsonl: one JSON object per finding, appended by add_finding.
FILES_LIST="$TMPDIR_WORK/files.txt"
FINDINGS_FILE="$TMPDIR_WORK/findings.jsonl"
touch "$FINDINGS_FILE"
|
|
82
|
+
|
|
83
|
+
#######################################
# Emit every .ts/.tsx file under the directories selected by $TARGET,
# skipping node_modules, .next, dist, and TypeScript declaration files.
# Globals: TARGET, BE_SRC, FE_SRC, MB_SRC, AUTH_SRC (read)
# Outputs: one path per line to stdout (empty when no directory resolved)
#######################################
build_file_list() {
  local -a roots=()
  local src root

  case "$TARGET" in
    backend)  [ -n "$BE_SRC" ]   && roots+=("$BE_SRC") ;;
    frontend) [ -n "$FE_SRC" ]   && roots+=("$FE_SRC") ;;
    mobile)   [ -n "$MB_SRC" ]   && roots+=("$MB_SRC") ;;
    auth)     [ -n "$AUTH_SRC" ] && roots+=("$AUTH_SRC") ;;
    all)
      for src in "$BE_SRC" "$FE_SRC" "$MB_SRC" "$AUTH_SRC"; do
        [ -n "$src" ] && roots+=("$src")
      done
      ;;
  esac

  [ "${#roots[@]}" -eq 0 ] && return

  for root in "${roots[@]}"; do
    find "$root" \( -name "*.ts" -o -name "*.tsx" \) \
      -not -path "*/node_modules/*" \
      -not -path "*/.next/*" \
      -not -path "*/dist/*" \
      -not -path "*.d.ts" \
      2>/dev/null
  done
}
|
|
110
|
+
|
|
111
|
+
build_file_list | sort > "$FILES_LIST"
FILE_COUNT=$(wc -l < "$FILES_LIST" | tr -d ' ')

# Nothing to scan: emit a minimal-but-valid JSON document and exit cleanly.
if [ "$FILE_COUNT" -eq 0 ]; then
  echo '{"scan":"deep-scan","findings":[],"summary":{"total":0}}'
  exit 0
fi
|
|
118
|
+
|
|
119
|
+
# ─── Helpers ────────────────────────────────────────────────
|
|
120
|
+
# Strip the repo-root prefix to get a repo-relative path. The expansion is
# quoted inside ${...#...} so glob metacharacters in the repo path are
# matched literally, and printf avoids echo's option-parsing pitfalls.
relpath() { printf '%s\n' "${1#"$REPO_ROOT"/}"; }
|
|
121
|
+
|
|
122
|
+
# Serialize $1 as a JSON string literal (including surrounding quotes).
# Prefers python3's json.dumps for fully correct escaping; falls back to a
# sed-based escape of backslash, double-quote, and tab only.
# NOTE(review): the fallback does not escape newlines or other control
# characters — the call sites in this script pass single-line messages, but
# verify before reusing elsewhere.
json_escape() {
  printf '%s' "$1" | python3 -c 'import json,sys; print(json.dumps(sys.stdin.read()), end="")' 2>/dev/null || \
    printf '"%s"' "$(printf '%s' "$1" | sed 's/\\/\\\\/g; s/"/\\"/g; s/\t/\\t/g')"
}
|
|
126
|
+
|
|
127
|
+
#######################################
# Append one finding to $FINDINGS_FILE as a single JSONL record, honouring
# the --category / --severity filters.
# Arguments: category, severity, rule, file, line, message, suggestion
# Globals:   CATEGORY, SEVERITY_FILTER, FINDINGS_FILE (read/append)
#######################################
add_finding() {
  local category="$1" severity="$2" rule="$3" file="$4" line="$5" message="$6" suggestion="$7"

  # Skip findings filtered out on the command line.
  if [ "$CATEGORY" != "all" ] && [ "$category" != "$CATEGORY" ]; then return; fi
  if [ "$SEVERITY_FILTER" != "all" ] && [ "$severity" != "$SEVERITY_FILTER" ]; then return; fi

  # message/suggestion are free text and go through json_escape; the other
  # fields are controlled identifiers and are interpolated verbatim.
  printf '{"category":"%s","severity":"%s","rule":"%s","file":"%s","line":%s,"message":%s,"suggestion":%s}\n' \
    "$category" "$severity" "$rule" "$(relpath "$file")" "$line" \
    "$(json_escape "$message")" "$(json_escape "$suggestion")" >> "$FINDINGS_FILE"
}
|
|
135
|
+
|
|
136
|
+
# ═══════════════════════════════════════════════════════════════
|
|
137
|
+
# BLOATERS
|
|
138
|
+
# ═══════════════════════════════════════════════════════════════
|
|
139
|
+
|
|
140
|
+
#######################################
# Bloater checks: long files, long functions, too many parameters, deep
# nesting, and classes with too many methods.
# Globals: FILES_LIST, MAX_FILE_LINES, MAX_FUNC_LINES, MAX_PARAMS,
#          MAX_NESTING, MAX_METHODS (read)
# Outputs: findings recorded via add_finding (which appends to a file, so
#          calls made inside pipeline subshells still take effect)
#######################################
scan_bloaters() {
  # ─── Long files ──────────────────────────────────────────
  while IFS= read -r file; do
    local lines
    lines=$(wc -l < "$file" | tr -d ' ')
    if [ "$lines" -gt "$MAX_FILE_LINES" ]; then
      add_finding "bloaters" "warning" "long_file" "$file" 1 \
        "File has $lines lines (threshold: $MAX_FILE_LINES)" \
        "Split into smaller modules with focused responsibilities"
    fi
  done < "$FILES_LIST"

  # ─── Long functions ─────────────────────────────────────
  # Heuristic awk parser: a line matching one of three "function start"
  # shapes opens a candidate; braces are counted per character until the
  # depth returns to zero, at which point the length is checked.
  # NOTE(review): the start patterns can also match call sites or nested
  # declarations, so line counts are approximate — acceptable for a linter.
  while IFS= read -r file; do
    awk -v max="$MAX_FUNC_LINES" -v fname="$file" '
      /^[[:space:]]*(export[[:space:]]+)?(async[[:space:]]+)?function[[:space:]]+/ ||
      /^[[:space:]]*(export[[:space:]]+)?(const|let|var)[[:space:]]+[a-zA-Z_][a-zA-Z0-9_]*[[:space:]]*=[[:space:]]*(async[[:space:]]*)?\(/ ||
      /^[[:space:]]*(async[[:space:]]+)?[a-zA-Z_][a-zA-Z0-9_]*[[:space:]]*\([^)]*\)[[:space:]]*[:{]/ {
        if (in_func && func_lines > max) {
          printf "%s\t%d\t%d\t%s\n", fname, func_start, func_lines, func_name
        }
        in_func = 1
        func_start = NR
        func_lines = 0
        line = $0
        gsub(/^[[:space:]]+/, "", line)
        gsub(/async[[:space:]]+/, "", line)
        gsub(/export[[:space:]]+/, "", line)
        gsub(/const[[:space:]]+/, "", line)
        sub(/^function[[:space:]]+/, "", line)
        func_name = line
        sub(/[^a-zA-Z0-9_].*/, "", func_name)
        if (func_name == "") func_name = "anonymous"
        brace_depth = 0
      }
      in_func {
        func_lines++
        n = split($0, chars, "")
        for (i = 1; i <= n; i++) {
          if (chars[i] == "{") brace_depth++
          if (chars[i] == "}") brace_depth--
        }
        if (brace_depth <= 0 && func_lines > 1) {
          if (func_lines > max) {
            printf "%s\t%d\t%d\t%s\n", fname, func_start, func_lines, func_name
          }
          in_func = 0
        }
      }
    ' "$file" 2>/dev/null
  done < "$FILES_LIST" | while IFS=$'\t' read -r file line count name; do
    add_finding "bloaters" "warning" "long_function" "$file" "$line" \
      "Function '$name' has $count lines (threshold: $MAX_FUNC_LINES)" \
      "Break into smaller focused functions with descriptive names"
  done

  # ─── Too many parameters ─────────────────────────────────
  # First grep finds lines with >= 3 commas inside a paren group; the
  # second restricts to lines that look like function declarations.
  while IFS= read -r file; do
    grep -n '([^)]*,[^)]*,[^)]*,[^)]*' "$file" 2>/dev/null | \
      grep -E '(function |=>|async )' | while IFS=: read -r line content; do
      # Count commas between parens
      local params
      params=$(echo "$content" | sed 's/.*(\(.*\)).*/\1/' | tr -cd ',' | wc -c | tr -d ' ')
      params=$((params + 1))
      if [ "$params" -gt "$MAX_PARAMS" ]; then
        add_finding "bloaters" "info" "too_many_params" "$file" "$line" \
          "Function has $params parameters (threshold: $MAX_PARAMS)" \
          "Use an options/config object parameter instead"
      fi
    done
  done < "$FILES_LIST"

  # ─── Deep nesting ────────────────────────────────────────
  # Measures leading whitespace per line (tab counted as 2 spaces) and
  # flags control-flow keywords deeper than MAX_NESTING levels.
  while IFS= read -r file; do
    awk -v max="$MAX_NESTING" -v fname="$file" '
      {
        indent = 0
        for (i = 1; i <= length($0); i++) {
          c = substr($0, i, 1)
          if (c == " ") indent++
          else if (c == "\t") indent += 2
          else break
        }
        # Approximate nesting level (2-space indent)
        level = int(indent / 2)
        if (level > max && $0 ~ /if|for|while|switch|try/) {
          printf "%s\t%d\t%d\n", fname, NR, level
        }
      }
    ' "$file" 2>/dev/null
  done < "$FILES_LIST" | head -50 | while IFS=$'\t' read -r file line level; do
    # head -50 caps the total number of deep-nesting findings per run.
    add_finding "bloaters" "warning" "deep_nesting" "$file" "$line" \
      "Nesting level $level (threshold: $MAX_NESTING)" \
      "Use early returns, guard clauses, or extract nested logic into functions"
  done

  # ─── Classes with too many methods ───────────────────────
  # Counts indented method-like signatures between class declarations;
  # the END block flushes the last class in the file.
  while IFS= read -r file; do
    awk -v max="$MAX_METHODS" -v fname="$file" '
      /^[[:space:]]*export[[:space:]]+class[[:space:]]+/ || /^[[:space:]]*class[[:space:]]+/ {
        if (class_name != "" && method_count > max) {
          printf "%s\t%d\t%d\t%s\n", fname, class_start, method_count, class_name
        }
        class_name = $0
        sub(/.*class[[:space:]]+/, "", class_name)
        sub(/[^a-zA-Z0-9_].*/, "", class_name)
        class_start = NR
        method_count = 0
        in_class = 1
      }
      in_class && /^[[:space:]]+(async[[:space:]]+)?[a-zA-Z_][a-zA-Z0-9_]*[[:space:]]*\(/ {
        method_count++
      }
      END {
        if (class_name != "" && method_count > max) {
          printf "%s\t%d\t%d\t%s\n", fname, class_start, method_count, class_name
        }
      }
    ' "$file" 2>/dev/null
  done < "$FILES_LIST" | while IFS=$'\t' read -r file line count name; do
    add_finding "bloaters" "warning" "too_many_methods" "$file" "$line" \
      "Class '$name' has $count methods (threshold: $MAX_METHODS)" \
      "Split into smaller focused classes following Single Responsibility Principle"
  done
}
|
|
265
|
+
|
|
266
|
+
# ═══════════════════════════════════════════════════════════════
|
|
267
|
+
# SOLID VIOLATIONS
|
|
268
|
+
# ═══════════════════════════════════════════════════════════════
|
|
269
|
+
|
|
270
|
+
#######################################
# SOLID-violation checks: controllers doing persistence/business logic,
# services with too many injected dependencies, service-locator usage,
# and manual instantiation that bypasses the DI container.
# Globals: FILES_LIST, MAX_DEPS (read)
# Outputs: findings recorded via add_finding
#
# FIX: the original patterns were BRE with unbalanced "\(" escapes (e.g.
# "\.find\(\|..."), which is a grep *syntax error* — with stderr discarded,
# those checks silently never matched anything. Rewritten with grep -E,
# where "\(" matches a literal parenthesis.
#######################################
scan_solid() {
  # ─── Fat controllers (DB/business logic in controllers) ──
  while IFS= read -r file; do
    [[ "$file" != *".controller."* ]] && continue

    # Direct model/repository calls inside a controller (first 5 hits).
    grep -nE '\.(find|findOne|create|update|delete|save|aggregate)\(' "$file" 2>/dev/null | \
      head -5 | while IFS=: read -r line _content; do
      add_finding "solid" "critical" "fat_controller" "$file" "$line" \
        "Controller contains database operations directly" \
        "Move database operations to the service layer — controllers should only validate input and delegate"
    done

    # Compound boolean conditions as a proxy for business logic.
    grep -n "if.*&&.*||" "$file" 2>/dev/null | head -3 | while IFS=: read -r line _content; do
      add_finding "solid" "warning" "fat_controller" "$file" "$line" \
        "Controller contains complex business logic" \
        "Extract business logic to the service layer"
    done
  done < "$FILES_LIST"

  # ─── Too many injected dependencies (SRP smell) ──────────
  while IFS= read -r file; do
    [[ "$file" != *".service."* ]] && continue
    local dep_count
    dep_count=$(grep -cE "@Inject|private.*Service|private.*Repository|private readonly" "$file" 2>/dev/null)
    dep_count=${dep_count:-0}
    if [ "$dep_count" -gt "$MAX_DEPS" ]; then
      add_finding "solid" "warning" "too_many_deps" "$file" 1 \
        "Service has $dep_count injected dependencies (threshold: $MAX_DEPS)" \
        "This service may be doing too much — consider splitting responsibilities"
    fi
  done < "$FILES_LIST"

  # ─── Service locator anti-pattern ────────────────────────
  while IFS= read -r file; do
    grep -nE 'moduleRef\.(get|resolve)\(' "$file" 2>/dev/null | \
      while IFS=: read -r line _content; do
      add_finding "solid" "warning" "service_locator" "$file" "$line" \
        "Using moduleRef.get() — service locator anti-pattern" \
        "Use constructor injection instead of runtime service resolution"
    done
  done < "$FILES_LIST"

  # ─── new keyword for service instantiation ───────────────
  while IFS= read -r file; do
    grep -nE 'new .*(Service|Repository|Controller)\(' "$file" 2>/dev/null | \
      grep -v "\.spec\.\|\.test\.\|mock\|Mock\|stub\|Stub" | \
      while IFS=: read -r line content; do
      add_finding "solid" "warning" "dip_violation" "$file" "$line" \
        "Manual instantiation with 'new' instead of dependency injection" \
        "Let the DI container manage service instantiation"
    done
  done < "$FILES_LIST"
}
|
|
327
|
+
|
|
328
|
+
# ═══════════════════════════════════════════════════════════════
|
|
329
|
+
# CODE SMELLS
|
|
330
|
+
# ═══════════════════════════════════════════════════════════════
|
|
331
|
+
|
|
332
|
+
#######################################
# Code-smell checks: empty catch blocks, console usage, magic numbers,
# hardcoded URLs/emails, 'as any' assertions, boolean parameters,
# ticket-less TODOs, and commented-out code blocks.
# Globals: FILES_LIST (read)
# Outputs: findings recorded via add_finding; most checks cap their hits
#          per file with head -N to keep output bounded
#######################################
scan_smells() {
  # ─── Empty catch blocks (try-catch only, not .catch() promise chains) ───
  # awk state machine: a "catch (" line arms in_catch; an immediate "}"
  # reports an empty block, while any other non-blank line disarms it.
  while IFS= read -r file; do
    awk -v fname="$file" '
      /^[[:space:]]*\} *catch[[:space:]]*\(/ || /^[[:space:]]*catch[[:space:]]*\(/ {
        catch_line = NR; in_catch = 1; next
      }
      in_catch && /^[[:space:]]*\}/ {
        printf "%s\t%d\n", fname, catch_line
        in_catch = 0
      }
      in_catch && /[^[:space:]]/ { in_catch = 0 }
    ' "$file" 2>/dev/null
  done < "$FILES_LIST" | while IFS=$'\t' read -r file line; do
    add_finding "smells" "critical" "empty_catch" "$file" "$line" \
      "Empty catch block — errors are silently swallowed" \
      "Log the error or re-throw with context. Never silently ignore exceptions"
  done

  # ─── console.log/warn/error in production code ──────────
  while IFS= read -r file; do
    [[ "$file" == *".spec."* || "$file" == *".test."* ]] && continue
    grep -n "console\.\(log\|warn\|error\|debug\|info\)" "$file" 2>/dev/null | \
      head -5 | while IFS=: read -r line _content; do
      add_finding "smells" "warning" "console_usage" "$file" "$line" \
        "console.log/warn/error in production code" \
        "Use a structured logger (e.g., Pino, Winston) instead of console methods"
    done
  done < "$FILES_LIST"

  # ─── Magic numbers ──────────────────────────────────────
  # 2+ digit literals not adjacent to identifiers/quotes/CSS units; the
  # second grep filters common benign contexts (imports, ports, enums...).
  while IFS= read -r file; do
    [[ "$file" == *".spec."* || "$file" == *".test."* || "$file" == *".config."* ]] && continue
    grep -n '[^a-zA-Z0-9_"'"'"'`\.\/\-][-]\?[0-9]\{2,\}[^a-zA-Z0-9_"'"'"'`px%em\.:\/\-]' "$file" 2>/dev/null | \
      grep -v "import\|require\|console\|//\|TODO\|port\|PORT\|0x\|0b\|index\|length\|\[\|enum\|version\|Version" | \
      head -5 | while IFS=: read -r line content; do
      add_finding "smells" "info" "magic_number" "$file" "$line" \
        "Magic number in code — raw numeric literal" \
        "Extract to a named constant that explains the value's meaning"
    done
  done < "$FILES_LIST"

  # ─── Hardcoded URLs, emails, API keys ────────────────────
  while IFS= read -r file; do
    [[ "$file" == *".spec."* || "$file" == *".test."* || "$file" == *".config."* ]] && continue
    # URLs (not localhost, not imports)
    grep -n 'https\?://[a-zA-Z]' "$file" 2>/dev/null | \
      grep -v "localhost\|127\.0\.0\.1\|import\|require\|//.*http\|swagger\|example\.com\|schema\.org\|node_modules" | \
      head -3 | while IFS=: read -r line _content; do
      add_finding "smells" "warning" "hardcoded_url" "$file" "$line" \
        "Hardcoded URL in source code" \
        "Move to environment variable or configuration"
    done

    # Emails (excluding decorators like @Module/@Injectable and doc tags)
    grep -n '[a-zA-Z0-9._%+-]\+@[a-zA-Z0-9.-]\+\.[a-zA-Z]\{2,\}' "$file" 2>/dev/null | \
      grep -v "import\|require\|@\(Module\|Controller\|Injectable\|Schema\|Prop\|ApiProperty\|nestjs\|angular\|types\)" | \
      grep -v "\.spec\.\|\.test\.\|example\.com\|@example\|@param\|@returns\|@deprecated" | \
      head -3 | while IFS=: read -r line _content; do
      add_finding "smells" "warning" "hardcoded_email" "$file" "$line" \
        "Hardcoded email address in source code" \
        "Move to environment variable or configuration"
    done
  done < "$FILES_LIST"

  # ─── as any type assertions ─────────────────────────────
  while IFS= read -r file; do
    [[ "$file" == *".spec."* || "$file" == *".test."* ]] && continue
    grep -n " as any\b" "$file" 2>/dev/null | \
      head -5 | while IFS=: read -r line _content; do
      add_finding "smells" "warning" "as_any" "$file" "$line" \
        "'as any' type assertion bypasses TypeScript safety" \
        "Use proper typing, generics, or 'as unknown as Type' if truly needed"
    done
  done < "$FILES_LIST"

  # ─── Boolean function parameters ────────────────────────
  # NOTE(review): the grep -v here filters on the matched line *content*,
  # not the filename — presumably intended to skip interface/type
  # declarations; verify it behaves as expected on .dto. paths.
  while IFS= read -r file; do
    grep -n '([^)]*:[[:space:]]*boolean[^)]*)[[:space:]]*[:{]' "$file" 2>/dev/null | \
      grep -v "\.spec\.\|\.test\.\|\.dto\.\|interface\|type " | \
      head -5 | while IFS=: read -r line _content; do
      add_finding "smells" "info" "boolean_param" "$file" "$line" \
        "Boolean function parameter — callers will pass opaque true/false" \
        "Use an options object or separate functions for clarity"
    done
  done < "$FILES_LIST"

  # ─── TODO/FIXME/HACK without ticket reference ──────────
  while IFS= read -r file; do
    grep -n 'TODO\|FIXME\|HACK\|XXX' "$file" 2>/dev/null | \
      grep -v '#[0-9]\|JIRA\|ticket\|issue\|github\.com\|LINEAR\|ASANA' | \
      head -10 | while IFS=: read -r line content; do
      local tag
      tag=$(echo "$content" | grep -o 'TODO\|FIXME\|HACK\|XXX' | head -1)
      add_finding "smells" "info" "todo_without_ticket" "$file" "$line" \
        "$tag comment without ticket/issue reference" \
        "Add a ticket reference (e.g., TODO #123) or resolve the issue"
    done
  done < "$FILES_LIST"

  # ─── Commented-out code blocks ──────────────────────────
  # Runs of 3+ consecutive "//"-commented lines that start with a code
  # keyword; the END block flushes a run that reaches end-of-file.
  while IFS= read -r file; do
    awk -v fname="$file" '
      /^[[:space:]]*\/\/[[:space:]]*(import|export|const|let|var|function|class|if|for|while|return|await|async|try|catch)/ {
        if (!in_comment) { start = NR; count = 0 }
        in_comment = 1
        count++
        next
      }
      {
        if (in_comment && count >= 3) {
          printf "%s\t%d\t%d\n", fname, start, count
        }
        in_comment = 0
      }
      END {
        if (in_comment && count >= 3) {
          printf "%s\t%d\t%d\n", fname, start, count
        }
      }
    ' "$file" 2>/dev/null
  done < "$FILES_LIST" | while IFS=$'\t' read -r file line count; do
    add_finding "smells" "info" "commented_code" "$file" "$line" \
      "$count consecutive lines of commented-out code" \
      "Delete commented code — use git history to recover if needed"
  done
}
|
|
459
|
+
|
|
460
|
+
# ═══════════════════════════════════════════════════════════════
|
|
461
|
+
# NAMING
|
|
462
|
+
# ═══════════════════════════════════════════════════════════════
|
|
463
|
+
|
|
464
|
+
#######################################
# Naming checks: single-letter variables and generic variable names.
# Globals: FILES_LIST (read)
# Outputs: findings recorded via add_finding (max 5 per check per file)
#######################################
scan_naming() {
  # ─── Single-letter variables (outside loops) ─────────────
  while IFS= read -r file; do
    [[ "$file" == *".spec."* || "$file" == *".test."* ]] && continue
    grep -n '\b\(const\|let\|var\)\s\+[a-zA-Z]\s*[=:;]' "$file" 2>/dev/null | \
      grep -v 'for\s*(\|\.map\|\.filter\|\.reduce\|\.forEach\|=>\|catch' | \
      head -5 | while IFS=: read -r line content; do
      local varname
      # Capture group 2 of the declaration pattern is the one-letter name.
      varname=$(echo "$content" | sed 's/.*\(const\|let\|var\)\s\+\([a-zA-Z]\)\s*[=:;].*/\2/')
      # Skip i, j, k, _ (common loop/unused vars)
      [[ "$varname" =~ ^[ijk_]$ ]] && continue
      add_finding "naming" "info" "single_letter_var" "$file" "$line" \
        "Single-letter variable '$varname' — not descriptive" \
        "Use a meaningful name that reveals the variable's purpose"
    done
  done < "$FILES_LIST"

  # ─── Generic names ──────────────────────────────────────
  while IFS= read -r file; do
    [[ "$file" == *".spec."* || "$file" == *".test."* || "$file" == *".dto."* ]] && continue
    grep -n '\b\(const\|let\|var\)\s\+\(data\|result\|temp\|info\|item\|obj\|val\|value\|res\|ret\|tmp\)\b' "$file" 2>/dev/null | \
      grep -v 'import\|require\|interface\|type\s' | \
      head -5 | while IFS=: read -r line content; do
      local varname
      varname=$(echo "$content" | grep -o '\b\(data\|result\|temp\|info\|item\|obj\|val\|value\|res\|ret\|tmp\)\b' | head -1)
      add_finding "naming" "info" "generic_name" "$file" "$line" \
        "Generic variable name '$varname' — doesn't reveal intent" \
        "Use a domain-specific name (e.g., 'userData', 'orderResult', 'configValue')"
    done
  done < "$FILES_LIST"
}
|
|
495
|
+
|
|
496
|
+
# ═══════════════════════════════════════════════════════════════
|
|
497
|
+
# RUN SCANNERS
|
|
498
|
+
# ═══════════════════════════════════════════════════════════════
|
|
499
|
+
|
|
500
|
+
# Dispatch by category name; each name maps onto a scan_<name> function
# defined above. --category limits the run to a single scanner.
for scan_category in bloaters solid smells naming; do
  if [ "$CATEGORY" = "all" ] || [ "$CATEGORY" = "$scan_category" ]; then
    "scan_${scan_category}"
  fi
done
|
|
515
|
+
|
|
516
|
+
# ═══════════════════════════════════════════════════════════════
|
|
517
|
+
# OUTPUT
|
|
518
|
+
# ═══════════════════════════════════════════════════════════════
|
|
519
|
+
|
|
520
|
+
# Safe grep -c wrapper (avoids 0\n0 issue with pipefail + || echo 0)
|
|
521
|
+
# Count lines of $2 matching pattern $1, printing 0 when there are no
# matches or the file is unreadable. grep -c exits non-zero on zero
# matches, which must not be allowed to break the summary arithmetic.
count_matches() {
  local hits
  hits=$(grep -c "$1" "$2" 2>/dev/null) || :
  printf '%s\n' "${hits:-0}"
}
|
|
526
|
+
|
|
527
|
+
# Tally findings: TOTAL is a raw line count of the JSONL findings file;
# the rest are substring counts over the serialized records.
TOTAL=$(wc -l < "$FINDINGS_FILE" | tr -d ' ')
CRITICAL=$(count_matches '"severity":"critical"' "$FINDINGS_FILE")
WARNINGS=$(count_matches '"severity":"warning"' "$FINDINGS_FILE")
INFO=$(count_matches '"severity":"info"' "$FINDINGS_FILE")

# Count by category
BLOATER_COUNT=$(count_matches '"category":"bloaters"' "$FINDINGS_FILE")
SOLID_COUNT=$(count_matches '"category":"solid"' "$FINDINGS_FILE")
SMELL_COUNT=$(count_matches '"category":"smells"' "$FINDINGS_FILE")
NAMING_COUNT=$(count_matches '"category":"naming"' "$FINDINGS_FILE")
|
537
|
+
|
|
538
|
+
if [ "$SUMMARY_ONLY" = true ]; then
  # --summary: counts only, rendered via a single (expanding) here-doc.
  cat <<EOF
{
  "scan": "deep-scan",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "target": "$TARGET",
  "files_scanned": $FILE_COUNT,
  "summary": {
    "total": $TOTAL,
    "critical": $CRITICAL,
    "warning": $WARNINGS,
    "info": $INFO,
    "by_category": {
      "bloaters": $BLOATER_COUNT,
      "solid": $SOLID_COUNT,
      "smells": $SMELL_COUNT,
      "naming": $NAMING_COUNT
    }
  }
}
EOF
else
  # Full mode: the same header/summary plus the findings array. Findings
  # are stored as JSONL; commas are emitted *between* records so the array
  # stays valid JSON whether there are zero, one, or many findings.
  {
    echo '{'
    echo '  "scan": "deep-scan",'
    echo '  "timestamp": "'$(date -u +%Y-%m-%dT%H:%M:%SZ)'",'
    echo '  "target": "'"$TARGET"'",'
    echo '  "files_scanned": '$FILE_COUNT','
    echo '  "summary": {'
    echo '    "total": '$TOTAL','
    echo '    "critical": '$CRITICAL','
    echo '    "warning": '$WARNINGS','
    echo '    "info": '$INFO','
    echo '    "by_category": {'
    echo '      "bloaters": '$BLOATER_COUNT','
    echo '      "solid": '$SOLID_COUNT','
    echo '      "smells": '$SMELL_COUNT','
    echo '      "naming": '$NAMING_COUNT
    echo '    }'
    echo '  },'
    echo '  "findings": ['

    first=true
    while IFS= read -r line; do
      [ -z "$line" ] && continue
      if [ "$first" = true ]; then
        first=false
      else
        echo ','
      fi
      printf '    %s' "$line"
    done < "$FINDINGS_FILE"

    echo ''
    echo '  ]'
    echo '}'
  }
fi
|
|
596
|
+
|
|
597
|
+
# ─── Exit code ──────────────────────────────────────────────
|
|
598
|
+
# Map severities onto the documented exit codes:
# 2 = critical findings, 1 = warnings/info only, 0 = clean.
rc=0
if [ "$WARNINGS" -gt 0 ] || [ "$INFO" -gt 0 ]; then rc=1; fi
if [ "$CRITICAL" -gt 0 ]; then rc=2; fi
exit "$rc"
|