ginskill-init 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/README.md +77 -0
  2. package/agents/developer.md +56 -0
  3. package/agents/frontend-design.md +69 -0
  4. package/agents/mobile-reviewer.md +36 -0
  5. package/agents/review-code.md +49 -0
  6. package/agents/security-scanner.md +50 -0
  7. package/agents/tester.md +72 -0
  8. package/bin/cli.js +226 -0
  9. package/package.json +20 -0
  10. package/skills/ai-asset-generator/SKILL.md +255 -0
  11. package/skills/ai-asset-generator/docs/gen-image.md +274 -0
  12. package/skills/ai-asset-generator/docs/genvideo.md +341 -0
  13. package/skills/ai-asset-generator/docs/remove-background.md +19 -0
  14. package/skills/ai-asset-generator/generate-credit-assets.mjs +180 -0
  15. package/skills/ai-asset-generator/generate-ginbrowser-assets.mjs +242 -0
  16. package/skills/ai-asset-generator/generate-sty-icon.mjs +149 -0
  17. package/skills/ai-asset-generator/lib/bg-remove.mjs +34 -0
  18. package/skills/ai-asset-generator/lib/env.mjs +38 -0
  19. package/skills/ai-asset-generator/lib/kie-client.mjs +88 -0
  20. package/skills/ai-asset-generator/scripts/scaffold-generator.mjs +203 -0
  21. package/skills/ai-build-ai/SKILL.md +124 -0
  22. package/skills/ai-build-ai/docs/agent-teams.md +293 -0
  23. package/skills/ai-build-ai/docs/checkpointing.md +161 -0
  24. package/skills/ai-build-ai/docs/create-agent.md +399 -0
  25. package/skills/ai-build-ai/docs/create-mcp.md +395 -0
  26. package/skills/ai-build-ai/docs/create-skill.md +299 -0
  27. package/skills/ai-build-ai/docs/headless-mode.md +614 -0
  28. package/skills/ai-build-ai/docs/hooks.md +578 -0
  29. package/skills/ai-build-ai/docs/memory-claude-md.md +375 -0
  30. package/skills/ai-build-ai/docs/output-styles.md +208 -0
  31. package/skills/ai-build-ai/docs/overview.md +162 -0
  32. package/skills/ai-build-ai/docs/permissions.md +391 -0
  33. package/skills/ai-build-ai/docs/plugins.md +396 -0
  34. package/skills/ai-build-ai/docs/sandbox.md +262 -0
  35. package/skills/ai-build-ai/scripts/load-tutorial.sh +54 -0
  36. package/skills/icon-generator/SKILL.md +270 -0
  37. package/skills/mobile-app-review/SKILL.md +321 -0
  38. package/skills/mobile-app-review/references/apple-review.md +132 -0
  39. package/skills/mobile-app-review/references/google-play-review.md +203 -0
  40. package/skills/mongodb/SKILL.md +667 -0
  41. package/skills/mongodb/references/mongoose-patterns.md +368 -0
  42. package/skills/nestjs-architecture/SKILL.md +1086 -0
  43. package/skills/nestjs-architecture/references/advanced-patterns.md +590 -0
  44. package/skills/performance/SKILL.md +509 -0
  45. package/skills/react-fsd-architecture/SKILL.md +693 -0
  46. package/skills/react-fsd-architecture/references/fsd-patterns.md +747 -0
  47. package/skills/react-query/SKILL.md +685 -0
  48. package/skills/react-query/references/query-patterns.md +365 -0
  49. package/skills/review-code/SKILL.md +321 -0
  50. package/skills/review-code/references/clean-code-principles.md +395 -0
  51. package/skills/review-code/references/frontend-patterns.md +136 -0
  52. package/skills/review-code/references/nestjs-patterns.md +184 -0
  53. package/skills/review-code/scripts/check-module.sh +201 -0
  54. package/skills/review-code/scripts/deep-scan.sh +604 -0
  55. package/skills/review-code/scripts/dep-check.sh +522 -0
  56. package/skills/review-code/scripts/detect-duplicates.sh +466 -0
  57. package/skills/review-code/scripts/format-check.sh +577 -0
  58. package/skills/review-code/scripts/run-review.sh +167 -0
  59. package/skills/review-code/scripts/scan-codebase.sh +152 -0
  60. package/skills/security-scanner/SKILL.md +327 -0
  61. package/skills/security-scanner/references/nestjs-security.md +260 -0
  62. package/skills/security-scanner/references/nextjs-security.md +201 -0
  63. package/skills/security-scanner/references/react-native-security.md +199 -0
  64. package/skills/security-scanner/scripts/security-scan.sh +478 -0
  65. package/skills/ui-ux-pro-max/SKILL.md +377 -0
  66. package/skills/ui-ux-pro-max/data/charts.csv +26 -0
  67. package/skills/ui-ux-pro-max/data/colors.csv +97 -0
  68. package/skills/ui-ux-pro-max/data/icons.csv +101 -0
  69. package/skills/ui-ux-pro-max/data/landing.csv +31 -0
  70. package/skills/ui-ux-pro-max/data/products.csv +97 -0
  71. package/skills/ui-ux-pro-max/data/react-performance.csv +45 -0
  72. package/skills/ui-ux-pro-max/data/stacks/astro.csv +54 -0
  73. package/skills/ui-ux-pro-max/data/stacks/flutter.csv +53 -0
  74. package/skills/ui-ux-pro-max/data/stacks/html-tailwind.csv +56 -0
  75. package/skills/ui-ux-pro-max/data/stacks/jetpack-compose.csv +53 -0
  76. package/skills/ui-ux-pro-max/data/stacks/nextjs.csv +53 -0
  77. package/skills/ui-ux-pro-max/data/stacks/nuxt-ui.csv +51 -0
  78. package/skills/ui-ux-pro-max/data/stacks/nuxtjs.csv +59 -0
  79. package/skills/ui-ux-pro-max/data/stacks/react-native.csv +52 -0
  80. package/skills/ui-ux-pro-max/data/stacks/react.csv +54 -0
  81. package/skills/ui-ux-pro-max/data/stacks/shadcn.csv +61 -0
  82. package/skills/ui-ux-pro-max/data/stacks/svelte.csv +54 -0
  83. package/skills/ui-ux-pro-max/data/stacks/swiftui.csv +51 -0
  84. package/skills/ui-ux-pro-max/data/stacks/vue.csv +50 -0
  85. package/skills/ui-ux-pro-max/data/styles.csv +68 -0
  86. package/skills/ui-ux-pro-max/data/typography.csv +58 -0
  87. package/skills/ui-ux-pro-max/data/ui-reasoning.csv +101 -0
  88. package/skills/ui-ux-pro-max/data/ux-guidelines.csv +100 -0
  89. package/skills/ui-ux-pro-max/data/web-interface.csv +31 -0
  90. package/skills/ui-ux-pro-max/scripts/core.py +253 -0
  91. package/skills/ui-ux-pro-max/scripts/design_system.py +1067 -0
  92. package/skills/ui-ux-pro-max/scripts/search.py +114 -0
@@ -0,0 +1,466 @@
1
#!/usr/bin/env bash
# ─────────────────────────────────────────────────────────────
# Duplicate Code Detector
#
# Finds DRY violations using pure bash + awk (no npm deps):
# structurally similar files, repeated string literals, and
# copy-pasted import blocks.
#
# Usage:
#   ./detect-duplicates.sh                  # scan all projects
#   ./detect-duplicates.sh backend          # scan backend only
#   ./detect-duplicates.sh frontend        # scan frontend only
#   ./detect-duplicates.sh mobile          # scan mobile only
#   ./detect-duplicates.sh --min-repeats 2 # minimum string repetitions
#
# Output: JSON to stdout
# Exit codes: 0 = clean, 1 = duplicates found
# ─────────────────────────────────────────────────────────────

# NOTE: -e is deliberately omitted — grep and friends exit non-zero on
# "no match", which is a perfectly normal outcome for these scans.
set -uo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# The script ships five levels deep (…/skills/review-code/scripts),
# so the repository root is five directories up.
REPO_ROOT="$(cd "$SCRIPT_DIR/../../../../.." && pwd)"

# ─── Defaults ────────────────────────────────────────────────
MIN_STRING_REPEATS=3   # a literal must recur at least this often to be flagged
TARGET="all"           # which project tree to scan (backend|frontend|mobile|auth|all)
28
+
29
+ # ─── Auto-detect project directories ────────────────────────
30
# detect_dirs KIND CANDIDATE_GLOB...
# Prints the source directory (src/ or app/) of the first candidate under
# $REPO_ROOT that has one; prints an empty line when nothing matches.
# KIND is a human-readable label only — it is not used for matching.
detect_dirs() {
  local kind="$1"; shift
  local candidate dir
  for candidate in "$@"; do
    # FIX: quote the root (it may contain spaces or glob characters) while
    # leaving the candidate part unquoted so patterns like "be-*" still glob.
    for dir in "$REPO_ROOT"/$candidate; do
      if [ -d "$dir/src" ]; then printf '%s\n' "$dir/src"; return; fi
      if [ -d "$dir/app" ]; then printf '%s\n' "$dir/app"; return; fi
    done
  done
  # No candidate matched — emit an empty line so callers capture "".
  echo ""
}
40
+
41
# ─── Auto-detect project source roots ───────────────────────
# FIX: the glob patterns are quoted so they reach detect_dirs literally and
# are expanded under $REPO_ROOT there — unquoted, the shell would first
# expand them against the script's current working directory.
# Each variable may be empty when the corresponding project is absent.
BE_SRC="$(detect_dirs backend 'be-*' backend server api)"
FE_SRC="$(detect_dirs frontend 'web-*' frontend client)"
MB_SRC="$(detect_dirs mobile styai-mobile mobile)"
AUTH_SRC=""
[ -d "$REPO_ROOT/auth-package/src" ] && AUTH_SRC="$REPO_ROOT/auth-package/src"
46
+
47
+ # ─── Parse arguments ────────────────────────────────────────
48
# ─── Parse arguments ────────────────────────────────────────
# Positional keywords select the scan target; --min-repeats tunes the
# string-literal threshold; --min-lines is accepted (and ignored) for
# interface parity with the sibling review scripts.
while [[ $# -gt 0 ]]; do
  case "$1" in
    backend)  TARGET="backend"; shift ;;
    frontend) TARGET="frontend"; shift ;;
    mobile)   TARGET="mobile"; shift ;;
    auth)     TARGET="auth"; shift ;;
    --min-lines)
      # FIX: "shift 2" fails (shifting nothing) when the value is missing,
      # which left $# unchanged and spun this loop forever.
      if [[ $# -ge 2 ]]; then shift 2; else shift; fi ;;
    --min-repeats)
      if [[ $# -ge 2 ]]; then
        MIN_STRING_REPEATS="$2"; shift 2
      else
        echo "error: --min-repeats requires a value" >&2
        exit 2
      fi ;;
    *) shift ;;
  esac
done
59
+
60
+ # ─── Build file list ────────────────────────────────────────
61
# ─── Scratch space ──────────────────────────────────────────
# All intermediate artifacts live in a single temp dir, removed on exit.
TMPDIR_WORK=$(mktemp -d)
trap 'rm -rf "$TMPDIR_WORK"' EXIT

FILES_LIST="$TMPDIR_WORK/files.txt"         # one candidate source file per line
FINDINGS_FILE="$TMPDIR_WORK/findings.jsonl" # one JSON finding per line
touch "$FINDINGS_FILE"
67
+
68
# build_file_list
# Prints every TypeScript source file under the directories selected by
# $TARGET, excluding build output, vendored code, tests and declarations.
# Reads globals: TARGET, BE_SRC, FE_SRC, MB_SRC, AUTH_SRC.
build_file_list() {
  # FIX: collect roots in an array so paths containing spaces survive
  # (the original joined them into one space-separated string).
  local -a dirs=()
  case "$TARGET" in
    backend)  [ -n "$BE_SRC" ] && dirs+=("$BE_SRC") ;;
    frontend) [ -n "$FE_SRC" ] && dirs+=("$FE_SRC") ;;
    mobile)   [ -n "$MB_SRC" ] && dirs+=("$MB_SRC") ;;
    auth)     [ -n "$AUTH_SRC" ] && dirs+=("$AUTH_SRC") ;;
    all)
      [ -n "$BE_SRC" ] && dirs+=("$BE_SRC")
      [ -n "$FE_SRC" ] && dirs+=("$FE_SRC")
      [ -n "$MB_SRC" ] && dirs+=("$MB_SRC")
      [ -n "$AUTH_SRC" ] && dirs+=("$AUTH_SRC")
      ;;
  esac
  [ "${#dirs[@]}" -eq 0 ] && return

  local dir
  for dir in "${dirs[@]}"; do
    # FIX: the suffix filters use -name, not -path — the original -path
    # globs also excluded files merely located under a directory whose
    # name happened to contain ".spec." or ".test.".
    find "$dir" \( -name "*.ts" -o -name "*.tsx" \) \
      -not -path "*/node_modules/*" \
      -not -path "*/.next/*" \
      -not -path "*/dist/*" \
      -not -path "*/__tests__/*" \
      -not -name "*.spec.*" \
      -not -name "*.test.*" \
      -not -name "*.d.ts" \
      2>/dev/null
  done
}
96
+
97
# Materialise the candidate list once; every detector reads it from disk.
build_file_list | sort > "$FILES_LIST"
FILE_COUNT=$(wc -l < "$FILES_LIST" | tr -d ' ')

# Nothing to scan — emit an empty (but valid) report and succeed.
if [ "$FILE_COUNT" -eq 0 ]; then
  echo '{"scan":"detect-duplicates","findings":[],"summary":{"total":0,"target":"'"$TARGET"'","files_scanned":0}}'
  exit 0
fi
104
+
105
+ # ─── Helpers ────────────────────────────────────────────────
106
# relpath PATH — print PATH relative to $REPO_ROOT (unchanged if outside it).
# FIX: the pattern is quoted so glob characters in REPO_ROOT are literal,
# and printf avoids echo's option parsing (e.g. a path spelled "-n").
relpath() { printf '%s\n' "${1#"$REPO_ROOT"/}"; }
107
+
108
# json_escape STRING — print STRING as a quoted JSON value (no trailing newline).
# Prefers python3's json.dumps for complete escaping; falls back to a sed
# best-effort covering backslash, double quote and tab.
# NOTE(review): GNU sed reads \t as a tab but BSD sed treats it as a literal
# "t" — the fallback is only exercised where python3 is missing; confirm the
# target platforms before relying on it for tab-bearing strings.
json_escape() {
  local raw="$1"
  if printf '%s' "$raw" | python3 -c 'import json,sys; print(json.dumps(sys.stdin.read()), end="")' 2>/dev/null; then
    return
  fi
  printf '"%s"' "$(printf '%s' "$raw" | sed 's/\\/\\\\/g; s/"/\\"/g; s/\t/\\t/g')"
}
112
+
113
+ # ═══════════════════════════════════════════════════════════════
114
+ # 1. DUPLICATE STRING LITERALS
115
+ # ═══════════════════════════════════════════════════════════════
116
# ═══════════════════════════════════════════════════════════════
# 1. DUPLICATE STRING LITERALS
# ═══════════════════════════════════════════════════════════════
# Flags string literals (8+ chars) recurring $MIN_STRING_REPEATS+ times
# across the scanned files. Single- and double-quoted literals share one
# scanning engine; only the extraction pattern and the import/decorator
# exclusion list differ.
# FIX: the original duplicated the whole scan verbatim for each quote
# style — ironic in a duplicate detector — so the shared logic now lives
# in _scan_string_literals.
detect_duplicate_strings() {
  _scan_string_literals \
    "'[^']{8,}'" \
    "^'(import|require|from|@Module|@Controller|@Injectable|@Schema|node_modules)" \
    "$TMPDIR_WORK/strings.txt"
  _scan_string_literals \
    '"[^"]{8,}"' \
    '^"(import|require|from |@Module|@Controller|@Injectable|node_modules|use client|use server)' \
    "$TMPDIR_WORK/dstrings.txt"
}

# _scan_string_literals PATTERN EXCLUDE_RE WORKFILE
# Extracts literals matching PATTERN from every file in $FILES_LIST, drops
# matches of EXCLUDE_RE, keeps the 25 most frequent, and appends one
# duplicate_string finding (with up to 3 sample locations) for each literal
# repeated at least $MIN_STRING_REPEATS times.
_scan_string_literals() {
  local pattern="$1" exclude_re="$2" workfile="$3"

  # grep -o already yields just the matched literal; the original's -n plus
  # a read that discarded the line number was dead work.
  while IFS= read -r file; do
    grep -oE "$pattern" "$file" 2>/dev/null
  done < "$FILES_LIST" | \
    grep -vE "$exclude_re" | \
    sort | uniq -c | sort -rn | head -25 > "$workfile"

  local count str
  while read -r count str; do
    count=$(echo "$count" | tr -d ' ')
    [ "$count" -lt "$MIN_STRING_REPEATS" ] && continue

    # Collect up to three sample locations for the report.
    local locs="" loc_count=0 file lineno
    while IFS= read -r file; do
      if grep -qF "$str" "$file" 2>/dev/null; then
        lineno=$(grep -nF "$str" "$file" 2>/dev/null | head -1 | cut -d: -f1)
        [ -n "$lineno" ] && {
          [ "$loc_count" -gt 0 ] && locs="$locs,"
          locs="$locs{\"file\":\"$(relpath "$file")\",\"line\":$lineno}"
          loc_count=$((loc_count + 1))
          [ "$loc_count" -ge 3 ] && break
        }
      fi
    done < "$FILES_LIST"

    echo "{\"type\":\"duplicate_string\",\"severity\":\"info\",\"count\":$count,\"value\":$(json_escape "$str"),\"locations\":[$locs],\"suggestion\":\"Extract to a named constant in a shared constants file\"}" >> "$FINDINGS_FILE"
  done < "$workfile"
}
183
+
184
+ # ═══════════════════════════════════════════════════════════════
185
+ # 2. SIMILAR FILES (by content hash)
186
+ # ═══════════════════════════════════════════════════════════════
187
# ═══════════════════════════════════════════════════════════════
# 2. SIMILAR FILES (by content hash)
# ═══════════════════════════════════════════════════════════════
# Fingerprints each file's structure (identifiers → ID, numbers → N,
# strings → S, whitespace collapsed) and reports groups of 2+ files whose
# skeletons hash identically.

# _structural_hash — digest stdin with whatever tool exists.
# FIX: the original shelled out to "md5 -q" (macOS-only), so on Linux every
# file was skipped and this detector silently reported nothing. Fall back
# through GNU md5sum and POSIX cksum.
_structural_hash() {
  if command -v md5 >/dev/null 2>&1; then
    md5 -q
  elif command -v md5sum >/dev/null 2>&1; then
    md5sum | cut -d' ' -f1
  else
    cksum | cut -d' ' -f1
  fi
}

detect_similar_files() {
  local hash_file="$TMPDIR_WORK/file_hashes.txt"

  while IFS= read -r file; do
    local lines
    lines=$(wc -l < "$file" | tr -d ' ')
    # Tiny files collide by accident — skip anything under 20 lines.
    [ "$lines" -lt 20 ] && continue

    # Structural fingerprint: strip comments, collapse whitespace,
    # normalize identifiers/numbers/strings, hash the skeleton.
    local hash
    hash=$(sed 's|//.*$||; s|/\*.*\*/||' "$file" | \
      tr -s '[:space:]' ' ' | \
      sed 's/[a-zA-Z_][a-zA-Z0-9_]*/ID/g; s/[0-9][0-9]*/N/g' | \
      sed "s/'[^']*'/S/g; s/\"[^\"]*\"/S/g" | \
      _structural_hash 2>/dev/null || echo "skip")
    [ "$hash" = "skip" ] && continue
    # FIX: fields must be TAB-separated — the sort/awk consumers below
    # split on tabs; the original's space-separated echo meant records
    # never shared a $1 and no group was ever detected.
    printf '%s\t%s\t%s\n' "$hash" "$lines" "$(relpath "$file")"
  done < "$FILES_LIST" > "$hash_file"

  # Group files by hash, report groups of 2+.
  sort -t$'\t' -k1,1 "$hash_file" | awk -F'\t' '
  {
    if ($1 == prev_hash && $1 != "") {
      if (count == 1) files = prev_file
      files = files "\t" $3
      count++
      flines = $2
    } else {
      if (count >= 2) {
        printf "%d\t%d\t%s\n", count, flines, files
      }
      count = 1
      files = ""
    }
    prev_hash = $1; prev_file = $3; flines = $2
  }
  END {
    if (count >= 2) {
      printf "%d\t%d\t%s\n", count, flines, files
    }
  }
  ' | sort -t$'\t' -k1 -rn | head -15 | while IFS=$'\t' read -r cnt flines rest; do
    local files_json=""
    local first=true
    local IFS=$'\t'
    for f in $rest; do
      [ -z "$f" ] && continue
      [ "$first" = true ] && first=false || files_json="$files_json,"
      files_json="$files_json{\"file\":\"$f\",\"lines\":$flines}"
    done

    echo "{\"type\":\"similar_files\",\"severity\":\"warning\",\"count\":$cnt,\"lines\":$flines,\"files\":[$files_json],\"suggestion\":\"These files have identical structure — consider extracting shared logic into a base class or utility\"}" >> "$FINDINGS_FILE"
  done
}
241
+
242
+ # ═══════════════════════════════════════════════════════════════
243
+ # 3. DUPLICATE IMPORT PATTERNS
244
+ # ═══════════════════════════════════════════════════════════════
245
# ═══════════════════════════════════════════════════════════════
# 3. DUPLICATE IMPORT PATTERNS
# ═══════════════════════════════════════════════════════════════
# Hashes each file's sorted import block; 3+ files sharing an identical
# block suggests a missing barrel export.

# _import_hash — digest stdin portably.
# FIX: the original's "md5 -q" exists only on macOS, so on Linux every
# file was skipped; fall back through md5sum and POSIX cksum.
_import_hash() {
  if command -v md5 >/dev/null 2>&1; then
    md5 -q
  elif command -v md5sum >/dev/null 2>&1; then
    md5sum | cut -d' ' -f1
  else
    cksum | cut -d' ' -f1
  fi
}

detect_duplicate_imports() {
  local import_file="$TMPDIR_WORK/import_hashes.txt"

  # For each file: extract import lines, sort them, hash the block.
  while IFS= read -r file; do
    local imports
    imports=$(grep '^import ' "$file" 2>/dev/null | sort)
    local import_count
    import_count=$(echo "$imports" | grep -c '^import' 2>/dev/null)
    import_count=${import_count:-0}
    [ "$import_count" -lt 3 ] && continue

    local hash
    hash=$(echo "$imports" | _import_hash 2>/dev/null || echo "skip")
    [ "$hash" = "skip" ] && continue

    local preview
    preview=$(echo "$imports" | head -1)
    # FIX: TAB-separated — the sort/awk consumers split on tabs; the
    # original's space-separated echo broke grouping entirely.
    printf '%s\t%s\t%s\n' "$hash" "$(relpath "$file")" "$preview"
  done < "$FILES_LIST" > "$import_file"

  # Group by hash; only groups of 3+ files are worth reporting.
  sort -t$'\t' -k1,1 "$import_file" | awk -F'\t' '
  {
    if ($1 == prev_hash && $1 != "") {
      if (count == 1) { files = prev_file; preview = prev_preview }
      files = files "\t" $2
      count++
    } else {
      if (count >= 3) {
        printf "%d\t%s\t%s\n", count, preview, files
      }
      count = 1
    }
    prev_hash = $1; prev_file = $2; prev_preview = $3
  }
  END {
    if (count >= 3) {
      printf "%d\t%s\t%s\n", count, preview, files
    }
  }
  ' | sort -t$'\t' -k1 -rn | head -10 | while IFS=$'\t' read -r cnt preview rest; do
    local files_json=""
    local first=true
    local IFS=$'\t'
    for f in $rest; do
      [ -z "$f" ] && continue
      [ "$first" = true ] && first=false || files_json="$files_json,"
      files_json="$files_json{\"file\":\"$f\"}"
    done

    echo "{\"type\":\"duplicate_imports\",\"severity\":\"info\",\"count\":$cnt,\"preview\":$(json_escape "$preview"),\"files\":[$files_json],\"suggestion\":\"Consider a barrel export (index.ts) to consolidate these shared imports\"}" >> "$FINDINGS_FILE"
  done
}
299
+
300
+ # ═══════════════════════════════════════════════════════════════
301
+ # 4. IDENTICAL FUNCTION BODIES
302
+ # ═══════════════════════════════════════════════════════════════
303
# ═══════════════════════════════════════════════════════════════
# 4. IDENTICAL FUNCTION BODIES
# ═══════════════════════════════════════════════════════════════
# Extracts function-like blocks (5+ lines) from every file, normalizes
# whitespace, and reports any body text that appears in 2+ places.
detect_identical_functions() {
  local func_file="$TMPDIR_WORK/func_hashes.txt"

  # For each file, emit one TSV record per function:
  #   normalized_body <TAB> file:line <TAB> name <TAB> line_count
  while IFS= read -r file; do
    awk -v fname="$file" '
    # A new function header flushes any block still being captured.
    /^[[:space:]]*(export[[:space:]]+)?(async[[:space:]]+)?function[[:space:]]+[a-zA-Z_]/ ||
    /^[[:space:]]*(async[[:space:]]+)?[a-zA-Z_][a-zA-Z0-9_]*[[:space:]]*\([^)]*\)[[:space:]]*\{/ {
      if (capturing && body_lines >= 5) {
        gsub(/[[:space:]]+/, " ", body)
        printf "%s\t%s:%d\t%s\t%d\n", body, fname, start_line, func_name, body_lines
      }
      capturing = 1
      start_line = NR
      body = ""
      body_lines = 0
      brace_depth = 0

      # Extract the function name from the header line.
      # NOTE(review): RSTART + 9 assumes exactly one space after
      # "function"; multiple spaces yield an empty name — confirm.
      line = $0
      sub(/^[[:space:]]+/, "", line)
      sub(/async[[:space:]]+/, "", line)
      sub(/export[[:space:]]+/, "", line)
      if (match(line, /function[[:space:]]+([a-zA-Z_][a-zA-Z0-9_]*)/)) {
        func_name = substr(line, RSTART + 9)
        sub(/[^a-zA-Z0-9_].*/, "", func_name)
      } else {
        func_name = line
        sub(/[^a-zA-Z0-9_].*/, "", func_name)
      }
    }
    capturing {
      body = body $0 "\n"
      body_lines++
      # FIX: count braces with gsub() — the original split($0, chars, "")
      # relies on null-FS character splitting, a gawk extension that
      # mawk/POSIX awk lack (brace tracking silently broke there).
      brace_depth += gsub(/\{/, "{")
      brace_depth -= gsub(/\}/, "}")
      if (brace_depth <= 0 && body_lines > 1) {
        if (body_lines >= 5) {
          gsub(/[[:space:]]+/, " ", body)
          printf "%s\t%s:%d\t%s\t%d\n", body, fname, start_line, func_name, body_lines
        }
        capturing = 0
        body = ""
        body_lines = 0
      }
    }
    ' "$file" 2>/dev/null
  done < "$FILES_LIST" | sort -t$'\t' -k1,1 > "$func_file"

  # Group identical bodies.
  # FIX: the original emitted "count, lines, locs..., names..." as flat
  # tab lists, but the bash consumer read only FOUR fields — so the first
  # location landed in $locs and everything else (other locations AND all
  # names) in $names, garbling the report. Emit "loc|name" pairs instead,
  # which the consumer can split unambiguously.
  awk -F'\t' '
  {
    key = $1; loc = $2; name = $3; lines = $4
    if (key == prev_key && key != "") {
      if (group_count == 1) {
        group_pairs = prev_loc "|" prev_name
      }
      group_pairs = group_pairs "\t" loc "|" name
      group_count++
      group_lines = lines
    } else {
      if (group_count >= 2) {
        printf "%d\t%d\t%s\n", group_count, group_lines, group_pairs
      }
      group_count = 1
      group_pairs = ""
      group_lines = lines
    }
    prev_key = key; prev_loc = loc; prev_name = name
  }
  END {
    if (group_count >= 2) {
      printf "%d\t%d\t%s\n", group_count, group_lines, group_pairs
    }
  }
  ' "$func_file" | sort -t$'\t' -k1 -rn | head -15 | while IFS=$'\t' read -r cnt lines pairs; do
    local files_json=""
    local first=true
    local saved_ifs="$IFS"
    IFS=$'\t'
    local pair_arr=($pairs)
    IFS="$saved_ifs"
    local pair loc fname f l
    for pair in "${pair_arr[@]}"; do
      loc="${pair%%|*}"
      fname="${pair##*|}"
      f="${loc%%:*}"
      l="${loc##*:}"
      [ "$first" = true ] && first=false || files_json="$files_json,"
      files_json="$files_json{\"file\":\"$(relpath "$f")\",\"line\":$l,\"function\":\"$fname\"}"
    done

    echo "{\"type\":\"identical_function\",\"severity\":\"warning\",\"count\":$cnt,\"lines\":$lines,\"locations\":[$files_json],\"suggestion\":\"These functions have identical bodies — extract to a shared utility\"}" >> "$FINDINGS_FILE"
  done
}
405
+
406
+ # ═══════════════════════════════════════════════════════════════
407
+ # RUN ALL DETECTORS
408
+ # ═══════════════════════════════════════════════════════════════
409
# ═══════════════════════════════════════════════════════════════
# RUN ALL DETECTORS
# ═══════════════════════════════════════════════════════════════
# Order matters: each detector appends to $FINDINGS_FILE, so this is the
# order findings surface in the final report.
detect_duplicate_strings
detect_duplicate_imports
detect_similar_files
detect_identical_functions
413
+
414
+ # ═══════════════════════════════════════════════════════════════
415
+ # OUTPUT JSON
416
+ # ═══════════════════════════════════════════════════════════════
417
# ═══════════════════════════════════════════════════════════════
# OUTPUT JSON
# ═══════════════════════════════════════════════════════════════
FINDING_COUNT=$(wc -l < "$FINDINGS_FILE" | tr -d ' ')

# Per-type tallies. grep -c prints 0 on no match; the :-0 expansions only
# guard the (unexpected) case where grep produced no output at all.
DUP_STRING=$(grep -c '"type":"duplicate_string"' "$FINDINGS_FILE" 2>/dev/null)
DUP_STRING=${DUP_STRING:-0}
SIM_FILES=$(grep -c '"type":"similar_files"' "$FINDINGS_FILE" 2>/dev/null)
SIM_FILES=${SIM_FILES:-0}
DUP_IMPORTS=$(grep -c '"type":"duplicate_imports"' "$FINDINGS_FILE" 2>/dev/null)
DUP_IMPORTS=${DUP_IMPORTS:-0}
ID_FUNCS=$(grep -c '"type":"identical_function"' "$FINDINGS_FILE" 2>/dev/null)
ID_FUNCS=${ID_FUNCS:-0}

# Report header: fixed shape, values interpolated by the here-doc.
cat <<EOF
{
  "scan": "detect-duplicates",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "target": "$TARGET",
  "files_scanned": $FILE_COUNT,
  "min_string_repeats": $MIN_STRING_REPEATS,
  "total_findings": $FINDING_COUNT,
  "by_type": {
    "duplicate_string": $DUP_STRING,
    "similar_files": $SIM_FILES,
    "duplicate_imports": $DUP_IMPORTS,
    "identical_function": $ID_FUNCS
  },
  "findings": [
EOF

# Join the JSONL findings with commas (no trailing comma).
emitted=0
while IFS= read -r line; do
  [ -z "$line" ] && continue
  [ "$emitted" -gt 0 ] && echo ','
  printf '    %s' "$line"
  emitted=$((emitted + 1))
done < "$FINDINGS_FILE"

echo ''
echo '  ]'
echo '}'
460
+
461
+ # ─── Exit code ──────────────────────────────────────────────
462
# ─── Exit code ──────────────────────────────────────────────
# Non-zero signals "duplicates found" so CI pipelines can gate on it.
[ "$FINDING_COUNT" -gt 0 ] && exit 1
exit 0