@vibecheckai/cli 3.1.8 → 3.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/bin/registry.js +106 -116
  2. package/bin/runners/context/generators/mcp.js +18 -0
  3. package/bin/runners/context/index.js +72 -4
  4. package/bin/runners/context/proof-context.js +293 -1
  5. package/bin/runners/context/security-scanner.js +311 -73
  6. package/bin/runners/lib/analyzers.js +607 -20
  7. package/bin/runners/lib/detectors-v2.js +172 -15
  8. package/bin/runners/lib/entitlements-v2.js +48 -1
  9. package/bin/runners/lib/evidence-pack.js +678 -0
  10. package/bin/runners/lib/html-proof-report.js +913 -0
  11. package/bin/runners/lib/missions/plan.js +231 -41
  12. package/bin/runners/lib/missions/templates.js +125 -0
  13. package/bin/runners/lib/scan-output.js +492 -253
  14. package/bin/runners/lib/ship-output.js +901 -641
  15. package/bin/runners/runCheckpoint.js +44 -3
  16. package/bin/runners/runContext.d.ts +4 -0
  17. package/bin/runners/runDoctor.js +10 -2
  18. package/bin/runners/runFix.js +51 -341
  19. package/bin/runners/runInit.js +11 -0
  20. package/bin/runners/runPolish.d.ts +4 -0
  21. package/bin/runners/runPolish.js +608 -29
  22. package/bin/runners/runProve.js +210 -25
  23. package/bin/runners/runReality.js +846 -101
  24. package/bin/runners/runScan.js +238 -4
  25. package/bin/runners/runShip.js +19 -3
  26. package/bin/runners/runWatch.js +14 -1
  27. package/bin/vibecheck.js +32 -2
  28. package/mcp-server/consolidated-tools.js +408 -42
  29. package/mcp-server/index.js +152 -15
  30. package/mcp-server/proof-tools.js +571 -0
  31. package/mcp-server/tier-auth.js +22 -19
  32. package/mcp-server/tools-v3.js +744 -0
  33. package/mcp-server/truth-firewall-tools.js +190 -4
  34. package/package.json +3 -1
  35. package/bin/runners/runInstall.js +0 -281
  36. package/bin/runners/runLabs.js +0 -341
@@ -36,6 +36,15 @@ function findMissingRoutes(truthpack) {
36
36
  // If we have route detection gaps, be less aggressive with BLOCKs
37
37
  const hasGaps = gaps.length > 0;
38
38
 
39
+ // In monorepos/microservices, many client refs may be to external services
40
+ // Only flag routes that look clearly invented/hallucinated
41
+ const serverRouteCount = server.length;
42
+ const clientRefCount = refs.length;
43
+
44
+ // If client refs >> server routes, this is likely a monorepo or microservice architecture
45
+ // Be very lenient in this case
46
+ const isLikelyMonorepo = clientRefCount > serverRouteCount * 3;
47
+
39
48
  // Build a set of known route path prefixes for smarter matching
40
49
  const knownPrefixes = new Set();
41
50
  for (const r of server) {
@@ -48,6 +57,10 @@ function findMissingRoutes(truthpack) {
48
57
  }
49
58
  }
50
59
 
60
+ // Track how many warnings we emit - show more to demonstrate value
61
+ let warningCount = 0;
62
+ const MAX_WARNINGS = 50; // Show more to demonstrate thoroughness
63
+
51
64
  for (const ref of refs) {
52
65
  const method = ref.method || "*";
53
66
  const p = ref.path;
@@ -61,26 +74,33 @@ function findMissingRoutes(truthpack) {
61
74
  const refPrefix2 = refParts.length >= 2 ? '/' + refParts[0] + '/' + refParts[1] : refPrefix1;
62
75
  const sharesPrefix = knownPrefixes.has(refPrefix1) || knownPrefixes.has(refPrefix2);
63
76
 
77
+ // In monorepos, still show routes but as warnings (demonstrates thoroughness)
78
+ // Skip only if route EXACTLY matches a known prefix AND monorepo
79
+ if (sharesPrefix && isLikelyMonorepo && warningCount > MAX_WARNINGS) continue;
80
+
64
81
  // Determine severity based on confidence and context
65
- // In monorepos with complex routing (plugins, dynamic registration), static analysis has limits
66
- // Default to WARN unless we're very confident the route is truly invented
82
+ // Only BLOCK for clearly invented routes
67
83
  let severity = "WARN";
68
84
 
69
- // Only BLOCK if:
70
- // 1. High confidence client ref
71
- // 2. No detection gaps
72
- // 3. Doesn't share prefix with any known route
73
- // 4. Looks like an invented/hallucinated route (unusual patterns)
74
- const looksInvented = /\/(fake|test|mock|dummy|example|foo|bar|baz|xxx|yyy|placeholder)/i.test(p);
75
- if (ref.confidence === "high" && !hasGaps && !sharesPrefix && looksInvented) {
85
+ // Only BLOCK if the route looks clearly invented/hallucinated
86
+ const looksInvented = /\/(fake|test|mock|dummy|example|foo|bar|baz|xxx|yyy|placeholder|asdf|qwerty|lorem|ipsum)/i.test(p);
87
+ const looksGenerated = /\/[a-f0-9]{32,}/i.test(p); // Random hash in path
88
+
89
+ if (looksInvented || looksGenerated) {
76
90
  severity = "BLOCK";
77
91
  }
78
92
 
79
- // Always WARN for common internal/utility routes
80
- const isInternalRoute = /^\/(health|metrics|ready|live|version|debug|internal|suggestions|security|analyze|websocket|dashboard)/.test(p);
93
+ // Always WARN (not BLOCK) for common internal/utility routes
94
+ const isInternalRoute = /^\/(health|metrics|ready|live|version|debug|internal|suggestions|security|analyze|websocket|dashboard|admin|_|\.)/i.test(p);
81
95
  if (isInternalRoute) {
82
96
  severity = "WARN";
83
97
  }
98
+
99
+ // Cap warnings to avoid noise
100
+ if (severity === "WARN") {
101
+ if (warningCount >= MAX_WARNINGS) continue;
102
+ warningCount++;
103
+ }
84
104
 
85
105
  findings.push({
86
106
  id: `F_MISSING_ROUTE_${String(findings.length + 1).padStart(3, "0")}`,
@@ -89,13 +109,13 @@ function findMissingRoutes(truthpack) {
89
109
  title: `Client references route that does not exist: ${method} ${p}`,
90
110
  why: severity === "BLOCK"
91
111
  ? "AI frequently invents endpoints. Shipping this = broken flows (404 / silent failure)."
92
- : "Route reference found but server route not detected. May be a false positive if route is defined dynamically.",
93
- confidence: ref.confidence || "low",
112
+ : "Route reference found but server route not detected. May be a false positive in monorepo/microservice setups.",
113
+ confidence: severity === "BLOCK" ? "high" : "low",
94
114
  evidence: ref.evidence || [],
95
115
  fixHints: [
96
116
  "Update the client call to a real server route (see route map).",
97
117
  "If the route exists but wasn't detected, it may use dynamic registration.",
98
- "If truly missing, implement it in your API and re-run ship."
118
+ "If this is an external service, this warning can be ignored."
99
119
  ]
100
120
  });
101
121
  }
@@ -117,6 +137,24 @@ function findMissingRoutes(truthpack) {
117
137
  ]
118
138
  });
119
139
  }
140
+
141
+ // If we capped warnings, note that
142
+ if (warningCount >= MAX_WARNINGS) {
143
+ findings.push({
144
+ id: `F_MISSING_ROUTE_CAPPED`,
145
+ severity: "WARN",
146
+ category: "MissingRoute",
147
+ title: `${clientRefCount - serverRouteCount - MAX_WARNINGS} additional unmatched routes not shown`,
148
+ why: "Many client references don't match detected server routes. This is common in monorepos/microservices.",
149
+ confidence: "low",
150
+ evidence: [],
151
+ fixHints: [
152
+ "This codebase appears to be a monorepo or use microservices.",
153
+ "Many client refs may be to external services not detected by static analysis.",
154
+ "Use vibecheck scan --allowlist to suppress known false positives."
155
+ ]
156
+ });
157
+ }
120
158
 
121
159
  return findings;
122
160
  }
@@ -166,23 +204,54 @@ function findEnvGaps(truthpack) {
166
204
  // Common optional vars that are often checked but not required
167
205
  'PORT', 'npm_package_version', 'npm_package_name',
168
206
  ]);
207
+
208
+ // Patterns for env vars that are commonly optional/internal and shouldn't BLOCK
209
+ const optionalPatterns = [
210
+ /^(OPENAI|ANTHROPIC|COHERE|AZURE|AWS|GCP|GOOGLE)_/i, // AI/Cloud providers (often optional)
211
+ /^(STRIPE|PAYPAL|PLAID)_/i, // Payment providers (often optional in dev)
212
+ /^(SENDGRID|RESEND|MAILGUN|SES)_/i, // Email providers
213
+ /^(SENTRY|DATADOG|NEWRELIC|LOGROCKET)_/i, // Monitoring (optional)
214
+ /^(REDIS|POSTGRES|MYSQL|MONGO|DATABASE)_/i, // Database (often has defaults)
215
+ /^(NEXT_|NUXT_|VITE_|REACT_APP_)/i, // Framework prefixes
216
+ /^(VIBECHECK|GUARDRAIL)_/i, // Our own vars
217
+ /_(URL|KEY|SECRET|TOKEN|ID|PASSWORD|HOST|PORT)$/i, // Common suffixes (often optional)
218
+ /^(ENABLE_|DISABLE_|USE_|SKIP_|ALLOW_|NO_)/i, // Feature flags (optional by nature)
219
+ /^(MAX_|MIN_|DEFAULT_|TIMEOUT_|LIMIT_|RATE_)/i, // Config limits (have defaults)
220
+ /^(LOG_|DEBUG_|VERBOSE_|TRACE_)/i, // Logging config
221
+ /^(TEST_|DEV_|STAGING_|PROD_)/i, // Environment-specific
222
+ /^(ARTIFACTS_|CACHE_|TMP_|OUTPUT_)/i, // Paths (have defaults)
223
+ /^npm_/i, // npm internal
224
+ ];
225
+
226
+ function isOptionalEnvVar(name) {
227
+ return optionalPatterns.some(p => p.test(name));
228
+ }
169
229
 
170
- // 1) USED but not declared in templates/examples => WARN (or BLOCK if required)
230
+ // 1) USED but not declared in templates/examples
231
+ // Only BLOCK for truly required vars, WARN for everything else, skip optional patterns
171
232
  for (const v of used) {
172
233
  if (declared.has(v.name)) continue;
173
234
  // Skip well-known system/CI env vars
174
235
  if (systemEnvVars.has(v.name)) continue;
175
-
176
- const sev = v.required ? "BLOCK" : "WARN";
236
+ // Skip vars that match optional patterns (very common, likely have defaults)
237
+ if (isOptionalEnvVar(v.name)) continue;
238
+
239
+ // Only BLOCK if:
240
+ // 1. Explicitly marked required AND
241
+ // 2. No fallback detected AND
242
+ // 3. Not a common optional pattern
243
+ const isReallyRequired = v.required && !v.hasFallback;
244
+ const sev = isReallyRequired ? "BLOCK" : "WARN";
245
+
177
246
  findings.push({
178
247
  id: `F_ENV_UNDECLARED_${v.name}`,
179
248
  severity: sev,
180
249
  category: "EnvContract",
181
250
  title: `Env var used but not declared in env templates: ${v.name}`,
182
- why: v.required
251
+ why: isReallyRequired
183
252
  ? "Required env var is used with no fallback. Vibecoders will ship a broken app if it's not documented."
184
253
  : "Env var appears optional but should still be documented to prevent guesswork.",
185
- confidence: "high",
254
+ confidence: isReallyRequired ? "high" : "low",
186
255
  evidence: v.references || [],
187
256
  fixHints: [
188
257
  `Add ${v.name}= to .env.example (or .env.template).`,
@@ -568,6 +637,515 @@ function findOwnerModeBypass(repoRoot) {
568
637
  return findings;
569
638
  }
570
639
 
640
// ============================================================================
// MOCK DATA DETECTOR
// ============================================================================

/**
 * Scan non-test source files for mock/fake/placeholder data that appears to
 * have been left in production code.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} WARN findings; at most one per file (the first
 *   matching pattern wins, then the file is skipped).
 */
function findMockData(repoRoot) {
  const findings = [];
  const files = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**", "**/*.test.*", "**/*.spec.*", "**/tests/**", "**/test/**", "**/__tests__/**", "**/mocks/**", "**/__mocks__/**"]
  });

  const mockPatterns = [
    { rx: /\bmockData\b/gi, label: "mockData variable" },
    { rx: /\bfakeData\b/gi, label: "fakeData variable" },
    { rx: /\bdummyData\b/gi, label: "dummyData variable" },
    { rx: /\btestData\b/gi, label: "testData variable (in production code)" },
    { rx: /\bsampleData\b/gi, label: "sampleData variable" },
    { rx: /['"]fake[_-]?user['"]|['"]test[_-]?user['"]|['"]demo[_-]?user['"]/gi, label: "Hardcoded test user" },
    { rx: /['"]password123['"]|['"]test123['"]|['"]admin123['"]|['"]secret123['"]/gi, label: "Hardcoded test password" },
    { rx: /['"]test@(test|example|fake)\.com['"]/gi, label: "Hardcoded test email" },
    { rx: /\bMOCK_API\b|\bFAKE_API\b|\bDUMMY_API\b/gi, label: "Mock API reference" },
    { rx: /setTimeout\([^)]*[5-9]\d{3,}|setTimeout\([^)]*\d{5,}/g, label: "Long setTimeout (simulated delay?)" },
    { rx: /Math\.random\(\)\s*[*<>]\s*\d+/g, label: "Random data generation" },
    { rx: /\bplaceholder\b.*\bdata\b|\bdata\b.*\bplaceholder\b/gi, label: "Placeholder data" },
  ];

  for (const fileAbs of files) {
    try {
      const code = fs.readFileSync(fileAbs, "utf8");
      const fileRel = path.relative(repoRoot, fileAbs).replace(/\\/g, "/");

      // Skip if file looks like a test/mock file even if not in a test folder.
      if (/\.(test|spec|mock|fake|stub)\./i.test(fileRel)) continue;
      if (/mock|fake|test|spec|fixture/i.test(fileRel) && !/src\//.test(fileRel)) continue;

      for (const { rx, label } of mockPatterns) {
        // String.match with a /g regex resets lastIndex itself, so this call
        // is safe even though the pattern objects are shared across files.
        const matches = code.match(rx);
        if (matches && matches.length > 0) {
          const lines = code.split('\n');
          let lineNum = 1;
          for (let i = 0; i < lines.length; i++) {
            // BUG FIX: .test() on a /g regex is stateful — it advances
            // rx.lastIndex, which made the line search skip real matches and
            // report wrong line numbers. Reset before every .test().
            rx.lastIndex = 0;
            if (rx.test(lines[i])) {
              lineNum = i + 1;
              break;
            }
          }

          findings.push({
            id: `F_MOCK_DATA_${fileRel.replace(/[^a-z0-9]/gi, "_")}_${label.replace(/[^a-z0-9]/gi, "_")}`,
            severity: "WARN",
            category: "MockData",
            title: `${label} in production code: ${fileRel}`,
            why: "Mock/fake data in production causes embarrassing bugs and makes your app look unfinished.",
            confidence: "med",
            evidence: [{ file: fileRel, lines: `${lineNum}`, reason: label }],
            fixHints: [
              "Replace mock data with real API calls or database queries.",
              "If this is intentional sample data, move to a clearly marked demo mode."
            ]
          });
          // NOTE: this break exits the pattern loop, so only the FIRST
          // matching pattern per file is reported (not one per pattern type).
          break;
        }
      }
    } catch (e) {
      // Skip unreadable files (binary, permission errors) — best-effort scan.
    }
  }

  return findings;
}
711
+
712
// ============================================================================
// TODO/FIXME DETECTOR
// ============================================================================

/**
 * Scan source files for TODO/FIXME/HACK-style work markers.
 *
 * Emits an individual finding per marker line (capped) plus a summary finding
 * when markers were truncated. BUG/BROKEN/URGENT/SECURITY/DANGER markers are
 * treated as BLOCK severity; the rest are WARN.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} Findings, at most MAX_INDIVIDUAL_FINDINGS + 1.
 */
function findTodoFixme(repoRoot) {
  const findings = [];
  const files = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**"]
  });

  const todoPatterns = [
    { rx: /\/\/\s*TODO[\s:]/gi, label: "TODO comment", severity: "WARN" },
    { rx: /\/\/\s*FIXME[\s:]/gi, label: "FIXME comment", severity: "WARN" },
    { rx: /\/\/\s*HACK[\s:]/gi, label: "HACK comment", severity: "WARN" },
    { rx: /\/\/\s*XXX[\s:]/gi, label: "XXX comment", severity: "WARN" },
    { rx: /\/\/\s*BUG[\s:]/gi, label: "BUG comment", severity: "BLOCK" },
    { rx: /\/\/\s*BROKEN[\s:]/gi, label: "BROKEN comment", severity: "BLOCK" },
    { rx: /\/\/\s*URGENT[\s:]/gi, label: "URGENT comment", severity: "BLOCK" },
    { rx: /\/\/\s*SECURITY[\s:]/gi, label: "SECURITY comment", severity: "BLOCK" },
    { rx: /\/\/\s*DANGER[\s:]/gi, label: "DANGER comment", severity: "BLOCK" },
    { rx: /\/\*\s*TODO[\s:]/gi, label: "TODO block comment", severity: "WARN" },
    { rx: /\/\*\s*FIXME[\s:]/gi, label: "FIXME block comment", severity: "WARN" },
  ];

  let todoCount = 0;
  let fixmeCount = 0;
  let markerCount = 0; // every matched marker line, regardless of label
  const MAX_INDIVIDUAL_FINDINGS = 20;

  for (const fileAbs of files) {
    try {
      const code = fs.readFileSync(fileAbs, "utf8");
      const fileRel = path.relative(repoRoot, fileAbs).replace(/\\/g, "/");
      const lines = code.split('\n');

      for (let i = 0; i < lines.length; i++) {
        const line = lines[i];

        for (const { rx, label, severity } of todoPatterns) {
          // BUG FIX: these /g regexes are shared across lines and files, and
          // .test() advances rx.lastIndex, so detection alternated between
          // hit and miss on identical lines. Reset before each test.
          rx.lastIndex = 0;
          if (rx.test(line)) {
            markerCount++;
            if (label.includes("TODO")) todoCount++;
            if (label.includes("FIXME")) fixmeCount++;

            // Only emit individual findings up to the cap.
            if (findings.length < MAX_INDIVIDUAL_FINDINGS) {
              const snippet = line.trim().slice(0, 80);
              findings.push({
                id: `F_TODO_${fileRel.replace(/[^a-z0-9]/gi, "_")}_L${i + 1}`,
                severity,
                category: "TodoFixme",
                title: `${label}: ${snippet}${line.length > 80 ? '...' : ''}`,
                why: severity === "BLOCK"
                  ? "This comment indicates a known critical issue that must be addressed before shipping."
                  : "Unfinished work markers suggest the code isn't production-ready.",
                confidence: "high",
                evidence: [{ file: fileRel, lines: `${i + 1}`, reason: label }],
                fixHints: [
                  "Complete the TODO or remove it if already done.",
                  "If deferring, create a tracked issue and reference it in the comment."
                ]
              });
            }
            break; // One finding per line.
          }
        }
      }
    } catch (e) {
      // Skip unreadable files — best-effort scan.
    }
  }

  // Summary finding when individual findings were truncated.
  // BUG FIX: the summary previously counted only TODO/FIXME labels, so
  // HACK/XXX/BUG/etc. markers were excluded and the "not shown" arithmetic
  // could go negative. Count every marker and subtract the emission cap.
  if (markerCount > MAX_INDIVIDUAL_FINDINGS) {
    findings.push({
      id: `F_TODO_SUMMARY`,
      severity: "WARN",
      category: "TodoFixme",
      title: `${markerCount} TODO/FIXME comments found (${markerCount - MAX_INDIVIDUAL_FINDINGS} more not shown)`,
      why: "Large numbers of TODO comments indicate significant unfinished work.",
      confidence: "high",
      evidence: [],
      fixHints: [
        "Review and address high-priority TODOs before shipping.",
        `Run: grep -rn "TODO\\|FIXME" --include="*.ts" --include="*.js" .`
      ]
    });
  }

  return findings;
}
804
+
805
// ============================================================================
// CONSOLE.LOG DETECTOR
// ============================================================================

/**
 * Detect console logging statements (log/warn/debug/info/trace) left in
 * production source files. Emits one WARN per statement up to a cap, then a
 * single summary finding if more were found.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} WARN findings.
 */
function findConsoleLogs(repoRoot) {
  const MAX_INDIVIDUAL_FINDINGS = 15;
  // These regexes have no /g flag, so .test() is stateless and safe to hoist.
  const consoleCallRx = /console\.(log|warn|debug|info|trace)\s*\(/;
  const commentedOutRx = /^\s*\/\//;
  const skipFileRx = /config|setup|jest|vitest|eslint|prettier/i;

  const sourceFiles = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**", "**/*.test.*", "**/*.spec.*", "**/tests/**", "**/__tests__/**", "**/scripts/**", "**/bin/**"]
  });

  const findings = [];
  let consoleCount = 0;

  for (const absPath of sourceFiles) {
    let source;
    try {
      source = fs.readFileSync(absPath, "utf8");
    } catch (err) {
      continue; // unreadable file — best-effort scan
    }

    const relPath = path.relative(repoRoot, absPath).replace(/\\/g, "/");

    // Config/setup files legitimately log to the console — skip them.
    if (skipFileRx.test(relPath)) continue;

    source.split('\n').forEach((lineText, idx) => {
      if (!consoleCallRx.test(lineText)) return;
      if (commentedOutRx.test(lineText)) return; // commented out

      consoleCount += 1;
      if (findings.length >= MAX_INDIVIDUAL_FINDINGS) return;

      const snippet = lineText.trim().slice(0, 60);
      findings.push({
        id: `F_CONSOLE_LOG_${relPath.replace(/[^a-z0-9]/gi, "_")}_L${idx + 1}`,
        severity: "WARN",
        category: "ConsoleLog",
        title: `console.log in production code: ${relPath}:${idx + 1}`,
        why: "Console statements leak debugging info to users and clutter browser console.",
        confidence: "high",
        evidence: [{ file: relPath, lines: `${idx + 1}`, reason: snippet }],
        fixHints: [
          "Remove console.log or replace with a proper logger.",
          "Use a logger that can be silenced in production."
        ]
      });
    });
  }

  // Roll the overflow into one summary entry instead of flooding the report.
  if (consoleCount > MAX_INDIVIDUAL_FINDINGS) {
    findings.push({
      id: `F_CONSOLE_LOG_SUMMARY`,
      severity: "WARN",
      category: "ConsoleLog",
      title: `${consoleCount} console.log statements found (${consoleCount - MAX_INDIVIDUAL_FINDINGS} more not shown)`,
      why: "Large numbers of console statements suggest debugging code left in production.",
      confidence: "high",
      evidence: [],
      fixHints: [
        "Use ESLint no-console rule to catch these automatically.",
        "Replace with a proper logging library (pino, winston, etc.)."
      ]
    });
  }

  return findings;
}
882
+
883
// ============================================================================
// HARDCODED SECRETS DETECTOR
// ============================================================================

/**
 * Detect credential-shaped literals (Stripe/AWS/GitHub/Slack keys, JWTs,
 * hardcoded passwords/API keys) committed in source files.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} BLOCK findings; at most one per file (first
 *   matching secret pattern wins).
 */
function findHardcodedSecrets(repoRoot) {
  const findings = [];
  const candidateFiles = fg.sync(["**/*.{ts,tsx,js,jsx,json}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**", "**/package*.json", "**/*.test.*", "**/tests/**"]
  });

  const secretPatterns = [
    { rx: /['"]sk_live_[a-zA-Z0-9]{20,}['"]/g, label: "Stripe live secret key" },
    { rx: /['"]sk_test_[a-zA-Z0-9]{20,}['"]/g, label: "Stripe test secret key" },
    { rx: /['"]pk_live_[a-zA-Z0-9]{20,}['"]/g, label: "Stripe live publishable key" },
    { rx: /['"]AKIA[0-9A-Z]{16}['"]/g, label: "AWS Access Key ID" },
    // NOTE(review): this pattern matches ANY quoted 40-char base64-ish string
    // (git SHAs, hashes, ids), so it is prone to false positives — confirm
    // the intended precision before tightening or relying on it.
    { rx: /['"][a-zA-Z0-9+\/]{40}['"]/g, label: "Possible AWS Secret Key" },
    { rx: /['"]ghp_[a-zA-Z0-9]{36}['"]/g, label: "GitHub Personal Access Token" },
    { rx: /['"]gho_[a-zA-Z0-9]{36}['"]/g, label: "GitHub OAuth Token" },
    { rx: /['"]xox[baprs]-[0-9]{10,13}-[0-9]{10,13}-[a-zA-Z0-9]{24}['"]/g, label: "Slack Token" },
    { rx: /['"]eyJ[a-zA-Z0-9_-]{100,}\.[a-zA-Z0-9_-]{100,}\.[a-zA-Z0-9_-]{43,}['"]/g, label: "JWT Token (hardcoded)" },
    { rx: /password\s*[:=]\s*['"][^'"]{8,}['"]/gi, label: "Hardcoded password" },
    { rx: /api[_-]?key\s*[:=]\s*['"][a-zA-Z0-9]{20,}['"]/gi, label: "Hardcoded API key" },
    { rx: /secret\s*[:=]\s*['"][a-zA-Z0-9]{16,}['"]/gi, label: "Hardcoded secret" },
  ];

  for (const absFile of candidateFiles) {
    let contents;
    try {
      contents = fs.readFileSync(absFile, "utf8");
    } catch (err) {
      continue; // unreadable file — best-effort scan
    }

    const relFile = path.relative(repoRoot, absFile).replace(/\\/g, "/");

    // Skip env files (they're supposed to hold secrets, just not be committed).
    if (/\.env/.test(relFile)) continue;

    for (const { rx, label } of secretPatterns) {
      // String.match with a /g regex resets lastIndex, so shared patterns
      // are safe here.
      const hits = contents.match(rx);
      if (!hits || hits.length === 0) continue;

      findings.push({
        id: `F_SECRET_${relFile.replace(/[^a-z0-9]/gi, "_")}_${label.replace(/[^a-z0-9]/gi, "_")}`,
        severity: "BLOCK",
        category: "HardcodedSecret",
        title: `${label} detected in: ${relFile}`,
        why: "Hardcoded secrets in code get committed to git and leaked. This is a critical security issue.",
        confidence: "high",
        evidence: [{ file: relFile, reason: label }],
        fixHints: [
          "Move the secret to environment variables.",
          "Rotate the compromised secret immediately.",
          "Add the file to .gitignore if it shouldn't be committed."
        ]
      });
      break; // Report only the first matching secret type per file.
    }
  }

  return findings;
}
945
+
946
// ============================================================================
// DEAD CODE / UNUSED EXPORTS DETECTOR
// ============================================================================

/**
 * Detect dead-code smells: commented-out exports, `if (false)` / `if (0)`
 * blocks, and code that textually follows a `return` or `throw`.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} WARN findings; at most one per file (first
 *   matching pattern wins).
 */
function findDeadCode(repoRoot) {
  const findings = [];
  const files = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**", "**/*.d.ts"]
  });

  const deadCodePatterns = [
    { rx: /^\s*\/\/\s*export\s+(const|function|class|interface|type)/gm, label: "Commented out export" },
    { rx: /^\s*\/\*[\s\S]*?export[\s\S]*?\*\//gm, label: "Block-commented export" },
    { rx: /if\s*\(\s*false\s*\)\s*\{/g, label: "if (false) block" },
    { rx: /if\s*\(\s*0\s*\)\s*\{/g, label: "if (0) block" },
    { rx: /return;\s*\n\s*[^}]/g, label: "Unreachable code after return" },
    { rx: /throw\s+new\s+Error[^;]*;\s*\n\s*[^}]/g, label: "Unreachable code after throw" },
  ];

  for (const fileAbs of files) {
    try {
      const code = fs.readFileSync(fileAbs, "utf8");
      const fileRel = path.relative(repoRoot, fileAbs).replace(/\\/g, "/");

      for (const { rx, label } of deadCodePatterns) {
        // BUG FIX: these /g (and /gm) regexes are shared across all files and
        // .test() advances rx.lastIndex, so a match in one file made the next
        // file's scan start mid-string and silently miss real matches.
        // Reset before every .test().
        rx.lastIndex = 0;
        if (rx.test(code)) {
          findings.push({
            id: `F_DEAD_CODE_${fileRel.replace(/[^a-z0-9]/gi, "_")}_${label.replace(/[^a-z0-9]/gi, "_")}`,
            severity: "WARN",
            category: "DeadCode",
            title: `${label} in: ${fileRel}`,
            why: "Dead code adds confusion and maintenance burden. It often indicates incomplete refactoring.",
            confidence: "med",
            evidence: [{ file: fileRel, reason: label }],
            fixHints: [
              "Remove the dead code entirely.",
              "If needed for reference, check git history instead of commenting."
            ]
          });
          break; // One finding per file.
        }
      }
    } catch (e) {
      // Skip unreadable files — best-effort scan.
    }
  }

  return findings;
}
997
+
998
// ============================================================================
// DEPRECATED API USAGE DETECTOR
// ============================================================================

/**
 * Detect usage of well-known deprecated JS/React/Node APIs
 * (componentWillMount, String.substr, new Buffer(), document.write, ...).
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} WARN findings; at most one per file (first
 *   matching deprecated API wins).
 */
function findDeprecatedApis(repoRoot) {
  const deprecatedPatterns = [
    { rx: /\bcomponentWillMount\b/g, label: "componentWillMount (deprecated React lifecycle)" },
    { rx: /\bcomponentWillReceiveProps\b/g, label: "componentWillReceiveProps (deprecated)" },
    { rx: /\bcomponentWillUpdate\b/g, label: "componentWillUpdate (deprecated)" },
    { rx: /\bgetInitialProps\b/g, label: "getInitialProps (legacy Next.js)" },
    { rx: /\bsubstr\s*\(/g, label: "String.substr() (deprecated, use slice)" },
    { rx: /\bdocument\.write\b/g, label: "document.write (deprecated)" },
    { rx: /new\s+Buffer\s*\(/g, label: "new Buffer() (deprecated, use Buffer.from)" },
    { rx: /\brequire\(['"]fs['"]\)\.exists\b/g, label: "fs.exists (deprecated)" },
    { rx: /\.__proto__\b/g, label: "__proto__ (deprecated)" },
  ];

  const sourceFiles = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**"]
  });

  const findings = [];

  for (const absPath of sourceFiles) {
    let source;
    try {
      source = fs.readFileSync(absPath, "utf8");
    } catch (err) {
      continue; // unreadable file — best-effort scan
    }

    const relPath = path.relative(repoRoot, absPath).replace(/\\/g, "/");

    for (const { rx, label } of deprecatedPatterns) {
      // String.match with a /g regex resets lastIndex itself, so the shared
      // pattern objects are safe here.
      const occurrences = source.match(rx);
      if (!occurrences || occurrences.length === 0) continue;

      findings.push({
        id: `F_DEPRECATED_${relPath.replace(/[^a-z0-9]/gi, "_")}_${label.replace(/[^a-z0-9]/gi, "_")}`,
        severity: "WARN",
        category: "DeprecatedApi",
        title: `${label}: ${relPath}`,
        why: "Deprecated APIs may stop working in future versions and often have security issues.",
        confidence: "high",
        evidence: [{ file: relPath, reason: `${occurrences.length} occurrence(s)` }],
        fixHints: [
          "Update to the modern API equivalent.",
          "Check migration guides for the specific deprecation."
        ]
      });
      break; // Report only the first deprecated API found per file.
    }
  }

  return findings;
}
1053
+
1054
// ============================================================================
// EMPTY CATCH BLOCKS DETECTOR
// ============================================================================

/**
 * Detect catch blocks whose body is empty or contains only a single
 * line comment, i.e. errors that are silently swallowed.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} WARN findings, one per offending file.
 */
function findEmptyCatch(repoRoot) {
  // Hoisted: String.match with a /g regex resets lastIndex, so reuse is safe.
  const emptyCatchRx = /catch\s*\([^)]*\)\s*\{\s*(\/\/[^\n]*)?\s*\}/g;

  const sourceFiles = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**"]
  });

  const findings = [];

  for (const absPath of sourceFiles) {
    let source;
    try {
      source = fs.readFileSync(absPath, "utf8");
    } catch (err) {
      continue; // unreadable file — best-effort scan
    }

    const relPath = path.relative(repoRoot, absPath).replace(/\\/g, "/");

    // Match catch blocks that are empty or only carry one // comment.
    const hits = source.match(emptyCatchRx);
    if (!hits || hits.length === 0) continue;

    findings.push({
      id: `F_EMPTY_CATCH_${relPath.replace(/[^a-z0-9]/gi, "_")}`,
      severity: "WARN",
      category: "EmptyCatch",
      title: `Empty catch block(s) in: ${relPath} (${hits.length} found)`,
      why: "Empty catch blocks silently swallow errors, making debugging impossible.",
      confidence: "high",
      evidence: [{ file: relPath, reason: `${hits.length} empty catch block(s)` }],
      fixHints: [
        "Log the error or handle it appropriately.",
        "If intentionally ignoring, add a comment explaining why."
      ]
    });
  }

  return findings;
}
1097
+
1098
// ============================================================================
// UNSAFE REGEX DETECTOR
// ============================================================================

/**
 * Detect regex constructions that risk ReDoS (catastrophic backtracking) or
 * injection: dynamically concatenated RegExp sources, nested quantifiers,
 * and quantified alternations.
 *
 * @param {string} repoRoot - Absolute path of the repository to scan.
 * @returns {Array<object>} WARN findings; at most one per file (first
 *   matching pattern wins).
 */
function findUnsafeRegex(repoRoot) {
  const findings = [];
  const files = fg.sync(["**/*.{ts,tsx,js,jsx}"], {
    cwd: repoRoot,
    absolute: true,
    ignore: ["**/node_modules/**", "**/.next/**", "**/dist/**", "**/build/**"]
  });

  // Patterns that can cause ReDoS (catastrophic backtracking).
  const unsafePatterns = [
    { rx: /new\s+RegExp\s*\([^)]*\+[^)]*\)/g, label: "Dynamic regex with concatenation" },
    { rx: /\(\.\*\)\+|\(\.\+\)\+|\(\.\*\)\*|\(\.\+\)\*/g, label: "Nested quantifiers (ReDoS risk)" },
    { rx: /\([^)]+\|[^)]+\)\+/g, label: "Alternation with quantifier (ReDoS risk)" },
  ];

  for (const fileAbs of files) {
    try {
      const code = fs.readFileSync(fileAbs, "utf8");
      const fileRel = path.relative(repoRoot, fileAbs).replace(/\\/g, "/");

      for (const { rx, label } of unsafePatterns) {
        // BUG FIX: these /g regexes are shared across all files and .test()
        // advances rx.lastIndex, so a match in one file caused the next
        // file's scan to begin mid-string and miss real matches.
        // Reset before every .test().
        rx.lastIndex = 0;
        if (rx.test(code)) {
          findings.push({
            id: `F_UNSAFE_REGEX_${fileRel.replace(/[^a-z0-9]/gi, "_")}_${label.replace(/[^a-z0-9]/gi, "_")}`,
            severity: "WARN",
            category: "UnsafeRegex",
            title: `${label}: ${fileRel}`,
            why: "Unsafe regex patterns can cause denial of service via catastrophic backtracking.",
            confidence: "med",
            evidence: [{ file: fileRel, reason: label }],
            fixHints: [
              "Use atomic groups or possessive quantifiers if supported.",
              "Validate input length before applying regex.",
              "Consider using a regex linting tool."
            ]
          });
          break; // One finding per file.
        }
      }
    } catch (e) {
      // Skip unreadable files — best-effort scan.
    }
  }

  return findings;
}
1147
+ }
1148
+
571
1149
  module.exports = {
572
1150
  findMissingRoutes,
573
1151
  findEnvGaps,
@@ -575,5 +1153,14 @@ module.exports = {
575
1153
  findGhostAuth,
576
1154
  findStripeWebhookViolations,
577
1155
  findPaidSurfaceNotEnforced,
578
- findOwnerModeBypass
1156
+ findOwnerModeBypass,
1157
+ // New analyzers
1158
+ findMockData,
1159
+ findTodoFixme,
1160
+ findConsoleLogs,
1161
+ findHardcodedSecrets,
1162
+ findDeadCode,
1163
+ findDeprecatedApis,
1164
+ findEmptyCatch,
1165
+ findUnsafeRegex,
579
1166
  };