@vibecodiq/cli 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +18 -4326
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -1,4327 +1,19 @@
1
1
  #!/usr/bin/env node
2
-
3
- // src/scanner.ts
4
- import path2 from "path";
5
-
6
- // src/lib/grep.ts
7
- async function grepFiles(ctx, pattern, globs) {
8
- const matches = [];
9
- let filesToSearch = ctx.files;
10
- if (globs && globs.length > 0) {
11
- filesToSearch = filesToSearch.filter(
12
- (f) => globs.some((g) => matchGlob(f, g))
13
- );
14
- }
15
- for (const file of filesToSearch) {
16
- let content;
17
- try {
18
- content = await ctx.readFile(file);
19
- } catch {
20
- continue;
21
- }
22
- const lines = content.split("\n");
23
- for (let i = 0; i < lines.length; i++) {
24
- if (pattern.test(lines[i])) {
25
- matches.push({ file, line: i + 1, content: lines[i].trim() });
26
- }
27
- }
28
- }
29
- return matches;
30
- }
31
- function matchGlob(filePath, glob) {
32
- if (glob.startsWith("!")) {
33
- return !matchGlob(filePath, glob.slice(1));
34
- }
35
- if (glob.startsWith("**/")) {
36
- const suffix = glob.slice(3);
37
- return filePath.includes(suffix);
38
- }
39
- if (glob.endsWith("/**")) {
40
- const prefix = glob.slice(0, -3);
41
- return filePath.startsWith(prefix);
42
- }
43
- if (glob.includes("*")) {
44
- const regex = new RegExp("^" + glob.replace(/\*/g, ".*") + "$");
45
- return regex.test(filePath);
46
- }
47
- return filePath === glob || filePath.startsWith(glob + "/");
48
- }
49
- function isClientFile(file) {
50
- return file.startsWith("src/") || file.startsWith("components/") || file.startsWith("app/") || file.startsWith("pages/") || file.includes("/components/") || file.includes("/hooks/") || file.includes("/contexts/");
51
- }
52
- function isServerFile(file) {
53
- return file.includes("/api/") || file.includes("route.ts") || file.includes("route.js") || file.includes("server/") || file.includes("actions.ts") || file.includes("actions.js");
54
- }
55
- function isMigrationFile(file) {
56
- return file.includes("migration") || file.includes("supabase/") || file.endsWith(".sql");
57
- }
58
- function isEnvFile(file) {
59
- const base = file.split("/").pop() || "";
60
- return base.startsWith(".env");
61
- }
62
-
63
- // src/checks/auth-01.ts
64
- var AUTH_01 = {
65
- id: "AUTH-01",
66
- name: "service_role key not in client code",
67
- module: "auth",
68
- priority: "P0",
69
- aliases: ["ADM-17"],
70
- description: "service_role bypasses all RLS \u2014 must never appear in client-side code or NEXT_PUBLIC_ vars",
71
- async run(ctx) {
72
- const pattern = /service_role|SERVICE_ROLE|SUPABASE_SERVICE_ROLE|sb_secret_/i;
73
- const matches = await ctx.grepFiles(pattern);
74
- const clientMatches = matches.filter((m) => {
75
- if (m.content.includes("NEXT_PUBLIC_")) return true;
76
- if (m.file.includes("use client")) return false;
77
- if (isClientFile(m.file) && !m.file.includes("/api/") && !m.file.includes("route.")) return true;
78
- return false;
79
- });
80
- const nextPublicMatches = matches.filter(
81
- (m) => m.content.includes("NEXT_PUBLIC_") && /service_role|SERVICE_ROLE/i.test(m.content)
82
- );
83
- const allBad = [...clientMatches, ...nextPublicMatches];
84
- const unique = [...new Map(allBad.map((m) => [`${m.file}:${m.line}`, m])).values()];
85
- if (unique.length > 0) {
86
- return {
87
- result: "FAIL",
88
- message: `service_role key found in client-accessible code (${unique.length} location${unique.length > 1 ? "s" : ""})`,
89
- evidence: unique.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
90
- };
91
- }
92
- const serverMatches = matches.filter(
93
- (m) => !m.content.startsWith("//") && !m.content.startsWith("#") && !m.content.startsWith("*")
94
- );
95
- if (serverMatches.length > 0) {
96
- return {
97
- result: "PASS",
98
- message: "service_role key found only in server-side code"
99
- };
100
- }
101
- return {
102
- result: "UNKNOWN",
103
- message: "No service_role references found \u2014 Supabase may not be used"
104
- };
105
- }
106
- };
107
-
108
- // src/checks/auth-02.ts
109
- var AUTH_02 = {
110
- id: "AUTH-02",
111
- name: "RLS enabled on all tables",
112
- module: "auth",
113
- priority: "P0",
114
- description: "Every table must have Row Level Security enabled. Without RLS, anon key = full DB access.",
115
- async run(ctx) {
116
- const sqlFiles = ctx.files.filter(isMigrationFile);
117
- if (sqlFiles.length === 0) {
118
- return {
119
- result: "UNKNOWN",
120
- message: "No SQL migration files found"
121
- };
122
- }
123
- const createTablePattern = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?(\w+)/gi;
124
- const rlsPattern = /ALTER\s+TABLE\s+(?:public\.)?(\w+)\s+ENABLE\s+ROW\s+LEVEL\s+SECURITY/gi;
125
- const systemTables = /* @__PURE__ */ new Set(["schema_migrations", "_prisma_migrations", "migrations"]);
126
- const allTables = /* @__PURE__ */ new Set();
127
- const rlsEnabledTables = /* @__PURE__ */ new Set();
128
- for (const file of sqlFiles) {
129
- let content;
130
- try {
131
- content = await ctx.readFile(file);
132
- } catch {
133
- continue;
134
- }
135
- let match;
136
- const createRegex = new RegExp(createTablePattern.source, "gi");
137
- while ((match = createRegex.exec(content)) !== null) {
138
- const tableName = match[1].toLowerCase();
139
- if (!systemTables.has(tableName)) {
140
- allTables.add(tableName);
141
- }
142
- }
143
- const rlsRegex = new RegExp(rlsPattern.source, "gi");
144
- while ((match = rlsRegex.exec(content)) !== null) {
145
- rlsEnabledTables.add(match[1].toLowerCase());
146
- }
147
- }
148
- if (allTables.size === 0) {
149
- return {
150
- result: "UNKNOWN",
151
- message: "No CREATE TABLE statements found in migrations"
152
- };
153
- }
154
- const unprotected = [...allTables].filter((t) => !rlsEnabledTables.has(t));
155
- if (unprotected.length > 0) {
156
- return {
157
- result: "FAIL",
158
- message: `${unprotected.length} table${unprotected.length > 1 ? "s" : ""} without RLS (of ${allTables.size} total)`,
159
- evidence: unprotected.map((t) => `Table "${t}" \u2014 missing ENABLE ROW LEVEL SECURITY`)
160
- };
161
- }
162
- return {
163
- result: "PASS",
164
- message: `All ${allTables.size} tables have RLS enabled`
165
- };
166
- }
167
- };
168
-
169
- // src/checks/auth-03.ts
170
- var AUTH_03 = {
171
- id: "AUTH-03",
172
- name: "RLS policies have WITH CHECK",
173
- module: "auth",
174
- priority: "P0",
175
- description: "INSERT/UPDATE policies need WITH CHECK clause. USING alone lets users insert data owned by others.",
176
- async run(ctx) {
177
- const sqlFiles = ctx.files.filter(isMigrationFile);
178
- if (sqlFiles.length === 0) {
179
- return { result: "UNKNOWN", message: "No SQL migration files found" };
180
- }
181
- const tautologyIssues = [];
182
- const missingWithCheck = [];
183
- for (const file of sqlFiles) {
184
- let content;
185
- try {
186
- content = await ctx.readFile(file);
187
- } catch {
188
- continue;
189
- }
190
- const policyRegex = /CREATE\s+POLICY\s+"?(\w+)"?\s+ON\s+(?:public\.)?(\w+)\s+(?:AS\s+\w+\s+)?FOR\s+(INSERT|UPDATE|ALL)\b([\s\S]*?)(?=CREATE\s+POLICY|$)/gi;
191
- let match;
192
- while ((match = policyRegex.exec(content)) !== null) {
193
- const policyName = match[1];
194
- const table = match[2];
195
- const action = match[3].toUpperCase();
196
- const body = match[4];
197
- if (action === "INSERT" || action === "UPDATE" || action === "ALL") {
198
- if (/WITH\s+CHECK\s*\(\s*true\s*\)/i.test(body)) {
199
- tautologyIssues.push(`${file}: policy "${policyName}" on ${table} \u2014 WITH CHECK (true) is permissive`);
200
- } else if (!/WITH\s+CHECK/i.test(body)) {
201
- missingWithCheck.push(`${file}: policy "${policyName}" on ${table} (${action}) \u2014 missing WITH CHECK`);
202
- }
203
- }
204
- }
205
- }
206
- const allIssues = [...tautologyIssues, ...missingWithCheck];
207
- if (allIssues.length > 0) {
208
- return {
209
- result: "FAIL",
210
- message: `${allIssues.length} RLS policy issue${allIssues.length > 1 ? "s" : ""} found`,
211
- evidence: allIssues.slice(0, 5)
212
- };
213
- }
214
- const anyPolicy = sqlFiles.some((f) => {
215
- try {
216
- const c = ctx.files.includes(f) ? f : "";
217
- return /CREATE\s+POLICY/i.test(c);
218
- } catch {
219
- return false;
220
- }
221
- });
222
- const policyMatches = await ctx.grepFiles(/CREATE\s+POLICY/i);
223
- if (policyMatches.length === 0) {
224
- return { result: "UNKNOWN", message: "No RLS policies found in migrations" };
225
- }
226
- return {
227
- result: "PASS",
228
- message: "All INSERT/UPDATE RLS policies have proper WITH CHECK clauses"
229
- };
230
- }
231
- };
232
-
233
- // src/checks/auth-04.ts
234
- var AUTH_04 = {
235
- id: "AUTH-04",
236
- name: "Server-side auth on protected routes",
237
- module: "auth",
238
- priority: "P0",
239
- description: "Every protected API route/server action must verify auth server-side. Middleware-only auth is bypassable (CVE-2025-29927).",
240
- async run(ctx) {
241
- const routeFiles = ctx.files.filter(
242
- (f) => (f.includes("route.") || f.includes("actions.")) && isServerFile(f)
243
- );
244
- if (routeFiles.length === 0) {
245
- return { result: "UNKNOWN", message: "No API route handlers found" };
246
- }
247
- const authPatterns = /getUser|getSession|auth\(\)|verifyToken|requireAuth|requireAdmin|checkPermission|requireRole|currentUser|validateSession/i;
248
- const publicRoutePatterns = /webhook|health|status|public|og|sitemap|robots|favicon|_next/i;
249
- const unprotected = [];
250
- for (const file of routeFiles) {
251
- if (publicRoutePatterns.test(file)) continue;
252
- let content;
253
- try {
254
- content = await ctx.readFile(file);
255
- } catch {
256
- continue;
257
- }
258
- if (!authPatterns.test(content)) {
259
- unprotected.push(file);
260
- }
261
- }
262
- if (unprotected.length > 0) {
263
- return {
264
- result: "FAIL",
265
- message: `${unprotected.length} API route${unprotected.length > 1 ? "s" : ""} without server-side auth check`,
266
- evidence: unprotected.slice(0, 5).map((f) => `${f} \u2014 no auth verification detected`)
267
- };
268
- }
269
- return {
270
- result: "PASS",
271
- message: `All ${routeFiles.length} API routes have server-side auth checks`
272
- };
273
- }
274
- };
275
-
276
- // src/checks/auth-05.ts
277
- var AUTH_05 = {
278
- id: "AUTH-05",
279
- name: "No secrets with NEXT_PUBLIC_ prefix",
280
- module: "auth",
281
- priority: "P0",
282
- description: "NEXT_PUBLIC_ vars are bundled in the browser. Secrets must never use this prefix.",
283
- async run(ctx) {
284
- const dangerousPatterns = [
285
- /NEXT_PUBLIC_.*SERVICE_ROLE/i,
286
- /NEXT_PUBLIC_.*SECRET/i,
287
- /NEXT_PUBLIC_.*DATABASE_URL/i,
288
- /NEXT_PUBLIC_.*DB_URL/i,
289
- /NEXT_PUBLIC_.*PRIVATE/i,
290
- /NEXT_PUBLIC_.*PASSWORD/i,
291
- /NEXT_PUBLIC_.*JWT_SECRET/i,
292
- /VITE_.*SERVICE_ROLE/i,
293
- /VITE_.*SECRET_KEY/i,
294
- /VITE_.*DATABASE_URL/i,
295
- /EXPO_PUBLIC_.*SERVICE_ROLE/i,
296
- /EXPO_PUBLIC_.*SECRET/i
297
- ];
298
- const envFiles = ctx.files.filter(isEnvFile);
299
- const allMatches = [];
300
- for (const file of envFiles) {
301
- let content;
302
- try {
303
- content = await ctx.readFile(file);
304
- } catch {
305
- continue;
306
- }
307
- const lines = content.split("\n");
308
- for (let i = 0; i < lines.length; i++) {
309
- const line = lines[i].trim();
310
- if (line.startsWith("#") || !line) continue;
311
- for (const pattern of dangerousPatterns) {
312
- if (pattern.test(line)) {
313
- allMatches.push({ file, line: i + 1, content: line.split("=")[0] });
314
- break;
315
- }
316
- }
317
- }
318
- }
319
- if (allMatches.length > 0) {
320
- return {
321
- result: "FAIL",
322
- message: `Secret keys exposed via public prefix (${allMatches.length} found)`,
323
- evidence: allMatches.map((m) => `${m.file}:${m.line} \u2192 ${m.content}`)
324
- };
325
- }
326
- if (envFiles.length === 0) {
327
- return {
328
- result: "UNKNOWN",
329
- message: "No .env files found to check"
330
- };
331
- }
332
- return {
333
- result: "PASS",
334
- message: "No secrets found with NEXT_PUBLIC_/VITE_/EXPO_PUBLIC_ prefix"
335
- };
336
- }
337
- };
338
-
339
- // src/checks/auth-06.ts
340
- var AUTH_06 = {
341
- id: "AUTH-06",
342
- name: "Protected routes redirect unauthenticated users",
343
- module: "auth",
344
- priority: "P1",
345
- description: "Middleware or route guard must redirect unauthenticated users to /login for protected pages.",
346
- async run(ctx) {
347
- const middlewareFile = ctx.files.find((f) => /^middleware\.(ts|js)$/.test(f));
348
- if (!middlewareFile) {
349
- const layoutGuards = await ctx.grepFiles(/redirect.*login|redirect.*auth|redirect.*sign/i);
350
- if (layoutGuards.length > 0) {
351
- return { result: "PASS", message: "Route guards with login redirect detected" };
352
- }
353
- return {
354
- result: "FAIL",
355
- message: "No middleware.ts and no route guards found \u2014 unauthenticated users can access protected pages",
356
- evidence: ["Missing: middleware.ts or per-page auth redirect"]
357
- };
358
- }
359
- let content;
360
- try {
361
- content = await ctx.readFile(middlewareFile);
362
- } catch {
363
- return { result: "UNKNOWN", message: "Could not read middleware file" };
364
- }
365
- if (/redirect.*login|redirect.*auth|NextResponse.*redirect/i.test(content)) {
366
- return { result: "PASS", message: "Middleware redirects unauthenticated users to login" };
367
- }
368
- return {
369
- result: "FAIL",
370
- message: "Middleware exists but no login redirect detected",
371
- evidence: [`${middlewareFile} \u2014 no redirect to /login or /auth`]
372
- };
373
- }
374
- };
375
-
376
- // src/checks/auth-07.ts
377
- var AUTH_07 = {
378
- id: "AUTH-07",
379
- name: "Session tokens in httpOnly cookies",
380
- module: "auth",
381
- priority: "P2",
382
- description: "Tokens in localStorage are accessible via XSS. httpOnly cookies prevent JavaScript access to session tokens.",
383
- async run(ctx) {
384
- const localStorageToken = /localStorage.*(?:token|session|jwt|access_token)|sessionStorage.*(?:token|session|jwt)/i;
385
- const httpOnly = /httpOnly|cookie.*session|supabase.*ssr|@supabase\/ssr/i;
386
- const badMatches = await ctx.grepFiles(localStorageToken);
387
- const goodMatches = await ctx.grepFiles(httpOnly);
388
- const realBad = badMatches.filter((m) => !m.content.trimStart().startsWith("//"));
389
- if (realBad.length > 0 && goodMatches.length === 0) {
390
- return {
391
- result: "FAIL",
392
- message: `Session tokens stored in localStorage (${realBad.length} location${realBad.length > 1 ? "s" : ""})`,
393
- evidence: realBad.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
394
- };
395
- }
396
- if (goodMatches.length > 0) {
397
- return { result: "PASS", message: "httpOnly cookies or @supabase/ssr detected for session management" };
398
- }
399
- return { result: "UNKNOWN", message: "No explicit token storage pattern detected" };
400
- }
401
- };
402
-
403
- // src/checks/auth-08.ts
404
- var AUTH_08 = {
405
- id: "AUTH-08",
406
- name: "Password hashing (if custom auth)",
407
- module: "auth",
408
- priority: "P0",
409
- description: "Passwords must be hashed with bcrypt/argon2/scrypt. N/A if using Supabase Auth (handles hashing internally).",
410
- async run(ctx) {
411
- const supabaseAuth = /supabase|@supabase|createBrowserClient|createServerClient/i;
412
- const supabaseMatches = await ctx.grepFiles(supabaseAuth);
413
- if (supabaseMatches.length > 0) {
414
- return { result: "N/A", message: "Supabase Auth handles password hashing internally (bcrypt/Argon2)" };
415
- }
416
- const hashPatterns = /bcrypt|argon2|scrypt|pbkdf2/i;
417
- const plaintext = /password.*=.*req\.body|password.*=.*body\.|plaintext|md5.*password|sha1.*password/i;
418
- const hashMatches = await ctx.grepFiles(hashPatterns);
419
- const plaintextMatches = await ctx.grepFiles(plaintext);
420
- if (plaintextMatches.length > 0 && hashMatches.length === 0) {
421
- return {
422
- result: "FAIL",
423
- message: "Password handling without secure hashing detected",
424
- evidence: plaintextMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
425
- };
426
- }
427
- if (hashMatches.length > 0) {
428
- return { result: "PASS", message: "Secure password hashing detected (bcrypt/argon2/scrypt)" };
429
- }
430
- return { result: "UNKNOWN", message: "No custom auth password handling detected" };
431
- }
432
- };
433
-
434
- // src/checks/auth-09.ts
435
- var AUTH_09 = {
436
- id: "AUTH-09",
437
- name: "Rate limiting on auth endpoints",
438
- module: "auth",
439
- priority: "P1",
440
- description: "Login/register endpoints without rate limiting are vulnerable to brute force attacks.",
441
- async run(ctx) {
442
- const authFiles = ctx.files.filter((f) => /auth|login|register|sign/i.test(f));
443
- const rateLimitPattern = /rateLimit|rate.limit|throttle|limiter|too.many.requests|429|upstash/i;
444
- if (authFiles.length === 0) {
445
- return { result: "UNKNOWN", message: "No auth endpoint files found" };
446
- }
447
- const matches = await ctx.grepFiles(rateLimitPattern, authFiles);
448
- if (matches.length > 0) {
449
- return { result: "PASS", message: "Rate limiting detected on auth endpoints" };
450
- }
451
- const globalMatches = await ctx.grepFiles(rateLimitPattern);
452
- if (globalMatches.length > 0) {
453
- return { result: "PASS", message: "Rate limiting detected in codebase (verify it covers auth endpoints)" };
454
- }
455
- const supabaseAuth = await ctx.grepFiles(/supabase.*auth|@supabase/i);
456
- if (supabaseAuth.length > 0) {
457
- return { result: "PASS", message: "Supabase Auth has built-in rate limiting for auth endpoints" };
458
- }
459
- return {
460
- result: "FAIL",
461
- message: "No rate limiting found on auth endpoints",
462
- evidence: ["Missing: rate limiting middleware on login/register routes"]
463
- };
464
- }
465
- };
466
-
467
- // src/checks/auth-10.ts
468
- var AUTH_10 = {
469
- id: "AUTH-10",
470
- name: "Profile sync trigger with safe search_path",
471
- module: "auth",
472
- priority: "P1",
473
- description: "handle_new_user() trigger must use SECURITY DEFINER with restricted search_path to prevent privilege escalation.",
474
- async run(ctx) {
475
- const sqlFiles = ctx.files.filter(isMigrationFile);
476
- if (sqlFiles.length === 0) {
477
- return { result: "UNKNOWN", message: "No SQL migration files found" };
478
- }
479
- const triggerPattern = /handle_new_user|on_auth_user_created|AFTER\s+INSERT\s+ON\s+auth\.users/i;
480
- const securityDefiner = /SECURITY\s+DEFINER/i;
481
- const searchPath = /search_path|SET\s+search_path/i;
482
- const triggerMatches = await ctx.grepFiles(triggerPattern);
483
- if (triggerMatches.length === 0) {
484
- return {
485
- result: "FAIL",
486
- message: "No profile sync trigger found (handle_new_user or equivalent)",
487
- evidence: ["Missing trigger: new auth.users rows won't sync to profiles table"]
488
- };
489
- }
490
- for (const file of sqlFiles) {
491
- let content;
492
- try {
493
- content = await ctx.readFile(file);
494
- } catch {
495
- continue;
496
- }
497
- if (triggerPattern.test(content)) {
498
- const hasDefiner = securityDefiner.test(content);
499
- const hasSearchPath = searchPath.test(content);
500
- if (hasDefiner && !hasSearchPath) {
501
- return {
502
- result: "FAIL",
503
- message: "Profile trigger uses SECURITY DEFINER without restricted search_path",
504
- evidence: [`${file} \u2014 SECURITY DEFINER without SET search_path = '' (privilege escalation risk)`]
505
- };
506
- }
507
- if (hasDefiner && hasSearchPath) {
508
- return {
509
- result: "PASS",
510
- message: "Profile sync trigger found with SECURITY DEFINER and restricted search_path"
511
- };
512
- }
513
- return {
514
- result: "PASS",
515
- message: "Profile sync trigger found"
516
- };
517
- }
518
- }
519
- return {
520
- result: "PASS",
521
- message: "Profile sync trigger detected"
522
- };
523
- }
524
- };
525
-
526
- // src/checks/auth-11.ts
527
- var AUTH_11 = {
528
- id: "AUTH-11",
529
- name: "Client/server auth separation",
530
- module: "auth",
531
- priority: "P0",
532
- description: "Separate Supabase clients for browser and server. One shared client leaks service_role to browser or uses anon key on server.",
533
- async run(ctx) {
534
- const browserClient = /createBrowserClient|createClient.*browser/i;
535
- const serverClient = /createServerClient|createClient.*server/i;
536
- const genericClient = /createClient\s*\(/;
537
- const browserMatches = await ctx.grepFiles(browserClient);
538
- const serverMatches = await ctx.grepFiles(serverClient);
539
- const genericMatches = await ctx.grepFiles(genericClient);
540
- if (browserMatches.length > 0 && serverMatches.length > 0) {
541
- return {
542
- result: "PASS",
543
- message: "Separate browser and server Supabase clients detected"
544
- };
545
- }
546
- if (browserMatches.length > 0 || serverMatches.length > 0) {
547
- return {
548
- result: "PASS",
549
- message: "Dedicated Supabase client pattern detected (@supabase/ssr)"
550
- };
551
- }
552
- if (genericMatches.length > 0) {
553
- const supabaseGeneric = genericMatches.filter((m) => /supabase|createClient/.test(m.content));
554
- if (supabaseGeneric.length > 0) {
555
- return {
556
- result: "FAIL",
557
- message: "Single generic createClient() used \u2014 no client/server separation",
558
- evidence: supabaseGeneric.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
559
- };
560
- }
561
- }
562
- return {
563
- result: "UNKNOWN",
564
- message: "No Supabase client initialization found"
565
- };
566
- }
567
- };
568
-
569
- // src/checks/auth-12.ts
570
- var AUTH_12 = {
571
- id: "AUTH-12",
572
- name: "Auth environment variables present",
573
- module: "auth",
574
- priority: "P2",
575
- description: "Missing auth env vars = hardcoded keys or broken auth in production.",
576
- async run(ctx) {
577
- const envFiles = ctx.files.filter(isEnvFile);
578
- if (envFiles.length === 0) {
579
- return { result: "UNKNOWN", message: "No .env files found" };
580
- }
581
- const requiredVars = ["SUPABASE_URL", "SUPABASE_ANON_KEY"];
582
- const found = [];
583
- for (const file of envFiles) {
584
- let content;
585
- try {
586
- content = await ctx.readFile(file);
587
- } catch {
588
- continue;
589
- }
590
- for (const v of requiredVars) {
591
- if (content.includes(v) && !found.includes(v)) found.push(v);
592
- }
593
- }
594
- const supabaseRef = await ctx.grepFiles(/supabase/i);
595
- if (supabaseRef.length === 0) {
596
- return { result: "UNKNOWN", message: "No Supabase references found" };
597
- }
598
- const missing = requiredVars.filter((v) => !found.includes(v));
599
- if (missing.length > 0) {
600
- return {
601
- result: "FAIL",
602
- message: `Missing auth env vars: ${missing.join(", ")}`,
603
- evidence: missing.map((v) => `${v} not found in any .env file`)
604
- };
605
- }
606
- return { result: "PASS", message: "Auth environment variables configured" };
607
- }
608
- };
609
-
610
- // src/checks/auth-13.ts
611
- var AUTH_13 = {
612
- id: "AUTH-13",
613
- name: "getUser() not getSession() for server-side auth",
614
- module: "auth",
615
- priority: "P0",
616
- description: "getSession() reads JWT without server verification. Use getUser() for auth decisions.",
617
- async run(ctx) {
618
- const getSessionPattern = /\.auth\.getSession\s*\(/;
619
- const getUserPattern = /\.auth\.getUser\s*\(/;
620
- const sessionMatches = await ctx.grepFiles(getSessionPattern);
621
- const userMatches = await ctx.grepFiles(getUserPattern);
622
- const serverSessionUsage = sessionMatches.filter((m) => isServerFile(m.file));
623
- const serverUserUsage = userMatches.filter((m) => isServerFile(m.file));
624
- if (serverSessionUsage.length > 0 && serverUserUsage.length === 0) {
625
- return {
626
- result: "FAIL",
627
- message: `Server-side code uses getSession() without getUser() (${serverSessionUsage.length} location${serverSessionUsage.length > 1 ? "s" : ""})`,
628
- evidence: serverSessionUsage.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
629
- };
630
- }
631
- if (serverSessionUsage.length > 0 && serverUserUsage.length > 0) {
632
- return {
633
- result: "FAIL",
634
- message: `Server code uses both getSession() and getUser() \u2014 getSession() in server context is insecure`,
635
- evidence: serverSessionUsage.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
636
- };
637
- }
638
- if (serverUserUsage.length > 0) {
639
- return {
640
- result: "PASS",
641
- message: "Server-side auth uses getUser() for verification"
642
- };
643
- }
644
- if (sessionMatches.length === 0 && userMatches.length === 0) {
645
- return {
646
- result: "UNKNOWN",
647
- message: "No getSession/getUser calls found \u2014 Supabase Auth may not be used"
648
- };
649
- }
650
- return {
651
- result: "PASS",
652
- message: "getSession() used only in client code (acceptable for UI state)"
653
- };
654
- }
655
- };
656
-
657
- // src/checks/auth-14.ts
658
- var AUTH_14 = {
659
- id: "AUTH-14",
660
- name: "No eval() or dangerouslySetInnerHTML with user data",
661
- module: "auth",
662
- priority: "P0",
663
- description: "eval() and dangerouslySetInnerHTML enable XSS attacks \u2014 session theft and account takeover.",
664
- async run(ctx) {
665
- const evalPattern = /\beval\s*\(/;
666
- const innerHtmlPattern = /dangerouslySetInnerHTML/;
667
- const evalMatches = await ctx.grepFiles(evalPattern);
668
- const innerHtmlMatches = await ctx.grepFiles(innerHtmlPattern);
669
- const dangerousEval = evalMatches.filter((m) => {
670
- if (m.content.trimStart().startsWith("//")) return false;
671
- if (m.content.trimStart().startsWith("*")) return false;
672
- if (m.file.includes("node_modules")) return false;
673
- return true;
674
- });
675
- const dangerousHtml = innerHtmlMatches.filter((m) => {
676
- if (m.content.trimStart().startsWith("//")) return false;
677
- if (m.file.includes("node_modules")) return false;
678
- return true;
679
- });
680
- const allDangerous = [...dangerousEval, ...dangerousHtml];
681
- if (allDangerous.length > 0) {
682
- return {
683
- result: "FAIL",
684
- message: `Unsafe code patterns found (${dangerousEval.length} eval, ${dangerousHtml.length} dangerouslySetInnerHTML)`,
685
- evidence: allDangerous.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
686
- };
687
- }
688
- return {
689
- result: "PASS",
690
- message: "No eval() or dangerouslySetInnerHTML found"
691
- };
692
- }
693
- };
694
-
695
- // src/checks/auth-15.ts
696
- var AUTH_15 = {
697
- id: "AUTH-15",
698
- name: "CORS configuration",
699
- module: "auth",
700
- priority: "P2",
701
- description: "Access-Control-Allow-Origin: * with credentials allows any website to read auth cookies and data.",
702
- async run(ctx) {
703
- const wildcardCors = /Access-Control-Allow-Origin.*\*|cors.*origin.*\*|origin:\s*['"]?\*/i;
704
- const credentialsCors = /credentials.*true|allowCredentials|Access-Control-Allow-Credentials/i;
705
- const wildcardMatches = await ctx.grepFiles(wildcardCors);
706
- const credMatches = await ctx.grepFiles(credentialsCors);
707
- if (wildcardMatches.length > 0 && credMatches.length > 0) {
708
- return {
709
- result: "FAIL",
710
- message: "CORS wildcard (*) used with credentials \u2014 any website can read auth data",
711
- evidence: wildcardMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
712
- };
713
- }
714
- if (wildcardMatches.length > 0) {
715
- return {
716
- result: "FAIL",
717
- message: "CORS wildcard (*) detected \u2014 consider restricting to specific origins",
718
- evidence: wildcardMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
719
- };
720
- }
721
- return { result: "PASS", message: "No dangerous CORS wildcard configuration detected" };
722
- }
723
- };
724
-
725
- // src/checks/auth-16.ts
726
- var AUTH_16 = {
727
- id: "AUTH-16",
728
- name: "Token/session expiration configured",
729
- module: "auth",
730
- priority: "P2",
731
- description: "Infinite sessions mean a stolen token works forever. Configure JWT expiration and session timeouts.",
732
- async run(ctx) {
733
- const expiryPatterns = /expiresIn|maxAge|session.*expir|jwt.*expir|SESSION_EXPIRY|JWT_EXPIRY|token.*expir/i;
734
- const matches = await ctx.grepFiles(expiryPatterns);
735
- if (matches.length > 0) {
736
- return { result: "PASS", message: "Token/session expiration configured" };
737
- }
738
- const supabaseRef = await ctx.grepFiles(/supabase/i);
739
- if (supabaseRef.length > 0) {
740
- return { result: "PASS", message: "Supabase Auth has default JWT expiry (3600s)" };
741
- }
742
- return {
743
- result: "FAIL",
744
- message: "No token/session expiration configuration found",
745
- evidence: ["Missing: expiresIn, maxAge, or session expiry configuration"]
746
- };
747
- }
748
- };
749
-
750
- // src/checks/auth-17.ts
751
- var AUTH_17 = {
752
- id: "AUTH-17",
753
- name: "Storage bucket RLS",
754
- module: "auth",
755
- priority: "P0",
756
- description: "Supabase storage buckets must have RLS on storage.objects. Without it, all files are publicly accessible.",
757
- async run(ctx) {
758
- const sqlFiles = ctx.files.filter(isMigrationFile);
759
- if (sqlFiles.length === 0) {
760
- return { result: "UNKNOWN", message: "No SQL migration files found" };
761
- }
762
- const storageBucket = /storage\.buckets|create.*bucket|INSERT.*storage\.buckets/i;
763
- const storageRls = /storage\.objects.*ENABLE.*ROW.*LEVEL|RLS.*storage\.objects|POLICY.*storage\.objects/i;
764
- const bucketMatches = await ctx.grepFiles(storageBucket, sqlFiles);
765
- const rlsMatches = await ctx.grepFiles(storageRls, sqlFiles);
766
- if (bucketMatches.length === 0) {
767
- const codeStorageRef = await ctx.grepFiles(/supabase.*storage|storage\.from\(/i);
768
- if (codeStorageRef.length === 0) {
769
- return { result: "N/A", message: "No Supabase storage usage detected" };
770
- }
771
- return {
772
- result: "UNKNOWN",
773
- message: "Storage used in code but no bucket creation in migrations"
774
- };
775
- }
776
- if (rlsMatches.length > 0) {
777
- return { result: "PASS", message: "Storage bucket RLS policies detected" };
778
- }
779
- return {
780
- result: "FAIL",
781
- message: "Storage buckets created without RLS on storage.objects \u2014 files may be publicly accessible",
782
- evidence: bucketMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
783
- };
784
- }
785
- };
786
-
787
- // src/checks/auth-18.ts
788
- var AUTH_18 = {
789
- id: "AUTH-18",
790
- name: "RBAC in app_metadata not user_metadata",
791
- module: "auth",
792
- priority: "P0",
793
- description: "Roles in user_metadata are user-editable. Use app_metadata for RBAC (server-only).",
794
- async run(ctx) {
795
- const userMetaRole = /user_meta_?data.*role|raw_user_meta_?data.*role|user\.user_metadata.*role/i;
796
- const appMetaRole = /app_meta_?data.*role|user\.app_metadata.*role/i;
797
- const updateUserMeta = /updateUser\s*\(\s*\{[\s\S]*?data\s*:\s*\{[\s\S]*?role/;
798
- const userMetaMatches = await ctx.grepFiles(userMetaRole);
799
- const appMetaMatches = await ctx.grepFiles(appMetaRole);
800
- const selfAssignMatches = await ctx.grepFiles(updateUserMeta);
801
- if (userMetaMatches.length > 0) {
802
- return {
803
- result: "FAIL",
804
- message: `Role stored in user_metadata (user-editable) \u2014 ${userMetaMatches.length} location${userMetaMatches.length > 1 ? "s" : ""}`,
805
- evidence: userMetaMatches.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
806
- };
807
- }
808
- if (selfAssignMatches.length > 0) {
809
- return {
810
- result: "FAIL",
811
- message: "Role set via updateUser() data field (user-editable user_metadata)",
812
- evidence: selfAssignMatches.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
813
- };
814
- }
815
- if (appMetaMatches.length > 0) {
816
- return {
817
- result: "PASS",
818
- message: "Role stored in app_metadata (server-only, not user-editable)"
819
- };
820
- }
821
- return {
822
- result: "UNKNOWN",
823
- message: "No role/RBAC references found in codebase"
824
- };
825
- }
826
- };
827
-
828
- // src/checks/auth-19.ts
829
- var AUTH_19 = {
830
- id: "AUTH-19",
831
- name: "Multi-tenancy data isolation",
832
- module: "auth",
833
- priority: "P1",
834
- description: "RLS policies must include tenant_id or organization_id to prevent cross-tenant data access.",
835
- async run(ctx) {
836
- const sqlFiles = ctx.files.filter(isMigrationFile);
837
- const tenantPattern = /tenant_id|organization_id|org_id|team_id|workspace_id/i;
838
- const schemaMatches = await ctx.grepFiles(tenantPattern, sqlFiles.length > 0 ? sqlFiles : void 0);
839
- const codeMatches = await ctx.grepFiles(tenantPattern);
840
- if (schemaMatches.length === 0 && codeMatches.length === 0) {
841
- return { result: "N/A", message: "No multi-tenancy pattern detected (single-tenant app)" };
842
- }
843
- const rlsPolicyTenant = await ctx.grepFiles(/POLICY[\s\S]*?tenant_id|POLICY[\s\S]*?org_id|POLICY[\s\S]*?organization_id/i, sqlFiles);
844
- if (rlsPolicyTenant.length > 0) {
845
- return { result: "PASS", message: "RLS policies include tenant isolation" };
846
- }
847
- return {
848
- result: "FAIL",
849
- message: "Multi-tenant schema detected but RLS policies don't include tenant_id filtering",
850
- evidence: ["Tenant columns exist but RLS policies may allow cross-tenant data access"]
851
- };
852
- }
853
- };
854
-
855
- // src/checks/auth-20.ts
856
- var AUTH_20 = {
857
- id: "AUTH-20",
858
- name: "OAuth domain restriction",
859
- module: "auth",
860
- priority: "P1",
861
- description: "Social login (Google, GitHub) should restrict allowed email domains to prevent unauthorized access.",
862
- async run(ctx) {
863
- const oauthPattern = /signInWithOAuth|signIn.*provider|google|github.*login|oauth/i;
864
- const domainRestriction = /allowedDomain|domain.*restrict|email.*domain|hd=|hosted_domain/i;
865
- const oauthMatches = await ctx.grepFiles(oauthPattern);
866
- if (oauthMatches.length === 0) {
867
- return { result: "N/A", message: "No OAuth/social login detected" };
868
- }
869
- const domainMatches = await ctx.grepFiles(domainRestriction);
870
- if (domainMatches.length > 0) {
871
- return { result: "PASS", message: "OAuth domain restriction detected" };
872
- }
873
- return {
874
- result: "FAIL",
875
- message: "OAuth login enabled without domain restriction \u2014 any Google/GitHub account can sign in",
876
- evidence: oauthMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
877
- };
878
- }
879
- };
880
-
881
- // src/checks/auth-21.ts
882
- var AUTH_21 = {
883
- id: "AUTH-21",
884
- name: "Force dynamic on auth routes",
885
- module: "auth",
886
- priority: "P1",
887
- description: "Auth routes must use force-dynamic to prevent ISR cache serving wrong user data.",
888
- async run(ctx) {
889
- const authRouteFiles = ctx.files.filter(
890
- (f) => /auth|login|register|signup|sign-in|sign-up/i.test(f) && (f.includes("page.") || f.includes("route.")) && (f.endsWith(".ts") || f.endsWith(".tsx") || f.endsWith(".js") || f.endsWith(".jsx"))
891
- );
892
- if (authRouteFiles.length === 0) {
893
- return { result: "UNKNOWN", message: "No auth route/page files found" };
894
- }
895
- const dynamicExport = /export\s+const\s+dynamic\s*=\s*['"]force-dynamic['"]/;
896
- const noStore = /unstable_noStore|noStore|revalidate\s*=\s*0/;
897
- const missingDynamic = [];
898
- for (const file of authRouteFiles) {
899
- let content;
900
- try {
901
- content = await ctx.readFile(file);
902
- } catch {
903
- continue;
904
- }
905
- if (!dynamicExport.test(content) && !noStore.test(content)) {
906
- if (file.includes("page.")) {
907
- missingDynamic.push(file);
908
- }
909
- }
910
- }
911
- if (missingDynamic.length > 0) {
912
- return {
913
- result: "FAIL",
914
- message: `${missingDynamic.length} auth page${missingDynamic.length > 1 ? "s" : ""} without force-dynamic \u2014 may serve cached auth state`,
915
- evidence: missingDynamic.slice(0, 5).map((f) => `${f} \u2014 missing export const dynamic = 'force-dynamic'`)
916
- };
917
- }
918
- return {
919
- result: "PASS",
920
- message: "Auth routes use force-dynamic or equivalent"
921
- };
922
- }
923
- };
924
-
925
- // src/checks/auth-22.ts
926
- var AUTH_22 = {
927
- id: "AUTH-22",
928
- name: "CSRF protection on Route Handlers",
929
- module: "auth",
930
- priority: "P2",
931
- description: "Server Actions have built-in CSRF protection (Next.js 14+). Route Handlers do NOT \u2014 they need explicit CSRF tokens or SameSite cookies.",
932
- async run(ctx) {
933
- const routeHandlers = ctx.files.filter((f) => /route\.(ts|js)$/i.test(f));
934
- if (routeHandlers.length === 0) {
935
- return { result: "UNKNOWN", message: "No Route Handler files found" };
936
- }
937
- const mutationHandlers = [];
938
- for (const file of routeHandlers) {
939
- let content;
940
- try {
941
- content = await ctx.readFile(file);
942
- } catch {
943
- continue;
944
- }
945
- if (/export\s+(?:async\s+)?function\s+(?:POST|PUT|PATCH|DELETE)/i.test(content)) {
946
- mutationHandlers.push(file);
947
- }
948
- }
949
- if (mutationHandlers.length === 0) {
950
- return { result: "PASS", message: "No mutation Route Handlers found (only GET)" };
951
- }
952
- const csrfPatterns = /csrf|csurf|csrfToken|SameSite|x-csrf|anti.?forgery/i;
953
- const csrfMatches = await ctx.grepFiles(csrfPatterns);
954
- if (csrfMatches.length > 0) {
955
- return { result: "PASS", message: "CSRF protection detected" };
956
- }
957
- const serverActions = await ctx.grepFiles(/["']use server["']/i);
958
- if (serverActions.length > 0 && mutationHandlers.length <= 2) {
959
- return { result: "PASS", message: "Mutations use Server Actions (built-in CSRF protection)" };
960
- }
961
- return {
962
- result: "FAIL",
963
- message: `${mutationHandlers.length} mutation Route Handler${mutationHandlers.length > 1 ? "s" : ""} without CSRF protection`,
964
- evidence: mutationHandlers.slice(0, 3).map((f) => `${f} \u2014 POST/PUT/PATCH/DELETE without CSRF token`)
965
- };
966
- }
967
- };
968
-
969
- // src/checks/auth-23.ts
970
- var AUTH_23 = {
971
- id: "AUTH-23",
972
- name: "Email verification required",
973
- module: "auth",
974
- priority: "P2",
975
- description: "Without email verification, anyone can sign up with any email \u2014 spam accounts and impersonation.",
976
- async run(ctx) {
977
- const emailVerifyPattern = /email_confirmed_at|emailConfirmed|verifyEmail|confirmEmail|email.*verif|verification.*email/i;
978
- const matches = await ctx.grepFiles(emailVerifyPattern);
979
- if (matches.length > 0) {
980
- return { result: "PASS", message: "Email verification check detected" };
981
- }
982
- const supabaseRef = await ctx.grepFiles(/supabase/i);
983
- if (supabaseRef.length > 0) {
984
- return { result: "PASS", message: "Supabase Auth has configurable email verification (check dashboard settings)" };
985
- }
986
- return {
987
- result: "FAIL",
988
- message: "No email verification enforcement found",
989
- evidence: ["Missing: email_confirmed_at check or equivalent verification flow"]
990
- };
991
- }
992
- };
993
-
994
- // src/checks/auth-24.ts
995
- var AUTH_24 = {
996
- id: "AUTH-24",
997
- name: "Account enumeration prevention",
998
- module: "auth",
999
- priority: "P2",
1000
- description: "Login/register error messages must not reveal whether an email exists. Consistent messages prevent user enumeration.",
1001
- async run(ctx) {
1002
- const enumPatterns = /email.*not.*found|user.*not.*found|no.*account.*with|email.*already.*registered|email.*already.*exists|account.*already.*exists/i;
1003
- const matches = await ctx.grepFiles(enumPatterns);
1004
- const codeMatches = matches.filter((m) => !m.content.trimStart().startsWith("//") && !m.content.trimStart().startsWith("*"));
1005
- if (codeMatches.length > 0) {
1006
- return {
1007
- result: "FAIL",
1008
- message: `Account enumeration possible \u2014 error messages reveal email existence (${codeMatches.length} location${codeMatches.length > 1 ? "s" : ""})`,
1009
- evidence: codeMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1010
- };
1011
- }
1012
- return { result: "PASS", message: "No account enumeration patterns detected in error messages" };
1013
- }
1014
- };
1015
-
1016
- // src/checks/auth-25.ts
1017
- var AUTH_25 = {
1018
- id: "AUTH-25",
1019
- name: "Refresh token reuse detection",
1020
- module: "auth",
1021
- priority: "P2",
1022
- description: "Token rotation prevents stolen refresh tokens from being reused. Supabase supports this via config.",
1023
- async run(ctx) {
1024
- const rotationPattern = /token.*rotation|refresh.*token.*reuse|reuse.*detection|GOTRUE_SECURITY_REFRESH_TOKEN_REUSE_INTERVAL/i;
1025
- const matches = await ctx.grepFiles(rotationPattern);
1026
- if (matches.length > 0) {
1027
- return { result: "PASS", message: "Refresh token rotation/reuse detection configured" };
1028
- }
1029
- const supabaseRef = await ctx.grepFiles(/supabase/i);
1030
- if (supabaseRef.length > 0) {
1031
- return { result: "PASS", message: "Supabase Auth has built-in refresh token rotation (check dashboard config)" };
1032
- }
1033
- return { result: "UNKNOWN", message: "No refresh token configuration detected" };
1034
- }
1035
- };
1036
-
1037
- // src/checks/auth-26.ts
1038
- var AUTH_26 = {
1039
- id: "AUTH-26",
1040
- name: "Sign-out revokes server session",
1041
- module: "auth",
1042
- priority: "P2",
1043
- description: "Sign-out must revoke the server session (scope: 'global'), not just clear client cookies.",
1044
- async run(ctx) {
1045
- const signOutPattern = /signOut|sign_out|logout|log_out/i;
1046
- const globalScope = /scope.*global|global.*scope/i;
1047
- const signOutMatches = await ctx.grepFiles(signOutPattern);
1048
- if (signOutMatches.length === 0) {
1049
- return { result: "UNKNOWN", message: "No sign-out implementation found" };
1050
- }
1051
- const globalMatches = await ctx.grepFiles(globalScope);
1052
- if (globalMatches.length > 0) {
1053
- return { result: "PASS", message: "Sign-out uses global scope (revokes all sessions)" };
1054
- }
1055
- return {
1056
- result: "FAIL",
1057
- message: "Sign-out found but no global scope \u2014 sessions may persist on other devices",
1058
- evidence: signOutMatches.slice(0, 2).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1059
- };
1060
- }
1061
- };
1062
-
1063
- // src/checks/auth-27.ts
1064
- var AUTH_27 = {
1065
- id: "AUTH-27",
1066
- name: "Email link poisoning mitigation",
1067
- module: "auth",
1068
- priority: "P2",
1069
- description: "SITE_URL and REDIRECT_ALLOW_LIST must be configured to prevent email link redirect attacks.",
1070
- async run(ctx) {
1071
- const siteUrlPattern = /SITE_URL|GOTRUE_SITE_URL/i;
1072
- const redirectAllowPattern = /REDIRECT_ALLOW_LIST|ADDITIONAL_REDIRECT_URLS|redirect.*allow/i;
1073
- const siteUrlMatches = await ctx.grepFiles(siteUrlPattern);
1074
- const redirectMatches = await ctx.grepFiles(redirectAllowPattern);
1075
- if (siteUrlMatches.length > 0 && redirectMatches.length > 0) {
1076
- return { result: "PASS", message: "SITE_URL and REDIRECT_ALLOW_LIST configured" };
1077
- }
1078
- if (siteUrlMatches.length > 0) {
1079
- return { result: "PASS", message: "SITE_URL configured (check REDIRECT_ALLOW_LIST in Supabase dashboard)" };
1080
- }
1081
- const supabaseRef = await ctx.grepFiles(/supabase/i);
1082
- if (supabaseRef.length === 0) {
1083
- return { result: "N/A", message: "No Supabase usage detected" };
1084
- }
1085
- return {
1086
- result: "FAIL",
1087
- message: "No SITE_URL or REDIRECT_ALLOW_LIST found \u2014 email magic links may redirect to attacker domains",
1088
- evidence: ["Missing: SITE_URL and REDIRECT_ALLOW_LIST in env configuration"]
1089
- };
1090
- }
1091
- };
1092
-
1093
- // src/checks/auth-28.ts
1094
- var AUTH_28 = {
1095
- id: "AUTH-28",
1096
- name: "Realtime presence authorization",
1097
- module: "auth",
1098
- priority: "P2",
1099
- description: "Supabase Realtime Presence channels must have authorization. Without it, any user can see who's online.",
1100
- async run(ctx) {
1101
- const realtimePattern = /realtime|presence|channel.*subscribe|supabase.*channel/i;
1102
- const matches = await ctx.grepFiles(realtimePattern);
1103
- if (matches.length === 0) {
1104
- return { result: "N/A", message: "No Supabase Realtime/Presence usage detected" };
1105
- }
1106
- const authInRealtime = /authorized|RLS.*realtime|realtime.*auth|channel.*auth/i;
1107
- const authMatches = await ctx.grepFiles(authInRealtime);
1108
- if (authMatches.length > 0) {
1109
- return { result: "PASS", message: "Realtime channel authorization detected" };
1110
- }
1111
- return {
1112
- result: "FAIL",
1113
- message: "Realtime/Presence channels found without authorization",
1114
- evidence: matches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1115
- };
1116
- }
1117
- };
1118
-
1119
- // src/checks/bil-01.ts
1120
- var BIL_01 = {
1121
- id: "BIL-01",
1122
- name: "Stripe secret key not in client code",
1123
- module: "billing",
1124
- priority: "P0",
1125
- description: "sk_live_/sk_test_ in client code = full Stripe API access for any visitor",
1126
- async run(ctx) {
1127
- const hardcodedPattern = /sk_live_[a-zA-Z0-9]{20,}|sk_test_[a-zA-Z0-9]{20,}/;
1128
- const envRefPattern = /STRIPE_SECRET|NEXT_PUBLIC_.*STRIPE.*SECRET|VITE_.*STRIPE.*SECRET/i;
1129
- const hardcodedMatches = await ctx.grepFiles(hardcodedPattern);
1130
- const hardcodedInCode = hardcodedMatches.filter(
1131
- (m) => !isEnvFile(m.file) && !m.content.trimStart().startsWith("#") && !m.content.trimStart().startsWith("//")
1132
- );
1133
- if (hardcodedInCode.length > 0) {
1134
- return {
1135
- result: "FAIL",
1136
- message: `Hardcoded Stripe secret key found in source code (${hardcodedInCode.length} location${hardcodedInCode.length > 1 ? "s" : ""})`,
1137
- evidence: hardcodedInCode.map((m) => `${m.file}:${m.line} \u2192 sk_***_[REDACTED]`)
1138
- };
1139
- }
1140
- const envRefMatches = await ctx.grepFiles(envRefPattern);
1141
- const publicPrefix = envRefMatches.filter(
1142
- (m) => /NEXT_PUBLIC_|VITE_|EXPO_PUBLIC_/i.test(m.content) && /STRIPE.*SECRET/i.test(m.content)
1143
- );
1144
- if (publicPrefix.length > 0) {
1145
- return {
1146
- result: "FAIL",
1147
- message: "Stripe secret key exposed via NEXT_PUBLIC_/VITE_ prefix",
1148
- evidence: publicPrefix.map((m) => `${m.file}:${m.line} \u2192 ${m.content.split("=")[0]}`)
1149
- };
1150
- }
1151
- const clientUsage = envRefMatches.filter(
1152
- (m) => isClientFile(m.file) && /STRIPE_SECRET/i.test(m.content) && !m.file.includes("/api/") && !m.file.includes("route.")
1153
- );
1154
- if (clientUsage.length > 0) {
1155
- return {
1156
- result: "FAIL",
1157
- message: "Stripe secret key referenced in client-side file",
1158
- evidence: clientUsage.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1159
- };
1160
- }
1161
- const anyStripeRef = envRefMatches.filter((m) => /STRIPE_SECRET/i.test(m.content));
1162
- if (anyStripeRef.length > 0) {
1163
- return {
1164
- result: "PASS",
1165
- message: "Stripe secret key found only in server-side/env files"
1166
- };
1167
- }
1168
- return {
1169
- result: "UNKNOWN",
1170
- message: "No Stripe secret key references found \u2014 Stripe may not be used"
1171
- };
1172
- }
1173
- };
1174
-
1175
- // src/checks/bil-02.ts
1176
- var BIL_02 = {
1177
- id: "BIL-02",
1178
- name: "Webhook signature verification",
1179
- module: "billing",
1180
- priority: "P0",
1181
- aliases: ["ADM-15"],
1182
- description: "Stripe webhook handler must verify signature via constructEvent(). Without it, anyone can send fake webhooks.",
1183
- async run(ctx) {
1184
- const webhookFilePattern = /webhook/i;
1185
- const webhookFiles = ctx.files.filter((f) => webhookFilePattern.test(f));
1186
- if (webhookFiles.length === 0) {
1187
- return {
1188
- result: "UNKNOWN",
1189
- message: "No webhook handler files found"
1190
- };
1191
- }
1192
- const constructEventPattern = /constructEvent|constructEventAsync|webhooks\.construct/;
1193
- const signaturePattern = /stripe-signature|Stripe-Signature|STRIPE_WEBHOOK_SECRET|webhook.*secret/i;
1194
- const constructMatches = await ctx.grepFiles(constructEventPattern, webhookFiles);
1195
- const signatureMatches = await ctx.grepFiles(signaturePattern, webhookFiles);
1196
- if (constructMatches.length > 0) {
1197
- return {
1198
- result: "PASS",
1199
- message: "Webhook handler uses constructEvent() for signature verification"
1200
- };
1201
- }
1202
- if (signatureMatches.length > 0) {
1203
- return {
1204
- result: "PASS",
1205
- message: "Webhook handler references signature verification"
1206
- };
1207
- }
1208
- const stripeInWebhook = await ctx.grepFiles(/stripe|Stripe/, webhookFiles);
1209
- if (stripeInWebhook.length > 0) {
1210
- return {
1211
- result: "FAIL",
1212
- message: "Stripe webhook handler found WITHOUT signature verification",
1213
- evidence: webhookFiles.map((f) => `${f} \u2014 no constructEvent() or signature check`)
1214
- };
1215
- }
1216
- return {
1217
- result: "UNKNOWN",
1218
- message: "Webhook files found but no Stripe references \u2014 may not be Stripe webhooks"
1219
- };
1220
- }
1221
- };
1222
-
1223
- // src/checks/bil-03.ts
1224
- var BIL_03 = {
1225
- id: "BIL-03",
1226
- name: "Raw body preservation in webhook",
1227
- module: "billing",
1228
- priority: "P0",
1229
- description: "Webhook signature verification requires raw request body. req.json() breaks it \u2014 use req.text() or bodyParser: false.",
1230
- async run(ctx) {
1231
- const webhookFiles = ctx.files.filter((f) => /webhook/i.test(f));
1232
- if (webhookFiles.length === 0) {
1233
- return { result: "UNKNOWN", message: "No webhook handler files found" };
1234
- }
1235
- const rawBodyPatterns = /request\.text\(\)|req\.text\(\)|bodyParser\s*:\s*false|getRawBody|raw\s*:\s*true|rawBody/;
1236
- const badBodyPatterns = /request\.json\(\)|req\.body(?!\s*Parser)|JSON\.parse/;
1237
- const rawMatches = await ctx.grepFiles(rawBodyPatterns, webhookFiles);
1238
- const badMatches = await ctx.grepFiles(badBodyPatterns, webhookFiles);
1239
- if (rawMatches.length > 0) {
1240
- return {
1241
- result: "PASS",
1242
- message: "Webhook handler preserves raw body for signature verification"
1243
- };
1244
- }
1245
- const stripeInWebhook = await ctx.grepFiles(/stripe|constructEvent/i, webhookFiles);
1246
- if (stripeInWebhook.length === 0) {
1247
- return { result: "UNKNOWN", message: "Webhook files found but no Stripe references" };
1248
- }
1249
- if (badMatches.length > 0) {
1250
- return {
1251
- result: "FAIL",
1252
- message: "Webhook handler uses req.json()/req.body instead of raw body \u2014 signature verification will fail",
1253
- evidence: badMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1254
- };
1255
- }
1256
- return {
1257
- result: "FAIL",
1258
- message: "Stripe webhook handler found but no raw body preservation detected",
1259
- evidence: webhookFiles.map((f) => `${f} \u2014 missing request.text() or bodyParser: false`)
1260
- };
1261
- }
1262
- };
1263
-
1264
- // src/checks/bil-04.ts
1265
- var BIL_04 = {
1266
- id: "BIL-04",
1267
- name: "Idempotent webhook processing",
1268
- module: "billing",
1269
- priority: "P1",
1270
- description: "Stripe retries webhooks by design. Without idempotency checks, duplicate credits and double subscriptions occur.",
1271
- async run(ctx) {
1272
- const webhookFiles = ctx.files.filter((f) => /webhook/i.test(f));
1273
- if (webhookFiles.length === 0) {
1274
- return { result: "UNKNOWN", message: "No webhook handler files found" };
1275
- }
1276
- const idempotencyPatterns = /event\.id|event_id|idempoten|UNIQUE.*event|duplicate.*check|processed.*events|already.*processed/i;
1277
- const matches = await ctx.grepFiles(idempotencyPatterns, webhookFiles);
1278
- const stripeRef = await ctx.grepFiles(/stripe|constructEvent/i, webhookFiles);
1279
- if (stripeRef.length === 0) {
1280
- return { result: "UNKNOWN", message: "No Stripe webhook handler found" };
1281
- }
1282
- if (matches.length > 0) {
1283
- return { result: "PASS", message: "Webhook idempotency check detected" };
1284
- }
1285
- return {
1286
- result: "FAIL",
1287
- message: "Stripe webhook handler has no idempotency check \u2014 duplicate processing possible",
1288
- evidence: ["No event.id tracking or duplicate check in webhook handler"]
1289
- };
1290
- }
1291
- };
1292
-
1293
- // src/checks/bil-05.ts
1294
- var BIL_05 = {
1295
- id: "BIL-05",
1296
- name: "Subscription state machine",
1297
- module: "billing",
1298
- priority: "P1",
1299
- description: "Stripe has 8 subscription states. Handling only active/canceled causes access issues for past_due, trialing, incomplete, unpaid.",
1300
- async run(ctx) {
1301
- const fullStates = /past_due|trialing|incomplete|unpaid/i;
1302
- const basicStates = /active|canceled/i;
1303
- const matches = await ctx.grepFiles(fullStates);
1304
- const basicMatches = await ctx.grepFiles(basicStates);
1305
- if (matches.length >= 2) {
1306
- return { result: "PASS", message: "Multiple subscription states handled beyond active/canceled" };
1307
- }
1308
- if (basicMatches.length > 0 && matches.length === 0) {
1309
- return {
1310
- result: "FAIL",
1311
- message: "Only active/canceled states handled \u2014 missing past_due, trialing, incomplete, unpaid",
1312
- evidence: ["Subscription state machine is incomplete \u2014 users may lose access incorrectly"]
1313
- };
1314
- }
1315
- const stripeRef = await ctx.grepFiles(/subscription|stripe/i);
1316
- if (stripeRef.length === 0) {
1317
- return { result: "UNKNOWN", message: "No subscription handling found" };
1318
- }
1319
- return {
1320
- result: "FAIL",
1321
- message: "Subscription code found but no explicit state handling",
1322
- evidence: ["Missing: past_due, trialing, incomplete, unpaid state handling"]
1323
- };
1324
- }
1325
- };
1326
-
1327
- // src/checks/bil-06.ts
1328
- var BIL_06 = {
1329
- id: "BIL-06",
1330
- name: "Entitlement/plan limit checking",
1331
- module: "billing",
1332
- priority: "P1",
1333
- description: "Stripe doesn't track usage limits. Without app-side entitlement checks, free users access paid features.",
1334
- async run(ctx) {
1335
- const patterns = /checkPlan|checkEntitle|planLimit|featureGate|subscription.*check|plan.*limit|canAccess|hasFeature|isSubscribed|entitlement/i;
1336
- const matches = await ctx.grepFiles(patterns);
1337
- if (matches.length > 0) {
1338
- return { result: "PASS", message: "Entitlement/plan limit checking detected" };
1339
- }
1340
- const stripeRef = await ctx.grepFiles(/subscription|stripe.*plan|pricing/i);
1341
- if (stripeRef.length === 0) {
1342
- return { result: "UNKNOWN", message: "No subscription/pricing code found" };
1343
- }
1344
- return {
1345
- result: "FAIL",
1346
- message: "Subscription code exists but no entitlement/plan limit checks found",
1347
- evidence: ["Missing: checkPlanLimit, checkEntitlement, featureGate, or equivalent"]
1348
- };
1349
- }
1350
- };
1351
-
1352
- // src/checks/bil-07.ts
1353
- var BIL_07 = {
1354
- id: "BIL-07",
1355
- name: "Customer \u2194 User sync",
1356
- module: "billing",
1357
- priority: "P1",
1358
- description: "Every user must map to exactly one Stripe customer. Missing sync = orphaned customers, broken subscription lookups.",
1359
- async run(ctx) {
1360
- const schemaPattern = /stripe_customer_id|stripeCustomerId|customer_id.*stripe/i;
1361
- const createCustomerPattern = /customers\.create|createCustomer|stripe.*customer/i;
1362
- const sqlFiles = ctx.files.filter(isMigrationFile);
1363
- const schemaMatches = await ctx.grepFiles(schemaPattern, sqlFiles.length > 0 ? sqlFiles : void 0);
1364
- const codeMatches = await ctx.grepFiles(createCustomerPattern);
1365
- if (schemaMatches.length > 0) {
1366
- return {
1367
- result: "PASS",
1368
- message: "stripe_customer_id found in database schema \u2014 user-customer sync exists"
1369
- };
1370
- }
1371
- if (codeMatches.length > 0) {
1372
- return {
1373
- result: "PASS",
1374
- message: "Stripe customer creation found in code"
1375
- };
1376
- }
1377
- const stripeRef = await ctx.grepFiles(/stripe/i);
1378
- if (stripeRef.length > 0) {
1379
- return {
1380
- result: "FAIL",
1381
- message: "Stripe is used but no stripe_customer_id in schema and no customer creation logic",
1382
- evidence: ["Missing: stripe_customer_id column in users/profiles table"]
1383
- };
1384
- }
1385
- return {
1386
- result: "UNKNOWN",
1387
- message: "No Stripe references found"
1388
- };
1389
- }
1390
- };
1391
-
1392
- // src/checks/bil-08.ts
1393
- var BIL_08 = {
1394
- id: "BIL-08",
1395
- name: "Webhook returns 200 for unknown events",
1396
- module: "billing",
1397
- priority: "P1",
1398
- description: "Returning 400/500 for unhandled events triggers Stripe retry loops and eventual endpoint deactivation.",
1399
- async run(ctx) {
1400
- const webhookFiles = ctx.files.filter((f) => /webhook/i.test(f));
1401
- if (webhookFiles.length === 0) {
1402
- return { result: "UNKNOWN", message: "No webhook handler files found" };
1403
- }
1404
- const badDefault = /else\s*\{[\s\S]*?(?:return.*(?:4\d\d|5\d\d)|throw|NextResponse.*(?:4\d\d|5\d\d))/i;
1405
- const goodDefault = /default\s*:[\s\S]*?(?:200|ok|return\s+new\s+Response)/i;
1406
- const badMatches = await ctx.grepFiles(badDefault, webhookFiles);
1407
- const goodMatches = await ctx.grepFiles(goodDefault, webhookFiles);
1408
- if (badMatches.length > 0) {
1409
- return {
1410
- result: "FAIL",
1411
- message: "Webhook handler returns error for unknown events \u2014 will trigger Stripe retry loop",
1412
- evidence: badMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1413
- };
1414
- }
1415
- if (goodMatches.length > 0) {
1416
- return { result: "PASS", message: "Webhook handler returns 200 for unhandled events" };
1417
- }
1418
- return { result: "UNKNOWN", message: "Could not determine webhook default response behavior" };
1419
- }
1420
- };
1421
-
1422
- // src/checks/bil-09.ts
1423
- var BIL_09 = {
1424
- id: "BIL-09",
1425
- name: "No client-side billing state as source of truth",
1426
- module: "billing",
1427
- priority: "P1",
1428
- description: "Subscription state in localStorage/React state can be manipulated. Server must be source of truth.",
1429
- async run(ctx) {
1430
- const clientStatePatterns = /localStorage.*(?:subscription|plan|billing)|sessionStorage.*(?:subscription|plan)|useState.*(?:subscription|isPro|isPaid|plan)/i;
1431
- const matches = await ctx.grepFiles(clientStatePatterns);
1432
- const clientMatches = matches.filter((m) => isClientFile(m.file));
1433
- if (clientMatches.length > 0) {
1434
- return {
1435
- result: "FAIL",
1436
- message: `Client-side billing state found (${clientMatches.length} location${clientMatches.length > 1 ? "s" : ""})`,
1437
- evidence: clientMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1438
- };
1439
- }
1440
- return { result: "PASS", message: "No client-side billing state as source of truth detected" };
1441
- }
1442
- };
1443
-
1444
- // src/checks/bil-10.ts
1445
- var BIL_10 = {
1446
- id: "BIL-10",
1447
- name: "Reconciliation mechanism",
1448
- module: "billing",
1449
- priority: "P2",
1450
- description: "Even good webhook handling drifts 1-2x/month. A reconciliation job comparing Stripe vs DB prevents silent revenue loss.",
1451
- async run(ctx) {
1452
- const patterns = /reconcil|sync.*stripe|stripe.*sync|cron.*billing|billing.*cron|verify.*subscription|subscription.*verify/i;
1453
- const filePattern = ctx.files.filter((f) => /reconcil/i.test(f));
1454
- const grepMatches = await ctx.grepFiles(patterns);
1455
- if (filePattern.length > 0 || grepMatches.length > 0) {
1456
- return { result: "PASS", message: "Reconciliation mechanism detected" };
1457
- }
1458
- const stripeRef = await ctx.grepFiles(/stripe/i);
1459
- if (stripeRef.length === 0) {
1460
- return { result: "UNKNOWN", message: "No Stripe references found" };
1461
- }
1462
- return {
1463
- result: "FAIL",
1464
- message: "No billing reconciliation mechanism found",
1465
- evidence: ["Missing: reconciliation script/job to compare Stripe state vs DB state"]
1466
- };
1467
- }
1468
- };
1469
-
1470
- // src/checks/bil-11.ts
1471
- var BIL_11 = {
1472
- id: "BIL-11",
1473
- name: "Cancellation handling",
1474
- module: "billing",
1475
- priority: "P1",
1476
- description: "Explicit cancel flow: server-side cancellation + webhook processing + DB update + access revocation.",
1477
- async run(ctx) {
1478
- const cancelPatterns = /cancel.*subscription|subscription.*cancel|customer\.subscription\.deleted|cancelAt|cancel_at_period_end/i;
1479
- const matches = await ctx.grepFiles(cancelPatterns);
1480
- if (matches.length >= 2) {
1481
- return { result: "PASS", message: "Cancellation handling detected in multiple locations" };
1482
- }
1483
- if (matches.length === 1) {
1484
- return { result: "PASS", message: "Cancellation handling detected" };
1485
- }
1486
- const stripeRef = await ctx.grepFiles(/subscription|stripe/i);
1487
- if (stripeRef.length === 0) {
1488
- return { result: "UNKNOWN", message: "No subscription code found" };
1489
- }
1490
- return {
1491
- result: "FAIL",
1492
- message: "No cancellation handling found \u2014 users may retain access after canceling or be charged after canceling",
1493
- evidence: ["Missing: cancel subscription flow, subscription.deleted webhook handler"]
1494
- };
1495
- }
1496
- };
1497
-
1498
- // src/checks/bil-12.ts
1499
- var BIL_12 = {
1500
- id: "BIL-12",
1501
- name: "Stripe env vars configured",
1502
- module: "billing",
1503
- priority: "P2",
1504
- description: "Missing Stripe env vars = billing won't work in production or keys get hardcoded.",
1505
- async run(ctx) {
1506
- const envFiles = ctx.files.filter(isEnvFile);
1507
- if (envFiles.length === 0) {
1508
- return { result: "UNKNOWN", message: "No .env files found" };
1509
- }
1510
- const requiredVars = ["STRIPE_SECRET_KEY", "STRIPE_WEBHOOK_SECRET", "STRIPE_PUBLISHABLE_KEY"];
1511
- const found = [];
1512
- const missing = [];
1513
- for (const file of envFiles) {
1514
- let content;
1515
- try {
1516
- content = await ctx.readFile(file);
1517
- } catch {
1518
- continue;
1519
- }
1520
- for (const v of requiredVars) {
1521
- if (content.includes(v) && !found.includes(v)) found.push(v);
1522
- }
1523
- }
1524
- for (const v of requiredVars) {
1525
- if (!found.includes(v)) missing.push(v);
1526
- }
1527
- const stripeRef = await ctx.grepFiles(/stripe/i);
1528
- if (stripeRef.length === 0) {
1529
- return { result: "UNKNOWN", message: "No Stripe references found" };
1530
- }
1531
- if (missing.length > 0) {
1532
- return {
1533
- result: "FAIL",
1534
- message: `Missing Stripe env vars: ${missing.join(", ")}`,
1535
- evidence: missing.map((v) => `${v} not found in any .env file`)
1536
- };
1537
- }
1538
- return { result: "PASS", message: "All required Stripe env vars configured" };
1539
- }
1540
- };
1541
-
1542
- // src/checks/bil-13.ts
1543
- var BIL_13 = {
1544
- id: "BIL-13",
1545
- name: "Error handling in payment flows",
1546
- module: "billing",
1547
- priority: "P2",
1548
- description: "Missing error handling around Stripe API calls = white screen on payment failure, no diagnostics.",
1549
- async run(ctx) {
1550
- const stripeApiCall = /stripe\.\w+\.\w+\(|checkout\.sessions\.create|subscriptions\.create|customers\.create/i;
1551
- const errorHandling = /try\s*\{|\.catch\s*\(|catch\s*\(/;
1552
- const apiFiles = ctx.files.filter((f) => /api|route|action|server/i.test(f));
1553
- const stripeMatches = await ctx.grepFiles(stripeApiCall, apiFiles.length > 0 ? apiFiles : void 0);
1554
- if (stripeMatches.length === 0) {
1555
- return { result: "UNKNOWN", message: "No Stripe API calls found" };
1556
- }
1557
- const errorMatches = await ctx.grepFiles(errorHandling, apiFiles.length > 0 ? apiFiles : void 0);
1558
- if (errorMatches.length > 0) {
1559
- return { result: "PASS", message: "Error handling found in payment flow files" };
1560
- }
1561
- return {
1562
- result: "FAIL",
1563
- message: "Stripe API calls found without error handling",
1564
- evidence: stripeMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1565
- };
1566
- }
1567
- };
1568
-
1569
- // src/checks/bil-14.ts
1570
- var BIL_14 = {
1571
- id: "BIL-14",
1572
- name: "Checkout flow is server-initiated",
1573
- module: "billing",
1574
- priority: "P0",
1575
- description: "Checkout session must be created on the server. Client-side creation requires secret key in browser.",
1576
- async run(ctx) {
1577
- const checkoutPattern = /checkout\.sessions\.create|createCheckoutSession/;
1578
- const matches = await ctx.grepFiles(checkoutPattern);
1579
- if (matches.length === 0) {
1580
- return { result: "UNKNOWN", message: "No Checkout session creation found" };
1581
- }
1582
- const clientSide = matches.filter((m) => isClientFile(m.file) && !isServerFile(m.file));
1583
- for (const m of clientSide) {
1584
- let content;
1585
- try {
1586
- content = await ctx.readFile(m.file);
1587
- } catch {
1588
- continue;
1589
- }
1590
- if (/["']use client["']/.test(content)) {
1591
- return {
1592
- result: "FAIL",
1593
- message: "Checkout session created in client component \u2014 secret key required in browser",
1594
- evidence: [`${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`]
1595
- };
1596
- }
1597
- }
1598
- const serverSide = matches.filter((m) => isServerFile(m.file));
1599
- if (serverSide.length > 0) {
1600
- return { result: "PASS", message: "Checkout session created server-side" };
1601
- }
1602
- return { result: "PASS", message: "Checkout session creation found (not in client component)" };
1603
- }
1604
- };
1605
-
1606
- // src/checks/bil-15.ts
1607
- var BIL_15 = {
1608
- id: "BIL-15",
1609
- name: "Stripe Price ID tampering prevention",
1610
- module: "billing",
1611
- priority: "P0",
1612
- description: "Price ID from client request must be validated server-side against an allowlist. Client can send any price_id.",
1613
- async run(ctx) {
1614
- const checkoutPattern = /checkout\.sessions\.create|createCheckoutSession/;
1615
- const checkoutMatches = await ctx.grepFiles(checkoutPattern);
1616
- if (checkoutMatches.length === 0) {
1617
- return { result: "UNKNOWN", message: "No Stripe Checkout session creation found" };
1618
- }
1619
- const priceFromRequest = /req\.body.*price|req\.json.*price|request\.json.*price|body\.price|priceId|price_id/i;
1620
- const priceValidation = /allowedPrices|ALLOWED_PRICES|validPrices|PRICE_IDS|priceWhitelist|priceLookup|PLANS\[|plans\[|PRICES\[|prices\./i;
1621
- const requestPriceMatches = await ctx.grepFiles(priceFromRequest);
1622
- const validationMatches = await ctx.grepFiles(priceValidation);
1623
- if (requestPriceMatches.length > 0 && validationMatches.length === 0) {
1624
- return {
1625
- result: "FAIL",
1626
- message: "Price ID accepted from client without server-side validation",
1627
- evidence: requestPriceMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1628
- };
1629
- }
1630
- if (validationMatches.length > 0) {
1631
- return {
1632
- result: "PASS",
1633
- message: "Price ID validation/allowlist detected"
1634
- };
1635
- }
1636
- return {
1637
- result: "PASS",
1638
- message: "Checkout session creation found with no client price input detected"
1639
- };
1640
- }
1641
- };
1642
-
1643
- // src/checks/bil-16.ts
1644
- var BIL_16 = {
1645
- id: "BIL-16",
1646
- name: "Never fulfill on success_url",
1647
- module: "billing",
1648
- priority: "P0",
1649
- description: "Fulfillment (DB writes, access grants) must happen via webhook, not on the success redirect page.",
1650
- async run(ctx) {
1651
- const successPagePatterns = [
1652
- /success/i,
1653
- /thank/i,
1654
- /payment.*confirm/i
1655
- ];
1656
- const successPages = ctx.files.filter((f) => {
1657
- const isPage = f.includes("page.") || f.includes("index.");
1658
- return isPage && successPagePatterns.some((p) => p.test(f));
1659
- });
1660
- if (successPages.length === 0) {
1661
- return {
1662
- result: "UNKNOWN",
1663
- message: "No success/thank-you pages found"
1664
- };
1665
- }
1666
- const fulfillmentPatterns = /\.insert\(|\.update\(|\.upsert\(|createSubscription|grantAccess|activateUser|fulfillOrder|UPDATE.*SET|INSERT.*INTO/i;
1667
- const dangerousMatches = [];
1668
- for (const file of successPages) {
1669
- let content;
1670
- try {
1671
- content = await ctx.readFile(file);
1672
- } catch {
1673
- continue;
1674
- }
1675
- const lines = content.split("\n");
1676
- for (let i = 0; i < lines.length; i++) {
1677
- if (fulfillmentPatterns.test(lines[i])) {
1678
- dangerousMatches.push({ file, line: i + 1, content: lines[i].trim() });
1679
- }
1680
- }
1681
- }
1682
- if (dangerousMatches.length > 0) {
1683
- return {
1684
- result: "FAIL",
1685
- message: `Fulfillment logic found in success page (${dangerousMatches.length} location${dangerousMatches.length > 1 ? "s" : ""}) \u2014 must use webhook instead`,
1686
- evidence: dangerousMatches.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1687
- };
1688
- }
1689
- return {
1690
- result: "PASS",
1691
- message: "Success pages contain no fulfillment logic"
1692
- };
1693
- }
1694
- };
1695
-
1696
- // src/checks/bil-17.ts
1697
- var BIL_17 = {
1698
- id: "BIL-17",
1699
- name: "PCI raw card data safety",
1700
- module: "billing",
1701
- priority: "P0",
1702
- description: "Raw card numbers, CVV, expiration must never touch your server. Use Stripe Elements or Checkout.",
1703
- async run(ctx) {
1704
- const rawCardPatterns = /card\s*\[\s*number\s*\]|cardNumber|card_number|cvv|cvc|expiry.*month|card.*expir/i;
1705
- const inputPatterns = /<input[^>]*(?:card|cvv|cvc|expir)/i;
1706
- const stripeElements = /CardElement|PaymentElement|useStripe|useElements|@stripe\/react-stripe-js|stripe\.elements/i;
1707
- const rawCardMatches = await ctx.grepFiles(rawCardPatterns);
1708
- const inputMatches = await ctx.grepFiles(inputPatterns);
1709
- const codeMatches = [...rawCardMatches, ...inputMatches].filter((m) => {
1710
- if (m.content.trimStart().startsWith("//")) return false;
1711
- if (m.content.trimStart().startsWith("*")) return false;
1712
- if (m.content.trimStart().startsWith("#")) return false;
1713
- if (m.file.includes("test") || m.file.includes("spec")) return false;
1714
- if (m.file.includes(".md")) return false;
1715
- return true;
1716
- });
1717
- const elementsMatches = await ctx.grepFiles(stripeElements);
1718
- if (codeMatches.length > 0 && elementsMatches.length === 0) {
1719
- return {
1720
- result: "FAIL",
1721
- message: `Raw card data handling detected without Stripe Elements (${codeMatches.length} location${codeMatches.length > 1 ? "s" : ""})`,
1722
- evidence: codeMatches.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1723
- };
1724
- }
1725
- if (elementsMatches.length > 0) {
1726
- return {
1727
- result: "PASS",
1728
- message: "Stripe Elements/Checkout detected \u2014 card data handled by Stripe"
1729
- };
1730
- }
1731
- return {
1732
- result: "UNKNOWN",
1733
- message: "No card data handling or Stripe Elements detected"
1734
- };
1735
- }
1736
- };
1737
-
1738
- // src/checks/bil-18.ts
1739
- var BIL_18 = {
1740
- id: "BIL-18",
1741
- name: "Refund/dispute handling",
1742
- module: "billing",
1743
- priority: "P1",
1744
- description: "Webhook must handle charge.refunded and charge.dispute.created to revoke access and update billing state.",
1745
- async run(ctx) {
1746
- const webhookFiles = ctx.files.filter((f) => /webhook/i.test(f));
1747
- if (webhookFiles.length === 0) {
1748
- return { result: "UNKNOWN", message: "No webhook handler files found" };
1749
- }
1750
- const refundPattern = /charge\.refunded|refund/i;
1751
- const disputePattern = /charge\.dispute|dispute/i;
1752
- const refundMatches = await ctx.grepFiles(refundPattern, webhookFiles);
1753
- const disputeMatches = await ctx.grepFiles(disputePattern, webhookFiles);
1754
- if (refundMatches.length > 0 && disputeMatches.length > 0) {
1755
- return { result: "PASS", message: "Both refund and dispute handling detected in webhook" };
1756
- }
1757
- if (refundMatches.length > 0 || disputeMatches.length > 0) {
1758
- return { result: "PASS", message: "Partial refund/dispute handling detected" };
1759
- }
1760
- const stripeRef = await ctx.grepFiles(/stripe/i, webhookFiles);
1761
- if (stripeRef.length === 0) {
1762
- return { result: "UNKNOWN", message: "No Stripe webhook handler found" };
1763
- }
1764
- return {
1765
- result: "FAIL",
1766
- message: "No refund or dispute handling in webhook \u2014 access may persist after refund",
1767
- evidence: ["Missing: charge.refunded and charge.dispute.created event handlers"]
1768
- };
1769
- }
1770
- };
1771
-
1772
- // src/checks/bil-19.ts
1773
- var BIL_19 = {
1774
- id: "BIL-19",
1775
- name: "Stripe API version pinning",
1776
- module: "billing",
1777
- priority: "P2",
1778
- description: "Pin Stripe API version to avoid breaking changes from automatic upgrades.",
1779
- async run(ctx) {
1780
- const versionPattern = /apiVersion|api_version/i;
1781
- const matches = await ctx.grepFiles(versionPattern);
1782
- if (matches.length > 0) {
1783
- return { result: "PASS", message: "Stripe API version pinning detected" };
1784
- }
1785
- const stripeInit = await ctx.grepFiles(/new\s+Stripe\s*\(|Stripe\s*\(/i);
1786
- if (stripeInit.length === 0) {
1787
- return { result: "UNKNOWN", message: "No Stripe initialization found" };
1788
- }
1789
- return {
1790
- result: "FAIL",
1791
- message: "Stripe initialized without explicit API version \u2014 may break on Stripe upgrades",
1792
- evidence: stripeInit.slice(0, 2).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1793
- };
1794
- }
1795
- };
1796
-
1797
- // src/checks/bil-20.ts
1798
- var BIL_20 = {
1799
- id: "BIL-20",
1800
- name: "Portal session auth",
1801
- module: "billing",
1802
- priority: "P1",
1803
- description: "Billing portal session must use customer ID from authenticated user, not from client request.",
1804
- async run(ctx) {
1805
- const portalPattern = /billingPortal|billing_portal|customer_portal/i;
1806
- const matches = await ctx.grepFiles(portalPattern);
1807
- if (matches.length === 0) {
1808
- return { result: "UNKNOWN", message: "No billing portal usage found" };
1809
- }
1810
- const authBeforePortal = /getUser|getSession|auth\(\)|requireAuth/i;
1811
- const portalFiles = [...new Set(matches.map((m) => m.file))];
1812
- const authMatches = await ctx.grepFiles(authBeforePortal, portalFiles);
1813
- if (authMatches.length > 0) {
1814
- return { result: "PASS", message: "Billing portal session created with authenticated customer ID" };
1815
- }
1816
- return {
1817
- result: "FAIL",
1818
- message: "Billing portal session created without verifying authenticated user",
1819
- evidence: matches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
1820
- };
1821
- }
1822
- };
1823
-
1824
- // src/checks/bil-21.ts
1825
- var BIL_21 = {
1826
- id: "BIL-21",
1827
- name: "Webhook event coverage",
1828
- module: "billing",
1829
- priority: "P1",
1830
- description: "Webhook handler must process at minimum 3 core subscription events. Missing events = state drift.",
1831
- async run(ctx) {
1832
- const webhookFiles = ctx.files.filter((f) => /webhook/i.test(f));
1833
- if (webhookFiles.length === 0) {
1834
- return { result: "UNKNOWN", message: "No webhook handler files found" };
1835
- }
1836
- const coreEvents = [
1837
- { pattern: /checkout\.session\.completed/i, name: "checkout.session.completed" },
1838
- { pattern: /customer\.subscription\.updated/i, name: "customer.subscription.updated" },
1839
- { pattern: /customer\.subscription\.deleted/i, name: "customer.subscription.deleted" },
1840
- { pattern: /invoice\.payment_succeeded/i, name: "invoice.payment_succeeded" },
1841
- { pattern: /invoice\.payment_failed/i, name: "invoice.payment_failed" }
1842
- ];
1843
- const foundEvents = [];
1844
- const missingEvents = [];
1845
- for (const event of coreEvents) {
1846
- const matches = await ctx.grepFiles(event.pattern, webhookFiles);
1847
- if (matches.length > 0) {
1848
- foundEvents.push(event.name);
1849
- } else {
1850
- missingEvents.push(event.name);
1851
- }
1852
- }
1853
- if (foundEvents.length === 0) {
1854
- const stripeRef = await ctx.grepFiles(/stripe/i, webhookFiles);
1855
- if (stripeRef.length === 0) {
1856
- return { result: "UNKNOWN", message: "Webhook files found but no Stripe event handling" };
1857
- }
1858
- return {
1859
- result: "FAIL",
1860
- message: "Stripe webhook handler found but no subscription events handled",
1861
- evidence: ["Missing: checkout.session.completed, subscription.updated, subscription.deleted"]
1862
- };
1863
- }
1864
- if (foundEvents.length < 3) {
1865
- return {
1866
- result: "FAIL",
1867
- message: `Only ${foundEvents.length}/5 core webhook events handled`,
1868
- evidence: [
1869
- `Handled: ${foundEvents.join(", ")}`,
1870
- `Missing: ${missingEvents.join(", ")}`
1871
- ]
1872
- };
1873
- }
1874
- return {
1875
- result: "PASS",
1876
- message: `${foundEvents.length}/5 core webhook events handled`
1877
- };
1878
- }
1879
- };
1880
-
1881
- // src/checks/bil-22.ts
1882
- var BIL_22 = {
1883
- id: "BIL-22",
1884
- name: "Trial period handling",
1885
- module: "billing",
1886
- priority: "P2",
1887
- description: "If trials are offered, handle trial_will_end webhook and trial-to-paid conversion properly.",
1888
- async run(ctx) {
1889
- const trialPattern = /trial_period_days|trial_end|trialing|trial_will_end|trialDays/i;
1890
- const matches = await ctx.grepFiles(trialPattern);
1891
- if (matches.length >= 2) {
1892
- return { result: "PASS", message: "Trial period handling detected" };
1893
- }
1894
- if (matches.length === 1) {
1895
- return { result: "PASS", message: "Trial reference found (verify trial_will_end webhook is handled)" };
1896
- }
1897
- const stripeRef = await ctx.grepFiles(/subscription|stripe/i);
1898
- if (stripeRef.length === 0) {
1899
- return { result: "UNKNOWN", message: "No subscription code found" };
1900
- }
1901
- return { result: "N/A", message: "No trial period usage detected" };
1902
- }
1903
- };
1904
-
1905
- // src/checks/bil-23.ts
1906
- var BIL_23 = {
1907
- id: "BIL-23",
1908
- name: "Card testing protection",
1909
- module: "billing",
1910
- priority: "P2",
1911
- description: "Rate limiting on payment endpoints prevents card testing attacks (automated validation of stolen cards).",
1912
- async run(ctx) {
1913
- const paymentFiles = ctx.files.filter((f) => /checkout|payment|billing|subscribe/i.test(f));
1914
- if (paymentFiles.length === 0) {
1915
- return { result: "UNKNOWN", message: "No payment endpoint files found" };
1916
- }
1917
- const rateLimitPattern = /rateLimit|rate.limit|throttle|limiter|too.many.requests|429|upstash/i;
1918
- const matches = await ctx.grepFiles(rateLimitPattern, paymentFiles);
1919
- if (matches.length > 0) {
1920
- return { result: "PASS", message: "Rate limiting detected on payment endpoints" };
1921
- }
1922
- const globalRateLimit = await ctx.grepFiles(rateLimitPattern);
1923
- if (globalRateLimit.length > 0) {
1924
- return { result: "PASS", message: "Rate limiting detected in codebase (verify it covers payment endpoints)" };
1925
- }
1926
- return {
1927
- result: "FAIL",
1928
- message: "No rate limiting found on payment endpoints \u2014 vulnerable to card testing",
1929
- evidence: ["Missing: rate limiting middleware on checkout/payment routes"]
1930
- };
1931
- }
1932
- };
1933
-
1934
- // src/checks/bil-24.ts
1935
- var BIL_24 = {
1936
- id: "BIL-24",
1937
- name: "Metered billing usage reporting",
1938
- module: "billing",
1939
- priority: "P2",
1940
- description: "If using metered pricing, usage must be reported to Stripe. Missing reporting = customers never billed for usage.",
1941
- async run(ctx) {
1942
- const meteredPattern = /metered|usage_type|usageRecord|usage.*report|createUsageRecord|meter/i;
1943
- const matches = await ctx.grepFiles(meteredPattern);
1944
- if (matches.length >= 2) {
1945
- return { result: "PASS", message: "Metered billing usage reporting detected" };
1946
- }
1947
- if (matches.length === 1) {
1948
- return { result: "PASS", message: "Metered billing reference found" };
1949
- }
1950
- return { result: "N/A", message: "No metered billing usage detected" };
1951
- }
1952
- };
1953
-
1954
- // src/checks/adm-01.ts
1955
- var ADM_01 = {
1956
- id: "ADM-01",
1957
- name: "Admin endpoints have server-side auth",
1958
- module: "admin",
1959
- priority: "P0",
1960
- description: "Every /admin or /api/admin endpoint must verify admin role server-side. Server Actions are public POST endpoints.",
1961
- async run(ctx) {
1962
- const adminRoutes = ctx.files.filter(
1963
- (f) => /api\/admin|app\/api\/admin/i.test(f) && (f.endsWith(".ts") || f.endsWith(".js"))
1964
- );
1965
- if (adminRoutes.length === 0) {
1966
- return { result: "UNKNOWN", message: "No admin API route handlers found" };
1967
- }
1968
- const adminAuthPatterns = /requireAdmin|role.*admin|isAdmin|checkPermission|requireRole|app_metadata.*admin|admin.*guard/i;
1969
- const unprotected = [];
1970
- for (const file of adminRoutes) {
1971
- let content;
1972
- try {
1973
- content = await ctx.readFile(file);
1974
- } catch {
1975
- continue;
1976
- }
1977
- if (!adminAuthPatterns.test(content)) {
1978
- unprotected.push(file);
1979
- }
1980
- }
1981
- if (unprotected.length > 0) {
1982
- return {
1983
- result: "FAIL",
1984
- message: `${unprotected.length} admin API route${unprotected.length > 1 ? "s" : ""} without admin role verification`,
1985
- evidence: unprotected.slice(0, 5).map((f) => `${f} \u2014 no admin role check detected`)
1986
- };
1987
- }
1988
- return {
1989
- result: "PASS",
1990
- message: `All ${adminRoutes.length} admin API routes have admin role verification`
1991
- };
1992
- }
1993
- };
1994
-
1995
- // src/checks/adm-02.ts
1996
- var ADM_02 = {
1997
- id: "ADM-02",
1998
- name: "Admin routes not accessible without auth",
1999
- module: "admin",
2000
- priority: "P0",
2001
- description: "Admin pages and API must return 401/403 without valid admin token. Middleware alone is insufficient (CVE-2025-29927).",
2002
- async run(ctx) {
2003
- const adminPages = ctx.files.filter(
2004
- (f) => /app\/admin\/.*page\.|pages\/admin\//i.test(f)
2005
- );
2006
- if (adminPages.length === 0) {
2007
- return { result: "UNKNOWN", message: "No admin pages found" };
2008
- }
2009
- const authGuardPatterns = /getUser|getSession|requireAuth|requireAdmin|redirect.*login|redirect.*auth|middleware|auth\(\)|checkPermission/i;
2010
- const unprotected = [];
2011
- for (const file of adminPages) {
2012
- let content;
2013
- try {
2014
- content = await ctx.readFile(file);
2015
- } catch {
2016
- continue;
2017
- }
2018
- if (!authGuardPatterns.test(content)) {
2019
- unprotected.push(file);
2020
- }
2021
- }
2022
- const hasMiddleware = ctx.files.some((f) => /middleware\.(ts|js)$/i.test(f));
2023
- let middlewareCoversAdmin = false;
2024
- if (hasMiddleware) {
2025
- const mwFile = ctx.files.find((f) => /middleware\.(ts|js)$/i.test(f));
2026
- if (mwFile) {
2027
- try {
2028
- const content = await ctx.readFile(mwFile);
2029
- middlewareCoversAdmin = /admin/i.test(content);
2030
- } catch {
2031
- }
2032
- }
2033
- }
2034
- if (unprotected.length > 0 && !middlewareCoversAdmin) {
2035
- return {
2036
- result: "FAIL",
2037
- message: `${unprotected.length} admin page${unprotected.length > 1 ? "s" : ""} without auth guard (no middleware coverage either)`,
2038
- evidence: unprotected.slice(0, 5).map((f) => `${f} \u2014 no auth guard detected`)
2039
- };
2040
- }
2041
- if (unprotected.length > 0 && middlewareCoversAdmin) {
2042
- return {
2043
- result: "PASS",
2044
- message: `Admin pages protected via middleware (${adminPages.length} pages). Note: add per-route auth for defense-in-depth.`
2045
- };
2046
- }
2047
- return {
2048
- result: "PASS",
2049
- message: `All ${adminPages.length} admin pages have auth guards`
2050
- };
2051
- }
2052
- };
2053
-
2054
- // src/checks/adm-03.ts
2055
- var ADM_03 = {
2056
- id: "ADM-03",
2057
- name: "No client-side-only role checks",
2058
- module: "admin",
2059
- priority: "P1",
2060
- description: "Role checks in JSX ({isAdmin && <Panel/>}) without server-side enforcement are bypassable via dev tools.",
2061
- async run(ctx) {
2062
- const clientRolePattern = /isAdmin|is_admin|role.*admin|admin.*role/i;
2063
- const serverRolePattern = /requireAdmin|requireRole|checkPermission|app_metadata.*admin/i;
2064
- const clientMatches = await ctx.grepFiles(clientRolePattern);
2065
- const clientOnly = clientMatches.filter((m) => isClientFile(m.file) && !isServerFile(m.file));
2066
- if (clientOnly.length === 0) {
2067
- return { result: "PASS", message: "No client-side-only role checks detected" };
2068
- }
2069
- const serverMatches = await ctx.grepFiles(serverRolePattern);
2070
- if (serverMatches.length > 0) {
2071
- return { result: "PASS", message: "Client-side role checks backed by server-side enforcement" };
2072
- }
2073
- return {
2074
- result: "FAIL",
2075
- message: `Client-side role checks found without server-side enforcement (${clientOnly.length} location${clientOnly.length > 1 ? "s" : ""})`,
2076
- evidence: clientOnly.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2077
- };
2078
- }
2079
- };
2080
-
2081
- // src/checks/adm-04.ts
2082
- var ADM_04 = {
2083
- id: "ADM-04",
2084
- name: "Audit log for admin actions",
2085
- module: "admin",
2086
- priority: "P1",
2087
- description: "Admin operations (delete user, change role, modify data) must be logged. Required for SOC 2 compliance.",
2088
- async run(ctx) {
2089
- const codePattern = /audit.*log|admin.*log|action.*log|createAuditEntry|logAdminAction|activity.*log/i;
2090
- const schemaPattern = /audit_log|admin_log|activity_log/i;
2091
- const codeMatches = await ctx.grepFiles(codePattern);
2092
- const sqlFiles = ctx.files.filter(isMigrationFile);
2093
- const schemaMatches = await ctx.grepFiles(schemaPattern, sqlFiles.length > 0 ? sqlFiles : void 0);
2094
- if (codeMatches.length > 0 || schemaMatches.length > 0) {
2095
- return { result: "PASS", message: "Audit logging detected for admin actions" };
2096
- }
2097
- const adminRef = await ctx.grepFiles(/admin/i);
2098
- if (adminRef.length === 0) {
2099
- return { result: "UNKNOWN", message: "No admin functionality detected" };
2100
- }
2101
- return {
2102
- result: "FAIL",
2103
- message: "No audit logging found for admin actions",
2104
- evidence: ["Missing: audit_log table or logAdminAction function"]
2105
- };
2106
- }
2107
- };
2108
-
2109
- // src/checks/adm-05.ts
2110
- var ADM_05 = {
2111
- id: "ADM-05",
2112
- name: "RBAC beyond binary admin",
2113
- module: "admin",
2114
- priority: "P2",
2115
- description: "Binary admin/user model means every admin can do everything. Granular roles limit blast radius.",
2116
- async run(ctx) {
2117
- const sqlFiles = ctx.files.filter(isMigrationFile);
2118
- const rbacPattern = /RBAC|permission|capability|role.*check|roles.*table|user_roles/i;
2119
- const binaryPattern = /isAdmin|is_admin|boolean.*admin/i;
2120
- const rbacMatches = await ctx.grepFiles(rbacPattern);
2121
- const schemaRoles = await ctx.grepFiles(/role.*enum|roles.*table|permissions.*table/i, sqlFiles.length > 0 ? sqlFiles : void 0);
2122
- if (rbacMatches.length > 0 || schemaRoles.length > 0) {
2123
- return { result: "PASS", message: "RBAC or granular permissions model detected" };
2124
- }
2125
- const binaryMatches = await ctx.grepFiles(binaryPattern);
2126
- if (binaryMatches.length > 0) {
2127
- return {
2128
- result: "FAIL",
2129
- message: "Binary admin model (isAdmin boolean) \u2014 no granular permissions",
2130
- evidence: binaryMatches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2131
- };
2132
- }
2133
- return { result: "UNKNOWN", message: "No admin role model detected" };
2134
- }
2135
- };
2136
-
2137
- // src/checks/adm-06.ts
2138
- var ADM_06 = {
2139
- id: "ADM-06",
2140
- name: "Safe impersonation (if exists)",
2141
- module: "admin",
2142
- priority: "P1",
2143
- description: "If admin can 'login as user', the action must be logged with admin ID preserved. Replacing admin session = invisible abuse.",
2144
- async run(ctx) {
2145
- const impersonatePattern = /impersonat|loginAs|actAs|switchUser|act_as|login_as/i;
2146
- const matches = await ctx.grepFiles(impersonatePattern);
2147
- if (matches.length === 0) {
2148
- return { result: "N/A", message: "No impersonation functionality detected" };
2149
- }
2150
- const auditPattern = /audit|log.*admin|admin.*log|logAction|track/i;
2151
- const auditMatches = await ctx.grepFiles(auditPattern);
2152
- if (auditMatches.length > 0) {
2153
- return { result: "PASS", message: "Impersonation exists with audit logging" };
2154
- }
2155
- return {
2156
- result: "FAIL",
2157
- message: "Impersonation found without audit logging \u2014 admin actions will appear as user actions",
2158
- evidence: matches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2159
- };
2160
- }
2161
- };
2162
-
2163
- // src/checks/adm-07.ts
2164
- var ADM_07 = {
2165
- id: "ADM-07",
2166
- name: "UUIDs not sequential IDs",
2167
- module: "admin",
2168
- priority: "P1",
2169
- description: "Sequential integer IDs enable enumeration attacks (IDOR). Use UUIDs for all user-facing primary keys.",
2170
- async run(ctx) {
2171
- const sqlFiles = ctx.files.filter(isMigrationFile);
2172
- if (sqlFiles.length === 0) {
2173
- return { result: "UNKNOWN", message: "No SQL migration files found" };
2174
- }
2175
- const serialPattern = /(?:SERIAL|BIGSERIAL|INTEGER\s+PRIMARY\s+KEY\s+(?:AUTO_INCREMENT|GENERATED))/i;
2176
- const uuidPattern = /UUID\s+(?:PRIMARY\s+KEY\s+)?DEFAULT\s+(?:gen_random_uuid|uuid_generate)/i;
2177
- const serialMatches = await ctx.grepFiles(serialPattern, sqlFiles);
2178
- const uuidMatches = await ctx.grepFiles(uuidPattern, sqlFiles);
2179
- const realSerialIssues = serialMatches.filter((m) => {
2180
- if (/migration|schema_migration|_prisma/i.test(m.content)) return false;
2181
- return true;
2182
- });
2183
- if (realSerialIssues.length > 0 && uuidMatches.length === 0) {
2184
- return {
2185
- result: "FAIL",
2186
- message: `Sequential IDs found without UUID usage (${realSerialIssues.length} table${realSerialIssues.length > 1 ? "s" : ""})`,
2187
- evidence: realSerialIssues.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2188
- };
2189
- }
2190
- if (uuidMatches.length > 0) {
2191
- return {
2192
- result: "PASS",
2193
- message: "UUID primary keys detected in schema"
2194
- };
2195
- }
2196
- return {
2197
- result: "UNKNOWN",
2198
- message: "No primary key definitions found in migrations"
2199
- };
2200
- }
2201
- };
2202
-
2203
- // src/checks/adm-08.ts
2204
- var ADM_08 = {
2205
- id: "ADM-08",
2206
- name: "No unprotected debug/admin routes",
2207
- module: "admin",
2208
- priority: "P0",
2209
- description: "Debug, internal, and admin routes must have auth guards. Hidden URLs are not security.",
2210
- async run(ctx) {
2211
- const suspiciousRoutes = [
2212
- /app\/admin\//,
2213
- /pages\/admin\//,
2214
- /app\/internal\//,
2215
- /app\/debug\//,
2216
- /api\/admin\//,
2217
- /api\/debug\//,
2218
- /api\/internal\//,
2219
- /api\/graphql/,
2220
- /api\/seed/,
2221
- /api\/reset/,
2222
- /api\/test/
2223
- ];
2224
- const foundRoutes = ctx.files.filter(
2225
- (f) => suspiciousRoutes.some((p) => p.test(f))
2226
- );
2227
- if (foundRoutes.length === 0) {
2228
- return {
2229
- result: "PASS",
2230
- message: "No admin/debug/internal routes detected"
2231
- };
2232
- }
2233
- const authPatterns = /requireAdmin|requireAuth|getUser|getSession|verifyToken|isAdmin|checkPermission|requireRole|auth\(\)|middleware/i;
2234
- const unprotected = [];
2235
- for (const file of foundRoutes) {
2236
- if (!file.endsWith(".ts") && !file.endsWith(".tsx") && !file.endsWith(".js") && !file.endsWith(".jsx")) continue;
2237
- let content;
2238
- try {
2239
- content = await ctx.readFile(file);
2240
- } catch {
2241
- continue;
2242
- }
2243
- if (!authPatterns.test(content)) {
2244
- unprotected.push(file);
2245
- }
2246
- }
2247
- if (unprotected.length > 0) {
2248
- return {
2249
- result: "FAIL",
2250
- message: `${unprotected.length} admin/debug route${unprotected.length > 1 ? "s" : ""} without auth check`,
2251
- evidence: unprotected.map((f) => `${f} \u2014 no auth guard detected`)
2252
- };
2253
- }
2254
- return {
2255
- result: "PASS",
2256
- message: `All ${foundRoutes.length} admin/debug routes have auth checks`
2257
- };
2258
- }
2259
- };
2260
-
2261
- // src/checks/adm-09.ts
2262
- var ADM_09 = {
2263
- id: "ADM-09",
2264
- name: "Destructive ops require extra authorization",
2265
- module: "admin",
2266
- priority: "P2",
2267
- description: "Bulk delete, data wipe, billing override should require confirmation step or elevated permission.",
2268
- async run(ctx) {
2269
- const destructivePattern = /deleteAll|bulkDelete|wipeData|truncate|DROP\s+TABLE|removeAll|destroyAll|purge/i;
2270
- const confirmPattern = /confirm|double.*auth|re.?authenticate|verification.*step|two.*step/i;
2271
- const destructiveMatches = await ctx.grepFiles(destructivePattern);
2272
- const adminDestructive = destructiveMatches.filter((m) => /admin/i.test(m.file));
2273
- if (adminDestructive.length === 0) {
2274
- return { result: "N/A", message: "No bulk destructive admin operations detected" };
2275
- }
2276
- const confirmMatches = await ctx.grepFiles(confirmPattern);
2277
- if (confirmMatches.length > 0) {
2278
- return { result: "PASS", message: "Confirmation/extra auth detected for destructive operations" };
2279
- }
2280
- return {
2281
- result: "FAIL",
2282
- message: "Destructive admin operations without extra authorization",
2283
- evidence: adminDestructive.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2284
- };
2285
- }
2286
- };
2287
-
2288
- // src/checks/adm-10.ts
2289
- var ADM_10 = {
2290
- id: "ADM-10",
2291
- name: "Admin code separated from user app",
2292
- module: "admin",
2293
- priority: "P2",
2294
- description: "Admin code in separate directories prevents user app bugs from affecting admin, and vice versa.",
2295
- async run(ctx) {
2296
- const adminDir = ctx.files.some((f) => /^app\/admin\/|^domains\/admin\/|^src\/admin\//i.test(f));
2297
- const adminScattered = ctx.files.filter((f) => /admin/i.test(f) && !/(app|domains|src)\/admin\//i.test(f));
2298
- if (adminDir) {
2299
- return { result: "PASS", message: "Admin code in dedicated directory (app/admin/ or domains/admin/)" };
2300
- }
2301
- if (adminScattered.length > 0) {
2302
- return {
2303
- result: "FAIL",
2304
- message: "Admin code scattered across user-facing directories",
2305
- evidence: adminScattered.slice(0, 5).map((f) => f)
2306
- };
2307
- }
2308
- return { result: "N/A", message: "No admin code detected" };
2309
- }
2310
- };
2311
-
2312
- // src/checks/adm-11.ts
2313
- var ADM_11 = {
2314
- id: "ADM-11",
2315
- name: "No hardcoded admin credentials",
2316
- module: "admin",
2317
- priority: "P0",
2318
- description: "Hardcoded admin passwords, tokens, or emails in source code = anyone with repo access is admin",
2319
- async run(ctx) {
2320
- const patterns = [
2321
- /admin.*password\s*[:=]\s*["']/i,
2322
- /admin.*token\s*[:=]\s*["']/i,
2323
- /admin.*secret\s*[:=]\s*["']/i,
2324
- /password\s*[:=]\s*["'](?:admin|password|123456|secret)/i,
2325
- /DEFAULT_ADMIN_PASSWORD/i,
2326
- /ADMIN_PASSWORD\s*[:=]/i,
2327
- /seed.*admin.*password/i
2328
- ];
2329
- const allMatches = [];
2330
- for (const pattern of patterns) {
2331
- const matches = await ctx.grepFiles(pattern);
2332
- const codeMatches = matches.filter((m) => {
2333
- if (isEnvFile(m.file)) return false;
2334
- if (m.content.trimStart().startsWith("//")) return false;
2335
- if (m.content.trimStart().startsWith("#")) return false;
2336
- if (m.content.trimStart().startsWith("*")) return false;
2337
- if (m.file.includes("test") || m.file.includes("spec")) return false;
2338
- return true;
2339
- });
2340
- allMatches.push(...codeMatches);
2341
- }
2342
- const unique = [...new Map(allMatches.map((m) => [`${m.file}:${m.line}`, m])).values()];
2343
- if (unique.length > 0) {
2344
- return {
2345
- result: "FAIL",
2346
- message: `Hardcoded admin credentials found (${unique.length} location${unique.length > 1 ? "s" : ""})`,
2347
- evidence: unique.map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 80).replace(/["'][^"']{8,}["']/g, '"[REDACTED]"')}`)
2348
- };
2349
- }
2350
- return {
2351
- result: "PASS",
2352
- message: "No hardcoded admin credentials found in source code"
2353
- };
2354
- }
2355
- };
2356
-
2357
- // src/checks/adm-12.ts
2358
- var ADM_12 = {
2359
- id: "ADM-12",
2360
- name: "Admin error handling",
2361
- module: "admin",
2362
- priority: "P2",
2363
- description: "Admin API errors must not leak stack traces, SQL queries, or internal schema details.",
2364
- async run(ctx) {
2365
- const adminFiles = ctx.files.filter((f) => /admin.*route|api.*admin/i.test(f));
2366
- if (adminFiles.length === 0) {
2367
- return { result: "UNKNOWN", message: "No admin API routes found" };
2368
- }
2369
- const errorHandling = /try\s*\{|\.catch\s*\(|catch\s*\(/;
2370
- const leakPatterns = /stack|trace|SQL|query.*error|\.message/i;
2371
- let hasErrorHandling = false;
2372
- const leaks = [];
2373
- for (const file of adminFiles) {
2374
- let content;
2375
- try {
2376
- content = await ctx.readFile(file);
2377
- } catch {
2378
- continue;
2379
- }
2380
- if (errorHandling.test(content)) hasErrorHandling = true;
2381
- const lines = content.split("\n");
2382
- for (let i = 0; i < lines.length; i++) {
2383
- if (/error\.stack|error\.message|err\.stack|JSON\.stringify.*error/i.test(lines[i])) {
2384
- leaks.push({ file, line: i + 1, content: lines[i].trim() });
2385
- }
2386
- }
2387
- }
2388
- if (leaks.length > 0) {
2389
- return {
2390
- result: "FAIL",
2391
- message: "Admin API may leak error details (stack traces, error messages)",
2392
- evidence: leaks.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2393
- };
2394
- }
2395
- if (!hasErrorHandling) {
2396
- return {
2397
- result: "FAIL",
2398
- message: "No error handling in admin API routes",
2399
- evidence: adminFiles.slice(0, 3).map((f) => `${f} \u2014 no try/catch`)
2400
- };
2401
- }
2402
- return { result: "PASS", message: "Admin error handling present without obvious information leaks" };
2403
- }
2404
- };
2405
-
2406
- // src/checks/adm-13.ts
2407
- var ADM_13 = {
2408
- id: "ADM-13",
2409
- name: "MFA requirement for admin roles",
2410
- module: "admin",
2411
- priority: "P0",
2412
- description: "Admin auth must require MFA/AAL2. Compromised admin password without MFA = total takeover.",
2413
- async run(ctx) {
2414
- const mfaPatterns = /mfa|aal2|getAuthenticatorAssuranceLevel|totp|authenticator|multi.?factor|two.?factor/i;
2415
- const adminFiles = ctx.files.filter((f) => /admin/i.test(f));
2416
- if (adminFiles.length === 0) {
2417
- return { result: "UNKNOWN", message: "No admin files found" };
2418
- }
2419
- const mfaInAdmin = await ctx.grepFiles(mfaPatterns, adminFiles);
2420
- const mfaGlobal = await ctx.grepFiles(mfaPatterns);
2421
- if (mfaInAdmin.length > 0) {
2422
- return {
2423
- result: "PASS",
2424
- message: "MFA/AAL2 enforcement detected in admin code"
2425
- };
2426
- }
2427
- if (mfaGlobal.length > 0) {
2428
- return {
2429
- result: "PASS",
2430
- message: "MFA implementation detected in codebase (verify it covers admin routes)"
2431
- };
2432
- }
2433
- return {
2434
- result: "FAIL",
2435
- message: "No MFA/AAL2 enforcement found for admin roles",
2436
- evidence: ["No references to mfa, aal2, totp, or authenticator in admin code"]
2437
- };
2438
- }
2439
- };
2440
-
2441
- // src/checks/adm-14.ts
2442
- var ADM_14 = {
2443
- id: "ADM-14",
2444
- name: "Rate limiting on admin endpoints",
2445
- module: "admin",
2446
- priority: "P1",
2447
- description: "Admin API endpoints without rate limiting are vulnerable to brute force and DoS attacks.",
2448
- async run(ctx) {
2449
- const adminFiles = ctx.files.filter((f) => /api.*admin|admin.*route/i.test(f));
2450
- if (adminFiles.length === 0) {
2451
- return { result: "UNKNOWN", message: "No admin API endpoints found" };
2452
- }
2453
- const rateLimitPattern = /rateLimit|rate.limit|throttle|limiter|upstash|redis.*limit|too.many.requests|429/i;
2454
- const matches = await ctx.grepFiles(rateLimitPattern, adminFiles);
2455
- if (matches.length > 0) {
2456
- return { result: "PASS", message: "Rate limiting detected on admin endpoints" };
2457
- }
2458
- const globalMatches = await ctx.grepFiles(rateLimitPattern);
2459
- if (globalMatches.length > 0) {
2460
- return { result: "PASS", message: "Rate limiting detected in codebase (verify it covers admin endpoints)" };
2461
- }
2462
- return {
2463
- result: "FAIL",
2464
- message: "No rate limiting found on admin endpoints",
2465
- evidence: ["Missing: rate limiting middleware on admin API routes"]
2466
- };
2467
- }
2468
- };
2469
-
2470
- // src/checks/adm-16.ts
2471
- var ADM_16 = {
2472
- id: "ADM-16",
2473
- name: "Separate admin session timeouts",
2474
- module: "admin",
2475
- priority: "P1",
2476
- description: "Admin sessions should have shorter timeouts than user sessions to limit session hijacking window.",
2477
- async run(ctx) {
2478
- const sessionDiffPattern = /admin.*timeout|admin.*expir|admin.*maxAge|admin.*session.*duration|session.*admin.*short/i;
2479
- const matches = await ctx.grepFiles(sessionDiffPattern);
2480
- if (matches.length > 0) {
2481
- return { result: "PASS", message: "Differentiated admin session timeout detected" };
2482
- }
2483
- const adminRef = await ctx.grepFiles(/admin/i);
2484
- if (adminRef.length === 0) {
2485
- return { result: "UNKNOWN", message: "No admin functionality detected" };
2486
- }
2487
- return {
2488
- result: "FAIL",
2489
- message: "No separate admin session timeout \u2014 admin sessions use same duration as user sessions",
2490
- evidence: ["Missing: shorter session timeout for admin roles"]
2491
- };
2492
- }
2493
- };
2494
-
2495
- // src/checks/adm-18.ts
2496
- var ADM_18 = {
2497
- id: "ADM-18",
2498
- name: "Admin action notification/alerting",
2499
- module: "admin",
2500
- priority: "P2",
2501
- description: "Critical admin actions should trigger notifications (Slack, email) so compromised admin accounts are detected quickly.",
2502
- async run(ctx) {
2503
- const notifyPattern = /slack.*webhook|sendSlack|sendEmail.*admin|notify.*admin|alert.*admin|admin.*notify|admin.*alert|webhook.*notify/i;
2504
- const adminFiles = ctx.files.filter((f) => /admin/i.test(f));
2505
- if (adminFiles.length === 0) {
2506
- return { result: "UNKNOWN", message: "No admin functionality detected" };
2507
- }
2508
- const matches = await ctx.grepFiles(notifyPattern, adminFiles);
2509
- const globalMatches = await ctx.grepFiles(notifyPattern);
2510
- if (matches.length > 0 || globalMatches.length > 0) {
2511
- return { result: "PASS", message: "Admin action notifications/alerting detected" };
2512
- }
2513
- return {
2514
- result: "FAIL",
2515
- message: "No notification/alerting for admin actions \u2014 compromised admin goes undetected",
2516
- evidence: ["Missing: Slack webhook, email alert, or notification for critical admin actions"]
2517
- };
2518
- }
2519
- };
2520
-
2521
- // src/checks/adm-19.ts
2522
- var ADM_19 = {
2523
- id: "ADM-19",
2524
- name: "CSRF protection for admin mutations",
2525
- module: "admin",
2526
- priority: "P1",
2527
- description: "Admin mutation endpoints need CSRF protection. If victim is admin, CSRF compromises entire app.",
2528
- async run(ctx) {
2529
- const adminRoutes = ctx.files.filter((f) => /admin.*route\.(ts|js)$|api.*admin/i.test(f));
2530
- if (adminRoutes.length === 0) {
2531
- return { result: "UNKNOWN", message: "No admin route handlers found" };
2532
- }
2533
- const csrfPattern = /csrf|csurf|csrfToken|SameSite|x-csrf|anti.?forgery/i;
2534
- const csrfMatches = await ctx.grepFiles(csrfPattern);
2535
- if (csrfMatches.length > 0) {
2536
- return { result: "PASS", message: "CSRF protection detected" };
2537
- }
2538
- const serverActions = await ctx.grepFiles(/["']use server["']/i);
2539
- if (serverActions.length > 0) {
2540
- return { result: "PASS", message: "Server Actions used (built-in CSRF protection in Next.js 14+)" };
2541
- }
2542
- return {
2543
- result: "FAIL",
2544
- message: "No CSRF protection on admin mutation routes",
2545
- evidence: adminRoutes.slice(0, 3).map((f) => `${f} \u2014 no CSRF token or SameSite cookie`)
2546
- };
2547
- }
2548
- };
2549
-
2550
- // src/checks/adm-20.ts
2551
- var ADM_20 = {
2552
- id: "ADM-20",
2553
- name: "Data export controls",
2554
- module: "admin",
2555
- priority: "P1",
2556
- description: "Admin bulk export/download must have authorization and logging. Compromised admin can exfiltrate all user data.",
2557
- async run(ctx) {
2558
- const exportPattern = /export.*csv|downloadCSV|bulk.*fetch|dump.*data|export.*users|export.*data/i;
2559
- const matches = await ctx.grepFiles(exportPattern);
2560
- if (matches.length === 0) {
2561
- return { result: "N/A", message: "No bulk data export functionality detected" };
2562
- }
2563
- const authPattern = /requireAdmin|requireAuth|getUser|checkPermission/i;
2564
- const logPattern = /audit|log.*export|log.*download/i;
2565
- const exportFiles = [...new Set(matches.map((m) => m.file))];
2566
- const authMatches = await ctx.grepFiles(authPattern, exportFiles);
2567
- const logMatches = await ctx.grepFiles(logPattern, exportFiles);
2568
- if (authMatches.length > 0 && logMatches.length > 0) {
2569
- return { result: "PASS", message: "Data export has auth and logging" };
2570
- }
2571
- if (authMatches.length > 0) {
2572
- return { result: "PASS", message: "Data export has auth (consider adding export logging)" };
2573
- }
2574
- return {
2575
- result: "FAIL",
2576
- message: "Data export without auth/logging \u2014 PII exfiltration risk",
2577
- evidence: matches.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2578
- };
2579
- }
2580
- };
2581
-
2582
- // src/checks/adm-21.ts
2583
- var ADM_21 = {
2584
- id: "ADM-21",
2585
- name: "Admin provisioning control",
2586
- module: "admin",
2587
- priority: "P1",
2588
- description: "Admin role must not be self-assignable. AI tools often generate 'isFirstUser \u2192 admin' or open role selection on signup.",
2589
- async run(ctx) {
2590
- const selfAssignPatterns = /role\s*=\s*['"]admin['"]|isFirstUser|first.*user.*admin|role.*select|self.*assign.*admin/i;
2591
- const signupFiles = ctx.files.filter((f) => /signup|register|onboard/i.test(f));
2592
- const allMatches = await ctx.grepFiles(selfAssignPatterns);
2593
- const signupMatches = signupFiles.length > 0 ? await ctx.grepFiles(selfAssignPatterns, signupFiles) : [];
2594
- const dangerous = [...signupMatches, ...allMatches.filter((m) => /signup|register|onboard/i.test(m.file))];
2595
- const unique = [...new Map(dangerous.map((m) => [`${m.file}:${m.line}`, m])).values()];
2596
- if (unique.length > 0) {
2597
- return {
2598
- result: "FAIL",
2599
- message: "Self-assign admin pattern detected in signup/onboarding flow",
2600
- evidence: unique.slice(0, 3).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 120)}`)
2601
- };
2602
- }
2603
- return { result: "PASS", message: "No self-assign admin pattern detected in signup flow" };
2604
- }
2605
- };
2606
-
2607
- // src/checks/drift-auth-01.ts
2608
- var AUTH_DIRS = ["/auth/", "middleware.ts", "middleware.js", "/domains/auth/"];
2609
- var AUTH_PATTERNS = /\b(getUser|createServerClient|supabase\.auth|currentUser|clerkMiddleware|useUser|getServerSession|useSession|authOptions|getSession)\b/;
2610
- function isAuthDir(file) {
2611
- return AUTH_DIRS.some((d) => file.includes(d));
2612
- }
2613
- function isIgnored(file) {
2614
- return file.includes("node_modules") || file.includes(".next/") || file.includes("test") || file.includes("spec") || file.includes(".d.ts") || file.includes("types.ts") || file.includes("types.js");
2615
- }
2616
- var DRIFT_AUTH_01 = {
2617
- id: "DRIFT-AUTH-01",
2618
- name: "Auth logic spreading outside auth directory",
2619
- module: "auth",
2620
- priority: "P1",
2621
- category: "drift",
2622
- description: "Auth patterns (getUser, createServerClient, etc.) found outside auth directories indicate module boundary drift.",
2623
- async run(ctx) {
2624
- const matches = await ctx.grepFiles(AUTH_PATTERNS);
2625
- const outsideAuth = matches.filter((m) => {
2626
- if (isIgnored(m.file)) return false;
2627
- if (m.content.trimStart().startsWith("//")) return false;
2628
- if (m.content.trimStart().startsWith("*")) return false;
2629
- if (isAuthDir(m.file)) return false;
2630
- return true;
2631
- });
2632
- if (outsideAuth.length > 3) {
2633
- return {
2634
- result: "FAIL",
2635
- message: `Auth logic found in ${outsideAuth.length} locations outside auth directories \u2014 module boundary is leaking`,
2636
- evidence: outsideAuth.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
2637
- };
2638
- }
2639
- if (outsideAuth.length > 0) {
2640
- return {
2641
- result: "PASS",
2642
- message: `Minor auth references outside auth dir (${outsideAuth.length}) \u2014 within acceptable range`
2643
- };
2644
- }
2645
- return {
2646
- result: "PASS",
2647
- message: "Auth logic is contained within auth directories"
2648
- };
2649
- }
2650
- };
2651
-
2652
- // src/checks/drift-auth-02.ts
2653
- var MIDDLEWARE_AUTH_PATTERN = /\b(getUser|createServerClient|supabase\.auth|currentUser|clerkMiddleware|auth\(\)|getServerSession|NextAuth)\b/;
2654
- var DRIFT_AUTH_02 = {
2655
- id: "DRIFT-AUTH-02",
2656
- name: "Duplicate auth middleware files",
2657
- module: "auth",
2658
- priority: "P1",
2659
- category: "drift",
2660
- description: "Multiple middleware files with auth logic indicate fragmented auth \u2014 a common AI generation pattern.",
2661
- async run(ctx) {
2662
- const middlewareFiles = ctx.files.filter(
2663
- (f) => (f.includes("middleware.ts") || f.includes("middleware.js")) && !f.includes("node_modules") && !f.includes(".next/")
2664
- );
2665
- const authMiddleware = [];
2666
- for (const file of middlewareFiles) {
2667
- try {
2668
- const content = await ctx.readFile(file);
2669
- if (MIDDLEWARE_AUTH_PATTERN.test(content)) {
2670
- authMiddleware.push(file);
2671
- }
2672
- } catch {
2673
- continue;
2674
- }
2675
- }
2676
- if (authMiddleware.length > 1) {
2677
- return {
2678
- result: "FAIL",
2679
- message: `${authMiddleware.length} middleware files contain auth logic \u2014 auth should have a single entry point`,
2680
- evidence: authMiddleware.map((f) => f)
2681
- };
2682
- }
2683
- if (authMiddleware.length === 1) {
2684
- return {
2685
- result: "PASS",
2686
- message: `Single auth middleware found: ${authMiddleware[0]}`
2687
- };
2688
- }
2689
- return {
2690
- result: "UNKNOWN",
2691
- message: "No middleware with auth logic found"
2692
- };
2693
- }
2694
- };
2695
-
2696
- // src/checks/drift-auth-03.ts
2697
- var AUTH_CHECK_PATTERN = /\b(getUser|createServerClient|supabase\.auth|currentUser|auth\(\)|getServerSession|requireAuth|withAuth|isAuthenticated)\b/;
2698
- var DRIFT_AUTH_03 = {
2699
- id: "DRIFT-AUTH-03",
2700
- name: "New API routes without auth checks",
2701
- module: "auth",
2702
- priority: "P0",
2703
- category: "drift",
2704
- description: "API route handlers without auth verification \u2014 any visitor can call unprotected endpoints.",
2705
- async run(ctx) {
2706
- const apiRoutes = ctx.files.filter(
2707
- (f) => f.includes("/api/") && (f.endsWith("route.ts") || f.endsWith("route.js")) && !f.includes("node_modules") && !f.includes(".next/") && !f.includes("/api/auth/") && !f.includes("/api/webhook") && !f.includes("/api/health") && !f.includes("/api/public")
2708
- );
2709
- if (apiRoutes.length === 0) {
2710
- return {
2711
- result: "UNKNOWN",
2712
- message: "No API route files found"
2713
- };
2714
- }
2715
- const unprotected = [];
2716
- for (const file of apiRoutes) {
2717
- try {
2718
- const content = await ctx.readFile(file);
2719
- if (!AUTH_CHECK_PATTERN.test(content)) {
2720
- unprotected.push(file);
2721
- }
2722
- } catch {
2723
- continue;
2724
- }
2725
- }
2726
- if (unprotected.length > 0) {
2727
- return {
2728
- result: "FAIL",
2729
- message: `${unprotected.length} of ${apiRoutes.length} API routes have no auth verification`,
2730
- evidence: unprotected.slice(0, 5).map((f) => f)
2731
- };
2732
- }
2733
- return {
2734
- result: "PASS",
2735
- message: `All ${apiRoutes.length} API routes have auth checks`
2736
- };
2737
- }
2738
- };
2739
-
2740
- // src/checks/drift-auth-04.ts
2741
- var CLIENT_ROLE_PATTERN = /\b(isAdmin|role\s*===?\s*['"`]admin['"`]|user\.role|userRole|hasRole)\b/;
2742
- var SERVER_ROLE_PATTERN = /\b(getUser|createServerClient|supabase\.auth|currentUser|getServerSession|requireAdmin|checkAdmin)\b/;
2743
- var DRIFT_AUTH_04 = {
2744
- id: "DRIFT-AUTH-04",
2745
- name: "Client-side auth bypass \u2014 role check without server verification",
2746
- module: "auth",
2747
- priority: "P1",
2748
- category: "drift",
2749
- description: "Role checks in JSX/client code without server-side verification \u2014 UI-only gates can be bypassed.",
2750
- async run(ctx) {
2751
- const clientRoleMatches = await ctx.grepFiles(CLIENT_ROLE_PATTERN);
2752
- const clientOnly = clientRoleMatches.filter((m) => {
2753
- if (m.content.trimStart().startsWith("//")) return false;
2754
- if (m.file.includes("node_modules")) return false;
2755
- if (m.file.includes(".next/")) return false;
2756
- return isClientFile(m.file);
2757
- });
2758
- if (clientOnly.length === 0) {
2759
- return {
2760
- result: "PASS",
2761
- message: "No client-side role checks found"
2762
- };
2763
- }
2764
- const serverRoleMatches = await ctx.grepFiles(SERVER_ROLE_PATTERN, ["**/api/**", "**/server/**", "**/actions.*"]);
2765
- const hasServerVerification = serverRoleMatches.some((m) => !m.file.includes("node_modules"));
2766
- if (clientOnly.length > 0 && !hasServerVerification) {
2767
- return {
2768
- result: "FAIL",
2769
- message: `${clientOnly.length} client-side role checks found but no server-side role verification \u2014 admin UI can be bypassed`,
2770
- evidence: clientOnly.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
2771
- };
2772
- }
2773
- return {
2774
- result: "PASS",
2775
- message: `Client-side role checks (${clientOnly.length}) backed by server-side verification`
2776
- };
2777
- }
2778
- };
2779
-
2780
- // src/checks/drift-auth-05.ts
2781
- var LOCALSTORAGE_AUTH_PATTERN = /localStorage\.(getItem|setItem|removeItem)\s*\(\s*['"`](token|jwt|session|auth|access_token|refresh_token|user)/;
2782
- var DRIFT_AUTH_05 = {
2783
- id: "DRIFT-AUTH-05",
2784
- name: "Auth tokens stored in localStorage",
2785
- module: "auth",
2786
- priority: "P1",
2787
- category: "drift",
2788
- description: "localStorage for auth tokens is vulnerable to XSS \u2014 use httpOnly cookies or secure session management.",
2789
- async run(ctx) {
2790
- const matches = await ctx.grepFiles(LOCALSTORAGE_AUTH_PATTERN);
2791
- const relevant = matches.filter((m) => {
2792
- if (m.content.trimStart().startsWith("//")) return false;
2793
- if (m.file.includes("node_modules")) return false;
2794
- if (m.file.includes(".next/")) return false;
2795
- return true;
2796
- });
2797
- if (relevant.length > 0) {
2798
- return {
2799
- result: "FAIL",
2800
- message: `Auth tokens stored in localStorage (${relevant.length} location${relevant.length > 1 ? "s" : ""}) \u2014 vulnerable to XSS`,
2801
- evidence: relevant.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
2802
- };
2803
- }
2804
- return {
2805
- result: "PASS",
2806
- message: "No localStorage auth token usage found"
2807
- };
2808
- }
2809
- };
2810
-
2811
- // src/checks/drift-bil-01.ts
2812
- var BILLING_DIRS = ["/billing/", "/stripe/", "/webhook/", "/payment/", "/subscription/", "/domains/billing/"];
2813
- var BILLING_PATTERNS = /\b(stripe|Stripe|createCheckout|price_id|priceId|subscription_id|subscriptionId|checkout\.sessions|customer\.subscriptions)\b/;
2814
- function isBillingDir(file) {
2815
- return BILLING_DIRS.some((d) => file.includes(d));
2816
- }
2817
- function isIgnored2(file) {
2818
- return file.includes("node_modules") || file.includes(".next/") || file.includes("test") || file.includes("spec") || file.includes(".d.ts") || file.includes("types.ts") || file.includes("types.js") || file.includes("package.json") || file.includes("package-lock") || file.includes("pnpm-lock") || file.includes(".env");
2819
- }
2820
- var DRIFT_BIL_01 = {
2821
- id: "DRIFT-BIL-01",
2822
- name: "Billing logic spreading outside billing directory",
2823
- module: "billing",
2824
- priority: "P1",
2825
- category: "drift",
2826
- description: "Stripe/payment imports outside billing directories indicate module boundary drift.",
2827
- async run(ctx) {
2828
- const matches = await ctx.grepFiles(BILLING_PATTERNS);
2829
- const outsideBilling = matches.filter((m) => {
2830
- if (isIgnored2(m.file)) return false;
2831
- if (m.content.trimStart().startsWith("//")) return false;
2832
- if (m.content.trimStart().startsWith("*")) return false;
2833
- if (m.content.trimStart().startsWith("#")) return false;
2834
- if (isBillingDir(m.file)) return false;
2835
- return true;
2836
- });
2837
- if (outsideBilling.length > 3) {
2838
- return {
2839
- result: "FAIL",
2840
- message: `Billing/Stripe logic found in ${outsideBilling.length} locations outside billing directories \u2014 module boundary is leaking`,
2841
- evidence: outsideBilling.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
2842
- };
2843
- }
2844
- if (outsideBilling.length > 0) {
2845
- return {
2846
- result: "PASS",
2847
- message: `Minor billing references outside billing dir (${outsideBilling.length}) \u2014 within acceptable range`
2848
- };
2849
- }
2850
- return {
2851
- result: "PASS",
2852
- message: "Billing logic is contained within billing directories"
2853
- };
2854
- }
2855
- };
2856
-
2857
- // src/checks/drift-bil-02.ts
2858
- var WEBHOOK_ROUTE_PATTERN = /\b(webhook|stripe.*event|event\.type|constructEvent)\b/i;
2859
- var DRIFT_BIL_02 = {
2860
- id: "DRIFT-BIL-02",
2861
- name: "Duplicate webhook handler endpoints",
2862
- module: "billing",
2863
- priority: "P0",
2864
- category: "drift",
2865
- description: "Multiple webhook endpoints create race conditions and duplicate event processing.",
2866
- async run(ctx) {
2867
- const webhookFiles = ctx.files.filter(
2868
- (f) => !f.includes("node_modules") && !f.includes(".next/") && !f.includes("test") && (f.includes("webhook") || f.includes("stripe")) && (f.endsWith(".ts") || f.endsWith(".js") || f.endsWith(".py"))
2869
- );
2870
- const confirmedWebhookHandlers = [];
2871
- for (const file of webhookFiles) {
2872
- try {
2873
- const content = await ctx.readFile(file);
2874
- if (WEBHOOK_ROUTE_PATTERN.test(content)) {
2875
- confirmedWebhookHandlers.push(file);
2876
- }
2877
- } catch {
2878
- continue;
2879
- }
2880
- }
2881
- if (confirmedWebhookHandlers.length > 1) {
2882
- return {
2883
- result: "FAIL",
2884
- message: `${confirmedWebhookHandlers.length} webhook handler files found \u2014 should be exactly 1`,
2885
- evidence: confirmedWebhookHandlers
2886
- };
2887
- }
2888
- if (confirmedWebhookHandlers.length === 1) {
2889
- return {
2890
- result: "PASS",
2891
- message: `Single webhook handler: ${confirmedWebhookHandlers[0]}`
2892
- };
2893
- }
2894
- return {
2895
- result: "UNKNOWN",
2896
- message: "No webhook handler files found \u2014 Stripe webhooks may not be configured"
2897
- };
2898
- }
2899
- };
2900
-
2901
- // src/checks/drift-bil-03.ts
2902
- var CLIENT_SUB_PATTERN = /\b(subscription|plan|isPro|isFreeTier|isPaid|currentPlan|userPlan|hasSubscription)\b/;
2903
- var DRIFT_BIL_03 = {
2904
- id: "DRIFT-BIL-03",
2905
- name: "Client-side subscription check without server verification",
2906
- module: "billing",
2907
- priority: "P1",
2908
- category: "drift",
2909
- description: "Plan/subscription checks in client code without server-side verification \u2014 users can bypass paywalls.",
2910
- async run(ctx) {
2911
- const clientMatches = await ctx.grepFiles(CLIENT_SUB_PATTERN);
2912
- const clientSubs = clientMatches.filter((m) => {
2913
- if (m.content.trimStart().startsWith("//")) return false;
2914
- if (m.file.includes("node_modules")) return false;
2915
- if (m.file.includes(".next/")) return false;
2916
- if (m.file.includes("test")) return false;
2917
- if (m.file.includes("types")) return false;
2918
- return isClientFile(m.file) && !m.file.includes("/api/");
2919
- });
2920
- if (clientSubs.length === 0) {
2921
- return {
2922
- result: "PASS",
2923
- message: "No client-side subscription checks found"
2924
- };
2925
- }
2926
- const serverSubMatches = await ctx.grepFiles(
2927
- /\b(subscription|check.*plan|check.*limit|entitlement|getSubscription)\b/i,
2928
- ["**/api/**", "**/server/**", "**/actions.*", "**/billing/**"]
2929
- );
2930
- const hasServerCheck = serverSubMatches.some((m) => !m.file.includes("node_modules"));
2931
- if (clientSubs.length > 0 && !hasServerCheck) {
2932
- return {
2933
- result: "FAIL",
2934
- message: `${clientSubs.length} client-side subscription checks but no server-side plan verification \u2014 paywall can be bypassed`,
2935
- evidence: clientSubs.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
2936
- };
2937
- }
2938
- return {
2939
- result: "PASS",
2940
- message: `Client-side subscription checks (${clientSubs.length}) backed by server-side verification`
2941
- };
2942
- }
2943
- };
2944
-
2945
- // src/checks/drift-bil-04.ts
2946
- var HARDCODED_PRICE_PATTERN = /(?:price|amount|cost|fee)\s*[:=]\s*(\d{2,}(?:\.\d{2})?|['"`]\$?\d+)/i;
2947
- var HARDCODED_CENTS_PATTERN = /(?:amount|price|unit_amount)\s*[:=]\s*\d{3,}/;
2948
- var DRIFT_BIL_04 = {
2949
- id: "DRIFT-BIL-04",
2950
- name: "Hardcoded prices instead of plan configuration",
2951
- module: "billing",
2952
- priority: "P2",
2953
- category: "drift",
2954
- description: "Hardcoded dollar amounts or price values should come from plan configuration or Stripe metadata.",
2955
- async run(ctx) {
2956
- const priceMatches = await ctx.grepFiles(HARDCODED_PRICE_PATTERN);
2957
- const centsMatches = await ctx.grepFiles(HARDCODED_CENTS_PATTERN);
2958
- const allMatches = [...priceMatches, ...centsMatches];
2959
- const relevant = allMatches.filter((m) => {
2960
- if (m.content.trimStart().startsWith("//")) return false;
2961
- if (m.content.trimStart().startsWith("*")) return false;
2962
- if (m.file.includes("node_modules")) return false;
2963
- if (m.file.includes(".next/")) return false;
2964
- if (m.file.includes("test")) return false;
2965
- if (m.file.includes("package.json")) return false;
2966
- if (m.file.includes(".lock")) return false;
2967
- if (m.file.includes(".css")) return false;
2968
- if (m.file.includes("migration")) return false;
2969
- return true;
2970
- });
2971
- if (relevant.length > 3) {
2972
- return {
2973
- result: "FAIL",
2974
- message: `${relevant.length} hardcoded price/amount values found \u2014 prices should come from plan config or Stripe`,
2975
- evidence: relevant.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
2976
- };
2977
- }
2978
- if (relevant.length > 0) {
2979
- return {
2980
- result: "PASS",
2981
- message: `Minor hardcoded amounts found (${relevant.length}) \u2014 review recommended`
2982
- };
2983
- }
2984
- return {
2985
- result: "PASS",
2986
- message: "No hardcoded price values detected"
2987
- };
2988
- }
2989
- };
2990
-
2991
- // src/checks/drift-bil-05.ts
2992
- var FEATURE_GATE_PATTERN = /\b(canAccess|hasFeature|featureEnabled|isEnabled|featureFlag|canUse)\b/;
2993
- var BILLING_CHECK_PATTERN = /\b(checkLimit|checkSubscription|checkPlan|entitlement|billingGuard|requirePlan|checkQuota)\b/;
2994
- var DRIFT_BIL_05 = {
2995
- id: "DRIFT-BIL-05",
2996
- name: "Feature access without billing verification",
2997
- module: "billing",
2998
- priority: "P2",
2999
- category: "drift",
3000
- description: "New features with access gates but no billing/entitlement check \u2014 free users can access paid features.",
3001
- async run(ctx) {
3002
- const featureGates = await ctx.grepFiles(FEATURE_GATE_PATTERN);
3003
- const relevant = featureGates.filter((m) => {
3004
- if (m.content.trimStart().startsWith("//")) return false;
3005
- if (m.file.includes("node_modules")) return false;
3006
- if (m.file.includes(".next/")) return false;
3007
- if (m.file.includes("test")) return false;
3008
- return true;
3009
- });
3010
- if (relevant.length === 0) {
3011
- return {
3012
- result: "UNKNOWN",
3013
- message: "No feature gate patterns found \u2014 may not use feature flags"
3014
- };
3015
- }
3016
- const billingChecks = await ctx.grepFiles(BILLING_CHECK_PATTERN);
3017
- const hasBillingVerification = billingChecks.some(
3018
- (m) => !m.file.includes("node_modules") && !m.file.includes(".next/")
3019
- );
3020
- if (relevant.length > 0 && !hasBillingVerification) {
3021
- return {
3022
- result: "FAIL",
3023
- message: `${relevant.length} feature gate(s) found but no billing/entitlement verification \u2014 paid features may be accessible for free`,
3024
- evidence: relevant.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
3025
- };
3026
- }
3027
- return {
3028
- result: "PASS",
3029
- message: `Feature gates (${relevant.length}) backed by billing verification`
3030
- };
3031
- }
3032
- };
3033
-
3034
- // src/checks/drift-adm-01.ts
3035
- var ADMIN_DIRS = ["/admin/", "/domains/admin/"];
3036
- var ADMIN_PATTERNS = /\b(isAdmin|requireAdmin|checkAdmin|adminGuard|role\s*===?\s*['"`]admin['"`]|user_role|superAdmin|isSuperAdmin)\b/;
3037
- function isAdminDir(file) {
3038
- return ADMIN_DIRS.some((d) => file.includes(d));
3039
- }
3040
- function isIgnored3(file) {
3041
- return file.includes("node_modules") || file.includes(".next/") || file.includes("test") || file.includes("spec") || file.includes(".d.ts") || file.includes("types.ts") || file.includes("types.js") || file.includes("migration") || file.includes(".sql");
3042
- }
3043
- var DRIFT_ADM_01 = {
3044
- id: "DRIFT-ADM-01",
3045
- name: "Admin logic spreading outside admin directory",
3046
- module: "admin",
3047
- priority: "P1",
3048
- category: "drift",
3049
- description: "Admin permission patterns found outside admin directories indicate module boundary drift.",
3050
- async run(ctx) {
3051
- const matches = await ctx.grepFiles(ADMIN_PATTERNS);
3052
- const outsideAdmin = matches.filter((m) => {
3053
- if (isIgnored3(m.file)) return false;
3054
- if (m.content.trimStart().startsWith("//")) return false;
3055
- if (m.content.trimStart().startsWith("*")) return false;
3056
- if (isAdminDir(m.file)) return false;
3057
- return true;
3058
- });
3059
- if (outsideAdmin.length > 3) {
3060
- return {
3061
- result: "FAIL",
3062
- message: `Admin logic found in ${outsideAdmin.length} locations outside admin directories \u2014 module boundary is leaking`,
3063
- evidence: outsideAdmin.slice(0, 5).map((m) => `${m.file}:${m.line} \u2192 ${m.content.substring(0, 100)}`)
3064
- };
3065
- }
3066
- if (outsideAdmin.length > 0) {
3067
- return {
3068
- result: "PASS",
3069
- message: `Minor admin references outside admin dir (${outsideAdmin.length}) \u2014 within acceptable range`
3070
- };
3071
- }
3072
- return {
3073
- result: "PASS",
3074
- message: "Admin logic is contained within admin directories"
3075
- };
3076
- }
3077
- };
3078
-
3079
- // src/checks/drift-adm-02.ts
3080
- var ADMIN_GUARD_PATTERN = /\b(requireAdmin|checkAdmin|adminGuard|isAdmin|role\s*===?\s*['"`]admin['"`]|isSuperAdmin)\b/;
3081
- var DRIFT_ADM_02 = {
3082
- id: "DRIFT-ADM-02",
3083
- name: "New admin routes without permission guards",
3084
- module: "admin",
3085
- priority: "P0",
3086
- category: "drift",
3087
- description: "Admin route handlers without permission checks \u2014 any authenticated user can access admin features.",
3088
- async run(ctx) {
3089
- const adminRoutes = ctx.files.filter(
3090
- (f) => f.includes("/admin/") && !f.includes("node_modules") && !f.includes(".next/") && !f.includes("test") && (f.endsWith("route.ts") || f.endsWith("route.js") || f.endsWith("page.tsx") || f.endsWith("page.jsx") || f.endsWith("page.ts"))
3091
- );
3092
- if (adminRoutes.length === 0) {
3093
- return {
3094
- result: "UNKNOWN",
3095
- message: "No admin route files found"
3096
- };
3097
- }
3098
- const unguarded = [];
3099
- for (const file of adminRoutes) {
3100
- try {
3101
- const content = await ctx.readFile(file);
3102
- if (!ADMIN_GUARD_PATTERN.test(content)) {
3103
- unguarded.push(file);
3104
- }
3105
- } catch {
3106
- continue;
3107
- }
3108
- }
3109
- if (unguarded.length > 0) {
3110
- return {
3111
- result: "FAIL",
3112
- message: `${unguarded.length} of ${adminRoutes.length} admin routes have no permission guard`,
3113
- evidence: unguarded.slice(0, 5).map((f) => f)
3114
- };
3115
- }
3116
- return {
3117
- result: "PASS",
3118
- message: `All ${adminRoutes.length} admin routes have permission guards`
3119
- };
3120
- }
3121
- };
3122
-
3123
- // src/checks/drift-adm-03.ts
3124
- var AUDIT_LOG_PATTERN = /\b(logAuditEvent|auditLog|audit_log|createAuditEntry|logAdminAction|insertAuditLog)\b/;
3125
- var ADMIN_MUTATION_PATTERN = /\b(DELETE|PUT|PATCH|POST)\b/;
3126
- var DRIFT_ADM_03 = {
3127
- id: "DRIFT-ADM-03",
3128
- name: "Admin mutations without audit logging",
3129
- module: "admin",
3130
- priority: "P1",
3131
- category: "drift",
3132
- description: "Admin mutation endpoints (POST/PUT/DELETE) without audit log calls \u2014 no trail of admin actions.",
3133
- async run(ctx) {
3134
- const adminApiRoutes = ctx.files.filter(
3135
- (f) => f.includes("/admin/") && f.includes("/api/") && !f.includes("node_modules") && !f.includes(".next/") && !f.includes("test") && (f.endsWith("route.ts") || f.endsWith("route.js"))
3136
- );
3137
- if (adminApiRoutes.length === 0) {
3138
- const adminActions = ctx.files.filter(
3139
- (f) => f.includes("/admin/") && !f.includes("node_modules") && !f.includes(".next/") && (f.includes("action") || f.includes("service") || f.includes("handler")) && (f.endsWith(".ts") || f.endsWith(".js") || f.endsWith(".py"))
3140
- );
3141
- if (adminActions.length === 0) {
3142
- return {
3143
- result: "UNKNOWN",
3144
- message: "No admin API routes or action files found"
3145
- };
3146
- }
3147
- const unlogged2 = [];
3148
- for (const file of adminActions) {
3149
- try {
3150
- const content = await ctx.readFile(file);
3151
- if (ADMIN_MUTATION_PATTERN.test(content) && !AUDIT_LOG_PATTERN.test(content)) {
3152
- unlogged2.push(file);
3153
- }
3154
- } catch {
3155
- continue;
3156
- }
3157
- }
3158
- if (unlogged2.length > 0) {
3159
- return {
3160
- result: "FAIL",
3161
- message: `${unlogged2.length} admin action file(s) with mutations but no audit logging`,
3162
- evidence: unlogged2.slice(0, 5)
3163
- };
3164
- }
3165
- return {
3166
- result: "PASS",
3167
- message: `Admin action files have audit logging`
3168
- };
3169
- }
3170
- const unlogged = [];
3171
- for (const file of adminApiRoutes) {
3172
- try {
3173
- const content = await ctx.readFile(file);
3174
- if (ADMIN_MUTATION_PATTERN.test(content) && !AUDIT_LOG_PATTERN.test(content)) {
3175
- unlogged.push(file);
3176
- }
3177
- } catch {
3178
- continue;
3179
- }
3180
- }
3181
- if (unlogged.length > 0) {
3182
- return {
3183
- result: "FAIL",
3184
- message: `${unlogged.length} of ${adminApiRoutes.length} admin API routes have mutations without audit logging`,
3185
- evidence: unlogged.slice(0, 5)
3186
- };
3187
- }
3188
- return {
3189
- result: "PASS",
3190
- message: `All ${adminApiRoutes.length} admin API routes have audit logging`
3191
- };
3192
- }
3193
- };
3194
-
3195
- // src/checks/registry.ts
3196
- var ALL_CHECKS = [
3197
- // Auth checks (28)
3198
- AUTH_01,
3199
- AUTH_02,
3200
- AUTH_03,
3201
- AUTH_04,
3202
- AUTH_05,
3203
- AUTH_06,
3204
- AUTH_07,
3205
- AUTH_08,
3206
- AUTH_09,
3207
- AUTH_10,
3208
- AUTH_11,
3209
- AUTH_12,
3210
- AUTH_13,
3211
- AUTH_14,
3212
- AUTH_15,
3213
- AUTH_16,
3214
- AUTH_17,
3215
- AUTH_18,
3216
- AUTH_19,
3217
- AUTH_20,
3218
- AUTH_21,
3219
- AUTH_22,
3220
- AUTH_23,
3221
- AUTH_24,
3222
- AUTH_25,
3223
- AUTH_26,
3224
- AUTH_27,
3225
- AUTH_28,
3226
- // Billing checks (24)
3227
- BIL_01,
3228
- BIL_02,
3229
- BIL_03,
3230
- BIL_04,
3231
- BIL_05,
3232
- BIL_06,
3233
- BIL_07,
3234
- BIL_08,
3235
- BIL_09,
3236
- BIL_10,
3237
- BIL_11,
3238
- BIL_12,
3239
- BIL_13,
3240
- BIL_14,
3241
- BIL_15,
3242
- BIL_16,
3243
- BIL_17,
3244
- BIL_18,
3245
- BIL_19,
3246
- BIL_20,
3247
- BIL_21,
3248
- BIL_22,
3249
- BIL_23,
3250
- BIL_24,
3251
- // Admin checks (19 unique + 2 aliases = 21 total)
3252
- ADM_01,
3253
- ADM_02,
3254
- ADM_03,
3255
- ADM_04,
3256
- ADM_05,
3257
- ADM_06,
3258
- ADM_07,
3259
- ADM_08,
3260
- ADM_09,
3261
- ADM_10,
3262
- ADM_11,
3263
- ADM_12,
3264
- ADM_13,
3265
- ADM_14,
3266
- ADM_16,
3267
- ADM_18,
3268
- ADM_19,
3269
- ADM_20,
3270
- ADM_21,
3271
- // Drift checks (13) — module boundary detection
3272
- DRIFT_AUTH_01,
3273
- DRIFT_AUTH_02,
3274
- DRIFT_AUTH_03,
3275
- DRIFT_AUTH_04,
3276
- DRIFT_AUTH_05,
3277
- DRIFT_BIL_01,
3278
- DRIFT_BIL_02,
3279
- DRIFT_BIL_03,
3280
- DRIFT_BIL_04,
3281
- DRIFT_BIL_05,
3282
- DRIFT_ADM_01,
3283
- DRIFT_ADM_02,
3284
- DRIFT_ADM_03
3285
- ];
3286
-
3287
- // src/lib/files.ts
3288
- import fs from "fs/promises";
3289
- import path from "path";
3290
- var IGNORE_DIRS = /* @__PURE__ */ new Set([
3291
- "node_modules",
3292
- ".git",
3293
- ".next",
3294
- "dist",
3295
- "build",
3296
- ".vercel",
3297
- ".turbo",
3298
- "coverage",
3299
- ".nyc_output",
3300
- "__pycache__",
3301
- ".svelte-kit"
3302
- ]);
3303
- var CODE_EXTENSIONS = /* @__PURE__ */ new Set([
3304
- ".ts",
3305
- ".tsx",
3306
- ".js",
3307
- ".jsx",
3308
- ".mjs",
3309
- ".cjs",
3310
- ".sql",
3311
- ".env",
3312
- ".env.local",
3313
- ".env.example"
3314
- ]);
3315
- async function listFiles(rootDir) {
3316
- const results = [];
3317
- await walk(rootDir, rootDir, results);
3318
- return results;
3319
- }
3320
- async function walk(dir, rootDir, results) {
3321
- let entries;
3322
- try {
3323
- entries = await fs.readdir(dir, { withFileTypes: true });
3324
- } catch {
3325
- return;
3326
- }
3327
- for (const entry of entries) {
3328
- if (entry.name.startsWith(".") && entry.name !== ".env" && entry.name !== ".env.local" && entry.name !== ".env.example") {
3329
- if (entry.isDirectory()) continue;
3330
- }
3331
- if (entry.isDirectory()) {
3332
- if (IGNORE_DIRS.has(entry.name)) continue;
3333
- await walk(path.join(dir, entry.name), rootDir, results);
3334
- } else {
3335
- const ext = path.extname(entry.name);
3336
- const basename = entry.name;
3337
- if (CODE_EXTENSIONS.has(ext) || CODE_EXTENSIONS.has(basename)) {
3338
- results.push(path.relative(rootDir, path.join(dir, entry.name)));
3339
- }
3340
- }
3341
- }
3342
- }
3343
- async function readFile(rootDir, filePath) {
3344
- const fullPath = path.resolve(rootDir, filePath);
3345
- return fs.readFile(fullPath, "utf-8");
3346
- }
3347
-
3348
- // src/scanner.ts
3349
- async function runScan(targetDir) {
3350
- const rootDir = path2.resolve(targetDir);
3351
- const files = await listFiles(rootDir);
3352
- const ctx = {
3353
- rootDir,
3354
- files,
3355
- readFile: (filePath) => readFile(rootDir, filePath),
3356
- grepFiles: (pattern, globs) => grepFiles({ rootDir, files, readFile: (f) => readFile(rootDir, f) }, pattern, globs)
3357
- };
3358
- const results = [];
3359
- for (const check of ALL_CHECKS) {
3360
- try {
3361
- const output = await check.run(ctx);
3362
- results.push({
3363
- id: check.id,
3364
- name: check.name,
3365
- module: check.module,
3366
- priority: check.priority,
3367
- category: check.category,
3368
- result: output.result,
3369
- message: output.message,
3370
- evidence: output.evidence
3371
- });
3372
- } catch (err) {
3373
- results.push({
3374
- id: check.id,
3375
- name: check.name,
3376
- module: check.module,
3377
- priority: check.priority,
3378
- category: check.category,
3379
- result: "UNKNOWN",
3380
- message: `Check failed with error: ${err instanceof Error ? err.message : String(err)}`
3381
- });
3382
- }
3383
- }
3384
- const projectName = path2.basename(rootDir);
3385
- const safetyResults = results.filter((r) => r.category !== "drift");
3386
- const driftResults = results.filter((r) => r.category === "drift");
3387
- return {
3388
- project: projectName,
3389
- timestamp: (/* @__PURE__ */ new Date()).toISOString(),
3390
- checks: results,
3391
- summary: {
3392
- billing: computeModuleScore(safetyResults, "billing"),
3393
- auth: computeModuleScore(safetyResults, "auth"),
3394
- admin: computeModuleScore(safetyResults, "admin")
3395
- },
3396
- drift: {
3397
- billing: computeModuleScore(driftResults, "billing"),
3398
- auth: computeModuleScore(driftResults, "auth"),
3399
- admin: computeModuleScore(driftResults, "admin")
3400
- }
3401
- };
3402
- }
3403
- function computeModuleScore(results, module) {
3404
- const moduleChecks = results.filter((r) => r.module === module);
3405
- const applicable = moduleChecks.filter((r) => r.result !== "N/A");
3406
- const passed = applicable.filter((r) => r.result === "PASS").length;
3407
- const failed = applicable.filter((r) => r.result === "FAIL").length;
3408
- const total = applicable.length;
3409
- const score = total > 0 ? Math.round(passed / total * 10) : 0;
3410
- return { passed, failed, total, score };
3411
- }
3412
-
3413
- // src/lib/output.ts
3414
- var RESET = "\x1B[0m";
3415
- var BOLD = "\x1B[1m";
3416
- var DIM = "\x1B[2m";
3417
- var RED = "\x1B[31m";
3418
- var GREEN = "\x1B[32m";
3419
- var YELLOW = "\x1B[33m";
3420
- var CYAN = "\x1B[36m";
3421
- var WHITE = "\x1B[37m";
3422
- var BG_RED = "\x1B[41m";
3423
- var BG_YELLOW = "\x1B[43m";
3424
- function resultIcon(result) {
3425
- switch (result) {
3426
- case "PASS":
3427
- return `${GREEN}\u2713${RESET}`;
3428
- case "FAIL":
3429
- return `${RED}\u2717${RESET}`;
3430
- case "UNKNOWN":
3431
- return `${YELLOW}?${RESET}`;
3432
- case "N/A":
3433
- return `${DIM}\u2014${RESET}`;
3434
- default:
3435
- return " ";
3436
- }
3437
- }
3438
- function priorityTag(priority) {
3439
- switch (priority) {
3440
- case "P0":
3441
- return `${BG_RED}${WHITE}${BOLD} P0 ${RESET}`;
3442
- case "P1":
3443
- return `${BG_YELLOW}${WHITE}${BOLD} P1 ${RESET}`;
3444
- case "P2":
3445
- return `${DIM} P2 ${RESET}`;
3446
- default:
3447
- return ` ${priority} `;
3448
- }
3449
- }
3450
- function scoreBar(score) {
3451
- const filled = score.score;
3452
- const empty = 10 - filled;
3453
- const color = filled >= 7 ? GREEN : filled >= 4 ? YELLOW : RED;
3454
- return `${color}${"\u2588".repeat(filled)}${DIM}${"\u2591".repeat(empty)}${RESET} ${filled}/10`;
3455
- }
3456
- function scoreLabel(score) {
3457
- if (score >= 8) return `${GREEN}GOOD${RESET}`;
3458
- if (score >= 5) return `${YELLOW}HIGH${RESET}`;
3459
- return `${RED}CRITICAL${RESET}`;
3460
- }
3461
- function printReport(report) {
3462
- const w = process.stdout.write.bind(process.stdout);
3463
- const log = (s) => w(s + "\n");
3464
- log("");
3465
- log(`${BOLD}\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557${RESET}`);
3466
- log(`${BOLD}\u2551 VIBECODIQ \u2014 PRODUCTION SAFETY SCAN \u2551${RESET}`);
3467
- log(`${BOLD}\u2551 Project: ${report.project.padEnd(37)}\u2551${RESET}`);
3468
- log(`${BOLD}\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D${RESET}`);
3469
- log("");
3470
- const modules = [
3471
- { key: "billing", label: "Billing Safety", score: report.summary.billing },
3472
- { key: "auth", label: "Auth Safety", score: report.summary.auth },
3473
- { key: "admin", label: "Admin Safety", score: report.summary.admin }
3474
- ];
3475
- for (const mod of modules) {
3476
- log(` ${BOLD}${mod.label.padEnd(20)}${RESET} ${scoreBar(mod.score)} ${scoreLabel(mod.score.score)}`);
3477
- }
3478
- if (report.drift) {
3479
- log("");
3480
- log(`${DIM}${"\u2500".repeat(60)}${RESET}`);
3481
- log("");
3482
- log(`${BOLD}${CYAN} MODULE BOUNDARY (DRIFT DETECTION)${RESET}`);
3483
- log("");
3484
- const driftModules = [
3485
- { key: "auth", label: "Auth Boundary", score: report.drift.auth },
3486
- { key: "billing", label: "Billing Boundary", score: report.drift.billing },
3487
- { key: "admin", label: "Admin Boundary", score: report.drift.admin }
3488
- ];
3489
- for (const mod of driftModules) {
3490
- log(` ${BOLD}${mod.label.padEnd(20)}${RESET} ${scoreBar(mod.score)} ${scoreLabel(mod.score.score)}`);
3491
- }
3492
- }
3493
- log("");
3494
- log(`${DIM}${"\u2500".repeat(60)}${RESET}`);
3495
- const safetyChecks = report.checks.filter((c) => c.category !== "drift");
3496
- const driftChecks = report.checks.filter((c) => c.category === "drift");
3497
- const failedChecks = safetyChecks.filter((c) => c.result === "FAIL");
3498
- const passedChecks = safetyChecks.filter((c) => c.result === "PASS");
3499
- const unknownChecks = safetyChecks.filter((c) => c.result === "UNKNOWN");
3500
- if (failedChecks.length > 0) {
3501
- log("");
3502
- log(`${RED}${BOLD} FAILED CHECKS (${failedChecks.length})${RESET}`);
3503
- log("");
3504
- for (const check of failedChecks) {
3505
- log(` ${resultIcon(check.result)} ${priorityTag(check.priority)} ${BOLD}${check.id}${RESET} ${check.name}`);
3506
- log(` ${DIM}${check.message}${RESET}`);
3507
- if (check.evidence) {
3508
- for (const e of check.evidence.slice(0, 3)) {
3509
- log(` ${DIM}\u2192 ${e}${RESET}`);
3510
- }
3511
- if (check.evidence.length > 3) {
3512
- log(` ${DIM} ... and ${check.evidence.length - 3} more${RESET}`);
3513
- }
3514
- }
3515
- log("");
3516
- }
3517
- }
3518
- if (passedChecks.length > 0) {
3519
- log(`${GREEN}${BOLD} PASSED CHECKS (${passedChecks.length})${RESET}`);
3520
- log("");
3521
- for (const check of passedChecks) {
3522
- log(` ${resultIcon(check.result)} ${DIM}${check.id}${RESET} ${check.name}`);
3523
- }
3524
- log("");
3525
- }
3526
- if (unknownChecks.length > 0) {
3527
- log(`${YELLOW}${BOLD} UNKNOWN (${unknownChecks.length})${RESET}`);
3528
- log("");
3529
- for (const check of unknownChecks) {
3530
- log(` ${resultIcon(check.result)} ${DIM}${check.id}${RESET} ${check.name} \u2014 ${DIM}${check.message}${RESET}`);
3531
- }
3532
- log("");
3533
- }
3534
- const driftFailed = driftChecks.filter((c) => c.result === "FAIL");
3535
- const driftPassed = driftChecks.filter((c) => c.result === "PASS");
3536
- const driftUnknown = driftChecks.filter((c) => c.result === "UNKNOWN");
3537
- if (driftChecks.length > 0) {
3538
- log(`${DIM}${"\u2500".repeat(60)}${RESET}`);
3539
- log("");
3540
- log(`${CYAN}${BOLD} DRIFT DETECTION (${driftChecks.length} checks)${RESET}`);
3541
- if (driftFailed.length > 0) {
3542
- log("");
3543
- for (const check of driftFailed) {
3544
- log(` ${resultIcon(check.result)} ${priorityTag(check.priority)} ${BOLD}${check.id}${RESET} ${check.name}`);
3545
- log(` ${DIM}${check.message}${RESET}`);
3546
- if (check.evidence) {
3547
- for (const e of check.evidence.slice(0, 3)) {
3548
- log(` ${DIM}\u2192 ${e}${RESET}`);
3549
- }
3550
- if (check.evidence.length > 3) {
3551
- log(` ${DIM} ... and ${check.evidence.length - 3} more${RESET}`);
3552
- }
3553
- }
3554
- log("");
3555
- }
3556
- }
3557
- if (driftPassed.length > 0) {
3558
- for (const check of driftPassed) {
3559
- log(` ${resultIcon(check.result)} ${DIM}${check.id}${RESET} ${check.name}`);
3560
- }
3561
- log("");
3562
- }
3563
- if (driftUnknown.length > 0) {
3564
- for (const check of driftUnknown) {
3565
- log(` ${resultIcon(check.result)} ${DIM}${check.id}${RESET} ${check.name} \u2014 ${DIM}${check.message}${RESET}`);
3566
- }
3567
- log("");
3568
- }
3569
- }
3570
- log(`${DIM}${"\u2500".repeat(60)}${RESET}`);
3571
- const totalFailed = failedChecks.length;
3572
- const totalPassed = passedChecks.length;
3573
- const safetyTotal = safetyChecks.length;
3574
- const driftTotal = driftChecks.length;
3575
- log("");
3576
- log(` ${BOLD}Safety:${RESET} ${totalPassed}/${safetyTotal} passed, ${totalFailed} failed, ${unknownChecks.length} unknown`);
3577
- if (driftTotal > 0) {
3578
- log(` ${BOLD}Drift:${RESET} ${driftPassed.length}/${driftTotal} passed, ${driftFailed.length} failed, ${driftUnknown.length} unknown`);
3579
- }
3580
- log("");
3581
- if (totalFailed > 0) {
3582
- log(` ${YELLOW}Fix the ${RED}${BOLD}${totalFailed} failed${RESET}${YELLOW} check${totalFailed > 1 ? "s" : ""} before going to production.${RESET}`);
3583
- log(` ${DIM}Learn more: https://asastandard.org/checks${RESET}`);
3584
- } else {
3585
- log(` ${GREEN}${BOLD}All checks passed.${RESET}`);
3586
- }
3587
- log("");
3588
- }
3589
- function printJson(report) {
3590
- console.log(JSON.stringify(report, null, 2));
3591
- }
3592
-
3593
- // src/commands/guard-init.ts
3594
- import fs2 from "fs";
3595
- import path3 from "path";
3596
-
3597
- // src/templates/check-structure.ts
3598
- var CHECK_STRUCTURE_SCRIPT = [
3599
- "#!/bin/bash",
3600
- "# ASA Structure Check \u2014 validates Slice Architecture rules",
3601
- "# Run: npx @vibecodiq/cli guard check (or ./check-structure.sh)",
3602
- "",
3603
- "set -e",
3604
- "",
3605
- 'RULES_FILE=".asa/rules/architecture.md"',
3606
- "ERRORS=0",
3607
- "WARNINGS=0",
3608
- "",
3609
- 'echo "\u{1F50D} ASA Structure Check \u2014 validating architecture rules..."',
3610
- 'echo " Rules: $RULES_FILE"',
3611
- 'echo ""',
3612
- "",
3613
- "fail() {",
3614
- ' echo "\u274C FAIL: $1"',
3615
- ' echo " \u2192 $2"',
3616
- ' echo " \u2192 Fix: see $RULES_FILE"',
3617
- ' echo ""',
3618
- " ERRORS=$((ERRORS + 1))",
3619
- "}",
3620
- "",
3621
- "warn() {",
3622
- ' echo "\u26A0\uFE0F WARN: $1"',
3623
- ' echo " \u2192 $2"',
3624
- ' echo ""',
3625
- " WARNINGS=$((WARNINGS + 1))",
3626
- "}",
3627
- "",
3628
- "pass() {",
3629
- ' echo "\u2705 PASS: $1"',
3630
- "}",
3631
- "",
3632
- "# CHECK 1: Business logic in src/domains/",
3633
- 'echo "--- Check 1: Business logic in src/domains/ ---"',
3634
- "",
3635
- 'BIZ_IN_PAGES=$(grep -rl "supabase\\.\\(from\\|auth\\|rpc\\|storage\\)" src/pages/ 2>/dev/null || true)',
3636
- 'if [ -n "$BIZ_IN_PAGES" ]; then',
3637
- " for f in $BIZ_IN_PAGES; do",
3638
- ' fail "Business logic in page file: $f" \\',
3639
- ' "Supabase calls must be in src/domains/, not in pages."',
3640
- " done",
3641
- "else",
3642
- ' pass "No business logic found in src/pages/"',
3643
- "fi",
3644
- "",
3645
- 'if [ -d "src/components" ]; then',
3646
- ' BIZ_IN_COMPONENTS=$(grep -rl "supabase\\.\\(from\\|auth\\|rpc\\|storage\\)" src/components/ 2>/dev/null || true)',
3647
- ' if [ -n "$BIZ_IN_COMPONENTS" ]; then',
3648
- " for f in $BIZ_IN_COMPONENTS; do",
3649
- ' fail "Business logic in component file: $f" \\',
3650
- ' "Supabase calls must be in src/domains/, not in components/."',
3651
- " done",
3652
- " else",
3653
- ' pass "No business logic found in src/components/"',
3654
- " fi",
3655
- "fi",
3656
- "",
3657
- "# CHECK 2: src/domains/ directory exists",
3658
- 'echo ""',
3659
- 'echo "--- Check 2: src/domains/ directory exists ---"',
3660
- "",
3661
- 'TS_FILES=$(find src/ -name "*.ts" -o -name "*.tsx" | grep -v "node_modules" | grep -v "vite-env" | wc -l)',
3662
- "",
3663
- 'if [ "$TS_FILES" -gt 5 ]; then',
3664
- ' if [ -d "src/domains" ]; then',
3665
- ' pass "src/domains/ directory exists"',
3666
- " DOMAIN_COUNT=$(find src/domains -mindepth 1 -maxdepth 1 -type d | wc -l)",
3667
- ' if [ "$DOMAIN_COUNT" -gt 0 ]; then',
3668
- ' pass "Found $DOMAIN_COUNT domain(s) in src/domains/"',
3669
- " else",
3670
- ' warn "src/domains/ exists but is empty" \\',
3671
- ' "Add domain folders like src/domains/auth/, src/domains/tasks/"',
3672
- " fi",
3673
- " else",
3674
- ' fail "src/domains/ directory missing" \\',
3675
- ' "Business logic must be in src/domains/<domain>/<slice>/."',
3676
- " fi",
3677
- "else",
3678
- ' pass "Project is small ($TS_FILES files) \u2014 src/domains/ not yet required"',
3679
- "fi",
3680
- "",
3681
- "# CHECK 3: No cross-domain imports",
3682
- 'echo ""',
3683
- 'echo "--- Check 3: No cross-domain imports ---"',
3684
- "",
3685
- 'if [ -d "src/domains" ]; then',
3686
- " CROSS_DOMAIN_VIOLATIONS=0",
3687
- " for domain_dir in src/domains/*/; do",
3688
- ' [ ! -d "$domain_dir" ] && continue',
3689
- ' domain_name=$(basename "$domain_dir")',
3690
- ' OTHER_DOMAINS=$(find src/domains -mindepth 1 -maxdepth 1 -type d ! -name "$domain_name" -exec basename {} \\;)',
3691
- " for other in $OTHER_DOMAINS; do",
3692
- ' VIOLATIONS=$(grep -rn "from.*domains/$other\\|import.*domains/$other" "$domain_dir" 2>/dev/null || true)',
3693
- ' if [ -n "$VIOLATIONS" ]; then',
3694
- " while IFS= read -r line; do",
3695
- ' fail "Cross-domain import in $domain_name \u2192 $other" "$line"',
3696
- ' done <<< "$VIOLATIONS"',
3697
- " CROSS_DOMAIN_VIOLATIONS=$((CROSS_DOMAIN_VIOLATIONS + 1))",
3698
- " fi",
3699
- " done",
3700
- " done",
3701
- ' [ "$CROSS_DOMAIN_VIOLATIONS" -eq 0 ] && pass "No cross-domain imports detected"',
3702
- "else",
3703
- ' pass "No domains yet \u2014 cross-domain check skipped"',
3704
- "fi",
3705
- "",
3706
- "# CHECK 4: Pages are thin wrappers",
3707
- 'echo ""',
3708
- 'echo "--- Check 4: Pages are thin wrappers ---"',
3709
- "",
3710
- 'if [ -d "src/pages" ]; then',
3711
- " for page in src/pages/*.tsx; do",
3712
- ' [ ! -f "$page" ] && continue',
3713
- ' PAGE_LINES=$(wc -l < "$page")',
3714
- ' if [ "$PAGE_LINES" -gt 80 ]; then',
3715
- ' fail "Page too large: $page ($PAGE_LINES lines)" \\',
3716
- ' "Pages should be <80 lines. Move logic to src/domains/."',
3717
- " fi",
3718
- " done",
3719
- ' pass "Page thin wrapper check complete"',
3720
- "fi",
3721
- "",
3722
- "# CHECK 5: shared/ has no business logic",
3723
- 'echo ""',
3724
- 'echo "--- Check 5: shared/ has no business logic ---"',
3725
- "",
3726
- 'if [ -d "src/shared" ]; then',
3727
- ' BIZ_IN_SHARED=$(grep -rln "TaskList\\|TaskForm\\|PricingCard\\|AdminUser\\|LoginForm\\|RegisterForm" src/shared/ 2>/dev/null || true)',
3728
- ' if [ -n "$BIZ_IN_SHARED" ]; then',
3729
- " for f in $BIZ_IN_SHARED; do",
3730
- ' fail "Business component in shared/: $f" \\',
3731
- ' "Domain-specific components belong in src/domains/."',
3732
- " done",
3733
- " else",
3734
- ' pass "No business logic found in src/shared/"',
3735
- " fi",
3736
- "else",
3737
- ' pass "src/shared/ not yet created \u2014 check skipped"',
3738
- "fi",
3739
- "",
3740
- "# RESULTS",
3741
- 'echo ""',
3742
- 'echo "=========================================="',
3743
- 'if [ "$ERRORS" -gt 0 ]; then',
3744
- ' echo "\u274C ASA STRUCTURE CHECK FAILED"',
3745
- ' echo " $ERRORS error(s), $WARNINGS warning(s)"',
3746
- ' echo " Read the architecture rules: $RULES_FILE"',
3747
- ' echo "=========================================="',
3748
- " exit 1",
3749
- "else",
3750
- ' echo "\u2705 ASA STRUCTURE CHECK PASSED"',
3751
- ' echo " 0 errors, $WARNINGS warning(s)"',
3752
- ' echo "=========================================="',
3753
- " exit 0",
3754
- "fi",
3755
- ""
3756
- ].join("\n");
3757
-
3758
- // src/templates/index.ts
3759
- var ARCHITECTURE_RULES = [
3760
- "# ASA Architecture Rules",
3761
- "",
3762
- "This project follows the ASA Slice Architecture. All code in this repository",
3763
- "MUST follow these rules. CI checks will fail if rules are violated.",
3764
- "",
3765
- "---",
3766
- "",
3767
- "## Rule 1: Business logic goes in `src/domains/`",
3768
- "",
3769
- "All business logic MUST be organized in domain folders:",
3770
- "",
3771
- "```",
3772
- "src/domains/",
3773
- "\u251C\u2500\u2500 auth/ # Authentication and authorization",
3774
- "\u251C\u2500\u2500 billing/ # Payments and subscriptions",
3775
- "\u2514\u2500\u2500 admin/ # Admin panel and user management",
3776
- "```",
3777
- "",
3778
- "Each domain contains **slices** \u2014 self-contained features:",
3779
- "",
3780
- "```",
3781
- "src/domains/<domain>/<slice>/",
3782
- "\u251C\u2500\u2500 <Component>.tsx # React component (UI)",
3783
- "\u251C\u2500\u2500 use<Action>.ts # React hook (data fetching / mutations)",
3784
- "\u2514\u2500\u2500 types.ts # Types (optional)",
3785
- "```",
3786
- "",
3787
- "### What is a Slice?",
3788
- "",
3789
- "One slice = one user action. Examples:",
3790
- "",
3791
- "| Domain | Slice | What it does |",
3792
- "|--------|-------|-------------|",
3793
- "| `auth` | `login` | User logs in |",
3794
- "| `auth` | `register` | User creates account |",
3795
- "| `billing` | `subscribe` | User subscribes to a plan |",
3796
- "| `billing` | `check-limits` | Check if user hit plan limits |",
3797
- "| `admin` | `user-list` | Admin sees all users |",
3798
- "",
3799
- "---",
3800
- "",
3801
- "## Rule 2: Pages are thin wrappers",
3802
- "",
3803
- "Page files MUST be thin wrappers that import from `src/domains/`.",
3804
- "Pages contain NO business logic \u2014 only layout and composition.",
3805
- "Maximum 80 lines per page file.",
3806
- "",
3807
- "---",
3808
- "",
3809
- "## Rule 3: Shared infrastructure goes in `src/shared/`",
3810
- "",
3811
- "Database clients, auth helpers, and external service configs go in `src/shared/`:",
3812
- "",
3813
- "```",
3814
- "src/shared/",
3815
- "\u251C\u2500\u2500 db/",
3816
- "\u2502 \u2514\u2500\u2500 supabase-client.ts",
3817
- "\u251C\u2500\u2500 auth/",
3818
- "\u2502 \u251C\u2500\u2500 AuthGuard.tsx",
3819
- "\u2502 \u2514\u2500\u2500 useCurrentUser.ts",
3820
- "\u2514\u2500\u2500 billing/",
3821
- " \u2514\u2500\u2500 stripe-client.ts",
3822
- "```",
3823
- "",
3824
- "---",
3825
- "",
3826
- "## Rule 4: No cross-domain imports",
3827
- "",
3828
- "A file in one domain MUST NOT import from another domain.",
3829
- "Use `src/shared/` for cross-domain communication.",
3830
- "",
3831
- "---",
3832
- "",
3833
- "## Rule 5: File naming conventions",
3834
- "",
3835
- "| Type | Pattern | Example |",
3836
- "|------|---------|---------|",
3837
- "| Component | `PascalCase.tsx` | `LoginForm.tsx` |",
3838
- "| Hook | `use<Action>.ts` | `useLogin.ts` |",
3839
- "| Types | `types.ts` | `types.ts` |",
3840
- "| Page | `page.tsx` | `page.tsx` |",
3841
- "| Shared utility | `camelCase.ts` | `supabase-client.ts` |",
3842
- "",
3843
- "---",
3844
- "",
3845
- "**If CI fails, restructure your code to follow these rules.**",
3846
- ""
3847
- ].join("\n");
3848
- var ASA_GUARD_WORKFLOW = [
3849
- "name: ASA Structure Check",
3850
- "",
3851
- "on:",
3852
- " push:",
3853
- " branches: [main]",
3854
- " paths-ignore:",
3855
- " - '.asa/logs/**'",
3856
- " pull_request:",
3857
- " branches: [main]",
3858
- "",
3859
- "permissions:",
3860
- " contents: write",
3861
- "",
3862
- "jobs:",
3863
- " asa-structure-check:",
3864
- " name: ASA Architecture Validation",
3865
- " runs-on: ubuntu-latest",
3866
- "",
3867
- " steps:",
3868
- " - name: Checkout code",
3869
- " uses: actions/checkout@v4",
3870
- " with:",
3871
- " fetch-depth: 0",
3872
- "",
3873
- " - name: Run ASA structure check",
3874
- " id: check",
3875
- " run: |",
3876
- " chmod +x check-structure.sh",
3877
- " ./check-structure.sh 2>&1 | tee /tmp/check_output.txt || true",
3878
- ' if grep -q "ASA STRUCTURE CHECK FAILED" /tmp/check_output.txt; then',
3879
- ' echo "check_failed=true" >> "$GITHUB_OUTPUT"',
3880
- " else",
3881
- ' echo "check_failed=false" >> "$GITHUB_OUTPUT"',
3882
- " fi",
3883
- "",
3884
- " - name: Fail if architecture check failed",
3885
- " if: steps.check.outputs.check_failed == 'true'",
3886
- " run: |",
3887
- ' echo ""',
3888
- ' echo "\u274C ASA Architecture Check FAILED."',
3889
- ' echo " See output above for details."',
3890
- ' echo " Fix violations and push again."',
3891
- " exit 1",
3892
- ""
3893
- ].join("\n");
3894
-
3895
- // src/commands/guard-init.ts
3896
- var CYAN2 = "\x1B[36m";
3897
- var GREEN2 = "\x1B[32m";
3898
- var YELLOW2 = "\x1B[33m";
3899
- var BOLD2 = "\x1B[1m";
3900
- var DIM2 = "\x1B[2m";
3901
- var RESET2 = "\x1B[0m";
3902
- async function guardInit(targetDir, isPro) {
3903
- const root = path3.resolve(targetDir);
3904
- if (isPro) {
3905
- console.log("");
3906
- console.log(` ${YELLOW2}\u26A1 guard init --pro requires a Vibecodiq Pro license.${RESET2}`);
3907
- console.log(` ${DIM2} Get yours at: https://vibecodiq.com/guard${RESET2}`);
3908
- console.log(` ${DIM2} Run: npx @vibecodiq/cli login${RESET2}`);
3909
- console.log("");
3910
- process.exit(1);
3911
- }
3912
- console.log("");
3913
- console.log(` ${CYAN2}\u{1F6E1}\uFE0F Initializing ASA Guard in ${root}${RESET2}`);
3914
- console.log("");
3915
- const files = [
3916
- {
3917
- path: ".asa/rules/architecture.md",
3918
- content: ARCHITECTURE_RULES
3919
- },
3920
- {
3921
- path: ".github/workflows/asa-guard.yml",
3922
- content: ASA_GUARD_WORKFLOW
3923
- },
3924
- {
3925
- path: "check-structure.sh",
3926
- content: CHECK_STRUCTURE_SCRIPT,
3927
- executable: true
3928
- }
3929
- ];
3930
- let created = 0;
3931
- let skipped = 0;
3932
- for (const file of files) {
3933
- const fullPath = path3.join(root, file.path);
3934
- const dir = path3.dirname(fullPath);
3935
- if (!fs2.existsSync(dir)) {
3936
- fs2.mkdirSync(dir, { recursive: true });
3937
- }
3938
- if (fs2.existsSync(fullPath)) {
3939
- console.log(` ${YELLOW2}\u23ED ${file.path}${RESET2} ${DIM2}(already exists)${RESET2}`);
3940
- skipped++;
3941
- } else {
3942
- fs2.writeFileSync(fullPath, file.content, "utf-8");
3943
- if (file.executable) {
3944
- fs2.chmodSync(fullPath, 493);
3945
- }
3946
- console.log(` ${GREEN2}\u2713 ${file.path}${RESET2}`);
3947
- created++;
3948
- }
3949
- }
3950
- console.log("");
3951
- console.log(` ${BOLD2}Done:${RESET2} ${created} file${created !== 1 ? "s" : ""} created, ${skipped} skipped.`);
3952
- console.log("");
3953
- console.log(` ${BOLD2}Next steps:${RESET2}`);
3954
- console.log(` ${DIM2}1.${RESET2} Commit the new files: ${DIM2}git add -A && git commit -m "chore: add ASA Guard"${RESET2}`);
3955
- console.log(` ${DIM2}2.${RESET2} Push to GitHub \u2014 CI will run architecture checks on every PR`);
3956
- console.log(` ${DIM2}3.${RESET2} Run locally: ${DIM2}npx @vibecodiq/cli guard check${RESET2}`);
3957
- console.log("");
3958
- console.log(` ${DIM2}Learn more: https://asastandard.org/checks/methodology${RESET2}`);
3959
- console.log("");
3960
- }
3961
-
3962
- // src/commands/guard-check.ts
3963
- import fs3 from "fs";
3964
- import path4 from "path";
3965
- var CYAN3 = "\x1B[36m";
3966
- var GREEN3 = "\x1B[32m";
3967
- var YELLOW3 = "\x1B[33m";
3968
- var RED2 = "\x1B[31m";
3969
- var BOLD3 = "\x1B[1m";
3970
- var DIM3 = "\x1B[2m";
3971
- var RESET3 = "\x1B[0m";
3972
- async function guardCheck(targetDir, isPro) {
3973
- const root = path4.resolve(targetDir);
3974
- if (isPro) {
3975
- console.log("");
3976
- console.log(` ${YELLOW3}\u26A1 guard check --pro requires a Vibecodiq Pro license.${RESET3}`);
3977
- console.log(` ${DIM3} Get yours at: https://vibecodiq.com/guard${RESET3}`);
3978
- console.log(` ${DIM3} Run: npx @vibecodiq/cli login${RESET3}`);
3979
- console.log("");
3980
- process.exit(1);
3981
- }
3982
- console.log("");
3983
- console.log(` ${CYAN3}\u{1F6E1}\uFE0F ASA Guard \u2014 Architecture Check${RESET3}`);
3984
- console.log(` ${DIM3} Target: ${root}${RESET3}`);
3985
- console.log("");
3986
- const results = [];
3987
- const srcDir = findSrcDir(root);
3988
- results.push(await checkBusinessLogicInDomains(root, srcDir));
3989
- results.push(await checkDomainsExist(root, srcDir));
3990
- results.push(await checkNoCrossDomainImports(root, srcDir));
3991
- results.push(await checkThinPages(root, srcDir));
3992
- results.push(await checkSharedNoBusinessLogic(root, srcDir));
3993
- let errors = 0;
3994
- let warnings = 0;
3995
- let passed = 0;
3996
- for (const r of results) {
3997
- const icon = r.status === "PASS" ? `${GREEN3}\u2705${RESET3}` : r.status === "FAIL" ? `${RED2}\u274C${RESET3}` : r.status === "WARN" ? `${YELLOW3}\u26A0\uFE0F${RESET3}` : `${DIM3}\u23ED${RESET3}`;
3998
- console.log(` ${icon} ${r.name}`);
3999
- if (r.status !== "PASS" && r.status !== "SKIP") {
4000
- console.log(` ${DIM3}${r.message}${RESET3}`);
4001
- if (r.evidence) {
4002
- for (const e of r.evidence.slice(0, 3)) {
4003
- console.log(` ${DIM3}\u2192 ${e}${RESET3}`);
4004
- }
4005
- }
4006
- }
4007
- if (r.status === "FAIL") errors++;
4008
- else if (r.status === "WARN") warnings++;
4009
- else if (r.status === "PASS") passed++;
4010
- }
4011
- console.log("");
4012
- if (errors > 0) {
4013
- console.log(` ${RED2}${BOLD3}\u274C ASA GUARD CHECK FAILED${RESET3} \u2014 ${errors} error${errors > 1 ? "s" : ""}, ${warnings} warning${warnings > 1 ? "s" : ""}`);
4014
- console.log(` ${DIM3} Fix violations and run again.${RESET3}`);
4015
- console.log(` ${DIM3} Rules: .asa/rules/architecture.md${RESET3}`);
4016
- } else {
4017
- console.log(` ${GREEN3}${BOLD3}\u2705 ASA GUARD CHECK PASSED${RESET3} \u2014 ${passed} passed, ${warnings} warning${warnings > 1 ? "s" : ""}`);
4018
- }
4019
- console.log("");
4020
- process.exit(errors > 0 ? 1 : 0);
4021
- }
4022
- function findSrcDir(root) {
4023
- if (fs3.existsSync(path4.join(root, "src"))) return "src";
4024
- if (fs3.existsSync(path4.join(root, "app"))) return "app";
4025
- return null;
4026
- }
4027
- function getAllFiles(dir, ext) {
4028
- const results = [];
4029
- if (!fs3.existsSync(dir)) return results;
4030
- const entries = fs3.readdirSync(dir, { withFileTypes: true });
4031
- for (const entry of entries) {
4032
- const full = path4.join(dir, entry.name);
4033
- if (entry.name === "node_modules" || entry.name === ".git") continue;
4034
- if (entry.isDirectory()) {
4035
- results.push(...getAllFiles(full, ext));
4036
- } else if (ext.some((e) => entry.name.endsWith(e))) {
4037
- results.push(full);
4038
- }
4039
- }
4040
- return results;
4041
- }
4042
- async function checkBusinessLogicInDomains(root, srcDir) {
4043
- const name = "Business logic in domains/";
4044
- if (!srcDir) return { name, status: "SKIP", message: "No src/ directory found" };
4045
- const pagesDir = path4.join(root, srcDir, "pages");
4046
- const componentsDir = path4.join(root, srcDir, "components");
4047
- const violations = [];
4048
- const bizPattern = /supabase\.(from|auth|rpc|storage)\b/;
4049
- for (const dir of [pagesDir, componentsDir]) {
4050
- if (!fs3.existsSync(dir)) continue;
4051
- const files = getAllFiles(dir, [".ts", ".tsx", ".js", ".jsx"]);
4052
- for (const file of files) {
4053
- const content = fs3.readFileSync(file, "utf-8");
4054
- if (bizPattern.test(content)) {
4055
- violations.push(path4.relative(root, file));
4056
- }
4057
- }
4058
- }
4059
- if (violations.length > 0) {
4060
- return {
4061
- name,
4062
- status: "FAIL",
4063
- message: `Supabase calls found in pages/components (${violations.length} file${violations.length > 1 ? "s" : ""})`,
4064
- evidence: violations
4065
- };
4066
- }
4067
- return { name, status: "PASS", message: "" };
4068
- }
4069
- async function checkDomainsExist(root, srcDir) {
4070
- const name = "domains/ directory exists";
4071
- if (!srcDir) return { name, status: "SKIP", message: "No src/ directory found" };
4072
- const domainsDir = path4.join(root, srcDir, "domains");
4073
- const tsFiles = getAllFiles(path4.join(root, srcDir), [".ts", ".tsx"]);
4074
- if (tsFiles.length <= 5) {
4075
- return { name, status: "PASS", message: "Project is small \u2014 domains/ not yet required" };
4076
- }
4077
- if (!fs3.existsSync(domainsDir)) {
4078
- return {
4079
- name,
4080
- status: "FAIL",
4081
- message: `${tsFiles.length} files in ${srcDir}/ but no domains/ directory`,
4082
- evidence: ["Create src/domains/<domain>/<slice>/ for business logic"]
4083
- };
4084
- }
4085
- const domains = fs3.readdirSync(domainsDir, { withFileTypes: true }).filter((d) => d.isDirectory());
4086
- if (domains.length === 0) {
4087
- return { name, status: "WARN", message: "domains/ exists but is empty" };
4088
- }
4089
- return { name, status: "PASS", message: `${domains.length} domain${domains.length > 1 ? "s" : ""} found` };
4090
- }
4091
- async function checkNoCrossDomainImports(root, srcDir) {
4092
- const name = "No cross-domain imports";
4093
- if (!srcDir) return { name, status: "SKIP", message: "No src/ directory found" };
4094
- const domainsDir = path4.join(root, srcDir, "domains");
4095
- if (!fs3.existsSync(domainsDir)) {
4096
- return { name, status: "PASS", message: "No domains yet" };
4097
- }
4098
- const domains = fs3.readdirSync(domainsDir, { withFileTypes: true }).filter((d) => d.isDirectory()).map((d) => d.name);
4099
- const violations = [];
4100
- for (const domain of domains) {
4101
- const domainPath = path4.join(domainsDir, domain);
4102
- const files = getAllFiles(domainPath, [".ts", ".tsx", ".js", ".jsx"]);
4103
- const otherDomains = domains.filter((d) => d !== domain);
4104
- for (const file of files) {
4105
- const content = fs3.readFileSync(file, "utf-8");
4106
- for (const other of otherDomains) {
4107
- const importPattern = new RegExp(`from.*domains/${other}|import.*domains/${other}`);
4108
- if (importPattern.test(content)) {
4109
- violations.push(`${path4.relative(root, file)} \u2192 imports from ${other}/`);
4110
- }
4111
- }
4112
- }
4113
- }
4114
- if (violations.length > 0) {
4115
- return {
4116
- name,
4117
- status: "FAIL",
4118
- message: `${violations.length} cross-domain import${violations.length > 1 ? "s" : ""} found`,
4119
- evidence: violations
4120
- };
4121
- }
4122
- return { name, status: "PASS", message: "" };
4123
- }
4124
- async function checkThinPages(root, srcDir) {
4125
- const name = "Pages are thin wrappers";
4126
- if (!srcDir) return { name, status: "SKIP", message: "No src/ directory found" };
4127
- const pagesDir = path4.join(root, srcDir, "pages");
4128
- if (!fs3.existsSync(pagesDir)) {
4129
- return { name, status: "PASS", message: "No pages/ directory (Next.js App Router or no pages)" };
4130
- }
4131
- const files = getAllFiles(pagesDir, [".tsx", ".jsx"]);
4132
- const fat = [];
4133
- for (const file of files) {
4134
- const content = fs3.readFileSync(file, "utf-8");
4135
- const lines = content.split("\n").length;
4136
- if (lines > 80) {
4137
- fat.push(`${path4.relative(root, file)} (${lines} lines)`);
4138
- }
4139
- }
4140
- if (fat.length > 0) {
4141
- return {
4142
- name,
4143
- status: "FAIL",
4144
- message: `${fat.length} page${fat.length > 1 ? "s" : ""} over 80 lines`,
4145
- evidence: fat
4146
- };
4147
- }
4148
- return { name, status: "PASS", message: "" };
4149
- }
4150
- async function checkSharedNoBusinessLogic(root, srcDir) {
4151
- const name = "shared/ has no business logic";
4152
- if (!srcDir) return { name, status: "SKIP", message: "No src/ directory found" };
4153
- const sharedDir = path4.join(root, srcDir, "shared");
4154
- if (!fs3.existsSync(sharedDir)) {
4155
- return { name, status: "PASS", message: "No shared/ directory yet" };
4156
- }
4157
- const bizNames = /TaskList|TaskForm|PricingCard|AdminUser|LoginForm|RegisterForm/;
4158
- const files = getAllFiles(sharedDir, [".ts", ".tsx", ".js", ".jsx"]);
4159
- const violations = [];
4160
- for (const file of files) {
4161
- const content = fs3.readFileSync(file, "utf-8");
4162
- if (bizNames.test(content)) {
4163
- violations.push(path4.relative(root, file));
4164
- }
4165
- }
4166
- if (violations.length > 0) {
4167
- return {
4168
- name,
4169
- status: "FAIL",
4170
- message: `Business components found in shared/ (${violations.length} file${violations.length > 1 ? "s" : ""})`,
4171
- evidence: violations
4172
- };
4173
- }
4174
- return { name, status: "PASS", message: "" };
4175
- }
4176
-
4177
- // src/commands/guard-upgrade.ts
4178
- import fs4 from "fs";
4179
- import path5 from "path";
4180
- var GREEN4 = "\x1B[32m";
4181
- var YELLOW4 = "\x1B[33m";
4182
- var CYAN4 = "\x1B[36m";
4183
- var BOLD4 = "\x1B[1m";
4184
- var DIM4 = "\x1B[2m";
4185
- var RESET4 = "\x1B[0m";
4186
- async function guardUpgrade(targetDir) {
4187
- const root = path5.resolve(targetDir);
4188
- console.log("");
4189
- console.log(` ${CYAN4}\u{1F504} Upgrading ASA Guard rules...${RESET4}`);
4190
- console.log("");
4191
- const files = [
4192
- { path: ".asa/rules/architecture.md", content: ARCHITECTURE_RULES },
4193
- { path: ".github/workflows/asa-guard.yml", content: ASA_GUARD_WORKFLOW },
4194
- { path: "check-structure.sh", content: CHECK_STRUCTURE_SCRIPT, executable: true }
4195
- ];
4196
- let updated = 0;
4197
- for (const file of files) {
4198
- const fullPath = path5.join(root, file.path);
4199
- if (!fs4.existsSync(fullPath)) {
4200
- console.log(` ${YELLOW4}\u23ED ${file.path}${RESET4} ${DIM4}(not found \u2014 run guard init first)${RESET4}`);
4201
- continue;
4202
- }
4203
- const existing = fs4.readFileSync(fullPath, "utf-8");
4204
- if (existing === file.content) {
4205
- console.log(` ${DIM4}\u2713 ${file.path} (already up to date)${RESET4}`);
4206
- continue;
4207
- }
4208
- fs4.writeFileSync(fullPath, file.content, "utf-8");
4209
- if (file.executable) fs4.chmodSync(fullPath, 493);
4210
- console.log(` ${GREEN4}\u2713 ${file.path}${RESET4} ${BOLD4}updated${RESET4}`);
4211
- updated++;
4212
- }
4213
- console.log("");
4214
- if (updated > 0) {
4215
- console.log(` ${BOLD4}${updated} file${updated > 1 ? "s" : ""} updated.${RESET4} Commit the changes.`);
4216
- } else {
4217
- console.log(` ${DIM4}All rules are already up to date.${RESET4}`);
4218
- }
4219
- console.log("");
4220
- }
4221
-
4222
- // src/index.ts
4223
- var VERSION = "0.2.0";
4224
- var BOLD5 = "\x1B[1m";
4225
- var DIM5 = "\x1B[2m";
4226
- var CYAN5 = "\x1B[36m";
4227
- var YELLOW5 = "\x1B[33m";
4228
- var RESET5 = "\x1B[0m";
4229
- var args = process.argv.slice(2);
4230
- var command = args[0];
4231
- var subcommand = args[1];
4232
- function getPath(args2) {
4233
- for (const a of args2.slice(1)) {
4234
- if (!a.startsWith("-")) return a;
4235
- }
4236
- return ".";
4237
- }
4238
- function hasFlag(flag) {
4239
- return args.includes(flag);
4240
- }
4241
- function proStub(feature) {
4242
- console.log("");
4243
- console.log(` ${YELLOW5}\u26A1 ${feature} requires a Vibecodiq Pro license.${RESET5}`);
4244
- console.log("");
4245
- console.log(` ${DIM5} Get yours at: https://vibecodiq.com/pricing${RESET5}`);
4246
- console.log(` ${DIM5} Then run: npx @vibecodiq/cli login${RESET5}`);
4247
- console.log("");
4248
- process.exit(1);
4249
- }
4250
- if (command === "scan") {
4251
- if (hasFlag("--fix")) {
4252
- proStub("scan --fix (AI fix prompts)");
4253
- }
4254
- if (hasFlag("--full")) {
4255
- proStub("scan --full (full audit mode)");
4256
- }
4257
- const targetDir = getPath(args);
4258
- const jsonOutput = hasFlag("--json");
4259
- if (!jsonOutput) {
4260
- console.log("");
4261
- console.log(` ${CYAN5}\u23F3 Scanning project...${RESET5}`);
4262
- }
4263
- const report = await runScan(targetDir);
4264
- if (jsonOutput) {
4265
- printJson(report);
4266
- } else {
4267
- printReport(report);
4268
- }
4269
- const hasFails = report.checks.some((c) => c.result === "FAIL");
4270
- process.exit(hasFails ? 1 : 0);
4271
- } else if (command === "guard") {
4272
- if (subcommand === "init") {
4273
- const targetDir = getPath(args.slice(1));
4274
- const isPro = hasFlag("--pro");
4275
- await guardInit(targetDir, isPro);
4276
- } else if (subcommand === "check") {
4277
- const targetDir = getPath(args.slice(1));
4278
- const isPro = hasFlag("--pro");
4279
- await guardCheck(targetDir, isPro);
4280
- } else if (subcommand === "upgrade") {
4281
- const targetDir = getPath(args.slice(1));
4282
- await guardUpgrade(targetDir);
4283
- } else {
4284
- console.log("");
4285
- console.log(` ${BOLD5}@vibecodiq/cli guard${RESET5} \u2014 Architecture rules & CI enforcement`);
4286
- console.log("");
4287
- console.log(` ${BOLD5}Commands:${RESET5}`);
4288
- console.log(` guard init Install ASA rules + CI into your repo`);
4289
- console.log(` guard init --pro + connect to Vibecodiq API ${DIM5}(Pro)${RESET5}`);
4290
- console.log(` guard check Run 5 architecture checks locally`);
4291
- console.log(` guard check --pro Run full checks via remote API ${DIM5}(Pro)${RESET5}`);
4292
- console.log(` guard upgrade Upgrade rules to latest version`);
4293
- console.log("");
4294
- console.log(` ${DIM5}Learn more: https://vibecodiq.com/guard${RESET5}`);
4295
- console.log("");
4296
- }
4297
- } else if (command === "login") {
4298
- proStub("login (Pro authentication)");
4299
- } else if (command === "logout") {
4300
- proStub("logout (Pro authentication)");
4301
- } else if (command === "--version" || command === "-v") {
4302
- console.log(`@vibecodiq/cli v${VERSION}`);
4303
- } else {
4304
- console.log("");
4305
- console.log(` ${BOLD5}@vibecodiq/cli${RESET5} v${VERSION} \u2014 Production safety for AI-built apps`);
4306
- console.log("");
4307
- console.log(` ${BOLD5}Scan${RESET5} ${DIM5}(check what you already have)${RESET5}`);
4308
- console.log(` scan [path] Run 71 safety checks ${DIM5}(Free)${RESET5}`);
4309
- console.log(` scan --json JSON output for CI pipelines`);
4310
- console.log(` scan --fix AI fix prompts for failures ${DIM5}(Pro)${RESET5}`);
4311
- console.log(` scan --full Full audit with Trust Score ${DIM5}(Pro)${RESET5}`);
4312
- console.log("");
4313
- console.log(` ${BOLD5}Guard${RESET5} ${DIM5}(build safely from day one)${RESET5}`);
4314
- console.log(` guard init Install ASA rules + CI ${DIM5}(Free)${RESET5}`);
4315
- console.log(` guard check Run 5 architecture checks ${DIM5}(Free)${RESET5}`);
4316
- console.log(` guard check --pro Full architecture + safety ${DIM5}(Pro)${RESET5}`);
4317
- console.log(` guard upgrade Upgrade rules to latest`);
4318
- console.log("");
4319
- console.log(` ${BOLD5}Account${RESET5}`);
4320
- console.log(` login Authenticate for Pro features`);
4321
- console.log(` logout Sign out`);
4322
- console.log(` --version Show version`);
4323
- console.log("");
4324
- console.log(` ${DIM5}Docs: https://asastandard.org/checks${RESET5}`);
4325
- console.log(` ${DIM5}Pricing: https://vibecodiq.com/pricing${RESET5}`);
4326
- console.log("");
4327
- }
2
+ import{createHash as Hs}from"crypto";import Si from"path";var pe={id:"ARCH-01",name:"Business logic in domains/",module:"architecture",layer:"L1",priority:"P0",description:"Supabase/DB calls in pages or components indicate business logic outside domains/",fixCost:250,fixSize:"L",async run(e){let t=/supabase\.(from|auth|rpc|storage)\b|\.from\(\s*['"`]/,n=[],i=e.files.filter(s=>(s.includes("/pages/")||s.includes("/app/"))&&!s.includes("/api/")&&!s.includes("route.")&&!s.includes("layout.")&&!s.includes("globals.")&&(s.endsWith(".ts")||s.endsWith(".tsx")||s.endsWith(".js")||s.endsWith(".jsx"))),r=e.files.filter(s=>s.includes("/components/")&&(s.endsWith(".ts")||s.endsWith(".tsx")||s.endsWith(".js")||s.endsWith(".jsx")));for(let s of[...i,...r]){let o;try{o=await e.readFile(s)}catch{continue}t.test(o)&&n.push(s)}return n.length>0?{result:"FAIL",message:`Supabase/DB calls found in pages/components (${n.length} file${n.length>1?"s":""})`,evidence:n.slice(0,5)}:{result:"PASS",message:"No business logic detected in pages/components"}}};var fe={id:"ARCH-02",name:"domains/ directory exists",module:"architecture",layer:"L1",priority:"P0",description:"Projects with >5 source files need a domains/ directory for organized business logic",fixCost:250,fixSize:"L",async run(e){let t=e.files.filter(r=>(r.endsWith(".ts")||r.endsWith(".tsx")||r.endsWith(".js")||r.endsWith(".jsx"))&&!r.includes("node_modules"));if(t.length<=5)return{result:"PASS",message:"Project is small \u2014 domains/ not yet required"};if(!e.files.some(r=>r.includes("domains/")||r.includes("src/domains/")))return{result:"FAIL",message:`${t.length} source files but no domains/ directory`,evidence:["Create domains/<domain>/<slice>/ for business logic"]};let i=new Set;for(let r of e.files){let s=r.match(/(?:src\/)?domains\/([^/]+)\//);s&&i.add(s[1])}return i.size===0?{result:"FAIL",message:"domains/ exists but is empty"}:{result:"PASS",message:`${i.size} domain${i.size>1?"s":""} found`}}};var 
me={id:"ARCH-03",name:"No cross-domain imports",module:"architecture",layer:"L1",priority:"P1",description:"Files in one domain must not import from another domain \u2014 use shared/ for cross-domain communication",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(r=>r.includes("domains/"));if(t.length===0)return{result:"PASS",message:"No domains/ directory yet"};let n=new Set;for(let r of t){let s=r.match(/(?:src\/)?domains\/([^/]+)\//);s&&n.add(s[1])}let i=[];for(let r of t){if(!r.endsWith(".ts")&&!r.endsWith(".tsx")&&!r.endsWith(".js")&&!r.endsWith(".jsx"))continue;let s=r.match(/(?:src\/)?domains\/([^/]+)\//)?.[1];if(!s)continue;let o;try{o=await e.readFile(r)}catch{continue}for(let a of n){if(a===s)continue;new RegExp(`from.*domains/${a}|import.*domains/${a}`).test(o)&&i.push(`${r} \u2192 imports from ${a}/`)}}return i.length>0?{result:"FAIL",message:`${i.length} cross-domain import${i.length>1?"s":""} found`,evidence:i.slice(0,5)}:{result:"PASS",message:"No cross-domain imports detected"}}};var ge={id:"ARCH-04",name:"Pages are thin wrappers",module:"architecture",layer:"L1",priority:"P1",description:"Page files should be thin wrappers (<80 lines) that compose domain components",fixCost:250,fixSize:"L",async run(e){let t=e.files.filter(i=>(i.includes("/pages/")||i.match(/app\/.*\/page\.(ts|tsx|js|jsx)$/))&&(i.endsWith(".tsx")||i.endsWith(".jsx")));if(t.length===0)return{result:"PASS",message:"No page files found"};let n=[];for(let i of t){let r;try{r=await e.readFile(i)}catch{continue}let s=r.split(`
3
+ `).length;s>80&&n.push(`${i} (${s} lines)`)}return n.length>0?{result:"FAIL",message:`${n.length} page${n.length>1?"s":""} over 80 lines \u2014 extract logic to domains/`,evidence:n.slice(0,5)}:{result:"PASS",message:`All ${t.length} pages are thin wrappers`}}};var he={id:"ARCH-05",name:"shared/ has no business logic",module:"architecture",layer:"L1",priority:"P1",description:"shared/ should contain only infrastructure (DB clients, auth helpers) \u2014 not domain-specific components",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>(s.includes("shared/")||s.includes("src/shared/"))&&(s.endsWith(".ts")||s.endsWith(".tsx")||s.endsWith(".js")||s.endsWith(".jsx")));if(t.length===0)return{result:"PASS",message:"No shared/ directory yet"};let n=/supabase\.(from|rpc)\(|\.insert\(|\.update\(|\.delete\(|\.select\(/,i=/TaskList|TaskForm|PricingCard|AdminUser|LoginForm|RegisterForm|InvoiceTable|OrderList|ProductCard/,r=[];for(let s of t){let o;try{o=await e.readFile(s)}catch{continue}i.test(o)?r.push(`${s} \u2014 business component name detected`):n.test(o)&&!s.includes("/db/")&&!s.includes("/auth/")&&!s.includes("/billing/")&&r.push(`${s} \u2014 direct DB operations outside infrastructure`)}return r.length>0?{result:"FAIL",message:`Business logic found in shared/ (${r.length} file${r.length>1?"s":""})`,evidence:r.slice(0,5)}:{result:"PASS",message:`shared/ contains only infrastructure (${t.length} files)`}}};async function Se(e,t,n){let i=[],r=e.files;n&&n.length>0&&(r=r.filter(s=>n.some(o=>ye(s,o))));for(let s of r){let o;try{o=await e.readFile(s)}catch{continue}let a=o.split(`
4
+ `);for(let c=0;c<a.length;c++)t.test(a[c])&&i.push({file:s,line:c+1,content:a[c].trim()})}return i}function ye(e,t){if(t.startsWith("!"))return!ye(e,t.slice(1));if(t.startsWith("**/")){let n=t.slice(3);return e.includes(n)}if(t.endsWith("/**")){let n=t.slice(0,-3);return e.startsWith(n)}return t.includes("*")?new RegExp("^"+t.replace(/\*/g,".*")+"$").test(e):e===t||e.startsWith(t+"/")}function v(e){return e.startsWith("src/")||e.startsWith("components/")||e.startsWith("app/")||e.startsWith("pages/")||e.includes("/components/")||e.includes("/hooks/")||e.includes("/contexts/")}function w(e){return e.includes("/api/")||e.includes("route.ts")||e.includes("route.js")||e.includes("server/")||e.includes("actions.ts")||e.includes("actions.js")}function b(e){return e.includes("migration")||e.includes("supabase/")||e.endsWith(".sql")}function E(e){return(e.split("/").pop()||"").startsWith(".env")}var Ae={id:"AUTH-01",name:"service_role key not in client code",module:"auth",layer:"L2",priority:"P0",aliases:["ADM-17"],description:"service_role bypasses all RLS \u2014 must never appear in client-side code or NEXT_PUBLIC_ vars",fixCost:100,fixSize:"M",async run(e){let t=/service_role|SERVICE_ROLE|SUPABASE_SERVICE_ROLE|sb_secret_/i,n=await e.grepFiles(t),i=n.filter(c=>c.content.includes("NEXT_PUBLIC_")?!0:c.file.includes("use client")?!1:!!(v(c.file)&&!c.file.includes("/api/")&&!c.file.includes("route."))),r=n.filter(c=>c.content.includes("NEXT_PUBLIC_")&&/service_role|SERVICE_ROLE/i.test(c.content)),s=[...i,...r],o=[...new Map(s.map(c=>[`${c.file}:${c.line}`,c])).values()];return o.length>0?{result:"FAIL",message:`service_role key found in client-accessible code (${o.length} location${o.length>1?"s":""})`,evidence:o.map(c=>`${c.file}:${c.line} \u2192 ${c.content.substring(0,120)}`)}:n.filter(c=>!c.content.startsWith("//")&&!c.content.startsWith("#")&&!c.content.startsWith("*")).length>0?{result:"PASS",message:"service_role key found only in server-side 
code"}:{result:"UNKNOWN",message:"No service_role references found \u2014 Supabase may not be used"}}};var be={id:"AUTH-02",name:"RLS enabled on all tables",module:"auth",layer:"L2",priority:"P0",description:"Every table must have Row Level Security enabled. Without RLS, anon key = full DB access.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b);if(t.length===0)return{result:"UNKNOWN",message:"No SQL migration files found"};let n=/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?(\w+)/gi,i=/ALTER\s+TABLE\s+(?:public\.)?(\w+)\s+ENABLE\s+ROW\s+LEVEL\s+SECURITY/gi,r=new Set(["schema_migrations","_prisma_migrations","migrations"]),s=new Set,o=new Set;for(let c of t){let l;try{l=await e.readFile(c)}catch{continue}let p,f=new RegExp(n.source,"gi");for(;(p=f.exec(l))!==null;){let A=p[1].toLowerCase();r.has(A)||s.add(A)}let S=new RegExp(i.source,"gi");for(;(p=S.exec(l))!==null;)o.add(p[1].toLowerCase())}if(s.size===0)return{result:"UNKNOWN",message:"No CREATE TABLE statements found in migrations"};let a=[...s].filter(c=>!o.has(c));return a.length>0?{result:"FAIL",message:`${a.length} table${a.length>1?"s":""} without RLS (of ${s.size} total)`,evidence:a.map(c=>`Table "${c}" \u2014 missing ENABLE ROW LEVEL SECURITY`)}:{result:"PASS",message:`All ${s.size} tables have RLS enabled`}}};var $e={id:"AUTH-03",name:"RLS policies have WITH CHECK",module:"auth",layer:"L2",priority:"P0",description:"INSERT/UPDATE policies need WITH CHECK clause. 
USING alone lets users insert data owned by others.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b);if(t.length===0)return{result:"UNKNOWN",message:"No SQL migration files found"};let n=[],i=[];for(let a of t){let c;try{c=await e.readFile(a)}catch{continue}let l=/CREATE\s+POLICY\s+"?(\w+)"?\s+ON\s+(?:public\.)?(\w+)\s+(?:AS\s+\w+\s+)?FOR\s+(INSERT|UPDATE|ALL)\b([\s\S]*?)(?=CREATE\s+POLICY|$)/gi,p;for(;(p=l.exec(c))!==null;){let f=p[1],S=p[2],A=p[3].toUpperCase(),P=p[4];(A==="INSERT"||A==="UPDATE"||A==="ALL")&&(/WITH\s+CHECK\s*\(\s*true\s*\)/i.test(P)?n.push(`${a}: policy "${f}" on ${S} \u2014 WITH CHECK (true) is permissive`):/WITH\s+CHECK/i.test(P)||i.push(`${a}: policy "${f}" on ${S} (${A}) \u2014 missing WITH CHECK`))}}let r=[...n,...i];if(r.length>0)return{result:"FAIL",message:`${r.length} RLS policy issue${r.length>1?"s":""} found`,evidence:r.slice(0,5)};let s=t.some(a=>{try{let c=e.files.includes(a)?a:"";return/CREATE\s+POLICY/i.test(c)}catch{return!1}});return(await e.grepFiles(/CREATE\s+POLICY/i)).length===0?{result:"UNKNOWN",message:"No RLS policies found in migrations"}:{result:"PASS",message:"All INSERT/UPDATE RLS policies have proper WITH CHECK clauses"}}};var ve={id:"AUTH-04",name:"Server-side auth on protected routes",module:"auth",layer:"L2",priority:"P0",description:"Every protected API route/server action must verify auth server-side. 
Middleware-only auth is bypassable (CVE-2025-29927).",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>(s.includes("route.")||s.includes("actions."))&&w(s));if(t.length===0)return{result:"UNKNOWN",message:"No API route handlers found"};let n=/getUser|getSession|auth\(\)|verifyToken|requireAuth|requireAdmin|checkPermission|requireRole|currentUser|validateSession/i,i=/webhook|health|status|public|og|sitemap|robots|favicon|_next/i,r=[];for(let s of t){if(i.test(s))continue;let o;try{o=await e.readFile(s)}catch{continue}n.test(o)||r.push(s)}return r.length>0?{result:"FAIL",message:`${r.length} API route${r.length>1?"s":""} without server-side auth check`,evidence:r.slice(0,5).map(s=>`${s} \u2014 no auth verification detected`)}:{result:"PASS",message:`All ${t.length} API routes have server-side auth checks`}}};var Ce={id:"AUTH-05",name:"No secrets with NEXT_PUBLIC_ prefix",module:"auth",layer:"L2",priority:"P0",description:"NEXT_PUBLIC_ vars are bundled in the browser. Secrets must never use this prefix.",fixCost:100,fixSize:"M",async run(e){let t=[/NEXT_PUBLIC_.*SERVICE_ROLE/i,/NEXT_PUBLIC_.*SECRET/i,/NEXT_PUBLIC_.*DATABASE_URL/i,/NEXT_PUBLIC_.*DB_URL/i,/NEXT_PUBLIC_.*PRIVATE/i,/NEXT_PUBLIC_.*PASSWORD/i,/NEXT_PUBLIC_.*JWT_SECRET/i,/VITE_.*SERVICE_ROLE/i,/VITE_.*SECRET_KEY/i,/VITE_.*DATABASE_URL/i,/EXPO_PUBLIC_.*SERVICE_ROLE/i,/EXPO_PUBLIC_.*SECRET/i],n=e.files.filter(E),i=[];for(let r of n){let s;try{s=await e.readFile(r)}catch{continue}let o=s.split(`
5
+ `);for(let a=0;a<o.length;a++){let c=o[a].trim();if(!(c.startsWith("#")||!c)){for(let l of t)if(l.test(c)){i.push({file:r,line:a+1,content:c.split("=")[0]});break}}}}return i.length>0?{result:"FAIL",message:`Secret keys exposed via public prefix (${i.length} found)`,evidence:i.map(r=>`${r.file}:${r.line} \u2192 ${r.content}`)}:n.length===0?{result:"UNKNOWN",message:"No .env files found to check"}:{result:"PASS",message:"No secrets found with NEXT_PUBLIC_/VITE_/EXPO_PUBLIC_ prefix"}}};var _e={id:"AUTH-06",name:"Protected routes redirect unauthenticated users",module:"auth",layer:"L2",priority:"P1",description:"Middleware or route guard must redirect unauthenticated users to /login for protected pages.",fixCost:100,fixSize:"M",async run(e){let t=e.files.find(i=>/^middleware\.(ts|js)$/.test(i));if(!t)return(await e.grepFiles(/redirect.*login|redirect.*auth|redirect.*sign/i)).length>0?{result:"PASS",message:"Route guards with login redirect detected"}:{result:"FAIL",message:"No middleware.ts and no route guards found \u2014 unauthenticated users can access protected pages",evidence:["Missing: middleware.ts or per-page auth redirect"]};let n;try{n=await e.readFile(t)}catch{return{result:"UNKNOWN",message:"Could not read middleware file"}}return/redirect.*login|redirect.*auth|NextResponse.*redirect/i.test(n)?{result:"PASS",message:"Middleware redirects unauthenticated users to login"}:{result:"FAIL",message:"Middleware exists but no login redirect detected",evidence:[`${t} \u2014 no redirect to /login or /auth`]}}};var ke={id:"AUTH-07",name:"Session tokens in httpOnly cookies",module:"auth",layer:"L2",priority:"P2",description:"Tokens in localStorage are accessible via XSS. 
httpOnly cookies prevent JavaScript access to session tokens.",fixCost:100,fixSize:"M",async run(e){let t=/localStorage.*(?:token|session|jwt|access_token)|sessionStorage.*(?:token|session|jwt)/i,n=/httpOnly|cookie.*session|supabase.*ssr|@supabase\/ssr/i,i=await e.grepFiles(t),r=await e.grepFiles(n),s=i.filter(o=>!o.content.trimStart().startsWith("//"));return s.length>0&&r.length===0?{result:"FAIL",message:`Session tokens stored in localStorage (${s.length} location${s.length>1?"s":""})`,evidence:s.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}:r.length>0?{result:"PASS",message:"httpOnly cookies or @supabase/ssr detected for session management"}:{result:"UNKNOWN",message:"No explicit token storage pattern detected"}}};var Ie={id:"AUTH-08",name:"Password hashing (if custom auth)",module:"auth",layer:"L2",priority:"P0",description:"Passwords must be hashed with bcrypt/argon2/scrypt. N/A if using Supabase Auth (handles hashing internally).",fixCost:100,fixSize:"M",async run(e){let t=/supabase|@supabase|createBrowserClient|createServerClient/i;if((await e.grepFiles(t)).length>0)return{result:"N/A",message:"Supabase Auth handles password hashing internally (bcrypt/Argon2)"};let i=/bcrypt|argon2|scrypt|pbkdf2/i,r=/password.*=.*req\.body|password.*=.*body\.|plaintext|md5.*password|sha1.*password/i,s=await e.grepFiles(i),o=await e.grepFiles(r);return o.length>0&&s.length===0?{result:"FAIL",message:"Password handling without secure hashing detected",evidence:o.slice(0,3).map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:s.length>0?{result:"PASS",message:"Secure password hashing detected (bcrypt/argon2/scrypt)"}:{result:"UNKNOWN",message:"No custom auth password handling detected"}}};var Pe={id:"AUTH-09",name:"Rate limiting on auth endpoints",module:"auth",layer:"L2",priority:"P1",description:"Login/register endpoints without rate limiting are vulnerable to brute force attacks.",fixCost:100,fixSize:"M",async run(e){let 
t=e.files.filter(o=>/auth|login|register|sign/i.test(o)),n=/rateLimit|rate.limit|throttle|limiter|too.many.requests|429|upstash/i;return t.length===0?{result:"UNKNOWN",message:"No auth endpoint files found"}:(await e.grepFiles(n,t)).length>0?{result:"PASS",message:"Rate limiting detected on auth endpoints"}:(await e.grepFiles(n)).length>0?{result:"PASS",message:"Rate limiting detected in codebase (verify it covers auth endpoints)"}:(await e.grepFiles(/supabase.*auth|@supabase/i)).length>0?{result:"PASS",message:"Supabase Auth has built-in rate limiting for auth endpoints"}:{result:"FAIL",message:"No rate limiting found on auth endpoints",evidence:["Missing: rate limiting middleware on login/register routes"]}}};var we={id:"AUTH-10",name:"Profile sync trigger with safe search_path",module:"auth",layer:"L2",priority:"P1",description:"handle_new_user() trigger must use SECURITY DEFINER with restricted search_path to prevent privilege escalation.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b);if(t.length===0)return{result:"UNKNOWN",message:"No SQL migration files found"};let n=/handle_new_user|on_auth_user_created|AFTER\s+INSERT\s+ON\s+auth\.users/i,i=/SECURITY\s+DEFINER/i,r=/search_path|SET\s+search_path/i;if((await e.grepFiles(n)).length===0)return{result:"FAIL",message:"No profile sync trigger found (handle_new_user or equivalent)",evidence:["Missing trigger: new auth.users rows won't sync to profiles table"]};for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}if(n.test(a)){let c=i.test(a),l=r.test(a);return c&&!l?{result:"FAIL",message:"Profile trigger uses SECURITY DEFINER without restricted search_path",evidence:[`${o} \u2014 SECURITY DEFINER without SET search_path = '' (privilege escalation risk)`]}:c&&l?{result:"PASS",message:"Profile sync trigger found with SECURITY DEFINER and restricted search_path"}:{result:"PASS",message:"Profile sync trigger found"}}}return{result:"PASS",message:"Profile sync trigger detected"}}};var 
Le={id:"AUTH-11",name:"Client/server auth separation",module:"auth",layer:"L2",priority:"P0",description:"Separate Supabase clients for browser and server. One shared client leaks service_role to browser or uses anon key on server.",fixCost:100,fixSize:"M",async run(e){let t=/createBrowserClient|createClient.*browser/i,n=/createServerClient|createClient.*server/i,i=/createClient\s*\(/,r=await e.grepFiles(t),s=await e.grepFiles(n),o=await e.grepFiles(i);if(r.length>0&&s.length>0)return{result:"PASS",message:"Separate browser and server Supabase clients detected"};if(r.length>0||s.length>0)return{result:"PASS",message:"Dedicated Supabase client pattern detected (@supabase/ssr)"};if(o.length>0){let a=o.filter(c=>/supabase|createClient/.test(c.content));if(a.length>0)return{result:"FAIL",message:"Single generic createClient() used \u2014 no client/server separation",evidence:a.slice(0,3).map(c=>`${c.file}:${c.line} \u2192 ${c.content.substring(0,120)}`)}}return{result:"UNKNOWN",message:"No Supabase client initialization found"}}};var Ne={id:"AUTH-12",name:"Auth environment variables present",module:"auth",layer:"L2",priority:"P2",description:"Missing auth env vars = hardcoded keys or broken auth in production.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(E);if(t.length===0)return{result:"UNKNOWN",message:"No .env files found"};let n=["SUPABASE_URL","SUPABASE_ANON_KEY"],i=[];for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}for(let c of n)a.includes(c)&&!i.includes(c)&&i.push(c)}if((await e.grepFiles(/supabase/i)).length===0)return{result:"UNKNOWN",message:"No Supabase references found"};let s=n.filter(o=>!i.includes(o));return s.length>0?{result:"FAIL",message:`Missing auth env vars: ${s.join(", ")}`,evidence:s.map(o=>`${o} not found in any .env file`)}:{result:"PASS",message:"Auth environment variables configured"}}};var Re={id:"AUTH-13",name:"getUser() not getSession() for server-side 
auth",module:"auth",layer:"L2",priority:"P0",description:"getSession() reads JWT without server verification. Use getUser() for auth decisions.",fixCost:100,fixSize:"M",async run(e){let t=/\.auth\.getSession\s*\(/,n=/\.auth\.getUser\s*\(/,i=await e.grepFiles(t),r=await e.grepFiles(n);if(i.length===0&&r.length===0)return{result:"UNKNOWN",message:"No getSession/getUser calls found \u2014 Supabase Auth may not be used"};let s=i.filter(l=>w(l.file)),o=r.filter(l=>w(l.file));if(s.length>0&&o.length===0)return{result:"FAIL",message:`Server-side code uses getSession() without getUser() (${s.length} location${s.length>1?"s":""})`,evidence:s.map(l=>`${l.file}:${l.line} \u2192 ${l.content.substring(0,120)}`)};if(s.length>0&&o.length>0)return{result:"FAIL",message:"Server code uses both getSession() and getUser() \u2014 getSession() in server context is insecure",evidence:s.map(l=>`${l.file}:${l.line} \u2192 ${l.content.substring(0,120)}`)};let a=i.filter(l=>!w(l.file)&&!/supabase\/functions\//i.test(l.file)),c=r.filter(l=>!w(l.file)&&!/supabase\/functions\//i.test(l.file));return a.length>0&&c.length===0&&o.length===0?{result:"FAIL",message:`Client code uses getSession() (${a.length} location${a.length>1?"s":""}) but getUser() never called \u2014 auth relies on unverified JWT`,evidence:a.slice(0,3).map(l=>`${l.file}:${l.line} \u2192 ${l.content.substring(0,120)}`)}:o.length>0||c.length>0?{result:"PASS",message:"Auth uses getUser() for verification"}:{result:"PASS",message:"getUser() is used for auth verification"}}};var Ee={id:"AUTH-14",name:"No eval() or dangerouslySetInnerHTML with user data",module:"auth",layer:"L2",priority:"P0",description:"eval() and dangerouslySetInnerHTML enable XSS attacks \u2014 session theft and account takeover.",fixCost:100,fixSize:"M",async run(e){let t=/\beval\s*\(/,n=/dangerouslySetInnerHTML/,i=await e.grepFiles(t),r=await 
e.grepFiles(n),s=i.filter(c=>!(c.content.trimStart().startsWith("//")||c.content.trimStart().startsWith("*")||c.file.includes("node_modules"))),o=r.filter(c=>!(c.content.trimStart().startsWith("//")||c.file.includes("node_modules"))),a=[...s,...o];return a.length>0?{result:"FAIL",message:`Unsafe code patterns found (${s.length} eval, ${o.length} dangerouslySetInnerHTML)`,evidence:a.slice(0,5).map(c=>`${c.file}:${c.line} \u2192 ${c.content.substring(0,120)}`)}:{result:"PASS",message:"No eval() or dangerouslySetInnerHTML found"}}};var Fe={id:"AUTH-15",name:"CORS configuration",module:"auth",layer:"L2",priority:"P2",description:"Access-Control-Allow-Origin: * with credentials allows any website to read auth cookies and data.",fixCost:100,fixSize:"M",async run(e){let t=/Access-Control-Allow-Origin.*\*|cors.*origin.*\*|origin:\s*['"]?\*/i,n=/credentials.*true|allowCredentials|Access-Control-Allow-Credentials/i,i=await e.grepFiles(t),r=await e.grepFiles(n);return i.length>0&&r.length>0?{result:"FAIL",message:"CORS wildcard (*) used with credentials \u2014 any website can read auth data",evidence:i.slice(0,3).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}:i.length>0?{result:"FAIL",message:"CORS wildcard (*) detected \u2014 consider restricting to specific origins",evidence:i.slice(0,3).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}:{result:"PASS",message:"No dangerous CORS wildcard configuration detected"}}};var Te={id:"AUTH-16",name:"Token/session expiration configured",module:"auth",layer:"L2",priority:"P2",description:"Infinite sessions mean a stolen token works forever. 
Configure JWT expiration and session timeouts.",fixCost:100,fixSize:"M",async run(e){let t=/expiresIn|maxAge|session.*expir|jwt.*expir|SESSION_EXPIRY|JWT_EXPIRY|token.*expir/i;return(await e.grepFiles(t)).length>0?{result:"PASS",message:"Token/session expiration configured"}:(await e.grepFiles(/supabase/i)).length>0?{result:"PASS",message:"Supabase Auth has default JWT expiry (3600s)"}:{result:"FAIL",message:"No token/session expiration configuration found",evidence:["Missing: expiresIn, maxAge, or session expiry configuration"]}}};var xe={id:"AUTH-17",name:"Storage bucket RLS",module:"auth",layer:"L2",priority:"P0",description:"Supabase storage buckets must have RLS on storage.objects. Without it, all files are publicly accessible.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b);if(t.length===0)return{result:"UNKNOWN",message:"No SQL migration files found"};let n=/storage\.buckets|create.*bucket|INSERT.*storage\.buckets/i,i=/storage\.objects.*ENABLE.*ROW.*LEVEL|RLS.*storage\.objects|POLICY.*storage\.objects/i,r=await e.grepFiles(n,t),s=await e.grepFiles(i,t);return r.length===0?(await e.grepFiles(/supabase.*storage|storage\.from\(/i)).length===0?{result:"N/A",message:"No Supabase storage usage detected"}:{result:"UNKNOWN",message:"Storage used in code but no bucket creation in migrations"}:s.length>0?{result:"PASS",message:"Storage bucket RLS policies detected"}:{result:"FAIL",message:"Storage buckets created without RLS on storage.objects \u2014 files may be publicly accessible",evidence:r.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}}};var De={id:"AUTH-18",name:"RBAC in app_metadata not user_metadata",module:"auth",layer:"L2",priority:"P0",description:"Roles in user_metadata are user-editable. 
Use app_metadata for RBAC (server-only).",fixCost:100,fixSize:"M",async run(e){let t=/user_meta_?data.*role|raw_user_meta_?data.*role|user\.user_metadata.*role/i,n=/app_meta_?data.*role|user\.app_metadata.*role/i,i=/updateUser\s*\(\s*\{[\s\S]*?data\s*:\s*\{[\s\S]*?role/,r=await e.grepFiles(t),s=await e.grepFiles(n),o=await e.grepFiles(i);return r.length>0?{result:"FAIL",message:`Role stored in user_metadata (user-editable) \u2014 ${r.length} location${r.length>1?"s":""}`,evidence:r.map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:o.length>0?{result:"FAIL",message:"Role set via updateUser() data field (user-editable user_metadata)",evidence:o.map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:s.length>0?{result:"PASS",message:"Role stored in app_metadata (server-only, not user-editable)"}:{result:"UNKNOWN",message:"No role/RBAC references found in codebase"}}};var Ue={id:"AUTH-19",name:"Multi-tenancy data isolation",module:"auth",layer:"L2",priority:"P1",description:"RLS policies must include tenant_id or organization_id to prevent cross-tenant data access.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b),n=/tenant_id|organization_id|org_id|team_id|workspace_id/i,i=await e.grepFiles(n,t.length>0?t:void 0),r=await e.grepFiles(n);return i.length===0&&r.length===0?{result:"N/A",message:"No multi-tenancy pattern detected (single-tenant app)"}:(await e.grepFiles(/POLICY[\s\S]*?tenant_id|POLICY[\s\S]*?org_id|POLICY[\s\S]*?organization_id/i,t)).length>0?{result:"PASS",message:"RLS policies include tenant isolation"}:{result:"FAIL",message:"Multi-tenant schema detected but RLS policies don't include tenant_id filtering",evidence:["Tenant columns exist but RLS policies may allow cross-tenant data access"]}}};var Me={id:"AUTH-20",name:"OAuth domain restriction",module:"auth",layer:"L2",priority:"P1",description:"Social login (Google, GitHub) should restrict allowed email domains to prevent unauthorized 
access.",fixCost:100,fixSize:"M",async run(e){let t=/signInWithOAuth|signIn.*provider|google|github.*login|oauth/i,n=/allowedDomain|domain.*restrict|email.*domain|hd=|hosted_domain/i,i=await e.grepFiles(t);return i.length===0?{result:"N/A",message:"No OAuth/social login detected"}:(await e.grepFiles(n)).length>0?{result:"PASS",message:"OAuth domain restriction detected"}:{result:"FAIL",message:"OAuth login enabled without domain restriction \u2014 any Google/GitHub account can sign in",evidence:i.slice(0,3).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}}};var Oe={id:"AUTH-21",name:"Force dynamic on auth routes",module:"auth",layer:"L2",priority:"P1",description:"Auth routes must use force-dynamic to prevent ISR cache serving wrong user data.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>/auth|login|register|signup|sign-in|sign-up/i.test(s)&&(s.includes("page.")||s.includes("route."))&&(s.endsWith(".ts")||s.endsWith(".tsx")||s.endsWith(".js")||s.endsWith(".jsx")));if(t.length===0)return{result:"UNKNOWN",message:"No auth route/page files found"};let n=/export\s+const\s+dynamic\s*=\s*['"]force-dynamic['"]/,i=/unstable_noStore|noStore|revalidate\s*=\s*0/,r=[];for(let s of t){let o;try{o=await e.readFile(s)}catch{continue}!n.test(o)&&!i.test(o)&&s.includes("page.")&&r.push(s)}return r.length>0?{result:"FAIL",message:`${r.length} auth page${r.length>1?"s":""} without force-dynamic \u2014 may serve cached auth state`,evidence:r.slice(0,5).map(s=>`${s} \u2014 missing export const dynamic = 'force-dynamic'`)}:{result:"PASS",message:"Auth routes use force-dynamic or equivalent"}}};var We={id:"AUTH-22",name:"CSRF protection on Route Handlers",module:"auth",layer:"L2",priority:"P2",description:"Server Actions have built-in CSRF protection (Next.js 14+). 
Route Handlers do NOT \u2014 they need explicit CSRF tokens or SameSite cookies.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(o=>/route\.(ts|js)$/i.test(o));if(t.length===0)return{result:"UNKNOWN",message:"No Route Handler files found"};let n=[];for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}/export\s+(?:async\s+)?function\s+(?:POST|PUT|PATCH|DELETE)/i.test(a)&&n.push(o)}if(n.length===0)return{result:"PASS",message:"No mutation Route Handlers found (only GET)"};let i=/csrf|csurf|csrfToken|SameSite|x-csrf|anti.?forgery/i;return(await e.grepFiles(i)).length>0?{result:"PASS",message:"CSRF protection detected"}:(await e.grepFiles(/["']use server["']/i)).length>0&&n.length<=2?{result:"PASS",message:"Mutations use Server Actions (built-in CSRF protection)"}:{result:"FAIL",message:`${n.length} mutation Route Handler${n.length>1?"s":""} without CSRF protection`,evidence:n.slice(0,3).map(o=>`${o} \u2014 POST/PUT/PATCH/DELETE without CSRF token`)}}};var je={id:"AUTH-23",name:"Email verification required",module:"auth",layer:"L2",priority:"P2",description:"Without email verification, anyone can sign up with any email \u2014 spam accounts and impersonation.",fixCost:100,fixSize:"M",async run(e){let t=/email_confirmed_at|emailConfirmed|verifyEmail|confirmEmail|email.*verif|verification.*email/i;return(await e.grepFiles(t)).length>0?{result:"PASS",message:"Email verification check detected"}:(await e.grepFiles(/supabase/i)).length>0?{result:"PASS",message:"Supabase Auth has configurable email verification (check dashboard settings)"}:{result:"FAIL",message:"No email verification enforcement found",evidence:["Missing: email_confirmed_at check or equivalent verification flow"]}}};var Be={id:"AUTH-24",name:"Account enumeration prevention",module:"auth",layer:"L2",priority:"P2",description:"Login/register error messages must not reveal whether an email exists. 
Consistent messages prevent user enumeration.",fixCost:100,fixSize:"M",async run(e){let t=/email.*not.*found|user.*not.*found|no.*account.*with|email.*already.*registered|email.*already.*exists|account.*already.*exists/i,i=(await e.grepFiles(t)).filter(r=>!r.content.trimStart().startsWith("//")&&!r.content.trimStart().startsWith("*"));return i.length>0?{result:"FAIL",message:`Account enumeration possible \u2014 error messages reveal email existence (${i.length} location${i.length>1?"s":""})`,evidence:i.slice(0,3).map(r=>`${r.file}:${r.line} \u2192 ${r.content.substring(0,120)}`)}:{result:"PASS",message:"No account enumeration patterns detected in error messages"}}};var He={id:"AUTH-25",name:"Refresh token reuse detection",module:"auth",layer:"L2",priority:"P2",description:"Token rotation prevents stolen refresh tokens from being reused. Supabase supports this via config.",fixCost:100,fixSize:"M",async run(e){let t=/token.*rotation|refresh.*token.*reuse|reuse.*detection|GOTRUE_SECURITY_REFRESH_TOKEN_REUSE_INTERVAL/i;return(await e.grepFiles(t)).length>0?{result:"PASS",message:"Refresh token rotation/reuse detection configured"}:(await e.grepFiles(/supabase/i)).length>0?{result:"PASS",message:"Supabase Auth has built-in refresh token rotation (check dashboard config)"}:{result:"UNKNOWN",message:"No refresh token configuration detected"}}};var ze={id:"AUTH-26",name:"Sign-out revokes server session",module:"auth",layer:"L2",priority:"P2",description:"Sign-out must revoke the server session (scope: 'global'), not just clear client cookies.",fixCost:100,fixSize:"M",async run(e){let t=/signOut|sign_out|logout|log_out/i,n=/scope.*global|global.*scope/i,i=await e.grepFiles(t);return i.length===0?{result:"UNKNOWN",message:"No sign-out implementation found"}:(await e.grepFiles(n)).length>0?{result:"PASS",message:"Sign-out uses global scope (revokes all sessions)"}:{result:"FAIL",message:"Sign-out found but no global scope \u2014 sessions may persist on other 
devices",evidence:i.slice(0,2).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}}};var qe={id:"AUTH-27",name:"Email link poisoning mitigation",module:"auth",layer:"L2",priority:"P2",description:"SITE_URL and REDIRECT_ALLOW_LIST must be configured to prevent email link redirect attacks.",fixCost:100,fixSize:"M",async run(e){let t=/SITE_URL|GOTRUE_SITE_URL/i,n=/REDIRECT_ALLOW_LIST|ADDITIONAL_REDIRECT_URLS|redirect.*allow/i,i=await e.grepFiles(t),r=await e.grepFiles(n);return i.length>0&&r.length>0?{result:"PASS",message:"SITE_URL and REDIRECT_ALLOW_LIST configured"}:i.length>0?{result:"PASS",message:"SITE_URL configured (check REDIRECT_ALLOW_LIST in Supabase dashboard)"}:(await e.grepFiles(/supabase/i)).length===0?{result:"N/A",message:"No Supabase usage detected"}:{result:"FAIL",message:"No SITE_URL or REDIRECT_ALLOW_LIST found \u2014 email magic links may redirect to attacker domains",evidence:["Missing: SITE_URL and REDIRECT_ALLOW_LIST in env configuration"]}}};var Ke={id:"AUTH-28",name:"Realtime presence authorization",module:"auth",layer:"L2",priority:"P2",description:"Supabase Realtime Presence channels must have authorization. 
Without it, any user can see who's online.",fixCost:100,fixSize:"M",async run(e){let t=/realtime|presence|channel.*subscribe|supabase.*channel/i,n=await e.grepFiles(t);if(n.length===0)return{result:"N/A",message:"No Supabase Realtime/Presence usage detected"};let i=/authorized|RLS.*realtime|realtime.*auth|channel.*auth/i;return(await e.grepFiles(i)).length>0?{result:"PASS",message:"Realtime channel authorization detected"}:{result:"FAIL",message:"Realtime/Presence channels found without authorization",evidence:n.slice(0,3).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}}};var Ge={id:"BIL-01",name:"Stripe secret key not in client code",module:"billing",layer:"L2",priority:"P0",description:"sk_live_/sk_test_ in client code = full Stripe API access for any visitor",fixCost:100,fixSize:"M",async run(e){let t=/sk_live_[a-zA-Z0-9]{20,}|sk_test_[a-zA-Z0-9]{20,}/,n=/STRIPE_SECRET|NEXT_PUBLIC_.*STRIPE.*SECRET|VITE_.*STRIPE.*SECRET/i,r=(await e.grepFiles(t)).filter(l=>!E(l.file)&&!l.content.trimStart().startsWith("#")&&!l.content.trimStart().startsWith("//"));if(r.length>0)return{result:"FAIL",message:`Hardcoded Stripe secret key found in source code (${r.length} location${r.length>1?"s":""})`,evidence:r.map(l=>`${l.file}:${l.line} \u2192 sk_***_[REDACTED]`)};let s=await e.grepFiles(n),o=s.filter(l=>/NEXT_PUBLIC_|VITE_|EXPO_PUBLIC_/i.test(l.content)&&/STRIPE.*SECRET/i.test(l.content));if(o.length>0)return{result:"FAIL",message:"Stripe secret key exposed via NEXT_PUBLIC_/VITE_ prefix",evidence:o.map(l=>`${l.file}:${l.line} \u2192 ${l.content.split("=")[0]}`)};let a=s.filter(l=>v(l.file)&&/STRIPE_SECRET/i.test(l.content)&&!l.file.includes("/api/")&&!l.file.includes("route."));return a.length>0?{result:"FAIL",message:"Stripe secret key referenced in client-side file",evidence:a.map(l=>`${l.file}:${l.line} \u2192 ${l.content.substring(0,120)}`)}:s.filter(l=>/STRIPE_SECRET/i.test(l.content)).length>0?{result:"PASS",message:"Stripe secret key found only in 
server-side/env files"}:{result:"UNKNOWN",message:"No Stripe secret key references found \u2014 Stripe may not be used"}}};var Ve={id:"BIL-02",name:"Webhook signature verification",module:"billing",layer:"L2",priority:"P0",aliases:["ADM-15"],description:"Stripe webhook handler must verify signature via constructEvent(). Without it, anyone can send fake webhooks.",fixCost:100,fixSize:"M",async run(e){let t=/webhook/i,n=e.files.filter(c=>t.test(c));if(n.length===0)return{result:"UNKNOWN",message:"No webhook handler files found"};let i=/constructEvent|constructEventAsync|webhooks\.construct/,r=/stripe-signature|Stripe-Signature|STRIPE_WEBHOOK_SECRET|webhook.*secret/i,s=await e.grepFiles(i,n),o=await e.grepFiles(r,n);if(s.length>0){for(let c of n){let l;try{l=await e.readFile(c)}catch{continue}let p=/constructEvent|constructEventAsync|webhooks\.construct/.test(l),f=/JSON\.parse\s*\(.*body/i.test(l),S=/if\s*\(\s*(?:webhook_?[Ss]ecret|STRIPE_WEBHOOK_SECRET|secret)/i.test(l)||/if\s*\(\s*!?\s*webhook_?[Ss]ecret/i.test(l);if(p&&f&&S)return{result:"FAIL",message:"Webhook signature verification is conditional \u2014 JSON.parse fallback bypasses constructEvent() when secret is missing",evidence:[`${c} \u2014 constructEvent() inside conditional, JSON.parse(body) fallback allows unverified webhooks`]}}return{result:"PASS",message:"Webhook handler uses constructEvent() for signature verification"}}return o.length>0?{result:"PASS",message:"Webhook handler references signature verification"}:(await e.grepFiles(/stripe|Stripe/,n)).length>0?{result:"FAIL",message:"Stripe webhook handler found WITHOUT signature verification",evidence:n.map(c=>`${c} \u2014 no constructEvent() or signature check`)}:{result:"UNKNOWN",message:"Webhook files found but no Stripe references \u2014 may not be Stripe webhooks"}}};var Ye={id:"BIL-03",name:"Raw body preservation in webhook",module:"billing",layer:"L2",priority:"P0",description:"Webhook signature verification requires raw request body. 
req.json() breaks it \u2014 use req.text() or bodyParser: false.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(a=>/webhook/i.test(a));if(t.length===0)return{result:"UNKNOWN",message:"No webhook handler files found"};let n=/request\.text\(\)|req\.text\(\)|bodyParser\s*:\s*false|getRawBody|raw\s*:\s*true|rawBody/,i=/request\.json\(\)|req\.body(?!\s*Parser)|JSON\.parse/,r=await e.grepFiles(n,t),s=await e.grepFiles(i,t);return r.length>0?{result:"PASS",message:"Webhook handler preserves raw body for signature verification"}:(await e.grepFiles(/stripe|constructEvent/i,t)).length===0?{result:"UNKNOWN",message:"Webhook files found but no Stripe references"}:s.length>0?{result:"FAIL",message:"Webhook handler uses req.json()/req.body instead of raw body \u2014 signature verification will fail",evidence:s.slice(0,3).map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:{result:"FAIL",message:"Stripe webhook handler found but no raw body preservation detected",evidence:t.map(a=>`${a} \u2014 missing request.text() or bodyParser: false`)}}};var Xe={id:"BIL-04",name:"Idempotent webhook processing",module:"billing",layer:"L2",priority:"P1",description:"Stripe retries webhooks by design. 
Without idempotency checks, duplicate credits and double subscriptions occur.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>/webhook/i.test(s));if(t.length===0)return{result:"UNKNOWN",message:"No webhook handler files found"};let n=/event\.id|event_id|idempoten|UNIQUE.*event|duplicate.*check|processed.*events|already.*processed/i,i=await e.grepFiles(n,t);return(await e.grepFiles(/stripe|constructEvent/i,t)).length===0?{result:"UNKNOWN",message:"No Stripe webhook handler found"}:i.length>0?{result:"PASS",message:"Webhook idempotency check detected"}:{result:"FAIL",message:"Stripe webhook handler has no idempotency check \u2014 duplicate processing possible",evidence:["No event.id tracking or duplicate check in webhook handler"]}}};var Qe={id:"BIL-05",name:"Subscription state machine",module:"billing",layer:"L2",priority:"P1",description:"Stripe has 8 subscription states. Handling only active/canceled causes access issues for past_due, trialing, incomplete, unpaid.",fixCost:100,fixSize:"M",async run(e){let t=/past_due|trialing|incomplete|unpaid/i,n=/active|canceled/i,i=await e.grepFiles(t),r=await e.grepFiles(n);return i.length>=2?{result:"PASS",message:"Multiple subscription states handled beyond active/canceled"}:r.length>0&&i.length===0?{result:"FAIL",message:"Only active/canceled states handled \u2014 missing past_due, trialing, incomplete, unpaid",evidence:["Subscription state machine is incomplete \u2014 users may lose access incorrectly"]}:(await e.grepFiles(/subscription|stripe/i)).length===0?{result:"UNKNOWN",message:"No subscription handling found"}:{result:"FAIL",message:"Subscription code found but no explicit state handling",evidence:["Missing: past_due, trialing, incomplete, unpaid state handling"]}}};var Je={id:"BIL-06",name:"Entitlement/plan limit checking",module:"billing",layer:"L2",priority:"P1",description:"Stripe doesn't track usage limits. 
Without app-side entitlement checks, free users access paid features.",fixCost:100,fixSize:"M",async run(e){let t=/checkPlan|checkEntitle|planLimit|featureGate|subscription.*check|plan.*limit|canAccess|hasFeature|isSubscribed|entitlement/i;return(await e.grepFiles(t)).length>0?{result:"PASS",message:"Entitlement/plan limit checking detected"}:(await e.grepFiles(/subscription|stripe.*plan|pricing/i)).length===0?{result:"UNKNOWN",message:"No subscription/pricing code found"}:{result:"FAIL",message:"Subscription code exists but no entitlement/plan limit checks found",evidence:["Missing: checkPlanLimit, checkEntitlement, featureGate, or equivalent"]}}};var Ze={id:"BIL-07",name:"Customer \u2194 User sync",module:"billing",layer:"L2",priority:"P1",description:"Every user must map to exactly one Stripe customer. Missing sync = orphaned customers, broken subscription lookups.",fixCost:100,fixSize:"M",async run(e){let t=/stripe_customer_id|stripeCustomerId|customer_id.*stripe/i,n=/customers\.create|createCustomer|stripe.*customer/i,i=e.files.filter(b),r=await e.grepFiles(t,i.length>0?i:void 0),s=await e.grepFiles(n);return r.length>0?{result:"PASS",message:"stripe_customer_id found in database schema \u2014 user-customer sync exists"}:s.length>0?{result:"PASS",message:"Stripe customer creation found in code"}:(await e.grepFiles(/stripe/i)).length>0?{result:"FAIL",message:"Stripe is used but no stripe_customer_id in schema and no customer creation logic",evidence:["Missing: stripe_customer_id column in users/profiles table"]}:{result:"UNKNOWN",message:"No Stripe references found"}}};var et={id:"BIL-08",name:"Webhook returns 200 for unknown events",module:"billing",layer:"L2",priority:"P1",description:"Returning 400/500 for unhandled events triggers Stripe retry loops and eventual endpoint deactivation.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(o=>/webhook/i.test(o));if(t.length===0)return{result:"UNKNOWN",message:"No webhook handler files found"};let 
n=/else\s*\{[\s\S]*?(?:return.*(?:4\d\d|5\d\d)|throw|NextResponse.*(?:4\d\d|5\d\d))/i,i=/default\s*:[\s\S]*?(?:200|ok|return\s+new\s+Response)/i,r=await e.grepFiles(n,t),s=await e.grepFiles(i,t);return r.length>0?{result:"FAIL",message:"Webhook handler returns error for unknown events \u2014 will trigger Stripe retry loop",evidence:r.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}:s.length>0?{result:"PASS",message:"Webhook handler returns 200 for unhandled events"}:{result:"UNKNOWN",message:"Could not determine webhook default response behavior"}}};var tt={id:"BIL-09",name:"No client-side billing state as source of truth",module:"billing",layer:"L2",priority:"P1",description:"Subscription state in localStorage/React state can be manipulated. Server must be source of truth.",fixCost:100,fixSize:"M",async run(e){let t=[],n=/localStorage.*(?:subscription|plan|billing)|sessionStorage.*(?:subscription|plan)|useState.*(?:subscription|isPro|isPaid|plan)/i,r=(await e.grepFiles(n)).filter(p=>v(p.file));for(let p of r)t.push(`${p.file}:${p.line} \u2192 ${p.content.substring(0,120)}`);let s=/\.from\s*\(\s*['"](?:subscriptions|plans|credits|billing)['"]\s*\)\s*\.\s*select/i,a=(await e.grepFiles(s)).filter(p=>v(p.file));for(let p of a)t.push(`${p.file}:${p.line} \u2192 ${p.content.substring(0,120)}`);let c=/(?:useSubscription|useBilling|usePlan)\b/,l=e.files.filter(p=>c.test(p)||/hook/i.test(p));for(let p of l){let f;try{f=await e.readFile(p)}catch{continue}let S=/from\s*\(\s*['"](?:subscriptions|plans|credits)['"]\s*\)/.test(f),A=/isPro|isPaid|plan\s*===|plan\s*!==|plan\s*==|plan\s*!=/.test(f);S&&A&&t.push(`${p} \u2192 Client hook reads billing table and derives plan/isPro locally`)}return t.length>0?{result:"FAIL",message:`Client-side billing state as source of truth (${t.length} location${t.length>1?"s":""})`,evidence:t.slice(0,5)}:{result:"PASS",message:"No client-side billing state as source of truth detected"}}};var 
it={id:"BIL-10",name:"Reconciliation mechanism",module:"billing",layer:"L2",priority:"P2",description:"Even good webhook handling drifts 1-2x/month. A reconciliation job comparing Stripe vs DB prevents silent revenue loss.",fixCost:100,fixSize:"M",async run(e){let t=/reconcil|sync.*stripe|stripe.*sync|cron.*billing|billing.*cron|verify.*subscription|subscription.*verify/i,n=e.files.filter(s=>/reconcil/i.test(s)),i=await e.grepFiles(t);return n.length>0||i.length>0?{result:"PASS",message:"Reconciliation mechanism detected"}:(await e.grepFiles(/stripe/i)).length===0?{result:"UNKNOWN",message:"No Stripe references found"}:{result:"FAIL",message:"No billing reconciliation mechanism found",evidence:["Missing: reconciliation script/job to compare Stripe state vs DB state"]}}};var st={id:"BIL-11",name:"Cancellation handling",module:"billing",layer:"L2",priority:"P1",description:"Explicit cancel flow: server-side cancellation + webhook processing + DB update + access revocation.",fixCost:100,fixSize:"M",async run(e){let t=/cancel.*subscription|subscription.*cancel|customer\.subscription\.deleted|cancelAt|cancel_at_period_end/i,n=await e.grepFiles(t);return n.length>=2?{result:"PASS",message:"Cancellation handling detected in multiple locations"}:n.length===1?{result:"PASS",message:"Cancellation handling detected"}:(await e.grepFiles(/subscription|stripe/i)).length===0?{result:"UNKNOWN",message:"No subscription code found"}:{result:"FAIL",message:"No cancellation handling found \u2014 users may retain access after canceling or be charged after canceling",evidence:["Missing: cancel subscription flow, subscription.deleted webhook handler"]}}};var nt={id:"BIL-12",name:"Stripe env vars configured",module:"billing",layer:"L2",priority:"P2",description:"Missing Stripe env vars = billing won't work in production or keys get hardcoded.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(E);if(t.length===0)return{result:"UNKNOWN",message:"No .env files found"};let 
n=["STRIPE_SECRET_KEY","STRIPE_WEBHOOK_SECRET","STRIPE_PUBLISHABLE_KEY"],i=[],r=[];for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}for(let c of n)a.includes(c)&&!i.includes(c)&&i.push(c)}for(let o of n)i.includes(o)||r.push(o);return(await e.grepFiles(/stripe/i)).length===0?{result:"UNKNOWN",message:"No Stripe references found"}:r.length>0?{result:"FAIL",message:`Missing Stripe env vars: ${r.join(", ")}`,evidence:r.map(o=>`${o} not found in any .env file`)}:{result:"PASS",message:"All required Stripe env vars configured"}}};var rt={id:"BIL-13",name:"Error handling in payment flows",module:"billing",layer:"L2",priority:"P2",description:"Missing error handling around Stripe API calls = white screen on payment failure, no diagnostics.",fixCost:100,fixSize:"M",async run(e){let t=/stripe\.\w+\.\w+\(|checkout\.sessions\.create|subscriptions\.create|customers\.create/i,n=/try\s*\{|\.catch\s*\(|catch\s*\(/,i=e.files.filter(o=>/api|route|action|server/i.test(o)),r=await e.grepFiles(t,i.length>0?i:void 0);return r.length===0?{result:"UNKNOWN",message:"No Stripe API calls found"}:(await e.grepFiles(n,i.length>0?i:void 0)).length>0?{result:"PASS",message:"Error handling found in payment flow files"}:{result:"FAIL",message:"Stripe API calls found without error handling",evidence:r.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}}};var ot={id:"BIL-14",name:"Checkout flow is server-initiated",module:"billing",layer:"L2",priority:"P0",description:"Checkout session must be created on the server. 
Client-side creation requires secret key in browser.",fixCost:100,fixSize:"M",async run(e){let t=/checkout\.sessions\.create|createCheckoutSession/,n=await e.grepFiles(t);if(n.length===0)return{result:"UNKNOWN",message:"No Checkout session creation found"};let i=n.filter(s=>v(s.file)&&!w(s.file));for(let s of i){let o;try{o=await e.readFile(s.file)}catch{continue}if(/["']use client["']/.test(o))return{result:"FAIL",message:"Checkout session created in client component \u2014 secret key required in browser",evidence:[`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`]}}return n.filter(s=>w(s.file)).length>0?{result:"PASS",message:"Checkout session created server-side"}:{result:"PASS",message:"Checkout session creation found (not in client component)"}}};var at={id:"BIL-15",name:"Stripe Price ID tampering prevention",module:"billing",layer:"L2",priority:"P0",description:"Price ID from client request must be validated server-side against an allowlist. Client can send any price_id.",fixCost:100,fixSize:"M",async run(e){let t=/checkout\.sessions\.create|createCheckoutSession/;if((await e.grepFiles(t)).length===0)return{result:"UNKNOWN",message:"No Stripe Checkout session creation found"};let i=/req\.body.*price|req\.json.*price|request\.json.*price|body\.price|priceId|price_id/i,r=/allowedPrices|ALLOWED_PRICES|validPrices|PRICE_IDS|priceWhitelist|priceLookup|PLANS\[|plans\[|PRICES\[|prices\./i,s=await e.grepFiles(i),o=await e.grepFiles(r);return s.length>0&&o.length===0?{result:"FAIL",message:"Price ID accepted from client without server-side validation",evidence:s.slice(0,3).map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:o.length>0?{result:"PASS",message:"Price ID validation/allowlist detected"}:{result:"PASS",message:"Checkout session creation found with no client price input detected"}}};var ct={id:"BIL-16",name:"Never fulfill on success_url",module:"billing",layer:"L2",priority:"P0",description:"Fulfillment (DB writes, access grants) 
must happen via webhook, not on the success redirect page.",fixCost:100,fixSize:"M",async run(e){let t=[/success/i,/thank/i,/payment.*confirm/i],n=e.files.filter(s=>(s.includes("page.")||s.includes("index."))&&t.some(a=>a.test(s)));if(n.length===0)return{result:"UNKNOWN",message:"No success/thank-you pages found"};let i=/\.insert\(|\.update\(|\.upsert\(|createSubscription|grantAccess|activateUser|fulfillOrder|UPDATE.*SET|INSERT.*INTO/i,r=[];for(let s of n){let o;try{o=await e.readFile(s)}catch{continue}let a=o.split(`
6
+ `);for(let c=0;c<a.length;c++)i.test(a[c])&&r.push({file:s,line:c+1,content:a[c].trim()})}return r.length>0?{result:"FAIL",message:`Fulfillment logic found in success page (${r.length} location${r.length>1?"s":""}) \u2014 must use webhook instead`,evidence:r.map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}:{result:"PASS",message:"Success pages contain no fulfillment logic"}}};var lt={id:"BIL-17",name:"PCI raw card data safety",module:"billing",layer:"L2",priority:"P0",description:"Raw card numbers, CVV, expiration must never touch your server. Use Stripe Elements or Checkout.",fixCost:100,fixSize:"M",async run(e){let t=/card\s*\[\s*number\s*\]|cardNumber|card_number|cvv|cvc|expiry.*month|card.*expir/i,n=/<input[^>]*(?:card|cvv|cvc|expir)/i,i=/CardElement|PaymentElement|useStripe|useElements|@stripe\/react-stripe-js|stripe\.elements/i,r=await e.grepFiles(t),s=await e.grepFiles(n),o=[...r,...s].filter(c=>!(c.content.trimStart().startsWith("//")||c.content.trimStart().startsWith("*")||c.content.trimStart().startsWith("#")||c.file.includes("test")||c.file.includes("spec")||c.file.includes(".md"))),a=await e.grepFiles(i);return o.length>0&&a.length===0?{result:"FAIL",message:`Raw card data handling detected without Stripe Elements (${o.length} location${o.length>1?"s":""})`,evidence:o.slice(0,5).map(c=>`${c.file}:${c.line} \u2192 ${c.content.substring(0,120)}`)}:a.length>0?{result:"PASS",message:"Stripe Elements/Checkout detected \u2014 card data handled by Stripe"}:{result:"UNKNOWN",message:"No card data handling or Stripe Elements detected"}}};var ut={id:"BIL-18",name:"Refund/dispute handling",module:"billing",layer:"L2",priority:"P1",description:"Webhook must handle charge.refunded and charge.dispute.created to revoke access and update billing state.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(a=>/webhook/i.test(a));if(t.length===0)return{result:"UNKNOWN",message:"No webhook handler files found"};let 
n=/charge\.refunded|refund/i,i=/charge\.dispute|dispute/i,r=await e.grepFiles(n,t),s=await e.grepFiles(i,t);return r.length>0&&s.length>0?{result:"PASS",message:"Both refund and dispute handling detected in webhook"}:r.length>0||s.length>0?{result:"PASS",message:"Partial refund/dispute handling detected"}:(await e.grepFiles(/stripe/i,t)).length===0?{result:"UNKNOWN",message:"No Stripe webhook handler found"}:{result:"FAIL",message:"No refund or dispute handling in webhook \u2014 access may persist after refund",evidence:["Missing: charge.refunded and charge.dispute.created event handlers"]}}};
/* BIL-19: PASSes when apiVersion/api_version appears anywhere; FAILs when Stripe is constructed without it. */
var dt={id:"BIL-19",name:"Stripe API version pinning",module:"billing",layer:"L2",priority:"P2",description:"Pin Stripe API version to avoid breaking changes from automatic upgrades.",fixCost:100,fixSize:"M",async run(e){let t=/apiVersion|api_version/i;if((await e.grepFiles(t)).length>0)return{result:"PASS",message:"Stripe API version pinning detected"};let i=await e.grepFiles(/new\s+Stripe\s*\(|Stripe\s*\(/i);return i.length===0?{result:"UNKNOWN",message:"No Stripe initialization found"}:{result:"FAIL",message:"Stripe initialized without explicit API version \u2014 may break on Stripe upgrades",evidence:i.slice(0,2).map(r=>`${r.file}:${r.line} \u2192 ${r.content.substring(0,120)}`)}}};
/* BIL-20: requires an auth-session pattern (getUser/getSession/auth()/requireAuth) in the files that reference the billing portal. */
var pt={id:"BIL-20",name:"Portal session auth",module:"billing",layer:"L2",priority:"P1",description:"Billing portal session must use customer ID from authenticated user, not from client request.",fixCost:100,fixSize:"M",async run(e){let t=/billingPortal|billing_portal|customer_portal/i,n=await e.grepFiles(t);if(n.length===0)return{result:"UNKNOWN",message:"No billing portal usage found"};let i=/getUser|getSession|auth\(\)|requireAuth/i,r=[...new Set(n.map(o=>o.file))];return(await e.grepFiles(i,r)).length>0?{result:"PASS",message:"Billing portal session created with authenticated customer ID"}:{result:"FAIL",message:"Billing portal session created without verifying authenticated user",evidence:n.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}}};
/* BIL-21: counts which of 5 core Stripe subscription events are handled in webhook files; FAILs below 3. */
var ft={id:"BIL-21",name:"Webhook event coverage",module:"billing",layer:"L2",priority:"P1",description:"Webhook handler must process at minimum 3 core subscription events. Missing events = state drift.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>/webhook/i.test(s));if(t.length===0)return{result:"UNKNOWN",message:"No webhook handler files found"};let n=[{pattern:/checkout\.session\.completed/i,name:"checkout.session.completed"},{pattern:/customer\.subscription\.updated/i,name:"customer.subscription.updated"},{pattern:/customer\.subscription\.deleted/i,name:"customer.subscription.deleted"},{pattern:/invoice\.payment_succeeded/i,name:"invoice.payment_succeeded"},{pattern:/invoice\.payment_failed/i,name:"invoice.payment_failed"}],i=[],r=[];for(let s of n)(await e.grepFiles(s.pattern,t)).length>0?i.push(s.name):r.push(s.name);return i.length===0?(await e.grepFiles(/stripe/i,t)).length===0?{result:"UNKNOWN",message:"Webhook files found but no Stripe event handling"}:{result:"FAIL",message:"Stripe webhook handler found but no subscription events handled",evidence:["Missing: checkout.session.completed, subscription.updated, subscription.deleted"]}:i.length<3?{result:"FAIL",message:`Only ${i.length}/5 core webhook events handled`,evidence:[`Handled: ${i.join(", ")}`,`Missing: ${r.join(", ")}`]}:{result:"PASS",message:`${i.length}/5 core webhook events handled`}}};
/* BIL-22: PASS/N/A heuristic based on the count of trial-related references (>=2 strong, 1 weak, 0 = N/A or UNKNOWN). */
var mt={id:"BIL-22",name:"Trial period handling",module:"billing",layer:"L2",priority:"P2",description:"If trials are offered, handle trial_will_end webhook and trial-to-paid conversion properly.",fixCost:100,fixSize:"M",async run(e){let t=/trial_period_days|trial_end|trialing|trial_will_end|trialDays/i,n=await e.grepFiles(t);return n.length>=2?{result:"PASS",message:"Trial period handling detected"}:n.length===1?{result:"PASS",message:"Trial reference found (verify trial_will_end webhook is handled)"}:(await e.grepFiles(/subscription|stripe/i)).length===0?{result:"UNKNOWN",message:"No subscription code found"}:{result:"N/A",message:"No trial period usage detected"}}};
/* BIL-23: looks for rate-limiting patterns in payment-related files first, then anywhere in the repo. */
var gt={id:"BIL-23",name:"Card testing protection",module:"billing",layer:"L2",priority:"P2",description:"Rate limiting on payment endpoints prevents card testing attacks (automated validation of stolen cards).",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>/checkout|payment|billing|subscribe/i.test(s));if(t.length===0)return{result:"UNKNOWN",message:"No payment endpoint files found"};let n=/rateLimit|rate.limit|throttle|limiter|too.many.requests|429|upstash/i;return(await e.grepFiles(n,t)).length>0?{result:"PASS",message:"Rate limiting detected on payment endpoints"}:(await e.grepFiles(n)).length>0?{result:"PASS",message:"Rate limiting detected in codebase (verify it covers payment endpoints)"}:{result:"FAIL",message:"No rate limiting found on payment endpoints \u2014 vulnerable to card testing",evidence:["Missing: rate limiting middleware on checkout/payment routes"]}}};
/* BIL-24: same count heuristic as BIL-22, applied to metered-billing references; never FAILs. */
var ht={id:"BIL-24",name:"Metered billing usage reporting",module:"billing",layer:"L2",priority:"P2",description:"If using metered pricing, usage must be reported to Stripe. Missing reporting = customers never billed for usage.",fixCost:100,fixSize:"M",async run(e){let t=/metered|usage_type|usageRecord|usage.*report|createUsageRecord|meter/i,n=await e.grepFiles(t);return n.length>=2?{result:"PASS",message:"Metered billing usage reporting detected"}:n.length===1?{result:"PASS",message:"Metered billing reference found"}:{result:"N/A",message:"No metered billing usage detected"}}};
/* BIL-25 (head): parses SQL migrations for RLS policies on billing tables; body continues below. */
var St={id:"BIL-25",name:"Subscription table user-writable",module:"billing",layer:"L2",priority:"P0",description:"RLS policies on billing tables (subscriptions, plans, credits) must not allow authenticated users to UPDATE or INSERT plan/status columns. 
User-writable billing state enables subscription fraud (Free \u2192 Pro without payment).",fixCost:250,fixSize:"L",async run(e){let t=["subscriptions","plans","credits","billing"],n=/create\s+table\s+(?:public\.)?(subscriptions|plans|credits|billing)/i,i=e.files.filter(l=>/\.sql$/i.test(l)||/supabase.*migration/i.test(l)),r=await e.grepFiles(n,i.length>0?i:void 0);if(r.length===0){let l=await e.grepFiles(n);if(l.length===0)return{result:"PASS",message:"No billing tables (subscriptions/plans/credits) found"};r.push(...l)}let s=new Set(r.map(l=>{let p=l.content.match(/(?:public\.)?(subscriptions|plans|credits|billing)/i);return p?p[1].toLowerCase():null}).filter(Boolean)),o=[];for(let l of s)for(let p of i.length>0?i:e.files.filter(f=>/\.sql$/i.test(f))){let f;try{f=await e.readFile(p)}catch{continue}let S=new RegExp(`create\\s+policy[^;]*?on\\s+(?:public\\.)?${l}\\s+for\\s+(update|insert|all)`,"gis"),A;for(;(A=S.exec(f))!==null;){let P=Math.max(0,A.index),F=f.substring(P,f.indexOf(";",A.index)+1||P+500),d=/auth\.uid\(\)/i.test(F),y=/with\s+check\s*\([^)]*(?:false|service_role|is_admin)/i.test(F);if(d&&!y){let q=f.substring(0,A.index).split(`
7
+ `).length,Mi=A[1].toUpperCase();o.push(`${p}:${q} \u2192 RLS ${Mi} policy on ${l} allows user writes (auth.uid() = user_id)`)}}}if(o.length>0)return{result:"FAIL",message:`Billing table${s.size>1?"s":""} (${[...s].join(", ")}) allow user writes via RLS \u2014 subscription fraud risk`,evidence:o.slice(0,5)};let a=/\.from\s*\(\s*['"](?:subscriptions|plans|credits|billing)['"]\s*\)\s*\.\s*(?:update|insert|upsert)/i,c=await e.grepFiles(a);return c.length>0?{result:"FAIL",message:`Client-side writes to billing table detected (${c.length} location${c.length>1?"s":""})`,evidence:c.slice(0,3).map(l=>`${l.file}:${l.line} \u2192 ${l.content.substring(0,120)}`)}:{result:"PASS",message:"No user-writable billing tables detected"}}};var yt={id:"ADM-01",name:"Admin endpoints have server-side auth",module:"admin",layer:"L2",priority:"P0",description:"Every /admin or /api/admin endpoint must verify admin role server-side. Server Actions are public POST endpoints.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(r=>/api\/admin|app\/api\/admin/i.test(r)&&(r.endsWith(".ts")||r.endsWith(".js")));if(t.length===0)return{result:"UNKNOWN",message:"No admin API route handlers found"};let n=/requireAdmin|role.*admin|isAdmin|checkPermission|requireRole|app_metadata.*admin|admin.*guard/i,i=[];for(let r of t){let s;try{s=await e.readFile(r)}catch{continue}n.test(s)||i.push(r)}return i.length>0?{result:"FAIL",message:`${i.length} admin API route${i.length>1?"s":""} without admin role verification`,evidence:i.slice(0,5).map(r=>`${r} \u2014 no admin role check detected`)}:{result:"PASS",message:`All ${t.length} admin API routes have admin role verification`}}};var At={id:"ADM-02",name:"Admin routes not accessible without auth",module:"admin",layer:"L2",priority:"P0",description:"Admin pages and API must return 401/403 without valid admin token. 
Middleware alone is insufficient (CVE-2025-29927).",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(o=>/app\/admin\/.*page\.|pages\/admin\//i.test(o)||/(?:pages|views|routes)\/Admin[A-Z][^/]*\.(tsx|ts|jsx|js)$/.test(o)||/(?:pages|views|routes)\/admin-[^/]*\.(tsx|ts|jsx|js)$/.test(o));if(t.length===0)return{result:"UNKNOWN",message:"No admin pages found"};let n=/getUser|getSession|requireAuth|requireAdmin|redirect.*login|redirect.*auth|middleware|auth\(\)|checkPermission/i,i=[];for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}n.test(a)||i.push(o)}let r=e.files.some(o=>/middleware\.(ts|js)$/i.test(o)),s=!1;if(r){let o=e.files.find(a=>/middleware\.(ts|js)$/i.test(a));if(o)try{let a=await e.readFile(o);s=/admin/i.test(a)}catch{}}return i.length>0&&!s?{result:"FAIL",message:`${i.length} admin page${i.length>1?"s":""} without auth guard (no middleware coverage either)`,evidence:i.slice(0,5).map(o=>`${o} \u2014 no auth guard detected`)}:i.length>0&&s?{result:"PASS",message:`Admin pages protected via middleware (${t.length} pages). Note: add per-route auth for defense-in-depth.`}:{result:"PASS",message:`All ${t.length} admin pages have auth guards`}}};
/* ADM-03: client-file role checks (v()/w() helpers defined elsewhere in bundle) must be paired with a server-side enforcement pattern somewhere in the repo. */
var bt={id:"ADM-03",name:"No client-side-only role checks",module:"admin",layer:"L2",priority:"P1",description:"Role checks in JSX ({isAdmin && <Panel/>}) without server-side enforcement are bypassable via dev tools.",fixCost:100,fixSize:"M",async run(e){let t=/isAdmin|is_admin|role.*admin|admin.*role/i,n=/requireAdmin|requireRole|checkPermission|app_metadata.*admin/i,r=(await e.grepFiles(t)).filter(o=>v(o.file)&&!w(o.file));return r.length===0?{result:"PASS",message:"No client-side-only role checks detected"}:(await e.grepFiles(n)).length>0?{result:"PASS",message:"Client-side role checks backed by server-side enforcement"}:{result:"FAIL",message:`Client-side role checks found without server-side enforcement (${r.length} location${r.length>1?"s":""})`,evidence:r.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}}};
/* ADM-04: PASSes when audit-log function patterns or audit/admin/activity-log tables (b() filters SQL files — helper defined elsewhere) exist. */
var $t={id:"ADM-04",name:"Audit log for admin actions",module:"admin",layer:"L2",priority:"P1",description:"Admin operations (delete user, change role, modify data) must be logged. Required for SOC 2 compliance.",fixCost:100,fixSize:"M",async run(e){let t=/audit.*log|admin.*log|action.*log|createAuditEntry|logAdminAction|activity.*log/i,n=/audit_log|admin_log|activity_log/i,i=await e.grepFiles(t),r=e.files.filter(b),s=await e.grepFiles(n,r.length>0?r:void 0);return i.length>0||s.length>0?{result:"PASS",message:"Audit logging detected for admin actions"}:(await e.grepFiles(/admin/i)).length===0?{result:"UNKNOWN",message:"No admin functionality detected"}:{result:"FAIL",message:"No audit logging found for admin actions",evidence:["Missing: audit_log table or logAdminAction function"]}}};
/* ADM-05: FAILs when only a boolean isAdmin model is found with no RBAC/permissions patterns. */
var vt={id:"ADM-05",name:"RBAC beyond binary admin",module:"admin",layer:"L2",priority:"P2",description:"Binary admin/user model means every admin can do everything. Granular roles limit blast radius.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b),n=/RBAC|permission|capability|role.*check|roles.*table|user_roles/i,i=/isAdmin|is_admin|boolean.*admin/i,r=await e.grepFiles(n),s=await e.grepFiles(/role.*enum|roles.*table|permissions.*table/i,t.length>0?t:void 0);if(r.length>0||s.length>0)return{result:"PASS",message:"RBAC or granular permissions model detected"};let o=await e.grepFiles(i);return o.length>0?{result:"FAIL",message:"Binary admin model (isAdmin boolean) \u2014 no granular permissions",evidence:o.slice(0,3).map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:{result:"UNKNOWN",message:"No admin role model detected"}}};
/* ADM-06: N/A when no impersonation patterns exist; otherwise requires some audit/log pattern in the repo. */
var Ct={id:"ADM-06",name:"Safe impersonation (if exists)",module:"admin",layer:"L2",priority:"P1",description:"If admin can 'login as user', the action must be logged with admin ID preserved. Replacing admin session = invisible abuse.",fixCost:100,fixSize:"M",async run(e){let t=/impersonat|loginAs|actAs|switchUser|act_as|login_as/i,n=await e.grepFiles(t);if(n.length===0)return{result:"N/A",message:"No impersonation functionality detected"};let i=/audit|log.*admin|admin.*log|logAction|track/i;return(await e.grepFiles(i)).length>0?{result:"PASS",message:"Impersonation exists with audit logging"}:{result:"FAIL",message:"Impersonation found without audit logging \u2014 admin actions will appear as user actions",evidence:n.slice(0,3).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,120)}`)}}};
/* ADM-07: scans migrations for SERIAL/AUTO_INCREMENT primary keys vs UUID defaults. */
var _t={id:"ADM-07",name:"UUIDs not sequential IDs",module:"admin",layer:"L2",priority:"P1",description:"Sequential integer IDs enable enumeration attacks (IDOR). Use UUIDs for all user-facing primary keys.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(b);if(t.length===0)return{result:"UNKNOWN",message:"No SQL migration files found"};let n=/(?:SERIAL|BIGSERIAL|INTEGER\s+PRIMARY\s+KEY\s+(?:AUTO_INCREMENT|GENERATED))/i,i=/UUID\s+(?:PRIMARY\s+KEY\s+)?DEFAULT\s+(?:gen_random_uuid|uuid_generate)/i,r=await e.grepFiles(n,t),s=await e.grepFiles(i,t),o=r.filter(a=>!/migration|schema_migration|_prisma/i.test(a.content));return o.length>0&&s.length===0?{result:"FAIL",message:`Sequential IDs found without UUID usage (${o.length} table${o.length>1?"s":""})`,evidence:o.slice(0,5).map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:s.length>0?{result:"PASS",message:"UUID primary keys detected in schema"}:{result:"UNKNOWN",message:"No primary key definitions found in migrations"}}};
/* ADM-08: every admin/debug/internal/seed/test route file must contain an auth-guard pattern. */
var kt={id:"ADM-08",name:"No unprotected debug/admin routes",module:"admin",layer:"L2",priority:"P0",description:"Debug, internal, and admin routes must have auth guards. Hidden URLs are not security.",fixCost:100,fixSize:"M",async run(e){let t=[/app\/admin\//,/pages\/admin\//,/app\/internal\//,/app\/debug\//,/api\/admin\//,/api\/debug\//,/api\/internal\//,/api\/graphql/,/api\/seed/,/api\/reset/,/api\/test/,/supabase\/functions\/.*(?:seed|admin|debug|reset|test)/],n=e.files.filter(s=>t.some(o=>o.test(s)));if(n.length===0)return{result:"PASS",message:"No admin/debug/internal routes detected"};let i=/requireAdmin|requireAuth|getUser|getSession|verifyToken|isAdmin|checkPermission|requireRole|auth\(\)|middleware/i,r=[];for(let s of n){if(!s.endsWith(".ts")&&!s.endsWith(".tsx")&&!s.endsWith(".js")&&!s.endsWith(".jsx"))continue;let o;try{o=await e.readFile(s)}catch{continue}i.test(o)||r.push(s)}return r.length>0?{result:"FAIL",message:`${r.length} admin/debug route${r.length>1?"s":""} without auth check`,evidence:r.map(s=>`${s} \u2014 no auth guard detected`)}:{result:"PASS",message:`All ${n.length} admin/debug routes have auth checks`}}};
/* ADM-09: destructive ops in admin files must be paired with a confirmation/re-auth pattern somewhere. */
var It={id:"ADM-09",name:"Destructive ops require extra authorization",module:"admin",layer:"L2",priority:"P2",description:"Bulk delete, data wipe, billing override should require confirmation step or elevated permission.",fixCost:100,fixSize:"M",async run(e){let t=/deleteAll|bulkDelete|wipeData|truncate|DROP\s+TABLE|removeAll|destroyAll|purge/i,n=/confirm|double.*auth|re.?authenticate|verification.*step|two.*step/i,r=(await e.grepFiles(t)).filter(o=>/admin/i.test(o.file));return r.length===0?{result:"N/A",message:"No bulk destructive admin operations detected"}:(await e.grepFiles(n)).length>0?{result:"PASS",message:"Confirmation/extra auth detected for destructive operations"}:{result:"FAIL",message:"Destructive admin operations without extra authorization",evidence:r.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}}};
/* ADM-10: structural check — admin files should live under app/admin, domains/admin, or src/admin. */
var Pt={id:"ADM-10",name:"Admin code separated from user app",module:"admin",layer:"L2",priority:"P2",description:"Admin code in separate directories prevents user app bugs from affecting admin, and vice versa.",fixCost:100,fixSize:"M",async run(e){let t=e.files.some(i=>/^app\/admin\/|^domains\/admin\/|^src\/admin\//i.test(i)),n=e.files.filter(i=>/admin/i.test(i)&&!/(app|domains|src)\/admin\//i.test(i));return t?{result:"PASS",message:"Admin code in dedicated directory (app/admin/ or domains/admin/)"}:n.length>0?{result:"FAIL",message:"Admin code scattered across user-facing directories",evidence:n.slice(0,5).map(i=>i)}:{result:"N/A",message:"No admin code detected"}}};
/* ADM-11: greps for hardcoded admin credential patterns (E() excludes env files — helper defined elsewhere); evidence redacts long quoted values. */
var wt={id:"ADM-11",name:"No hardcoded admin credentials",module:"admin",layer:"L2",priority:"P0",description:"Hardcoded admin passwords, tokens, or emails in source code = anyone with repo access is admin",fixCost:100,fixSize:"M",async run(e){let t=[/admin.*password\s*[:=]\s*["']/i,/admin.*token\s*[:=]\s*["']/i,/admin.*secret\s*[:=]\s*["']/i,/password\s*[:=]\s*["'](?:admin|password|123456|secret)/i,/DEFAULT_ADMIN_PASSWORD/i,/ADMIN_PASSWORD\s*[:=]/i,/seed.*admin.*password/i],n=[];for(let r of t){let o=(await e.grepFiles(r)).filter(a=>!(E(a.file)||a.content.trimStart().startsWith("//")||a.content.trimStart().startsWith("#")||a.content.trimStart().startsWith("*")||a.file.includes("test")||a.file.includes("spec")));n.push(...o)}let i=[...new Map(n.map(r=>[`${r.file}:${r.line}`,r])).values()];return i.length>0?{result:"FAIL",message:`Hardcoded admin credentials found (${i.length} location${i.length>1?"s":""})`,evidence:i.map(r=>`${r.file}:${r.line} \u2192 ${r.content.substring(0,80).replace(/["'][^"']{8,}["']/g,'"[REDACTED]"')}`)}:{result:"PASS",message:"No hardcoded admin credentials found in source code"}}};
/* ADM-12 (head): scans admin routes for error-detail leaks; body continues below. */
var Lt={id:"ADM-12",name:"Admin error handling",module:"admin",layer:"L2",priority:"P2",description:"Admin API errors must not leak stack traces, SQL queries, or internal schema details.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(o=>/admin.*route|api.*admin/i.test(o));if(t.length===0)return{result:"UNKNOWN",message:"No admin API routes 
found"};let n=/try\s*\{|\.catch\s*\(|catch\s*\(/,i=/stack|trace|SQL|query.*error|\.message/i,r=!1,s=[];for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}n.test(a)&&(r=!0);let c=a.split(`
8
+ `);for(let l=0;l<c.length;l++)/error\.stack|error\.message|err\.stack|JSON\.stringify.*error/i.test(c[l])&&s.push({file:o,line:l+1,content:c[l].trim()})}return s.length>0?{result:"FAIL",message:"Admin API may leak error details (stack traces, error messages)",evidence:s.slice(0,3).map(o=>`${o.file}:${o.line} \u2192 ${o.content.substring(0,120)}`)}:r?{result:"PASS",message:"Admin error handling present without obvious information leaks"}:{result:"FAIL",message:"No error handling in admin API routes",evidence:t.slice(0,3).map(o=>`${o} \u2014 no try/catch`)}}};var Nt={id:"ADM-13",name:"MFA requirement for admin roles",module:"admin",layer:"L2",priority:"P0",description:"Admin auth must require MFA/AAL2. Compromised admin password without MFA = total takeover.",fixCost:100,fixSize:"M",async run(e){let t=/mfa|aal2|getAuthenticatorAssuranceLevel|totp|authenticator|multi.?factor|two.?factor/i,n=e.files.filter(s=>/admin/i.test(s));if(n.length===0)return{result:"UNKNOWN",message:"No admin files found"};let i=await e.grepFiles(t,n),r=await e.grepFiles(t);return i.length>0?{result:"PASS",message:"MFA/AAL2 enforcement detected in admin code"}:r.length>0?{result:"PASS",message:"MFA implementation detected in codebase (verify it covers admin routes)"}:{result:"FAIL",message:"No MFA/AAL2 enforcement found for admin roles",evidence:["No references to mfa, aal2, totp, or authenticator in admin code"]}}};var Rt={id:"ADM-14",name:"Rate limiting on admin endpoints",module:"admin",layer:"L2",priority:"P1",description:"Admin API endpoints without rate limiting are vulnerable to brute force and DoS attacks.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>/api.*admin|admin.*route/i.test(s));if(t.length===0)return{result:"UNKNOWN",message:"No admin API endpoints found"};let n=/rateLimit|rate.limit|throttle|limiter|upstash|redis.*limit|too.many.requests|429/i;return(await e.grepFiles(n,t)).length>0?{result:"PASS",message:"Rate limiting detected on admin endpoints"}:(await 
e.grepFiles(n)).length>0?{result:"PASS",message:"Rate limiting detected in codebase (verify it covers admin endpoints)"}:{result:"FAIL",message:"No rate limiting found on admin endpoints",evidence:["Missing: rate limiting middleware on admin API routes"]}}};
/* ADM-16: looks only for admin-specific timeout/expiry/maxAge patterns; FAILs when admin code exists without them. */
var Et={id:"ADM-16",name:"Separate admin session timeouts",module:"admin",layer:"L2",priority:"P1",description:"Admin sessions should have shorter timeouts than user sessions to limit session hijacking window.",fixCost:100,fixSize:"M",async run(e){let t=/admin.*timeout|admin.*expir|admin.*maxAge|admin.*session.*duration|session.*admin.*short/i;return(await e.grepFiles(t)).length>0?{result:"PASS",message:"Differentiated admin session timeout detected"}:(await e.grepFiles(/admin/i)).length===0?{result:"UNKNOWN",message:"No admin functionality detected"}:{result:"FAIL",message:"No separate admin session timeout \u2014 admin sessions use same duration as user sessions",evidence:["Missing: shorter session timeout for admin roles"]}}};
/* ADM-18: PASSes when notification/alerting patterns appear in admin files or anywhere in the repo. */
var Ft={id:"ADM-18",name:"Admin action notification/alerting",module:"admin",layer:"L2",priority:"P2",description:"Critical admin actions should trigger notifications (Slack, email) so compromised admin accounts are detected quickly.",fixCost:100,fixSize:"M",async run(e){let t=/slack.*webhook|sendSlack|sendEmail.*admin|notify.*admin|alert.*admin|admin.*notify|admin.*alert|webhook.*notify/i,n=e.files.filter(s=>/admin/i.test(s));if(n.length===0)return{result:"UNKNOWN",message:"No admin functionality detected"};let i=await e.grepFiles(t,n),r=await e.grepFiles(t);return i.length>0||r.length>0?{result:"PASS",message:"Admin action notifications/alerting detected"}:{result:"FAIL",message:"No notification/alerting for admin actions \u2014 compromised admin goes undetected",evidence:["Missing: Slack webhook, email alert, or notification for critical admin actions"]}}};
/* ADM-19: accepts explicit CSRF patterns or "use server" (Next.js Server Actions) as protection. */
var Tt={id:"ADM-19",name:"CSRF protection for admin mutations",module:"admin",layer:"L2",priority:"P1",description:"Admin mutation endpoints need CSRF protection. If victim is admin, CSRF compromises entire app.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>/admin.*route\.(ts|js)$|api.*admin/i.test(s));if(t.length===0)return{result:"UNKNOWN",message:"No admin route handlers found"};let n=/csrf|csurf|csrfToken|SameSite|x-csrf|anti.?forgery/i;return(await e.grepFiles(n)).length>0?{result:"PASS",message:"CSRF protection detected"}:(await e.grepFiles(/["']use server["']/i)).length>0?{result:"PASS",message:"Server Actions used (built-in CSRF protection in Next.js 14+)"}:{result:"FAIL",message:"No CSRF protection on admin mutation routes",evidence:t.slice(0,3).map(s=>`${s} \u2014 no CSRF token or SameSite cookie`)}}};
/* ADM-20: bulk-export call sites must show auth patterns (required) and export logging (recommended). */
var xt={id:"ADM-20",name:"Data export controls",module:"admin",layer:"L2",priority:"P1",description:"Admin bulk export/download must have authorization and logging. Compromised admin can exfiltrate all user data.",fixCost:100,fixSize:"M",async run(e){let t=/export.*csv|downloadCSV|bulk.*fetch|dump.*data|export.*users|export.*data/i,n=await e.grepFiles(t);if(n.length===0)return{result:"N/A",message:"No bulk data export functionality detected"};let i=/requireAdmin|requireAuth|getUser|checkPermission/i,r=/audit|log.*export|log.*download/i,s=[...new Set(n.map(c=>c.file))],o=await e.grepFiles(i,s),a=await e.grepFiles(r,s);return o.length>0&&a.length>0?{result:"PASS",message:"Data export has auth and logging"}:o.length>0?{result:"PASS",message:"Data export has auth (consider adding export logging)"}:{result:"FAIL",message:"Data export without auth/logging \u2014 PII exfiltration risk",evidence:n.slice(0,3).map(c=>`${c.file}:${c.line} \u2192 ${c.content.substring(0,120)}`)}}};
/* ADM-21: FAILs when self-assign-admin patterns appear in signup/register/onboarding flows. */
var Dt={id:"ADM-21",name:"Admin provisioning control",module:"admin",layer:"L2",priority:"P1",description:"Admin role must not be self-assignable. AI tools often generate 'isFirstUser \u2192 admin' or open role selection on signup.",fixCost:100,fixSize:"M",async run(e){let t=/role\s*=\s*['"]admin['"]|isFirstUser|first.*user.*admin|role.*select|self.*assign.*admin/i,n=e.files.filter(a=>/signup|register|onboard/i.test(a)),i=await e.grepFiles(t),s=[...n.length>0?await e.grepFiles(t,n):[],...i.filter(a=>/signup|register|onboard/i.test(a.file))],o=[...new Map(s.map(a=>[`${a.file}:${a.line}`,a])).values()];return o.length>0?{result:"FAIL",message:"Self-assign admin pattern detected in signup/onboarding flow",evidence:o.slice(0,3).map(a=>`${a.file}:${a.line} \u2192 ${a.content.substring(0,120)}`)}:{result:"PASS",message:"No self-assign admin pattern detected in signup flow"}}};
/* ADM-22: every supabase/functions entry point must contain an auth-verification pattern. */
var Ut={id:"ADM-22",name:"Edge Function authentication",module:"admin",layer:"L2",priority:"P0",description:"Supabase Edge Functions must verify the Authorization header or user session. Unprotected Edge Functions can be called by anyone with the public anon key, enabling privilege escalation (e.g. seed-admin).",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(r=>/supabase\/functions\/[^/]+\/index\.(ts|js)$/i.test(r));if(t.length===0)return{result:"PASS",message:"No Supabase Edge Functions found"};let n=/auth\.getUser|authorization.*header|req\.headers\.get\s*\(\s*['"]authorization['"]\)|verifyJWT|supabaseClient\.auth|createClient.*serviceRole/i,i=[];for(let r of t){let s=await e.readFile(r);n.test(s)||i.push(r)}return i.length>0?{result:"FAIL",message:`${i.length} Edge Function${i.length>1?"s":""} without auth verification`,evidence:i.slice(0,5).map(r=>`${r} \u2192 No Authorization header check or auth.getUser() call`)}:{result:"PASS",message:`All ${t.length} Edge Functions have auth checks`}}};
/* Drift helpers: Oi = auth directories allowlist, Wi = auth API pattern, ji(file) = is in auth dir, Bi(file) = excluded (deps/build/tests/type files). */
var Oi=["/auth/","middleware.ts","middleware.js","/domains/auth/"],Wi=/\b(getUser|createServerClient|supabase\.auth|currentUser|clerkMiddleware|useUser|getServerSession|useSession|authOptions|getSession)\b/;function ji(e){return Oi.some(t=>e.includes(t))}function Bi(e){return e.includes("node_modules")||e.includes(".next/")||e.includes("test")||e.includes("spec")||e.includes(".d.ts")||e.includes("types.ts")||e.includes("types.js")}
/* DRIFT-AUTH-01: more than 3 auth-pattern hits outside auth dirs (comments excluded) = FAIL. */
var Mt={id:"DRIFT-AUTH-01",name:"Auth logic spreading outside auth directory",module:"auth",layer:"L2",priority:"P1",category:"drift",description:"Auth patterns (getUser, createServerClient, etc.) found outside auth directories indicate module boundary drift.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(Wi)).filter(i=>!(Bi(i.file)||i.content.trimStart().startsWith("//")||i.content.trimStart().startsWith("*")||ji(i.file)));return n.length>3?{result:"FAIL",message:`Auth logic found in ${n.length} locations outside auth directories \u2014 module boundary is leaking`,evidence:n.slice(0,5).map(i=>`${i.file}:${i.line} \u2192 ${i.content.substring(0,100)}`)}:n.length>0?{result:"PASS",message:`Minor auth references outside auth dir (${n.length}) \u2014 within acceptable range`}:{result:"PASS",message:"Auth logic is contained within auth directories"}}};
/* DRIFT-AUTH-02: counts middleware files containing auth logic; exactly one expected. */
var Hi=/\b(getUser|createServerClient|supabase\.auth|currentUser|clerkMiddleware|auth\(\)|getServerSession|NextAuth)\b/,Ot={id:"DRIFT-AUTH-02",name:"Duplicate auth middleware files",module:"auth",layer:"L2",priority:"P1",category:"drift",description:"Multiple middleware files with auth logic indicate fragmented auth \u2014 a common AI generation pattern.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(i=>(i.includes("middleware.ts")||i.includes("middleware.js"))&&!i.includes("node_modules")&&!i.includes(".next/")),n=[];for(let i of t)try{let r=await e.readFile(i);Hi.test(r)&&n.push(i)}catch{continue}return n.length>1?{result:"FAIL",message:`${n.length} middleware files contain auth logic \u2014 auth should have a single entry point`,evidence:n.map(i=>i)}:n.length===1?{result:"PASS",message:`Single auth middleware found: ${n[0]}`}:{result:"UNKNOWN",message:"No middleware with auth logic found"}}};
/* DRIFT-AUTH-03: API route.ts/route.js handlers (auth/webhook/health/public excluded) must match an auth pattern. */
var zi=/\b(getUser|createServerClient|supabase\.auth|currentUser|auth\(\)|getServerSession|requireAuth|withAuth|isAuthenticated)\b/,Wt={id:"DRIFT-AUTH-03",name:"New API routes without auth checks",module:"auth",layer:"L2",priority:"P0",category:"drift",description:"API route handlers without auth verification \u2014 any visitor can call unprotected endpoints.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(i=>i.includes("/api/")&&(i.endsWith("route.ts")||i.endsWith("route.js"))&&!i.includes("node_modules")&&!i.includes(".next/")&&!i.includes("/api/auth/")&&!i.includes("/api/webhook")&&!i.includes("/api/health")&&!i.includes("/api/public"));if(t.length===0)return{result:"UNKNOWN",message:"No API route files found"};let n=[];for(let i of t)try{let r=await e.readFile(i);zi.test(r)||n.push(i)}catch{continue}return n.length>0?{result:"FAIL",message:`${n.length} of ${t.length} API routes have no auth verification`,evidence:n.slice(0,5).map(i=>i)}:{result:"PASS",message:`All ${t.length} API routes have auth checks`}}};
/* DRIFT-AUTH-04: client-file role checks must be backed by server-side patterns in api/server/actions globs. */
var qi=/\b(isAdmin|role\s*===?\s*['"`]admin['"`]|user\.role|userRole|hasRole)\b/,Ki=/\b(getUser|createServerClient|supabase\.auth|currentUser|getServerSession|requireAdmin|checkAdmin)\b/,jt={id:"DRIFT-AUTH-04",name:"Client-side auth bypass \u2014 role check without server verification",module:"auth",layer:"L2",priority:"P1",category:"drift",description:"Role checks in JSX/client code without server-side verification \u2014 UI-only gates can be bypassed.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(qi)).filter(s=>s.content.trimStart().startsWith("//")||s.file.includes("node_modules")||s.file.includes(".next/")?!1:v(s.file));if(n.length===0)return{result:"PASS",message:"No client-side role checks found"};let r=(await e.grepFiles(Ki,["**/api/**","**/server/**","**/actions.*"])).some(s=>!s.file.includes("node_modules"));return n.length>0&&!r?{result:"FAIL",message:`${n.length} client-side role checks found but no server-side role verification \u2014 admin UI can be bypassed`,evidence:n.slice(0,5).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,100)}`)}:{result:"PASS",message:`Client-side role checks (${n.length}) backed by server-side verification`}}};
/* DRIFT-AUTH-05: any localStorage get/set/remove of token-like keys (comments/deps excluded) = FAIL. */
var Gi=/localStorage\.(getItem|setItem|removeItem)\s*\(\s*['"`](token|jwt|session|auth|access_token|refresh_token|user)/,Bt={id:"DRIFT-AUTH-05",name:"Auth tokens stored in localStorage",module:"auth",layer:"L2",priority:"P1",category:"drift",description:"localStorage for auth tokens is vulnerable to XSS \u2014 use httpOnly cookies or secure session management.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(Gi)).filter(i=>!(i.content.trimStart().startsWith("//")||i.file.includes("node_modules")||i.file.includes(".next/")));return n.length>0?{result:"FAIL",message:`Auth tokens stored in localStorage (${n.length} location${n.length>1?"s":""}) \u2014 vulnerable to XSS`,evidence:n.slice(0,5).map(i=>`${i.file}:${i.line} \u2192 ${i.content.substring(0,100)}`)}:{result:"PASS",message:"No localStorage auth token usage found"}}};
/* Drift helpers: Vi = billing directories allowlist, Yi = billing API pattern, Xi(file) = in billing dir, Qi(file) = excluded. */
var Vi=["/billing/","/stripe/","/webhook/","/payment/","/subscription/","/domains/billing/"],Yi=/\b(stripe|Stripe|createCheckout|price_id|priceId|subscription_id|subscriptionId|checkout\.sessions|customer\.subscriptions)\b/;function Xi(e){return Vi.some(t=>e.includes(t))}function Qi(e){return e.includes("node_modules")||e.includes(".next/")||e.includes("test")||e.includes("spec")||e.includes(".d.ts")||e.includes("types.ts")||e.includes("types.js")||e.includes("package.json")||e.includes("package-lock")||e.includes("pnpm-lock")||e.includes(".env")}
/* DRIFT-BIL-01: more than 3 billing-pattern hits outside billing dirs = FAIL (mirrors DRIFT-AUTH-01). */
var Ht={id:"DRIFT-BIL-01",name:"Billing logic spreading outside billing directory",module:"billing",layer:"L2",priority:"P1",category:"drift",description:"Stripe/payment imports outside billing directories indicate module boundary drift.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(Yi)).filter(i=>!(Qi(i.file)||i.content.trimStart().startsWith("//")||i.content.trimStart().startsWith("*")||i.content.trimStart().startsWith("#")||Xi(i.file)));return n.length>3?{result:"FAIL",message:`Billing/Stripe logic found in ${n.length} locations outside billing directories \u2014 module boundary is leaking`,evidence:n.slice(0,5).map(i=>`${i.file}:${i.line} \u2192 ${i.content.substring(0,100)}`)}:n.length>0?{result:"PASS",message:`Minor billing references outside billing dir (${n.length}) \u2014 within acceptable range`}:{result:"PASS",message:"Billing logic is contained within billing directories"}}};
/* DRIFT-BIL-02: counts webhook handler files; exactly one expected. */
var Ji=/\b(webhook|stripe.*event|event\.type|constructEvent)\b/i,zt={id:"DRIFT-BIL-02",name:"Duplicate webhook handler endpoints",module:"billing",layer:"L2",priority:"P0",category:"drift",description:"Multiple webhook endpoints create race conditions and duplicate event processing.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(i=>!i.includes("node_modules")&&!i.includes(".next/")&&!i.includes("test")&&(i.includes("webhook")||i.includes("stripe"))&&(i.endsWith(".ts")||i.endsWith(".js")||i.endsWith(".py"))),n=[];for(let i of t)try{let r=await e.readFile(i);Ji.test(r)&&n.push(i)}catch{continue}return n.length>1?{result:"FAIL",message:`${n.length} webhook handler files found \u2014 should be exactly 1`,evidence:n}:n.length===1?{result:"PASS",message:`Single webhook handler: ${n[0]}`}:{result:"UNKNOWN",message:"No webhook handler files found \u2014 Stripe webhooks may not be configured"}}};
/* DRIFT-BIL-03 (head): client-side plan/subscription check scan; definition continues past this chunk. */
var Zi=/\b(subscription|plan|isPro|isFreeTier|isPaid|currentPlan|userPlan|hasSubscription)\b/,qt={id:"DRIFT-BIL-03",name:"Client-side subscription check without server verification",module:"billing",layer:"L2",priority:"P1",category:"drift",description:"Plan/subscription checks in client code without server-side verification \u2014 users can bypass paywalls.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(Zi)).filter(s=>s.content.trimStart().startsWith("//")||s.file.includes("node_modules")||s.file.includes(".next/")||s.file.includes("test")||s.file.includes("types")?!1:v(s.file)&&!s.file.includes("/api/"));if(n.length===0)return{result:"PASS",message:"No client-side subscription checks found"};let r=(await 
e.grepFiles(/\b(subscription|check.*plan|check.*limit|entitlement|getSubscription)\b/i,["**/api/**","**/server/**","**/actions.*","**/billing/**"])).some(s=>!s.file.includes("node_modules"));return n.length>0&&!r?{result:"FAIL",message:`${n.length} client-side subscription checks but no server-side plan verification \u2014 paywall can be bypassed`,evidence:n.slice(0,5).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,100)}`)}:{result:"PASS",message:`Client-side subscription checks (${n.length}) backed by server-side verification`}}};var es=/(?:price|amount|cost|fee)\s*[:=]\s*(\d{2,}(?:\.\d{2})?|['"`]\$?\d+)/i,ts=/(?:amount|price|unit_amount)\s*[:=]\s*\d{3,}/,Kt={id:"DRIFT-BIL-04",name:"Hardcoded prices instead of plan configuration",module:"billing",layer:"L2",priority:"P2",category:"drift",description:"Hardcoded dollar amounts or price values should come from plan configuration or Stripe metadata.",fixCost:100,fixSize:"M",async run(e){let t=await e.grepFiles(es),n=await e.grepFiles(ts),r=[...t,...n].filter(s=>!(s.content.trimStart().startsWith("//")||s.content.trimStart().startsWith("*")||s.file.includes("node_modules")||s.file.includes(".next/")||s.file.includes("test")||s.file.includes("package.json")||s.file.includes(".lock")||s.file.includes(".css")||s.file.includes("migration")));return r.length>3?{result:"FAIL",message:`${r.length} hardcoded price/amount values found \u2014 prices should come from plan config or Stripe`,evidence:r.slice(0,5).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,100)}`)}:r.length>0?{result:"PASS",message:`Minor hardcoded amounts found (${r.length}) \u2014 review recommended`}:{result:"PASS",message:"No hardcoded price values detected"}}};var is=/\b(canAccess|hasFeature|featureEnabled|isEnabled|featureFlag|canUse)\b/,ss=/\b(checkLimit|checkSubscription|checkPlan|entitlement|billingGuard|requirePlan|checkQuota)\b/,Gt={id:"DRIFT-BIL-05",name:"Feature access without billing 
verification",module:"billing",layer:"L2",priority:"P2",category:"drift",description:"New features with access gates but no billing/entitlement check \u2014 free users can access paid features.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(is)).filter(s=>!(s.content.trimStart().startsWith("//")||s.file.includes("node_modules")||s.file.includes(".next/")||s.file.includes("test")));if(n.length===0)return{result:"UNKNOWN",message:"No feature gate patterns found \u2014 may not use feature flags"};let r=(await e.grepFiles(ss)).some(s=>!s.file.includes("node_modules")&&!s.file.includes(".next/"));return n.length>0&&!r?{result:"FAIL",message:`${n.length} feature gate(s) found but no billing/entitlement verification \u2014 paid features may be accessible for free`,evidence:n.slice(0,5).map(s=>`${s.file}:${s.line} \u2192 ${s.content.substring(0,100)}`)}:{result:"PASS",message:`Feature gates (${n.length}) backed by billing verification`}}};var ns=["/admin/","/domains/admin/"],rs=/\b(isAdmin|requireAdmin|checkAdmin|adminGuard|role\s*===?\s*['"`]admin['"`]|user_role|superAdmin|isSuperAdmin)\b/;function os(e){return ns.some(t=>e.includes(t))}function as(e){return e.includes("node_modules")||e.includes(".next/")||e.includes("test")||e.includes("spec")||e.includes(".d.ts")||e.includes("types.ts")||e.includes("types.js")||e.includes("migration")||e.includes(".sql")}var Vt={id:"DRIFT-ADM-01",name:"Admin logic spreading outside admin directory",module:"admin",layer:"L2",priority:"P1",category:"drift",description:"Admin permission patterns found outside admin directories indicate module boundary drift.",fixCost:100,fixSize:"M",async run(e){let n=(await e.grepFiles(rs)).filter(i=>!(as(i.file)||i.content.trimStart().startsWith("//")||i.content.trimStart().startsWith("*")||os(i.file)));return n.length>3?{result:"FAIL",message:`Admin logic found in ${n.length} locations outside admin directories \u2014 module boundary is 
leaking`,evidence:n.slice(0,5).map(i=>`${i.file}:${i.line} \u2192 ${i.content.substring(0,100)}`)}:n.length>0?{result:"PASS",message:`Minor admin references outside admin dir (${n.length}) \u2014 within acceptable range`}:{result:"PASS",message:"Admin logic is contained within admin directories"}}};var cs=/\b(requireAdmin|checkAdmin|adminGuard|isAdmin|role\s*===?\s*['"`]admin['"`]|isSuperAdmin)\b/,Yt={id:"DRIFT-ADM-02",name:"New admin routes without permission guards",module:"admin",layer:"L2",priority:"P0",category:"drift",description:"Admin route handlers without permission checks \u2014 any authenticated user can access admin features.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(i=>i.includes("/admin/")&&!i.includes("node_modules")&&!i.includes(".next/")&&!i.includes("test")&&(i.endsWith("route.ts")||i.endsWith("route.js")||i.endsWith("page.tsx")||i.endsWith("page.jsx")||i.endsWith("page.ts")));if(t.length===0)return{result:"UNKNOWN",message:"No admin route files found"};let n=[];for(let i of t)try{let r=await e.readFile(i);cs.test(r)||n.push(i)}catch{continue}return n.length>0?{result:"FAIL",message:`${n.length} of ${t.length} admin routes have no permission guard`,evidence:n.slice(0,5).map(i=>i)}:{result:"PASS",message:`All ${t.length} admin routes have permission guards`}}};var Xt=/\b(logAuditEvent|auditLog|audit_log|createAuditEntry|logAdminAction|insertAuditLog)\b/,Qt=/\b(DELETE|PUT|PATCH|POST)\b/,Jt={id:"DRIFT-ADM-03",name:"Admin mutations without audit logging",module:"admin",layer:"L2",priority:"P1",category:"drift",description:"Admin mutation endpoints (POST/PUT/DELETE) without audit log calls \u2014 no trail of admin actions.",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(i=>i.includes("/admin/")&&i.includes("/api/")&&!i.includes("node_modules")&&!i.includes(".next/")&&!i.includes("test")&&(i.endsWith("route.ts")||i.endsWith("route.js")));if(t.length===0){let 
i=e.files.filter(s=>s.includes("/admin/")&&!s.includes("node_modules")&&!s.includes(".next/")&&(s.includes("action")||s.includes("service")||s.includes("handler"))&&(s.endsWith(".ts")||s.endsWith(".js")||s.endsWith(".py")));if(i.length===0)return{result:"UNKNOWN",message:"No admin API routes or action files found"};let r=[];for(let s of i)try{let o=await e.readFile(s);Qt.test(o)&&!Xt.test(o)&&r.push(s)}catch{continue}return r.length>0?{result:"FAIL",message:`${r.length} admin action file(s) with mutations but no audit logging`,evidence:r.slice(0,5)}:{result:"PASS",message:"Admin action files have audit logging"}}let n=[];for(let i of t)try{let r=await e.readFile(i);Qt.test(r)&&!Xt.test(r)&&n.push(i)}catch{continue}return n.length>0?{result:"FAIL",message:`${n.length} of ${t.length} admin API routes have mutations without audit logging`,evidence:n.slice(0,5)}:{result:"PASS",message:`All ${t.length} admin API routes have audit logging`}}};var Zt={id:"ENV-01",name:".env.example exists",module:"foundation",layer:"L3",priority:"P1",description:"Project should have .env.example documenting required environment variables",fixCost:50,fixSize:"S",async run(e){return e.files.some(i=>i===".env.example"||i.endsWith("/.env.example"))?{result:"PASS",message:".env.example found"}:e.files.some(i=>i===".env"||i===".env.local"||i.endsWith("/.env")||i.endsWith("/.env.local"))?{result:"FAIL",message:"Project uses .env files but has no .env.example for documentation",evidence:["Create .env.example listing all required vars (without secret values)"]}:{result:"UNKNOWN",message:"No .env files found \u2014 project may not use environment variables"}}};var ei={id:"ENV-02",name:"No secrets in committed .env",module:"foundation",layer:"L3",priority:"P0",description:"Committed .env files must not contain actual secret values \u2014 only .env.example with placeholders",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>{let 
o=s.split("/").pop()||"";return(o===".env"||o===".env.local"||o===".env.production")&&!o.includes("example")});if(t.length===0)return{result:"PASS",message:"No committed .env files found"};let n=/^[A-Z_]+=(?:sk_live_|sk_test_|whsec_|sbp_|eyJ|ghp_|gho_|AKIA|supabase.*service_role)/m,i=/^[A-Z_]+=.{20,}/m,r=[];for(let s of t){let o;try{o=await e.readFile(s)}catch{continue}if(n.test(o))r.push(`${s} \u2014 contains secret key patterns`);else if(i.test(o)){let a=o.split(`
9
+ `).filter(c=>/^[A-Z_]+=.{20,}/.test(c)&&!c.startsWith("#")&&!c.includes("your_")&&!c.includes("xxx"));a.length>2&&r.push(`${s} \u2014 appears to contain real values (${a.length} long values)`)}}return r.length>0?{result:"FAIL",message:`Secrets found in committed .env file${r.length>1?"s":""}`,evidence:r}:{result:"FAIL",message:`.env file${t.length>1?"s":""} committed to git \u2014 should be in .gitignore`,evidence:t.map(s=>`${s} \u2014 add to .gitignore`)}}};var ti={id:"CFG-01",name:"TypeScript strict mode",module:"foundation",layer:"L3",priority:"P1",description:"tsconfig.json should have strict: true to catch type errors at build time",fixCost:250,fixSize:"L",async run(e){let t;try{t=await e.readFile("tsconfig.json")}catch{return{result:"UNKNOWN",message:"No tsconfig.json found \u2014 project may not use TypeScript"}}return/"strict"\s*:\s*true/.test(t)?{result:"PASS",message:"TypeScript strict mode is enabled"}:/"strict"\s*:\s*false/.test(t)?{result:"FAIL",message:"TypeScript strict mode is explicitly disabled",evidence:['tsconfig.json \u2014 set "strict": true']}:{result:"FAIL",message:"TypeScript strict mode not configured (defaults to false)",evidence:['tsconfig.json \u2014 add "strict": true to compilerOptions']}}};var ii={id:"ERR-01",name:"Global error boundary exists",module:"foundation",layer:"L3",priority:"P1",description:"React apps should have a top-level error boundary to prevent full white-screen crashes",fixCost:100,fixSize:"M",async run(e){if(e.files.filter(i=>i.endsWith(".tsx")||i.endsWith(".jsx")).length===0)return{result:"UNKNOWN",message:"No React files found"};let n=/ErrorBoundary|componentDidCatch|getDerivedStateFromError|error\.tsx|error\.jsx/;for(let i of e.files){if(!i.endsWith(".ts")&&!i.endsWith(".tsx")&&!i.endsWith(".js")&&!i.endsWith(".jsx"))continue;let r;try{r=await e.readFile(i)}catch{continue}if(n.test(r))return{result:"PASS",message:`Error boundary found in ${i}`}}return{result:"FAIL",message:"No error boundary component found 
\u2014 unhandled errors will crash the entire app",evidence:["Add an ErrorBoundary component wrapping your app root or use Next.js error.tsx"]}}};var si={id:"ERR-03",name:"Unhandled promise rejection handling",module:"foundation",layer:"L3",priority:"P1",description:"Application should handle unhandled promise rejections to prevent silent failures",fixCost:50,fixSize:"S",async run(e){let t=e.files.filter(r=>(r.includes("/api/")||r.includes("route.ts")||r.includes("route.js")||r.includes("actions.ts")||r.includes("actions.js"))&&(r.endsWith(".ts")||r.endsWith(".js")));if(t.length===0)return{result:"UNKNOWN",message:"No API routes or server actions found"};let n=/try\s*\{/,i=[];for(let r of t){let s;try{s=await e.readFile(r)}catch{continue}/async\s+function|async\s*\(/.test(s)&&!n.test(s)&&i.push(r)}return i.length>0?{result:"FAIL",message:`${i.length} async API route${i.length>1?"s":""} without try/catch error handling`,evidence:i.slice(0,5).map(r=>`${r} \u2014 add try/catch around async operations`)}:{result:"PASS",message:`All ${t.length} API routes have error handling`}}};var ni={id:"SCH-01",name:"Database migrations exist",module:"schema",layer:"L4",priority:"P1",description:"Project should have migration files (Supabase, Prisma, or Drizzle) for reproducible schema changes",fixCost:100,fixSize:"M",async run(e){let t=[/supabase\/migrations\//,/prisma\/migrations\//,/drizzle\/migrations\//,/migrations\/.*\.sql$/,/schema\.prisma$/],n=e.files.filter(o=>t.some(a=>a.test(o)));if(n.length>0)return{result:"PASS",message:`${n.length} migration file${n.length>1?"s":""} found`};let i=e.files.some(o=>o.includes("supabase")||o.includes("@supabase")),r=e.files.some(o=>o.includes("prisma")),s=e.files.some(o=>o.includes("drizzle"));return i||r||s?{result:"FAIL",message:"Database ORM/client detected but no migration files found",evidence:["Create migration files to track schema changes reproducibly"]}:{result:"UNKNOWN",message:"No database usage detected"}}};var 
ri={id:"SCH-02",name:"No raw SQL in application code",module:"schema",layer:"L4",priority:"P2",description:"Inline SQL in route handlers/components instead of ORM/query builder is fragile and injection-prone",fixCost:250,fixSize:"L",async run(e){let t=e.files.filter(r=>!r.includes("migration")&&!r.endsWith(".sql")&&!r.includes("seed")&&!r.includes("supabase/")&&!r.includes("prisma/")&&!r.includes("drizzle/")&&(r.endsWith(".ts")||r.endsWith(".tsx")||r.endsWith(".js")||r.endsWith(".jsx"))),n=/\b(SELECT|INSERT INTO|UPDATE\s+\w+\s+SET|DELETE FROM|CREATE TABLE|ALTER TABLE|DROP TABLE)\b/,i=[];for(let r of t){let s;try{s=await e.readFile(r)}catch{continue}let o=s.split(`
10
+ `);for(let a=0;a<o.length;a++){let c=o[a].trim();c.startsWith("//")||c.startsWith("*")||c.startsWith("#")||n.test(c)&&i.push(`${r}:${a+1} \u2192 ${c.substring(0,100)}`)}}return i.length>0?{result:"FAIL",message:`Raw SQL found in ${i.length} location${i.length>1?"s":""} in application code`,evidence:i.slice(0,5)}:{result:"PASS",message:"No raw SQL detected in application code"}}};var oi={id:"SCH-04",name:"Soft delete pattern consistency",module:"schema",layer:"L4",priority:"P2",description:"If soft delete is used, all delete operations should use the same pattern (deleted_at column)",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>(s.endsWith(".ts")||s.endsWith(".tsx")||s.endsWith(".js")||s.endsWith(".jsx"))&&!s.includes("node_modules")),n=!1,i=!1,r=[];for(let s of t){let o;try{o=await e.readFile(s)}catch{continue}/deleted_at|deletedAt|is_deleted|isDeleted|soft.?delete/i.test(o)&&(n=!0),/\.delete\(\)|\.remove\(\)|DELETE FROM/i.test(o)&&!o.includes("deleted_at")&&(i=!0,r.push(s))}return!n&&!i?{result:"UNKNOWN",message:"No delete operations detected"}:n&&i?{result:"FAIL",message:"Mixed soft delete and hard delete patterns \u2014 inconsistent data deletion strategy",evidence:r.slice(0,5).map(s=>`${s} \u2014 uses hard delete while project has soft delete pattern`)}:n?{result:"PASS",message:"Consistent soft delete pattern used"}:{result:"PASS",message:"Consistent hard delete pattern (no soft delete used)"}}};var ai={id:"SCH-09",name:"N+1 query pattern detection",module:"schema",layer:"L4",priority:"P2",description:"Detects fetch-list-then-fetch-each-item patterns that cause performance collapse under load",fixCost:100,fixSize:"M",async run(e){let 
t=e.files.filter(o=>(o.endsWith(".ts")||o.endsWith(".tsx")||o.endsWith(".js")||o.endsWith(".jsx"))&&!o.includes("node_modules")&&!o.includes("migration")),n=/\.(from|select)\(.*\)(?:(?!\.single\(\)).)*$/,i=/for\s*\(.*\)\s*\{[^}]*(?:\.from\(|\.select\(|\.rpc\(|supabase\.|fetch\()/s,r=/\.map\(\s*(?:async\s*)?\([^)]*\)\s*=>\s*(?:{[^}]*)?(?:\.from\(|\.select\(|\.rpc\(|supabase\.|fetch\()/s,s=[];for(let o of t){let a;try{a=await e.readFile(o)}catch{continue}i.test(a)?s.push(`${o} \u2014 DB/API call inside for loop`):r.test(a)&&s.push(`${o} \u2014 DB/API call inside .map()`)}return s.length>0?{result:"FAIL",message:`N+1 query pattern detected in ${s.length} file${s.length>1?"s":""}`,evidence:s.slice(0,5)}:{result:"PASS",message:"No N+1 query patterns detected"}}};var ci={id:"SCH-10",name:"Unbounded query guard",module:"schema",layer:"L4",priority:"P2",description:"Queries without LIMIT/pagination can exhaust memory under real data volumes",fixCost:100,fixSize:"M",async run(e){let t=e.files.filter(s=>(s.endsWith(".ts")||s.endsWith(".tsx")||s.endsWith(".js")||s.endsWith(".jsx"))&&!s.includes("node_modules")&&!s.includes("migration")&&!s.includes("seed")),n=/\.from\(['"`]\w+['"`]\)\s*\.select\(/,i=/\.limit\(|\.range\(|\.single\(|\.maybeSingle\(|\.eq\(.*id|pagination|paginate|page\s*[=:]/i,r=[];for(let s of t){let o;try{o=await e.readFile(s)}catch{continue}n.test(o)&&!i.test(o)&&r.push(`${s} \u2014 .from().select() without .limit()/.range()/.single()`)}return r.length>0?{result:"FAIL",message:`Unbounded queries found in ${r.length} file${r.length>1?"s":""}`,evidence:r.slice(0,5)}:{result:"PASS",message:"All queries appear to have bounds (limit/range/single)"}}};var ls=new 
Set(["ARCH-01","ARCH-02","ARCH-03","ARCH-04","ARCH-05","AUTH-01","AUTH-02","AUTH-03","AUTH-05","AUTH-06","AUTH-11","AUTH-13","AUTH-14","BIL-01","BIL-02","BIL-03","BIL-04","BIL-09","BIL-14","BIL-16","BIL-17","ADM-01","ADM-02","ADM-08","ADM-11","ENV-01","ENV-02","CFG-01","ERR-01"]),ui=[pe,fe,me,ge,he,Ae,be,$e,ve,Ce,_e,ke,Ie,Pe,we,Le,Ne,Re,Ee,Fe,Te,xe,De,Ue,Me,Oe,We,je,Be,He,ze,qe,Ke,Ge,Ve,Ye,Xe,Qe,Je,Ze,et,tt,it,st,nt,rt,ot,at,ct,lt,ut,dt,pt,ft,mt,gt,ht,St,yt,At,bt,$t,vt,Ct,_t,kt,It,Pt,wt,Lt,Nt,Rt,Et,Ft,Tt,xt,Dt,Ut,Mt,Ot,Wt,jt,Bt,Ht,zt,qt,Kt,Gt,Vt,Yt,Jt,Zt,ei,ti,ii,si,ni,ri,oi,ai,ci],li={"AUTH-01":"supabase","AUTH-02":"supabase","AUTH-03":"supabase","AUTH-10":"supabase","AUTH-11":"supabase","AUTH-13":"supabase","AUTH-17":"supabase","AUTH-18":"supabase","AUTH-27":"supabase","AUTH-28":"supabase","BIL-01":"stripe","BIL-02":"stripe","BIL-03":"stripe","BIL-05":"stripe","BIL-12":"stripe","BIL-15":"stripe","BIL-19":"stripe","BIL-20":"stripe","BIL-22":"stripe"};for(let e of ui)e.phase=ls.has(e.id)?1:2,li[e.id]&&(e.vendor=li[e.id]);var di=ui;import fi from"fs/promises";import K from"path";var us=new Set(["node_modules",".git",".next","dist","build",".vercel",".turbo","coverage",".nyc_output","__pycache__",".svelte-kit"]),pi=new Set([".ts",".tsx",".js",".jsx",".mjs",".cjs",".sql",".env",".env.local",".env.example"]);async function mi(e){let t=[];return await gi(e,e,t),t}async function gi(e,t,n){let i;try{i=await fi.readdir(e,{withFileTypes:!0})}catch{return}for(let r of i)if(!(r.name.startsWith(".")&&r.name!==".env"&&r.name!==".env.local"&&r.name!==".env.example"&&r.isDirectory()))if(r.isDirectory()){if(us.has(r.name))continue;await gi(K.join(e,r.name),t,n)}else{let s=K.extname(r.name),o=r.name;(pi.has(s)||pi.has(o))&&n.push(K.relative(t,K.join(e,r.name)))}}async function Y(e,t){let n=K.resolve(e,t);return fi.readFile(n,"utf-8")}import ds from"fs/promises";import ps from"path";async function hi(e,t,n){let 
i={hasStripe:!1,hasSupabase:!1,detectedVendors:[]},r=ps.join(e,"package.json");try{let s=await ds.readFile(r,"utf-8"),o=JSON.parse(s),a={...o.dependencies,...o.devDependencies};(a.stripe||a["@stripe/stripe-js"]||a["@stripe/react-stripe-js"])&&(i.hasStripe=!0),(a["@supabase/supabase-js"]||a["@supabase/ssr"]||a["@supabase/auth-helpers-nextjs"])&&(i.hasSupabase=!0)}catch{}if((!i.hasStripe||!i.hasSupabase)&&t&&n){let s=/from\s+["']stripe["']|new\s+Stripe\s*\(|stripe[_-]webhook|STRIPE_SECRET|loadStripe\s*\(/,o=/from\s+["']@supabase|createClient\s*\(|SUPABASE_URL|supabase\.auth\.|supabase\.from\s*\(/,a=t.filter(f=>f.endsWith(".ts")||f.endsWith(".tsx")||f.endsWith(".js")||f.endsWith(".jsx")),c=a.filter(f=>/stripe|billing|payment|webhook|checkout|supabase|auth/i.test(f)),l=a.filter(f=>!/stripe|billing|payment|webhook|checkout|supabase|auth/i.test(f)),p=[...c,...l.slice(0,30)];for(let f of p){if(i.hasStripe&&i.hasSupabase)break;try{let S=await n(f);!i.hasStripe&&s.test(S)&&(i.hasStripe=!0),!i.hasSupabase&&o.test(S)&&(i.hasSupabase=!0)}catch{continue}}}return i.hasStripe&&i.detectedVendors.push("stripe"),i.hasSupabase&&i.detectedVendors.push("supabase"),i}var fs={L1:"Architecture",L2:"Safety",L3:"Foundation",L4:"Schema"},X=class{evaluate(t,n,i){let r=this.computeLayerScores(t);return{project:i,timestamp:new Date().toISOString(),mode:n,checks:t,layers:r}}computeLayerScores(t){let n={},i=new Set;for(let r of t)i.add(r.layer);for(let r of i){let o=t.filter(f=>f.layer===r).filter(f=>f.result!=="N/A"&&f.result!=="UNKNOWN"&&f.result!=="NOT_APPLICABLE"),a=o.filter(f=>f.result==="PASS").length,c=o.filter(f=>f.result==="FAIL").length,l=o.length,p=l>0?Math.round(a/l*10):0;n[r]={name:fs[r]??r,passed:a,failed:c,total:l,score:p}}return n}};var ms={"trust-score":["L2","L3","L4"],architecture:["L1"],all:["L1","L2","L3","L4"]};async function yi(e,t={}){let{mode:n="trust-score",engine:i=new X,allChecks:r=!1,disabledChecks:s}=t,o=Si.resolve(e),a=await mi(o),c=ms[n],p=await 
hi(o,a,d=>Y(o,d)),f={rootDir:o,files:a,fingerprint:p,readFile:d=>Y(o,d),grepFiles:(d,y)=>Se({rootDir:o,files:a,readFile:q=>Y(o,q)},d,y)},S=s??gs(),A=di.filter(d=>c.includes(d.layer)&&(r||d.phase===1)),P=[];for(let d of A)if(!S.has(d.id)){if(hs(d,p)){P.push({id:d.id,name:d.name,module:d.module,layer:d.layer,priority:d.priority,category:d.category,result:"NOT_APPLICABLE",message:Ss(d)});continue}try{let y=await d.run(f);P.push({id:d.id,name:d.name,module:d.module,layer:d.layer,priority:d.priority,category:d.category,result:y.result,message:y.message,evidence:y.evidence,shadow:d.shadow})}catch(y){P.push({id:d.id,name:d.name,module:d.module,layer:d.layer,priority:d.priority,category:d.category,result:"UNKNOWN",message:`Check failed with error: ${y instanceof Error?y.message:String(y)}`,shadow:d.shadow})}}let F=Si.basename(o);return i.evaluate(P,n,F)}function gs(){let e=process.env.VIBECODIQ_DISABLED_CHECKS||"";return e.trim()?new Set(e.split(",").map(t=>t.trim().toUpperCase()).filter(Boolean)):new Set}function hs(e,t){return e.vendor==="stripe"&&!t.hasStripe||e.vendor==="supabase"&&!t.hasSupabase}function Ss(e){return e.vendor==="stripe"?"No Stripe integration detected \u2014 check not applicable":e.vendor==="supabase"?"No Supabase integration detected \u2014 check not applicable":"Prerequisite not detected \u2014 check not applicable"}var u="\x1B[0m",g="\x1B[1m",m="\x1B[2m",I="\x1B[31m",C="\x1B[32m",R="\x1B[33m",ne="\x1B[36m",Q="\x1B[37m",ys="\x1B[41m";var As="\x1B[43m";function T(e){switch(e){case"PASS":return`${C}\u2713${u}`;case"FAIL":return`${I}\u2717${u}`;case"REVIEW":return`${R}\u26A0${u}`;case"UNKNOWN":return`${R}?${u}`;case"NOT_APPLICABLE":return`${m}\u25CB${u}`;case"N/A":return`${m}\u2014${u}`;default:return" "}}function $i(e){switch(e){case"P0":return`${ys}${Q}${g} P0 ${u}`;case"P1":return`${As}${Q}${g} P1 ${u}`;case"P2":return`${m} P2 ${u}`;default:return` ${e} `}}function vi(e){let 
t=e.score,n=10-t;return`${t>=7?C:t>=4?R:I}${"\u2588".repeat(t)}${m}${"\u2591".repeat(n)}${u} ${t}/10`}var Ci={"trust-score":"SAFETY SCAN",architecture:"ARCHITECTURE SCAN",all:"FULL SCAN"};function bs(e,t,n){return e>0?{icon:"\u{1F534}",text:"NOT READY FOR PRODUCTION",color:I}:t>0?{icon:"\u{1F7E1}",text:"NEEDS ATTENTION",color:R}:n>0?{icon:"\u26AA",text:"MINOR ISSUES",color:m}:{icon:"\u{1F7E2}",text:"READY FOR PRODUCTION",color:C}}function J(e){let t=d=>process.stdout.write(d+`
11
+ `),n=Ci[e.mode]??"SCAN",i=e.checks.filter(d=>!d.shadow),r=i.filter(d=>d.result==="FAIL"),s=i.filter(d=>d.result==="PASS"),o=i.filter(d=>d.result==="UNKNOWN"),a=i.filter(d=>d.result==="NOT_APPLICABLE"),c=i.filter(d=>d.result==="REVIEW"),l=r.filter(d=>d.priority==="P0"),p=r.filter(d=>d.priority==="P1"),f=r.filter(d=>d.priority!=="P0"&&d.priority!=="P1"),S=bs(l.length,p.length,f.length);if(t(""),t(`${g}\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557${u}`),t(`${g}\u2551 VIBECODIQ \u2014 ${n.padEnd(43)}\u2551${u}`),t(`${g}\u2551 Project: ${e.project.padEnd(46)}\u2551${u}`),t(`${g}\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D${u}`),t(""),t(` ${S.icon} ${g}${S.color}${S.text}${u}`),r.length>0){let d=[];l.length>0&&d.push(`${I}${l.length} critical${u}`),p.length>0&&d.push(`${R}${p.length} important${u}`),f.length>0&&d.push(`${m}${f.length} quality${u}`),t(` ${r.length} issues found (${d.join(", ")})`)}t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t("");let A=["L1","L2","L3","L4"];for(let d of A){let y=e.layers[d];if(!y)continue;let q=`${d} ${y.name}`;t(` ${g}${q.padEnd(22)}${u} ${vi(y)} ${y.passed}/${y.total} pass ${y.failed>0?`${I}${y.failed} fail${u}`:`${C}0 fail${u}`}`)}if(l.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(` \u{1F534} ${I}${g}TIER 1 \u2014 Launch Blockers (${l.length})${u}`);for(let d of l)if(t(""),t(` ${T(d.result)} 
${$i(d.priority)} ${g}${d.id}${u} ${d.name}`),t(` ${m}${d.message}${u}`),d.evidence){for(let y of d.evidence.slice(0,3))t(` ${m}\u2192 ${y}${u}`);d.evidence.length>3&&t(` ${m} ... and ${d.evidence.length-3} more${u}`)}}if(p.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(` \u{1F7E1} ${R}${g}TIER 2 \u2014 Important Issues (${p.length})${u}`),t("");for(let d of p)t(` ${T(d.result)} ${m}${d.id.padEnd(16)}${u}${d.name}`);t(""),t(` ${ne}\u2192 npx @vibecodiq/cli scan --fix${u} ${m}for details + AI fix prompts${u}`)}if(f.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(` \u26AA ${m}${g}TIER 3 \u2014 Quality & Drift (${f.length})${u}`),t("");for(let d of f)t(` ${T(d.result)} ${m}${d.id.padEnd(16)}${d.name}${u}`)}if(s.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(`${C}${g} PASSED (${s.length})${u}`),t("");for(let d of s)t(` ${T(d.result)} ${m}${d.id.padEnd(16)}${u}${d.name}`)}if(c.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(`${R}${g} MANUAL REVIEW (${c.length})${u} ${m}\u2014 pattern detected, needs verification${u}`),t("");for(let d of c)t(` ${T(d.result)} ${m}${d.id.padEnd(16)}${u}${d.name}`)}if(o.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(`${R}${g} UNKNOWN (${o.length})${u} ${m}\u2014 could not determine${u}`),t("");for(let d of o)t(` ${T(d.result)} ${m}${d.id.padEnd(16)}${d.name}${u}`)}if(a.length>0){t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t(""),t(`${m}${g} NOT APPLICABLE (${a.length})${u} ${m}\u2014 vendor/feature not detected in project${u}`),t("");for(let d of a)t(` ${T(d.result)} ${m}${d.id.padEnd(16)}${d.name}${u}`)}t(""),t(`${m}${"\u2500".repeat(62)}${u}`),t("");let P=e.checks.length,F=[`${C}${s.length} pass${u}`,r.length>0?`${I}${r.length} fail${u}`:`${C}0 fail${u}`];c.length>0&&F.push(`${R}${c.length} review${u}`),o.length>0&&F.push(`${o.length} unknown`),a.length>0&&F.push(`${m}${a.length} n/a${u}`),t(` ${g}Total:${u} ${P} checks \u2014 ${F.join(", ")}`),r.length>0?(t(""),t(` ${ne}${g}npx 
@vibecodiq/cli scan --fix${u} ${m}Trust Score + fix cost + AI fix prompts (Pro)${u}`)):t(` ${C}${g}All checks passed.${u}`),ki()}function Z(e){console.log(JSON.stringify(e,null,2))}var Ai="\x1B[35m";function bi(e){switch(e){case"A":return C;case"B":return C;case"C":return R;case"D":return I;case"F":return I;default:return Q}}function _i(e,t){let n=p=>process.stdout.write(p+`
12
+ `),i=Ci[e.mode]??"SCAN",r=e.checks.filter(p=>!p.shadow),s=r.filter(p=>p.result==="FAIL"),o=r.filter(p=>p.result==="PASS"),a=r.filter(p=>p.result==="UNKNOWN");n(""),n(`${g}\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557${u}`),n(`${g}\u2551 VIBECODIQ \u2014 ${i.padEnd(43)}\u2551${u}`),n(`${g}\u2551 Project: ${e.project.padEnd(46)}\u2551${u}`),n(`${g}\u255A\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255D${u}`),n(""),n(` ${g}Trust Score:${u} ${bi(t.grade)}${g}${t.grade}${u} ${m}(${t.trust_score}/100)${u}`),n(` ${g}Fix Cost:${u} ${ne}~${t.fix_cost_hours}h${u} estimated`),n(` ${g}Verdict:${u} ${t.fix_vs_rebuild==="FIX"?`${C}FIX${u} \u2014 worth fixing`:`${I}REBUILD${u} \u2014 consider rebuilding`}`),n(""),n(`${m}${"\u2500".repeat(62)}${u}`),n("");let c=["L1","L2","L3","L4"];for(let p of c){let f=e.layers[p];if(!f)continue;let S=`${p} ${f.name}`;n(` ${g}${S.padEnd(22)}${u} ${vi(f)} ${f.passed}/${f.total} pass ${f.failed>0?`${I}${f.failed} fail${u}`:`${C}0 fail${u}`}`)}if(s.length>0){n(""),n(`${m}${"\u2500".repeat(62)}${u}`),n(""),n(` ${I}${g}FAILED (${s.length})${u}`);for(let p of s)if(n(""),n(` ${T(p.result)} ${$i(p.priority)} ${g}${p.id}${u} ${p.name}`),n(` ${m}${p.message}${u}`),p.evidence){for(let f of p.evidence.slice(0,3))n(` ${m}\u2192 ${f}${u}`);p.evidence.length>3&&n(` ${m} ... 
and ${p.evidence.length-3} more${u}`)}}if(t.fix_batches&&t.fix_batches.length>0){n(""),n(`${m}${"\u2500".repeat(62)}${u}`),n(""),n(` ${Ai}${g}FIX BATCHES (${t.fix_batches.length})${u}`),n(` ${m}Copy-paste these prompts into your AI tool to fix issues.${u}`);for(let p of t.fix_batches)n(""),n(` ${Ai}${g}Batch: ${p.title}${u} ${m}(${p.checks.join(", ")})${u}`),n(` ${m}${"\u2504".repeat(56)}${u}`),n(` ${Q}${p.prompt}${u}`)}if(o.length>0){n(""),n(`${m}${"\u2500".repeat(62)}${u}`),n(""),n(`${C}${g} PASSED (${o.length})${u}`),n("");for(let p of o)n(` ${T(p.result)} ${m}${p.id.padEnd(16)}${u}${p.name}`)}n(""),n(`${m}${"\u2500".repeat(62)}${u}`),n("");let l=e.checks.length;n(` ${g}Total:${u} ${l} checks \u2014 ${C}${o.length} pass${u}, ${s.length>0?`${I}${s.length} fail${u}`:`${C}0 fail${u}`}, ${a.length} unknown`),n(` ${g}Trust Score:${u} ${bi(t.grade)}${g}${t.grade}${u} (${t.trust_score}/100) ${g}Fix:${u} ~${t.fix_cost_hours}h ${g}Verdict:${u} ${t.fix_vs_rebuild}`),n(""),n(` ${m}${t.summary}${u}`),ki()}function ki(){let e=t=>process.stdout.write(t+`
13
+ `);e(""),e(`${m}${"\u2500".repeat(62)}${u}`),e(` ${m}Static source-code analysis focused on high-confidence patterns.${u}`),e(` ${m}Not a complete security audit. Some findings may need manual review.${u}`),e(` ${m}https://vibecodiq.com/scan | https://asastandard.org${u}`),e("")}function re(){let e=t=>process.stdout.write(t+`
14
+ `);e(""),e(` ${R}\u26A0 API unavailable. Showing local results only.${u}`),e(` ${m}Run again later for Trust Score + fix prompts.${u}`),e(` ${m}Status: https://status.vibecodiq.com${u}`),e("")}import V from"fs";import oe from"path";var G=["#!/bin/bash","# ASA Structure Check \u2014 validates Slice Architecture rules","# Run: npx @vibecodiq/cli guard check (or ./check-structure.sh)","","set -e","",'RULES_FILE=".asa/rules/architecture.md"',"ERRORS=0","WARNINGS=0","",'echo "\u{1F50D} ASA Structure Check \u2014 validating architecture rules..."','echo " Rules: $RULES_FILE"','echo ""',"","fail() {",' echo "\u274C FAIL: $1"',' echo " \u2192 $2"',' echo " \u2192 Fix: see $RULES_FILE"',' echo ""'," ERRORS=$((ERRORS + 1))","}","","warn() {",' echo "\u26A0\uFE0F WARN: $1"',' echo " \u2192 $2"',' echo ""'," WARNINGS=$((WARNINGS + 1))","}","","pass() {",' echo "\u2705 PASS: $1"',"}","","# CHECK 1: Business logic in src/domains/",'echo "--- Check 1: Business logic in src/domains/ ---"',"",'BIZ_IN_PAGES=$(grep -rl "supabase\\.\\(from\\|auth\\|rpc\\|storage\\)" src/pages/ 2>/dev/null || true)','if [ -n "$BIZ_IN_PAGES" ]; then'," for f in $BIZ_IN_PAGES; do",' fail "Business logic in page file: $f" \\',' "Supabase calls must be in src/domains/, not in pages."'," done","else",' pass "No business logic found in src/pages/"',"fi","",'if [ -d "src/components" ]; then',' BIZ_IN_COMPONENTS=$(grep -rl "supabase\\.\\(from\\|auth\\|rpc\\|storage\\)" src/components/ 2>/dev/null || true)',' if [ -n "$BIZ_IN_COMPONENTS" ]; then'," for f in $BIZ_IN_COMPONENTS; do",' fail "Business logic in component file: $f" \\',' "Supabase calls must be in src/domains/, not in components/."'," done"," else",' pass "No business logic found in src/components/"'," fi","fi","","# CHECK 2: src/domains/ directory exists",'echo ""','echo "--- Check 2: src/domains/ directory exists ---"',"",'TS_FILES=$(find src/ -name "*.ts" -o -name "*.tsx" | grep -v "node_modules" | grep -v "vite-env" | wc -l)',"",'if [ 
"$TS_FILES" -gt 5 ]; then',' if [ -d "src/domains" ]; then',' pass "src/domains/ directory exists"'," DOMAIN_COUNT=$(find src/domains -mindepth 1 -maxdepth 1 -type d | wc -l)",' if [ "$DOMAIN_COUNT" -gt 0 ]; then',' pass "Found $DOMAIN_COUNT domain(s) in src/domains/"'," else",' warn "src/domains/ exists but is empty" \\',' "Add domain folders like src/domains/auth/, src/domains/tasks/"'," fi"," else",' fail "src/domains/ directory missing" \\',' "Business logic must be in src/domains/<domain>/<slice>/."'," fi","else",' pass "Project is small ($TS_FILES files) \u2014 src/domains/ not yet required"',"fi","","# CHECK 3: No cross-domain imports",'echo ""','echo "--- Check 3: No cross-domain imports ---"',"",'if [ -d "src/domains" ]; then'," CROSS_DOMAIN_VIOLATIONS=0"," for domain_dir in src/domains/*/; do",' [ ! -d "$domain_dir" ] && continue',' domain_name=$(basename "$domain_dir")',' OTHER_DOMAINS=$(find src/domains -mindepth 1 -maxdepth 1 -type d ! -name "$domain_name" -exec basename {} \\;)'," for other in $OTHER_DOMAINS; do",' VIOLATIONS=$(grep -rn "from.*domains/$other\\|import.*domains/$other" "$domain_dir" 2>/dev/null || true)',' if [ -n "$VIOLATIONS" ]; then'," while IFS= read -r line; do",' fail "Cross-domain import in $domain_name \u2192 $other" "$line"',' done <<< "$VIOLATIONS"'," CROSS_DOMAIN_VIOLATIONS=$((CROSS_DOMAIN_VIOLATIONS + 1))"," fi"," done"," done",' [ "$CROSS_DOMAIN_VIOLATIONS" -eq 0 ] && pass "No cross-domain imports detected"',"else",' pass "No domains yet \u2014 cross-domain check skipped"',"fi","","# CHECK 4: Pages are thin wrappers",'echo ""','echo "--- Check 4: Pages are thin wrappers ---"',"",'if [ -d "src/pages" ]; then'," for page in src/pages/*.tsx; do",' [ ! -f "$page" ] && continue',' PAGE_LINES=$(wc -l < "$page")',' if [ "$PAGE_LINES" -gt 80 ]; then',' fail "Page too large: $page ($PAGE_LINES lines)" \\',' "Pages should be <80 lines. 
Move logic to src/domains/."'," fi"," done",' pass "Page thin wrapper check complete"',"fi","","# CHECK 5: shared/ has no business logic",'echo ""','echo "--- Check 5: shared/ has no business logic ---"',"",'if [ -d "src/shared" ]; then',' BIZ_IN_SHARED=$(grep -rln "TaskList\\|TaskForm\\|PricingCard\\|AdminUser\\|LoginForm\\|RegisterForm" src/shared/ 2>/dev/null || true)',' if [ -n "$BIZ_IN_SHARED" ]; then'," for f in $BIZ_IN_SHARED; do",' fail "Business component in shared/: $f" \\',' "Domain-specific components belong in src/domains/."'," done"," else",' pass "No business logic found in src/shared/"'," fi","else",' pass "src/shared/ not yet created \u2014 check skipped"',"fi","","# RESULTS",'echo ""','echo "=========================================="','if [ "$ERRORS" -gt 0 ]; then',' echo "\u274C ASA STRUCTURE CHECK FAILED"',' echo " $ERRORS error(s), $WARNINGS warning(s)"',' echo " Read the architecture rules: $RULES_FILE"',' echo "=========================================="'," exit 1","else",' echo "\u2705 ASA STRUCTURE CHECK PASSED"',' echo " 0 errors, $WARNINGS warning(s)"',' echo "=========================================="'," exit 0","fi",""].join(`
15
+ `);var ee=["# ASA Architecture Rules","","This project follows the ASA Slice Architecture. All code in this repository","MUST follow these rules. CI checks will fail if rules are violated.","","---","","## Rule 1: Business logic goes in `src/domains/`","","All business logic MUST be organized in domain folders:","","```","src/domains/","\u251C\u2500\u2500 auth/ # Authentication and authorization","\u251C\u2500\u2500 billing/ # Payments and subscriptions","\u2514\u2500\u2500 admin/ # Admin panel and user management","```","","Each domain contains **slices** \u2014 self-contained features:","","```","src/domains/<domain>/<slice>/","\u251C\u2500\u2500 <Component>.tsx # React component (UI)","\u251C\u2500\u2500 use<Action>.ts # React hook (data fetching / mutations)","\u2514\u2500\u2500 types.ts # Types (optional)","```","","### What is a Slice?","","One slice = one user action. Examples:","","| Domain | Slice | What it does |","|--------|-------|-------------|","| `auth` | `login` | User logs in |","| `auth` | `register` | User creates account |","| `billing` | `subscribe` | User subscribes to a plan |","| `billing` | `check-limits` | Check if user hit plan limits |","| `admin` | `user-list` | Admin sees all users |","","---","","## Rule 2: Pages are thin wrappers","","Page files MUST be thin wrappers that import from `src/domains/`.","Pages contain NO business logic \u2014 only layout and composition.","Maximum 80 lines per page file.","","---","","## Rule 3: Shared infrastructure goes in `src/shared/`","","Database clients, auth helpers, and external service configs go in `src/shared/`:","","```","src/shared/","\u251C\u2500\u2500 db/","\u2502 \u2514\u2500\u2500 supabase-client.ts","\u251C\u2500\u2500 auth/","\u2502 \u251C\u2500\u2500 AuthGuard.tsx","\u2502 \u2514\u2500\u2500 useCurrentUser.ts","\u2514\u2500\u2500 billing/"," \u2514\u2500\u2500 stripe-client.ts","```","","---","","## Rule 4: No cross-domain imports","","A file in one domain MUST NOT import from 
another domain.","Use `src/shared/` for cross-domain communication.","","---","","## Rule 5: File naming conventions","","| Type | Pattern | Example |","|------|---------|---------|","| Component | `PascalCase.tsx` | `LoginForm.tsx` |","| Hook | `use<Action>.ts` | `useLogin.ts` |","| Types | `types.ts` | `types.ts` |","| Page | `page.tsx` | `page.tsx` |","| Shared utility | `camelCase.ts` | `supabase-client.ts` |","","---","","**If CI fails, restructure your code to follow these rules.**",""].join(`
16
+ `),te=["name: Vibecodiq Guard","","on:"," push:"," branches: [main]"," paths-ignore:"," - '.asa/logs/**'"," pull_request:"," branches: [main]","","permissions:"," contents: read"," pull-requests: write"," actions: read","","jobs:"," vibecodiq-guard:"," name: Vibecodiq Safety Scan"," runs-on: ubuntu-latest",""," steps:"," - name: Checkout code"," uses: actions/checkout@v4"," with:"," fetch-depth: 0",""," - name: Setup Node.js"," uses: actions/setup-node@v4"," with:"," node-version: '18'",""," - name: Run Vibecodiq scan"," id: scan"," run: |"," npx @vibecodiq/cli@latest scan --all --json > /tmp/scan-result.json 2>/dev/null || true",` FAIL_COUNT=$(cat /tmp/scan-result.json | python3 -c "import sys,json; r=json.load(sys.stdin); print(sum(1 for c in r.get('checks',[]) if c.get('result')=='FAIL'))" 2>/dev/null || echo 0)`,` TOTAL=$(cat /tmp/scan-result.json | python3 -c "import sys,json; r=json.load(sys.stdin); print(len(r.get('checks',[])))" 2>/dev/null || echo 0)`,' echo "fail_count=$FAIL_COUNT" >> "$GITHUB_OUTPUT"',' echo "total=$TOTAL" >> "$GITHUB_OUTPUT"',' if [ "$FAIL_COUNT" -gt 0 ]; then',' echo "has_failures=true" >> "$GITHUB_OUTPUT"'," else",' echo "has_failures=false" >> "$GITHUB_OUTPUT"'," fi",""," - name: Call API for fix prompts"," id: api"," if: steps.scan.outputs.has_failures == 'true' && env.VIBECODIQ_API_TOKEN != ''"," env:"," VIBECODIQ_API_TOKEN: ${{ secrets.VIBECODIQ_API_TOKEN }}"," run: |"," npx @vibecodiq/cli@latest scan --all --fix --json > /tmp/fix-result.json 2>/dev/null || true"," if [ -s /tmp/fix-result.json ]; then",' echo "api_ok=true" >> "$GITHUB_OUTPUT"'," else",' echo "api_ok=false" >> "$GITHUB_OUTPUT"'," fi",""," - name: Upload scan artifact"," if: always()"," uses: actions/upload-artifact@v4"," with:"," name: vibecodiq-scan"," path: /tmp/scan-result.json"," retention-days: 30",""," - name: Write job summary"," if: always()"," run: |"," echo '## Vibecodiq Safety Scan' >> $GITHUB_STEP_SUMMARY"," FAIL_COUNT=${{ 
steps.scan.outputs.fail_count }}"," TOTAL=${{ steps.scan.outputs.total }}",' if [ "$FAIL_COUNT" = "0" ]; then'," echo '\u2705 **All checks passed** \u2014 your app is production-ready.' >> $GITHUB_STEP_SUMMARY"," else",' echo "\u274C **$FAIL_COUNT failures** out of $TOTAL checks." >> $GITHUB_STEP_SUMMARY'," echo '' >> $GITHUB_STEP_SUMMARY"," echo 'Download the scan artifact for full details.' >> $GITHUB_STEP_SUMMARY"," fi"," echo '' >> $GITHUB_STEP_SUMMARY"," echo '---' >> $GITHUB_STEP_SUMMARY"," echo '*Scanned by [Vibecodiq](https://vibecodiq.com) production-readiness checks*' >> $GITHUB_STEP_SUMMARY",""," - name: Post PR comment"," if: github.event_name == 'pull_request' && steps.scan.outputs.has_failures == 'true'"," uses: actions/github-script@v7"," with:"," script: |"," const fs = require('fs');"," const failCount = '${{ steps.scan.outputs.fail_count }}';"," const total = '${{ steps.scan.outputs.total }}';"," "," // Build comment body"," let body = `## \u274C Vibecodiq Safety Scan \u2014 ${failCount} failures\\n\\n`;"," body += `**${failCount}** of **${total}** checks failed.\\n\\n`;"," "," // Try to include fix prompts from API result"," try {"," const fixResult = JSON.parse(fs.readFileSync('/tmp/fix-result.json', 'utf-8'));"," if (fixResult.fix_batches && fixResult.fix_batches.length > 0) {"," body += `**Trust Score:** ${fixResult.grade} (${fixResult.trust_score}/100) \xB7 `;"," body += `**Fix:** ~${fixResult.fix_cost_hours}h \xB7 `;"," body += `**Verdict:** ${fixResult.fix_vs_rebuild}\\n\\n`;"," body += `### Top Fix Batches\\n\\n`;"," for (const batch of fixResult.fix_batches.slice(0, 3)) {"," body += `<details>\\n<summary>${batch.title} (${batch.checks.join(', ')})</summary>\\n\\n`;"," body += '```\\n' + batch.prompt + '\\n```\\n\\n';"," body += `</details>\\n\\n`;"," }"," }"," } catch (e) {}"," "," body += `---\\n*Scanned by [Vibecodiq](https://vibecodiq.com) production-readiness checks*`;"," "," // Find and update existing comment (sticky)"," const 
marker = '## \u274C Vibecodiq Safety Scan';"," const { data: comments } = await github.rest.issues.listComments({"," owner: context.repo.owner,"," repo: context.repo.repo,"," issue_number: context.issue.number,"," });"," const existing = comments.find(c => c.body && c.body.startsWith(marker));"," "," if (existing) {"," await github.rest.issues.updateComment({"," owner: context.repo.owner,"," repo: context.repo.repo,"," comment_id: existing.id,"," body: body,"," });"," } else {"," await github.rest.issues.createComment({"," owner: context.repo.owner,"," repo: context.repo.repo,"," issue_number: context.issue.number,"," body: body,"," });"," }",""," - name: Delete PR comment on pass"," if: github.event_name == 'pull_request' && steps.scan.outputs.has_failures == 'false'"," uses: actions/github-script@v7"," with:"," script: |"," const marker = '## \u274C Vibecodiq Safety Scan';"," const { data: comments } = await github.rest.issues.listComments({"," owner: context.repo.owner,"," repo: context.repo.repo,"," issue_number: context.issue.number,"," });"," const existing = comments.find(c => c.body && c.body.startsWith(marker));"," if (existing) {"," await github.rest.issues.deleteComment({"," owner: context.repo.owner,"," repo: context.repo.repo,"," comment_id: existing.id,"," });"," }",""," - name: Fail if checks failed"," if: steps.scan.outputs.has_failures == 'true'"," run: |",' echo "\u274C Vibecodiq scan found ${{ steps.scan.outputs.fail_count }} failures."',' echo "Download the scan artifact or check the PR comment for fix instructions."'," exit 1",""].join(`
17
+ `);var $s="\x1B[36m",vs="\x1B[32m",Cs="\x1B[33m";var Ii="\x1B[1m",M="\x1B[2m",L="\x1B[0m";async function Pi(e,t=!1){let n=oe.resolve(e);console.log(""),console.log(` ${$s}\u{1F6E1}\uFE0F Initializing ASA Guard in ${n}${L}`),console.log("");let i=[{path:".asa/rules/architecture.md",content:ee},{path:".github/workflows/asa-guard.yml",content:te},{path:"check-structure.sh",content:G,executable:!0}],r=0,s=0;for(let o of i){let a=oe.join(n,o.path),c=oe.dirname(a);V.existsSync(c)||V.mkdirSync(c,{recursive:!0}),V.existsSync(a)?(console.log(` ${Cs}\u23ED ${o.path}${L} ${M}(already exists)${L}`),s++):(V.writeFileSync(a,o.content,"utf-8"),o.executable&&V.chmodSync(a,493),console.log(` ${vs}\u2713 ${o.path}${L}`),r++)}console.log(""),console.log(` ${Ii}Done:${L} ${r} file${r!==1?"s":""} created, ${s} skipped.`),console.log(""),console.log(` ${Ii}Next steps:${L}`),console.log(` ${M}1.${L} Commit the new files: ${M}git add -A && git commit -m "chore: add ASA Guard"${L}`),console.log(` ${M}2.${L} Push to GitHub \u2014 CI will run architecture checks on every PR`),console.log(` ${M}3.${L} Run locally: ${M}npx @vibecodiq/cli guard check${L}`),console.log(""),console.log(` ${M}Learn more: https://asastandard.org/checks/methodology${L}`),console.log("")}import _ from"fs";import $ from"path";var _s="\x1B[36m",wi="\x1B[32m",ks="\x1B[33m",Li="\x1B[31m",Ni="\x1B[1m",W="\x1B[2m",N="\x1B[0m";async function Ri(e){let t=$.resolve(e);console.log(""),console.log(` ${_s}\u{1F6E1}\uFE0F ASA Guard \u2014 Architecture Check${N}`),console.log(` ${W} Target: ${t}${N}`),console.log("");let n=[],i=Is(t);n.push(await Ps(t,i)),n.push(await ws(t,i)),n.push(await Ls(t,i)),n.push(await Ns(t,i)),n.push(await Rs(t,i));let r=0,s=0,o=0;for(let a of n){let c=a.status==="PASS"?`${wi}\u2705${N}`:a.status==="FAIL"?`${Li}\u274C${N}`:a.status==="WARN"?`${ks}\u26A0\uFE0F${N}`:`${W}\u23ED${N}`;if(console.log(` ${c} ${a.name}`),a.status!=="PASS"&&a.status!=="SKIP"&&(console.log(` 
${W}${a.message}${N}`),a.evidence))for(let l of a.evidence.slice(0,3))console.log(` ${W}\u2192 ${l}${N}`);a.status==="FAIL"?r++:a.status==="WARN"?s++:a.status==="PASS"&&o++}console.log(""),r>0?(console.log(` ${Li}${Ni}\u274C ASA GUARD CHECK FAILED${N} \u2014 ${r} error${r>1?"s":""}, ${s} warning${s>1?"s":""}`),console.log(` ${W} Fix violations and run again.${N}`),console.log(` ${W} Rules: .asa/rules/architecture.md${N}`)):console.log(` ${wi}${Ni}\u2705 ASA GUARD CHECK PASSED${N} \u2014 ${o} passed, ${s} warning${s>1?"s":""}`),console.log(""),process.exit(r>0?1:0)}function Is(e){return _.existsSync($.join(e,"src"))?"src":_.existsSync($.join(e,"app"))?"app":null}function j(e,t){let n=[];if(!_.existsSync(e))return n;let i=_.readdirSync(e,{withFileTypes:!0});for(let r of i){let s=$.join(e,r.name);r.name==="node_modules"||r.name===".git"||(r.isDirectory()?n.push(...j(s,t)):t.some(o=>r.name.endsWith(o))&&n.push(s))}return n}async function Ps(e,t){let n="Business logic in domains/";if(!t)return{name:n,status:"SKIP",message:"No src/ directory found"};let i=$.join(e,t,"pages"),r=$.join(e,t,"components"),s=[],o=/supabase\.(from|auth|rpc|storage)\b/;for(let a of[i,r]){if(!_.existsSync(a))continue;let c=j(a,[".ts",".tsx",".js",".jsx"]);for(let l of c){let p=_.readFileSync(l,"utf-8");o.test(p)&&s.push($.relative(e,l))}}return s.length>0?{name:n,status:"FAIL",message:`Supabase calls found in pages/components (${s.length} file${s.length>1?"s":""})`,evidence:s}:{name:n,status:"PASS",message:""}}async function ws(e,t){let n="domains/ directory exists";if(!t)return{name:n,status:"SKIP",message:"No src/ directory found"};let i=$.join(e,t,"domains"),r=j($.join(e,t),[".ts",".tsx"]);if(r.length<=5)return{name:n,status:"PASS",message:"Project is small \u2014 domains/ not yet required"};if(!_.existsSync(i))return{name:n,status:"FAIL",message:`${r.length} files in ${t}/ but no domains/ directory`,evidence:["Create src/domains/<domain>/<slice>/ for business logic"]};let 
s=_.readdirSync(i,{withFileTypes:!0}).filter(o=>o.isDirectory());return s.length===0?{name:n,status:"WARN",message:"domains/ exists but is empty"}:{name:n,status:"PASS",message:`${s.length} domain${s.length>1?"s":""} found`}}async function Ls(e,t){let n="No cross-domain imports";if(!t)return{name:n,status:"SKIP",message:"No src/ directory found"};let i=$.join(e,t,"domains");if(!_.existsSync(i))return{name:n,status:"PASS",message:"No domains yet"};let r=_.readdirSync(i,{withFileTypes:!0}).filter(o=>o.isDirectory()).map(o=>o.name),s=[];for(let o of r){let a=$.join(i,o),c=j(a,[".ts",".tsx",".js",".jsx"]),l=r.filter(p=>p!==o);for(let p of c){let f=_.readFileSync(p,"utf-8");for(let S of l)new RegExp(`from.*domains/${S}|import.*domains/${S}`).test(f)&&s.push(`${$.relative(e,p)} \u2192 imports from ${S}/`)}}return s.length>0?{name:n,status:"FAIL",message:`${s.length} cross-domain import${s.length>1?"s":""} found`,evidence:s}:{name:n,status:"PASS",message:""}}async function Ns(e,t){let n="Pages are thin wrappers";if(!t)return{name:n,status:"SKIP",message:"No src/ directory found"};let i=$.join(e,t,"pages");if(!_.existsSync(i))return{name:n,status:"PASS",message:"No pages/ directory (Next.js App Router or no pages)"};let r=j(i,[".tsx",".jsx"]),s=[];for(let o of r){let c=_.readFileSync(o,"utf-8").split(`
18
+ `).length;c>80&&s.push(`${$.relative(e,o)} (${c} lines)`)}return s.length>0?{name:n,status:"FAIL",message:`${s.length} page${s.length>1?"s":""} over 80 lines`,evidence:s}:{name:n,status:"PASS",message:""}}async function Rs(e,t){let n="shared/ has no business logic";if(!t)return{name:n,status:"SKIP",message:"No src/ directory found"};let i=$.join(e,t,"shared");if(!_.existsSync(i))return{name:n,status:"PASS",message:"No shared/ directory yet"};let r=/TaskList|TaskForm|PricingCard|AdminUser|LoginForm|RegisterForm/,s=j(i,[".ts",".tsx",".js",".jsx"]),o=[];for(let a of s){let c=_.readFileSync(a,"utf-8");r.test(c)&&o.push($.relative(e,a))}return o.length>0?{name:n,status:"FAIL",message:`Business components found in shared/ (${o.length} file${o.length>1?"s":""})`,evidence:o}:{name:n,status:"PASS",message:""}}import ie from"fs";import Ei from"path";var Es="\x1B[32m",Fs="\x1B[33m",Ts="\x1B[36m",Fi="\x1B[1m",ae="\x1B[2m",x="\x1B[0m";async function Ti(e){let t=Ei.resolve(e);console.log(""),console.log(` ${Ts}\u{1F504} Upgrading ASA Guard rules...${x}`),console.log("");let n=[{path:".asa/rules/architecture.md",content:ee},{path:".github/workflows/asa-guard.yml",content:te},{path:"check-structure.sh",content:G,executable:!0}],i=0;for(let r of n){let s=Ei.join(t,r.path);if(!ie.existsSync(s)){console.log(` ${Fs}\u23ED ${r.path}${x} ${ae}(not found \u2014 run guard init first)${x}`);continue}if(ie.readFileSync(s,"utf-8")===r.content){console.log(` ${ae}\u2713 ${r.path} (already up to date)${x}`);continue}ie.writeFileSync(s,r.content,"utf-8"),r.executable&&ie.chmodSync(s,493),console.log(` ${Es}\u2713 ${r.path}${x} ${Fi}updated${x}`),i++}console.log(""),i>0?console.log(` ${Fi}${i} file${i>1?"s":""} updated.${x} Commit the changes.`):console.log(` ${ae}All rules are already up to date.${x}`),console.log("")}function xs(e,t){if(e.startsWith(t))return e.slice(t.length).replace(/^[/\\]/,"");let n=/^\/(?:home|Users)\/[^/]+\//;if(n.test(e))return e.replace(n,"");let 
i=/^[A-Z]:\\Users\\[^\\]+\\/i;return i.test(e)?e.replace(i,""):e}function Ds(e,t){return e.map(n=>n.replace(/(?:\/(?:home|Users)\/[^\s:]+|[A-Z]:\\Users\\[^\s:]+)/g,i=>xs(i,t)))}function xi(e,t,n,i,r){let s=e.map(o=>({check_id:o.id,result:o.result,name:o.name,module:o.module,layer:o.layer,priority:o.priority,category:o.category,message:o.message,evidence:o.evidence?Ds(o.evidence,t):void 0}));return{cli_version:n,scan_mode:i,project_name:r,timestamp:new Date().toISOString(),findings:s}}import{readFileSync as Us}from"fs";import{join as Ms}from"path";import{homedir as Os}from"os";var Ws=process.env.VIBECODIQ_API_URL||"https://api.vibecodiq.com",js=1e4,ce=1;async function Bs(e,t,n){let i=new AbortController,r=setTimeout(()=>i.abort(),n);try{return await fetch(e,{...t,signal:i.signal})}finally{clearTimeout(r)}}async function Di(e,t){for(let n=0;n<=ce;n++)try{let i=await Bs(`${Ws}/scan`,{method:"POST",headers:{"Content-Type":"application/json",Authorization:`Bearer ${t}`},body:JSON.stringify(e)},js);if(i.ok)return await i.json();if(i.status===401){let r=await i.json().catch(()=>({detail:"Unauthorized"}));throw new B(r.detail??"Invalid API token")}if(i.status>=500&&n<ce)continue;return null}catch(i){if(i instanceof B)throw i;if(n<ce)continue;return null}return null}var B=class extends Error{constructor(t){super(t),this.name="AuthError"}};function Ui(){let e=process.env.VIBECODIQ_API_TOKEN;if(e)return e;try{let t=Ms(Os(),".vibecodiq","token"),n=Us(t,"utf-8").trim();if(n)return n}catch{}return null}var zs="533f2e11ca9bf0419a066f73ff9cc910e23a39053fcb3051eb03f9521794e8db",le="0.4.0",D="\x1B[1m",k="\x1B[2m",H="\x1B[36m",ue="\x1B[33m",h="\x1B[0m",U=process.argv.slice(2),z=U[0],de=U[1];function se(e){for(let t of e.slice(1))if(!t.startsWith("-"))return t;return"."}function O(e){return U.includes(e)}if(z==="scan"){let e=O("--fix"),t=se(U),n=O("--json"),i=!1;if(O("--unreleased")){let 
l=process.env.VIBECODIQ_INTERNAL_KEY||"";Hs("sha256").update(l).digest("hex")!==zs&&(console.log(""),console.log(` ${ue}\u26A0 --unreleased requires VIBECODIQ_INTERNAL_KEY environment variable.${h}`),console.log(` ${k}This flag is restricted to authorized internal use.${h}`),console.log(""),process.exit(1)),i=!0}let r="trust-score";O("--architecture")&&(r="architecture"),O("--all")&&(r="all");let s=i?"all checks":"Phase 1",o=r==="trust-score"?`Safety Scan (${s})`:r==="architecture"?`Architecture Scan (${s})`:`Full Scan (${s})`;n||(console.log(""),console.log(` ${H}\u23F3 Scanning project \u2014 ${o}...${h}`));let a=await yi(t,{mode:r,allChecks:i}),c=a.checks.some(l=>l.result==="FAIL");if(e){let l=Ui();l||(console.log(""),console.log(` ${ue}\u26A0 No API token found.${h}`),console.log(""),console.log(" Set your token via:"),console.log(` ${H}export VIBECODIQ_API_TOKEN=<your-token>${h}`),console.log(" Or save to file:"),console.log(` ${H}mkdir -p ~/.vibecodiq && echo "<your-token>" > ~/.vibecodiq/token${h}`),console.log(""),console.log(` ${k}Get your token at: https://vibecodiq.com/pricing${h}`),console.log(""),process.exit(1)),n||console.log(` ${H}\u23F3 Sending sanitized findings to API...${h}`);let p=xi(a.checks,t.startsWith("/")?t:process.cwd(),le,r,a.project);try{let f=await Di(p,l);f?n?console.log(JSON.stringify({...a,...f},null,2)):_i(a,f):n?Z(a):(J(a),re())}catch(f){f instanceof B&&(console.log(""),console.log(` ${ue}\u26A0 API authentication failed: ${f.message}${h}`),console.log(` ${k}Check your VIBECODIQ_API_TOKEN.${h}`),console.log(""),process.exit(1)),n?Z(a):(J(a),re())}process.exit(c?1:0)}n?Z(a):J(a),process.exit(c?1:0)}else if(z==="guard")if(de==="init"){let e=se(U.slice(1)),t=O("--architecture"),n=O("--all");await Pi(e,t||n)}else if(de==="check"){let e=se(U.slice(1));await Ri(e)}else if(de==="upgrade"){let e=se(U.slice(1));await Ti(e)}else console.log(""),console.log(` ${D}@vibecodiq/cli guard${h} \u2014 Architecture rules & CI 
enforcement`),console.log(""),console.log(` ${D}Commands:${h}`),console.log(" guard init Install safety rules + CI into your repo"),console.log(" guard init --architecture + ASA architecture rules"),console.log(" guard init --all Install everything"),console.log(" guard check Run architecture checks locally"),console.log(" guard upgrade Upgrade rules to latest version"),console.log(""),console.log(` ${k}Learn more: https://vibecodiq.com/guard${h}`),console.log("");else if(z==="login"){let e=U[1];e||(console.log(""),console.log(` ${D}@vibecodiq/cli login${h} \u2014 Save your API token`),console.log(""),console.log(` ${D}Usage:${h}`),console.log(" npx @vibecodiq/cli login <your-token>"),console.log(""),console.log(` ${k}Your token is saved to ~/.vibecodiq/token${h}`),console.log(` ${k}Get your token at: https://vibecodiq.com/pricing${h}`),console.log(""),process.exit(1));let{mkdirSync:t,writeFileSync:n}=await import("fs"),{join:i}=await import("path"),{homedir:r}=await import("os"),s=i(r(),".vibecodiq"),o=i(s,"token");t(s,{recursive:!0}),n(o,e.trim()+`
19
+ `,{mode:384}),console.log(""),console.log(` ${H}\u2713${h} Token saved to ${k}${o}${h}`),console.log(` ${k}Run: npx @vibecodiq/cli scan --fix${h}`),console.log("")}else if(z==="logout"){let{unlinkSync:e,existsSync:t}=await import("fs"),{join:n}=await import("path"),{homedir:i}=await import("os"),r=n(i(),".vibecodiq","token");t(r)?(e(r),console.log(""),console.log(` ${H}\u2713${h} Token removed.`),console.log("")):(console.log(""),console.log(` ${k}No token found. Already logged out.${h}`),console.log(""))}else z==="--version"||z==="-v"?console.log(`@vibecodiq/cli v${le}`):(console.log(""),console.log(` ${D}@vibecodiq/cli${h} v${le} \u2014 Production safety for AI-built apps`),console.log(""),console.log(` ${D}Scan${h} ${k}(check what you already have)${h}`),console.log(" scan [path] Safety scan \u2014 Phase 1 (L2+L3+L4)"),console.log(" scan --architecture Architecture scan \u2014 Phase 1 (L1)"),console.log(" scan --all Full scan \u2014 Phase 1 (all layers)"),console.log(` scan --unreleased Run all checks incl. unreleased ${k}(internal)${h}`),console.log(" scan --json JSON output for CI pipelines"),console.log(` scan --fix AI fix prompts for failures ${k}(Pro)${h}`),console.log(""),console.log(` ${D}Guard${h} ${k}(build safely from day one)${h}`),console.log(" guard init Install safety rules + CI"),console.log(" guard init --architecture + ASA architecture rules"),console.log(" guard init --all Install everything"),console.log(" guard check Run architecture checks locally"),console.log(" guard upgrade Upgrade rules to latest"),console.log(""),console.log(` ${D}Account${h}`),console.log(" login Authenticate for Pro features"),console.log(" logout Sign out"),console.log(" --version Show version"),console.log(""),console.log(` ${k}Docs: https://asastandard.org/checks${h}`),console.log(` ${k}Pricing: https://vibecodiq.com/pricing${h}`),console.log(""));