@soulbatical/tetra-dev-toolkit 1.14.0 → 1.15.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * Tetra DB Push — Safe wrapper around `supabase db push`
 *
 * Runs tetra-migration-lint FIRST. If any CRITICAL or HIGH issues
 * are found, the push is BLOCKED. No exceptions.
 *
 * Usage:
 *   tetra-db-push              # Lint + push
 *   tetra-db-push --force      # Skip lint (DANGEROUS — requires explicit flag)
 *   tetra-db-push --dry-run    # Lint only, don't push
 *   tetra-db-push -- --linked  # Pass flags to supabase db push
 *
 * Replace in your workflow:
 *   BEFORE: supabase db push
 *   AFTER:  tetra-db-push
 *
 * Or add alias: alias supabase-push='tetra-db-push'
 */

import { spawnSync } from 'child_process'
import chalk from 'chalk'
import { resolve } from 'path'

const args = process.argv.slice(2)

// Everything after a bare `--` separator is forwarded verbatim to
// `supabase db push`; only the arguments before it are interpreted by
// this wrapper. (Fix: previously the `--` token itself leaked into the
// supabase argv, and --force/--dry-run were recognized even after `--`.)
const sepIdx = args.indexOf('--')
const ownArgs = sepIdx === -1 ? args : args.slice(0, sepIdx)
const passThrough = sepIdx === -1 ? [] : args.slice(sepIdx + 1)

const force = ownArgs.includes('--force')
const dryRun = ownArgs.includes('--dry-run')
const supabaseArgs = [
  ...ownArgs.filter(a => a !== '--force' && a !== '--dry-run'),
  ...passThrough
]

const projectRoot = resolve(process.cwd())

// ─── Step 1: Migration Lint ────────────────────────────────────────
console.log('')
console.log(chalk.bold(' 🔒 Tetra DB Push — Security Gate'))
console.log('')

if (force) {
  console.log(chalk.red.bold(' ⚠️ --force flag: SKIPPING security lint'))
  console.log(chalk.red(' You are pushing migrations WITHOUT security validation.'))
  console.log('')
} else {
  console.log(chalk.gray(' Step 1: Running migration lint...'))
  console.log('')

  // Run the linter as a child process so its colored report streams
  // straight to the user's terminal (stdio: 'inherit').
  const lintResult = spawnSync('node', [
    resolve(import.meta.dirname, 'tetra-migration-lint.js'),
    '--project', projectRoot
  ], {
    cwd: projectRoot,
    stdio: 'inherit',
    env: process.env
  })

  // Blocks on a non-zero exit AND on a failed spawn (status === null),
  // since null !== 0 — a missing `node` binary also blocks the push.
  if (lintResult.status !== 0) {
    console.log('')
    console.log(chalk.red.bold(' ❌ Migration lint FAILED — push blocked'))
    console.log(chalk.gray(' Fix the issues above, then run tetra-db-push again.'))
    console.log('')
    process.exit(1)
  }

  console.log(chalk.green(' ✅ Migration lint passed'))
  console.log('')
}

// ─── Step 2: Supabase DB Push ──────────────────────────────────────
if (dryRun) {
  console.log(chalk.gray(' --dry-run: skipping actual push'))
  console.log('')
  process.exit(0)
}

console.log(chalk.gray(' Step 2: Running supabase db push...'))
console.log('')

const pushResult = spawnSync('npx', ['supabase', 'db', 'push', ...supabaseArgs], {
  cwd: projectRoot,
  stdio: 'inherit',
  env: process.env
})

if (pushResult.status !== 0) {
  console.log('')
  console.log(chalk.red(' ❌ supabase db push failed'))
  // status is null when the child was killed by a signal — fall back to 1.
  process.exit(pushResult.status || 1)
}

console.log('')
console.log(chalk.green.bold(' ✅ Migrations pushed successfully (security validated)'))
console.log('')
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Tetra Migration Lint — HARD BLOCK before supabase db push
|
|
5
|
+
*
|
|
6
|
+
* Scans SQL migration files OFFLINE (no Supabase connection needed) for:
|
|
7
|
+
* 1. SECURITY DEFINER on data RPCs (must be INVOKER)
|
|
8
|
+
* 2. Tables without ENABLE ROW LEVEL SECURITY
|
|
9
|
+
* 3. DROP POLICY / DROP TABLE without explicit confirmation
|
|
10
|
+
* 4. GRANT to public/anon on sensitive tables
|
|
11
|
+
* 5. Missing WITH CHECK on INSERT/UPDATE policies
|
|
12
|
+
*
|
|
13
|
+
* Usage:
|
|
14
|
+
* tetra-migration-lint # Lint all migrations
|
|
15
|
+
* tetra-migration-lint --staged # Only git-staged .sql files (for pre-commit)
|
|
16
|
+
* tetra-migration-lint --file path.sql # Lint a specific file
|
|
17
|
+
* tetra-migration-lint --fix-suggestions # Show fix SQL for each violation
|
|
18
|
+
* tetra-migration-lint --json # JSON output for CI
|
|
19
|
+
*
|
|
20
|
+
* Exit codes:
|
|
21
|
+
* 0 = all clean
|
|
22
|
+
* 1 = violations found (CRITICAL or HIGH)
|
|
23
|
+
 *   0 is also returned when only warnings (MEDIUM/LOW) are found — they do not block
|
|
24
|
+
*
|
|
25
|
+
* Hook usage (.husky/pre-commit):
|
|
26
|
+
* tetra-migration-lint --staged || exit 1
|
|
27
|
+
*
|
|
28
|
+
* Wrapper usage (replace `supabase db push`):
|
|
29
|
+
* tetra-migration-lint && supabase db push
|
|
30
|
+
*/
|
|
31
|
+
|
|
32
|
+
import { readFileSync, existsSync } from 'fs'
|
|
33
|
+
import { join, resolve, basename, relative } from 'path'
|
|
34
|
+
import { globSync } from 'glob'
|
|
35
|
+
import { execSync } from 'child_process'
|
|
36
|
+
import chalk from 'chalk'
|
|
37
|
+
import { program } from 'commander'
|
|
38
|
+
|
|
39
|
+
// ─── Dangerous patterns ────────────────────────────────────────────
// Each rule: a /g (stateful) `pattern` regex, optional `test(match, fullContent)`
// refinement returning false to suppress the finding, a `message(match)`
// describing the violation, and an optional `fix(match)` with suggested SQL.
// lintFile() resets pattern.lastIndex before scanning each file.
const RULES = [
  {
    // RPCs declared SECURITY DEFINER run with the owner's privileges,
    // so RLS policies do not apply to their queries.
    // NOTE(review): `[\s\S]*?` between the signature and SECURITY DEFINER
    // is non-greedy but unbounded — it can span past the end of one function
    // into the next statement, attributing a later DEFINER to an earlier
    // function name. Confirm against real migration files.
    id: 'DEFINER_DATA_RPC',
    severity: 'critical',
    pattern: /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?(\w+)\s*\([^)]*\)[\s\S]*?SECURITY\s+DEFINER/gi,
    test: (match, fullContent) => {
      const funcName = match[1]
      // Auth helpers are OK as DEFINER
      const authWhitelist = [
        'auth_org_id', 'auth_uid', 'auth_role', 'auth_user_role',
        'auth_admin_organizations', 'auth_user_id', 'requesting_user_id',
        'get_auth_org_id', 'get_current_user_id',
        'auth_user_organizations', 'auth_creator_organizations',
        'auth_organization_id', 'auth_is_admin', 'auth_is_superadmin'
      ]
      // Flag only functions NOT in the built-in auth whitelist.
      return !authWhitelist.includes(funcName)
    },
    message: (match) => `SECURITY DEFINER on RPC "${match[1]}" — bypasses RLS completely. Use SECURITY INVOKER instead.`,
    fix: (match) => `ALTER FUNCTION ${match[1]} SECURITY INVOKER;`
  },
  {
    id: 'CREATE_TABLE_NO_RLS',
    severity: 'critical',
    // Match CREATE TABLE that is NOT followed by ENABLE ROW LEVEL SECURITY in the same migration
    test: (match, fullContent) => {
      const tableName = match[1]
      // Scope: only the same migration file (fullContent). A later migration
      // enabling RLS would still be flagged here.
      const rlsPattern = new RegExp(`ALTER\\s+TABLE\\s+(?:public\\.)?${tableName}\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'i')
      return !rlsPattern.test(fullContent)
    },
    pattern: /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?(\w+)\s*\(/gi,
    message: (match) => `CREATE TABLE "${match[1]}" without ENABLE ROW LEVEL SECURITY in same migration.`,
    fix: (match) => `ALTER TABLE ${match[1]} ENABLE ROW LEVEL SECURITY;`
  },
  {
    // Dropping a policy silently widens (or in default-deny, narrows)
    // access; require evidence of a replacement in the same migration.
    id: 'DROP_POLICY',
    severity: 'high',
    pattern: /DROP\s+POLICY\s+(?:IF\s+EXISTS\s+)?["']?(\w+)["']?\s+ON\s+(?:public\.)?(\w+)/gi,
    message: (match) => `DROP POLICY "${match[1]}" on "${match[2]}" — removes security. Ensure a replacement policy exists in the same migration.`,
    test: (match, fullContent) => {
      const table = match[2]
      // OK if there's a CREATE POLICY in the same migration for the same table
      const createPattern = new RegExp(`CREATE\\s+POLICY.*ON\\s+(?:public\\.)?${table}`, 'i')
      if (createPattern.test(fullContent)) return false
      // OK if there's a DO $$ block that references the table (dynamic policy creation)
      const doBlockPattern = new RegExp(`DO\\s+\\$\\$[\\s\\S]*?['"]${table}['"][\\s\\S]*?\\$\\$`, 'i')
      if (doBlockPattern.test(fullContent)) return false
      // OK if DROP POLICY IF EXISTS (safe — only drops if present)
      // NOTE(review): dropIfExists is tested against match[0] only, but the
      // anyCreate fallback searches the WHOLE file for any policy creation,
      // not one scoped to this table — may under-report. Confirm intended.
      const dropIfExists = new RegExp(`DROP\\s+POLICY\\s+IF\\s+EXISTS`, 'i')
      const matchStr = match[0]
      if (dropIfExists.test(matchStr)) {
        // Check if ANY policy creation exists for this table (even dynamic)
        const anyCreate = new RegExp(`(CREATE\\s+POLICY|EXECUTE\\s+format.*CREATE\\s+POLICY)`, 'i')
        if (anyCreate.test(fullContent)) return false
      }
      return true
    },
    fix: (match) => `-- Add replacement policy:\n-- CREATE POLICY "${match[1]}" ON ${match[2]} FOR ALL USING (organization_id = auth_org_id());`
  },
  {
    // Write-capable GRANTs to `public` or `anon` expose the table to
    // unauthenticated clients. (GRANT SELECT is intentionally not matched.)
    id: 'GRANT_PUBLIC_ANON',
    severity: 'critical',
    pattern: /GRANT\s+(?:ALL|INSERT|UPDATE|DELETE)\s+ON\s+(?:TABLE\s+)?(?:public\.)?(\w+)\s+TO\s+(public|anon)/gi,
    message: (match) => `GRANT ${match[0].match(/GRANT\s+(\w+)/)[1]} to ${match[2]} on "${match[1]}" — allows unauthenticated access.`,
    fix: (match) => `-- Remove this GRANT or restrict to authenticated:\n-- GRANT SELECT ON ${match[1]} TO authenticated;`
  },
  {
    // Turning RLS off removes every policy-based protection at once.
    id: 'DISABLE_RLS',
    severity: 'critical',
    pattern: /ALTER\s+TABLE\s+(?:public\.)?(\w+)\s+DISABLE\s+ROW\s+LEVEL\s+SECURITY/gi,
    message: (match) => `DISABLE ROW LEVEL SECURITY on "${match[1]}" — removes ALL protection.`,
    fix: (match) => `-- Do NOT disable RLS. If you need service-level access, use systemDB() in backend code.`
  },
  {
    // A write policy with USING but no WITH CHECK validates only the rows
    // being read/updated, not the new row values being written.
    // The (?!\s*WITH\s+CHECK) lookahead requires WITH CHECK to follow the
    // USING clause immediately (modulo whitespace).
    id: 'POLICY_NO_WITH_CHECK',
    severity: 'medium',
    pattern: /CREATE\s+POLICY\s+["']?(\w+)["']?\s+ON\s+(?:public\.)?(\w+)\s+FOR\s+(INSERT|UPDATE)\s+TO\s+\w+\s+USING\s*\([^)]+\)(?!\s*WITH\s+CHECK)/gi,
    message: (match) => `Policy "${match[1]}" on "${match[2]}" for ${match[3]} has USING but no WITH CHECK — users could write data they can't read.`,
    fix: (match) => `-- Add WITH CHECK clause matching the USING clause`
  },
  {
    // USING(true) on a write-capable policy means every row passes the check.
    id: 'USING_TRUE_WRITE',
    severity: 'critical',
    pattern: /CREATE\s+POLICY\s+["']?(\w+)["']?\s+ON\s+(?:public\.)?(\w+)\s+FOR\s+(INSERT|UPDATE|DELETE|ALL)\s+(?:TO\s+\w+\s+)?USING\s*\(\s*true\s*\)/gi,
    message: (match) => `Policy "${match[1]}" on "${match[2]}" allows ${match[3]} with USING(true) — anyone can write.`,
    fix: (match) => `-- Replace USING(true) with proper org/user scoping:\n-- USING (organization_id = auth_org_id())`
  },
  {
    // Two base64url segments starting with "eyJ" separated by a dot is the
    // shape of a JWT header.payload — catches pasted service-role keys.
    id: 'RAW_SERVICE_KEY',
    severity: 'critical',
    pattern: /eyJ[A-Za-z0-9_-]{20,}\.eyJ[A-Za-z0-9_-]{20,}/g,
    message: () => `Hardcoded JWT/service key found in migration file.`,
    fix: () => `-- NEVER put keys in migrations. Use environment variables or Vault.`
  }
]
|
|
134
|
+
|
|
135
|
+
// ─── Load whitelist from .tetra-quality.json ───────────────────────
/**
 * Reads per-project lint exemptions from `<projectRoot>/.tetra-quality.json`.
 *
 * Recognized config keys (all optional):
 *   supabase.securityDefinerWhitelist — function names allowed to use SECURITY DEFINER
 *   supabase.backendOnlyTables       — tables allowed to skip RLS
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {{securityDefinerWhitelist: string[], tables: {noRls: string[]}}}
 *   Parsed exemptions, or empty lists when the file is missing or malformed
 *   (a broken config must not crash the linter — it just grants no exemptions).
 */
function loadWhitelist(projectRoot) {
  // Single source of truth for the empty/fallback shape (was duplicated
  // in both the missing-file branch and the catch branch).
  const emptyWhitelist = () => ({ securityDefinerWhitelist: [], tables: { noRls: [] } })

  const configPath = join(projectRoot, '.tetra-quality.json')
  if (!existsSync(configPath)) return emptyWhitelist()
  try {
    const config = JSON.parse(readFileSync(configPath, 'utf-8'))
    return {
      securityDefinerWhitelist: config?.supabase?.securityDefinerWhitelist || [],
      tables: { noRls: config?.supabase?.backendOnlyTables || [] }
    }
  } catch {
    // Unreadable or invalid JSON — behave as if no whitelist exists.
    return emptyWhitelist()
  }
}
|
|
147
|
+
|
|
148
|
+
// ─── Find migration files ──────────────────────────────────────────
/**
 * Resolves the list of SQL files to lint, in priority order:
 *   1. --file <path>  — just that file (if it exists)
 *   2. --staged       — git-staged .sql files (added/copied/modified)
 *   3. default        — every migration under the known migration dirs
 *
 * @param {string} projectRoot - Absolute project root.
 * @param {{file?: string, staged?: boolean}} options - CLI options.
 * @returns {string[]} Absolute, de-duplicated file paths.
 */
function findMigrations(projectRoot, options) {
  // Explicit single file wins over everything else.
  if (options.file) {
    const target = resolve(options.file)
    return existsSync(target) ? [target] : []
  }

  // Pre-commit mode: ask git for staged files; any git failure
  // (not a repo, git missing) yields an empty list rather than crashing.
  if (options.staged) {
    try {
      const gitOutput = execSync('git diff --cached --name-only --diff-filter=ACM', {
        cwd: projectRoot, encoding: 'utf-8'
      })
      const sqlFiles = []
      for (const name of gitOutput.split('\n')) {
        if (!name.endsWith('.sql')) continue
        const absPath = join(projectRoot, name)
        if (existsSync(absPath)) sqlFiles.push(absPath)
      }
      return sqlFiles
    } catch {
      return []
    }
  }

  // Default: scan every known migration location; a Set de-duplicates
  // files matched by more than one glob while preserving insertion order.
  const globPatterns = [
    'supabase/migrations/**/*.sql',
    'backend/supabase/migrations/**/*.sql',
    'migrations/**/*.sql'
  ]
  const unique = new Set()
  for (const globPattern of globPatterns) {
    for (const hit of globSync(globPattern, { cwd: projectRoot, absolute: true })) {
      unique.add(hit)
    }
  }
  return [...unique]
}
|
|
179
|
+
|
|
180
|
+
// ─── Lint a single file ────────────────────────────────────────────
/**
 * Runs every RULES entry against one SQL migration file.
 *
 * @param {string} filePath - Absolute path to the .sql file.
 * @param {string} projectRoot - Project root (used for relative display paths).
 * @param {{securityDefinerWhitelist: string[], tables: {noRls: string[]}}} whitelist
 *   Per-project exemptions from .tetra-quality.json.
 * @returns {Array<{file: string, line: number, rule: string, severity: string,
 *   message: string, fix: string|null}>} One finding per un-exempted match.
 */
function lintFile(filePath, projectRoot, whitelist) {
  const sql = readFileSync(filePath, 'utf-8')
  const displayPath = relative(projectRoot, filePath)
  const findings = []

  for (const rule of RULES) {
    // Defensive: the /g patterns are shared module state; make sure each
    // file's scan starts from the beginning of the string.
    rule.pattern.lastIndex = 0

    for (const match of sql.matchAll(rule.pattern)) {
      // Project-level exemptions from .tetra-quality.json.
      if (rule.id === 'DEFINER_DATA_RPC' &&
          whitelist.securityDefinerWhitelist.includes(match[1])) continue
      if (rule.id === 'CREATE_TABLE_NO_RLS' &&
          whitelist.tables.noRls.includes(match[1])) continue

      // Per-rule refinement: a falsy result suppresses the finding.
      if (rule.test && !rule.test(match, sql)) continue

      findings.push({
        file: displayPath,
        // 1-based line of the match: count newlines before match.index.
        line: sql.slice(0, match.index).split('\n').length,
        rule: rule.id,
        severity: rule.severity,
        message: rule.message(match),
        fix: rule.fix ? rule.fix(match) : null
      })
    }
  }

  return findings
}
|
|
221
|
+
|
|
222
|
+
// ─── Main ──────────────────────────────────────────────────────────
// CLI entry point: parse flags, collect files, lint, report, and exit
// with 1 only when CRITICAL/HIGH findings exist.
program
  .name('tetra-migration-lint')
  .description('Lint SQL migrations for security issues before pushing to Supabase')
  .option('--staged', 'Only lint git-staged .sql files (for pre-commit hook)')
  .option('--file <path>', 'Lint a specific SQL file')
  .option('--fix-suggestions', 'Show fix SQL for each violation')
  .option('--json', 'JSON output for CI')
  .option('--project <path>', 'Project root (default: cwd)')
  .parse()

const opts = program.opts()
const projectRoot = resolve(opts.project || process.cwd())
const whitelist = loadWhitelist(projectRoot)
const files = findMigrations(projectRoot, opts)

// Nothing to lint is a success (important for pre-commit hooks on
// commits that touch no .sql files).
if (files.length === 0) {
  if (!opts.json) console.log(chalk.gray('No migration files to lint.'))
  process.exit(0)
}

const allFindings = []
for (const file of files) {
  allFindings.push(...lintFile(file, projectRoot, whitelist))
}

// ─── Output ────────────────────────────────────────────────────────
if (opts.json) {
  // Machine-readable report for CI; `passed` mirrors the human-mode
  // blocking rule (no CRITICAL and no HIGH findings).
  console.log(JSON.stringify({
    files: files.length,
    findings: allFindings,
    critical: allFindings.filter(f => f.severity === 'critical').length,
    high: allFindings.filter(f => f.severity === 'high').length,
    medium: allFindings.filter(f => f.severity === 'medium').length,
    passed: allFindings.filter(f => f.severity === 'critical' || f.severity === 'high').length === 0
  }, null, 2))
} else {
  const critical = allFindings.filter(f => f.severity === 'critical')
  const high = allFindings.filter(f => f.severity === 'high')
  const medium = allFindings.filter(f => f.severity === 'medium')

  console.log('')
  console.log(chalk.bold(' Tetra Migration Lint'))
  console.log(chalk.gray(` ${files.length} migration files scanned`))
  console.log('')

  if (allFindings.length === 0) {
    console.log(chalk.green(' ✅ All migrations pass security lint'))
    console.log('')
    process.exit(0)
  }

  // Group by file
  const byFile = {}
  for (const f of allFindings) {
    if (!byFile[f.file]) byFile[f.file] = []
    byFile[f.file].push(f)
  }

  // One section per file, one line per finding, optional fix SQL.
  for (const [file, findings] of Object.entries(byFile)) {
    console.log(chalk.underline(` ${file}`))
    for (const f of findings) {
      const icon = f.severity === 'critical' ? '🔴' : f.severity === 'high' ? '🟠' : '🟡'
      const color = f.severity === 'critical' ? chalk.red : f.severity === 'high' ? chalk.yellow : chalk.gray
      console.log(` ${icon} ${color(`[${f.severity.toUpperCase()}]`)} Line ${f.line}: ${f.message}`)
      // commander maps --fix-suggestions to opts.fixSuggestions.
      if (opts.fixSuggestions && f.fix) {
        console.log(chalk.cyan(` Fix: ${f.fix}`))
      }
    }
    console.log('')
  }

  console.log(chalk.bold(' Summary:'))
  if (critical.length) console.log(chalk.red(` ${critical.length} CRITICAL`))
  if (high.length) console.log(chalk.yellow(` ${high.length} HIGH`))
  if (medium.length) console.log(chalk.gray(` ${medium.length} MEDIUM`))
  console.log('')

  if (critical.length || high.length) {
    // Only CRITICAL/HIGH findings block (exit 1).
    console.log(chalk.red.bold(' ❌ BLOCKED — fix CRITICAL/HIGH issues before pushing migrations'))
    console.log(chalk.gray(' Run with --fix-suggestions to see fix SQL'))
    console.log('')
    process.exit(1)
  } else {
    // MEDIUM/LOW-only runs exit 0 so `tetra-migration-lint && supabase db push`
    // chains proceed — warnings are informational, never blocking.
    console.log(chalk.yellow(' ⚠️ Warnings found but not blocking'))
    console.log('')
    process.exit(0)
  }
}
|
|
@@ -32,11 +32,11 @@ export const meta = {
|
|
|
32
32
|
}
|
|
33
33
|
|
|
34
34
|
const DB_PATTERNS = {
|
|
35
|
-
systemDB: { level: 'SYSTEM', pattern:
|
|
36
|
-
adminDB: { level: 'ADMIN', pattern: /adminDB\s*\(/g,
|
|
37
|
-
userDB: { level: 'USER', pattern:
|
|
38
|
-
publicDB: { level: 'PUBLIC', pattern:
|
|
39
|
-
superadminDB: { level: 'SUPERADMIN', pattern:
|
|
35
|
+
systemDB: { level: 'SYSTEM', pattern: /\bsystemDB\s*\(/g, desc: 'System-level (cron, webhooks)' },
|
|
36
|
+
adminDB: { level: 'ADMIN', pattern: /(?<!\w)adminDB\s*\(/g, desc: 'Admin operations (org-scoped)' },
|
|
37
|
+
userDB: { level: 'USER', pattern: /\buserDB\s*\(/g, desc: 'User-specific operations' },
|
|
38
|
+
publicDB: { level: 'PUBLIC', pattern: /\bpublicDB\s*\(/g, desc: 'Public/unauthenticated' },
|
|
39
|
+
superadminDB: { level: 'SUPERADMIN', pattern: /\bsuperadminDB\s*\(/g, desc: 'Cross-org superadmin' }
|
|
40
40
|
}
|
|
41
41
|
|
|
42
42
|
/**
|
|
@@ -264,14 +264,16 @@ export async function run(config, projectRoot) {
|
|
|
264
264
|
// public routes are fine without auth middleware, no action needed
|
|
265
265
|
|
|
266
266
|
// --- Cross-access-level route name mismatch ---
|
|
267
|
-
|
|
267
|
+
// Skip if route file has explicit @tetra-audit-ignore for this check
|
|
268
|
+
const hasIgnoreDirective = /@tetra-audit-ignore\s+route-config-alignment\b/.test(content)
|
|
269
|
+
if (cfg.accessLevel === 'admin' && routeName === 'publicRoutes.ts' && !hasAuthMiddleware(content) && !hasIgnoreDirective) {
|
|
268
270
|
results.findings.push({
|
|
269
271
|
file: relRouteFile,
|
|
270
272
|
line: 1,
|
|
271
273
|
type: 'admin-feature-public-route-no-auth',
|
|
272
274
|
severity: 'critical',
|
|
273
275
|
message: `Config declares accessLevel "admin" for table "${cfg.tableName}" but has a publicRoutes.ts without auth. Admin data may be exposed publicly.`,
|
|
274
|
-
fix: `Either add auth middleware to publicRoutes.ts or
|
|
276
|
+
fix: `Either add auth middleware to publicRoutes.ts, or add @tetra-audit-ignore route-config-alignment comment if the public route is intentional.`
|
|
275
277
|
})
|
|
276
278
|
results.summary.critical++
|
|
277
279
|
results.summary.total++
|
|
@@ -29,15 +29,21 @@ export const meta = {
|
|
|
29
29
|
description: 'Verifies all RPC functions use SECURITY INVOKER (not DEFINER) unless explicitly whitelisted. DEFINER bypasses RLS completely.'
|
|
30
30
|
}
|
|
31
31
|
|
|
32
|
-
//
|
|
32
|
+
// Functions that legitimately need SECURITY DEFINER
|
|
33
33
|
const BUILTIN_DEFINER_WHITELIST = [
|
|
34
|
+
// Auth helpers (need to read auth schema / organization_members)
|
|
34
35
|
'auth_org_id',
|
|
35
36
|
'auth_admin_organizations',
|
|
36
37
|
'auth_user_organizations',
|
|
38
|
+
'auth_creator_organizations',
|
|
37
39
|
'get_user_org_role',
|
|
38
40
|
'get_org_id',
|
|
39
41
|
'handle_new_user',
|
|
40
42
|
'moddatetime',
|
|
43
|
+
// Public RPCs (called by anon users, need DEFINER to bypass RLS and return only safe columns)
|
|
44
|
+
'search_public_ad_library',
|
|
45
|
+
// System/billing RPCs (called by systemDB, no user context)
|
|
46
|
+
'get_org_credit_limits',
|
|
41
47
|
// Supabase internal
|
|
42
48
|
'pgsodium_encrypt',
|
|
43
49
|
'pgsodium_decrypt'
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@soulbatical/tetra-dev-toolkit",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.15.1",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"access": "restricted"
|
|
6
6
|
},
|
|
@@ -29,7 +29,9 @@
|
|
|
29
29
|
"tetra-init": "./bin/tetra-init.js",
|
|
30
30
|
"tetra-setup": "./bin/tetra-setup.js",
|
|
31
31
|
"tetra-dev-token": "./bin/tetra-dev-token.js",
|
|
32
|
-
"tetra-check-rls": "./bin/tetra-check-rls.js"
|
|
32
|
+
"tetra-check-rls": "./bin/tetra-check-rls.js",
|
|
33
|
+
"tetra-migration-lint": "./bin/tetra-migration-lint.js",
|
|
34
|
+
"tetra-db-push": "./bin/tetra-db-push.js"
|
|
33
35
|
},
|
|
34
36
|
"files": [
|
|
35
37
|
"bin/",
|