@soulbatical/tetra-dev-toolkit 1.13.1 → 1.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/tetra-db-push.js +91 -0
- package/bin/tetra-migration-lint.js +295 -0
- package/lib/checks/security/mixed-db-usage.js +5 -5
- package/lib/checks/security/route-config-alignment.js +308 -0
- package/lib/checks/security/rpc-security-mode.js +175 -0
- package/lib/runner.js +5 -1
- package/package.json +4 -2
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * Tetra DB Push — Safe wrapper around `supabase db push`
 *
 * Runs tetra-migration-lint FIRST. If any CRITICAL or HIGH issues
 * are found, the push is BLOCKED. No exceptions.
 *
 * Usage:
 *   tetra-db-push                  # Lint + push
 *   tetra-db-push --force          # Skip lint (DANGEROUS — requires explicit flag)
 *   tetra-db-push --dry-run        # Lint only, don't push
 *   tetra-db-push -- --linked      # Pass flags to supabase db push
 *
 * Replace in your workflow:
 *   BEFORE: supabase db push
 *   AFTER:  tetra-db-push
 *
 * Or add alias: alias supabase-push='tetra-db-push'
 */

import { spawnSync } from 'child_process'
import chalk from 'chalk'
import { resolve } from 'path'

const args = process.argv.slice(2)

// FIX: everything after a literal `--` is forwarded verbatim to
// `supabase db push`; the separator itself must NOT be forwarded
// (previously it leaked through and became a stray positional argument).
const sepIndex = args.indexOf('--')
const ownArgs = sepIndex === -1 ? args : args.slice(0, sepIndex)
const passthroughArgs = sepIndex === -1 ? [] : args.slice(sepIndex + 1)

const force = ownArgs.includes('--force')
const dryRun = ownArgs.includes('--dry-run')
const supabaseArgs = [
  ...ownArgs.filter(a => a !== '--force' && a !== '--dry-run'),
  ...passthroughArgs
]

const projectRoot = resolve(process.cwd())

// ─── Step 1: Migration Lint ────────────────────────────────────────
console.log('')
console.log(chalk.bold(' 🔒 Tetra DB Push — Security Gate'))
console.log('')

if (force) {
  console.log(chalk.red.bold(' ⚠️ --force flag: SKIPPING security lint'))
  console.log(chalk.red(' You are pushing migrations WITHOUT security validation.'))
  console.log('')
} else {
  console.log(chalk.gray(' Step 1: Running migration lint...'))
  console.log('')

  // NOTE: import.meta.dirname requires Node >= 20.11 — TODO confirm the
  // package's engines field guarantees this.
  const lintResult = spawnSync('node', [
    resolve(import.meta.dirname, 'tetra-migration-lint.js'),
    '--project', projectRoot
  ], {
    cwd: projectRoot,
    stdio: 'inherit',
    env: process.env
  })

  // FIX: spawnSync reports launcher failures (e.g. interpreter missing) via
  // `error` with a null status — treat that explicitly as a failed lint so a
  // broken toolchain can never slip past the gate.
  if (lintResult.error || lintResult.status !== 0) {
    console.log('')
    console.log(chalk.red.bold(' ❌ Migration lint FAILED — push blocked'))
    console.log(chalk.gray(' Fix the issues above, then run tetra-db-push again.'))
    console.log('')
    process.exit(1)
  }

  console.log(chalk.green(' ✅ Migration lint passed'))
  console.log('')
}

// ─── Step 2: Supabase DB Push ──────────────────────────────────────
if (dryRun) {
  console.log(chalk.gray(' --dry-run: skipping actual push'))
  console.log('')
  process.exit(0)
}

console.log(chalk.gray(' Step 2: Running supabase db push...'))
console.log('')

const pushResult = spawnSync('npx', ['supabase', 'db', 'push', ...supabaseArgs], {
  cwd: projectRoot,
  stdio: 'inherit',
  env: process.env
})

if (pushResult.error || pushResult.status !== 0) {
  console.log('')
  console.log(chalk.red(' ❌ supabase db push failed'))
  // Propagate the CLI's own exit code when it ran; fall back to 1 when it
  // was killed by a signal or failed to launch (status is null then).
  process.exit(pushResult.status || 1)
}

console.log('')
console.log(chalk.green.bold(' ✅ Migrations pushed successfully (security validated)'))
console.log('')
|
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Tetra Migration Lint — HARD BLOCK before supabase db push
|
|
5
|
+
*
|
|
6
|
+
* Scans SQL migration files OFFLINE (no Supabase connection needed) for:
|
|
7
|
+
* 1. SECURITY DEFINER on data RPCs (must be INVOKER)
|
|
8
|
+
* 2. Tables without ENABLE ROW LEVEL SECURITY
|
|
9
|
+
* 3. DROP POLICY / DROP TABLE without explicit confirmation
|
|
10
|
+
* 4. GRANT to public/anon on sensitive tables
|
|
11
|
+
* 5. Missing WITH CHECK on INSERT/UPDATE policies
|
|
12
|
+
*
|
|
13
|
+
* Usage:
|
|
14
|
+
* tetra-migration-lint # Lint all migrations
|
|
15
|
+
* tetra-migration-lint --staged # Only git-staged .sql files (for pre-commit)
|
|
16
|
+
* tetra-migration-lint --file path.sql # Lint a specific file
|
|
17
|
+
* tetra-migration-lint --fix-suggestions # Show fix SQL for each violation
|
|
18
|
+
* tetra-migration-lint --json # JSON output for CI
|
|
19
|
+
*
|
|
20
|
+
* Exit codes:
|
|
21
|
+
* 0 = all clean
|
|
22
|
+
* 1 = violations found (CRITICAL or HIGH)
|
|
23
|
+
 * 2 = reserved — currently a run with only MEDIUM/LOW findings exits 0 (does not block)
|
|
24
|
+
*
|
|
25
|
+
* Hook usage (.husky/pre-commit):
|
|
26
|
+
* tetra-migration-lint --staged || exit 1
|
|
27
|
+
*
|
|
28
|
+
* Wrapper usage (replace `supabase db push`):
|
|
29
|
+
* tetra-migration-lint && supabase db push
|
|
30
|
+
*/
|
|
31
|
+
|
|
32
|
+
import { readFileSync, existsSync } from 'fs'
|
|
33
|
+
import { join, resolve, basename, relative } from 'path'
|
|
34
|
+
import { globSync } from 'glob'
|
|
35
|
+
import { execSync } from 'child_process'
|
|
36
|
+
import chalk from 'chalk'
|
|
37
|
+
import { program } from 'commander'
|
|
38
|
+
|
|
39
|
+
// ─── Dangerous patterns ────────────────────────────────────────────
// Each rule: a global, case-insensitive `pattern` exec'd over the whole
// migration; an optional `test(match, fullContent)` that can veto a match
// (return false → suppressed); a `message(match)` for reporting; and an
// optional `fix(match)` shown with --fix-suggestions.
const RULES = [
  {
    // SECURITY DEFINER functions run as the function owner and bypass RLS.
    id: 'DEFINER_DATA_RPC',
    severity: 'critical',
    pattern: /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?(\w+)\s*\([^)]*\)[\s\S]*?SECURITY\s+DEFINER/gi,
    test: (match, fullContent) => {
      const funcName = match[1]
      // Auth helpers are OK as DEFINER
      const authWhitelist = [
        'auth_org_id', 'auth_uid', 'auth_role', 'auth_user_role',
        'auth_admin_organizations', 'auth_user_id', 'requesting_user_id',
        'get_auth_org_id', 'get_current_user_id'
      ]
      return !authWhitelist.includes(funcName)
    },
    message: (match) => `SECURITY DEFINER on RPC "${match[1]}" — bypasses RLS completely. Use SECURITY INVOKER instead.`,
    fix: (match) => `ALTER FUNCTION ${match[1]} SECURITY INVOKER;`
  },
  {
    id: 'CREATE_TABLE_NO_RLS',
    severity: 'critical',
    // Match CREATE TABLE that is NOT followed by ENABLE ROW LEVEL SECURITY in the same migration
    test: (match, fullContent) => {
      const tableName = match[1]
      const rlsPattern = new RegExp(`ALTER\\s+TABLE\\s+(?:public\\.)?${tableName}\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'i')
      return !rlsPattern.test(fullContent)
    },
    pattern: /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?(\w+)\s*\(/gi,
    message: (match) => `CREATE TABLE "${match[1]}" without ENABLE ROW LEVEL SECURITY in same migration.`,
    fix: (match) => `ALTER TABLE ${match[1]} ENABLE ROW LEVEL SECURITY;`
  },
  {
    id: 'DROP_POLICY',
    severity: 'high',
    pattern: /DROP\s+POLICY\s+(?:IF\s+EXISTS\s+)?["']?(\w+)["']?\s+ON\s+(?:public\.)?(\w+)/gi,
    message: (match) => `DROP POLICY "${match[1]}" on "${match[2]}" — removes security. Ensure a replacement policy exists in the same migration.`,
    test: (match, fullContent) => {
      // OK if there's a CREATE POLICY in the same migration for the same table.
      // FIX: use [\s\S]*? instead of .* — CREATE POLICY statements commonly
      // span multiple lines ("CREATE POLICY name\n  ON table") and `.` does
      // not match newlines, so real replacement policies were never detected
      // and every multi-line rewrite produced a false positive.
      const createPattern = new RegExp(`CREATE\\s+POLICY[\\s\\S]*?ON\\s+(?:public\\.)?${match[2]}`, 'i')
      return !createPattern.test(fullContent)
    },
    fix: (match) => `-- Add replacement policy:\n-- CREATE POLICY "${match[1]}" ON ${match[2]} FOR ALL USING (organization_id = auth_org_id());`
  },
  {
    id: 'GRANT_PUBLIC_ANON',
    severity: 'critical',
    pattern: /GRANT\s+(?:ALL|INSERT|UPDATE|DELETE)\s+ON\s+(?:TABLE\s+)?(?:public\.)?(\w+)\s+TO\s+(public|anon)/gi,
    // The inner .match cannot be null: the outer pattern guarantees the
    // matched text starts with "GRANT <verb>".
    message: (match) => `GRANT ${match[0].match(/GRANT\s+(\w+)/)[1]} to ${match[2]} on "${match[1]}" — allows unauthenticated access.`,
    fix: (match) => `-- Remove this GRANT or restrict to authenticated:\n-- GRANT SELECT ON ${match[1]} TO authenticated;`
  },
  {
    id: 'DISABLE_RLS',
    severity: 'critical',
    pattern: /ALTER\s+TABLE\s+(?:public\.)?(\w+)\s+DISABLE\s+ROW\s+LEVEL\s+SECURITY/gi,
    message: (match) => `DISABLE ROW LEVEL SECURITY on "${match[1]}" — removes ALL protection.`,
    fix: (match) => `-- Do NOT disable RLS. If you need service-level access, use systemDB() in backend code.`
  },
  {
    id: 'POLICY_NO_WITH_CHECK',
    severity: 'medium',
    // NOTE(review): only matches policies that have an explicit "TO <role>"
    // clause; policies without TO are not scanned — confirm if intentional.
    pattern: /CREATE\s+POLICY\s+["']?(\w+)["']?\s+ON\s+(?:public\.)?(\w+)\s+FOR\s+(INSERT|UPDATE)\s+TO\s+\w+\s+USING\s*\([^)]+\)(?!\s*WITH\s+CHECK)/gi,
    message: (match) => `Policy "${match[1]}" on "${match[2]}" for ${match[3]} has USING but no WITH CHECK — users could write data they can't read.`,
    fix: (match) => `-- Add WITH CHECK clause matching the USING clause`
  },
  {
    id: 'USING_TRUE_WRITE',
    severity: 'critical',
    pattern: /CREATE\s+POLICY\s+["']?(\w+)["']?\s+ON\s+(?:public\.)?(\w+)\s+FOR\s+(INSERT|UPDATE|DELETE|ALL)\s+(?:TO\s+\w+\s+)?USING\s*\(\s*true\s*\)/gi,
    message: (match) => `Policy "${match[1]}" on "${match[2]}" allows ${match[3]} with USING(true) — anyone can write.`,
    fix: (match) => `-- Replace USING(true) with proper org/user scoping:\n-- USING (organization_id = auth_org_id())`
  },
  {
    // Two base64url segments starting with "eyJ" joined by '.' — the shape
    // of a JWT (header.payload), e.g. a Supabase service key.
    id: 'RAW_SERVICE_KEY',
    severity: 'critical',
    pattern: /eyJ[A-Za-z0-9_-]{20,}\.eyJ[A-Za-z0-9_-]{20,}/g,
    message: () => `Hardcoded JWT/service key found in migration file.`,
    fix: () => `-- NEVER put keys in migrations. Use environment variables or Vault.`
  }
]
|
|
119
|
+
|
|
120
|
+
// ─── Load whitelist from .tetra-quality.json ───────────────────────
// Returns { securityDefinerWhitelist, tables: { noRls } }; a missing or
// unparsable config yields the empty defaults (never throws).
function loadWhitelist(projectRoot) {
  const defaults = () => ({ securityDefinerWhitelist: [], tables: { noRls: [] } })

  const configPath = join(projectRoot, '.tetra-quality.json')
  if (!existsSync(configPath)) return defaults()

  try {
    const supabaseCfg = JSON.parse(readFileSync(configPath, 'utf-8'))?.supabase
    return {
      securityDefinerWhitelist: supabaseCfg?.securityDefinerWhitelist || [],
      tables: { noRls: supabaseCfg?.backendOnlyTables || [] }
    }
  } catch {
    // Malformed JSON → behave as if no whitelist was configured.
    return defaults()
  }
}
|
|
132
|
+
|
|
133
|
+
// ─── Find migration files ──────────────────────────────────────────
// Resolution order: explicit --file, then --staged (git index), then a
// deduplicated glob over the known migration directory layouts.
function findMigrations(projectRoot, options) {
  // Explicit single file wins over everything else.
  if (options.file) {
    const filePath = resolve(options.file)
    return existsSync(filePath) ? [filePath] : []
  }

  // Pre-commit mode: only the .sql files currently staged in git.
  if (options.staged) {
    let stagedOutput
    try {
      stagedOutput = execSync('git diff --cached --name-only --diff-filter=ACM', {
        cwd: projectRoot, encoding: 'utf-8'
      })
    } catch {
      // Not a git repo / git unavailable → nothing to lint.
      return []
    }
    return stagedOutput
      .split('\n')
      .filter(name => name.endsWith('.sql'))
      .map(name => join(projectRoot, name))
      .filter(path => existsSync(path))
  }

  // Default: all migrations under the supported layout roots.
  const migrationGlobs = [
    'supabase/migrations/**/*.sql',
    'backend/supabase/migrations/**/*.sql',
    'migrations/**/*.sql'
  ]
  const matches = migrationGlobs.flatMap(
    pattern => globSync(pattern, { cwd: projectRoot, absolute: true })
  )
  return [...new Set(matches)]
}
|
|
164
|
+
|
|
165
|
+
// ─── Lint a single file ────────────────────────────────────────────
// Runs every RULES entry over the file content and returns the list of
// findings: { file, line, rule, severity, message, fix }.
function lintFile(filePath, projectRoot, whitelist) {
  const sql = readFileSync(filePath, 'utf-8')
  const relPath = relative(projectRoot, filePath)
  const findings = []

  // A match is suppressed when the project whitelisted the captured
  // function/table name for the relevant rule.
  const isWhitelisted = (ruleId, name) =>
    (ruleId === 'DEFINER_DATA_RPC' && whitelist.securityDefinerWhitelist.includes(name)) ||
    (ruleId === 'CREATE_TABLE_NO_RLS' && whitelist.tables.noRls.includes(name))

  for (const rule of RULES) {
    // Shared /g regex — rewind before scanning a new file.
    rule.pattern.lastIndex = 0

    for (let m = rule.pattern.exec(sql); m !== null; m = rule.pattern.exec(sql)) {
      if (isWhitelisted(rule.id, m[1])) continue
      // A rule's custom test can veto the raw regex match.
      if (rule.test && !rule.test(m, sql)) continue

      findings.push({
        file: relPath,
        // 1-based line of the match: count newlines before the match index.
        line: sql.slice(0, m.index).split('\n').length,
        rule: rule.id,
        severity: rule.severity,
        message: rule.message(m),
        fix: rule.fix ? rule.fix(m) : null
      })
    }
  }

  return findings
}
|
|
206
|
+
|
|
207
|
+
// ─── Main ──────────────────────────────────────────────────────────
program
  .name('tetra-migration-lint')
  .description('Lint SQL migrations for security issues before pushing to Supabase')
  .option('--staged', 'Only lint git-staged .sql files (for pre-commit hook)')
  .option('--file <path>', 'Lint a specific SQL file')
  .option('--fix-suggestions', 'Show fix SQL for each violation')
  .option('--json', 'JSON output for CI')
  .option('--project <path>', 'Project root (default: cwd)')
  .parse()

const opts = program.opts()
const projectRoot = resolve(opts.project || process.cwd())
const whitelist = loadWhitelist(projectRoot)
const files = findMigrations(projectRoot, opts)

if (files.length === 0) {
  if (!opts.json) console.log(chalk.gray('No migration files to lint.'))
  process.exit(0)
}

const allFindings = []
for (const file of files) {
  allFindings.push(...lintFile(file, projectRoot, whitelist))
}

// Bucket by severity once; both output modes need the counts.
const critical = allFindings.filter(f => f.severity === 'critical')
const high = allFindings.filter(f => f.severity === 'high')
const medium = allFindings.filter(f => f.severity === 'medium')
const blocking = critical.length > 0 || high.length > 0

// ─── Output ────────────────────────────────────────────────────────
if (opts.json) {
  console.log(JSON.stringify({
    files: files.length,
    findings: allFindings,
    critical: critical.length,
    high: high.length,
    medium: medium.length,
    passed: !blocking
  }, null, 2))
  // FIX: --json previously always exited 0, so CI pipelines using the JSON
  // mode never failed on CRITICAL/HIGH violations. Mirror the human-mode
  // exit code so the documented contract (exit 1 = violations) holds in CI.
  process.exit(blocking ? 1 : 0)
}

console.log('')
console.log(chalk.bold(' Tetra Migration Lint'))
console.log(chalk.gray(` ${files.length} migration files scanned`))
console.log('')

if (allFindings.length === 0) {
  console.log(chalk.green(' ✅ All migrations pass security lint'))
  console.log('')
  process.exit(0)
}

// Group findings by file for readable output.
const byFile = {}
for (const f of allFindings) {
  if (!byFile[f.file]) byFile[f.file] = []
  byFile[f.file].push(f)
}

for (const [file, findings] of Object.entries(byFile)) {
  console.log(chalk.underline(` ${file}`))
  for (const f of findings) {
    const icon = f.severity === 'critical' ? '🔴' : f.severity === 'high' ? '🟠' : '🟡'
    const color = f.severity === 'critical' ? chalk.red : f.severity === 'high' ? chalk.yellow : chalk.gray
    console.log(` ${icon} ${color(`[${f.severity.toUpperCase()}]`)} Line ${f.line}: ${f.message}`)
    if (opts.fixSuggestions && f.fix) {
      console.log(chalk.cyan(` Fix: ${f.fix}`))
    }
  }
  console.log('')
}

console.log(chalk.bold(' Summary:'))
if (critical.length) console.log(chalk.red(` ${critical.length} CRITICAL`))
if (high.length) console.log(chalk.yellow(` ${high.length} HIGH`))
if (medium.length) console.log(chalk.gray(` ${medium.length} MEDIUM`))
console.log('')

if (blocking) {
  console.log(chalk.red.bold(' ❌ BLOCKED — fix CRITICAL/HIGH issues before pushing migrations'))
  console.log(chalk.gray(' Run with --fix-suggestions to see fix SQL'))
  console.log('')
  process.exit(1)
}

// MEDIUM/LOW findings warn but intentionally do not block: exit 0 so the
// tetra-db-push wrapper and pre-commit hooks still pass.
console.log(chalk.yellow(' ⚠️ Warnings found but not blocking'))
console.log('')
process.exit(0)
|
|
@@ -32,11 +32,11 @@ export const meta = {
|
|
|
32
32
|
}
|
|
33
33
|
|
|
34
34
|
// Recognized DB accessor calls, keyed by accessor name. Each entry carries
// the privilege level, a /g regex that matches a call site, and a short
// description. The \b boundary keeps longer identifiers (e.g. superadminDB)
// from matching the shorter names.
// CONSISTENCY FIX: adminDB used (?<!\w) while the other four used \b; the
// two are equivalent before a word character, so normalize all five to \b.
const DB_PATTERNS = {
  systemDB: { level: 'SYSTEM', pattern: /\bsystemDB\s*\(/g, desc: 'System-level (cron, webhooks)' },
  adminDB: { level: 'ADMIN', pattern: /\badminDB\s*\(/g, desc: 'Admin operations (org-scoped)' },
  userDB: { level: 'USER', pattern: /\buserDB\s*\(/g, desc: 'User-specific operations' },
  publicDB: { level: 'PUBLIC', pattern: /\bpublicDB\s*\(/g, desc: 'Public/unauthenticated' },
  superadminDB: { level: 'SUPERADMIN', pattern: /\bsuperadminDB\s*\(/g, desc: 'Cross-org superadmin' }
}
|
|
41
41
|
|
|
42
42
|
/**
|
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Route ↔ Config Alignment Check
|
|
3
|
+
*
|
|
4
|
+
* Verifies that route files match the feature config accessLevel:
|
|
5
|
+
*
|
|
6
|
+
* - accessLevel 'admin' → route file must be adminRoutes.ts AND must have authenticateToken + requireOrganizationAdmin
|
|
7
|
+
* - accessLevel 'user' → route file must be userRoutes.ts AND must have authenticateToken
|
|
8
|
+
* - accessLevel 'public' → route can be publicRoutes.ts, NO auth middleware required
|
|
9
|
+
* - accessLevel 'system' → should NOT have any route file (backend-only)
|
|
10
|
+
* - accessLevel 'creator' → route must have authenticateToken
|
|
11
|
+
*
|
|
12
|
+
* CRITICAL if an admin endpoint has no auth middleware.
|
|
13
|
+
* HIGH if route name doesn't match access level.
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import { readFileSync, existsSync } from 'fs'
|
|
17
|
+
import { join, basename, dirname } from 'path'
|
|
18
|
+
import { globSync } from 'glob'
|
|
19
|
+
|
|
20
|
+
// Check metadata: stable id, display name, reporting category, and the
// worst severity this check emits (run() below produces 'critical' and
// 'high' findings).
export const meta = {
  id: 'route-config-alignment',
  name: 'Route ↔ Config Alignment',
  category: 'security',
  severity: 'critical',
  description: 'Verifies route middleware matches feature config accessLevel'
}
|
|
27
|
+
|
|
28
|
+
/**
 * Find files matching a glob pattern.
 * Returns absolute paths; node_modules is always excluded. Any glob
 * failure (bad pattern, unreadable tree) is treated as "no matches".
 */
function findFiles(projectRoot, pattern) {
  let matches = []
  try {
    matches = globSync(pattern, { cwd: projectRoot, absolute: true, ignore: ['**/node_modules/**'] })
  } catch {
    // fall through with the empty result
  }
  return matches
}
|
|
38
|
+
|
|
39
|
+
/**
 * Parse all feature configs to extract tableName → accessLevel + feature directory.
 * Returns [{ tableName, accessLevel, configFile, featureDir }].
 */
function parseFeatureConfigs(projectRoot) {
  const configFiles = [
    ...findFiles(projectRoot, 'backend/src/features/**/config/*.config.ts'),
    ...findFiles(projectRoot, 'src/features/**/config/*.config.ts')
  ]

  const configs = []
  for (const file of configFiles) {
    let source
    try {
      source = readFileSync(file, 'utf-8')
    } catch {
      continue // unreadable config → skip it
    }

    const tableMatch = source.match(/tableName:\s*['"]([^'"]+)['"]/)
    if (!tableMatch) continue // no tableName → not a feature config we understand

    // Missing accessLevel falls back to 'admin' (the strictest expectation).
    const accessMatch = source.match(/accessLevel:\s*['"]([^'"]+)['"]/)

    // features/X/config/file.ts → features/X
    const featureDir = dirname(dirname(file))

    configs.push({
      tableName: tableMatch[1],
      accessLevel: accessMatch ? accessMatch[1] : 'admin',
      configFile: file.replace(projectRoot + '/', ''),
      featureDir
    })
  }

  return configs
}
|
|
76
|
+
|
|
77
|
+
/**
 * Find route files in a feature directory.
 * Looks in <featureDir>/routes for .ts files (excluding .d.ts); a missing
 * directory or glob failure yields an empty list.
 */
function findRouteFiles(featureDir) {
  const routesDir = join(featureDir, 'routes')
  if (!existsSync(routesDir)) return []

  let routeFiles = []
  try {
    routeFiles = globSync('*.ts', { cwd: routesDir, absolute: true, ignore: ['*.d.ts'] })
  } catch {
    // glob failure behaves like an empty routes directory
  }
  return routeFiles
}
|
|
90
|
+
|
|
91
|
+
/**
 * Check if a route file contains authenticateToken middleware.
 * Plain substring scan — identical semantics to a bare regex test.
 */
function hasAuthMiddleware(content) {
  return content.includes('authenticateToken')
}
|
|
97
|
+
|
|
98
|
+
/**
 * Check if a route file contains requireOrganizationAdmin middleware.
 * Plain substring scan — identical semantics to a bare regex test.
 */
function hasOrgAdminMiddleware(content) {
  return content.includes('requireOrganizationAdmin')
}
|
|
104
|
+
|
|
105
|
+
/**
 * Expected route filename for a given accessLevel.
 * Returns null for levels with no fixed filename convention
 * (e.g. 'system', 'creator').
 */
function expectedRouteFileName(accessLevel) {
  const byLevel = new Map([
    ['admin', 'adminRoutes.ts'],
    ['user', 'userRoutes.ts'],
    ['public', 'publicRoutes.ts']
  ])
  return byLevel.get(accessLevel) ?? null
}
|
|
116
|
+
|
|
117
|
+
/**
 * Run the route ↔ config alignment check.
 *
 * @param {object} config - runner-supplied check configuration (unused here)
 * @param {string} projectRoot - absolute path to the project being audited
 * @returns {Promise<object>} { passed, skipped, findings, summary, details }
 */
export async function run(config, projectRoot) {
  const results = {
    passed: true,
    skipped: false,
    findings: [],
    summary: { total: 0, critical: 0, high: 0, medium: 0, low: 0 },
    details: { routesChecked: 0, violations: 0 }
  }

  // Record one violation: push the finding and keep the counters/flags in
  // sync. (Previously this 5-line bookkeeping block was copy-pasted at
  // every reporting site.)
  const record = (finding) => {
    results.findings.push(finding)
    results.summary[finding.severity]++
    results.summary.total++
    results.passed = false
    results.details.violations++
  }

  const featureConfigs = parseFeatureConfigs(projectRoot)

  if (featureConfigs.length === 0) {
    results.skipped = true
    results.skipReason = 'No feature config files found'
    return results
  }

  for (const cfg of featureConfigs) {
    const routeFiles = findRouteFiles(cfg.featureDir)

    // --- system: should NOT have any route file ---
    if (cfg.accessLevel === 'system') {
      if (routeFiles.length > 0) {
        const routeNames = routeFiles.map(f => basename(f)).join(', ')
        record({
          file: cfg.configFile,
          line: 1,
          type: 'system-has-routes',
          severity: 'high',
          message: `Config declares accessLevel "system" for table "${cfg.tableName}" but feature has route files: ${routeNames}. System features should be backend-only with no HTTP routes.`,
          fix: `Remove route files or change accessLevel in the config.`
        })
      }
      continue
    }

    // Skip features with no routes (may be intentional for some configs)
    if (routeFiles.length === 0) continue

    // Check each route file in this feature
    for (const routeFile of routeFiles) {
      results.details.routesChecked++

      let content
      try { content = readFileSync(routeFile, 'utf-8') } catch { continue }

      const routeName = basename(routeFile)
      const relRouteFile = routeFile.replace(projectRoot + '/', '')

      // --- admin checks: adminRoutes.ts MUST carry both middlewares ---
      if (cfg.accessLevel === 'admin' && routeName === 'adminRoutes.ts') {
        // CRITICAL: admin route MUST have authenticateToken
        if (!hasAuthMiddleware(content)) {
          record({
            file: relRouteFile,
            line: 1,
            type: 'admin-route-no-auth',
            severity: 'critical',
            message: `Admin route for table "${cfg.tableName}" is missing authenticateToken middleware. Endpoints are accessible without authentication.`,
            fix: `Add authenticateToken middleware: router.use(authenticateToken, requireOrganizationAdmin)`
          })
        }

        // CRITICAL: admin route MUST have requireOrganizationAdmin
        if (!hasOrgAdminMiddleware(content)) {
          record({
            file: relRouteFile,
            line: 1,
            type: 'admin-route-no-org-admin',
            severity: 'critical',
            message: `Admin route for table "${cfg.tableName}" is missing requireOrganizationAdmin middleware. Any authenticated user can access admin endpoints.`,
            fix: `Add requireOrganizationAdmin middleware: router.use(authenticateToken, requireOrganizationAdmin)`
          })
        }
      }

      // --- user checks ---
      if (cfg.accessLevel === 'user') {
        // CRITICAL: user route MUST have authenticateToken
        if (routeName === 'userRoutes.ts' && !hasAuthMiddleware(content)) {
          record({
            file: relRouteFile,
            line: 1,
            type: 'user-route-no-auth',
            severity: 'critical',
            message: `User route for table "${cfg.tableName}" is missing authenticateToken middleware. Endpoints are accessible without authentication.`,
            fix: `Add authenticateToken middleware: router.use(authenticateToken)`
          })
        }

        // HIGH: user-level feature shouldn't primarily use adminRoutes
        if (routeName === 'adminRoutes.ts') {
          record({
            file: relRouteFile,
            line: 1,
            type: 'user-feature-admin-route',
            severity: 'high',
            message: `Config declares accessLevel "user" for table "${cfg.tableName}" but has adminRoutes.ts. Route file name does not match access level.`,
            fix: `Rename to userRoutes.ts or update config accessLevel to "admin".`
          })
        }
      }

      // --- creator checks: any non-public creator route needs auth ---
      if (cfg.accessLevel === 'creator' && !hasAuthMiddleware(content) && routeName !== 'publicRoutes.ts') {
        record({
          file: relRouteFile,
          line: 1,
          type: 'creator-route-no-auth',
          severity: 'critical',
          message: `Creator route "${routeName}" for table "${cfg.tableName}" is missing authenticateToken middleware. Endpoints are accessible without authentication.`,
          fix: `Add authenticateToken middleware: router.use(authenticateToken)`
        })
      }

      // --- public checks: fine without auth middleware, nothing to verify ---

      // --- Cross-access-level route name mismatch ---
      // Skip if route file has explicit @tetra-audit-ignore for this check
      const hasIgnoreDirective = /@tetra-audit-ignore\s+route-config-alignment\b/.test(content)
      if (cfg.accessLevel === 'admin' && routeName === 'publicRoutes.ts' && !hasAuthMiddleware(content) && !hasIgnoreDirective) {
        record({
          file: relRouteFile,
          line: 1,
          type: 'admin-feature-public-route-no-auth',
          severity: 'critical',
          message: `Config declares accessLevel "admin" for table "${cfg.tableName}" but has a publicRoutes.ts without auth. Admin data may be exposed publicly.`,
          fix: `Either add auth middleware to publicRoutes.ts, or add @tetra-audit-ignore route-config-alignment comment if the public route is intentional.`
        })
      }
    }

    // HIGH: the conventional route file name for this access level must
    // exist among the feature's route files. (routeFiles is non-empty here —
    // the empty case was skipped above.)
    const expected = expectedRouteFileName(cfg.accessLevel)
    if (expected && !routeFiles.some(f => basename(f) === expected)) {
      const routeNames = routeFiles.map(f => basename(f)).join(', ')
      record({
        file: cfg.configFile,
        line: 1,
        type: 'route-name-mismatch',
        severity: 'high',
        message: `Config declares accessLevel "${cfg.accessLevel}" for table "${cfg.tableName}" expecting ${expected} but found: ${routeNames}.`,
        fix: `Rename the primary route file to ${expected} or update the config accessLevel.`
      })
    }
  }

  return results
}
|
|
@@ -0,0 +1,175 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* RPC Security Mode Check — HARD BLOCK
|
|
3
|
+
*
|
|
4
|
+
* Scans ALL SQL migrations for RPC functions and verifies their security mode:
|
|
5
|
+
*
|
|
6
|
+
* SECURITY DEFINER = function runs as the DB owner, BYPASSES RLS completely.
|
|
7
|
+
* SECURITY INVOKER = function runs as the calling user, RLS is enforced.
|
|
8
|
+
*
|
|
9
|
+
* Rules:
|
|
10
|
+
* - Data query RPCs (get_*, list_*, search_*) → MUST be INVOKER
|
|
11
|
+
* - Auth helper functions (auth_org_id, auth_uid) → DEFINER is OK (they need to read auth.users)
|
|
12
|
+
* - Count/results RPCs linked to feature configs → MUST be INVOKER
|
|
13
|
+
* - Public RPCs (explicitly returning only public columns) → DEFINER is OK if whitelisted
|
|
14
|
+
*
|
|
15
|
+
* Whitelist: .tetra-quality.json → supabase.securityDefinerWhitelist: ['auth_org_id', ...]
|
|
16
|
+
*
|
|
17
|
+
* Reference: stella_howto_get slug="tetra-architecture-guide"
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
import { readFileSync, existsSync } from 'fs'
|
|
21
|
+
import { join } from 'path'
|
|
22
|
+
import { globSync } from 'glob'
|
|
23
|
+
|
|
24
|
+
// Check metadata consumed by the toolkit runner: `id` keys the check in
// config/whitelists, `category` determines which group it registers under,
// and `severity` is the headline level shown for this check.
export const meta = {
  id: 'rpc-security-mode',
  name: 'RPC Security Mode',
  category: 'security',
  severity: 'critical',
  description: 'Verifies all RPC functions use SECURITY INVOKER (not DEFINER) unless explicitly whitelisted. DEFINER bypasses RLS completely.'
}
|
|
31
|
+
|
|
32
|
+
// Functions that legitimately need SECURITY DEFINER.
// DEFINER runs as the function owner and bypasses RLS entirely, so only
// functions that must read privileged schemas (e.g. auth.*) or that
// deliberately expose public-safe columns belong here. Merged at runtime
// with the user-supplied supabase.securityDefinerWhitelist from config.
const BUILTIN_DEFINER_WHITELIST = [
  // Auth helpers (need to read auth schema / organization_members)
  'auth_org_id',
  'auth_admin_organizations',
  'auth_user_organizations',
  'auth_creator_organizations',
  'get_user_org_role',
  'get_org_id',
  'handle_new_user',
  'moddatetime',
  // Public RPCs (called by anon users, need DEFINER to bypass RLS and return only safe columns)
  'search_public_ad_library',
  // System/billing RPCs (called by systemDB, no user context)
  'get_org_credit_limits',
  // Supabase internal
  'pgsodium_encrypt',
  'pgsodium_decrypt'
]
|
|
51
|
+
|
|
52
|
+
/**
 * Scan all SQL migration files for CREATE FUNCTION statements and flag
 * SECURITY DEFINER functions that are not whitelisted.
 *
 * @param {object} config - Quality config; reads config.supabase.securityDefinerWhitelist (string[]).
 * @param {string} projectRoot - Absolute path of the project being audited.
 * @returns {Promise<object>} results object: { passed, skipped, findings[], summary, details }.
 *   Skipped (with skipReason) when no migration .sql files are found.
 */
export async function run(config, projectRoot) {
  const results = {
    passed: true,
    skipped: false,
    findings: [],
    summary: { total: 0, critical: 0, high: 0, medium: 0, low: 0 },
    details: { rpcsFound: 0, definerCount: 0, invokerCount: 0, defaultCount: 0, whitelistedCount: 0 }
  }

  // Both common layouts: repo-root supabase/ and backend/supabase/.
  const migrationDirs = [
    join(projectRoot, 'supabase/migrations'),
    join(projectRoot, 'backend/supabase/migrations')
  ]

  const sqlFiles = []
  for (const dir of migrationDirs) {
    if (!existsSync(dir)) continue
    try {
      const files = globSync('*.sql', { cwd: dir, absolute: true })
      sqlFiles.push(...files)
    } catch { /* unreadable dir — best-effort scan, skip */ }
  }

  if (sqlFiles.length === 0) {
    results.skipped = true
    results.skipReason = 'No SQL migration files found'
    return results
  }

  // Build effective whitelist from builtins + user config.
  const userWhitelist = (config.supabase?.securityDefinerWhitelist || [])
  const whitelist = new Set([...BUILTIN_DEFINER_WHITELIST, ...userWhitelist])

  // Track latest definition per function (later migrations override earlier ones).
  const functions = new Map() // funcName → { securityMode, file, line, isDataQuery }

  for (const file of sqlFiles) {
    let content
    try { content = readFileSync(file, 'utf-8') } catch { continue }

    const relFile = file.replace(projectRoot + '/', '')

    // Find all CREATE [OR REPLACE] FUNCTION statements.
    const funcRegex = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?(\w+)\s*\(/gi
    let match

    while ((match = funcRegex.exec(content)) !== null) {
      const funcName = match[1]
      const startPos = match.index

      // Extract the function body. BUGFIX: previously this took a flat
      // 5000-char window, which could span into the NEXT function definition
      // and misattribute its SECURITY clause to this one. Cap the body at the
      // start of the next CREATE FUNCTION (if any), still bounded at 5000 chars.
      const nextFuncRegex = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+/gi
      nextFuncRegex.lastIndex = startPos + match[0].length
      const nextMatch = nextFuncRegex.exec(content)
      const bodyEnd = Math.min(nextMatch ? nextMatch.index : content.length, startPos + 5000)
      const funcBody = content.substring(startPos, bodyEnd)

      // Determine security mode (DEFAULT = PostgreSQL's implicit INVOKER).
      let securityMode = 'DEFAULT'
      if (/SECURITY\s+DEFINER/i.test(funcBody.substring(0, 2000))) {
        securityMode = 'DEFINER'
      } else if (/SECURITY\s+INVOKER/i.test(funcBody.substring(0, 2000))) {
        securityMode = 'INVOKER'
      }

      // Data query RPCs (by naming convention) must never bypass RLS.
      const isDataQuery = /^(get_|list_|search_|find_|fetch_|count_)/i.test(funcName) ||
                          /_counts$|_results$|_detail$/i.test(funcName)

      // 1-based line number of the CREATE statement within the migration file.
      const beforeMatch = content.substring(0, startPos)
      const line = (beforeMatch.match(/\n/g) || []).length + 1

      // Later definitions override earlier ones (migrations are ordered).
      functions.set(funcName, { securityMode, file: relFile, line, isDataQuery, funcBody: funcBody.substring(0, 500) })
    }
  }

  results.details.rpcsFound = functions.size

  for (const [funcName, info] of functions) {
    if (info.securityMode === 'DEFINER') {
      results.details.definerCount++

      // Whitelisted DEFINER functions are counted but not flagged.
      if (whitelist.has(funcName)) {
        results.details.whitelistedCount++
        continue
      }

      if (info.isDataQuery) {
        // Data query RPCs with DEFINER = CRITICAL (full cross-org data exposure).
        results.passed = false
        results.findings.push({
          file: info.file,
          line: info.line,
          type: 'data-rpc-security-definer',
          severity: 'critical',
          message: `Data RPC "${funcName}" uses SECURITY DEFINER — bypasses ALL RLS policies. Any authenticated user can see ALL data from ALL organizations.`,
          fix: `Change to SECURITY INVOKER or remove the SECURITY DEFINER clause. If this function legitimately needs DEFINER, add "${funcName}" to supabase.securityDefinerWhitelist in .tetra-quality.json.`
        })
        results.summary.critical++
        results.summary.total++
      } else {
        // Non-data RPCs with DEFINER = HIGH (should still be investigated).
        results.findings.push({
          file: info.file,
          line: info.line,
          type: 'non-data-rpc-security-definer',
          severity: 'high',
          message: `RPC "${funcName}" uses SECURITY DEFINER but is not whitelisted. DEFINER functions bypass RLS — ensure this is intentional.`,
          fix: `Change to SECURITY INVOKER, or add "${funcName}" to supabase.securityDefinerWhitelist in .tetra-quality.json if DEFINER is intentional.`
        })
        results.summary.high++
        results.summary.total++
        results.passed = false
      }
    } else if (info.securityMode === 'INVOKER') {
      results.details.invokerCount++
    } else {
      results.details.defaultCount++
      // DEFAULT = INVOKER in PostgreSQL, which is correct
    }
  }

  return results
}
|
package/lib/runner.js
CHANGED
|
@@ -15,6 +15,7 @@ import * as frontendSupabaseQueries from './checks/security/frontend-supabase-qu
|
|
|
15
15
|
import * as tetraCoreCompliance from './checks/security/tetra-core-compliance.js'
|
|
16
16
|
import * as mixedDbUsage from './checks/security/mixed-db-usage.js'
|
|
17
17
|
import * as configRlsAlignment from './checks/security/config-rls-alignment.js'
|
|
18
|
+
import * as rpcSecurityMode from './checks/security/rpc-security-mode.js'
|
|
18
19
|
import * as systemdbWhitelist from './checks/security/systemdb-whitelist.js'
|
|
19
20
|
import * as huskyHooks from './checks/stability/husky-hooks.js'
|
|
20
21
|
import * as ciPipeline from './checks/stability/ci-pipeline.js'
|
|
@@ -24,6 +25,7 @@ import * as fileSize from './checks/codeQuality/file-size.js'
|
|
|
24
25
|
import * as namingConventions from './checks/codeQuality/naming-conventions.js'
|
|
25
26
|
import * as routeSeparation from './checks/codeQuality/route-separation.js'
|
|
26
27
|
import * as gitignoreValidation from './checks/security/gitignore-validation.js'
|
|
28
|
+
import * as routeConfigAlignment from './checks/security/route-config-alignment.js'
|
|
27
29
|
import * as rlsPolicyAudit from './checks/supabase/rls-policy-audit.js'
|
|
28
30
|
import * as rpcParamMismatch from './checks/supabase/rpc-param-mismatch.js'
|
|
29
31
|
import * as rpcGeneratorOrigin from './checks/supabase/rpc-generator-origin.js'
|
|
@@ -41,8 +43,10 @@ const ALL_CHECKS = {
|
|
|
41
43
|
tetraCoreCompliance,
|
|
42
44
|
mixedDbUsage,
|
|
43
45
|
configRlsAlignment,
|
|
46
|
+
rpcSecurityMode,
|
|
44
47
|
systemdbWhitelist,
|
|
45
|
-
gitignoreValidation
|
|
48
|
+
gitignoreValidation,
|
|
49
|
+
routeConfigAlignment
|
|
46
50
|
],
|
|
47
51
|
stability: [
|
|
48
52
|
huskyHooks,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@soulbatical/tetra-dev-toolkit",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.15.0",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"access": "restricted"
|
|
6
6
|
},
|
|
@@ -29,7 +29,9 @@
|
|
|
29
29
|
"tetra-init": "./bin/tetra-init.js",
|
|
30
30
|
"tetra-setup": "./bin/tetra-setup.js",
|
|
31
31
|
"tetra-dev-token": "./bin/tetra-dev-token.js",
|
|
32
|
-
"tetra-check-rls": "./bin/tetra-check-rls.js"
|
|
32
|
+
"tetra-check-rls": "./bin/tetra-check-rls.js",
|
|
33
|
+
"tetra-migration-lint": "./bin/tetra-migration-lint.js",
|
|
34
|
+
"tetra-db-push": "./bin/tetra-db-push.js"
|
|
33
35
|
},
|
|
34
36
|
"files": [
|
|
35
37
|
"bin/",
|