@soulbatical/tetra-dev-toolkit 1.17.1 → 1.17.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -0,0 +1,299 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Tetra Check Peers — Validate peer dependency compatibility across consumer projects
|
|
5
|
+
*
|
|
6
|
+
* Scans all known consumer projects and checks if their installed versions
|
|
7
|
+
* are compatible with tetra packages' peerDependencies.
|
|
8
|
+
*
|
|
9
|
+
* Usage:
|
|
10
|
+
* tetra-check-peers # Check all consumers
|
|
11
|
+
* tetra-check-peers --fix # Show npm commands to fix mismatches
|
|
12
|
+
* tetra-check-peers --strict # Fail on any mismatch (for CI/prepublish)
|
|
13
|
+
* tetra-check-peers --json # JSON output
|
|
14
|
+
*
|
|
15
|
+
* Add to prepublishOnly to catch breaking peer dep changes before publish.
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
import { readFileSync, existsSync } from 'fs'
|
|
19
|
+
import { join, basename, dirname } from 'path'
|
|
20
|
+
import { execSync } from 'child_process'
|
|
21
|
+
|
|
22
|
+
// ─── Config ──────────────────────────────────────────────

// Root directory containing all consumer project checkouts.
// HOME is not set on Windows (USERPROFILE is used there), and a literal '~'
// is never expanded by the fs layer — fall back to os.homedir(), which
// resolves correctly on every platform.
const PROJECTS_ROOT = join(process.env.HOME || homedir(), 'projecten')
const TETRA_ROOT = join(PROJECTS_ROOT, 'tetra', 'packages')

// Tetra packages that have peerDependencies
const TETRA_PACKAGES = ['core', 'ui', 'dev-toolkit', 'schemas']
|
|
29
|
+
|
|
30
|
+
// ─── Helpers ─────────────────────────────────────────────
|
|
31
|
+
|
|
32
|
+
/**
 * Parse a JSON file from disk.
 * Returns the parsed value, or null on any failure (missing file,
 * permission error, malformed JSON) — callers treat null as "not present".
 */
function readJson(path) {
  try {
    const raw = readFileSync(path, 'utf-8')
    return JSON.parse(raw)
  } catch {
    return null
  }
}
|
|
39
|
+
|
|
40
|
+
/**
 * Minimal semver range check without external dependencies.
 *
 * Handles: exact "2.93.3", caret "^2.93.3", tilde "~2.93.3", star "*",
 * ">= 8.0.0", and unions like "^18.0.0 || ^19.0.0".
 *
 * Fixes over the previous version:
 * - prerelease/build suffixes ("2.93.4-rc.1") no longer parse to NaN;
 *   the suffix is stripped before numeric comparison.
 * - each alternative of a "||" union is evaluated with its own comparator
 *   (previously every union was forced through caret semantics).
 *
 * @param {string} installed - concrete version, e.g. "2.93.3"
 * @param {string} range - required range from peerDependencies
 * @returns {boolean} true if installed satisfies range
 */
function satisfiesRange(installed, range) {
  if (!installed || !range) return false
  if (range === '*') return true

  // Strip a leading comparator (^ ~ >= <= > < =) and any prerelease/build
  // suffix ("2.93.4-rc.1" → "2.93.4"), then split into numeric components.
  const parseVersion = (v) => {
    const cleaned = v.replace(/^[\^~>=<\s]+/, '').trim().split(/[-+]/)[0]
    const nums = cleaned.split('.').map(Number)
    return { major: nums[0] || 0, minor: nums[1] || 0, patch: nums[2] || 0 }
  }

  const inst = parseVersion(installed)

  // Lexicographic >= on (major, minor, patch).
  const gte = (a, b) => {
    if (a.major !== b.major) return a.major > b.major
    if (a.minor !== b.minor) return a.minor > b.minor
    return a.patch >= b.patch
  }

  // Evaluate one alternative ("^2.93.3", ">=8.0.0", "~1.2.3", "1.2.3").
  const satisfiesOne = (part) => {
    const req = parseVersion(part)
    if (part.startsWith('>=')) {
      return gte(inst, req)
    }
    if (part.startsWith('^')) {
      // Caret: same major, and at least the required minor.patch.
      if (inst.major !== req.major) return false
      if (inst.minor !== req.minor) return inst.minor > req.minor
      return inst.patch >= req.patch
    }
    if (part.startsWith('~')) {
      // Tilde: same major.minor, at least the required patch.
      return inst.major === req.major && inst.minor === req.minor && inst.patch >= req.patch
    }
    // Bare version → exact match.
    return inst.major === req.major && inst.minor === req.minor && inst.patch === req.patch
  }

  // A range may be a union of alternatives joined by "||".
  return range.split('||').map(p => p.trim()).filter(Boolean).some(satisfiesOne)
}
|
|
87
|
+
|
|
88
|
+
// ─── Discovery ───────────────────────────────────────────
|
|
89
|
+
|
|
90
|
+
/**
 * Read each tetra package's package.json under TETRA_ROOT and collect the
 * ones that declare peerDependencies, keyed by their published name.
 *
 * @returns {Object} map of package name → { version, peerDependencies, peerDependenciesMeta }
 */
function discoverTetraPeerDeps() {
  const result = {}

  for (const pkg of TETRA_PACKAGES) {
    const manifest = readJson(join(TETRA_ROOT, pkg, 'package.json'))
    const peers = manifest?.peerDependencies
    if (!peers) continue

    result[manifest.name] = {
      version: manifest.version,
      peerDependencies: peers,
      peerDependenciesMeta: manifest.peerDependenciesMeta || {}
    }
  }

  return result
}
|
|
106
|
+
|
|
107
|
+
function discoverConsumers() {
|
|
108
|
+
const consumers = []
|
|
109
|
+
|
|
110
|
+
try {
|
|
111
|
+
const dirs = execSync(`ls -d ${PROJECTS_ROOT}/*/`, { encoding: 'utf-8' })
|
|
112
|
+
.trim().split('\n').filter(Boolean)
|
|
113
|
+
|
|
114
|
+
for (const dir of dirs) {
|
|
115
|
+
const projectName = basename(dir.replace(/\/$/, ''))
|
|
116
|
+
if (projectName === 'tetra' || projectName.startsWith('.') || projectName.startsWith('_')) continue
|
|
117
|
+
|
|
118
|
+
// Check root, backend/, frontend/ package.json
|
|
119
|
+
const locations = [
|
|
120
|
+
{ path: join(dir, 'package.json'), label: projectName },
|
|
121
|
+
{ path: join(dir, 'backend', 'package.json'), label: `${projectName}/backend` },
|
|
122
|
+
{ path: join(dir, 'frontend', 'package.json'), label: `${projectName}/frontend` },
|
|
123
|
+
]
|
|
124
|
+
|
|
125
|
+
for (const loc of locations) {
|
|
126
|
+
const pkg = readJson(loc.path)
|
|
127
|
+
if (!pkg) continue
|
|
128
|
+
|
|
129
|
+
const allDeps = { ...pkg.dependencies, ...pkg.devDependencies }
|
|
130
|
+
|
|
131
|
+
// Check if this package.json uses any tetra package
|
|
132
|
+
const usesTetra = Object.keys(allDeps).some(d => d.startsWith('@soulbatical/tetra-'))
|
|
133
|
+
if (!usesTetra) continue
|
|
134
|
+
|
|
135
|
+
consumers.push({
|
|
136
|
+
label: loc.label,
|
|
137
|
+
path: loc.path,
|
|
138
|
+
dependencies: allDeps,
|
|
139
|
+
tetraDeps: Object.fromEntries(
|
|
140
|
+
Object.entries(allDeps).filter(([k]) => k.startsWith('@soulbatical/tetra-'))
|
|
141
|
+
)
|
|
142
|
+
})
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
} catch {
|
|
146
|
+
// ignore discovery errors
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
return consumers
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// ─── Check ───────────────────────────────────────────────
|
|
153
|
+
|
|
154
|
+
/**
 * Cross-check every consumer's installed dependencies against the
 * peerDependencies declared by each tetra package it uses.
 *
 * @param {Object} tetraPeers - output of discoverTetraPeerDeps()
 * @param {Array} consumers - output of discoverConsumers()
 * @returns {Array} flat list of issue records (missing or mismatched peers)
 */
function checkCompatibility(tetraPeers, consumers) {
  const issues = []

  // Evaluate one (consumer, tetra package, peer) triple.
  // Returns an issue record, or null when satisfied / optional-and-absent.
  const inspect = (consumer, tetraPkg, tetraInfo, peerDep, requiredRange) => {
    const optional = tetraInfo.peerDependenciesMeta[peerDep]?.optional
    const installedVersion = consumer.dependencies[peerDep]

    if (!installedVersion) {
      if (optional) return null
      return {
        consumer: consumer.label,
        tetraPackage: tetraPkg,
        dependency: peerDep,
        required: requiredRange,
        installed: 'MISSING',
        severity: 'error',
        fix: `npm install ${peerDep}@"${requiredRange}"`
      }
    }

    // Consumers usually declare ranges themselves ("^2.93.3") — strip the
    // comparator before checking against the peer range.
    const cleanInstalled = installedVersion.replace(/^[\^~>=<\s]+/, '')
    if (satisfiesRange(cleanInstalled, requiredRange)) return null

    // An exact pin in peerDependencies is treated as a warning (a publishing
    // style problem) rather than a hard incompatibility.
    const isExactPin = !requiredRange.startsWith('^') && !requiredRange.startsWith('~') && !requiredRange.startsWith('>')
    return {
      consumer: consumer.label,
      tetraPackage: tetraPkg,
      dependency: peerDep,
      required: requiredRange,
      installed: installedVersion,
      severity: isExactPin ? 'warning' : 'error',
      isExactPin,
      fix: `npm install ${peerDep}@"${requiredRange}"`,
      suggestion: isExactPin
        ? `Consider using "^${requiredRange}" in ${tetraPkg} peerDependencies for flexibility`
        : null
    }
  }

  for (const consumer of consumers) {
    for (const [tetraPkg, tetraInfo] of Object.entries(tetraPeers)) {
      // Skip tetra packages this consumer does not use.
      if (!consumer.tetraDeps[tetraPkg]) continue

      for (const [peerDep, requiredRange] of Object.entries(tetraInfo.peerDependencies)) {
        const issue = inspect(consumer, tetraPkg, tetraInfo, peerDep, requiredRange)
        if (issue) issues.push(issue)
      }
    }
  }

  return issues
}
|
|
210
|
+
|
|
211
|
+
// ─── Output ──────────────────────────────────────────────
|
|
212
|
+
|
|
213
|
+
/**
 * Render the compatibility report as human-readable terminal text.
 *
 * @param {Array} issues - output of checkCompatibility()
 * @param {Array} consumers - discovered consumer manifests
 * @param {Object} tetraPeers - tetra packages with peer declarations
 * @param {Object} options - { fix } controls whether fix commands are printed
 * @returns {string} multi-line report
 */
function formatTerminal(issues, consumers, tetraPeers, options) {
  const RULE = '═══════════════════════════════════════════════════════════════'
  const out = []
  const emit = (line = '') => out.push(line)

  emit()
  emit(RULE)
  emit(' 🔗 Tetra Check Peers — Peer Dependency Compatibility')
  emit(RULE)
  emit()

  // Header summary.
  const tetraPackages = Object.entries(tetraPeers)
    .map(([name, info]) => `${name}@${info.version}`)
    .join(', ')
  emit(` Tetra packages: ${tetraPackages}`)
  emit(` Consumers found: ${consumers.length}`)
  emit(` Issues found: ${issues.length}`)
  emit()

  // Happy path: nothing to report.
  if (issues.length === 0) {
    emit(' ✅ All consumer projects are compatible with current peer dependencies')
    emit()
    emit(RULE)
    return out.join('\n')
  }

  // Group issues per consumer, preserving first-seen order.
  const grouped = new Map()
  for (const issue of issues) {
    if (!grouped.has(issue.consumer)) grouped.set(issue.consumer, [])
    grouped.get(issue.consumer).push(issue)
  }

  for (const [consumer, consumerIssues] of grouped) {
    emit(` 📦 ${consumer}`)
    for (const issue of consumerIssues) {
      const icon = issue.severity === 'error' ? '❌' : '⚠️'
      emit(` ${icon} ${issue.dependency}: installed ${issue.installed}, needs ${issue.required}`)
      if (issue.suggestion) {
        emit(` 💡 ${issue.suggestion}`)
      }
      if (options.fix) {
        emit(` → ${issue.fix}`)
      }
    }
    emit()
  }

  // Call out exact-pin peer ranges separately — the most common root cause.
  const exactPins = issues.filter(i => i.isExactPin)
  if (exactPins.length > 0) {
    const uniquePins = [...new Set(exactPins.map(i => `${i.dependency} (${i.required} in ${i.tetraPackage})`))]
    emit(' 💡 EXACT VERSION PINS detected — these cause most compatibility issues:')
    for (const pin of uniquePins) {
      emit(` → ${pin}`)
    }
    emit(' Consider using "^x.y.z" ranges instead of exact versions in peerDependencies')
    emit()
  }

  emit(RULE)
  return out.join('\n')
}
|
|
275
|
+
|
|
276
|
+
// ─── Main ────────────────────────────────────────────────
|
|
277
|
+
|
|
278
|
+
// CLI flag parsing — three booleans, no argv library needed.
const args = process.argv.slice(2)
const options = {
  fix: args.includes('--fix'),       // print the npm command that resolves each issue
  strict: args.includes('--strict'), // exit 1 on error-severity issues (CI / prepublishOnly)
  json: args.includes('--json'),     // machine-readable output instead of the report
}

// Gather data, then cross-check: tetra peer declarations × consumer manifests.
const tetraPeers = discoverTetraPeerDeps()
const consumers = discoverConsumers()
const issues = checkCompatibility(tetraPeers, consumers)

if (options.json) {
  console.log(JSON.stringify({ tetraPeers, consumers: consumers.map(c => c.label), issues }, null, 2))
} else {
  console.log(formatTerminal(issues, consumers, tetraPeers, options))
}

// Exit code
// Only --strict escalates error-severity findings to a failing exit, so ad-hoc
// runs always complete with status 0; warnings never fail the process.
const errors = issues.filter(i => i.severity === 'error')
if (options.strict && errors.length > 0) {
  process.exit(1)
}
|
|
@@ -281,32 +281,98 @@ function isWideOpen(using) {
|
|
|
281
281
|
}
|
|
282
282
|
|
|
283
283
|
/**
|
|
284
|
-
*
|
|
285
|
-
* Valid patterns (from sparkbuddy-live):
|
|
286
|
-
* - organization_id IN (SELECT auth_admin_organizations())
|
|
287
|
-
* - user_id = auth.uid()
|
|
288
|
-
* - USING(true) for public tables only
|
|
289
|
-
* - Subquery to parent table with org/user check
|
|
284
|
+
* Allowed RLS policy patterns — whitelist approach.
|
|
290
285
|
*
|
|
291
|
-
*
|
|
286
|
+
* Derived from sparkbuddy-live production DB (562 policies analyzed).
|
|
287
|
+
* These are the ONLY structural patterns allowed in USING/WITH CHECK clauses.
|
|
288
|
+
* Everything else is rejected. To add a new pattern: add it here with justification.
|
|
289
|
+
*
|
|
290
|
+
* Categories:
|
|
291
|
+
* 1. Org isolation: auth_admin_organizations(), auth_user_organizations(), auth_org_id()
|
|
292
|
+
* 2. User isolation: auth.uid(), auth_current_user_id()
|
|
293
|
+
* 3. Role gates: auth.role() = 'authenticated' or 'anon' (NOT 'service_role')
|
|
294
|
+
* 4. Data filters: column = literal, IS NULL, boolean checks
|
|
295
|
+
* 5. Parent checks: IN (SELECT ...), EXISTS (SELECT ...)
|
|
296
|
+
* 6. Open access: true, false
|
|
297
|
+
* 7. Legacy: auth.jwt() -> 'app_metadata', current_setting('app.*')
|
|
292
298
|
*/
|
|
293
|
-
const
|
|
294
|
-
|
|
295
|
-
{ pattern: /
|
|
296
|
-
{ pattern: /
|
|
297
|
-
{ pattern: /
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
{ pattern: /
|
|
299
|
+
// Whitelist of structural patterns accepted in RLS USING / WITH CHECK clauses.
// Checked by validateRlsClause() AFTER the banned-pattern scan: a clause must
// match at least one entry here or it is reported as unrecognized.
const ALLOWED_RLS_PATTERNS = [
  // 1. Org isolation helper functions
  { pattern: /auth_admin_organizations\s*\(\)/i, label: 'org-admin isolation' },
  { pattern: /auth_user_organizations\s*\(\)/i, label: 'org-user isolation' },
  { pattern: /auth_org_id\s*\(\)/i, label: 'org isolation' },

  // 2. User isolation
  { pattern: /auth\.uid\s*\(\)/i, label: 'user isolation' },
  { pattern: /auth_current_user_id\s*\(\)/i, label: 'user isolation' },

  // 3. Role gates (ONLY authenticated and anon — never service_role)
  { pattern: /auth\.role\s*\(\)\s*=\s*'authenticated'/i, label: 'authenticated role gate' },
  { pattern: /auth\.role\s*\(\)\s*=\s*'anon'/i, label: 'anon role gate' },

  // 4. Column comparisons and data filters (any column = any value is fine)
  // This is inherently safe — it filters data, doesn't bypass auth
  // NOTE(review): /\w+\s*=\s*/ matches any clause containing "x =", so this
  // entry alone satisfies the whitelist for nearly every non-trivial clause —
  // the "unrecognized clause" branch will rarely fire. Confirm this breadth
  // is intended; tightening it would change which clauses are flagged.
  { pattern: /\w+\s*=\s*/i, label: 'column comparison' },
  { pattern: /\w+\s+IS\s+(NOT\s+)?NULL/i, label: 'null check' },
  { pattern: /\w+\s+IN\s*\(/i, label: 'IN check' },
  { pattern: /\w+\s*=\s*ANY\s*\(/i, label: 'ANY check' },

  // 5. Parent table checks
  { pattern: /EXISTS\s*\(\s*SELECT/i, label: 'exists subquery' },

  // 6. Open access
  { pattern: /^\s*true\s*$/i, label: 'public access' },
  { pattern: /^\s*\(true\)\s*$/i, label: 'public access' },
  { pattern: /^\s*false\s*$/i, label: 'deny all' },

  // 7. Legacy JWT and app context
  { pattern: /auth\.jwt\s*\(\)/i, label: 'legacy JWT' },
  { pattern: /current_setting\s*\(\s*'app\./i, label: 'app context' },

  // 8. Custom helper functions (e.g. is_org_member(), is_product_publicly_accessible())
  // These are project-specific SECURITY DEFINER helpers — safe as long as
  // the function itself is audited (which is done by the RPC Security Mode check)
  // NOTE(review): this matches ANY function call, further widening the
  // whitelist — combined with entry 4 the enforcement is effectively only
  // the banned-pattern list. Verify this is the intended trade-off.
  { pattern: /\b\w+\s*\([^)]*\)/i, label: 'function call' },
]
|
|
302
337
|
|
|
303
338
|
/**
|
|
304
|
-
*
|
|
305
|
-
*
|
|
339
|
+
* Patterns BANNED from RLS policies — these bypass tenant isolation.
|
|
340
|
+
* Service role already bypasses RLS at the Supabase layer automatically.
|
|
341
|
+
* Adding these to policies creates a false sense of security and opens
|
|
342
|
+
* cross-tenant data leakage vectors.
|
|
343
|
+
*
|
|
344
|
+
* Derived from sparkbuddy-live analysis: 2 policies with auth.role()='service_role'
|
|
345
|
+
* were identified as tech debt (redirects, translations) — not a pattern to follow.
|
|
306
346
|
*/
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
347
|
+
// Patterns rejected outright in RLS clauses. validateRlsClause() scans these
// BEFORE the whitelist and returns the first matching label, so order matters.
const BANNED_RLS_PATTERNS = [
  { pattern: /service_role/i, label: 'service_role bypass — service role already bypasses RLS at the Supabase layer' },
  // NOTE(review): shadowed — the broader /service_role/i entry above always
  // matches first, so this label is never returned. Harmless, but the more
  // specific message is dead; consider reordering or removing one.
  { pattern: /auth\.role\s*\(\)\s*=\s*'service_role'/i, label: 'auth.role() service_role bypass — service role already bypasses RLS automatically' },
  { pattern: /current_setting\s*\(\s*'role'/i, label: 'PostgreSQL role check — bypasses tenant isolation' },
  { pattern: /current_setting\s*\(\s*'request\.jwt\.claims'/i, label: 'JWT claims role check — bypasses tenant isolation' },
  { pattern: /session_user/i, label: 'session_user check — bypasses tenant isolation' },
  // NOTE(review): only catches "current_user =" — a clause written as
  // "= current_user" or "current_user IN (...)" slips through; verify.
  { pattern: /current_user\s*=/i, label: 'current_user check — bypasses tenant isolation' },
  { pattern: /pg_has_role/i, label: 'pg_has_role — bypasses tenant isolation' },
]
|
|
356
|
+
|
|
357
|
+
/**
|
|
358
|
+
* Validate an RLS clause against the whitelist.
|
|
359
|
+
* Returns null if valid, or a description string if banned/unrecognized.
|
|
360
|
+
*/
|
|
361
|
+
/**
 * Validate an RLS clause against the whitelist.
 * Returns null if valid, or a description string if banned/unrecognized.
 */
function validateRlsClause(clause) {
  // Empty / whitespace-only clauses are treated as valid (nothing to check).
  if (!clause || !clause.trim()) return null

  // Banned patterns are always wrong — first match wins.
  const banned = BANNED_RLS_PATTERNS.find(({ pattern }) => pattern.test(clause))
  if (banned) return banned.label

  // The clause must match at least one whitelisted structural pattern.
  const recognized = ALLOWED_RLS_PATTERNS.some(({ pattern }) => pattern.test(clause))
  if (recognized) return null

  return `Unrecognized RLS clause: "${clause.substring(0, 150)}". Only whitelisted patterns are allowed (org/user isolation, role gates, data filters, subqueries). See ALLOWED_RLS_PATTERNS in config-rls-alignment.js.`
}
|
|
311
377
|
|
|
312
378
|
export async function run(config, projectRoot) {
|
|
@@ -475,24 +541,24 @@ export async function run(config, projectRoot) {
|
|
|
475
541
|
}
|
|
476
542
|
}
|
|
477
543
|
|
|
478
|
-
// CHECK 4b:
|
|
544
|
+
// CHECK 4b: All policy clauses must match whitelisted patterns only
|
|
479
545
|
for (const p of policies) {
|
|
480
|
-
const
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
546
|
+
for (const [clauseType, clause] of [['USING', p.using], ['WITH CHECK', p.withCheck]]) {
|
|
547
|
+
if (!clause) continue
|
|
548
|
+
const violation = validateRlsClause(clause)
|
|
549
|
+
if (violation) {
|
|
550
|
+
results.passed = false
|
|
551
|
+
results.findings.push({
|
|
552
|
+
file: p.file,
|
|
553
|
+
line: 1,
|
|
554
|
+
type: 'rls-invalid-clause',
|
|
555
|
+
severity: 'critical',
|
|
556
|
+
message: `Policy "${p.name}" on table "${tableName}" has invalid ${clauseType} clause: ${violation}`,
|
|
557
|
+
fix: `Only whitelisted patterns are allowed. Valid: auth_admin_organizations(), auth.uid(), org/user column checks, parent-table subqueries, boolean column filters. See ALLOWED_RLS_ATOMS in config-rls-alignment.js.`
|
|
558
|
+
})
|
|
559
|
+
results.summary.critical++
|
|
560
|
+
results.summary.total++
|
|
561
|
+
}
|
|
496
562
|
}
|
|
497
563
|
}
|
|
498
564
|
|