@soulbatical/tetra-dev-toolkit 1.18.1 → 1.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -21,7 +21,28 @@ import { execSync } from 'child_process'
21
21
 
22
22
  // ─── Config ──────────────────────────────────────────────
23
23
 
24
- const PROJECTS_ROOT = join(process.env.HOME || '~', 'projecten')
24
// Resolve paths dynamically:
// 1. TETRA_PROJECTS_ROOT env var (explicit override)
// 2. Detect from tetra repo: this script lives in tetra/packages/dev-toolkit/bin/,
//    so tetra root is 3 directories above the script dir and the projects root
//    is its parent (sibling dirs)
// 3. Fallback: ~/projecten
function resolveProjectsRoot() {
  if (process.env.TETRA_PROJECTS_ROOT) return process.env.TETRA_PROJECTS_ROOT

  // This file: <projects>/<tetra>/packages/dev-toolkit/bin/tetra-check-peers.js
  // decodeURIComponent: URL pathnames are percent-encoded, so a directory with
  // a space (or other special characters) would otherwise yield a path that
  // never exists and silently force the fallback.
  // NOTE(review): on Windows the pathname still carries a leading '/C:/' —
  // confirm whether this tool is POSIX-only or switch to url.fileURLToPath.
  const scriptDir = dirname(decodeURIComponent(new URL(import.meta.url).pathname))
  const tetraRoot = join(scriptDir, '..', '..', '..') // bin → dev-toolkit → packages → tetra/
  const possibleProjectsRoot = join(tetraRoot, '..') // → projects/

  // Verify: does this directory contain a 'tetra' subdirectory?
  if (existsSync(join(possibleProjectsRoot, 'tetra', 'packages'))) {
    return possibleProjectsRoot
  }

  // NOTE(review): '~' is not expanded by the fs layer — when HOME is unset this
  // produces a literal '~/projecten' path. Kept for backward compatibility.
  return join(process.env.HOME || '~', 'projecten')
}

const PROJECTS_ROOT = resolveProjectsRoot()
25
46
  const TETRA_ROOT = join(PROJECTS_ROOT, 'tetra', 'packages')
26
47
 
27
48
  // Tetra packages that have peerDependencies
@@ -0,0 +1,293 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Tetra Security Gate — AI-powered pre-push security review
5
+ *
6
+ * Detects security-sensitive file changes in the current git diff,
7
+ * submits them to ralph-manager's security gate agent for review,
8
+ * and blocks the push if the agent denies the changes.
9
+ *
10
+ * Usage:
11
+ * tetra-security-gate # Auto-detect ralph-manager URL
12
+ * tetra-security-gate --url <url> # Explicit ralph-manager URL
13
+ * tetra-security-gate --timeout 120 # Custom timeout (seconds)
14
+ * tetra-security-gate --dry-run # Show what would be sent, don't block
15
+ *
16
+ * Exit codes:
17
+ * 0 = approved (or no security files changed)
18
+ * 1 = denied (security violation found)
19
+ * 0 = ralph-manager offline (graceful fallback, doesn't block)
20
+ */
21
+
22
import { execSync } from 'child_process'
import { readFileSync } from 'fs'

import chalk from 'chalk'
import { program } from 'commander'
25
+
26
// Security-sensitive file patterns.
// A file matching ANY of these triggers the AI security review before push.
// (The former /auth-config/i entry was removed: it is strictly subsumed by
// /auth[-_]?config/i, so the match set is unchanged.)
const SECURITY_PATTERNS = [
  /supabase\/migrations\/.*\.sql$/i, // DB migrations may alter RLS
  /\.rls\./i,
  /rls[-_]?policy/i,
  /auth[-_]?config/i, // covers auth-config, auth_config, authconfig
  /middleware\/auth/i,
  /middleware\/security/i,
  /security\.ts$/i,
  /security\.js$/i,
  /\.env$/,
  /\.env\.\w+$/,
  /doppler\.yaml$/,
  /permissions/i, // NOTE(review): broad — matches any path containing 'permissions'
  /checks\/security\//i,
]

/**
 * True when a changed file path matches any security-sensitive pattern.
 * @param {string} file - repo-relative path as printed by `git diff --name-only`
 * @returns {boolean}
 */
function isSecurityFile(file) {
  return SECURITY_PATTERNS.some(p => p.test(file))
}
47
+
48
/**
 * List the files changed in the outgoing push.
 * Preference order: @{upstream}...HEAD → origin/main...HEAD →
 * origin/master...HEAD → HEAD~1. Returns [] when no base is reachable.
 * Assumes a POSIX shell (the `2>/dev/null` redirections already require one).
 * @returns {string[]} repo-relative file paths
 */
function getChangedFiles() {
  try {
    // Files changed between HEAD and upstream (what's being pushed)
    const upstream = execSync('git rev-parse --abbrev-ref @{upstream} 2>/dev/null', { encoding: 'utf8' }).trim()
    if (upstream) {
      // Single-quote the ref: branch names may contain shell metacharacters
      // (e.g. '$', parentheses) that would break or inject into the command.
      return execSync(`git diff --name-only '${upstream}...HEAD'`, { encoding: 'utf8' }).trim().split('\n').filter(Boolean)
    }
  } catch {
    // No upstream — compare against origin/main or origin/master
  }

  for (const base of ['origin/main', 'origin/master']) {
    try {
      return execSync(`git diff --name-only ${base}...HEAD`, { encoding: 'utf8' }).trim().split('\n').filter(Boolean)
    } catch { /* try next */ }
  }

  // Fallback: last commit
  try {
    return execSync('git diff --name-only HEAD~1', { encoding: 'utf8' }).trim().split('\n').filter(Boolean)
  } catch {
    return []
  }
}
72
+
73
/**
 * Produce the combined unified diff for the given files, using the same
 * base-ref preference order as getChangedFiles().
 * @param {string[]} files - repo-relative paths to include in the diff
 * @returns {string} unified diff text, or '' when no diff can be produced
 */
function getDiff(files) {
  // Single-quote each path ('\'' escapes embedded quotes) so filenames with
  // spaces or shell metacharacters cannot break — or inject into — the
  // shell command built below.
  const fileArgs = files.map(f => `'${String(f).replace(/'/g, "'\\''")}'`).join(' ')

  try {
    const upstream = execSync('git rev-parse --abbrev-ref @{upstream} 2>/dev/null', { encoding: 'utf8' }).trim()
    if (upstream) {
      return execSync(`git diff '${upstream}...HEAD' -- ${fileArgs}`, { encoding: 'utf8', maxBuffer: 1024 * 1024 })
    }
  } catch { /* fallback */ }

  for (const base of ['origin/main', 'origin/master']) {
    try {
      return execSync(`git diff ${base}...HEAD -- ${fileArgs}`, { encoding: 'utf8', maxBuffer: 1024 * 1024 })
    } catch { /* try next */ }
  }

  try {
    return execSync(`git diff HEAD~1 -- ${fileArgs}`, { encoding: 'utf8', maxBuffer: 1024 * 1024 })
  } catch {
    return ''
  }
}
93
+
94
/**
 * Derive the project name from the 'origin' remote URL, falling back to the
 * repository directory name, then 'unknown'.
 * @returns {string}
 */
function getProjectName() {
  try {
    const remoteUrl = execSync('git remote get-url origin 2>/dev/null', { encoding: 'utf8' }).trim()
    // Last path segment minus an optional '.git'. The [/:] separator also
    // handles scp-style remotes without a slash, e.g. 'git@host:repo.git',
    // which the previous slash-only pattern missed.
    const match = remoteUrl.match(/[/:]([^/:]+?)(?:\.git)?$/)
    if (match) return match[1]
  } catch { /* fallback */ }

  try {
    return execSync('basename "$(git rev-parse --show-toplevel)"', { encoding: 'utf8' }).trim()
  } catch {
    return 'unknown'
  }
}
107
+
108
/**
 * Resolve the ralph-manager base URL.
 * Precedence: .ralph/ports.json api_url → RALPH_MANAGER_API env → localhost default.
 * @returns {string}
 */
function getRalphManagerUrl() {
  // Check .ralph/ports.json first
  try {
    // Read the file directly instead of shelling out to `cat`: portable
    // (no POSIX shell required) and avoids spawning a subprocess.
    const portsJson = readFileSync('.ralph/ports.json', 'utf8')
    const ports = JSON.parse(portsJson)
    if (ports.api_url) return ports.api_url
  } catch { /* fallback */ }

  // Check RALPH_MANAGER_API env
  if (process.env.RALPH_MANAGER_API) return process.env.RALPH_MANAGER_API

  // Default
  return 'http://localhost:3005'
}
122
+
123
/**
 * Poll ralph-manager until the security-gate verdict resolves or the
 * timeout expires.
 * @param {string} baseUrl - ralph-manager base URL
 * @param {string} gateId - gate record id returned by the submit call
 * @param {number} timeoutSeconds - maximum seconds to wait for a verdict
 * @returns {Promise<{status: string, reason?: string, findings?: string[]}>}
 */
async function pollForVerdict(baseUrl, gateId, timeoutSeconds) {
  const deadline = Date.now() + timeoutSeconds * 1000
  const pollInterval = 3000 // 3 seconds

  while (Date.now() < deadline) {
    try {
      const resp = await fetch(`${baseUrl}/api/internal/security-gate/${gateId}`)
      if (!resp.ok) {
        console.error(chalk.yellow(` Poll failed: HTTP ${resp.status}`))
        break
      }

      const { data } = await resp.json()

      // data?.* guard: a malformed body without `data` previously threw a
      // TypeError that the catch below misread as a network error. Treat it
      // as "still pending" and retry instead.
      if (data?.status === 'approved') {
        return { status: 'approved', reason: data.reason, findings: data.findings }
      }
      if (data?.status === 'denied') {
        return { status: 'denied', reason: data.reason, findings: data.findings }
      }
      if (data?.status === 'error') {
        return { status: 'error', reason: data.reason }
      }

      // Still pending — wait and retry
      await new Promise(r => setTimeout(r, pollInterval))
    } catch {
      // Network error — ralph-manager might be restarting
      await new Promise(r => setTimeout(r, pollInterval))
    }
  }

  return { status: 'timeout', reason: `No verdict within ${timeoutSeconds}s` }
}
157
+
158
// CLI definition. Design principle throughout the action handler: FAIL OPEN.
// Every infrastructure failure (offline, HTTP error, timeout, internal crash)
// exits 0 so the gate can never break a push; only an explicit DENY exits 1.
program
  .name('tetra-security-gate')
  .description('AI-powered pre-push security gate — reviews RLS/auth/security changes')
  .version('1.0.0')
  .option('--url <url>', 'Ralph Manager URL (default: auto-detect)')
  .option('--timeout <seconds>', 'Max wait time for agent verdict', '180')
  .option('--dry-run', 'Show what would be submitted, do not block')
  .action(async (options) => {
    try {
      console.log(chalk.blue.bold('\n Tetra Security Gate\n'))

      // Step 1: Detect changed files
      const allFiles = getChangedFiles()
      const securityFiles = allFiles.filter(isSecurityFile)

      // Nothing security-relevant in the push — pass without contacting the agent.
      if (securityFiles.length === 0) {
        console.log(chalk.green(' No security-sensitive files changed — skipping gate.'))
        console.log(chalk.gray(` (checked ${allFiles.length} files)\n`))
        process.exit(0)
      }

      console.log(chalk.yellow(` ${securityFiles.length} security-sensitive file(s) detected:`))
      for (const f of securityFiles) {
        console.log(chalk.gray(` - ${f}`))
      }
      console.log()

      // Step 2: Get the diff
      const diff = getDiff(securityFiles)
      if (!diff.trim()) {
        // Paths matched by name but carry no content change — nothing to review.
        console.log(chalk.green(' No actual diff content — skipping gate.\n'))
        process.exit(0)
      }

      const project = getProjectName()

      // Dry run: print what would be submitted and exit 0, never blocking.
      if (options.dryRun) {
        console.log(chalk.cyan(' [DRY RUN] Would submit to security gate:'))
        console.log(chalk.gray(` Project: ${project}`))
        console.log(chalk.gray(` Files: ${securityFiles.length}`))
        console.log(chalk.gray(` Diff size: ${diff.length} chars`))
        console.log()
        process.exit(0)
      }

      // Step 3: Submit to ralph-manager
      const baseUrl = options.url || getRalphManagerUrl()
      // NOTE(review): a non-numeric --timeout yields NaN, which makes
      // pollForVerdict return 'timeout' immediately (fail-open). Confirm
      // whether input validation is wanted here.
      const timeout = parseInt(options.timeout, 10)

      console.log(chalk.gray(` Submitting to ${baseUrl}...`))

      let gateId
      try {
        // 10s submit timeout: the long --timeout budget applies only to the
        // verdict poll below, not the initial POST.
        const resp = await fetch(`${baseUrl}/api/internal/security-gate`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ project, files_changed: securityFiles, diff }),
          signal: AbortSignal.timeout(10_000),
        })

        if (!resp.ok) {
          // Server-side error: fail open so the gate cannot break pushes.
          const body = await resp.text()
          console.error(chalk.yellow(` Ralph Manager returned ${resp.status}: ${body}`))
          console.log(chalk.yellow(' Falling back to PASS (ralph-manager error).\n'))
          process.exit(0)
        }

        const { data } = await resp.json()
        gateId = data.id

        // If already resolved (e.g. fallback auto-approve)
        if (data.status === 'approved') {
          console.log(chalk.green(` ${chalk.bold('APPROVED')} (immediate): ${data.reason || 'OK'}\n`))
          process.exit(0)
        }
        if (data.status === 'denied') {
          console.error(chalk.red.bold(`\n PUSH BLOCKED — Security Gate DENIED\n`))
          console.error(chalk.red(` Reason: ${data.reason}\n`))
          process.exit(1)
        }
      } catch (err) {
        // Ralph-manager offline — don't block the push
        console.log(chalk.yellow(` Cannot reach ralph-manager at ${baseUrl}`))
        console.log(chalk.yellow(' Falling back to PASS (offline fallback).\n'))
        process.exit(0)
      }

      // Step 4: Poll for verdict
      console.log(chalk.gray(` Agent reviewing... (timeout: ${timeout}s)`))
      const result = await pollForVerdict(baseUrl, gateId, timeout)

      if (result.status === 'approved') {
        console.log(chalk.green.bold(`\n APPROVED: ${result.reason || 'No issues found'}`))
        // An approval can still carry advisory findings — surface as warnings.
        if (result.findings?.length) {
          for (const f of result.findings) {
            console.log(chalk.yellow(` ⚠ ${f}`))
          }
        }
        console.log()
        process.exit(0)
      }

      // The ONLY blocking path: an explicit DENY verdict from the agent.
      if (result.status === 'denied') {
        console.error(chalk.red.bold(`\n ════════════════════════════════════════════════════════════`))
        console.error(chalk.red.bold(` PUSH BLOCKED — Security Gate DENIED`))
        console.error(chalk.red.bold(` ════════════════════════════════════════════════════════════`))
        console.error(chalk.red(`\n Reason: ${result.reason}`))
        if (result.findings?.length) {
          console.error(chalk.red(`\n Findings:`))
          for (const f of result.findings) {
            console.error(chalk.red(` - ${f}`))
          }
        }
        console.error(chalk.yellow(`\n Fix the issues and try again.\n`))
        process.exit(1)
      }

      if (result.status === 'timeout') {
        console.log(chalk.yellow(` Agent did not respond within ${timeout}s.`))
        console.log(chalk.yellow(' Falling back to PASS (timeout fallback).\n'))
        process.exit(0)
      }

      // Unknown status — don't block
      console.log(chalk.yellow(` Unexpected verdict status: ${result.status}`))
      console.log(chalk.yellow(' Falling back to PASS.\n'))
      process.exit(0)

    } catch (err) {
      console.error(chalk.red(`\n ERROR: ${err.message}\n`))
      // Never block on internal errors
      process.exit(0)
    }
  })

// Hand argv to commander, which invokes the action handler above.
program.parse()
@@ -29,7 +29,7 @@ export const meta = {
29
29
  name: 'Config ↔ RLS Alignment',
30
30
  category: 'security',
31
31
  severity: 'critical',
32
- description: 'Verifies that feature config accessLevel matches actual RLS policies on each table. The golden 1:1 check.'
32
+ description: 'Verifies that feature config accessLevel matches actual RLS policies on each table. Uses live DB when available, falls back to migration parsing.'
33
33
  }
34
34
 
35
35
  /**
@@ -390,17 +390,23 @@ function validateRlsClause(clause) {
390
390
  return null
391
391
  }
392
392
 
393
- export async function run(config, projectRoot) {
393
+ export async function run(config, projectRoot, options = {}) {
394
394
  const results = {
395
395
  passed: true,
396
396
  skipped: false,
397
397
  findings: [],
398
398
  summary: { total: 0, critical: 0, high: 0, medium: 0, low: 0 },
399
- details: { tablesChecked: 0, configsFound: 0, alignmentErrors: 0 }
399
+ details: { tablesChecked: 0, configsFound: 0, alignmentErrors: 0, source: 'migrations' }
400
400
  }
401
401
 
402
402
  const featureConfigs = parseFeatureConfigs(projectRoot)
403
- const rlsData = parseMigrations(projectRoot)
403
+
404
+ // Use live DB state if available (passed from rls-live-audit via runner),
405
+ // otherwise fall back to migration file parsing
406
+ const rlsData = options.liveState || parseMigrations(projectRoot)
407
+ if (options.liveState) {
408
+ results.details.source = 'live-db'
409
+ }
404
410
 
405
411
  results.details.configsFound = featureConfigs.size
406
412
 
@@ -1,21 +1,35 @@
1
1
  /**
2
2
  * RLS Live DB Audit — queries pg_policies from the LIVE database
3
3
  *
4
- * The source of truth for RLS policy validation. Migration file parsing can miss:
4
+ * The PRIMARY source of truth for RLS policy validation.
5
+ * When this check runs successfully, migration-based RLS checks
6
+ * (rls-policy-audit, config-rls-alignment) are skipped for current state
7
+ * and only validate unapplied migrations (delta).
8
+ *
9
+ * Migration file parsing can miss:
5
10
  * - Policies applied directly via SQL (not in migration files)
6
11
  * - PL/pgSQL dynamic policies (EXECUTE format)
7
12
  * - Policies overridden by later manual changes
8
13
  *
9
14
  * Requires SUPABASE_URL + SUPABASE_SERVICE_ROLE_KEY in environment.
10
- * Connects via @supabase/supabase-js and calls a lightweight RPC.
11
15
  *
12
16
  * Setup (one-time per project):
13
17
  * CREATE OR REPLACE FUNCTION tetra_rls_audit()
14
- * RETURNS TABLE(tablename text, policyname text, using_clause text, with_check_clause text)
18
+ * RETURNS TABLE(tablename text, policyname text, cmd text, using_clause text, with_check_clause text)
15
19
  * LANGUAGE sql SECURITY DEFINER AS $$
16
- * SELECT tablename::text, policyname::text, qual::text, with_check::text
20
+ * SELECT tablename::text, policyname::text, cmd::text, qual::text, with_check::text
17
21
  * FROM pg_policies WHERE schemaname = 'public';
18
22
  * $$;
23
+ *
24
+ * -- Optional: also return RLS enabled status per table
25
+ * CREATE OR REPLACE FUNCTION tetra_rls_tables()
26
+ * RETURNS TABLE(table_name text, rls_enabled boolean, rls_forced boolean)
27
+ * LANGUAGE sql SECURITY DEFINER AS $$
28
+ * SELECT c.relname::text, c.relrowsecurity, c.relforcerowsecurity
29
+ * FROM pg_class c
30
+ * JOIN pg_namespace n ON n.oid = c.relnamespace
31
+ * WHERE n.nspname = 'public' AND c.relkind = 'r';
32
+ * $$;
19
33
  */
20
34
 
21
35
  export const meta = {
@@ -70,13 +84,83 @@ function validateRlsClause(clause) {
70
84
  return null
71
85
  }
72
86
 
87
/**
 * Fetch data from a Supabase RPC endpoint.
 * Returns null if the RPC doesn't exist or fails.
 * @param {string} supabaseUrl - project base URL (no trailing slash)
 * @param {string} supabaseKey - service-role key used for both auth headers
 * @param {string} rpcName - RPC function name under /rest/v1/rpc/
 * @returns {Promise<unknown|null>} parsed JSON payload, or null on any failure
 */
async function callRpc(supabaseUrl, supabaseKey, rpcName) {
  try {
    const response = await fetch(`${supabaseUrl}/rest/v1/rpc/${rpcName}`, {
      method: 'POST',
      headers: {
        'apikey': supabaseKey,
        'Authorization': `Bearer ${supabaseKey}`,
        'Content-Type': 'application/json',
      },
      body: '{}',
      // Bound the request: without a signal a hung DB connection would stall
      // the entire check run indefinitely. Abort lands in the catch → null.
      signal: AbortSignal.timeout(15_000),
    })
    if (!response.ok) return null
    return await response.json()
  } catch {
    return null
  }
}
108
+
109
/**
 * Build a structured live DB state from raw policy + table data.
 * This is the same format that config-rls-alignment uses from migration parsing,
 * so it can transparently use either source.
 *
 * @param {Array<object>|null|undefined} policies - rows from tetra_rls_audit()
 * @param {Array<object>|null|undefined} tableStatus - rows from tetra_rls_tables(), optional
 * @returns {Map<string, {rlsEnabled: boolean, policies: Array<object>, rpcFunctions: Map, source: string}>}
 */
export function buildLiveState(policies, tableStatus) {
  const tables = new Map()

  // Initialize from table status (if available)
  if (Array.isArray(tableStatus)) {
    for (const t of tableStatus) {
      if (!t?.table_name) continue
      tables.set(t.table_name, {
        rlsEnabled: t.rls_enabled || false,
        policies: [],
        rpcFunctions: new Map(),
        source: 'live-db'
      })
    }
  }

  // Add policies. Guard against a non-array payload (e.g. a PostgREST error
  // object or null from callRpc) — the bare for...of would throw on it.
  const policyRows = Array.isArray(policies) ? policies : []
  for (const policy of policyRows) {
    if (!policy) continue
    const { tablename, policyname, cmd, using_clause, with_check_clause } = policy
    if (!tablename) continue

    if (!tables.has(tablename)) {
      // Table exists in policies but not in table status — it has RLS (otherwise no policies)
      tables.set(tablename, { rlsEnabled: true, policies: [], rpcFunctions: new Map(), source: 'live-db' })
    }

    tables.get(tablename).policies.push({
      name: policyname,
      operation: (cmd || 'ALL').toUpperCase(),
      using: using_clause || '',
      withCheck: with_check_clause || '',
      file: 'LIVE DB'
    })
  }

  return tables
}
154
+
73
155
  export async function run(config, projectRoot) {
74
156
  const results = {
75
157
  passed: true,
76
158
  skipped: false,
77
159
  findings: [],
78
160
  summary: { total: 0, critical: 0, high: 0, medium: 0, low: 0 },
79
- details: { policiesChecked: 0, tablesChecked: 0, violations: 0 }
161
+ details: { policiesChecked: 0, tablesChecked: 0, violations: 0 },
162
+ // Expose live data so runner can pass it to other checks
163
+ _liveData: null
80
164
  }
81
165
 
82
166
  const supabaseUrl = process.env.SUPABASE_URL
@@ -105,7 +189,7 @@ export async function run(config, projectRoot) {
105
189
  const status = response.status
106
190
  if (status === 404) {
107
191
  results.skipped = true
108
- results.skipReason = 'RPC tetra_rls_audit() not found. Create it once:\n\n CREATE OR REPLACE FUNCTION tetra_rls_audit()\n RETURNS TABLE(tablename text, policyname text, using_clause text, with_check_clause text)\n LANGUAGE sql SECURITY DEFINER AS $$\n SELECT tablename::text, policyname::text, qual::text, with_check::text\n FROM pg_policies WHERE schemaname = \'public\';\n $$;'
192
+ results.skipReason = 'RPC tetra_rls_audit() not found. Create it once:\n\n CREATE OR REPLACE FUNCTION tetra_rls_audit()\n RETURNS TABLE(tablename text, policyname text, cmd text, using_clause text, with_check_clause text)\n LANGUAGE sql SECURITY DEFINER AS $$\n SELECT tablename::text, policyname::text, cmd::text, qual::text, with_check::text\n FROM pg_policies WHERE schemaname = \'public\';\n $$;'
109
193
  } else {
110
194
  results.skipped = true
111
195
  results.skipReason = `RPC tetra_rls_audit() failed with status ${status}`
@@ -126,6 +210,13 @@ export async function run(config, projectRoot) {
126
210
  return results
127
211
  }
128
212
 
213
+ // Optionally fetch table-level RLS status
214
+ const tableStatus = await callRpc(supabaseUrl, supabaseKey, 'tetra_rls_tables')
215
+
216
+ // Build structured live state (shared with config-rls-alignment)
217
+ const liveState = buildLiveState(policies, tableStatus)
218
+ results._liveData = { policies, tableStatus, liveState }
219
+
129
220
  const backendOnlyTables = config.supabase?.backendOnlyTables || []
130
221
  const tablesChecked = new Set()
131
222
 
package/lib/runner.js CHANGED
@@ -93,12 +93,12 @@ const ALL_CHECKS = {
93
93
  frontendSupabaseQueries,
94
94
  tetraCoreCompliance,
95
95
  mixedDbUsage,
96
- configRlsAlignment,
97
96
  rpcSecurityMode,
98
97
  systemdbWhitelist,
99
98
  gitignoreValidation,
100
99
  routeConfigAlignment,
101
- rlsLiveAudit
100
+ rlsLiveAudit, // Must run BEFORE config-rls-alignment (provides live DB data)
101
+ configRlsAlignment, // Uses live DB data from rls-live-audit when available
102
102
  ],
103
103
  stability: [
104
104
  huskyHooks,
@@ -162,6 +162,10 @@ export async function runAllChecks(options = {}) {
162
162
  }
163
163
  }
164
164
 
165
+ // Track rls-live-audit result across suites so migration-based RLS checks can be skipped.
166
+ // rls-live-audit runs in 'security' suite, rls-policy-audit runs in 'supabase' suite.
167
+ let rlsLiveData = null
168
+
165
169
  for (const suite of suites) {
166
170
  if (!config.suites[suite]) {
167
171
  continue
@@ -174,11 +178,43 @@ export async function runAllChecks(options = {}) {
174
178
  }
175
179
 
176
180
  for (const check of checks) {
177
- const checkResult = {
178
- id: check.meta.id,
179
- name: check.meta.name,
180
- severity: check.meta.severity,
181
- ...await check.run(config, projectRoot)
181
+ let checkResult
182
+
183
+ // rls-policy-audit is pure migration parsing — skip when live DB is available
184
+ if (rlsLiveData && check.meta.id === 'rls-policy-audit') {
185
+ checkResult = {
186
+ id: check.meta.id,
187
+ name: `${check.meta.name} (skipped — live DB is source of truth)`,
188
+ severity: check.meta.severity,
189
+ passed: true,
190
+ skipped: true,
191
+ skipReason: 'Skipped: rls-live-audit succeeded — live DB is the source of truth for current RLS state.',
192
+ findings: [],
193
+ summary: { total: 0, critical: 0, high: 0, medium: 0, low: 0 }
194
+ }
195
+ }
196
+ // config-rls-alignment: always runs, but uses live DB data when available
197
+ else if (rlsLiveData && check.meta.id === 'config-rls-alignment') {
198
+ checkResult = {
199
+ id: check.meta.id,
200
+ name: `${check.meta.name} (live DB)`,
201
+ severity: check.meta.severity,
202
+ ...await check.run(config, projectRoot, { liveState: rlsLiveData.liveState })
203
+ }
204
+ }
205
+ else {
206
+ checkResult = {
207
+ id: check.meta.id,
208
+ name: check.meta.name,
209
+ severity: check.meta.severity,
210
+ ...await check.run(config, projectRoot)
211
+ }
212
+ }
213
+
214
+ // Capture live data from rls-live-audit for downstream checks
215
+ if (check.meta.id === 'rls-live-audit' && !checkResult.skipped && checkResult._liveData) {
216
+ rlsLiveData = checkResult._liveData
217
+ delete checkResult._liveData // Don't leak internal data into output
182
218
  }
183
219
 
184
220
  results.suites[suite].checks.push(checkResult)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@soulbatical/tetra-dev-toolkit",
3
- "version": "1.18.1",
3
+ "version": "1.19.0",
4
4
  "publishConfig": {
5
5
  "access": "restricted"
6
6
  },
@@ -32,7 +32,8 @@
32
32
  "tetra-check-rls": "./bin/tetra-check-rls.js",
33
33
  "tetra-migration-lint": "./bin/tetra-migration-lint.js",
34
34
  "tetra-db-push": "./bin/tetra-db-push.js",
35
- "tetra-check-peers": "./bin/tetra-check-peers.js"
35
+ "tetra-check-peers": "./bin/tetra-check-peers.js",
36
+ "tetra-security-gate": "./bin/tetra-security-gate.js"
36
37
  },
37
38
  "files": [
38
39
  "bin/",