@soulbatical/tetra-dev-toolkit 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/README.md +312 -0
  2. package/bin/vca-audit.js +90 -0
  3. package/bin/vca-dev-token.js +39 -0
  4. package/bin/vca-setup.js +227 -0
  5. package/lib/checks/codeQuality/api-response-format.js +268 -0
  6. package/lib/checks/health/claude-md.js +114 -0
  7. package/lib/checks/health/doppler-compliance.js +174 -0
  8. package/lib/checks/health/git.js +61 -0
  9. package/lib/checks/health/gitignore.js +83 -0
  10. package/lib/checks/health/index.js +26 -0
  11. package/lib/checks/health/infrastructure-yml.js +87 -0
  12. package/lib/checks/health/mcps.js +57 -0
  13. package/lib/checks/health/naming-conventions.js +302 -0
  14. package/lib/checks/health/plugins.js +38 -0
  15. package/lib/checks/health/quality-toolkit.js +97 -0
  16. package/lib/checks/health/repo-visibility.js +70 -0
  17. package/lib/checks/health/rls-audit.js +130 -0
  18. package/lib/checks/health/scanner.js +68 -0
  19. package/lib/checks/health/secrets.js +80 -0
  20. package/lib/checks/health/stella-integration.js +124 -0
  21. package/lib/checks/health/tests.js +140 -0
  22. package/lib/checks/health/types.js +77 -0
  23. package/lib/checks/health/vincifox-widget.js +47 -0
  24. package/lib/checks/index.js +17 -0
  25. package/lib/checks/security/deprecated-supabase-admin.js +96 -0
  26. package/lib/checks/security/gitignore-validation.js +211 -0
  27. package/lib/checks/security/hardcoded-secrets.js +95 -0
  28. package/lib/checks/security/service-key-exposure.js +107 -0
  29. package/lib/checks/security/systemdb-whitelist.js +138 -0
  30. package/lib/checks/stability/ci-pipeline.js +143 -0
  31. package/lib/checks/stability/husky-hooks.js +117 -0
  32. package/lib/checks/stability/npm-audit.js +140 -0
  33. package/lib/checks/supabase/rls-policy-audit.js +261 -0
  34. package/lib/commands/dev-token.js +342 -0
  35. package/lib/config.js +213 -0
  36. package/lib/index.js +17 -0
  37. package/lib/reporters/terminal.js +134 -0
  38. package/lib/runner.js +179 -0
  39. package/package.json +72 -0
@@ -0,0 +1,261 @@
1
+ /**
2
+ * RLS Policy Audit
3
+ *
4
+ * Scans SQL migrations to verify:
5
+ * 1. Every table has ROW LEVEL SECURITY enabled
6
+ * 2. Every table with RLS has at least basic policies defined
7
+ * 3. No overly permissive policies (USING (true) / WITH CHECK (true))
8
+ * 4. SECURITY DEFINER functions are flagged for review
9
+ */
10
+
11
+ import { readFileSync, existsSync, readdirSync } from 'fs'
12
+ import { join } from 'path'
13
+
14
/**
 * Check metadata consumed by the runner and reporters.
 * `id` must stay stable — it is used to reference this check in config.
 */
export const meta = {
  id: 'rls-policy-audit',                 // stable check identifier
  name: 'Row Level Security Audit',       // human-readable display name
  category: 'supabase',                   // reporting bucket
  severity: 'critical',                   // worst-case severity this check can raise
  description: 'Verifies RLS is enabled on all tables and policies are properly configured',
}
21
+
22
/**
 * Scan SQL migration files and audit Row Level Security posture.
 *
 * Replays migrations in filename order (CREATE/DROP/RENAME TABLE,
 * ENABLE/DISABLE RLS, CREATE/DROP POLICY) to build the *final* schema
 * state, then reports:
 *   - critical: table without RLS enabled
 *   - high:     RLS enabled but zero policies (all access blocked)
 *   - medium:   USING(true)/WITH CHECK(true) policies; SECURITY DEFINER fns
 *   - low:      no explicit SELECT (or ALL) policy
 *
 * @param {object} config - toolkit config; reads config.paths.migrations
 *   (string[] of migration dirs) and config.supabase.publicTables
 *   (tables intentionally left without RLS).
 * @param {string} projectRoot - absolute path of the project root.
 * @returns {Promise<object>} { passed, findings, summary, details } —
 *   plus { skipped, skipReason } when no migrations are found.
 */
export async function run(config, projectRoot) {
  const results = {
    passed: true,
    findings: [],
    summary: { total: 0, critical: 0, high: 0, medium: 0, low: 0 },
    details: {
      tablesFound: 0,
      tablesWithRls: 0,
      tablesWithPolicies: 0,
      permissivePolicies: 0,
      securityDefinerFunctions: 0
    }
  }

  // Migration directories: configured paths plus the common backend layout
  const migrationPaths = config.paths?.migrations || ['supabase/migrations', 'migrations']
  const allPaths = [...migrationPaths, 'backend/supabase/migrations']

  const sqlFiles = []
  for (const relPath of allPaths) {
    const dir = join(projectRoot, relPath)
    if (!existsSync(dir)) continue
    try {
      // Sort so migrations replay in chronological (filename) order
      const files = readdirSync(dir).filter(f => f.endsWith('.sql')).sort()
      for (const f of files) {
        sqlFiles.push(join(dir, f))
      }
    } catch {
      // unreadable directory — skip it
    }
  }

  if (sqlFiles.length === 0) {
    results.skipped = true
    results.skipReason = 'No SQL migration files found'
    return results
  }

  // Parse all migrations to build final state
  const tables = new Map() // tableName -> { rlsEnabled, policies: [], file }
  const securityDefinerFns = [] // { name, file }
  const publicTables = config.supabase?.publicTables || []

  for (const filePath of sqlFiles) {
    let content
    try {
      content = readFileSync(filePath, 'utf-8')
    } catch { continue }

    // Separator-agnostic basename so findings are attributed correctly on
    // Windows too (join() produces backslashes there)
    const fileName = filePath.split(/[\\/]/).pop()

    // Track CREATE TABLE
    const createTableRe = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?/gi
    let match
    while ((match = createTableRe.exec(content)) !== null) {
      const name = match[1].toLowerCase()
      if (!tables.has(name)) {
        tables.set(name, { rlsEnabled: false, policies: [], file: fileName })
      }
    }

    // Track DROP TABLE (remove from tracking)
    const dropTableRe = /DROP\s+TABLE\s+(?:IF\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?/gi
    while ((match = dropTableRe.exec(content)) !== null) {
      tables.delete(match[1].toLowerCase())
    }

    // Track ALTER TABLE ... RENAME TO — carry accumulated state to the new name
    const renameRe = /ALTER\s+TABLE\s+(?:IF\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?\s+RENAME\s+TO\s+["']?(\w+)["']?/gi
    while ((match = renameRe.exec(content)) !== null) {
      const oldName = match[1].toLowerCase()
      const newName = match[2].toLowerCase()
      if (tables.has(oldName)) {
        const data = tables.get(oldName)
        tables.delete(oldName)
        tables.set(newName, data)
      }
    }

    // Track ENABLE ROW LEVEL SECURITY
    const rlsEnableRe = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+ENABLE\s+ROW\s+LEVEL\s+SECURITY/gi
    while ((match = rlsEnableRe.exec(content)) !== null) {
      const name = match[1].toLowerCase()
      if (tables.has(name)) {
        tables.get(name).rlsEnabled = true
      } else {
        // Table created elsewhere or before our scan — track it anyway
        tables.set(name, { rlsEnabled: true, policies: [], file: fileName })
      }
    }

    // Track DISABLE ROW LEVEL SECURITY
    const rlsDisableRe = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+DISABLE\s+ROW\s+LEVEL\s+SECURITY/gi
    while ((match = rlsDisableRe.exec(content)) !== null) {
      const name = match[1].toLowerCase()
      if (tables.has(name)) {
        tables.get(name).rlsEnabled = false
      }
    }

    // Track CREATE POLICY. The terminator is a LOOKAHEAD: the old pattern
    // consumed the next statement's "CREATE"/"ALTER"/... keyword, which made
    // the scanner miss back-to-back DDL statements. It also accepts
    // end-of-file so a trailing policy without ';' is still counted.
    const policyRe = /CREATE\s+POLICY\s+["']?(\w+)["']?\s+ON\s+(?:public\.)?["']?(\w+)["']?\s+([\s\S]*?)(?=;|\bCREATE\b|\bALTER\b|\bDROP\b|\bGRANT\b|$)/gi
    while ((match = policyRe.exec(content)) !== null) {
      const policyName = match[1]
      const tableName = match[2].toLowerCase()
      const policyBody = match[3]

      if (!tables.has(tableName)) {
        tables.set(tableName, { rlsEnabled: false, policies: [], file: fileName })
      }

      const forOp = policyBody.match(/FOR\s+(SELECT|INSERT|UPDATE|DELETE|ALL)/i)
      const operation = forOp ? forOp[1].toUpperCase() : 'ALL' // Postgres defaults to FOR ALL
      // USING(true)/WITH CHECK(true) grants unconditional access
      const isPermissive = /USING\s*\(\s*true\s*\)/i.test(policyBody) || /WITH\s+CHECK\s*\(\s*true\s*\)/i.test(policyBody)

      tables.get(tableName).policies.push({
        name: policyName,
        operation,
        permissive: isPermissive,
        file: fileName
      })
    }

    // Track DROP POLICY
    const dropPolicyRe = /DROP\s+POLICY\s+(?:IF\s+EXISTS\s+)?["']?(\w+)["']?\s+ON\s+(?:public\.)?["']?(\w+)["']?/gi
    while ((match = dropPolicyRe.exec(content)) !== null) {
      const policyName = match[1]
      const tableName = match[2].toLowerCase()
      if (tables.has(tableName)) {
        const table = tables.get(tableName)
        table.policies = table.policies.filter(p => p.name !== policyName)
      }
    }

    // Track SECURITY DEFINER functions. Scan each CREATE FUNCTION statement's
    // own segment: the old single lazy regex could span across function
    // boundaries and attribute a later function's SECURITY DEFINER clause to
    // an earlier, unrelated function (and skip the real offender).
    const fnHeadRe = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?["']?(\w+)["']?/gi
    const fnHeads = []
    while ((match = fnHeadRe.exec(content)) !== null) {
      fnHeads.push({ name: match[1], start: match.index })
    }
    for (let i = 0; i < fnHeads.length; i++) {
      const end = i + 1 < fnHeads.length ? fnHeads[i + 1].start : content.length
      const segment = content.slice(fnHeads[i].start, end)
      if (/SECURITY\s+DEFINER/i.test(segment)) {
        securityDefinerFns.push({ name: fnHeads[i].name, file: fileName })
      }
    }
  }

  // --- Analyze results ---

  results.details.tablesFound = tables.size
  results.details.securityDefinerFunctions = securityDefinerFns.length

  for (const [tableName, info] of tables) {
    // Skip intentionally public tables
    if (publicTables.includes(tableName)) continue

    // Skip internal Supabase/Postgres tables
    if (tableName.startsWith('_') || tableName.startsWith('pg_') || tableName.startsWith('auth_')) continue

    // 1. Check RLS enabled
    if (!info.rlsEnabled) {
      results.passed = false
      results.findings.push({
        file: info.file,
        type: 'RLS not enabled',
        severity: 'critical',
        message: `Table "${tableName}" does not have ROW LEVEL SECURITY enabled`,
        table: tableName
      })
      results.summary.critical++
      results.summary.total++
      continue
    }

    results.details.tablesWithRls++

    // 2. RLS with zero policies blocks ALL access — almost always a mistake
    if (info.policies.length === 0) {
      results.passed = false
      results.findings.push({
        file: info.file,
        type: 'No RLS policies',
        severity: 'high',
        message: `Table "${tableName}" has RLS enabled but NO policies defined (all access blocked)`,
        table: tableName
      })
      results.summary.high++
      results.summary.total++
    } else {
      results.details.tablesWithPolicies++

      // 3. Check for overly permissive policies
      for (const policy of info.policies) {
        if (policy.permissive) {
          results.details.permissivePolicies++
          results.findings.push({
            file: policy.file,
            type: 'Permissive policy',
            severity: 'medium',
            message: `Policy "${policy.name}" on "${tableName}" uses USING(true) or WITH CHECK(true) — bypasses RLS`,
            table: tableName,
            policy: policy.name
          })
          results.summary.medium++
          results.summary.total++
        }
      }

      // 4. Coverage: expect at least a SELECT (or ALL) policy
      const ops = new Set(info.policies.map(p => p.operation))
      if (!ops.has('ALL') && !ops.has('SELECT')) {
        results.findings.push({
          file: info.file,
          type: 'Missing SELECT policy',
          severity: 'low',
          message: `Table "${tableName}" has no explicit SELECT policy`,
          table: tableName
        })
        results.summary.low++
        results.summary.total++
      }
    }
  }

  // 5. Flag SECURITY DEFINER functions for human review
  for (const fn of securityDefinerFns) {
    results.findings.push({
      file: fn.file,
      type: 'SECURITY DEFINER function',
      severity: 'medium',
      message: `Function "${fn.name}" uses SECURITY DEFINER — runs with owner privileges, bypasses RLS`,
      function: fn.name
    })
    results.summary.medium++
    results.summary.total++
  }

  // Medium/low findings warn but do not fail the check
  if (results.summary.critical === 0 && results.summary.high === 0) {
    results.passed = true
  }

  return results
}
@@ -0,0 +1,342 @@
1
+ /**
2
+ * VCA Dev Toolkit - Dev Token Manager
3
+ *
4
+ * Centralized dev token management for all VCA/Supabase projects.
5
+ * Auto-detects project name, finds Supabase config, manages token lifecycle.
6
+ *
7
+ * Replaces per-project generate-dev-token.js scripts.
8
+ */
9
+
10
+ import { createClient } from '@supabase/supabase-js'
11
+ import { readFileSync, writeFileSync, existsSync } from 'fs'
12
+ import { join, basename, dirname, resolve } from 'path'
13
+ import { createInterface } from 'readline'
14
+ import { config as dotenvConfig } from 'dotenv'
15
+ import chalk from 'chalk'
16
+
17
/**
 * Walk upward from `cwd` looking for the project root.
 *
 * Preference order: the first package.json carrying a "workspaces" field
 * (monorepo root) wins; otherwise a package.json sitting next to a .git
 * directory (repo root); otherwise the package.json nearest to `cwd`.
 *
 * @param {string} [cwd] - directory to start from (defaults to process.cwd()).
 * @returns {{dir: string, pkg: object}|null} root dir + parsed package.json, or null.
 */
function findProjectRoot(cwd = process.cwd()) {
  const candidates = []
  let current = resolve(cwd)

  for (;;) {
    const parent = dirname(current)
    if (parent === current) break // reached the filesystem root

    const pkgFile = join(current, 'package.json')
    if (existsSync(pkgFile)) {
      try {
        const parsed = JSON.parse(readFileSync(pkgFile, 'utf8'))
        candidates.push({ dir: current, pkg: parsed })
        // A "workspaces" field marks the monorepo root — stop immediately.
        if (parsed.workspaces) return { dir: current, pkg: parsed }
      } catch { /* unreadable or invalid package.json — keep walking */ }
    }

    if (existsSync(join(current, '.git'))) {
      // Repo root: prefer the package.json found at exactly this level.
      const atRepoRoot = candidates.find(c => c.dir === current)
      if (atRepoRoot) return atRepoRoot
      break
    }

    current = parent
  }

  // Fall back to the package.json closest to cwd, if any.
  return candidates.length > 0 ? candidates[0] : null
}
49
+
50
/**
 * Derive the project slug: the root package.json name with any @scope/
 * prefix stripped, lowercased, and non [a-z0-9-] characters replaced
 * with '-'. Falls back to the cwd directory name when no package.json
 * is found by findProjectRoot().
 */
export function detectProject(cwd = process.cwd()) {
  const slugify = (s) => s.toLowerCase().replace(/[^a-z0-9-]/g, '-')

  const root = findProjectRoot(cwd)
  if (root?.pkg?.name) {
    return slugify(root.pkg.name.replace(/^@[^/]+\//, ''))
  }

  // No usable package.json — use the directory name of cwd
  return slugify(basename(cwd))
}
64
+
65
/**
 * Locate Supabase connection settings by loading env files into
 * process.env (side effect of dotenv with override:true — later files
 * win). Searched, in order, under both cwd and the project root:
 * backend/.env, .env, backend/.env.local, .env.local.
 *
 * @returns {{url: string|undefined, key: string|undefined}}
 */
export function findSupabaseConfig(cwd = process.cwd()) {
  const root = findProjectRoot(cwd)
  const dirs = [...new Set([cwd, root ? root.dir : cwd])]

  const candidates = dirs.flatMap((dir) => [
    join(dir, 'backend', '.env'),
    join(dir, '.env'),
    join(dir, 'backend', '.env.local'),
    join(dir, '.env.local'),
  ])

  for (const file of candidates) {
    if (existsSync(file)) {
      dotenvConfig({ path: file, override: true })
    }
  }

  return {
    url: process.env.SUPABASE_URL || process.env.NEXT_PUBLIC_SUPABASE_URL,
    key: process.env.SUPABASE_ANON_KEY || process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY,
  }
}
95
+
96
/**
 * Detect the backend HTTP port.
 *
 * Order: `.ralph/ports.json` (key `backend_port`) at the project root,
 * then the PORT env var. Returns null when neither yields a usable value.
 *
 * @param {string} [cwd] - directory to resolve the project root from.
 * @returns {number|null}
 */
export function detectPort(cwd = process.cwd()) {
  const root = findProjectRoot(cwd)
  const rootDir = root ? root.dir : cwd

  // Preferred source: .ralph/ports.json at project root
  const portsPath = join(rootDir, '.ralph', 'ports.json')
  if (existsSync(portsPath)) {
    try {
      const ports = JSON.parse(readFileSync(portsPath, 'utf8'))
      if (ports.backend_port) return ports.backend_port
    } catch { /* malformed ports.json — fall through to env */ }
  }

  // Fallback: PORT env var. Explicit radix, and reject non-numeric values
  // instead of returning NaN like the old bare parseInt(PORT) did.
  if (process.env.PORT) {
    const parsed = Number.parseInt(process.env.PORT, 10)
    if (Number.isFinite(parsed)) return parsed
  }

  return null
}
117
+
118
/**
 * Derive a short curl-alias prefix from a project slug.
 * Known projects map explicitly; otherwise use the initials of the
 * hyphen-separated parts, or the first two characters of a single word.
 * e.g. ralph-manager -> rm, sparkbuddy-live -> sb, ad-agent -> aa
 */
function detectAliasPrefix(slug) {
  const known = new Map([
    ['ralph-manager', 'rm'],
    ['sparkbuddy-live', 'sb'],
    ['ad-agent', 'aa'],
  ])
  const explicit = known.get(slug)
  if (explicit) return explicit

  // Initials of hyphenated parts, or first two chars of a single word
  const segments = slug.split('-')
  return segments.length >= 2
    ? segments.map((s) => s[0]).join('')
    : slug.slice(0, 2)
}
137
+
138
/**
 * Decode a JWT's payload WITHOUT verifying its signature.
 * Use only for local inspection (expiry, user id) — never for auth decisions.
 *
 * @param {string} token - compact JWS (header.payload.signature).
 * @returns {object|null} parsed payload, or null when malformed.
 */
export function decodeJWT(token) {
  try {
    const parts = token.split('.')
    if (parts.length !== 3) return null
    // JWT segments are base64url-encoded (RFC 7515); decode explicitly as
    // 'base64url' rather than relying on Node's lenient 'base64' parsing.
    return JSON.parse(Buffer.from(parts[1], 'base64url').toString('utf8'))
  } catch {
    return null
  }
}
148
+
149
/**
 * True when the JWT is expired or will expire within `bufferSeconds`
 * (default 5 minutes). Undecodable tokens or tokens without an `exp`
 * claim are treated as expired.
 */
export function isTokenExpired(token, bufferSeconds = 300) {
  const claims = decodeJWT(token)
  if (!claims?.exp) return true
  const cutoff = Math.floor(Date.now() / 1000) + bufferSeconds
  return claims.exp < cutoff
}
155
+
156
/** Absolute path of the plain-text access-token file for a project slug. */
export function tokenFilePath(slug) {
  return '/tmp/' + slug + '-dev-token'
}
160
+
161
/** Absolute path of the JSON token-cache file for a project slug. */
export function cacheFilePath(slug) {
  return '/tmp/' + slug + '-dev-token-cache.json'
}
165
+
166
/**
 * Read the cached token data for a project.
 * @returns {object|null} parsed cache, or null when missing/unreadable.
 */
export function loadCache(slug) {
  const cachePath = cacheFilePath(slug)
  if (!existsSync(cachePath)) return null
  try {
    return JSON.parse(readFileSync(cachePath, 'utf8'))
  } catch {
    // corrupt or unreadable cache — treat as absent
    return null
  }
}
176
+
177
/**
 * Persist token data: the full JSON cache plus a bare access-token file,
 * both written with owner-only permissions (0600).
 */
export function saveCache(data, slug) {
  const ownerOnly = { mode: 0o600 }
  writeFileSync(cacheFilePath(slug), JSON.stringify(data, null, 2), ownerOnly)
  writeFileSync(tokenFilePath(slug), data.access_token, ownerOnly)
}
182
+
183
/**
 * Authenticate against Supabase with email/password.
 * @returns {{access_token: string, refresh_token: string, user: {id, email}}}
 * @throws {Error} when sign-in fails.
 */
export async function login(url, key, email, password) {
  const client = createClient(url, key)
  const { data, error } = await client.auth.signInWithPassword({ email, password })
  if (error) throw new Error(`Login failed: ${error.message}`)
  const { session, user } = data
  return {
    access_token: session.access_token,
    refresh_token: session.refresh_token,
    user: { id: user.id, email: user.email },
  }
}
194
+
195
/**
 * Exchange a refresh token for a fresh Supabase session.
 * @returns same shape as login().
 * @throws {Error} when the refresh is rejected.
 */
export async function refresh(url, key, refreshToken) {
  const client = createClient(url, key)
  const { data, error } = await client.auth.refreshSession({ refresh_token: refreshToken })
  if (error) throw new Error(`Refresh failed: ${error.message}`)
  const { session, user } = data
  return {
    access_token: session.access_token,
    refresh_token: session.refresh_token,
    user: { id: user.id, email: user.email },
  }
}
206
+
207
/**
 * Prompt for a line of input on stdin.
 * Questions containing "password" are read in raw mode with each typed
 * character echoed as '*'; everything else uses a normal readline prompt.
 *
 * @param {string} question - prompt text; "password" triggers masked input.
 * @returns {Promise<string>} the entered value (without trailing newline).
 */
export function promptInput(question) {
  const rl = createInterface({ input: process.stdin, output: process.stdout })
  return new Promise((resolve) => {
    const wantsMask = question.toLowerCase().includes('password')

    // Raw mode only exists on a TTY stdin. When input is piped (CI,
    // scripted use) process.stdin.setRawMode is undefined and the old code
    // crashed — fall back to a plain readline prompt instead.
    if (!wantsMask || !process.stdin.isTTY) {
      rl.question(question, (answer) => { rl.close(); resolve(answer) })
      return
    }

    process.stdout.write(question)
    process.stdin.setRawMode(true)
    process.stdin.resume()
    process.stdin.setEncoding('utf8')
    let password = ''
    const onData = (char) => {
      if (char === '\n' || char === '\r') {
        // Enter: restore cooked mode and settle the promise
        process.stdin.setRawMode(false)
        process.stdin.removeListener('data', onData)
        console.log('')
        rl.close()
        resolve(password)
      } else if (char === '\u0003') {
        // Ctrl-C: raw mode swallows SIGINT, so exit explicitly
        process.exit()
      } else if (char === '\u007F') {
        // Backspace: drop last char and erase its on-screen asterisk
        if (password.length > 0) {
          password = password.slice(0, -1)
          process.stdout.write('\b \b')
        }
      } else {
        password += char
        process.stdout.write('*')
      }
    }
    process.stdin.on('data', onData)
  })
}
242
+
243
/**
 * Print ready-to-copy curl examples for the saved token: a direct curl
 * invocation plus a shell-alias suggestion, using project-specific paths.
 * When the port is unknown a <PORT> placeholder is shown.
 */
export function printUsage(slug, port) {
  const tokenFile = tokenFilePath(slug)
  const prefix = detectAliasPrefix(slug)
  const host = port ? `http://localhost:${port}` : 'http://localhost:<PORT>'
  const bearer = `Authorization: Bearer $(cat ${tokenFile})`

  console.log('\n' + chalk.dim('-'.repeat(50)))
  console.log(chalk.bold('Usage:\n'))
  console.log(`curl ${host}/api/admin/stats \\`)
  console.log(` -H "${bearer}" | jq .`)
  console.log('')
  console.log(chalk.dim('Or add alias to ~/.zshrc:'))
  console.log(`alias ${prefix}curl='curl -H "${bearer}"'`)
}
257
+
258
/**
 * Main dev-token command handler.
 *
 * Flow: detect project slug, Supabase config, and backend port, then:
 *   --status : print cached-token info and exit (no network calls)
 *   --login  : interactive email/password login, cache the session
 *   default  : reuse a still-valid cached token, else silently refresh,
 *              else tell the user to run --login.
 *
 * Side effects: may prompt on stdin, writes token files under /tmp via
 * saveCache(), and exits the process (code 1 on unrecoverable states,
 * code 0 after --status with a cache present).
 *
 * @param {object}  [opts]
 * @param {boolean} [opts.forceLogin=false]  - force interactive login.
 * @param {boolean} [opts.showStatus=false]  - only report cached token state.
 * @param {string}  [opts.projectOverride]   - use this slug instead of auto-detect.
 */
export async function runDevToken({ forceLogin = false, showStatus = false, projectOverride = null } = {}) {
  const cwd = process.cwd()
  const slug = projectOverride || detectProject(cwd)
  const { url, key } = findSupabaseConfig(cwd)
  const port = detectPort(cwd)
  const tokenFile = tokenFilePath(slug)

  console.log(chalk.bold(`${slug} Dev Token Manager\n`))

  // Supabase credentials are required for every mode
  if (!url || !key) {
    console.error(chalk.red('Missing SUPABASE_URL or SUPABASE_ANON_KEY'))
    console.error(chalk.dim('Searched: backend/.env, .env, .env.local'))
    process.exit(1)
  }

  const cache = loadCache(slug)

  // --status: report cached token state and exit
  if (showStatus) {
    if (!cache) {
      console.log(chalk.red('No cached token.'))
      console.log(chalk.dim(`Run: vca-dev-token --login`))
      process.exit(1)
    }
    const payload = decodeJWT(cache.access_token)
    // bufferSeconds=0: report actual expiry, not the refresh-ahead window
    const expired = isTokenExpired(cache.access_token, 0)
    console.log(`User: ${cache.user.email}`)
    console.log(`Expires: ${new Date(payload.exp * 1000).toLocaleString()}`)
    console.log(`Status: ${expired ? chalk.red('Expired') : chalk.green('Valid')}`)
    console.log(`\nToken: ${tokenFile}`)
    process.exit(0)
  }

  // --login: always prompt (email may come from DEV_AUTH_EMAIL)
  if (forceLogin) {
    const email = process.env.DEV_AUTH_EMAIL || await promptInput('Email: ')
    const password = await promptInput('Password: ')
    console.log(`\nLogging in as ${email}...`)
    try {
      const data = await login(url, key, email, password)
      saveCache(data, slug)
      const payload = decodeJWT(data.access_token)
      console.log(chalk.green('\nLogin successful!'))
      console.log(` User: ${data.user.email}`)
      console.log(` Expires: ${new Date(payload.exp * 1000).toLocaleString()}`)
      console.log(`\nToken saved to: ${tokenFile}`)
      printUsage(slug, port)
    } catch (error) {
      console.error(chalk.red(`\n${error.message}`))
      process.exit(1)
    }
    return
  }

  // Auto mode: reuse cache when still valid, otherwise try a silent refresh
  if (cache) {
    if (!isTokenExpired(cache.access_token)) {
      const payload = decodeJWT(cache.access_token)
      const mins = Math.round((payload.exp - Date.now() / 1000) / 60)
      console.log(chalk.green(`Using cached token (${mins} min remaining)`) + ` - ${cache.user.email}`)
      console.log(`\nToken: ${tokenFile}`)
      printUsage(slug, port)
      return
    }

    console.log('Token expired, refreshing...')
    try {
      const data = await refresh(url, key, cache.refresh_token)
      saveCache(data, slug)
      console.log(chalk.green('Token refreshed!') + ` Saved to: ${tokenFile}`)
      printUsage(slug, port)
      return
    } catch (e) {
      // Refresh token revoked/expired — fall through to the login hint
      console.log(chalk.yellow(`Refresh failed: ${e.message}`))
      console.log(chalk.dim('Run with --login to re-authenticate'))
    }
  }

  console.log(chalk.red('No valid token.'))
  console.log(chalk.dim(`Run: vca-dev-token --login`))
  process.exit(1)
}