@soulbatical/tetra-dev-toolkit 1.20.6 → 1.20.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -307,6 +307,203 @@ Projects without `.ralph/@fix_plan.md` don't show the tasks field.
307
307
 
308
308
  ---
309
309
 
310
+ ## Config-Driven E2E Testing (v5.0)
311
+
312
+ Auto-generate CRUD E2E tests from FeatureConfig. Zero boilerplate — add a `testing` section to your feature config and tests are generated automatically.
313
+
314
+ ### How it works
315
+
316
+ 1. Each `FeatureConfig` gets a `testing` section defining create/update bodies and required fields
317
+ 2. A single test file imports all configs and generates list/create/read/update/delete + validation tests
318
+ 3. Read-only resources just set `skip: { create: true, update: true, delete: true }`
319
+
320
+ ### Step 1: Add `testing` to your feature config
321
+
322
+ ```typescript
323
+ // backend/src/features/adcampaigns/config/adcampaigns.config.ts
324
+ export const adcampaignsFeatureConfig: FeatureConfig = {
325
+ tableName: 'ad_campaigns',
326
+ // ... existing config ...
327
+
328
+ testing: {
329
+ restBasePath: '/api/admin/ad-campaigns',
330
+ createBody: { name: 'E2E Test Campaign $timestamp', platform: 'facebook' },
331
+ updateBody: { name: 'E2E Updated Campaign' },
332
+ updateMethod: 'PATCH', // or 'PUT' (default)
333
+ createStatus: 200, // expected status (default: 200, use 201 if your API returns that)
334
+ requiredFields: ['name', 'platform'], // generates validation tests
335
+ }
336
+ };
337
+ ```
338
+
339
+ For read-only resources:
340
+ ```typescript
341
+ testing: {
342
+ restBasePath: '/api/admin/style-library',
343
+ createBody: {},
344
+ updateBody: {},
345
+ skip: { create: true, update: true, delete: true },
346
+ }
347
+ ```
348
+
349
+ ### Step 2: Create the test file
350
+
351
+ ```typescript
352
+ // tests/e2e/02-crud-resources.test.ts
353
+ import { describe, it, expect, beforeAll } from 'vitest';
354
+ import { get, post, del, api } from './helpers/api-client';
355
+ import { getTestContext, type TestContext } from './helpers/test-users';
356
+
357
+ import { adcampaignsFeatureConfig } from '../../backend/src/features/adcampaigns/config/adcampaigns.config';
358
+ import { projectsFeatureConfig } from '../../backend/src/features/projects/config/projects.config';
359
+ // ... import all configs ...
360
+
361
+ const allConfigs = [adcampaignsFeatureConfig, projectsFeatureConfig /* ... */];
362
+
363
+ let ctx: TestContext;
364
+ beforeAll(async () => { ctx = await getTestContext(); }, 60000);
365
+
366
+ for (const config of allConfigs) {
367
+ const testing = config.testing;
368
+ if (!testing) continue;
369
+ const basePath = testing.restBasePath;
370
+
371
+ describe(`CRUD: ${config.tableName}`, () => {
372
+ let createdId: string;
373
+
374
+ it(`GET ${basePath} returns 200`, async () => { /* ... */ });
375
+ if (!testing.skip?.create) {
376
+ it(`POST ${basePath} creates`, async () => { /* ... */ });
377
+ it(`GET ${basePath}/:id reads`, async () => { /* ... */ });
378
+ it(`${testing.updateMethod} ${basePath}/:id updates`, async () => { /* ... */ });
379
+ it(`DELETE ${basePath}/:id deletes`, async () => { /* ... */ });
380
+ }
381
+ });
382
+ }
383
+ ```
384
+
385
+ See `agentrook/tests/e2e/02-crud-resources.test.ts` for a full working example.
386
+
387
+ ### Step 3: Run
388
+
389
+ ```bash
390
+ npx vitest run --config vitest.config.e2e.ts
391
+ ```
392
+
393
+ ### TestingConfig reference
394
+
395
+ | Field | Type | Default | Description |
396
+ |-------|------|---------|-------------|
397
+ | `restBasePath` | string | `config.restBasePath` | API endpoint path |
398
+ | `createBody` | object | required | POST body for create |
399
+ | `updateBody` | object | required | PUT/PATCH body for update |
400
+ | `updateMethod` | `'PUT' \| 'PATCH'` | `'PUT'` | HTTP method for updates |
401
+ | `createStatus` | number | `200` | Expected create response status |
402
+ | `requiredFields` | string[] | `[]` | Fields to test validation for |
403
+ | `skip.create` | boolean | `false` | Skip create/read/update/delete |
404
+ | `skip.read` | boolean | `false` | Skip read test |
405
+ | `skip.update` | boolean | `false` | Skip update test |
406
+ | `skip.delete` | boolean | `false` | Skip delete test |
407
+
408
+ ### Dynamic values
409
+
410
+ Use `$timestamp` and `$random` in string values for unique test data:
411
+
412
+ ```typescript
413
+ createBody: { name: 'Test $timestamp', key: 'e2e-$random' }
414
+ // Resolves to: { name: 'Test 1710782400000', key: 'e2e-x8k2m9' }
415
+ ```
416
+
417
+ ### Runtime utility
418
+
419
+ For advanced usage, import the generator directly:
420
+
421
+ ```typescript
422
+ import { generateCrudTests } from '@soulbatical/tetra-dev-toolkit/testing';
423
+ ```
424
+
425
+ ### Layer 2: Permission + Business Flow Tests
426
+
427
+ Config-driven tests cover **breadth** (every endpoint). Layer 2 tests cover **depth** — these must be written manually per project.
428
+
429
+ #### Permission tests (`03-permissions.test.ts`)
430
+
431
+ Test role-based access control with two test accounts (admin + member):
432
+
433
+ ```typescript
434
+ // Tests per admin endpoint:
435
+ // 1. Admin GET → 200 ✅
436
+ // 2. Member GET → 403 ✅ (catches missing middleware!)
437
+ // 3. No token → 401 ✅
438
+ // Plus: superadmin routes blocked for admin, write ops blocked for member
439
+ ```
440
+
441
+ This catches real security bugs. Agentrook findings:
442
+ - `/api/admin/ad-creatives` — missing `requireOrganizationAdmin`
443
+ - `/api/admin/style-library` — missing `requireOrganizationAdmin`
444
+
445
+ #### Business flow tests (`04-business-flows.test.ts`)
446
+
447
+ Multi-step scenarios that span resources:
448
+
449
+ ```typescript
450
+ // Flow: Project → Campaign (cross-resource linking)
451
+ // Flow: Brand Profile → Asset (parent-child)
452
+ // Flow: Status lifecycle (draft → running → paused → completed)
453
+ // Flow: Duplicate key handling
454
+ // Flow: Empty update → 400
455
+ // Flow: Nonexistent resource → 404
456
+ ```
457
+
458
+ ### Test user setup
459
+
460
+ **CRITICAL**: Never create Supabase Auth users via raw SQL — the password hash is incompatible with GoTrue. Use the GoTrue Admin API:
461
+
462
+ ```bash
463
+ SERVICE_KEY=$(doppler secrets get SUPABASE_SERVICE_ROLE_KEY --plain --project X --config Y)
464
+
465
+ curl -X POST "https://PROJECT.supabase.co/auth/v1/admin/users" \
466
+ -H "Authorization: Bearer $SERVICE_KEY" \
467
+ -H "apikey: $SERVICE_KEY" \
468
+ -H "Content-Type: application/json" \
469
+ -d '{"email":"e2e-admin@project.com","password":"TestPass1234","email_confirm":true}'
470
+ ```
471
+
472
+ Then add `users_public` + `organization_members` records via SQL.
473
+
474
+ ### 3-layer test pyramid
475
+
476
+ ```
477
+ Layer 3: Browser smoke (gstack) — 5-10 user flows
478
+ Layer 2: Permission + business flow — catches security bugs, logic errors
479
+ Layer 1: Config-driven CRUD — catches broken endpoints, missing fields
480
+ ```
481
+
482
+ | Layer | Auto-generated? | What it catches |
483
+ |-------|----------------|-----------------|
484
+ | 1 | Yes (from config) | Broken routes, 500s, missing fields |
485
+ | 2 | No (manual) | Permission bugs, business logic, edge cases |
486
+ | 3 | No (gstack) | Frontend integration, UX issues |
487
+
488
+ ### File structure
489
+
490
+ ```
491
+ tests/e2e/
492
+ helpers/api-client.ts — HTTP client with X-Test-Key
493
+ helpers/test-users.ts — Login + token caching
494
+ global-setup.ts — Login test accounts
495
+ 02-crud-resources.test.ts — Layer 1: config-driven
496
+ 03-permissions.test.ts — Layer 2: role access
497
+ 04-business-flows.test.ts — Layer 2: multi-step
498
+ tests/e2e-auth/ — Separate (invalidates tokens)
499
+ 01-auth.test.ts — Login, refresh, logout
500
+ 07-security.test.ts — Auth walls
501
+ ```
502
+
503
+ **Important**: Auth/security tests must run in a separate directory — they invalidate shared tokens.
504
+
505
+ ---
506
+
310
507
  ## Changelog
311
508
 
312
509
  ### 1.16.0
@@ -39,3 +39,4 @@ export { check as checkSast } from './sast.js'
39
39
  export { check as checkBundleSize } from './bundle-size.js'
40
40
  export { check as checkSecurityLayers } from './security-layers.js'
41
41
  export { check as checkSmokeReadiness } from './smoke-readiness.js'
42
+ export { check as checkReleasePipeline } from './release-pipeline.js'
@@ -0,0 +1,132 @@
1
+ /**
2
+ * Health Check: Release Pipeline Readiness
3
+ *
4
+ * Checks if a project has proper release pipeline infrastructure:
5
+ * - GitHub Actions post-deploy workflow with deploy webhook notify step
6
+ * - DEPLOY_WEBHOOK_SECRET referenced in workflows
7
+ * - Release-related API routes or VinciFox integration
8
+ * - Professional git workflow (main + develop branches)
9
+ *
10
+ * Score: up to 5 points
11
+ */
12
+
13
+ import { existsSync, readFileSync, readdirSync } from 'fs'
14
+ import { join } from 'path'
15
+ import { createCheck } from './types.js'
16
+
17
+ export async function check(projectPath) {
18
+ const result = createCheck('release-pipeline', 5, {
19
+ hasDeployWebhook: false,
20
+ hasWebhookSecret: false,
21
+ hasProfessionalBranches: false,
22
+ hasDeployMigrations: false,
23
+ hasPostDeployNotify: false,
24
+ })
25
+
26
+ // 1. Check for post-deploy workflow with webhook notification (+1 point)
27
+ const workflowDir = join(projectPath, '.github/workflows')
28
+ if (existsSync(workflowDir)) {
29
+ try {
30
+ const workflows = readdirSync(workflowDir).filter(f => f.endsWith('.yml') || f.endsWith('.yaml'))
31
+ for (const wf of workflows) {
32
+ const content = readFileSync(join(workflowDir, wf), 'utf-8')
33
+ const lower = content.toLowerCase()
34
+
35
+ // Check for deploy webhook notification step
36
+ if (lower.includes('deploy-webhook') || lower.includes('deploy_webhook')) {
37
+ result.details.hasDeployWebhook = true
38
+ result.details.webhookWorkflow = wf
39
+ result.score += 1
40
+ }
41
+
42
+ // Check for DEPLOY_WEBHOOK_SECRET usage (+1 point)
43
+ if (content.includes('DEPLOY_WEBHOOK_SECRET')) {
44
+ result.details.hasWebhookSecret = true
45
+ result.score += 1
46
+ }
47
+
48
+ // Check for deploy-migrations workflow (+1 point)
49
+ if (lower.includes('supabase') && (lower.includes('db push') || lower.includes('migration'))) {
50
+ result.details.hasDeployMigrations = true
51
+ result.details.migrationWorkflow = wf
52
+ result.score += 1
53
+ }
54
+
55
+ // Check for post-deploy notification (curl to webhook endpoint)
56
+ if (lower.includes('notify-deploy') || (lower.includes('curl') && lower.includes('deploy-webhook'))) {
57
+ result.details.hasPostDeployNotify = true
58
+ }
59
+ }
60
+ } catch { /* skip */ }
61
+ }
62
+
63
+ // 2. Check for professional git workflow (main + develop branches) (+1 point)
64
+ // Look in CLAUDE.md or .git config for branch references
65
+ const claudeMd = join(projectPath, 'CLAUDE.md')
66
+ if (existsSync(claudeMd)) {
67
+ try {
68
+ const content = readFileSync(claudeMd, 'utf-8')
69
+ if (content.includes('develop') && (content.includes('main') || content.includes('master'))) {
70
+ if (content.includes('feat/') || content.includes('feat/*') || content.includes('PR')) {
71
+ result.details.hasProfessionalBranches = true
72
+ result.score += 1
73
+ }
74
+ }
75
+ } catch { /* skip */ }
76
+ }
77
+
78
+ // 3. Check for github_repo configuration in project config (+1 point)
79
+ // Projects using the release pipeline should have github_repo configured somewhere
80
+ const configIndicators = [
81
+ 'github_repo',
82
+ 'github_pr_number',
83
+ 'mergeReleasePR',
84
+ 'deploy-webhook',
85
+ ]
86
+
87
+ let hasReleaseIntegration = false
88
+ const srcDirs = ['src', 'backend/src']
89
+ for (const srcDir of srcDirs) {
90
+ const srcPath = join(projectPath, srcDir)
91
+ if (!existsSync(srcPath)) continue
92
+
93
+ try {
94
+ const scanForRelease = (dir, depth = 0) => {
95
+ if (depth > 3 || hasReleaseIntegration) return
96
+ for (const entry of readdirSync(dir, { withFileTypes: true })) {
97
+ if (entry.name === 'node_modules' || entry.name.startsWith('.')) continue
98
+ const fullPath = join(dir, entry.name)
99
+ if (entry.isDirectory()) {
100
+ scanForRelease(fullPath, depth + 1)
101
+ } else if (entry.isFile() && /\.(ts|js)$/.test(entry.name)) {
102
+ try {
103
+ const content = readFileSync(fullPath, 'utf-8')
104
+ if (configIndicators.some(i => content.includes(i))) {
105
+ hasReleaseIntegration = true
106
+ }
107
+ } catch { /* skip */ }
108
+ }
109
+ }
110
+ }
111
+ scanForRelease(srcPath)
112
+ } catch { /* skip */ }
113
+ }
114
+
115
+ if (hasReleaseIntegration) {
116
+ result.details.hasReleaseIntegration = true
117
+ // Already counted in other scores
118
+ }
119
+
120
+ // Cap and set status
121
+ result.score = Math.min(result.score, result.maxScore)
122
+
123
+ if (result.score === 0) {
124
+ result.status = 'warning'
125
+ result.details.message = 'No release pipeline infrastructure — add deploy webhook workflow and DEPLOY_WEBHOOK_SECRET'
126
+ } else if (result.score < 3) {
127
+ result.status = 'warning'
128
+ result.details.message = 'Incomplete release pipeline — missing webhook notification or migration workflow'
129
+ }
130
+
131
+ return result
132
+ }
@@ -3,12 +3,19 @@
3
3
  *
4
4
  * Builds table state from migrations, checks RLS enabled + policies exist.
5
5
  * Score: 3 (full) with deductions for missing RLS, missing policies, permissive policies
6
+ *
7
+ * Handles both unquoted (public.tablename) and quoted ("public"."tablename") identifiers
8
+ * as produced by pg_dump / supabase db dump.
6
9
  */
7
10
 
8
11
  import { existsSync, readFileSync, readdirSync } from 'fs'
9
12
  import { join } from 'path'
10
13
  import { createCheck } from './types.js'
11
14
 
15
+ // Matches: public.tablename, "public"."tablename", "public".tablename, tablename, "tablename"
16
+ // Captures the actual table name (without quotes) in group 1
17
+ const TABLE_ID = `(?:"public"\\.|public\\.)?\"?(\\w+)\"?`
18
+
12
19
  export async function check(projectPath) {
13
20
  const result = createCheck('rls-audit', 3, {
14
21
  tablesFound: 0,
@@ -50,39 +57,50 @@ export async function check(projectPath) {
50
57
  let m
51
58
 
52
59
  // CREATE TABLE
53
- const createRe = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?/gi
60
+ const createRe = new RegExp(`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?${TABLE_ID}`, 'gi')
54
61
  while ((m = createRe.exec(content)) !== null) {
55
62
  const name = m[1].toLowerCase()
63
+ if (name === 'public') continue // skip schema name parsed as table
56
64
  if (!tables.has(name)) tables.set(name, { rlsEnabled: false, policies: [], file: fileName })
57
65
  }
58
66
 
59
67
  // DROP TABLE
60
- const dropRe = /DROP\s+TABLE\s+(?:IF\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?/gi
61
- while ((m = dropRe.exec(content)) !== null) tables.delete(m[1].toLowerCase())
68
+ const dropRe = new RegExp(`DROP\\s+TABLE\\s+(?:IF\\s+EXISTS\\s+)?${TABLE_ID}`, 'gi')
69
+ while ((m = dropRe.exec(content)) !== null) {
70
+ const name = m[1].toLowerCase()
71
+ if (name !== 'public') tables.delete(name)
72
+ }
62
73
 
63
74
  // RENAME TABLE
64
- const renameRe = /ALTER\s+TABLE\s+(?:IF\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?\s+RENAME\s+TO\s+["']?(\w+)["']?/gi
75
+ const renameRe = new RegExp(`ALTER\\s+TABLE\\s+(?:IF\\s+EXISTS\\s+)?${TABLE_ID}\\s+RENAME\\s+TO\\s+\"?(\\w+)\"?`, 'gi')
65
76
  while ((m = renameRe.exec(content)) !== null) {
66
- const data = tables.get(m[1].toLowerCase())
67
- if (data) { tables.delete(m[1].toLowerCase()); tables.set(m[2].toLowerCase(), data) }
77
+ const oldName = m[1].toLowerCase()
78
+ const newName = m[2].toLowerCase()
79
+ if (oldName === 'public' || newName === 'public') continue
80
+ const data = tables.get(oldName)
81
+ if (data) { tables.delete(oldName); tables.set(newName, data) }
68
82
  }
69
83
 
70
84
  // ENABLE/DISABLE RLS
71
- const enableRe = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+ENABLE\s+ROW\s+LEVEL\s+SECURITY/gi
85
+ const enableRe = new RegExp(`ALTER\\s+TABLE\\s+${TABLE_ID}\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'gi')
72
86
  while ((m = enableRe.exec(content)) !== null) {
73
87
  const name = m[1].toLowerCase()
88
+ if (name === 'public') continue
74
89
  if (tables.has(name)) tables.get(name).rlsEnabled = true
75
90
  else tables.set(name, { rlsEnabled: true, policies: [], file: fileName })
76
91
  }
77
- const disableRe = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+DISABLE\s+ROW\s+LEVEL\s+SECURITY/gi
92
+ const disableRe = new RegExp(`ALTER\\s+TABLE\\s+${TABLE_ID}\\s+DISABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'gi')
78
93
  while ((m = disableRe.exec(content)) !== null) {
79
- if (tables.has(m[1].toLowerCase())) tables.get(m[1].toLowerCase()).rlsEnabled = false
94
+ const name = m[1].toLowerCase()
95
+ if (name === 'public') continue
96
+ if (tables.has(name)) tables.get(name).rlsEnabled = false
80
97
  }
81
98
 
82
99
  // CREATE POLICY
83
- const policyRe = /CREATE\s+POLICY\s+"([^"]+)"\s+ON\s+(?:public\.)?["']?(\w+)["']?\s+([\s\S]*?)(?:;|CREATE\s|ALTER\s|DROP\s|GRANT\s)/gi
100
+ const policyRe = new RegExp(`CREATE\\s+POLICY\\s+"([^"]+)"\\s+ON\\s+${TABLE_ID}\\s+([\\s\\S]*?)(?:;|CREATE\\s|ALTER\\s|DROP\\s|GRANT\\s)`, 'gi')
84
101
  while ((m = policyRe.exec(content)) !== null) {
85
102
  const tableName = m[2].toLowerCase()
103
+ if (tableName === 'public') continue
86
104
  if (!tables.has(tableName)) tables.set(tableName, { rlsEnabled: false, policies: [], file: fileName })
87
105
  tables.get(tableName).policies.push(m[1])
88
106
  if (/USING\s*\(\s*true\s*\)/i.test(m[3]) || /WITH\s+CHECK\s*\(\s*true\s*\)/i.test(m[3])) {
@@ -91,16 +109,19 @@ export async function check(projectPath) {
91
109
  }
92
110
 
93
111
  // DROP POLICY
94
- const dropPolicyRe = /DROP\s+POLICY\s+(?:IF\s+EXISTS\s+)?"([^"]+)"\s+ON\s+(?:public\.)?["']?(\w+)["']?/gi
112
+ const dropPolicyRe = new RegExp(`DROP\\s+POLICY\\s+(?:IF\\s+EXISTS\\s+)?"([^"]+)"\\s+ON\\s+${TABLE_ID}`, 'gi')
95
113
  while ((m = dropPolicyRe.exec(content)) !== null) {
96
- const t = tables.get(m[2].toLowerCase())
114
+ const name = m[2].toLowerCase()
115
+ if (name === 'public') continue
116
+ const t = tables.get(name)
97
117
  if (t) t.policies = t.policies.filter(p => p !== m[1])
98
118
  }
99
119
 
100
- // SECURITY DEFINER
101
- const secDefRe = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?["']?(\w+)["']?[\s\S]*?SECURITY\s+DEFINER/gi
120
+ // SECURITY DEFINER functions
121
+ const secDefRe = new RegExp(`CREATE\\s+(?:OR\\s+REPLACE\\s+)?FUNCTION\\s+${TABLE_ID}[\\s\\S]*?SECURITY\\s+DEFINER`, 'gi')
102
122
  while ((m = secDefRe.exec(content)) !== null) {
103
- result.details.securityDefinerFunctions.push(m[1])
123
+ const name = m[1].toLowerCase()
124
+ if (name !== 'public') result.details.securityDefinerFunctions.push(name)
104
125
  }
105
126
  }
106
127
 
@@ -35,6 +35,7 @@ import { check as checkSast } from './sast.js'
35
35
  import { check as checkBundleSize } from './bundle-size.js'
36
36
  import { check as checkSecurityLayers } from './security-layers.js'
37
37
  import { check as checkSmokeReadiness } from './smoke-readiness.js'
38
+ import { check as checkReleasePipeline } from './release-pipeline.js'
38
39
  import { calculateHealthStatus } from './types.js'
39
40
 
40
41
  /**
@@ -78,7 +79,8 @@ export async function scanProjectHealth(projectPath, projectName, options = {})
78
79
  checkSast(projectPath),
79
80
  checkBundleSize(projectPath),
80
81
  checkSecurityLayers(projectPath),
81
- checkSmokeReadiness(projectPath)
82
+ checkSmokeReadiness(projectPath),
83
+ checkReleasePipeline(projectPath)
82
84
  ])
83
85
 
84
86
  const totalScore = checks.reduce((sum, c) => sum + c.score, 0)
@@ -5,7 +5,7 @@
5
5
  */
6
6
 
7
7
  /**
8
- * @typedef {'plugins'|'mcps'|'git'|'tests'|'secrets'|'quality-toolkit'|'naming-conventions'|'rls-audit'|'rpc-param-mismatch'|'typescript-strict'|'prettier'|'coverage-thresholds'|'eslint-security'|'dependency-cruiser'|'conventional-commits'|'knip'|'dependency-automation'|'license-audit'|'sast'|'bundle-size'|'gitignore'|'repo-visibility'|'vincifox-widget'|'stella-integration'|'claude-md'|'doppler-compliance'|'infrastructure-yml'|'file-organization'|'security-layers'|'smoke-readiness'} HealthCheckType
8
+ * @typedef {'plugins'|'mcps'|'git'|'tests'|'secrets'|'quality-toolkit'|'naming-conventions'|'rls-audit'|'rpc-param-mismatch'|'typescript-strict'|'prettier'|'coverage-thresholds'|'eslint-security'|'dependency-cruiser'|'conventional-commits'|'knip'|'dependency-automation'|'license-audit'|'sast'|'bundle-size'|'gitignore'|'repo-visibility'|'vincifox-widget'|'stella-integration'|'claude-md'|'doppler-compliance'|'infrastructure-yml'|'file-organization'|'security-layers'|'smoke-readiness'|'release-pipeline'} HealthCheckType
9
9
  *
10
10
  * @typedef {'ok'|'warning'|'error'} HealthStatus
11
11
  *
@@ -89,6 +89,11 @@ function parseFeatureConfigs(projectRoot) {
89
89
  /**
90
90
  * Parse all SQL migrations to extract RLS info per table
91
91
  */
92
+ // SQL table identifier pattern — matches both quoted and unquoted forms:
93
+ // public.tablename, "public"."tablename", tablename, "tablename"
94
+ // Capture group returns the clean table name without quotes.
95
+ const TABLE_ID = `(?:"?public"?\\.)?"?(\\w+)"?`
96
+
92
97
  function parseMigrations(projectRoot) {
93
98
  const tables = new Map() // tableName → { rlsEnabled, policies: [], rpcFunctions: Map }
94
99
 
@@ -115,7 +120,7 @@ function parseMigrations(projectRoot) {
115
120
  const relFile = file.replace(projectRoot + '/', '')
116
121
 
117
122
  // Handle DROP POLICY — removes policy from earlier migration
118
- const dropPolicyMatches = content.matchAll(/DROP\s+POLICY\s+(?:IF\s+EXISTS\s+)?"([^"]+)"\s+ON\s+(?:public\.)?(\w+)/gi)
123
+ const dropPolicyMatches = content.matchAll(new RegExp(`DROP\\s+POLICY\\s+(?:IF\\s+EXISTS\\s+)?"([^"]+)"\\s+ON\\s+${TABLE_ID}`, 'gi'))
119
124
  for (const m of dropPolicyMatches) {
120
125
  const policyName = m[1]
121
126
  const table = m[2]
@@ -125,7 +130,9 @@ function parseMigrations(projectRoot) {
125
130
  }
126
131
 
127
132
  // Handle ALTER FUNCTION ... SECURITY INVOKER/DEFINER — overrides earlier CREATE FUNCTION
128
- const alterFuncMatches = content.matchAll(/ALTER\s+FUNCTION\s+(?:public\.)?(\w+)(?:\s*\([^)]*\))?\s+SECURITY\s+(INVOKER|DEFINER)/gi)
133
+ // Uses same quoted-identifier pattern as TABLE_ID but for function names
134
+ const alterFuncMatches = content.matchAll(new RegExp(`ALTER\\s+FUNCTION\\s+${TABLE_ID}(?:\\s*\\([^)]*\\))?\\s+SECURITY\\s+(INVOKER|DEFINER)`, 'gi'))
135
+ // Note: in the combined regex, capture group 1 = funcName (TABLE_ID's single group), group 2 = securityMode from (INVOKER|DEFINER)
129
136
  for (const m of alterFuncMatches) {
130
137
  const funcName = m[1]
131
138
  const securityMode = m[2].toUpperCase()
@@ -139,14 +146,14 @@ function parseMigrations(projectRoot) {
139
146
  }
140
147
 
141
148
  // Handle DISABLE RLS — overrides earlier ENABLE
142
- const disableRlsMatches = content.matchAll(/ALTER\s+TABLE\s+(?:public\.)?(\w+)\s+DISABLE\s+ROW\s+LEVEL\s+SECURITY/gi)
149
+ const disableRlsMatches = content.matchAll(new RegExp(`ALTER\\s+TABLE\\s+${TABLE_ID}\\s+DISABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'gi'))
143
150
  for (const m of disableRlsMatches) {
144
151
  const table = m[1]
145
152
  if (tables.has(table)) tables.get(table).rlsEnabled = false
146
153
  }
147
154
 
148
155
  // Find RLS enables
149
- const rlsMatches = content.matchAll(/ALTER\s+TABLE\s+(?:public\.)?(\w+)\s+ENABLE\s+ROW\s+LEVEL\s+SECURITY/gi)
156
+ const rlsMatches = content.matchAll(new RegExp(`ALTER\\s+TABLE\\s+${TABLE_ID}\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'gi'))
150
157
  for (const m of rlsMatches) {
151
158
  const table = m[1]
152
159
  if (!tables.has(table)) tables.set(table, { rlsEnabled: true, policies: [], rpcFunctions: new Map() })
@@ -154,7 +161,7 @@ function parseMigrations(projectRoot) {
154
161
  }
155
162
 
156
163
  // Find policies
157
- const policyRegex = /CREATE\s+POLICY\s+"?([^"]+)"?\s+ON\s+(?:public\.)?(\w+)\s*([\s\S]*?)(?=CREATE\s+POLICY|CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION|ALTER\s+TABLE|CREATE\s+(?:UNIQUE\s+)?INDEX|GRANT|$)/gi
164
+ const policyRegex = new RegExp(`CREATE\\s+POLICY\\s+"?([^"]+)"?\\s+ON\\s+${TABLE_ID}\\s*([\\s\\S]*?)(?=CREATE\\s+POLICY|CREATE\\s+(?:OR\\s+REPLACE\\s+)?FUNCTION|ALTER\\s+TABLE|CREATE\\s+(?:UNIQUE\\s+)?INDEX|GRANT|$)`, 'gi')
158
165
  for (const m of content.matchAll(policyRegex)) {
159
166
  const policyName = m[1]
160
167
  const table = m[2]
@@ -237,7 +244,7 @@ function parseMigrations(projectRoot) {
237
244
  }
238
245
 
239
246
  // Find RPC functions and their security mode
240
- const funcRegex = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?(\w+)\s*\(([\s\S]*?)\)\s*RETURNS\s+([\s\S]*?)(?:LANGUAGE|AS)/gi
247
+ const funcRegex = new RegExp(`CREATE\\s+(?:OR\\s+REPLACE\\s+)?FUNCTION\\s+${TABLE_ID}\\s*\\(([\\s\\S]*?)\\)\\s*RETURNS\\s+([\\s\\S]*?)(?:LANGUAGE|AS)`, 'gi')
241
248
  for (const m of content.matchAll(funcRegex)) {
242
249
  const funcName = m[1]
243
250
  const funcBody = content.substring(m.index, m.index + 2000)
@@ -274,25 +281,28 @@ function findFiles(projectRoot, pattern) {
274
281
  * Check if a USING clause enforces org isolation
275
282
  */
276
283
  function isOrgIsolation(using) {
277
- const hasOrgFunction = /auth_org_id\(\)|auth_admin_organizations\(\)/i.test(using)
284
+ const u = using.replace(/"/g, '')
285
+ const hasOrgFunction = /auth_org_id\(\)|auth_admin_organizations\(\)/i.test(u)
278
286
  // Legacy pattern: auth.jwt() -> 'app_metadata' ->> 'organization_id'
279
- const hasLegacyJwtOrg = /auth\.jwt\(\)\s*->\s*'app_metadata'\s*->>\s*'organization_id'/i.test(using)
280
- return (hasOrgFunction || hasLegacyJwtOrg) && /organization_id/i.test(using)
287
+ const hasLegacyJwtOrg = /auth\.jwt\(\)\s*->\s*'app_metadata'\s*->>\s*'organization_id'/i.test(u)
288
+ return (hasOrgFunction || hasLegacyJwtOrg) && /organization_id/i.test(u)
281
289
  }
282
290
 
283
291
  /**
284
292
  * Check if a USING clause enforces user isolation
285
293
  */
286
294
  function isUserIsolation(using) {
287
- return /auth\.uid\(\)/i.test(using) &&
288
- /user_id|created_by|owner_id/i.test(using)
295
+ const u = using.replace(/"/g, '')
296
+ return /auth\.uid\(\)/i.test(u) &&
297
+ /user_id|created_by|owner_id/i.test(u)
289
298
  }
290
299
 
291
300
  /**
292
301
  * Check if a USING clause is wide open
293
302
  */
294
303
  function isWideOpen(using) {
295
- return using.trim() === 'true' || using.trim() === '(true)'
304
+ const u = using.replace(/"/g, '').trim()
305
+ return u === 'true' || u === '(true)'
296
306
  }
297
307
 
298
308
  /**
@@ -376,15 +386,19 @@ const BANNED_RLS_PATTERNS = [
376
386
  function validateRlsClause(clause) {
377
387
  if (!clause || !clause.trim()) return null
378
388
 
389
+ // Strip pg_dump quoted identifiers — "public"."auth"."uid"() → public.auth.uid()
390
+ // This normalizes the clause so whitelist patterns match both quoted and unquoted forms.
391
+ const normalized = clause.replace(/"/g, '')
392
+
379
393
  // First: check for explicitly banned patterns (these are always wrong)
380
394
  for (const { pattern, label } of BANNED_RLS_PATTERNS) {
381
- if (pattern.test(clause)) return label
395
+ if (pattern.test(normalized)) return label
382
396
  }
383
397
 
384
398
  // Second: verify clause contains at least one allowed pattern
385
- const hasAllowedPattern = ALLOWED_RLS_PATTERNS.some(({ pattern }) => pattern.test(clause))
399
+ const hasAllowedPattern = ALLOWED_RLS_PATTERNS.some(({ pattern }) => pattern.test(normalized))
386
400
  if (!hasAllowedPattern) {
387
- return `Unrecognized RLS clause: "${clause.substring(0, 150)}". Only whitelisted patterns are allowed (org/user isolation, role gates, data filters, subqueries). See ALLOWED_RLS_PATTERNS in config-rls-alignment.js.`
401
+ return `Unrecognized RLS clause: "${normalized.substring(0, 150)}". Only whitelisted patterns are allowed (org/user isolation, role gates, data filters, subqueries). See ALLOWED_RLS_PATTERNS in config-rls-alignment.js.`
388
402
  }
389
403
 
390
404
  return null
@@ -6,6 +6,9 @@
6
6
  * 2. Every table with RLS has at least basic policies defined
7
7
  * 3. No overly permissive policies (USING (true) / WITH CHECK (true))
8
8
  * 4. SECURITY DEFINER functions are flagged for review
9
+ *
10
+ * Handles both unquoted (public.tablename) and quoted ("public"."tablename") identifiers
11
+ * as produced by pg_dump / supabase db dump.
9
12
  */
10
13
 
11
14
  import { readFileSync, existsSync, readdirSync } from 'fs'
@@ -19,6 +22,10 @@ export const meta = {
19
22
  description: 'Verifies RLS is enabled on all tables and policies are properly configured'
20
23
  }
21
24
 
25
+ // Matches: public.tablename, "public"."tablename", "public".tablename, tablename, "tablename"
26
+ // Captures the actual table name (without quotes) in group 1
27
+ const TABLE_ID = `(?:"public"\\.|public\\.)?\"?(\\w+)\"?`
28
+
22
29
  export async function run(config, projectRoot) {
23
30
  const results = {
24
31
  passed: true,
@@ -74,26 +81,29 @@ export async function run(config, projectRoot) {
74
81
  const fileName = filePath.split('/').pop()
75
82
 
76
83
  // Track CREATE TABLE
77
- const createTableRe = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?/gi
84
+ const createTableRe = new RegExp(`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?${TABLE_ID}`, 'gi')
78
85
  let match
79
86
  while ((match = createTableRe.exec(content)) !== null) {
80
87
  const name = match[1].toLowerCase()
88
+ if (name === 'public') continue
81
89
  if (!tables.has(name)) {
82
90
  tables.set(name, { rlsEnabled: false, policies: [], file: fileName })
83
91
  }
84
92
  }
85
93
 
86
94
  // Track DROP TABLE (remove from tracking)
87
- const dropTableRe = /DROP\s+TABLE\s+(?:IF\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?/gi
95
+ const dropTableRe = new RegExp(`DROP\\s+TABLE\\s+(?:IF\\s+EXISTS\\s+)?${TABLE_ID}`, 'gi')
88
96
  while ((match = dropTableRe.exec(content)) !== null) {
89
- tables.delete(match[1].toLowerCase())
97
+ const name = match[1].toLowerCase()
98
+ if (name !== 'public') tables.delete(name)
90
99
  }
91
100
 
92
101
  // Track ALTER TABLE ... RENAME TO
93
- const renameRe = /ALTER\s+TABLE\s+(?:IF\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?\s+RENAME\s+TO\s+["']?(\w+)["']?/gi
102
+ const renameRe = new RegExp(`ALTER\\s+TABLE\\s+(?:IF\\s+EXISTS\\s+)?${TABLE_ID}\\s+RENAME\\s+TO\\s+\"?(\\w+)\"?`, 'gi')
94
103
  while ((match = renameRe.exec(content)) !== null) {
95
104
  const oldName = match[1].toLowerCase()
96
105
  const newName = match[2].toLowerCase()
106
+ if (oldName === 'public' || newName === 'public') continue
97
107
  if (tables.has(oldName)) {
98
108
  const data = tables.get(oldName)
99
109
  tables.delete(oldName)
@@ -102,9 +112,10 @@ export async function run(config, projectRoot) {
102
112
  }
103
113
 
104
114
  // Track ENABLE ROW LEVEL SECURITY
105
- const rlsEnableRe = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+ENABLE\s+ROW\s+LEVEL\s+SECURITY/gi
115
+ const rlsEnableRe = new RegExp(`ALTER\\s+TABLE\\s+${TABLE_ID}\\s+ENABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'gi')
106
116
  while ((match = rlsEnableRe.exec(content)) !== null) {
107
117
  const name = match[1].toLowerCase()
118
+ if (name === 'public') continue
108
119
  if (tables.has(name)) {
109
120
  tables.get(name).rlsEnabled = true
110
121
  } else {
@@ -114,21 +125,23 @@ export async function run(config, projectRoot) {
114
125
  }
115
126
 
116
127
  // Track DISABLE ROW LEVEL SECURITY
117
- const rlsDisableRe = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+DISABLE\s+ROW\s+LEVEL\s+SECURITY/gi
128
+ const rlsDisableRe = new RegExp(`ALTER\\s+TABLE\\s+${TABLE_ID}\\s+DISABLE\\s+ROW\\s+LEVEL\\s+SECURITY`, 'gi')
118
129
  while ((match = rlsDisableRe.exec(content)) !== null) {
119
130
  const name = match[1].toLowerCase()
131
+ if (name === 'public') continue
120
132
  if (tables.has(name)) {
121
133
  tables.get(name).rlsEnabled = false
122
134
  }
123
135
  }
124
136
 
125
- // Track CREATE POLICY
126
- const policyRe = /CREATE\s+POLICY\s+["']?(\w+)["']?\s+ON\s+(?:public\.)?["']?(\w+)["']?\s+([\s\S]*?)(?:;|CREATE\s|ALTER\s|DROP\s|GRANT\s)/gi
137
+ // Track CREATE POLICY — policy names can contain spaces, so use quoted capture
138
+ const policyRe = new RegExp(`CREATE\\s+POLICY\\s+\"([^\"]+)\"\\s+ON\\s+${TABLE_ID}\\s+([\\s\\S]*?)(?:;|CREATE\\s|ALTER\\s|DROP\\s|GRANT\\s)`, 'gi')
127
139
  while ((match = policyRe.exec(content)) !== null) {
128
140
  const policyName = match[1]
129
141
  const tableName = match[2].toLowerCase()
130
142
  const policyBody = match[3]
131
143
 
144
+ if (tableName === 'public') continue
132
145
  if (!tables.has(tableName)) {
133
146
  tables.set(tableName, { rlsEnabled: false, policies: [], file: fileName })
134
147
  }
@@ -146,10 +159,11 @@ export async function run(config, projectRoot) {
146
159
  }
147
160
 
148
161
  // Track DROP POLICY
149
- const dropPolicyRe = /DROP\s+POLICY\s+(?:IF\s+EXISTS\s+)?["']?(\w+)["']?\s+ON\s+(?:public\.)?["']?(\w+)["']?/gi
162
+ const dropPolicyRe = new RegExp(`DROP\\s+POLICY\\s+(?:IF\\s+EXISTS\\s+)?\"([^\"]+)\"\\s+ON\\s+${TABLE_ID}`, 'gi')
150
163
  while ((match = dropPolicyRe.exec(content)) !== null) {
151
164
  const policyName = match[1]
152
165
  const tableName = match[2].toLowerCase()
166
+ if (tableName === 'public') continue
153
167
  if (tables.has(tableName)) {
154
168
  const table = tables.get(tableName)
155
169
  table.policies = table.policies.filter(p => p.name !== policyName)
@@ -157,9 +171,10 @@ export async function run(config, projectRoot) {
157
171
  }
158
172
 
159
173
  // Track SECURITY DEFINER functions
160
- const secDefRe = /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+(?:public\.)?["']?(\w+)["']?[\s\S]*?SECURITY\s+DEFINER/gi
174
+ const secDefRe = new RegExp(`CREATE\\s+(?:OR\\s+REPLACE\\s+)?FUNCTION\\s+${TABLE_ID}[\\s\\S]*?SECURITY\\s+DEFINER`, 'gi')
161
175
  while ((match = secDefRe.exec(content)) !== null) {
162
- securityDefinerFns.push({ name: match[1], file: fileName })
176
+ const name = match[1].toLowerCase()
177
+ if (name !== 'public') securityDefinerFns.push({ name, file: fileName })
163
178
  }
164
179
  }
165
180
 
package/lib/index.js CHANGED
@@ -15,3 +15,6 @@ export * as checks from './checks/index.js'
15
15
 
16
16
  // Health scanner (project ecosystem checks)
17
17
  export { scanProjectHealth, calculateHealthStatus } from './checks/health/index.js'
18
+
19
+ // Testing utilities (config-driven CRUD test generation)
20
+ export { generateCrudTests } from './testing/index.js'
@@ -0,0 +1,153 @@
1
+ /**
2
+ * Tetra CRUD Test Generator (v5.0)
3
+ *
4
+ * Config-driven E2E test generation. Reads `testing` from FeatureConfig
5
+ * and generates list/create/read/update/delete tests + validation tests.
6
+ *
7
+ * Usage in a project's test file:
8
+ *
9
+ * import { generateCrudTests } from '@soulbatical/tetra-dev-toolkit/testing';
10
+ * import { getTestContext } from './helpers/test-users';
11
+ * import { adcampaignsFeatureConfig } from '../../backend/src/features/adcampaigns/config/adcampaigns.config';
12
+ *
13
+ * const configs = [adcampaignsFeatureConfig, projectsFeatureConfig, ...];
14
 + * generateCrudTests({ describe, it, expect, beforeAll }, configs, () => getTestContext(), { get, post, del, api });
15
+ */
16
+
17
/**
 * Generate CRUD E2E tests for an array of feature configs.
 *
 * For each config with a `testing` section, registers a `CRUD: <name>`
 * describe block containing list/create/read/update/delete tests, plus a
 * `Validation: <name>` block with one missing-required-field test per entry
 * in `testing.requiredFields`. Configs without `testing` are ignored, and
 * configs lacking a REST base path are skipped with a console warning.
 * `testing.skip` disables individual phases; read/update/delete are also
 * skipped when create is, since they depend on the created record's id.
 *
 * Both `createBody` and `updateBody` are passed through placeholder
 * resolution, so `$timestamp` / `$random` work in either.
 *
 * @param {object} vitest - { describe, it, expect, beforeAll } from vitest
 * @param {Array<object>} configs - Feature configs with a `testing` section
 * @param {() => Promise<object>} getContext - Async factory returning a test
 *   context that exposes `admin.token`
 * @param {object} httpClient - { get, post, del, api } test HTTP helpers;
 *   `api(path, { method, body, token })` handles arbitrary verbs
 */
export function generateCrudTests(vitest, configs, getContext, httpClient) {
  const { describe, it, expect, beforeAll } = vitest
  const { get, post, del, api } = httpClient

  let ctx

  // Resolve the shared test context (auth tokens, etc.) once for all suites.
  beforeAll(async () => {
    ctx = await getContext()
  }, 60000)

  for (const config of configs) {
    const testing = config.testing
    if (!testing) continue

    const basePath = testing.restBasePath || config.restBasePath
    if (!basePath) {
      console.warn(`⚠️ Skipping ${config.tableName}: no restBasePath in testing or config`)
      continue
    }

    const name = config.display?.page?.entityName || config.tableName || basePath
    const skip = testing.skip || {}

    describe(`CRUD: ${name}`, () => {
      // Shared across tests in this suite: id of the record created below.
      let createdId

      // ── LIST ──
      it(`GET ${basePath} returns 200`, async () => {
        const res = await get(basePath, ctx.admin.token)
        expect(res.status).toBe(200)
        expect(res.data.success).toBe(true)
      })

      // ── CREATE ──
      if (!skip.create) {
        it(`POST ${basePath} creates a record`, async () => {
          const body = resolveCreateBody(testing.createBody)
          const res = await post(basePath, body, ctx.admin.token)
          const expectedStatus = testing.createStatus || 200
          expect(res.status).toBe(expectedStatus)
          expect(res.data.success).toBe(true)
          expect(res.data.data).toBeTruthy()
          createdId = res.data.data.id
          expect(createdId).toBeTruthy()
        })
      }

      // ── READ ──
      if (!skip.create && !skip.read) {
        it(`GET ${basePath}/:id reads the record`, async () => {
          if (!createdId) return // create failed; its own assertion already reported
          const res = await get(`${basePath}/${createdId}`, ctx.admin.token)
          expect(res.status).toBe(200)
          expect(res.data.success).toBe(true)
          expect(res.data.data).toBeTruthy()
        })
      }

      // ── UPDATE ──
      if (!skip.create && !skip.update) {
        it(`${testing.updateMethod || 'PUT'} ${basePath}/:id updates the record`, async () => {
          if (!createdId) return
          const method = testing.updateMethod || 'PUT'
          // Resolve placeholders here too, mirroring the create path.
          const res = await api(`${basePath}/${createdId}`, {
            method,
            body: resolveCreateBody(testing.updateBody),
            token: ctx.admin.token,
          })
          expect(res.status).toBe(200)
          expect(res.data.success).toBe(true)
        })
      }

      // ── DELETE ──
      if (!skip.create && !skip.delete) {
        it(`DELETE ${basePath}/:id deletes the record`, async () => {
          if (!createdId) return
          const res = await del(`${basePath}/${createdId}`, ctx.admin.token)
          expect(res.status).toBe(200)
          expect(res.data.success).toBe(true)
        })

        it(`GET ${basePath}/:id returns 404 after delete`, async () => {
          if (!createdId) return
          const res = await get(`${basePath}/${createdId}`, ctx.admin.token)
          // Some endpoints return 200 with null data instead of 404
          if (res.status === 200) {
            expect(res.data.data).toBeFalsy()
          } else {
            expect(res.status).toBe(404)
          }
        })
      }
    })

    // ── VALIDATION: required fields ──
    if (!skip.create && testing.requiredFields?.length) {
      describe(`Validation: ${name}`, () => {
        for (const field of testing.requiredFields) {
          it(`POST ${basePath} rejects missing ${field}`, async () => {
            const body = resolveCreateBody(testing.createBody)
            delete body[field]
            const res = await post(basePath, body, ctx.admin.token)
            // Accept either common validation status code.
            expect([400, 422]).toContain(res.status)
          })
        }
      })
    }
  }
}

/**
 * Resolve dynamic placeholders in a request-body template.
 *
 * Returns a shallow copy of `body` in which every top-level string value has
 * `$timestamp` replaced by the current epoch milliseconds and `$random`
 * replaced by a short random alphanumeric token (all occurrences). Non-string
 * values and nested objects pass through untouched; the input object is never
 * mutated.
 *
 * @param {object} body - Body template (may be undefined → returns {})
 * @returns {object} New object with placeholders resolved
 */
function resolveCreateBody(body) {
  const resolved = { ...body }
  const ts = Date.now()

  for (const [key, value] of Object.entries(resolved)) {
    if (typeof value === 'string') {
      resolved[key] = value
        .replace(/\$timestamp/g, String(ts))
        // toString(36) already yields a string; no extra String() wrap needed
        .replace(/\$random/g, Math.random().toString(36).slice(2, 8))
    }
  }

  return resolved
}
@@ -0,0 +1,7 @@
1
+ /**
2
+ * Tetra Testing Utilities
3
+ *
4
+ * Config-driven test generation from FeatureConfig.
5
+ */
6
+
7
+ export { generateCrudTests } from './generateCrudTests.js'
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@soulbatical/tetra-dev-toolkit",
3
- "version": "1.20.6",
3
+ "version": "1.20.7",
4
4
  "publishConfig": {
5
5
  "access": "restricted"
6
6
  },
@@ -24,6 +24,10 @@
24
24
  ],
25
25
  "type": "module",
26
26
  "main": "lib/index.js",
27
+ "exports": {
28
+ ".": "./lib/index.js",
29
+ "./testing": "./lib/testing/index.js"
30
+ },
27
31
  "bin": {
28
32
  "tetra-audit": "./bin/tetra-audit.js",
29
33
  "tetra-init": "./bin/tetra-init.js",