@soulbatical/tetra-dev-toolkit 1.20.11 → 1.20.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/tetra-init-tests.js +185 -66
- package/bin/tetra-init.js +67 -3
- package/bin/tetra-setup.js +152 -30
- package/bin/tetra-setup.js.tmp +0 -0
- package/bin/tetra-test-audit.js +83 -0
- package/lib/audits/test-coverage-audit.js +646 -0
- package/lib/checks/health/index.js +2 -1
- package/lib/checks/health/rpc-param-mismatch.js +21 -0
- package/lib/checks/health/scanner.js +4 -2
- package/lib/checks/health/sentry-monitoring.js +167 -0
- package/lib/checks/health/types.js +1 -1
- package/lib/checks/stability/ci-pipeline.js +21 -6
- package/lib/templates/hooks/doppler-guard.sh +30 -0
- package/lib/templates/hooks/worktree-guard.sh +75 -0
- package/lib/templates/tests/02-crud-resources.test.ts.tmpl +135 -0
- package/lib/templates/tests/03-permissions.test.ts.tmpl +110 -0
- package/lib/templates/tests/04-business-flows.test.ts.tmpl +64 -0
- package/lib/templates/tests/05-security.test.ts.tmpl +82 -0
- package/lib/templates/tests/global-setup.ts.tmpl +73 -0
- package/package.json +3 -2
- package/lib/templates/tests/07-security.test.ts.tmpl +0 -93
|
@@ -0,0 +1,646 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Test Coverage Audit — scans a Tetra project to find test coverage gaps.
|
|
3
|
+
*
|
|
4
|
+
* Discovers FeatureConfigs, routes, frontend pages, and test files,
|
|
5
|
+
* then cross-references to identify what is missing coverage.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { readFileSync, existsSync } from 'fs'
|
|
9
|
+
import { join, basename } from 'path'
|
|
10
|
+
import { glob } from 'glob'
|
|
11
|
+
|
|
12
|
+
// ============================================================================
|
|
13
|
+
// SCANNERS
|
|
14
|
+
// ============================================================================
|
|
15
|
+
|
|
16
|
+
/**
 * Scan all FeatureConfig files and extract tableName + restBasePath.
 *
 * Globs `backend/src/features/<feature>/config/*.config.ts` relative to the
 * project root and pulls the two string properties out with regexes (the
 * files are TypeScript, so they are read as text rather than imported).
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {Promise<Array<{file: string, featureName: string, tableName: string|null, restBasePath: string|null}>>}
 */
export async function scanFeatureConfigs(projectRoot) {
  const configFiles = await glob('backend/src/features/*/config/*.config.ts', { cwd: projectRoot })

  return configFiles.map((relPath) => {
    const source = readFileSync(join(projectRoot, relPath), 'utf-8')

    // Regex extraction: tolerant of either quote style, no TS parsing needed.
    const tableMatch = source.match(/tableName:\s*['"]([^'"]+)['"]/)
    const restMatch = source.match(/restBasePath:\s*['"]([^'"]+)['"]/)

    return {
      file: relPath,
      // Path segment 3 of backend/src/features/<feature>/... is the feature dir.
      featureName: relPath.split('/')[3],
      tableName: tableMatch?.[1] || null,
      restBasePath: restMatch?.[1] || null,
    }
  })
}
|
|
43
|
+
|
|
44
|
+
/**
 * Scan RouteManager to extract registered routes,
 * and fall back to scanning route files for router.get/post/etc patterns.
 *
 * Two independent strategies; their results are concatenated into one array,
 * so a route registered both ways appears twice (with different `source`):
 * - 'RouteManager': route groups parsed out of backend/src/core/RouteManager.ts
 * - 'routeFile': raw router.<method>() calls in backend/src/features/STAR/routes/STAR.ts
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {Promise<Array<object>>} Mixed-shape route records; RouteManager
 *   entries carry {group, prefix, path, fullPath, source}, route-file entries
 *   carry {method, subPath, featureName, routeType, file, source}.
 */
export async function scanRoutes(projectRoot) {
  const routes = []

  // Strategy 1: Parse RouteManager.ts for route groups
  const routeManagerPath = join(projectRoot, 'backend/src/core/RouteManager.ts')
  if (existsSync(routeManagerPath)) {
    const content = readFileSync(routeManagerPath, 'utf-8')

    // Lazy [\s\S]*? spans let a group's name/prefix/routes appear on separate
    // lines. NOTE(review): if a group object lists these keys in a different
    // order, or nests arrays inside `routes: [...]`, the match will be wrong —
    // this assumes the name → prefix → routes layout seen in RouteManager.ts.
    const groupRegex = /name:\s*['"]([^'"]+)['"][\s\S]*?prefix:\s*['"]([^'"]*)['"][\s\S]*?routes:\s*\[([\s\S]*?)\]/g
    let groupMatch
    while ((groupMatch = groupRegex.exec(content)) !== null) {
      const groupName = groupMatch[1]
      const prefix = groupMatch[2]
      const routesBlock = groupMatch[3]

      // Fresh /g regex per group block keeps lastIndex state local.
      const routeRegex = /path:\s*['"]([^'"]+)['"]/g
      let routeMatch
      while ((routeMatch = routeRegex.exec(routesBlock)) !== null) {
        const path = routeMatch[1]
        // '/' means "the group root": use the bare prefix instead of prefix + '/'.
        const fullPath = path === '/' ? prefix : `${prefix}${path}`
        routes.push({
          group: groupName,
          prefix,
          path,
          fullPath,
          source: 'RouteManager',
        })
      }
    }
  }

  // Strategy 2: Scan individual route files for HTTP method registrations
  const routeFilePattern = 'backend/src/features/*/routes/*.ts'
  const routeFiles = await glob(routeFilePattern, { cwd: projectRoot })

  for (const file of routeFiles) {
    const fullPath = join(projectRoot, file)
    const content = readFileSync(fullPath, 'utf-8')
    // Segment 3 of backend/src/features/<feature>/routes/<name>.ts
    const featureName = file.split('/')[3]
    // e.g. 'admin' from routes/admin.ts — used as a coarse route category.
    const routeType = basename(file, '.ts')

    // Matches only literal first-argument paths; template-literal or
    // variable paths are not captured.
    const methodRegex = /router\.(get|post|put|patch|delete)\(\s*['"]([^'"]+)['"]/g
    let methodMatch
    while ((methodMatch = methodRegex.exec(content)) !== null) {
      routes.push({
        method: methodMatch[1].toUpperCase(),
        subPath: methodMatch[2],
        featureName,
        routeType,
        file,
        source: 'routeFile',
      })
    }
  }

  return routes
}
|
|
105
|
+
|
|
106
|
+
/**
 * Scan frontend pages by finding all Next.js `page.tsx` files.
 *
 * Covers the `frontend`, `frontend-user`, and `frontend-sparkbuddy` apps and
 * converts each file path into the URL route it serves: the `<app>/src/app`
 * prefix and trailing `/page.tsx` are stripped, and route groups such as
 * `(dashboard)` are removed (they do not appear in URLs).
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {Promise<Array<{file: string, routePath: string}>>}
 */
export async function scanFrontendPages(projectRoot) {
  const frontendPatterns = [
    'frontend/src/app/**/page.tsx',
    'frontend-user/src/app/**/page.tsx',
    'frontend-sparkbuddy/src/app/**/page.tsx',
  ]
  const files = []
  for (const pattern of frontendPatterns) {
    const found = await glob(pattern, { cwd: projectRoot })
    files.push(...found)
  }

  const pages = []
  for (const file of files) {
    // BUG FIX: previously only the literal 'frontend/src/app' prefix was
    // stripped, so pages from frontend-user/ and frontend-sparkbuddy/ kept
    // their whole file path as the "route" and could never match a tested
    // page. Strip whichever app prefix the file actually has.
    let routePath = file
      .replace(/^frontend(?:-[a-z0-9]+)?\/src\/app/, '')
      .replace('/page.tsx', '') || '/'

    // Remove route groups like (dashboard), (marketing), (editor), (superadmin)
    routePath = routePath.replace(/\/\([^)]+\)/g, '')

    if (!routePath.startsWith('/')) routePath = '/' + routePath
    if (routePath === '') routePath = '/'

    pages.push({
      file,
      routePath,
    })
  }

  return pages
}
|
|
141
|
+
|
|
142
|
+
/**
 * Scan all test files and extract what endpoints/pages they reference.
 *
 * For every test/spec/fixture file found under the configured glob patterns,
 * this collects:
 * - apiEndpoints: literal '/api/<scope>/...' strings appearing in the file
 * - pageRoutes: page.goto(...) targets plus `path: '...'` fixture entries
 * - hasAuthTests / hasCrudTests: heuristic regexes over the file content
 * - describeNames: first-argument strings of describe(...) blocks
 * - rawContent: the full file text, kept so crossReference() can do
 *   substring "mentioned in tests" checks later.
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {Promise<Array<object>>} One record per unique, existing test file.
 */
export async function scanTestFiles(projectRoot) {
  const patterns = [
    'tests/e2e/**/*.test.ts',
    'tests/**/*.test.ts',
    'frontend/e2e/**/*.spec.ts',
    'frontend-user/tests/**/*.spec.ts',
    'frontend-user/tests/**/*.test.ts',
    'frontend-sparkbuddy/tests/**/*.spec.ts',
    'backend/tests/**/*.test.ts',
    'frontend-user/tests/fixtures/**/*.ts',
    'frontend/e2e/fixtures.ts',
  ]

  const allFiles = []
  for (const pattern of patterns) {
    const files = await glob(pattern, { cwd: projectRoot })
    allFiles.push(...files)
  }

  // Patterns overlap (e.g. tests/e2e/** is inside tests/**): dedupe.
  const uniqueFiles = [...new Set(allFiles)]

  const testFiles = []
  for (const file of uniqueFiles) {
    const fullPath = join(projectRoot, file)
    if (!existsSync(fullPath)) continue
    const content = readFileSync(fullPath, 'utf-8')

    // Extract API endpoint references
    const apiRefs = new Set()
    const apiRegex = /['"`]\/api\/(admin|public|user|superadmin|internal)\/([^'"`\s?#]+)['"`]/g
    let apiMatch
    while ((apiMatch = apiRegex.exec(content)) !== null) {
      const endpoint = `/api/${apiMatch[1]}/${apiMatch[2]}`
      // Drop '/${id}'-style interpolated segments so the base path remains.
      apiRefs.add(endpoint.replace(/\/\$\{[^}]+\}/g, ''))
    }

    // Extract page.goto references (plain string literals; the [^'"`$]
    // class excludes template interpolation, handled separately below)
    const pageRefs = new Set()
    const gotoRegex = /page\.goto\(\s*['"`]([^'"`$]+)['"`]/g
    let gotoMatch
    while ((gotoMatch = gotoRegex.exec(content)) !== null) {
      pageRefs.add(gotoMatch[1])
    }

    // Template literal page.goto: strip ${...} parts, keep the static base path.
    const gotoTemplateRegex = /page\.goto\(\s*`([^`]+)`/g
    let gotoTemplateMatch
    while ((gotoTemplateMatch = gotoTemplateRegex.exec(content)) !== null) {
      const basePath = gotoTemplateMatch[1].replace(/\$\{[^}]+\}/g, '').replace(/\/+$/, '')
      if (basePath) pageRefs.add(basePath)
    }

    // Extract path definitions from fixture files (e.g. { path: '/nl/user' })
    // NOTE(review): this regex runs on every file, not just fixtures, so any
    // `path: '...'` literal in a test is also treated as a visited page.
    const pathDefRegex2 = /path:\s*['"]([^'"]+)['"]/g
    let pathDefMatch2
    while ((pathDefMatch2 = pathDefRegex2.exec(content)) !== null) {
      pageRefs.add(pathDefMatch2[1])
    }

    // Detect auth wall tests: any assertion against HTTP 401.
    const hasAuthTests = /(?:toBe|toEqual|status)\s*\(\s*401\s*\)/.test(content) ||
      /expect\(.*status.*\).*401/.test(content) ||
      /\.status\)\.toBe\(401\)/.test(content)

    // Detect CRUD tests: a mutating HTTP call plus at least one expect().
    const hasCrudTests = /\.(post|put|patch|delete)\(/i.test(content) &&
      /expect\(/i.test(content)

    // Extract describe block names
    const describeNames = []
    const describeRegex = /describe\(\s*['"]([^'"]+)['"]/g
    let describeMatch
    while ((describeMatch = describeRegex.exec(content)) !== null) {
      describeNames.push(describeMatch[1])
    }

    testFiles.push({
      file,
      // NOTE(review): only 'frontend/e2e' paths are typed 'frontend-e2e';
      // frontend-user/ and frontend-sparkbuddy/ specs fall into
      // 'backend-e2e' — confirm that classification is intended.
      type: file.includes('frontend/e2e') ? 'frontend-e2e' : 'backend-e2e',
      apiEndpoints: [...apiRefs],
      pageRoutes: [...pageRefs],
      hasAuthTests,
      hasCrudTests,
      describeNames,
      rawContent: content,
    })
  }

  return testFiles
}
|
|
235
|
+
|
|
236
|
+
/**
 * Scan CI pipeline configuration.
 *
 * Inspects the first existing GitHub Actions workflow file out of a fixed
 * candidate list and derives boolean capability flags from its content.
 * If no workflow mentions smoke/post-deploy testing, dedicated post-deploy
 * workflow files are checked as a fallback.
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {{ciExists: boolean, ciFile: string|null, hasApiTestJob: boolean,
 *   hasFrontendTestJob: boolean, hasPostDeploySmoke: boolean,
 *   hasCoverageReporting: boolean, hasSecurityScanning: boolean}}
 */
export function scanCIPipeline(projectRoot) {
  const checks = {
    ciExists: false,
    ciFile: null,
    hasApiTestJob: false,
    hasFrontendTestJob: false,
    hasPostDeploySmoke: false,
    hasCoverageReporting: false,
    hasSecurityScanning: false,
  }

  const workflowCandidates = [
    '.github/workflows/ci.yml',
    '.github/workflows/ci.yaml',
    '.github/workflows/test.yml',
    '.github/workflows/test.yaml',
  ]

  // Only the FIRST existing workflow file is inspected, by design.
  const ciFile = workflowCandidates.find((p) => existsSync(join(projectRoot, p)))
  if (ciFile) {
    checks.ciExists = true
    checks.ciFile = ciFile

    const workflow = readFileSync(join(projectRoot, ciFile), 'utf-8')
    const mentionsAny = (regexes) => regexes.some((re) => re.test(workflow))

    checks.hasApiTestJob = mentionsAny([/api[_-]?test/i, /e2e[_-]?test/i, /vitest.*e2e/i])
    checks.hasFrontendTestJob = mentionsAny([/playwright/i, /cypress/i, /frontend.*test/i])
    checks.hasPostDeploySmoke = mentionsAny([/smoke/i, /post[_-]?deploy/i])
    checks.hasCoverageReporting = mentionsAny([/coverage/i, /codecov/i, /coveralls/i])
    checks.hasSecurityScanning = mentionsAny([/security[_-]?gate/i, /snyk/i, /tetra-security/i, /semgrep/i])
  }

  // Fallback: smoke tests may live in a dedicated post-deploy workflow file.
  if (!checks.hasPostDeploySmoke) {
    const smokeWorkflows = [
      '.github/workflows/post-deploy-tests.yml',
      '.github/workflows/post-deploy.yml',
      '.github/workflows/smoke-tests.yml',
    ]
    checks.hasPostDeploySmoke = smokeWorkflows.some((p) => existsSync(join(projectRoot, p)))
  }

  return checks
}
|
|
293
|
+
|
|
294
|
+
// ============================================================================
|
|
295
|
+
// CROSS-REFERENCE
|
|
296
|
+
// ============================================================================
|
|
297
|
+
|
|
298
|
+
/**
 * Cross-reference all scan results to produce a gap report.
 *
 * Pure function: combines the outputs of scanFeatureConfigs, scanRoutes,
 * scanFrontendPages, scanTestFiles, and scanCIPipeline into per-feature,
 * per-endpoint, and per-page coverage verdicts plus an aggregate summary.
 *
 * @param {Array} features - From scanFeatureConfigs().
 * @param {Array} routes - From scanRoutes() (only source==='RouteManager' entries are scored).
 * @param {Array} pages - From scanFrontendPages().
 * @param {Array} testFiles - From scanTestFiles() (rawContent is used for substring checks).
 * @param {object} ciChecks - From scanCIPipeline().
 * @returns {object} Report consumed by formatReport / formatReportJSON / formatCIAnnotations.
 */
export function crossReference(features, routes, pages, testFiles, ciChecks) {
  // Each set stores every referenced path twice: verbatim and with trailing
  // slashes stripped, so later exact lookups tolerate either form.
  const testedEndpoints = new Set()
  const authTestedEndpoints = new Set()
  const testedPages = new Set()
  const crudTestedFeatures = new Set()

  for (const test of testFiles) {
    for (const ep of test.apiEndpoints) {
      testedEndpoints.add(ep)
      testedEndpoints.add(ep.replace(/\/+$/, ''))
    }

    // A file-level flag: every endpoint a 401-asserting file touches counts
    // as auth-tested, even if the 401 assertion targeted a different endpoint.
    if (test.hasAuthTests) {
      for (const ep of test.apiEndpoints) {
        authTestedEndpoints.add(ep)
        authTestedEndpoints.add(ep.replace(/\/+$/, ''))
      }
    }

    for (const page of test.pageRoutes) {
      testedPages.add(page)
      testedPages.add(page.replace(/\/+$/, ''))
    }
  }

  // Determine CRUD-tested features
  for (const feature of features) {
    if (!feature.restBasePath) continue
    const basePath = feature.restBasePath
    const hasTest = testFiles.some(t =>
      t.apiEndpoints.some(ep => ep.startsWith(basePath)) && t.hasCrudTests
    )
    if (hasTest) {
      crudTestedFeatures.add(feature.featureName)
    }
  }

  // 1. Feature coverage gaps
  const featureResults = features.map(f => {
    const hasCrud = crudTestedFeatures.has(f.featureName)
    // Match by restBasePath OR by feature directory name in route paths
    // e.g. feature "videostudio" matches route "/api/admin/video-studio"
    // Generate multiple slug variants to catch compound words
    const featureSlug = f.featureName.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase()
    // NOTE(review): the suffix list (studio/generation/identity/library) is
    // hard-coded for known feature names; a new compound suffix would need
    // a new entry here to be matched.
    const slugVariants = new Set([
      f.featureName, // videostudio
      featureSlug, // videostudio (no camelCase)
      f.featureName.replace(/studio/i, '-studio'), // video-studio
      f.featureName.replace(/generation/i, '-generation'), // image-generation
      f.featureName.replace(/identity/i, '-identity'),
      f.featureName.replace(/library/i, '-library'),
    ].map(s => s.toLowerCase()))

    // With a restBasePath, match endpoints by prefix; without one, fall back
    // to fuzzy slug-in-endpoint matching.
    const hasAuth = f.restBasePath
      ? testFiles.some(t =>
          t.apiEndpoints.some(ep => ep.startsWith(f.restBasePath)) && t.hasAuthTests
        )
      : testFiles.some(t =>
          t.apiEndpoints.some(ep => {
            const epLower = ep.toLowerCase()
            return [...slugVariants].some(slug => epLower.includes(slug))
          }) && t.hasAuthTests
        )

    const testedAspects = []
    if (hasCrud) testedAspects.push('CRUD tested')
    if (hasAuth) testedAspects.push('auth tested')

    return {
      featureName: f.featureName,
      tableName: f.tableName,
      restBasePath: f.restBasePath,
      hasCrudTest: hasCrud,
      hasAuthTest: hasAuth,
      testedAspects,
      covered: hasCrud || hasAuth,
    }
  })

  // 2. API endpoint coverage — only RouteManager-sourced routes are scored;
  // routeFile-sourced entries lack a fullPath and are informational only.
  const routeManagerRoutes = routes.filter(r => r.source === 'RouteManager')
  const endpointResults = []

  for (const route of routeManagerRoutes) {
    // Skip root/umbrella prefixes that are not concrete endpoints.
    if (!route.fullPath || route.fullPath === '/' || route.fullPath === '/api' || route.fullPath === '/api/') continue

    // Exact hit, or any tested endpoint extending this one (prefix match).
    const isTested = testedEndpoints.has(route.fullPath) ||
      [...testedEndpoints].some(ep => ep.startsWith(route.fullPath))
    const isAuthTested = authTestedEndpoints.has(route.fullPath) ||
      [...authTestedEndpoints].some(ep => ep.startsWith(route.fullPath))

    const needsAuthTest = ['Admin', 'SuperAdmin', 'User'].includes(route.group)

    // Also check if route is mentioned in test files (even as comment/documentation)
    // NOTE(review): this is a plain substring search over rawContent, so a
    // mere mention (comment, skipped test) suppresses the "missing" flag.
    const mentionedInTests = isAuthTested || testFiles.some(t =>
      t.rawContent && t.rawContent.includes(route.fullPath)
    )

    endpointResults.push({
      endpoint: route.fullPath,
      group: route.group,
      isTested,
      isAuthTested: isAuthTested || mentionedInTests,
      needsAuthTest,
      missingAuthTest: needsAuthTest && !isAuthTested && !mentionedInTests,
    })
  }

  // 3. Frontend page coverage
  const pageResults = pages.map(p => {
    const isTested = testedPages.has(p.routePath) ||
      [...testedPages].some(tp => {
        const normalizedTest = tp.replace(/\/+$/, '')
        const normalizedPage = p.routePath.replace(/\/+$/, '')
        // Dynamic segments like /users/[id] are stripped before the prefix
        // check, so a visit to /users covers /users/[id] too.
        return normalizedTest === normalizedPage ||
          normalizedPage.replace(/\/\[[^\]]+\]/g, '').startsWith(normalizedTest)
      })

    return {
      routePath: p.routePath,
      file: p.file,
      isTested,
    }
  })

  // 4. CI pipeline gaps
  const ciGaps = []
  if (!ciChecks.ciExists) ciGaps.push('No CI pipeline found')
  if (ciChecks.ciExists && !ciChecks.hasApiTestJob) ciGaps.push('No API test job in CI')
  if (ciChecks.ciExists && !ciChecks.hasFrontendTestJob) ciGaps.push('No frontend test job in CI')
  if (ciChecks.ciExists && !ciChecks.hasPostDeploySmoke) ciGaps.push('No post-deploy smoke tests')
  if (ciChecks.ciExists && !ciChecks.hasCoverageReporting) ciGaps.push('No coverage reporting')

  // 5. Summary
  const featuresCovered = featureResults.filter(f => f.covered).length
  const featuresTotal = featureResults.length
  const endpointsWithAuth = endpointResults.filter(e => e.needsAuthTest)
  const authTested = endpointsWithAuth.filter(e => e.isAuthTested).length
  const authTotal = endpointsWithAuth.length
  const pagesTested = pageResults.filter(p => p.isTested).length
  const pagesTotal = pageResults.length

  // The "+ 4" corresponds to the four CI flags counted in totalPassed below.
  // NOTE(review): hasFrontendTestJob is excluded from BOTH sides of this
  // score (though it still appears in ciGaps) — confirm that is intentional.
  const totalChecks = featuresTotal + authTotal + pagesTotal + 4
  const totalPassed = featuresCovered + authTested + pagesTested +
    (ciChecks.ciExists ? 1 : 0) +
    (ciChecks.hasApiTestJob ? 1 : 0) +
    (ciChecks.hasPostDeploySmoke ? 1 : 0) +
    (ciChecks.hasCoverageReporting ? 1 : 0)

  const coveragePercent = totalChecks > 0 ? Math.round((totalPassed / totalChecks) * 100) : 0
  const totalGaps = totalChecks - totalPassed
  const criticalGaps = endpointResults.filter(e => e.missingAuthTest).length

  return {
    features: featureResults,
    endpoints: endpointResults,
    pages: pageResults,
    ci: ciChecks,
    ciGaps,
    summary: {
      coveragePercent,
      totalGaps,
      criticalGaps,
      featuresCovered,
      featuresTotal,
      authTested,
      authTotal,
      pagesTested,
      pagesTotal,
    },
    // Trim rawContent etc. so the report stays serializable/compact.
    testFiles: testFiles.map(t => ({ file: t.file, type: t.type })),
  }
}
|
|
474
|
+
|
|
475
|
+
// ============================================================================
|
|
476
|
+
// FORMATTERS
|
|
477
|
+
// ============================================================================
|
|
478
|
+
|
|
479
|
+
/**
 * Format report as pretty terminal output.
 *
 * @param {object} report - Output of crossReference().
 * @param {object} chalk - A chalk instance (injected so this module stays dependency-free).
 * @returns {string} Multi-line, color-coded report text.
 */
export function formatReport(report, chalk) {
  const out = []
  const { features, endpoints, pages, ci, summary } = report

  out.push('', chalk.blue.bold(' Tetra Test Audit — Coverage Gaps'), '')

  // --- Feature coverage ---
  if (features.length > 0) {
    out.push(chalk.white.bold(' Features (FeatureConfig):'))
    for (const feature of features) {
      if (feature.covered) {
        out.push(chalk.green(` ✓ ${feature.featureName.padEnd(22)} ${feature.testedAspects.join(', ')}`))
      } else {
        const gaps = []
        if (!feature.hasCrudTest) gaps.push('NO CRUD test')
        if (!feature.hasAuthTest) gaps.push('NO auth test')
        out.push(chalk.red(` ✗ ${feature.featureName.padEnd(22)} ${gaps.join(', ')}`))
      }
    }
    out.push('')
  }

  // --- API endpoint auth coverage ---
  const authEndpoints = endpoints.filter((e) => e.needsAuthTest)
  if (authEndpoints.length > 0) {
    const tested = authEndpoints.filter((e) => e.isAuthTested).length
    const total = authEndpoints.length

    out.push(chalk.white.bold(' API Endpoints:'))
    // The ratio line is shown whether or not everything passes.
    out.push(chalk.green(` ✓ ${tested}/${total} protected routes have auth wall tests`))
    if (tested !== total) {
      const missing = authEndpoints.filter((e) => e.missingAuthTest)
      out.push(chalk.red(` ✗ ${missing.length} endpoints missing auth tests:`))
      for (const ep of missing) {
        out.push(chalk.red(` - ${ep.endpoint}`))
      }
    }
    out.push('')
  }

  // --- Frontend page coverage ---
  if (pages.length > 0) {
    const tested = pages.filter((p) => p.isTested).length
    const total = pages.length

    out.push(chalk.white.bold(' Frontend Pages:'))
    out.push(chalk.green(` ✓ ${tested}/${total} pages have e2e specs`))
    if (tested !== total) {
      const missing = pages.filter((p) => !p.isTested)
      out.push(chalk.red(` ✗ ${missing.length} pages missing specs:`))
      for (const p of missing) {
        out.push(chalk.red(` - ${p.routePath}`))
      }
    }
    out.push('')
  }

  // --- CI pipeline checks (flag, pass message, fail message) ---
  out.push(chalk.white.bold(' CI Pipeline:'))
  const ciRows = [
    [ci.ciExists, ` ✓ ${ci.ciFile} exists`, ' ✗ No CI pipeline found'],
    [ci.hasApiTestJob, ' ✓ API test job exists', ' ✗ No API test job'],
    [ci.hasFrontendTestJob, ' ✓ Frontend test job exists', ' ✗ No frontend test job'],
    [ci.hasPostDeploySmoke, ' ✓ Post-deploy smoke tests', ' ✗ No post-deploy smoke tests'],
    [ci.hasCoverageReporting, ' ✓ Coverage reporting', ' ✗ No coverage reporting'],
  ]
  for (const [ok, passMsg, failMsg] of ciRows) {
    out.push(ok ? chalk.green(passMsg) : chalk.red(failMsg))
  }
  out.push('')

  // --- Summary (color thresholds: >=80 green, >=50 yellow, else red) ---
  const pctColor = summary.coveragePercent >= 80
    ? chalk.green
    : summary.coveragePercent >= 50 ? chalk.yellow : chalk.red
  out.push(pctColor.bold(` Summary: ${summary.coveragePercent}% coverage | ${summary.totalGaps} gaps found`))
  if (summary.criticalGaps > 0) {
    out.push(chalk.red.bold(` ⚠ ${summary.criticalGaps} critical gaps (missing auth wall tests)`))
  }
  out.push('')

  return out.join('\n')
}
|
|
586
|
+
|
|
587
|
+
/**
 * Format report as JSON.
 *
 * @param {object} report - Output of crossReference().
 * @returns {string} Pretty-printed (2-space indented) JSON text.
 */
export function formatReportJSON(report) {
  const serialized = JSON.stringify(report, null, 2)
  return serialized
}
|
|
593
|
+
|
|
594
|
+
/**
 * Output GitHub Actions annotations for gaps.
 *
 * Emits `::warning`/`::error` workflow commands, one per line, in the order:
 * uncovered features, endpoints missing auth tests, untested pages, CI gaps.
 *
 * @param {object} report - Output of crossReference().
 * @returns {string} Newline-joined annotation lines (empty string if no gaps).
 */
export function formatCIAnnotations(report) {
  const { features, endpoints, pages, ciGaps } = report

  const featureWarnings = features
    .filter((f) => !f.covered)
    .map((f) => `::warning title=Missing Test::Feature "${f.featureName}" has no CRUD or auth test`)

  // Missing auth walls are the only gaps severe enough to be errors.
  const authErrors = endpoints
    .filter((ep) => ep.missingAuthTest)
    .map((ep) => `::error title=Missing Auth Test::Endpoint ${ep.endpoint} has no auth wall test`)

  const pageWarnings = pages
    .filter((p) => !p.isTested)
    .map((p) => `::warning title=Missing E2E Spec::Frontend page ${p.routePath} has no e2e spec`)

  const ciWarnings = ciGaps.map((gap) => `::warning title=CI Gap::${gap}`)

  return [...featureWarnings, ...authErrors, ...pageWarnings, ...ciWarnings].join('\n')
}
|
|
625
|
+
|
|
626
|
+
// ============================================================================
|
|
627
|
+
// MAIN AUDIT FUNCTION
|
|
628
|
+
// ============================================================================
|
|
629
|
+
|
|
630
|
+
/**
 * Run the full test coverage audit.
 *
 * Orchestrates the four async scanners in parallel, runs the synchronous CI
 * scan, and cross-references everything into a single report object.
 *
 * @param {string} projectRoot - Absolute path to the project root.
 * @returns {Promise<object>} Report from crossReference(); feed it to
 *   formatReport / formatReportJSON / formatCIAnnotations.
 */
export async function runTestCoverageAudit(projectRoot) {
  // The scanners are independent (read-only fs access), so run them in parallel.
  const [features, routes, pages, testFiles] = await Promise.all([
    scanFeatureConfigs(projectRoot),
    scanRoutes(projectRoot),
    scanFrontendPages(projectRoot),
    scanTestFiles(projectRoot),
  ])

  const ciChecks = scanCIPipeline(projectRoot)

  const report = crossReference(features, routes, pages, testFiles, ciChecks)

  return report
}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
/**
|
|
2
|
-
* Health Checks — All
|
|
2
|
+
* Health Checks — All 29 project health checks
|
|
3
3
|
*
|
|
4
4
|
* Main entry: scanProjectHealth(projectPath, projectName, options?)
|
|
5
5
|
* Individual checks available via named imports.
|
|
@@ -41,3 +41,4 @@ export { check as checkSecurityLayers } from './security-layers.js'
|
|
|
41
41
|
export { check as checkSmokeReadiness } from './smoke-readiness.js'
|
|
42
42
|
export { check as checkReleasePipeline } from './release-pipeline.js'
|
|
43
43
|
export { check as checkTestStructure } from './test-structure.js'
|
|
44
|
+
export { check as checkSentryMonitoring } from './sentry-monitoring.js'
|
|
@@ -9,6 +9,27 @@
|
|
|
9
9
|
* - Low (missing params): -0.25 per finding (max -1)
|
|
10
10
|
*/
|
|
11
11
|
|
|
12
|
+
// TODO(tetra-dev-toolkit): Add check 'table-mode-threshold'
|
|
13
|
+
//
|
|
14
|
+
// This check should warn when a feature uses table mode (no RPC names in FeatureConfig)
|
|
15
|
+
// but the underlying Supabase table exceeds 50,000 rows — a signal that adding RPC
|
|
16
|
+
// functions (get_*_results / get_*_counts) would improve query performance, especially
|
|
17
|
+
// for breakdown count queries that currently fetch all rows into JS.
|
|
18
|
+
//
|
|
19
|
+
// Implementation sketch:
|
|
20
|
+
// 1. Scan project for FeatureConfig objects that have no countsRpcName / resultsRpcName
|
|
21
|
+
// 2. For each, query the Supabase API (using the project's service role key via Doppler)
|
|
22
|
+
// to get the row count: SELECT count(*) FROM <tableName>
|
|
23
|
+
// 3. If count > 50_000, emit a warning with feature name, table name, and row count
|
|
24
|
+
// 4. Score deduction: -0.5 per table over threshold (max -2)
|
|
25
|
+
//
|
|
26
|
+
// Prerequisites:
|
|
27
|
+
// - Doppler integration must be configured (checkDopplerCompliance passes)
|
|
28
|
+
// - Supabase project URL + service role key available at runtime
|
|
29
|
+
//
|
|
30
|
+
// See: packages/core/src/shared/services/TableQueryService.ts (>10k runtime warning)
|
|
31
|
+
// packages/core/src/shared/factories/QueryServiceFactory.ts (mode selection docs)
|
|
32
|
+
|
|
12
33
|
import { createCheck } from './types.js'
|
|
13
34
|
import { run as runAuditCheck } from '../supabase/rpc-param-mismatch.js'
|
|
14
35
|
|