@soulbatical/tetra-dev-toolkit 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +312 -0
- package/bin/vca-audit.js +90 -0
- package/bin/vca-dev-token.js +39 -0
- package/bin/vca-setup.js +227 -0
- package/lib/checks/codeQuality/api-response-format.js +268 -0
- package/lib/checks/health/claude-md.js +114 -0
- package/lib/checks/health/doppler-compliance.js +174 -0
- package/lib/checks/health/git.js +61 -0
- package/lib/checks/health/gitignore.js +83 -0
- package/lib/checks/health/index.js +26 -0
- package/lib/checks/health/infrastructure-yml.js +87 -0
- package/lib/checks/health/mcps.js +57 -0
- package/lib/checks/health/naming-conventions.js +302 -0
- package/lib/checks/health/plugins.js +38 -0
- package/lib/checks/health/quality-toolkit.js +97 -0
- package/lib/checks/health/repo-visibility.js +70 -0
- package/lib/checks/health/rls-audit.js +130 -0
- package/lib/checks/health/scanner.js +68 -0
- package/lib/checks/health/secrets.js +80 -0
- package/lib/checks/health/stella-integration.js +124 -0
- package/lib/checks/health/tests.js +140 -0
- package/lib/checks/health/types.js +77 -0
- package/lib/checks/health/vincifox-widget.js +47 -0
- package/lib/checks/index.js +17 -0
- package/lib/checks/security/deprecated-supabase-admin.js +96 -0
- package/lib/checks/security/gitignore-validation.js +211 -0
- package/lib/checks/security/hardcoded-secrets.js +95 -0
- package/lib/checks/security/service-key-exposure.js +107 -0
- package/lib/checks/security/systemdb-whitelist.js +138 -0
- package/lib/checks/stability/ci-pipeline.js +143 -0
- package/lib/checks/stability/husky-hooks.js +117 -0
- package/lib/checks/stability/npm-audit.js +140 -0
- package/lib/checks/supabase/rls-policy-audit.js +261 -0
- package/lib/commands/dev-token.js +342 -0
- package/lib/config.js +213 -0
- package/lib/index.js +17 -0
- package/lib/reporters/terminal.js +134 -0
- package/lib/runner.js +179 -0
- package/package.json +72 -0
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Health Check: .gitignore Critical Entries
|
|
3
|
+
*
|
|
4
|
+
* Verifies .gitignore has entries for .env, node_modules, dist, credentials.
|
|
5
|
+
* Score: 2 (full) = all critical present, 1 = missing recommended, 0 = missing critical
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { existsSync, readFileSync } from 'fs'
|
|
9
|
+
import { join } from 'path'
|
|
10
|
+
import { createCheck } from './types.js'
|
|
11
|
+
|
|
12
|
+
// Entry groups that MUST be covered by .gitignore. A group counts as covered
// when ANY one of its patterns matches; a missing group forces score 0 and
// status "error".
const CRITICAL = [
  { name: '.env files', patterns: ['.env', '.env.*', '.env.local'] },
  { name: 'node_modules', patterns: ['node_modules'] },
  { name: 'dist/build', patterns: ['dist', 'build'] }
]

// Entry groups that SHOULD be covered. A missing group caps the score at 1
// and sets status "warning" (only when nothing critical is missing).
const RECOMMENDED = [
  { name: 'credential files (*.pem, *.key)', patterns: ['*.pem', '*.key'] },
  { name: 'Supabase temp', patterns: ['.supabase', 'supabase/.temp'] },
  { name: '.DS_Store', patterns: ['.DS_Store'] }
]
|
|
23
|
+
|
|
24
|
+
/**
 * Decide whether any .gitignore entry covers the given required pattern.
 *
 * Trailing slashes are ignored on both sides. Besides exact equality, a few
 * common gitignore spellings are recognized: `.env.*` / `.env*` wildcards,
 * leading-`*` extension globs, anchored (`/name`), directory (`name/`),
 * recursive (`name/**`) and nested (`something/name`) forms.
 *
 * @param {string[]} lines - Non-comment, trimmed .gitignore entries.
 * @param {string} pattern - Required pattern to look for.
 * @returns {boolean} True when at least one entry covers the pattern.
 */
function isCovered(lines, pattern) {
  const target = pattern.replace(/\/$/, '')

  const entryCovers = (entry) => {
    const rule = entry.replace(/\/$/, '')
    return (
      rule === target ||
      (rule === '.env.*' && pattern.startsWith('.env.')) ||
      (rule === '.env*' && pattern.startsWith('.env')) ||
      (rule.startsWith('*') && pattern.endsWith(rule.slice(1))) ||
      rule === `/${target}` ||
      rule === `${target}/` ||
      rule === `${target}/**` ||
      rule.endsWith(`/${target}`)
    )
  }

  return lines.some(entryCovers)
}
|
|
39
|
+
|
|
40
|
+
/**
 * Health check: verify .gitignore covers all critical (and ideally all
 * recommended) entry groups.
 *
 * Scoring: 2 = all critical + recommended covered, 1 = critical covered but
 * recommended missing, 0 = file missing/unreadable or a critical group missing.
 *
 * @param {string} projectPath - Project root directory.
 * @returns {Promise<object>} Result object from createCheck().
 */
export async function check(projectPath) {
  const result = createCheck('gitignore', 2, {
    exists: false, entryCount: 0, missingCritical: [], missingRecommended: []
  })
  // Start from a perfect score and deduct as problems are found.
  result.score = 2

  const gitignorePath = join(projectPath, '.gitignore')
  if (!existsSync(gitignorePath)) {
    result.status = 'error'
    result.score = 0
    result.details.message = 'No .gitignore file found'
    return result
  }

  result.details.exists = true

  let content
  try {
    content = readFileSync(gitignorePath, 'utf-8')
  } catch {
    result.status = 'error'
    result.score = 0
    result.details.message = 'Could not read .gitignore'
    return result
  }

  // Keep only meaningful entries: trimmed, non-empty, non-comment lines.
  const entries = content.split('\n').map(l => l.trim()).filter(l => l && !l.startsWith('#'))
  result.details.entryCount = entries.length

  // Names of groups for which none of the patterns is covered.
  const uncovered = (groups) =>
    groups.filter(g => !g.patterns.some(p => isCovered(entries, p))).map(g => g.name)

  result.details.missingCritical = uncovered(CRITICAL)
  result.details.missingRecommended = uncovered(RECOMMENDED)

  if (result.details.missingCritical.length > 0) {
    result.score = 0
    result.status = 'error'
    result.details.message = `Missing critical entries: ${result.details.missingCritical.join(', ')}`
  } else if (result.details.missingRecommended.length > 0) {
    result.score = 1
    result.status = 'warning'
    result.details.message = `Missing recommended entries: ${result.details.missingRecommended.join(', ')}`
  }

  return result
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
 * Health Checks — All 15 project health checks
 *
 * Main entry: scanProjectHealth(projectPath, projectName, options?)
 * Individual checks available via named imports.
 *
 * Each `check*` export below re-exports the async `check(projectPath)`
 * function from its module under a unique, descriptive name.
 */

export { scanProjectHealth } from './scanner.js'
export { calculateHealthStatus, createCheck, maskSecret } from './types.js'

// Individual checks (for selective use)
export { check as checkPlugins } from './plugins.js'
export { check as checkMcps } from './mcps.js'
export { check as checkGit } from './git.js'
export { check as checkTests } from './tests.js'
export { check as checkSecrets } from './secrets.js'
export { check as checkQualityToolkit } from './quality-toolkit.js'
export { check as checkNamingConventions } from './naming-conventions.js'
export { check as checkRlsPolicies } from './rls-audit.js'
export { check as checkGitignore } from './gitignore.js'
export { check as checkRepoVisibility } from './repo-visibility.js'
export { check as checkVinciFoxWidget } from './vincifox-widget.js'
export { check as checkStellaIntegration } from './stella-integration.js'
export { check as checkClaudeMd } from './claude-md.js'
export { check as checkDopplerCompliance } from './doppler-compliance.js'
export { check as checkInfrastructureYml } from './infrastructure-yml.js'
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Health Check: .ralph/INFRASTRUCTURE.yml
|
|
3
|
+
*
|
|
4
|
+
* Checks for infrastructure documentation completeness.
|
|
5
|
+
* Score: 0=missing, 1=exists+parses, 2=all required sections, 3=no open security issues
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { existsSync, readFileSync } from 'fs'
|
|
9
|
+
import { join } from 'path'
|
|
10
|
+
import { parse as parseYaml } from 'yaml'
|
|
11
|
+
import { createCheck } from './types.js'
|
|
12
|
+
|
|
13
|
+
// Sections that must contain data before the check can reach score 2+.
const REQUIRED_SECTIONS = ['hosting', 'domains', 'database', 'secrets', 'email', 'security']
// Every known section; each one is reported individually in details.sections.
const ALL_SECTIONS = ['project', 'hosting', 'domains', 'database', 'secrets', 'email', 'monitoring', 'social', 'services', 'security', 'meta']
|
|
15
|
+
|
|
16
|
+
/**
 * Whether a parsed YAML section holds meaningful content.
 *
 * - null/undefined: empty
 * - primitives: filled unless they are the empty string
 * - arrays: filled when non-empty
 * - objects: filled when at least one value is neither null/undefined nor ''
 *
 * @param {*} value - A section value from the parsed YAML document.
 * @returns {boolean} True when the section counts as filled.
 */
function isFilled(value) {
  if (value === null || value === undefined) return false
  // FIX: a bare empty string previously counted as "filled", while an ''
  // value inside an object did not. Treat '' consistently as empty.
  if (typeof value !== 'object') return value !== ''
  if (Array.isArray(value)) return value.length > 0
  return Object.values(value).some(v => v !== null && v !== undefined && v !== '')
}
|
|
22
|
+
|
|
23
|
+
/**
 * Health check: .ralph/INFRASTRUCTURE.yml completeness.
 *
 * Scoring: 0 = missing/unparseable, 1 = exists and parses to an object,
 * 2 = all REQUIRED_SECTIONS filled, 3 = additionally no open security issues
 * (requires a `security` object section).
 *
 * @param {string} projectPath - Project root directory.
 * @returns {Promise<object>} Result object from createCheck().
 */
export async function check(projectPath) {
  const result = createCheck('infrastructure-yml', 3, {
    exists: false, valid: false, sections: {}, missingSections: [], openSecurityIssues: []
  })

  const filePath = join(projectPath, '.ralph', 'INFRASTRUCTURE.yml')
  if (!existsSync(filePath)) {
    result.status = 'warning'
    result.details.message = 'Missing .ralph/INFRASTRUCTURE.yml'
    return result
  }

  result.details.exists = true

  let data
  try {
    data = parseYaml(readFileSync(filePath, 'utf-8'))
    if (!data || typeof data !== 'object') {
      result.status = 'error'
      result.details.message = 'YAML parsed but is not an object'
      return result
    }
  } catch (e) {
    result.status = 'error'
    result.details.message = `YAML parse error: ${e instanceof Error ? e.message : String(e)}`
    return result
  }

  result.details.valid = true
  result.score = 1

  // Report fill status for every known section.
  for (const section of ALL_SECTIONS) {
    result.details.sections[section] = isFilled(data[section])
  }

  const missingSections = REQUIRED_SECTIONS.filter(s => !isFilled(data[s]))
  result.details.missingSections = missingSections

  if (missingSections.length === 0) result.score = 2

  // Score 3 requires a security section with no open issues on top of all
  // required sections being filled. (The previous version had a redundant
  // `else if` that re-assigned score 2 on a path where it was already 2.)
  const security = data.security
  if (security && typeof security === 'object') {
    const openIssues = security.open_issues
    if (Array.isArray(openIssues) && openIssues.length > 0) {
      result.details.openSecurityIssues = openIssues
    }
    if (missingSections.length === 0 && (!Array.isArray(openIssues) || openIssues.length === 0)) {
      result.score = 3
    }
  }

  // Defensive: score 0 can only happen via the early returns above, but keep
  // the error mapping in case scoring rules change.
  if (result.score === 0) result.status = 'error'
  else if (result.score < result.maxScore) {
    result.status = 'warning'
    const issues = []
    if (missingSections.length > 0) issues.push(`missing: ${missingSections.join(', ')}`)
    if (result.details.openSecurityIssues.length > 0) issues.push(`${result.details.openSecurityIssues.length} open security issues`)
    result.details.message = issues.join('; ')
  }

  return result
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Health Check: MCP Servers
|
|
3
|
+
*
|
|
4
|
+
* Checks .mcp.json and .claude/settings.local.json for configured MCP servers.
|
|
5
|
+
* Score: 0 = none, 1 = 1+ servers
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { existsSync, readFileSync } from 'fs'
|
|
9
|
+
import { join } from 'path'
|
|
10
|
+
import { createCheck } from './types.js'
|
|
11
|
+
|
|
12
|
+
/**
 * Health check: MCP servers configured for the project.
 *
 * Collects server names from .mcp.json (`mcpServers` or `servers` key) and
 * from `enabledMcpjsonServers` in .claude/settings.local.json, deduplicated.
 * Scoring: 1 = at least one server, 0 = none (warning). An unparseable
 * .mcp.json is an error; an unparseable settings.local.json is ignored.
 *
 * @param {string} projectPath - Project root directory.
 * @returns {Promise<object>} Result object from createCheck().
 */
export async function check(projectPath) {
  const result = createCheck('mcps', 1, { mcps: [], count: 0 })

  // Set preserves insertion order while deduplicating across both sources.
  const servers = new Set()

  // Primary source: .mcp.json
  const mcpPath = join(projectPath, '.mcp.json')
  if (existsSync(mcpPath)) {
    let parsed
    try {
      parsed = JSON.parse(readFileSync(mcpPath, 'utf-8'))
    } catch {
      result.status = 'error'
      result.details.error = 'Failed to parse .mcp.json'
      return result
    }
    for (const name of Object.keys(parsed.mcpServers || parsed.servers || {})) {
      servers.add(name)
    }
  }

  // Secondary source: enabledMcpjsonServers in settings.local.json
  const settingsLocalPath = join(projectPath, '.claude', 'settings.local.json')
  if (existsSync(settingsLocalPath)) {
    try {
      const settings = JSON.parse(readFileSync(settingsLocalPath, 'utf-8'))
      const enabled = settings.enabledMcpjsonServers || []
      if (Array.isArray(enabled)) {
        for (const name of enabled) servers.add(name)
      }
      if (settings.enableAllProjectMcpServers) {
        result.details.enableAllProjectMcpServers = true
      }
    } catch { /* a broken local settings file is not fatal */ }
  }

  const mcpServers = [...servers]
  result.details.mcps = mcpServers
  result.details.count = mcpServers.length

  if (mcpServers.length >= 1) {
    result.score = 1
  } else {
    result.status = 'warning'
    result.details.message = 'No MCP servers configured'
  }

  return result
}
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Health Check: Naming Conventions (DB + Code)
|
|
3
|
+
*
|
|
4
|
+
* Scans SQL migrations for snake_case tables/columns and source code for naming compliance.
|
|
5
|
+
* Score: up to 3 points (1 DB + 2 code)
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { existsSync, readFileSync, readdirSync } from 'fs'
|
|
9
|
+
import { join } from 'path'
|
|
10
|
+
import { createCheck } from './types.js'
|
|
11
|
+
|
|
12
|
+
// --- DB naming primitives ---------------------------------------------------

// snake_case: lowercase words joined by single underscores, no leading "_".
const isSnakeCase = (s) => /^[a-z][a-z0-9]*(_[a-z0-9]+)*$/.test(s)
// Heuristic plural test for table names. Any trailing "s" counts (this also
// covers "ies" and "status"); uncountable-style suffixes (data/media/info)
// are accepted as-is.
const isPlural = (s) => s.endsWith('s') || s.endsWith('data') || s.endsWith('media') || s.endsWith('info')

// Matches "<name> <sql type>" column definitions inside CREATE TABLE bodies.
const COL_TYPE_REGEX = /^["']?(\w+)["']?\s+(uuid|text|varchar|integer|int|bigint|boolean|bool|timestamp|timestamptz|jsonb?|smallint|numeric|real|double|serial|bytea|date|time|interval|citext|inet|macaddr|point|polygon|float)/i
// Boolean columns must announce themselves with one of these prefixes.
const BOOL_PREFIX = /^(is_|has_|can_|should_|allow_|enable_|use_|include_)/
// json/jsonb columns: accepted either with one of these suffixes...
const JSON_SUFFIX = /_(data|json|meta|config|settings|options|payload|context|params|attributes|properties|extra|raw)$/
// ...or as one of these well-known standalone names.
const JSON_STANDALONE = /^(metadata|settings|config|configuration|options|preferences|tags|labels|permissions|context|payload|attributes|properties|extras|params|parameters)$/

/**
 * Scan SQL migration files for database naming-convention compliance.
 *
 * Looks in supabase/migrations and backend/supabase/migrations for .sql
 * files and checks: snake_case + plural table names, snake_case columns,
 * "id" primary keys, _id foreign keys, is_/has_/... boolean prefixes,
 * JSON column naming, created_at/updated_at presence, idx_/ix_ index
 * prefixes, and cross-table column-name consistency.
 *
 * Some checks are penalty-only: they increment totalFields only when a
 * violation is found (PK name, FK suffix, is_deleted), so the compliance
 * percentage is never inflated by them.
 *
 * @param {string} projectPath - Project root directory.
 * @returns {{totalFields: number, compliant: number, violations: string[], compliancePercent: number}}
 */
function scanDatabaseNaming(projectPath) {
  const violations = []
  let totalFields = 0
  let compliant = 0

  const migrationDirs = [
    join(projectPath, 'supabase', 'migrations'),
    join(projectPath, 'backend', 'supabase', 'migrations')
  ]

  const sqlFiles = []
  for (const dir of migrationDirs) {
    if (!existsSync(dir)) continue
    try {
      for (const f of readdirSync(dir).filter(f => f.endsWith('.sql'))) {
        sqlFiles.push(join(dir, f))
      }
    } catch { /* unreadable dir — skip */ }
  }

  // No migrations at all: nothing to judge, report full compliance.
  if (sqlFiles.length === 0) return { totalFields: 0, compliant: 0, violations: [], compliancePercent: 100 }

  // column name -> tables that use it (for cross-table consistency checks)
  const globalColumns = {}

  const checkColumn = (tableName, colName, colType, body, fileName) => {
    if (!globalColumns[colName]) globalColumns[colName] = []
    globalColumns[colName].push(tableName)

    totalFields++
    if (isSnakeCase(colName)) compliant++
    else violations.push(`Column "${tableName}.${colName}" not snake_case (${fileName})`)

    // PK must be "id" (penalty-only check)
    const pkPattern = new RegExp(`["']?${colName}["']?\\s+\\w+[^,]*PRIMARY\\s+KEY`, 'i')
    if (pkPattern.test(body) && colName !== 'id') {
      totalFields++
      violations.push(`PK "${tableName}.${colName}" should be named "id" (${fileName})`)
    }

    // FK: uuid columns with REFERENCES should end with _id (penalty-only)
    if (colType === 'uuid' && colName !== 'id' && !colName.endsWith('_id') && !colName.endsWith('_at') && !colName.endsWith('_by')) {
      if (new RegExp(`["']?${colName}["']?[^;]*REFERENCES`, 'i').test(body)) {
        totalFields++
        violations.push(`FK "${tableName}.${colName}" should use _id suffix (${fileName})`)
      }
    }

    // Booleans need a recognizable prefix; is_deleted is flagged separately
    // in favor of a deleted_at timestamp.
    if (colType === 'boolean' || colType === 'bool') {
      totalFields++
      if (BOOL_PREFIX.test(colName)) compliant++
      else violations.push(`Boolean "${tableName}.${colName}" needs prefix (is_/has_/can_/...) (${fileName})`)
      if (colName === 'is_deleted') {
        totalFields++
        violations.push(`Soft delete "${tableName}.is_deleted" should be "deleted_at timestamptz" (${fileName})`)
      }
    }

    // JSON columns need a conventional suffix or standalone name.
    if (colType === 'jsonb' || colType === 'json') {
      totalFields++
      if (JSON_SUFFIX.test(colName) || JSON_STANDALONE.test(colName)) compliant++
      else violations.push(`JSON "${tableName}.${colName}" needs suffix (_data/_json/_meta/_config/_settings) (${fileName})`)
    }
  }

  for (const filePath of sqlFiles) {
    let content
    try { content = readFileSync(filePath, 'utf-8') } catch { continue }
    // FIX: split on both separators so Windows paths (join yields "\") still
    // produce the bare file name instead of the whole path.
    const fileName = filePath.split(/[\\/]/).pop() || ''

    // CREATE TABLE statements: validate table name, columns, timestamps.
    const createTableRegex = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:public\.)?["']?(\w+)["']?\s*\(([\s\S]*?)\);/gi
    let match
    while ((match = createTableRegex.exec(content)) !== null) {
      const tableName = match[1]
      const body = match[2]

      totalFields++
      if (isSnakeCase(tableName) && isPlural(tableName)) compliant++
      else {
        if (!isSnakeCase(tableName)) violations.push(`Table "${tableName}" not snake_case (${fileName})`)
        else if (!isPlural(tableName)) violations.push(`Table "${tableName}" not plural (${fileName})`)
      }

      const columns = []
      for (const line of body.split('\n')) {
        const colMatch = line.trim().match(COL_TYPE_REGEX)
        if (colMatch) {
          columns.push(colMatch[1])
          checkColumn(tableName, colMatch[1], colMatch[2].toLowerCase(), body, fileName)
        }
      }

      // Every table should carry created_at + updated_at.
      totalFields++
      if (columns.includes('created_at') && columns.includes('updated_at')) compliant++
      else {
        const missing = []
        if (!columns.includes('created_at')) missing.push('created_at')
        if (!columns.includes('updated_at')) missing.push('updated_at')
        violations.push(`Table "${tableName}" missing ${missing.join(', ')} (${fileName})`)
      }
    }

    // ALTER TABLE ... ADD COLUMN: run the same column checks (no body context).
    const alterRegex = /ALTER\s+TABLE\s+(?:public\.)?["']?(\w+)["']?\s+ADD\s+(?:COLUMN\s+)?["']?(\w+)["']?\s+(uuid|text|varchar|integer|int|bigint|boolean|bool|timestamp|timestamptz|jsonb?|smallint|numeric|real|double|serial|bytea|date|time|interval|citext)/gi
    while ((match = alterRegex.exec(content)) !== null) {
      checkColumn(match[1], match[2], match[3].toLowerCase(), '', fileName)
    }

    // CREATE INDEX: names should use an idx_ or ix_ prefix.
    const indexRegex = /CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?:IF\s+NOT\s+EXISTS\s+)?["']?(\w+)["']?\s+ON/gi
    while ((match = indexRegex.exec(content)) !== null) {
      totalFields++
      if (/^(idx_|ix_)/.test(match[1])) compliant++
      else violations.push(`Index "${match[1]}" should use idx_ or ix_ prefix (${fileName})`)
    }
  }

  // Cross-table consistency: flag non-standard synonyms for common concepts.
  const consistencyGroups = [
    { concept: 'creation timestamp', preferred: 'created_at', alternatives: ['created_on', 'creation_date', 'inserted_at', 'date_created'] },
    { concept: 'update timestamp', preferred: 'updated_at', alternatives: ['updated_on', 'modified_at', 'changed_at', 'last_modified', 'date_updated'] },
    { concept: 'deletion timestamp', preferred: 'deleted_at', alternatives: ['removed_at', 'date_deleted'] },
    { concept: 'active flag', preferred: 'is_active', alternatives: ['active', 'enabled', 'is_enabled'] },
    { concept: 'description', preferred: 'description', alternatives: ['desc', 'summary'] },
  ]

  for (const { concept, preferred, alternatives } of consistencyGroups) {
    const preferredTables = globalColumns[preferred] || []
    for (const alt of alternatives) {
      const altTables = globalColumns[alt] || []
      if (altTables.length > 0) {
        totalFields++
        // "Inconsistent" when the preferred name is also in use elsewhere.
        const label = preferredTables.length > 0 ? 'Inconsistent' : 'Non-standard'
        violations.push(`${label} ${concept}: "${alt}" in ${altTables.slice(0, 3).join(', ')} — use "${preferred}"`)
      }
    }
  }

  return {
    totalFields,
    compliant,
    violations: violations.slice(0, 50), // cap report size
    compliancePercent: totalFields > 0 ? Math.round((compliant / totalFields) * 100) : 100
  }
}
|
|
168
|
+
|
|
169
|
+
/**
 * Scan TypeScript source files for code naming-convention compliance.
 *
 * Walks src, backend/src and frontend/src (up to 200 .ts/.tsx files, first
 * 500 lines each), checking: lowercase directory names, camel/Pascal/snake
 * file names, camelCase (or CONSTANT_CASE) variables/functions, PascalCase
 * types, and no I-prefixed interfaces. Lines that look like SQL, comments,
 * or imports/re-exports are skipped.
 *
 * @param {string} projectPath - Project root directory.
 * @returns {{totalChecked: number, compliant: number, violations: string[], compliancePercent: number}}
 */
function scanCodeNaming(projectPath) {
  const violations = []
  let totalChecked = 0
  let compliant = 0

  const SKIP_DIRS = ['node_modules', 'dist', '.next', 'generated', 'coverage', '.turbo', 'build']
  const MAX_FILES = 200
  const MAX_LINES = 500
  const SQL_KEYWORDS = /^\s*(SELECT|INSERT|UPDATE|DELETE|CREATE|ALTER|DROP|FROM|WHERE|JOIN|SET|VALUES|INTO|TABLE|INDEX|GRANT|REVOKE)\b/i

  const tsFiles = []
  const collectFiles = (dir) => {
    if (tsFiles.length >= MAX_FILES || !existsSync(dir)) return
    try {
      for (const entry of readdirSync(dir, { withFileTypes: true })) {
        if (tsFiles.length >= MAX_FILES) return
        const fullPath = join(dir, entry.name)
        if (entry.isDirectory()) {
          if (SKIP_DIRS.includes(entry.name)) continue
          // Dot- and dunder-directories are exempt from the lowercase rule.
          if (!/^[a-z][a-z0-9._-]*$/.test(entry.name) && !entry.name.startsWith('.') && !entry.name.startsWith('__')) {
            totalChecked++
            violations.push(`Directory "${entry.name}" should be lowercase (${fullPath.replace(projectPath + '/', '')})`)
          } else {
            totalChecked++
            compliant++
          }
          collectFiles(fullPath)
        } else if (entry.isFile() && /\.(ts|tsx)$/.test(entry.name) && !entry.name.endsWith('.d.ts')) {
          const baseName = entry.name.replace(/\.(ts|tsx)$/, '')
          totalChecked++
          // FIX (simplification): the old condition branched on .ts vs .tsx but
          // accepted the identical set (camel/Pascal/snake) in both branches,
          // and the outer guard already restricts to .ts/.tsx — so the two
          // branches collapse to one equivalent test.
          const isCamel = /^[a-z][a-zA-Z0-9]*$/.test(baseName)
          const isPascal = /^[A-Z][a-zA-Z0-9]*$/.test(baseName)
          const isSnake = /^[a-z][a-z0-9]*([._-][a-z0-9]+)*$/.test(baseName)
          if (isCamel || isPascal || isSnake) {
            compliant++
          } else {
            violations.push(`File "${entry.name}" naming issue (${fullPath.replace(projectPath + '/', '')})`)
          }
          tsFiles.push(fullPath)
        }
      }
    } catch { /* unreadable dir — skip */ }
  }

  for (const dir of ['src', 'backend/src', 'frontend/src'].map(d => join(projectPath, d))) {
    collectFiles(dir)
  }

  // No TS sources found: nothing to judge, report full compliance.
  if (tsFiles.length === 0) return { totalChecked: 0, compliant: 0, violations: [], compliancePercent: 100 }

  for (const filePath of tsFiles) {
    let content
    try { content = readFileSync(filePath, 'utf-8') } catch { continue }
    const lines = content.split('\n').slice(0, MAX_LINES)
    const relPath = filePath.replace(projectPath + '/', '')

    for (let i = 0; i < lines.length; i++) {
      const line = lines[i]
      // Skip SQL-looking lines, comments, imports/re-exports and blanks.
      if (SQL_KEYWORDS.test(line) || /^\s*(\/\/|\/\*|\*|import\s|export\s.*from)/.test(line) || !line.trim()) continue

      // Variables & functions: camelCase or CONSTANT_CASE (PascalCase allowed
      // in .tsx for components). Single-letter names skip the whole line.
      const varMatch = line.match(/(?:const|let|var|function)\s+([a-zA-Z_$][a-zA-Z0-9_$]*)\b/)
      if (varMatch) {
        const name = varMatch[1]
        if (name.length <= 1) continue
        if (/^[A-Z][A-Z0-9_]+$/.test(name)) { totalChecked++; compliant++; continue }
        totalChecked++
        if (/^[a-z][a-zA-Z0-9]*$/.test(name) || /^_[a-z][a-zA-Z0-9]*$/.test(name)) compliant++
        else if (/^[A-Z][a-zA-Z0-9]*$/.test(name) && filePath.endsWith('.tsx')) compliant++
        else violations.push(`Variable "${name}" should be camelCase (${relPath}:${i + 1})`)
      }

      // Classes, interfaces, types, enums: PascalCase, no I-prefix.
      const classMatch = line.match(/(?:class|interface|type|enum)\s+([a-zA-Z_$][a-zA-Z0-9_$]*)\b/)
      if (classMatch) {
        const name = classMatch[1]
        if (name.length <= 1) continue
        totalChecked++
        if (/^[A-Z][a-zA-Z0-9]*$/.test(name)) compliant++
        else violations.push(`Type "${name}" should be PascalCase (${relPath}:${i + 1})`)

        if (line.match(/\binterface\s/) && /^I[A-Z]/.test(name)) {
          totalChecked++
          violations.push(`Interface "${name}" should not use I-prefix (${relPath}:${i + 1})`)
        }
      }
    }
  }

  return {
    totalChecked,
    compliant,
    violations: violations.slice(0, 50), // cap report size
    compliancePercent: totalChecked > 0 ? Math.round((compliant / totalChecked) * 100) : 100
  }
}
|
|
267
|
+
|
|
268
|
+
/**
 * Health check: naming conventions across DB migrations and TS source.
 *
 * Scoring (max 3): up to 1 point for database naming (full point when no
 * migrations exist), up to 2 points for code naming (0 when nothing was
 * scanned). Status: ok = no violations, warning = violations but score >= 1,
 * error otherwise.
 *
 * @param {string} projectPath - Project root directory.
 * @returns {Promise<object>} Result object from createCheck().
 */
export async function check(projectPath) {
  const result = createCheck('naming-conventions', 3, {
    database: { totalFields: 0, compliant: 0, violations: [], compliancePercent: 0 },
    code: { totalChecked: 0, compliant: 0, violations: [], compliancePercent: 0 },
    overallCompliancePercent: 0
  })

  // DB portion: 1 point max; no migrations earns the full point.
  const db = scanDatabaseNaming(projectPath)
  result.details.database = db
  if (db.totalFields === 0) {
    result.score += 1
  } else if (db.compliancePercent >= 80) {
    result.score += 1
  } else if (db.compliancePercent >= 50) {
    result.score += 0.5
  }

  // Code portion: 2 points max; nothing scanned earns nothing.
  const code = scanCodeNaming(projectPath)
  result.details.code = code
  if (code.totalChecked > 0) {
    if (code.compliancePercent >= 90) result.score += 2
    else if (code.compliancePercent >= 70) result.score += 1
    else if (code.compliancePercent >= 50) result.score += 0.5
  }

  const checkedTotal = db.totalFields + code.totalChecked
  const compliantTotal = db.compliant + code.compliant
  result.details.overallCompliancePercent =
    checkedTotal > 0 ? Math.round((compliantTotal / checkedTotal) * 100) : 0

  const violationCount = (db.violations?.length || 0) + (code.violations?.length || 0)
  if (violationCount === 0) result.status = 'ok'
  else if (result.score >= 1) result.status = 'warning'
  else result.status = 'error'

  return result
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Health Check: Claude Code Plugins
|
|
3
|
+
*
|
|
4
|
+
* Checks .claude/settings.json for enabled plugins.
|
|
5
|
+
* Score: 0 = none, 1 = 1-2 plugins, 2 = 3+ plugins
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { existsSync, readFileSync } from 'fs'
|
|
9
|
+
import { join } from 'path'
|
|
10
|
+
import { createCheck } from './types.js'
|
|
11
|
+
|
|
12
|
+
/**
 * Health check: Claude Code plugins enabled in .claude/settings.json.
 *
 * `enabledPlugins` may be an array of names or an object mapping name ->
 * enabled flag (only truthy entries count). Scoring: 2 = 3+ plugins,
 * 1 = 1-2 plugins, 0 = none; missing file is a warning, unparseable JSON
 * is an error.
 *
 * @param {string} projectPath - Project root directory.
 * @returns {Promise<object>} Result object from createCheck().
 */
export async function check(projectPath) {
  const result = createCheck('plugins', 2, { plugins: [], count: 0 })
  const settingsPath = join(projectPath, '.claude', 'settings.json')

  if (!existsSync(settingsPath)) {
    result.status = 'warning'
    result.details.message = 'No settings.json found'
    return result
  }

  let settings
  try {
    settings = JSON.parse(readFileSync(settingsPath, 'utf-8'))
  } catch {
    result.status = 'error'
    result.details.error = 'Failed to parse settings.json'
    return result
  }

  const raw = settings.enabledPlugins || {}
  const pluginList = Array.isArray(raw)
    ? raw
    : Object.keys(raw).filter(k => raw[k])

  result.details.plugins = pluginList
  result.details.count = pluginList.length

  if (pluginList.length >= 3) result.score = 2
  else if (pluginList.length >= 1) result.score = 1

  return result
}
|