@gaurav.codes/schemacheck 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +0 -0
- package/cli.js +798 -0
- package/package.json +74 -0
package/README.md
ADDED
|
File without changes
|
package/cli.js
ADDED
|
@@ -0,0 +1,798 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
'use strict'
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* schemacheck — zero-dependency runner
|
|
6
|
+
* Works immediately: node run.js ./schema.prisma
|
|
7
|
+
* Full TypeScript build: npm install && npx tsc && node dist/cli.js
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
const fs = require('fs')
|
|
11
|
+
const path = require('path')
|
|
12
|
+
|
|
13
|
+
// ═══════════════════════════════════════════════════════════════
|
|
14
|
+
// PARSERS
|
|
15
|
+
// ═══════════════════════════════════════════════════════════════
|
|
16
|
+
|
|
17
|
+
// ── Prisma ────────────────────────────────────────────────────
|
|
18
|
+
|
|
19
|
+
/**
 * Map a raw Prisma field type (e.g. "String?", "Int[]") onto the common
 * normalized type vocabulary shared by all parsers.
 * Unrecognized base types (relation targets, enums) become 'unknown'.
 */
function normalizePrismaType(raw) {
  const TYPE_MAP = {
    string: 'string',
    int: 'int',
    bigint: 'int',
    float: 'float',
    decimal: 'float',
    boolean: 'boolean',
    datetime: 'datetime',
    json: 'json',
    bytes: 'bytes',
  }
  // Strip the optional ("?") and list ("[]") markers before lookup.
  const base = raw.replace('?', '').replace('[]', '').toLowerCase()
  return TYPE_MAP[base] ?? 'unknown'
}
|
|
27
|
+
|
|
28
|
+
/**
 * Parse a Prisma schema string into the common intermediate representation:
 * { format, models: [{ name, fields, indexes, relations }], rawSource }.
 *
 * Fixes two defects in the previous version:
 *  - Field line numbers were reported one line too high: `lineNum` was
 *    pre-incremented even though the first `body.split('\n')` segment is
 *    still on the `model ... {` line itself.
 *  - Relations recorded `toModel: f.name` (the field's name) instead of
 *    the referenced model's type name (e.g. `author User` must point at
 *    "User", not "author").
 */
function parsePrisma(source) {
  const models = []
  const modelRegex = /model\s+(\w+)\s*\{([^}]+)\}/g
  let match

  while ((match = modelRegex.exec(source)) !== null) {
    const [, modelName, body] = match
    const fields = []
    const indexes = []
    const relations = []
    // 1-based line number of the `model` keyword, minus one so the
    // pre-increment below lands the first segment on the model line.
    let lineNum = source.slice(0, match.index).split('\n').length - 1

    for (const line of body.split('\n')) {
      lineNum++
      const t = line.trim()
      if (!t || t.startsWith('//')) continue

      // Block-level attributes: composite indexes / unique constraints.
      if (t.startsWith('@@index')) {
        const m = t.match(/\[([^\]]+)\]/)
        if (m) indexes.push({ fields: m[1].split(',').map(s => s.trim()), isUnique: false })
        continue
      }
      if (t.startsWith('@@unique')) {
        const m = t.match(/\[([^\]]+)\]/)
        if (m) indexes.push({ fields: m[1].split(',').map(s => s.trim()), isUnique: true })
        continue
      }
      if (t.startsWith('@@')) continue  // @@map, @@id, etc. — ignored

      const parts = t.split(/\s+/)
      if (parts.length < 2 || parts[0].startsWith('@')) continue

      const [fieldName, rawType] = parts
      const defMatch = t.match(/@default\(([^)]+)\)/)
      const type = normalizePrismaType(rawType)

      // Unknown scalar types are relation fields; the relation target is
      // the *type* name with ?/[] markers stripped.
      if (type === 'unknown') {
        relations.push({
          fromField: fieldName,
          toModel: rawType.replace('?', '').replace('[]', ''),
          type: rawType.endsWith('[]') ? 'one-to-many' : 'one-to-one',
        })
      }

      fields.push({
        name: fieldName,
        type,
        isRequired: !rawType.endsWith('?') && !rawType.endsWith('[]'),
        isUnique: t.includes('@unique'),
        isPrimary: t.includes('@id'),
        isArray: rawType.endsWith('[]'),
        default: defMatch?.[1],
        line: lineNum,
      })
    }

    // Relation fields (type 'unknown') are surfaced via `relations`,
    // not as scalar fields.
    models.push({ name: modelName, fields: fields.filter(f => f.type !== 'unknown'), indexes, relations })
  }

  return { format: 'prisma', models, rawSource: source }
}
|
|
87
|
+
|
|
88
|
+
// ── SQL ───────────────────────────────────────────────────────
|
|
89
|
+
|
|
90
|
+
/**
 * Map a raw SQL column type (e.g. "VARCHAR(255)", "TIMESTAMPTZ") onto the
 * common normalized type vocabulary. Patterns are checked in order, so
 * the first match wins (e.g. "bigint" hits the int rule, not float).
 */
function normalizeSQLType(raw) {
  const PATTERNS = [
    [/varchar|char|text|nvarchar/, 'string'],
    [/int|serial|bigint|smallint/, 'int'],
    [/float|double|decimal|numeric|real/, 'float'],
    [/bool/, 'boolean'],
    [/timestamp|datetime|date/, 'datetime'],
    [/json/, 'json'],
    [/uuid/, 'uuid'],
    [/bytea|blob|binary/, 'bytes'],
  ]
  const lowered = raw.toLowerCase()
  for (const [pattern, normalized] of PATTERNS) {
    if (pattern.test(lowered)) return normalized
  }
  return 'unknown'
}
|
|
102
|
+
|
|
103
|
+
/**
 * Parse CREATE TABLE / CREATE INDEX DDL into the common intermediate
 * representation: { format, models, rawSource }.
 *
 * Limitations visible in the code:
 *  - The table body regex stops at the first `;`, so statements with `;`
 *    inside string defaults would be truncated — TODO confirm acceptable.
 *  - Table-level PRIMARY KEY clauses mark columns by searching `fields`
 *    in order, so they are only recognized when they appear *after* the
 *    column definitions (the usual placement).
 */
function parseSQL(source) {
  const models = []
  // Capture table name (optionally quoted) and everything up to the
  // statement terminator as the column/constraint body.
  const tableRegex = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?["`]?(\w+)["`]?\s*\(([^;]+)\)/gis
  let match

  while ((match = tableRegex.exec(source)) !== null) {
    const [, tableName, body] = match
    const fields = []
    const indexes = []

    // One definition per line; trailing commas are stripped first.
    for (const line of body.split('\n')) {
      const t = line.trim().replace(/,$/, '')
      if (!t) continue
      const up = t.toUpperCase()

      // Table-level PRIMARY KEY (col, ...) — flag the named columns.
      if (up.startsWith('PRIMARY KEY')) {
        const m = t.match(/\(([^)]+)\)/)
        if (m) m[1].split(',').map(s => s.trim()).forEach(col => {
          const f = fields.find(f => f.name === col)
          if (f) f.isPrimary = true
        })
        continue
      }
      // Table-level UNIQUE (col, ...) — recorded as a unique index.
      if (up.startsWith('UNIQUE')) {
        const m = t.match(/\(([^)]+)\)/)
        if (m) indexes.push({ fields: m[1].split(',').map(s => s.trim()), isUnique: true })
        continue
      }
      // Inline INDEX/KEY definitions (MySQL style).
      if (up.startsWith('INDEX') || up.startsWith('KEY')) {
        const m = t.match(/\(([^)]+)\)/)
        if (m) indexes.push({ fields: m[1].split(',').map(s => s.trim()), isUnique: false })
        continue
      }
      // FK and named constraints are ignored; `)` catches the closing line.
      if (up.startsWith('FOREIGN KEY') || up.startsWith('CONSTRAINT') || up.startsWith(')')) continue

      // Column definition: name, data type (with optional size), then
      // the remaining modifiers (NOT NULL, UNIQUE, DEFAULT, ...).
      const colMatch = t.match(/^["`]?(\w+)["`]?\s+(\w+(?:\([^)]*\))?)(.*)/i)
      if (!colMatch) continue
      const [, colName, dataType, rest] = colMatch
      const restUp = rest.toUpperCase()
      const defMatch = rest.match(/DEFAULT\s+([^\s,]+)/i)

      fields.push({
        name: colName,
        type: normalizeSQLType(dataType),
        isRequired: restUp.includes('NOT NULL'),
        isUnique: restUp.includes('UNIQUE'),
        isPrimary: restUp.includes('PRIMARY KEY'),
        isArray: false,
        default: defMatch?.[1],
      })
    }

    models.push({ name: tableName, fields, indexes, relations: [] })
  }

  // Standalone CREATE INDEX statements are attached to their table
  // (matched case-insensitively) after all tables have been collected.
  const idxRegex = /CREATE\s+(UNIQUE\s+)?INDEX\s+\w+\s+ON\s+["`]?(\w+)["`]?\s*\(([^)]+)\)/gis
  while ((match = idxRegex.exec(source)) !== null) {
    const [, unique, tableName, cols] = match
    const model = models.find(m => m.name.toLowerCase() === tableName.toLowerCase())
    if (model) model.indexes.push({
      fields: cols.split(',').map(s => s.trim()),
      isUnique: !!unique,
    })
  }

  return { format: 'sql', models, rawSource: source }
}
|
|
171
|
+
|
|
172
|
+
// ── Mongoose ──────────────────────────────────────────────────
|
|
173
|
+
|
|
174
|
+
/**
 * Map a raw Mongoose type expression (e.g. "String", "Schema.Types.ObjectId",
 * "Mixed") onto the common normalized type vocabulary.
 */
function normalizeMongooseType(raw) {
  const t = raw.toLowerCase()
  if (t.includes('string')) return 'string'
  if (t.includes('number')) return 'int'
  if (t.includes('boolean') || t.includes('bool')) return 'boolean'
  if (t.includes('date')) return 'datetime'
  if (t.includes('buffer')) return 'bytes'
  // Check ObjectId BEFORE the generic "object" test: 'objectid' contains
  // the substring 'object', so the previous ordering misclassified every
  // ObjectId reference as 'json' instead of 'uuid'.
  if (t.includes('objectid')) return 'uuid'
  if (t.includes('mixed') || t.includes('object')) return 'json'
  return 'unknown'
}
|
|
185
|
+
|
|
186
|
+
/**
 * Parse Mongoose schema definitions out of JavaScript source into the
 * common intermediate representation.
 *
 * Approach: (1) collect `mongoose.model('Name', Var)` calls to map schema
 * variables to model names; (2) find each `new Schema({...})` literal and
 * balance braces to extract its body; (3) split the body into top-level
 * key:value blocks with a small string-aware depth scanner; (4) derive a
 * field record from each block.
 */
function parseMongoose(source) {
  const models = []

  // Find model name mappings: mongoose.model('User', UserSchema)
  const modelNameMap = new Map()
  const modelRegex = /(?:mongoose\.)?model\s*\(\s*['"`](\w+)['"`]\s*,\s*(\w+)/g
  let m
  while ((m = modelRegex.exec(source)) !== null) modelNameMap.set(m[2], m[1])

  // Find new Schema({ }) blocks
  const schemaRegex = /(?:const|let|var)\s+(\w+)\s*=\s*new\s+(?:\w+\.)?Schema\s*\(\s*\{/g
  while ((m = schemaRegex.exec(source)) !== null) {
    const varName = m[1]
    // `start` points at the opening `{` of the schema object literal;
    // walk forward counting braces until it is balanced.
    // NOTE(review): this brace counter does not skip string literals, so
    // a brace inside a string (e.g. a regex default) could end the scan
    // early — TODO confirm against real-world schemas.
    const start = m.index + m[0].length - 1
    let depth = 0, i = start
    while (i < source.length) {
      if (source[i] === '{') depth++
      if (source[i] === '}') { depth--; if (depth === 0) break }
      i++
    }
    const body = source.slice(start + 1, i)
    const fields = []

    // Parse top-level fields — collect each field's full block (handles multi-line)
    // Strategy: scan char by char, collect top-level key:value pairs.
    // SKIP holds option keys that can appear as nested object keys and
    // must never be mistaken for field names.
    const SKIP = new Set(['type', 'default', 'required', 'unique', 'index', 'ref',
      'min', 'max', 'enum', 'select', 'trim', 'lowercase', 'uppercase', 'match',
      'validate', 'get', 'set', 'alias', 'keys', 'endpoint', 'auth', 'p256dh',
      'minLen', 'maxLen', 'expires', 'sparse', 'immutable', 'of'])

    // Split body into top-level field blocks by tracking brace depth;
    // commas inside strings or nested objects do not split.
    const topLevelFields = []
    let depth2 = 0, current = '', inStr = false, strChar = ''
    for (let ci = 0; ci < body.length; ci++) {
      const ch = body[ci]
      if (inStr) {
        current += ch
        // Close the string on an unescaped matching quote.
        if (ch === strChar && body[ci - 1] !== '\\') inStr = false
        continue
      }
      if (ch === '"' || ch === "'") { inStr = true; strChar = ch; current += ch; continue }
      if (ch === '{') depth2++
      if (ch === '}') depth2--
      if (ch === ',' && depth2 === 0) {
        if (current.trim()) topLevelFields.push(current.trim())
        current = ''
        continue
      }
      current += ch
    }
    if (current.trim()) topLevelFields.push(current.trim())

    for (const block of topLevelFields) {
      // Extract field name — first word before colon
      const nameMatch = block.match(/^(\w+)\s*:/)
      if (!nameMatch) continue
      const fieldName = nameMatch[1]
      if (SKIP.has(fieldName)) continue

      // Get everything after the first colon
      const afterColon = block.slice(block.indexOf(':') + 1).trim()
      const isArr = afterColon.startsWith('[')

      // Check if this is a nested subdocument (pure object with no type key at top level)
      // e.g. subscription: { endpoint: {...}, keys: {...} }
      // vs username: { type: String, required: true }
      const isNestedSubdoc = afterColon.startsWith('{') &&
        !/type\s*:/.test(afterColon) &&
        // has nested objects inside
        /:\s*\{/.test(afterColon)

      // Nested subdocuments are skipped entirely — their inner fields
      // are not surfaced as model fields.
      if (isNestedSubdoc) continue

      // Prefer an explicit `type:` option; otherwise treat the bare
      // value (e.g. `name: String`) as the type expression.
      const typeMatch = afterColon.match(/type\s*:\s*([A-Za-z]+)/)
      const rawType = typeMatch ? typeMatch[1] : afterColon.replace(/[{},]/g, '').trim().split(/\s/)[0]

      const isUniq = /unique\s*:\s*true/.test(afterColon)
      const isReq = /required\s*:\s*true/.test(afterColon)
      const defMatch = afterColon.match(/default\s*:\s*([^,}\n]+)/)

      fields.push({
        name: fieldName,
        type: normalizeMongooseType(rawType),
        isRequired: isReq,
        isUnique: isUniq,
        isPrimary: fieldName === '_id',
        isArray: isArr,
        default: defMatch?.[1]?.trim(),
      })
    }

    // Auto-add _id: Mongoose adds one implicitly when not declared.
    if (!fields.find(f => f.name === '_id')) {
      fields.unshift({ name: '_id', type: 'uuid', isRequired: true, isUnique: true, isPrimary: true, isArray: false })
    }

    // Find `<schemaVar>.index({...})` calls anywhere in the source.
    // The unique flag is sniffed from the 100 chars after the match,
    // where the options object would appear.
    const indexes = []
    const idxRegex = new RegExp(`${varName}\\.index\\s*\\(\\s*\\{([^}]+)\\}`, 'g')
    let im
    while ((im = idxRegex.exec(source)) !== null) {
      const idxFields = [...im[1].matchAll(/(\w+)\s*:/g)].map(x => x[1])
      indexes.push({ fields: idxFields, isUnique: /unique\s*:\s*true/.test(source.slice(im.index, im.index + 100)) })
    }

    models.push({ name: modelNameMap.get(varName) ?? varName, fields, indexes, relations: [] })
  }

  return { format: 'mongoose', models, rawSource: source }
}
|
|
296
|
+
|
|
297
|
+
// ── Drizzle ───────────────────────────────────────────────────
|
|
298
|
+
|
|
299
|
+
/**
 * Map a Drizzle column builder expression (e.g. "varchar('name', ...)")
 * onto the common normalized type vocabulary. Patterns are checked in
 * order; the first match wins.
 */
function normalizeDrizzleType(def) {
  const PATTERNS = [
    [/varchar|text|char\(/, 'string'],
    [/integer|serial|bigint|smallint/, 'int'],
    [/real|numeric|decimal|double|float/, 'float'],
    [/boolean/, 'boolean'],
    [/timestamp|date|time/, 'datetime'],
    [/jsonb|json/, 'json'],
    [/uuid/, 'uuid'],
  ]
  const lowered = def.toLowerCase()
  for (const [pattern, normalized] of PATTERNS) {
    if (pattern.test(lowered)) return normalized
  }
  return 'unknown'
}
|
|
310
|
+
|
|
311
|
+
/**
 * Parse Drizzle ORM table definitions (pgTable/mysqlTable/sqliteTable)
 * into the common intermediate representation. Flags are sniffed from
 * the builder-chain text (.notNull(), .unique(), .primaryKey(), .default()).
 */
function parseDrizzle(source) {
  const models = []
  const tableRegex = /(?:pgTable|mysqlTable|sqliteTable)\s*\(\s*['"`](\w+)['"`]\s*,\s*\{([^}]+)\}/g

  for (let match = tableRegex.exec(source); match !== null; match = tableRegex.exec(source)) {
    const tableName = match[1]
    const columnsBody = match[2]
    const fields = []

    for (const rawLine of columnsBody.split('\n')) {
      const line = rawLine.trim().replace(/,$/, '')
      if (!line || line.startsWith('//')) continue

      // Each column is `name: builderExpression`.
      const fieldMatch = line.match(/^(\w+)\s*:\s*(.+)/)
      if (!fieldMatch) continue

      const fieldName = fieldMatch[1]
      const builder = fieldMatch[2]
      fields.push({
        name: fieldName,
        type: normalizeDrizzleType(builder),
        isRequired: builder.includes('.notNull()'),
        isUnique: builder.includes('.unique()'),
        isPrimary: builder.includes('primaryKey') || builder.includes('.primaryKey()'),
        isArray: false,
        default: builder.match(/\.default\(([^)]+)\)/)?.[1],
      })
    }

    models.push({ name: tableName, fields, indexes: [], relations: [] })
  }

  return { format: 'drizzle', models, rawSource: source }
}
|
|
343
|
+
|
|
344
|
+
// ── Auto-detect + dispatch ────────────────────────────────────
|
|
345
|
+
|
|
346
|
+
/**
 * Best-effort schema format detection: file extension first, then
 * content sniffing. Throws when nothing matches so the caller can ask
 * for an explicit --format flag.
 */
function detectFormat(filePath, source) {
  switch (path.extname(filePath).toLowerCase()) {
    case '.prisma': return 'prisma'
    case '.sql': return 'sql'
  }
  if (/(?:pgTable|mysqlTable|sqliteTable)\s*\(/.test(source)) return 'drizzle'
  if (/new\s+(?:\w+\.)?Schema\s*\(/.test(source)) return 'mongoose'
  if (/^CREATE\s+TABLE/im.test(source)) return 'sql'
  if (/^model\s+\w+\s*\{/m.test(source)) return 'prisma'
  throw new Error(`Cannot detect format for "${filePath}". Use --format=prisma|drizzle|sql|mongoose`)
}
|
|
356
|
+
|
|
357
|
+
/**
 * Read a schema file from disk and dispatch to the matching parser.
 * @param filePath path to the schema file
 * @param forceFormat optional explicit format overriding auto-detection
 * @throws when the file is missing or the format is unsupported
 */
function parseSchema(filePath, forceFormat) {
  if (!fs.existsSync(filePath)) throw new Error(`File not found: ${filePath}`)
  const source = fs.readFileSync(filePath, 'utf-8')
  const format = forceFormat ?? detectFormat(filePath, source)

  const parsers = {
    prisma: parsePrisma,
    sql: parseSQL,
    drizzle: parseDrizzle,
    mongoose: parseMongoose,
  }
  const parser = parsers[format]
  if (!parser) throw new Error(`Unsupported format: ${format}`)
  return parser(source)
}
|
|
369
|
+
|
|
370
|
+
// ═══════════════════════════════════════════════════════════════
|
|
371
|
+
// LINT RULES
|
|
372
|
+
// ═══════════════════════════════════════════════════════════════
|
|
373
|
+
|
|
374
|
+
// Name suffixes that mark a field as a probable foreign key.
const FK_SUFFIXES = ['Id', '_id', 'Fk', '_fk']

// Identifier-like names that merely *look* like foreign keys
// (opaque tokens, tracing IDs, external references) and must not
// trigger the FK-index rule.
const FK_EXCLUSION_PATTERNS = [
  /[Pp]ublic[Ii][dD]$/,
  /[Cc]lient[Ii][dD]$/,
  /[Ss]ession[Ii][dD]$/,
  /[Rr]equest[Ii][dD]$/,
  /[Cc]orrelation[Ii][dD]$/,
  /[Tt]race[Ii][dD]$/,
  /[Ee]xternal[Ii][dD]$/,
]

/** True when the field name matches a known non-FK identifier pattern. */
function isFKExclusion(name) {
  for (const pattern of FK_EXCLUSION_PATTERNS) {
    if (pattern.test(name)) return true
  }
  return false
}
|
|
385
|
+
|
|
386
|
+
/** Whether any index on the model (unique or not) covers fieldName. */
function hasIndexOn(model, fieldName) {
  for (const idx of model.indexes) {
    if (idx.fields.includes(fieldName)) return true
  }
  return false
}
|
|
389
|
+
|
|
390
|
+
/** A field counts as primary when flagged, or named id/_id by convention. */
function isPrimaryField(f) {
  return f.isPrimary || ['id', '_id'].includes(f.name)
}
|
|
393
|
+
|
|
394
|
+
// Lint rule registry. Each rule exposes `check(schema)` returning an array
// of issue objects: { ruleId, severity, model, field?, message, suggestion }.
// Rules are run best-effort by analyze(); a crashing rule is skipped.
const RULES = [

  // SC001 — Missing primary key
  {
    id: 'SC001', name: 'Missing primary key', severity: 'error',
    check(schema) {
      return schema.models
        .filter(m => !m.fields.some(isPrimaryField))
        .map(m => ({
          ruleId: 'SC001', severity: 'error', model: m.name,
          message: `"${m.name}" has no primary key field`,
          suggestion: `Add: id String @id @default(cuid())`,
        }))
    },
  },

  // SC002 — FK field missing index
  {
    id: 'SC002', name: 'Foreign key missing index', severity: 'error',
    check(schema) {
      const issues = []
      for (const model of schema.models) {
        // Heuristic: non-primary fields ending in an FK suffix (and the
        // suffix must be a *proper* suffix — name strictly longer) are
        // treated as foreign keys unless excluded (sessionId, traceId, ...).
        // NOTE(review): only model.indexes is consulted; @unique fields have
        // implicit indexes in many databases and may be flagged anyway —
        // confirm whether that is intentional.
        const fkFields = model.fields.filter(f =>
          !isPrimaryField(f) && !isFKExclusion(f.name) && FK_SUFFIXES.some(s => f.name.endsWith(s) && f.name.length > s.length)
        )
        for (const field of fkFields) {
          if (!hasIndexOn(model, field.name)) {
            issues.push({
              ruleId: 'SC002', severity: 'error', model: model.name, field: field.name,
              message: `"${model.name}.${field.name}" looks like a foreign key but has no index`,
              suggestion: `Add @@index([${field.name}]) to "${model.name}"`,
            })
          }
        }
      }
      return issues
    },
  },

  // SC003 — Missing timestamps (createdAt/updatedAt checked independently)
  {
    id: 'SC003', name: 'Missing timestamps', severity: 'warning',
    check(schema) {
      const issues = []
      const CREATED = ['createdat', 'created_at', 'createdate']
      const UPDATED = ['updatedat', 'updated_at', 'updatedate']
      for (const m of schema.models) {
        const names = m.fields.map(f => f.name.toLowerCase())
        if (!CREATED.some(t => names.includes(t)))
          issues.push({
            ruleId: 'SC003', severity: 'warning', model: m.name,
            message: `"${m.name}" is missing a createdAt timestamp`,
            suggestion: `Add: createdAt DateTime @default(now())`
          })
        if (!UPDATED.some(t => names.includes(t)))
          issues.push({
            ruleId: 'SC003', severity: 'warning', model: m.name,
            message: `"${m.name}" is missing an updatedAt timestamp`,
            suggestion: `Add: updatedAt DateTime @updatedAt`
          })
      }
      return issues
    },
  },

  // SC004 — No soft delete
  {
    id: 'SC004', name: 'No soft delete field', severity: 'info',
    check(schema) {
      const SOFT = ['deletedat', 'deleted_at', 'isdeleted', 'is_deleted', 'archivedat']
      return schema.models
        .filter(m => !m.fields.some(f => SOFT.includes(f.name.toLowerCase())))
        .map(m => ({
          ruleId: 'SC004', severity: 'info', model: m.name,
          message: `"${m.name}" has no soft delete field`,
          suggestion: `Consider: deletedAt DateTime? — allows recovery without data loss`,
        }))
    },
  },

  // SC005 — God model (more than 20 fields)
  {
    id: 'SC005', name: 'God model — too many fields', severity: 'warning',
    check(schema) {
      return schema.models
        .filter(m => m.fields.length > 20)
        .map(m => ({
          ruleId: 'SC005', severity: 'warning', model: m.name,
          message: `"${m.name}" has ${m.fields.length} fields — may be doing too much`,
          suggestion: `Split into separate models or move related fields into a JSON column`,
        }))
    },
  },

  // SC006 — Unbounded string (SQL only; other formats have no TEXT-vs-VARCHAR
  // distinction after normalization)
  {
    id: 'SC006', name: 'Unbounded string field', severity: 'warning',
    check(schema) {
      if (schema.format !== 'sql') return []
      const issues = []
      for (const m of schema.models)
        for (const f of m.fields)
          if (f.type === 'string' && !f.isPrimary && !f.isUnique)
            issues.push({
              ruleId: 'SC006', severity: 'warning', model: m.name, field: f.name,
              message: `"${m.name}.${f.name}" is an unbounded TEXT — consider VARCHAR(n)`,
              suggestion: `Use VARCHAR(255) or appropriate limit if max length is known`
            })
      return issues
    },
  },

  // SC007 — Email not unique (field-level @unique or a unique index both count)
  {
    id: 'SC007', name: 'Email field not unique', severity: 'error',
    check(schema) {
      const issues = []
      const EMAIL_NAMES = ['email', 'emailaddress', 'email_address', 'useremail']
      for (const m of schema.models) {
        for (const f of m.fields) {
          if (!EMAIL_NAMES.includes(f.name.toLowerCase())) continue
          const uniqueIdx = m.indexes.some(i => i.isUnique && i.fields.includes(f.name))
          if (!f.isUnique && !uniqueIdx)
            issues.push({
              ruleId: 'SC007', severity: 'error', model: m.name, field: f.name,
              message: `"${m.name}.${f.name}" should have a unique constraint`,
              suggestion: `Add @unique to the field or @@unique([${f.name}]) at model level`
            })
        }
      }
      return issues
    },
  },

  // SC008 — Inconsistent naming (camelCase and snake_case mixed in one model)
  {
    id: 'SC008', name: 'Inconsistent field naming', severity: 'warning',
    check(schema) {
      const issues = []
      for (const m of schema.models) {
        const names = m.fields.map(f => f.name)
        // camelCase requires at least one interior uppercase letter.
        const camel = names.filter(n => /^[a-z][a-zA-Z0-9]*$/.test(n) && /[A-Z]/.test(n)).length
        const snake = names.filter(n => /^[a-z][a-z0-9_]*_[a-z]/.test(n)).length
        if (camel > 0 && snake > 0)
          issues.push({
            ruleId: 'SC008', severity: 'warning', model: m.name,
            message: `"${m.name}" mixes camelCase and snake_case field names`,
            suggestion: `Pick one: camelCase (userId) or snake_case (user_id)`
          })
      }
      return issues
    },
  },

  // SC009 — Poor boolean naming (no is/has/can-style prefix)
  {
    id: 'SC009', name: 'Boolean field unclear name', severity: 'info',
    check(schema) {
      const issues = []
      const PREFIXES = ['is', 'has', 'can', 'should', 'allow', 'enable', 'show', 'hide']
      for (const m of schema.models)
        for (const f of m.fields)
          if (f.type === 'boolean' && !PREFIXES.some(p => f.name.toLowerCase().startsWith(p)))
            issues.push({
              ruleId: 'SC009', severity: 'info', model: m.name, field: f.name,
              message: `"${m.name}.${f.name}" is a boolean but doesn't start with is/has/can`,
              suggestion: `Rename to "is${f.name[0].toUpperCase()}${f.name.slice(1)}"`
            })
      return issues
    },
  },

  // SC010 — JSON field in relational schema (skipped for Mongoose, where
  // schemaless blobs are the norm)
  {
    id: 'SC010', name: 'JSON field — consider typed relation', severity: 'info',
    check(schema) {
      if (schema.format === 'mongoose') return []
      return schema.models.flatMap(m =>
        m.fields
          .filter(f => f.type === 'json')
          .map(f => ({
            ruleId: 'SC010', severity: 'info', model: m.name, field: f.name,
            message: `"${m.name}.${f.name}" stores JSON — ensure this is intentional`,
            suggestion: `If data has a consistent shape, model it as a related table`
          }))
      )
    },
  },

  // SC011 — Missing tenantId consistency (multi-tenancy smell): only fires
  // when *some* models carry a tenant field and others do not.
  {
    id: 'SC011', name: 'Inconsistent multi-tenancy', severity: 'warning',
    check(schema) {
      if (schema.models.length < 2) return []
      const TENANT_FIELDS = ['tenantid', 'tenant_id', 'orgid', 'org_id', 'organizationid', 'workspace_id']
      const modelsWithTenant = schema.models.filter(m =>
        m.fields.some(f => TENANT_FIELDS.includes(f.name.toLowerCase()))
      )
      const modelsWithout = schema.models.filter(m =>
        !m.fields.some(f => TENANT_FIELDS.includes(f.name.toLowerCase()))
      )
      if (modelsWithTenant.length > 0 && modelsWithout.length > 0)
        return modelsWithout.map(m => ({
          ruleId: 'SC011', severity: 'warning', model: m.name,
          message: `"${m.name}" is missing a tenantId while other models have one`,
          suggestion: `Add tenantId to all models if building a multi-tenant SaaS`
        }))
      return []
    },
  },

]
|
|
606
|
+
|
|
607
|
+
// ═══════════════════════════════════════════════════════════════
|
|
608
|
+
// SCORER
|
|
609
|
+
// ═══════════════════════════════════════════════════════════════
|
|
610
|
+
|
|
611
|
+
/**
 * Run every lint rule against the parsed schema and compute the overall
 * health report: score (0-100), letter grade, per-model issue groups,
 * and severity counts.
 */
function analyze(schema) {
  const issues = []
  for (const rule of RULES) {
    // Best-effort: a crashing rule must not abort the whole analysis.
    try { issues.push(...rule.check(schema)) } catch { /* skip broken rule */ }
  }

  const WEIGHTS = { error: 10, warning: 4, info: 1 }
  let penalty = 0
  for (const issue of issues) penalty += WEIGHTS[issue.severity]

  let fieldTotal = 0
  for (const m of schema.models) fieldTotal += m.fields.length

  // Scale the penalty ceiling by both model and field count so small
  // schemas are graded fairly; floor both at 1 to avoid division by zero.
  const modelCount = Math.max(schema.models.length, 1)
  const fieldCount = Math.max(fieldTotal, 1)
  const maxPenalty = Math.max(modelCount * 20, fieldCount * 2)
  const capped = Math.min(penalty, maxPenalty)
  const score = Math.max(0, Math.round(100 - (capped / maxPenalty) * 100))

  let grade = 'F'
  if (score >= 90) grade = 'A'
  else if (score >= 75) grade = 'B'
  else if (score >= 60) grade = 'C'
  else if (score >= 40) grade = 'D'

  // Group issues by model name; issues without a model go under '_global'.
  const issuesByModel = {}
  for (const issue of issues) {
    const key = issue.model ?? '_global'
    if (!issuesByModel[key]) issuesByModel[key] = []
    issuesByModel[key].push(issue)
  }

  return {
    format: schema.format,
    score,
    grade,
    totalModels: schema.models.length,
    totalFields: fieldTotal,
    issues,
    issuesByModel,
    summary: {
      errors: issues.filter(i => i.severity === 'error').length,
      warnings: issues.filter(i => i.severity === 'warning').length,
      infos: issues.filter(i => i.severity === 'info').length,
    },
  }
}
|
|
645
|
+
|
|
646
|
+
// ═══════════════════════════════════════════════════════════════
|
|
647
|
+
// REPORTER
|
|
648
|
+
// ═══════════════════════════════════════════════════════════════
|
|
649
|
+
|
|
650
|
+
// ANSI escape codes used by the terminal reporter.
const C = {
  reset: '\x1b[0m', bold: '\x1b[1m', dim: '\x1b[2m',
  red: '\x1b[31m', yellow: '\x1b[33m', blue: '\x1b[34m',
  green: '\x1b[32m', cyan: '\x1b[36m', white: '\x1b[37m',
  bgRed: '\x1b[41m', bgGreen: '\x1b[42m', bgYellow: '\x1b[43m',
}
// Compose ANSI codes into a text wrapper: cc(C.bold, C.red)('text')
// prefixes the codes and appends a reset.
const cc = (...codes) => text => codes.join('') + text + C.reset
|
|
657
|
+
|
|
658
|
+
// Pick a terminal color for a 0-100 health score (green best, red worst).
function scoreColor(s) {
  if (s >= 90) return C.green
  if (s >= 75) return C.cyan
  if (s >= 60) return C.yellow
  return C.red
}
|
|
659
|
+
|
|
660
|
+
// Render a fixed-width unicode progress bar for the score; the filled
// portion takes the score color and the remainder is dimmed.
function scoreBar(score, w = 28) {
  const filled = Math.round((score / 100) * w)
  const bar = '█'.repeat(filled) + C.dim + '░'.repeat(w - filled)
  return scoreColor(score) + bar + C.reset
}
|
|
664
|
+
|
|
665
|
+
// Colored severity tag shown next to each reported issue.
function severityBadge(sev) {
  switch (sev) {
    case 'error':
      return C.bold + C.bgRed + C.white + ' ERR ' + C.reset
    case 'warning':
      return C.bold + C.bgYellow + ' WRN ' + C.reset
    default:
      return C.bold + C.blue + ' INF ' + C.reset
  }
}
|
|
670
|
+
|
|
671
|
+
/**
 * Pretty terminal reporter: header, score bar, stats line, then issues
 * grouped by model (or a success banner when there are none).
 * Writes everything to stdout in one call.
 */
function printReport(report, filePath) {
  const hr = C.dim + '─'.repeat(58) + C.reset
  const out = []

  // Header: tool name, version, and the analyzed file path.
  out.push('')
  out.push(C.bold + C.cyan + ' schemacheck' + C.reset + C.dim + ' v0.1.0' + C.reset)
  out.push(C.dim + ` ${filePath}` + C.reset)
  out.push('')
  out.push(hr)
  out.push('')

  // Score line: numeric score, progress bar, and letter grade, all in
  // the score's color.
  const col = scoreColor(report.score)
  out.push(
    ` ${C.bold}Health score${C.reset} ` +
    col + C.bold + report.score + C.reset + C.dim + '/100' + C.reset +
    ' ' + scoreBar(report.score) +
    ' ' + col + C.bold + `Grade ${report.grade}` + C.reset
  )
  out.push('')
  out.push(
    C.dim + ' Format: ' + C.reset + C.white + report.format + C.reset +
    C.dim + ' Models: ' + C.reset + C.white + report.totalModels + C.reset +
    C.dim + ' Fields: ' + C.reset + C.white + report.totalFields + C.reset
  )
  out.push('')
  out.push(hr)

  if (report.issues.length === 0) {
    out.push('')
    out.push(C.green + C.bold + ' ✓ No issues found — schema looks healthy!' + C.reset)
    out.push('')
  } else {
    // Severity summary with plural handling, then each model's issues.
    out.push('')
    out.push(
      ` ${C.red + C.bold + report.summary.errors + ' error' + (report.summary.errors !== 1 ? 's' : '') + C.reset}` +
      ` ${C.yellow + C.bold + report.summary.warnings + ' warning' + (report.summary.warnings !== 1 ? 's' : '') + C.reset}` +
      ` ${C.blue + report.summary.infos + ' suggestion' + (report.summary.infos !== 1 ? 's' : '') + C.reset}`
    )
    out.push('')

    for (const [modelName, issues] of Object.entries(report.issuesByModel)) {
      out.push(` ${C.bold + C.white + modelName + C.reset}`)
      for (const issue of issues) {
        out.push(` ${severityBadge(issue.severity)} ${C.dim}[${issue.ruleId}]${C.reset} ${issue.message}`)
        out.push(` ${C.dim} ↳ ${issue.suggestion}${C.reset}`)
        out.push('')
      }
    }
  }

  out.push(hr)
  out.push('')
  process.stdout.write(out.join('\n'))
}
|
|
725
|
+
|
|
726
|
+
function printJSON(report) {
  // Machine-readable mode: emit the whole report as pretty-printed JSON
  // (2-space indent) followed by a trailing newline.
  console.log(JSON.stringify(report, null, 2))
}
|
|
729
|
+
|
|
730
|
+
function printCI(report, filePath) {
  // CI mode: a two-line summary followed by one GitHub-Actions-style
  // annotation (`::error`/`::warning title=…::…`) per issue.
  const out = []
  out.push(`schemacheck: ${filePath}`)
  out.push(`score=${report.score} grade=${report.grade} errors=${report.summary.errors} warnings=${report.summary.warnings}`)
  for (const issue of report.issues) {
    // Location is "Model" or "Model.field" — skip whichever part is absent.
    const location = [issue.model, issue.field].filter(Boolean).join('.')
    // Non-error severities (warning, info) all surface as CI warnings.
    const level = issue.severity === 'error' ? 'error' : 'warning'
    out.push(`::${level} title=${issue.ruleId}::${location}: ${issue.message}`)
  }
  process.stdout.write(out.join('\n') + '\n')
}
|
|
741
|
+
|
|
742
|
+
// ═══════════════════════════════════════════════════════════════
|
|
743
|
+
// CLI
|
|
744
|
+
// ═══════════════════════════════════════════════════════════════
|
|
745
|
+
|
|
746
|
+
function printHelp() {
  // Usage text. Fixed to reference the real entry point: the package's
  // bin is "schemacheck" -> cli.js; the previously-advertised run.js is
  // not part of the published package.
  console.log(`
  schemacheck — database schema health analyzer

  Usage:
    schemacheck <schema-file> [options]
    node cli.js <schema-file> [options]

  Options:
    --format=<fmt>     Force: prisma | drizzle | sql | mongoose
    --output=<fmt>     Output: terminal (default) | json | ci
    --fail-below=<n>   Exit 1 if score < n (great for CI)
    --help, -h         Show help

  Examples:
    schemacheck ./prisma/schema.prisma
    schemacheck ./schema.sql --output=json
    schemacheck ./schema.prisma --fail-below=80 --output=ci
`)
}
|
|
766
|
+
|
|
767
|
+
function main() {
  // CLI entry point: parse argv, analyze the schema, print the report,
  // and set the process exit code for CI gating.
  const argv = process.argv.slice(2)

  if (argv.length === 0 || argv.includes('--help') || argv.includes('-h')) {
    printHelp()
    process.exit(0)
  }

  // First non-flag argument is the schema file path.
  const filePath = argv.find(a => !a.startsWith('-'))
  const formatArg = argv.find(a => a.startsWith('--format='))?.split('=')[1]
  const outputArg = argv.find(a => a.startsWith('--output='))?.split('=')[1] ?? 'terminal'
  const failBelowArg = argv.find(a => a.startsWith('--fail-below='))?.split('=')[1]

  if (!filePath) { console.error(' Error: no schema file provided. Run --help'); process.exit(1) }

  // Validate the threshold up front. Previously a typo like
  // --fail-below=abc produced NaN, and `score < NaN` is always false,
  // silently disabling the CI gate. Also: always pass a radix to parseInt.
  let failBelow
  if (failBelowArg !== undefined) {
    failBelow = Number.parseInt(failBelowArg, 10)
    if (Number.isNaN(failBelow)) {
      console.error(` Error: --fail-below expects a number, got "${failBelowArg}"`)
      process.exit(1)
    }
  }

  try {
    const schema = parseSchema(path.resolve(filePath), formatArg)
    const report = analyze(schema)

    if (outputArg === 'json') printJSON(report)
    else if (outputArg === 'ci') printCI(report, filePath)
    else printReport(report, filePath)

    // Exit policy: an explicit --fail-below threshold takes precedence;
    // without one, any error-severity issue fails the run.
    if (failBelow !== undefined && report.score < failBelow) { process.exit(1) }
    if (failBelow === undefined && report.summary.errors > 0) { process.exit(1) }

  } catch (err) {
    console.error(`\n Error: ${err.message}\n`)
    process.exit(1)
  }
}

main()
|
package/package.json
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@gaurav.codes/schemacheck",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Database schema health analyzer for Prisma, Drizzle, SQL and Mongoose — catch missing indexes, bad patterns and anti-patterns before they hit production",
|
|
5
|
+
"author": "your-actual-name",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"homepage": "https://github.com/your-username/schemacheck",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "git+https://github.com/your-username/schemacheck.git"
|
|
11
|
+
},
|
|
12
|
+
"bugs": {
|
|
13
|
+
"url": "https://github.com/your-username/schemacheck/issues"
|
|
14
|
+
},
|
|
15
|
+
"keywords": [
|
|
16
|
+
"schema",
|
|
17
|
+
"linter",
|
|
18
|
+
"prisma",
|
|
19
|
+
"drizzle",
|
|
20
|
+
"sql",
|
|
21
|
+
"mongoose",
|
|
22
|
+
"database",
|
|
23
|
+
"health",
|
|
24
|
+
"analyzer",
|
|
25
|
+
"cli",
|
|
26
|
+
"orm"
|
|
27
|
+
],
|
|
28
|
+
"bin": {
|
|
29
|
+
"schemacheck": "cli.js"
|
|
30
|
+
},
|
|
31
|
+
"main": "./run.js",
|
|
32
|
+
"files": [
|
|
33
|
+
"run.js",
|
|
34
|
+
"README.md",
|
|
35
|
+
"LICENSE"
|
|
36
|
+
],
|
|
37
|
+
"engines": {
|
|
38
|
+
"node": ">=16.0.0"
|
|
39
|
+
},
|
|
40
|
+
"scripts": {
|
|
41
|
+
"build": "tsc",
|
|
42
|
+
"build:watch": "tsc --watch",
|
|
43
|
+
"dev": "ts-node src/cli.ts",
|
|
44
|
+
"test": "jest",
|
|
45
|
+
"test:watch": "jest --watch",
|
|
46
|
+
"test:coverage": "jest --coverage",
|
|
47
|
+
"lint": "eslint src/**/*.ts",
|
|
48
|
+
"prepublishOnly": "echo 'ready to publish'",
|
|
49
|
+
"start": "node run.js"
|
|
50
|
+
},
|
|
51
|
+
"devDependencies": {
|
|
52
|
+
"@types/jest": "^29.5.0",
|
|
53
|
+
"@types/node": "^20.0.0",
|
|
54
|
+
"@typescript-eslint/eslint-plugin": "^7.0.0",
|
|
55
|
+
"@typescript-eslint/parser": "^7.0.0",
|
|
56
|
+
"eslint": "^8.57.0",
|
|
57
|
+
"jest": "^29.5.0",
|
|
58
|
+
"node-sql-parser": "^4.18.0",
|
|
59
|
+
"ts-jest": "^29.1.0",
|
|
60
|
+
"ts-morph": "^21.0.1",
|
|
61
|
+
"ts-node": "^10.9.0",
|
|
62
|
+
"typescript": "^5.4.0"
|
|
63
|
+
},
|
|
64
|
+
"jest": {
|
|
65
|
+
"preset": "ts-jest",
|
|
66
|
+
"testEnvironment": "node",
|
|
67
|
+
"testMatch": [
|
|
68
|
+
"**/tests/**/*.test.ts"
|
|
69
|
+
],
|
|
70
|
+
"collectCoverageFrom": [
|
|
71
|
+
"src/**/*.ts"
|
|
72
|
+
]
|
|
73
|
+
}
|
|
74
|
+
}
|