@sqldoc/templates 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +161 -0
- package/src/__tests__/dedent.test.ts +45 -0
- package/src/__tests__/docker-templates.test.ts +134 -0
- package/src/__tests__/go-structs.test.ts +184 -0
- package/src/__tests__/naming.test.ts +48 -0
- package/src/__tests__/python-dataclasses.test.ts +185 -0
- package/src/__tests__/rust-structs.test.ts +176 -0
- package/src/__tests__/tags-helpers.test.ts +72 -0
- package/src/__tests__/type-mapping.test.ts +332 -0
- package/src/__tests__/typescript.test.ts +202 -0
- package/src/cobol-copybook/index.ts +220 -0
- package/src/cobol-copybook/test/.gitignore +6 -0
- package/src/cobol-copybook/test/Dockerfile +7 -0
- package/src/csharp-records/index.ts +131 -0
- package/src/csharp-records/test/.gitignore +6 -0
- package/src/csharp-records/test/Dockerfile +6 -0
- package/src/diesel/index.ts +247 -0
- package/src/diesel/test/.gitignore +6 -0
- package/src/diesel/test/Dockerfile +16 -0
- package/src/drizzle/index.ts +255 -0
- package/src/drizzle/test/.gitignore +6 -0
- package/src/drizzle/test/Dockerfile +8 -0
- package/src/drizzle/test/test.ts +71 -0
- package/src/efcore/index.ts +190 -0
- package/src/efcore/test/.gitignore +6 -0
- package/src/efcore/test/Dockerfile +7 -0
- package/src/go-structs/index.ts +119 -0
- package/src/go-structs/test/.gitignore +6 -0
- package/src/go-structs/test/Dockerfile +13 -0
- package/src/go-structs/test/test.go +71 -0
- package/src/gorm/index.ts +134 -0
- package/src/gorm/test/.gitignore +6 -0
- package/src/gorm/test/Dockerfile +13 -0
- package/src/gorm/test/test.go +65 -0
- package/src/helpers/atlas.ts +43 -0
- package/src/helpers/enrich.ts +396 -0
- package/src/helpers/naming.ts +19 -0
- package/src/helpers/tags.ts +63 -0
- package/src/index.ts +24 -0
- package/src/java-records/index.ts +179 -0
- package/src/java-records/test/.gitignore +6 -0
- package/src/java-records/test/Dockerfile +11 -0
- package/src/java-records/test/Test.java +93 -0
- package/src/jpa/index.ts +279 -0
- package/src/jpa/test/.gitignore +6 -0
- package/src/jpa/test/Dockerfile +14 -0
- package/src/jpa/test/Test.java +111 -0
- package/src/json-schema/index.ts +351 -0
- package/src/json-schema/test/.gitignore +6 -0
- package/src/json-schema/test/Dockerfile +18 -0
- package/src/knex/index.ts +168 -0
- package/src/knex/test/.gitignore +6 -0
- package/src/knex/test/Dockerfile +7 -0
- package/src/knex/test/test.ts +75 -0
- package/src/kotlin-data/index.ts +147 -0
- package/src/kotlin-data/test/.gitignore +6 -0
- package/src/kotlin-data/test/Dockerfile +14 -0
- package/src/kotlin-data/test/Test.kt +82 -0
- package/src/kysely/index.ts +165 -0
- package/src/kysely/test/.gitignore +6 -0
- package/src/kysely/test/Dockerfile +8 -0
- package/src/kysely/test/test.ts +82 -0
- package/src/prisma/index.ts +387 -0
- package/src/prisma/test/.gitignore +6 -0
- package/src/prisma/test/Dockerfile +7 -0
- package/src/protobuf/index.ts +219 -0
- package/src/protobuf/test/.gitignore +6 -0
- package/src/protobuf/test/Dockerfile +6 -0
- package/src/pydantic/index.ts +272 -0
- package/src/pydantic/test/.gitignore +6 -0
- package/src/pydantic/test/Dockerfile +8 -0
- package/src/pydantic/test/test.py +63 -0
- package/src/python-dataclasses/index.ts +217 -0
- package/src/python-dataclasses/test/.gitignore +6 -0
- package/src/python-dataclasses/test/Dockerfile +8 -0
- package/src/python-dataclasses/test/test.py +63 -0
- package/src/rust-structs/index.ts +152 -0
- package/src/rust-structs/test/.gitignore +6 -0
- package/src/rust-structs/test/Dockerfile +22 -0
- package/src/rust-structs/test/test.rs +82 -0
- package/src/sqlalchemy/index.ts +258 -0
- package/src/sqlalchemy/test/.gitignore +6 -0
- package/src/sqlalchemy/test/Dockerfile +8 -0
- package/src/sqlalchemy/test/test.py +61 -0
- package/src/sqlc/index.ts +148 -0
- package/src/sqlc/test/.gitignore +6 -0
- package/src/sqlc/test/Dockerfile +13 -0
- package/src/sqlc/test/test.go +91 -0
- package/src/tags/dedent.ts +28 -0
- package/src/tags/index.ts +14 -0
- package/src/types/index.ts +8 -0
- package/src/types/pg-to-csharp.ts +136 -0
- package/src/types/pg-to-go.ts +120 -0
- package/src/types/pg-to-java.ts +141 -0
- package/src/types/pg-to-kotlin.ts +119 -0
- package/src/types/pg-to-python.ts +120 -0
- package/src/types/pg-to-rust.ts +121 -0
- package/src/types/pg-to-ts.ts +173 -0
- package/src/typescript/index.ts +168 -0
- package/src/typescript/test/.gitignore +6 -0
- package/src/typescript/test/Dockerfile +8 -0
- package/src/typescript/test/test.ts +89 -0
- package/src/xsd/index.ts +191 -0
- package/src/xsd/test/.gitignore +6 -0
- package/src/xsd/test/Dockerfile +6 -0
- package/src/zod/index.ts +289 -0
- package/src/zod/test/.gitignore +6 -0
- package/src/zod/test/Dockerfile +6 -0
|
@@ -0,0 +1,351 @@
|
|
|
1
|
+
import { defineTemplate } from '@sqldoc/ns-codegen'
|
|
2
|
+
import { activeTables, type EnrichedColumn, enrichRealm } from '../helpers/enrich.ts'
|
|
3
|
+
import { toPascalCase } from '../helpers/naming.ts'
|
|
4
|
+
|
|
5
|
+
// Template configuration surface. Kept `as const` so consumers of the
// exported type see the literal enum values rather than widened strings.
export const configSchema = {
  // Controls output layout: one .schema.json per table/view, or a single
  // bundled schema.json with everything under $defs.
  mode: {
    type: 'enum',
    values: ['per-table', 'bundled'],
    description: 'Output one file per table or a single bundled file (default: bundled)',
  },
  // Base URI used to derive each schema's $id (`<baseId>/<table name>`).
  $id: {
    type: 'string',
    description: 'Schema ID base URI (e.g. https://example.com/schemas)',
  },
} as const
|
|
16
|
+
|
|
17
|
+
const PG_TO_JSON_SCHEMA: Record<string, { type?: string; format?: string }> = {
|
|
18
|
+
// Numeric -- integers
|
|
19
|
+
smallint: { type: 'integer' },
|
|
20
|
+
int2: { type: 'integer' },
|
|
21
|
+
integer: { type: 'integer' },
|
|
22
|
+
int: { type: 'integer' },
|
|
23
|
+
int4: { type: 'integer' },
|
|
24
|
+
bigint: { type: 'integer' },
|
|
25
|
+
int8: { type: 'integer' },
|
|
26
|
+
serial: { type: 'integer' },
|
|
27
|
+
serial4: { type: 'integer' },
|
|
28
|
+
bigserial: { type: 'integer' },
|
|
29
|
+
serial8: { type: 'integer' },
|
|
30
|
+
smallserial: { type: 'integer' },
|
|
31
|
+
serial2: { type: 'integer' },
|
|
32
|
+
|
|
33
|
+
// Numeric -- floats
|
|
34
|
+
real: { type: 'number' },
|
|
35
|
+
float4: { type: 'number' },
|
|
36
|
+
'double precision': { type: 'number' },
|
|
37
|
+
float8: { type: 'number' },
|
|
38
|
+
numeric: { type: 'number' },
|
|
39
|
+
decimal: { type: 'number' },
|
|
40
|
+
money: { type: 'string' },
|
|
41
|
+
|
|
42
|
+
// String
|
|
43
|
+
text: { type: 'string' },
|
|
44
|
+
varchar: { type: 'string' },
|
|
45
|
+
'character varying': { type: 'string' },
|
|
46
|
+
char: { type: 'string' },
|
|
47
|
+
character: { type: 'string' },
|
|
48
|
+
name: { type: 'string' },
|
|
49
|
+
citext: { type: 'string' },
|
|
50
|
+
|
|
51
|
+
// Boolean
|
|
52
|
+
boolean: { type: 'boolean' },
|
|
53
|
+
bool: { type: 'boolean' },
|
|
54
|
+
|
|
55
|
+
// Date/Time
|
|
56
|
+
timestamp: { type: 'string', format: 'date-time' },
|
|
57
|
+
'timestamp without time zone': { type: 'string', format: 'date-time' },
|
|
58
|
+
timestamptz: { type: 'string', format: 'date-time' },
|
|
59
|
+
'timestamp with time zone': { type: 'string', format: 'date-time' },
|
|
60
|
+
date: { type: 'string', format: 'date' },
|
|
61
|
+
time: { type: 'string', format: 'time' },
|
|
62
|
+
'time without time zone': { type: 'string', format: 'time' },
|
|
63
|
+
timetz: { type: 'string', format: 'time' },
|
|
64
|
+
'time with time zone': { type: 'string', format: 'time' },
|
|
65
|
+
interval: { type: 'string', format: 'duration' },
|
|
66
|
+
|
|
67
|
+
// Binary
|
|
68
|
+
bytea: { type: 'string', format: 'byte' },
|
|
69
|
+
|
|
70
|
+
// JSON
|
|
71
|
+
json: {},
|
|
72
|
+
jsonb: {},
|
|
73
|
+
|
|
74
|
+
// UUID
|
|
75
|
+
uuid: { type: 'string', format: 'uuid' },
|
|
76
|
+
|
|
77
|
+
// Network
|
|
78
|
+
inet: { type: 'string', format: 'ipv4' },
|
|
79
|
+
cidr: { type: 'string' },
|
|
80
|
+
macaddr: { type: 'string' },
|
|
81
|
+
macaddr8: { type: 'string' },
|
|
82
|
+
|
|
83
|
+
// Other
|
|
84
|
+
xml: { type: 'string' },
|
|
85
|
+
tsvector: { type: 'string' },
|
|
86
|
+
tsquery: { type: 'string' },
|
|
87
|
+
oid: { type: 'integer' },
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
function pgToJsonSchema(pgType: string): Record<string, unknown> {
|
|
91
|
+
const normalized = pgType.toLowerCase().trim()
|
|
92
|
+
|
|
93
|
+
// Arrays
|
|
94
|
+
if (normalized.endsWith('[]')) {
|
|
95
|
+
return { type: 'array', items: pgToJsonSchema(normalized.slice(0, -2)) }
|
|
96
|
+
}
|
|
97
|
+
if (normalized.startsWith('_')) {
|
|
98
|
+
return { type: 'array', items: pgToJsonSchema(normalized.slice(1)) }
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
// Strip length specifiers
|
|
102
|
+
const baseType = normalized.replace(/\(\d+(?:,\s*\d+)?\)/, '').trim()
|
|
103
|
+
|
|
104
|
+
const mapped = PG_TO_JSON_SCHEMA[baseType]
|
|
105
|
+
if (mapped) return { ...mapped }
|
|
106
|
+
|
|
107
|
+
// JSON types -- no type constraint (any valid JSON)
|
|
108
|
+
if (baseType === 'json' || baseType === 'jsonb') return {}
|
|
109
|
+
|
|
110
|
+
return { type: 'string' }
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
function applyValidation(prop: Record<string, unknown>, col: EnrichedColumn): void {
|
|
114
|
+
for (const t of col.tags) {
|
|
115
|
+
if (t.namespace !== 'validate') continue
|
|
116
|
+
|
|
117
|
+
if (t.tag === 'notEmpty') {
|
|
118
|
+
if (prop.type === 'string') prop.minLength = 1
|
|
119
|
+
if (prop.type === 'array') prop.minItems = 1
|
|
120
|
+
} else if (t.tag === 'email') {
|
|
121
|
+
prop.format = 'email'
|
|
122
|
+
} else if (t.tag === 'length') {
|
|
123
|
+
const args = t.args as Record<string, unknown>
|
|
124
|
+
if (args.min !== undefined) prop.minLength = Number(args.min)
|
|
125
|
+
if (args.max !== undefined) prop.maxLength = Number(args.max)
|
|
126
|
+
} else if (t.tag === 'range') {
|
|
127
|
+
const args = t.args as Record<string, unknown>
|
|
128
|
+
if (args.min !== undefined) prop.minimum = Number(args.min)
|
|
129
|
+
if (args.max !== undefined) prop.maximum = Number(args.max)
|
|
130
|
+
} else if (t.tag === 'pattern') {
|
|
131
|
+
const pattern = Array.isArray(t.args) ? t.args[0] : undefined
|
|
132
|
+
if (pattern) prop.pattern = String(pattern)
|
|
133
|
+
} else if (t.tag === 'min') {
|
|
134
|
+
const val = Array.isArray(t.args) ? t.args[0] : undefined
|
|
135
|
+
if (val !== undefined) {
|
|
136
|
+
if (prop.type === 'string') prop.minLength = Number(val)
|
|
137
|
+
else prop.minimum = Number(val)
|
|
138
|
+
}
|
|
139
|
+
} else if (t.tag === 'max') {
|
|
140
|
+
const val = Array.isArray(t.args) ? t.args[0] : undefined
|
|
141
|
+
if (val !== undefined) {
|
|
142
|
+
if (prop.type === 'string') prop.maxLength = Number(val)
|
|
143
|
+
else prop.maximum = Number(val)
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
function buildTableSchema(
|
|
150
|
+
table: ReturnType<typeof activeTables>[number],
|
|
151
|
+
baseId?: string,
|
|
152
|
+
_enumDefs?: Map<string, string[]>,
|
|
153
|
+
): Record<string, unknown> {
|
|
154
|
+
const required: string[] = []
|
|
155
|
+
const properties: Record<string, Record<string, unknown>> = {}
|
|
156
|
+
|
|
157
|
+
for (const col of table.columns) {
|
|
158
|
+
let prop: Record<string, unknown>
|
|
159
|
+
|
|
160
|
+
if (col.category === 'enum' && col.enumValues?.length) {
|
|
161
|
+
prop = { type: 'string', enum: col.enumValues }
|
|
162
|
+
} else {
|
|
163
|
+
prop = pgToJsonSchema(col.pgType)
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
if (!col.nullable) required.push(col.name)
|
|
167
|
+
|
|
168
|
+
applyValidation(prop, col)
|
|
169
|
+
|
|
170
|
+
properties[col.name] = prop
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
const schema: Record<string, unknown> = {
|
|
174
|
+
type: 'object',
|
|
175
|
+
properties,
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
if (required.length > 0) schema.required = required
|
|
179
|
+
if (baseId) schema.$id = `${baseId}/${table.name}`
|
|
180
|
+
|
|
181
|
+
return schema
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
function buildViewSchema(
|
|
185
|
+
view: { pascalName: string; name: string; columns: EnrichedColumn[] },
|
|
186
|
+
baseId?: string,
|
|
187
|
+
): Record<string, unknown> {
|
|
188
|
+
const required: string[] = []
|
|
189
|
+
const properties: Record<string, Record<string, unknown>> = {}
|
|
190
|
+
|
|
191
|
+
for (const col of view.columns) {
|
|
192
|
+
let prop: Record<string, unknown>
|
|
193
|
+
|
|
194
|
+
if (col.category === 'enum' && col.enumValues?.length) {
|
|
195
|
+
prop = { type: 'string', enum: col.enumValues }
|
|
196
|
+
} else {
|
|
197
|
+
prop = pgToJsonSchema(col.pgType)
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
if (!col.nullable) required.push(col.name)
|
|
201
|
+
|
|
202
|
+
applyValidation(prop, col)
|
|
203
|
+
|
|
204
|
+
properties[col.name] = prop
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
const schema: Record<string, unknown> = {
|
|
208
|
+
type: 'object',
|
|
209
|
+
properties,
|
|
210
|
+
readOnly: true,
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
if (required.length > 0) schema.required = required
|
|
214
|
+
if (baseId) schema.$id = `${baseId}/${view.name}`
|
|
215
|
+
|
|
216
|
+
return schema
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
/**
 * JSON Schema template: emits draft 2020-12 schemas for tables, views,
 * composite types, enums, and non-trigger functions.
 *
 * Ordering matters for output stability: defs are inserted in the order
 * enums -> composites -> tables -> views -> functions, and that insertion
 * order is what JSON.stringify serializes.
 */
export default defineTemplate({
  name: 'JSON Schema',
  description: 'Generate JSON Schema definitions from SQL schema',
  language: 'json',
  configSchema,

  generate(ctx) {
    const config = ctx.config ?? {}
    const schema = enrichRealm(ctx)
    const tables = activeTables(schema)
    const views = schema.views.filter((v) => !v.skipped)
    const baseId = config.$id

    // Collect composite types from columns (first occurrence wins; all
    // tables are scanned, including skipped ones).
    const composites = new Map<string, Array<{ name: string; type: string }>>()
    for (const table of schema.tables) {
      for (const col of table.columns) {
        if (col.category === 'composite' && col.compositeFields?.length && !composites.has(col.pgType)) {
          composites.set(col.pgType, col.compositeFields)
        }
      }
    }

    // Per-table mode: one standalone .schema.json per table/view, each
    // carrying its own $schema declaration.
    if (config.mode === 'per-table') {
      const files = [
        ...tables.map((table) => ({
          path: `${table.name}.schema.json`,
          content: `${JSON.stringify(
            { $schema: 'https://json-schema.org/draft/2020-12/schema', ...buildTableSchema(table, baseId) },
            null,
            2,
          )}\n`,
        })),
        ...views.map((view) => ({
          path: `${view.name}.schema.json`,
          content: `${JSON.stringify(
            { $schema: 'https://json-schema.org/draft/2020-12/schema', ...buildViewSchema(view, baseId) },
            null,
            2,
          )}\n`,
        })),
      ]
      return { files }
    }

    // Bundled mode (default): everything collected under $defs in a
    // single schema.json.
    const defs: Record<string, Record<string, unknown>> = {}

    // Enum definitions
    for (const e of schema.enums) {
      defs[e.pascalName] = { type: 'string', enum: e.values }
    }

    // Composite type definitions -- every field is treated as required.
    for (const [name, fields] of composites) {
      const props: Record<string, Record<string, unknown>> = {}
      const req: string[] = []
      for (const f of fields) {
        props[f.name] = pgToJsonSchema(f.type)
        req.push(f.name)
      }
      const compSchema: Record<string, unknown> = { type: 'object', properties: props }
      if (req.length > 0) compSchema.required = req
      defs[toPascalCase(name)] = compSchema
    }

    for (const table of tables) {
      defs[table.pascalName] = buildTableSchema(table, baseId)
    }

    for (const view of views) {
      defs[view.pascalName] = buildViewSchema(view, baseId)
    }

    // Function schemas (skip trigger functions)
    // NOTE(review): multiple unnamed args all fall back to 'arg', which
    // overwrites `params` entries and duplicates 'arg' in the required
    // list -- confirm catalog args are always named.
    for (const fn of schema.functions) {
      const retRaw = fn.returnType?.type?.toLowerCase() ?? ''
      if (retRaw === 'trigger') continue

      // Underscore-prefixed and OUT-mode args are not call parameters.
      const params: Record<string, Record<string, unknown>> = {}
      const paramRequired: string[] = []
      for (const a of fn.args.filter((a) => !a.name?.startsWith('_') && (a as any).mode !== 'OUT')) {
        const argName = a.name || 'arg'
        params[argName] = pgToJsonSchema(a.type)
        paramRequired.push(argName)
      }

      // `setof <table>` becomes an array of the table's $ref when the
      // table is known, otherwise an array of the mapped scalar type.
      let returnSchema: Record<string, unknown>
      if (retRaw.startsWith('setof ')) {
        const tableName = retRaw.replace('setof ', '')
        const table = schema.tables.find((t) => t.name === tableName)
        if (table) {
          returnSchema = { type: 'array', items: { $ref: `#/$defs/${table.pascalName}` } }
        } else {
          returnSchema = { type: 'array', items: pgToJsonSchema(tableName) }
        }
      } else if (fn.returnType) {
        returnSchema = pgToJsonSchema(fn.returnType.type)
      } else {
        returnSchema = {}
      }

      const fnSchema: Record<string, unknown> = {
        type: 'object',
        properties: {
          parameters: {
            type: 'object',
            properties: params,
            ...(paramRequired.length > 0 ? { required: paramRequired } : {}),
          },
          returnType: returnSchema,
        },
      }

      defs[fn.pascalName] = fnSchema
    }

    const bundled: Record<string, unknown> = {
      $schema: 'https://json-schema.org/draft/2020-12/schema',
      $defs: defs,
    }
    if (baseId) bundled.$id = baseId

    return {
      files: [
        {
          path: 'schema.json',
          content: `${JSON.stringify(bundled, null, 2)}\n`,
        },
      ],
    }
  },
})
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# Validation image for the generated JSON Schema bundle: installs Ajv and
# checks every entry under $defs against the draft 2020-12 meta-schema at
# build time, so an invalid schema fails `docker build`.
FROM node:23-slim
WORKDIR /app
COPY . .
# ajv@8 provides the 2020-12 validator (ajv/dist/2020); ajv-formats adds
# the date-time/uuid/email/etc. format keywords the schemas use.
RUN npm init -y && npm install ajv@8 ajv-formats@3 --save-dev
# \$defs escapes the dollar so the shell does not expand it inside the
# double-quoted inline script.
RUN node -e " \
const Ajv = require('ajv/dist/2020'); \
const addFormats = require('ajv-formats'); \
const schema = require('./schema.json'); \
const ajv = new Ajv({ allErrors: true }); \
addFormats(ajv); \
for (const [name, def] of Object.entries(schema.\$defs || {})) { \
const valid = ajv.validateSchema(def); \
if (!valid) { console.error('INVALID:', name, ajv.errors); process.exit(1); } \
console.log('OK:', name); \
} \
console.log('All schemas valid'); \
"
# All checks ran during build; the container itself is a no-op.
CMD ["echo", "ok"]
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
import { defineTemplate } from '@sqldoc/ns-codegen'
|
|
2
|
+
import { activeTables, enrichRealm } from '../helpers/enrich.ts'
|
|
3
|
+
import { toCamelCase, toPascalCase } from '../helpers/naming.ts'
|
|
4
|
+
import { pgToTs, type TsTypeOptions } from '../types/pg-to-ts.ts'
|
|
5
|
+
|
|
6
|
+
// Template configuration surface. Kept `as const` so the enum values stay
// literal in the exported type.
export const configSchema = {
  // Passed through to pg-to-ts as TsTypeOptions.dateType.
  dateType: {
    type: 'enum',
    values: ['Date', 'dayjs', 'luxon', 'string'],
    description: 'How to represent date/time types',
  },
  // Passed through to pg-to-ts as TsTypeOptions.bigintType.
  bigintType: {
    type: 'enum',
    values: ['number', 'bigint', 'string'],
    description: 'How to represent bigint/bigserial columns',
  },
} as const
|
|
18
|
+
|
|
19
|
+
/**
 * Knex template: emits a single database.ts containing enum/composite
 * type aliases, one interface per table/view, a `knex/types/tables`
 * module augmentation mapping table names to those interfaces, and one
 * function type per non-trigger SQL function.
 */
export default defineTemplate({
  name: 'Knex Table Types',
  description: 'Generate TypeScript table interfaces with Knex module augmentation for type-safe queries',
  language: 'typescript',
  configSchema,

  generate(ctx) {
    const config = ctx.config ?? {}
    // Nullable columns render as `T | null` unions -- the pg driver
    // returns null (not undefined) for SQL NULL.
    const options: TsTypeOptions = {
      dateType: config.dateType,
      bigintType: config.bigintType,
      nullableStyle: 'null-union',
    }

    const schema = enrichRealm(ctx)
    // Output is accumulated line by line; push order defines file layout.
    const lines: string[] = [
      '// Generated by @sqldoc/templates/knex -- DO NOT EDIT',
      '',
      "import type { Knex } from 'knex'",
      '',
    ]

    // Enums
    for (const e of schema.enums) {
      const values = e.values.map((v) => `'${v}'`).join(' | ')
      lines.push(`export type ${e.pascalName} = ${values}`)
      lines.push('')
    }

    // Composite types (collected from columns; first occurrence wins)
    const composites = new Map<string, Array<{ name: string; type: string }>>()
    for (const table of schema.tables) {
      for (const col of table.columns) {
        if (col.category === 'composite' && col.compositeFields?.length && !composites.has(col.pgType)) {
          composites.set(col.pgType, col.compositeFields)
        }
      }
    }
    for (const [name, fields] of composites) {
      const typeName = toPascalCase(name)
      lines.push(`export interface ${typeName} {`)
      for (const f of fields) {
        // NOTE(review): composite field keys are camelCased here while
        // table columns keep their raw SQL names -- confirm the driver
        // actually yields camelCased keys for composite values.
        lines.push(`  ${toCamelCase(f.name)}: ${pgToTs(f.type, false, options)}`)
      }
      lines.push('}')
      lines.push('')
    }

    // Generate per-table interfaces
    const tableEntries: string[] = []

    for (const table of activeTables(schema)) {
      const interfaceName = `${table.pascalName}Table`

      lines.push(`export interface ${interfaceName} {`)

      for (const col of table.columns) {
        const tsType = resolveType(col, options)
        lines.push(`  ${col.name}: ${tsType}`)
      }

      lines.push('}')
      lines.push('')

      tableEntries.push(`    ${table.name}: ${interfaceName}`)
    }

    // Views (read-only)
    const viewEntries: string[] = []

    for (const view of schema.views.filter((v) => !v.skipped)) {
      const interfaceName = `${view.pascalName}View`

      lines.push(`/** Read-only (from view) */`)
      lines.push(`export interface ${interfaceName} {`)

      for (const col of view.columns) {
        const tsType = resolveType(col, options)
        lines.push(`  ${col.name}: ${tsType}`)
      }

      lines.push('}')
      lines.push('')

      viewEntries.push(`    ${view.name}: ${interfaceName}`)
    }

    // Knex module augmentation for type-safe table map. Views are also
    // registered here so they can be queried by name.
    lines.push("declare module 'knex/types/tables' {")
    lines.push('  interface Tables {')
    for (const entry of tableEntries) {
      lines.push(entry)
    }
    for (const entry of viewEntries) {
      lines.push(entry)
    }
    lines.push('  }')
    lines.push('}')
    lines.push('')

    // Functions (skip trigger functions)
    for (const fn of schema.functions) {
      const retRaw = fn.returnType?.type?.toLowerCase() ?? ''
      if (retRaw === 'trigger') continue

      // Underscore-prefixed and OUT-mode args are not call parameters.
      const params = fn.args
        .filter((a) => !a.name?.startsWith('_') && (a as any).mode !== 'OUT')
        .map((a) => {
          const argType = pgToTs(a.type, false, options, a.category as any)
          return `${toCamelCase(a.name || 'arg')}: ${argType}`
        })
        .join(', ')

      // `setof <table>` returns an array of that table's row interface
      // when the table is known, otherwise an array of the mapped scalar.
      let retType: string
      if (retRaw.startsWith('setof ')) {
        const tableName = retRaw.replace('setof ', '')
        const table = schema.tables.find((t) => t.name === tableName)
        retType = table ? `${table.pascalName}Table[]` : `${pgToTs(tableName, false, options)}[]`
      } else if (fn.returnType) {
        retType = pgToTs(fn.returnType.type, false, options, fn.returnType.category as any)
      } else {
        retType = 'void'
      }

      lines.push(`export type ${fn.pascalName} = (${params}) => ${retType}`)
      lines.push('')
    }

    return {
      files: [
        {
          path: 'database.ts',
          content: lines.join('\n'),
        },
      ],
    }

    // Declared after the return statement: function declarations are
    // hoisted, so resolveType is in scope throughout generate().
    /** Resolve a column to its TS type: override > enum > composite > mapped. */
    function resolveType(col: any, options: TsTypeOptions): string {
      // Explicit per-column type override always wins.
      if (col.typeOverride) return col.typeOverride
      if (col.category === 'enum' && col.enumValues?.length) {
        return col.nullable ? `${toPascalCase(col.pgType)} | null` : toPascalCase(col.pgType)
      }
      if (col.category === 'composite' && col.compositeFields?.length) {
        const compositeType = toPascalCase(col.pgType)
        return col.nullable ? `${compositeType} | null` : compositeType
      }
      return pgToTs(col.pgType, col.nullable, options, col.category)
    }
  },
})
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
# Integration-test image for the knex template: runs test.ts directly
# against a live Postgres (DATABASE_URL supplied by the harness at run time).
FROM node:23-slim
WORKDIR /app
COPY . .
# type=module so Node treats test.ts as ESM under --experimental-strip-types.
RUN npm init -y && npm pkg set type=module && npm install typescript@5 knex@3 pg @types/pg @types/node --save-dev
# Note: knex module augmentation (knex/types/tables) doesn't typecheck under nodenext.
# Skip tsc and focus on the runtime integration test.
CMD ["node", "--experimental-strip-types", "test.ts"]
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Integration test for @sqldoc/templates/knex
|
|
3
|
+
* Connects to real Postgres via Knex, verifies generated table types work.
|
|
4
|
+
*/
|
|
5
|
+
import knex from 'knex'
|
|
6
|
+
// Import to ensure the module augmentation is loaded
|
|
7
|
+
import type {} from './database.ts'
|
|
8
|
+
|
|
9
|
+
// Connection string comes from the test harness; fail fast when missing
// so the container exits with a clear message instead of a driver error.
const DATABASE_URL = process.env.DATABASE_URL
if (!DATABASE_URL) {
  console.error('DATABASE_URL not set')
  process.exit(1)
}

// Shared Knex instance backed by the pg driver; torn down in run()'s
// finally block so the process can exit cleanly.
const db = knex({
  client: 'pg',
  connection: DATABASE_URL,
})
|
|
19
|
+
|
|
20
|
+
let failed = 0
|
|
21
|
+
function assert(condition: boolean, msg: string) {
|
|
22
|
+
if (!condition) {
|
|
23
|
+
console.error(`FAIL: ${msg}`)
|
|
24
|
+
failed++
|
|
25
|
+
} else {
|
|
26
|
+
console.log(` ok: ${msg}`)
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
async function run() {
|
|
31
|
+
try {
|
|
32
|
+
console.log('--- knex integration test ---')
|
|
33
|
+
|
|
34
|
+
// 1. Query known seeded user using Knex's type-safe table method
|
|
35
|
+
const users = await db('users').where({ id: 1 })
|
|
36
|
+
const user = users[0]
|
|
37
|
+
|
|
38
|
+
assert(user.email === 'test@example.com', 'user email matches')
|
|
39
|
+
assert(user.name === 'Test User', 'user name matches')
|
|
40
|
+
assert(user.is_active === true, 'user is_active matches')
|
|
41
|
+
|
|
42
|
+
// 2. Query known seeded post
|
|
43
|
+
const posts = await db('posts').where({ id: 1 })
|
|
44
|
+
assert(posts.length === 1, 'seeded post found')
|
|
45
|
+
assert(posts[0].title === 'Hello World', 'post title matches')
|
|
46
|
+
|
|
47
|
+
// 3. Insert a new post
|
|
48
|
+
await db('posts').insert({
|
|
49
|
+
user_id: 1,
|
|
50
|
+
title: 'Post from knex',
|
|
51
|
+
body: 'test body',
|
|
52
|
+
view_count: 0,
|
|
53
|
+
})
|
|
54
|
+
|
|
55
|
+
// 4. Read it back
|
|
56
|
+
const newPosts = await db('posts').where({ title: 'Post from knex' })
|
|
57
|
+
assert(newPosts.length === 1, 'inserted post found')
|
|
58
|
+
assert(newPosts[0].title === 'Post from knex', 'inserted post title matches')
|
|
59
|
+
// pg returns bigint columns as strings; use Number() for comparison
|
|
60
|
+
assert(Number(newPosts[0].user_id) === 1, 'inserted post user_id matches')
|
|
61
|
+
|
|
62
|
+
if (failed > 0) {
|
|
63
|
+
console.error(`\n${failed} assertion(s) failed`)
|
|
64
|
+
process.exit(1)
|
|
65
|
+
}
|
|
66
|
+
console.log('\nAll assertions passed!')
|
|
67
|
+
} finally {
|
|
68
|
+
await db.destroy()
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
run().catch((err) => {
|
|
73
|
+
console.error(err)
|
|
74
|
+
process.exit(1)
|
|
75
|
+
})
|