@sqldoc/templates 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/package.json +161 -0
  2. package/src/__tests__/dedent.test.ts +45 -0
  3. package/src/__tests__/docker-templates.test.ts +134 -0
  4. package/src/__tests__/go-structs.test.ts +184 -0
  5. package/src/__tests__/naming.test.ts +48 -0
  6. package/src/__tests__/python-dataclasses.test.ts +185 -0
  7. package/src/__tests__/rust-structs.test.ts +176 -0
  8. package/src/__tests__/tags-helpers.test.ts +72 -0
  9. package/src/__tests__/type-mapping.test.ts +332 -0
  10. package/src/__tests__/typescript.test.ts +202 -0
  11. package/src/cobol-copybook/index.ts +220 -0
  12. package/src/cobol-copybook/test/.gitignore +6 -0
  13. package/src/cobol-copybook/test/Dockerfile +7 -0
  14. package/src/csharp-records/index.ts +131 -0
  15. package/src/csharp-records/test/.gitignore +6 -0
  16. package/src/csharp-records/test/Dockerfile +6 -0
  17. package/src/diesel/index.ts +247 -0
  18. package/src/diesel/test/.gitignore +6 -0
  19. package/src/diesel/test/Dockerfile +16 -0
  20. package/src/drizzle/index.ts +255 -0
  21. package/src/drizzle/test/.gitignore +6 -0
  22. package/src/drizzle/test/Dockerfile +8 -0
  23. package/src/drizzle/test/test.ts +71 -0
  24. package/src/efcore/index.ts +190 -0
  25. package/src/efcore/test/.gitignore +6 -0
  26. package/src/efcore/test/Dockerfile +7 -0
  27. package/src/go-structs/index.ts +119 -0
  28. package/src/go-structs/test/.gitignore +6 -0
  29. package/src/go-structs/test/Dockerfile +13 -0
  30. package/src/go-structs/test/test.go +71 -0
  31. package/src/gorm/index.ts +134 -0
  32. package/src/gorm/test/.gitignore +6 -0
  33. package/src/gorm/test/Dockerfile +13 -0
  34. package/src/gorm/test/test.go +65 -0
  35. package/src/helpers/atlas.ts +43 -0
  36. package/src/helpers/enrich.ts +396 -0
  37. package/src/helpers/naming.ts +19 -0
  38. package/src/helpers/tags.ts +63 -0
  39. package/src/index.ts +24 -0
  40. package/src/java-records/index.ts +179 -0
  41. package/src/java-records/test/.gitignore +6 -0
  42. package/src/java-records/test/Dockerfile +11 -0
  43. package/src/java-records/test/Test.java +93 -0
  44. package/src/jpa/index.ts +279 -0
  45. package/src/jpa/test/.gitignore +6 -0
  46. package/src/jpa/test/Dockerfile +14 -0
  47. package/src/jpa/test/Test.java +111 -0
  48. package/src/json-schema/index.ts +351 -0
  49. package/src/json-schema/test/.gitignore +6 -0
  50. package/src/json-schema/test/Dockerfile +18 -0
  51. package/src/knex/index.ts +168 -0
  52. package/src/knex/test/.gitignore +6 -0
  53. package/src/knex/test/Dockerfile +7 -0
  54. package/src/knex/test/test.ts +75 -0
  55. package/src/kotlin-data/index.ts +147 -0
  56. package/src/kotlin-data/test/.gitignore +6 -0
  57. package/src/kotlin-data/test/Dockerfile +14 -0
  58. package/src/kotlin-data/test/Test.kt +82 -0
  59. package/src/kysely/index.ts +165 -0
  60. package/src/kysely/test/.gitignore +6 -0
  61. package/src/kysely/test/Dockerfile +8 -0
  62. package/src/kysely/test/test.ts +82 -0
  63. package/src/prisma/index.ts +387 -0
  64. package/src/prisma/test/.gitignore +6 -0
  65. package/src/prisma/test/Dockerfile +7 -0
  66. package/src/protobuf/index.ts +219 -0
  67. package/src/protobuf/test/.gitignore +6 -0
  68. package/src/protobuf/test/Dockerfile +6 -0
  69. package/src/pydantic/index.ts +272 -0
  70. package/src/pydantic/test/.gitignore +6 -0
  71. package/src/pydantic/test/Dockerfile +8 -0
  72. package/src/pydantic/test/test.py +63 -0
  73. package/src/python-dataclasses/index.ts +217 -0
  74. package/src/python-dataclasses/test/.gitignore +6 -0
  75. package/src/python-dataclasses/test/Dockerfile +8 -0
  76. package/src/python-dataclasses/test/test.py +63 -0
  77. package/src/rust-structs/index.ts +152 -0
  78. package/src/rust-structs/test/.gitignore +6 -0
  79. package/src/rust-structs/test/Dockerfile +22 -0
  80. package/src/rust-structs/test/test.rs +82 -0
  81. package/src/sqlalchemy/index.ts +258 -0
  82. package/src/sqlalchemy/test/.gitignore +6 -0
  83. package/src/sqlalchemy/test/Dockerfile +8 -0
  84. package/src/sqlalchemy/test/test.py +61 -0
  85. package/src/sqlc/index.ts +148 -0
  86. package/src/sqlc/test/.gitignore +6 -0
  87. package/src/sqlc/test/Dockerfile +13 -0
  88. package/src/sqlc/test/test.go +91 -0
  89. package/src/tags/dedent.ts +28 -0
  90. package/src/tags/index.ts +14 -0
  91. package/src/types/index.ts +8 -0
  92. package/src/types/pg-to-csharp.ts +136 -0
  93. package/src/types/pg-to-go.ts +120 -0
  94. package/src/types/pg-to-java.ts +141 -0
  95. package/src/types/pg-to-kotlin.ts +119 -0
  96. package/src/types/pg-to-python.ts +120 -0
  97. package/src/types/pg-to-rust.ts +121 -0
  98. package/src/types/pg-to-ts.ts +173 -0
  99. package/src/typescript/index.ts +168 -0
  100. package/src/typescript/test/.gitignore +6 -0
  101. package/src/typescript/test/Dockerfile +8 -0
  102. package/src/typescript/test/test.ts +89 -0
  103. package/src/xsd/index.ts +191 -0
  104. package/src/xsd/test/.gitignore +6 -0
  105. package/src/xsd/test/Dockerfile +6 -0
  106. package/src/zod/index.ts +289 -0
  107. package/src/zod/test/.gitignore +6 -0
  108. package/src/zod/test/Dockerfile +6 -0
@@ -0,0 +1,220 @@
1
+ import { defineTemplate } from '@sqldoc/ns-codegen'
2
+ import { activeTables, type EnrichedColumn, enrichRealm } from '../helpers/enrich.ts'
3
+
4
+ /**
5
+ * Map PG types to COBOL PIC clauses.
6
+ * COBOL uses PIC (Picture) clauses to define data formats:
7
+ * PIC X(n) = alphanumeric, n characters
8
+ * PIC 9(n) = numeric, n digits
9
+ * PIC S9(n) = signed numeric
10
+ * PIC 9(n)V9(m) = numeric with implied decimal
11
+ */
12
+ function pgToCoboPic(col: EnrichedColumn): string {
13
+ const pgType = col.pgType
14
+ .toLowerCase()
15
+ .replace(/\(\d+(?:,\s*\d+)?\)/, '')
16
+ .trim()
17
+
18
+ switch (pgType) {
19
+ // Integers
20
+ case 'smallint':
21
+ case 'int2':
22
+ return 'PIC S9(4) COMP.'
23
+ case 'integer':
24
+ case 'int':
25
+ case 'int4':
26
+ case 'serial':
27
+ case 'serial4':
28
+ return 'PIC S9(9) COMP.'
29
+ case 'bigint':
30
+ case 'int8':
31
+ case 'bigserial':
32
+ case 'serial8':
33
+ return 'PIC S9(18) COMP.'
34
+ case 'smallserial':
35
+ case 'serial2':
36
+ return 'PIC S9(4) COMP.'
37
+
38
+ // Floats -- use COMP-2 (double precision float)
39
+ case 'real':
40
+ case 'float4':
41
+ return 'COMP-1.'
42
+ case 'double precision':
43
+ case 'float8':
44
+ return 'COMP-2.'
45
+
46
+ // Decimal -- use implied decimal point
47
+ case 'numeric':
48
+ case 'decimal':
49
+ case 'money': {
50
+ const m = col.pgType.match(/\((\d+)(?:,\s*(\d+))?\)/)
51
+ const prec = m ? parseInt(m[1], 10) : 10
52
+ const scale = m?.[2] ? parseInt(m[2], 10) : 2
53
+ return `PIC S9(${prec - scale})V9(${scale}) COMP-3.`
54
+ }
55
+
56
+ // Boolean -- single character Y/N
57
+ case 'boolean':
58
+ case 'bool':
59
+ return "PIC X VALUE 'N'."
60
+
61
+ // Date/time
62
+ case 'date':
63
+ return 'PIC 9(8).' // YYYYMMDD
64
+ case 'time':
65
+ case 'time without time zone':
66
+ case 'timetz':
67
+ case 'time with time zone':
68
+ return 'PIC 9(6).' // HHMMSS
69
+ case 'timestamp':
70
+ case 'timestamp without time zone':
71
+ case 'timestamptz':
72
+ case 'timestamp with time zone':
73
+ return 'PIC X(26).' // ISO 8601 string
74
+ case 'interval':
75
+ return 'PIC X(30).'
76
+
77
+ // Text -- estimate from varchar length or default to 255
78
+ case 'varchar':
79
+ case 'character varying': {
80
+ const lenMatch = col.pgType.match(/\((\d+)\)/)
81
+ const len = lenMatch ? parseInt(lenMatch[1], 10) : 255
82
+ return `PIC X(${len}).`
83
+ }
84
+ case 'char':
85
+ case 'character': {
86
+ const lenMatch = col.pgType.match(/\((\d+)\)/)
87
+ const len = lenMatch ? parseInt(lenMatch[1], 10) : 1
88
+ return `PIC X(${len}).`
89
+ }
90
+ case 'text':
91
+ case 'citext':
92
+ case 'name':
93
+ return 'PIC X(255).'
94
+
95
+ // UUID -- 36 char string (8-4-4-4-12)
96
+ case 'uuid':
97
+ return 'PIC X(36).'
98
+
99
+ // Binary/JSON/other -- treat as large string
100
+ case 'bytea':
101
+ case 'json':
102
+ case 'jsonb':
103
+ case 'xml':
104
+ return 'PIC X(4096).'
105
+
106
+ default:
107
+ return 'PIC X(255).'
108
+ }
109
+ }
110
+
111
+ // COBOL reserved words that can't be used as data names
112
+ const COBOL_RESERVED = new Set([
113
+ 'ADDRESS',
114
+ 'ID',
115
+ 'CONTENT',
116
+ 'STATUS',
117
+ 'NAME',
118
+ 'TYPE',
119
+ 'VALUE',
120
+ 'SIZE',
121
+ 'COUNT',
122
+ 'DATE',
123
+ 'TIME',
124
+ 'DATA',
125
+ 'FILE',
126
+ 'KEY',
127
+ 'RECORD',
128
+ 'TABLE',
129
+ 'INDEX',
130
+ 'INPUT',
131
+ 'OUTPUT',
132
+ 'OPEN',
133
+ 'CLOSE',
134
+ 'READ',
135
+ 'WRITE',
136
+ 'DELETE',
137
+ ])
138
+
139
+ function toCobolName(name: string): string {
140
+ // COBOL names are uppercase, hyphens instead of underscores, max 30 chars
141
+ let cobol = name.toUpperCase().replace(/_/g, '-').substring(0, 30)
142
+
143
+ // Prefix reserved words with WS- (Working Storage)
144
+ if (COBOL_RESERVED.has(cobol)) {
145
+ cobol = `WS-${cobol}`
146
+ }
147
+
148
+ return cobol
149
+ }
150
+
151
+ export default defineTemplate({
152
+ name: 'COBOL Copybook',
153
+ description: 'Generate COBOL copybook record definitions from SQL schema',
154
+ language: 'cobol',
155
+
156
+ generate(ctx) {
157
+ const schema = enrichRealm(ctx)
158
+ const lines: string[] = [' *> Generated by @sqldoc/templates/cobol-copybook', ' *> DO NOT EDIT', '']
159
+
160
+ // Enums as 88-level conditions
161
+ for (const e of schema.enums) {
162
+ const recordName = `${toCobolName(e.name)}-ENUM`
163
+ lines.push(` 01 ${recordName} PIC X(50).`)
164
+ for (const v of e.values) {
165
+ const condName = `${toCobolName(e.name)}-${toCobolName(v)}`
166
+ lines.push(` 88 ${condName} VALUE '${v}'.`)
167
+ }
168
+ lines.push('')
169
+ }
170
+
171
+ for (const table of activeTables(schema)) {
172
+ const recordName = `${toCobolName(table.name)}-RECORD`
173
+ lines.push(` 01 ${recordName}.`)
174
+
175
+ for (const col of table.columns) {
176
+ const fieldName = toCobolName(col.name)
177
+ if (col.category === 'enum' && col.enumValues?.length) {
178
+ // Enum column: use PIC X with 88-level conditions
179
+ const maxLen = Math.max(...col.enumValues.map((v) => v.length), 20)
180
+ lines.push(` 05 ${fieldName} PIC X(${maxLen}).`)
181
+ for (const v of col.enumValues) {
182
+ const condName = `${fieldName}-${toCobolName(v)}`
183
+ lines.push(` 88 ${condName} VALUE '${v}'.`)
184
+ }
185
+ } else {
186
+ const pic = pgToCoboPic(col)
187
+ lines.push(` 05 ${fieldName} ${pic}`)
188
+ }
189
+ }
190
+
191
+ lines.push('')
192
+ }
193
+
194
+ // Views (read-only)
195
+ for (const view of schema.views.filter((v) => !v.skipped)) {
196
+ const recordName = `${toCobolName(view.name)}-VIEW`
197
+ lines.push(` *> Read-only (from view)`)
198
+ lines.push(` 01 ${recordName}.`)
199
+
200
+ for (const col of view.columns) {
201
+ const fieldName = toCobolName(col.name)
202
+ if (col.category === 'enum' && col.enumValues?.length) {
203
+ const maxLen = Math.max(...col.enumValues.map((v) => v.length), 20)
204
+ lines.push(` 05 ${fieldName} PIC X(${maxLen}).`)
205
+ for (const v of col.enumValues) {
206
+ const condName = `${fieldName}-${toCobolName(v)}`
207
+ lines.push(` 88 ${condName} VALUE '${v}'.`)
208
+ }
209
+ } else {
210
+ const pic = pgToCoboPic(col)
211
+ lines.push(` 05 ${fieldName} ${pic}`)
212
+ }
213
+ }
214
+
215
+ lines.push('')
216
+ }
217
+
218
+ return { files: [{ path: 'schema.cpy', content: lines.join('\n') }] }
219
+ },
220
+ })
@@ -0,0 +1,6 @@
1
+ # Generated by codegen — only Dockerfile and test scripts are tracked
2
+ *
3
+ !.gitignore
4
+ !Dockerfile
5
+ !test.*
6
+ !Test.*
@@ -0,0 +1,7 @@
1
# Syntax-check the generated copybook with GnuCOBOL: wrap it in a minimal
# program and compile in syntax-only mode (no database or runtime needed).
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends gnucobol4 && rm -rf /var/lib/apt/lists/*
WORKDIR /app
# Bring in the codegen output (schema.cpy) plus the tracked test files.
COPY . .
# Minimal wrapper program whose only job is to COPY the generated copybook.
RUN printf ' IDENTIFICATION DIVISION.\n PROGRAM-ID. TEST-CPY.\n DATA DIVISION.\n WORKING-STORAGE SECTION.\n COPY "schema.cpy".\n PROCEDURE DIVISION.\n STOP RUN.\n' > test.cob
# A successful parse of the copybook is the whole test; no binary is produced.
RUN cobc -fsyntax-only test.cob
CMD ["echo", "ok"]
@@ -0,0 +1,131 @@
1
+ import { defineTemplate } from '@sqldoc/ns-codegen'
2
+ import { activeTables, enrichRealm } from '../helpers/enrich.ts'
3
+ import { toPascalCase } from '../helpers/naming.ts'
4
+ import { pgToCsharp } from '../types/pg-to-csharp.ts'
5
+
6
+ export default defineTemplate({
7
+ name: 'C# Records',
8
+ description: 'Generate C# record types from SQL schema',
9
+ language: 'csharp',
10
+
11
+ generate(ctx) {
12
+ const schema = enrichRealm(ctx)
13
+ const records: string[] = []
14
+
15
+ // Enums
16
+ for (const e of schema.enums) {
17
+ const enumName = toPascalCase(e.name)
18
+ const members = e.values.map((v) => ` ${toPascalCase(v)}`).join(',\n')
19
+ records.push(`public enum ${enumName}\n{\n${members}\n}`)
20
+ }
21
+
22
+ // Composite types as records
23
+ const composites = new Map<string, Array<{ name: string; type: string }>>()
24
+ for (const table of schema.tables) {
25
+ for (const col of table.columns) {
26
+ if (col.category === 'composite' && col.compositeFields?.length && !composites.has(col.pgType)) {
27
+ composites.set(col.pgType, col.compositeFields)
28
+ }
29
+ }
30
+ }
31
+ for (const [name, fields] of composites) {
32
+ const className = toPascalCase(name)
33
+ const csFields = fields.map((f) => {
34
+ const csType = pgToCsharp(f.type, false)
35
+ return ` ${csType} ${toPascalCase(f.name)}`
36
+ })
37
+ records.push(`public record ${className}(`)
38
+ records.push(csFields.join(',\n'))
39
+ records.push(');')
40
+ }
41
+
42
+ for (const table of activeTables(schema)) {
43
+ const params: string[] = []
44
+ for (const col of table.columns) {
45
+ let csType: string
46
+ if (col.typeOverride) {
47
+ csType = col.nullable ? `${col.typeOverride}?` : col.typeOverride
48
+ } else if (col.category === 'enum' && col.enumValues?.length) {
49
+ csType = col.nullable ? `${toPascalCase(col.pgType)}?` : toPascalCase(col.pgType)
50
+ } else if (col.category === 'composite' && col.compositeFields?.length) {
51
+ const compositeType = toPascalCase(col.pgType)
52
+ csType = col.nullable ? `${compositeType}?` : compositeType
53
+ } else {
54
+ csType = pgToCsharp(col.pgType, col.nullable, col.category)
55
+ }
56
+
57
+ params.push(` ${csType} ${col.pascalName}`)
58
+ }
59
+
60
+ records.push(`public record ${table.pascalName}(`)
61
+ records.push(params.join(',\n'))
62
+ records.push(');')
63
+ }
64
+
65
+ // Views (read-only)
66
+ for (const view of schema.views.filter((v) => !v.skipped)) {
67
+ const params: string[] = []
68
+ for (const col of view.columns) {
69
+ let csType: string
70
+ if (col.typeOverride) {
71
+ csType = col.nullable ? `${col.typeOverride}?` : col.typeOverride
72
+ } else if (col.category === 'enum' && col.enumValues?.length) {
73
+ csType = col.nullable ? `${toPascalCase(col.pgType)}?` : toPascalCase(col.pgType)
74
+ } else if (col.category === 'composite' && col.compositeFields?.length) {
75
+ const compositeType = toPascalCase(col.pgType)
76
+ csType = col.nullable ? `${compositeType}?` : compositeType
77
+ } else {
78
+ csType = pgToCsharp(col.pgType, col.nullable, col.category)
79
+ }
80
+
81
+ params.push(` ${csType} ${col.pascalName}`)
82
+ }
83
+
84
+ records.push(`/// <summary>Read-only (from view)</summary>`)
85
+ records.push(`public record ${view.pascalName}(`)
86
+ records.push(params.join(',\n'))
87
+ records.push(');')
88
+ }
89
+
90
+ // Functions (skip trigger functions)
91
+ for (const fn of schema.functions) {
92
+ const retRaw = fn.returnType?.type?.toLowerCase() ?? ''
93
+ if (retRaw === 'trigger') continue
94
+
95
+ let retType: string
96
+ if (retRaw.startsWith('setof ')) {
97
+ const tableName = retRaw.replace('setof ', '')
98
+ const table = schema.tables.find((t) => t.name === tableName)
99
+ retType = table ? `IEnumerable<${table.pascalName}>` : `IEnumerable<${toPascalCase(tableName)}>`
100
+ } else if (fn.returnType) {
101
+ retType = pgToCsharp(fn.returnType.type, false, fn.returnType.category)
102
+ } else {
103
+ retType = 'void'
104
+ }
105
+
106
+ const argParts = fn.args
107
+ .filter((a) => !a.name?.startsWith('_') && (a as any).mode !== 'OUT')
108
+ .map((a) => {
109
+ const csType = pgToCsharp(a.type, false, a.category)
110
+ const paramName = toPascalCase(a.name || 'arg').replace(/^./, (c) => c.toLowerCase()) // lowerCamelCase
111
+ return `${csType} ${paramName}`
112
+ })
113
+
114
+ records.push(`public delegate ${retType} ${fn.pascalName}(${argParts.join(', ')});`)
115
+ }
116
+
117
+ if (records.length === 0) {
118
+ return { files: [] }
119
+ }
120
+
121
+ const parts: string[] = []
122
+ parts.push('namespace Generated;')
123
+ parts.push('')
124
+ parts.push(records.join('\n\n'))
125
+ parts.push('')
126
+
127
+ return {
128
+ files: [{ path: 'Models.cs', content: parts.join('\n') }],
129
+ }
130
+ },
131
+ })
@@ -0,0 +1,6 @@
1
+ # Generated by codegen — only Dockerfile and test scripts are tracked
2
+ *
3
+ !.gitignore
4
+ !Dockerfile
5
+ !test.*
6
+ !Test.*
@@ -0,0 +1,6 @@
1
# Type-check the generated C# records by dropping them into a fresh classlib
# project and building it; the build succeeding is the whole test.
FROM mcr.microsoft.com/dotnet/sdk:9.0-alpine
WORKDIR /app
# Scaffold an empty classlib and remove its placeholder source file.
RUN dotnet new classlib -n TypeCheck --force && rm TypeCheck/Class1.cs
# Copy in the generated Models.cs (and any other tracked .cs files).
COPY *.cs TypeCheck/
RUN dotnet build TypeCheck/
CMD ["echo", "ok"]
@@ -0,0 +1,247 @@
1
+ import { defineTemplate } from '@sqldoc/ns-codegen'
2
+ import { activeTables, enrichRealm } from '../helpers/enrich.ts'
3
+ import { toPascalCase } from '../helpers/naming.ts'
4
+ import { pgToRust } from '../types/pg-to-rust.ts'
5
+
6
+ /**
7
+ * Mapping from PostgreSQL types to Diesel SQL types.
8
+ */
9
+ const PG_TO_DIESEL: Record<string, string> = {
10
+ smallint: 'SmallInt',
11
+ int2: 'SmallInt',
12
+ integer: 'Integer',
13
+ int: 'Integer',
14
+ int4: 'Integer',
15
+ bigint: 'BigInt',
16
+ int8: 'BigInt',
17
+ serial: 'Integer',
18
+ serial4: 'Integer',
19
+ bigserial: 'BigInt',
20
+ serial8: 'BigInt',
21
+ smallserial: 'SmallInt',
22
+ serial2: 'SmallInt',
23
+ real: 'Float',
24
+ float4: 'Float',
25
+ 'double precision': 'Double',
26
+ float8: 'Double',
27
+ numeric: 'Numeric',
28
+ decimal: 'Numeric',
29
+ text: 'Text',
30
+ varchar: 'Text',
31
+ 'character varying': 'Text',
32
+ char: 'Text',
33
+ character: 'Text',
34
+ name: 'Text',
35
+ citext: 'Text',
36
+ boolean: 'Bool',
37
+ bool: 'Bool',
38
+ timestamp: 'Timestamp',
39
+ 'timestamp without time zone': 'Timestamp',
40
+ timestamptz: 'Timestamptz',
41
+ 'timestamp with time zone': 'Timestamptz',
42
+ date: 'Date',
43
+ time: 'Time',
44
+ 'time without time zone': 'Time',
45
+ bytea: 'Bytea',
46
+ json: 'Jsonb',
47
+ jsonb: 'Jsonb',
48
+ uuid: 'Uuid',
49
+ inet: 'Inet',
50
+ money: 'Money',
51
+ }
52
+
53
+ /**
54
+ * Map a PostgreSQL column type to Diesel's SQL type name.
55
+ */
56
+ function pgToDieselType(pgType: string, nullable: boolean): string {
57
+ const normalized = pgType.toLowerCase().trim()
58
+
59
+ // Handle arrays
60
+ if (normalized.endsWith('[]') || normalized.startsWith('_')) {
61
+ const baseType = normalized.endsWith('[]') ? normalized.slice(0, -2) : normalized.slice(1)
62
+ const inner = pgToDieselType(baseType, false)
63
+ const arrayType = `Array<${inner}>`
64
+ return nullable ? `Nullable<${arrayType}>` : arrayType
65
+ }
66
+
67
+ // Strip length specifiers
68
+ const baseType = normalized.replace(/\(\d+(?:,\s*\d+)?\)/, '').trim()
69
+ const dieselType = PG_TO_DIESEL[baseType] ?? 'Text'
70
+
71
+ return nullable ? `Nullable<${dieselType}>` : dieselType
72
+ }
73
+
74
+ export default defineTemplate({
75
+ name: 'Diesel Schema',
76
+ description: 'Generate Diesel table! macros and Queryable structs from SQL schema',
77
+ language: 'rust',
78
+
79
+ generate(ctx) {
80
+ const schema = enrichRealm(ctx)
81
+ const tableMacros: string[] = []
82
+ const modelStructs: string[] = []
83
+ const allImports = new Set<string>()
84
+ allImports.add('serde::{Serialize, Deserialize}')
85
+
86
+ // Enums
87
+ const enumBlocks: string[] = []
88
+ for (const e of schema.enums) {
89
+ const enumName = toPascalCase(e.name)
90
+ const variants = e.values.map((v) => {
91
+ const variantName = toPascalCase(v)
92
+ return ` #[serde(rename = "${v}")]\n ${variantName},`
93
+ })
94
+ enumBlocks.push(
95
+ `#[derive(Debug, Clone, Serialize, Deserialize, diesel_derive_enum::DbEnum)]\npub enum ${enumName} {\n${variants.join('\n')}\n}`,
96
+ )
97
+ }
98
+
99
+ // Composite types as structs
100
+ const composites = new Map<string, Array<{ name: string; type: string }>>()
101
+ for (const table of schema.tables) {
102
+ for (const col of table.columns) {
103
+ if (col.category === 'composite' && col.compositeFields?.length && !composites.has(col.pgType)) {
104
+ composites.set(col.pgType, col.compositeFields)
105
+ }
106
+ }
107
+ }
108
+ const compositeBlocks: string[] = []
109
+ for (const [name, fields] of composites) {
110
+ const structName = toPascalCase(name)
111
+ const rustFields = fields.map((f) => {
112
+ const mapped = pgToRust(f.type, false)
113
+ for (const imp of mapped.imports) allImports.add(imp)
114
+ return ` pub ${f.name}: ${mapped.type},`
115
+ })
116
+ compositeBlocks.push(
117
+ `#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct ${structName} {\n${rustFields.join('\n')}\n}`,
118
+ )
119
+ }
120
+
121
+ for (const table of activeTables(schema)) {
122
+ // Determine PK column
123
+ const pkColumn = table.primaryKey[0] ?? 'id'
124
+
125
+ // Generate table! macro
126
+ const macroColumns: string[] = []
127
+ for (const col of table.columns) {
128
+ const dieselType =
129
+ col.category === 'enum'
130
+ ? 'Text'
131
+ : col.category === 'composite'
132
+ ? 'Text'
133
+ : pgToDieselType(col.pgType, col.nullable)
134
+ macroColumns.push(` ${col.name} -> ${dieselType},`)
135
+ }
136
+
137
+ tableMacros.push(`diesel::table! {`)
138
+ tableMacros.push(` ${table.name} (${pkColumn}) {`)
139
+ tableMacros.push(macroColumns.join('\n'))
140
+ tableMacros.push(` }`)
141
+ tableMacros.push(`}`)
142
+
143
+ // Generate Queryable struct
144
+ const fields: string[] = []
145
+ for (const col of table.columns) {
146
+ let rustType: string
147
+ if (col.typeOverride) {
148
+ rustType = col.nullable ? `Option<${col.typeOverride}>` : col.typeOverride
149
+ } else if (col.category === 'enum' && col.enumValues?.length) {
150
+ const enumType = toPascalCase(col.pgType)
151
+ rustType = col.nullable ? `Option<${enumType}>` : enumType
152
+ } else if (col.category === 'composite' && col.compositeFields?.length) {
153
+ const compositeType = toPascalCase(col.pgType)
154
+ rustType = col.nullable ? `Option<${compositeType}>` : compositeType
155
+ } else {
156
+ const mapped = pgToRust(col.pgType, col.nullable, col.category)
157
+ rustType = mapped.type
158
+ for (const imp of mapped.imports) allImports.add(imp)
159
+ }
160
+
161
+ fields.push(` pub ${col.name}: ${rustType},`)
162
+ }
163
+
164
+ modelStructs.push('#[derive(Debug, Clone, Queryable, Selectable, Serialize, Deserialize)]')
165
+ modelStructs.push(`#[diesel(table_name = ${table.name})]`)
166
+ modelStructs.push(`pub struct ${table.pascalName} {`)
167
+ modelStructs.push(fields.join('\n'))
168
+ modelStructs.push('}')
169
+ }
170
+
171
+ // Views (read-only) — generate both view schema and Queryable struct
172
+ for (const view of schema.views.filter((v) => !v.skipped)) {
173
+ // Generate table! macro for the view (Diesel uses table! for views too)
174
+ const macroColumns: string[] = []
175
+ const firstCol = view.columns[0]?.name ?? 'id'
176
+ for (const col of view.columns) {
177
+ const dieselType =
178
+ col.category === 'enum'
179
+ ? 'Text'
180
+ : col.category === 'composite'
181
+ ? 'Text'
182
+ : pgToDieselType(col.pgType, col.nullable)
183
+ macroColumns.push(` ${col.name} -> ${dieselType},`)
184
+ }
185
+
186
+ tableMacros.push(`diesel::table! {`)
187
+ tableMacros.push(` ${view.name} (${firstCol}) {`)
188
+ tableMacros.push(macroColumns.join('\n'))
189
+ tableMacros.push(` }`)
190
+ tableMacros.push(`}`)
191
+
192
+ const fields: string[] = []
193
+ for (const col of view.columns) {
194
+ let rustType: string
195
+ if (col.typeOverride) {
196
+ rustType = col.nullable ? `Option<${col.typeOverride}>` : col.typeOverride
197
+ } else if (col.category === 'enum' && col.enumValues?.length) {
198
+ const enumType = toPascalCase(col.pgType)
199
+ rustType = col.nullable ? `Option<${enumType}>` : enumType
200
+ } else if (col.category === 'composite' && col.compositeFields?.length) {
201
+ const compositeType = toPascalCase(col.pgType)
202
+ rustType = col.nullable ? `Option<${compositeType}>` : compositeType
203
+ } else {
204
+ const mapped = pgToRust(col.pgType, col.nullable, col.category)
205
+ rustType = mapped.type
206
+ for (const imp of mapped.imports) allImports.add(imp)
207
+ }
208
+
209
+ fields.push(` pub ${col.name}: ${rustType},`)
210
+ }
211
+
212
+ modelStructs.push(`/// Read-only (from view)`)
213
+ modelStructs.push('#[derive(Debug, Clone, Queryable, Serialize, Deserialize)]')
214
+ modelStructs.push(`pub struct ${view.pascalName} {`)
215
+ modelStructs.push(fields.join('\n'))
216
+ modelStructs.push('}')
217
+ }
218
+
219
+ if (tableMacros.length === 0 && enumBlocks.length === 0 && compositeBlocks.length === 0) {
220
+ return { files: [] }
221
+ }
222
+
223
+ // schema.rs
224
+ const schemaContent = `${tableMacros.join('\n\n')}\n`
225
+
226
+ // models.rs
227
+ const sortedImports = [...allImports].sort()
228
+ const useLines = sortedImports.map((imp) => `use ${imp};`)
229
+ const modelsContent = [
230
+ 'use diesel::prelude::*;',
231
+ 'use crate::schema::*;',
232
+ ...useLines,
233
+ '',
234
+ ...(enumBlocks.length > 0 ? [...enumBlocks, ''] : []),
235
+ ...(compositeBlocks.length > 0 ? [...compositeBlocks, ''] : []),
236
+ modelStructs.join('\n\n'),
237
+ '',
238
+ ].join('\n')
239
+
240
+ return {
241
+ files: [
242
+ { path: 'schema.rs', content: schemaContent },
243
+ { path: 'models.rs', content: modelsContent },
244
+ ],
245
+ }
246
+ },
247
+ })
@@ -0,0 +1,6 @@
1
+ # Generated by codegen — only Dockerfile and test scripts are tracked
2
+ *
3
+ !.gitignore
4
+ !Dockerfile
5
+ !test.*
6
+ !Test.*
@@ -0,0 +1,16 @@
1
# Type-check the generated schema.rs/models.rs by compiling them inside a
# fresh cargo crate with the dependencies the codegen output expects.
FROM rust:1.85-slim
WORKDIR /app
RUN cargo init --name typecheck .
# Append dependencies to the [dependencies] section cargo init leaves at the
# end of Cargo.toml.
# NOTE(review): the here-doc form of RUN requires BuildKit (dockerfile syntax
# 1.4+) — confirm the CI builder supports it.
RUN cat >> Cargo.toml <<'TOML'
diesel = { version = "2", features = ["postgres"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
chrono = { version = "0.4", features = ["serde"] }
uuid = { version = "1", features = ["serde"] }
bigdecimal = { version = "0.4", features = ["serde"] }
TOML
# Drop the binary entry point; the crate becomes a library exposing the
# generated modules.
RUN rm src/main.rs
COPY . src/
RUN echo 'mod schema;' > src/lib.rs && echo 'mod models;' >> src/lib.rs
# cargo check type-checks without producing build artifacts.
RUN cargo check
CMD ["echo", "ok"]