@sqldoc/templates 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108):
  1. package/package.json +161 -0
  2. package/src/__tests__/dedent.test.ts +45 -0
  3. package/src/__tests__/docker-templates.test.ts +134 -0
  4. package/src/__tests__/go-structs.test.ts +184 -0
  5. package/src/__tests__/naming.test.ts +48 -0
  6. package/src/__tests__/python-dataclasses.test.ts +185 -0
  7. package/src/__tests__/rust-structs.test.ts +176 -0
  8. package/src/__tests__/tags-helpers.test.ts +72 -0
  9. package/src/__tests__/type-mapping.test.ts +332 -0
  10. package/src/__tests__/typescript.test.ts +202 -0
  11. package/src/cobol-copybook/index.ts +220 -0
  12. package/src/cobol-copybook/test/.gitignore +6 -0
  13. package/src/cobol-copybook/test/Dockerfile +7 -0
  14. package/src/csharp-records/index.ts +131 -0
  15. package/src/csharp-records/test/.gitignore +6 -0
  16. package/src/csharp-records/test/Dockerfile +6 -0
  17. package/src/diesel/index.ts +247 -0
  18. package/src/diesel/test/.gitignore +6 -0
  19. package/src/diesel/test/Dockerfile +16 -0
  20. package/src/drizzle/index.ts +255 -0
  21. package/src/drizzle/test/.gitignore +6 -0
  22. package/src/drizzle/test/Dockerfile +8 -0
  23. package/src/drizzle/test/test.ts +71 -0
  24. package/src/efcore/index.ts +190 -0
  25. package/src/efcore/test/.gitignore +6 -0
  26. package/src/efcore/test/Dockerfile +7 -0
  27. package/src/go-structs/index.ts +119 -0
  28. package/src/go-structs/test/.gitignore +6 -0
  29. package/src/go-structs/test/Dockerfile +13 -0
  30. package/src/go-structs/test/test.go +71 -0
  31. package/src/gorm/index.ts +134 -0
  32. package/src/gorm/test/.gitignore +6 -0
  33. package/src/gorm/test/Dockerfile +13 -0
  34. package/src/gorm/test/test.go +65 -0
  35. package/src/helpers/atlas.ts +43 -0
  36. package/src/helpers/enrich.ts +396 -0
  37. package/src/helpers/naming.ts +19 -0
  38. package/src/helpers/tags.ts +63 -0
  39. package/src/index.ts +24 -0
  40. package/src/java-records/index.ts +179 -0
  41. package/src/java-records/test/.gitignore +6 -0
  42. package/src/java-records/test/Dockerfile +11 -0
  43. package/src/java-records/test/Test.java +93 -0
  44. package/src/jpa/index.ts +279 -0
  45. package/src/jpa/test/.gitignore +6 -0
  46. package/src/jpa/test/Dockerfile +14 -0
  47. package/src/jpa/test/Test.java +111 -0
  48. package/src/json-schema/index.ts +351 -0
  49. package/src/json-schema/test/.gitignore +6 -0
  50. package/src/json-schema/test/Dockerfile +18 -0
  51. package/src/knex/index.ts +168 -0
  52. package/src/knex/test/.gitignore +6 -0
  53. package/src/knex/test/Dockerfile +7 -0
  54. package/src/knex/test/test.ts +75 -0
  55. package/src/kotlin-data/index.ts +147 -0
  56. package/src/kotlin-data/test/.gitignore +6 -0
  57. package/src/kotlin-data/test/Dockerfile +14 -0
  58. package/src/kotlin-data/test/Test.kt +82 -0
  59. package/src/kysely/index.ts +165 -0
  60. package/src/kysely/test/.gitignore +6 -0
  61. package/src/kysely/test/Dockerfile +8 -0
  62. package/src/kysely/test/test.ts +82 -0
  63. package/src/prisma/index.ts +387 -0
  64. package/src/prisma/test/.gitignore +6 -0
  65. package/src/prisma/test/Dockerfile +7 -0
  66. package/src/protobuf/index.ts +219 -0
  67. package/src/protobuf/test/.gitignore +6 -0
  68. package/src/protobuf/test/Dockerfile +6 -0
  69. package/src/pydantic/index.ts +272 -0
  70. package/src/pydantic/test/.gitignore +6 -0
  71. package/src/pydantic/test/Dockerfile +8 -0
  72. package/src/pydantic/test/test.py +63 -0
  73. package/src/python-dataclasses/index.ts +217 -0
  74. package/src/python-dataclasses/test/.gitignore +6 -0
  75. package/src/python-dataclasses/test/Dockerfile +8 -0
  76. package/src/python-dataclasses/test/test.py +63 -0
  77. package/src/rust-structs/index.ts +152 -0
  78. package/src/rust-structs/test/.gitignore +6 -0
  79. package/src/rust-structs/test/Dockerfile +22 -0
  80. package/src/rust-structs/test/test.rs +82 -0
  81. package/src/sqlalchemy/index.ts +258 -0
  82. package/src/sqlalchemy/test/.gitignore +6 -0
  83. package/src/sqlalchemy/test/Dockerfile +8 -0
  84. package/src/sqlalchemy/test/test.py +61 -0
  85. package/src/sqlc/index.ts +148 -0
  86. package/src/sqlc/test/.gitignore +6 -0
  87. package/src/sqlc/test/Dockerfile +13 -0
  88. package/src/sqlc/test/test.go +91 -0
  89. package/src/tags/dedent.ts +28 -0
  90. package/src/tags/index.ts +14 -0
  91. package/src/types/index.ts +8 -0
  92. package/src/types/pg-to-csharp.ts +136 -0
  93. package/src/types/pg-to-go.ts +120 -0
  94. package/src/types/pg-to-java.ts +141 -0
  95. package/src/types/pg-to-kotlin.ts +119 -0
  96. package/src/types/pg-to-python.ts +120 -0
  97. package/src/types/pg-to-rust.ts +121 -0
  98. package/src/types/pg-to-ts.ts +173 -0
  99. package/src/typescript/index.ts +168 -0
  100. package/src/typescript/test/.gitignore +6 -0
  101. package/src/typescript/test/Dockerfile +8 -0
  102. package/src/typescript/test/test.ts +89 -0
  103. package/src/xsd/index.ts +191 -0
  104. package/src/xsd/test/.gitignore +6 -0
  105. package/src/xsd/test/Dockerfile +6 -0
  106. package/src/zod/index.ts +289 -0
  107. package/src/zod/test/.gitignore +6 -0
  108. package/src/zod/test/Dockerfile +6 -0
@@ -0,0 +1,272 @@
1
+ import { defineTemplate } from '@sqldoc/ns-codegen'
2
+ import { activeTables, enrichRealm, type TagEntry } from '../helpers/enrich.ts'
3
+ import { toPascalCase, toScreamingSnake } from '../helpers/naming.ts'
4
+ import { pgToPython } from '../types/pg-to-python.ts'
5
+
6
/**
 * Python imports needed for specific types.
 * Maps a generated Python type name (after unwrapping Optional[...]/list[...])
 * to the exact import statement that must appear in the generated module.
 */
const TYPE_IMPORTS: Record<string, string> = {
  datetime: 'from datetime import datetime',
  date: 'from datetime import date',
  time: 'from datetime import time',
  timedelta: 'from datetime import timedelta',
  Decimal: 'from decimal import Decimal',
  UUID: 'from uuid import UUID',
  Any: 'from typing import Any',
}
16
+
17
/**
 * Pydantic template: renders every enum, composite type, table, view, and
 * non-trigger function signature of the enriched schema into a single
 * generated `models.py`.
 */
export default defineTemplate({
  name: 'Pydantic Models',
  description: 'Generate Pydantic BaseModel classes with Field validators from SQL schema',
  language: 'python',

  generate(ctx) {
    const schema = enrichRealm(ctx)
    // Import lines for the generated module; a Set so each line appears once.
    const allImports = new Set<string>()
    allImports.add('from pydantic import BaseModel, Field')
    // NOTE(review): only ever probed with .has(true) below — a plain boolean
    // flag would express the same thing.
    const needsOptional = new Set<boolean>()
    const needsEnum = schema.enums.length > 0
    const needsCallable = { value: false }
    const classBlocks: string[] = []

    // Enums -> `class X(str, Enum)` with SCREAMING_SNAKE members.
    for (const e of schema.enums) {
      const className = toPascalCase(e.name)
      const members = e.values.map((v) => ` ${toScreamingSnake(v)} = "${v}"`).join('\n')
      classBlocks.push(`class ${className}(str, Enum):\n${members}`)
    }

    // Composite types (collected from columns, rendered as BaseModel).
    // First occurrence of a pg composite type wins; keyed by pgType name.
    const composites = new Map<string, Array<{ name: string; type: string }>>()
    for (const table of schema.tables) {
      for (const col of table.columns) {
        if (col.category === 'composite' && col.compositeFields?.length && !composites.has(col.pgType)) {
          composites.set(col.pgType, col.compositeFields)
        }
      }
    }
    for (const [name, fields] of composites) {
      const typeName = toPascalCase(name)
      const fieldLines: string[] = []
      for (const f of fields) {
        const pyType = pgToPython(f.type, false)
        collectImports(pyType, allImports)
        fieldLines.push(` ${f.name}: ${pyType}`)
      }
      classBlocks.push(`class ${typeName}(BaseModel):\n${fieldLines.join('\n')}`)
    }

    // Tables -> one BaseModel each; skipped tables are filtered by activeTables.
    for (const table of activeTables(schema)) {
      const requiredFields: string[] = []
      const optionalFields: string[] = []

      for (const col of table.columns) {
        let pyType: string

        // Precedence: explicit override > enum > composite > plain pg mapping.
        if (col.typeOverride) {
          pyType = col.typeOverride
        } else if (col.category === 'enum' && col.enumValues?.length) {
          const enumName = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${enumName}]` : enumName
        } else if (col.category === 'composite' && col.compositeFields?.length) {
          const compositeType = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${compositeType}]` : compositeType
        } else {
          pyType = pgToPython(col.pgType, col.nullable, col.category)
        }

        collectImports(pyType, allImports)

        // Build Field() arguments from @validate tags
        const fieldArgs = buildFieldArgs(col.tags)

        if (col.nullable) {
          needsOptional.add(true)
          if (fieldArgs) {
            optionalFields.push(` ${col.name}: ${pyType} = Field(default=None, ${fieldArgs})`)
          } else {
            optionalFields.push(` ${col.name}: ${pyType} = None`)
          }
        } else {
          if (fieldArgs) {
            requiredFields.push(` ${col.name}: ${pyType} = Field(${fieldArgs})`)
          } else {
            requiredFields.push(` ${col.name}: ${pyType}`)
          }
        }
      }

      // Defaulted (nullable) fields must come after required ones in Python.
      const fields = [...requiredFields, ...optionalFields]
      classBlocks.push(`class ${table.pascalName}(BaseModel):\n${fields.join('\n')}`)
    }

    // Views (read-only, frozen BaseModel)
    for (const view of schema.views.filter((v) => !v.skipped)) {
      const requiredFields: string[] = []
      const optionalFields: string[] = []

      for (const col of view.columns) {
        let pyType: string

        // Same type-resolution precedence as the tables loop above.
        if (col.typeOverride) {
          pyType = col.typeOverride
        } else if (col.category === 'enum' && col.enumValues?.length) {
          const enumName = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${enumName}]` : enumName
        } else if (col.category === 'composite' && col.compositeFields?.length) {
          const compositeType = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${compositeType}]` : compositeType
        } else {
          pyType = pgToPython(col.pgType, col.nullable, col.category)
        }

        collectImports(pyType, allImports)

        const fieldArgs = buildFieldArgs(col.tags)

        if (col.nullable) {
          needsOptional.add(true)
          if (fieldArgs) {
            optionalFields.push(` ${col.name}: ${pyType} = Field(default=None, ${fieldArgs})`)
          } else {
            optionalFields.push(` ${col.name}: ${pyType} = None`)
          }
        } else {
          if (fieldArgs) {
            requiredFields.push(` ${col.name}: ${pyType} = Field(${fieldArgs})`)
          } else {
            requiredFields.push(` ${col.name}: ${pyType}`)
          }
        }
      }

      const fields = [...requiredFields, ...optionalFields]
      // Pydantic v1-style inner Config marks the model immutable.
      const configLine = `\n\n class Config:\n frozen = True`
      classBlocks.push(
        `class ${view.pascalName}(BaseModel):\n """Read-only (from view)"""\n${fields.join('\n')}${configLine}`,
      )
    }

    // Functions (skip trigger functions)
    const funcLines: string[] = []
    for (const fn of schema.functions) {
      const retRaw = fn.returnType?.type?.toLowerCase() ?? ''
      if (retRaw === 'trigger') continue

      needsCallable.value = true

      // Underscore-prefixed and OUT-mode arguments are excluded from the
      // Callable signature.
      const params = fn.args
        .filter((a) => !a.name?.startsWith('_') && (a as any).mode !== 'OUT')
        .map((a) => {
          const argType = pgToPython(a.type, false, a.category)
          collectImports(argType, allImports)
          return argType
        })

      let retType: string
      if (retRaw.startsWith('setof ')) {
        // `setof tbl` -> list[TblModel] when the table is known, else a plain
        // list of the mapped type name.
        const tableName = retRaw.replace('setof ', '')
        const table = schema.tables.find((t) => t.name === tableName)
        retType = table ? `list[${table.pascalName}]` : `list[${pgToPython(tableName, false)}]`
      } else if (fn.returnType) {
        retType = pgToPython(fn.returnType.type, false, fn.returnType.category)
      } else {
        retType = 'None'
      }
      collectImports(retType, allImports)

      funcLines.push(`${fn.pascalName}: Callable[[${params.join(', ')}], ${retType}]`)
    }

    // Conditional imports decided after all codegen passes have run.
    if (needsOptional.has(true)) {
      allImports.add('from typing import Optional')
    }
    if (needsEnum) {
      allImports.add('from enum import Enum')
    }
    if (needsCallable.value) {
      allImports.add('from typing import Callable')
    }

    const sortedImports = sortPythonImports([...allImports])

    const allBlocks = [...classBlocks]
    if (funcLines.length > 0) {
      allBlocks.push(funcLines.join('\n'))
    }

    const content = `# Generated by @sqldoc/templates/pydantic -- DO NOT EDIT

${sortedImports.join('\n')}


${allBlocks.join('\n\n\n')}
`

    return {
      files: [{ path: 'models.py', content }],
    }
  },
})
210
+
211
+ function buildFieldArgs(tags: TagEntry[]): string | undefined {
212
+ const validateTags = tags.filter((t) => t.namespace === 'validate')
213
+ if (validateTags.length === 0) return undefined
214
+
215
+ const parts: string[] = []
216
+
217
+ for (const tag of validateTags) {
218
+ switch (tag.tag) {
219
+ case 'notEmpty':
220
+ parts.push('min_length=1')
221
+ break
222
+ case 'length': {
223
+ const args = tag.args as Record<string, unknown>
224
+ if (args && typeof args === 'object' && !Array.isArray(args)) {
225
+ if ('min' in args) parts.push(`min_length=${args.min}`)
226
+ if ('max' in args) parts.push(`max_length=${args.max}`)
227
+ }
228
+ break
229
+ }
230
+ case 'range': {
231
+ const args = tag.args as Record<string, unknown>
232
+ if (args && typeof args === 'object' && !Array.isArray(args)) {
233
+ if ('min' in args) parts.push(`ge=${args.min}`)
234
+ if ('max' in args) parts.push(`le=${args.max}`)
235
+ }
236
+ break
237
+ }
238
+ case 'pattern': {
239
+ const args = tag.args
240
+ if (Array.isArray(args) && args.length > 0) {
241
+ parts.push(`pattern='${args[0]}'`)
242
+ }
243
+ break
244
+ }
245
+ }
246
+ }
247
+
248
+ return parts.length > 0 ? parts.join(', ') : undefined
249
+ }
250
+
251
+ function collectImports(pyType: string, imports: Set<string>): void {
252
+ const match = pyType.match(/^Optional\[(.+)\]$/)
253
+ const baseType = match ? match[1] : pyType
254
+ const listMatch = baseType.match(/^list\[(.+)\]$/)
255
+ const innerType = listMatch ? listMatch[1] : baseType
256
+
257
+ if (TYPE_IMPORTS[innerType]) {
258
+ imports.add(TYPE_IMPORTS[innerType])
259
+ }
260
+ }
261
+
262
+ function sortPythonImports(imports: string[]): string[] {
263
+ return imports.sort((a, b) => {
264
+ if (a.includes('pydantic')) return -1
265
+ if (b.includes('pydantic')) return 1
266
+ if (a.includes('typing')) return -1
267
+ if (b.includes('typing')) return 1
268
+ if (a.includes('enum')) return -1
269
+ if (b.includes('enum')) return 1
270
+ return a.localeCompare(b)
271
+ })
272
+ }
@@ -0,0 +1,6 @@
1
+ # Generated by codegen — only Dockerfile and test scripts are tracked
2
+ *
3
+ !.gitignore
4
+ !Dockerfile
5
+ !test.*
6
+ !Test.*
@@ -0,0 +1,8 @@
1
FROM python:3.13-slim
WORKDIR /app
COPY . .
# --no-cache-dir keeps pip's download cache out of the image layer
RUN pip install --no-cache-dir pydantic psycopg2-binary
# Step 1: syntax check the generated models
RUN python -m py_compile models.py
# Step 2: run integration test against real DB (expects DATABASE_URL at runtime)
CMD ["python", "test.py"]
@@ -0,0 +1,63 @@
1
"""
Integration test for @sqldoc/templates/pydantic
Connects to real Postgres, verifies generated Pydantic models work with actual data.
"""
import os
import sys
import psycopg2

from models import Users, Posts

DATABASE_URL = os.environ.get("DATABASE_URL")
if not DATABASE_URL:
    print("DATABASE_URL not set", file=sys.stderr)
    sys.exit(1)

# Count of failed assertions; incremented by assert_eq, checked at the end of main().
failed = 0


def assert_eq(actual, expected, msg):
    """Compare values; report and count a failure instead of raising."""
    global failed
    if actual != expected:
        print(f"FAIL: {msg} (got {actual!r}, expected {expected!r})", file=sys.stderr)
        failed += 1
    else:
        print(f" ok: {msg}")


def fetch_one_or_die(cur, query, what):
    """Run `query` and return its single row; exit with a clear error if no row exists.

    Previously a missing fixture row made fetchone() return None and the test
    crashed with an opaque TypeError on row[0].
    """
    cur.execute(query)
    row = cur.fetchone()
    if row is None:
        print(f"FATAL: no {what} row found - is the fixture data loaded?", file=sys.stderr)
        sys.exit(1)
    return row


def main():
    conn = psycopg2.connect(DATABASE_URL)
    conn.autocommit = True
    cur = conn.cursor()

    print("--- pydantic integration test ---")

    # 1. Query user and construct Pydantic model
    row = fetch_one_or_die(
        cur, "SELECT id, email, name, age, is_active, created_at FROM users WHERE id = 1", "user"
    )
    user = Users(id=row[0], email=row[1], name=row[2], age=row[3], is_active=row[4], created_at=row[5])
    assert_eq(user.email, "test@example.com", "user.email matches")
    assert_eq(user.name, "Test User", "user.name matches")
    assert_eq(user.age, 30, "user.age matches")
    assert_eq(user.is_active, True, "user.is_active matches")

    # 2. Query post and construct Pydantic model
    row = fetch_one_or_die(
        cur, "SELECT id, user_id, title, body, view_count, rating FROM posts WHERE id = 1", "post"
    )
    post = Posts(id=row[0], user_id=row[1], title=row[2], body=row[3], view_count=row[4], rating=row[5])
    assert_eq(post.title, "Hello World", "post.title matches")
    assert_eq(post.user_id, 1, "post.user_id matches")
    assert_eq(post.view_count, 42, "post.view_count matches")

    cur.close()
    conn.close()

    if failed > 0:
        print(f"\n{failed} assertion(s) failed", file=sys.stderr)
        sys.exit(1)
    print("\nAll assertions passed!")


if __name__ == "__main__":
    main()
@@ -0,0 +1,217 @@
1
+ import { defineTemplate } from '@sqldoc/ns-codegen'
2
+ import { activeTables, enrichRealm } from '../helpers/enrich.ts'
3
+ import { toPascalCase, toScreamingSnake } from '../helpers/naming.ts'
4
+ import { pgToPython } from '../types/pg-to-python.ts'
5
+
6
/**
 * Python imports needed for specific types.
 * Maps a generated Python type name (after unwrapping Optional[...]/list[...])
 * to the exact import statement that must appear in the generated module.
 */
const TYPE_IMPORTS: Record<string, string> = {
  datetime: 'from datetime import datetime',
  date: 'from datetime import date',
  time: 'from datetime import time',
  timedelta: 'from datetime import timedelta',
  Decimal: 'from decimal import Decimal',
  UUID: 'from uuid import UUID',
  Any: 'from typing import Any',
}
16
+
17
/**
 * Python dataclasses template: renders every enum, composite type, table,
 * view, and non-trigger function signature of the enriched schema into a
 * single generated `models.py` built on @dataclass.
 */
export default defineTemplate({
  name: 'Python Dataclasses',
  description: 'Generate Python @dataclass classes from SQL schema',
  language: 'python',

  generate(ctx) {
    const schema = enrichRealm(ctx)
    // Import lines for the generated module; a Set so each line appears once.
    const allImports = new Set<string>()
    allImports.add('from dataclasses import dataclass')
    // NOTE(review): only ever probed with .has(true) below — a plain boolean
    // flag would express the same thing.
    const needsOptional = new Set<boolean>()
    const needsEnum = schema.enums.length > 0
    const needsCallable = { value: false }
    const classBlocks: string[] = []

    // Enums -> `class X(str, Enum)` with SCREAMING_SNAKE members.
    for (const e of schema.enums) {
      const className = toPascalCase(e.name)
      const members = e.values.map((v) => ` ${toScreamingSnake(v)} = "${v}"`).join('\n')
      classBlocks.push(`class ${className}(str, Enum):\n${members}`)
    }

    // Composite types (collected from columns); first occurrence of each
    // pg composite type wins, keyed by pgType name.
    const composites = new Map<string, Array<{ name: string; type: string }>>()
    for (const table of schema.tables) {
      for (const col of table.columns) {
        if (col.category === 'composite' && col.compositeFields?.length && !composites.has(col.pgType)) {
          composites.set(col.pgType, col.compositeFields)
        }
      }
    }
    for (const [name, fields] of composites) {
      const typeName = toPascalCase(name)
      const fieldLines: string[] = []
      for (const f of fields) {
        const pyType = pgToPython(f.type, false)
        collectImports(pyType, allImports, needsOptional)
        fieldLines.push(` ${f.name}: ${pyType}`)
      }
      classBlocks.push(`@dataclass\nclass ${typeName}:\n${fieldLines.join('\n')}`)
    }

    // Tables -> one @dataclass each; skipped tables filtered by activeTables.
    for (const table of activeTables(schema)) {
      const requiredFields: string[] = []
      const optionalFields: string[] = []

      for (const col of table.columns) {
        let pyType: string

        // Precedence: explicit override > enum > composite > plain pg mapping.
        if (col.typeOverride) {
          pyType = col.typeOverride
        } else if (col.category === 'enum' && col.enumValues?.length) {
          const enumName = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${enumName}]` : enumName
        } else if (col.category === 'composite' && col.compositeFields?.length) {
          const compositeType = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${compositeType}]` : compositeType
        } else {
          pyType = pgToPython(col.pgType, col.nullable, col.category)
        }

        // Collect imports for types used
        collectImports(pyType, allImports, needsOptional)

        if (col.nullable) {
          needsOptional.add(true)
          optionalFields.push(` ${col.name}: ${pyType} = None`)
        } else {
          requiredFields.push(` ${col.name}: ${pyType}`)
        }
      }

      // Python requires non-default args before default args
      const fields = [...requiredFields, ...optionalFields]
      classBlocks.push(`@dataclass\nclass ${table.pascalName}:\n${fields.join('\n')}`)
    }

    // Views (read-only, frozen dataclasses)
    for (const view of schema.views.filter((v) => !v.skipped)) {
      const requiredFields: string[] = []
      const optionalFields: string[] = []

      for (const col of view.columns) {
        let pyType: string

        // Same type-resolution precedence as the tables loop above.
        if (col.typeOverride) {
          pyType = col.typeOverride
        } else if (col.category === 'enum' && col.enumValues?.length) {
          const enumName = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${enumName}]` : enumName
        } else if (col.category === 'composite' && col.compositeFields?.length) {
          const compositeType = toPascalCase(col.pgType)
          pyType = col.nullable ? `Optional[${compositeType}]` : compositeType
        } else {
          pyType = pgToPython(col.pgType, col.nullable, col.category)
        }

        collectImports(pyType, allImports, needsOptional)

        if (col.nullable) {
          needsOptional.add(true)
          optionalFields.push(` ${col.name}: ${pyType} = None`)
        } else {
          requiredFields.push(` ${col.name}: ${pyType}`)
        }
      }

      const fields = [...requiredFields, ...optionalFields]
      classBlocks.push(
        `@dataclass(frozen=True)\nclass ${view.pascalName}:\n """Read-only (from view)"""\n${fields.join('\n')}`,
      )
    }

    // Functions (skip trigger functions)
    const funcLines: string[] = []
    for (const fn of schema.functions) {
      const retRaw = fn.returnType?.type?.toLowerCase() ?? ''
      if (retRaw === 'trigger') continue

      needsCallable.value = true

      // Underscore-prefixed and OUT-mode arguments are excluded from the
      // Callable signature.
      const params = fn.args
        .filter((a) => !a.name?.startsWith('_') && (a as any).mode !== 'OUT')
        .map((a) => {
          const argType = pgToPython(a.type, false, a.category)
          collectImports(argType, allImports, needsOptional)
          return argType
        })

      let retType: string
      if (retRaw.startsWith('setof ')) {
        // `setof tbl` -> list[TblModel] when the table is known, else a plain
        // list of the mapped type name.
        const tableName = retRaw.replace('setof ', '')
        const table = schema.tables.find((t) => t.name === tableName)
        retType = table ? `list[${table.pascalName}]` : `list[${pgToPython(tableName, false)}]`
      } else if (fn.returnType) {
        retType = pgToPython(fn.returnType.type, false, fn.returnType.category)
      } else {
        retType = 'None'
      }
      collectImports(retType, allImports, needsOptional)

      funcLines.push(`${fn.pascalName}: Callable[[${params.join(', ')}], ${retType}]`)
    }

    // Conditional imports decided after all codegen passes have run.
    if (needsOptional.has(true)) {
      allImports.add('from typing import Optional')
    }
    if (needsEnum) {
      allImports.add('from enum import Enum')
    }
    if (needsCallable.value) {
      allImports.add('from typing import Callable')
    }

    const sortedImports = sortPythonImports([...allImports])

    const allBlocks = [...classBlocks]
    if (funcLines.length > 0) {
      allBlocks.push(funcLines.join('\n'))
    }

    const content = `# Generated by @sqldoc/templates/python-dataclasses -- DO NOT EDIT

${sortedImports.join('\n')}


${allBlocks.join('\n\n\n')}
`

    return {
      files: [{ path: 'models.py', content }],
    }
  },
})
190
+
191
+ function collectImports(pyType: string, imports: Set<string>, _needsOptional: Set<boolean>): void {
192
+ // Strip Optional[] wrapper to get base type
193
+ const match = pyType.match(/^Optional\[(.+)\]$/)
194
+ const baseType = match ? match[1] : pyType
195
+
196
+ // Strip list[] wrapper
197
+ const listMatch = baseType.match(/^list\[(.+)\]$/)
198
+ const innerType = listMatch ? listMatch[1] : baseType
199
+
200
+ if (TYPE_IMPORTS[innerType]) {
201
+ imports.add(TYPE_IMPORTS[innerType])
202
+ }
203
+ }
204
+
205
+ function sortPythonImports(imports: string[]): string[] {
206
+ // Sort: stdlib first, then third-party, then local
207
+ return imports.sort((a, b) => {
208
+ // dataclass and typing always first
209
+ if (a.includes('dataclass')) return -1
210
+ if (b.includes('dataclass')) return 1
211
+ if (a.includes('typing')) return -1
212
+ if (b.includes('typing')) return 1
213
+ if (a.includes('enum')) return -1
214
+ if (b.includes('enum')) return 1
215
+ return a.localeCompare(b)
216
+ })
217
+ }
@@ -0,0 +1,6 @@
1
+ # Generated by codegen — only Dockerfile and test scripts are tracked
2
+ *
3
+ !.gitignore
4
+ !Dockerfile
5
+ !test.*
6
+ !Test.*
@@ -0,0 +1,8 @@
1
FROM python:3.13-slim
WORKDIR /app
COPY . .
# --no-cache-dir keeps pip's download cache out of the image layer
RUN pip install --no-cache-dir psycopg2-binary
# Step 1: syntax check the generated models
RUN python -m py_compile models.py
# Step 2: run integration test against real DB (expects DATABASE_URL at runtime)
CMD ["python", "test.py"]
@@ -0,0 +1,63 @@
1
"""
Integration test for @sqldoc/templates/python-dataclasses
Connects to real Postgres, verifies generated dataclasses work with actual data.
"""
import os
import sys
import psycopg2

from models import Users, Posts

DATABASE_URL = os.environ.get("DATABASE_URL")
if not DATABASE_URL:
    print("DATABASE_URL not set", file=sys.stderr)
    sys.exit(1)

# Count of failed assertions; incremented by assert_eq, checked at the end of main().
failed = 0


def assert_eq(actual, expected, msg):
    """Compare values; report and count a failure instead of raising."""
    global failed
    if actual != expected:
        print(f"FAIL: {msg} (got {actual!r}, expected {expected!r})", file=sys.stderr)
        failed += 1
    else:
        print(f" ok: {msg}")


def fetch_one_or_die(cur, query, what):
    """Run `query` and return its single row; exit with a clear error if no row exists.

    Previously a missing fixture row made fetchone() return None and the test
    crashed with an opaque TypeError on row[0].
    """
    cur.execute(query)
    row = cur.fetchone()
    if row is None:
        print(f"FATAL: no {what} row found - is the fixture data loaded?", file=sys.stderr)
        sys.exit(1)
    return row


def main():
    conn = psycopg2.connect(DATABASE_URL)
    conn.autocommit = True
    cur = conn.cursor()

    print("--- python-dataclasses integration test ---")

    # 1. Query user and construct dataclass
    row = fetch_one_or_die(
        cur, "SELECT id, email, name, age, is_active, created_at FROM users WHERE id = 1", "user"
    )
    user = Users(id=row[0], email=row[1], name=row[2], age=row[3], is_active=row[4], created_at=row[5])
    assert_eq(user.email, "test@example.com", "user.email matches")
    assert_eq(user.name, "Test User", "user.name matches")
    assert_eq(user.age, 30, "user.age matches")
    assert_eq(user.is_active, True, "user.is_active matches")

    # 2. Query post and construct dataclass
    row = fetch_one_or_die(
        cur, "SELECT id, user_id, title, body, view_count, rating FROM posts WHERE id = 1", "post"
    )
    post = Posts(id=row[0], user_id=row[1], title=row[2], body=row[3], view_count=row[4], rating=row[5])
    assert_eq(post.title, "Hello World", "post.title matches")
    assert_eq(post.user_id, 1, "post.user_id matches")
    assert_eq(post.view_count, 42, "post.view_count matches")

    cur.close()
    conn.close()

    if failed > 0:
        print(f"\n{failed} assertion(s) failed", file=sys.stderr)
        sys.exit(1)
    print("\nAll assertions passed!")


if __name__ == "__main__":
    main()