sqlcx-orm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3qq0zjsm.js +12 -0
- package/dist/chunk-49wq4032.js +11 -0
- package/dist/cli.js +1226 -0
- package/dist/config/index.js +9 -0
- package/dist/generator/typescript/driver/bun-sql.js +151 -0
- package/dist/generator/typescript/index.js +110 -0
- package/dist/generator/typescript/schema/typebox.js +127 -0
- package/dist/index.js +7 -0
- package/dist/parser/postgres.js +615 -0
- package/package.json +56 -0
- package/src/cache/index.ts +46 -0
- package/src/cli/index.ts +306 -0
- package/src/config/index.ts +19 -0
- package/src/generator/interface.ts +36 -0
- package/src/generator/typescript/driver/bun-sql.ts +157 -0
- package/src/generator/typescript/index.ts +144 -0
- package/src/generator/typescript/schema/typebox.ts +143 -0
- package/src/index.ts +23 -0
- package/src/ir/index.ts +72 -0
- package/src/parser/interface.ts +8 -0
- package/src/parser/param-naming.ts +49 -0
- package/src/parser/postgres.ts +745 -0
- package/src/utils/index.ts +13 -0
package/src/cli/index.ts
ADDED
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
|
|
2
|
+
import { join, dirname, basename, extname, relative } from "path";
|
|
3
|
+
import { computeHash, readCache, writeCache } from "@/cache";
|
|
4
|
+
import { createPostgresParser } from "@/parser/postgres";
|
|
5
|
+
import { createTypeScriptPlugin } from "@/generator/typescript";
|
|
6
|
+
import { createTypeBoxGenerator } from "@/generator/typescript/schema/typebox";
|
|
7
|
+
import { createBunSqlGenerator } from "@/generator/typescript/driver/bun-sql";
|
|
8
|
+
import type { SqlcxIR, TableDef, QueryDef, EnumDef } from "@/ir";
|
|
9
|
+
|
|
10
|
+
// ---------------------------------------------------------------------------
|
|
11
|
+
// Helpers
|
|
12
|
+
// ---------------------------------------------------------------------------
|
|
13
|
+
|
|
14
|
+
function globSqlFiles(dir: string): string[] {
|
|
15
|
+
const glob = new Bun.Glob("**/*.sql");
|
|
16
|
+
const paths: string[] = [];
|
|
17
|
+
for (const match of glob.scanSync({ cwd: dir, absolute: true })) {
|
|
18
|
+
paths.push(match);
|
|
19
|
+
}
|
|
20
|
+
return paths.sort();
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
function isSchemaFile(content: string): boolean {
|
|
24
|
+
return /CREATE\s+TABLE/i.test(content);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
function isQueryFile(content: string): boolean {
|
|
28
|
+
return /--\s*name:/i.test(content);
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
function getFlag(args: string[], flag: string): string | undefined {
|
|
32
|
+
const idx = args.indexOf(flag);
|
|
33
|
+
return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : undefined;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
// ---------------------------------------------------------------------------
|
|
37
|
+
// Generate
|
|
38
|
+
// ---------------------------------------------------------------------------
|
|
39
|
+
|
|
40
|
+
/** Options for the `generate` command. */
export interface GenerateOptions {
  /** Directory scanned (recursively) for .sql files. */
  sqlDir: string;
  /** Directory generated TypeScript files are written to. */
  outDir: string;
  /** Directory used to cache the parsed IR between runs. */
  cacheDir: string;
}
|
|
45
|
+
|
|
46
|
+
/**
 * CLI `generate`: parse all .sql files under `sqlDir` into the sqlcx IR
 * (reusing the content-hash cache in `cacheDir` when nothing changed) and
 * write the generated TypeScript files into `outDir`.
 */
export async function generate(options: GenerateOptions): Promise<void> {
  const { sqlDir, outDir, cacheDir } = options;

  // 1. Glob all .sql files (sorted, so the hash below is stable).
  const sqlFiles = globSqlFiles(sqlDir);
  if (sqlFiles.length === 0) {
    console.log("No .sql files found in", sqlDir);
    return;
  }

  // 2. Read all SQL files; paths are stored relative to sqlDir.
  const fileContents = sqlFiles.map((f) => ({
    path: relative(sqlDir, f),
    content: readFileSync(f, "utf-8"),
  }));

  // 3. Compute content hash over the (path, content) pairs.
  const hash = computeHash(fileContents);

  // 4. Check cache — a hit skips parsing entirely.
  let ir = readCache(cacheDir, hash);

  if (!ir) {
    // 5. Parse schema + queries into the IR.
    ir = parse(fileContents);

    // 6. Write cache for the next run.
    writeCache(cacheDir, ir, hash);
  }

  // 7. Generate output. NOTE(review): the TypeBox schema and Bun SQL driver
  //    generators are hard-wired here rather than taken from sqlcx.config.ts.
  const plugin = createTypeScriptPlugin({
    schema: createTypeBoxGenerator(),
    driver: createBunSqlGenerator(),
  });
  const generatedFiles = plugin.generate(ir, { out: outDir });

  // 8. Write files to disk, creating parent directories as needed.
  for (const file of generatedFiles) {
    const dir = dirname(file.path);
    if (!existsSync(dir)) {
      mkdirSync(dir, { recursive: true });
    }
    writeFileSync(file.path, file.content, "utf-8");
  }

  // 9. Log success.
  console.log(`Generated ${generatedFiles.length} files to ${outDir}`);
}
|
|
95
|
+
|
|
96
|
+
// ---------------------------------------------------------------------------
|
|
97
|
+
// Check
|
|
98
|
+
// ---------------------------------------------------------------------------
|
|
99
|
+
|
|
100
|
+
/** Options for the `check` command. */
export interface CheckOptions {
  /** Directory scanned (recursively) for .sql files. */
  sqlDir: string;
  /** Directory used to cache the parsed IR between runs. */
  cacheDir: string;
}

/** Outcome of a `check` run. */
export interface CheckResult {
  /** True when all SQL files parsed without errors. */
  valid: boolean;
  /** Number of tables parsed from schema files. */
  tables: number;
  /** Number of named queries parsed from query files. */
  queries: number;
  /** Parse error messages; empty when valid. */
  errors: string[];
}
|
|
111
|
+
|
|
112
|
+
/**
 * CLI `check`: parse all .sql files under `sqlDir` (reusing the cache when
 * possible) and report whether parsing succeeded, without writing any
 * generated code. Intended for CI.
 */
export async function check(options: CheckOptions): Promise<CheckResult> {
  const { sqlDir, cacheDir } = options;
  const errors: string[] = [];

  // 1. Glob all .sql files — an empty project is vacuously valid.
  const sqlFiles = globSqlFiles(sqlDir);
  if (sqlFiles.length === 0) {
    return { valid: true, tables: 0, queries: 0, errors: [] };
  }

  // 2. Read all SQL files (paths relative to sqlDir, matching `generate`).
  const fileContents = sqlFiles.map((f) => ({
    path: relative(sqlDir, f),
    content: readFileSync(f, "utf-8"),
  }));

  // 3. Compute hash and check cache — a cache hit means this exact content
  //    was already parsed successfully by a previous run.
  const hash = computeHash(fileContents);
  let ir = readCache(cacheDir, hash);

  if (!ir) {
    // 4. Parse; any parser exception is reported as a check failure.
    try {
      ir = parse(fileContents);
      writeCache(cacheDir, ir, hash);
    } catch (err) {
      errors.push(String(err));
      return { valid: false, tables: 0, queries: 0, errors };
    }
  }

  // errors is necessarily empty on this path, so valid is always true here.
  return {
    valid: errors.length === 0,
    tables: ir.tables.length,
    queries: ir.queries.length,
    errors,
  };
}
|
|
150
|
+
|
|
151
|
+
// ---------------------------------------------------------------------------
|
|
152
|
+
// Shared parse logic
|
|
153
|
+
// ---------------------------------------------------------------------------
|
|
154
|
+
|
|
155
|
+
function parse(
|
|
156
|
+
fileContents: { path: string; content: string }[],
|
|
157
|
+
): SqlcxIR {
|
|
158
|
+
const parser = createPostgresParser();
|
|
159
|
+
|
|
160
|
+
const schemaFiles = fileContents.filter((f) => isSchemaFile(f.content));
|
|
161
|
+
const queryFiles = fileContents.filter((f) => isQueryFile(f.content));
|
|
162
|
+
|
|
163
|
+
// Parse enums from all schema files
|
|
164
|
+
const allSchemaSql = schemaFiles.map((f) => f.content).join("\n\n");
|
|
165
|
+
const enums: EnumDef[] = parser.parseEnums(allSchemaSql);
|
|
166
|
+
|
|
167
|
+
// Parse tables from all schema files
|
|
168
|
+
const tables: TableDef[] = parser.parseSchema(allSchemaSql);
|
|
169
|
+
|
|
170
|
+
// Parse queries from each query file
|
|
171
|
+
const queries: QueryDef[] = [];
|
|
172
|
+
for (const file of queryFiles) {
|
|
173
|
+
const parsed = parser.parseQueries(file.content, tables);
|
|
174
|
+
for (const q of parsed) {
|
|
175
|
+
q.sourceFile = basename(file.path, extname(file.path));
|
|
176
|
+
queries.push(q);
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
return { tables, queries, enums };
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
// ---------------------------------------------------------------------------
|
|
184
|
+
// Init
|
|
185
|
+
// ---------------------------------------------------------------------------
|
|
186
|
+
|
|
187
|
+
async function init(): Promise<void> {
|
|
188
|
+
// Create sql/ directory with example files
|
|
189
|
+
const sqlDir = "sql";
|
|
190
|
+
const queriesDir = join(sqlDir, "queries");
|
|
191
|
+
|
|
192
|
+
if (!existsSync(sqlDir)) {
|
|
193
|
+
mkdirSync(queriesDir, { recursive: true });
|
|
194
|
+
} else if (!existsSync(queriesDir)) {
|
|
195
|
+
mkdirSync(queriesDir, { recursive: true });
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
const schemaPath = join(sqlDir, "schema.sql");
|
|
199
|
+
if (!existsSync(schemaPath)) {
|
|
200
|
+
writeFileSync(
|
|
201
|
+
schemaPath,
|
|
202
|
+
`CREATE TABLE users (
|
|
203
|
+
id SERIAL PRIMARY KEY,
|
|
204
|
+
name TEXT NOT NULL,
|
|
205
|
+
email TEXT NOT NULL UNIQUE,
|
|
206
|
+
created_at TIMESTAMP NOT NULL DEFAULT NOW()
|
|
207
|
+
);
|
|
208
|
+
`,
|
|
209
|
+
"utf-8",
|
|
210
|
+
);
|
|
211
|
+
console.log("Created", schemaPath);
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
const queryPath = join(queriesDir, "users.sql");
|
|
215
|
+
if (!existsSync(queryPath)) {
|
|
216
|
+
writeFileSync(
|
|
217
|
+
queryPath,
|
|
218
|
+
`-- name: GetUserById :one
|
|
219
|
+
SELECT * FROM users WHERE id = $1;
|
|
220
|
+
|
|
221
|
+
-- name: ListUsers :many
|
|
222
|
+
SELECT * FROM users ORDER BY created_at DESC;
|
|
223
|
+
|
|
224
|
+
-- name: CreateUser :exec
|
|
225
|
+
INSERT INTO users (name, email) VALUES ($1, $2);
|
|
226
|
+
`,
|
|
227
|
+
"utf-8",
|
|
228
|
+
);
|
|
229
|
+
console.log("Created", queryPath);
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
const configPath = "sqlcx.config.ts";
|
|
233
|
+
if (!existsSync(configPath)) {
|
|
234
|
+
writeFileSync(
|
|
235
|
+
configPath,
|
|
236
|
+
`import { defineConfig } from "sqlcx";
|
|
237
|
+
import { createPostgresParser } from "sqlcx/parser/postgres";
|
|
238
|
+
import { createTypeScriptPlugin } from "sqlcx/generator/typescript";
|
|
239
|
+
import { createTypeBoxGenerator } from "sqlcx/generator/typescript/schema/typebox";
|
|
240
|
+
import { createBunSqlGenerator } from "sqlcx/generator/typescript/driver/bun-sql";
|
|
241
|
+
|
|
242
|
+
export default defineConfig({
|
|
243
|
+
sql: "./sql",
|
|
244
|
+
parser: createPostgresParser(),
|
|
245
|
+
targets: [
|
|
246
|
+
createTypeScriptPlugin({
|
|
247
|
+
schema: createTypeBoxGenerator(),
|
|
248
|
+
driver: createBunSqlGenerator(),
|
|
249
|
+
}),
|
|
250
|
+
],
|
|
251
|
+
});
|
|
252
|
+
`,
|
|
253
|
+
"utf-8",
|
|
254
|
+
);
|
|
255
|
+
console.log("Created", configPath);
|
|
256
|
+
}
|
|
257
|
+
|
|
258
|
+
console.log("\nProject initialized! Run 'sqlcx generate' to generate types.");
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
// ---------------------------------------------------------------------------
|
|
262
|
+
// CLI entry point
|
|
263
|
+
// ---------------------------------------------------------------------------
|
|
264
|
+
|
|
265
|
+
function printHelp(): void {
|
|
266
|
+
console.log("Usage: sqlcx <generate|check|init> [options]");
|
|
267
|
+
console.log("");
|
|
268
|
+
console.log("Commands:");
|
|
269
|
+
console.log(" generate Parse SQL and generate typed code");
|
|
270
|
+
console.log(" check Validate SQL files without generating (CI-friendly)");
|
|
271
|
+
console.log(" init Scaffold sql/ directory with example files");
|
|
272
|
+
console.log("");
|
|
273
|
+
console.log("Options:");
|
|
274
|
+
console.log(" --sql <dir> SQL directory (default: ./sql)");
|
|
275
|
+
console.log(" --out <dir> Output directory (default: ./src/db)");
|
|
276
|
+
console.log(" --cache <dir> Cache directory (default: .sqlcx)");
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
// Entry point: runs only when this file is executed directly, not when it
// is imported as a module. Top-level await requires an ESM runtime.
if (import.meta.main) {
  const args = process.argv.slice(2);
  const command = args[0];

  if (command === "generate") {
    await generate({
      sqlDir: getFlag(args, "--sql") ?? "./sql",
      outDir: getFlag(args, "--out") ?? "./src/db",
      cacheDir: getFlag(args, "--cache") ?? ".sqlcx",
    });
  } else if (command === "check") {
    const result = await check({
      sqlDir: getFlag(args, "--sql") ?? "./sql",
      cacheDir: getFlag(args, "--cache") ?? ".sqlcx",
    });
    if (!result.valid) {
      // Non-zero exit so CI pipelines fail on invalid SQL.
      console.error("Check failed:", result.errors);
      process.exit(1);
    }
    console.log(
      `Check passed: ${result.tables} tables, ${result.queries} queries`,
    );
  } else if (command === "init") {
    await init();
  } else {
    // Unknown or missing command: print usage (exit code stays 0).
    printHelp();
  }
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { DatabaseParser } from "@/parser/interface";
|
|
2
|
+
import type { LanguagePlugin } from "@/generator/interface";
|
|
3
|
+
|
|
4
|
+
/** Project configuration shape consumed from sqlcx.config.ts. */
export interface SqlcxConfig {
  /** Directory containing the .sql schema and query files. */
  sql: string;
  /** Parser turning SQL text into the sqlcx IR. */
  parser: DatabaseParser;
  /** One code-generation plugin per output target. */
  targets: LanguagePlugin[];
  // NOTE(review): exact key/value semantics of overrides are not visible in
  // this file — presumably SQL-type → generated-type mappings; confirm
  // against the generators before documenting further.
  overrides?: Record<string, string>;
}

/**
 * Identity helper: gives config files type checking and editor completion
 * without any runtime behavior.
 */
export function defineConfig(config: SqlcxConfig): SqlcxConfig {
  return config;
}
|
|
14
|
+
|
|
15
|
+
export async function loadConfig(configPath: string): Promise<SqlcxConfig> {
|
|
16
|
+
const resolved = Bun.resolveSync(configPath, process.cwd());
|
|
17
|
+
const mod = await import(resolved);
|
|
18
|
+
return mod.default as SqlcxConfig;
|
|
19
|
+
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { SqlcxIR, TableDef, QueryDef, EnumDef } from "@/ir";
|
|
2
|
+
|
|
3
|
+
/** Options passed to a LanguagePlugin's generate() call. */
export interface LanguageOptions {
  /** Output directory generated file paths are joined under. */
  out: string;
  // NOTE(review): overrides semantics are not visible in this file; the
  // TypeScript plugin currently does not read this field.
  overrides?: Record<string, string>;
}

/** A single generated file, ready to be written to disk. */
export interface GeneratedFile {
  /** Full output path (already joined with the out directory). */
  path: string;
  /** Complete file contents. */
  content: string;
}

/** Emits validation-schema source code for tables and enums. */
export interface SchemaGenerator {
  /** Human-readable generator name. */
  name: string;
  /** Import statements required at the top of the schema file. */
  generateImports(): string;
  /** Schema definition for one database enum. */
  generateEnumSchema(enumDef: EnumDef): string;
  /** Schema for rows read from the table (SELECT shape). */
  generateSelectSchema(table: TableDef, ir: SqlcxIR): string;
  /** Schema for rows written to the table (INSERT shape). */
  generateInsertSchema(table: TableDef, ir: SqlcxIR): string;
  /** Type alias deriving a static type from a schema variable. */
  generateTypeAlias(name: string, schemaVarName: string): string;
}

/** Emits the driver-specific client adapter and per-query functions. */
export interface DriverGenerator {
  /** Human-readable driver name. */
  name: string;
  /** Import statements required by the adapter, or "" when none. */
  generateImports(): string;
  /** Source of a concrete class implementing DatabaseClient. */
  generateClientAdapter(): string;
  /** Typed function (plus row/params types) for a single query. */
  generateQueryFunction(query: QueryDef): string;
}

// DatabaseClient interface is generated by the LanguagePlugin (not the DriverGenerator).
// This prevents collisions when multiple drivers target the same output dir.

/** Top-level code generator for one output language. */
export interface LanguagePlugin {
  /** Language identifier, e.g. "typescript". */
  language: string;
  /** Extension used for generated files, e.g. ".ts". */
  fileExtension: string;
  /** Produce all output files for the given IR. */
  generate(ir: SqlcxIR, options: LanguageOptions): GeneratedFile[];
}
|
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
import type { DriverGenerator } from "@/generator/interface";
|
|
2
|
+
import type { QueryDef, SqlType } from "@/ir";
|
|
3
|
+
import { camelCase, pascalCase } from "@/utils";
|
|
4
|
+
|
|
5
|
+
/** Split PascalCase/camelCase into words before applying case utils */
|
|
6
|
+
function splitWords(str: string): string {
|
|
7
|
+
return str.replace(/([a-z])([A-Z])/g, "$1_$2");
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
/** camelCase a name, first breaking existing camel/Pascal humps into words. */
function toCamel(str: string): string {
  return camelCase(splitWords(str));
}

/** PascalCase a name, first breaking existing camel/Pascal humps into words. */
function toPascal(str: string): string {
  return pascalCase(splitWords(str));
}
|
|
17
|
+
|
|
18
|
+
function tsType(type: SqlType): string {
|
|
19
|
+
// Handle arrays
|
|
20
|
+
if (type.elementType) {
|
|
21
|
+
return `${tsType(type.elementType)}[]`;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
switch (type.category) {
|
|
25
|
+
case "string":
|
|
26
|
+
case "uuid":
|
|
27
|
+
case "enum":
|
|
28
|
+
return "string";
|
|
29
|
+
case "number":
|
|
30
|
+
return "number";
|
|
31
|
+
case "boolean":
|
|
32
|
+
return "boolean";
|
|
33
|
+
case "date":
|
|
34
|
+
return "Date";
|
|
35
|
+
case "json":
|
|
36
|
+
return "unknown";
|
|
37
|
+
case "binary":
|
|
38
|
+
return "Uint8Array";
|
|
39
|
+
case "unknown":
|
|
40
|
+
return "unknown";
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
function generateRowType(query: QueryDef): string {
|
|
45
|
+
if (query.returns.length === 0) return "";
|
|
46
|
+
const typeName = `${toPascal(query.name)}Row`;
|
|
47
|
+
const fields = query.returns
|
|
48
|
+
.map((col) => {
|
|
49
|
+
const fieldName = col.alias ?? col.name;
|
|
50
|
+
const type = tsType(col.type);
|
|
51
|
+
const nullable = col.nullable ? " | null" : "";
|
|
52
|
+
return ` ${fieldName}: ${type}${nullable};`;
|
|
53
|
+
})
|
|
54
|
+
.join("\n");
|
|
55
|
+
return `export interface ${typeName} {\n${fields}\n}`;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function generateParamsType(query: QueryDef): string {
|
|
59
|
+
if (query.params.length === 0) return "";
|
|
60
|
+
const typeName = `${toPascal(query.name)}Params`;
|
|
61
|
+
const fields = query.params
|
|
62
|
+
.map((p) => ` ${p.name}: ${tsType(p.type)};`)
|
|
63
|
+
.join("\n");
|
|
64
|
+
return `export interface ${typeName} {\n${fields}\n}`;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
 * DriverGenerator targeting Bun's built-in SQL client. Emits a BunSqlClient
 * adapter over `sql.unsafe` plus, per query, the row/params interfaces, an
 * exported SQL-string constant, and a typed wrapper function.
 */
export function createBunSqlGenerator(): DriverGenerator {
  return {
    name: "bun-sql",

    // The adapter is self-contained, so client.ts needs no extra imports.
    generateImports(): string {
      return "";
    },

    // Adapter class mapping the generic DatabaseClient contract onto Bun's
    // `unsafe(query, values)` API (result is an array carrying a `count`).
    generateClientAdapter(): string {
      return `interface BunSqlDriver {
  unsafe(query: string, values?: unknown[]): Promise<any[] & { count: number }>;
}

export class BunSqlClient implements DatabaseClient {
  private sql: BunSqlDriver;

  constructor(sql: BunSqlDriver) {
    this.sql = sql;
  }

  async query<T>(text: string, values?: unknown[]): Promise<T[]> {
    const result = await this.sql.unsafe(text, values);
    return [...result] as T[];
  }

  async queryOne<T>(text: string, values?: unknown[]): Promise<T | null> {
    const rows = await this.query<T>(text, values);
    return rows[0] ?? null;
  }

  async execute(text: string, values?: unknown[]): Promise<{ rowsAffected: number }> {
    const result = await this.sql.unsafe(text, values);
    return { rowsAffected: result.count };
  }
}`;
    },

    // Emit everything a single query needs: optional Row/Params interfaces,
    // the SQL constant, and the typed async function.
    generateQueryFunction(query: QueryDef): string {
      const fnName = toCamel(query.name);
      const rowType = generateRowType(query);
      const hasParams = query.params.length > 0;
      const paramsInterface = generateParamsType(query);
      const paramsTypeName = `${toPascal(query.name)}Params`;
      // JSON.stringify handles newlines, quotes, backslashes safely
      const sqlConst = `export const ${fnName}Sql = ${JSON.stringify(query.sql)};`;

      // Parameter list and values array are omitted/empty for param-less queries.
      const paramsSig = hasParams ? `, params: ${paramsTypeName}` : "";
      const valuesArg = hasParams
        ? `[${query.params.map((p) => `params.${p.name}`).join(", ")}]`
        : "[]";

      let returnType: string;
      let body: string;

      // query.command discriminates the result shape: one/many/exec/execresult.
      switch (query.command) {
        case "one": {
          const typeName = `${toPascal(query.name)}Row`;
          returnType = `Promise<${typeName} | null>`;
          body = ` return client.queryOne<${typeName}>(${fnName}Sql, ${valuesArg});`;
          break;
        }
        case "many": {
          const typeName = `${toPascal(query.name)}Row`;
          returnType = `Promise<${typeName}[]>`;
          body = ` return client.query<${typeName}>(${fnName}Sql, ${valuesArg});`;
          break;
        }
        case "exec": {
          returnType = "Promise<void>";
          body = ` await client.execute(${fnName}Sql, ${valuesArg});`;
          break;
        }
        case "execresult": {
          returnType = "Promise<{ rowsAffected: number }>";
          body = ` return client.execute(${fnName}Sql, ${valuesArg});`;
          break;
        }
      }

      // Assemble: interfaces first, then the SQL constant, then the function.
      const parts: string[] = [];
      if (rowType) parts.push(rowType);
      if (paramsInterface) parts.push(paramsInterface);
      parts.push(sqlConst);
      parts.push(
        `export async function ${fnName}(client: DatabaseClient${paramsSig}): ${returnType} {\n${body}\n}`
      );

      return parts.join("\n\n");
    },
  };
}
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
LanguagePlugin,
|
|
3
|
+
SchemaGenerator,
|
|
4
|
+
DriverGenerator,
|
|
5
|
+
GeneratedFile,
|
|
6
|
+
LanguageOptions,
|
|
7
|
+
} from "@/generator/interface";
|
|
8
|
+
import type { SqlcxIR } from "@/ir";
|
|
9
|
+
import { pascalCase } from "@/utils";
|
|
10
|
+
import path from "node:path";
|
|
11
|
+
|
|
12
|
+
function joinPath(base: string, filename: string): string {
|
|
13
|
+
const joined = path.join(base, filename);
|
|
14
|
+
// Preserve leading "./" if the base had it
|
|
15
|
+
if (base.startsWith("./") && !joined.startsWith("./")) {
|
|
16
|
+
return "./" + joined;
|
|
17
|
+
}
|
|
18
|
+
return joined;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
/** Generators composed into the TypeScript language plugin. */
interface TypeScriptPluginOptions {
  /** Produces validation schemas for schema.ts. */
  schema: SchemaGenerator;
  /** Produces the client adapter and per-query functions. */
  driver: DriverGenerator;
}
|
|
25
|
+
|
|
26
|
+
/**
 * Build the full schema.ts source: schema-library imports, enum schemas,
 * per-table select/insert schemas, then static type aliases for all of them.
 */
function generateSchemaFile(
  schema: SchemaGenerator,
  ir: SqlcxIR,
): string {
  const parts: string[] = [];

  parts.push(schema.generateImports());

  // Enum schemas are emitted before the table schemas.
  for (const enumDef of ir.enums) {
    parts.push(schema.generateEnumSchema(enumDef));
  }

  for (const table of ir.tables) {
    parts.push(schema.generateSelectSchema(table, ir));
    parts.push(schema.generateInsertSchema(table, ir));
  }

  // Type aliases come last, after every schema variable exists. The alias
  // name equals the schema variable name — TypeScript allows a type and a
  // value to share an identifier.
  for (const table of ir.tables) {
    const selectName = `Select${pascalCase(table.name)}`;
    const insertName = `Insert${pascalCase(table.name)}`;
    parts.push(schema.generateTypeAlias(selectName, selectName));
    parts.push(schema.generateTypeAlias(insertName, insertName));
  }

  for (const enumDef of ir.enums) {
    const name = pascalCase(enumDef.name);
    parts.push(schema.generateTypeAlias(name, name));
  }

  return parts.join("\n\n") + "\n";
}
|
|
57
|
+
|
|
58
|
+
const DATABASE_CLIENT_INTERFACE = `export interface DatabaseClient {
|
|
59
|
+
query<T>(sql: string, params: unknown[]): Promise<T[]>;
|
|
60
|
+
queryOne<T>(sql: string, params: unknown[]): Promise<T | null>;
|
|
61
|
+
execute(sql: string, params: unknown[]): Promise<{ rowsAffected: number }>;
|
|
62
|
+
}`;
|
|
63
|
+
|
|
64
|
+
function generateClientFile(driver: DriverGenerator): string {
|
|
65
|
+
const parts: string[] = [];
|
|
66
|
+
|
|
67
|
+
const driverImports = driver.generateImports();
|
|
68
|
+
if (driverImports) {
|
|
69
|
+
parts.push(driverImports);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
parts.push(DATABASE_CLIENT_INTERFACE);
|
|
73
|
+
parts.push(driver.generateClientAdapter());
|
|
74
|
+
|
|
75
|
+
return parts.join("\n\n") + "\n";
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
function generateQueryFiles(
|
|
79
|
+
driver: DriverGenerator,
|
|
80
|
+
ir: SqlcxIR,
|
|
81
|
+
outDir: string,
|
|
82
|
+
): GeneratedFile[] {
|
|
83
|
+
const grouped = new Map<string, typeof ir.queries>();
|
|
84
|
+
|
|
85
|
+
for (const query of ir.queries) {
|
|
86
|
+
const existing = grouped.get(query.sourceFile);
|
|
87
|
+
if (existing) {
|
|
88
|
+
existing.push(query);
|
|
89
|
+
} else {
|
|
90
|
+
grouped.set(query.sourceFile, [query]);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
const files: GeneratedFile[] = [];
|
|
95
|
+
|
|
96
|
+
for (const [sourceFile, queries] of grouped) {
|
|
97
|
+
const basename = path.basename(sourceFile, path.extname(sourceFile));
|
|
98
|
+
const filename = `${basename}.queries.ts`;
|
|
99
|
+
|
|
100
|
+
const parts: string[] = [];
|
|
101
|
+
parts.push(`import type { DatabaseClient } from "./client";`);
|
|
102
|
+
|
|
103
|
+
for (const query of queries) {
|
|
104
|
+
parts.push(driver.generateQueryFunction(query));
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
files.push({
|
|
108
|
+
path: joinPath(outDir, filename),
|
|
109
|
+
content: parts.join("\n\n") + "\n",
|
|
110
|
+
});
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
return files;
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
/**
 * LanguagePlugin producing TypeScript output: schema.ts (validation
 * schemas), client.ts (DatabaseClient interface + driver adapter), and one
 * *.queries.ts module per source .sql query file.
 */
export function createTypeScriptPlugin(
  options: TypeScriptPluginOptions,
): LanguagePlugin {
  const { schema, driver } = options;

  return {
    language: "typescript",
    fileExtension: ".ts",

    // NOTE(review): langOptions.overrides is currently ignored by this plugin.
    generate(ir: SqlcxIR, langOptions: LanguageOptions): GeneratedFile[] {
      const outDir = langOptions.out;
      const files: GeneratedFile[] = [];

      // Validation schemas for tables and enums.
      files.push({
        path: joinPath(outDir, "schema.ts"),
        content: generateSchemaFile(schema, ir),
      });

      // DatabaseClient interface + driver adapter.
      files.push({
        path: joinPath(outDir, "client.ts"),
        content: generateClientFile(driver),
      });

      // One module per source query file.
      files.push(...generateQueryFiles(driver, ir, outDir));

      return files;
    },
  };
}
|