sqlcx-orm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3qq0zjsm.js +12 -0
- package/dist/chunk-49wq4032.js +11 -0
- package/dist/cli.js +1226 -0
- package/dist/config/index.js +9 -0
- package/dist/generator/typescript/driver/bun-sql.js +151 -0
- package/dist/generator/typescript/index.js +110 -0
- package/dist/generator/typescript/schema/typebox.js +127 -0
- package/dist/index.js +7 -0
- package/dist/parser/postgres.js +615 -0
- package/package.json +56 -0
- package/src/cache/index.ts +46 -0
- package/src/cli/index.ts +306 -0
- package/src/config/index.ts +19 -0
- package/src/generator/interface.ts +36 -0
- package/src/generator/typescript/driver/bun-sql.ts +157 -0
- package/src/generator/typescript/index.ts +144 -0
- package/src/generator/typescript/schema/typebox.ts +143 -0
- package/src/index.ts +23 -0
- package/src/ir/index.ts +72 -0
- package/src/parser/interface.ts +8 -0
- package/src/parser/param-naming.ts +49 -0
- package/src/parser/postgres.ts +745 -0
- package/src/utils/index.ts +13 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,1226 @@
|
|
|
1
|
+
// @bun
|
|
2
|
+
// src/cli/index.ts
|
|
3
|
+
import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "fs";
|
|
4
|
+
import { join as join2, dirname, basename, extname, relative } from "path";
|
|
5
|
+
|
|
6
|
+
// src/cache/index.ts
|
|
7
|
+
import { createHash } from "crypto";
|
|
8
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, renameSync } from "fs";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
/**
 * Compute a deterministic SHA-256 fingerprint over a set of files.
 * Files are ordered by path so the digest is independent of input order;
 * NUL bytes delimit each path/content pair to avoid concatenation ambiguity.
 * @param {{path: string, content: string}[]} files
 * @returns {string} hex-encoded SHA-256 digest
 */
function computeHash(files) {
  const digest = createHash("sha256");
  const byPath = [...files].sort((left, right) => left.path.localeCompare(right.path));
  for (const file of byPath) {
    digest.update(file.path);
    digest.update("\x00");
    digest.update(file.content);
    digest.update("\x00");
  }
  return digest.digest("hex");
}
|
|
21
|
+
/**
 * Persist the parsed IR to `<cacheDir>/ir.json`, tagged with the source hash.
 * Writes to a temp file first and renames it into place so a reader never
 * observes a partially written cache file.
 * @param {string} cacheDir - directory to create (recursively) if missing
 * @param {object} ir - intermediate representation to cache
 * @param {string} hash - content hash of the inputs the IR was built from
 */
function writeCache(cacheDir, ir, hash) {
  if (!existsSync(cacheDir)) {
    mkdirSync(cacheDir, { recursive: true });
  }
  const target = join(cacheDir, "ir.json");
  const staging = `${target}.tmp`;
  writeFileSync(staging, JSON.stringify({ hash, ir }, null, 2));
  renameSync(staging, target);
}
|
|
30
|
+
/**
 * Load a previously cached IR from `<cacheDir>/ir.json`.
 * Returns null when the cache is absent, unreadable, malformed, or was
 * produced from a different source hash (i.e. the cache is stale).
 * @param {string} cacheDir
 * @param {string} expectedHash
 * @returns {object|null}
 */
function readCache(cacheDir, expectedHash) {
  const cachePath = join(cacheDir, "ir.json");
  if (!existsSync(cachePath)) {
    return null;
  }
  try {
    const data = JSON.parse(readFileSync(cachePath, "utf-8"));
    return data.hash === expectedHash ? data.ir : null;
  } catch {
    // Corrupt or unreadable cache is treated the same as a miss.
    return null;
  }
}
|
|
43
|
+
|
|
44
|
+
// src/parser/param-naming.ts
|
|
45
|
+
/**
 * Derive a unique, human-friendly name for every query parameter.
 *
 * Priority per parameter: an explicit `@param` override, then the inferred
 * column name, then a positional fallback (`param_<index>`). Columns that are
 * referenced by more than one parameter get numeric suffixes (`id_1`, `id_2`),
 * and any remaining collision is resolved with an extra `_<n>` suffix.
 * @param {{index: number, column: string|null, override?: string}[]} params
 * @returns {string[]} one resolved name per input parameter, in order
 */
function resolveParamNames(params) {
  // Count how many non-overridden parameters share each column name.
  const columnUses = new Map();
  for (const param of params) {
    if (!param.override && param.column) {
      columnUses.set(param.column, (columnUses.get(param.column) ?? 0) + 1);
    }
  }
  const perColumnCounter = new Map();
  const taken = new Set();
  return params.map((param) => {
    let candidate;
    if (param.override) {
      candidate = param.override;
    } else if (!param.column) {
      candidate = `param_${param.index}`;
    } else if ((columnUses.get(param.column) ?? 0) > 1) {
      const next = (perColumnCounter.get(param.column) ?? 0) + 1;
      perColumnCounter.set(param.column, next);
      candidate = `${param.column}_${next}`;
    } else {
      candidate = param.column;
    }
    // Final uniqueness pass: bump a numeric suffix until the name is free.
    let unique = candidate;
    for (let bump = 1; taken.has(unique); bump++) {
      unique = `${candidate}_${bump}`;
    }
    taken.add(unique);
    return unique;
  });
}
|
|
79
|
+
|
|
80
|
+
// src/parser/postgres.ts
|
|
81
|
+
// Maps a normalized (lower-cased) Postgres type spelling to the
// language-neutral category used by the IR and code generators. Common
// aliases (int4, float8, bool, ...) and multi-word spellings are listed
// explicitly; anything missing falls through to the "unknown" category in
// resolveType.
var TYPE_CATEGORY_MAP = new Map([
  ["text", "string"],
  ["varchar", "string"],
  ["char", "string"],
  ["character varying", "string"],
  ["character", "string"],
  ["name", "string"],
  ["integer", "number"],
  ["int", "number"],
  ["int2", "number"],
  ["int4", "number"],
  ["int8", "number"],
  ["smallint", "number"],
  ["bigint", "number"],
  ["serial", "number"],
  ["bigserial", "number"],
  ["real", "number"],
  ["double precision", "number"],
  ["numeric", "number"],
  ["decimal", "number"],
  ["float", "number"],
  ["float4", "number"],
  ["float8", "number"],
  ["boolean", "boolean"],
  ["bool", "boolean"],
  ["timestamp", "date"],
  ["timestamptz", "date"],
  ["date", "date"],
  ["time", "date"],
  ["timetz", "date"],
  ["timestamp without time zone", "date"],
  ["timestamp with time zone", "date"],
  ["json", "json"],
  ["jsonb", "json"],
  ["uuid", "uuid"],
  ["bytea", "binary"]
]);
// Auto-incrementing pseudo-types; columns of these types are treated as
// having a database-side default.
var SERIAL_TYPES = new Set(["serial", "bigserial"]);
|
|
119
|
+
/**
 * Resolve a raw SQL type spelling into the IR type descriptor.
 * Array types ("foo[]") recurse on the element type and carry its category;
 * otherwise the type is looked up in TYPE_CATEGORY_MAP, then in the set of
 * known enum names, and finally falls back to the "unknown" category.
 * @param {string} raw - type text as written in the schema
 * @param {Set<string>} enumNames - lower-cased enum type names
 */
function resolveType(raw, enumNames) {
  const trimmed = raw.trim();
  const normalized = trimmed.toLowerCase();
  if (trimmed.endsWith("[]")) {
    const elementType = resolveType(trimmed.slice(0, -2), enumNames);
    return { raw: trimmed, normalized, category: elementType.category, elementType };
  }
  const known = TYPE_CATEGORY_MAP.get(normalized);
  if (known) {
    return { raw: trimmed, normalized, category: known };
  }
  return enumNames.has(normalized)
    ? { raw: trimmed, normalized, category: "enum", enumName: normalized }
    : { raw: trimmed, normalized, category: "unknown" };
}
|
|
141
|
+
// Matches `CREATE TYPE <name> AS ENUM ('a', 'b', ...)`; capture 1 is the type
// name, capture 2 the raw quoted value list (possibly empty). Only the
// `.source` is stored; consumers build a fresh RegExp per call so no
// `lastIndex` state is shared.
var ENUM_RE_SOURCE = /CREATE\s+TYPE\s+(\w+)\s+AS\s+ENUM\s*\(\s*((?:'[^']*'(?:\s*,\s*'[^']*')*)?)\s*\)/i.source;
/**
 * Extract every enum type definition from a schema SQL string.
 * @param {string} sql
 * @returns {{name: string, values: string[]}[]} lower-cased names with their values
 */
function parseEnumDefs(sql) {
  const enumRe = new RegExp(ENUM_RE_SOURCE, "gi");
  const defs = [];
  for (const match of sql.matchAll(enumRe)) {
    const values = [...match[2].matchAll(/'([^']*)'/g)].map(([, value]) => value);
    defs.push({ name: match[1].toLowerCase(), values });
  }
  return defs;
}
|
|
154
|
+
// Matches `CREATE TABLE [IF NOT EXISTS] <name> ( ... );` non-greedily;
// capture 1 is the table name, capture 2 the parenthesized body. Stored as
// `.source` so each consumer can build its own global RegExp.
var CREATE_TABLE_RE_SOURCE = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(\w+)\s*\(([\s\S]*?)\)\s*;/i.source;
/**
 * Split a CREATE TABLE body into its top-level, comma-separated definitions.
 * Commas nested inside parentheses (e.g. `numeric(10, 2)`, `PRIMARY KEY (a, b)`)
 * do not split. Each part is trimmed; a trailing empty segment is dropped.
 * @param {string} body
 * @returns {string[]}
 */
function splitColumnDefs(body) {
  const defs = [];
  let parenDepth = 0;
  let buffer = "";
  for (const ch of body) {
    if (ch === "," && parenDepth === 0) {
      defs.push(buffer.trim());
      buffer = "";
      continue;
    }
    if (ch === "(") {
      parenDepth++;
    } else if (ch === ")") {
      parenDepth--;
    }
    buffer += ch;
  }
  if (buffer.trim()) {
    defs.push(buffer.trim());
  }
  return defs;
}
|
|
177
|
+
// Multi-word type spellings that must be tried before the single-word
// fallback match.
var MULTI_WORD_TYPES = [
  "character varying",
  "double precision",
  "timestamp without time zone",
  "timestamp with time zone"
];
// Parse one column definition from a CREATE TABLE body. Returns null for
// blank lines and table-level constraints; otherwise the column descriptor
// plus flags for inline PRIMARY KEY / UNIQUE modifiers.
function parseColumnLine(line, enumNames) {
  line = line.trim();
  if (!line)
    return null;
  // Table-level constraint clauses are handled by the caller, not as columns.
  if (/^(PRIMARY\s+KEY|CONSTRAINT|UNIQUE|CHECK|FOREIGN\s+KEY)/i.test(line)) {
    return null;
  }
  const nameMatch = line.match(/^(\w+)\s+/);
  if (!nameMatch)
    return null;
  const colName = nameMatch[1].toLowerCase();
  const afterName = line.slice(nameMatch[0].length);
  // Try multi-word type names first ("double precision", ...); fall back to a
  // single word, optionally with an array suffix ("int[]").
  let rawType = null;
  for (const mwt of MULTI_WORD_TYPES) {
    if (afterName.toLowerCase().startsWith(mwt)) {
      rawType = mwt;
      break;
    }
  }
  if (!rawType) {
    const typeMatch = afterName.match(/^(\w+(?:\[\])?)/);
    rawType = typeMatch ? typeMatch[1] : "unknown";
  }
  // Everything after the type: NOT NULL / DEFAULT / PRIMARY KEY / UNIQUE.
  const restAfterType = afterName.slice(rawType.length).trim();
  const isNotNull = /\bNOT\s+NULL\b/i.test(restAfterType);
  const hasDefaultKeyword = /\bDEFAULT\b/i.test(restAfterType);
  // serial/bigserial auto-increment, so they behave as having a default.
  const isSerial = SERIAL_TYPES.has(rawType.toLowerCase());
  const isPK = /\bPRIMARY\s+KEY\b/i.test(restAfterType);
  const isUnique = /\bUNIQUE\b/i.test(restAfterType);
  const type = resolveType(rawType, enumNames);
  return {
    col: {
      name: colName,
      type,
      nullable: !isNotNull,
      hasDefault: hasDefaultKeyword || isSerial
    },
    isPK,
    isUnique
  };
}
|
|
224
|
+
/**
 * Parse a `-- @enum("a", "b")` annotation out of a column comment.
 * @param {string} comment - accumulated comment text preceding the column
 * @returns {string[]|undefined} the quoted values, or undefined when the
 *   annotation is absent or contains no quoted values
 */
function parseEnumAnnotation(comment) {
  const annotation = comment.match(/--\s*@enum\s*\(\s*(.*?)\s*\)/);
  if (!annotation) {
    return;
  }
  const values = [...annotation[1].matchAll(/"([^"]*?)"/g)].map(([, value]) => value);
  return values.length > 0 ? values : undefined;
}
|
|
237
|
+
|
|
238
|
+
// Recursive-descent parser for the type mini-language used by `-- @json(...)`
// annotations: primitives (string | number | boolean), object literals
// `{ field: T, ... }`, array suffix `T[]`, and nullable suffix `T?`.
// All errors are thrown as Error with an "@json parse error" prefix.
class JsonShapeParser {
  input; // annotation body being parsed
  pos = 0; // current offset into `input`
  constructor(input) {
    this.input = input;
  }
  // Parse the whole input; throws if anything but whitespace remains.
  parse() {
    const shape = this.parseType();
    this.skipWs();
    if (this.pos < this.input.length) {
      throw new Error(`@json parse error: unexpected trailing content at position ${this.pos}: "${this.input.slice(this.pos, this.pos + 10)}"`);
    }
    return shape;
  }
  // One type expression: base type, then any number of `[]`, then optional `?`.
  parseType() {
    this.skipWs();
    let shape;
    if (this.peek() === "{") {
      shape = this.parseObject();
    } else {
      shape = this.parsePrimitive();
    }
    this.skipWs();
    while (this.lookAhead("[]")) {
      this.pos += 2;
      this.skipWs();
      shape = { kind: "array", element: shape };
    }
    if (this.peek() === "?") {
      this.pos++;
      shape = { kind: "nullable", inner: shape };
    }
    return shape;
  }
  parsePrimitive() {
    this.skipWs();
    if (this.matchWord("string"))
      return { kind: "string" };
    if (this.matchWord("number"))
      return { kind: "number" };
    if (this.matchWord("boolean"))
      return { kind: "boolean" };
    throw new Error(`@json parse error: unexpected token at position ${this.pos}: "${this.input.slice(this.pos, this.pos + 10)}"`);
  }
  // `{ name: T, ... }`; a trailing comma before `}` is tolerated.
  parseObject() {
    this.consume("{");
    this.skipWs();
    const fields = {};
    if (this.peek() !== "}") {
      this.parseField(fields);
      while (this.peek() === ",") {
        this.pos++;
        this.skipWs();
        if (this.peek() === "}")
          break;
        this.parseField(fields);
      }
    }
    this.consume("}");
    return { kind: "object", fields };
  }
  // One `name: T` pair, written into the caller-supplied fields object.
  parseField(fields) {
    this.skipWs();
    const name = this.readIdentifier();
    this.skipWs();
    this.consume(":");
    this.skipWs();
    fields[name] = this.parseType();
    this.skipWs();
  }
  readIdentifier() {
    this.skipWs();
    const start = this.pos;
    while (this.pos < this.input.length && /[\w]/.test(this.input[this.pos])) {
      this.pos++;
    }
    if (this.pos === start) {
      throw new Error(`@json parse error: expected identifier at position ${this.pos}`);
    }
    return this.input.slice(start, this.pos);
  }
  skipWs() {
    while (this.pos < this.input.length && /\s/.test(this.input[this.pos])) {
      this.pos++;
    }
  }
  // NOTE: peek() skips leading whitespace (advancing `pos`) before looking.
  peek() {
    this.skipWs();
    return this.input[this.pos];
  }
  // Raw prefix check at the current position; does not skip whitespace.
  lookAhead(s) {
    return this.input.startsWith(s, this.pos);
  }
  // Consume `word` only when followed by a non-word character (so "stringy"
  // does not match "string").
  matchWord(word) {
    if (this.input.startsWith(word, this.pos)) {
      const afterPos = this.pos + word.length;
      if (afterPos >= this.input.length || !/\w/.test(this.input[afterPos])) {
        this.pos = afterPos;
        return true;
      }
    }
    return false;
  }
  // Expect and consume a single literal character (after skipping whitespace).
  consume(ch) {
    this.skipWs();
    if (this.input[this.pos] !== ch) {
      throw new Error(`@json parse error: expected '${ch}' at position ${this.pos}, got '${this.input[this.pos]}'`);
    }
    this.pos++;
  }
}
|
|
349
|
+
/**
 * Parse a `-- @json( ... )` annotation into a JSON shape tree.
 * Best-effort: returns undefined both when no annotation is present and when
 * the shape expression fails to parse (malformed annotations are ignored
 * rather than failing generation).
 * @param {string} comment
 */
function parseJsonAnnotation(comment) {
  const annotation = comment.match(/--\s*@json\s*\(\s*([\s\S]+)\s*\)\s*$/);
  if (!annotation) {
    return;
  }
  try {
    return new JsonShapeParser(annotation[1].trim()).parse();
  } catch {
    // Deliberate best-effort: a malformed @json annotation is ignored.
    return;
  }
}
|
|
361
|
+
// Parse every CREATE TABLE statement in `sql` into table IR nodes.
// Comments immediately preceding a column definition are scanned for
// `@enum` / `@json` annotations; table-level PRIMARY KEY clauses and inline
// PRIMARY KEY / UNIQUE modifiers are collected as constraints.
function parseSchemaDefs(sql, enumNames) {
  const re = new RegExp(CREATE_TABLE_RE_SOURCE, "gi");
  const tables = [];
  let m;
  while ((m = re.exec(sql)) !== null) {
    const tableName = m[1].toLowerCase();
    const body = m[2];
    const columns = [];
    const primaryKey = [];
    const uniqueConstraints = [];
    const rawLines = body.split(`
`);
    // Associate each run of `--` comment lines with the index of the column
    // definition that follows it. Definition indices are computed by counting
    // complete comma-separated defs in the accumulated non-comment text
    // before and after appending each line.
    let pendingComment = "";
    let nonCommentBuffer = "";
    const commentMap = new Map;
    for (const rawLine of rawLines) {
      const trimmedLine = rawLine.trim();
      if (trimmedLine.startsWith("--")) {
        pendingComment += (pendingComment ? `
` : "") + trimmedLine;
      } else {
        const beforeDefs = splitColumnDefs(nonCommentBuffer).filter((d) => d.trim().length > 0).length;
        nonCommentBuffer += (nonCommentBuffer ? `
` : "") + rawLine;
        const afterDefs = splitColumnDefs(nonCommentBuffer).filter((d) => d.trim().length > 0).length;
        if (afterDefs > beforeDefs && pendingComment) {
          // This line completed a new definition: attach the buffered comment
          // to that definition's index.
          commentMap.set(beforeDefs, pendingComment);
          pendingComment = "";
        } else if (afterDefs === beforeDefs) {} else {
          // Def count changed in an unexpected way; drop the stale comment.
          pendingComment = "";
        }
      }
    }
    const lines = splitColumnDefs(nonCommentBuffer);
    for (let i = 0;i < lines.length; i++) {
      const trimmed = lines[i].trim();
      // Table-level composite primary key clause.
      const pkMatch = trimmed.match(/^PRIMARY\s+KEY\s*\(\s*([\w\s,]+)\s*\)/i);
      if (pkMatch) {
        primaryKey.push(...pkMatch[1].split(",").map((s) => s.trim().toLowerCase()));
        continue;
      }
      const result = parseColumnLine(trimmed, enumNames);
      if (!result)
        continue;
      // Apply @enum / @json annotations from the comment attached to this def.
      const comment = commentMap.get(i);
      if (comment) {
        const enumValues = parseEnumAnnotation(comment);
        if (enumValues) {
          result.col.type = {
            ...result.col.type,
            category: "enum",
            enumValues
          };
        }
        const jsonShape = parseJsonAnnotation(comment);
        if (jsonShape) {
          result.col.type = {
            ...result.col.type,
            jsonShape
          };
        }
      }
      columns.push(result.col);
      if (result.isPK) {
        primaryKey.push(result.col.name);
      }
      if (result.isUnique) {
        uniqueConstraints.push([result.col.name]);
      }
    }
    // Primary key columns are implicitly NOT NULL; serial PK columns always
    // have a default.
    for (const col of columns) {
      if (primaryKey.includes(col.name)) {
        col.nullable = false;
        col.hasDefault = col.hasDefault || SERIAL_TYPES.has(col.type.normalized);
      }
    }
    tables.push({ name: tableName, columns, primaryKey, uniqueConstraints });
  }
  return tables;
}
|
|
441
|
+
// `-- name: GetUser :one` — query name plus result command (one/many/exec/execresult).
var QUERY_ANNOTATION_RE = /--\s*name:\s*(\w+)\s+:(one|many|execresult|exec)/;
// `-- @param $1 userId` — explicit name override for a positional parameter.
// Declared with /g, but consumers build a fresh RegExp from `.source` to
// avoid shared `lastIndex` state.
var PARAM_OVERRIDE_RE = /--\s*@param\s+\$(\d+)\s+(\w+)/g;
// Positional parameter placeholders ($1, $2, ...) inside query SQL.
var DOLLAR_PARAM_RE = /\$(\d+)/g;
|
|
444
|
+
// Split a queries file into annotated query blocks. A block starts at a
// `-- name: <ident> :<command>` annotation line; subsequent comment lines are
// appended to its `comments`, and non-comment lines are joined
// (space-separated) into its single-line `sql`. Comment lines seen before any
// annotation are buffered and attached to the next block's comments.
function splitQueryBlocks(sql) {
  const lines = sql.split(`
`);
  const blocks = [];
  let current = null;
  let commentBuffer = "";
  for (const line of lines) {
    const trimmed = line.trim();
    const annotationMatch = trimmed.match(QUERY_ANNOTATION_RE);
    if (annotationMatch) {
      // New block begins; flush the previous one.
      if (current)
        blocks.push(current);
      current = {
        name: annotationMatch[1],
        command: annotationMatch[2],
        sql: "",
        comments: commentBuffer + trimmed + `
`
      };
      commentBuffer = "";
    } else if (trimmed.startsWith("--")) {
      if (current) {
        current.comments += trimmed + `
`;
      } else {
        commentBuffer += trimmed + `
`;
      }
    } else if (current && trimmed) {
      // Fold multi-line SQL into one space-separated line.
      current.sql += (current.sql ? " " : "") + trimmed;
    }
  }
  if (current)
    blocks.push(current);
  return blocks;
}
|
|
480
|
+
/**
 * Collect `-- @param $<n> <name>` overrides from a query's comment block.
 * @param {string} comments
 * @returns {Map<number, string>} parameter index -> overridden name
 */
function extractParamOverrides(comments) {
  // Fresh inline regex (same pattern as PARAM_OVERRIDE_RE) so no lastIndex
  // state is shared between calls.
  const overrideRe = /--\s*@param\s+\$(\d+)\s+(\w+)/g;
  const overrides = new Map();
  for (const match of comments.matchAll(overrideRe)) {
    overrides.set(Number.parseInt(match[1], 10), match[2]);
  }
  return overrides;
}
|
|
489
|
+
/**
 * Find every positional placeholder ($1, $2, ...) used in a SQL string.
 * @param {string} sql
 * @returns {number[]} distinct parameter indices in ascending order
 */
function extractParamIndices(sql) {
  // Fresh inline regex (same pattern as DOLLAR_PARAM_RE); a Set dedupes
  // repeated uses of the same placeholder.
  const indices = new Set();
  for (const match of sql.matchAll(/\$(\d+)/g)) {
    indices.add(Number.parseInt(match[1], 10));
  }
  return [...indices].sort((a, b) => a - b);
}
|
|
498
|
+
/**
 * Best-effort inference of which table column each $n parameter refers to.
 *
 * INSERT statements pair the column list with the VALUES placeholders
 * positionally. For other statements, `col <op> $n` (and `fn(col) <op> $n`)
 * comparisons in the SQL are scanned, skipping bare SQL keywords so that e.g.
 * `... AND $1` never maps a keyword to a parameter.
 * @param {string} sql
 * @returns {Map<number, string>} parameter index -> lower-cased column name
 */
function inferParamColumns(sql) {
  const inferred = new Map();
  const insertMatch = sql.match(/INSERT\s+INTO\s+\w+\s*\(\s*([\w\s,]+)\s*\)\s*VALUES\s*\(\s*([\$\d\s,]+)\s*\)/i);
  if (insertMatch) {
    const columns = insertMatch[1].split(",").map((part) => part.trim().toLowerCase());
    const placeholders = [...insertMatch[2].matchAll(/\$(\d+)/g)].map((hit) => Number.parseInt(hit[1], 10));
    const pairCount = Math.min(columns.length, placeholders.length);
    for (let i = 0; i < pairCount; i++) {
      inferred.set(placeholders[i], columns[i]);
    }
    return inferred;
  }
  // Words that can legally precede a comparison operator but are not columns.
  const SQL_KEYWORDS = new Set([
    "not", "and", "or", "where", "set", "when", "then", "else", "case",
    "between", "exists", "any", "all", "some", "having"
  ]);
  // Either `fn(col) <op> $n` (captures 1+2) or `col <op> $n` (capture 3);
  // capture 4 is always the parameter index.
  const comparisonRe = /(?:(\w+)\s*\(\s*(\w+)\s*\)|(\w+))\s*(?:=|!=|<>|<=?|>=?|(?:NOT\s+)?(?:I?LIKE|IN|IS))\s*\$(\d+)/gi;
  for (const hit of sql.matchAll(comparisonRe)) {
    const paramIdx = Number.parseInt(hit[4], 10);
    if (hit[1] && hit[2]) {
      // fn(col) <op> $n — use the function argument as the column.
      inferred.set(paramIdx, hit[2].toLowerCase());
    } else if (hit[3]) {
      const candidate = hit[3].toLowerCase();
      if (!SQL_KEYWORDS.has(candidate)) {
        inferred.set(paramIdx, candidate);
      }
    }
  }
  return inferred;
}
|
|
541
|
+
/**
 * Resolve the primary table a query targets via its first FROM/INTO/UPDATE
 * clause. Returns undefined when no clause is present or no table matches.
 * @param {string} sql
 * @param {{name: string}[]} tables
 */
function findFromTable(sql, tables) {
  const clause = sql.match(/(?:FROM|INTO|UPDATE)\s+(\w+)/i);
  if (!clause) {
    return;
  }
  const wanted = clause[1].toLowerCase();
  return tables.find((candidate) => candidate.name === wanted);
}
|
|
548
|
+
/**
 * Resolve the columns produced by a RETURNING clause, or null when the query
 * has none. `RETURNING *` yields a copy of the table's full column list;
 * named columns are looked up on the table, with unresolvable names mapped to
 * a nullable "unknown" column descriptor. Without a table, named columns
 * resolve to an empty list.
 */
function resolveReturningColumns(sql, table) {
  const clause = sql.match(/\bRETURNING\s+([\s\S]+?)(?:;?\s*)$/i);
  if (!clause) {
    return null;
  }
  const spec = clause[1].trim();
  if (spec === "*") {
    return table ? [...table.columns] : [];
  }
  if (!table) {
    return [];
  }
  return spec.split(",").map((entry) => {
    const name = entry.trim().toLowerCase();
    const known = table.columns.find((candidate) => candidate.name === name);
    if (known) {
      return { ...known };
    }
    return {
      name,
      type: { raw: "unknown", normalized: "unknown", category: "unknown" },
      nullable: true,
      hasDefault: false
    };
  });
}
|
|
564
|
+
// Determine the result columns of a query: the RETURNING clause if present,
// otherwise the SELECT list (supporting `*` and `col AS alias`). Non-SELECT
// statements without RETURNING produce no columns; names that do not resolve
// on the table fall back to a nullable "unknown" column descriptor.
function resolveReturnColumns(sql, table) {
  const returning = resolveReturningColumns(sql, table);
  if (returning)
    return returning;
  if (!/^\s*SELECT\b/i.test(sql))
    return [];
  const selectMatch = sql.match(/SELECT\s+([\s\S]+?)\s+FROM\b/i);
  if (!selectMatch)
    return [];
  const colsPart = selectMatch[1].trim();
  if (colsPart === "*") {
    return table ? [...table.columns] : [];
  }
  if (!table)
    return [];
  const colNames = colsPart.split(",").map((s) => s.trim().toLowerCase());
  const resolved = [];
  for (const colExpr of colNames) {
    // `col AS alias` keeps the column's type but records the output alias.
    const aliasMatch = colExpr.match(/^(\w+)\s+as\s+(\w+)$/i);
    const actualName = aliasMatch ? aliasMatch[1] : colExpr;
    const tableCol = table.columns.find((c) => c.name === actualName);
    if (tableCol) {
      resolved.push(aliasMatch ? { ...tableCol, alias: aliasMatch[2].toLowerCase() } : { ...tableCol });
    } else {
      resolved.push({
        name: actualName,
        type: { raw: "unknown", normalized: "unknown", category: "unknown" },
        nullable: true,
        hasDefault: false
      });
    }
  }
  return resolved;
}
|
|
598
|
+
// Build the typed parameter list for a query: placeholder indices come from
// the SQL, names from @param overrides / inferred columns / positional
// fallback (via resolveParamNames), and types are looked up on the target
// table when a column was inferred.
function buildParams(sql, comments, table) {
  const paramIndices = extractParamIndices(sql);
  if (paramIndices.length === 0)
    return [];
  const overrides = extractParamOverrides(comments);
  const inferredCols = inferParamColumns(sql);
  const rawParams = paramIndices.map((idx) => ({
    index: idx,
    column: inferredCols.get(idx) ?? null,
    override: overrides.get(idx)
  }));
  const names = resolveParamNames(rawParams);
  return paramIndices.map((idx, i) => {
    const colName = inferredCols.get(idx);
    // Default to "unknown" unless the inferred column resolves on the table.
    let type = {
      raw: "unknown",
      normalized: "unknown",
      category: "unknown"
    };
    if (table && colName) {
      const tableCol = table.columns.find((c) => c.name === colName);
      if (tableCol) {
        type = tableCol.type;
      }
    }
    return { index: idx, name: names[i], type };
  });
}
|
|
626
|
+
// Factory for the PostgreSQL dialect parser used by the CLI: exposes enum,
// schema, and query parsing built on the regex-based helpers above.
function createPostgresParser() {
  return {
    dialect: "postgresql",
    parseEnums(sql) {
      return parseEnumDefs(sql);
    },
    parseSchema(sql) {
      // Enum names must be collected first so columns can resolve to enum types.
      const enums = parseEnumDefs(sql);
      const enumNames = new Set(enums.map((e) => e.name));
      return parseSchemaDefs(sql, enumNames);
    },
    parseQueries(sql, tables) {
      const blocks = splitQueryBlocks(sql);
      return blocks.map((block) => {
        const table = findFromTable(block.sql, tables);
        const params = buildParams(block.sql, block.comments, table);
        const returns = resolveReturnColumns(block.sql, table);
        return {
          name: block.name,
          command: block.command,
          // Strip any trailing semicolon from the stored SQL.
          sql: block.sql.replace(/;\s*$/, ""),
          params,
          returns,
          // sourceFile is filled in by the caller (the CLI knows the file).
          sourceFile: ""
        };
      });
    }
  };
}
|
|
655
|
+
|
|
656
|
+
// src/utils/index.ts
|
|
657
|
+
/**
 * Convert snake/kebab/space-separated identifiers to PascalCase.
 * Each word is capitalized and its remainder lower-cased
 * ("user_id" -> "UserId", "HTML" -> "Html").
 */
function pascalCase(str) {
  const words = str.split(/[_\-\s]+/);
  return words
    .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
    .join("");
}
|
|
660
|
+
/**
 * Convert an identifier to camelCase ("user_id" -> "userId"): PascalCase it,
 * then lower-case the leading character.
 */
function camelCase(str) {
  const pascal = pascalCase(str);
  return pascal.length === 0 ? pascal : pascal[0].toLowerCase() + pascal.slice(1);
}
|
|
664
|
+
|
|
665
|
+
// src/generator/typescript/index.ts
|
|
666
|
+
import path from "path";
|
|
667
|
+
/**
 * Join an output path like path.join, but preserve an explicit "./" prefix on
 * the base (path.join normalizes "./out/x" to "out/x", which would lose the
 * relative-looking form of configured output paths).
 */
function joinPath(base, filename) {
  const joined = path.join(base, filename);
  const lostDotSlash = base.startsWith("./") && !joined.startsWith("./");
  return lostDotSlash ? `./${joined}` : joined;
}
|
|
674
|
+
// Assemble the generated schema file: schema-library imports, enum schemas,
// per-table Select/Insert schemas, then type aliases for each schema const.
// Sections are joined with blank lines and the file ends with a newline.
function generateSchemaFile(schema, ir) {
  const parts = [];
  parts.push(schema.generateImports());
  for (const enumDef of ir.enums) {
    parts.push(schema.generateEnumSchema(enumDef));
  }
  for (const table of ir.tables) {
    parts.push(schema.generateSelectSchema(table, ir));
    parts.push(schema.generateInsertSchema(table, ir));
  }
  // Type aliases are emitted after all schema consts so they can refer to them.
  for (const table of ir.tables) {
    const selectName = `Select${pascalCase(table.name)}`;
    const insertName = `Insert${pascalCase(table.name)}`;
    parts.push(schema.generateTypeAlias(selectName, selectName));
    parts.push(schema.generateTypeAlias(insertName, insertName));
  }
  for (const enumDef of ir.enums) {
    const name = pascalCase(enumDef.name);
    parts.push(schema.generateTypeAlias(name, name));
  }
  return parts.join(`

`) + `
`;
}
|
|
699
|
+
var DATABASE_CLIENT_INTERFACE = `export interface DatabaseClient {
|
|
700
|
+
query<T>(sql: string, params: unknown[]): Promise<T[]>;
|
|
701
|
+
queryOne<T>(sql: string, params: unknown[]): Promise<T | null>;
|
|
702
|
+
execute(sql: string, params: unknown[]): Promise<{ rowsAffected: number }>;
|
|
703
|
+
}`;
|
|
704
|
+
/**
 * Build the contents of the generated client.ts: optional driver imports,
 * the shared DatabaseClient interface, and the driver's adapter code,
 * joined by blank lines with a trailing newline.
 */
function generateClientFile(driver) {
  const sections = [];
  const driverImports = driver.generateImports();
  if (driverImports) {
    sections.push(driverImports);
  }
  sections.push(DATABASE_CLIENT_INTERFACE);
  sections.push(driver.generateClientAdapter());
  return sections.join("\n\n") + "\n";
}
|
|
717
|
+
// Emit one `<basename>.queries.ts` per source SQL file: queries are grouped
// by their sourceFile, each file imports the generated DatabaseClient type,
// and each query function body is delegated to the driver.
function generateQueryFiles(driver, ir, outDir) {
  const grouped = new Map;
  for (const query of ir.queries) {
    const existing = grouped.get(query.sourceFile);
    if (existing) {
      existing.push(query);
    } else {
      grouped.set(query.sourceFile, [query]);
    }
  }
  const files = [];
  for (const [sourceFile, queries] of grouped) {
    // e.g. "queries/users.sql" -> "users.queries.ts"
    const basename = path.basename(sourceFile, path.extname(sourceFile));
    const filename = `${basename}.queries.ts`;
    const parts = [];
    parts.push(`import type { DatabaseClient } from "./client";`);
    for (const query of queries) {
      parts.push(driver.generateQueryFunction(query));
    }
    files.push({
      path: joinPath(outDir, filename),
      content: parts.join(`

`) + `
`
    });
  }
  return files;
}
|
|
746
|
+
// Language plugin entry point for TypeScript output. Generates schema.ts,
// client.ts, and one queries file per SQL source, using the configured
// schema generator (e.g. TypeBox) and driver (e.g. bun-sql).
function createTypeScriptPlugin(options) {
  const { schema, driver } = options;
  return {
    language: "typescript",
    fileExtension: ".ts",
    // Returns { path, content } descriptors; the CLI performs the writes.
    generate(ir, langOptions) {
      const outDir = langOptions.out;
      const files = [];
      files.push({
        path: joinPath(outDir, "schema.ts"),
        content: generateSchemaFile(schema, ir)
      });
      files.push({
        path: joinPath(outDir, "client.ts"),
        content: generateClientFile(driver)
      });
      files.push(...generateQueryFiles(driver, ir, outDir));
      return files;
    }
  };
}
|
|
767
|
+
|
|
768
|
+
// src/generator/typescript/schema/typebox.ts
|
|
769
|
+
/**
 * Escape a string for embedding inside a double-quoted source literal by
 * reusing JSON.stringify's escaping and stripping the surrounding quotes.
 */
function escapeString(str) {
  const quoted = JSON.stringify(str);
  return quoted.substring(1, quoted.length - 1);
}
|
|
772
|
+
/**
 * Render a parsed @json shape as a TypeBox schema expression string.
 * Objects map each field recursively, arrays wrap the element schema, and
 * nullable shapes become a union with Type.Null().
 */
function jsonShapeToTypeBox(shape) {
  switch (shape.kind) {
    case "string":
      return "Type.String()";
    case "number":
      return "Type.Number()";
    case "boolean":
      return "Type.Boolean()";
    case "array":
      return `Type.Array(${jsonShapeToTypeBox(shape.element)})`;
    case "nullable":
      return `Type.Union([${jsonShapeToTypeBox(shape.inner)}, Type.Null()])`;
    case "object": {
      const rendered = Object.entries(shape.fields)
        .map(([field, fieldShape]) => `"${escapeString(field)}": ${jsonShapeToTypeBox(fieldShape)}`)
        .join(", ");
      return `Type.Object({ ${rendered} })`;
    }
  }
}
|
|
790
|
+
/**
 * Map an IR column type to a TypeBox schema expression string.
 * Precedence: inline @enum values, then an @json shape, then array element
 * types, then the general category switch. Named enum types resolve to the
 * PascalCase schema const emitted for that enum.
 */
function typeBoxType(type) {
  if (type.enumValues) {
    const literals = type.enumValues
      .map((value) => `Type.Literal("${escapeString(value)}")`)
      .join(", ");
    return `Type.Union([${literals}])`;
  }
  if (type.jsonShape) {
    return jsonShapeToTypeBox(type.jsonShape);
  }
  if (type.elementType) {
    return `Type.Array(${typeBoxType(type.elementType)})`;
  }
  switch (type.category) {
    case "string":
      return "Type.String()";
    case "number":
      return "Type.Number()";
    case "boolean":
      return "Type.Boolean()";
    case "date":
      return "Type.Date()";
    case "json":
      return "Type.Any()";
    case "uuid":
      return "Type.String()";
    case "binary":
      return "Type.Uint8Array()";
    case "enum":
      // Named enums refer to the generated schema const; enums without a
      // name degrade to a plain string schema.
      return type.enumName ? pascalCase(type.enumName) : "Type.String()";
    case "unknown":
      return "Type.Unknown()";
    default: {
      // Exhaustiveness guard (TS `never` pattern retained in compiled output).
      const _exhaustive = type.category;
      return _exhaustive;
    }
  }
}
|
|
830
|
+
// Schema for a column as it appears in SELECT results:
// nullable columns are unioned with Type.Null().
function selectColumn(col) {
  const schema = typeBoxType(col.type);
  return col.nullable ? `Type.Union([${schema}, Type.Null()])` : schema;
}
|
|
837
|
+
// Schema for a column in INSERT payloads: columns with a database default are
// optional; nullable columns are optional too and accept explicit null.
function insertColumn(col) {
  const schema = typeBoxType(col.type);
  const withNull = `Type.Union([${schema}, Type.Null()])`;
  if (col.hasDefault) {
    return col.nullable ? `Type.Optional(${withNull})` : `Type.Optional(${schema})`;
  }
  // No default: still optional when nullable, required otherwise.
  return col.nullable ? `Type.Optional(${withNull})` : schema;
}
|
|
850
|
+
// Renders a Type.Object body: one `"name": schema` entry per column, with the
// per-column schema produced by `mapper` (selectColumn or insertColumn).
function objectBody(columns, mapper) {
  const lines = columns.map((col) => ` "${escapeString(col.name)}": ${mapper(col)}`);
  return `{\n${lines.join(",\n")}\n}`;
}
|
|
857
|
+
// Schema generator plugin that emits TypeBox runtime schemas plus
// Prettify<Static<…>> type aliases for generated code.
function createTypeBoxGenerator() {
  return {
    name: "typebox",
    // Import header prepended once per generated file.
    generateImports() {
      return [
        'import { Type, type Static } from "@sinclair/typebox";',
        "",
        "// Requires @sinclair/typebox >= 0.31.0 (for Type.Date and Type.Uint8Array)",
        "",
        "type Prettify<T> = { [K in keyof T]: T[K] } & {};",
      ].join("\n");
    },
    // `export const EnumName = Type.Union([Type.Literal(...), ...]);`
    generateEnumSchema(enumDef) {
      const literals = enumDef.values
        .map((v) => `Type.Literal("${escapeString(v)}")`)
        .join(", ");
      return `export const ${pascalCase(enumDef.name)} = Type.Union([${literals}]);`;
    },
    // Row schema for SELECT results (nullable columns unioned with null).
    generateSelectSchema(table, _ir) {
      const body = objectBody(table.columns, selectColumn);
      return `export const Select${pascalCase(table.name)} = Type.Object(${body});`;
    },
    // Input schema for INSERT payloads (defaulted/nullable columns optional).
    generateInsertSchema(table, _ir) {
      const body = objectBody(table.columns, insertColumn);
      return `export const Insert${pascalCase(table.name)} = Type.Object(${body});`;
    },
    // `export type Name = Prettify<Static<typeof Schema>>;`
    generateTypeAlias(name, schemaVarName) {
      return `export type ${name} = Prettify<Static<typeof ${schemaVarName}>>;`;
    },
  };
}
|
|
887
|
+
|
|
888
|
+
// src/generator/typescript/driver/bun-sql.ts
|
|
889
|
+
// Inserts an underscore at lower→upper camelCase boundaries
// ("fooBar" -> "foo_Bar") so downstream case converters re-split consistently.
function splitWords(str) {
  const boundary = /([a-z])([A-Z])/g;
  return str.replace(boundary, "$1_$2");
}
|
|
892
|
+
// camelCases an identifier regardless of its source casing.
function toCamel(str) {
  const prepared = splitWords(str);
  return camelCase(prepared);
}
|
|
895
|
+
// PascalCases an identifier regardless of its source casing.
function toPascal(str) {
  const prepared = splitWords(str);
  return pascalCase(prepared);
}
|
|
898
|
+
// Maps an IR column type to a plain TypeScript type annotation string.
// Arrays recurse on the element type; unknown categories yield undefined,
// matching the original exhaustive switch without a default.
function tsType(type) {
  if (type.elementType) {
    return `${tsType(type.elementType)}[]`;
  }
  const byCategory = {
    string: "string",
    uuid: "string",
    enum: "string", // enums are emitted as string at the TS-type level
    number: "number",
    boolean: "boolean",
    date: "Date",
    json: "unknown",
    binary: "Uint8Array",
    unknown: "unknown",
  };
  return byCategory[type.category];
}
|
|
921
|
+
// Emits `export interface <Name>Row { ... }` for a query's result columns.
// Returns "" when the query yields no rows (exec-style commands).
function generateRowType(query) {
  if (query.returns.length === 0) {
    return "";
  }
  const lines = query.returns.map((col) => {
    const field = col.alias ?? col.name;
    const suffix = col.nullable ? " | null" : "";
    return ` ${field}: ${tsType(col.type)}${suffix};`;
  });
  return `export interface ${toPascal(query.name)}Row {\n${lines.join("\n")}\n}`;
}
|
|
936
|
+
// Emits `export interface <Name>Params { ... }` for a query's parameters.
// Returns "" when the query takes no parameters.
function generateParamsType(query) {
  if (query.params.length === 0) {
    return "";
  }
  const lines = query.params.map((p) => ` ${p.name}: ${tsType(p.type)};`);
  return `export interface ${toPascal(query.name)}Params {\n${lines.join("\n")}\n}`;
}
|
|
946
|
+
// Driver generator targeting Bun's built-in SQL client.
// Produces a `BunSqlClient` adapter (implementing the generated DatabaseClient
// interface) plus one exported async function per named query.
function createBunSqlGenerator() {
  return {
    name: "bun-sql",
    // No driver-specific import lines are needed in generated files.
    generateImports() {
      return "";
    },
    // Emits the adapter bridging Bun's `sql.unsafe(query, values)` API to the
    // DatabaseClient interface consumed by the generated query functions.
    // NOTE(review): emitted-string indentation reconstructed — original
    // template whitespace is not fully visible in this view.
    generateClientAdapter() {
      return `interface BunSqlDriver {
  unsafe(query: string, values?: unknown[]): Promise<any[] & { count: number }>;
}

export class BunSqlClient implements DatabaseClient {
  private sql: BunSqlDriver;

  constructor(sql: BunSqlDriver) {
    this.sql = sql;
  }

  async query<T>(text: string, values?: unknown[]): Promise<T[]> {
    const result = await this.sql.unsafe(text, values);
    return [...result] as T[];
  }

  async queryOne<T>(text: string, values?: unknown[]): Promise<T | null> {
    const rows = await this.query<T>(text, values);
    return rows[0] ?? null;
  }

  async execute(text: string, values?: unknown[]): Promise<{ rowsAffected: number }> {
    const result = await this.sql.unsafe(text, values);
    return { rowsAffected: result.count };
  }
}`;
    },
    // Emits, in order: optional Row interface, optional Params interface, the
    // SQL string constant, and the typed async wrapper function.
    generateQueryFunction(query) {
      const fnName = toCamel(query.name);
      const rowType = generateRowType(query);
      const hasParams = query.params.length > 0;
      const paramsInterface = generateParamsType(query);
      const paramsTypeName = `${toPascal(query.name)}Params`;
      // JSON.stringify quotes and escapes the raw SQL text safely.
      const sqlConst = `export const ${fnName}Sql = ${JSON.stringify(query.sql)};`;
      const paramsSig = hasParams ? `, params: ${paramsTypeName}` : "";
      // Positional driver values are read from the named params object in order.
      const valuesArg = hasParams ? `[${query.params.map((p) => `params.${p.name}`).join(", ")}]` : "[]";
      let returnType;
      let body;
      // The query annotation (:one / :many / :exec / :execresult) selects the
      // client call and return shape of the generated function.
      switch (query.command) {
        case "one": {
          const typeName = `${toPascal(query.name)}Row`;
          returnType = `Promise<${typeName} | null>`;
          body = `  return client.queryOne<${typeName}>(${fnName}Sql, ${valuesArg});`;
          break;
        }
        case "many": {
          const typeName = `${toPascal(query.name)}Row`;
          returnType = `Promise<${typeName}[]>`;
          body = `  return client.query<${typeName}>(${fnName}Sql, ${valuesArg});`;
          break;
        }
        case "exec": {
          // :exec discards the result entirely.
          returnType = "Promise<void>";
          body = `  await client.execute(${fnName}Sql, ${valuesArg});`;
          break;
        }
        case "execresult": {
          // :execresult surfaces the affected-row count.
          returnType = "Promise<{ rowsAffected: number }>";
          body = `  return client.execute(${fnName}Sql, ${valuesArg});`;
          break;
        }
      }
      const parts = [];
      if (rowType)
        parts.push(rowType);
      if (paramsInterface)
        parts.push(paramsInterface);
      parts.push(sqlConst);
      parts.push(`export async function ${fnName}(client: DatabaseClient${paramsSig}): ${returnType} {
${body}
}`);
      // Sections are separated by one blank line in the generated file.
      return parts.join(`

`);
    }
  };
}
|
|
1030
|
+
|
|
1031
|
+
// src/cli/index.ts
|
|
1032
|
+
// Recursively collects every .sql file under `dir` (absolute paths), sorted
// so the resulting order — and therefore the cache hash — is deterministic.
function globSqlFiles(dir) {
  const matcher = new Bun.Glob("**/*.sql");
  const found = [...matcher.scanSync({ cwd: dir, absolute: true })];
  return found.sort();
}
|
|
1040
|
+
// A file counts as schema DDL when it contains a CREATE TABLE statement.
function isSchemaFile(content) {
  const ddlPattern = /CREATE\s+TABLE/i;
  return ddlPattern.test(content);
}
|
|
1043
|
+
// A file counts as a query file when it contains a `-- name:` annotation.
function isQueryFile(content) {
  const annotation = /--\s*name:/i;
  return annotation.test(content);
}
|
|
1046
|
+
// Returns the argv value following `flag`, or undefined when the flag is
// absent or is the final argument (no value supplied).
function getFlag(args, flag) {
  const at = args.indexOf(flag);
  if (at === -1 || at + 1 >= args.length) {
    return undefined;
  }
  return args[at + 1];
}
|
|
1050
|
+
// Parses all SQL under `sqlDir` (reusing the IR cache when the content hash
// matches) and writes the generated TypeScript files into `outDir`.
async function generate(options) {
  const { sqlDir, outDir, cacheDir } = options;
  const sqlFiles = globSqlFiles(sqlDir);
  if (sqlFiles.length === 0) {
    console.log("No .sql files found in", sqlDir);
    return;
  }
  // Paths are stored relative to sqlDir so the hash is location-independent.
  const fileContents = sqlFiles.map((filePath) => ({
    path: relative(sqlDir, filePath),
    content: readFileSync2(filePath, "utf-8")
  }));
  const hash = computeHash(fileContents);
  // Cache hit skips re-parsing; a miss re-parses and refreshes the cache.
  let ir = readCache(cacheDir, hash);
  if (!ir) {
    ir = parse(fileContents);
    writeCache(cacheDir, ir, hash);
  }
  const plugin = createTypeScriptPlugin({
    schema: createTypeBoxGenerator(),
    driver: createBunSqlGenerator()
  });
  const generatedFiles = plugin.generate(ir, { out: outDir });
  for (const file of generatedFiles) {
    // Create the target directory on demand before writing each file.
    const targetDir = dirname(file.path);
    if (!existsSync2(targetDir)) {
      mkdirSync2(targetDir, { recursive: true });
    }
    writeFileSync2(file.path, file.content, "utf-8");
  }
  console.log(`Generated ${generatedFiles.length} files to ${outDir}`);
}
|
|
1081
|
+
// Validates the SQL files without emitting code; returns a CI-friendly
// summary { valid, tables, queries, errors }.
async function check(options) {
  const { sqlDir, cacheDir } = options;
  const errors = [];
  const sqlFiles = globSqlFiles(sqlDir);
  // An empty directory is vacuously valid.
  if (sqlFiles.length === 0) {
    return { valid: true, tables: 0, queries: 0, errors: [] };
  }
  const fileContents = sqlFiles.map((filePath) => ({
    path: relative(sqlDir, filePath),
    content: readFileSync2(filePath, "utf-8")
  }));
  const hash = computeHash(fileContents);
  let ir = readCache(cacheDir, hash);
  if (!ir) {
    // A cached IR implies the files already parsed cleanly; only a fresh
    // parse can surface errors.
    try {
      ir = parse(fileContents);
      writeCache(cacheDir, ir, hash);
    } catch (err) {
      errors.push(String(err));
      return { valid: false, tables: 0, queries: 0, errors };
    }
  }
  return {
    valid: errors.length === 0,
    tables: ir.tables.length,
    queries: ir.queries.length,
    errors
  };
}
|
|
1110
|
+
// Builds the IR: enums and tables from schema files, typed queries from
// annotated query files. A file may contribute to both categories.
function parse(fileContents) {
  const parser = createPostgresParser();
  // All schema DDL is concatenated so cross-file references resolve.
  const schemaSql = fileContents
    .filter((f) => isSchemaFile(f.content))
    .map((f) => f.content)
    .join("\n\n");
  const enums = parser.parseEnums(schemaSql);
  const tables = parser.parseSchema(schemaSql);
  const queries = [];
  for (const file of fileContents) {
    if (!isQueryFile(file.content)) continue;
    for (const q of parser.parseQueries(file.content, tables)) {
      // Tag each query with its source file name (sans extension).
      q.sourceFile = basename(file.path, extname(file.path));
      queries.push(q);
    }
  }
  return { tables, queries, enums };
}
|
|
1129
|
+
// Scaffolds a starter project layout: sql/schema.sql, sql/queries/users.sql,
// and sqlcx.config.ts. Each file is only written when it does not already
// exist, so re-running init never clobbers user edits.
async function init() {
  const sqlDir = "sql";
  const queriesDir = join2(sqlDir, "queries");
  // Ensure sql/queries exists; { recursive: true } also creates sql/ itself,
  // so both branches issue the same call.
  if (!existsSync2(sqlDir)) {
    mkdirSync2(queriesDir, { recursive: true });
  } else if (!existsSync2(queriesDir)) {
    mkdirSync2(queriesDir, { recursive: true });
  }
  const schemaPath = join2(sqlDir, "schema.sql");
  if (!existsSync2(schemaPath)) {
    // Example DDL so `generate` has something to parse out of the box.
    writeFileSync2(schemaPath, `CREATE TABLE users (
  id SERIAL PRIMARY KEY,
  name TEXT NOT NULL,
  email TEXT NOT NULL UNIQUE,
  created_at TIMESTAMP NOT NULL DEFAULT NOW()
);
`, "utf-8");
    console.log("Created", schemaPath);
  }
  const queryPath = join2(queriesDir, "users.sql");
  if (!existsSync2(queryPath)) {
    // Example annotated queries covering the :one, :many and :exec commands.
    writeFileSync2(queryPath, `-- name: GetUserById :one
SELECT * FROM users WHERE id = $1;

-- name: ListUsers :many
SELECT * FROM users ORDER BY created_at DESC;

-- name: CreateUser :exec
INSERT INTO users (name, email) VALUES ($1, $2);
`, "utf-8");
    console.log("Created", queryPath);
  }
  const configPath = "sqlcx.config.ts";
  if (!existsSync2(configPath)) {
    // Default config wiring the Postgres parser to the TypeBox + Bun SQL target.
    writeFileSync2(configPath, `import { defineConfig } from "sqlcx";
import { createPostgresParser } from "sqlcx/parser/postgres";
import { createTypeScriptPlugin } from "sqlcx/generator/typescript";
import { createTypeBoxGenerator } from "sqlcx/generator/typescript/schema/typebox";
import { createBunSqlGenerator } from "sqlcx/generator/typescript/driver/bun-sql";

export default defineConfig({
  sql: "./sql",
  parser: createPostgresParser(),
  targets: [
    createTypeScriptPlugin({
      schema: createTypeBoxGenerator(),
      driver: createBunSqlGenerator(),
    }),
  ],
});
`, "utf-8");
    console.log("Created", configPath);
  }
  console.log(`
Project initialized! Run 'sqlcx generate' to generate types.`);
}
|
|
1185
|
+
// Prints CLI usage to stdout, one console.log call per line.
function printHelp() {
  const usage = [
    "Usage: sqlcx <generate|check|init> [options]",
    "",
    "Commands:",
    "  generate   Parse SQL and generate typed code",
    "  check      Validate SQL files without generating (CI-friendly)",
    "  init       Scaffold sql/ directory with example files",
    "",
    "Options:",
    "  --sql <dir>    SQL directory (default: ./sql)",
    "  --out <dir>    Output directory (default: ./src/db)",
    "  --cache <dir>  Cache directory (default: .sqlcx)",
  ];
  for (const line of usage) {
    console.log(line);
  }
}
|
|
1198
|
+
// CLI entry point. Bun sets import.meta.main when this file is executed
// directly (as opposed to being imported); top-level await is available.
if (import.meta.main) {
  const args = process.argv.slice(2);
  const command = args[0];
  if (command === "generate") {
    // sqlcx generate [--sql <dir>] [--out <dir>] [--cache <dir>]
    await generate({
      sqlDir: getFlag(args, "--sql") ?? "./sql",
      outDir: getFlag(args, "--out") ?? "./src/db",
      cacheDir: getFlag(args, "--cache") ?? ".sqlcx"
    });
  } else if (command === "check") {
    // sqlcx check — validates without generating; exits 1 on failure so CI
    // pipelines can gate on it.
    const result = await check({
      sqlDir: getFlag(args, "--sql") ?? "./sql",
      cacheDir: getFlag(args, "--cache") ?? ".sqlcx"
    });
    if (!result.valid) {
      console.error("Check failed:", result.errors);
      process.exit(1);
    }
    console.log(`Check passed: ${result.tables} tables, ${result.queries} queries`);
  } else if (command === "init") {
    // sqlcx init — scaffolds example sql/ files and a config.
    await init();
  } else {
    // Unknown or missing command: show usage.
    printHelp();
  }
}
|
|
1223
|
+
export {
|
|
1224
|
+
generate,
|
|
1225
|
+
check
|
|
1226
|
+
};
|