sqlcx-orm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3qq0zjsm.js +12 -0
- package/dist/chunk-49wq4032.js +11 -0
- package/dist/cli.js +1226 -0
- package/dist/config/index.js +9 -0
- package/dist/generator/typescript/driver/bun-sql.js +151 -0
- package/dist/generator/typescript/index.js +110 -0
- package/dist/generator/typescript/schema/typebox.js +127 -0
- package/dist/index.js +7 -0
- package/dist/parser/postgres.js +615 -0
- package/package.json +56 -0
- package/src/cache/index.ts +46 -0
- package/src/cli/index.ts +306 -0
- package/src/config/index.ts +19 -0
- package/src/generator/interface.ts +36 -0
- package/src/generator/typescript/driver/bun-sql.ts +157 -0
- package/src/generator/typescript/index.ts +144 -0
- package/src/generator/typescript/schema/typebox.ts +143 -0
- package/src/index.ts +23 -0
- package/src/ir/index.ts +72 -0
- package/src/parser/interface.ts +8 -0
- package/src/parser/param-naming.ts +49 -0
- package/src/parser/postgres.ts +745 -0
- package/src/utils/index.ts +13 -0
|
@@ -0,0 +1,745 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
TableDef,
|
|
3
|
+
QueryDef,
|
|
4
|
+
EnumDef,
|
|
5
|
+
ColumnDef,
|
|
6
|
+
SqlType,
|
|
7
|
+
SqlTypeCategory,
|
|
8
|
+
QueryCommand,
|
|
9
|
+
ParamDef,
|
|
10
|
+
JsonShape,
|
|
11
|
+
} from "@/ir";
|
|
12
|
+
import type { DatabaseParser } from "@/parser/interface";
|
|
13
|
+
import { resolveParamNames, type RawParam } from "@/parser/param-naming";
|
|
14
|
+
|
|
15
|
+
// ---------------------------------------------------------------------------
|
|
16
|
+
// SQL type mapping
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
|
|
19
|
+
const TYPE_CATEGORY_MAP = new Map<string, SqlTypeCategory>([
|
|
20
|
+
["text", "string"],
|
|
21
|
+
["varchar", "string"],
|
|
22
|
+
["char", "string"],
|
|
23
|
+
["character varying", "string"],
|
|
24
|
+
["character", "string"],
|
|
25
|
+
["name", "string"],
|
|
26
|
+
["integer", "number"],
|
|
27
|
+
["int", "number"],
|
|
28
|
+
["int2", "number"],
|
|
29
|
+
["int4", "number"],
|
|
30
|
+
["int8", "number"],
|
|
31
|
+
["smallint", "number"],
|
|
32
|
+
["bigint", "number"],
|
|
33
|
+
["serial", "number"],
|
|
34
|
+
["bigserial", "number"],
|
|
35
|
+
["real", "number"],
|
|
36
|
+
["double precision", "number"],
|
|
37
|
+
["numeric", "number"],
|
|
38
|
+
["decimal", "number"],
|
|
39
|
+
["float", "number"],
|
|
40
|
+
["float4", "number"],
|
|
41
|
+
["float8", "number"],
|
|
42
|
+
["boolean", "boolean"],
|
|
43
|
+
["bool", "boolean"],
|
|
44
|
+
["timestamp", "date"],
|
|
45
|
+
["timestamptz", "date"],
|
|
46
|
+
["date", "date"],
|
|
47
|
+
["time", "date"],
|
|
48
|
+
["timetz", "date"],
|
|
49
|
+
["timestamp without time zone", "date"],
|
|
50
|
+
["timestamp with time zone", "date"],
|
|
51
|
+
["json", "json"],
|
|
52
|
+
["jsonb", "json"],
|
|
53
|
+
["uuid", "uuid"],
|
|
54
|
+
["bytea", "binary"],
|
|
55
|
+
]);
|
|
56
|
+
|
|
57
|
+
const SERIAL_TYPES = new Set(["serial", "bigserial"]);
|
|
58
|
+
|
|
59
|
+
function resolveType(raw: string, enumNames: Set<string>): SqlType {
|
|
60
|
+
const trimmed = raw.trim();
|
|
61
|
+
|
|
62
|
+
// Array detection
|
|
63
|
+
if (trimmed.endsWith("[]")) {
|
|
64
|
+
const baseRaw = trimmed.slice(0, -2);
|
|
65
|
+
const elementType = resolveType(baseRaw, enumNames);
|
|
66
|
+
return {
|
|
67
|
+
raw: trimmed,
|
|
68
|
+
normalized: trimmed.toLowerCase(),
|
|
69
|
+
category: elementType.category,
|
|
70
|
+
elementType,
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
const normalized = trimmed.toLowerCase();
|
|
75
|
+
const category = TYPE_CATEGORY_MAP.get(normalized);
|
|
76
|
+
if (category) {
|
|
77
|
+
return { raw: trimmed, normalized, category };
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Check if it's a known enum
|
|
81
|
+
if (enumNames.has(normalized)) {
|
|
82
|
+
return { raw: trimmed, normalized, category: "enum", enumName: normalized };
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
return { raw: trimmed, normalized, category: "unknown" };
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
// ---------------------------------------------------------------------------
|
|
89
|
+
// Enum parsing (regex-based)
|
|
90
|
+
// ---------------------------------------------------------------------------
|
|
91
|
+
|
|
92
|
+
const ENUM_RE_SOURCE =
|
|
93
|
+
/CREATE\s+TYPE\s+(\w+)\s+AS\s+ENUM\s*\(\s*((?:'[^']*'(?:\s*,\s*'[^']*')*)?)\s*\)/i.source;
|
|
94
|
+
|
|
95
|
+
function parseEnumDefs(sql: string): EnumDef[] {
|
|
96
|
+
const re = new RegExp(ENUM_RE_SOURCE, "gi");
|
|
97
|
+
const enums: EnumDef[] = [];
|
|
98
|
+
let m: RegExpExecArray | null;
|
|
99
|
+
while ((m = re.exec(sql)) !== null) {
|
|
100
|
+
const name = m[1].toLowerCase();
|
|
101
|
+
const valuesRaw = m[2];
|
|
102
|
+
const values = [...valuesRaw.matchAll(/'([^']*)'/g)].map((v) => v[1]);
|
|
103
|
+
enums.push({ name, values });
|
|
104
|
+
}
|
|
105
|
+
return enums;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
// ---------------------------------------------------------------------------
|
|
109
|
+
// Schema parsing (regex-based for reliability with custom types)
|
|
110
|
+
// ---------------------------------------------------------------------------
|
|
111
|
+
|
|
112
|
+
// Matches `CREATE TABLE [IF NOT EXISTS] <name> ( <body> );`. The body capture
// is lazy, so it ends at the first `)` that is immediately followed by `;` —
// inner parens like `REFERENCES users(id)` are fine because they are not
// followed by a semicolon. Stored as `.source` so callers re-instantiate with
// their own flags and never share lastIndex state.
const CREATE_TABLE_RE_SOURCE =
  /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(\w+)\s*\(([\s\S]*?)\)\s*;/i.source;
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Split the CREATE TABLE body into individual column/constraint definitions.
|
|
117
|
+
* Handles nested parentheses so REFERENCES users(id) doesn't cause a split.
|
|
118
|
+
*/
|
|
119
|
+
function splitColumnDefs(body: string): string[] {
|
|
120
|
+
const parts: string[] = [];
|
|
121
|
+
let depth = 0;
|
|
122
|
+
let current = "";
|
|
123
|
+
|
|
124
|
+
for (const ch of body) {
|
|
125
|
+
if (ch === "(") {
|
|
126
|
+
depth++;
|
|
127
|
+
current += ch;
|
|
128
|
+
} else if (ch === ")") {
|
|
129
|
+
depth--;
|
|
130
|
+
current += ch;
|
|
131
|
+
} else if (ch === "," && depth === 0) {
|
|
132
|
+
parts.push(current.trim());
|
|
133
|
+
current = "";
|
|
134
|
+
} else {
|
|
135
|
+
current += ch;
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
if (current.trim()) parts.push(current.trim());
|
|
139
|
+
return parts;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
// Postgres type names that span multiple words. These must be tried before
// the single-word fallback, or only the first word would be taken as the type.
const MULTI_WORD_TYPES = [
  "character varying",
  "double precision",
  "timestamp without time zone",
  "timestamp with time zone",
];

/**
 * Parse one column definition fragment from a CREATE TABLE body.
 *
 * Returns null for blank lines and for table-level constraints (PRIMARY KEY,
 * CONSTRAINT, UNIQUE, CHECK, FOREIGN KEY) — those are handled by the caller.
 *
 * @param line      a single definition fragment (already comma-split)
 * @param enumNames lower-cased names of enums declared in the same script
 * @returns the column plus whether the line carried inline PRIMARY KEY /
 *          UNIQUE markers, or null when the line is not a column definition.
 */
function parseColumnLine(
  line: string,
  enumNames: Set<string>,
): { col: ColumnDef; isPK: boolean; isUnique: boolean } | null {
  line = line.trim();
  if (!line) return null;
  if (/^(PRIMARY\s+KEY|CONSTRAINT|UNIQUE|CHECK|FOREIGN\s+KEY)/i.test(line)) {
    return null;
  }

  // Extract column name (first word); a fragment with no trailing whitespace
  // (i.e. no type at all) is not a usable column line.
  const nameMatch = line.match(/^(\w+)\s+/);
  if (!nameMatch) return null;
  const colName = nameMatch[1].toLowerCase();
  const afterName = line.slice(nameMatch[0].length);

  // Determine the type: multi-word names first, then one word with an
  // optional `[]` array suffix. Note a matched multi-word type is kept in
  // lower case; its length equals the original text's, so the slice below
  // still removes exactly the type portion.
  let rawType: string | null = null;
  for (const mwt of MULTI_WORD_TYPES) {
    if (afterName.toLowerCase().startsWith(mwt)) {
      rawType = mwt;
      break;
    }
  }
  if (!rawType) {
    const typeMatch = afterName.match(/^(\w+(?:\[\])?)/);
    rawType = typeMatch ? typeMatch[1] : "unknown";
  }

  // Everything after the type: constraint keywords, DEFAULT clause, etc.
  const restAfterType = afterName.slice(rawType.length).trim();

  const isNotNull = /\bNOT\s+NULL\b/i.test(restAfterType);
  const hasDefaultKeyword = /\bDEFAULT\b/i.test(restAfterType);
  // serial/bigserial columns carry an implicit sequence default.
  const isSerial = SERIAL_TYPES.has(rawType.toLowerCase());
  const isPK = /\bPRIMARY\s+KEY\b/i.test(restAfterType);
  const isUnique = /\bUNIQUE\b/i.test(restAfterType);

  const type = resolveType(rawType, enumNames);

  return {
    col: {
      name: colName,
      type,
      nullable: !isNotNull,
      hasDefault: hasDefaultKeyword || isSerial,
    },
    isPK,
    isUnique,
  };
}
|
|
199
|
+
|
|
200
|
+
// ---------------------------------------------------------------------------
|
|
201
|
+
// Inline annotation parsing (@enum, @json)
|
|
202
|
+
// ---------------------------------------------------------------------------
|
|
203
|
+
|
|
204
|
+
function parseEnumAnnotation(comment: string): string[] | undefined {
|
|
205
|
+
const match = comment.match(/--\s*@enum\s*\(\s*(.*?)\s*\)/);
|
|
206
|
+
if (!match) return undefined;
|
|
207
|
+
const inner = match[1];
|
|
208
|
+
const values: string[] = [];
|
|
209
|
+
const re = /"([^"]*?)"/g;
|
|
210
|
+
let m: RegExpExecArray | null;
|
|
211
|
+
while ((m = re.exec(inner)) !== null) {
|
|
212
|
+
values.push(m[1]);
|
|
213
|
+
}
|
|
214
|
+
return values.length > 0 ? values : undefined;
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
/**
 * Recursive-descent parser for the @json type DSL.
 * Supports: string, number, boolean, { key: type }, type[], type?
 *
 * Informal grammar:
 *   type      := (object | primitive) "[]"* "?"?
 *   object    := "{" (field ("," field)* ","?)? "}"
 *   field     := identifier ":" type
 *   primitive := "string" | "number" | "boolean"
 */
class JsonShapeParser {
  // Cursor into `input`; advanced by every consuming helper (and by peek —
  // see the note on peek() below).
  private pos = 0;
  constructor(private input: string) {}

  /** Parse the entire input as one type; throws on trailing content. */
  parse(): JsonShape {
    const shape = this.parseType();
    this.skipWs();
    if (this.pos < this.input.length) {
      throw new Error(
        `@json parse error: unexpected trailing content at position ${this.pos}: "${this.input.slice(this.pos, this.pos + 10)}"`,
      );
    }
    return shape;
  }

  /** type := (object | primitive), then any number of `[]`, then one optional `?`. */
  private parseType(): JsonShape {
    this.skipWs();
    let shape: JsonShape;

    if (this.peek() === "{") {
      shape = this.parseObject();
    } else {
      shape = this.parsePrimitive();
    }

    // Check for array suffix [] — repeated suffixes produce nested arrays.
    this.skipWs();
    while (this.lookAhead("[]")) {
      this.pos += 2;
      this.skipWs();
      shape = { kind: "array", element: shape };
    }

    // Check for nullable suffix ? (binds after []: `number[]?` is a nullable array)
    if (this.peek() === "?") {
      this.pos++;
      shape = { kind: "nullable", inner: shape };
    }

    return shape;
  }

  /** primitive := "string" | "number" | "boolean". */
  private parsePrimitive(): JsonShape {
    this.skipWs();
    if (this.matchWord("string")) return { kind: "string" };
    if (this.matchWord("number")) return { kind: "number" };
    if (this.matchWord("boolean")) return { kind: "boolean" };
    throw new Error(
      `@json parse error: unexpected token at position ${this.pos}: "${this.input.slice(this.pos, this.pos + 10)}"`,
    );
  }

  /** object := "{" fields "}"; tolerates a trailing comma before "}". */
  private parseObject(): JsonShape {
    this.consume("{");
    this.skipWs();
    const fields: Record<string, JsonShape> = {};

    if (this.peek() !== "}") {
      this.parseField(fields);
      while (this.peek() === ",") {
        this.pos++; // consume ','
        this.skipWs();
        if (this.peek() === "}") break; // trailing comma
        this.parseField(fields);
      }
    }

    this.consume("}");
    return { kind: "object", fields };
  }

  /** field := identifier ":" type — written into `fields` in place. */
  private parseField(fields: Record<string, JsonShape>): void {
    this.skipWs();
    const name = this.readIdentifier();
    this.skipWs();
    this.consume(":");
    this.skipWs();
    fields[name] = this.parseType();
    this.skipWs();
  }

  /** Read a run of word characters; throws when none are present. */
  private readIdentifier(): string {
    this.skipWs();
    const start = this.pos;
    while (this.pos < this.input.length && /[\w]/.test(this.input[this.pos])) {
      this.pos++;
    }
    if (this.pos === start) {
      throw new Error(
        `@json parse error: expected identifier at position ${this.pos}`,
      );
    }
    return this.input.slice(start, this.pos);
  }

  private skipWs(): void {
    while (this.pos < this.input.length && /\s/.test(this.input[this.pos])) {
      this.pos++;
    }
  }

  // NOTE: peek() skips whitespace first, so it advances `pos` past any
  // leading whitespace as a side effect. Callers rely on this.
  private peek(): string | undefined {
    this.skipWs();
    return this.input[this.pos];
  }

  /** True when the literal `s` starts exactly at `pos` (no whitespace skip). */
  private lookAhead(s: string): boolean {
    return this.input.startsWith(s, this.pos);
  }

  /** Consume `word` only when it ends at a word boundary (so "stringy" fails). */
  private matchWord(word: string): boolean {
    if (this.input.startsWith(word, this.pos)) {
      const afterPos = this.pos + word.length;
      if (afterPos >= this.input.length || !/\w/.test(this.input[afterPos])) {
        this.pos = afterPos;
        return true;
      }
    }
    return false;
  }

  /** Expect `ch` after optional whitespace and step past it; throws otherwise. */
  private consume(ch: string): void {
    this.skipWs();
    if (this.input[this.pos] !== ch) {
      throw new Error(
        `@json parse error: expected '${ch}' at position ${this.pos}, got '${this.input[this.pos]}'`,
      );
    }
    this.pos++;
  }
}
|
|
352
|
+
|
|
353
|
+
function parseJsonAnnotation(comment: string): JsonShape | undefined {
|
|
354
|
+
const match = comment.match(/--\s*@json\s*\(\s*([\s\S]+)\s*\)\s*$/);
|
|
355
|
+
if (!match) return undefined;
|
|
356
|
+
const body = match[1].trim();
|
|
357
|
+
try {
|
|
358
|
+
const parser = new JsonShapeParser(body);
|
|
359
|
+
return parser.parse();
|
|
360
|
+
} catch {
|
|
361
|
+
return undefined;
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
/**
 * Parse every CREATE TABLE statement in `sql` into a TableDef.
 *
 * For each table body this runs two passes:
 *  1. walk the body line by line, separating `--` comment lines from SQL and
 *     recording which definition index each accumulated comment belongs to
 *     (so `@enum`/`@json` annotations written above a column attach to it);
 *  2. split the comment-free text into definitions and parse each one.
 *
 * @param sql       full schema script
 * @param enumNames lower-cased names of enums declared in the same script
 */
function parseSchemaDefs(sql: string, enumNames: Set<string>): TableDef[] {
  const re = new RegExp(CREATE_TABLE_RE_SOURCE, "gi");
  const tables: TableDef[] = [];
  let m: RegExpExecArray | null;

  while ((m = re.exec(sql)) !== null) {
    const tableName = m[1].toLowerCase();
    const body = m[2];

    const columns: ColumnDef[] = [];
    const primaryKey: string[] = [];
    const uniqueConstraints: string[][] = [];

    // Split body into raw lines, then group comments with the column that follows
    const rawLines = body.split("\n");
    let pendingComment = "";
    // First pass: associate comment lines with column defs.
    // We accumulate non-comment text and detect "a new def just completed" by
    // re-running splitColumnDefs and comparing the non-empty def counts.
    // NOTE(review): commentMap keys count only non-empty defs while the second
    // pass indexes the unfiltered list — these agree unless the body contains
    // an empty fragment (e.g. ",,"), which well-formed SQL doesn't produce.
    let nonCommentBuffer = "";
    const commentMap = new Map<number, string>(); // defIndex -> comment

    for (const rawLine of rawLines) {
      const trimmedLine = rawLine.trim();
      if (trimmedLine.startsWith("--")) {
        // Accumulate comment lines — annotations can be on any line above the column
        pendingComment += (pendingComment ? "\n" : "") + trimmedLine;
      } else {
        // Non-comment content; track how many defs this adds
        const beforeDefs = splitColumnDefs(nonCommentBuffer).filter(
          (d) => d.trim().length > 0,
        ).length;
        nonCommentBuffer += (nonCommentBuffer ? "\n" : "") + rawLine;
        const afterDefs = splitColumnDefs(nonCommentBuffer).filter(
          (d) => d.trim().length > 0,
        ).length;

        if (afterDefs > beforeDefs && pendingComment) {
          // The def at index `beforeDefs` just started/completed — the
          // pending comment documents it.
          commentMap.set(beforeDefs, pendingComment);
          pendingComment = "";
        } else if (afterDefs === beforeDefs) {
          // Still accumulating same def, keep comment pending
        } else {
          pendingComment = "";
        }
      }
    }

    const lines = splitColumnDefs(nonCommentBuffer);

    for (let i = 0; i < lines.length; i++) {
      const trimmed = lines[i].trim();

      // Table-level PRIMARY KEY constraint (possibly composite)
      const pkMatch = trimmed.match(
        /^PRIMARY\s+KEY\s*\(\s*([\w\s,]+)\s*\)/i,
      );
      if (pkMatch) {
        primaryKey.push(
          ...pkMatch[1].split(",").map((s) => s.trim().toLowerCase()),
        );
        continue;
      }

      const result = parseColumnLine(trimmed, enumNames);
      if (!result) continue;

      // Apply inline annotations from the comment above this column
      const comment = commentMap.get(i);
      if (comment) {
        const enumValues = parseEnumAnnotation(comment);
        if (enumValues) {
          // @enum overrides the declared category with an inline value list.
          result.col.type = {
            ...result.col.type,
            category: "enum",
            enumValues,
          };
        }

        const jsonShape = parseJsonAnnotation(comment);
        if (jsonShape) {
          result.col.type = {
            ...result.col.type,
            jsonShape,
          };
        }
      }

      columns.push(result.col);
      if (result.isPK) {
        primaryKey.push(result.col.name);
      }
      if (result.isUnique) {
        uniqueConstraints.push([result.col.name]);
      }
    }

    // PK columns are implicitly NOT NULL — fix nullable for table-level PKs
    for (const col of columns) {
      if (primaryKey.includes(col.name)) {
        col.nullable = false;
        col.hasDefault = col.hasDefault || SERIAL_TYPES.has(col.type.normalized);
      }
    }

    tables.push({ name: tableName, columns, primaryKey, uniqueConstraints });
  }

  return tables;
}
|
|
474
|
+
|
|
475
|
+
// ---------------------------------------------------------------------------
|
|
476
|
+
// Query parsing
|
|
477
|
+
// ---------------------------------------------------------------------------
|
|
478
|
+
|
|
479
|
+
const QUERY_ANNOTATION_RE =
|
|
480
|
+
/--\s*name:\s*(\w+)\s+:(one|many|execresult|exec)/;
|
|
481
|
+
const PARAM_OVERRIDE_RE = /--\s*@param\s+\$(\d+)\s+(\w+)/g;
|
|
482
|
+
const DOLLAR_PARAM_RE = /\$(\d+)/g;
|
|
483
|
+
|
|
484
|
+
interface QueryBlock {
|
|
485
|
+
name: string;
|
|
486
|
+
command: QueryCommand;
|
|
487
|
+
sql: string;
|
|
488
|
+
comments: string;
|
|
489
|
+
}
|
|
490
|
+
|
|
491
|
+
function splitQueryBlocks(sql: string): QueryBlock[] {
|
|
492
|
+
const lines = sql.split("\n");
|
|
493
|
+
const blocks: QueryBlock[] = [];
|
|
494
|
+
let current: QueryBlock | null = null;
|
|
495
|
+
let commentBuffer = "";
|
|
496
|
+
|
|
497
|
+
for (const line of lines) {
|
|
498
|
+
const trimmed = line.trim();
|
|
499
|
+
const annotationMatch = trimmed.match(QUERY_ANNOTATION_RE);
|
|
500
|
+
|
|
501
|
+
if (annotationMatch) {
|
|
502
|
+
if (current) blocks.push(current);
|
|
503
|
+
current = {
|
|
504
|
+
name: annotationMatch[1],
|
|
505
|
+
command: annotationMatch[2] as QueryCommand,
|
|
506
|
+
sql: "",
|
|
507
|
+
comments: commentBuffer + trimmed + "\n",
|
|
508
|
+
};
|
|
509
|
+
commentBuffer = "";
|
|
510
|
+
} else if (trimmed.startsWith("--")) {
|
|
511
|
+
if (current) {
|
|
512
|
+
current.comments += trimmed + "\n";
|
|
513
|
+
} else {
|
|
514
|
+
commentBuffer += trimmed + "\n";
|
|
515
|
+
}
|
|
516
|
+
} else if (current && trimmed) {
|
|
517
|
+
current.sql += (current.sql ? " " : "") + trimmed;
|
|
518
|
+
}
|
|
519
|
+
}
|
|
520
|
+
|
|
521
|
+
if (current) blocks.push(current);
|
|
522
|
+
return blocks;
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
function extractParamOverrides(comments: string): Map<number, string> {
|
|
526
|
+
const overrides = new Map<number, string>();
|
|
527
|
+
let m: RegExpExecArray | null;
|
|
528
|
+
const re = new RegExp(PARAM_OVERRIDE_RE.source, "g");
|
|
529
|
+
while ((m = re.exec(comments)) !== null) {
|
|
530
|
+
overrides.set(parseInt(m[1], 10), m[2]);
|
|
531
|
+
}
|
|
532
|
+
return overrides;
|
|
533
|
+
}
|
|
534
|
+
|
|
535
|
+
function extractParamIndices(sql: string): number[] {
|
|
536
|
+
const indices = new Set<number>();
|
|
537
|
+
let m: RegExpExecArray | null;
|
|
538
|
+
const re = new RegExp(DOLLAR_PARAM_RE.source, "g");
|
|
539
|
+
while ((m = re.exec(sql)) !== null) {
|
|
540
|
+
indices.add(parseInt(m[1], 10));
|
|
541
|
+
}
|
|
542
|
+
return [...indices].sort((a, b) => a - b);
|
|
543
|
+
}
|
|
544
|
+
|
|
545
|
+
/**
|
|
546
|
+
* Try to infer what column a $N param corresponds to from the SQL text.
|
|
547
|
+
* Handles WHERE col = $1, col ILIKE $1, and INSERT positional mapping.
|
|
548
|
+
*/
|
|
549
|
+
function inferParamColumns(sql: string): Map<number, string> {
|
|
550
|
+
const result = new Map<number, string>();
|
|
551
|
+
|
|
552
|
+
// INSERT: columns list maps positionally to VALUES params
|
|
553
|
+
const insertMatch = sql.match(
|
|
554
|
+
/INSERT\s+INTO\s+\w+\s*\(\s*([\w\s,]+)\s*\)\s*VALUES\s*\(\s*([\$\d\s,]+)\s*\)/i,
|
|
555
|
+
);
|
|
556
|
+
if (insertMatch) {
|
|
557
|
+
const cols = insertMatch[1].split(",").map((s) => s.trim().toLowerCase());
|
|
558
|
+
const params = [...insertMatch[2].matchAll(/\$(\d+)/g)].map((m) =>
|
|
559
|
+
parseInt(m[1], 10),
|
|
560
|
+
);
|
|
561
|
+
for (let i = 0; i < Math.min(cols.length, params.length); i++) {
|
|
562
|
+
result.set(params[i], cols[i]);
|
|
563
|
+
}
|
|
564
|
+
return result;
|
|
565
|
+
}
|
|
566
|
+
|
|
567
|
+
// SQL keywords that can appear before operators but aren't column names
|
|
568
|
+
const SQL_KEYWORDS = new Set([
|
|
569
|
+
"not", "and", "or", "where", "set", "when", "then", "else", "case",
|
|
570
|
+
"between", "exists", "any", "all", "some", "having",
|
|
571
|
+
]);
|
|
572
|
+
|
|
573
|
+
// WHERE/SET: col op $N — also try to extract column from FUNC(col) op $N
|
|
574
|
+
const wherePatterns =
|
|
575
|
+
/(?:(\w+)\s*\(\s*(\w+)\s*\)|(\w+))\s*(?:=|!=|<>|<=?|>=?|(?:NOT\s+)?(?:I?LIKE|IN|IS))\s*\$(\d+)/gi;
|
|
576
|
+
let m: RegExpExecArray | null;
|
|
577
|
+
while ((m = wherePatterns.exec(sql)) !== null) {
|
|
578
|
+
const paramIdx = parseInt(m[4], 10);
|
|
579
|
+
if (m[1] && m[2]) {
|
|
580
|
+
// FUNC(col) pattern — use the inner column name
|
|
581
|
+
result.set(paramIdx, m[2].toLowerCase());
|
|
582
|
+
} else if (m[3]) {
|
|
583
|
+
const word = m[3].toLowerCase();
|
|
584
|
+
if (!SQL_KEYWORDS.has(word)) {
|
|
585
|
+
result.set(paramIdx, word);
|
|
586
|
+
}
|
|
587
|
+
}
|
|
588
|
+
}
|
|
589
|
+
|
|
590
|
+
return result;
|
|
591
|
+
}
|
|
592
|
+
|
|
593
|
+
function findFromTable(
|
|
594
|
+
sql: string,
|
|
595
|
+
tables: TableDef[],
|
|
596
|
+
): TableDef | undefined {
|
|
597
|
+
const fromMatch = sql.match(/(?:FROM|INTO|UPDATE)\s+(\w+)/i);
|
|
598
|
+
if (!fromMatch) return undefined;
|
|
599
|
+
const tableName = fromMatch[1].toLowerCase();
|
|
600
|
+
return tables.find((t) => t.name === tableName);
|
|
601
|
+
}
|
|
602
|
+
|
|
603
|
+
function resolveReturningColumns(
|
|
604
|
+
sql: string,
|
|
605
|
+
table: TableDef | undefined,
|
|
606
|
+
): ColumnDef[] | null {
|
|
607
|
+
const returningMatch = sql.match(/\bRETURNING\s+([\s\S]+?)(?:;?\s*)$/i);
|
|
608
|
+
if (!returningMatch) return null;
|
|
609
|
+
|
|
610
|
+
const colsPart = returningMatch[1].trim();
|
|
611
|
+
if (colsPart === "*") {
|
|
612
|
+
return table ? [...table.columns] : [];
|
|
613
|
+
}
|
|
614
|
+
if (!table) return [];
|
|
615
|
+
|
|
616
|
+
return colsPart.split(",").map((s) => {
|
|
617
|
+
const name = s.trim().toLowerCase();
|
|
618
|
+
const tableCol = table.columns.find((c) => c.name === name);
|
|
619
|
+
return tableCol
|
|
620
|
+
? { ...tableCol }
|
|
621
|
+
: { name, type: { raw: "unknown", normalized: "unknown", category: "unknown" }, nullable: true, hasDefault: false };
|
|
622
|
+
});
|
|
623
|
+
}
|
|
624
|
+
|
|
625
|
+
function resolveReturnColumns(
|
|
626
|
+
sql: string,
|
|
627
|
+
table: TableDef | undefined,
|
|
628
|
+
): ColumnDef[] {
|
|
629
|
+
// Check RETURNING clause first (INSERT/UPDATE/DELETE ... RETURNING)
|
|
630
|
+
const returning = resolveReturningColumns(sql, table);
|
|
631
|
+
if (returning) return returning;
|
|
632
|
+
|
|
633
|
+
if (!/^\s*SELECT\b/i.test(sql)) return [];
|
|
634
|
+
|
|
635
|
+
const selectMatch = sql.match(/SELECT\s+([\s\S]+?)\s+FROM\b/i);
|
|
636
|
+
if (!selectMatch) return [];
|
|
637
|
+
|
|
638
|
+
const colsPart = selectMatch[1].trim();
|
|
639
|
+
|
|
640
|
+
if (colsPart === "*") {
|
|
641
|
+
return table ? [...table.columns] : [];
|
|
642
|
+
}
|
|
643
|
+
|
|
644
|
+
if (!table) return [];
|
|
645
|
+
const colNames = colsPart.split(",").map((s) => s.trim().toLowerCase());
|
|
646
|
+
const resolved: ColumnDef[] = [];
|
|
647
|
+
|
|
648
|
+
for (const colExpr of colNames) {
|
|
649
|
+
const aliasMatch = colExpr.match(/^(\w+)\s+as\s+(\w+)$/i);
|
|
650
|
+
const actualName = aliasMatch ? aliasMatch[1] : colExpr;
|
|
651
|
+
const tableCol = table.columns.find((c) => c.name === actualName);
|
|
652
|
+
|
|
653
|
+
if (tableCol) {
|
|
654
|
+
resolved.push(
|
|
655
|
+
aliasMatch
|
|
656
|
+
? { ...tableCol, alias: aliasMatch[2].toLowerCase() }
|
|
657
|
+
: { ...tableCol },
|
|
658
|
+
);
|
|
659
|
+
} else {
|
|
660
|
+
resolved.push({
|
|
661
|
+
name: actualName,
|
|
662
|
+
type: { raw: "unknown", normalized: "unknown", category: "unknown" },
|
|
663
|
+
nullable: true,
|
|
664
|
+
hasDefault: false,
|
|
665
|
+
});
|
|
666
|
+
}
|
|
667
|
+
}
|
|
668
|
+
return resolved;
|
|
669
|
+
}
|
|
670
|
+
|
|
671
|
+
function buildParams(
|
|
672
|
+
sql: string,
|
|
673
|
+
comments: string,
|
|
674
|
+
table: TableDef | undefined,
|
|
675
|
+
): ParamDef[] {
|
|
676
|
+
const paramIndices = extractParamIndices(sql);
|
|
677
|
+
if (paramIndices.length === 0) return [];
|
|
678
|
+
|
|
679
|
+
const overrides = extractParamOverrides(comments);
|
|
680
|
+
const inferredCols = inferParamColumns(sql);
|
|
681
|
+
|
|
682
|
+
const rawParams: RawParam[] = paramIndices.map((idx) => ({
|
|
683
|
+
index: idx,
|
|
684
|
+
column: inferredCols.get(idx) ?? null,
|
|
685
|
+
override: overrides.get(idx),
|
|
686
|
+
}));
|
|
687
|
+
|
|
688
|
+
const names = resolveParamNames(rawParams);
|
|
689
|
+
|
|
690
|
+
return paramIndices.map((idx, i) => {
|
|
691
|
+
const colName = inferredCols.get(idx);
|
|
692
|
+
let type: SqlType = {
|
|
693
|
+
raw: "unknown",
|
|
694
|
+
normalized: "unknown",
|
|
695
|
+
category: "unknown",
|
|
696
|
+
};
|
|
697
|
+
|
|
698
|
+
if (table && colName) {
|
|
699
|
+
const tableCol = table.columns.find((c) => c.name === colName);
|
|
700
|
+
if (tableCol) {
|
|
701
|
+
type = tableCol.type;
|
|
702
|
+
}
|
|
703
|
+
}
|
|
704
|
+
|
|
705
|
+
return { index: idx, name: names[i], type };
|
|
706
|
+
});
|
|
707
|
+
}
|
|
708
|
+
|
|
709
|
+
// ---------------------------------------------------------------------------
|
|
710
|
+
// Public API
|
|
711
|
+
// ---------------------------------------------------------------------------
|
|
712
|
+
|
|
713
|
+
export function createPostgresParser(): DatabaseParser {
|
|
714
|
+
return {
|
|
715
|
+
dialect: "postgresql",
|
|
716
|
+
|
|
717
|
+
parseEnums(sql: string): EnumDef[] {
|
|
718
|
+
return parseEnumDefs(sql);
|
|
719
|
+
},
|
|
720
|
+
|
|
721
|
+
parseSchema(sql: string): TableDef[] {
|
|
722
|
+
const enums = parseEnumDefs(sql);
|
|
723
|
+
const enumNames = new Set(enums.map((e) => e.name));
|
|
724
|
+
return parseSchemaDefs(sql, enumNames);
|
|
725
|
+
},
|
|
726
|
+
|
|
727
|
+
parseQueries(sql: string, tables: TableDef[]): QueryDef[] {
|
|
728
|
+
const blocks = splitQueryBlocks(sql);
|
|
729
|
+
return blocks.map((block) => {
|
|
730
|
+
const table = findFromTable(block.sql, tables);
|
|
731
|
+
const params = buildParams(block.sql, block.comments, table);
|
|
732
|
+
const returns = resolveReturnColumns(block.sql, table);
|
|
733
|
+
|
|
734
|
+
return {
|
|
735
|
+
name: block.name,
|
|
736
|
+
command: block.command,
|
|
737
|
+
sql: block.sql.replace(/;\s*$/, ""),
|
|
738
|
+
params,
|
|
739
|
+
returns,
|
|
740
|
+
sourceFile: "",
|
|
741
|
+
};
|
|
742
|
+
});
|
|
743
|
+
},
|
|
744
|
+
};
|
|
745
|
+
}
|