sqlcx-orm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-3qq0zjsm.js +12 -0
- package/dist/chunk-49wq4032.js +11 -0
- package/dist/cli.js +1226 -0
- package/dist/config/index.js +9 -0
- package/dist/generator/typescript/driver/bun-sql.js +151 -0
- package/dist/generator/typescript/index.js +110 -0
- package/dist/generator/typescript/schema/typebox.js +127 -0
- package/dist/index.js +7 -0
- package/dist/parser/postgres.js +615 -0
- package/package.json +56 -0
- package/src/cache/index.ts +46 -0
- package/src/cli/index.ts +306 -0
- package/src/config/index.ts +19 -0
- package/src/generator/interface.ts +36 -0
- package/src/generator/typescript/driver/bun-sql.ts +157 -0
- package/src/generator/typescript/index.ts +144 -0
- package/src/generator/typescript/schema/typebox.ts +143 -0
- package/src/index.ts +23 -0
- package/src/ir/index.ts +72 -0
- package/src/parser/interface.ts +8 -0
- package/src/parser/param-naming.ts +49 -0
- package/src/parser/postgres.ts +745 -0
- package/src/utils/index.ts +13 -0
|
@@ -0,0 +1,615 @@
|
|
|
1
|
+
// @bun
|
|
2
|
+
// src/parser/param-naming.ts
/**
 * Assigns a unique, readable name to every query parameter.
 *
 * Naming rules, in priority order:
 *   1. an explicit `@param` override wins;
 *   2. a parameter with no inferred column becomes `param_<index>`;
 *   3. a column inferred more than once gets an ordinal suffix (`id_1`, `id_2`);
 *   4. otherwise the bare column name is used.
 * Any残 remaining collision (e.g. an override equal to a column name) is
 * resolved by appending `_1`, `_2`, ... until the name is unused.
 */
function resolveParamNames(params) {
  // How often each inferred column appears among non-overridden params.
  const columnUse = new Map();
  for (const param of params) {
    if (!param.override && param.column) {
      columnUse.set(param.column, (columnUse.get(param.column) ?? 0) + 1);
    }
  }
  const ordinals = new Map();
  const taken = new Set();
  const names = new Array(params.length);
  params.forEach((param, i) => {
    let candidate;
    if (param.override) {
      candidate = param.override;
    } else if (!param.column) {
      candidate = `param_${param.index}`;
    } else if ((columnUse.get(param.column) ?? 0) > 1) {
      const nth = (ordinals.get(param.column) ?? 0) + 1;
      ordinals.set(param.column, nth);
      candidate = `${param.column}_${nth}`;
    } else {
      candidate = param.column;
    }
    // Final uniqueness pass against everything assigned so far.
    let unique = candidate;
    for (let bump = 1; taken.has(unique); bump++) {
      unique = `${candidate}_${bump}`;
    }
    taken.add(unique);
    names[i] = unique;
  });
  return names;
}
|
|
37
|
+
|
|
38
|
+
// src/parser/postgres.ts
/**
 * Maps a normalized (lowercase) Postgres type name to its sqlcx category.
 * Built from a category → type-name table; insertion order matches the
 * original grouped listing.
 */
var TYPE_CATEGORY_MAP = new Map();
{
  const byCategory = {
    string: ["text", "varchar", "char", "character varying", "character", "name"],
    number: [
      "integer", "int", "int2", "int4", "int8", "smallint", "bigint",
      "serial", "bigserial", "real", "double precision", "numeric",
      "decimal", "float", "float4", "float8"
    ],
    boolean: ["boolean", "bool"],
    date: [
      "timestamp", "timestamptz", "date", "time", "timetz",
      "timestamp without time zone", "timestamp with time zone"
    ],
    json: ["json", "jsonb"],
    uuid: ["uuid"],
    binary: ["bytea"]
  };
  for (const [category, typeNames] of Object.entries(byCategory)) {
    for (const typeName of typeNames) {
      TYPE_CATEGORY_MAP.set(typeName, category);
    }
  }
}
// Types whose value is produced by an implicit sequence (hence a default).
var SERIAL_TYPES = new Set(["serial", "bigserial"]);
|
77
|
+
/**
 * Resolves a raw SQL type string into { raw, normalized, category, ... }.
 * Array types (`text[]`) recurse on the element type and inherit its
 * category; known scalars come from TYPE_CATEGORY_MAP; names present in
 * `enumNames` become category "enum"; anything else is "unknown".
 */
function resolveType(raw, enumNames) {
  const trimmed = raw.trim();
  if (trimmed.endsWith("[]")) {
    const elementType = resolveType(trimmed.slice(0, -2), enumNames);
    return {
      raw: trimmed,
      normalized: trimmed.toLowerCase(),
      category: elementType.category,
      elementType
    };
  }
  const normalized = trimmed.toLowerCase();
  const builtin = TYPE_CATEGORY_MAP.get(normalized);
  if (builtin) {
    return { raw: trimmed, normalized, category: builtin };
  }
  return enumNames.has(normalized)
    ? { raw: trimmed, normalized, category: "enum", enumName: normalized }
    : { raw: trimmed, normalized, category: "unknown" };
}
|
|
99
|
+
// Matches `CREATE TYPE name AS ENUM ('a', 'b', ...)`. Stored as source so
// each caller can build a fresh stateful RegExp.
var ENUM_RE_SOURCE = /CREATE\s+TYPE\s+(\w+)\s+AS\s+ENUM\s*\(\s*((?:'[^']*'(?:\s*,\s*'[^']*')*)?)\s*\)/i.source;
/**
 * Extracts every enum type definition from a schema file.
 * Returns [{ name, values }] with names lowercased.
 */
function parseEnumDefs(sql) {
  const pattern = new RegExp(ENUM_RE_SOURCE, "gi");
  const defs = [];
  for (let match = pattern.exec(sql); match !== null; match = pattern.exec(sql)) {
    defs.push({
      name: match[1].toLowerCase(),
      values: [...match[2].matchAll(/'([^']*)'/g)].map((v) => v[1])
    });
  }
  return defs;
}
|
|
112
|
+
// Matches a whole CREATE TABLE statement; group 1 = table name, group 2 = body.
var CREATE_TABLE_RE_SOURCE = /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(\w+)\s*\(([\s\S]*?)\)\s*;/i.source;
/**
 * Splits a CREATE TABLE body into its top-level definitions.
 *
 * Commas nested inside parentheses — e.g. `numeric(10,2)` — do not split.
 * Fix: commas inside single-quoted string literals (e.g. `DEFAULT 'a,b'`)
 * previously split the definition in two; a string state flag now keeps
 * them intact. Doubled-quote escapes (`'a''b'`) toggle out and straight
 * back in, so they are also handled.
 */
function splitColumnDefs(body) {
  const parts = [];
  let depth = 0;          // parenthesis nesting level
  let inString = false;   // inside a '...' literal
  let current = "";
  for (const ch of body) {
    if (inString) {
      current += ch;
      if (ch === "'") inString = false;
      continue;
    }
    if (ch === "'") {
      inString = true;
      current += ch;
    } else if (ch === "(") {
      depth++;
      current += ch;
    } else if (ch === ")") {
      depth--;
      current += ch;
    } else if (ch === "," && depth === 0) {
      parts.push(current.trim());
      current = "";
    } else {
      current += ch;
    }
  }
  if (current.trim())
    parts.push(current.trim());
  return parts;
}
|
|
135
|
+
// Multi-word type names must be tried before the single-word fallback,
// otherwise e.g. "double precision" would be read as type "double".
var MULTI_WORD_TYPES = [
  "character varying",
  "double precision",
  "timestamp without time zone",
  "timestamp with time zone"
];
// Parses one definition line from a CREATE TABLE body.
// Returns null for blank lines and table-level constraints; otherwise
// { col: { name, type, nullable, hasDefault }, isPK, isUnique }.
function parseColumnLine(line, enumNames) {
  line = line.trim();
  if (!line)
    return null;
  // Table-level constraints are handled by the caller, not here.
  if (/^(PRIMARY\s+KEY|CONSTRAINT|UNIQUE|CHECK|FOREIGN\s+KEY)/i.test(line)) {
    return null;
  }
  const nameMatch = line.match(/^(\w+)\s+/);
  if (!nameMatch)
    return null;
  const colName = nameMatch[1].toLowerCase();
  const afterName = line.slice(nameMatch[0].length);
  let rawType = null;
  // Case-insensitive prefix match; rawType ends up lowercase for these,
  // and the slice below works because the lengths are equal either way.
  for (const mwt of MULTI_WORD_TYPES) {
    if (afterName.toLowerCase().startsWith(mwt)) {
      rawType = mwt;
      break;
    }
  }
  if (!rawType) {
    // Single word, optionally with an array suffix, e.g. "int" or "text[]".
    // Type modifiers like "(255)" remain in restAfterType and are ignored.
    const typeMatch = afterName.match(/^(\w+(?:\[\])?)/);
    rawType = typeMatch ? typeMatch[1] : "unknown";
  }
  const restAfterType = afterName.slice(rawType.length).trim();
  const isNotNull = /\bNOT\s+NULL\b/i.test(restAfterType);
  const hasDefaultKeyword = /\bDEFAULT\b/i.test(restAfterType);
  // serial/bigserial imply a sequence-backed default even without DEFAULT.
  const isSerial = SERIAL_TYPES.has(rawType.toLowerCase());
  const isPK = /\bPRIMARY\s+KEY\b/i.test(restAfterType);
  const isUnique = /\bUNIQUE\b/i.test(restAfterType);
  const type = resolveType(rawType, enumNames);
  return {
    col: {
      name: colName,
      type,
      nullable: !isNotNull,
      hasDefault: hasDefaultKeyword || isSerial
    },
    isPK,
    isUnique
  };
}
|
|
182
|
+
/**
 * Reads an inline `-- @enum ("a", "b")` annotation from a comment.
 * Returns the quoted values, or undefined when the annotation is missing
 * or lists no values.
 */
function parseEnumAnnotation(comment) {
  const match = comment.match(/--\s*@enum\s*\(\s*(.*?)\s*\)/);
  if (!match)
    return;
  const values = [...match[1].matchAll(/"([^"]*?)"/g)].map((m) => m[1]);
  return values.length > 0 ? values : undefined;
}
|
|
195
|
+
|
|
196
|
+
// Recursive-descent parser for the tiny type language used by `-- @json(...)`
// annotations: primitives (string | number | boolean), object literals
// `{ field: type, ... }`, array suffix `[]`, and nullable suffix `?`.
class JsonShapeParser {
  input;
  pos = 0;
  constructor(input) {
    this.input = input;
  }
  // Parses the whole input; throws if anything trails the first type.
  parse() {
    const shape = this.parseType();
    this.skipWs();
    if (this.pos < this.input.length) {
      throw new Error(`@json parse error: unexpected trailing content at position ${this.pos}: "${this.input.slice(this.pos, this.pos + 10)}"`);
    }
    return shape;
  }
  // type := (object | primitive) '[]'* '?'?
  parseType() {
    this.skipWs();
    let shape;
    if (this.peek() === "{") {
      shape = this.parseObject();
    } else {
      shape = this.parsePrimitive();
    }
    this.skipWs();
    // Each "[]" wraps the shape in one more array level.
    while (this.lookAhead("[]")) {
      this.pos += 2;
      this.skipWs();
      shape = { kind: "array", element: shape };
    }
    // A trailing "?" marks the whole (possibly array) shape nullable.
    if (this.peek() === "?") {
      this.pos++;
      shape = { kind: "nullable", inner: shape };
    }
    return shape;
  }
  parsePrimitive() {
    this.skipWs();
    if (this.matchWord("string"))
      return { kind: "string" };
    if (this.matchWord("number"))
      return { kind: "number" };
    if (this.matchWord("boolean"))
      return { kind: "boolean" };
    throw new Error(`@json parse error: unexpected token at position ${this.pos}: "${this.input.slice(this.pos, this.pos + 10)}"`);
  }
  // object := '{' (field (',' field)*)? ','? '}'  — trailing comma allowed.
  parseObject() {
    this.consume("{");
    this.skipWs();
    const fields = {};
    if (this.peek() !== "}") {
      this.parseField(fields);
      while (this.peek() === ",") {
        this.pos++;
        this.skipWs();
        if (this.peek() === "}")
          break;
        this.parseField(fields);
      }
    }
    this.consume("}");
    return { kind: "object", fields };
  }
  // field := identifier ':' type — stored into `fields` keyed by name.
  parseField(fields) {
    this.skipWs();
    const name = this.readIdentifier();
    this.skipWs();
    this.consume(":");
    this.skipWs();
    fields[name] = this.parseType();
    this.skipWs();
  }
  readIdentifier() {
    this.skipWs();
    const start = this.pos;
    while (this.pos < this.input.length && /[\w]/.test(this.input[this.pos])) {
      this.pos++;
    }
    if (this.pos === start) {
      throw new Error(`@json parse error: expected identifier at position ${this.pos}`);
    }
    return this.input.slice(start, this.pos);
  }
  skipWs() {
    while (this.pos < this.input.length && /\s/.test(this.input[this.pos])) {
      this.pos++;
    }
  }
  // NOTE: peek() advances past whitespace (mutates pos) before looking.
  peek() {
    this.skipWs();
    return this.input[this.pos];
  }
  // Raw prefix check at the current position; does NOT skip whitespace.
  lookAhead(s) {
    return this.input.startsWith(s, this.pos);
  }
  // Consumes `word` only when followed by a non-word char, so "stringy"
  // does not match "string".
  matchWord(word) {
    if (this.input.startsWith(word, this.pos)) {
      const afterPos = this.pos + word.length;
      if (afterPos >= this.input.length || !/\w/.test(this.input[afterPos])) {
        this.pos = afterPos;
        return true;
      }
    }
    return false;
  }
  consume(ch) {
    this.skipWs();
    if (this.input[this.pos] !== ch) {
      throw new Error(`@json parse error: expected '${ch}' at position ${this.pos}, got '${this.input[this.pos]}'`);
    }
    this.pos++;
  }
}
|
|
307
|
+
/**
 * Reads a `-- @json ( <shape> )` annotation from a comment and returns the
 * parsed shape. Missing or malformed annotations yield undefined — a bad
 * shape never aborts schema parsing.
 */
function parseJsonAnnotation(comment) {
  const match = comment.match(/--\s*@json\s*\(\s*([\s\S]+)\s*\)\s*$/);
  if (!match)
    return;
  try {
    return new JsonShapeParser(match[1].trim()).parse();
  } catch {
    return;
  }
}
|
|
319
|
+
// Parses every CREATE TABLE statement in `sql` into table descriptors:
// { name, columns, primaryKey, uniqueConstraints }.
// `enumNames` holds the lowercase names of user-defined enum types and is
// forwarded to column type resolution.
function parseSchemaDefs(sql, enumNames) {
  const re = new RegExp(CREATE_TABLE_RE_SOURCE, "gi");
  const tables = [];
  let m;
  while ((m = re.exec(sql)) !== null) {
    const tableName = m[1].toLowerCase();
    const body = m[2];
    const columns = [];
    const primaryKey = [];
    const uniqueConstraints = [];
    const rawLines = body.split(`
`);
    // Comment lines are stripped from the body but remembered so that
    // @enum/@json annotations attach to the definition that follows them.
    // commentMap: column-definition ordinal -> accumulated comment text.
    let pendingComment = "";
    let nonCommentBuffer = "";
    const commentMap = new Map;
    for (const rawLine of rawLines) {
      const trimmedLine = rawLine.trim();
      if (trimmedLine.startsWith("--")) {
        pendingComment += (pendingComment ? `
` : "") + trimmedLine;
      } else {
        // Count completed defs before/after appending this line; when the
        // count grows, the pending comment belongs to the def that just
        // completed (its ordinal equals beforeDefs).
        const beforeDefs = splitColumnDefs(nonCommentBuffer).filter((d) => d.trim().length > 0).length;
        nonCommentBuffer += (nonCommentBuffer ? `
` : "") + rawLine;
        const afterDefs = splitColumnDefs(nonCommentBuffer).filter((d) => d.trim().length > 0).length;
        if (afterDefs > beforeDefs && pendingComment) {
          commentMap.set(beforeDefs, pendingComment);
          pendingComment = "";
        } else if (afterDefs === beforeDefs) {} else {
          // Count changed without a comment to attach (or shrank on odd
          // input): discard the pending comment.
          pendingComment = "";
        }
      }
    }
    const lines = splitColumnDefs(nonCommentBuffer);
    for (let i = 0;i < lines.length; i++) {
      const trimmed = lines[i].trim();
      // Table-level `PRIMARY KEY (a, b)` constraint.
      const pkMatch = trimmed.match(/^PRIMARY\s+KEY\s*\(\s*([\w\s,]+)\s*\)/i);
      if (pkMatch) {
        primaryKey.push(...pkMatch[1].split(",").map((s) => s.trim().toLowerCase()));
        continue;
      }
      const result = parseColumnLine(trimmed, enumNames);
      if (!result)
        continue;
      // Apply @enum / @json annotations from the comment preceding this def.
      const comment = commentMap.get(i);
      if (comment) {
        const enumValues = parseEnumAnnotation(comment);
        if (enumValues) {
          result.col.type = {
            ...result.col.type,
            category: "enum",
            enumValues
          };
        }
        const jsonShape = parseJsonAnnotation(comment);
        if (jsonShape) {
          result.col.type = {
            ...result.col.type,
            jsonShape
          };
        }
      }
      columns.push(result.col);
      if (result.isPK) {
        primaryKey.push(result.col.name);
      }
      if (result.isUnique) {
        uniqueConstraints.push([result.col.name]);
      }
    }
    // Primary-key columns are implicitly NOT NULL; serial PKs also gain an
    // implicit default.
    for (const col of columns) {
      if (primaryKey.includes(col.name)) {
        col.nullable = false;
        col.hasDefault = col.hasDefault || SERIAL_TYPES.has(col.type.normalized);
      }
    }
    tables.push({ name: tableName, columns, primaryKey, uniqueConstraints });
  }
  return tables;
}
|
|
399
|
+
// `-- name: GetUser :one` — group 1 = query name, group 2 = command kind.
// "execresult" precedes "exec" so the longer alternative wins.
var QUERY_ANNOTATION_RE = /--\s*name:\s*(\w+)\s+:(one|many|execresult|exec)/;
// `-- @param $2 userId` — group 1 = placeholder index, group 2 = name.
var PARAM_OVERRIDE_RE = /--\s*@param\s+\$(\d+)\s+(\w+)/g;
// Bare `$N` placeholder.
var DOLLAR_PARAM_RE = /\$(\d+)/g;
/**
 * Splits a queries file into annotated blocks.
 * Each `-- name:` annotation starts a new block; subsequent comment lines
 * accumulate into the block's comments, and non-comment lines are joined
 * (single-spaced) into its SQL. Comments preceding the first annotation are
 * carried into that block.
 */
function splitQueryBlocks(sql) {
  const blocks = [];
  let active = null;
  let leadingComments = "";
  for (const rawLine of sql.split("\n")) {
    const line = rawLine.trim();
    const header = line.match(QUERY_ANNOTATION_RE);
    if (header) {
      if (active)
        blocks.push(active);
      active = {
        name: header[1],
        command: header[2],
        sql: "",
        comments: leadingComments + line + "\n"
      };
      leadingComments = "";
    } else if (line.startsWith("--")) {
      if (active) {
        active.comments += line + "\n";
      } else {
        leadingComments += line + "\n";
      }
    } else if (active && line) {
      active.sql += (active.sql ? " " : "") + line;
    }
  }
  if (active)
    blocks.push(active);
  return blocks;
}
|
|
438
|
+
/**
 * Collects `-- @param $N name` overrides from a block's comments.
 * Returns Map<placeholder index, override name>. A fresh RegExp is built
 * from the shared source so lastIndex state is never shared.
 */
function extractParamOverrides(comments) {
  const overrides = new Map();
  const pattern = new RegExp(PARAM_OVERRIDE_RE.source, "g");
  for (let m = pattern.exec(comments); m !== null; m = pattern.exec(comments)) {
    overrides.set(parseInt(m[1], 10), m[2]);
  }
  return overrides;
}
|
|
447
|
+
/**
 * Returns the distinct `$N` placeholder indices used in `sql`, ascending.
 */
function extractParamIndices(sql) {
  const seen = new Set();
  const pattern = new RegExp(DOLLAR_PARAM_RE.source, "g");
  for (let m = pattern.exec(sql); m !== null; m = pattern.exec(sql)) {
    seen.add(parseInt(m[1], 10));
  }
  return [...seen].sort((a, b) => a - b);
}
|
|
456
|
+
/**
 * Best-effort inference of which column each `$N` placeholder binds to.
 * INSERT statements pair the column list with the VALUES list positionally;
 * otherwise comparison patterns (`col = $n`, `lower(col) LIKE $n`, ...) are
 * scanned, skipping bare SQL keywords. Returns Map<index, column name>.
 */
function inferParamColumns(sql) {
  const inferred = new Map();
  const insertMatch = sql.match(/INSERT\s+INTO\s+\w+\s*\(\s*([\w\s,]+)\s*\)\s*VALUES\s*\(\s*([\$\d\s,]+)\s*\)/i);
  if (insertMatch) {
    const cols = insertMatch[1].split(",").map((s) => s.trim().toLowerCase());
    const params = [...insertMatch[2].matchAll(/\$(\d+)/g)].map((m2) => parseInt(m2[1], 10));
    const pairs = Math.min(cols.length, params.length);
    for (let i = 0; i < pairs; i++) {
      inferred.set(params[i], cols[i]);
    }
    return inferred;
  }
  // Words that can appear to the left of an operator but are not columns.
  const SQL_KEYWORDS = new Set([
    "not", "and", "or", "where", "set", "when", "then", "else", "case",
    "between", "exists", "any", "all", "some", "having"
  ]);
  const wherePatterns = /(?:(\w+)\s*\(\s*(\w+)\s*\)|(\w+))\s*(?:=|!=|<>|<=?|>=?|(?:NOT\s+)?(?:I?LIKE|IN|IS))\s*\$(\d+)/gi;
  for (let m = wherePatterns.exec(sql); m !== null; m = wherePatterns.exec(sql)) {
    const paramIdx = parseInt(m[4], 10);
    if (m[1] && m[2]) {
      // `fn(col) = $n` — the function argument is the column.
      inferred.set(paramIdx, m[2].toLowerCase());
    } else if (m[3]) {
      const word = m[3].toLowerCase();
      if (!SQL_KEYWORDS.has(word)) {
        inferred.set(paramIdx, word);
      }
    }
  }
  return inferred;
}
|
|
499
|
+
/**
 * Finds the table the query primarily targets: the first identifier after
 * FROM, INTO, or UPDATE. Returns the matching table descriptor, or
 * undefined when no clause matches or the table is unknown.
 */
function findFromTable(sql, tables) {
  const m = sql.match(/(?:FROM|INTO|UPDATE)\s+(\w+)/i);
  if (!m)
    return;
  const wanted = m[1].toLowerCase();
  return tables.find((t) => t.name === wanted);
}
|
|
506
|
+
/**
 * Resolves the column list of a RETURNING clause against the target table.
 * Returns null when the statement has no RETURNING clause, all table
 * columns for `RETURNING *`, and per-column resolutions otherwise. Columns
 * not found on the table fall back to an unknown-typed, nullable column.
 *
 * Consistency fix: `col AS alias` entries are now honored (matching the
 * SELECT-list handling in resolveReturnColumns) instead of failing the
 * lookup because the whole expression was used as a column name.
 */
function resolveReturningColumns(sql, table) {
  const returningMatch = sql.match(/\bRETURNING\s+([\s\S]+?)(?:;?\s*)$/i);
  if (!returningMatch)
    return null;
  const colsPart = returningMatch[1].trim();
  if (colsPart === "*") {
    return table ? [...table.columns] : [];
  }
  if (!table)
    return [];
  return colsPart.split(",").map((s) => {
    const expr = s.trim().toLowerCase();
    // Support `name AS alias`, like the SELECT-list resolver.
    const aliasMatch = expr.match(/^(\w+)\s+as\s+(\w+)$/i);
    const name = aliasMatch ? aliasMatch[1] : expr;
    const tableCol = table.columns.find((c) => c.name === name);
    if (!tableCol) {
      return { name, type: { raw: "unknown", normalized: "unknown", category: "unknown" }, nullable: true, hasDefault: false };
    }
    return aliasMatch ? { ...tableCol, alias: aliasMatch[2] } : { ...tableCol };
  });
}
|
|
522
|
+
/**
 * Determines the columns a query yields. A RETURNING clause takes
 * precedence; otherwise the SELECT list is resolved against the table,
 * honoring `col AS alias`. Unknown columns become unknown-typed, nullable
 * placeholders. Non-SELECT statements without RETURNING yield [].
 */
function resolveReturnColumns(sql, table) {
  const returning = resolveReturningColumns(sql, table);
  if (returning)
    return returning;
  if (!/^\s*SELECT\b/i.test(sql))
    return [];
  const selectMatch = sql.match(/SELECT\s+([\s\S]+?)\s+FROM\b/i);
  if (!selectMatch)
    return [];
  const colsPart = selectMatch[1].trim();
  if (colsPart === "*") {
    return table ? [...table.columns] : [];
  }
  if (!table)
    return [];
  return colsPart
    .split(",")
    .map((s) => s.trim().toLowerCase())
    .map((colExpr) => {
      const aliasMatch = colExpr.match(/^(\w+)\s+as\s+(\w+)$/i);
      const actualName = aliasMatch ? aliasMatch[1] : colExpr;
      const tableCol = table.columns.find((c) => c.name === actualName);
      if (!tableCol) {
        return {
          name: actualName,
          type: { raw: "unknown", normalized: "unknown", category: "unknown" },
          nullable: true,
          hasDefault: false
        };
      }
      return aliasMatch ? { ...tableCol, alias: aliasMatch[2].toLowerCase() } : { ...tableCol };
    });
}
|
|
556
|
+
/**
 * Builds the parameter descriptors for a query block: each `$N` placeholder
 * gets a resolved name (overrides > inferred columns > positional fallback)
 * and, when the inferred column exists on the target table, its type.
 */
function buildParams(sql, comments, table) {
  const paramIndices = extractParamIndices(sql);
  if (paramIndices.length === 0)
    return [];
  const overrides = extractParamOverrides(comments);
  const inferredCols = inferParamColumns(sql);
  const names = resolveParamNames(paramIndices.map((idx) => ({
    index: idx,
    column: inferredCols.get(idx) ?? null,
    override: overrides.get(idx)
  })));
  // Fresh object per param, matching the original's per-iteration literal.
  const unknownType = () => ({ raw: "unknown", normalized: "unknown", category: "unknown" });
  return paramIndices.map((idx, i) => {
    const colName = inferredCols.get(idx);
    const tableCol = table && colName ? table.columns.find((c) => c.name === colName) : undefined;
    return { index: idx, name: names[i], type: tableCol ? tableCol.type : unknownType() };
  });
}
|
|
584
|
+
/**
 * Builds the PostgreSQL parser facade: enum parsing, schema parsing (with
 * enum-aware type resolution), and annotated-query parsing.
 */
function createPostgresParser() {
  return {
    dialect: "postgresql",
    parseEnums: (sql) => parseEnumDefs(sql),
    parseSchema(sql) {
      const enumNames = new Set(parseEnumDefs(sql).map((e) => e.name));
      return parseSchemaDefs(sql, enumNames);
    },
    parseQueries(sql, tables) {
      return splitQueryBlocks(sql).map((block) => {
        const table = findFromTable(block.sql, tables);
        return {
          name: block.name,
          command: block.command,
          // Trailing semicolon is stripped so drivers can embed the SQL.
          sql: block.sql.replace(/;\s*$/, ""),
          params: buildParams(block.sql, block.comments, table),
          returns: resolveReturnColumns(block.sql, table),
          sourceFile: ""
        };
      });
    }
  };
}
export {
  createPostgresParser
};
|
package/package.json
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "sqlcx-orm",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "SQL-first cross-language type-safe code generator",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"sqlcx": "dist/cli.js"
|
|
8
|
+
},
|
|
9
|
+
"main": "dist/index.js",
|
|
10
|
+
"exports": {
|
|
11
|
+
".": {
|
|
12
|
+
"types": "./src/index.ts",
|
|
13
|
+
"import": "./dist/index.js"
|
|
14
|
+
},
|
|
15
|
+
"./config": {
|
|
16
|
+
"types": "./src/config/index.ts",
|
|
17
|
+
"import": "./dist/config/index.js"
|
|
18
|
+
},
|
|
19
|
+
"./parser/postgres": {
|
|
20
|
+
"types": "./src/parser/postgres.ts",
|
|
21
|
+
"import": "./dist/parser/postgres.js"
|
|
22
|
+
},
|
|
23
|
+
"./generator/typescript": {
|
|
24
|
+
"types": "./src/generator/typescript/index.ts",
|
|
25
|
+
"import": "./dist/generator/typescript/index.js"
|
|
26
|
+
},
|
|
27
|
+
"./generator/typescript/schema/typebox": {
|
|
28
|
+
"types": "./src/generator/typescript/schema/typebox.ts",
|
|
29
|
+
"import": "./dist/generator/typescript/schema/typebox.js"
|
|
30
|
+
},
|
|
31
|
+
"./generator/typescript/driver/bun-sql": {
|
|
32
|
+
"types": "./src/generator/typescript/driver/bun-sql.ts",
|
|
33
|
+
"import": "./dist/generator/typescript/driver/bun-sql.js"
|
|
34
|
+
}
|
|
35
|
+
},
|
|
36
|
+
"files": [
|
|
37
|
+
"dist",
|
|
38
|
+
"src"
|
|
39
|
+
],
|
|
40
|
+
"scripts": {
|
|
41
|
+
"build": "bun run scripts/build.ts",
|
|
42
|
+
"test": "bun test",
|
|
43
|
+
"generate": "bun run src/cli/index.ts generate",
|
|
44
|
+
"check": "bun run src/cli/index.ts check",
|
|
45
|
+
"prepublishOnly": "bun run build"
|
|
46
|
+
},
|
|
47
|
+
"keywords": ["sql", "codegen", "typescript", "typebox", "orm", "type-safe", "bun"],
|
|
48
|
+
"license": "MIT",
|
|
49
|
+
"dependencies": {
|
|
50
|
+
"node-sql-parser": "^5.4.0"
|
|
51
|
+
},
|
|
52
|
+
"devDependencies": {
|
|
53
|
+
"@types/bun": "^1.3.10",
|
|
54
|
+
"typescript": "^5.9.3"
|
|
55
|
+
}
|
|
56
|
+
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { createHash } from "crypto";
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, renameSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
import type { SqlcxIR } from "@/ir";
|
|
5
|
+
|
|
6
|
+
/** On-disk layout of the IR cache file (`ir.json`). */
interface CacheFile {
  /** SHA-256 digest of the source files the IR was built from. */
  hash: string;
  /** The cached intermediate representation. */
  ir: SqlcxIR;
}
|
|
10
|
+
|
|
11
|
+
export function computeHash(files: { path: string; content: string }[]): string {
|
|
12
|
+
const sorted = [...files].sort((a, b) => a.path.localeCompare(b.path));
|
|
13
|
+
// Include both path and content in hash, with null byte separators
|
|
14
|
+
// to avoid collisions from file splits/merges/renames
|
|
15
|
+
const hash = createHash("sha256");
|
|
16
|
+
for (const f of sorted) {
|
|
17
|
+
hash.update(f.path);
|
|
18
|
+
hash.update("\0");
|
|
19
|
+
hash.update(f.content);
|
|
20
|
+
hash.update("\0");
|
|
21
|
+
}
|
|
22
|
+
return hash.digest("hex");
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function writeCache(cacheDir: string, ir: SqlcxIR, hash: string): void {
|
|
26
|
+
if (!existsSync(cacheDir)) mkdirSync(cacheDir, { recursive: true });
|
|
27
|
+
const data: CacheFile = { hash, ir };
|
|
28
|
+
const cachePath = join(cacheDir, "ir.json");
|
|
29
|
+
const tempPath = cachePath + ".tmp";
|
|
30
|
+
// Write to temp file then atomic rename — safe against interruptions
|
|
31
|
+
writeFileSync(tempPath, JSON.stringify(data, null, 2));
|
|
32
|
+
renameSync(tempPath, cachePath);
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export function readCache(cacheDir: string, expectedHash: string): SqlcxIR | null {
|
|
36
|
+
const cachePath = join(cacheDir, "ir.json");
|
|
37
|
+
if (!existsSync(cachePath)) return null;
|
|
38
|
+
try {
|
|
39
|
+
const data: CacheFile = JSON.parse(readFileSync(cachePath, "utf-8"));
|
|
40
|
+
if (data.hash !== expectedHash) return null;
|
|
41
|
+
return data.ir;
|
|
42
|
+
} catch {
|
|
43
|
+
// Corrupted cache file — treat as cache miss
|
|
44
|
+
return null;
|
|
45
|
+
}
|
|
46
|
+
}
|