@sqg/sqg 0.7.0 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/sqg.mjs +118 -18
- package/dist/templates/better-sqlite3.hbs +31 -1
- package/dist/templates/java-duckdb-arrow.hbs +11 -1
- package/dist/templates/java-jdbc.hbs +63 -1
- package/dist/templates/libsql.hbs +40 -0
- package/dist/templates/node-sqlite.hbs +33 -0
- package/dist/templates/turso.hbs +35 -0
- package/dist/templates/typescript-duckdb.hbs +37 -1
- package/package.json +2 -1
package/dist/sqg.mjs
CHANGED
|
@@ -1094,14 +1094,63 @@ const duckdb = new class {
|
|
|
1094
1094
|
//#endregion
|
|
1095
1095
|
//#region src/db/postgres.ts
|
|
1096
1096
|
const databaseName = "sqg-db-temp";
|
|
1097
|
-
|
|
1098
|
-
|
|
1097
|
+
let containerInstance = null;
|
|
1098
|
+
async function startTestContainer() {
|
|
1099
|
+
if (containerInstance) return containerInstance.getConnectionUri();
|
|
1100
|
+
consola.info("Starting PostgreSQL container via testcontainers...");
|
|
1101
|
+
const { PostgreSqlContainer } = await import("@testcontainers/postgresql");
|
|
1102
|
+
containerInstance = await new PostgreSqlContainer("postgres:16-alpine").withDatabase("sqg-db").withUsername("sqg").withPassword("secret").start();
|
|
1103
|
+
const connectionUri = containerInstance.getConnectionUri();
|
|
1104
|
+
consola.success(`PostgreSQL container started at: ${connectionUri}`);
|
|
1105
|
+
return connectionUri;
|
|
1106
|
+
}
|
|
1107
|
+
async function stopTestContainer() {
|
|
1108
|
+
if (containerInstance) {
|
|
1109
|
+
consola.info("Stopping PostgreSQL container...");
|
|
1110
|
+
await containerInstance.stop();
|
|
1111
|
+
containerInstance = null;
|
|
1112
|
+
}
|
|
1113
|
+
}
|
|
1114
|
+
async function getConnectionString() {
|
|
1115
|
+
if (process.env.SQG_POSTGRES_URL) return process.env.SQG_POSTGRES_URL;
|
|
1116
|
+
return await startTestContainer();
|
|
1117
|
+
}
|
|
1118
|
+
function getTempConnectionString(baseUrl) {
|
|
1119
|
+
return baseUrl.replace(/\/[^/]+$/, `/${databaseName}`);
|
|
1120
|
+
}
|
|
1099
1121
|
const typeIdToName = /* @__PURE__ */ new Map();
|
|
1100
1122
|
for (const [name, id] of Object.entries(types.builtins)) typeIdToName.set(Number(id), name);
|
|
1123
|
+
let dynamicTypeCache = /* @__PURE__ */ new Map();
|
|
1124
|
+
async function loadTypeCache(db) {
|
|
1125
|
+
const result = await db.query(`
|
|
1126
|
+
SELECT t.oid, t.typname, t.typtype, t.typelem, et.typname AS elemtype
|
|
1127
|
+
FROM pg_type t
|
|
1128
|
+
LEFT JOIN pg_type et ON t.typelem = et.oid
|
|
1129
|
+
WHERE t.typtype IN ('b', 'e', 'r', 'c') -- base, enum, range, composite
|
|
1130
|
+
OR t.typelem != 0 -- array types
|
|
1131
|
+
`);
|
|
1132
|
+
dynamicTypeCache = /* @__PURE__ */ new Map();
|
|
1133
|
+
for (const row of result.rows) {
|
|
1134
|
+
const oid = row.oid;
|
|
1135
|
+
let typeName = row.typname;
|
|
1136
|
+
if (typeName.startsWith("_") && row.elemtype) typeName = `_${row.elemtype.toUpperCase()}`;
|
|
1137
|
+
else typeName = typeName.toUpperCase();
|
|
1138
|
+
dynamicTypeCache.set(oid, typeName);
|
|
1139
|
+
}
|
|
1140
|
+
}
|
|
1141
|
+
function getTypeName(dataTypeID) {
|
|
1142
|
+
const cached = dynamicTypeCache.get(dataTypeID);
|
|
1143
|
+
if (cached) return cached;
|
|
1144
|
+
return typeIdToName.get(dataTypeID) || `type_${dataTypeID}`;
|
|
1145
|
+
}
|
|
1101
1146
|
const postgres = new class {
|
|
1102
1147
|
dbInitial;
|
|
1103
1148
|
db;
|
|
1149
|
+
usingTestContainer = false;
|
|
1104
1150
|
async initializeDatabase(queries) {
|
|
1151
|
+
const connectionString = await getConnectionString();
|
|
1152
|
+
const connectionStringTemp = getTempConnectionString(connectionString);
|
|
1153
|
+
this.usingTestContainer = containerInstance !== null;
|
|
1105
1154
|
this.dbInitial = new Client({ connectionString });
|
|
1106
1155
|
this.db = new Client({ connectionString: connectionStringTemp });
|
|
1107
1156
|
try {
|
|
@@ -1110,7 +1159,7 @@ const postgres = new class {
|
|
|
1110
1159
|
throw new DatabaseError(`Failed to connect to PostgreSQL: ${e.message}`, "postgres", `Check that PostgreSQL is running and accessible at ${connectionString}. Set SQG_POSTGRES_URL environment variable to use a different connection string.`);
|
|
1111
1160
|
}
|
|
1112
1161
|
try {
|
|
1113
|
-
await this.dbInitial.query(`DROP DATABASE "${databaseName}";`);
|
|
1162
|
+
await this.dbInitial.query(`DROP DATABASE IF EXISTS "${databaseName}";`);
|
|
1114
1163
|
} catch (error) {}
|
|
1115
1164
|
try {
|
|
1116
1165
|
await this.dbInitial.query(`CREATE DATABASE "${databaseName}";`);
|
|
@@ -1129,6 +1178,7 @@ const postgres = new class {
|
|
|
1129
1178
|
throw new SqlExecutionError(e.message, query.id, query.filename, query.rawQuery, e);
|
|
1130
1179
|
}
|
|
1131
1180
|
});
|
|
1181
|
+
await loadTypeCache(this.db);
|
|
1132
1182
|
}
|
|
1133
1183
|
async executeQueries(queries) {
|
|
1134
1184
|
const db = this.db;
|
|
@@ -1150,17 +1200,22 @@ const postgres = new class {
|
|
|
1150
1200
|
const statement = query.queryPositional;
|
|
1151
1201
|
try {
|
|
1152
1202
|
consola.info("Query:", statement.sql);
|
|
1203
|
+
const parameterValues = statement.parameters.map((p) => {
|
|
1204
|
+
const value = p.value;
|
|
1205
|
+
if (value.startsWith("'") && value.endsWith("'") || value.startsWith("\"") && value.endsWith("\"")) return value.slice(1, -1);
|
|
1206
|
+
return value;
|
|
1207
|
+
});
|
|
1153
1208
|
let result;
|
|
1154
1209
|
try {
|
|
1155
1210
|
await db.query("BEGIN");
|
|
1156
|
-
result = await db.query(statement.sql,
|
|
1211
|
+
result = await db.query(statement.sql, parameterValues);
|
|
1157
1212
|
} finally {
|
|
1158
1213
|
await db.query("ROLLBACK");
|
|
1159
1214
|
}
|
|
1160
1215
|
if (query.isQuery) {
|
|
1161
1216
|
const columnNames = result.fields.map((field) => field.name);
|
|
1162
1217
|
const columnTypes = result.fields.map((field) => {
|
|
1163
|
-
return
|
|
1218
|
+
return getTypeName(field.dataTypeID);
|
|
1164
1219
|
});
|
|
1165
1220
|
consola.debug("Columns:", columnNames);
|
|
1166
1221
|
consola.debug("Types:", columnTypes);
|
|
@@ -1203,8 +1258,9 @@ const postgres = new class {
|
|
|
1203
1258
|
}
|
|
1204
1259
|
async close() {
|
|
1205
1260
|
await this.db.end();
|
|
1206
|
-
await this.dbInitial.query(`DROP DATABASE "${databaseName}"`);
|
|
1261
|
+
await this.dbInitial.query(`DROP DATABASE IF EXISTS "${databaseName}"`);
|
|
1207
1262
|
await this.dbInitial.end();
|
|
1263
|
+
if (this.usingTestContainer) await stopTestContainer();
|
|
1208
1264
|
}
|
|
1209
1265
|
}();
|
|
1210
1266
|
|
|
@@ -1396,7 +1452,20 @@ var JavaTypeMapper = class JavaTypeMapper extends TypeMapper {
|
|
|
1396
1452
|
INTERVAL: "String",
|
|
1397
1453
|
BIT: "String",
|
|
1398
1454
|
BIGNUM: "BigDecimal",
|
|
1399
|
-
|
|
1455
|
+
INT2: "Short",
|
|
1456
|
+
INT4: "Integer",
|
|
1457
|
+
INT8: "Long",
|
|
1458
|
+
FLOAT4: "Float",
|
|
1459
|
+
FLOAT8: "Double",
|
|
1460
|
+
NUMERIC: "BigDecimal",
|
|
1461
|
+
BOOL: "Boolean",
|
|
1462
|
+
BYTEA: "byte[]",
|
|
1463
|
+
TIMESTAMPTZ: "OffsetDateTime",
|
|
1464
|
+
JSON: "String",
|
|
1465
|
+
JSONB: "String",
|
|
1466
|
+
OID: "Long",
|
|
1467
|
+
SERIAL: "Integer",
|
|
1468
|
+
BIGSERIAL: "Long"
|
|
1400
1469
|
};
|
|
1401
1470
|
static javaReservedKeywords = new Set([
|
|
1402
1471
|
"abstract",
|
|
@@ -1457,12 +1526,15 @@ var JavaTypeMapper = class JavaTypeMapper extends TypeMapper {
|
|
|
1457
1526
|
const upperType = type.toString().toUpperCase();
|
|
1458
1527
|
const mappedType = this.typeMap[upperType];
|
|
1459
1528
|
if (mappedType) return mappedType;
|
|
1529
|
+
if (upperType.startsWith("_")) {
|
|
1530
|
+
const baseType = upperType.substring(1);
|
|
1531
|
+
return `List<${this.typeMap[baseType] || "Object"}>`;
|
|
1532
|
+
}
|
|
1460
1533
|
if (upperType.startsWith("DECIMAL(") || upperType.startsWith("NUMERIC(")) return "BigDecimal";
|
|
1461
1534
|
if (upperType.startsWith("ENUM(")) return "String";
|
|
1462
1535
|
if (upperType.startsWith("UNION(")) return "Object";
|
|
1463
1536
|
if (/\[\d+\]/.test(upperType)) return "Object";
|
|
1464
|
-
|
|
1465
|
-
return "Object";
|
|
1537
|
+
return "String";
|
|
1466
1538
|
}
|
|
1467
1539
|
formatListType(elementType) {
|
|
1468
1540
|
return `List<${elementType}>`;
|
|
@@ -1515,8 +1587,13 @@ var JavaTypeMapper = class JavaTypeMapper extends TypeMapper {
|
|
|
1515
1587
|
const fieldType = this.getTypeName(column);
|
|
1516
1588
|
const upperType = column.type?.toString().toUpperCase() ?? "";
|
|
1517
1589
|
if (upperType === "TIMESTAMP" || upperType === "DATETIME") return `toLocalDateTime((java.sql.Timestamp)${value})`;
|
|
1590
|
+
if (upperType === "TIMESTAMPTZ") return `toOffsetDateTime((java.sql.Timestamp)${value})`;
|
|
1518
1591
|
if (upperType === "DATE") return `toLocalDate((java.sql.Date)${value})`;
|
|
1519
1592
|
if (upperType === "TIME") return `toLocalTime((java.sql.Time)${value})`;
|
|
1593
|
+
if (upperType.startsWith("_")) {
|
|
1594
|
+
const baseType = upperType.substring(1);
|
|
1595
|
+
return `arrayToList((Array)${value}, ${this.typeMap[baseType] || "Object"}[].class)`;
|
|
1596
|
+
}
|
|
1520
1597
|
return `(${fieldType})${value}`;
|
|
1521
1598
|
}
|
|
1522
1599
|
getInnermostType(type) {
|
|
@@ -1584,12 +1661,30 @@ var TypeScriptTypeMapper = class extends TypeMapper {
|
|
|
1584
1661
|
INTERVAL: "{ months: number; days: number; micros: bigint }",
|
|
1585
1662
|
BIT: "{ data: Uint8Array }",
|
|
1586
1663
|
BIGNUM: "bigint",
|
|
1587
|
-
|
|
1664
|
+
INT2: "number",
|
|
1665
|
+
INT4: "number",
|
|
1666
|
+
INT8: "bigint",
|
|
1667
|
+
FLOAT4: "number",
|
|
1668
|
+
FLOAT8: "number",
|
|
1669
|
+
NUMERIC: "string",
|
|
1670
|
+
BOOL: "boolean",
|
|
1671
|
+
BYTEA: "Buffer",
|
|
1672
|
+
TIMESTAMPTZ: "Date",
|
|
1673
|
+
JSON: "unknown",
|
|
1674
|
+
JSONB: "unknown",
|
|
1675
|
+
OID: "number",
|
|
1676
|
+
SERIAL: "number",
|
|
1677
|
+
BIGSERIAL: "bigint"
|
|
1588
1678
|
};
|
|
1589
1679
|
mapPrimitiveType(type, nullable) {
|
|
1590
1680
|
const upperType = type.toUpperCase();
|
|
1591
1681
|
const mappedType = this.typeMap[upperType];
|
|
1592
1682
|
if (mappedType) return nullable ? `${mappedType} | null` : mappedType;
|
|
1683
|
+
if (upperType.startsWith("_")) {
|
|
1684
|
+
const baseType = upperType.substring(1);
|
|
1685
|
+
const arrayType = `${this.typeMap[baseType] || "unknown"}[]`;
|
|
1686
|
+
return nullable ? `${arrayType} | null` : arrayType;
|
|
1687
|
+
}
|
|
1593
1688
|
if (upperType.startsWith("DECIMAL(") || upperType.startsWith("NUMERIC(")) {
|
|
1594
1689
|
const baseType = "{ width: number; scale: number; value: bigint }";
|
|
1595
1690
|
return nullable ? `${baseType} | null` : baseType;
|
|
@@ -1614,8 +1709,7 @@ var TypeScriptTypeMapper = class extends TypeMapper {
|
|
|
1614
1709
|
}
|
|
1615
1710
|
}
|
|
1616
1711
|
if (/\[\d+\]/.test(upperType)) return "{ items: unknown[] }";
|
|
1617
|
-
|
|
1618
|
-
return "unknown";
|
|
1712
|
+
return nullable ? "string | null" : "string";
|
|
1619
1713
|
}
|
|
1620
1714
|
formatListType(elementType) {
|
|
1621
1715
|
return `{ items: (${elementType})[] }`;
|
|
@@ -1800,7 +1894,8 @@ var JavaDuckDBArrowGenerator = class extends BaseGenerator {
|
|
|
1800
1894
|
name,
|
|
1801
1895
|
generator: "java/duckdb/jdbc",
|
|
1802
1896
|
output: gen.output,
|
|
1803
|
-
config: gen.config
|
|
1897
|
+
config: gen.config,
|
|
1898
|
+
projectName: gen.projectName
|
|
1804
1899
|
}, this.javaGenerator, name, q, tables, "duckdb");
|
|
1805
1900
|
}
|
|
1806
1901
|
isCompatibleWith(engine) {
|
|
@@ -2266,7 +2361,7 @@ var TableHelper = class {
|
|
|
2266
2361
|
return this.generator.typeMapper;
|
|
2267
2362
|
}
|
|
2268
2363
|
};
|
|
2269
|
-
function generateSourceFile(name, queries, tables, templatePath, generator, engine, config) {
|
|
2364
|
+
function generateSourceFile(name, queries, tables, templatePath, generator, engine, projectName, config) {
|
|
2270
2365
|
const templateSrc = readFileSync(templatePath, "utf-8");
|
|
2271
2366
|
const template = Handlebars.compile(templateSrc);
|
|
2272
2367
|
Handlebars.registerHelper("mapType", (column) => generator.mapType(column));
|
|
@@ -2279,6 +2374,7 @@ function generateSourceFile(name, queries, tables, templatePath, generator, engi
|
|
|
2279
2374
|
queries: queries.map((q) => new SqlQueryHelper(q, generator, generator.getStatement(q))),
|
|
2280
2375
|
tables: tableHelpers,
|
|
2281
2376
|
className: generator.getClassName(name),
|
|
2377
|
+
projectName,
|
|
2282
2378
|
config
|
|
2283
2379
|
}, {
|
|
2284
2380
|
allowProtoPropertiesByDefault: true,
|
|
@@ -2418,7 +2514,7 @@ async function writeGeneratedFile(projectDir, gen, generator, file, queries, tab
|
|
|
2418
2514
|
await generator.beforeGenerate(projectDir, gen, queries, tables);
|
|
2419
2515
|
const templatePath = join(dirname(new URL(import.meta.url).pathname), gen.template ?? generator.template);
|
|
2420
2516
|
const name = gen.name ?? basename(file, extname(file));
|
|
2421
|
-
const sourceFile = generateSourceFile(name, queries, tables, templatePath, generator, engine, gen.config);
|
|
2517
|
+
const sourceFile = generateSourceFile(name, queries, tables, templatePath, generator, engine, gen.projectName ?? name, gen.config);
|
|
2422
2518
|
if (writeToStdout) {
|
|
2423
2519
|
process.stdout.write(sourceFile);
|
|
2424
2520
|
if (!sourceFile.endsWith("\n")) process.stdout.write("\n");
|
|
@@ -2545,7 +2641,11 @@ async function processProjectFromConfig(project, projectDir, writeToStdout = fal
|
|
|
2545
2641
|
});
|
|
2546
2642
|
}
|
|
2547
2643
|
for (const gen of gens) {
|
|
2548
|
-
const
|
|
2644
|
+
const generator = getGenerator(gen.generator);
|
|
2645
|
+
const outputPath = await writeGeneratedFile(projectDir, {
|
|
2646
|
+
...gen,
|
|
2647
|
+
projectName: project.name
|
|
2648
|
+
}, generator, sqlFile, queries, tables, engine, writeToStdout);
|
|
2549
2649
|
if (outputPath !== null) files.push(outputPath);
|
|
2550
2650
|
}
|
|
2551
2651
|
}
|
|
@@ -2568,7 +2668,7 @@ async function processProject(projectPath) {
|
|
|
2568
2668
|
//#region src/mcp-server.ts
|
|
2569
2669
|
const server = new Server({
|
|
2570
2670
|
name: "sqg-mcp",
|
|
2571
|
-
version: process.env.npm_package_version ?? "0.7.0"
|
|
2671
|
+
version: process.env.npm_package_version ?? "0.8.0"
|
|
2572
2672
|
}, { capabilities: {
|
|
2573
2673
|
tools: {},
|
|
2574
2674
|
resources: {}
|
|
@@ -2897,7 +2997,7 @@ async function startMcpServer() {
|
|
|
2897
2997
|
|
|
2898
2998
|
//#endregion
|
|
2899
2999
|
//#region src/sqg.ts
|
|
2900
|
-
const version = process.env.npm_package_version ?? "0.7.0";
|
|
3000
|
+
const version = process.env.npm_package_version ?? "0.8.0";
|
|
2901
3001
|
const description = process.env.npm_package_description ?? "SQG - SQL Query Generator - Type-safe code generation from SQL (https://sqg.dev)";
|
|
2902
3002
|
consola.level = LogLevels.info;
|
|
2903
3003
|
const program = new Command().name("sqg").description(`${description}
|
|
@@ -18,7 +18,7 @@ export class {{className}} {
|
|
|
18
18
|
return stmt as Statement<BindParameters, Result>;
|
|
19
19
|
}
|
|
20
20
|
|
|
21
|
-
static getMigrations(): string[] {
|
|
21
|
+
static getMigrations(): string[] {
|
|
22
22
|
return [
|
|
23
23
|
{{#each migrations}}
|
|
24
24
|
{{{quote sqlQuery}}},
|
|
@@ -26,6 +26,36 @@ export class {{className}} {
|
|
|
26
26
|
];
|
|
27
27
|
}
|
|
28
28
|
|
|
29
|
+
{{#if config.migrations}}
|
|
30
|
+
static applyMigrations(db: Database, projectName = '{{projectName}}'): void {
|
|
31
|
+
db.exec(`CREATE TABLE IF NOT EXISTS _sqg_migrations (
|
|
32
|
+
project TEXT NOT NULL,
|
|
33
|
+
migration_id TEXT NOT NULL,
|
|
34
|
+
applied_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
35
|
+
PRIMARY KEY (project, migration_id)
|
|
36
|
+
)`);
|
|
37
|
+
const runMigrations = db.transaction(() => {
|
|
38
|
+
const applied = new Set(
|
|
39
|
+
db.prepare('SELECT migration_id FROM _sqg_migrations WHERE project = ?')
|
|
40
|
+
.pluck().all(projectName) as string[]
|
|
41
|
+
);
|
|
42
|
+
const migrations: [string, string][] = [
|
|
43
|
+
{{#each migrations}}
|
|
44
|
+
['{{{id}}}', {{{quote sqlQuery}}}],
|
|
45
|
+
{{/each}}
|
|
46
|
+
];
|
|
47
|
+
for (const [id, sql] of migrations) {
|
|
48
|
+
if (!applied.has(id)) {
|
|
49
|
+
db.exec(sql);
|
|
50
|
+
db.prepare('INSERT INTO _sqg_migrations (project, migration_id) VALUES (?, ?)')
|
|
51
|
+
.run(projectName, id);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
});
|
|
55
|
+
runMigrations.immediate();
|
|
56
|
+
}
|
|
57
|
+
{{/if}}
|
|
58
|
+
|
|
29
59
|
static getQueryNames(): Map<string, keyof {{className}}> {
|
|
30
60
|
return new Map([
|
|
31
61
|
{{#each queries}} {{#unless skipGenerateFunction}}
|
|
@@ -29,10 +29,20 @@ public class {{className}} {
|
|
|
29
29
|
}
|
|
30
30
|
|
|
31
31
|
|
|
32
|
-
public static List<String> getMigrations() {
|
|
32
|
+
public static List<String> getMigrations() {
|
|
33
33
|
return {{className}}Jdbc.getMigrations();
|
|
34
34
|
}
|
|
35
35
|
|
|
36
|
+
{{#if config.migrations}}
|
|
37
|
+
public static void applyMigrations(Connection connection) throws SQLException {
|
|
38
|
+
{{className}}Jdbc.applyMigrations(connection);
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
public static void applyMigrations(Connection connection, String projectName) throws SQLException {
|
|
42
|
+
{{className}}Jdbc.applyMigrations(connection, projectName);
|
|
43
|
+
}
|
|
44
|
+
{{/if}}
|
|
45
|
+
|
|
36
46
|
{{#each queries}}
|
|
37
47
|
{{#unless skipGenerateFunction}}
|
|
38
48
|
{{#if isOne}}
|
|
@@ -13,6 +13,7 @@ import java.time.Instant;
|
|
|
13
13
|
import java.time.LocalDate;
|
|
14
14
|
import java.time.LocalDateTime;
|
|
15
15
|
import java.time.LocalTime;
|
|
16
|
+
import java.time.OffsetDateTime;
|
|
16
17
|
import java.time.OffsetTime;
|
|
17
18
|
import java.util.ArrayList;
|
|
18
19
|
import java.util.Arrays;
|
|
@@ -65,6 +66,10 @@ public class {{className}} {
|
|
|
65
66
|
private static LocalTime toLocalTime(java.sql.Time t) {
|
|
66
67
|
return t != null ? t.toLocalTime() : null;
|
|
67
68
|
}
|
|
69
|
+
|
|
70
|
+
private static OffsetDateTime toOffsetDateTime(java.sql.Timestamp ts) {
|
|
71
|
+
return ts != null ? ts.toInstant().atOffset(java.time.ZoneOffset.UTC) : null;
|
|
72
|
+
}
|
|
68
73
|
|
|
69
74
|
private static <K> List<K> arrayToList(
|
|
70
75
|
Array array,
|
|
@@ -129,10 +134,67 @@ public class {{className}} {
|
|
|
129
134
|
{{/each}}
|
|
130
135
|
);
|
|
131
136
|
|
|
132
|
-
|
|
137
|
+
{{#if config.migrations}}
|
|
138
|
+
private static final List<String> migrationIds = List.of(
|
|
139
|
+
{{#each migrations}}"{{id}}"{{#unless @last}},{{/unless}}
|
|
140
|
+
{{/each}}
|
|
141
|
+
);
|
|
142
|
+
{{/if}}
|
|
143
|
+
|
|
144
|
+
public static List<String> getMigrations() {
|
|
133
145
|
return migrations;
|
|
134
146
|
}
|
|
135
147
|
|
|
148
|
+
{{#if config.migrations}}
|
|
149
|
+
public static void applyMigrations(Connection connection) throws SQLException {
|
|
150
|
+
applyMigrations(connection, "{{projectName}}");
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
public static void applyMigrations(Connection connection, String projectName) throws SQLException {
|
|
154
|
+
try (var stmt = connection.createStatement()) {
|
|
155
|
+
stmt.execute("""
|
|
156
|
+
CREATE TABLE IF NOT EXISTS _sqg_migrations (
|
|
157
|
+
project TEXT NOT NULL,
|
|
158
|
+
migration_id TEXT NOT NULL,
|
|
159
|
+
applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
160
|
+
PRIMARY KEY (project, migration_id)
|
|
161
|
+
)""");
|
|
162
|
+
}
|
|
163
|
+
boolean wasAutoCommit = connection.getAutoCommit();
|
|
164
|
+
connection.setAutoCommit(false);
|
|
165
|
+
try {
|
|
166
|
+
var applied = new java.util.HashSet<String>();
|
|
167
|
+
try (var stmt = connection.prepareStatement("SELECT migration_id FROM _sqg_migrations WHERE project = ?")) {
|
|
168
|
+
stmt.setString(1, projectName);
|
|
169
|
+
try (var rs = stmt.executeQuery()) {
|
|
170
|
+
while (rs.next()) {
|
|
171
|
+
applied.add(rs.getString(1));
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
for (int i = 0; i < migrations.size(); i++) {
|
|
176
|
+
var id = migrationIds.get(i);
|
|
177
|
+
if (!applied.contains(id)) {
|
|
178
|
+
try (var stmt = connection.createStatement()) {
|
|
179
|
+
stmt.execute(migrations.get(i));
|
|
180
|
+
}
|
|
181
|
+
try (var stmt = connection.prepareStatement("INSERT INTO _sqg_migrations (project, migration_id) VALUES (?, ?)")) {
|
|
182
|
+
stmt.setString(1, projectName);
|
|
183
|
+
stmt.setString(2, id);
|
|
184
|
+
stmt.executeUpdate();
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
connection.commit();
|
|
189
|
+
} catch (SQLException e) {
|
|
190
|
+
connection.rollback();
|
|
191
|
+
throw e;
|
|
192
|
+
} finally {
|
|
193
|
+
connection.setAutoCommit(wasAutoCommit);
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
{{/if}}
|
|
197
|
+
|
|
136
198
|
{{#each queries}}
|
|
137
199
|
{{#unless skipGenerateFunction}}
|
|
138
200
|
{{>columnTypesRecord}}
|
|
@@ -17,6 +17,46 @@ export class {{className}} {
|
|
|
17
17
|
];
|
|
18
18
|
}
|
|
19
19
|
|
|
20
|
+
{{#if config.migrations}}
|
|
21
|
+
static async applyMigrations(client: Client, projectName = '{{projectName}}'): Promise<void> {
|
|
22
|
+
await client.execute({
|
|
23
|
+
sql: `CREATE TABLE IF NOT EXISTS _sqg_migrations (
|
|
24
|
+
project TEXT NOT NULL,
|
|
25
|
+
migration_id TEXT NOT NULL,
|
|
26
|
+
applied_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
27
|
+
PRIMARY KEY (project, migration_id)
|
|
28
|
+
)`,
|
|
29
|
+
args: [],
|
|
30
|
+
});
|
|
31
|
+
const tx = await client.transaction('write');
|
|
32
|
+
try {
|
|
33
|
+
const result = await tx.execute({
|
|
34
|
+
sql: 'SELECT migration_id FROM _sqg_migrations WHERE project = ?',
|
|
35
|
+
args: [projectName],
|
|
36
|
+
});
|
|
37
|
+
const applied = new Set(result.rows.map(r => r.migration_id as string));
|
|
38
|
+
const migrations: [string, string][] = [
|
|
39
|
+
{{#each migrations}}
|
|
40
|
+
['{{{id}}}', {{{quote sqlQuery}}}],
|
|
41
|
+
{{/each}}
|
|
42
|
+
];
|
|
43
|
+
for (const [id, sql] of migrations) {
|
|
44
|
+
if (!applied.has(id)) {
|
|
45
|
+
await tx.execute({ sql, args: [] });
|
|
46
|
+
await tx.execute({
|
|
47
|
+
sql: 'INSERT INTO _sqg_migrations (project, migration_id) VALUES (?, ?)',
|
|
48
|
+
args: [projectName, id],
|
|
49
|
+
});
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
await tx.commit();
|
|
53
|
+
} catch (e) {
|
|
54
|
+
await tx.rollback();
|
|
55
|
+
throw e;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
{{/if}}
|
|
59
|
+
|
|
20
60
|
static getQueryNames(): Map<string, keyof {{className}}> {
|
|
21
61
|
return new Map([
|
|
22
62
|
{{#each queries}} {{#unless skipGenerateFunction}}
|
|
@@ -28,6 +28,39 @@ export class {{className}} {
|
|
|
28
28
|
];
|
|
29
29
|
}
|
|
30
30
|
|
|
31
|
+
{{#if config.migrations}}
|
|
32
|
+
static applyMigrations(db: DatabaseSync, projectName = '{{projectName}}'): void {
|
|
33
|
+
db.exec(`CREATE TABLE IF NOT EXISTS _sqg_migrations (
|
|
34
|
+
project TEXT NOT NULL,
|
|
35
|
+
migration_id TEXT NOT NULL,
|
|
36
|
+
applied_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
37
|
+
PRIMARY KEY (project, migration_id)
|
|
38
|
+
)`);
|
|
39
|
+
db.exec('BEGIN IMMEDIATE');
|
|
40
|
+
try {
|
|
41
|
+
const rows = db.prepare('SELECT migration_id FROM _sqg_migrations WHERE project = ?')
|
|
42
|
+
.all(projectName) as { migration_id: string }[];
|
|
43
|
+
const applied = new Set(rows.map(r => r.migration_id));
|
|
44
|
+
const migrations: [string, string][] = [
|
|
45
|
+
{{#each migrations}}
|
|
46
|
+
['{{{id}}}', {{{quote sqlQuery}}}],
|
|
47
|
+
{{/each}}
|
|
48
|
+
];
|
|
49
|
+
for (const [id, sql] of migrations) {
|
|
50
|
+
if (!applied.has(id)) {
|
|
51
|
+
db.exec(sql);
|
|
52
|
+
db.prepare('INSERT INTO _sqg_migrations (project, migration_id) VALUES (?, ?)')
|
|
53
|
+
.run(projectName, id);
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
db.exec('COMMIT');
|
|
57
|
+
} catch (e) {
|
|
58
|
+
db.exec('ROLLBACK');
|
|
59
|
+
throw e;
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
{{/if}}
|
|
63
|
+
|
|
31
64
|
static getQueryNames(): Map<string, keyof {{className}}> {
|
|
32
65
|
return new Map([
|
|
33
66
|
{{#each queries}} {{#unless skipGenerateFunction}}
|
package/dist/templates/turso.hbs
CHANGED
|
@@ -31,6 +31,41 @@ export class {{className}} {
|
|
|
31
31
|
];
|
|
32
32
|
}
|
|
33
33
|
|
|
34
|
+
{{#if config.migrations}}
|
|
35
|
+
static async applyMigrations(db: Database, projectName = '{{projectName}}'): Promise<void> {
|
|
36
|
+
const createStmt = await db.prepare(`CREATE TABLE IF NOT EXISTS _sqg_migrations (
|
|
37
|
+
project TEXT NOT NULL,
|
|
38
|
+
migration_id TEXT NOT NULL,
|
|
39
|
+
applied_at TEXT NOT NULL DEFAULT (datetime('now')),
|
|
40
|
+
PRIMARY KEY (project, migration_id)
|
|
41
|
+
)`);
|
|
42
|
+
await createStmt.run();
|
|
43
|
+
const tx = await db.transaction('write');
|
|
44
|
+
try {
|
|
45
|
+
const selectStmt = await tx.prepare('SELECT migration_id FROM _sqg_migrations WHERE project = ?');
|
|
46
|
+
const rows = await selectStmt.all(projectName) as { migration_id: string }[];
|
|
47
|
+
const applied = new Set(rows.map(r => r.migration_id));
|
|
48
|
+
const migrations: [string, string][] = [
|
|
49
|
+
{{#each migrations}}
|
|
50
|
+
['{{{id}}}', {{{quote sqlQuery}}}],
|
|
51
|
+
{{/each}}
|
|
52
|
+
];
|
|
53
|
+
for (const [id, sql] of migrations) {
|
|
54
|
+
if (!applied.has(id)) {
|
|
55
|
+
const execStmt = await tx.prepare(sql);
|
|
56
|
+
await execStmt.run();
|
|
57
|
+
const insertStmt = await tx.prepare('INSERT INTO _sqg_migrations (project, migration_id) VALUES (?, ?)');
|
|
58
|
+
await insertStmt.run(projectName, id);
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
await tx.commit();
|
|
62
|
+
} catch (e) {
|
|
63
|
+
await tx.rollback();
|
|
64
|
+
throw e;
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
{{/if}}
|
|
68
|
+
|
|
34
69
|
static getQueryNames(): Map<string, keyof {{className}}> {
|
|
35
70
|
return new Map([
|
|
36
71
|
{{#each queries}} {{#unless skipGenerateFunction}}
|
|
@@ -5,7 +5,7 @@ export class {{className}} {
|
|
|
5
5
|
|
|
6
6
|
constructor(private conn: DuckDBConnection) {}
|
|
7
7
|
|
|
8
|
-
static getMigrations(): string[] {
|
|
8
|
+
static getMigrations(): string[] {
|
|
9
9
|
return [
|
|
10
10
|
{{#each migrations}}
|
|
11
11
|
{{{quote sqlQuery}}},
|
|
@@ -13,6 +13,42 @@ export class {{className}} {
|
|
|
13
13
|
];
|
|
14
14
|
}
|
|
15
15
|
|
|
16
|
+
{{#if config.migrations}}
|
|
17
|
+
static async applyMigrations(conn: DuckDBConnection, projectName = '{{projectName}}'): Promise<void> {
|
|
18
|
+
await conn.run(`CREATE TABLE IF NOT EXISTS _sqg_migrations (
|
|
19
|
+
project TEXT NOT NULL,
|
|
20
|
+
migration_id TEXT NOT NULL,
|
|
21
|
+
applied_at TIMESTAMP NOT NULL DEFAULT now(),
|
|
22
|
+
PRIMARY KEY (project, migration_id)
|
|
23
|
+
)`);
|
|
24
|
+
await conn.run('BEGIN');
|
|
25
|
+
try {
|
|
26
|
+
const result = await conn.runAndReadAll(
|
|
27
|
+
'SELECT migration_id FROM _sqg_migrations WHERE project = $1', [projectName]
|
|
28
|
+
);
|
|
29
|
+
const applied = new Set(result.getRows().map((row) => row[0] as string));
|
|
30
|
+
const migrations: [string, string][] = [
|
|
31
|
+
{{#each migrations}}
|
|
32
|
+
['{{{id}}}', {{{quote sqlQuery}}}],
|
|
33
|
+
{{/each}}
|
|
34
|
+
];
|
|
35
|
+
for (const [id, sql] of migrations) {
|
|
36
|
+
if (!applied.has(id)) {
|
|
37
|
+
await conn.run(sql);
|
|
38
|
+
await conn.run(
|
|
39
|
+
'INSERT INTO _sqg_migrations (project, migration_id) VALUES ($1, $2)',
|
|
40
|
+
[projectName, id]
|
|
41
|
+
);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
await conn.run('COMMIT');
|
|
45
|
+
} catch (e) {
|
|
46
|
+
await conn.run('ROLLBACK');
|
|
47
|
+
throw e;
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
{{/if}}
|
|
51
|
+
|
|
16
52
|
static getQueryNames(): Map<string, keyof {{className}}> {
|
|
17
53
|
return new Map([
|
|
18
54
|
{{#each queries}} {{#unless skipGenerateFunction}}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@sqg/sqg",
|
|
3
|
-
"version": "0.7.0",
|
|
3
|
+
"version": "0.8.0",
|
|
4
4
|
"description": "SQG - SQL Query Generator - Type-safe code generation from SQL (https://sqg.dev)",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
|
@@ -53,6 +53,7 @@
|
|
|
53
53
|
"handlebars": "^4.7.8",
|
|
54
54
|
"pg": "^8.16.3",
|
|
55
55
|
"pg-types": "^4.1.0",
|
|
56
|
+
"@testcontainers/postgresql": "^10.21.0",
|
|
56
57
|
"prettier": "^3.7.4",
|
|
57
58
|
"prettier-plugin-java": "^2.7.7",
|
|
58
59
|
"yaml": "^2.8.2",
|