@b9g/zen 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/LICENSE +21 -0
- package/README.md +1044 -0
- package/chunk-2IEEEMRN.js +38 -0
- package/chunk-56M5Z3A6.js +1346 -0
- package/chunk-QXGEP5PB.js +310 -0
- package/ddl-NAJM37GQ.js +9 -0
- package/package.json +102 -0
- package/src/bun.d.ts +50 -0
- package/src/bun.js +906 -0
- package/src/mysql.d.ts +62 -0
- package/src/mysql.js +573 -0
- package/src/postgres.d.ts +62 -0
- package/src/postgres.js +555 -0
- package/src/sqlite.d.ts +43 -0
- package/src/sqlite.js +447 -0
- package/src/zen.d.ts +14 -0
- package/src/zen.js +2143 -0
package/src/mysql.d.ts
ADDED
@@ -0,0 +1,62 @@
+/**
+ * mysql2 adapter for @b9g/zen
+ *
+ * Provides a Driver implementation for mysql2.
+ * Uses connection pooling - call close() when done to end all connections.
+ *
+ * Requires: mysql2
+ */
+import type { Driver, Table, EnsureResult } from "./zen.js";
+/**
+ * Options for the mysql adapter.
+ */
+export interface MySQLOptions {
+  /** Maximum number of connections in the pool (default: 10) */
+  connectionLimit?: number;
+  /** Idle timeout in milliseconds (default: 60000) */
+  idleTimeout?: number;
+  /** Connection timeout in milliseconds (default: 10000) */
+  connectTimeout?: number;
+}
+/**
+ * MySQL driver using mysql2.
+ *
+ * @example
+ * import MySQLDriver from "@b9g/zen/mysql";
+ * import {Database} from "@b9g/zen";
+ *
+ * const driver = new MySQLDriver("mysql://localhost/mydb");
+ * const db = new Database(driver);
+ *
+ * db.addEventListener("upgradeneeded", (e) => {
+ *   e.waitUntil(runMigrations(e));
+ * });
+ *
+ * await db.open(1);
+ *
+ * // When done:
+ * await driver.close();
+ */
+export default class MySQLDriver implements Driver {
+  #private;
+  readonly supportsReturning = false;
+  constructor(url: string, options?: MySQLOptions);
+  all<T>(strings: TemplateStringsArray, values: unknown[]): Promise<T[]>;
+  get<T>(strings: TemplateStringsArray, values: unknown[]): Promise<T | null>;
+  run(strings: TemplateStringsArray, values: unknown[]): Promise<number>;
+  val<T>(strings: TemplateStringsArray, values: unknown[]): Promise<T | null>;
+  close(): Promise<void>;
+  transaction<T>(fn: (txDriver: Driver) => Promise<T>): Promise<T>;
+  withMigrationLock<T>(fn: () => Promise<T>): Promise<T>;
+  /**
+   * Ensure table exists with the specified structure.
+   * Creates table if missing, adds missing columns/indexes.
+   * Throws SchemaDriftError if constraints are missing.
+   */
+  ensureTable<T extends Table<any>>(table: T): Promise<EnsureResult>;
+  /**
+   * Ensure constraints exist on the table.
+   * Applies unique and foreign key constraints with preflight checks.
+   */
+  ensureConstraints<T extends Table<any>>(table: T): Promise<EnsureResult>;
+}
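
These methods take an already-split TemplateStringsArray plus a values array rather than spread interpolations, so callers are expected to feed them through a template tag. A minimal sketch of such a wrapper follows; the `all` tag helper and the `users` table are illustrative assumptions, not part of the package.

import MySQLDriver from "@b9g/zen/mysql";

const driver = new MySQLDriver("mysql://localhost/mydb", {connectionLimit: 5});

// Hypothetical tag that forwards the template parts to driver.all().
const all = (strings: TemplateStringsArray, ...values: unknown[]) =>
  driver.all<{id: number; email: string}>(strings, values);

// Interpolated values travel as bound parameters, not as spliced SQL text.
const rows = await all`SELECT id, email FROM users WHERE active = ${1}`;
await driver.close();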
package/src/mysql.js
ADDED
@@ -0,0 +1,573 @@
+/// <reference types="./mysql.d.ts" />
+import {
+  quoteIdent,
+  renderDDL
+} from "../chunk-2IEEEMRN.js";
+import {
+  generateColumnDDL,
+  generateDDL
+} from "../chunk-QXGEP5PB.js";
+import {
+  ConstraintPreflightError,
+  EnsureError,
+  SchemaDriftError,
+  getTableMeta,
+  resolveSQLBuiltin
+} from "../chunk-56M5Z3A6.js";
+
+// src/mysql.ts
+import {
+  ConstraintViolationError,
+  isSQLBuiltin,
+  isSQLIdentifier
+} from "./zen.js";
+import mysql from "mysql2/promise";
+var DIALECT = "mysql";
+function quoteIdent2(name) {
+  return quoteIdent(name, DIALECT);
+}
+function buildSQL(strings, values) {
+  let sql = strings[0];
+  const params = [];
+  for (let i = 0; i < values.length; i++) {
+    const value = values[i];
+    if (isSQLBuiltin(value)) {
+      sql += resolveSQLBuiltin(value) + strings[i + 1];
+    } else if (isSQLIdentifier(value)) {
+      sql += quoteIdent2(value.name) + strings[i + 1];
+    } else {
+      sql += "?" + strings[i + 1];
+      params.push(value);
+    }
+  }
+  return { sql, params };
+}
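
buildSQL makes a single pass over the template parts: values recognized by isSQLBuiltin or isSQLIdentifier are rendered into the statement text, and everything else becomes a `?` placeholder with its value collected into params. A rough illustration of the resulting shape (the identifier and email value are invented; buildSQL itself is module-private):

// For a call shaped like
//   buildSQL(["SELECT * FROM ", " WHERE email = ", ""], [usersIdent, "a@example.com"])
// where usersIdent satisfies isSQLIdentifier (it carries a .name of "users"),
// the result would be roughly:
//   { sql: "SELECT * FROM `users` WHERE email = ?", params: ["a@example.com"] }
// Plain values never enter the SQL string; they are handed to pool.execute() as params.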
+var MySQLDriver = class {
+  supportsReturning = false;
+  #pool;
+  constructor(url, options = {}) {
+    this.#pool = mysql.createPool({
+      uri: url,
+      connectionLimit: options.connectionLimit ?? 10,
+      idleTimeout: options.idleTimeout ?? 6e4,
+      connectTimeout: options.connectTimeout ?? 1e4
+    });
+  }
+  /**
+   * Convert MySQL errors to Zealot errors.
+   */
+  #handleError(error) {
+    if (error && typeof error === "object" && "code" in error) {
+      const code = error.code;
+      const message = error.message || String(error);
+      let kind = "unknown";
+      let constraint;
+      let table;
+      let column;
+      if (code === "ER_DUP_ENTRY") {
+        kind = "unique";
+        const keyMatch = message.match(/for key '([^']+)'/i);
+        constraint = keyMatch ? keyMatch[1] : void 0;
+        if (constraint) {
+          const parts = constraint.split(".");
+          if (parts.length > 1) {
+            table = parts[0];
+          }
+        }
+      } else if (code === "ER_NO_REFERENCED_ROW_2" || code === "ER_ROW_IS_REFERENCED_2") {
+        kind = "foreign_key";
+        const constraintMatch = message.match(/CONSTRAINT `([^`]+)`/i);
+        constraint = constraintMatch ? constraintMatch[1] : void 0;
+        const tableMatch = message.match(/`([^`]+)`\.`([^`]+)`/);
+        if (tableMatch) {
+          table = tableMatch[2];
+        }
+      }
+      if (code === "ER_DUP_ENTRY" || code === "ER_NO_REFERENCED_ROW_2" || code === "ER_ROW_IS_REFERENCED_2") {
+        throw new ConstraintViolationError(
+          message,
+          {
+            kind,
+            constraint,
+            table,
+            column
+          },
+          {
+            cause: error
+          }
+        );
+      }
+    }
+    throw error;
+  }
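
Because #handleError rethrows ER_DUP_ENTRY and the foreign-key failure codes as ConstraintViolationError, callers can branch on the error class rather than on mysql2 error codes. A hedged sketch, continuing with the driver and tag-helper pattern from the earlier sketch and assuming ConstraintViolationError is re-exported from the package root (the `users` table is again invented):

import {ConstraintViolationError} from "@b9g/zen"; // assumed export path

const run = (strings: TemplateStringsArray, ...values: unknown[]) =>
  driver.run(strings, values);

try {
  await run`INSERT INTO users (email) VALUES (${"a@example.com"})`;
} catch (error) {
  if (error instanceof ConstraintViolationError) {
    // Built from ER_DUP_ENTRY / ER_NO_REFERENCED_ROW_2 / ER_ROW_IS_REFERENCED_2,
    // carrying {kind, constraint, table, column} details and the mysql2 error as cause.
  } else {
    throw error;
  }
}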
+  async all(strings, values) {
+    try {
+      const { sql, params } = buildSQL(strings, values);
+      const [rows] = await this.#pool.execute(sql, params);
+      return rows;
+    } catch (error) {
+      return this.#handleError(error);
+    }
+  }
+  async get(strings, values) {
+    try {
+      const { sql, params } = buildSQL(strings, values);
+      const [rows] = await this.#pool.execute(sql, params);
+      return rows[0] ?? null;
+    } catch (error) {
+      return this.#handleError(error);
+    }
+  }
+  async run(strings, values) {
+    try {
+      const { sql, params } = buildSQL(strings, values);
+      const [result] = await this.#pool.execute(sql, params);
+      return result.affectedRows ?? 0;
+    } catch (error) {
+      return this.#handleError(error);
+    }
+  }
+  async val(strings, values) {
+    try {
+      const { sql, params } = buildSQL(strings, values);
+      const [rows] = await this.#pool.execute(sql, params);
+      const row = rows[0];
+      if (!row)
+        return null;
+      const rowValues = Object.values(row);
+      return rowValues[0];
+    } catch (error) {
+      return this.#handleError(error);
+    }
+  }
+  async close() {
+    await this.#pool.end();
+  }
+  async transaction(fn) {
+    const connection = await this.#pool.getConnection();
+    const handleError = this.#handleError.bind(this);
+    try {
+      await connection.query("START TRANSACTION");
+      const txDriver = {
+        supportsReturning: false,
+        all: async (strings, values) => {
+          try {
+            const { sql, params } = buildSQL(strings, values);
+            const [rows] = await connection.execute(sql, params);
+            return rows;
+          } catch (error) {
+            return handleError(error);
+          }
+        },
+        get: async (strings, values) => {
+          try {
+            const { sql, params } = buildSQL(strings, values);
+            const [rows] = await connection.execute(sql, params);
+            return rows[0] ?? null;
+          } catch (error) {
+            return handleError(error);
+          }
+        },
+        run: async (strings, values) => {
+          try {
+            const { sql, params } = buildSQL(strings, values);
+            const [result2] = await connection.execute(sql, params);
+            return result2.affectedRows ?? 0;
+          } catch (error) {
+            return handleError(error);
+          }
+        },
+        val: async (strings, values) => {
+          try {
+            const { sql, params } = buildSQL(strings, values);
+            const [rows] = await connection.execute(sql, params);
+            const row = rows[0];
+            if (!row)
+              return null;
+            const rowValues = Object.values(row);
+            return rowValues[0];
+          } catch (error) {
+            return handleError(error);
+          }
+        },
+        close: async () => {
+        },
+        transaction: async () => {
+          throw new Error("Nested transactions are not supported");
+        }
+      };
+      const result = await fn(txDriver);
+      await connection.query("COMMIT");
+      return result;
+    } catch (error) {
+      await connection.query("ROLLBACK");
+      throw error;
+    } finally {
+      connection.release();
+    }
+  }
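
Everything inside the callback runs on one pooled connection: COMMIT fires only after the callback resolves, any throw triggers ROLLBACK, and the connection is released either way. A minimal usage sketch (the accounts table and amounts are invented):

await driver.transaction(async (tx) => {
  const run = (strings: TemplateStringsArray, ...values: unknown[]) =>
    tx.run(strings, values);
  await run`UPDATE accounts SET balance = balance - ${100} WHERE id = ${1}`;
  await run`UPDATE accounts SET balance = balance + ${100} WHERE id = ${2}`;
  // Returning normally commits; throwing here would roll both updates back.
});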
+  async withMigrationLock(fn) {
+    const LOCK_NAME = "zen_migration";
+    const LOCK_TIMEOUT = 10;
+    const [lockResult] = await this.#pool.execute(`SELECT GET_LOCK(?, ?)`, [
+      LOCK_NAME,
+      LOCK_TIMEOUT
+    ]);
+    const acquired = lockResult[0]?.["GET_LOCK(?, ?)"] === 1;
+    if (!acquired) {
+      throw new Error(
+        `Failed to acquire migration lock after ${LOCK_TIMEOUT}s. Another migration may be in progress.`
+      );
+    }
+    try {
+      return await fn();
+    } finally {
+      await this.#pool.execute(`SELECT RELEASE_LOCK(?)`, [LOCK_NAME]);
+    }
+  }
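
withMigrationLock serializes schema work across processes through MySQL's GET_LOCK/RELEASE_LOCK, waiting up to 10 seconds before giving up, so concurrent deploys do not race each other's DDL. A sketch of wrapping the ensure methods below (the Users table object is assumed to come from zen's table definition API):

await driver.withMigrationLock(async () => {
  await driver.ensureTable(Users);       // CREATE TABLE, or ADD COLUMN / CREATE INDEX on drift
  await driver.ensureConstraints(Users); // UNIQUE and FOREIGN KEY, each preceded by a preflight check
});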
+  // ========================================================================
+  // Schema Management Methods
+  // ========================================================================
+  /**
+   * Ensure table exists with the specified structure.
+   * Creates table if missing, adds missing columns/indexes.
+   * Throws SchemaDriftError if constraints are missing.
+   */
+  async ensureTable(table) {
+    const tableName = table.name;
+    let step = 0;
+    let applied = false;
+    try {
+      const exists = await this.#tableExists(tableName);
+      if (!exists) {
+        step = 1;
+        const ddlTemplate = generateDDL(table, { dialect: DIALECT });
+        const ddlSQL = renderDDL(ddlTemplate[0], ddlTemplate.slice(1), DIALECT);
+        for (const stmt of ddlSQL.split(";").filter((s) => s.trim())) {
+          await this.#pool.execute(stmt.trim(), []);
+        }
+        applied = true;
+      } else {
+        step = 2;
+        const columnsApplied = await this.#ensureMissingColumns(table);
+        applied = applied || columnsApplied;
+        step = 3;
+        const indexesApplied = await this.#ensureMissingIndexes(table);
+        applied = applied || indexesApplied;
+        step = 4;
+        await this.#checkMissingConstraints(table);
+      }
+      return { applied };
+    } catch (error) {
+      if (error instanceof SchemaDriftError || error instanceof ConstraintPreflightError) {
+        throw error;
+      }
+      throw new EnsureError(
+        `Failed to ensure table "${tableName}" exists (step ${step})`,
+        {
+          operation: "ensureTable",
+          table: tableName,
+          step
+        },
+        {
+          cause: error
+        }
+      );
+    }
+  }
+  /**
+   * Ensure constraints exist on the table.
+   * Applies unique and foreign key constraints with preflight checks.
+   */
+  async ensureConstraints(table) {
+    const tableName = table.name;
+    let step = 0;
+    let applied = false;
+    try {
+      step = 1;
+      const existingConstraints = await this.#getConstraints(tableName);
+      step = 2;
+      const uniqueApplied = await this.#ensureUniqueConstraints(
+        table,
+        existingConstraints
+      );
+      applied = applied || uniqueApplied;
+      step = 3;
+      const fkApplied = await this.#ensureForeignKeys(
+        table,
+        existingConstraints
+      );
+      applied = applied || fkApplied;
+      return { applied };
+    } catch (error) {
+      if (error instanceof SchemaDriftError || error instanceof ConstraintPreflightError) {
+        throw error;
+      }
+      throw new EnsureError(
+        `Failed to ensure constraints on table "${tableName}" (step ${step})`,
+        {
+          operation: "ensureConstraints",
+          table: tableName,
+          step
+        },
+        {
+          cause: error
+        }
+      );
+    }
+  }
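
Note the division of labor: ensureTable only reports missing UNIQUE or FOREIGN KEY constraints (as SchemaDriftError, whose suggestion text points at ensureConstraints), while ensureConstraints actually adds them after the preflight checks further down. One way a migration step might respond, sketched under the assumption that SchemaDriftError is re-exported from the package root:

import {SchemaDriftError} from "@b9g/zen"; // assumed export path

try {
  await driver.ensureTable(Users);
} catch (error) {
  if (error instanceof SchemaDriftError) {
    // The table exists but lacks a declared constraint; apply it explicitly.
    await driver.ensureConstraints(Users);
  } else {
    throw error;
  }
}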
+  // ========================================================================
+  // Private Helper Methods
+  // ========================================================================
+  async #tableExists(tableName) {
+    const [rows] = await this.#pool.execute(
+      `SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = ?`,
+      [tableName]
+    );
+    return (rows[0]?.count ?? 0) > 0;
+  }
+  async #getColumns(tableName) {
+    const [rows] = await this.#pool.execute(
+      `SELECT COLUMN_NAME, DATA_TYPE, IS_NULLABLE FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = ? ORDER BY ordinal_position`,
+      [tableName]
+    );
+    return rows.map((row) => ({
+      name: row.COLUMN_NAME,
+      type: row.DATA_TYPE,
+      notnull: row.IS_NULLABLE === "NO"
+    }));
+  }
+  async #getIndexes(tableName) {
+    const [rows] = await this.#pool.execute(
+      `SELECT
+        INDEX_NAME,
+        GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) as COLUMNS,
+        MAX(NON_UNIQUE = 0) as IS_UNIQUE
+      FROM information_schema.statistics
+      WHERE table_schema = DATABASE() AND table_name = ? AND INDEX_NAME != 'PRIMARY'
+      GROUP BY INDEX_NAME`,
+      [tableName]
+    );
+    return rows.map((row) => ({
+      name: row.INDEX_NAME,
+      columns: row.COLUMNS.split(","),
+      unique: row.IS_UNIQUE === 1
+    }));
+  }
+  async #getConstraints(tableName) {
+    const constraints = [];
+    const indexes = await this.#getIndexes(tableName);
+    for (const idx of indexes) {
+      if (idx.unique) {
+        constraints.push({
+          name: idx.name,
+          type: "unique",
+          columns: idx.columns
+        });
+      }
+    }
+    const [fkRows] = await this.#pool.execute(
+      `SELECT
+        CONSTRAINT_NAME,
+        GROUP_CONCAT(COLUMN_NAME ORDER BY ORDINAL_POSITION) as COLUMNS,
+        REFERENCED_TABLE_NAME,
+        GROUP_CONCAT(REFERENCED_COLUMN_NAME ORDER BY ORDINAL_POSITION) as REF_COLUMNS
+      FROM information_schema.key_column_usage
+      WHERE table_schema = DATABASE()
+        AND table_name = ?
+        AND REFERENCED_TABLE_NAME IS NOT NULL
+      GROUP BY CONSTRAINT_NAME, REFERENCED_TABLE_NAME`,
+      [tableName]
+    );
+    for (const row of fkRows) {
+      constraints.push({
+        name: row.CONSTRAINT_NAME,
+        type: "foreign_key",
+        columns: row.COLUMNS.split(","),
+        referencedTable: row.REFERENCED_TABLE_NAME,
+        referencedColumns: row.REF_COLUMNS.split(",")
+      });
+    }
+    return constraints;
+  }
+  async #ensureMissingColumns(table) {
+    const existingCols = await this.#getColumns(table.name);
+    const existingColNames = new Set(existingCols.map((c) => c.name));
+    const schemaFields = Object.keys(table.schema.shape);
+    let applied = false;
+    for (const fieldName of schemaFields) {
+      if (!existingColNames.has(fieldName)) {
+        await this.#addColumn(table, fieldName);
+        applied = true;
+      }
+    }
+    return applied;
+  }
+  async #addColumn(table, fieldName) {
+    const zodType = table.schema.shape[fieldName];
+    const fieldMeta = getTableMeta(table).fields[fieldName] || {};
+    const colTemplate = generateColumnDDL(
+      fieldName,
+      zodType,
+      fieldMeta,
+      DIALECT
+    );
+    const colSQL = renderDDL(colTemplate[0], colTemplate.slice(1), DIALECT);
+    await this.#pool.execute(
+      `ALTER TABLE ${quoteIdent2(table.name)} ADD COLUMN ${colSQL}`,
+      []
+    );
+  }
+  async #ensureMissingIndexes(table) {
+    const existingIndexes = await this.#getIndexes(table.name);
+    const existingIndexNames = new Set(existingIndexes.map((idx) => idx.name));
+    const meta = getTableMeta(table);
+    let applied = false;
+    for (const fieldName of meta.indexed) {
+      const indexName = `idx_${table.name}_${fieldName}`;
+      if (!existingIndexNames.has(indexName)) {
+        await this.#createIndex(table.name, [fieldName], false);
+        applied = true;
+      }
+    }
+    for (const indexCols of table.indexes) {
+      const indexName = `idx_${table.name}_${indexCols.join("_")}`;
+      if (!existingIndexNames.has(indexName)) {
+        await this.#createIndex(table.name, indexCols, false);
+        applied = true;
+      }
+    }
+    return applied;
+  }
+  async #createIndex(tableName, columns, unique) {
+    const prefix = unique ? "uniq" : "idx";
+    const indexName = `${prefix}_${tableName}_${columns.join("_")}`;
+    const uniqueClause = unique ? "UNIQUE " : "";
+    const columnList = columns.map(quoteIdent2).join(", ");
+    const sql = `CREATE ${uniqueClause}INDEX ${quoteIdent2(indexName)} ON ${quoteIdent2(tableName)} (${columnList})`;
+    await this.#pool.execute(sql, []);
+    return indexName;
+  }
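
The generated names follow fixed patterns, and #ensureMissingIndexes above decides whether anything is missing purely by looking those names up. For a hypothetical `posts` table:

// index on "authorId"                          -> idx_posts_authorId
// composite index on ["authorId", "createdAt"] -> idx_posts_authorId_createdAt
// unique index on "slug"                       -> uniq_posts_slug
// foreign key on "authorId" (added by #ensureForeignKeys below) -> fk_posts_authorId
// An index renamed by hand would not be found under the expected name, so the
// ensure pass would create a second one with that name.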
+  async #checkMissingConstraints(table) {
+    const existingConstraints = await this.#getConstraints(table.name);
+    const meta = getTableMeta(table);
+    for (const fieldName of Object.keys(meta.fields)) {
+      const fieldMeta = meta.fields[fieldName];
+      if (fieldMeta.unique) {
+        const hasConstraint = existingConstraints.some(
+          (c) => c.type === "unique" && c.columns.length === 1 && c.columns[0] === fieldName
+        );
+        if (!hasConstraint) {
+          throw new SchemaDriftError(
+            `Table "${table.name}" is missing UNIQUE constraint on column "${fieldName}"`,
+            {
+              table: table.name,
+              drift: `missing unique:${fieldName}`,
+              suggestion: `Run ensureConstraints() to apply constraints`
+            }
+          );
+        }
+      }
+    }
+    for (const ref of meta.references) {
+      const hasFK = existingConstraints.some(
+        (c) => c.type === "foreign_key" && c.columns.length === 1 && c.columns[0] === ref.fieldName && c.referencedTable === ref.table.name && c.referencedColumns?.[0] === ref.referencedField
+      );
+      if (!hasFK) {
+        throw new SchemaDriftError(
+          `Table "${table.name}" is missing FOREIGN KEY constraint on column "${ref.fieldName}"`,
+          {
+            table: table.name,
+            drift: `missing foreign_key:${ref.fieldName}->${ref.table.name}.${ref.referencedField}`,
+            suggestion: `Run ensureConstraints() to apply constraints`
+          }
+        );
+      }
+    }
+  }
+  async #ensureUniqueConstraints(table, existingConstraints) {
+    const meta = getTableMeta(table);
+    let applied = false;
+    for (const fieldName of Object.keys(meta.fields)) {
+      const fieldMeta = meta.fields[fieldName];
+      if (fieldMeta.unique) {
+        const hasConstraint = existingConstraints.some(
+          (c) => c.type === "unique" && c.columns.length === 1 && c.columns[0] === fieldName
+        );
+        if (!hasConstraint) {
+          await this.#preflightUnique(table.name, [fieldName]);
+          await this.#createIndex(table.name, [fieldName], true);
+          applied = true;
+        }
+      }
+    }
+    return applied;
+  }
+  async #ensureForeignKeys(table, existingConstraints) {
+    const meta = getTableMeta(table);
+    let applied = false;
+    for (const ref of meta.references) {
+      const hasFK = existingConstraints.some(
+        (c) => c.type === "foreign_key" && c.columns.length === 1 && c.columns[0] === ref.fieldName && c.referencedTable === ref.table.name && c.referencedColumns?.[0] === ref.referencedField
+      );
+      if (!hasFK) {
+        await this.#preflightForeignKey(
+          table.name,
+          ref.fieldName,
+          ref.table.name,
+          ref.referencedField
+        );
+        const constraintName = `fk_${table.name}_${ref.fieldName}`;
+        const onDelete = ref.onDelete ? ` ON DELETE ${ref.onDelete.toUpperCase()}` : "";
+        await this.#pool.execute(
+          `ALTER TABLE ${quoteIdent2(table.name)} ADD CONSTRAINT ${quoteIdent2(constraintName)} FOREIGN KEY (${quoteIdent2(ref.fieldName)}) REFERENCES ${quoteIdent2(ref.table.name)} (${quoteIdent2(ref.referencedField)})${onDelete}`,
+          []
+        );
+        applied = true;
+      }
+    }
+    return applied;
+  }
+  async #preflightUnique(tableName, columns) {
+    const columnList = columns.map(quoteIdent2).join(", ");
+    const [rows] = await this.#pool.execute(
+      `SELECT COUNT(*) as count FROM ${quoteIdent2(tableName)} GROUP BY ${columnList} HAVING COUNT(*) > 1`,
+      []
+    );
+    const violationCount = rows.length;
+    if (violationCount > 0) {
+      const diagQuery = `SELECT ${columnList}, COUNT(*) FROM ${quoteIdent2(tableName)} GROUP BY ${columnList} HAVING COUNT(*) > 1`;
+      throw new ConstraintPreflightError(
+        `Cannot add UNIQUE constraint on "${tableName}"(${columns.join(", ")}): duplicate values exist`,
+        {
+          table: tableName,
+          constraint: `unique:${columns.join(",")}`,
+          violationCount,
+          query: diagQuery
+        }
+      );
+    }
+  }
+  async #preflightForeignKey(tableName, column, refTable, refColumn) {
+    const [rows] = await this.#pool.execute(
+      `SELECT COUNT(*) as count FROM ${quoteIdent2(tableName)} t WHERE t.${quoteIdent2(column)} IS NOT NULL AND NOT EXISTS (SELECT 1 FROM ${quoteIdent2(refTable)} r WHERE r.${quoteIdent2(refColumn)} = t.${quoteIdent2(column)})`,
+      []
+    );
+    const violationCount = parseInt(String(rows[0]?.count ?? "0"), 10);
+    if (violationCount > 0) {
+      const diagQuery = `SELECT t.${quoteIdent2(column)} FROM ${quoteIdent2(tableName)} t WHERE t.${quoteIdent2(column)} IS NOT NULL AND NOT EXISTS (SELECT 1 FROM ${quoteIdent2(refTable)} r WHERE r.${quoteIdent2(refColumn)} = t.${quoteIdent2(column)})`;
+      throw new ConstraintPreflightError(
+        `Cannot add FOREIGN KEY constraint on "${tableName}"(${column}): ${violationCount} orphaned rows exist`,
+        {
+          table: tableName,
+          constraint: `foreign_key:${column}->${refTable}.${refColumn}`,
+          violationCount,
+          query: diagQuery
+        }
+      );
+    }
+  }
+};
+export {
+  MySQLDriver as default
+};
package/src/postgres.d.ts
ADDED
@@ -0,0 +1,62 @@
+/**
+ * postgres.js adapter for @b9g/zen
+ *
+ * Provides a Driver implementation for postgres.js.
+ * Uses connection pooling - call close() when done to end all connections.
+ *
+ * Requires: postgres
+ */
+import type { Driver, Table, EnsureResult } from "./zen.js";
+/**
+ * Options for the postgres adapter.
+ */
+export interface PostgresOptions {
+  /** Maximum number of connections in the pool (default: 10) */
+  max?: number;
+  /** Idle timeout in seconds before closing connections (default: 30) */
+  idleTimeout?: number;
+  /** Connection timeout in seconds (default: 30) */
+  connectTimeout?: number;
+}
+/**
+ * PostgreSQL driver using postgres.js.
+ *
+ * @example
+ * import PostgresDriver from "@b9g/zen/postgres";
+ * import {Database} from "@b9g/zen";
+ *
+ * const driver = new PostgresDriver("postgresql://localhost/mydb");
+ * const db = new Database(driver);
+ *
+ * db.addEventListener("upgradeneeded", (e) => {
+ *   e.waitUntil(runMigrations(e));
+ * });
+ *
+ * await db.open(1);
+ *
+ * // When done:
+ * await driver.close();
+ */
+export default class PostgresDriver implements Driver {
+  #private;
+  readonly supportsReturning = true;
+  constructor(url: string, options?: PostgresOptions);
+  all<T>(strings: TemplateStringsArray, values: unknown[]): Promise<T[]>;
+  get<T>(strings: TemplateStringsArray, values: unknown[]): Promise<T | null>;
+  run(strings: TemplateStringsArray, values: unknown[]): Promise<number>;
+  val<T>(strings: TemplateStringsArray, values: unknown[]): Promise<T | null>;
+  close(): Promise<void>;
+  transaction<T>(fn: (txDriver: Driver) => Promise<T>): Promise<T>;
+  withMigrationLock<T>(fn: () => Promise<T>): Promise<T>;
+  /**
+   * Ensure table exists with the specified structure.
+   * Creates table if missing, adds missing columns/indexes.
+   * Throws SchemaDriftError if constraints are missing.
+   */
+  ensureTable<T extends Table<any>>(table: T): Promise<EnsureResult>;
+  /**
+   * Ensure constraints exist on the table.
+   * Applies unique and foreign key constraints with preflight checks.
+   */
+  ensureConstraints<T extends Table<any>>(table: T): Promise<EnsureResult>;
+}
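
The Postgres adapter exposes the same Driver surface as the MySQL one, but with supportsReturning = true and pool timeouts expressed in seconds rather than milliseconds. A construction sketch (connection string and option values are placeholders):

import PostgresDriver from "@b9g/zen/postgres";

const driver = new PostgresDriver("postgresql://localhost/mydb", {
  max: 10,            // pool size (default 10)
  idleTimeout: 30,    // seconds (default 30)
  connectTimeout: 30, // seconds (default 30)
});

// Same tagged-template contract as the MySQL adapter above.
const val = (strings: TemplateStringsArray, ...values: unknown[]) =>
  driver.val<string>(strings, values);
const version = await val`SELECT version()`;
await driver.close();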