nexusql 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +147 -0
- package/bin/nexusql.js +2 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +999 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +256 -0
- package/dist/index.js +909 -0
- package/dist/index.js.map +1 -0
- package/package.json +66 -0
package/dist/index.js
ADDED
@@ -0,0 +1,909 @@
// src/lib/database.ts
import pg from "pg";

// src/lib/db-url.ts
function parseAndEncodeDbUrl(rawUrl) {
  const match = rawUrl.match(
    /^(postgres(?:ql)?):\/\/([^:]+):(.+)@([^:]+):(\d+)\/([^?]+)(\?.*)?$/
  );
  if (!match) {
    throw new Error(
      "Invalid DATABASE_URL format. Expected: postgres://user:password@host:port/database"
    );
  }
  const [, protocol, user, password, host, port, database, params = ""] = match;
  const encodedPassword = encodeURIComponent(password);
  return {
    url: `${protocol}://${user}:${encodedPassword}@${host}:${port}/${database}${params}`,
    protocol,
    user,
    password: encodedPassword,
    host,
    port,
    database,
    params
  };
}
function buildDbUrl(parts, database) {
  return `${parts.protocol}://${parts.user}:${parts.password}@${parts.host}:${parts.port}/${database}${parts.params}`;
}

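Reviewer note: parseAndEncodeDbUrl percent-encodes only the password segment, and the greedy `(.+)@` group lets the raw password itself contain "@". A minimal usage sketch (the URL is hypothetical; the import assumes the package-root re-exports shown at the bottom of this file):

import { parseAndEncodeDbUrl } from "nexusql";

// Raw password "p@ss w0rd" survives: `.+` matches up to the last "@" before host:port.
const parts = parseAndEncodeDbUrl("postgres://app:p@ss w0rd@localhost:5432/mydb");
parts.password; // "p%40ss%20w0rd"
parts.url;      // "postgres://app:p%40ss%20w0rd@localhost:5432/mydb"
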
// src/lib/database.ts
var { Client } = pg;
var Database = class _Database {
  connectionString;
  dbParts;
  constructor(connectionString) {
    this.dbParts = parseAndEncodeDbUrl(connectionString);
    this.connectionString = this.dbParts.url;
  }
  /**
   * Execute a query and return results.
   */
  async query(sql) {
    const client = new Client({ connectionString: this.connectionString });
    try {
      await client.connect();
      const result = await client.query(sql);
      return {
        rows: result.rows,
        rowCount: result.rowCount ?? 0
      };
    } finally {
      await client.end();
    }
  }
  /**
   * Execute a parameterized query and return results.
   */
  async queryParams(sql, params) {
    const client = new Client({ connectionString: this.connectionString });
    try {
      await client.connect();
      const result = await client.query(sql, params);
      return {
        rows: result.rows,
        rowCount: result.rowCount ?? 0
      };
    } finally {
      await client.end();
    }
  }
  /**
   * Execute a parameterized query without returning results.
   */
  async execParams(sql, params) {
    const client = new Client({ connectionString: this.connectionString });
    try {
      await client.connect();
      await client.query(sql, params);
    } finally {
      await client.end();
    }
  }
  /**
   * Execute a query without returning results (for DDL/DML).
   */
  async exec(sql) {
    const client = new Client({ connectionString: this.connectionString });
    try {
      await client.connect();
      await client.query(sql);
    } finally {
      await client.end();
    }
  }
  /**
   * Execute operations within a transaction.
   * Automatically commits on success, rolls back on error.
   */
  async withTransaction(fn) {
    const client = new Client({ connectionString: this.connectionString });
    try {
      await client.connect();
      await client.query("BEGIN");
      const result = await fn(client);
      await client.query("COMMIT");
      return result;
    } catch (error) {
      await client.query("ROLLBACK");
      throw error;
    } finally {
      await client.end();
    }
  }
  /**
   * Create a new database.
   */
  async createDatabase(name) {
    await this.exec(`CREATE DATABASE "${name}";`);
  }
  /**
   * Drop a database if it exists.
   */
  async dropDatabase(name) {
    await this.exec(`DROP DATABASE IF EXISTS "${name}";`);
  }
  /**
   * Create a Database instance connected to a different database.
   */
  withDatabase(name) {
    const newUrl = buildDbUrl(this.dbParts, name);
    return new _Database(newUrl);
  }
  /**
   * Get connection URL for external tools (like migra).
   */
  getConnectionUrl() {
    return this.connectionString;
  }
  /**
   * Get parsed database URL parts.
   */
  getParts() {
    return this.dbParts;
  }
  /**
   * Install PostgreSQL extensions.
   */
  async installExtensions(extensions) {
    for (const ext of extensions) {
      try {
        await this.exec(`CREATE EXTENSION IF NOT EXISTS "${ext}";`);
      } catch {
      }
    }
  }
  /**
   * Get column information for a table.
   */
  async getTableColumns(tableName) {
    const result = await this.query(`
      SELECT column_name, ordinal_position
      FROM information_schema.columns
      WHERE table_name = '${tableName}'
      ORDER BY ordinal_position;
    `);
    return result.rows;
  }
  /**
   * List all tables in the public schema.
   */
  async listTables() {
    const result = await this.query(`
      SELECT tablename FROM pg_tables WHERE schemaname = 'public';
    `);
    return result.rows.map((row) => row.tablename);
  }
};

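Reviewer note: every Database method opens a fresh pg.Client and closes it in a finally block, so there is no pooling and each call pays a full connection handshake. A usage sketch (connection string, table, and values are hypothetical):

import { Database } from "nexusql";

const db = new Database("postgres://app:secret@localhost:5432/appdb");
// Parameterized read; one short-lived connection per call.
const { rows, rowCount } = await db.queryParams(
  "SELECT * FROM users WHERE id = $1;",
  [42]
);
// BEGIN/COMMIT on success, ROLLBACK when the callback throws.
await db.withTransaction(async (client) => {
  await client.query("UPDATE users SET active = true WHERE id = $1;", [42]);
});
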
// src/lib/migrations.ts
import { readdirSync, statSync, readFileSync, writeFileSync } from "fs";
import { join } from "path";

// src/lib/reverse.ts
function generateReverseSql(upSql) {
  if (!upSql) return "";
  const downStatements = [];
  const lines = upSql.split(";").map((l) => l.trim()).filter((l) => l);
  for (const line of lines) {
    const createTableMatch = line.match(
      /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?([^\s(]+)/i
    );
    if (createTableMatch) {
      const tableName = createTableMatch[1];
      downStatements.push(`DROP TABLE ${tableName};`);
      continue;
    }
    const dropTableMatch = line.match(
      /DROP\s+TABLE\s+(?:IF\s+EXISTS\s+)?([^\s;]+)/i
    );
    if (dropTableMatch) {
      const tableName = dropTableMatch[1];
      downStatements.push(`-- Manual: Recreate table ${tableName}`);
      continue;
    }
    const addColumnMatch = line.match(
      /ALTER\s+TABLE\s+([^\s]+)\s+ADD\s+(?:COLUMN\s+)?([^\s;]+)/i
    );
    if (addColumnMatch) {
      const [, tableName, colName] = addColumnMatch;
      downStatements.push(`ALTER TABLE ${tableName} DROP COLUMN ${colName};`);
      continue;
    }
    const dropColumnMatch = line.match(
      /ALTER\s+TABLE\s+([^\s]+)\s+DROP\s+(?:COLUMN\s+)?([^\s;]+)/i
    );
    if (dropColumnMatch) {
      const [, tableName, colName] = dropColumnMatch;
      downStatements.push(
        `-- Manual: Add column ${colName} back to table ${tableName}`
      );
      continue;
    }
    const createIndexMatch = line.match(
      /CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?:IF\s+NOT\s+EXISTS\s+)?([^\s]+)\s+ON/i
    );
    if (createIndexMatch) {
      const indexName = createIndexMatch[1];
      downStatements.push(`DROP INDEX ${indexName};`);
      continue;
    }
    const dropIndexMatch = line.match(
      /DROP\s+INDEX\s+(?:IF\s+EXISTS\s+)?([^\s;]+)/i
    );
    if (dropIndexMatch) {
      const indexName = dropIndexMatch[1];
      downStatements.push(`-- Manual: Recreate index ${indexName}`);
      continue;
    }
    downStatements.push(`-- Manual: Revert ${line.substring(0, 50)}...`);
  }
  return downStatements.reverse().join("\n");
}

// src/lib/migrations.ts
function listMigrations(migrationsDir) {
  try {
    const files = readdirSync(migrationsDir);
    return files.filter((f) => f.endsWith(".sql")).map((f) => {
      const path = join(migrationsDir, f);
      const match = f.match(/^(\d+)/);
      return {
        name: f,
        path,
        version: match ? match[1] : f,
        mtime: statSync(path).mtime.getTime()
      };
    }).sort((a, b) => a.version.localeCompare(b.version));
  } catch {
    return [];
  }
}
function getMostRecentMigration(migrationsDir) {
  const migrations = listMigrations(migrationsDir);
  if (migrations.length === 0) return null;
  return migrations.reduce(
    (most, current) => current.mtime > most.mtime ? current : most
  );
}
function generateMigrationFilename(name) {
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[-:T]/g, "").slice(0, 14);
  const sanitized = name.replace(/\s+/g, "_").toLowerCase();
  return `${timestamp}_${sanitized}.sql`;
}
function createMigrationFile(migrationsDir, name, upSql = "", downSql = "") {
  const filename = generateMigrationFilename(name);
  const filepath = join(migrationsDir, filename);
  if (!downSql && upSql) {
    downSql = generateReverseSql(upSql);
  }
  const content = `-- migrate:up
${upSql}

-- migrate:down
${downSql}
`;
  writeFileSync(filepath, content, "utf-8");
  return filepath;
}
function updateMigrationFile(filepath, upSql) {
  const content = readFileSync(filepath, "utf-8");
  const updated = content.replace(
    /-- migrate:up\n/,
    `-- migrate:up
${upSql}

`
  );
  writeFileSync(filepath, updated, "utf-8");
}
function parseMigrationFile(filepath) {
  const content = readFileSync(filepath, "utf-8");
  const upMatch = content.match(
    /-- migrate:up\n([\s\S]*?)(?=-- migrate:down|$)/
  );
  const downMatch = content.match(/-- migrate:down\n([\s\S]*?)$/);
  return {
    up: upMatch ? upMatch[1].trim() : "",
    down: downMatch ? downMatch[1].trim() : ""
  };
}

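Reviewer note: migration files use dbmate-style "-- migrate:up" / "-- migrate:down" markers, and generateReverseSql derives a best-effort down section one statement at a time, emitting the inversions in reverse order; anything it cannot invert becomes a "-- Manual:" placeholder. A sketch of the round trip (directory and SQL are hypothetical; the directory must already exist):

import { createMigrationFile, parseMigrationFile } from "nexusql";

// The auto-generated down section for this up SQL is:
//   DROP INDEX users_email_idx;
//   DROP TABLE users;
const filepath = createMigrationFile(
  "./db/migrations",
  "add_users",
  "CREATE TABLE users (id serial PRIMARY KEY, email text);\nCREATE UNIQUE INDEX users_email_idx ON users (email);"
);
const { up, down } = parseMigrationFile(filepath);
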
// src/lib/runner.ts
var SCHEMA_MIGRATIONS_TABLE = "schema_migrations";
var MigrationRunner = class {
  db;
  migrationsDir;
  constructor(db, migrationsDir) {
    this.db = db;
    this.migrationsDir = migrationsDir;
  }
  /**
   * Ensure schema_migrations table exists.
   */
  async ensureTable() {
    await this.db.exec(`
      CREATE TABLE IF NOT EXISTS ${SCHEMA_MIGRATIONS_TABLE} (
        version VARCHAR(255) PRIMARY KEY
      );
    `);
  }
  /**
   * Get list of applied migration versions.
   */
  async getAppliedVersions() {
    await this.ensureTable();
    const result = await this.db.query(
      `SELECT version FROM ${SCHEMA_MIGRATIONS_TABLE};`
    );
    return new Set(result.rows.map((row) => row.version));
  }
  /**
   * Mark a migration as applied.
   */
  async markApplied(version, client) {
    await this.ensureTable();
    const sql = `
      INSERT INTO ${SCHEMA_MIGRATIONS_TABLE} (version)
      VALUES ($1)
      ON CONFLICT DO NOTHING;
    `;
    if (client) {
      await client.query(sql, [version]);
    } else {
      await this.db.execParams(sql, [version]);
    }
  }
  /**
   * Mark a migration as not applied (for rollback).
   */
  async markUnapplied(version, client) {
    const sql = `
      DELETE FROM ${SCHEMA_MIGRATIONS_TABLE}
      WHERE version = $1;
    `;
    if (client) {
      await client.query(sql, [version]);
    } else {
      await this.db.execParams(sql, [version]);
    }
  }
  /**
   * Get pending migrations (not yet applied).
   */
  async getPendingMigrations() {
    const all = listMigrations(this.migrationsDir);
    const applied = await this.getAppliedVersions();
    return all.filter((m) => !applied.has(m.version));
  }
  /**
   * Apply a single migration.
   */
  async applyMigration(migration) {
    const { up: up2 } = parseMigrationFile(migration.path);
    await this.db.withTransaction(async (client) => {
      if (up2) {
        await client.query(up2);
      }
      await this.markApplied(migration.version, client);
    });
  }
  /**
   * Roll back a single migration.
   */
  async rollbackMigration(migration) {
    const { down } = parseMigrationFile(migration.path);
    await this.db.withTransaction(async (client) => {
      if (down) {
        await client.query(down);
      }
      await this.markUnapplied(migration.version, client);
    });
  }
  /**
   * Apply all pending migrations.
   */
  async migrateUp() {
    const pending = await this.getPendingMigrations();
    for (const migration of pending) {
      await this.applyMigration(migration);
    }
    return pending;
  }
  /**
   * Get migration status.
   */
  async status() {
    const all = listMigrations(this.migrationsDir);
    const appliedVersions = await this.getAppliedVersions();
    return {
      applied: all.filter((m) => appliedVersions.has(m.version)),
      pending: all.filter((m) => !appliedVersions.has(m.version))
    };
  }
};

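Reviewer note: MigrationRunner records state in a one-column schema_migrations table keyed by each filename's leading timestamp, and applyMigration runs the up script plus its bookkeeping row inside a single transaction. A minimal sketch (the migrations directory is hypothetical):

import { Database, MigrationRunner } from "nexusql";

const db = new Database(process.env.DATABASE_URL);
const runner = new MigrationRunner(db, "./db/migrations");

const { applied, pending } = await runner.status();
console.log(`${applied.length} applied, ${pending.length} pending`);
// Applies every pending migration in version order, each in its own transaction.
await runner.migrateUp();
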
// src/lib/dbml.ts
import { Parser, ModelExporter } from "@dbml/core";
function dbmlToSql(dbmlContent) {
  const database = new Parser().parse(dbmlContent, "dbml");
  const sql = ModelExporter.export(database, "postgres");
  return { sql };
}

// src/lib/migra.ts
import { execFileSync } from "child_process";
function runMigra(fromUrl, toUrl, options = {}) {
  const args = [fromUrl, toUrl];
  if (options.unsafe) {
    args.push("--unsafe");
  }
  try {
    const output = execFileSync("migra", args, {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"]
    });
    return { sql: output || "", hasChanges: false };
  } catch (error) {
    const err = error;
    if (err.status === 2 && err.stdout) {
      return { sql: err.stdout, hasChanges: true };
    }
    if (err.status === 1) {
      throw new Error(`migra error: ${err.stderr || "Unknown error"}`);
    }
    return { sql: err.stdout || "", hasChanges: Boolean(err.stdout) };
  }
}
function filterSchemaMigrations(sql) {
  return sql.split("\n").filter((line) => !line.includes("schema_migrations")).join("\n").trim();
}

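Reviewer note: dbmlToSql is a thin wrapper over @dbml/core, while runMigra shells out to a migra binary that must be on PATH (this code treats exit status 2 with stdout as "changes found" and status 1 as an error). A sketch of the DBML half (the schema is hypothetical):

import { dbmlToSql } from "nexusql";

const { sql } = dbmlToSql(`
Table users {
  id int [pk]
  email varchar
}
`);
// sql now holds CREATE TABLE statements in the PostgreSQL dialect.
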
// src/lib/config.ts
import { existsSync } from "fs";
import { join as join2 } from "path";
import { pathToFileURL } from "url";
import { config as loadDotenv } from "dotenv";
var DEFAULT_CONFIG = {
  schema: "./schema.dbml",
  migrations: "./db/migrations",
  extensions: ["uuid-ossp"]
};
async function loadConfig(cwd = process.cwd()) {
  loadDotenv();
  const configPath = join2(cwd, "nexusql.config.js");
  let userConfig = {};
  if (existsSync(configPath)) {
    try {
      const configUrl = pathToFileURL(configPath).href;
      const module = await import(configUrl);
      userConfig = module.default || module;
    } catch {
    }
  }
  return {
    ...DEFAULT_CONFIG,
    ...userConfig,
    databaseUrl: process.env.DATABASE_URL || userConfig.databaseUrl
  };
}
function getDatabaseUrl(config) {
  const url = config.databaseUrl || process.env.DATABASE_URL;
  if (!url) {
    throw new Error("DATABASE_URL not set. Set it in .env or nexusql.config.js");
  }
  return url;
}
function generateConfigTemplate() {
  return `/** @type {import('nexusql').NexusqlConfig} */
export default {
  // Path to your DBML schema file
  schema: './schema.dbml',

  // Directory for migration files
  migrations: './db/migrations',

  // PostgreSQL extensions to install in temp database
  extensions: ['uuid-ossp'],
};
`;
}

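Reviewer note: loadConfig merges three layers: built-in defaults, an optional nexusql.config.js imported as an ES module, and DATABASE_URL from the environment (after dotenv runs), with the environment variable taking precedence over a databaseUrl set in the file. A hypothetical nexusql.config.js mirroring the generated template:

/** @type {import('nexusql').NexusqlConfig} */
export default {
  schema: "./db/schema.dbml",          // overrides the ./schema.dbml default
  migrations: "./db/migrations",
  extensions: ["uuid-ossp", "citext"], // citext is a hypothetical addition
  // Used only when DATABASE_URL is not set in the environment or .env.
  databaseUrl: "postgres://app:secret@localhost:5432/appdb",
};
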
// src/commands/gen.ts
import { readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "fs";
import { resolve } from "path";
import ora from "ora";
import chalk from "chalk";
async function gen(options = {}) {
  const config = await loadConfig();
  const databaseUrl = getDatabaseUrl(config);
  const db = new Database(databaseUrl);
  const spinner = ora();
  spinner.start("Reading DBML schema...");
  const schemaPath = resolve(config.schema);
  let dbmlContent;
  try {
    dbmlContent = readFileSync2(schemaPath, "utf-8");
    spinner.succeed("Read DBML schema");
  } catch {
    spinner.fail(`Failed to read schema: ${schemaPath}`);
    throw new Error(`Schema file not found: ${schemaPath}`);
  }
  spinner.start("Converting DBML to SQL...");
  const { sql: targetSql } = dbmlToSql(dbmlContent);
  spinner.succeed("Converted DBML to SQL");
  const tempDbName = `nexusql_temp_${Date.now()}`;
  spinner.start(`Creating temp database: ${tempDbName}`);
  try {
    await db.createDatabase(tempDbName);
    spinner.succeed(`Created temp database: ${tempDbName}`);
  } catch (error) {
    spinner.fail("Failed to create temp database");
    throw error;
  }
  const tempDb = db.withDatabase(tempDbName);
  let migrationSql = "";
  try {
    spinner.start("Installing extensions...");
    await tempDb.installExtensions(config.extensions);
    spinner.succeed("Installed extensions");
    spinner.start("Loading schema into temp database...");
    await tempDb.exec(targetSql);
    spinner.succeed("Loaded schema into temp database");
    if (options.verbose) {
      const tables = await tempDb.listTables();
      console.log(chalk.dim(`
Tables in temp database: ${tables.join(", ")}`));
    }
    spinner.start("Generating migration diff...");
    const migraResult = runMigra(
      db.getConnectionUrl(),
      tempDb.getConnectionUrl(),
      { unsafe: true }
    );
    migrationSql = filterSchemaMigrations(migraResult.sql);
    if (migraResult.hasChanges) {
      spinner.succeed("Generated migration diff");
    } else {
      spinner.info("No schema changes detected");
    }
    spinner.start("Checking comment changes...");
    const commentSql = await getCommentChanges(db, tempDb);
    if (commentSql) {
      migrationSql = migrationSql ? `${migrationSql}

-- Comment changes
${commentSql}` : `-- Comment changes
${commentSql}`;
    }
    spinner.succeed("Checked comment changes");
  } finally {
    spinner.start("Cleaning up temp database...");
    try {
      await db.dropDatabase(tempDbName);
      spinner.succeed("Cleaned up temp database");
    } catch {
      spinner.warn("Failed to cleanup temp database");
    }
  }
  const finalSql = migrationSql || "-- No changes detected";
  if (options.output) {
    writeFileSync2(options.output, finalSql, "utf-8");
    console.log(chalk.green(`
Migration SQL written to: ${options.output}`));
  } else {
    console.log(chalk.cyan("\n=== Migration SQL ===\n"));
    console.log(finalSql);
  }
  return finalSql;
}
async function getCommentChanges(currentDb, targetDb) {
  const commentQuery = `
    SELECT
      format('COMMENT ON COLUMN %I.%I IS %L;',
        c.table_name,
        c.column_name,
        pgd.description
      ) as comment_sql,
      c.table_name,
      c.column_name,
      pgd.description
    FROM information_schema.columns c
    JOIN pg_catalog.pg_class pc ON pc.relname = c.table_name
    JOIN pg_catalog.pg_namespace pn ON pn.oid = pc.relnamespace AND pn.nspname = c.table_schema
    LEFT JOIN pg_catalog.pg_description pgd ON pgd.objoid = pc.oid AND pgd.objsubid = c.ordinal_position
    WHERE c.table_schema = 'public'
      AND c.table_name != 'schema_migrations'
    ORDER BY c.table_name, c.ordinal_position;
  `;
  const [currentComments, targetComments] = await Promise.all([
    currentDb.query(commentQuery),
    targetDb.query(commentQuery)
  ]);
  const currentMap = /* @__PURE__ */ new Map();
  const targetMap = /* @__PURE__ */ new Map();
  for (const row of currentComments.rows) {
    const key = `${row.table_name}.${row.column_name}`;
    currentMap.set(key, row.description);
  }
  for (const row of targetComments.rows) {
    const key = `${row.table_name}.${row.column_name}`;
    targetMap.set(key, {
      sql: row.comment_sql,
      description: row.description
    });
  }
  const commentDiffs = [];
  for (const [key, target] of targetMap) {
    const current = currentMap.get(key);
    if (current !== target.description && target.sql) {
      commentDiffs.push(target.sql);
    }
  }
  return commentDiffs.join("\n");
}

// src/commands/migrate.ts
import { mkdirSync, existsSync as existsSync2 } from "fs";
import chalk2 from "chalk";
import ora2 from "ora";

// src/utils/prompts.ts
import * as readline from "readline";
var rl = null;
function getReadline() {
  if (!rl) {
    rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout
    });
  }
  return rl;
}
function closePrompts() {
  if (rl) {
    rl.close();
    rl = null;
  }
}
function question(query) {
  return new Promise((resolve2) => {
    getReadline().question(query, (answer) => {
      resolve2(answer);
    });
  });
}
async function confirm(query) {
  const answer = await question(`${query} (yes/no): `);
  return answer.toLowerCase() === "yes" || answer.toLowerCase() === "y";
}

// src/commands/migrate.ts
async function migrate(options = {}) {
  const config = await loadConfig();
  const databaseUrl = getDatabaseUrl(config);
  const db = new Database(databaseUrl);
  const runner = new MigrationRunner(db, config.migrations);
  console.log(chalk2.bold("\nInteractive Database Migration\n"));
  console.log("This will:");
  console.log(" 1. Generate migration diff from DBML");
  console.log(" 2. Create a new migration file");
  console.log(" 3. Optionally apply the migration\n");
  try {
    console.log(chalk2.cyan("Step 1: Generating migration diff...\n"));
    const migrationSql = await gen({ verbose: false });
    const noChanges = !migrationSql || migrationSql === "-- No changes detected";
    if (noChanges) {
      console.log(chalk2.yellow("\nNo schema changes detected.\n"));
      if (!options.yes) {
        const proceed = await confirm(
          "Do you still want to create an empty migration?"
        );
        if (!proceed) {
          console.log(chalk2.red("\nMigration cancelled."));
          return;
        }
      }
    }
    console.log(chalk2.cyan("\n--- Step 2: Create Migration File ---\n"));
    let migrationName = options.name;
    if (!migrationName) {
      migrationName = await question(
        'Enter migration name (e.g., "add_users_table"): '
      );
    }
    if (!migrationName || migrationName.trim() === "") {
      console.log(chalk2.red("\nMigration name cannot be empty. Exiting..."));
      return;
    }
    const sanitizedName = migrationName.trim().replace(/\s+/g, "_");
    console.log(chalk2.green(`
Using migration name: ${sanitizedName}`));
    if (!options.yes) {
      const proceed = await confirm("\nCreate this migration?");
      if (!proceed) {
        console.log(chalk2.red("\nMigration cancelled."));
        return;
      }
    }
    if (!existsSync2(config.migrations)) {
      mkdirSync(config.migrations, { recursive: true });
    }
    const spinner = ora2("Creating migration file...").start();
    const filepath = createMigrationFile(
      config.migrations,
      sanitizedName,
      noChanges ? "" : migrationSql
    );
    spinner.succeed(`Created migration file: ${filepath}`);
    console.log(chalk2.cyan("\n--- Step 3: Apply Migration ---\n"));
    let shouldApply = options.apply;
    if (shouldApply === void 0 && !options.yes) {
      shouldApply = await confirm("Apply this migration now?");
    }
    if (shouldApply) {
      const applySpinner = ora2("Applying migration...").start();
      try {
        const applied = await runner.migrateUp();
        if (applied.length > 0) {
          applySpinner.succeed(
            `Applied ${applied.length} migration(s) successfully`
          );
        } else {
          applySpinner.info("No pending migrations to apply");
        }
      } catch (error) {
        applySpinner.fail("Failed to apply migration");
        throw error;
      }
    } else {
      console.log(chalk2.dim("\nMigration created but not applied."));
      console.log(chalk2.dim(`Run "nexusql up" to apply pending migrations.`));
    }
    console.log(chalk2.green(`
Migration file: ${filepath}
`));
  } finally {
    closePrompts();
  }
}
async function up(options = {}) {
  const config = await loadConfig();
  const databaseUrl = getDatabaseUrl(config);
  const db = new Database(databaseUrl);
  const runner = new MigrationRunner(db, config.migrations);
  const spinner = ora2("Checking migrations...").start();
  try {
    const pending = await runner.getPendingMigrations();
    if (pending.length === 0) {
      spinner.info("No pending migrations");
      return;
    }
    if (options.dryRun) {
      spinner.info(`found ${pending.length} pending migration(s):`);
      for (const m of pending) {
        console.log(chalk2.cyan(` \u25CB ${m.name}`));
      }
      return;
    }
    spinner.text = "Applying migrations...";
    for (const migration of pending) {
      await runner.applyMigration(migration);
      console.log(chalk2.green(` \u2713 ${migration.name}`));
    }
    spinner.succeed(`Applied ${pending.length} migration(s)`);
  } catch (error) {
    spinner.fail("Migration failed");
    throw error;
  }
}
async function status() {
  const config = await loadConfig();
  const databaseUrl = getDatabaseUrl(config);
  const db = new Database(databaseUrl);
  const runner = new MigrationRunner(db, config.migrations);
  const spinner = ora2("Checking migration status...").start();
  const { applied, pending } = await runner.status();
  spinner.stop();
  console.log(chalk2.bold("\nMigration Status\n"));
  if (applied.length > 0) {
    console.log(chalk2.green("Applied:"));
    for (const m of applied) {
      console.log(chalk2.green(` \u2713 ${m.name}`));
    }
  }
  if (pending.length > 0) {
    console.log(chalk2.yellow("\nPending:"));
    for (const m of pending) {
      console.log(chalk2.yellow(` \u25CB ${m.name}`));
    }
  }
  if (applied.length === 0 && pending.length === 0) {
    console.log(chalk2.dim("No migrations found."));
  }
  console.log();
}

// src/commands/mark-applied.ts
import chalk3 from "chalk";
import ora3 from "ora";
async function markApplied(version) {
  if (!version) {
    console.error(chalk3.red("Error: Migration version is required"));
    console.log(chalk3.dim("\nUsage: nexusql mark-applied <version>"));
    console.log(chalk3.dim("Example: nexusql mark-applied 20251209153535"));
    process.exit(1);
  }
  const config = await loadConfig();
  const databaseUrl = getDatabaseUrl(config);
  const db = new Database(databaseUrl);
  const runner = new MigrationRunner(db, config.migrations);
  const spinner = ora3(`Marking migration ${version} as applied...`).start();
  try {
    await runner.markApplied(version);
    spinner.succeed(`Migration ${version} marked as applied`);
  } catch (error) {
    spinner.fail("Failed to mark migration as applied");
    throw error;
  }
}

// src/commands/init.ts
import { writeFileSync as writeFileSync3, existsSync as existsSync3, mkdirSync as mkdirSync2 } from "fs";
import { join as join3 } from "path";
import chalk4 from "chalk";
async function init(options = {}) {
  const cwd = process.cwd();
  const configPath = join3(cwd, "nexusql.config.js");
  if (existsSync3(configPath) && !options.force) {
    console.log(chalk4.yellow("nexusql.config.js already exists."));
    console.log(chalk4.dim("Use --force to overwrite."));
    return;
  }
  writeFileSync3(configPath, generateConfigTemplate(), "utf-8");
  console.log(chalk4.green("Created nexusql.config.js"));
  const migrationsDir = join3(cwd, "db", "migrations");
  if (!existsSync3(migrationsDir)) {
    mkdirSync2(migrationsDir, { recursive: true });
    console.log(chalk4.green("Created db/migrations/"));
  }
  const envExamplePath = join3(cwd, ".env.example");
  if (!existsSync3(envExamplePath)) {
    writeFileSync3(
      envExamplePath,
      "DATABASE_URL=postgres://user:password@localhost:5432/database\n",
      "utf-8"
    );
    console.log(chalk4.green("Created .env.example"));
  }
  console.log(chalk4.bold("\nNext steps:"));
  console.log(" 1. Copy .env.example to .env and set your DATABASE_URL");
  console.log(" 2. Create your schema.dbml file");
  console.log(' 3. Run "nexusql gen" to generate migration SQL');
  console.log(' 4. Run "nexusql migrate" to create and apply migrations\n');
}
export {
  Database,
  MigrationRunner,
  buildDbUrl,
  createMigrationFile,
  dbmlToSql,
  filterSchemaMigrations,
  gen,
  generateConfigTemplate,
  getDatabaseUrl,
  getMostRecentMigration,
  init,
  listMigrations,
  loadConfig,
  markApplied,
  migrate,
  parseAndEncodeDbUrl,
  parseMigrationFile,
  runMigra,
  status,
  up,
  updateMigrationFile
};
//# sourceMappingURL=index.js.map
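
Reviewer note: the command functions the CLI entry point wires up (init, gen, migrate, up, status, markApplied) are also exported for programmatic use. A sketch driving the flow from code instead of the nexusql binary (option fields are taken from the option objects above):

import { init, gen, up } from "nexusql";

await init({ force: false });         // scaffold config, db/migrations/, .env.example
await gen({ output: "pending.sql" }); // write the diff to a file instead of stdout
await up({ dryRun: true });           // list pending migrations without applying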