zenstack-kit 0.1.4 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -6
- package/dist/cli/app.d.ts.map +1 -1
- package/dist/cli/app.js +7 -1
- package/dist/cli/commands.d.ts +2 -0
- package/dist/cli/commands.d.ts.map +1 -1
- package/dist/cli/commands.js +97 -6
- package/dist/cli/prompts.d.ts.map +1 -1
- package/dist/cli/prompts.js +1 -3
- package/dist/config/loader.d.ts +1 -1
- package/dist/config/loader.d.ts.map +1 -1
- package/dist/config/loader.js +11 -9
- package/dist/migrations/prisma/apply.d.ts +54 -0
- package/dist/migrations/prisma/apply.d.ts.map +1 -0
- package/dist/migrations/prisma/apply.js +384 -0
- package/dist/migrations/prisma/create.d.ts +63 -0
- package/dist/migrations/prisma/create.d.ts.map +1 -0
- package/dist/migrations/prisma/create.js +119 -0
- package/dist/migrations/prisma/diff.d.ts +104 -0
- package/dist/migrations/prisma/diff.d.ts.map +1 -0
- package/dist/migrations/prisma/diff.js +442 -0
- package/dist/migrations/prisma/log.d.ts +31 -0
- package/dist/migrations/prisma/log.d.ts.map +1 -0
- package/dist/migrations/prisma/log.js +101 -0
- package/dist/migrations/prisma/rename.d.ts +23 -0
- package/dist/migrations/prisma/rename.d.ts.map +1 -0
- package/dist/migrations/prisma/rename.js +57 -0
- package/dist/migrations/prisma/snapshot.d.ts +32 -0
- package/dist/migrations/prisma/snapshot.d.ts.map +1 -0
- package/dist/migrations/prisma/snapshot.js +65 -0
- package/dist/migrations/prisma.d.ts +5 -202
- package/dist/migrations/prisma.d.ts.map +1 -1
- package/dist/migrations/prisma.js +5 -1168
- package/dist/schema/pull.d.ts +2 -0
- package/dist/schema/pull.d.ts.map +1 -1
- package/dist/schema/pull.js +102 -4
- package/package.json +1 -1
|
@@ -0,0 +1,384 @@
|
|
|
1
|
+
import * as fs from "fs/promises";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import * as crypto from "crypto";
|
|
4
|
+
import { sql } from "kysely";
|
|
5
|
+
import { createKyselyAdapter } from "../../sql/kysely-adapter.js";
|
|
6
|
+
import { calculateChecksum, readMigrationLog } from "./log.js";
|
|
7
|
+
/**
 * Ensure the Prisma migrations bookkeeping table exists.
 *
 * Creates the table with dialect-appropriate column types: TEXT timestamps on
 * sqlite, TIMESTAMPTZ on postgres, DATETIME on mysql. Only the postgres
 * variant qualifies the table name with `schema`; sqlite and mysql ignore it.
 *
 * @param db Kysely instance
 * @param tableName bookkeeping table name (e.g. "_prisma_migrations")
 * @param schema postgres schema name; only used when dialect === "postgres"
 * @param dialect "sqlite" | "postgres"; any other value falls through to mysql
 */
async function ensureMigrationsTable(db, tableName, schema, dialect) {
    if (dialect === "sqlite") {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${tableName}"`)} (
        id TEXT PRIMARY KEY,
        checksum TEXT NOT NULL,
        finished_at TEXT,
        migration_name TEXT NOT NULL,
        logs TEXT,
        rolled_back_at TEXT,
        started_at TEXT NOT NULL DEFAULT (datetime('now')),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
    else if (dialect === "postgres") {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${schema}"."${tableName}"`)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at TIMESTAMPTZ,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at TIMESTAMPTZ,
        started_at TIMESTAMPTZ NOT NULL DEFAULT now(),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
    else {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`\`${tableName}\``)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at DATETIME,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at DATETIME,
        started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
}
|
|
55
|
+
/**
 * Read the rows of the bookkeeping table that represent successfully applied
 * migrations (finished and never rolled back).
 *
 * @returns Map keyed by migration_name, value is the full DB row
 */
async function getAppliedMigrations(db, tableName, schema, dialect) {
    let tableRef;
    if (dialect === "postgres" && schema) {
        tableRef = `"${schema}"."${tableName}"`;
    }
    else if (dialect === "sqlite") {
        tableRef = `"${tableName}"`;
    }
    else {
        // mysql (and postgres without an explicit schema) uses backtick quoting
        tableRef = `\`${tableName}\``;
    }
    const result = await sql `
      SELECT * FROM ${sql.raw(tableRef)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    return new Map(result.rows.map((row) => [row.migration_name, row]));
}
|
|
80
|
+
/**
 * Insert a freshly applied migration into the bookkeeping table, marking it
 * finished immediately with a single applied step.
 */
async function recordMigration(db, tableName, schema, dialect, migrationName, checksum) {
    const id = crypto.randomUUID();
    let tableRef;
    let finishedAtExpr;
    if (dialect === "postgres" && schema) {
        tableRef = `"${schema}"."${tableName}"`;
        finishedAtExpr = "now()";
    }
    else if (dialect === "sqlite") {
        tableRef = `"${tableName}"`;
        finishedAtExpr = "datetime('now')";
    }
    else {
        tableRef = `\`${tableName}\``;
        finishedAtExpr = "NOW()";
    }
    await sql `
      INSERT INTO ${sql.raw(tableRef)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, ${sql.raw(finishedAtExpr)}, 1)
    `.execute(db);
}
|
|
104
|
+
/**
 * Validate that the database's applied migrations are coherent with the migration log.
 *
 * Coherence rules:
 * 0. Every migration in the log must have a folder on disk
 * 1. Every migration applied in the DB must exist in the migration log
 * 2. Applied migrations must be a prefix of the log (no gaps)
 * 3. Checksums must match for applied migrations
 *
 * Rules 0 and 1 are structural; when either fails we return early because the
 * ordering/checksum checks are meaningless against an incomplete log.
 *
 * @param appliedMigrations Map of migration_name -> DB row
 * @param migrationLog ordered entries of { name, checksum }
 * @param migrationFolders folder names present on disk
 * @returns { isCoherent, errors } where errors carry a type + human details
 */
function validateMigrationCoherence(appliedMigrations, migrationLog, migrationFolders) {
    const errors = [];
    const loggedNames = new Set(migrationLog.map((entry) => entry.name));
    const onDisk = new Set(migrationFolders);
    // Rule 0: every logged migration must still exist on disk.
    migrationLog
        .filter((entry) => !onDisk.has(entry.name))
        .forEach((entry) => {
        errors.push({
            type: "missing_from_disk",
            migrationName: entry.name,
            details: `Migration "${entry.name}" exists in migration log but not on disk`,
        });
    });
    // Rule 1: every applied migration must be present in the log.
    for (const appliedName of appliedMigrations.keys()) {
        if (!loggedNames.has(appliedName)) {
            errors.push({
                type: "missing_from_log",
                migrationName: appliedName,
                details: `Migration "${appliedName}" exists in database but not in migration log`,
            });
        }
    }
    // Structural problems invalidate the remaining checks.
    if (errors.length > 0) {
        return { isCoherent: false, errors };
    }
    // Rules 2 & 3: walk the log in order; applied entries must form a
    // contiguous prefix, and each applied entry's checksum must match.
    let previousAppliedIndex = -1;
    migrationLog.forEach((logEntry, index) => {
        if (!appliedMigrations.has(logEntry.name)) {
            return;
        }
        // Any log entry skipped between the last applied one and this one is a gap.
        for (let skipped = previousAppliedIndex + 1; skipped < index; skipped++) {
            const missing = migrationLog[skipped];
            errors.push({
                type: "order_mismatch",
                migrationName: missing.name,
                details: `Migration "${missing.name}" is in the log but not applied, yet later migration "${logEntry.name}" is applied`,
            });
        }
        previousAppliedIndex = index;
        const dbRow = appliedMigrations.get(logEntry.name);
        if (dbRow.checksum !== logEntry.checksum) {
            errors.push({
                type: "checksum_mismatch",
                migrationName: logEntry.name,
                details: `Checksum mismatch for "${logEntry.name}": database has ${dbRow.checksum.slice(0, 8)}..., log has ${logEntry.checksum.slice(0, 8)}...`,
            });
        }
    });
    return {
        isCoherent: errors.length === 0,
        errors,
    };
}
|
|
177
|
+
/**
 * Execute raw SQL using the database driver directly.
 * This bypasses Kysely for DDL statements which don't work reliably with sql.raw().
 *
 * @param dialect "sqlite" | "postgres" | "mysql" (anything else is a no-op)
 * @param sqlContent full migration SQL; may contain multiple statements and
 *   leading `--` comment lines, as produced by migration generation
 * @param options { databasePath } for sqlite, { connectionUrl } otherwise
 * @throws rethrows driver errors; postgres rolls back its transaction first
 */
async function executeRawSql(dialect, sqlContent, options) {
    if (dialect === "sqlite") {
        const { default: Database } = await import("better-sqlite3");
        const sqliteDb = new Database(options.databasePath || ":memory:");
        try {
            // better-sqlite3's exec() handles multiple statements properly
            sqliteDb.exec(sqlContent);
        }
        finally {
            sqliteDb.close();
        }
    }
    else if (dialect === "postgres") {
        const { Pool } = await import("pg");
        const pool = new Pool({ connectionString: options.connectionUrl });
        const client = await pool.connect();
        try {
            // PostgreSQL supports transactional DDL, so wrap migration in a transaction
            await client.query("BEGIN");
            await client.query(sqlContent);
            await client.query("COMMIT");
        }
        catch (error) {
            await client.query("ROLLBACK");
            throw error;
        }
        finally {
            client.release();
            await pool.end();
        }
    }
    else if (dialect === "mysql") {
        // Use mysql2 with promise wrapper
        const mysql = await import("mysql2");
        const pool = mysql.createPool({ uri: options.connectionUrl });
        const promisePool = pool.promise();
        try {
            // MySQL needs statements executed one at a time. Split on
            // statement-terminating semicolons, then strip leading `--` comment
            // lines from each chunk. (A previous filter dropped the ENTIRE chunk
            // when it merely *started* with a comment, which silently skipped the
            // first statement of migrations beginning with `-- Migration: ...`
            // header comments.)
            const statements = sqlContent
                .split(/;(?:\s*\n|\s*$)/)
                .map((s) => s.replace(/^(?:\s*--[^\n]*\n?)+/, "").trim())
                .filter((s) => s.length > 0);
            for (const statement of statements) {
                await promisePool.query(statement);
            }
        }
        finally {
            // Reuse the wrapper created above rather than building a second
            // promise wrapper just to close the pool.
            await promisePool.end();
        }
    }
}
|
|
232
|
+
/**
 * Apply pending Prisma migrations.
 *
 * Flow:
 *  1. Ensure the bookkeeping table exists and read already-applied rows.
 *  2. List on-disk migration folders (14-digit timestamp prefix) that contain
 *     a migration.sql file.
 *  3. Validate coherence between the database, the migration log, and disk;
 *     abort with `coherenceErrors` (nothing applied) if anything disagrees.
 *  4. Apply each pending migration in sorted order: verify its checksum
 *     against the log, execute the SQL (skipped when `markApplied` is set),
 *     then record it. Stops at the first failure, reported via
 *     `result.failed`; earlier successes stay recorded.
 *
 * @param options dialect/connection settings plus `migrationsFolder`,
 *   optional `migrationsTable` (default "_prisma_migrations"),
 *   `migrationsSchema` (default "public"), and `markApplied` to record
 *   migrations without executing their SQL.
 * @returns { applied, alreadyApplied, failed?, coherenceErrors? }
 */
export async function applyPrismaMigrations(options) {
    const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
    const migrationsSchema = options.migrationsSchema ?? "public";
    const { db, destroy } = await createKyselyAdapter({
        dialect: options.dialect,
        connectionUrl: options.connectionUrl,
        databasePath: options.databasePath,
    });
    try {
        // Ensure migrations table exists
        await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
        // Get already applied migrations
        const appliedMigrations = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
        // Read migration folders (only timestamped ones, in lexicographic =
        // chronological order thanks to the fixed-width timestamp prefix)
        const entries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
        const migrationFolders = entries
            .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
            .map((e) => e.name)
            .sort();
        // Keep only folders that actually contain a migration.sql
        const migrationFoldersWithSql = [];
        for (const folderName of migrationFolders) {
            const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
            try {
                await fs.access(sqlPath);
                migrationFoldersWithSql.push(folderName);
            }
            catch {
                // Missing migration.sql; coherence check will flag if it's in the log
            }
        }
        // Read migration log and validate coherence before touching the database
        const migrationLog = await readMigrationLog(options.migrationsFolder);
        const coherence = validateMigrationCoherence(appliedMigrations, migrationLog, migrationFoldersWithSql);
        if (!coherence.isCoherent) {
            return {
                applied: [],
                alreadyApplied: [],
                coherenceErrors: coherence.errors,
            };
        }
        const result = {
            applied: [],
            alreadyApplied: [],
        };
        for (const folderName of migrationFoldersWithSql) {
            if (appliedMigrations.has(folderName)) {
                result.alreadyApplied.push(folderName);
                continue;
            }
            const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
            let sqlContent;
            try {
                sqlContent = await fs.readFile(sqlPath, "utf-8");
            }
            catch {
                continue; // Skip if no migration.sql
            }
            const checksum = calculateChecksum(sqlContent);
            // Verify checksum against migration log (migrationLog already read above);
            // a mismatch means the file was edited after generation — refuse to apply
            // it or anything after it.
            const logEntry = migrationLog.find((m) => m.name === folderName);
            if (logEntry && logEntry.checksum !== checksum) {
                result.failed = {
                    migrationName: folderName,
                    error: `Checksum mismatch for migration ${folderName}.\n` +
                        `Expected: ${logEntry.checksum}\n` +
                        `Found: ${checksum}\n` +
                        `The migration file may have been modified after generation.`,
                };
                break;
            }
            const startTime = Date.now();
            try {
                if (!options.markApplied) {
                    // Execute the migration SQL using direct driver access
                    await executeRawSql(options.dialect, sqlContent, {
                        connectionUrl: options.connectionUrl,
                        databasePath: options.databasePath,
                    });
                }
                // Record the migration (still use Kysely for this since it's simple INSERT)
                await recordMigration(db, migrationsTable, migrationsSchema, options.dialect, folderName, checksum);
                result.applied.push({
                    migrationName: folderName,
                    duration: Date.now() - startTime,
                });
            }
            catch (error) {
                result.failed = {
                    migrationName: folderName,
                    error: error instanceof Error ? error.message : String(error),
                };
                break; // Stop on first failure
            }
        }
        return result;
    }
    finally {
        await destroy();
    }
}
|
|
335
|
+
/**
 * Preview pending migrations without applying them.
 *
 * Lists timestamped migration folders on disk, partitions them into
 * already-applied names and pending entries (with their SQL content), and
 * performs no writes other than creating the bookkeeping table if absent.
 *
 * @returns { pending: {name, sql}[], alreadyApplied: string[] }
 */
export async function previewPrismaMigrations(options) {
    const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
    const migrationsSchema = options.migrationsSchema ?? "public";
    const { db, destroy } = await createKyselyAdapter({
        dialect: options.dialect,
        connectionUrl: options.connectionUrl,
        databasePath: options.databasePath,
    });
    try {
        // Make sure the bookkeeping table exists so the SELECT below succeeds.
        await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
        const applied = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
        // Timestamped folders sort lexicographically into chronological order.
        const dirEntries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
        const folderNames = dirEntries
            .filter((entry) => entry.isDirectory() && /^\d{14}_/.test(entry.name))
            .map((entry) => entry.name)
            .sort();
        const pending = [];
        const alreadyApplied = [];
        for (const name of folderNames) {
            if (applied.has(name)) {
                alreadyApplied.push(name);
                continue;
            }
            let migrationSql;
            try {
                migrationSql = await fs.readFile(path.join(options.migrationsFolder, name, "migration.sql"), "utf-8");
            }
            catch {
                // Folder has no migration.sql — nothing to preview for it.
                continue;
            }
            pending.push({
                name,
                sql: migrationSql,
            });
        }
        return {
            pending,
            alreadyApplied,
        };
    }
    finally {
        await destroy();
    }
}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
// Type declarations for migrations/prisma/create: options and result shapes
// for generating Prisma-compatible migration folders from a ZenStack schema.
import type { KyselyDialect } from "../../sql/kysely-adapter.js";
export interface PrismaMigrationOptions {
    /** Migration name */
    name: string;
    /** Path to ZenStack schema file */
    schemaPath: string;
    /** Output directory for migration files */
    outputPath: string;
    /** Database dialect for SQL generation */
    dialect: KyselyDialect;
    /** Table rename mappings */
    renameTables?: Array<{
        from: string;
        to: string;
    }>;
    /** Column rename mappings */
    renameColumns?: Array<{
        table: string;
        from: string;
        to: string;
    }>;
}
export interface PrismaMigration {
    /** Migration folder name (timestamp_name) */
    folderName: string;
    /** Full path to migration folder */
    folderPath: string;
    /** SQL content */
    sql: string;
    /** Timestamp */
    timestamp: number;
}
export interface CreateInitialMigrationOptions {
    /** Migration name (default: "init") */
    name?: string;
    /** Path to ZenStack schema file */
    schemaPath: string;
    /** Output directory for migration files */
    outputPath: string;
    /** Database dialect for SQL generation */
    dialect: KyselyDialect;
}
/**
 * Generate timestamp string for migration folder name
 */
export declare function generateTimestamp(): string;
/**
 * Create a Prisma-compatible migration.
 * Resolves to null when the schema diff produces no statements.
 */
export declare function createPrismaMigration(options: PrismaMigrationOptions): Promise<PrismaMigration | null>;
/**
 * Create an initial migration that creates all tables from scratch.
 * This is used when initializing a project where the database is empty.
 */
export declare function createInitialMigration(options: CreateInitialMigrationOptions): Promise<PrismaMigration>;
/**
 * Check if there are schema changes
 */
export declare function hasPrismaSchemaChanges(options: {
    schemaPath: string;
    outputPath: string;
}): Promise<boolean>;
//# sourceMappingURL=create.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"create.d.ts","sourceRoot":"","sources":["../../../src/migrations/prisma/create.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAMjE,MAAM,WAAW,sBAAsB;IACrC,qBAAqB;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,mCAAmC;IACnC,UAAU,EAAE,MAAM,CAAC;IACnB,2CAA2C;IAC3C,UAAU,EAAE,MAAM,CAAC;IACnB,0CAA0C;IAC1C,OAAO,EAAE,aAAa,CAAC;IACvB,4BAA4B;IAC5B,YAAY,CAAC,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,EAAE,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IACnD,6BAA6B;IAC7B,aAAa,CAAC,EAAE,KAAK,CAAC;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,EAAE,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CACpE;AAED,MAAM,WAAW,eAAe;IAC9B,6CAA6C;IAC7C,UAAU,EAAE,MAAM,CAAC;IACnB,oCAAoC;IACpC,UAAU,EAAE,MAAM,CAAC;IACnB,kBAAkB;IAClB,GAAG,EAAE,MAAM,CAAC;IACZ,gBAAgB;IAChB,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,6BAA6B;IAC5C,uCAAuC;IACvC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,mCAAmC;IACnC,UAAU,EAAE,MAAM,CAAC;IACnB,2CAA2C;IAC3C,UAAU,EAAE,MAAM,CAAC;IACnB,0CAA0C;IAC1C,OAAO,EAAE,aAAa,CAAC;CACxB;AAED;;GAEG;AACH,wBAAgB,iBAAiB,IAAI,MAAM,CAU1C;AAED;;GAEG;AACH,wBAAsB,qBAAqB,CACzC,OAAO,EAAE,sBAAsB,GAC9B,OAAO,CAAC,eAAe,GAAG,IAAI,CAAC,CAiDjC;AAED;;;GAGG;AACH,wBAAsB,sBAAsB,CAC1C,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,eAAe,CAAC,CAwC1B;AAED;;GAEG;AACH,wBAAsB,sBAAsB,CAAC,OAAO,EAAE;IACpD,UAAU,EAAE,MAAM,CAAC;IACnB,UAAU,EAAE,MAAM,CAAC;CACpB,GAAG,OAAO,CAAC,OAAO,CAAC,CAqBnB"}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import * as fs from "fs/promises";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
import { generateSchemaSnapshot } from "../../schema/snapshot.js";
|
|
4
|
+
import { applyRenameMappings, buildSqlStatements, diffSchemas } from "./diff.js";
|
|
5
|
+
import { appendToMigrationLog, calculateChecksum } from "./log.js";
|
|
6
|
+
import { getSnapshotPaths, readSnapshot, writeSnapshot } from "./snapshot.js";
|
|
7
|
+
/**
 * Generate a 14-digit timestamp string (YYYYMMDDHHMMSS, local time) used as
 * the prefix of a migration folder name.
 */
export function generateTimestamp() {
    const now = new Date();
    const pad = (value) => String(value).padStart(2, "0");
    return (`${now.getFullYear()}` +
        pad(now.getMonth() + 1) +
        pad(now.getDate()) +
        pad(now.getHours()) +
        pad(now.getMinutes()) +
        pad(now.getSeconds()));
}
|
|
21
|
+
/**
 * Shared tail of migration creation: builds the timestamped folder name,
 * renders migration.sql (header comments + statements), writes the folder and
 * file, refreshes the schema snapshot, and appends a checksum entry to the
 * migration log. Previously this logic was duplicated verbatim in both
 * createPrismaMigration and createInitialMigration.
 *
 * @param name human-readable migration name (already defaulted by callers)
 * @param outputPath migrations root directory
 * @param snapshotPath path of the snapshot file to overwrite
 * @param schema current schema snapshot to persist
 * @param up ordered SQL statements forming the migration body
 * @returns the migration descriptor { folderName, folderPath, sql, timestamp }
 */
async function writeMigrationArtifacts(name, outputPath, snapshotPath, schema, up) {
    const timestamp = Date.now();
    const timestampStr = generateTimestamp();
    // Folder names only allow [a-z0-9_]; everything else collapses to "_".
    const safeName = name.replace(/[^a-z0-9]/gi, "_").toLowerCase();
    const folderName = `${timestampStr}_${safeName}`;
    const folderPath = path.join(outputPath, folderName);
    // Build migration.sql content with comments
    const sqlContent = [
        `-- Migration: ${name}`,
        `-- Generated at: ${new Date(timestamp).toISOString()}`,
        "",
        ...up,
        "",
    ].join("\n");
    // Create migration folder and file
    await fs.mkdir(folderPath, { recursive: true });
    await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
    // Update snapshot
    await writeSnapshot(snapshotPath, schema);
    // Append to migration log; the checksum lets the apply step detect edits
    const checksum = calculateChecksum(sqlContent);
    await appendToMigrationLog(outputPath, { name: folderName, checksum });
    return {
        folderName,
        folderPath,
        sql: sqlContent,
        timestamp,
    };
}
/**
 * Create a Prisma-compatible migration by diffing the current schema against
 * the previous snapshot, with optional table/column rename mappings applied.
 *
 * @returns the created migration, or null when the diff yields no statements
 */
export async function createPrismaMigration(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = applyRenameMappings(diffSchemas(previousSnapshot?.schema ?? null, currentSchema), options.renameTables, options.renameColumns);
    const { up } = buildSqlStatements(diff, options.dialect);
    if (up.length === 0) {
        return null;
    }
    return writeMigrationArtifacts(options.name, options.outputPath, snapshotPath, currentSchema, up);
}
/**
 * Create an initial migration that creates all tables from scratch.
 * This is used when initializing a project where the database is empty.
 * Unlike createPrismaMigration, this always writes a migration — even when
 * the schema produces no statements.
 */
export async function createInitialMigration(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    // Diff against empty schema to get full creation SQL
    const diff = diffSchemas(null, currentSchema);
    const { up } = buildSqlStatements(diff, options.dialect);
    return writeMigrationArtifacts(options.name ?? "init", options.outputPath, snapshotPath, currentSchema, up);
}
|
|
99
|
+
/**
 * Check if there are schema changes: diff the current schema against the
 * stored snapshot and report whether any change category is non-empty.
 */
export async function hasPrismaSchemaChanges(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    // Any non-empty change collection means the schema has drifted.
    const changeSets = [
        diff.addedModels,
        diff.removedModels,
        diff.addedFields,
        diff.removedFields,
        diff.alteredFields,
        diff.addedUniqueConstraints,
        diff.removedUniqueConstraints,
        diff.addedIndexes,
        diff.removedIndexes,
        diff.addedForeignKeys,
        diff.removedForeignKeys,
        diff.primaryKeyChanges,
    ];
    return changeSets.some((set) => set.length > 0);
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
// Type declarations for migrations/prisma/diff: structural schema diffing and
// SQL generation for Prisma-compatible migrations.
import type { KyselyDialect } from "../../sql/kysely-adapter.js";
import type { SchemaSnapshot, SchemaTable, SchemaColumn } from "../../schema/snapshot.js";
/**
 * Compute the structural diff between two schema snapshots.
 * Pass `previous = null` to diff against an empty schema (initial migration).
 */
export declare function diffSchemas(previous: SchemaSnapshot | null, current: SchemaSnapshot): {
    addedModels: SchemaTable[];
    removedModels: SchemaTable[];
    addedFields: {
        tableName: string;
        column: SchemaColumn;
    }[];
    removedFields: {
        tableName: string;
        column: SchemaColumn;
    }[];
    alteredFields: {
        tableName: string;
        columnName: string;
        previous: SchemaColumn;
        current: SchemaColumn;
    }[];
    addedUniqueConstraints: {
        tableName: string;
        constraint: {
            name: string;
            columns: string[];
        };
    }[];
    removedUniqueConstraints: {
        tableName: string;
        constraint: {
            name: string;
            columns: string[];
        };
    }[];
    addedIndexes: {
        tableName: string;
        index: {
            name: string;
            columns: string[];
        };
    }[];
    removedIndexes: {
        tableName: string;
        index: {
            name: string;
            columns: string[];
        };
    }[];
    addedForeignKeys: {
        tableName: string;
        foreignKey: {
            name: string;
            columns: string[];
            referencedTable: string;
            referencedColumns: string[];
        };
    }[];
    removedForeignKeys: {
        tableName: string;
        foreignKey: {
            name: string;
            columns: string[];
            referencedTable: string;
            referencedColumns: string[];
        };
    }[];
    primaryKeyChanges: {
        tableName: string;
        previous?: {
            name: string;
            columns: string[];
        };
        current?: {
            name: string;
            columns: string[];
        };
    }[];
    renamedTables: Array<{
        from: string;
        to: string;
    }>;
    renamedColumns: Array<{
        tableName: string;
        from: string;
        to: string;
    }>;
};
type PrismaDiff = ReturnType<typeof diffSchemas>;
/**
 * Reinterpret matching remove+add pairs in a diff as renames, according to the
 * user-supplied table and column rename mappings.
 */
export declare function applyRenameMappings(diff: PrismaDiff, renameTables?: Array<{
    from: string;
    to: string;
}>, renameColumns?: Array<{
    table: string;
    from: string;
    to: string;
}>): PrismaDiff;
/**
 * Build SQL statements from diff
 */
export declare function buildSqlStatements(diff: ReturnType<typeof diffSchemas>, dialect: KyselyDialect): {
    up: string[];
    down: string[];
};
export {};
//# sourceMappingURL=diff.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"diff.d.ts","sourceRoot":"","sources":["../../../src/migrations/prisma/diff.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AACjE,OAAO,KAAK,EAAE,cAAc,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAyK1F,wBAAgB,WAAW,CAAC,QAAQ,EAAE,cAAc,GAAG,IAAI,EAAE,OAAO,EAAE,cAAc;;;;mBAsB5C,MAAM;gBAAU,YAAY;;;mBAC1B,MAAM;gBAAU,YAAY;;;mBAEvD,MAAM;oBACL,MAAM;kBACR,YAAY;iBACb,YAAY;;;mBAGV,MAAM;oBACL;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,OAAO,EAAE,MAAM,EAAE,CAAA;SAAE;;;mBAGpC,MAAM;oBACL;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,OAAO,EAAE,MAAM,EAAE,CAAA;SAAE;;;mBAGpC,MAAM;eACV;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,OAAO,EAAE,MAAM,EAAE,CAAA;SAAE;;;mBAG/B,MAAM;eACV;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,OAAO,EAAE,MAAM,EAAE,CAAA;SAAE;;;mBAG/B,MAAM;oBACL;YACV,IAAI,EAAE,MAAM,CAAC;YACb,OAAO,EAAE,MAAM,EAAE,CAAC;YAClB,eAAe,EAAE,MAAM,CAAC;YACxB,iBAAiB,EAAE,MAAM,EAAE,CAAC;SAC7B;;;mBAGU,MAAM;oBACL;YACV,IAAI,EAAE,MAAM,CAAC;YACb,OAAO,EAAE,MAAM,EAAE,CAAC;YAClB,eAAe,EAAE,MAAM,CAAC;YACxB,iBAAiB,EAAE,MAAM,EAAE,CAAC;SAC7B;;;mBAGU,MAAM;mBACN;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,OAAO,EAAE,MAAM,EAAE,CAAA;SAAE;kBACpC;YAAE,IAAI,EAAE,MAAM,CAAC;YAAC,OAAO,EAAE,MAAM,EAAE,CAAA;SAAE;;mBAiCxB,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,EAAE,EAAE,MAAM,CAAA;KAAE,CAAC;oBAClC,KAAK,CAAC;QAAE,SAAS,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,EAAE,EAAE,MAAM,CAAA;KAAE,CAAC;EAE/E;AAED,KAAK,UAAU,GAAG,UAAU,CAAC,OAAO,WAAW,CAAC,CAAC;AAkGjD,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,UAAU,EAChB,YAAY,GAAE,KAAK,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAA;CAAE,CAAM,EACtD,aAAa,GAAE,KAAK,CAAC;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,MAAM,CAAA;CAAE,CAAM,GACrE,UAAU,CA8FZ;AA2CD;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,IAAI,EAAE,UAAU,CAAC,OAAO,WAAW,CAAC,EACpC,OAAO,EAAE,aAAa,GACrB;IAAE,EAAE,EAAE,MAAM,EAAE,CAAC;IAAC,IAAI,EAAE,MAAM,EAAE,CAAA;CAAE,CAkNlC"}
|