zenstack-kit 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/app.d.ts.map +1 -1
- package/dist/cli/app.js +9 -28
- package/dist/cli/commands.d.ts +3 -1
- package/dist/cli/commands.d.ts.map +1 -1
- package/dist/cli/commands.js +17 -1
- package/dist/cli/index.js +0 -0
- package/dist/cli/prompts.d.ts +9 -0
- package/dist/cli/prompts.d.ts.map +1 -1
- package/dist/cli/prompts.js +57 -1
- package/package.json +1 -5
- package/dist/cli.d.ts +0 -12
- package/dist/cli.d.ts.map +0 -1
- package/dist/cli.js +0 -240
- package/dist/config-loader.d.ts +0 -6
- package/dist/config-loader.d.ts.map +0 -1
- package/dist/config-loader.js +0 -36
- package/dist/config.d.ts +0 -62
- package/dist/config.d.ts.map +0 -1
- package/dist/config.js +0 -44
- package/dist/init-prompts.d.ts +0 -13
- package/dist/init-prompts.d.ts.map +0 -1
- package/dist/init-prompts.js +0 -64
- package/dist/introspect.d.ts +0 -54
- package/dist/introspect.d.ts.map +0 -1
- package/dist/introspect.js +0 -75
- package/dist/kysely-adapter.d.ts +0 -49
- package/dist/kysely-adapter.d.ts.map +0 -1
- package/dist/kysely-adapter.js +0 -74
- package/dist/migrate-apply.d.ts +0 -18
- package/dist/migrate-apply.d.ts.map +0 -1
- package/dist/migrate-apply.js +0 -61
- package/dist/migrations.d.ts +0 -161
- package/dist/migrations.d.ts.map +0 -1
- package/dist/migrations.js +0 -620
- package/dist/prisma-migrations.d.ts +0 -160
- package/dist/prisma-migrations.d.ts.map +0 -1
- package/dist/prisma-migrations.js +0 -789
- package/dist/prompts.d.ts +0 -10
- package/dist/prompts.d.ts.map +0 -1
- package/dist/prompts.js +0 -41
- package/dist/pull.d.ts +0 -23
- package/dist/pull.d.ts.map +0 -1
- package/dist/pull.js +0 -424
- package/dist/schema-snapshot.d.ts +0 -45
- package/dist/schema-snapshot.d.ts.map +0 -1
- package/dist/schema-snapshot.js +0 -265
- package/dist/sql-compiler.d.ts +0 -74
- package/dist/sql-compiler.d.ts.map +0 -1
- package/dist/sql-compiler.js +0 -243
|
@@ -1,789 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Prisma-compatible migrations
|
|
3
|
-
*
|
|
4
|
-
* Generates migrations in Prisma format:
|
|
5
|
-
* - Folder structure: migrations/<timestamp>_<name>/migration.sql
|
|
6
|
-
* - Tracks migrations in _prisma_migrations table
|
|
7
|
-
* - Compatible with `prisma migrate deploy`
|
|
8
|
-
*/
|
|
9
|
-
import * as fs from "fs/promises";
|
|
10
|
-
import * as path from "path";
|
|
11
|
-
import * as crypto from "crypto";
|
|
12
|
-
import { sql } from "kysely";
|
|
13
|
-
import { createKyselyAdapter } from "./kysely-adapter.js";
|
|
14
|
-
import { generateSchemaSnapshot, createSnapshot } from "./schema-snapshot.js";
|
|
15
|
-
import { compileCreateTable, compileDropTable, compileAddColumn, compileDropColumn, compileRenameTable, compileRenameColumn, compileCreateIndex, compileDropIndex, compileAddUniqueConstraint, compileDropConstraint, compileAddForeignKeyConstraint, compileAddPrimaryKeyConstraint, compileAlterColumn, } from "./sql-compiler.js";
|
|
16
|
-
/**
 * Generate timestamp string for migration folder name.
 *
 * Produces a 14-digit local-time stamp, `YYYYMMDDHHMMSS`, matching the
 * Prisma migration-folder prefix convention.
 *
 * @returns {string} 14-character numeric timestamp
 */
export function generateTimestamp() {
    const pad2 = (value) => String(value).padStart(2, "0");
    const d = new Date();
    return (
        `${d.getFullYear()}` +
        pad2(d.getMonth() + 1) +
        pad2(d.getDate()) +
        pad2(d.getHours()) +
        pad2(d.getMinutes()) +
        pad2(d.getSeconds())
    );
}
|
|
30
|
-
/**
 * Get paths for snapshot file.
 *
 * Resolves the `meta` directory and the `_snapshot.json` file location
 * beneath the given migrations output directory.
 *
 * @param {string} outputPath - Root migrations output directory.
 * @returns {{metaDir: string, snapshotPath: string}}
 */
function getSnapshotPaths(outputPath) {
    const metaDir = path.join(outputPath, "meta");
    const snapshotPath = path.join(metaDir, "_snapshot.json");
    return { metaDir, snapshotPath };
}
|
|
40
|
-
/**
 * Read existing snapshot.
 *
 * Loads and validates a previously written schema snapshot file.
 * Returns null when the file does not exist (ENOENT); any other read
 * error, JSON parse error, or shape mismatch (must be `version: 2` with
 * a `schema` property) is thrown to the caller.
 *
 * @param {string} snapshotPath - Path to `_snapshot.json`.
 * @returns {Promise<object|null>} Parsed snapshot, or null when absent.
 */
async function readSnapshot(snapshotPath) {
    let content;
    try {
        content = await fs.readFile(snapshotPath, "utf-8");
    }
    catch (error) {
        // A missing snapshot simply means "no previous migration state".
        if (error instanceof Error && "code" in error && error.code === "ENOENT") {
            return null;
        }
        throw error;
    }
    const snapshot = JSON.parse(content);
    if (!snapshot || snapshot.version !== 2 || !snapshot.schema) {
        throw new Error("Snapshot format is invalid");
    }
    return snapshot;
}
|
|
59
|
-
/**
 * Write snapshot to file.
 *
 * Serializes the given schema via createSnapshot() and persists it as
 * pretty-printed JSON, creating the parent directory when needed.
 *
 * @param {string} snapshotPath - Destination `_snapshot.json` path.
 * @param {object} schema - Current schema to snapshot.
 */
export async function writeSnapshot(snapshotPath, schema) {
    const snapshotDocument = createSnapshot(schema);
    const serialized = JSON.stringify(snapshotDocument, null, 2);
    await fs.mkdir(path.dirname(snapshotPath), { recursive: true });
    await fs.writeFile(snapshotPath, serialized, "utf-8");
}
|
|
67
|
-
/**
 * Diff two schemas and return the changes.
 *
 * Compares the previous snapshot schema (null for "empty database")
 * against the current schema and reports table, column, constraint,
 * index, foreign-key, and primary-key differences. `renamedTables` and
 * `renamedColumns` are always returned empty; the caller folds explicit
 * rename mappings into them afterwards.
 *
 * @param {object|null} previous - Prior schema (`{ tables: [...] }`) or null.
 * @param {object} current - Current schema (`{ tables: [...] }`).
 * @returns {object} Diff result with added/removed/altered collections.
 */
function diffSchemas(previous, current) {
    // Index a list of named items for O(1) lookup.
    const byName = (items) => new Map(items.map((item) => [item.name, item]));
    const previousModels = byName(previous ? previous.tables : []);
    const currentModels = byName(current.tables);

    const addedModels = [...currentModels.values()].filter((m) => !previousModels.has(m.name));
    const removedModels = [...previousModels.values()].filter((m) => !currentModels.has(m.name));

    // Field-level changes for existing tables
    const addedFields = [];
    const removedFields = [];
    const alteredFields = [];
    // Constraint changes
    const addedUniqueConstraints = [];
    const removedUniqueConstraints = [];
    const addedIndexes = [];
    const removedIndexes = [];
    const addedForeignKeys = [];
    const removedForeignKeys = [];
    const primaryKeyChanges = [];

    // Generic added/removed diff over two lists of named items.
    const diffNamed = (prevItems, currItems, onAdded, onRemoved) => {
        const prevMap = byName(prevItems);
        const currMap = byName(currItems);
        for (const item of currMap.values()) {
            if (!prevMap.has(item.name)) {
                onAdded(item);
            }
        }
        for (const item of prevMap.values()) {
            if (!currMap.has(item.name)) {
                onRemoved(item);
            }
        }
    };

    for (const [tableName, currentModel] of currentModels.entries()) {
        const previousModel = previousModels.get(tableName);
        if (!previousModel) {
            continue; // new table — already captured in addedModels
        }

        // Column additions / removals.
        diffNamed(previousModel.columns, currentModel.columns,
            (column) => addedFields.push({ tableName, column }),
            (column) => removedFields.push({ tableName, column }));

        // Column alterations: type, nullability, or default changed.
        const previousColumns = byName(previousModel.columns);
        for (const currentColumn of byName(currentModel.columns).values()) {
            const previousColumn = previousColumns.get(currentColumn.name);
            if (!previousColumn) {
                continue;
            }
            const changed = previousColumn.type !== currentColumn.type ||
                previousColumn.notNull !== currentColumn.notNull ||
                previousColumn.default !== currentColumn.default;
            if (changed) {
                alteredFields.push({
                    tableName,
                    columnName: currentColumn.name,
                    previous: previousColumn,
                    current: currentColumn,
                });
            }
        }

        // Unique constraint changes
        diffNamed(previousModel.uniqueConstraints, currentModel.uniqueConstraints,
            (constraint) => addedUniqueConstraints.push({ tableName, constraint }),
            (constraint) => removedUniqueConstraints.push({ tableName, constraint }));

        // Index changes
        diffNamed(previousModel.indexes, currentModel.indexes,
            (index) => addedIndexes.push({ tableName, index }),
            (index) => removedIndexes.push({ tableName, index }));

        // Foreign key changes
        diffNamed(previousModel.foreignKeys, currentModel.foreignKeys,
            (fk) => addedForeignKeys.push({ tableName, foreignKey: fk }),
            (fk) => removedForeignKeys.push({ tableName, foreignKey: fk }));

        // Primary key: compared by name and exact column list.
        const prevPk = previousModel.primaryKey;
        const currPk = currentModel.primaryKey;
        const samePk = (prevPk?.name ?? "") === (currPk?.name ?? "") &&
            JSON.stringify(prevPk?.columns ?? []) === JSON.stringify(currPk?.columns ?? []);
        if (!samePk) {
            primaryKeyChanges.push({
                tableName,
                previous: prevPk,
                current: currPk,
            });
        }
    }

    return {
        addedModels,
        removedModels,
        addedFields,
        removedFields,
        alteredFields,
        addedUniqueConstraints,
        removedUniqueConstraints,
        addedIndexes,
        removedIndexes,
        addedForeignKeys,
        removedForeignKeys,
        primaryKeyChanges,
        renamedTables: [],
        renamedColumns: [],
    };
}
|
|
200
|
-
/**
 * Build SQL statements from diff
 *
 * Translates a schema diff into `up` (forward) and `down` (rollback)
 * statement lists for the given dialect. Statement ORDER is the whole
 * contract here:
 *   - renames first, then creates/drops of tables,
 *   - old PKs / FKs / unique constraints / indexes dropped BEFORE
 *     column changes, new ones added AFTER,
 * so dependent objects never reference missing columns.
 * `down` is built with unshift() throughout, so rollback statements run
 * in the exact reverse order of the forward statements.
 *
 * @param {object} diff - Result of diffSchemas (plus any rename mappings).
 * @param {string} dialect - Target SQL dialect passed to the compilers.
 * @returns {{up: string[], down: string[]}}
 */
function buildSqlStatements(diff, dialect) {
    const up = [];
    const down = [];
    const compileOpts = { dialect };
    // Table renames
    for (const rename of diff.renamedTables) {
        up.push(compileRenameTable(rename.from, rename.to, compileOpts));
        down.unshift(compileRenameTable(rename.to, rename.from, compileOpts));
    }
    // Column renames
    for (const rename of diff.renamedColumns) {
        up.push(compileRenameColumn(rename.tableName, rename.from, rename.to, compileOpts));
        down.unshift(compileRenameColumn(rename.tableName, rename.to, rename.from, compileOpts));
    }
    // Create tables
    for (const model of diff.addedModels) {
        up.push(compileCreateTable(model, compileOpts));
        down.unshift(compileDropTable(model.name, compileOpts));
    }
    // Drop tables
    for (const model of diff.removedModels) {
        up.push(compileDropTable(model.name, compileOpts));
        // Rollback recreates the dropped table (data is NOT restored).
        down.unshift(compileCreateTable(model, compileOpts));
    }
    // Primary key changes (drop old first)
    for (const change of diff.primaryKeyChanges) {
        if (change.previous) {
            up.push(compileDropConstraint(change.tableName, change.previous.name, compileOpts));
            down.unshift(compileAddPrimaryKeyConstraint(change.tableName, change.previous.name, change.previous.columns, compileOpts));
        }
    }
    // Drop foreign keys first (before dropping columns)
    for (const { tableName, foreignKey } of diff.removedForeignKeys) {
        up.push(compileDropConstraint(tableName, foreignKey.name, compileOpts));
        down.unshift(compileAddForeignKeyConstraint(tableName, foreignKey.name, foreignKey.columns, foreignKey.referencedTable, foreignKey.referencedColumns, compileOpts));
    }
    // Drop unique constraints
    for (const { tableName, constraint } of diff.removedUniqueConstraints) {
        up.push(compileDropConstraint(tableName, constraint.name, compileOpts));
        down.unshift(compileAddUniqueConstraint(tableName, constraint.name, constraint.columns, compileOpts));
    }
    // Drop indexes
    for (const { tableName, index } of diff.removedIndexes) {
        up.push(compileDropIndex(index.name, compileOpts));
        down.unshift(compileCreateIndex(tableName, index.name, index.columns, compileOpts));
    }
    // Add columns
    for (const { tableName, column } of diff.addedFields) {
        up.push(compileAddColumn(tableName, column, compileOpts));
        down.unshift(compileDropColumn(tableName, column.name, compileOpts));
    }
    // Drop columns
    for (const { tableName, column } of diff.removedFields) {
        up.push(compileDropColumn(tableName, column.name, compileOpts));
        down.unshift(compileAddColumn(tableName, column, compileOpts));
    }
    // Alter columns: each aspect (type / nullability / default) emits its
    // own ALTER so partial changes stay independent.
    for (const change of diff.alteredFields) {
        const typeChanged = change.previous.type !== change.current.type;
        const nullChanged = change.previous.notNull !== change.current.notNull;
        const defaultChanged = change.previous.default !== change.current.default;
        if (typeChanged) {
            up.push(...compileAlterColumn(change.tableName, change.columnName, { setType: change.current.type }, compileOpts));
            down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setType: change.previous.type }, compileOpts));
        }
        if (nullChanged) {
            if (change.current.notNull) {
                up.push(...compileAlterColumn(change.tableName, change.columnName, { setNotNull: true }, compileOpts));
                down.unshift(...compileAlterColumn(change.tableName, change.columnName, { dropNotNull: true }, compileOpts));
            }
            else {
                up.push(...compileAlterColumn(change.tableName, change.columnName, { dropNotNull: true }, compileOpts));
                down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setNotNull: true }, compileOpts));
            }
        }
        if (defaultChanged) {
            // An `undefined` default means "no default": map to dropDefault.
            if (change.current.default !== undefined) {
                up.push(...compileAlterColumn(change.tableName, change.columnName, { setDefault: change.current.default }, compileOpts));
            }
            else {
                up.push(...compileAlterColumn(change.tableName, change.columnName, { dropDefault: true }, compileOpts));
            }
            if (change.previous.default !== undefined) {
                down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setDefault: change.previous.default }, compileOpts));
            }
            else {
                down.unshift(...compileAlterColumn(change.tableName, change.columnName, { dropDefault: true }, compileOpts));
            }
        }
    }
    // Primary key changes (add new)
    for (const change of diff.primaryKeyChanges) {
        if (change.current) {
            up.push(compileAddPrimaryKeyConstraint(change.tableName, change.current.name, change.current.columns, compileOpts));
            down.unshift(compileDropConstraint(change.tableName, change.current.name, compileOpts));
        }
    }
    // Add unique constraints
    for (const { tableName, constraint } of diff.addedUniqueConstraints) {
        up.push(compileAddUniqueConstraint(tableName, constraint.name, constraint.columns, compileOpts));
        down.unshift(compileDropConstraint(tableName, constraint.name, compileOpts));
    }
    // Add indexes
    for (const { tableName, index } of diff.addedIndexes) {
        up.push(compileCreateIndex(tableName, index.name, index.columns, compileOpts));
        down.unshift(compileDropIndex(index.name, compileOpts));
    }
    // Add foreign keys
    for (const { tableName, foreignKey } of diff.addedForeignKeys) {
        up.push(compileAddForeignKeyConstraint(tableName, foreignKey.name, foreignKey.columns, foreignKey.referencedTable, foreignKey.referencedColumns, compileOpts));
        down.unshift(compileDropConstraint(tableName, foreignKey.name, compileOpts));
    }
    return { up, down };
}
|
|
317
|
-
/**
 * Create a Prisma-compatible migration
 *
 * Diffs the current schema (from options.schemaPath) against the stored
 * snapshot, folds any explicit table/column rename mappings into the
 * diff, then writes `<outputPath>/<timestamp>_<name>/migration.sql`,
 * refreshes the snapshot, and appends a checksum entry to the migration
 * log. Returns null when the diff produces no forward statements.
 *
 * NOTE(review): only `up` statements are persisted; the generated `down`
 * statements are discarded here.
 *
 * @param {object} options - { schemaPath, outputPath, name, dialect,
 *   renameTables?, renameColumns? }
 * @returns {Promise<{folderName, folderPath, sql, timestamp}|null>}
 */
export async function createPrismaMigration(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    let diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    // Apply rename mappings
    if (options.renameTables?.length || options.renameColumns?.length) {
        // Handle table renames: a rename is only recognized when the "from"
        // table was removed AND the "to" table was added in the same diff;
        // both entries are then replaced by a single rename operation.
        for (const mapping of options.renameTables ?? []) {
            const removedIdx = diff.removedModels.findIndex((m) => m.name === mapping.from);
            const addedIdx = diff.addedModels.findIndex((m) => m.name === mapping.to);
            if (removedIdx !== -1 && addedIdx !== -1) {
                diff.removedModels.splice(removedIdx, 1);
                diff.addedModels.splice(addedIdx, 1);
                diff.renamedTables.push(mapping);
            }
        }
        // Handle column renames (same matched add/remove pair requirement,
        // scoped to a single table).
        for (const mapping of options.renameColumns ?? []) {
            const removedIdx = diff.removedFields.findIndex((f) => f.tableName === mapping.table && f.column.name === mapping.from);
            const addedIdx = diff.addedFields.findIndex((f) => f.tableName === mapping.table && f.column.name === mapping.to);
            if (removedIdx !== -1 && addedIdx !== -1) {
                diff.removedFields.splice(removedIdx, 1);
                diff.addedFields.splice(addedIdx, 1);
                diff.renamedColumns.push({ tableName: mapping.table, from: mapping.from, to: mapping.to });
            }
        }
    }
    const { up, down } = buildSqlStatements(diff, options.dialect);
    // No forward statements means nothing to migrate.
    if (up.length === 0) {
        return null;
    }
    const timestamp = Date.now();
    const timestampStr = generateTimestamp();
    // Sanitize the user-supplied name into a filesystem-safe folder suffix.
    const safeName = options.name.replace(/[^a-z0-9]/gi, "_").toLowerCase();
    const folderName = `${timestampStr}_${safeName}`;
    const folderPath = path.join(options.outputPath, folderName);
    // Build migration.sql content with comments
    const sqlContent = [
        `-- Migration: ${options.name}`,
        `-- Generated at: ${new Date(timestamp).toISOString()}`,
        "",
        ...up,
        "",
    ].join("\n");
    // Create migration folder and file
    await fs.mkdir(folderPath, { recursive: true });
    await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
    // Update snapshot
    await writeSnapshot(snapshotPath, currentSchema);
    // Append to migration log (checksum later verified before apply)
    const checksum = calculateChecksum(sqlContent);
    await appendToMigrationLog(options.outputPath, { name: folderName, checksum });
    return {
        folderName,
        folderPath,
        sql: sqlContent,
        timestamp,
    };
}
|
|
380
|
-
/**
 * Create an initial migration that creates all tables from scratch.
 * This is used when initializing a project where the database is empty.
 *
 * Diffs the current schema against an EMPTY schema (so every table shows
 * up as "added"), writes the full-creation migration.sql, stores the
 * snapshot, and records a checksum entry in the migration log.
 *
 * NOTE(review): unlike createPrismaMigration, this does not return null
 * on an empty diff — an (empty) migration file is written regardless.
 *
 * @param {object} options - { schemaPath, outputPath, dialect, name? }
 *   `name` defaults to "init".
 * @returns {Promise<{folderName, folderPath, sql, timestamp}>}
 */
export async function createInitialMigration(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    // Diff against empty schema to get full creation SQL
    const diff = diffSchemas(null, currentSchema);
    const { up } = buildSqlStatements(diff, options.dialect);
    const timestamp = Date.now();
    const timestampStr = generateTimestamp();
    const safeName = (options.name ?? "init").replace(/[^a-z0-9]/gi, "_").toLowerCase();
    const folderName = `${timestampStr}_${safeName}`;
    const folderPath = path.join(options.outputPath, folderName);
    // Build migration.sql content with comments
    const sqlContent = [
        `-- Migration: ${options.name ?? "init"}`,
        `-- Generated at: ${new Date(timestamp).toISOString()}`,
        "",
        ...up,
        "",
    ].join("\n");
    // Create migration folder and file
    await fs.mkdir(folderPath, { recursive: true });
    await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
    // Update snapshot
    await writeSnapshot(snapshotPath, currentSchema);
    // Append to migration log
    const checksum = calculateChecksum(sqlContent);
    await appendToMigrationLog(options.outputPath, { name: folderName, checksum });
    return {
        folderName,
        folderPath,
        sql: sqlContent,
        timestamp,
    };
}
|
|
418
|
-
/**
 * Ensure _prisma_migrations table exists
 *
 * Creates the Prisma-style migration bookkeeping table for the given
 * dialect when missing (columns mirror Prisma's: id, checksum,
 * finished_at, migration_name, logs, rolled_back_at, started_at,
 * applied_steps_count). `schema` is only used by the Postgres branch;
 * SQLite has no schemas and the MySQL branch uses the connection's
 * current database.
 *
 * NOTE(review): the Postgres branch interpolates `schema` directly, so a
 * nullish value would render as `"undefined"` — callers currently
 * default it to "public" before calling.
 *
 * (Fix: removed the `fullTableName` local, which was computed but never
 * used by any branch.)
 *
 * @param {import("kysely").Kysely} db - Active Kysely connection.
 * @param {string} tableName - Migrations table name.
 * @param {string} schema - Postgres schema (ignored for sqlite/mysql).
 * @param {string} dialect - "sqlite" | "postgres" | anything else = mysql.
 */
async function ensureMigrationsTable(db, tableName, schema, dialect) {
    if (dialect === "sqlite") {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${tableName}"`)} (
        id TEXT PRIMARY KEY,
        checksum TEXT NOT NULL,
        finished_at TEXT,
        migration_name TEXT NOT NULL,
        logs TEXT,
        rolled_back_at TEXT,
        started_at TEXT NOT NULL DEFAULT (datetime('now')),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
    else if (dialect === "postgres") {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${schema}"."${tableName}"`)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at TIMESTAMPTZ,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at TIMESTAMPTZ,
        started_at TIMESTAMPTZ NOT NULL DEFAULT now(),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
    else {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`\`${tableName}\``)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at DATETIME,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at DATETIME,
        started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
}
|
|
466
|
-
/**
 * Get list of applied migrations from _prisma_migrations table
 *
 * Selects rows that finished successfully and were not rolled back
 * (rolled_back_at IS NULL AND finished_at IS NOT NULL), and returns
 * them keyed by migration_name for O(1) "already applied?" lookups.
 * Identifier quoting differs by dialect: double quotes for
 * postgres/sqlite, backticks otherwise (mysql).
 *
 * @param {import("kysely").Kysely} db - Active Kysely connection.
 * @param {string} tableName - Migrations table name.
 * @param {string} schema - Postgres schema (used only when dialect is postgres).
 * @param {string} dialect - "postgres" | "sqlite" | anything else = mysql.
 * @returns {Promise<Map<string, object>>} migration_name -> row.
 */
async function getAppliedMigrations(db, tableName, schema, dialect) {
    let result;
    if (dialect === "postgres" && schema) {
        result = await sql `
      SELECT * FROM ${sql.raw(`"${schema}"."${tableName}"`)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    }
    else if (dialect === "sqlite") {
        result = await sql `
      SELECT * FROM ${sql.raw(`"${tableName}"`)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    }
    else {
        result = await sql `
      SELECT * FROM ${sql.raw(`\`${tableName}\``)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    }
    return new Map(result.rows.map((row) => [row.migration_name, row]));
}
|
|
491
|
-
/**
 * Record a migration in _prisma_migrations table
 *
 * Inserts a completed-migration row with a fresh UUID id, the SQL
 * checksum, the current time as finished_at (now() / datetime('now') /
 * NOW() per dialect), and applied_steps_count = 1. Values are bound as
 * parameters; only identifiers go through sql.raw().
 *
 * @param {import("kysely").Kysely} db - Active Kysely connection.
 * @param {string} tableName - Migrations table name.
 * @param {string} schema - Postgres schema (used only when dialect is postgres).
 * @param {string} dialect - "postgres" | "sqlite" | anything else = mysql.
 * @param {string} migrationName - Migration folder name being recorded.
 * @param {string} checksum - SHA-256 hex of the migration.sql content.
 */
async function recordMigration(db, tableName, schema, dialect, migrationName, checksum) {
    const id = crypto.randomUUID();
    if (dialect === "postgres" && schema) {
        await sql `
      INSERT INTO ${sql.raw(`"${schema}"."${tableName}"`)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, now(), 1)
    `.execute(db);
    }
    else if (dialect === "sqlite") {
        await sql `
      INSERT INTO ${sql.raw(`"${tableName}"`)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, datetime('now'), 1)
    `.execute(db);
    }
    else {
        await sql `
      INSERT INTO ${sql.raw(`\`${tableName}\``)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, NOW(), 1)
    `.execute(db);
    }
}
|
|
515
|
-
/**
 * Calculate SHA256 checksum of migration SQL
 *
 * Used to detect post-generation edits to migration files: the value is
 * stored in the migration log and compared again before applying.
 *
 * @param {string} sql - Migration SQL text.
 * @returns {string} Lowercase hex SHA-256 digest.
 */
export function calculateChecksum(sql) {
    const hash = crypto.createHash("sha256");
    hash.update(sql);
    return hash.digest("hex");
}
|
|
521
|
-
/**
 * Execute raw SQL using the database driver directly
 * This bypasses Kysely for DDL statements which don't work reliably with sql.raw()
 *
 * @param {string} dialect - "sqlite" | "postgres" | "mysql"; anything else is a no-op.
 * @param {string} sqlContent - Full migration SQL (may contain many statements
 *   and `--` comment lines).
 * @param {{connectionUrl?: string, databasePath?: string}} options
 */
async function executeRawSql(dialect, sqlContent, options) {
    if (dialect === "sqlite") {
        const { default: Database } = await import("better-sqlite3");
        const sqliteDb = new Database(options.databasePath || ":memory:");
        try {
            // better-sqlite3's exec() handles multiple statements properly
            sqliteDb.exec(sqlContent);
        }
        finally {
            sqliteDb.close();
        }
    }
    else if (dialect === "postgres") {
        const { Pool } = await import("pg");
        const pool = new Pool({ connectionString: options.connectionUrl });
        try {
            // pg accepts a multi-statement string in a single query call.
            await pool.query(sqlContent);
        }
        finally {
            await pool.end();
        }
    }
    else if (dialect === "mysql") {
        // Use mysql2 with promise wrapper
        const mysql = await import("mysql2");
        const pool = mysql.createPool({ uri: options.connectionUrl });
        const promisePool = pool.promise();
        try {
            // MySQL needs statements executed one at a time. Strip `--`
            // comment lines *within* each statement rather than discarding
            // any chunk that begins with a comment: generated migration
            // files start with "-- Migration: ..." header lines attached to
            // the first statement, and the previous startsWith("--") filter
            // silently dropped that entire first statement.
            const statements = sqlContent
                .split(/;(?:\s*\n|\s*$)/)
                .map((chunk) => chunk
                    .split("\n")
                    .filter((line) => !line.trim().startsWith("--"))
                    .join("\n")
                    .trim())
                .filter((stmt) => stmt.length > 0);
            for (const statement of statements) {
                await promisePool.query(statement);
            }
        }
        finally {
            // Reuse the existing promise wrapper; calling pool.promise()
            // again just to end the pool created a redundant second wrapper.
            await promisePool.end();
        }
    }
}
|
|
567
|
-
/**
 * Apply pending Prisma migrations
 *
 * Ensures the bookkeeping table exists, then executes every migration
 * folder (sorted by its 14-digit timestamp prefix) that is not yet
 * recorded as applied. Each file's checksum is verified against the
 * migration log before execution; execution stops at the first failure.
 *
 * (Fix: the migration log was previously re-read and re-parsed INSIDE
 * the loop for every pending migration; it is now read once up front.)
 *
 * @param {object} options - { dialect, connectionUrl?, databasePath?,
 *   migrationsFolder, migrationsTable?, migrationsSchema? }
 * @returns {Promise<{applied: Array, alreadyApplied: string[], failed?: object}>}
 */
export async function applyPrismaMigrations(options) {
    const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
    const migrationsSchema = options.migrationsSchema ?? "public";
    const { db, destroy } = await createKyselyAdapter({
        dialect: options.dialect,
        connectionUrl: options.connectionUrl,
        databasePath: options.databasePath,
    });
    try {
        // Ensure migrations table exists
        await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
        // Get already applied migrations
        const appliedMigrations = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
        // Read the checksum log once; it does not change during the run.
        const migrationLog = await readMigrationLog(options.migrationsFolder);
        // Read migration folders (only <14-digit timestamp>_<name> dirs)
        const entries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
        const migrationFolders = entries
            .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
            .map((e) => e.name)
            .sort();
        const result = {
            applied: [],
            alreadyApplied: [],
        };
        for (const folderName of migrationFolders) {
            if (appliedMigrations.has(folderName)) {
                result.alreadyApplied.push(folderName);
                continue;
            }
            const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
            let sqlContent;
            try {
                sqlContent = await fs.readFile(sqlPath, "utf-8");
            }
            catch {
                continue; // Skip if no migration.sql
            }
            const checksum = calculateChecksum(sqlContent);
            // Verify checksum against migration log: refuse to apply a file
            // that was edited after generation.
            const logEntry = migrationLog.find((m) => m.name === folderName);
            if (logEntry && logEntry.checksum !== checksum) {
                result.failed = {
                    migrationName: folderName,
                    error: `Checksum mismatch for migration ${folderName}.\n` +
                        `Expected: ${logEntry.checksum}\n` +
                        `Found: ${checksum}\n` +
                        `The migration file may have been modified after generation.`,
                };
                break;
            }
            const startTime = Date.now();
            try {
                // Execute the migration SQL using direct driver access
                await executeRawSql(options.dialect, sqlContent, {
                    connectionUrl: options.connectionUrl,
                    databasePath: options.databasePath,
                });
                // Record the migration (still use Kysely for this since it's simple INSERT)
                await recordMigration(db, migrationsTable, migrationsSchema, options.dialect, folderName, checksum);
                result.applied.push({
                    migrationName: folderName,
                    duration: Date.now() - startTime,
                });
            }
            catch (error) {
                result.failed = {
                    migrationName: folderName,
                    error: error instanceof Error ? error.message : String(error),
                };
                break; // Stop on first failure
            }
        }
        return result;
    }
    finally {
        await destroy();
    }
}
|
|
648
|
-
/**
 * Check if there are schema changes
 *
 * Diffs the current schema against the stored snapshot and reports
 * whether ANY change category is non-empty. Rename collections are not
 * consulted — diffSchemas always returns them empty.
 *
 * @param {object} options - { schemaPath, outputPath }
 * @returns {Promise<boolean>}
 */
export async function hasPrismaSchemaChanges(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    const changeSets = [
        diff.addedModels,
        diff.removedModels,
        diff.addedFields,
        diff.removedFields,
        diff.alteredFields,
        diff.addedUniqueConstraints,
        diff.removedUniqueConstraints,
        diff.addedIndexes,
        diff.removedIndexes,
        diff.addedForeignKeys,
        diff.removedForeignKeys,
        diff.primaryKeyChanges,
    ];
    return changeSets.some((changes) => changes.length > 0);
}
|
|
669
|
-
const MIGRATION_LOG_HEADER = `# zenstack-kit migration log
|
|
670
|
-
# Format: <migration_name> <checksum>
|
|
671
|
-
`;
|
|
672
|
-
/**
 * Get the path to the migration log file.
 *
 * @param {string} outputPath - Root of the migrations output directory.
 * @returns {string} Path to `<outputPath>/meta/_migration_log`.
 */
export function getMigrationLogPath(outputPath) {
    const metaDir = path.join(outputPath, "meta");
    return path.join(metaDir, "_migration_log");
}
|
|
678
|
-
/**
 * Parse migration log content into entries.
 *
 * Blank lines and lines starting with "#" are ignored. Each remaining line
 * is expected to hold "<migration_name> <checksum>"; lines missing either
 * part are dropped.
 *
 * @param {string} content - Raw text of the migration log file.
 * @returns {{name: string, checksum: string}[]} Parsed log entries.
 */
function parseMigrationLog(content) {
    return content
        .split(/\r?\n/) // tolerate CRLF line endings (e.g. log edited on Windows)
        .filter((line) => line.trim() && !line.startsWith("#"))
        .map((line) => {
            // Trim and split on any whitespace run so extra spacing between
            // the name and checksum does not silently drop the entry.
            const [name, checksum] = line.trim().split(/\s+/);
            return { name, checksum };
        })
        .filter((entry) => entry.name && entry.checksum);
}
|
|
691
|
-
/**
 * Serialize migration log entries to string.
 *
 * @param {{name: string, checksum: string}[]} entries - Entries to write.
 * @returns {string} Header followed by one "name checksum" line per entry.
 */
function serializeMigrationLog(entries) {
    const body = entries
        .map(({ name, checksum }) => `${name} ${checksum}`)
        .join("\n");
    if (body.length === 0) {
        // Empty log: header only, no trailing blank line.
        return MIGRATION_LOG_HEADER;
    }
    return `${MIGRATION_LOG_HEADER}${body}\n`;
}
|
|
698
|
-
/**
 * Read migration log file.
 *
 * @param {string} outputPath - Root of the migrations output directory.
 * @returns {Promise<{name: string, checksum: string}[]>} Parsed entries,
 *   or an empty array when the log file does not exist yet.
 */
export async function readMigrationLog(outputPath) {
    let content;
    try {
        content = await fs.readFile(getMigrationLogPath(outputPath), "utf-8");
    }
    catch (error) {
        // A missing log simply means no migrations have been recorded yet.
        const isMissingFile = error instanceof Error && "code" in error && error.code === "ENOENT";
        if (isMissingFile) {
            return [];
        }
        throw error;
    }
    return parseMigrationLog(content);
}
|
|
714
|
-
/**
 * Write migration log file.
 *
 * Creates the containing directory if needed, then replaces the whole log.
 *
 * @param {string} outputPath - Root of the migrations output directory.
 * @param {{name: string, checksum: string}[]} entries - Entries to persist.
 * @returns {Promise<void>}
 */
export async function writeMigrationLog(outputPath, entries) {
    const logPath = getMigrationLogPath(outputPath);
    const logDir = path.dirname(logPath);
    // Ensure the meta/ directory exists before writing.
    await fs.mkdir(logDir, { recursive: true });
    const serialized = serializeMigrationLog(entries);
    await fs.writeFile(logPath, serialized, "utf-8");
}
|
|
722
|
-
/**
 * Append a single entry to the migration log.
 *
 * Reads the current log, adds the entry, and rewrites the file.
 *
 * @param {string} outputPath - Root of the migrations output directory.
 * @param {{name: string, checksum: string}} entry - Entry to append.
 * @returns {Promise<void>}
 */
export async function appendToMigrationLog(outputPath, entry) {
    const existing = await readMigrationLog(outputPath);
    await writeMigrationLog(outputPath, [...existing, entry]);
}
|
|
730
|
-
/**
 * Scan migration folders and compute checksums for each.
 *
 * Looks for directories named with a 14-digit timestamp prefix
 * (e.g. "20240101000000_init"), reads each folder's migration.sql,
 * and checksums it. Folders lacking a readable migration.sql are skipped.
 *
 * @param {string} outputPath - Root of the migrations output directory.
 * @returns {Promise<{name: string, checksum: string}[]>} One entry per
 *   migration folder, sorted by folder name; empty if outputPath is missing.
 */
export async function scanMigrationFolders(outputPath) {
    const timestampedFolder = /^\d{14}_/;
    try {
        const dirEntries = await fs.readdir(outputPath, { withFileTypes: true });
        const folderNames = dirEntries
            .filter((entry) => entry.isDirectory() && timestampedFolder.test(entry.name))
            .map((entry) => entry.name)
            .sort();
        const results = [];
        for (const name of folderNames) {
            const sqlPath = path.join(outputPath, name, "migration.sql");
            try {
                const sql = await fs.readFile(sqlPath, "utf-8");
                results.push({ name, checksum: calculateChecksum(sql) });
            }
            catch {
                // Folders without a readable migration.sql are ignored.
            }
        }
        return results;
    }
    catch (error) {
        // A missing migrations directory means there is nothing to scan.
        if (error instanceof Error && "code" in error && error.code === "ENOENT") {
            return [];
        }
        throw error;
    }
}
|
|
761
|
-
/**
 * Check if snapshot exists.
 *
 * @param {string} outputPath - Root of the migrations output directory.
 * @returns {Promise<boolean>} True when the snapshot file is accessible.
 */
export async function hasSnapshot(outputPath) {
    const { snapshotPath } = getSnapshotPaths(outputPath);
    try {
        await fs.access(snapshotPath);
    }
    catch {
        // Any access failure (most commonly ENOENT) counts as "no snapshot".
        return false;
    }
    return true;
}
|
|
774
|
-
/**
 * Initialize snapshot from schema without generating migration.
 *
 * @param {{schemaPath: string, outputPath: string}} options - Paths to the
 *   schema file and the migrations output directory.
 * @returns {Promise<{snapshotPath: string, tableCount: number}>} Where the
 *   snapshot was written and how many tables it captured.
 */
export async function initializeSnapshot(options) {
    const { schemaPath, outputPath } = options;
    const schema = await generateSchemaSnapshot(schemaPath);
    const { snapshotPath } = getSnapshotPaths(outputPath);
    await writeSnapshot(snapshotPath, schema);
    return { snapshotPath, tableCount: schema.tables.length };
}
|
|
786
|
-
/**
 * Re-export getSnapshotPaths for external use.
 */
export { getSnapshotPaths };
|