zenstack-kit 0.1.4 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -6
- package/dist/cli/app.d.ts.map +1 -1
- package/dist/cli/app.js +7 -1
- package/dist/cli/commands.d.ts +2 -0
- package/dist/cli/commands.d.ts.map +1 -1
- package/dist/cli/commands.js +97 -6
- package/dist/cli/prompts.d.ts.map +1 -1
- package/dist/cli/prompts.js +1 -3
- package/dist/config/loader.d.ts +1 -1
- package/dist/config/loader.d.ts.map +1 -1
- package/dist/config/loader.js +11 -9
- package/dist/migrations/prisma/apply.d.ts +54 -0
- package/dist/migrations/prisma/apply.d.ts.map +1 -0
- package/dist/migrations/prisma/apply.js +384 -0
- package/dist/migrations/prisma/create.d.ts +63 -0
- package/dist/migrations/prisma/create.d.ts.map +1 -0
- package/dist/migrations/prisma/create.js +119 -0
- package/dist/migrations/prisma/diff.d.ts +104 -0
- package/dist/migrations/prisma/diff.d.ts.map +1 -0
- package/dist/migrations/prisma/diff.js +442 -0
- package/dist/migrations/prisma/log.d.ts +31 -0
- package/dist/migrations/prisma/log.d.ts.map +1 -0
- package/dist/migrations/prisma/log.js +101 -0
- package/dist/migrations/prisma/rename.d.ts +23 -0
- package/dist/migrations/prisma/rename.d.ts.map +1 -0
- package/dist/migrations/prisma/rename.js +57 -0
- package/dist/migrations/prisma/snapshot.d.ts +32 -0
- package/dist/migrations/prisma/snapshot.d.ts.map +1 -0
- package/dist/migrations/prisma/snapshot.js +65 -0
- package/dist/migrations/prisma.d.ts +5 -202
- package/dist/migrations/prisma.d.ts.map +1 -1
- package/dist/migrations/prisma.js +5 -1168
- package/dist/schema/pull.d.ts +2 -0
- package/dist/schema/pull.d.ts.map +1 -1
- package/dist/schema/pull.js +102 -4
- package/package.json +1 -1
|
@@ -1,1168 +1,5 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
* - Tracks migrations in _prisma_migrations table
|
|
7
|
-
* - Compatible with `prisma migrate deploy`
|
|
8
|
-
*/
|
|
9
|
-
import * as fs from "fs/promises";
|
|
10
|
-
import * as path from "path";
|
|
11
|
-
import * as crypto from "crypto";
|
|
12
|
-
import { sql } from "kysely";
|
|
13
|
-
import { createKyselyAdapter } from "../sql/kysely-adapter.js";
|
|
14
|
-
import { generateSchemaSnapshot, createSnapshot } from "../schema/snapshot.js";
|
|
15
|
-
import { compileCreateTable, compileDropTable, compileAddColumn, compileDropColumn, compileRenameTable, compileRenameColumn, compileCreateIndex, compileDropIndex, compileAddUniqueConstraint, compileDropConstraint, compileAddForeignKeyConstraint, compileAddPrimaryKeyConstraint, compileAlterColumn, } from "../sql/compiler.js";
|
|
16
|
-
/**
|
|
17
|
-
* Generate timestamp string for migration folder name
|
|
18
|
-
*/
|
|
19
|
-
export function generateTimestamp() {
|
|
20
|
-
const now = new Date();
|
|
21
|
-
return [
|
|
22
|
-
now.getFullYear(),
|
|
23
|
-
String(now.getMonth() + 1).padStart(2, "0"),
|
|
24
|
-
String(now.getDate()).padStart(2, "0"),
|
|
25
|
-
String(now.getHours()).padStart(2, "0"),
|
|
26
|
-
String(now.getMinutes()).padStart(2, "0"),
|
|
27
|
-
String(now.getSeconds()).padStart(2, "0"),
|
|
28
|
-
].join("");
|
|
29
|
-
}
|
|
30
|
-
/**
|
|
31
|
-
* Get paths for snapshot file
|
|
32
|
-
*/
|
|
33
|
-
function getSnapshotPaths(outputPath) {
|
|
34
|
-
const metaDir = path.join(outputPath, "meta");
|
|
35
|
-
return {
|
|
36
|
-
metaDir,
|
|
37
|
-
snapshotPath: path.join(metaDir, "_snapshot.json"),
|
|
38
|
-
};
|
|
39
|
-
}
|
|
40
|
-
/**
|
|
41
|
-
* Read existing snapshot
|
|
42
|
-
*/
|
|
43
|
-
async function readSnapshot(snapshotPath) {
|
|
44
|
-
try {
|
|
45
|
-
const content = await fs.readFile(snapshotPath, "utf-8");
|
|
46
|
-
const snapshot = JSON.parse(content);
|
|
47
|
-
if (!snapshot || snapshot.version !== 2 || !snapshot.schema) {
|
|
48
|
-
throw new Error("Snapshot format is invalid");
|
|
49
|
-
}
|
|
50
|
-
return snapshot;
|
|
51
|
-
}
|
|
52
|
-
catch (error) {
|
|
53
|
-
if (error instanceof Error && "code" in error && error.code === "ENOENT") {
|
|
54
|
-
return null;
|
|
55
|
-
}
|
|
56
|
-
throw error;
|
|
57
|
-
}
|
|
58
|
-
}
|
|
59
|
-
/**
|
|
60
|
-
* Write snapshot to file
|
|
61
|
-
*/
|
|
62
|
-
export async function writeSnapshot(snapshotPath, schema) {
|
|
63
|
-
const snapshot = createSnapshot(schema);
|
|
64
|
-
await fs.mkdir(path.dirname(snapshotPath), { recursive: true });
|
|
65
|
-
await fs.writeFile(snapshotPath, JSON.stringify(snapshot, null, 2), "utf-8");
|
|
66
|
-
}
|
|
67
|
-
/**
|
|
68
|
-
* Diff two schemas and return the changes
|
|
69
|
-
*/
|
|
70
|
-
function diffTableChanges(previousModel, currentModel, tableName) {
|
|
71
|
-
const addedFields = [];
|
|
72
|
-
const removedFields = [];
|
|
73
|
-
const alteredFields = [];
|
|
74
|
-
const addedUniqueConstraints = [];
|
|
75
|
-
const removedUniqueConstraints = [];
|
|
76
|
-
const addedIndexes = [];
|
|
77
|
-
const removedIndexes = [];
|
|
78
|
-
const addedForeignKeys = [];
|
|
79
|
-
const removedForeignKeys = [];
|
|
80
|
-
const primaryKeyChanges = [];
|
|
81
|
-
const previousFields = new Map(previousModel.columns.map((f) => [f.name, f]));
|
|
82
|
-
const currentFields = new Map(currentModel.columns.map((f) => [f.name, f]));
|
|
83
|
-
for (const [columnName, column] of currentFields.entries()) {
|
|
84
|
-
if (!previousFields.has(columnName)) {
|
|
85
|
-
addedFields.push({ tableName, column });
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
for (const [columnName, column] of previousFields.entries()) {
|
|
89
|
-
if (!currentFields.has(columnName)) {
|
|
90
|
-
removedFields.push({ tableName, column });
|
|
91
|
-
}
|
|
92
|
-
}
|
|
93
|
-
for (const [columnName, currentColumn] of currentFields.entries()) {
|
|
94
|
-
const previousColumn = previousFields.get(columnName);
|
|
95
|
-
if (!previousColumn)
|
|
96
|
-
continue;
|
|
97
|
-
if (previousColumn.type !== currentColumn.type ||
|
|
98
|
-
previousColumn.notNull !== currentColumn.notNull ||
|
|
99
|
-
previousColumn.default !== currentColumn.default) {
|
|
100
|
-
alteredFields.push({
|
|
101
|
-
tableName,
|
|
102
|
-
columnName,
|
|
103
|
-
previous: previousColumn,
|
|
104
|
-
current: currentColumn,
|
|
105
|
-
});
|
|
106
|
-
}
|
|
107
|
-
}
|
|
108
|
-
const prevUnique = new Map(previousModel.uniqueConstraints.map((c) => [c.name, c]));
|
|
109
|
-
const currUnique = new Map(currentModel.uniqueConstraints.map((c) => [c.name, c]));
|
|
110
|
-
for (const [name, constraint] of currUnique.entries()) {
|
|
111
|
-
if (!prevUnique.has(name)) {
|
|
112
|
-
addedUniqueConstraints.push({ tableName, constraint });
|
|
113
|
-
}
|
|
114
|
-
}
|
|
115
|
-
for (const [name, constraint] of prevUnique.entries()) {
|
|
116
|
-
if (!currUnique.has(name)) {
|
|
117
|
-
removedUniqueConstraints.push({ tableName, constraint });
|
|
118
|
-
}
|
|
119
|
-
}
|
|
120
|
-
const prevIndexes = new Map(previousModel.indexes.map((i) => [i.name, i]));
|
|
121
|
-
const currIndexes = new Map(currentModel.indexes.map((i) => [i.name, i]));
|
|
122
|
-
for (const [name, index] of currIndexes.entries()) {
|
|
123
|
-
if (!prevIndexes.has(name)) {
|
|
124
|
-
addedIndexes.push({ tableName, index });
|
|
125
|
-
}
|
|
126
|
-
}
|
|
127
|
-
for (const [name, index] of prevIndexes.entries()) {
|
|
128
|
-
if (!currIndexes.has(name)) {
|
|
129
|
-
removedIndexes.push({ tableName, index });
|
|
130
|
-
}
|
|
131
|
-
}
|
|
132
|
-
const prevFks = new Map(previousModel.foreignKeys.map((f) => [f.name, f]));
|
|
133
|
-
const currFks = new Map(currentModel.foreignKeys.map((f) => [f.name, f]));
|
|
134
|
-
for (const [name, fk] of currFks.entries()) {
|
|
135
|
-
if (!prevFks.has(name)) {
|
|
136
|
-
addedForeignKeys.push({ tableName, foreignKey: fk });
|
|
137
|
-
}
|
|
138
|
-
}
|
|
139
|
-
for (const [name, fk] of prevFks.entries()) {
|
|
140
|
-
if (!currFks.has(name)) {
|
|
141
|
-
removedForeignKeys.push({ tableName, foreignKey: fk });
|
|
142
|
-
}
|
|
143
|
-
}
|
|
144
|
-
const prevPk = previousModel.primaryKey;
|
|
145
|
-
const currPk = currentModel.primaryKey;
|
|
146
|
-
const pkEqual = (prevPk?.name ?? "") === (currPk?.name ?? "") &&
|
|
147
|
-
JSON.stringify(prevPk?.columns ?? []) === JSON.stringify(currPk?.columns ?? []);
|
|
148
|
-
if (!pkEqual) {
|
|
149
|
-
primaryKeyChanges.push({
|
|
150
|
-
tableName,
|
|
151
|
-
previous: prevPk,
|
|
152
|
-
current: currPk,
|
|
153
|
-
});
|
|
154
|
-
}
|
|
155
|
-
return {
|
|
156
|
-
addedFields,
|
|
157
|
-
removedFields,
|
|
158
|
-
alteredFields,
|
|
159
|
-
addedUniqueConstraints,
|
|
160
|
-
removedUniqueConstraints,
|
|
161
|
-
addedIndexes,
|
|
162
|
-
removedIndexes,
|
|
163
|
-
addedForeignKeys,
|
|
164
|
-
removedForeignKeys,
|
|
165
|
-
primaryKeyChanges,
|
|
166
|
-
};
|
|
167
|
-
}
|
|
168
|
-
function diffSchemas(previous, current) {
|
|
169
|
-
const previousModels = new Map();
|
|
170
|
-
const currentModels = new Map();
|
|
171
|
-
previous?.tables.forEach((model) => previousModels.set(model.name, model));
|
|
172
|
-
current.tables.forEach((model) => currentModels.set(model.name, model));
|
|
173
|
-
const addedModels = [];
|
|
174
|
-
const removedModels = [];
|
|
175
|
-
for (const [tableName, model] of currentModels.entries()) {
|
|
176
|
-
if (!previousModels.has(tableName)) {
|
|
177
|
-
addedModels.push(model);
|
|
178
|
-
}
|
|
179
|
-
}
|
|
180
|
-
for (const [tableName, model] of previousModels.entries()) {
|
|
181
|
-
if (!currentModels.has(tableName)) {
|
|
182
|
-
removedModels.push(model);
|
|
183
|
-
}
|
|
184
|
-
}
|
|
185
|
-
// Field-level changes for existing tables
|
|
186
|
-
const addedFields = [];
|
|
187
|
-
const removedFields = [];
|
|
188
|
-
const alteredFields = [];
|
|
189
|
-
// Constraint changes
|
|
190
|
-
const addedUniqueConstraints = [];
|
|
191
|
-
const removedUniqueConstraints = [];
|
|
192
|
-
const addedIndexes = [];
|
|
193
|
-
const removedIndexes = [];
|
|
194
|
-
const addedForeignKeys = [];
|
|
195
|
-
const removedForeignKeys = [];
|
|
196
|
-
const primaryKeyChanges = [];
|
|
197
|
-
for (const [tableName, currentModel] of currentModels.entries()) {
|
|
198
|
-
const previousModel = previousModels.get(tableName);
|
|
199
|
-
if (!previousModel)
|
|
200
|
-
continue;
|
|
201
|
-
const modelDiff = diffTableChanges(previousModel, currentModel, tableName);
|
|
202
|
-
addedFields.push(...modelDiff.addedFields);
|
|
203
|
-
removedFields.push(...modelDiff.removedFields);
|
|
204
|
-
alteredFields.push(...modelDiff.alteredFields);
|
|
205
|
-
addedUniqueConstraints.push(...modelDiff.addedUniqueConstraints);
|
|
206
|
-
removedUniqueConstraints.push(...modelDiff.removedUniqueConstraints);
|
|
207
|
-
addedIndexes.push(...modelDiff.addedIndexes);
|
|
208
|
-
removedIndexes.push(...modelDiff.removedIndexes);
|
|
209
|
-
addedForeignKeys.push(...modelDiff.addedForeignKeys);
|
|
210
|
-
removedForeignKeys.push(...modelDiff.removedForeignKeys);
|
|
211
|
-
primaryKeyChanges.push(...modelDiff.primaryKeyChanges);
|
|
212
|
-
}
|
|
213
|
-
return {
|
|
214
|
-
addedModels,
|
|
215
|
-
removedModels,
|
|
216
|
-
addedFields,
|
|
217
|
-
removedFields,
|
|
218
|
-
alteredFields,
|
|
219
|
-
addedUniqueConstraints,
|
|
220
|
-
removedUniqueConstraints,
|
|
221
|
-
addedIndexes,
|
|
222
|
-
removedIndexes,
|
|
223
|
-
addedForeignKeys,
|
|
224
|
-
removedForeignKeys,
|
|
225
|
-
primaryKeyChanges,
|
|
226
|
-
renamedTables: [],
|
|
227
|
-
renamedColumns: [],
|
|
228
|
-
};
|
|
229
|
-
}
|
|
230
|
-
function columnsSignature(columns) {
|
|
231
|
-
return columns.join("|");
|
|
232
|
-
}
|
|
233
|
-
function consumeSignature(map, signature) {
|
|
234
|
-
const count = map.get(signature) ?? 0;
|
|
235
|
-
if (count > 0) {
|
|
236
|
-
map.set(signature, count - 1);
|
|
237
|
-
return true;
|
|
238
|
-
}
|
|
239
|
-
return false;
|
|
240
|
-
}
|
|
241
|
-
function buildSignatureCount(items, getSignature) {
|
|
242
|
-
const counts = new Map();
|
|
243
|
-
for (const item of items) {
|
|
244
|
-
const signature = getSignature(item);
|
|
245
|
-
counts.set(signature, (counts.get(signature) ?? 0) + 1);
|
|
246
|
-
}
|
|
247
|
-
return counts;
|
|
248
|
-
}
|
|
249
|
-
function columnsEqual(a, b) {
|
|
250
|
-
return JSON.stringify(a ?? []) === JSON.stringify(b ?? []);
|
|
251
|
-
}
|
|
252
|
-
function filterRenamedConstraintChanges(previousModel, currentModel, modelDiff) {
|
|
253
|
-
const prevUnique = buildSignatureCount(previousModel.uniqueConstraints, (c) => columnsSignature(c.columns));
|
|
254
|
-
const currUnique = buildSignatureCount(currentModel.uniqueConstraints, (c) => columnsSignature(c.columns));
|
|
255
|
-
const prevIndexes = buildSignatureCount(previousModel.indexes, (i) => columnsSignature(i.columns));
|
|
256
|
-
const currIndexes = buildSignatureCount(currentModel.indexes, (i) => columnsSignature(i.columns));
|
|
257
|
-
const prevFks = buildSignatureCount(previousModel.foreignKeys, (f) => `${columnsSignature(f.columns)}->${f.referencedTable}:${columnsSignature(f.referencedColumns)}`);
|
|
258
|
-
const currFks = buildSignatureCount(currentModel.foreignKeys, (f) => `${columnsSignature(f.columns)}->${f.referencedTable}:${columnsSignature(f.referencedColumns)}`);
|
|
259
|
-
const addedUniqueConstraints = modelDiff.addedUniqueConstraints.filter(({ constraint }) => !consumeSignature(prevUnique, columnsSignature(constraint.columns)));
|
|
260
|
-
const removedUniqueConstraints = modelDiff.removedUniqueConstraints.filter(({ constraint }) => !consumeSignature(currUnique, columnsSignature(constraint.columns)));
|
|
261
|
-
const addedIndexes = modelDiff.addedIndexes.filter(({ index }) => !consumeSignature(prevIndexes, columnsSignature(index.columns)));
|
|
262
|
-
const removedIndexes = modelDiff.removedIndexes.filter(({ index }) => !consumeSignature(currIndexes, columnsSignature(index.columns)));
|
|
263
|
-
const addedForeignKeys = modelDiff.addedForeignKeys.filter(({ foreignKey }) => !consumeSignature(prevFks, `${columnsSignature(foreignKey.columns)}->${foreignKey.referencedTable}:${columnsSignature(foreignKey.referencedColumns)}`));
|
|
264
|
-
const removedForeignKeys = modelDiff.removedForeignKeys.filter(({ foreignKey }) => !consumeSignature(currFks, `${columnsSignature(foreignKey.columns)}->${foreignKey.referencedTable}:${columnsSignature(foreignKey.referencedColumns)}`));
|
|
265
|
-
let primaryKeyChanges = modelDiff.primaryKeyChanges;
|
|
266
|
-
if (previousModel.primaryKey && currentModel.primaryKey) {
|
|
267
|
-
if (columnsEqual(previousModel.primaryKey.columns, currentModel.primaryKey.columns)) {
|
|
268
|
-
primaryKeyChanges = [];
|
|
269
|
-
}
|
|
270
|
-
}
|
|
271
|
-
return {
|
|
272
|
-
...modelDiff,
|
|
273
|
-
addedUniqueConstraints,
|
|
274
|
-
removedUniqueConstraints,
|
|
275
|
-
addedIndexes,
|
|
276
|
-
removedIndexes,
|
|
277
|
-
addedForeignKeys,
|
|
278
|
-
removedForeignKeys,
|
|
279
|
-
primaryKeyChanges,
|
|
280
|
-
};
|
|
281
|
-
}
|
|
282
|
-
function applyRenameMappings(diff, renameTables = [], renameColumns = []) {
|
|
283
|
-
const removedModels = [...diff.removedModels];
|
|
284
|
-
const addedModels = [...diff.addedModels];
|
|
285
|
-
const removedFields = [...diff.removedFields];
|
|
286
|
-
const addedFields = [...diff.addedFields];
|
|
287
|
-
const alteredFields = [...diff.alteredFields];
|
|
288
|
-
const addedUniqueConstraints = [...diff.addedUniqueConstraints];
|
|
289
|
-
const removedUniqueConstraints = [...diff.removedUniqueConstraints];
|
|
290
|
-
const addedIndexes = [...diff.addedIndexes];
|
|
291
|
-
const removedIndexes = [...diff.removedIndexes];
|
|
292
|
-
const addedForeignKeys = [...diff.addedForeignKeys];
|
|
293
|
-
const removedForeignKeys = [...diff.removedForeignKeys];
|
|
294
|
-
const primaryKeyChanges = [...diff.primaryKeyChanges];
|
|
295
|
-
const renamedTables = [];
|
|
296
|
-
const renamedColumns = [];
|
|
297
|
-
const renamedTableMap = new Map();
|
|
298
|
-
renameTables.forEach((mapping) => {
|
|
299
|
-
const fromIndex = removedModels.findIndex((model) => model.name === mapping.from);
|
|
300
|
-
const toIndex = addedModels.findIndex((model) => model.name === mapping.to);
|
|
301
|
-
if (fromIndex === -1 || toIndex === -1) {
|
|
302
|
-
return;
|
|
303
|
-
}
|
|
304
|
-
const previousModel = removedModels[fromIndex];
|
|
305
|
-
const currentModel = addedModels[toIndex];
|
|
306
|
-
removedModels.splice(fromIndex, 1);
|
|
307
|
-
addedModels.splice(toIndex, 1);
|
|
308
|
-
renamedTables.push({ from: mapping.from, to: mapping.to });
|
|
309
|
-
renamedTableMap.set(mapping.from, mapping.to);
|
|
310
|
-
const modelDiff = filterRenamedConstraintChanges(previousModel, currentModel, diffTableChanges(previousModel, currentModel, mapping.to));
|
|
311
|
-
addedFields.push(...modelDiff.addedFields);
|
|
312
|
-
removedFields.push(...modelDiff.removedFields);
|
|
313
|
-
alteredFields.push(...modelDiff.alteredFields);
|
|
314
|
-
addedUniqueConstraints.push(...modelDiff.addedUniqueConstraints);
|
|
315
|
-
removedUniqueConstraints.push(...modelDiff.removedUniqueConstraints);
|
|
316
|
-
addedIndexes.push(...modelDiff.addedIndexes);
|
|
317
|
-
removedIndexes.push(...modelDiff.removedIndexes);
|
|
318
|
-
addedForeignKeys.push(...modelDiff.addedForeignKeys);
|
|
319
|
-
removedForeignKeys.push(...modelDiff.removedForeignKeys);
|
|
320
|
-
primaryKeyChanges.push(...modelDiff.primaryKeyChanges);
|
|
321
|
-
});
|
|
322
|
-
const remapTableName = (tableName) => renamedTableMap.get(tableName) ?? tableName;
|
|
323
|
-
const remapTableEntries = (items) => items.map((item) => ({ ...item, tableName: remapTableName(item.tableName) }));
|
|
324
|
-
if (renamedTableMap.size > 0) {
|
|
325
|
-
removedFields.forEach((entry) => {
|
|
326
|
-
const mapped = renamedTableMap.get(entry.tableName);
|
|
327
|
-
if (mapped) {
|
|
328
|
-
entry.tableName = mapped;
|
|
329
|
-
}
|
|
330
|
-
});
|
|
331
|
-
}
|
|
332
|
-
renameColumns.forEach((mapping) => {
|
|
333
|
-
const mappedTable = remapTableName(mapping.table);
|
|
334
|
-
const removedIdx = removedFields.findIndex((f) => f.tableName === mappedTable && f.column.name === mapping.from);
|
|
335
|
-
const addedIdx = addedFields.findIndex((f) => f.tableName === mappedTable && f.column.name === mapping.to);
|
|
336
|
-
if (removedIdx !== -1 && addedIdx !== -1) {
|
|
337
|
-
removedFields.splice(removedIdx, 1);
|
|
338
|
-
addedFields.splice(addedIdx, 1);
|
|
339
|
-
renamedColumns.push({ tableName: mappedTable, from: mapping.from, to: mapping.to });
|
|
340
|
-
}
|
|
341
|
-
});
|
|
342
|
-
return {
|
|
343
|
-
...diff,
|
|
344
|
-
removedModels,
|
|
345
|
-
addedModels,
|
|
346
|
-
removedFields: remapTableEntries(removedFields),
|
|
347
|
-
addedFields: remapTableEntries(addedFields),
|
|
348
|
-
alteredFields: remapTableEntries(alteredFields),
|
|
349
|
-
renamedTables,
|
|
350
|
-
renamedColumns,
|
|
351
|
-
addedUniqueConstraints: remapTableEntries(addedUniqueConstraints),
|
|
352
|
-
removedUniqueConstraints: remapTableEntries(removedUniqueConstraints),
|
|
353
|
-
addedIndexes: remapTableEntries(addedIndexes),
|
|
354
|
-
removedIndexes: remapTableEntries(removedIndexes),
|
|
355
|
-
addedForeignKeys: remapTableEntries(addedForeignKeys),
|
|
356
|
-
removedForeignKeys: remapTableEntries(removedForeignKeys),
|
|
357
|
-
primaryKeyChanges: remapTableEntries(primaryKeyChanges),
|
|
358
|
-
};
|
|
359
|
-
}
|
|
360
|
-
/**
|
|
361
|
-
* Topologically sort tables so that referenced tables come before tables that reference them.
|
|
362
|
-
* Tables with no foreign keys come first, then tables that only reference already-ordered tables.
|
|
363
|
-
*/
|
|
364
|
-
function sortTablesByDependencies(tables) {
|
|
365
|
-
const tableMap = new Map(tables.map((t) => [t.name, t]));
|
|
366
|
-
const sorted = [];
|
|
367
|
-
const visited = new Set();
|
|
368
|
-
const visiting = new Set();
|
|
369
|
-
function visit(tableName) {
|
|
370
|
-
if (visited.has(tableName))
|
|
371
|
-
return;
|
|
372
|
-
if (visiting.has(tableName)) {
|
|
373
|
-
// Circular dependency - just add it and let the DB handle it
|
|
374
|
-
return;
|
|
375
|
-
}
|
|
376
|
-
const table = tableMap.get(tableName);
|
|
377
|
-
if (!table)
|
|
378
|
-
return;
|
|
379
|
-
visiting.add(tableName);
|
|
380
|
-
// Visit all tables this table references first
|
|
381
|
-
for (const fk of table.foreignKeys) {
|
|
382
|
-
if (tableMap.has(fk.referencedTable) && fk.referencedTable !== tableName) {
|
|
383
|
-
visit(fk.referencedTable);
|
|
384
|
-
}
|
|
385
|
-
}
|
|
386
|
-
visiting.delete(tableName);
|
|
387
|
-
visited.add(tableName);
|
|
388
|
-
sorted.push(table);
|
|
389
|
-
}
|
|
390
|
-
for (const table of tables) {
|
|
391
|
-
visit(table.name);
|
|
392
|
-
}
|
|
393
|
-
return sorted;
|
|
394
|
-
}
|
|
395
|
-
/**
|
|
396
|
-
* Build SQL statements from diff
|
|
397
|
-
*/
|
|
398
|
-
function buildSqlStatements(diff, dialect) {
|
|
399
|
-
const up = [];
|
|
400
|
-
const down = [];
|
|
401
|
-
const compileOpts = { dialect };
|
|
402
|
-
// Table renames
|
|
403
|
-
for (const rename of diff.renamedTables) {
|
|
404
|
-
up.push(compileRenameTable(rename.from, rename.to, compileOpts));
|
|
405
|
-
down.unshift(compileRenameTable(rename.to, rename.from, compileOpts));
|
|
406
|
-
}
|
|
407
|
-
// Column renames
|
|
408
|
-
for (const rename of diff.renamedColumns) {
|
|
409
|
-
up.push(compileRenameColumn(rename.tableName, rename.from, rename.to, compileOpts));
|
|
410
|
-
down.unshift(compileRenameColumn(rename.tableName, rename.to, rename.from, compileOpts));
|
|
411
|
-
}
|
|
412
|
-
// Create tables (sorted by dependency order so referenced tables are created first)
|
|
413
|
-
const sortedAddedModels = sortTablesByDependencies(diff.addedModels);
|
|
414
|
-
for (const model of sortedAddedModels) {
|
|
415
|
-
up.push(compileCreateTable(model, compileOpts));
|
|
416
|
-
down.unshift(compileDropTable(model.name, compileOpts));
|
|
417
|
-
}
|
|
418
|
-
// Drop tables
|
|
419
|
-
for (const model of diff.removedModels) {
|
|
420
|
-
up.push(compileDropTable(model.name, compileOpts));
|
|
421
|
-
down.unshift(compileCreateTable(model, compileOpts));
|
|
422
|
-
}
|
|
423
|
-
// Primary key changes (drop old first)
|
|
424
|
-
for (const change of diff.primaryKeyChanges) {
|
|
425
|
-
if (change.previous) {
|
|
426
|
-
up.push(compileDropConstraint(change.tableName, change.previous.name, compileOpts));
|
|
427
|
-
down.unshift(compileAddPrimaryKeyConstraint(change.tableName, change.previous.name, change.previous.columns, compileOpts));
|
|
428
|
-
}
|
|
429
|
-
}
|
|
430
|
-
// Drop foreign keys first (before dropping columns)
|
|
431
|
-
for (const { tableName, foreignKey } of diff.removedForeignKeys) {
|
|
432
|
-
up.push(compileDropConstraint(tableName, foreignKey.name, compileOpts));
|
|
433
|
-
down.unshift(compileAddForeignKeyConstraint(tableName, foreignKey.name, foreignKey.columns, foreignKey.referencedTable, foreignKey.referencedColumns, compileOpts));
|
|
434
|
-
}
|
|
435
|
-
// Drop unique constraints
|
|
436
|
-
for (const { tableName, constraint } of diff.removedUniqueConstraints) {
|
|
437
|
-
up.push(compileDropConstraint(tableName, constraint.name, compileOpts));
|
|
438
|
-
down.unshift(compileAddUniqueConstraint(tableName, constraint.name, constraint.columns, compileOpts));
|
|
439
|
-
}
|
|
440
|
-
// Drop indexes
|
|
441
|
-
for (const { tableName, index } of diff.removedIndexes) {
|
|
442
|
-
up.push(compileDropIndex(index.name, compileOpts));
|
|
443
|
-
down.unshift(compileCreateIndex(tableName, index.name, index.columns, compileOpts));
|
|
444
|
-
}
|
|
445
|
-
// Add columns
|
|
446
|
-
for (const { tableName, column } of diff.addedFields) {
|
|
447
|
-
up.push(compileAddColumn(tableName, column, compileOpts));
|
|
448
|
-
down.unshift(compileDropColumn(tableName, column.name, compileOpts));
|
|
449
|
-
}
|
|
450
|
-
// Drop columns
|
|
451
|
-
for (const { tableName, column } of diff.removedFields) {
|
|
452
|
-
up.push(compileDropColumn(tableName, column.name, compileOpts));
|
|
453
|
-
down.unshift(compileAddColumn(tableName, column, compileOpts));
|
|
454
|
-
}
|
|
455
|
-
// Alter columns
|
|
456
|
-
for (const change of diff.alteredFields) {
|
|
457
|
-
const typeChanged = change.previous.type !== change.current.type;
|
|
458
|
-
const nullChanged = change.previous.notNull !== change.current.notNull;
|
|
459
|
-
const defaultChanged = change.previous.default !== change.current.default;
|
|
460
|
-
if (typeChanged) {
|
|
461
|
-
up.push(...compileAlterColumn(change.tableName, change.columnName, { setType: change.current.type }, compileOpts));
|
|
462
|
-
down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setType: change.previous.type }, compileOpts));
|
|
463
|
-
}
|
|
464
|
-
if (nullChanged) {
|
|
465
|
-
if (change.current.notNull) {
|
|
466
|
-
up.push(...compileAlterColumn(change.tableName, change.columnName, { setNotNull: true }, compileOpts));
|
|
467
|
-
down.unshift(...compileAlterColumn(change.tableName, change.columnName, { dropNotNull: true }, compileOpts));
|
|
468
|
-
}
|
|
469
|
-
else {
|
|
470
|
-
up.push(...compileAlterColumn(change.tableName, change.columnName, { dropNotNull: true }, compileOpts));
|
|
471
|
-
down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setNotNull: true }, compileOpts));
|
|
472
|
-
}
|
|
473
|
-
}
|
|
474
|
-
if (defaultChanged) {
|
|
475
|
-
if (change.current.default !== undefined) {
|
|
476
|
-
up.push(...compileAlterColumn(change.tableName, change.columnName, { setDefault: change.current.default }, compileOpts));
|
|
477
|
-
}
|
|
478
|
-
else {
|
|
479
|
-
up.push(...compileAlterColumn(change.tableName, change.columnName, { dropDefault: true }, compileOpts));
|
|
480
|
-
}
|
|
481
|
-
if (change.previous.default !== undefined) {
|
|
482
|
-
down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setDefault: change.previous.default }, compileOpts));
|
|
483
|
-
}
|
|
484
|
-
else {
|
|
485
|
-
down.unshift(...compileAlterColumn(change.tableName, change.columnName, { dropDefault: true }, compileOpts));
|
|
486
|
-
}
|
|
487
|
-
}
|
|
488
|
-
}
|
|
489
|
-
// Primary key changes (add new)
|
|
490
|
-
for (const change of diff.primaryKeyChanges) {
|
|
491
|
-
if (change.current) {
|
|
492
|
-
up.push(compileAddPrimaryKeyConstraint(change.tableName, change.current.name, change.current.columns, compileOpts));
|
|
493
|
-
down.unshift(compileDropConstraint(change.tableName, change.current.name, compileOpts));
|
|
494
|
-
}
|
|
495
|
-
}
|
|
496
|
-
// Add unique constraints
|
|
497
|
-
for (const { tableName, constraint } of diff.addedUniqueConstraints) {
|
|
498
|
-
up.push(compileAddUniqueConstraint(tableName, constraint.name, constraint.columns, compileOpts));
|
|
499
|
-
down.unshift(compileDropConstraint(tableName, constraint.name, compileOpts));
|
|
500
|
-
}
|
|
501
|
-
// Add indexes
|
|
502
|
-
for (const { tableName, index } of diff.addedIndexes) {
|
|
503
|
-
up.push(compileCreateIndex(tableName, index.name, index.columns, compileOpts));
|
|
504
|
-
down.unshift(compileDropIndex(index.name, compileOpts));
|
|
505
|
-
}
|
|
506
|
-
// Add foreign keys
|
|
507
|
-
for (const { tableName, foreignKey } of diff.addedForeignKeys) {
|
|
508
|
-
up.push(compileAddForeignKeyConstraint(tableName, foreignKey.name, foreignKey.columns, foreignKey.referencedTable, foreignKey.referencedColumns, compileOpts));
|
|
509
|
-
down.unshift(compileDropConstraint(tableName, foreignKey.name, compileOpts));
|
|
510
|
-
}
|
|
511
|
-
return { up, down };
|
|
512
|
-
}
|
|
513
|
-
/**
|
|
514
|
-
* Create a Prisma-compatible migration
|
|
515
|
-
*/
|
|
516
|
-
export async function createPrismaMigration(options) {
|
|
517
|
-
const currentSchema = await generateSchemaSnapshot(options.schemaPath);
|
|
518
|
-
const { snapshotPath } = getSnapshotPaths(options.outputPath);
|
|
519
|
-
const previousSnapshot = await readSnapshot(snapshotPath);
|
|
520
|
-
const diff = applyRenameMappings(diffSchemas(previousSnapshot?.schema ?? null, currentSchema), options.renameTables, options.renameColumns);
|
|
521
|
-
const { up, down } = buildSqlStatements(diff, options.dialect);
|
|
522
|
-
if (up.length === 0) {
|
|
523
|
-
return null;
|
|
524
|
-
}
|
|
525
|
-
const timestamp = Date.now();
|
|
526
|
-
const timestampStr = generateTimestamp();
|
|
527
|
-
const safeName = options.name.replace(/[^a-z0-9]/gi, "_").toLowerCase();
|
|
528
|
-
const folderName = `${timestampStr}_${safeName}`;
|
|
529
|
-
const folderPath = path.join(options.outputPath, folderName);
|
|
530
|
-
// Build migration.sql content with comments
|
|
531
|
-
const sqlContent = [
|
|
532
|
-
`-- Migration: ${options.name}`,
|
|
533
|
-
`-- Generated at: ${new Date(timestamp).toISOString()}`,
|
|
534
|
-
"",
|
|
535
|
-
...up,
|
|
536
|
-
"",
|
|
537
|
-
].join("\n");
|
|
538
|
-
// Create migration folder and file
|
|
539
|
-
await fs.mkdir(folderPath, { recursive: true });
|
|
540
|
-
await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
|
|
541
|
-
// Update snapshot
|
|
542
|
-
await writeSnapshot(snapshotPath, currentSchema);
|
|
543
|
-
// Append to migration log
|
|
544
|
-
const checksum = calculateChecksum(sqlContent);
|
|
545
|
-
await appendToMigrationLog(options.outputPath, { name: folderName, checksum });
|
|
546
|
-
return {
|
|
547
|
-
folderName,
|
|
548
|
-
folderPath,
|
|
549
|
-
sql: sqlContent,
|
|
550
|
-
timestamp,
|
|
551
|
-
};
|
|
552
|
-
}
|
|
553
|
-
/**
|
|
554
|
-
* Create an initial migration that creates all tables from scratch.
|
|
555
|
-
* This is used when initializing a project where the database is empty.
|
|
556
|
-
*/
|
|
557
|
-
export async function createInitialMigration(options) {
|
|
558
|
-
const currentSchema = await generateSchemaSnapshot(options.schemaPath);
|
|
559
|
-
const { snapshotPath } = getSnapshotPaths(options.outputPath);
|
|
560
|
-
// Diff against empty schema to get full creation SQL
|
|
561
|
-
const diff = diffSchemas(null, currentSchema);
|
|
562
|
-
const { up } = buildSqlStatements(diff, options.dialect);
|
|
563
|
-
const timestamp = Date.now();
|
|
564
|
-
const timestampStr = generateTimestamp();
|
|
565
|
-
const safeName = (options.name ?? "init").replace(/[^a-z0-9]/gi, "_").toLowerCase();
|
|
566
|
-
const folderName = `${timestampStr}_${safeName}`;
|
|
567
|
-
const folderPath = path.join(options.outputPath, folderName);
|
|
568
|
-
// Build migration.sql content with comments
|
|
569
|
-
const sqlContent = [
|
|
570
|
-
`-- Migration: ${options.name ?? "init"}`,
|
|
571
|
-
`-- Generated at: ${new Date(timestamp).toISOString()}`,
|
|
572
|
-
"",
|
|
573
|
-
...up,
|
|
574
|
-
"",
|
|
575
|
-
].join("\n");
|
|
576
|
-
// Create migration folder and file
|
|
577
|
-
await fs.mkdir(folderPath, { recursive: true });
|
|
578
|
-
await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
|
|
579
|
-
// Update snapshot
|
|
580
|
-
await writeSnapshot(snapshotPath, currentSchema);
|
|
581
|
-
// Append to migration log
|
|
582
|
-
const checksum = calculateChecksum(sqlContent);
|
|
583
|
-
await appendToMigrationLog(options.outputPath, { name: folderName, checksum });
|
|
584
|
-
return {
|
|
585
|
-
folderName,
|
|
586
|
-
folderPath,
|
|
587
|
-
sql: sqlContent,
|
|
588
|
-
timestamp,
|
|
589
|
-
};
|
|
590
|
-
}
|
|
591
|
-
/**
 * Ensure the Prisma-compatible migrations bookkeeping table exists.
 *
 * Issues a dialect-specific `CREATE TABLE IF NOT EXISTS` mirroring Prisma's
 * `_prisma_migrations` layout (id, checksum, timestamps, applied_steps_count).
 * Any dialect other than "sqlite"/"postgres" falls through to the
 * backtick-quoted (MySQL-style) branch.
 *
 * Fix: removed the unused `fullTableName` local — it was computed but never
 * referenced; each branch builds its own quoted identifier inline.
 *
 * @param db - Kysely database instance to execute against.
 * @param tableName - Migrations table name (usually "_prisma_migrations").
 * @param schema - Schema to qualify the table with (postgres branch only).
 * @param dialect - "sqlite" | "postgres" | other (treated as MySQL).
 */
async function ensureMigrationsTable(db, tableName, schema, dialect) {
    if (dialect === "sqlite") {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${tableName}"`)} (
        id TEXT PRIMARY KEY,
        checksum TEXT NOT NULL,
        finished_at TEXT,
        migration_name TEXT NOT NULL,
        logs TEXT,
        rolled_back_at TEXT,
        started_at TEXT NOT NULL DEFAULT (datetime('now')),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
    else if (dialect === "postgres") {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${schema}"."${tableName}"`)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at TIMESTAMPTZ,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at TIMESTAMPTZ,
        started_at TIMESTAMPTZ NOT NULL DEFAULT now(),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
    else {
        await sql `
      CREATE TABLE IF NOT EXISTS ${sql.raw(`\`${tableName}\``)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at DATETIME,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at DATETIME,
        started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
    }
}
|
|
639
|
-
/**
 * Get the list of applied migrations from the _prisma_migrations table.
 *
 * Only rows that finished successfully and were not rolled back count as
 * applied (`rolled_back_at IS NULL AND finished_at IS NOT NULL`). Quoting is
 * dialect-specific: double quotes for postgres/sqlite, backticks otherwise.
 *
 * @returns {Promise<Map<string, object>>} Map keyed by migration_name to its full DB row.
 */
async function getAppliedMigrations(db, tableName, schema, dialect) {
    let result;
    if (dialect === "postgres" && schema) {
        result = await sql `
      SELECT * FROM ${sql.raw(`"${schema}"."${tableName}"`)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    }
    else if (dialect === "sqlite") {
        result = await sql `
      SELECT * FROM ${sql.raw(`"${tableName}"`)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    }
    else {
        result = await sql `
      SELECT * FROM ${sql.raw(`\`${tableName}\``)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
    }
    return new Map(result.rows.map((row) => [row.migration_name, row]));
}
|
|
664
|
-
/**
 * Record an applied migration in the _prisma_migrations table.
 *
 * Inserts a row with a fresh UUID, the migration name and checksum, a
 * dialect-appropriate "now" timestamp for finished_at, and a single applied
 * step — matching what getAppliedMigrations considers "applied".
 */
async function recordMigration(db, tableName, schema, dialect, migrationName, checksum) {
    const id = crypto.randomUUID();
    if (dialect === "postgres" && schema) {
        await sql `
      INSERT INTO ${sql.raw(`"${schema}"."${tableName}"`)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, now(), 1)
    `.execute(db);
    }
    else if (dialect === "sqlite") {
        await sql `
      INSERT INTO ${sql.raw(`"${tableName}"`)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, datetime('now'), 1)
    `.execute(db);
    }
    else {
        await sql `
      INSERT INTO ${sql.raw(`\`${tableName}\``)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, NOW(), 1)
    `.execute(db);
    }
}
|
|
688
|
-
/**
 * Calculate the SHA-256 checksum of migration SQL content.
 *
 * Fix: the parameter was previously named `sql`, shadowing the module-level
 * kysely `sql` template tag used elsewhere in this file; renamed to
 * `sqlContent` (positional call sites are unaffected).
 *
 * @param {string} sqlContent - Raw migration.sql text.
 * @returns {string} Lowercase hex SHA-256 digest.
 */
export function calculateChecksum(sqlContent) {
    return crypto.createHash("sha256").update(sqlContent).digest("hex");
}
|
|
694
|
-
/**
 * Validate that the database's applied migrations are coherent with the
 * migration log and the migration folders present on disk.
 *
 * Coherence rules:
 * 1. Every log entry must have a folder on disk ("missing_from_disk").
 * 2. Every migration applied in the DB must exist in the log
 *    ("missing_from_log"); if any are missing the remaining checks are
 *    skipped, since they assume a complete log.
 * 3. Applied migrations must form a contiguous prefix of the log — a skipped
 *    log entry before an applied one is an "order_mismatch".
 * 4. The DB checksum of each applied migration must match the log
 *    ("checksum_mismatch").
 *
 * @param {Map<string, {checksum: string}>} appliedMigrations - DB rows keyed by migration name.
 * @param {{name: string, checksum: string}[]} migrationLog - Ordered log entries.
 * @param {string[]} migrationFolders - Folder names found on disk.
 * @returns {{isCoherent: boolean, errors: {type: string, migrationName: string, details: string}[]}}
 */
function validateMigrationCoherence(appliedMigrations, migrationLog, migrationFolders) {
    const errors = [];
    const logMigrationNames = new Set(migrationLog.map((e) => e.name));
    const folderNames = new Set(migrationFolders);
    // Rule 1: every log entry needs a backing folder on disk.
    for (const entry of migrationLog) {
        if (folderNames.has(entry.name)) {
            continue;
        }
        errors.push({
            type: "missing_from_disk",
            migrationName: entry.name,
            details: `Migration "${entry.name}" exists in migration log but not on disk`,
        });
    }
    // Rule 2: every DB-applied migration must be known to the log.
    for (const migrationName of appliedMigrations.keys()) {
        if (logMigrationNames.has(migrationName)) {
            continue;
        }
        errors.push({
            type: "missing_from_log",
            migrationName,
            details: `Migration "${migrationName}" exists in database but not in migration log`,
        });
    }
    // Bail out early: the prefix/checksum checks assume a complete log.
    if (errors.length > 0) {
        return { isCoherent: false, errors };
    }
    // Rules 3 & 4: walk the log in order, tracking the index of the most
    // recently seen applied entry to detect gaps, and comparing checksums.
    let previousAppliedIndex = -1;
    migrationLog.forEach((logEntry, index) => {
        const dbRow = appliedMigrations.get(logEntry.name);
        if (dbRow === undefined) {
            return;
        }
        // Any log entries between the last applied one and this one were
        // skipped — report each as an ordering violation.
        for (let gap = previousAppliedIndex + 1; gap < index; gap++) {
            const missing = migrationLog[gap];
            errors.push({
                type: "order_mismatch",
                migrationName: missing.name,
                details: `Migration "${missing.name}" is in the log but not applied, yet later migration "${logEntry.name}" is applied`,
            });
        }
        previousAppliedIndex = index;
        if (dbRow.checksum !== logEntry.checksum) {
            errors.push({
                type: "checksum_mismatch",
                migrationName: logEntry.name,
                details: `Checksum mismatch for "${logEntry.name}": database has ${dbRow.checksum.slice(0, 8)}..., log has ${logEntry.checksum.slice(0, 8)}...`,
            });
        }
    });
    return {
        isCoherent: errors.length === 0,
        errors,
    };
}
|
|
767
|
-
/**
 * Execute raw SQL using the database driver directly.
 *
 * This bypasses Kysely for DDL statements, which don't work reliably with
 * sql.raw(). Drivers are imported lazily so only the one matching the active
 * dialect needs to be installed.
 *
 * - sqlite: better-sqlite3's exec() handles multi-statement scripts natively.
 * - postgres: supports transactional DDL, so the whole script runs in one
 *   BEGIN/COMMIT (rolled back on error).
 * - mysql: statements must be executed one at a time; the script is split on
 *   statement-terminating semicolons. NOTE(review): MySQL DDL is not
 *   transactional, so a mid-script failure leaves earlier statements applied.
 *
 * Fixes:
 * - The old splitter discarded any statement chunk that *started* with "--",
 *   which dropped the first real statement of generated migration.sql files
 *   (they begin with "-- Migration: ..." header comments). Comment lines are
 *   now stripped within each chunk instead of discarding whole chunks.
 * - The finally block called pool.promise().end(), creating a second promise
 *   wrapper just to close the pool; it now reuses the existing promisePool.
 *
 * @param {string} dialect - "sqlite" | "postgres" | "mysql".
 * @param {string} sqlContent - Raw SQL script to execute.
 * @param {{ connectionUrl?: string, databasePath?: string }} options
 */
async function executeRawSql(dialect, sqlContent, options) {
    if (dialect === "sqlite") {
        const { default: Database } = await import("better-sqlite3");
        const sqliteDb = new Database(options.databasePath || ":memory:");
        try {
            // better-sqlite3's exec() handles multiple statements properly.
            sqliteDb.exec(sqlContent);
        }
        finally {
            sqliteDb.close();
        }
    }
    else if (dialect === "postgres") {
        const { Pool } = await import("pg");
        const pool = new Pool({ connectionString: options.connectionUrl });
        const client = await pool.connect();
        try {
            // PostgreSQL supports transactional DDL, so wrap the migration in a transaction.
            await client.query("BEGIN");
            await client.query(sqlContent);
            await client.query("COMMIT");
        }
        catch (error) {
            await client.query("ROLLBACK");
            throw error;
        }
        finally {
            client.release();
            await pool.end();
        }
    }
    else if (dialect === "mysql") {
        // Use mysql2 with its promise wrapper.
        const mysql = await import("mysql2");
        const pool = mysql.createPool({ uri: options.connectionUrl });
        const promisePool = pool.promise();
        try {
            // MySQL needs statements executed one at a time. Split on
            // semicolons that terminate a statement, then strip "--" comment
            // lines *inside* each chunk so a leading comment header no longer
            // causes the whole statement to be dropped.
            const statements = sqlContent
                .split(/;(?:\s*\n|\s*$)/)
                .map((chunk) => chunk
                    .split("\n")
                    .filter((line) => !line.trim().startsWith("--"))
                    .join("\n")
                    .trim())
                .filter((stmt) => stmt.length > 0);
            for (const statement of statements) {
                await promisePool.query(statement);
            }
        }
        finally {
            await promisePool.end();
        }
    }
}
|
|
822
|
-
/**
 * Apply pending Prisma migrations to the target database.
 *
 * Flow: ensure the bookkeeping table exists → read applied rows → enumerate
 * on-disk migration folders (14-digit-timestamp-prefixed) that contain a
 * migration.sql → validate coherence between DB, log, and disk → apply each
 * pending migration in sorted order, recording it after success. Stops at the
 * first failure (checksum mismatch or SQL error) and reports it in
 * `result.failed`.
 *
 * @param {{ dialect: string, connectionUrl?: string, databasePath?: string,
 *           migrationsFolder: string, migrationsTable?: string,
 *           migrationsSchema?: string }} options
 * @returns {Promise<{ applied: {migrationName: string, duration: number}[],
 *                     alreadyApplied: string[], failed?: object,
 *                     coherenceErrors?: object[] }>}
 */
export async function applyPrismaMigrations(options) {
    const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
    const migrationsSchema = options.migrationsSchema ?? "public";
    const { db, destroy } = await createKyselyAdapter({
        dialect: options.dialect,
        connectionUrl: options.connectionUrl,
        databasePath: options.databasePath,
    });
    try {
        // Ensure migrations table exists.
        await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
        // Get already applied migrations.
        const appliedMigrations = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
        // Read migration folders (only 14-digit-timestamp-prefixed directories).
        const entries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
        const migrationFolders = entries
            .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
            .map((e) => e.name)
            .sort();
        // Keep only folders that actually contain a migration.sql.
        const migrationFoldersWithSql = [];
        for (const folderName of migrationFolders) {
            const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
            try {
                await fs.access(sqlPath);
                migrationFoldersWithSql.push(folderName);
            }
            catch {
                // Missing migration.sql; coherence check will flag if it's in the log
            }
        }
        // Read migration log and validate coherence before touching the DB.
        const migrationLog = await readMigrationLog(options.migrationsFolder);
        const coherence = validateMigrationCoherence(appliedMigrations, migrationLog, migrationFoldersWithSql);
        if (!coherence.isCoherent) {
            // Refuse to apply anything when DB/log/disk disagree.
            return {
                applied: [],
                alreadyApplied: [],
                coherenceErrors: coherence.errors,
            };
        }
        const result = {
            applied: [],
            alreadyApplied: [],
        };
        for (const folderName of migrationFoldersWithSql) {
            if (appliedMigrations.has(folderName)) {
                result.alreadyApplied.push(folderName);
                continue;
            }
            const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
            let sqlContent;
            try {
                sqlContent = await fs.readFile(sqlPath, "utf-8");
            }
            catch {
                continue; // Skip if no migration.sql
            }
            const checksum = calculateChecksum(sqlContent);
            // Verify checksum against migration log (migrationLog already read above);
            // a mismatch means the file was edited after generation — stop here.
            const logEntry = migrationLog.find((m) => m.name === folderName);
            if (logEntry && logEntry.checksum !== checksum) {
                result.failed = {
                    migrationName: folderName,
                    error: `Checksum mismatch for migration ${folderName}.\n` +
                        `Expected: ${logEntry.checksum}\n` +
                        `Found: ${checksum}\n` +
                        `The migration file may have been modified after generation.`,
                };
                break;
            }
            const startTime = Date.now();
            try {
                // Execute the migration SQL using direct driver access.
                await executeRawSql(options.dialect, sqlContent, {
                    connectionUrl: options.connectionUrl,
                    databasePath: options.databasePath,
                });
                // Record the migration (still use Kysely for this since it's simple INSERT).
                await recordMigration(db, migrationsTable, migrationsSchema, options.dialect, folderName, checksum);
                result.applied.push({
                    migrationName: folderName,
                    duration: Date.now() - startTime,
                });
            }
            catch (error) {
                result.failed = {
                    migrationName: folderName,
                    error: error instanceof Error ? error.message : String(error),
                };
                break; // Stop on first failure
            }
        }
        return result;
    }
    finally {
        await destroy();
    }
}
|
|
923
|
-
/**
 * Preview pending migrations without applying them.
 *
 * Same discovery logic as applyPrismaMigrations (bookkeeping table, applied
 * rows, sorted timestamp-prefixed folders) but returns the pending SQL
 * instead of executing it. Note: no coherence or checksum validation is
 * performed here — this is a read-only listing.
 *
 * @param {{ dialect: string, connectionUrl?: string, databasePath?: string,
 *           migrationsFolder: string, migrationsTable?: string,
 *           migrationsSchema?: string }} options
 * @returns {Promise<{ pending: {name: string, sql: string}[], alreadyApplied: string[] }>}
 */
export async function previewPrismaMigrations(options) {
    const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
    const migrationsSchema = options.migrationsSchema ?? "public";
    const { db, destroy } = await createKyselyAdapter({
        dialect: options.dialect,
        connectionUrl: options.connectionUrl,
        databasePath: options.databasePath,
    });
    try {
        // Ensure migrations table exists.
        await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
        // Get already applied migrations.
        const appliedMigrations = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
        // Read migration folders (only 14-digit-timestamp-prefixed directories).
        const entries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
        const migrationFolders = entries
            .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
            .map((e) => e.name)
            .sort();
        const result = {
            pending: [],
            alreadyApplied: [],
        };
        for (const folderName of migrationFolders) {
            if (appliedMigrations.has(folderName)) {
                result.alreadyApplied.push(folderName);
                continue;
            }
            const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
            let sqlContent;
            try {
                sqlContent = await fs.readFile(sqlPath, "utf-8");
            }
            catch {
                continue; // Skip if no migration.sql
            }
            result.pending.push({
                name: folderName,
                sql: sqlContent,
            });
        }
        return result;
    }
    finally {
        await destroy();
    }
}
|
|
973
|
-
/**
 * Check whether the current schema differs from the stored snapshot.
 *
 * Diffs the schema at `options.schemaPath` against the previous snapshot
 * (or null if none exists) and reports true when any change bucket in the
 * diff is non-empty.
 *
 * @param {{ schemaPath: string, outputPath: string }} options
 * @returns {Promise<boolean>} true if any schema change was detected.
 */
export async function hasPrismaSchemaChanges(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    // Every category of change the differ can report; any non-empty bucket
    // means the schema drifted from the snapshot.
    const changeBuckets = [
        diff.addedModels,
        diff.removedModels,
        diff.addedFields,
        diff.removedFields,
        diff.alteredFields,
        diff.addedUniqueConstraints,
        diff.removedUniqueConstraints,
        diff.addedIndexes,
        diff.removedIndexes,
        diff.addedForeignKeys,
        diff.removedForeignKeys,
        diff.primaryKeyChanges,
    ];
    return changeBuckets.some((bucket) => bucket.length > 0);
}
|
|
994
|
-
/**
 * Detect potential renames by finding removed+added pairs.
 *
 * A table rename is suggested when one model is removed and one is added; a
 * column rename when, within the same table, one column is removed and one is
 * added. Pairs are matched positionally (by order in the diff), so with
 * multiple candidates the suggestions are heuristic and the user is expected
 * to disambiguate.
 *
 * @param {{ schemaPath: string, outputPath: string }} options
 * @returns {Promise<{ tables: {from: string, to: string}[],
 *                     columns: {table: string, from: string, to: string}[] }>}
 */
export async function detectPotentialRenames(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    const result = {
        tables: [],
        columns: [],
    };
    // Detect potential table renames: one removed + one added.
    // For simplicity, if there's exactly one removed and one added, suggest it as a rename.
    // For multiple, pair them up by order (user can disambiguate).
    const minTablePairs = Math.min(diff.removedModels.length, diff.addedModels.length);
    for (let i = 0; i < minTablePairs; i++) {
        result.tables.push({
            from: diff.removedModels[i].name,
            to: diff.addedModels[i].name,
        });
    }
    // Detect potential column renames within the same table.
    // Group removed/added fields by table first.
    const removedByTable = new Map();
    const addedByTable = new Map();
    for (const { tableName, column } of diff.removedFields) {
        if (!removedByTable.has(tableName)) {
            removedByTable.set(tableName, []);
        }
        removedByTable.get(tableName).push(column.name);
    }
    for (const { tableName, column } of diff.addedFields) {
        if (!addedByTable.has(tableName)) {
            addedByTable.set(tableName, []);
        }
        addedByTable.get(tableName).push(column.name);
    }
    // For each table with both removed and added columns, suggest positional renames.
    for (const [tableName, removed] of removedByTable.entries()) {
        const added = addedByTable.get(tableName) || [];
        const minPairs = Math.min(removed.length, added.length);
        for (let i = 0; i < minPairs; i++) {
            result.columns.push({
                table: tableName,
                from: removed[i],
                to: added[i],
            });
        }
    }
    return result;
}
|
|
1048
|
-
const MIGRATION_LOG_HEADER = `# zenstack-kit migration log
|
|
1049
|
-
# Format: <migration_name> <checksum>
|
|
1050
|
-
`;
|
|
1051
|
-
/**
 * Get the path to the migration log file: `<outputPath>/meta/_migration_log`.
 *
 * @param {string} outputPath - Migrations output directory.
 * @returns {string} Path to the log file.
 */
export function getMigrationLogPath(outputPath) {
    return path.join(outputPath, "meta", "_migration_log");
}
|
|
1057
|
-
/**
 * Parse migration log content into { name, checksum } entries.
 *
 * Blank lines and `#` comment lines are ignored; each remaining line is
 * split on the first spaces into name and checksum, and lines missing either
 * part are dropped.
 *
 * @param {string} content - Raw log file text.
 * @returns {{name: string, checksum: string}[]} Parsed entries in file order.
 */
function parseMigrationLog(content) {
    const entries = [];
    for (const rawLine of content.split("\n")) {
        if (!rawLine.trim() || rawLine.startsWith("#")) {
            continue;
        }
        const [name, checksum] = rawLine.split(" ");
        if (name && checksum) {
            entries.push({ name, checksum });
        }
    }
    return entries;
}
|
|
1070
|
-
/**
 * Serialize migration log entries into the on-disk log format:
 * the fixed header followed by one "name checksum" line per entry
 * (with a trailing newline when any entries exist).
 *
 * @param {{name: string, checksum: string}[]} entries
 * @returns {string} Full log file content.
 */
function serializeMigrationLog(entries) {
    if (entries.length === 0) {
        return MIGRATION_LOG_HEADER;
    }
    const body = entries.map((entry) => [entry.name, entry.checksum].join(" ")).join("\n");
    return `${MIGRATION_LOG_HEADER}${body}\n`;
}
|
|
1077
|
-
/**
 * Read and parse the migration log file for the given output path.
 *
 * A missing log file (ENOENT) is treated as an empty log; any other read
 * error propagates to the caller.
 *
 * @param {string} outputPath - Migrations output directory.
 * @returns {Promise<{name: string, checksum: string}[]>} Parsed log entries.
 */
export async function readMigrationLog(outputPath) {
    const logPath = getMigrationLogPath(outputPath);
    try {
        const content = await fs.readFile(logPath, "utf-8");
        return parseMigrationLog(content);
    }
    catch (error) {
        // Only swallow "file not found"; rethrow everything else.
        if (error instanceof Error && "code" in error && error.code === "ENOENT") {
            return [];
        }
        throw error;
    }
}
|
|
1093
|
-
/**
 * Write the full migration log file, creating the `meta` directory if needed.
 * Overwrites any existing log content with the serialized entries.
 *
 * @param {string} outputPath - Migrations output directory.
 * @param {{name: string, checksum: string}[]} entries - Entries to persist.
 */
export async function writeMigrationLog(outputPath, entries) {
    const logPath = getMigrationLogPath(outputPath);
    await fs.mkdir(path.dirname(logPath), { recursive: true });
    await fs.writeFile(logPath, serializeMigrationLog(entries), "utf-8");
}
|
|
1101
|
-
/**
 * Append a single entry to the migration log.
 *
 * Implemented as read → push → rewrite of the whole file.
 * NOTE(review): this read-modify-write is not atomic — concurrent appends
 * could lose an entry; appears acceptable for a CLI-driven workflow, but
 * confirm if this is ever called concurrently.
 *
 * @param {string} outputPath - Migrations output directory.
 * @param {{name: string, checksum: string}} entry - Entry to append.
 */
export async function appendToMigrationLog(outputPath, entry) {
    const entries = await readMigrationLog(outputPath);
    entries.push(entry);
    await writeMigrationLog(outputPath, entries);
}
|
|
1109
|
-
/**
 * Scan migration folders on disk and compute a checksum for each.
 *
 * Considers only directories whose names start with a 14-digit timestamp
 * prefix, in sorted (chronological) order. Folders without a migration.sql
 * are skipped; a missing migrations directory yields an empty list.
 *
 * @param {string} outputPath - Migrations output directory.
 * @returns {Promise<{name: string, checksum: string}[]>} One entry per folder with SQL.
 */
export async function scanMigrationFolders(outputPath) {
    const entries = [];
    try {
        const dirEntries = await fs.readdir(outputPath, { withFileTypes: true });
        const migrationFolders = dirEntries
            .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
            .map((e) => e.name)
            .sort();
        for (const folderName of migrationFolders) {
            const sqlPath = path.join(outputPath, folderName, "migration.sql");
            try {
                const sqlContent = await fs.readFile(sqlPath, "utf-8");
                const checksum = calculateChecksum(sqlContent);
                entries.push({ name: folderName, checksum });
            }
            catch {
                // Skip folders without migration.sql
            }
        }
    }
    catch (error) {
        // A missing migrations directory is not an error — just no migrations yet.
        if (error instanceof Error && "code" in error && error.code === "ENOENT") {
            return [];
        }
        throw error;
    }
    return entries;
}
|
|
1140
|
-
/**
 * Check whether a schema snapshot file exists for the given output path.
 *
 * @param {string} outputPath - Migrations output directory.
 * @returns {Promise<boolean>} true when the snapshot file is accessible.
 */
export async function hasSnapshot(outputPath) {
    const { snapshotPath } = getSnapshotPaths(outputPath);
    // fs.access resolves when the file exists and rejects otherwise; map
    // both outcomes to a boolean instead of using try/catch.
    return fs.access(snapshotPath).then(
        () => true,
        () => false,
    );
}
|
|
1153
|
-
/**
 * Initialize the schema snapshot from the current schema without generating
 * a migration — used to "baseline" an existing database.
 *
 * @param {{ schemaPath: string, outputPath: string }} options
 * @returns {Promise<{ snapshotPath: string, tableCount: number }>}
 */
export async function initializeSnapshot(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    await writeSnapshot(snapshotPath, currentSchema);
    return {
        snapshotPath,
        tableCount: currentSchema.tables.length,
    };
}
|
|
1165
|
-
/**
|
|
1166
|
-
* Export getSnapshotPaths for external use
|
|
1167
|
-
*/
|
|
1168
|
-
export { getSnapshotPaths };
|
|
1
|
+
export { createPrismaMigration, createInitialMigration, hasPrismaSchemaChanges, } from "./prisma/create.js";
|
|
2
|
+
export { applyPrismaMigrations, previewPrismaMigrations, } from "./prisma/apply.js";
|
|
3
|
+
export { readMigrationLog, writeMigrationLog, appendToMigrationLog, scanMigrationFolders, getMigrationLogPath, calculateChecksum, } from "./prisma/log.js";
|
|
4
|
+
export { initializeSnapshot, hasSnapshot, getSnapshotPaths, writeSnapshot, } from "./prisma/snapshot.js";
|
|
5
|
+
export { detectPotentialRenames, } from "./prisma/rename.js";
|