zenstack-kit 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +313 -0
  3. package/dist/cli/app.d.ts +12 -0
  4. package/dist/cli/app.d.ts.map +1 -0
  5. package/dist/cli/app.js +253 -0
  6. package/dist/cli/commands.d.ts +70 -0
  7. package/dist/cli/commands.d.ts.map +1 -0
  8. package/dist/cli/commands.js +308 -0
  9. package/dist/cli/index.d.ts +12 -0
  10. package/dist/cli/index.d.ts.map +1 -0
  11. package/dist/cli/index.js +12 -0
  12. package/dist/cli/prompt-provider.d.ts +10 -0
  13. package/dist/cli/prompt-provider.d.ts.map +1 -0
  14. package/dist/cli/prompt-provider.js +41 -0
  15. package/dist/cli/prompts.d.ts +27 -0
  16. package/dist/cli/prompts.d.ts.map +1 -0
  17. package/dist/cli/prompts.js +133 -0
  18. package/dist/cli.d.ts +12 -0
  19. package/dist/cli.d.ts.map +1 -0
  20. package/dist/cli.js +240 -0
  21. package/dist/config/index.d.ts +96 -0
  22. package/dist/config/index.d.ts.map +1 -0
  23. package/dist/config/index.js +48 -0
  24. package/dist/config/loader.d.ts +11 -0
  25. package/dist/config/loader.d.ts.map +1 -0
  26. package/dist/config/loader.js +44 -0
  27. package/dist/config-loader.d.ts +6 -0
  28. package/dist/config-loader.d.ts.map +1 -0
  29. package/dist/config-loader.js +36 -0
  30. package/dist/config.d.ts +62 -0
  31. package/dist/config.d.ts.map +1 -0
  32. package/dist/config.js +44 -0
  33. package/dist/index.d.ts +19 -0
  34. package/dist/index.d.ts.map +1 -0
  35. package/dist/index.js +23 -0
  36. package/dist/init-prompts.d.ts +13 -0
  37. package/dist/init-prompts.d.ts.map +1 -0
  38. package/dist/init-prompts.js +64 -0
  39. package/dist/introspect.d.ts +54 -0
  40. package/dist/introspect.d.ts.map +1 -0
  41. package/dist/introspect.js +75 -0
  42. package/dist/kysely-adapter.d.ts +49 -0
  43. package/dist/kysely-adapter.d.ts.map +1 -0
  44. package/dist/kysely-adapter.js +74 -0
  45. package/dist/migrate-apply.d.ts +18 -0
  46. package/dist/migrate-apply.d.ts.map +1 -0
  47. package/dist/migrate-apply.js +61 -0
  48. package/dist/migrate.d.ts +108 -0
  49. package/dist/migrate.d.ts.map +1 -0
  50. package/dist/migrate.js +127 -0
  51. package/dist/migrations/apply.d.ts +18 -0
  52. package/dist/migrations/apply.d.ts.map +1 -0
  53. package/dist/migrations/apply.js +61 -0
  54. package/dist/migrations/diff.d.ts +161 -0
  55. package/dist/migrations/diff.d.ts.map +1 -0
  56. package/dist/migrations/diff.js +620 -0
  57. package/dist/migrations/prisma.d.ts +193 -0
  58. package/dist/migrations/prisma.d.ts.map +1 -0
  59. package/dist/migrations/prisma.js +929 -0
  60. package/dist/migrations.d.ts +161 -0
  61. package/dist/migrations.d.ts.map +1 -0
  62. package/dist/migrations.js +620 -0
  63. package/dist/prisma-migrations.d.ts +160 -0
  64. package/dist/prisma-migrations.d.ts.map +1 -0
  65. package/dist/prisma-migrations.js +789 -0
  66. package/dist/prompts.d.ts +10 -0
  67. package/dist/prompts.d.ts.map +1 -0
  68. package/dist/prompts.js +41 -0
  69. package/dist/pull.d.ts +23 -0
  70. package/dist/pull.d.ts.map +1 -0
  71. package/dist/pull.js +424 -0
  72. package/dist/schema/introspect.d.ts +54 -0
  73. package/dist/schema/introspect.d.ts.map +1 -0
  74. package/dist/schema/introspect.js +75 -0
  75. package/dist/schema/pull.d.ts +23 -0
  76. package/dist/schema/pull.d.ts.map +1 -0
  77. package/dist/schema/pull.js +424 -0
  78. package/dist/schema/snapshot.d.ts +46 -0
  79. package/dist/schema/snapshot.d.ts.map +1 -0
  80. package/dist/schema/snapshot.js +278 -0
  81. package/dist/schema-snapshot.d.ts +45 -0
  82. package/dist/schema-snapshot.d.ts.map +1 -0
  83. package/dist/schema-snapshot.js +265 -0
  84. package/dist/sql/compiler.d.ts +74 -0
  85. package/dist/sql/compiler.d.ts.map +1 -0
  86. package/dist/sql/compiler.js +270 -0
  87. package/dist/sql/kysely-adapter.d.ts +49 -0
  88. package/dist/sql/kysely-adapter.d.ts.map +1 -0
  89. package/dist/sql/kysely-adapter.js +74 -0
  90. package/dist/sql-compiler.d.ts +74 -0
  91. package/dist/sql-compiler.d.ts.map +1 -0
  92. package/dist/sql-compiler.js +243 -0
  93. package/package.json +81 -0
@@ -0,0 +1,929 @@
1
+ /**
2
+ * Prisma-compatible migrations
3
+ *
4
+ * Generates migrations in Prisma format:
5
+ * - Folder structure: migrations/<timestamp>_<name>/migration.sql
6
+ * - Tracks migrations in _prisma_migrations table
7
+ * - Compatible with `prisma migrate deploy`
8
+ */
9
+ import * as fs from "fs/promises";
10
+ import * as path from "path";
11
+ import * as crypto from "crypto";
12
+ import { sql } from "kysely";
13
+ import { createKyselyAdapter } from "../sql/kysely-adapter.js";
14
+ import { generateSchemaSnapshot, createSnapshot } from "../schema/snapshot.js";
15
+ import { compileCreateTable, compileDropTable, compileAddColumn, compileDropColumn, compileRenameTable, compileRenameColumn, compileCreateIndex, compileDropIndex, compileAddUniqueConstraint, compileDropConstraint, compileAddForeignKeyConstraint, compileAddPrimaryKeyConstraint, compileAlterColumn, } from "../sql/compiler.js";
16
/**
 * Generate the timestamp prefix used for a migration folder name.
 *
 * Produces a 14-character local-time string `YYYYMMDDHHMMSS`, matching the
 * naming convention Prisma uses for `migrations/<timestamp>_<name>/`.
 *
 * @returns {string} 14-digit local timestamp.
 */
export function generateTimestamp() {
  const d = new Date();
  const pad = (v) => String(v).padStart(2, "0");
  return `${d.getFullYear()}${pad(d.getMonth() + 1)}${pad(d.getDate())}${pad(d.getHours())}${pad(d.getMinutes())}${pad(d.getSeconds())}`;
}
30
/**
 * Compute the filesystem locations for the schema snapshot.
 *
 * The snapshot lives at `<outputPath>/meta/_snapshot.json`, alongside the
 * generated migration folders.
 *
 * @param {string} outputPath - Root migrations output directory.
 * @returns {{metaDir: string, snapshotPath: string}} Paths to the meta dir and snapshot file.
 */
function getSnapshotPaths(outputPath) {
  const metaDir = path.join(outputPath, "meta");
  const snapshotPath = path.join(metaDir, "_snapshot.json");
  return { metaDir, snapshotPath };
}
40
/**
 * Read and validate the previously written schema snapshot.
 *
 * @param {string} snapshotPath - Path to `_snapshot.json`.
 * @returns {Promise<object|null>} Parsed snapshot, or `null` if the file
 *   does not exist (expected on the very first migration).
 * @throws If the file exists but is not a well-formed version-2 snapshot,
 *   or on any filesystem error other than ENOENT.
 */
async function readSnapshot(snapshotPath) {
  let content;
  try {
    content = await fs.readFile(snapshotPath, "utf-8");
  } catch (error) {
    // A missing snapshot just means "no previous schema"; rethrow anything else.
    if (error instanceof Error && "code" in error && error.code === "ENOENT") {
      return null;
    }
    throw error;
  }
  const snapshot = JSON.parse(content);
  if (!snapshot || snapshot.version !== 2 || !snapshot.schema) {
    throw new Error("Snapshot format is invalid");
  }
  return snapshot;
}
59
/**
 * Persist the current schema as the snapshot file.
 *
 * Serializes via `createSnapshot` so the on-disk format stays versioned,
 * creating the parent `meta/` directory if needed.
 *
 * @param {string} snapshotPath - Destination `_snapshot.json` path.
 * @param {object} schema - Schema object to snapshot.
 */
export async function writeSnapshot(snapshotPath, schema) {
  const snapshot = createSnapshot(schema);
  const parentDir = path.dirname(snapshotPath);
  await fs.mkdir(parentDir, { recursive: true });
  const serialized = JSON.stringify(snapshot, null, 2);
  await fs.writeFile(snapshotPath, serialized, "utf-8");
}
67
/**
 * Diff two schema snapshots and return the set of changes.
 *
 * Compares `previous` (may be null, meaning an empty schema) against
 * `current`. Matching is purely name-based at every level (tables,
 * columns, constraints, indexes, foreign keys), so a drop+add pair is
 * never inferred to be a rename here — rename detection is layered on
 * top by the caller via explicit mappings (see createPrismaMigration),
 * which is why `renamedTables` and `renamedColumns` are returned empty.
 *
 * @param previous - Previous schema snapshot (`{ tables: [...] }`) or null.
 * @param current - Current schema snapshot.
 * @returns Object grouping added/removed/altered entities per category.
 */
function diffSchemas(previous, current) {
  const previousModels = new Map();
  const currentModels = new Map();
  previous?.tables.forEach((model) => previousModels.set(model.name, model));
  current.tables.forEach((model) => currentModels.set(model.name, model));
  // Tables present only on one side are whole-table adds/drops.
  const addedModels = [];
  const removedModels = [];
  for (const [tableName, model] of currentModels.entries()) {
    if (!previousModels.has(tableName)) {
      addedModels.push(model);
    }
  }
  for (const [tableName, model] of previousModels.entries()) {
    if (!currentModels.has(tableName)) {
      removedModels.push(model);
    }
  }
  // Field-level changes for existing tables
  const addedFields = [];
  const removedFields = [];
  const alteredFields = [];
  // Constraint changes
  const addedUniqueConstraints = [];
  const removedUniqueConstraints = [];
  const addedIndexes = [];
  const removedIndexes = [];
  const addedForeignKeys = [];
  const removedForeignKeys = [];
  const primaryKeyChanges = [];
  // Only tables that exist on both sides are inspected for interior changes.
  for (const [tableName, currentModel] of currentModels.entries()) {
    const previousModel = previousModels.get(tableName);
    if (!previousModel)
      continue;
    // Field changes
    const previousFields = new Map(previousModel.columns.map((f) => [f.name, f]));
    const currentFields = new Map(currentModel.columns.map((f) => [f.name, f]));
    for (const [columnName, column] of currentFields.entries()) {
      if (!previousFields.has(columnName)) {
        addedFields.push({ tableName, column });
      }
    }
    for (const [columnName, column] of previousFields.entries()) {
      if (!currentFields.has(columnName)) {
        removedFields.push({ tableName, column });
      }
    }
    // A column counts as altered if its type, nullability, or default changed.
    // NOTE(review): `default` is compared with strict `!==`, which assumes
    // defaults are primitives (string/number/bool/undefined); object-valued
    // defaults would always register as changed — confirm snapshot format.
    for (const [columnName, currentColumn] of currentFields.entries()) {
      const previousColumn = previousFields.get(columnName);
      if (!previousColumn)
        continue;
      if (previousColumn.type !== currentColumn.type ||
        previousColumn.notNull !== currentColumn.notNull ||
        previousColumn.default !== currentColumn.default) {
        alteredFields.push({
          tableName,
          columnName,
          previous: previousColumn,
          current: currentColumn,
        });
      }
    }
    // Unique constraint changes
    const prevUnique = new Map(previousModel.uniqueConstraints.map((c) => [c.name, c]));
    const currUnique = new Map(currentModel.uniqueConstraints.map((c) => [c.name, c]));
    for (const [name, constraint] of currUnique.entries()) {
      if (!prevUnique.has(name)) {
        addedUniqueConstraints.push({ tableName, constraint });
      }
    }
    for (const [name, constraint] of prevUnique.entries()) {
      if (!currUnique.has(name)) {
        removedUniqueConstraints.push({ tableName, constraint });
      }
    }
    // Index changes
    const prevIndexes = new Map(previousModel.indexes.map((i) => [i.name, i]));
    const currIndexes = new Map(currentModel.indexes.map((i) => [i.name, i]));
    for (const [name, index] of currIndexes.entries()) {
      if (!prevIndexes.has(name)) {
        addedIndexes.push({ tableName, index });
      }
    }
    for (const [name, index] of prevIndexes.entries()) {
      if (!currIndexes.has(name)) {
        removedIndexes.push({ tableName, index });
      }
    }
    // Foreign key changes
    const prevFks = new Map(previousModel.foreignKeys.map((f) => [f.name, f]));
    const currFks = new Map(currentModel.foreignKeys.map((f) => [f.name, f]));
    for (const [name, fk] of currFks.entries()) {
      if (!prevFks.has(name)) {
        addedForeignKeys.push({ tableName, foreignKey: fk });
      }
    }
    for (const [name, fk] of prevFks.entries()) {
      if (!currFks.has(name)) {
        removedForeignKeys.push({ tableName, foreignKey: fk });
      }
    }
    // Primary key changes: equal only if both name and ordered column list
    // match (column order matters — compared via JSON serialization).
    const prevPk = previousModel.primaryKey;
    const currPk = currentModel.primaryKey;
    const pkEqual = (prevPk?.name ?? "") === (currPk?.name ?? "") &&
      JSON.stringify(prevPk?.columns ?? []) === JSON.stringify(currPk?.columns ?? []);
    if (!pkEqual) {
      primaryKeyChanges.push({
        tableName,
        previous: prevPk,
        current: currPk,
      });
    }
  }
  return {
    addedModels,
    removedModels,
    addedFields,
    removedFields,
    alteredFields,
    addedUniqueConstraints,
    removedUniqueConstraints,
    addedIndexes,
    removedIndexes,
    addedForeignKeys,
    removedForeignKeys,
    primaryKeyChanges,
    // Always empty here; populated later by explicit rename mappings.
    renamedTables: [],
    renamedColumns: [],
  };
}
200
/**
 * Topologically sort tables so that referenced tables come before tables
 * that reference them (depth-first post-order over foreign keys).
 *
 * Self-references are ignored, and circular dependencies are tolerated:
 * the cycle member encountered while already on the visit stack is simply
 * emitted later, leaving the database to resolve the ordering.
 *
 * @param {Array<{name: string, foreignKeys: Array<{referencedTable: string}>}>} tables
 * @returns {Array} The same table objects in dependency order.
 */
function sortTablesByDependencies(tables) {
  const byName = new Map(tables.map((t) => [t.name, t]));
  const ordered = [];
  const done = new Set();
  const onStack = new Set();
  const visit = (name) => {
    if (done.has(name)) return;
    // Already on the DFS stack => cycle; bail and let the DB handle it.
    if (onStack.has(name)) return;
    const table = byName.get(name);
    if (!table) return;
    onStack.add(name);
    // Emit every table this one references before emitting it.
    for (const fk of table.foreignKeys) {
      if (byName.has(fk.referencedTable) && fk.referencedTable !== name) {
        visit(fk.referencedTable);
      }
    }
    onStack.delete(name);
    done.add(name);
    ordered.push(table);
  };
  for (const table of tables) {
    visit(table.name);
  }
  return ordered;
}
235
/**
 * Build forward (`up`) and reverse (`down`) SQL statement lists from a diff.
 *
 * Statement ordering is the contract of this function:
 * renames first, then table creates (dependency-ordered) and drops, then
 * destructive constraint/index/column changes, then column alters, and
 * finally additive constraint/index/FK changes — so that, e.g., foreign
 * keys are dropped before the columns they cover and added only after all
 * referenced tables/columns exist. `down` is built with `unshift` so it is
 * the exact mirror of `up` in reverse order.
 *
 * @param diff - Result of diffSchemas (possibly with renames applied).
 * @param dialect - SQL dialect passed through to the compile* helpers.
 * @returns {{up: string[], down: string[]}} Statement lists.
 */
function buildSqlStatements(diff, dialect) {
  const up = [];
  const down = [];
  const compileOpts = { dialect };
  // Table renames
  for (const rename of diff.renamedTables) {
    up.push(compileRenameTable(rename.from, rename.to, compileOpts));
    down.unshift(compileRenameTable(rename.to, rename.from, compileOpts));
  }
  // Column renames
  for (const rename of diff.renamedColumns) {
    up.push(compileRenameColumn(rename.tableName, rename.from, rename.to, compileOpts));
    down.unshift(compileRenameColumn(rename.tableName, rename.to, rename.from, compileOpts));
  }
  // Create tables (sorted by dependency order so referenced tables are created first)
  const sortedAddedModels = sortTablesByDependencies(diff.addedModels);
  for (const model of sortedAddedModels) {
    up.push(compileCreateTable(model, compileOpts));
    down.unshift(compileDropTable(model.name, compileOpts));
  }
  // Drop tables
  for (const model of diff.removedModels) {
    up.push(compileDropTable(model.name, compileOpts));
    down.unshift(compileCreateTable(model, compileOpts));
  }
  // Primary key changes (drop old first; the replacement PK is added further below)
  for (const change of diff.primaryKeyChanges) {
    if (change.previous) {
      up.push(compileDropConstraint(change.tableName, change.previous.name, compileOpts));
      down.unshift(compileAddPrimaryKeyConstraint(change.tableName, change.previous.name, change.previous.columns, compileOpts));
    }
  }
  // Drop foreign keys first (before dropping columns)
  for (const { tableName, foreignKey } of diff.removedForeignKeys) {
    up.push(compileDropConstraint(tableName, foreignKey.name, compileOpts));
    down.unshift(compileAddForeignKeyConstraint(tableName, foreignKey.name, foreignKey.columns, foreignKey.referencedTable, foreignKey.referencedColumns, compileOpts));
  }
  // Drop unique constraints
  for (const { tableName, constraint } of diff.removedUniqueConstraints) {
    up.push(compileDropConstraint(tableName, constraint.name, compileOpts));
    down.unshift(compileAddUniqueConstraint(tableName, constraint.name, constraint.columns, compileOpts));
  }
  // Drop indexes
  for (const { tableName, index } of diff.removedIndexes) {
    up.push(compileDropIndex(index.name, compileOpts));
    down.unshift(compileCreateIndex(tableName, index.name, index.columns, compileOpts));
  }
  // Add columns
  for (const { tableName, column } of diff.addedFields) {
    up.push(compileAddColumn(tableName, column, compileOpts));
    down.unshift(compileDropColumn(tableName, column.name, compileOpts));
  }
  // Drop columns
  for (const { tableName, column } of diff.removedFields) {
    up.push(compileDropColumn(tableName, column.name, compileOpts));
    down.unshift(compileAddColumn(tableName, column, compileOpts));
  }
  // Alter columns: each aspect (type, nullability, default) is emitted as a
  // separate ALTER so the down list can mirror each one independently.
  for (const change of diff.alteredFields) {
    const typeChanged = change.previous.type !== change.current.type;
    const nullChanged = change.previous.notNull !== change.current.notNull;
    const defaultChanged = change.previous.default !== change.current.default;
    if (typeChanged) {
      up.push(...compileAlterColumn(change.tableName, change.columnName, { setType: change.current.type }, compileOpts));
      down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setType: change.previous.type }, compileOpts));
    }
    if (nullChanged) {
      if (change.current.notNull) {
        up.push(...compileAlterColumn(change.tableName, change.columnName, { setNotNull: true }, compileOpts));
        down.unshift(...compileAlterColumn(change.tableName, change.columnName, { dropNotNull: true }, compileOpts));
      }
      else {
        up.push(...compileAlterColumn(change.tableName, change.columnName, { dropNotNull: true }, compileOpts));
        down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setNotNull: true }, compileOpts));
      }
    }
    if (defaultChanged) {
      // `undefined` means "no default": distinguish SET DEFAULT from DROP DEFAULT.
      if (change.current.default !== undefined) {
        up.push(...compileAlterColumn(change.tableName, change.columnName, { setDefault: change.current.default }, compileOpts));
      }
      else {
        up.push(...compileAlterColumn(change.tableName, change.columnName, { dropDefault: true }, compileOpts));
      }
      if (change.previous.default !== undefined) {
        down.unshift(...compileAlterColumn(change.tableName, change.columnName, { setDefault: change.previous.default }, compileOpts));
      }
      else {
        down.unshift(...compileAlterColumn(change.tableName, change.columnName, { dropDefault: true }, compileOpts));
      }
    }
  }
  // Primary key changes (add new)
  for (const change of diff.primaryKeyChanges) {
    if (change.current) {
      up.push(compileAddPrimaryKeyConstraint(change.tableName, change.current.name, change.current.columns, compileOpts));
      down.unshift(compileDropConstraint(change.tableName, change.current.name, compileOpts));
    }
  }
  // Add unique constraints
  for (const { tableName, constraint } of diff.addedUniqueConstraints) {
    up.push(compileAddUniqueConstraint(tableName, constraint.name, constraint.columns, compileOpts));
    down.unshift(compileDropConstraint(tableName, constraint.name, compileOpts));
  }
  // Add indexes
  for (const { tableName, index } of diff.addedIndexes) {
    up.push(compileCreateIndex(tableName, index.name, index.columns, compileOpts));
    down.unshift(compileDropIndex(index.name, compileOpts));
  }
  // Add foreign keys last so all referenced tables/columns already exist.
  for (const { tableName, foreignKey } of diff.addedForeignKeys) {
    up.push(compileAddForeignKeyConstraint(tableName, foreignKey.name, foreignKey.columns, foreignKey.referencedTable, foreignKey.referencedColumns, compileOpts));
    down.unshift(compileDropConstraint(tableName, foreignKey.name, compileOpts));
  }
  return { up, down };
}
353
/**
 * Create a Prisma-compatible migration.
 *
 * Workflow: snapshot the current schema, diff it against the stored
 * snapshot, apply any explicit rename mappings (converting drop+add pairs
 * into renames), compile the diff to SQL, write
 * `<outputPath>/<timestamp>_<name>/migration.sql`, refresh the snapshot,
 * and append a checksum entry to the migration log.
 *
 * @param options - { schemaPath, outputPath, name, dialect, renameTables?, renameColumns? }
 * @returns {Promise<{folderName, folderPath, sql, timestamp}|null>} Migration
 *   info, or null when the diff produces no statements.
 */
export async function createPrismaMigration(options) {
  const currentSchema = await generateSchemaSnapshot(options.schemaPath);
  const { snapshotPath } = getSnapshotPaths(options.outputPath);
  const previousSnapshot = await readSnapshot(snapshotPath);
  let diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
  // Apply rename mappings
  if (options.renameTables?.length || options.renameColumns?.length) {
    // Handle table renames: a mapping only takes effect when the diff shows
    // both the dropped old name and the added new name; the pair is then
    // replaced by a single rename entry.
    for (const mapping of options.renameTables ?? []) {
      const removedIdx = diff.removedModels.findIndex((m) => m.name === mapping.from);
      const addedIdx = diff.addedModels.findIndex((m) => m.name === mapping.to);
      if (removedIdx !== -1 && addedIdx !== -1) {
        diff.removedModels.splice(removedIdx, 1);
        diff.addedModels.splice(addedIdx, 1);
        diff.renamedTables.push(mapping);
      }
    }
    // Handle column renames (same drop+add pairing, scoped to one table)
    for (const mapping of options.renameColumns ?? []) {
      const removedIdx = diff.removedFields.findIndex((f) => f.tableName === mapping.table && f.column.name === mapping.from);
      const addedIdx = diff.addedFields.findIndex((f) => f.tableName === mapping.table && f.column.name === mapping.to);
      if (removedIdx !== -1 && addedIdx !== -1) {
        diff.removedFields.splice(removedIdx, 1);
        diff.addedFields.splice(addedIdx, 1);
        diff.renamedColumns.push({ tableName: mapping.table, from: mapping.from, to: mapping.to });
      }
    }
  }
  const { up, down } = buildSqlStatements(diff, options.dialect);
  // No statements => no schema change; nothing is written.
  if (up.length === 0) {
    return null;
  }
  // NOTE: `timestamp` (epoch ms) and the folder-name timestamp come from two
  // separate clock reads and may differ by a tick.
  const timestamp = Date.now();
  const timestampStr = generateTimestamp();
  // Sanitize the user-provided name into a filesystem-safe folder suffix.
  const safeName = options.name.replace(/[^a-z0-9]/gi, "_").toLowerCase();
  const folderName = `${timestampStr}_${safeName}`;
  const folderPath = path.join(options.outputPath, folderName);
  // Build migration.sql content with comments
  const sqlContent = [
    `-- Migration: ${options.name}`,
    `-- Generated at: ${new Date(timestamp).toISOString()}`,
    "",
    ...up,
    "",
  ].join("\n");
  // Create migration folder and file
  await fs.mkdir(folderPath, { recursive: true });
  await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
  // Update snapshot
  await writeSnapshot(snapshotPath, currentSchema);
  // Append to migration log (checksum is verified again at apply time)
  const checksum = calculateChecksum(sqlContent);
  await appendToMigrationLog(options.outputPath, { name: folderName, checksum });
  return {
    folderName,
    folderPath,
    sql: sqlContent,
    timestamp,
  };
}
416
/**
 * Create an initial migration that creates all tables from scratch.
 * This is used when initializing a project where the database is empty.
 *
 * Unlike createPrismaMigration, this diffs against a null (empty) schema,
 * ignores rename mappings, and always writes a migration folder even when
 * the statement list is empty — NOTE(review): no `up.length === 0` guard
 * here; confirm callers only invoke this with a non-empty schema.
 *
 * @param options - { schemaPath, outputPath, name?, dialect } (name defaults to "init")
 * @returns {Promise<{folderName, folderPath, sql, timestamp}>}
 */
export async function createInitialMigration(options) {
  const currentSchema = await generateSchemaSnapshot(options.schemaPath);
  const { snapshotPath } = getSnapshotPaths(options.outputPath);
  // Diff against empty schema to get full creation SQL
  const diff = diffSchemas(null, currentSchema);
  const { up } = buildSqlStatements(diff, options.dialect);
  const timestamp = Date.now();
  const timestampStr = generateTimestamp();
  // Sanitize the name (default "init") into a filesystem-safe suffix.
  const safeName = (options.name ?? "init").replace(/[^a-z0-9]/gi, "_").toLowerCase();
  const folderName = `${timestampStr}_${safeName}`;
  const folderPath = path.join(options.outputPath, folderName);
  // Build migration.sql content with comments
  const sqlContent = [
    `-- Migration: ${options.name ?? "init"}`,
    `-- Generated at: ${new Date(timestamp).toISOString()}`,
    "",
    ...up,
    "",
  ].join("\n");
  // Create migration folder and file
  await fs.mkdir(folderPath, { recursive: true });
  await fs.writeFile(path.join(folderPath, "migration.sql"), sqlContent, "utf-8");
  // Update snapshot
  await writeSnapshot(snapshotPath, currentSchema);
  // Append to migration log
  const checksum = calculateChecksum(sqlContent);
  await appendToMigrationLog(options.outputPath, { name: folderName, checksum });
  return {
    folderName,
    folderPath,
    sql: sqlContent,
    timestamp,
  };
}
454
/**
 * Ensure the migrations bookkeeping table (default `_prisma_migrations`)
 * exists, creating it with dialect-appropriate column types.
 *
 * The column set mirrors Prisma's own table (id, checksum, finished_at,
 * migration_name, logs, rolled_back_at, started_at, applied_steps_count).
 *
 * @param db        Kysely instance used to run the DDL.
 * @param tableName Name of the migrations table.
 * @param schema    Schema used to qualify the table (postgres branch only).
 *                  NOTE(review): callers always pass a value ("public" by
 *                  default); an undefined schema would render literally as
 *                  `"undefined"` in the postgres identifier — confirm if
 *                  this function ever gains external callers.
 * @param dialect   "sqlite" | "postgres"; any other value takes the MySQL branch.
 */
async function ensureMigrationsTable(db, tableName, schema, dialect) {
  // (Removed an unused `fullTableName` local that was computed but never read.)
  if (dialect === "sqlite") {
    await sql`
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${tableName}"`)} (
        id TEXT PRIMARY KEY,
        checksum TEXT NOT NULL,
        finished_at TEXT,
        migration_name TEXT NOT NULL,
        logs TEXT,
        rolled_back_at TEXT,
        started_at TEXT NOT NULL DEFAULT (datetime('now')),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
  }
  else if (dialect === "postgres") {
    await sql`
      CREATE TABLE IF NOT EXISTS ${sql.raw(`"${schema}"."${tableName}"`)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at TIMESTAMPTZ,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at TIMESTAMPTZ,
        started_at TIMESTAMPTZ NOT NULL DEFAULT now(),
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
  }
  else {
    // MySQL: backtick-quoted identifier, DATETIME timestamps.
    await sql`
      CREATE TABLE IF NOT EXISTS ${sql.raw(`\`${tableName}\``)} (
        id VARCHAR(36) PRIMARY KEY,
        checksum VARCHAR(64) NOT NULL,
        finished_at DATETIME,
        migration_name VARCHAR(255) NOT NULL,
        logs TEXT,
        rolled_back_at DATETIME,
        started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        applied_steps_count INTEGER NOT NULL DEFAULT 0
      )
    `.execute(db);
  }
}
502
/**
 * Get list of applied migrations from the `_prisma_migrations` table.
 *
 * Only migrations that completed (`finished_at IS NOT NULL`) and were not
 * rolled back (`rolled_back_at IS NULL`) count as applied. The three
 * branches differ only in identifier quoting: postgres uses a
 * schema-qualified double-quoted name, sqlite a double-quoted name, and
 * the fallback (MySQL) a backtick-quoted name.
 *
 * @param db        Kysely instance to query through.
 * @param tableName Migrations table name.
 * @param schema    Schema qualifier (used only for postgres when truthy).
 * @param dialect   "postgres" | "sqlite" | other (treated as MySQL).
 * @returns {Promise<Map<string, object>>} Map keyed by `migration_name`,
 *   valued with the full row.
 */
async function getAppliedMigrations(db, tableName, schema, dialect) {
  let result;
  if (dialect === "postgres" && schema) {
    result = await sql`
      SELECT * FROM ${sql.raw(`"${schema}"."${tableName}"`)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
  }
  else if (dialect === "sqlite") {
    result = await sql`
      SELECT * FROM ${sql.raw(`"${tableName}"`)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
  }
  else {
    result = await sql`
      SELECT * FROM ${sql.raw(`\`${tableName}\``)}
      WHERE rolled_back_at IS NULL AND finished_at IS NOT NULL
    `.execute(db);
  }
  return new Map(result.rows.map((row) => [row.migration_name, row]));
}
527
/**
 * Record a successfully applied migration in the `_prisma_migrations` table.
 *
 * Inserts a row with a fresh UUID, the SQL checksum, the migration name,
 * a dialect-appropriate "now" for `finished_at`, and
 * `applied_steps_count = 1`. Identifier quoting per dialect matches
 * getAppliedMigrations; values are bound as parameters by the `sql` tag.
 *
 * @param db            Kysely instance.
 * @param tableName     Migrations table name.
 * @param schema        Schema qualifier (postgres only, when truthy).
 * @param dialect       "postgres" | "sqlite" | other (treated as MySQL).
 * @param migrationName Folder name of the migration (e.g. `20240101000000_init`).
 * @param checksum      SHA-256 hex digest of the migration SQL.
 */
async function recordMigration(db, tableName, schema, dialect, migrationName, checksum) {
  const id = crypto.randomUUID();
  if (dialect === "postgres" && schema) {
    await sql`
      INSERT INTO ${sql.raw(`"${schema}"."${tableName}"`)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, now(), 1)
    `.execute(db);
  }
  else if (dialect === "sqlite") {
    await sql`
      INSERT INTO ${sql.raw(`"${tableName}"`)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, datetime('now'), 1)
    `.execute(db);
  }
  else {
    await sql`
      INSERT INTO ${sql.raw(`\`${tableName}\``)} (id, checksum, migration_name, finished_at, applied_steps_count)
      VALUES (${id}, ${checksum}, ${migrationName}, NOW(), 1)
    `.execute(db);
  }
}
551
/**
 * Calculate the SHA-256 checksum of migration SQL.
 *
 * @param {string} sql - Raw migration SQL content.
 * @returns {string} Lowercase hex digest (64 characters).
 */
export function calculateChecksum(sql) {
  const hasher = crypto.createHash("sha256");
  hasher.update(sql);
  return hasher.digest("hex");
}
557
/**
 * Execute raw SQL using the database driver directly.
 * This bypasses Kysely for DDL statements which don't work reliably with sql.raw().
 *
 * Drivers are imported lazily so only the one matching the dialect needs to
 * be installed. An unknown dialect is a silent no-op (no else branch).
 *
 * @param dialect    "sqlite" | "postgres" | "mysql".
 * @param sqlContent Full migration SQL (may contain multiple statements).
 * @param options    { connectionUrl?, databasePath? } — databasePath for sqlite,
 *                   connectionUrl for postgres/mysql.
 */
async function executeRawSql(dialect, sqlContent, options) {
  if (dialect === "sqlite") {
    const { default: Database } = await import("better-sqlite3");
    // NOTE(review): falls back to ":memory:" when databasePath is missing,
    // which would apply the migration to a throwaway DB — confirm callers
    // always provide databasePath for sqlite.
    const sqliteDb = new Database(options.databasePath || ":memory:");
    try {
      // better-sqlite3's exec() handles multiple statements properly
      sqliteDb.exec(sqlContent);
    }
    finally {
      sqliteDb.close();
    }
  }
  else if (dialect === "postgres") {
    const { Pool } = await import("pg");
    const pool = new Pool({ connectionString: options.connectionUrl });
    try {
      // pg accepts a multi-statement string in a single query() call.
      await pool.query(sqlContent);
    }
    finally {
      await pool.end();
    }
  }
  else if (dialect === "mysql") {
    // Use mysql2 with promise wrapper
    const mysql = await import("mysql2");
    const pool = mysql.createPool({ uri: options.connectionUrl });
    const promisePool = pool.promise();
    try {
      // MySQL needs statements executed one at a time.
      // Split on ";" at end-of-line/end-of-input, then drop empty chunks and
      // chunks that START with "--". NOTE(review): a chunk whose first line
      // is a comment but which contains real SQL afterwards would be skipped
      // entirely — confirm the generator never emits that shape.
      const statements = sqlContent
        .split(/;(?:\s*\n|\s*$)/)
        .map((s) => s.trim())
        .filter((s) => s.length > 0 && !s.startsWith("--"));
      for (const statement of statements) {
        await promisePool.query(statement);
      }
    }
    finally {
      // pool.promise() returns a fresh wrapper, but end() closes the same
      // underlying pool created above.
      await pool.promise().end();
    }
  }
}
603
/**
 * Apply pending Prisma migrations.
 *
 * Scans `options.migrationsFolder` for `<14-digit-timestamp>_<name>`
 * directories (sorted, i.e. chronological), skips ones already recorded in
 * the migrations table, verifies each pending migration's SHA-256 checksum
 * against the migration log, executes its `migration.sql` via the raw
 * driver, and records it. Stops at the first checksum mismatch or
 * execution error, reporting it in `result.failed`.
 *
 * @param options - { dialect, migrationsFolder, connectionUrl?, databasePath?,
 *                    migrationsTable? (default "_prisma_migrations"),
 *                    migrationsSchema? (default "public") }
 * @returns {Promise<{applied: Array<{migrationName, duration}>,
 *                    alreadyApplied: string[],
 *                    failed?: {migrationName, error}}>}
 */
export async function applyPrismaMigrations(options) {
  const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
  const migrationsSchema = options.migrationsSchema ?? "public";
  const { db, destroy } = await createKyselyAdapter({
    dialect: options.dialect,
    connectionUrl: options.connectionUrl,
    databasePath: options.databasePath,
  });
  try {
    // Ensure migrations table exists
    await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
    // Get already applied migrations
    const appliedMigrations = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
    // Read migration folders
    const entries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
    const migrationFolders = entries
      .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
      .map((e) => e.name)
      .sort();
    // Read the checksum log once up front — it is loop-invariant (nothing in
    // this function writes it), and re-reading it per migration was wasted I/O.
    const migrationLog = await readMigrationLog(options.migrationsFolder);
    const result = {
      applied: [],
      alreadyApplied: [],
    };
    for (const folderName of migrationFolders) {
      if (appliedMigrations.has(folderName)) {
        result.alreadyApplied.push(folderName);
        continue;
      }
      const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
      let sqlContent;
      try {
        sqlContent = await fs.readFile(sqlPath, "utf-8");
      }
      catch {
        continue; // Skip if no migration.sql
      }
      const checksum = calculateChecksum(sqlContent);
      // Verify checksum against migration log
      const logEntry = migrationLog.find((m) => m.name === folderName);
      if (logEntry && logEntry.checksum !== checksum) {
        result.failed = {
          migrationName: folderName,
          error: `Checksum mismatch for migration ${folderName}.\n` +
            `Expected: ${logEntry.checksum}\n` +
            `Found: ${checksum}\n` +
            `The migration file may have been modified after generation.`,
        };
        break;
      }
      const startTime = Date.now();
      try {
        // Execute the migration SQL using direct driver access
        await executeRawSql(options.dialect, sqlContent, {
          connectionUrl: options.connectionUrl,
          databasePath: options.databasePath,
        });
        // Record the migration (still use Kysely for this since it's simple INSERT)
        await recordMigration(db, migrationsTable, migrationsSchema, options.dialect, folderName, checksum);
        result.applied.push({
          migrationName: folderName,
          duration: Date.now() - startTime,
        });
      }
      catch (error) {
        result.failed = {
          migrationName: folderName,
          error: error instanceof Error ? error.message : String(error),
        };
        break; // Stop on first failure
      }
    }
    return result;
  }
  finally {
    await destroy();
  }
}
684
/**
 * Preview pending migrations without applying them.
 *
 * Mirrors applyPrismaMigrations' discovery logic — same folder pattern
 * (`<14-digit-timestamp>_<name>`), same applied-set lookup — but only
 * collects each pending migration's name and SQL content instead of
 * executing it. Folders without a readable `migration.sql` are skipped.
 * Note: this still creates the migrations table if it does not exist.
 *
 * @param options - { dialect, migrationsFolder, connectionUrl?, databasePath?,
 *                    migrationsTable? (default "_prisma_migrations"),
 *                    migrationsSchema? (default "public") }
 * @returns {Promise<{pending: Array<{name, sql}>, alreadyApplied: string[]}>}
 */
export async function previewPrismaMigrations(options) {
  const migrationsTable = options.migrationsTable ?? "_prisma_migrations";
  const migrationsSchema = options.migrationsSchema ?? "public";
  const { db, destroy } = await createKyselyAdapter({
    dialect: options.dialect,
    connectionUrl: options.connectionUrl,
    databasePath: options.databasePath,
  });
  try {
    // Ensure migrations table exists
    await ensureMigrationsTable(db, migrationsTable, migrationsSchema, options.dialect);
    // Get already applied migrations
    const appliedMigrations = await getAppliedMigrations(db, migrationsTable, migrationsSchema, options.dialect);
    // Read migration folders
    const entries = await fs.readdir(options.migrationsFolder, { withFileTypes: true });
    const migrationFolders = entries
      .filter((e) => e.isDirectory() && /^\d{14}_/.test(e.name))
      .map((e) => e.name)
      .sort();
    const result = {
      pending: [],
      alreadyApplied: [],
    };
    for (const folderName of migrationFolders) {
      if (appliedMigrations.has(folderName)) {
        result.alreadyApplied.push(folderName);
        continue;
      }
      const sqlPath = path.join(options.migrationsFolder, folderName, "migration.sql");
      let sqlContent;
      try {
        sqlContent = await fs.readFile(sqlPath, "utf-8");
      }
      catch {
        continue; // Skip if no migration.sql
      }
      result.pending.push({
        name: folderName,
        sql: sqlContent,
      });
    }
    return result;
  }
  finally {
    await destroy();
  }
}
734
/**
 * Check if there are schema changes.
 *
 * Regenerates the schema snapshot, diffs it against the previously stored
 * snapshot (or `null` when none exists), and reports whether any change
 * category in the diff is non-empty.
 *
 * @param options.schemaPath Path to the schema file.
 * @param options.outputPath Migrations output directory (holds the snapshot).
 * @returns {Promise<boolean>} true when the diff contains any change.
 */
export async function hasPrismaSchemaChanges(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    // Every category of structural change the differ reports; any
    // non-empty one means the schema has drifted from the snapshot.
    const changeCategories = [
        diff.addedModels,
        diff.removedModels,
        diff.addedFields,
        diff.removedFields,
        diff.alteredFields,
        diff.addedUniqueConstraints,
        diff.removedUniqueConstraints,
        diff.addedIndexes,
        diff.removedIndexes,
        diff.addedForeignKeys,
        diff.removedForeignKeys,
        diff.primaryKeyChanges,
    ];
    return changeCategories.some((changes) => changes.length > 0);
}
755
/**
 * Detect potential renames by finding removed+added pairs.
 *
 * A table rename is suggested when a model is removed and another is added;
 * a column rename is suggested when, within the same table, a column is
 * removed and another is added. Pairs are matched positionally, so the
 * suggestions are heuristic — callers are expected to let the user
 * disambiguate.
 *
 * @param options.schemaPath Path to the schema file.
 * @param options.outputPath Migrations output directory (holds the snapshot).
 * @returns {{ tables: Array<{from, to}>, columns: Array<{table, from, to}> }}
 */
export async function detectPotentialRenames(options) {
    const currentSchema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    const previousSnapshot = await readSnapshot(snapshotPath);
    const diff = diffSchemas(previousSnapshot?.schema ?? null, currentSchema);
    const suggestions = { tables: [], columns: [] };
    // Pair removed and added models by index; surplus entries on either
    // side are left as genuine drops/creates rather than renames.
    const tablePairCount = Math.min(diff.removedModels.length, diff.addedModels.length);
    for (let i = 0; i < tablePairCount; i++) {
        suggestions.tables.push({
            from: diff.removedModels[i].name,
            to: diff.addedModels[i].name,
        });
    }
    // Bucket removed/added column names per table so renames are only
    // suggested within the same table.
    const groupColumnsByTable = (fields) => {
        const buckets = new Map();
        for (const { tableName, column } of fields) {
            const names = buckets.get(tableName);
            if (names) {
                names.push(column.name);
            }
            else {
                buckets.set(tableName, [column.name]);
            }
        }
        return buckets;
    };
    const removedByTable = groupColumnsByTable(diff.removedFields);
    const addedByTable = groupColumnsByTable(diff.addedFields);
    for (const [tableName, removedNames] of removedByTable) {
        const addedNames = addedByTable.get(tableName) ?? [];
        const columnPairCount = Math.min(removedNames.length, addedNames.length);
        for (let i = 0; i < columnPairCount; i++) {
            suggestions.columns.push({
                table: tableName,
                from: removedNames[i],
                to: addedNames[i],
            });
        }
    }
    return suggestions;
}
809
// Fixed header written at the top of every migration log file.
// Lines beginning with "#" are skipped as comments by parseMigrationLog.
const MIGRATION_LOG_HEADER = `# zenstack-kit migration log
# Format: <migration_name> <checksum>
`;
812
/**
 * Get the path to the migration log file.
 *
 * The log lives at `<outputPath>/meta/_migration_log`, alongside other
 * migration metadata.
 *
 * @param {string} outputPath Root migrations output directory.
 * @returns {string} Absolute or relative path to the log file.
 */
export function getMigrationLogPath(outputPath) {
    const logFileName = "_migration_log";
    return path.join(outputPath, "meta", logFileName);
}
818
/**
 * Parse migration log content into entries.
 *
 * Each non-comment line has the form `<migration_name> <checksum>`.
 * Blank lines and lines starting with "#" are ignored; malformed lines
 * (missing a name or checksum) are dropped.
 *
 * Lines are trimmed and split on runs of whitespace so that CRLF line
 * endings and accidental extra spaces/tabs do not corrupt the checksum
 * token (a trailing `\r` previously made every checksum comparison fail
 * on logs written with Windows line endings).
 *
 * @param {string} content Raw log file contents.
 * @returns {Array<{name: string, checksum: string}>} Parsed entries.
 */
function parseMigrationLog(content) {
    return content
        .split("\n")
        .map((line) => line.trim())
        .filter((line) => line && !line.startsWith("#"))
        .map((line) => {
        const [name, checksum] = line.split(/\s+/);
        return { name, checksum };
    })
        .filter((entry) => entry.name && entry.checksum);
}
831
/**
 * Serialize migration log entries to the on-disk log format: the fixed
 * header followed by one `<name> <checksum>` line per entry, with a
 * trailing newline when there is at least one entry.
 *
 * @param {Array<{name: string, checksum: string}>} entries Log entries.
 * @returns {string} Full file contents for the migration log.
 */
function serializeMigrationLog(entries) {
    if (entries.length === 0) {
        return MIGRATION_LOG_HEADER;
    }
    const body = entries.map((entry) => `${entry.name} ${entry.checksum}`).join("\n");
    return `${MIGRATION_LOG_HEADER}${body}\n`;
}
838
/**
 * Read the migration log file.
 *
 * @param {string} outputPath Root migrations output directory.
 * @returns Parsed log entries; an empty array when no log file exists yet.
 * @throws Any filesystem error other than a missing file.
 */
export async function readMigrationLog(outputPath) {
    const logPath = getMigrationLogPath(outputPath);
    let content;
    try {
        content = await fs.readFile(logPath, "utf-8");
    }
    catch (error) {
        // A missing log simply means nothing has been recorded yet;
        // every other failure is propagated to the caller.
        const isMissingFile = error instanceof Error && "code" in error && error.code === "ENOENT";
        if (isMissingFile) {
            return [];
        }
        throw error;
    }
    return parseMigrationLog(content);
}
854
/**
 * Write the migration log file, creating the meta directory if needed.
 *
 * @param {string} outputPath Root migrations output directory.
 * @param {Array<{name: string, checksum: string}>} entries Entries to persist.
 */
export async function writeMigrationLog(outputPath, entries) {
    const logPath = getMigrationLogPath(outputPath);
    const metaDir = path.dirname(logPath);
    await fs.mkdir(metaDir, { recursive: true });
    const serialized = serializeMigrationLog(entries);
    await fs.writeFile(logPath, serialized, "utf-8");
}
862
/**
 * Append a single entry to the migration log.
 *
 * Implemented as read-modify-write of the whole file (not an atomic
 * append), which matches how the log is consumed elsewhere.
 *
 * @param {string} outputPath Root migrations output directory.
 * @param {{name: string, checksum: string}} entry Entry to append.
 */
export async function appendToMigrationLog(outputPath, entry) {
    const existing = await readMigrationLog(outputPath);
    await writeMigrationLog(outputPath, [...existing, entry]);
}
870
/**
 * Scan migration folders and compute checksums for each.
 *
 * Looks at every `<14-digit timestamp>_<label>` directory under
 * `outputPath` (sorted by name, i.e. chronologically) and checksums its
 * migration.sql. Folders without a readable migration.sql are skipped.
 *
 * @param {string} outputPath Root migrations output directory.
 * @returns {Promise<Array<{name: string, checksum: string}>>} One entry
 *          per folder with a migration.sql; empty when the directory
 *          itself does not exist.
 * @throws Any readdir error other than a missing directory.
 */
export async function scanMigrationFolders(outputPath) {
    let dirEntries;
    try {
        dirEntries = await fs.readdir(outputPath, { withFileTypes: true });
    }
    catch (error) {
        // No migrations directory yet — nothing to scan.
        if (error instanceof Error && "code" in error && error.code === "ENOENT") {
            return [];
        }
        throw error;
    }
    const folderNames = dirEntries
        .filter((entry) => entry.isDirectory() && /^\d{14}_/.test(entry.name))
        .map((entry) => entry.name)
        .sort();
    const entries = [];
    for (const folderName of folderNames) {
        try {
            const sql = await fs.readFile(path.join(outputPath, folderName, "migration.sql"), "utf-8");
            entries.push({ name: folderName, checksum: calculateChecksum(sql) });
        }
        catch {
            // Skip folders without a readable migration.sql.
        }
    }
    return entries;
}
901
/**
 * Check if a schema snapshot file exists.
 *
 * @param {string} outputPath Migrations output directory.
 * @returns {Promise<boolean>} true when the snapshot file is accessible.
 */
export async function hasSnapshot(outputPath) {
    const { snapshotPath } = getSnapshotPaths(outputPath);
    // fs.access resolves when the file is reachable and rejects otherwise;
    // map both outcomes to a boolean.
    return fs.access(snapshotPath).then(() => true, () => false);
}
914
/**
 * Initialize the snapshot from the schema without generating a migration.
 *
 * @param options.schemaPath Path to the schema file.
 * @param options.outputPath Migrations output directory.
 * @returns {{ snapshotPath: string, tableCount: number }} Where the
 *          snapshot was written and how many tables it captured.
 */
export async function initializeSnapshot(options) {
    const schema = await generateSchemaSnapshot(options.schemaPath);
    const { snapshotPath } = getSnapshotPaths(options.outputPath);
    await writeSnapshot(snapshotPath, schema);
    return { snapshotPath, tableCount: schema.tables.length };
}
926
/**
 * Re-export getSnapshotPaths for external use, so consumers of this
 * module can resolve snapshot file locations with the same helper the
 * functions above use internally.
 */
export { getSnapshotPaths };