@momentumcms/migrations 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,640 @@
1
+ "use strict";
2
+
3
+ // libs/migrations/src/cli/rollback.ts
4
+ var import_node_path2 = require("node:path");
5
+
6
+ // libs/core/src/lib/collections/define-collection.ts
7
/**
 * Normalizes and validates a collection definition.
 *
 * Applies the `timestamps: true` default (callers may override it via
 * `config`), then enforces that a slug exists, that at least one field is
 * declared, and that the slug is kebab-case. Throws an Error describing the
 * first violation found; otherwise returns the merged definition.
 */
function defineCollection(config) {
  const collection = { timestamps: true, ...config };
  const { slug, fields } = collection;
  if (!slug) {
    throw new Error("Collection must have a slug");
  }
  if (!fields || fields.length === 0) {
    throw new Error(`Collection "${slug}" must have at least one field`);
  }
  // Lowercase letters/digits/hyphens, starting with a letter.
  const kebabCase = /^[a-z][a-z0-9-]*$/;
  if (!kebabCase.test(slug)) {
    throw new Error(
      `Collection slug "${slug}" must be kebab-case (lowercase letters, numbers, and hyphens, starting with a letter)`
    );
  }
  return collection;
}
26
+
27
+ // libs/core/src/lib/fields/field-builders.ts
28
/**
 * Builds a "text" field definition. Any entries in `options` are merged on
 * top of the base definition (so options may override `type`/`name` too,
 * matching the other field builders).
 */
function text(name, options = {}) {
  const base = { name, type: "text" };
  return Object.assign(base, options);
}
35
/**
 * Builds a "number" field definition; `options` entries are merged on top of
 * the base definition.
 */
function number(name, options = {}) {
  const base = { name, type: "number" };
  return Object.assign(base, options);
}
42
/**
 * Builds a "json" field definition; `options` entries are merged on top of
 * the base definition.
 */
function json(name, options = {}) {
  const base = { name, type: "json" };
  return Object.assign(base, options);
}
49
+
50
+ // libs/core/src/lib/collections/media.collection.ts
51
// Built-in "media" collection used for file uploads.
// Field definitions come from the field builders above (text/number/json);
// access rules allow public reads but require an authenticated user
// (req.user) for any mutation.
var MediaCollection = defineCollection({
  slug: "media",
  labels: {
    singular: "Media",
    plural: "Media"
  },
  // Accepted upload MIME types: images, PDFs, video and audio.
  upload: {
    mimeTypes: ["image/*", "application/pdf", "video/*", "audio/*"]
  },
  // Admin UI hints: which field titles a document and the list columns shown.
  admin: {
    useAsTitle: "filename",
    defaultColumns: ["filename", "mimeType", "filesize", "createdAt"]
  },
  fields: [
    text("filename", {
      required: true,
      label: "Filename",
      description: "Original filename of the uploaded file"
    }),
    text("mimeType", {
      required: true,
      label: "MIME Type",
      description: "File MIME type (e.g., image/jpeg, application/pdf)"
    }),
    number("filesize", {
      label: "File Size",
      description: "File size in bytes"
    }),
    // Internal storage key; hidden from the admin UI.
    text("path", {
      label: "Storage Path",
      description: "Path/key where the file is stored",
      admin: {
        hidden: true
      }
    }),
    text("url", {
      label: "URL",
      description: "Public URL to access the file"
    }),
    text("alt", {
      label: "Alt Text",
      description: "Alternative text for accessibility"
    }),
    number("width", {
      label: "Width",
      description: "Image width in pixels (for images only)"
    }),
    number("height", {
      label: "Height",
      description: "Image height in pixels (for images only)"
    }),
    // Stored as JSON; hidden from the admin UI.
    json("focalPoint", {
      label: "Focal Point",
      description: "Focal point coordinates for image cropping",
      admin: {
        hidden: true
      }
    })
  ],
  access: {
    // Media is readable by anyone by default
    read: () => true,
    // Only authenticated users can create/update/delete
    create: ({ req }) => !!req?.user,
    update: ({ req }) => !!req?.user,
    delete: ({ req }) => !!req?.user
  }
});
119
+
120
+ // libs/core/src/lib/migrations.ts
121
/**
 * Resolves the effective migration mode. An explicit "push" or "migrate"
 * wins; otherwise NODE_ENV decides — "migrate" in production, "push"
 * everywhere else.
 */
function resolveMigrationMode(mode) {
  switch (mode) {
    case "push":
    case "migrate":
      return mode;
    default:
      return process.env["NODE_ENV"] === "production" ? "migrate" : "push";
  }
}
129
/**
 * Fills in defaults for a migration config object; returns undefined when
 * no config was given. `cloneTest` defaults to on only in "migrate" mode,
 * `autoApply` only in "push" mode, and danger detection is on by default.
 */
function resolveMigrationConfig(config) {
  if (!config) {
    return void 0;
  }
  const mode = resolveMigrationMode(config.mode);
  const directory = config.directory ?? "./migrations";
  const cloneTest = config.cloneTest ?? mode === "migrate";
  const dangerDetection = config.dangerDetection ?? true;
  const autoApply = config.autoApply ?? mode === "push";
  return { ...config, directory, mode, cloneTest, dangerDetection, autoApply };
}
142
+
143
+ // libs/migrations/src/lib/loader/migration-loader.ts
144
+ var import_node_fs = require("node:fs");
145
+ var import_node_path = require("node:path");
146
+ var import_node_url = require("node:url");
147
// Matches migration filenames: a 14-digit timestamp prefix, an underscore,
// a name, and a .ts extension — e.g. "20240101120000_add-users.ts".
var MIGRATION_FILE_PATTERN = /^\d{14}_.+\.ts$/;
148
/**
 * Type guard: true when `value` is a non-null object exposing a non-null
 * object `meta` plus `up` and `down` functions.
 */
function isMigrationFile(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  const candidate = value;
  const hasMeta = "meta" in candidate && typeof candidate["meta"] === "object" && candidate["meta"] !== null;
  const hasUp = "up" in candidate && typeof candidate["up"] === "function";
  const hasDown = "down" in candidate && typeof candidate["down"] === "function";
  return hasMeta && hasUp && hasDown;
}
154
/**
 * Validates an imported migration module and returns its migration file
 * object (the default export when present, otherwise the module itself).
 * When validation fails, throws an Error naming the first missing piece
 * (module shape, `meta`, `up`, `down`) or a generic mismatch error.
 */
function validateMigrationModule(mod, filePath) {
  const file = mod["default"] ?? mod;
  if (isMigrationFile(file)) {
    return file;
  }
  // Diagnose the specific problem so the error message is actionable.
  if (typeof file !== "object" || file === null) {
    throw new Error(`Migration file ${filePath} does not export a valid module`);
  }
  if (!("meta" in file) || typeof file["meta"] !== "object") {
    throw new Error(`Migration file ${filePath} is missing a valid 'meta' export`);
  }
  if (!("up" in file) || typeof file["up"] !== "function") {
    throw new Error(`Migration file ${filePath} is missing an 'up' function export`);
  }
  if (!("down" in file) || typeof file["down"] !== "function") {
    throw new Error(`Migration file ${filePath} is missing a 'down' function export`);
  }
  // e.g. meta === null passes the typeof check above but fails the guard.
  throw new Error(`Migration file ${filePath} does not conform to MigrationFile interface`);
}
173
/**
 * Loads and validates all migration files from `directory`, sorted by
 * filename (the 14-digit timestamp prefix makes this chronological).
 * Returns [] when the directory does not exist or holds no matching files.
 * Each entry is { name, file } where name is the filename minus ".ts".
 */
async function loadMigrationsFromDisk(directory) {
  if (!(0, import_node_fs.existsSync)(directory)) {
    return [];
  }
  const candidates = (0, import_node_fs.readdirSync)(directory)
    .filter((entry) => MIGRATION_FILE_PATTERN.test(entry))
    .sort();
  const migrations = [];
  for (const filename of candidates) {
    const filePath = (0, import_node_path.join)(directory, filename);
    const moduleUrl = (0, import_node_url.pathToFileURL)(filePath).href;
    const mod = await import(moduleUrl);
    migrations.push({
      name: filename.replace(/\.ts$/, ""),
      file: validateMigrationModule(mod, filePath)
    });
  }
  return migrations;
}
190
+
191
+ // libs/migrations/src/lib/migration.types.ts
192
// Name of the bookkeeping table that records which migrations have run.
var MIGRATION_TRACKING_TABLE = "_momentum_migrations";
193
+
194
+ // libs/migrations/src/lib/tracking/migration-tracker.ts
195
// Creates the migration tracking table if it does not already exist.
// The schema differs per dialect: PostgreSQL uses VARCHAR/TIMESTAMPTZ,
// while the non-PostgreSQL branch (SQLite) uses TEXT columns throughout
// ("appliedAt" is TEXT there — presumably an ISO string; confirm with the
// code that inserts records).
async function ensureTrackingTable(db, dialect) {
  if (dialect === "postgresql") {
    await db.execute(`
      CREATE TABLE IF NOT EXISTS "${MIGRATION_TRACKING_TABLE}" (
        "id" VARCHAR(36) PRIMARY KEY,
        "name" VARCHAR(255) NOT NULL UNIQUE,
        "batch" INTEGER NOT NULL,
        "checksum" VARCHAR(64) NOT NULL,
        "appliedAt" TIMESTAMPTZ NOT NULL,
        "executionMs" INTEGER NOT NULL
      )
    `);
  } else {
    await db.execute(`
      CREATE TABLE IF NOT EXISTS "${MIGRATION_TRACKING_TABLE}" (
        "id" TEXT PRIMARY KEY,
        "name" TEXT NOT NULL UNIQUE,
        "batch" INTEGER NOT NULL,
        "checksum" TEXT NOT NULL,
        "appliedAt" TEXT NOT NULL,
        "executionMs" INTEGER NOT NULL
      )
    `);
  }
}
220
/**
 * Deletes the tracking row for a migration by name. Returns true when a
 * row was actually removed (db.execute reports affected-row count).
 */
async function removeMigrationRecord(db, name, dialect) {
  const ph = dialect === "postgresql" ? "$1" : "?";
  const sql = `DELETE FROM "${MIGRATION_TRACKING_TABLE}" WHERE "name" = ${ph}`;
  const affected = await db.execute(sql, [name]);
  return affected > 0;
}
228
/**
 * Fetches all tracking records in a given batch, ordered by name DESC
 * (reverse-chronological thanks to the timestamp filename prefix — the
 * order migrations should be rolled back in).
 */
async function getMigrationsByBatch(db, batch, dialect) {
  const ph = dialect === "postgresql" ? "$1" : "?";
  const sql = `SELECT * FROM "${MIGRATION_TRACKING_TABLE}" WHERE "batch" = ${ph} ORDER BY "name" DESC`;
  const rows = await db.query(sql, [batch]);
  return rows.map((row) => toTrackingRecord(row));
}
236
/**
 * Returns the highest batch number recorded, or 0 when the table is empty
 * (MAX over zero rows yields NULL, which fails the number check).
 */
async function getLatestBatchNumber(db) {
  const sql = `SELECT MAX("batch") as max_batch FROM "${MIGRATION_TRACKING_TABLE}"`;
  const rows = await db.query(sql);
  const max = rows[0]?.max_batch;
  return typeof max === "number" ? max : 0;
}
242
/**
 * Normalizes a raw database row into a tracking record with coerced types
 * (ids/names/checksums/timestamps as strings, counters as numbers).
 */
function toTrackingRecord(row) {
  const { id, name, batch, checksum, appliedAt, executionMs } = row;
  return {
    id: String(id),
    name: String(name),
    batch: Number(batch),
    checksum: String(checksum),
    appliedAt: String(appliedAt),
    executionMs: Number(executionMs)
  };
}
252
+
253
+ // libs/migrations/src/lib/runner/migrate-runner.ts
254
// Rolls back the most recent migration batch, newest migration first
// (records arrive name-DESC from getMigrationsByBatch). Stops at the first
// failure or missing file so the batch is never rolled back past a broken
// point. Returns a summary: { batch, results, successCount, failCount, dangers }.
async function rollbackBatch(options) {
  const { migrations, dialect, tracker, buildContext, log } = options;
  // Fallback logger that discards output when no logger is supplied.
  const noop = {
    info: () => {
    },
    warn: () => {
    }
  };
  const logger = log ?? noop;
  await ensureTrackingTable(tracker, dialect);
  const latestBatch = await getLatestBatchNumber(tracker);
  // Batch 0 means the tracking table is empty — nothing was ever applied.
  if (latestBatch === 0) {
    logger.info("Nothing to rollback.");
    return { batch: 0, results: [], successCount: 0, failCount: 0, dangers: null };
  }
  const batchMigrations = await getMigrationsByBatch(tracker, latestBatch, dialect);
  if (batchMigrations.length === 0) {
    logger.info("No migrations in latest batch.");
    return { batch: 0, results: [], successCount: 0, failCount: 0, dangers: null };
  }
  logger.info(`Rolling back batch ${latestBatch} (${batchMigrations.length} migration(s))...`);
  // Index loaded migration files by name for O(1) lookup per tracking record.
  const migrationMap = new Map(migrations.map((m) => [m.name, m]));
  const results = [];
  const ctx = buildContext();
  for (const record of batchMigrations) {
    const migration = migrationMap.get(record.name);
    if (!migration) {
      // The file for a recorded migration is gone — record the failure and
      // abort the rest of the batch rather than silently skipping it.
      results.push({
        name: record.name,
        success: false,
        executionMs: 0,
        error: `Migration file "${record.name}" not found`
      });
      logger.warn(` MISSING: ${record.name}`);
      break;
    }
    const start = Date.now();
    try {
      await migration.file.down(ctx);
      const executionMs = Date.now() - start;
      // Only delete the tracking row once down() has succeeded.
      await removeMigrationRecord(tracker, record.name, dialect);
      results.push({ name: record.name, success: true, executionMs });
      logger.info(` Rolled back: ${record.name} (${executionMs}ms)`);
    } catch (err) {
      const executionMs = Date.now() - start;
      const errMsg = err instanceof Error ? err.message : String(err);
      const errorCode = extractErrorCode(err);
      results.push({ name: record.name, success: false, executionMs, error: errMsg, errorCode });
      logger.warn(` FAILED rollback: ${record.name} \u2014 ${errMsg}`);
      // Abort the batch on the first failure.
      break;
    }
  }
  const successCount = results.filter((r) => r.success).length;
  const failCount = results.filter((r) => !r.success).length;
  return { batch: latestBatch, results, successCount, failCount, dangers: null };
}
310
/**
 * Pulls a string `code` property (e.g. a database error code) off an
 * unknown error value; returns undefined for anything else.
 */
function extractErrorCode(err) {
  if (err === null || typeof err !== "object") {
    return void 0;
  }
  if (!("code" in err) || typeof err.code !== "string") {
    return void 0;
  }
  return err.code;
}
316
+
317
+ // libs/migrations/src/cli/shared.ts
318
+ var import_node_url2 = require("node:url");
319
+
320
+ // libs/migrations/src/lib/helpers/data-helpers.ts
321
// Builds the `ctx.data` helper bundle used by migration up()/down() hooks.
// Every helper issues raw SQL through `db` and branches on `dialect`
// (PostgreSQL vs SQLite) where the syntax differs; PostgreSQL batching keys
// on the ctid system column, SQLite on rowid.
// NOTE(review): table/column names, `where` fragments and SQL expressions
// are interpolated directly into statements — callers must pass only
// trusted, migration-authored identifiers/SQL, never external input.
function createDataHelpers(db, dialect) {
  // Positional parameter placeholder: $N for PostgreSQL, ? for SQLite.
  const ph = (index) => dialect === "postgresql" ? `$${index}` : "?";
  return {
    // Fills NULLs in `column` with `value`, in batches (default 1000 rows)
    // to bound per-statement work; loops until a batch comes up short.
    async backfill(table, column, value, options) {
      const where = options?.where ? ` AND (${options.where})` : "";
      const batchSize = options?.batchSize ?? 1e3;
      let totalAffected = 0;
      if (dialect === "postgresql") {
        let affected;
        do {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${ph(1)}
            WHERE ctid IN (
              SELECT ctid FROM "${table}"
              WHERE "${column}" IS NULL${where}
              LIMIT ${batchSize}
            )`,
            [value]
          );
          totalAffected += affected;
        } while (affected >= batchSize);
      } else {
        let affected;
        do {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${ph(1)}
            WHERE rowid IN (
              SELECT rowid FROM "${table}"
              WHERE "${column}" IS NULL${where}
              LIMIT ${batchSize}
            )`,
            [value]
          );
          totalAffected += affected;
        } while (affected >= batchSize);
      }
      return totalAffected;
    },
    // Applies a SQL expression to `column`. batchSize <= 0 (the default)
    // updates all matching rows in one statement; otherwise updates in
    // batches until a batch affects fewer rows than batchSize.
    async transform(table, column, sqlExpression, options) {
      const where = options?.where ? ` WHERE ${options.where}` : "";
      const batchSize = options?.batchSize ?? 0;
      if (batchSize <= 0) {
        return db.execute(
          `UPDATE "${table}" SET "${column}" = ${sqlExpression}${where}`
        );
      }
      let totalAffected = 0;
      let affected;
      do {
        if (dialect === "postgresql") {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${sqlExpression}
            WHERE ctid IN (
              SELECT ctid FROM "${table}"${where}
              LIMIT ${batchSize}
            )`
          );
        } else {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${sqlExpression}
            WHERE rowid IN (
              SELECT rowid FROM "${table}"${where}
              LIMIT ${batchSize}
            )`
          );
        }
        totalAffected += affected;
      } while (affected >= batchSize);
      return totalAffected;
    },
    // Renames a column via add-copy-drop (portable across engines without
    // RENAME COLUMN support). Not atomic: three separate statements.
    async renameColumn(table, from, to, columnType) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${to}" ${columnType}`
      );
      await db.execute(
        `UPDATE "${table}" SET "${to}" = "${from}"`
      );
      await db.execute(
        `ALTER TABLE "${table}" DROP COLUMN "${from}"`
      );
    },
    // Adds each target column and populates it from its SQL expression.
    // The source column parameter is unused — the source is left untouched.
    async splitColumn(table, _sourceColumn, targets) {
      for (const target of targets) {
        await db.execute(
          `ALTER TABLE "${table}" ADD COLUMN "${target.name}" ${target.type}`
        );
        await db.execute(
          `UPDATE "${table}" SET "${target.name}" = ${target.expression}`
        );
      }
    },
    // Adds `targetColumn` and fills it from `mergeExpression`; the source
    // columns are not dropped here.
    async mergeColumns(table, _sourceColumns, targetColumn, targetType, mergeExpression) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${targetColumn}" ${targetType}`
      );
      await db.execute(
        `UPDATE "${table}" SET "${targetColumn}" = ${mergeExpression}`
      );
    },
    // INSERT ... SELECT from sourceTable into targetTable. columnMapping maps
    // a target column to either a source column name (string) or an
    // { expression } object inserted verbatim. Returns affected-row count.
    async copyData(sourceTable, targetTable, columnMapping, options) {
      const targetCols = [];
      const sourceCols = [];
      for (const [target, source] of Object.entries(columnMapping)) {
        targetCols.push(`"${target}"`);
        if (typeof source === "string") {
          sourceCols.push(`"${source}"`);
        } else {
          sourceCols.push(source.expression);
        }
      }
      const where = options?.where ? ` WHERE ${options.where}` : "";
      const affected = await db.execute(
        `INSERT INTO "${targetTable}" (${targetCols.join(", ")})
        SELECT ${sourceCols.join(", ")} FROM "${sourceTable}"${where}`
      );
      return affected;
    },
    // Copies a scalar column into a key of a JSON column, creating the JSON
    // object when the column is NULL (jsonb concatenation in PostgreSQL,
    // json_set in SQLite).
    async columnToJson(table, sourceColumn, jsonColumn, jsonKey) {
      if (dialect === "postgresql") {
        await db.execute(
          `UPDATE "${table}" SET "${jsonColumn}" = COALESCE("${jsonColumn}", '{}'::jsonb) || jsonb_build_object('${jsonKey}', "${sourceColumn}")`
        );
      } else {
        await db.execute(
          `UPDATE "${table}" SET "${jsonColumn}" = json_set(COALESCE("${jsonColumn}", '{}'), '$.${jsonKey}', "${sourceColumn}")`
        );
      }
    },
    // Extracts a JSON key into a newly added scalar column of `targetType`
    // (->> in PostgreSQL, json_extract in SQLite).
    async jsonToColumn(table, jsonColumn, jsonKey, targetColumn, targetType) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${targetColumn}" ${targetType}`
      );
      if (dialect === "postgresql") {
        await db.execute(
          `UPDATE "${table}" SET "${targetColumn}" = "${jsonColumn}"->>'${jsonKey}'`
        );
      } else {
        await db.execute(
          `UPDATE "${table}" SET "${targetColumn}" = json_extract("${jsonColumn}", '$.${jsonKey}')`
        );
      }
    },
    // Deletes duplicate rows sharing values in `columns`, keeping one row per
    // group per keepStrategy ("latest" by default; "latest"/"earliest" order
    // by a "createdAt" column, which the table must therefore have).
    // NOTE(review): the SQLite branch always keeps MIN(rowid) — keepStrategy
    // only affects PostgreSQL; confirm whether that asymmetry is intended.
    async dedup(table, columns, keepStrategy = "latest") {
      const colList = columns.map((c) => `"${c}"`).join(", ");
      let orderBy;
      switch (keepStrategy) {
        case "earliest":
          orderBy = '"createdAt" ASC';
          break;
        case "first":
          orderBy = dialect === "postgresql" ? "ctid ASC" : "rowid ASC";
          break;
        default:
          orderBy = '"createdAt" DESC';
          break;
      }
      if (dialect === "postgresql") {
        return db.execute(
          `DELETE FROM "${table}" WHERE ctid NOT IN (
            SELECT DISTINCT ON (${colList}) ctid
            FROM "${table}"
            ORDER BY ${colList}, ${orderBy}
          )`
        );
      }
      return db.execute(
        `DELETE FROM "${table}" WHERE rowid NOT IN (
          SELECT MIN(rowid) FROM "${table}"
          GROUP BY ${colList}
        )`
      );
    }
  };
}
495
+
496
+ // libs/migrations/src/cli/shared.ts
497
/**
 * Loose structural check for a resolved Momentum config: a non-null object
 * carrying both `collections` and `db` keys.
 */
function isResolvedConfig(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  return "collections" in value && "db" in value;
}
500
/**
 * Dynamically imports the Momentum config module at `configPath` and
 * validates it: must be a resolved config object with a db.adapter and at
 * least one collection. Returns the validated config (default export when
 * present, otherwise the module namespace).
 */
async function loadMomentumConfig(configPath) {
  const moduleUrl = (0, import_node_url2.pathToFileURL)(configPath).href;
  const mod = await import(moduleUrl);
  const candidate = mod["default"] ?? mod;
  if (!isResolvedConfig(candidate)) {
    throw new Error(`Config at ${configPath} is not a valid ResolvedMomentumConfig`);
  }
  if (!candidate.db?.adapter) {
    throw new Error(`Config at ${configPath} is missing db.adapter`);
  }
  const collections = candidate.collections;
  if (!collections || collections.length === 0) {
    throw new Error(`Config at ${configPath} has no collections`);
  }
  return candidate;
}
515
/**
 * Reads the SQL dialect off a database adapter, failing loudly when the
 * adapter factory did not set one.
 */
function resolveDialect(adapter) {
  const { dialect } = adapter;
  if (dialect) {
    return dialect;
  }
  throw new Error(
    "DatabaseAdapter.dialect is not set. Ensure your adapter factory (postgresAdapter/sqliteAdapter) sets the dialect property."
  );
}
523
/**
 * Wraps a database adapter's raw query/execute methods into the minimal
 * { query, execute } interface the migration tracker uses. Binds both
 * methods so the adapter keeps its `this`. Throws when the adapter lacks
 * either method.
 */
function buildTrackerFromAdapter(adapter) {
  if (!adapter.queryRaw || !adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw and executeRaw for migration tracking");
  }
  const queryRaw = adapter.queryRaw.bind(adapter);
  const executeRaw = adapter.executeRaw.bind(adapter);
  return {
    query: async (sql, params) => queryRaw(sql, params),
    execute: async (sql, params) => executeRaw(sql, params)
  };
}
538
/**
 * Builds the migration execution context handed to up()/down() hooks:
 * raw `sql`/`query` wrappers over the adapter, the `data` helper bundle,
 * the dialect, and a logger that writes to stderr via console.warn.
 * Throws when the adapter lacks queryRaw/executeRaw.
 */
function buildContextFromAdapter(adapter, dialect) {
  if (!adapter.queryRaw || !adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw and executeRaw for migration context");
  }
  const queryRaw = adapter.queryRaw.bind(adapter);
  const executeRaw = adapter.executeRaw.bind(adapter);
  // Minimal db facade consumed by the data helpers.
  const dataDb = {
    execute: async (sql, params) => executeRaw(sql, params),
    query: async (sql, params) => queryRaw(sql, params)
  };
  return {
    // Fire-and-check execute: result is discarded, errors propagate.
    async sql(query, params) {
      await executeRaw(query, params);
    },
    async query(sql, params) {
      return queryRaw(sql, params);
    },
    data: createDataHelpers(dataDb, dialect),
    dialect,
    log: {
      info: (msg) => {
        console.warn(`[migration] ${msg}`);
      },
      warn: (msg) => {
        console.warn(`[migration:warn] ${msg}`);
      }
    }
  };
}
572
/**
 * Parses CLI arguments for the migration commands.
 *
 * Returns { configPath, name, dryRun, testOnly, skipCloneTest } where
 * configPath is the first positional (non-flag) argument, `name` is the
 * value following `--name` (if any), and the booleans reflect the presence
 * of their flags. Throws a usage Error when no config path is given.
 *
 * Fix: previously the config path was picked with
 * `args.find((a) => !a.startsWith("--"))`, so the VALUE of `--name` could be
 * mistaken for the config path when flags preceded the positional argument
 * (e.g. `--name foo cfg.ts` yielded configPath "foo"). The `--name` value is
 * now consumed before positionals are selected.
 */
function parseMigrationArgs(args) {
  let name;
  const positional = [];
  for (let i = 0; i < args.length; i++) {
    const arg = args[i];
    if (arg === "--name") {
      // Consume the following token as the migration name, if present.
      if (args[i + 1]) {
        name = args[i + 1];
        i++;
      }
      continue;
    }
    if (!arg.startsWith("--")) {
      positional.push(arg);
    }
  }
  const configPath = positional[0];
  if (!configPath) {
    throw new Error("Usage: npx tsx <command>.ts <configPath> [options]");
  }
  return {
    configPath,
    name,
    dryRun: args.includes("--dry-run"),
    testOnly: args.includes("--test-only"),
    skipCloneTest: args.includes("--skip-clone-test")
  };
}
590
+
591
+ // libs/migrations/src/cli/rollback.ts
592
// CLI entry for `rollback`: loads the Momentum config, discovers migration
// files on disk, and rolls back the latest applied batch. Exits non-zero
// when config is missing or any rollback fails; returns quietly when there
// is nothing to do.
async function main() {
  const args = parseMigrationArgs(process.argv.slice(2));
  const config = await loadMomentumConfig((0, import_node_path2.resolve)(args.configPath));
  const adapter = config.db.adapter;
  const dialect = resolveDialect(adapter);
  // Pass {} so a missing `migrations` key still yields a defaulted config.
  const migrationConfig = resolveMigrationConfig(config.migrations ?? {});
  if (!migrationConfig) {
    console.warn("No migration config found.");
    process.exit(1);
  }
  const directory = (0, import_node_path2.resolve)(migrationConfig.directory);
  const migrations = await loadMigrationsFromDisk(directory);
  if (migrations.length === 0) {
    console.warn("No migration files found in", directory);
    return;
  }
  const tracker = buildTrackerFromAdapter(adapter);
  const buildContext = () => buildContextFromAdapter(adapter, dialect);
  // console.warn keeps progress output on stderr, leaving stdout clean.
  const log = {
    info: (msg) => console.warn(`[migration] ${msg}`),
    warn: (msg) => console.warn(`[migration:warn] ${msg}`)
  };
  const result = await rollbackBatch({
    migrations,
    dialect,
    tracker,
    buildContext,
    log
  });
  // batch 0 means the tracking table was empty or the batch had no rows.
  if (result.batch === 0) {
    console.warn("Nothing to rollback.");
    return;
  }
  console.warn(`
Rolled back batch ${result.batch}: ${result.successCount} migration(s)`);
  if (result.failCount > 0) {
    console.error(`Failed: ${result.failCount} migration(s)`);
    for (const r of result.results) {
      if (!r.success) {
        console.error(` ${r.name}: ${r.error}`);
      }
    }
    // Non-zero exit so CI/scripts can detect a partial rollback.
    process.exit(1);
  }
}
637
// Entry point: run the rollback and exit non-zero on any unhandled failure.
main().catch((err) => {
  console.error("Migration rollback failed:", err);
  process.exit(1);
});