@momentumcms/migrations 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.cjs ADDED
@@ -0,0 +1,2515 @@
1
"use strict";
// esbuild-generated CommonJS interop prelude (do not edit by hand).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines every entry of `all` on `target` as an enumerable getter, giving
// lazy, live export bindings (the value is read when accessed, not here).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping the key
// named `except` and keys `to` already owns. `desc` is abused as a scratch
// slot: the descriptor lookup result decides the copied enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a module namespace in a fresh object tagged __esModule for interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
// libs/migrations/src/index.ts
// Public API of @momentumcms/migrations: every exported symbol is registered
// here as a lazy getter and exposed through `module.exports` below.
var src_exports = {};
__export(src_exports, {
  INTERNAL_TABLES: () => INTERNAL_TABLES,
  MIGRATION_TRACKING_TABLE: () => MIGRATION_TRACKING_TABLE,
  areTypesCompatible: () => areTypesCompatible,
  buildCloneDbFromAdapter: () => buildCloneDbFromAdapter,
  buildContextFromAdapter: () => buildContextFromAdapter,
  buildIntrospector: () => buildIntrospector,
  buildPushDbFromAdapter: () => buildPushDbFromAdapter,
  buildTrackerFromAdapter: () => buildTrackerFromAdapter,
  cloneTestApply: () => cloneTestApply,
  collectionToTableSnapshot: () => collectionToTableSnapshot,
  collectionsToSchema: () => collectionsToSchema,
  computeSchemaChecksum: () => computeSchemaChecksum,
  createDataHelpers: () => createDataHelpers,
  createSchemaSnapshot: () => createSchemaSnapshot,
  deserializeSnapshot: () => deserializeSnapshot,
  detectDangers: () => detectDangers,
  diffSchemas: () => diffSchemas,
  ensureTrackingTable: () => ensureTrackingTable,
  fieldToColumnType: () => fieldToColumnType,
  fieldToPostgresType: () => fieldToPostgresType,
  fieldToSqliteType: () => fieldToSqliteType,
  generateMigrationFileContent: () => generateMigrationFileContent,
  generateMigrationName: () => generateMigrationName,
  getAppliedMigrations: () => getAppliedMigrations,
  getLatestBatchNumber: () => getLatestBatchNumber,
  getMigrationStatus: () => getMigrationStatus,
  getMigrationsByBatch: () => getMigrationsByBatch,
  getNextBatchNumber: () => getNextBatchNumber,
  getSnapshotPath: () => getSnapshotPath,
  introspectPostgres: () => introspectPostgres,
  introspectSqlite: () => introspectSqlite,
  isMigrationApplied: () => isMigrationApplied,
  loadMigrationsFromDisk: () => loadMigrationsFromDisk,
  normalizeColumnType: () => normalizeColumnType,
  operationToReverseSql: () => operationToReverseSql,
  operationToSql: () => operationToSql,
  operationsToDownSql: () => operationsToDownSql,
  operationsToUpSql: () => operationsToUpSql,
  parseMigrationArgs: () => parseMigrationArgs,
  readSnapshot: () => readSnapshot,
  recordMigration: () => recordMigration,
  removeMigrationRecord: () => removeMigrationRecord,
  resolveDialect: () => resolveDialect,
  rollbackBatch: () => rollbackBatch,
  runMigrations: () => runMigrations,
  runPush: () => runPush,
  serializeSnapshot: () => serializeSnapshot,
  writeSnapshot: () => writeSnapshot
});
module.exports = __toCommonJS(src_exports);
73
+
74
// libs/migrations/src/lib/schema/schema-snapshot.ts
var import_node_crypto = require("node:crypto");
// Tables owned by the framework itself; introspection skips them so they
// never appear in diffs or migrations.
var INTERNAL_TABLES = /* @__PURE__ */ new Set(["_momentum_migrations", "_momentum_seeds", "_globals"]);
// Computes a stable SHA-256 hex digest over a table list. Tables, columns,
// foreign keys and indexes are each sorted by name/constraint name first,
// so the checksum is independent of declaration or introspection order.
// NOTE: column/FK/index objects are serialized with their full key set, so
// any extra properties on them do affect the digest.
function computeSchemaChecksum(tables) {
  const normalized = tables.map((t) => ({
    name: t.name,
    columns: [...t.columns].sort((a, b) => a.name.localeCompare(b.name)),
    foreignKeys: [...t.foreignKeys].sort(
      (a, b) => a.constraintName.localeCompare(b.constraintName)
    ),
    indexes: [...t.indexes].sort((a, b) => a.name.localeCompare(b.name))
  })).sort((a, b) => a.name.localeCompare(b.name));
  const json2 = JSON.stringify(normalized);
  return (0, import_node_crypto.createHash)("sha256").update(json2).digest("hex");
}
89
// Builds a schema snapshot for `tables`, stamping the capture time and a
// content checksum so later runs can detect drift without a full compare.
function createSchemaSnapshot(dialect, tables) {
  const capturedAt = new Date().toISOString();
  const checksum = computeSchemaChecksum(tables);
  return { dialect, tables, capturedAt, checksum };
}
97
// Renders a snapshot as pretty-printed JSON (single-space indent).
function serializeSnapshot(snapshot) {
  return JSON.stringify(snapshot, null, " ");
}
// Parses snapshot JSON and validates its shape before returning it.
// Throws if the payload is not a structurally valid snapshot.
function deserializeSnapshot(json2) {
  const parsed = JSON.parse(json2);
  if (isSchemaSnapshot(parsed)) {
    return parsed;
  }
  throw new Error("Invalid schema snapshot JSON");
}
// Structural type guard for a persisted schema snapshot.
function isSchemaSnapshot(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  const obj = value;
  const dialectOk = obj["dialect"] === "postgresql" || obj["dialect"] === "sqlite";
  return dialectOk && Array.isArray(obj["tables"]) && typeof obj["capturedAt"] === "string" && typeof obj["checksum"] === "string";
}
113
+
114
// libs/migrations/src/lib/schema/column-type-map.ts
// Field type -> PostgreSQL column type. Anything not listed stores as TEXT.
var PG_FIELD_TYPES = /* @__PURE__ */ new Map([
  ["text", "TEXT"],
  ["textarea", "TEXT"],
  ["richText", "TEXT"],
  ["password", "TEXT"],
  ["radio", "TEXT"],
  ["point", "TEXT"],
  ["email", "VARCHAR(255)"],
  ["slug", "VARCHAR(255)"],
  ["select", "VARCHAR(255)"],
  ["number", "NUMERIC"],
  ["checkbox", "BOOLEAN"],
  ["date", "TIMESTAMPTZ"],
  ["relationship", "VARCHAR(36)"],
  ["upload", "VARCHAR(36)"],
  ["array", "JSONB"],
  ["group", "JSONB"],
  ["blocks", "JSONB"],
  ["json", "JSONB"],
  ["tabs", "TEXT"],
  ["collapsible", "TEXT"],
  ["row", "TEXT"]
]);
// Field type -> SQLite column type; everything not listed stores as TEXT.
var SQLITE_FIELD_TYPES = /* @__PURE__ */ new Map([
  ["number", "REAL"],
  ["checkbox", "INTEGER"]
]);
// Returns the PostgreSQL column type for a field config.
function fieldToPostgresType(field) {
  return PG_FIELD_TYPES.get(field.type) ?? "TEXT";
}
// Returns the SQLite column type for a field config.
function fieldToSqliteType(field) {
  return SQLITE_FIELD_TYPES.get(field.type) ?? "TEXT";
}
// Dialect-dispatching wrapper over the two mappers above.
function fieldToColumnType(field, dialect) {
  return dialect === "postgresql" ? fieldToPostgresType(field) : fieldToSqliteType(field);
}
184
// Normalizes a raw column type string (any case, padded) to this library's
// canonical spelling for the given dialect.
function normalizeColumnType(rawType, dialect) {
  const upper = rawType.toUpperCase().trim();
  return dialect === "postgresql" ? normalizePgType(upper) : normalizeSqliteType(upper);
}
// Canonicalizes information_schema spellings. Types that are already
// canonical (BOOLEAN, NUMERIC, TEXT, JSONB, INTEGER, ...) pass through.
function normalizePgType(type) {
  const charVaryingMatch = type.match(/^CHARACTER VARYING\((\d+)\)$/);
  if (charVaryingMatch) {
    return `VARCHAR(${charVaryingMatch[1]})`;
  }
  switch (type) {
    case "CHARACTER VARYING":
      return "VARCHAR(255)";
    case "TIMESTAMP WITH TIME ZONE":
      return "TIMESTAMPTZ";
    case "TIMESTAMP WITHOUT TIME ZONE":
      return "TIMESTAMP";
    default:
      return type;
  }
}
// Collapses SQLite type aliases onto their storage-class names.
function normalizeSqliteType(type) {
  switch (type) {
    case "INT":
    case "INTEGER":
      return "INTEGER";
    case "REAL":
    case "FLOAT":
    case "DOUBLE":
      return "REAL";
    default:
      return type;
  }
}
// Two raw types are compatible when they normalize to the same canonical form.
function areTypesCompatible(typeA, typeB, dialect) {
  return normalizeColumnType(typeA, dialect) === normalizeColumnType(typeB, dialect);
}
233
+
234
// libs/migrations/src/lib/schema/introspect-postgres.ts
// Reads the live PostgreSQL schema for one namespace and returns it as a
// SchemaSnapshot. `queryFn(sql, params)` must execute parameterized SQL and
// resolve to the result rows. Framework-internal tables are skipped.
async function introspectPostgres(queryFn, schema = "public") {
  // The four catalog queries are independent, so run them in parallel.
  const [columnRows, fkRows, indexRows, pkRows] = await Promise.all([
    queryFn(
      `SELECT table_name, column_name, data_type, character_maximum_length, is_nullable, column_default
      FROM information_schema.columns
      WHERE table_schema = $1
      ORDER BY table_name, ordinal_position`,
      [schema]
    ),
    queryFn(
      `SELECT
      tc.table_name,
      tc.constraint_name,
      kcu.column_name,
      ccu.table_name AS foreign_table_name,
      ccu.column_name AS foreign_column_name,
      rc.delete_rule
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
      ON tc.constraint_name = kcu.constraint_name
      AND tc.table_schema = kcu.table_schema
      JOIN information_schema.constraint_column_usage ccu
      ON ccu.constraint_name = tc.constraint_name
      AND ccu.table_schema = tc.table_schema
      JOIN information_schema.referential_constraints rc
      ON rc.constraint_name = tc.constraint_name
      AND rc.constraint_schema = tc.constraint_schema
      WHERE tc.constraint_type = 'FOREIGN KEY'
      AND tc.table_schema = $1
      ORDER BY tc.table_name, tc.constraint_name`,
      [schema]
    ),
    queryFn(
      `SELECT tablename, indexname, indexdef
      FROM pg_indexes
      WHERE schemaname = $1
      ORDER BY tablename, indexname`,
      [schema]
    ),
    queryFn(
      `SELECT tc.table_name, kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
      ON tc.constraint_name = kcu.constraint_name
      AND tc.table_schema = kcu.table_schema
      WHERE tc.constraint_type = 'PRIMARY KEY'
      AND tc.table_schema = $1`,
      [schema]
    )
  ]);
  // table name -> set of primary-key column names.
  const pkLookup = /* @__PURE__ */ new Map();
  for (const row2 of pkRows) {
    const tableName = row2.table_name;
    if (!pkLookup.has(tableName)) {
      pkLookup.set(tableName, /* @__PURE__ */ new Set());
    }
    pkLookup.get(tableName).add(row2.column_name);
  }
  // Group columns by table (ordinal order preserved by the query's ORDER BY),
  // excluding framework-internal tables.
  const tableColumnsMap = /* @__PURE__ */ new Map();
  for (const row2 of columnRows) {
    const tableName = row2.table_name;
    if (INTERNAL_TABLES.has(tableName))
      continue;
    if (!tableColumnsMap.has(tableName)) {
      tableColumnsMap.set(tableName, []);
    }
    const rawType = buildPgColumnType(row2);
    const pkSet = pkLookup.get(tableName);
    tableColumnsMap.get(tableName).push({
      name: row2.column_name,
      type: normalizeColumnType(rawType, "postgresql"),
      nullable: row2.is_nullable === "YES",
      defaultValue: row2.column_default,
      isPrimaryKey: pkSet?.has(row2.column_name) ?? false
    });
  }
  // Group foreign keys by table.
  const tableFkMap = /* @__PURE__ */ new Map();
  for (const row2 of fkRows) {
    const tableName = row2.table_name;
    if (INTERNAL_TABLES.has(tableName))
      continue;
    if (!tableFkMap.has(tableName)) {
      tableFkMap.set(tableName, []);
    }
    tableFkMap.get(tableName).push({
      constraintName: row2.constraint_name,
      column: row2.column_name,
      referencedTable: row2.foreign_table_name,
      referencedColumn: row2.foreign_column_name,
      onDelete: row2.delete_rule
    });
  }
  const fkConstraintNames = new Set(fkRows.map((r) => r.constraint_name));
  // Group user indexes by table, skipping PK indexes and indexes that back
  // FK constraints (those are represented by the FK list instead).
  const tableIndexMap = /* @__PURE__ */ new Map();
  for (const row2 of indexRows) {
    const tableName = row2.tablename;
    if (INTERNAL_TABLES.has(tableName))
      continue;
    if (row2.indexdef.includes("PRIMARY KEY"))
      continue;
    if (fkConstraintNames.has(row2.indexname))
      continue;
    if (row2.indexname.endsWith("_pkey"))
      continue;
    if (!tableIndexMap.has(tableName)) {
      tableIndexMap.set(tableName, []);
    }
    const columns = extractIndexColumns(row2.indexdef);
    const unique = row2.indexdef.toUpperCase().includes("UNIQUE");
    tableIndexMap.get(tableName).push({
      name: row2.indexname,
      columns,
      unique
    });
  }
  // Assemble one table snapshot per table that has columns.
  const tables = [];
  for (const [tableName, columns] of tableColumnsMap) {
    tables.push({
      name: tableName,
      columns,
      foreignKeys: tableFkMap.get(tableName) ?? [],
      indexes: tableIndexMap.get(tableName) ?? []
    });
  }
  return createSchemaSnapshot("postgresql", tables);
}
361
// Reconstructs the full type string from an information_schema.columns row,
// re-attaching the length to CHARACTER/CHARACTER VARYING.
function buildPgColumnType(row2) {
  const dataType = row2.data_type.toUpperCase();
  switch (dataType) {
    case "CHARACTER VARYING":
      // Missing length means an unbounded varchar; treat it as the default 255.
      return `VARCHAR(${row2.character_maximum_length ?? 255})`;
    case "CHARACTER":
      return `CHAR(${row2.character_maximum_length ?? 1})`;
    default:
      return dataType;
  }
}
373
// Pulls the column list out of the trailing "(...)" of a pg_indexes indexdef,
// stripping surrounding double quotes from each identifier.
function extractIndexColumns(indexDef) {
  const match = indexDef.match(/\(([^)]+)\)\s*$/);
  if (!match) {
    return [];
  }
  const columns = [];
  for (const part of match[1].split(",")) {
    const name = part.trim().replace(/^"/, "").replace(/"$/, "");
    if (name.length > 0) {
      columns.push(name);
    }
  }
  return columns;
}
379
+
380
// libs/migrations/src/lib/schema/introspect-sqlite.ts
// Reads the live SQLite schema via sqlite_master + PRAGMAs and returns it as
// a SchemaSnapshot. `queryFn(sql)` resolves to result rows. Table names come
// from sqlite_master (not user input) before being interpolated into PRAGMAs.
async function introspectSqlite(queryFn) {
  const masterRows = await queryFn(
    `SELECT name, type, sql FROM sqlite_master WHERE type = 'table' AND name NOT LIKE 'sqlite_%' ORDER BY name`
  );
  const tables = [];
  for (const masterRow of masterRows) {
    const tableName = masterRow.name;
    // Framework-internal tables are never part of the snapshot.
    if (INTERNAL_TABLES.has(tableName))
      continue;
    const columnRows = await queryFn(
      `PRAGMA table_info("${tableName}")`
    );
    const columns = columnRows.map((row2) => ({
      name: row2.name,
      // An untyped column (empty type) is treated as TEXT.
      type: normalizeColumnType(row2.type || "TEXT", "sqlite"),
      nullable: row2.notnull === 0,
      defaultValue: row2.dflt_value,
      // pk is the 1-based position within the primary key, 0 if not part of it.
      isPrimaryKey: row2.pk > 0
    }));
    const fkRows = await queryFn(
      `PRAGMA foreign_key_list("${tableName}")`
    );
    // SQLite FKs are anonymous; synthesize a deterministic constraint name.
    const foreignKeys = fkRows.map((row2) => ({
      constraintName: `fk_${tableName}_${row2.from}`,
      column: row2.from,
      referencedTable: row2.table,
      referencedColumn: row2.to,
      onDelete: row2.on_delete
    }));
    const indexListRows = await queryFn(
      `PRAGMA index_list("${tableName}")`
    );
    const indexes = [];
    for (const idxRow of indexListRows) {
      // Skip the implicit primary-key index.
      if (idxRow.origin === "pk")
        continue;
      const indexInfoRows = await queryFn(
        `PRAGMA index_info("${idxRow.name}")`
      );
      // Restore column order by seqno; expression columns have no name and
      // are filtered out.
      const indexColumns = indexInfoRows.sort((a, b) => a.seqno - b.seqno).map((r) => r.name).filter((n) => n.length > 0);
      if (indexColumns.length > 0) {
        indexes.push({
          name: idxRow.name,
          columns: indexColumns,
          unique: idxRow.unique === 1
        });
      }
    }
    tables.push({
      name: tableName,
      columns,
      foreignKeys,
      indexes
    });
  }
  return createSchemaSnapshot("sqlite", tables);
}
438
+
439
// libs/core/src/lib/collections/define-collection.ts
// Validates a collection config and applies defaults (timestamps enabled).
// Throws when the slug is missing or not kebab-case, or no fields are given.
function defineCollection(config) {
  // Enable timestamps by default; the caller's config can override it.
  const collection = { timestamps: true, ...config };
  if (!collection.slug) {
    throw new Error("Collection must have a slug");
  }
  const fieldCount = collection.fields ? collection.fields.length : 0;
  if (fieldCount === 0) {
    throw new Error(`Collection "${collection.slug}" must have at least one field`);
  }
  const kebabCase = /^[a-z][a-z0-9-]*$/;
  if (!kebabCase.test(collection.slug)) {
    throw new Error(
      `Collection slug "${collection.slug}" must be kebab-case (lowercase letters, numbers, and hyphens, starting with a letter)`
    );
  }
  return collection;
}
459
// Returns the soft-delete column name for a collection, or null when soft
// delete is disabled. `softDelete: true` and object configs without an
// explicit field both default to "deletedAt".
function getSoftDeleteField(config) {
  const softDelete = config.softDelete;
  if (!softDelete) {
    return null;
  }
  if (softDelete === true) {
    return "deletedAt";
  }
  return softDelete.field ?? "deletedAt";
}
467
+
468
// libs/core/src/lib/fields/field.types.ts
// A tab with a non-empty string name stores its fields under that key.
function isNamedTab(tab) {
  return typeof tab.name === "string" && tab.name.length > 0;
}
// Flattens layout-only wrappers (tabs/collapsible/row) into the list of
// actual data-bearing fields. A named tab is converted to a synthetic
// "group" field; unnamed tabs and other wrappers are flattened recursively.
function flattenDataFields(fields) {
  return fields.flatMap((field) => {
    if (field.type === "tabs") {
      return field.tabs.flatMap((tab) => {
        if (isNamedTab(tab)) {
          const syntheticGroup = {
            name: tab.name,
            type: "group",
            label: tab.label,
            description: tab.description,
            fields: tab.fields
          };
          return [syntheticGroup];
        }
        return flattenDataFields(tab.fields);
      });
    }
    if (field.type === "collapsible" || field.type === "row") {
      return flattenDataFields(field.fields);
    }
    return [field];
  });
}
498
+
499
// libs/core/src/lib/fields/field-builders.ts
// Factory helpers for field configs; later keys in `options` can override
// the defaults, including `type`.
function text(name, options = {}) {
  return Object.assign({ name, type: "text" }, options);
}
function number(name, options = {}) {
  return Object.assign({ name, type: "number" }, options);
}
function json(name, options = {}) {
  return Object.assign({ name, type: "json" }, options);
}
521
+
522
// libs/core/src/lib/collections/media.collection.ts
// Built-in uploads collection: file metadata columns plus default access
// rules (public read, authenticated write).
var MediaCollection = defineCollection({
  slug: "media",
  labels: {
    singular: "Media",
    plural: "Media"
  },
  upload: {
    mimeTypes: ["image/*", "application/pdf", "video/*", "audio/*"]
  },
  admin: {
    useAsTitle: "filename",
    defaultColumns: ["filename", "mimeType", "filesize", "createdAt"]
  },
  fields: [
    text("filename", {
      required: true,
      label: "Filename",
      description: "Original filename of the uploaded file"
    }),
    text("mimeType", {
      required: true,
      label: "MIME Type",
      description: "File MIME type (e.g., image/jpeg, application/pdf)"
    }),
    number("filesize", {
      label: "File Size",
      description: "File size in bytes"
    }),
    text("path", {
      label: "Storage Path",
      description: "Path/key where the file is stored",
      admin: {
        hidden: true
      }
    }),
    text("url", {
      label: "URL",
      description: "Public URL to access the file"
    }),
    text("alt", {
      label: "Alt Text",
      description: "Alternative text for accessibility"
    }),
    number("width", {
      label: "Width",
      description: "Image width in pixels (for images only)"
    }),
    number("height", {
      label: "Height",
      description: "Image height in pixels (for images only)"
    }),
    json("focalPoint", {
      label: "Focal Point",
      description: "Focal point coordinates for image cropping",
      admin: {
        hidden: true
      }
    })
  ],
  access: {
    // Media is readable by anyone by default
    read: () => true,
    // Only authenticated users can create/update/delete
    create: ({ req }) => !!req?.user,
    update: ({ req }) => !!req?.user,
    delete: ({ req }) => !!req?.user
  }
});
591
+
592
// libs/migrations/src/lib/schema/collections-to-schema.ts
// Maps a field-level onDelete setting to its SQL referential action. A
// required relationship can never be nulled out, so a missing or "set-null"
// setting is upgraded to RESTRICT when `required` is true.
function mapOnDelete(onDelete, required) {
  let effective = onDelete;
  if (required && (!onDelete || onDelete === "set-null")) {
    effective = "restrict";
  }
  if (effective === "restrict") {
    return "RESTRICT";
  }
  if (effective === "cascade") {
    return "CASCADE";
  }
  return "SET NULL";
}
604
// Physical table name for a collection: explicit dbName wins over the slug.
function getTableName(collection) {
  const { dbName, slug } = collection;
  return dbName ?? slug;
}
607
// True only when versioning is configured as an object with drafts enabled.
// A bare `versions: true` enables versioning but NOT drafts.
function hasVersionDrafts(collection) {
  const versions = collection.versions;
  if (!versions || typeof versions === "boolean") {
    return false;
  }
  return Boolean(versions.drafts);
}
615
// Structural check for a collection config object.
function isCollectionConfig(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  return "slug" in value && "fields" in value;
}
// Invokes a lazy collection reference. Returns null when the thunk throws
// or produces something that is not a collection config.
function resolveCollectionRef(ref) {
  try {
    const resolved = ref();
    return isCollectionConfig(resolved) ? resolved : null;
  } catch {
    return null;
  }
}
629
// Columns every collection table gets automatically: the primary key,
// optional createdAt/updatedAt timestamps, a draft-status column when
// version drafts are enabled, and the soft-delete marker when configured.
function buildAutoColumns(collection, dialect) {
  const pg = dialect === "postgresql";
  const timestampType = pg ? "TIMESTAMPTZ" : "TEXT";
  const column = (name, type, nullable, defaultValue) => ({
    name,
    type,
    nullable,
    defaultValue,
    isPrimaryKey: false
  });
  const columns = [{
    name: "id",
    type: pg ? "VARCHAR(36)" : "TEXT",
    nullable: false,
    defaultValue: null,
    isPrimaryKey: true
  }];
  const timestamps = collection.timestamps;
  // `timestamps` may be true/false/undefined or a per-column object; a
  // column is wanted unless explicitly disabled.
  const wantsTimestamp = (key) => {
    if (timestamps === false) {
      return false;
    }
    if (timestamps === true || timestamps === void 0) {
      return true;
    }
    return timestamps[key] !== false;
  };
  if (wantsTimestamp("createdAt")) {
    columns.push(column("createdAt", timestampType, false, null));
  }
  if (wantsTimestamp("updatedAt")) {
    columns.push(column("updatedAt", timestampType, false, null));
  }
  if (hasVersionDrafts(collection)) {
    columns.push(column("_status", pg ? "VARCHAR(20)" : "TEXT", false, "'draft'"));
  }
  const softDeleteCol = getSoftDeleteField(collection);
  if (softDeleteCol) {
    columns.push(column(softDeleteCol, timestampType, true, null));
  }
  return columns;
}
680
// Maps a single data field to its column definition; optional fields are
// nullable, and no field-level default is emitted.
function fieldToColumn(field, dialect) {
  const nullable = !field.required;
  return {
    name: field.name,
    type: fieldToColumnType(field, dialect),
    nullable,
    defaultValue: null,
    isPrimaryKey: false
  };
}
689
// Derives FK constraints from single-value, single-target relationship
// fields. hasMany and polymorphic (non-empty relationTo) relationships get
// no FK; unresolvable collection refs are skipped silently.
function buildForeignKeys(tableName, fields) {
  const foreignKeys = [];
  for (const field of fields) {
    const isSingleRelation = field.type === "relationship" && !field.hasMany && !(field.relationTo && field.relationTo.length > 0);
    if (!isSingleRelation) {
      continue;
    }
    const target = resolveCollectionRef(field.collection);
    if (!target) {
      continue;
    }
    foreignKeys.push({
      constraintName: `fk_${tableName}_${field.name}`,
      column: field.name,
      referencedTable: getTableName(target),
      referencedColumn: "id",
      onDelete: mapOnDelete(field.onDelete, !!field.required)
    });
  }
  return foreignKeys;
}
713
// Index definitions for a collection table: an index on the soft-delete
// column (when enabled) plus any explicitly configured collection indexes.
function buildIndexes(tableName, collection) {
  const indexes = [];
  const sdField = getSoftDeleteField(collection);
  if (sdField) {
    indexes.push({
      name: `idx_${tableName}_${sdField}`,
      columns: [sdField],
      unique: false
    });
  }
  if (collection.indexes) {
    for (const idx of collection.indexes) {
      // Unnamed indexes get a deterministic name from the column list.
      const name = idx.name ?? `idx_${tableName}_${idx.columns.join("_")}`;
      indexes.push({ name, columns: [...idx.columns], unique: !!idx.unique });
    }
  }
  return indexes;
}
734
// Builds the "<table>_versions" side-table snapshot for a collection with
// versioning enabled (null when versioning is off). The document payload is
// stored whole in the `version` column; `parent` references the base row
// and cascades on delete.
function buildVersionTable(collection, dialect) {
  if (!collection.versions)
    return null;
  const baseTable = getTableName(collection);
  const tableName = `${baseTable}_versions`;
  const columns = [
    {
      name: "id",
      type: dialect === "postgresql" ? "VARCHAR(36)" : "TEXT",
      nullable: false,
      defaultValue: null,
      isPrimaryKey: true
    },
    {
      name: "parent",
      type: dialect === "postgresql" ? "VARCHAR(36)" : "TEXT",
      nullable: false,
      defaultValue: null,
      isPrimaryKey: false
    },
    {
      // Serialized document snapshot for this version.
      name: "version",
      type: "TEXT",
      nullable: false,
      defaultValue: null,
      isPrimaryKey: false
    },
    {
      name: "_status",
      type: dialect === "postgresql" ? "VARCHAR(20)" : "TEXT",
      nullable: false,
      defaultValue: "'draft'",
      isPrimaryKey: false
    },
    {
      name: "autosave",
      type: dialect === "postgresql" ? "BOOLEAN" : "INTEGER",
      nullable: false,
      defaultValue: dialect === "postgresql" ? "false" : "0",
      isPrimaryKey: false
    },
    {
      name: "publishedAt",
      type: dialect === "postgresql" ? "TIMESTAMPTZ" : "TEXT",
      nullable: true,
      defaultValue: null,
      isPrimaryKey: false
    },
    {
      name: "createdAt",
      type: dialect === "postgresql" ? "TIMESTAMPTZ" : "TEXT",
      nullable: false,
      defaultValue: null,
      isPrimaryKey: false
    },
    {
      name: "updatedAt",
      type: dialect === "postgresql" ? "TIMESTAMPTZ" : "TEXT",
      nullable: false,
      defaultValue: null,
      isPrimaryKey: false
    }
  ];
  const foreignKeys = [
    {
      constraintName: `fk_${tableName}_parent`,
      column: "parent",
      referencedTable: baseTable,
      referencedColumn: "id",
      onDelete: "CASCADE"
    }
  ];
  const indexes = [
    { name: `idx_${tableName}_parent`, columns: ["parent"], unique: false },
    { name: `idx_${tableName}_status`, columns: ["_status"], unique: false },
    { name: `idx_${tableName}_createdAt`, columns: ["createdAt"], unique: false }
  ];
  return { name: tableName, columns, foreignKeys, indexes };
}
813
// Builds the table snapshot for one collection: auto columns first, then
// one column per flattened data field, plus derived FKs and indexes.
function collectionToTableSnapshot(collection, dialect) {
  const tableName = getTableName(collection);
  const dataFields = flattenDataFields(collection.fields);
  const columns = buildAutoColumns(collection, dialect);
  for (const field of dataFields) {
    columns.push(fieldToColumn(field, dialect));
  }
  return {
    name: tableName,
    columns,
    foreignKeys: buildForeignKeys(tableName, dataFields),
    indexes: buildIndexes(tableName, collection)
  };
}
824
// Converts collection configs into a full schema snapshot, emitting the
// versions side-table immediately after each versioned collection's table.
function collectionsToSchema(collections, dialect) {
  const tables = collections.flatMap((collection) => {
    const base = collectionToTableSnapshot(collection, dialect);
    const versionTable = buildVersionTable(collection, dialect);
    return versionTable ? [base, versionTable] : [base];
  });
  return createSchemaSnapshot(dialect, tables);
}
835
+
836
// libs/migrations/src/lib/schema/schema-diff.ts
var DEFAULT_DIFF_OPTIONS = {
  detectRenames: true,
  renameSimilarityThreshold: 0.6
};
// Computes the ordered list of migration operations that turn `actual` into
// `desired`: new tables (with their FKs and indexes) first, then dropped
// tables, then per-table column/FK/index changes. `summary` holds
// human-readable one-liners; note FK/index ops for new tables get no
// summary entry of their own.
function diffSchemas(desired, actual, dialect, options) {
  const opts = { ...DEFAULT_DIFF_OPTIONS, ...options };
  const operations = [];
  const summary = [];
  const desiredMap = /* @__PURE__ */ new Map();
  const actualMap = /* @__PURE__ */ new Map();
  for (const t of desired.tables)
    desiredMap.set(t.name, t);
  for (const t of actual.tables)
    actualMap.set(t.name, t);
  // Tables present only in the desired schema: create them wholesale.
  for (const [name, desiredTable] of desiredMap) {
    if (!actualMap.has(name)) {
      operations.push({
        type: "createTable",
        table: name,
        columns: desiredTable.columns.map((c) => ({
          name: c.name,
          type: c.type,
          nullable: c.nullable,
          defaultValue: c.defaultValue ?? void 0,
          primaryKey: c.isPrimaryKey || void 0
        }))
      });
      summary.push(`Create table "${name}"`);
      for (const fk of desiredTable.foreignKeys) {
        operations.push({
          type: "addForeignKey",
          table: name,
          constraintName: fk.constraintName,
          column: fk.column,
          referencedTable: fk.referencedTable,
          referencedColumn: fk.referencedColumn,
          onDelete: fk.onDelete
        });
      }
      for (const idx of desiredTable.indexes) {
        operations.push({
          type: "createIndex",
          table: name,
          indexName: idx.name,
          columns: idx.columns,
          unique: idx.unique
        });
      }
    }
  }
  // Tables present only in the live schema: drop them.
  for (const [name] of actualMap) {
    if (!desiredMap.has(name)) {
      operations.push({ type: "dropTable", table: name });
      summary.push(`Drop table "${name}"`);
    }
  }
  // Tables present on both sides: diff their internals.
  for (const [name, desiredTable] of desiredMap) {
    const actualTable = actualMap.get(name);
    if (!actualTable)
      continue;
    const tableOps = diffTable(desiredTable, actualTable, dialect, opts);
    operations.push(...tableOps.operations);
    summary.push(...tableOps.summary);
  }
  return {
    hasChanges: operations.length > 0,
    operations,
    summary
  };
}
907
// Diffs one table that exists on both sides, concatenating the column,
// foreign-key and index diffs in that order.
function diffTable(desired, actual, dialect, opts) {
  const tableName = desired.name;
  const parts = [
    diffColumns(tableName, desired.columns, actual.columns, dialect, opts),
    diffForeignKeys(tableName, desired.foreignKeys, actual.foreignKeys),
    diffIndexes(tableName, desired.indexes, actual.indexes)
  ];
  const operations = [];
  const summary = [];
  for (const part of parts) {
    operations.push(...part.operations);
    summary.push(...part.summary);
  }
  return { operations, summary };
}
922
// Diffs one table's columns. When rename detection is on, a column missing
// from the live schema is greedily paired with the first type-compatible
// extra live column and emitted as a rename instead of an add+drop (NOTE:
// pairing is first-match, not similarity-ranked; renameSimilarityThreshold
// is not consulted here). Remaining differences become add/drop ops, then
// shared columns are compared for type, nullability and default changes.
function diffColumns(tableName, desiredColumns, actualColumns, dialect, opts) {
  const operations = [];
  const summary = [];
  const desiredMap = /* @__PURE__ */ new Map();
  const actualMap = /* @__PURE__ */ new Map();
  for (const c of desiredColumns)
    desiredMap.set(c.name, c);
  for (const c of actualColumns)
    actualMap.set(c.name, c);
  const renamedFrom = /* @__PURE__ */ new Set();
  const renamedTo = /* @__PURE__ */ new Set();
  if (opts.detectRenames) {
    const missingInActual = [...desiredMap.keys()].filter((k) => !actualMap.has(k));
    const extraInActual = [...actualMap.keys()].filter((k) => !desiredMap.has(k));
    for (const newName of missingInActual) {
      const desiredCol = desiredMap.get(newName);
      for (const oldName of extraInActual) {
        // Each live column can be the source of at most one rename.
        if (renamedFrom.has(oldName))
          continue;
        const actualCol = actualMap.get(oldName);
        if (areTypesCompatible(desiredCol.type, actualCol.type, dialect)) {
          operations.push({
            type: "renameColumn",
            table: tableName,
            from: oldName,
            to: newName
          });
          summary.push(
            `Rename column "${tableName}"."${oldName}" \u2192 "${newName}"`
          );
          renamedFrom.add(oldName);
          renamedTo.add(newName);
          break;
        }
      }
    }
  }
  // Desired-only columns (not satisfied by a rename): add.
  for (const [name, desiredCol] of desiredMap) {
    if (actualMap.has(name) || renamedTo.has(name))
      continue;
    operations.push({
      type: "addColumn",
      table: tableName,
      column: name,
      columnType: desiredCol.type,
      nullable: desiredCol.nullable,
      defaultValue: desiredCol.defaultValue ?? void 0
    });
    summary.push(`Add column "${tableName}"."${name}" (${desiredCol.type})`);
  }
  // Live-only columns (not consumed by a rename): drop, remembering the
  // previous shape so the operation can be reversed.
  for (const [name, actualCol] of actualMap) {
    if (desiredMap.has(name) || renamedFrom.has(name))
      continue;
    operations.push({
      type: "dropColumn",
      table: tableName,
      column: name,
      previousType: actualCol.type,
      previousNullable: actualCol.nullable
    });
    summary.push(`Drop column "${tableName}"."${name}"`);
  }
  // Columns on both sides: compare type, nullability and default value.
  for (const [name, desiredCol] of desiredMap) {
    const actualCol = actualMap.get(name);
    if (!actualCol)
      continue;
    if (!areTypesCompatible(desiredCol.type, actualCol.type, dialect)) {
      operations.push({
        type: "alterColumnType",
        table: tableName,
        column: name,
        fromType: normalizeColumnType(actualCol.type, dialect),
        toType: normalizeColumnType(desiredCol.type, dialect)
      });
      summary.push(
        `Change type "${tableName}"."${name}": ${actualCol.type} \u2192 ${desiredCol.type}`
      );
    }
    if (desiredCol.nullable !== actualCol.nullable) {
      operations.push({
        type: "alterColumnNullable",
        table: tableName,
        column: name,
        nullable: desiredCol.nullable
      });
      summary.push(
        `Change nullable "${tableName}"."${name}": ${actualCol.nullable} \u2192 ${desiredCol.nullable}`
      );
    }
    if (normalizeDefault(desiredCol.defaultValue) !== normalizeDefault(actualCol.defaultValue)) {
      operations.push({
        type: "alterColumnDefault",
        table: tableName,
        column: name,
        defaultValue: desiredCol.defaultValue,
        previousDefault: actualCol.defaultValue
      });
      summary.push(
        `Change default "${tableName}"."${name}": ${actualCol.defaultValue ?? "NULL"} \u2192 ${desiredCol.defaultValue ?? "NULL"}`
      );
    }
  }
  return { operations, summary };
}
1026
// Collapses the three "no default" spellings (null, undefined, empty string)
// to null so defaults compare equal during schema diffing.
function normalizeDefault(value) {
  const absent = value === null || value === undefined || value === "";
  return absent ? null : value;
}
1031
// Diffs foreign keys by constraint name: new names are added, vanished names
// are dropped, and a name whose definition changed is dropped and re-added.
function diffForeignKeys(tableName, desiredFks, actualFks) {
  const operations = [];
  const summary = [];
  const desiredMap = new Map(desiredFks.map((fk) => [fk.constraintName, fk]));
  const actualMap = new Map(actualFks.map((fk) => [fk.constraintName, fk]));
  // Shared builder for the addForeignKey operation shape.
  const pushAdd = (fk) => {
    operations.push({
      type: "addForeignKey",
      table: tableName,
      constraintName: fk.constraintName,
      column: fk.column,
      referencedTable: fk.referencedTable,
      referencedColumn: fk.referencedColumn,
      onDelete: fk.onDelete
    });
  };
  for (const [name, fk] of desiredMap) {
    const existing = actualMap.get(name);
    if (!existing) {
      pushAdd(fk);
      summary.push(`Add foreign key "${name}" on "${tableName}"`);
      continue;
    }
    const definitionChanged = fk.column !== existing.column || fk.referencedTable !== existing.referencedTable || fk.referencedColumn !== existing.referencedColumn || fk.onDelete !== existing.onDelete;
    if (definitionChanged) {
      // FK definitions cannot be altered in place: drop then re-add.
      operations.push({
        type: "dropForeignKey",
        table: tableName,
        constraintName: name
      });
      pushAdd(fk);
      summary.push(`Modify foreign key "${name}" on "${tableName}"`);
    }
  }
  for (const name of actualMap.keys()) {
    if (!desiredMap.has(name)) {
      operations.push({
        type: "dropForeignKey",
        table: tableName,
        constraintName: name
      });
      summary.push(`Drop foreign key "${name}" on "${tableName}"`);
    }
  }
  return { operations, summary };
}
1085
// Diffs indexes by name: new names are created, vanished names are dropped,
// and a name whose columns or uniqueness changed is dropped and re-created.
function diffIndexes(tableName, desiredIdxs, actualIdxs) {
  const operations = [];
  const summary = [];
  const desiredMap = new Map(desiredIdxs.map((idx) => [idx.name, idx]));
  const actualMap = new Map(actualIdxs.map((idx) => [idx.name, idx]));
  // Shared builder for the createIndex operation shape.
  const createOp = (idx) => ({
    type: "createIndex",
    table: tableName,
    indexName: idx.name,
    columns: idx.columns,
    unique: idx.unique
  });
  for (const [name, idx] of desiredMap) {
    const existing = actualMap.get(name);
    if (!existing) {
      operations.push(createOp(idx));
      summary.push(`Create index "${name}" on "${tableName}"`);
      continue;
    }
    // Column lists are compared structurally (order matters for indexes).
    const sameColumns = JSON.stringify(idx.columns) === JSON.stringify(existing.columns);
    if (idx.unique !== existing.unique || !sameColumns) {
      operations.push({
        type: "dropIndex",
        table: tableName,
        indexName: name
      });
      operations.push(createOp(idx));
      summary.push(`Modify index "${name}" on "${tableName}"`);
    }
  }
  for (const name of actualMap.keys()) {
    if (!desiredMap.has(name)) {
      operations.push({
        type: "dropIndex",
        table: tableName,
        indexName: name
      });
      summary.push(`Drop index "${name}" on "${tableName}"`);
    }
  }
  return { operations, summary };
}
1135
+
1136
+ // libs/migrations/src/lib/generator/sql-generator.ts
1137
// Renders one migration operation as its forward ("up") SQL statement.
// Complex operations delegate to dedicated generators; trivial ones are
// rendered inline. Unknown operation types yield undefined, matching the
// original non-exhaustive switch.
function operationToSql(op, dialect) {
  const renderers = {
    createTable: () => generateCreateTable(op, dialect),
    dropTable: () => `DROP TABLE IF EXISTS "${op.table}"`,
    renameTable: () => `ALTER TABLE "${op.from}" RENAME TO "${op.to}"`,
    addColumn: () => generateAddColumn(op, dialect),
    dropColumn: () => generateDropColumn(op, dialect),
    alterColumnType: () => generateAlterColumnType(op, dialect),
    alterColumnNullable: () => generateAlterColumnNullable(op, dialect),
    alterColumnDefault: () => generateAlterColumnDefault(op, dialect),
    renameColumn: () => `ALTER TABLE "${op.table}" RENAME COLUMN "${op.from}" TO "${op.to}"`,
    addForeignKey: () => generateAddForeignKey(op, dialect),
    dropForeignKey: () => generateDropForeignKey(op, dialect),
    createIndex: () => generateCreateIndex(op),
    dropIndex: () => `DROP INDEX IF EXISTS "${op.indexName}"`,
    rawSql: () => op.upSql
  };
  // hasOwnProperty guard keeps inherited names (e.g. "toString") from matching.
  if (!Object.prototype.hasOwnProperty.call(renderers, op.type))
    return void 0;
  return renderers[op.type]();
}
1169
// Renders the inverse ("down") SQL for one migration operation, or null when
// the operation cannot be reversed (dropTable, dropForeignKey, dropIndex, and
// dropColumn without recorded previous type). Unknown types yield undefined,
// matching the original non-exhaustive switch.
function operationToReverseSql(op, dialect) {
  const renderers = {
    createTable: () => `DROP TABLE IF EXISTS "${op.table}"`,
    dropTable: () => null,
    renameTable: () => `ALTER TABLE "${op.to}" RENAME TO "${op.from}"`,
    addColumn: () => `ALTER TABLE "${op.table}" DROP COLUMN "${op.column}"`,
    dropColumn: () => {
      // Re-adding is only possible when the previous column shape was recorded.
      if (!op.previousType)
        return null;
      const notNull = op.previousNullable !== false ? "" : " NOT NULL";
      return `ALTER TABLE "${op.table}" ADD COLUMN "${op.column}" ${op.previousType}${notNull}`;
    },
    // The alter* inverses reuse the forward generators with swapped fields.
    alterColumnType: () => generateAlterColumnType(
      { ...op, fromType: op.toType, toType: op.fromType },
      dialect
    ),
    alterColumnNullable: () => generateAlterColumnNullable(
      { ...op, nullable: !op.nullable },
      dialect
    ),
    alterColumnDefault: () => generateAlterColumnDefault(
      {
        ...op,
        defaultValue: op.previousDefault,
        previousDefault: op.defaultValue
      },
      dialect
    ),
    renameColumn: () => `ALTER TABLE "${op.table}" RENAME COLUMN "${op.to}" TO "${op.from}"`,
    addForeignKey: () => generateDropForeignKey(
      { type: "dropForeignKey", table: op.table, constraintName: op.constraintName },
      dialect
    ),
    dropForeignKey: () => null,
    createIndex: () => `DROP INDEX IF EXISTS "${op.indexName}"`,
    dropIndex: () => null,
    rawSql: () => op.downSql
  };
  if (!Object.prototype.hasOwnProperty.call(renderers, op.type))
    return void 0;
  return renderers[op.type]();
}
1221
// Renders every operation, in order, as its forward SQL statement.
function operationsToUpSql(operations, dialect) {
  const statements = [];
  for (const op of operations) {
    statements.push(operationToSql(op, dialect));
  }
  return statements;
}
1224
// Renders reverse SQL in the opposite order of application, skipping
// operations that are irreversible (those render as null).
function operationsToDownSql(operations, dialect) {
  const reversedOps = [...operations].reverse();
  const statements = reversedOps.map((op) => operationToReverseSql(op, dialect));
  return statements.filter((sql) => sql !== null);
}
1227
// Renders a CREATE TABLE statement. The dialect is currently unused: the
// emitted syntax is common to PostgreSQL and SQLite.
function generateCreateTable(op, _dialect) {
  const columnDefs = op.columns.map((c) => {
    let def = `"${c.name}" ${c.type}`;
    if (c.primaryKey)
      def += " PRIMARY KEY";
    if (!c.nullable)
      def += " NOT NULL";
    // NOTE(review): truthiness check skips falsy defaults; fine for the usual
    // string-valued defaults ("0" is truthy) but a literal 0/false default
    // would be dropped — confirm defaultValue is always a string here.
    if (c.defaultValue)
      def += ` DEFAULT ${c.defaultValue}`;
    return def;
  });
  return `CREATE TABLE "${op.table}" (
  ${columnDefs.join(",\n  ")}
)`;
}
1242
// Renders an ALTER TABLE ... ADD COLUMN statement (dialect-independent syntax).
function generateAddColumn(op, _dialect) {
  const parts = [`ALTER TABLE "${op.table}" ADD COLUMN "${op.column}" ${op.columnType}`];
  if (!op.nullable) {
    parts.push(" NOT NULL");
  }
  if (op.defaultValue) {
    parts.push(` DEFAULT ${op.defaultValue}`);
  }
  return parts.join("");
}
1250
// Renders an ALTER TABLE ... DROP COLUMN statement (dialect-independent syntax).
function generateDropColumn(op, _dialect) {
  const { table, column } = op;
  return `ALTER TABLE "${table}" DROP COLUMN "${column}"`;
}
1253
// Renders an ALTER COLUMN ... TYPE statement. SQLite cannot alter column
// types, so the statement degrades to an explanatory SQL comment there.
function generateAlterColumnType(op, dialect) {
  const { table, column, fromType, toType, castExpression } = op;
  if (dialect === "sqlite") {
    return `-- SQLite: Cannot alter column type for "${table}"."${column}" (${fromType} \u2192 ${toType}). Requires table rebuild.`;
  }
  // Default USING clause is a plain cast of the existing value.
  const cast = castExpression ? castExpression : `"${column}"::${toType}`;
  return `ALTER TABLE "${table}" ALTER COLUMN "${column}" TYPE ${toType} USING ${cast}`;
}
1260
// Renders a SET/DROP NOT NULL statement; on SQLite this degrades to an
// explanatory SQL comment because nullability cannot be altered in place.
function generateAlterColumnNullable(op, dialect) {
  const { table, column, nullable } = op;
  if (dialect === "sqlite") {
    return `-- SQLite: Cannot alter nullable for "${table}"."${column}". Requires table rebuild.`;
  }
  const action = nullable ? "DROP NOT NULL" : "SET NOT NULL";
  return `ALTER TABLE "${table}" ALTER COLUMN "${column}" ${action}`;
}
1269
// Renders a SET/DROP DEFAULT statement; on SQLite this degrades to an
// explanatory SQL comment because defaults cannot be altered in place.
function generateAlterColumnDefault(op, dialect) {
  const { table, column, defaultValue } = op;
  if (dialect === "sqlite") {
    return `-- SQLite: Cannot alter default for "${table}"."${column}". Requires table rebuild.`;
  }
  // FIX: use a nullish check instead of `=== null`. Reverse operations build
  // this op with defaultValue taken from `previousDefault`, which may be
  // undefined (introspected columns without defaults); the old strict-null
  // check let undefined fall through and emit invalid "SET DEFAULT undefined".
  if (defaultValue == null) {
    return `ALTER TABLE "${table}" ALTER COLUMN "${column}" DROP DEFAULT`;
  }
  return `ALTER TABLE "${table}" ALTER COLUMN "${column}" SET DEFAULT ${defaultValue}`;
}
1278
// Renders an ADD CONSTRAINT ... FOREIGN KEY statement; SQLite cannot add FKs
// after table creation, so the statement degrades to an SQL comment there.
function generateAddForeignKey(op, dialect) {
  if (dialect === "sqlite") {
    return `-- SQLite: Cannot add FK "${op.constraintName}" after table creation. Requires table rebuild.`;
  }
  const reference = `"${op.referencedTable}"("${op.referencedColumn}")`;
  return `ALTER TABLE "${op.table}" ADD CONSTRAINT "${op.constraintName}" FOREIGN KEY ("${op.column}") REFERENCES ${reference} ON DELETE ${op.onDelete}`;
}
1284
// Renders a DROP CONSTRAINT statement; SQLite cannot drop FKs after table
// creation, so the statement degrades to an SQL comment there.
function generateDropForeignKey(op, dialect) {
  const sqliteNote = `-- SQLite: Cannot drop FK "${op.constraintName}" after table creation. Requires table rebuild.`;
  return dialect === "sqlite" ? sqliteNote : `ALTER TABLE "${op.table}" DROP CONSTRAINT "${op.constraintName}"`;
}
1290
// Renders a CREATE [UNIQUE] INDEX IF NOT EXISTS statement.
function generateCreateIndex(op) {
  const quotedColumns = op.columns.map((name) => `"${name}"`).join(", ");
  const kind = op.unique ? "UNIQUE INDEX" : "INDEX";
  return `CREATE ${kind} IF NOT EXISTS "${op.indexName}" ON "${op.table}" (${quotedColumns})`;
}
1295
+
1296
+ // libs/migrations/src/lib/generator/migration-file-generator.ts
1297
// Builds a migration file name "YYYYMMDDHHMMSS_<name>" from the given
// timestamp (local time), defaulting to "now" when none is supplied.
function generateMigrationName(name, timestamp) {
  const at = timestamp ?? new Date();
  const pad = (n) => String(n).padStart(2, "0");
  const stamp = [
    at.getFullYear(),
    pad(at.getMonth() + 1),
    pad(at.getDate()),
    pad(at.getHours()),
    pad(at.getMinutes()),
    pad(at.getSeconds())
  ].join("");
  return `${stamp}_${name}`;
}
1303
// Renders a TypeScript migration module (meta + up() + down()) from a schema
// diff. Statements that begin with "--" are dialect-unsupported placeholders
// (see the SQLite generators) and are emitted as code comments rather than
// executable ctx.sql() calls.
function generateMigrationFileContent(diff, options) {
  const { name, description, dialect } = options;
  // Fall back to the diff summary, or a generic label when the diff is empty.
  const desc = description ?? (diff.summary.join("; ") || "Auto-generated migration");
  const upStatements = operationsToUpSql(diff.operations, dialect);
  const downStatements = operationsToDownSql(diff.operations, dialect);
  const operationsMeta = serializeOperationsMeta(diff.operations);
  const lines = [];
  lines.push("import type { MigrationFile, MigrationContext } from '@momentumcms/migrations';");
  lines.push("");
  lines.push("export const meta: MigrationFile['meta'] = {");
  lines.push(`  name: ${JSON.stringify(name)},`);
  lines.push(`  description: ${JSON.stringify(desc)},`);
  lines.push(`  operations: ${operationsMeta},`);
  lines.push("};");
  lines.push("");
  lines.push("export async function up(ctx: MigrationContext): Promise<void> {");
  for (const sql of upStatements) {
    if (sql.startsWith("--")) {
      // slice(3) strips the "-- " marker so the note reads as a plain comment.
      lines.push(`  // ${sql.slice(3)}`);
    } else {
      // JSON.stringify safely quotes/escapes the SQL as a TS string literal.
      lines.push(`  await ctx.sql(${JSON.stringify(sql)});`);
    }
  }
  if (upStatements.length === 0) {
    lines.push("  // No operations");
  }
  lines.push("}");
  lines.push("");
  lines.push("export async function down(ctx: MigrationContext): Promise<void> {");
  for (const sql of downStatements) {
    if (sql.startsWith("--")) {
      lines.push(`  // ${sql.slice(3)}`);
    } else {
      lines.push(`  await ctx.sql(${JSON.stringify(sql)});`);
    }
  }
  if (downStatements.length === 0) {
    lines.push("  // Cannot reverse all operations");
  }
  lines.push("}");
  lines.push("");
  return lines.join("\n");
}
1346
// Serializes operations into the compact JSON stored in a migration file's
// meta block, keeping only the identifying fields of each operation.
function serializeOperationsMeta(operations) {
  // Maps one full operation to its trimmed meta shape; operation kinds that
  // share a shape are grouped into a single case.
  const pick = (op) => {
    switch (op.type) {
      case "createTable":
      case "dropTable":
        return { type: op.type, table: op.table };
      case "renameTable":
        return { type: op.type, from: op.from, to: op.to };
      case "addColumn":
        return {
          type: op.type,
          table: op.table,
          column: op.column,
          nullable: op.nullable,
          defaultValue: op.defaultValue ?? null
        };
      case "dropColumn":
      case "alterColumnDefault":
        return { type: op.type, table: op.table, column: op.column };
      case "alterColumnType":
        return {
          type: op.type,
          table: op.table,
          column: op.column,
          fromType: op.fromType,
          toType: op.toType
        };
      case "alterColumnNullable":
        return { type: op.type, table: op.table, column: op.column, nullable: op.nullable };
      case "renameColumn":
        return { type: op.type, table: op.table, from: op.from, to: op.to };
      case "addForeignKey":
      case "dropForeignKey":
        return { type: op.type, table: op.table, constraintName: op.constraintName };
      case "createIndex":
      case "dropIndex":
        return { type: op.type, table: op.table, indexName: op.indexName };
      case "rawSql":
        return { type: op.type, description: op.description };
    }
  };
  return JSON.stringify(operations.map(pick), null, " ");
}
1393
+
1394
+ // libs/migrations/src/lib/migration.types.ts
1395
// Name of the bookkeeping table that records which migrations have been applied.
var MIGRATION_TRACKING_TABLE = "_momentum_migrations";
1396
+
1397
+ // libs/migrations/src/lib/tracking/migration-tracker.ts
1398
// Creates the migration bookkeeping table if it does not exist. The two
// branches describe the same logical schema with dialect-appropriate types
// (VARCHAR/TIMESTAMPTZ on PostgreSQL, TEXT on SQLite).
async function ensureTrackingTable(db, dialect) {
  if (dialect === "postgresql") {
    await db.execute(`
      CREATE TABLE IF NOT EXISTS "${MIGRATION_TRACKING_TABLE}" (
        "id" VARCHAR(36) PRIMARY KEY,
        "name" VARCHAR(255) NOT NULL UNIQUE,
        "batch" INTEGER NOT NULL,
        "checksum" VARCHAR(64) NOT NULL,
        "appliedAt" TIMESTAMPTZ NOT NULL,
        "executionMs" INTEGER NOT NULL
      )
    `);
  } else {
    await db.execute(`
      CREATE TABLE IF NOT EXISTS "${MIGRATION_TRACKING_TABLE}" (
        "id" TEXT PRIMARY KEY,
        "name" TEXT NOT NULL UNIQUE,
        "batch" INTEGER NOT NULL,
        "checksum" TEXT NOT NULL,
        "appliedAt" TEXT NOT NULL,
        "executionMs" INTEGER NOT NULL
      )
    `);
  }
}
1423
// Loads every recorded migration, ordered by batch then name, as tracking
// records.
async function getAppliedMigrations(db) {
  const sql = `SELECT * FROM "${MIGRATION_TRACKING_TABLE}" ORDER BY "batch" ASC, "name" ASC`;
  const rows = await db.query(sql);
  return rows.map((row) => toTrackingRecord(row));
}
1429
// Returns the batch number the next migration run should use: one past the
// highest recorded batch, or 1 for an empty (or non-numeric MAX) table.
async function getNextBatchNumber(db) {
  const rows = await db.query(
    `SELECT MAX("batch") as max_batch FROM "${MIGRATION_TRACKING_TABLE}"`
  );
  const current = rows[0]?.max_batch;
  const base = typeof current === "number" ? current : 0;
  return base + 1;
}
1436
// Inserts a tracking row for an applied migration and returns the full record
// (including the generated id).
// FIX (consistency): the original duplicated an identical INSERT statement in
// two branches differing only in placeholder style ($n vs ?); the statement is
// now built once with the dialect-appropriate placeholders.
async function recordMigration(db, record, dialect) {
  const full = { id: generateUUID(), ...record };
  const placeholders = dialect === "postgresql" ? "$1, $2, $3, $4, $5, $6" : "?, ?, ?, ?, ?, ?";
  await db.execute(
    `INSERT INTO "${MIGRATION_TRACKING_TABLE}" ("id", "name", "batch", "checksum", "appliedAt", "executionMs")
     VALUES (${placeholders})`,
    [full.id, full.name, full.batch, full.checksum, full.appliedAt, full.executionMs]
  );
  return full;
}
1454
// Deletes the tracking row for `name`; returns true when a row was removed.
async function removeMigrationRecord(db, name, dialect) {
  const param = dialect === "postgresql" ? "$1" : "?";
  const sql = `DELETE FROM "${MIGRATION_TRACKING_TABLE}" WHERE "name" = ${param}`;
  const affected = await db.execute(sql, [name]);
  return affected > 0;
}
1462
// Loads the tracking records of one batch, newest name first (the order used
// when rolling a batch back).
async function getMigrationsByBatch(db, batch, dialect) {
  const param = dialect === "postgresql" ? "$1" : "?";
  const rows = await db.query(
    `SELECT * FROM "${MIGRATION_TRACKING_TABLE}" WHERE "batch" = ${param} ORDER BY "name" DESC`,
    [batch]
  );
  return rows.map((row) => toTrackingRecord(row));
}
1470
// Returns the highest recorded batch number, or 0 when the table is empty
// (or the MAX value is not numeric).
async function getLatestBatchNumber(db) {
  const rows = await db.query(
    `SELECT MAX("batch") as max_batch FROM "${MIGRATION_TRACKING_TABLE}"`
  );
  const max = rows[0]?.max_batch;
  return typeof max === "number" ? max : 0;
}
1476
// Returns true when a tracking row exists for `name`. The loose `> 0`
// comparison also accepts drivers that return COUNT(*) as a string.
async function isMigrationApplied(db, name, dialect) {
  const param = dialect === "postgresql" ? "$1" : "?";
  const rows = await db.query(
    `SELECT COUNT(*) as cnt FROM "${MIGRATION_TRACKING_TABLE}" WHERE "name" = ${param}`,
    [name]
  );
  const count = rows[0]?.cnt ?? 0;
  return count > 0;
}
1484
// Coerces a raw driver row into a typed tracking record (drivers may return
// numbers as strings and vice versa).
function toTrackingRecord(row) {
  const { id, name, batch, checksum, appliedAt, executionMs } = row;
  return {
    id: String(id),
    name: String(name),
    batch: Number(batch),
    checksum: String(checksum),
    appliedAt: String(appliedAt),
    executionMs: Number(executionMs)
  };
}
1494
// Generates a random UUID v4 for tracking-row ids. The require is kept local
// so the helper stays self-contained within this CJS bundle.
function generateUUID() {
  const { randomUUID } = require("node:crypto");
  return randomUUID();
}
1498
+
1499
+ // libs/migrations/src/lib/danger/danger-detector.ts
1500
// Scans a migration plan for risky operations and returns the collected
// warnings sorted most-severe-first, plus convenience flags.
function detectDangers(operations, dialect) {
  const warnings = operations.flatMap(
    (op, index) => checkOperation(op, index, operations, dialect)
  );
  const rank = { error: 0, warning: 1, info: 2 };
  warnings.sort((a, b) => rank[a.severity] - rank[b.severity]);
  return {
    warnings,
    hasErrors: warnings.some((w) => w.severity === "error"),
    hasWarnings: warnings.some((w) => w.severity === "warning")
  };
}
1514
// Produces danger warnings for a single operation. `index` is the operation's
// position in the overall plan (reported so callers can point at it);
// `_allOps` is accepted for future cross-operation checks but unused today.
// Operation types not listed here are considered safe and yield no warnings.
function checkOperation(op, index, _allOps, dialect) {
  const warnings = [];
  switch (op.type) {
    case "dropTable":
      // Irreversible data loss: always an error (blocks the run upstream).
      warnings.push({
        severity: "error",
        operation: op,
        operationIndex: index,
        message: `Dropping table "${op.table}" will permanently delete all data.`,
        suggestion: 'Consider renaming the table with a deprecation prefix (e.g., "_deprecated_") and scheduling deletion after verifying no data is needed.'
      });
      break;
    case "dropColumn":
      warnings.push({
        severity: "warning",
        operation: op,
        operationIndex: index,
        message: `Dropping column "${op.table}"."${op.column}" will permanently delete all values in this column.`,
        suggestion: "Before dropping, verify the column data is either migrated elsewhere or truly unneeded. Consider a backup or data export first."
      });
      break;
    case "alterColumnType":
      // Type changes have several sub-cases; delegated to checkTypeChange.
      warnings.push(...checkTypeChange(op, index, dialect));
      break;
    case "alterColumnNullable":
      // Only tightening to NOT NULL is risky; relaxing to nullable is safe.
      if (!op.nullable) {
        warnings.push({
          severity: "warning",
          operation: op,
          operationIndex: index,
          message: `Setting "${op.table}"."${op.column}" to NOT NULL may fail if existing rows contain NULL values.`,
          suggestion: "First backfill NULL values with a default (e.g., UPDATE table SET column = 'default' WHERE column IS NULL), then add the NOT NULL constraint."
        });
      }
      break;
    case "addColumn":
      // NOT NULL without a default fails on tables with existing rows.
      if (!op.nullable && !op.defaultValue) {
        warnings.push({
          severity: "error",
          operation: op,
          operationIndex: index,
          message: `Adding NOT NULL column "${op.table}"."${op.column}" without a default value will fail if the table has existing rows.`,
          suggestion: "Either add a DEFAULT value, make the column nullable first and backfill, or add the column as nullable, backfill, then alter to NOT NULL."
        });
      }
      break;
    case "renameColumn":
      warnings.push({
        severity: "warning",
        operation: op,
        operationIndex: index,
        message: `Renaming "${op.table}"."${op.from}" to "${op.to}" may break application code that references the old name.`,
        suggestion: "Deploy application code changes to use the new column name before or alongside the migration. Consider a phased approach: add new column, migrate data, update code, then drop old column."
      });
      break;
    case "renameTable":
      warnings.push({
        severity: "warning",
        operation: op,
        operationIndex: index,
        message: `Renaming table "${op.from}" to "${op.to}" may break application code and queries.`,
        suggestion: "Update application code to use the new table name before or alongside the migration."
      });
      break;
    case "addForeignKey":
      // PostgreSQL-only locking advisory; SQLite FKs are rebuild-only anyway.
      if (dialect === "postgresql") {
        warnings.push({
          severity: "info",
          operation: op,
          operationIndex: index,
          message: `Adding foreign key "${op.constraintName}" acquires an ACCESS EXCLUSIVE lock on the referenced table.`,
          suggestion: "On large tables, consider adding the FK constraint with NOT VALID first, then validating separately: ALTER TABLE ... ADD CONSTRAINT ... NOT VALID; ALTER TABLE ... VALIDATE CONSTRAINT ..."
        });
      }
      break;
    case "createIndex":
      // Generated createIndex ops never use CONCURRENTLY today (see
      // isCreateIndexConcurrent), so this fires for every PG index creation.
      if (dialect === "postgresql" && !isCreateIndexConcurrent(op)) {
        warnings.push({
          severity: "info",
          operation: op,
          operationIndex: index,
          message: `Creating index "${op.indexName}" will lock "${op.table}" for writes during index creation.`,
          suggestion: "For large tables, consider CREATE INDEX CONCURRENTLY to avoid blocking writes (requires running outside a transaction)."
        });
      }
      break;
  }
  return warnings;
}
1603
// Danger analysis specific to alterColumnType operations: SQLite cannot do it
// at all (error), otherwise flag potentially lossy conversions (warning) and
// conversions likely to force a table rewrite (info).
function checkTypeChange(op, index, dialect) {
  const warnings = [];
  if (dialect === "sqlite") {
    warnings.push({
      severity: "error",
      operation: op,
      operationIndex: index,
      message: `SQLite does not support ALTER COLUMN TYPE. Changing "${op.table}"."${op.column}" from ${op.fromType} to ${op.toType} requires a table rebuild.`,
      suggestion: "Create a new table with the desired schema, copy data, drop old table, and rename new table. Use a raw SQL migration for this."
    });
    // No further analysis is meaningful once the operation is impossible.
    return warnings;
  }
  if (isLossyTypeChange(op.fromType, op.toType)) {
    warnings.push({
      severity: "warning",
      operation: op,
      operationIndex: index,
      message: `Changing "${op.table}"."${op.column}" from ${op.fromType} to ${op.toType} may cause data loss or cast errors.`,
      suggestion: "Test the type conversion on a clone database first. Consider adding a USING clause with explicit cast logic."
    });
  }
  if (isTableRewriteType(op.fromType, op.toType)) {
    warnings.push({
      severity: "info",
      operation: op,
      operationIndex: index,
      message: `Changing "${op.table}"."${op.column}" from ${op.fromType} to ${op.toType} may require a table rewrite on large tables.`,
      suggestion: "On large tables, this can take significant time and lock the table. Consider running during low-traffic periods or using a phased approach."
    });
  }
  return warnings;
}
1635
// --- Type-change classification helpers -------------------------------------

// Returns true when converting a column from `from` to `to` may lose data or
// fail to cast (text -> numeric, narrowing integers, shrinking VARCHAR, etc.).
function isLossyTypeChange(from, to) {
  const fromUpper = from.toUpperCase();
  const toUpper = to.toUpperCase();
  if (isTextType(fromUpper) && isNumericType(toUpper))
    return true;
  if (fromUpper === "NUMERIC" && (toUpper === "INTEGER" || toUpper === "SMALLINT"))
    return true;
  if (fromUpper === "BIGINT" && (toUpper === "INTEGER" || toUpper === "SMALLINT"))
    return true;
  if (fromUpper === "DOUBLE PRECISION" && toUpper === "REAL")
    return true;
  // FIX: previously this tested `!fromUpper.includes("JSON")`, which is always
  // false when fromUpper is JSON/JSONB, so JSON -> non-JSON conversions were
  // never flagged. The TARGET type determines whether the change is lossy.
  if ((fromUpper === "JSONB" || fromUpper === "JSON") && !toUpper.includes("JSON"))
    return true;
  if (fromUpper.includes("TIMESTAMP") && toUpper === "DATE")
    return true;
  // Shrinking a parenthesized length, e.g. VARCHAR(255) -> VARCHAR(100).
  const fromLength = extractLength(fromUpper);
  const toLength = extractLength(toUpper);
  if (fromLength && toLength && toLength < fromLength)
    return true;
  return false;
}
// Returns true when the conversion is likely to force a full table rewrite
// (e.g. TEXT -> VARCHAR(n), or crossing the text/numeric boundary).
function isTableRewriteType(from, to) {
  const fromUpper = from.toUpperCase();
  const toUpper = to.toUpperCase();
  // VARCHAR -> TEXT is a metadata-only widening; no rewrite expected.
  if (fromUpper.startsWith("VARCHAR") && toUpper === "TEXT")
    return false;
  if (fromUpper === "TEXT" && toUpper.startsWith("VARCHAR"))
    return true;
  if (isNumericType(fromUpper) !== isNumericType(toUpper))
    return true;
  return false;
}
// True for TEXT and the CHAR/VARCHAR family (type must be upper-cased).
function isTextType(type) {
  return type === "TEXT" || type.startsWith("VARCHAR") || type.startsWith("CHAR");
}
// True for the numeric type family (type must be upper-cased).
function isNumericType(type) {
  return ["INTEGER", "BIGINT", "SMALLINT", "NUMERIC", "REAL", "DOUBLE PRECISION", "FLOAT"].includes(
    type
  );
}
// Extracts a parenthesized length, e.g. "VARCHAR(255)" -> 255; null if none.
function extractLength(type) {
  const match = type.match(/\((\d+)\)/);
  return match ? parseInt(match[1], 10) : null;
}
1679
// Placeholder predicate: generated createIndex operations never request
// CONCURRENTLY today, so this always reports false.
function isCreateIndexConcurrent(_op) {
  const concurrentModeled = false; // not yet represented on createIndex ops
  return concurrentModeled;
}
1682
+
1683
+ // libs/migrations/src/lib/runner/push-runner.ts
1684
// "Push" workflow: diff the desired collection schema against the live
// database and apply the generated SQL directly (no migration files).
// Returns the diff, any danger analysis, the SQL that was (or would be)
// executed, and per-statement errors. Dangerous "error"-level operations
// block the push unless skipDangerDetection is set; dryRun only reports.
// `collectionsToSchema` and `diffSchemas` are defined elsewhere in this bundle.
async function runPush(options) {
  const { collections, dialect, db, introspect, dryRun, skipDangerDetection, log } = options;
  const noop = { info: () => {
  }, warn: () => {
  } };
  const logger = log ?? noop;
  const desired = collectionsToSchema(collections, dialect);
  const actual = await introspect();
  const diff = diffSchemas(desired, actual, dialect);
  if (!diff.hasChanges) {
    logger.info("Schema is up to date. No changes needed.");
    return {
      applied: false,
      diff,
      dangers: null,
      sqlStatements: [],
      successCount: 0,
      errors: []
    };
  }
  let dangers = null;
  if (!skipDangerDetection) {
    dangers = detectDangers(diff.operations, dialect);
    if (dangers.hasErrors) {
      logger.warn(
        `Found ${dangers.warnings.filter((w) => w.severity === "error").length} dangerous operations. Push blocked.`
      );
      return {
        applied: false,
        diff,
        dangers,
        sqlStatements: [],
        successCount: 0,
        errors: []
      };
    }
  }
  const sqlStatements = operationsToUpSql(diff.operations, dialect);
  if (dryRun) {
    logger.info(`Dry run: ${sqlStatements.length} statements would be executed.`);
    return {
      applied: false,
      diff,
      dangers,
      sqlStatements,
      successCount: 0,
      errors: []
    };
  }
  let successCount = 0;
  const errors = [];
  for (const sql of sqlStatements) {
    // "--"-prefixed statements are dialect-unsupported placeholders emitted by
    // the SQL generators; they are reported but never executed.
    if (sql.startsWith("--")) {
      logger.info(`Skipping: ${sql}`);
      continue;
    }
    try {
      await db.executeRaw(sql);
      successCount++;
      logger.info(`OK: ${sql.slice(0, 80)}${sql.length > 80 ? "..." : ""}`);
    } catch (err) {
      // A failed statement is recorded but does not stop the remaining ones.
      const errMsg = err instanceof Error ? err.message : String(err);
      errors.push({ sql, error: errMsg });
      logger.warn(`FAILED: ${sql.slice(0, 80)} \u2014 ${errMsg}`);
    }
  }
  logger.info(
    `Push complete: ${successCount} applied, ${errors.length} failed out of ${sqlStatements.length} statements.`
  );
  return {
    applied: successCount > 0,
    diff,
    dangers,
    sqlStatements,
    successCount,
    errors
  };
}
1762
+
1763
+ // libs/migrations/src/lib/runner/migrate-runner.ts
1764
// node:crypto import for this module's checksum helpers — presumably used by
// computeMigrationChecksum, which is defined outside this view (confirm).
var import_node_crypto2 = require("node:crypto");
1765
// Applies all pending migrations (those without a tracking row) as one batch.
// Each success is recorded immediately; the loop stops at the first failure,
// leaving earlier successes recorded. Danger detection pre-scans the
// declarative meta.operations of the pending files and blocks the run on
// "error"-level findings unless skipDangerDetection is set.
// `toMigrationOperation`, `computeMigrationChecksum` and `extractErrorCode`
// are defined elsewhere in this bundle.
async function runMigrations(options) {
  const { migrations, dialect, tracker, buildContext, skipDangerDetection, log } = options;
  const noop = {
    info: () => {
    },
    warn: () => {
    }
  };
  const logger = log ?? noop;
  await ensureTrackingTable(tracker, dialect);
  const applied = await getAppliedMigrations(tracker);
  const appliedNames = new Set(applied.map((m) => m.name));
  const pending = migrations.filter((m) => !appliedNames.has(m.name));
  if (pending.length === 0) {
    logger.info("No pending migrations.");
    return {
      batch: 0,
      results: [],
      successCount: 0,
      failCount: 0,
      dangers: null
    };
  }
  let dangers = null;
  if (!skipDangerDetection) {
    // Danger analysis works off the serialized operation metadata; files
    // without meta.operations (e.g. hand-written raw SQL) are skipped.
    const allOps = pending.flatMap((m) => {
      const ops = m.file.meta.operations;
      if (!ops)
        return [];
      return ops.filter((op) => typeof op.type === "string").map(toMigrationOperation);
    });
    if (allOps.length > 0) {
      dangers = detectDangers(allOps, dialect);
      if (dangers.hasErrors) {
        logger.warn("Dangerous operations detected. Migration blocked.");
        return {
          batch: 0,
          results: [],
          successCount: 0,
          failCount: 0,
          dangers
        };
      }
    }
  }
  const batch = await getNextBatchNumber(tracker);
  logger.info(`Running ${pending.length} migration(s) in batch ${batch}...`);
  const results = [];
  const ctx = buildContext();
  for (const migration of pending) {
    const start = Date.now();
    try {
      await migration.file.up(ctx);
      const executionMs = Date.now() - start;
      const checksum = computeMigrationChecksum(migration);
      await recordMigration(
        tracker,
        {
          name: migration.name,
          batch,
          checksum,
          appliedAt: (/* @__PURE__ */ new Date()).toISOString(),
          executionMs
        },
        dialect
      );
      results.push({ name: migration.name, success: true, executionMs });
      logger.info(`  OK: ${migration.name} (${executionMs}ms)`);
    } catch (err) {
      const executionMs = Date.now() - start;
      const errMsg = err instanceof Error ? err.message : String(err);
      const errorCode = extractErrorCode(err);
      results.push({ name: migration.name, success: false, executionMs, error: errMsg, errorCode });
      logger.warn(`  FAILED: ${migration.name} \u2014 ${errMsg}`);
      // Stop at the first failure: later migrations may depend on this one.
      break;
    }
  }
  const successCount = results.filter((r) => r.success).length;
  const failCount = results.filter((r) => !r.success).length;
  logger.info(`Batch ${batch}: ${successCount} applied, ${failCount} failed.`);
  return { batch, results, successCount, failCount, dangers };
}
1847
// Rolls back the most recent batch: runs each recorded migration's down()
// in the order returned by the tracker, deleting its tracking record on
// success. Stops at the first failure or first migration whose file is
// missing from the loaded set.
async function rollbackBatch(options) {
  const { migrations, dialect, tracker, buildContext, log } = options;
  // Fallback logger so callers may omit `log`.
  const noop = {
    info: () => {
    },
    warn: () => {
    }
  };
  const logger = log ?? noop;
  await ensureTrackingTable(tracker, dialect);
  const latestBatch = await getLatestBatchNumber(tracker);
  if (latestBatch === 0) {
    logger.info("Nothing to rollback.");
    return { batch: 0, results: [], successCount: 0, failCount: 0, dangers: null };
  }
  const batchMigrations = await getMigrationsByBatch(tracker, latestBatch, dialect);
  if (batchMigrations.length === 0) {
    logger.info("No migrations in latest batch.");
    return { batch: 0, results: [], successCount: 0, failCount: 0, dangers: null };
  }
  logger.info(`Rolling back batch ${latestBatch} (${batchMigrations.length} migration(s))...`);
  // Index loaded migration files by name so tracked records can be matched.
  const migrationMap = new Map(migrations.map((m) => [m.name, m]));
  const results = [];
  const ctx = buildContext();
  for (const record of batchMigrations) {
    const migration = migrationMap.get(record.name);
    if (!migration) {
      // A tracking record exists but its file is gone — down() cannot run.
      results.push({
        name: record.name,
        success: false,
        executionMs: 0,
        error: `Migration file "${record.name}" not found`
      });
      logger.warn(` MISSING: ${record.name}`);
      break;
    }
    const start = Date.now();
    try {
      await migration.file.down(ctx);
      const executionMs = Date.now() - start;
      // Removing the record makes the migration eligible to run again.
      await removeMigrationRecord(tracker, record.name, dialect);
      results.push({ name: record.name, success: true, executionMs });
      logger.info(` Rolled back: ${record.name} (${executionMs}ms)`);
    } catch (err) {
      const executionMs = Date.now() - start;
      const errMsg = err instanceof Error ? err.message : String(err);
      const errorCode = extractErrorCode(err);
      results.push({ name: record.name, success: false, executionMs, error: errMsg, errorCode });
      logger.warn(` FAILED rollback: ${record.name} \u2014 ${errMsg}`);
      break;
    }
  }
  const successCount = results.filter((r) => r.success).length;
  const failCount = results.filter((r) => !r.success).length;
  return { batch: latestBatch, results, successCount, failCount, dangers: null };
}
1903
// Reports per-migration status ("applied" with batch/appliedAt, or "pending")
// for every loaded migration, based on the tracker's records.
async function getMigrationStatus(migrations, tracker, dialect) {
  await ensureTrackingTable(tracker, dialect);
  const appliedRecords = await getAppliedMigrations(tracker);
  // Index applied records by name for O(1) lookups below.
  const byName = new Map();
  for (const record of appliedRecords) {
    byName.set(record.name, record);
  }
  const statuses = [];
  for (const migration of migrations) {
    const record = byName.get(migration.name);
    if (record) {
      statuses.push({
        name: migration.name,
        status: "applied",
        batch: record.batch,
        appliedAt: record.appliedAt
      });
    } else {
      statuses.push({ name: migration.name, status: "pending" });
    }
  }
  return statuses;
}
1920
// Extracts a driver-provided string error code (e.g. a SQLSTATE like "23505")
// from a thrown value; returns undefined when none is present.
function extractErrorCode(err) {
  if (err === null || typeof err !== "object") {
    return void 0;
  }
  const { code } = err;
  return typeof code === "string" ? code : void 0;
}
1926
// Identity pass-through: in the TypeScript source this narrows a loosely
// typed meta operation to MigrationOperation; at runtime it returns the
// argument unchanged.
function toMigrationOperation(op) {
  const operation = op;
  return operation;
}
1929
// Fingerprints a migration as a sha-256 hex digest over its serialized meta
// plus the source text of its up/down functions, so later edits to an
// already-applied migration file can be detected.
function computeMigrationChecksum(migration) {
  const { meta, up, down } = migration.file;
  const fingerprint = [JSON.stringify(meta), up.toString(), down.toString()].join("");
  return (0, import_node_crypto2.createHash)("sha256").update(fingerprint).digest("hex");
}
1933
+
1934
+ // libs/migrations/src/lib/runner/clone-test-apply.ts
1935
// Clone-test-apply pipeline: clone the database, run all pending migrations
// on the clone first, and only when they all succeed apply them to the real
// database. `result.phase` records how far the pipeline got:
//   clone -> test -> apply -> complete, or "skipped" in testOnly mode.
// The clone is always cleaned up, including on error (see catch block).
async function cloneTestApply(options) {
  const {
    migrations,
    dialect,
    tracker,
    buildContext,
    db,
    buildCloneTracker,
    buildCloneContext,
    testOnly,
    skipDangerDetection,
    log
  } = options;
  // Fallback logger so callers may omit `log`.
  const noop = { info: () => {
  }, warn: () => {
  } };
  const logger = log ?? noop;
  // Timestamped clone name avoids collisions with earlier leftover clones.
  const timestamp = Date.now();
  const cloneName = `_mig_clone_${timestamp}`;
  let cloneCleanedUp = false;
  const result = {
    phase: "clone",
    cloneResult: null,
    applyResult: null,
    dangers: null,
    cloneCleanedUp: false,
    cloneName,
    suggestions: []
  };
  try {
    logger.info(`Cloning database to "${cloneName}"...`);
    await db.cloneDatabase(cloneName);
    logger.info("Clone created successfully.");
    result.phase = "test";
    logger.info("Running migrations on clone...");
    // Tracker and context must point at the clone, not the real database.
    const cloneTracker = buildCloneTracker(cloneName);
    const cloneContext = buildCloneContext(cloneName);
    const cloneResult = await runMigrations({
      migrations,
      dialect,
      tracker: cloneTracker,
      buildContext: () => cloneContext,
      skipDangerDetection,
      log: {
        info: (msg) => logger.info(`[clone] ${msg}`),
        warn: (msg) => logger.warn(`[clone] ${msg}`)
      }
    });
    result.cloneResult = cloneResult;
    result.dangers = cloneResult.dangers;
    if (cloneResult.failCount > 0) {
      // Test run failed: surface remediation hints, clean up, and stop
      // without touching the real database.
      const suggestions = generateFixSuggestions(cloneResult);
      result.suggestions = suggestions;
      result.error = `Migration failed on clone: ${cloneResult.results.find((r) => !r.success)?.error}`;
      logger.warn("Migration failed on clone. Suggestions:");
      for (const suggestion of suggestions) {
        logger.warn(` - ${suggestion}`);
      }
      await cleanupClone(db, cloneName, logger);
      result.cloneCleanedUp = true;
      cloneCleanedUp = true;
      return result;
    }
    logger.info(`Clone test passed: ${cloneResult.successCount} migration(s) applied.`);
    await cleanupClone(db, cloneName, logger);
    result.cloneCleanedUp = true;
    cloneCleanedUp = true;
    if (testOnly) {
      result.phase = "skipped";
      logger.info("Test-only mode: skipping real database apply.");
      return result;
    }
    result.phase = "apply";
    logger.info("Applying migrations to real database...");
    const applyResult = await runMigrations({
      migrations,
      dialect,
      tracker,
      buildContext,
      skipDangerDetection: true,
      // Already validated on clone
      log: logger
    });
    result.applyResult = applyResult;
    if (applyResult.failCount > 0) {
      result.error = `Migration failed on real database: ${applyResult.results.find((r) => !r.success)?.error}`;
      return result;
    }
    result.phase = "complete";
    logger.info(
      `Pipeline complete: ${applyResult.successCount} migration(s) applied to real database.`
    );
    return result;
  } catch (err) {
    const errMsg = err instanceof Error ? err.message : String(err);
    result.error = errMsg;
    logger.warn(`Pipeline error: ${errMsg}`);
    // Guarantee clone cleanup even when the pipeline threw before the
    // normal cleanup points above ran.
    if (!cloneCleanedUp) {
      await cleanupClone(db, cloneName, logger);
      result.cloneCleanedUp = true;
    }
    return result;
  }
}
2039
// Best-effort removal of a clone database. Failures are logged via the
// provided logger and never rethrown, so cleanup can never mask the
// pipeline's primary error.
async function cleanupClone(db, cloneName, logger) {
  try {
    await db.dropClone(cloneName);
    logger.info(`Clone "${cloneName}" cleaned up.`);
    return;
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    logger.warn(`Failed to clean up clone "${cloneName}": ${message}`);
  }
}
2048
// Maps SQLSTATE error codes (PostgreSQL-style) to actionable remediation
// hints. Frozen shared constant; must only be read via an own-property
// check because the codes come from untrusted driver errors.
var SQLSTATE_SUGGESTIONS = Object.freeze({
  // Class 23 — Integrity Constraint Violation
  "23502": "NOT NULL constraint violation. Backfill NULL values before adding the constraint: Use ctx.data.backfill(table, column, defaultValue) in a prior migration step.",
  "23505": "Unique constraint violation. Use ctx.data.dedup() to remove duplicates before adding the unique constraint.",
  "23503": "Foreign key constraint violation. Ensure referenced data exists before adding the constraint. Consider using ctx.data.backfill() to populate references, or add the FK with NOT VALID first.",
  // Class 42 — Syntax Error or Access Rule Violation
  "42P07": "The table already exists. Check if a previous migration already created it, or use IF NOT EXISTS in your DDL.",
  "42701": "The column already exists. Check if a previous migration already created it, or use IF NOT EXISTS in your DDL.",
  "42P01": "Referenced table does not exist. Check migration ordering \u2014 the table must be created before it can be referenced.",
  "42703": "Referenced column does not exist. Check migration ordering \u2014 the column must be created before it can be referenced.",
  // Class 22 — Data Exception
  "22P02": "Type conversion error. The data contains values that cannot be converted to the target type. Use ctx.data.transform() to clean up values before altering the column type.",
  "42804": "Type conversion error. Add an explicit USING clause for the type change, or use ctx.data.transform() to convert values before altering the column type."
});
// Produces human-readable remediation suggestions for the first failed
// migration in a run result. Prefers the exact SQLSTATE mapping when the
// driver surfaced a code; otherwise falls back to message heuristics, and
// finally to a generic "review the SQL" hint.
// Fix: the code lookup now uses an own-property check. Previously a hostile
// or unusual errorCode such as "constructor" or "toString" resolved to an
// inherited Object.prototype member, passed the truthiness test, and was
// pushed into the suggestions list as a function.
function generateFixSuggestions(result) {
  const suggestions = [];
  const failedMigration = result.results.find((r) => !r.success);
  if (!failedMigration)
    return suggestions;
  if (failedMigration.errorCode && Object.prototype.hasOwnProperty.call(SQLSTATE_SUGGESTIONS, failedMigration.errorCode)) {
    suggestions.push(SQLSTATE_SUGGESTIONS[failedMigration.errorCode]);
    return suggestions;
  }
  // No (mapped) code — fall back to substring heuristics on the message.
  const error = failedMigration.error ?? "";
  if (error.includes("NOT NULL") && error.includes("contains null")) {
    suggestions.push(
      "Backfill NULL values before adding NOT NULL constraint: Use ctx.data.backfill(table, column, defaultValue) in a prior migration step."
    );
  }
  if (error.includes("already exists")) {
    suggestions.push(
      "The column or table already exists. Check if a previous migration already created it, or use IF NOT EXISTS in your DDL."
    );
  }
  if (error.includes("violates foreign key")) {
    suggestions.push(
      "Foreign key constraint violation. Ensure referenced data exists before adding the constraint. Consider using ctx.data.backfill() to populate references, or add the FK with NOT VALID first."
    );
  }
  if (error.includes("does not exist")) {
    suggestions.push(
      "Referenced table or column does not exist. Check migration ordering \u2014 the table/column must be created before it can be referenced."
    );
  }
  if (error.includes("unique constraint") || error.includes("duplicate key")) {
    suggestions.push(
      "Unique constraint violation. Use ctx.data.dedup() to remove duplicates before adding the unique constraint."
    );
  }
  if (error.includes("type") && (error.includes("cast") || error.includes("convert"))) {
    suggestions.push(
      "Type conversion error. Add an explicit USING clause for the type change, or use ctx.data.transform() to convert values before altering the column type."
    );
  }
  if (suggestions.length === 0) {
    suggestions.push(
      `Migration "${failedMigration.name}" failed with: ${error}. Review the migration SQL and test on a development database.`
    );
  }
  return suggestions;
}
2112
+
2113
+ // libs/migrations/src/lib/helpers/data-helpers.ts
2114
// Builds the ctx.data helper set used inside migrations for data repair and
// reshaping (backfill, transform, column rename/split/merge, JSON moves,
// dedup). `db` is a minimal { execute, query } surface; `dialect` is
// "postgresql" or SQLite.
// NOTE(review): table/column identifiers and SQL expressions are interpolated
// directly into the SQL text — callers are presumed to pass trusted,
// migration-authored names, not user input; confirm before widening usage.
function createDataHelpers(db, dialect) {
  // Positional placeholder: $1-style for Postgres, ? for SQLite.
  const ph = (index) => dialect === "postgresql" ? `$${index}` : "?";
  return {
    // Sets `column` to `value` for rows where it IS NULL (optionally further
    // filtered by options.where), in batches keyed on the physical row id
    // (ctid / rowid) to bound per-statement work. Returns rows affected.
    async backfill(table, column, value, options) {
      const where = options?.where ? ` AND (${options.where})` : "";
      const batchSize = options?.batchSize ?? 1e3;
      let totalAffected = 0;
      if (dialect === "postgresql") {
        let affected;
        do {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${ph(1)}
        WHERE ctid IN (
          SELECT ctid FROM "${table}"
          WHERE "${column}" IS NULL${where}
          LIMIT ${batchSize}
        )`,
            [value]
          );
          totalAffected += affected;
        } while (affected >= batchSize);
      } else {
        let affected;
        do {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${ph(1)}
        WHERE rowid IN (
          SELECT rowid FROM "${table}"
          WHERE "${column}" IS NULL${where}
          LIMIT ${batchSize}
        )`,
            [value]
          );
          totalAffected += affected;
        } while (affected >= batchSize);
      }
      return totalAffected;
    },
    // Applies a SQL expression to `column` for matching rows. batchSize <= 0
    // (the default) runs one unbatched UPDATE; otherwise batches by row id.
    // Returns rows affected.
    async transform(table, column, sqlExpression, options) {
      const where = options?.where ? ` WHERE ${options.where}` : "";
      const batchSize = options?.batchSize ?? 0;
      if (batchSize <= 0) {
        return db.execute(
          `UPDATE "${table}" SET "${column}" = ${sqlExpression}${where}`
        );
      }
      let totalAffected = 0;
      let affected;
      do {
        if (dialect === "postgresql") {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${sqlExpression}
          WHERE ctid IN (
            SELECT ctid FROM "${table}"${where}
            LIMIT ${batchSize}
          )`
          );
        } else {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${sqlExpression}
          WHERE rowid IN (
            SELECT rowid FROM "${table}"${where}
            LIMIT ${batchSize}
          )`
          );
        }
        totalAffected += affected;
      } while (affected >= batchSize);
      return totalAffected;
    },
    // Renames a column portably (works where RENAME COLUMN is unavailable):
    // add new column, copy values, drop old column. Not atomic — runs as
    // three separate statements.
    async renameColumn(table, from, to, columnType) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${to}" ${columnType}`
      );
      await db.execute(
        `UPDATE "${table}" SET "${to}" = "${from}"`
      );
      await db.execute(
        `ALTER TABLE "${table}" DROP COLUMN "${from}"`
      );
    },
    // Creates each target column and populates it from its SQL expression.
    // The source column itself is left untouched (hence the unused param).
    async splitColumn(table, _sourceColumn, targets) {
      for (const target of targets) {
        await db.execute(
          `ALTER TABLE "${table}" ADD COLUMN "${target.name}" ${target.type}`
        );
        await db.execute(
          `UPDATE "${table}" SET "${target.name}" = ${target.expression}`
        );
      }
    },
    // Creates `targetColumn` and fills it from `mergeExpression`; source
    // columns are left in place.
    async mergeColumns(table, _sourceColumns, targetColumn, targetType, mergeExpression) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${targetColumn}" ${targetType}`
      );
      await db.execute(
        `UPDATE "${table}" SET "${targetColumn}" = ${mergeExpression}`
      );
    },
    // INSERT..SELECT between tables. columnMapping maps target column ->
    // source column name (string) or { expression } for computed values.
    // Returns rows inserted.
    async copyData(sourceTable, targetTable, columnMapping, options) {
      const targetCols = [];
      const sourceCols = [];
      for (const [target, source] of Object.entries(columnMapping)) {
        targetCols.push(`"${target}"`);
        if (typeof source === "string") {
          sourceCols.push(`"${source}"`);
        } else {
          sourceCols.push(source.expression);
        }
      }
      const where = options?.where ? ` WHERE ${options.where}` : "";
      const affected = await db.execute(
        `INSERT INTO "${targetTable}" (${targetCols.join(", ")})
       SELECT ${sourceCols.join(", ")} FROM "${sourceTable}"${where}`
      );
      return affected;
    },
    // Folds a scalar column into a JSON column under `jsonKey`, preserving
    // any existing JSON content (jsonb || for Postgres, json_set for SQLite).
    async columnToJson(table, sourceColumn, jsonColumn, jsonKey) {
      if (dialect === "postgresql") {
        await db.execute(
          `UPDATE "${table}" SET "${jsonColumn}" = COALESCE("${jsonColumn}", '{}'::jsonb) || jsonb_build_object('${jsonKey}', "${sourceColumn}")`
        );
      } else {
        await db.execute(
          `UPDATE "${table}" SET "${jsonColumn}" = json_set(COALESCE("${jsonColumn}", '{}'), '$.${jsonKey}', "${sourceColumn}")`
        );
      }
    },
    // Extracts `jsonKey` from a JSON column into a newly created scalar
    // column (->> for Postgres, json_extract for SQLite).
    async jsonToColumn(table, jsonColumn, jsonKey, targetColumn, targetType) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${targetColumn}" ${targetType}`
      );
      if (dialect === "postgresql") {
        await db.execute(
          `UPDATE "${table}" SET "${targetColumn}" = "${jsonColumn}"->>'${jsonKey}'`
        );
      } else {
        await db.execute(
          `UPDATE "${table}" SET "${targetColumn}" = json_extract("${jsonColumn}", '$.${jsonKey}')`
        );
      }
    },
    // Deletes duplicate rows, keeping one row per distinct (columns) tuple.
    // keepStrategy: "latest"/"earliest" order by "createdAt" (assumes the
    // table has that column — TODO confirm); "first" keeps the lowest
    // physical row id. NOTE(review): the SQLite branch always keeps
    // MIN(rowid), i.e. it ignores keepStrategy — presumably a known
    // limitation; confirm before relying on "latest"/"earliest" there.
    async dedup(table, columns, keepStrategy = "latest") {
      const colList = columns.map((c) => `"${c}"`).join(", ");
      let orderBy;
      switch (keepStrategy) {
        case "earliest":
          orderBy = '"createdAt" ASC';
          break;
        case "first":
          orderBy = dialect === "postgresql" ? "ctid ASC" : "rowid ASC";
          break;
        default:
          orderBy = '"createdAt" DESC';
          break;
      }
      if (dialect === "postgresql") {
        return db.execute(
          `DELETE FROM "${table}" WHERE ctid NOT IN (
          SELECT DISTINCT ON (${colList}) ctid
          FROM "${table}"
          ORDER BY ${colList}, ${orderBy}
        )`
        );
      }
      return db.execute(
        `DELETE FROM "${table}" WHERE rowid NOT IN (
        SELECT MIN(rowid) FROM "${table}"
        GROUP BY ${colList}
      )`
      );
    }
  };
}
2288
+
2289
+ // libs/migrations/src/lib/loader/migration-loader.ts
2290
+ var import_node_fs = require("node:fs");
2291
+ var import_node_path = require("node:path");
2292
+ var import_node_url = require("node:url");
2293
// Migration filenames: 14-digit timestamp (YYYYMMDDHHMMSS), underscore, descriptive name, ".ts".
var MIGRATION_FILE_PATTERN = /^\d{14}_.+\.ts$/;
2294
// Runtime type guard: true when `value` structurally matches MigrationFile —
// an object carrying a non-null `meta` object plus `up` and `down` functions.
function isMigrationFile(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  const candidate = value;
  const hasMeta = "meta" in candidate && typeof candidate["meta"] === "object" && candidate["meta"] !== null;
  const hasUp = "up" in candidate && typeof candidate["up"] === "function";
  const hasDown = "down" in candidate && typeof candidate["down"] === "function";
  return hasMeta && hasUp && hasDown;
}
// Validates a dynamically imported migration module and returns its
// MigrationFile (default export preferred, falling back to the module
// itself). On failure, throws an error naming the first missing piece
// (module shape, `meta`, `up`, `down`) to aid debugging.
function validateMigrationModule(mod, filePath) {
  const candidate = mod["default"] ?? mod;
  if (isMigrationFile(candidate)) {
    return candidate;
  }
  if (candidate === null || typeof candidate !== "object") {
    throw new Error(`Migration file ${filePath} does not export a valid module`);
  }
  if (!("meta" in candidate) || typeof candidate["meta"] !== "object") {
    throw new Error(`Migration file ${filePath} is missing a valid 'meta' export`);
  }
  if (!("up" in candidate) || typeof candidate["up"] !== "function") {
    throw new Error(`Migration file ${filePath} is missing an 'up' function export`);
  }
  if (!("down" in candidate) || typeof candidate["down"] !== "function") {
    throw new Error(`Migration file ${filePath} is missing a 'down' function export`);
  }
  // Reachable e.g. when `meta` is present but null (typeof null === "object").
  throw new Error(`Migration file ${filePath} does not conform to MigrationFile interface`);
}
2319
// Loads migration modules from `directory`: filters filenames matching
// MIGRATION_FILE_PATTERN, sorts them (timestamp prefix gives chronological
// order), dynamically imports each one, and validates its shape.
// Returns [] when the directory is missing or empty.
// Throws (via validateMigrationModule) on a malformed migration module.
async function loadMigrationsFromDisk(directory) {
  if (!(0, import_node_fs.existsSync)(directory))
    return [];
  const files = (0, import_node_fs.readdirSync)(directory).filter((f) => MIGRATION_FILE_PATTERN.test(f)).sort();
  if (files.length === 0)
    return [];
  const migrations = [];
  for (const filename of files) {
    const filePath = (0, import_node_path.join)(directory, filename);
    // file:// URL is required for dynamic import of absolute paths on all
    // platforms (notably Windows).
    const fileUrl = (0, import_node_url.pathToFileURL)(filePath).href;
    const mod = await import(fileUrl);
    const file = validateMigrationModule(mod, filePath);
    // Migration name is the filename without the .ts extension.
    const name = filename.replace(/\.ts$/, "");
    migrations.push({ name, file });
  }
  return migrations;
}
2336
+
2337
+ // libs/migrations/src/lib/loader/snapshot-manager.ts
2338
+ var import_node_fs2 = require("node:fs");
2339
+ var import_node_path2 = require("node:path");
2340
// Name of the schema snapshot file kept alongside generated migrations.
var SNAPSHOT_FILENAME = ".snapshot.json";
// Loads and deserializes the snapshot stored in `directory`, or returns
// null when no snapshot file exists yet.
function readSnapshot(directory) {
  const snapshotPath = (0, import_node_path2.join)(directory, SNAPSHOT_FILENAME);
  if (!(0, import_node_fs2.existsSync)(snapshotPath)) {
    return null;
  }
  const raw = (0, import_node_fs2.readFileSync)(snapshotPath, "utf-8");
  return deserializeSnapshot(raw);
}
// Serializes `snapshot` into `directory`, creating the directory first
// if necessary.
function writeSnapshot(directory, snapshot) {
  (0, import_node_fs2.mkdirSync)(directory, { recursive: true });
  const snapshotPath = (0, import_node_path2.join)(directory, SNAPSHOT_FILENAME);
  (0, import_node_fs2.writeFileSync)(snapshotPath, serializeSnapshot(snapshot), "utf-8");
}
// Returns the path where the snapshot for `directory` lives (whether or
// not the file exists).
function getSnapshotPath(directory) {
  return (0, import_node_path2.join)(directory, SNAPSHOT_FILENAME);
}
2356
+
2357
+ // libs/migrations/src/cli/shared.ts
2358
// Returns the adapter's SQL dialect, failing fast when the adapter factory
// forgot to set it (downstream migration logic branches on this value).
function resolveDialect(adapter) {
  const { dialect } = adapter;
  if (dialect) {
    return dialect;
  }
  throw new Error(
    "DatabaseAdapter.dialect is not set. Ensure your adapter factory (postgresAdapter/sqliteAdapter) sets the dialect property."
  );
}
2366
// Adapts a DatabaseAdapter into the minimal { query, execute } surface the
// migration tracker needs, binding the raw methods to the adapter so `this`
// is preserved.
function buildTrackerFromAdapter(adapter) {
  if (!adapter.queryRaw || !adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw and executeRaw for migration tracking");
  }
  const boundQuery = adapter.queryRaw.bind(adapter);
  const boundExecute = adapter.executeRaw.bind(adapter);
  return {
    query: async (sql, params) => boundQuery(sql, params),
    execute: async (sql, params) => boundExecute(sql, params)
  };
}
2381
// Builds the MigrationContext handed to up()/down(): raw sql/query access,
// the ctx.data transformation helpers, the dialect, and a console-backed
// logger.
function buildContextFromAdapter(adapter, dialect) {
  if (!adapter.queryRaw || !adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw and executeRaw for migration context");
  }
  const boundQuery = adapter.queryRaw.bind(adapter);
  const boundExecute = adapter.executeRaw.bind(adapter);
  // Minimal { execute, query } surface for the data helpers.
  const helperDb = {
    execute: async (sql, params) => boundExecute(sql, params),
    query: async (sql, params) => boundQuery(sql, params)
  };
  return {
    // ctx.sql discards the driver result (DDL-style statements).
    async sql(query, params) {
      await boundExecute(query, params);
    },
    async query(sql, params) {
      return boundQuery(sql, params);
    },
    data: createDataHelpers(helperDb, dialect),
    dialect,
    log: {
      // NOTE(review): both levels go through console.warn (stderr), which
      // keeps migration output off stdout — presumably intentional; confirm
      // before changing.
      info: (msg) => {
        console.warn(`[migration] ${msg}`);
      },
      warn: (msg) => {
        console.warn(`[migration:warn] ${msg}`);
      }
    }
  };
}
2415
// Adapts a DatabaseAdapter into the single-method { executeRaw } surface
// push mode uses to run generated DDL statements.
function buildPushDbFromAdapter(adapter) {
  if (!adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement executeRaw for push mode");
  }
  const boundExecute = adapter.executeRaw.bind(adapter);
  return {
    executeRaw: async (sql) => boundExecute(sql)
  };
}
2426
// Adapts a DatabaseAdapter into the { cloneDatabase, dropClone } pair used
// by the clone-test-apply pipeline, binding both methods to the adapter.
function buildCloneDbFromAdapter(adapter) {
  const { cloneDatabase, dropClone } = adapter;
  if (!cloneDatabase || !dropClone) {
    throw new Error("DatabaseAdapter must implement cloneDatabase and dropClone for clone-test-apply");
  }
  return {
    cloneDatabase: cloneDatabase.bind(adapter),
    dropClone: dropClone.bind(adapter)
  };
}
2435
// Returns a zero-argument introspection function for the adapter's database,
// dispatching to the Postgres or SQLite introspector based on dialect.
function buildIntrospector(adapter, dialect) {
  if (!adapter.queryRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw for introspection");
  }
  const boundQuery = adapter.queryRaw.bind(adapter);
  const runQuery = async (sql, params) => boundQuery(sql, params);
  if (dialect === "postgresql") {
    return () => introspectPostgres(runQuery);
  }
  return () => introspectSqlite(runQuery);
}
2447
// Parses CLI arguments for the migration commands.
// Shape: <configPath> plus optional flags:
//   --name <migrationName>, --dry-run, --test-only, --skip-clone-test
// Throws a usage error when no positional configPath is present.
// Fix: the value following --name is now treated as that flag's value and is
// excluded from the positional scan. Previously `--name foo cfg.ts` resolved
// configPath to "foo" because the scan took the first token not starting
// with "--".
function parseMigrationArgs(args) {
  let name;
  const nameIdx = args.indexOf("--name");
  if (nameIdx !== -1 && args[nameIdx + 1]) {
    name = args[nameIdx + 1];
  }
  const configPath = args.find(
    (a, i) => !a.startsWith("--") && !(nameIdx !== -1 && i === nameIdx + 1)
  );
  if (!configPath) {
    throw new Error("Usage: npx tsx <command>.ts <configPath> [options]");
  }
  return {
    configPath,
    name,
    dryRun: args.includes("--dry-run"),
    testOnly: args.includes("--test-only"),
    skipCloneTest: args.includes("--skip-clone-test")
  };
}
2465
// Annotate the CommonJS export names for ESM import in node:
// (dead code — the `0 &&` guard means it never executes; Node's
// cjs-module-lexer parses it statically so `import { x } from` works
// against this CJS bundle)
0 && (module.exports = {
  INTERNAL_TABLES,
  MIGRATION_TRACKING_TABLE,
  areTypesCompatible,
  buildCloneDbFromAdapter,
  buildContextFromAdapter,
  buildIntrospector,
  buildPushDbFromAdapter,
  buildTrackerFromAdapter,
  cloneTestApply,
  collectionToTableSnapshot,
  collectionsToSchema,
  computeSchemaChecksum,
  createDataHelpers,
  createSchemaSnapshot,
  deserializeSnapshot,
  detectDangers,
  diffSchemas,
  ensureTrackingTable,
  fieldToColumnType,
  fieldToPostgresType,
  fieldToSqliteType,
  generateMigrationFileContent,
  generateMigrationName,
  getAppliedMigrations,
  getLatestBatchNumber,
  getMigrationStatus,
  getMigrationsByBatch,
  getNextBatchNumber,
  getSnapshotPath,
  introspectPostgres,
  introspectSqlite,
  isMigrationApplied,
  loadMigrationsFromDisk,
  normalizeColumnType,
  operationToReverseSql,
  operationToSql,
  operationsToDownSql,
  operationsToUpSql,
  parseMigrationArgs,
  readSnapshot,
  recordMigration,
  removeMigrationRecord,
  resolveDialect,
  rollbackBatch,
  runMigrations,
  runPush,
  serializeSnapshot,
  writeSnapshot
});