@momentumcms/migrations 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +19 -12
- package/schematics/collection.json +25 -0
- package/schematics/generate/index.cjs +50 -0
- package/schematics/generate/index.js +25 -0
- package/schematics/generate/schema.d.ts +5 -0
- package/schematics/generate/schema.json +24 -0
- package/schematics/rollback/index.cjs +44 -0
- package/schematics/rollback/index.js +19 -0
- package/schematics/rollback/schema.d.ts +3 -0
- package/schematics/rollback/schema.json +15 -0
- package/schematics/run/index.cjs +50 -0
- package/schematics/run/index.js +25 -0
- package/schematics/run/schema.d.ts +5 -0
- package/schematics/run/schema.json +25 -0
- package/schematics/status/index.cjs +44 -0
- package/schematics/status/index.js +19 -0
- package/schematics/status/schema.d.ts +3 -0
- package/schematics/status/schema.json +15 -0
- package/src/cli/generate.cjs +1688 -0
- package/src/cli/generate.js +1686 -0
- package/src/cli/rollback.cjs +640 -0
- package/src/cli/rollback.js +638 -0
- package/src/cli/run.cjs +1091 -0
- package/src/cli/run.js +1097 -0
- package/src/cli/status.cjs +356 -0
- package/src/cli/status.js +354 -0
- package/CHANGELOG.md +0 -14
- package/LICENSE +0 -21
- /package/{index.cjs → src/index.cjs} +0 -0
- /package/{index.js → src/index.js} +0 -0
package/src/cli/run.js
ADDED
@@ -0,0 +1,1097 @@
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined")
    return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});

// libs/migrations/src/cli/run.ts
import { resolve } from "node:path";

// libs/core/src/lib/collections/define-collection.ts
function defineCollection(config) {
  const collection = {
    timestamps: true,
    // Enable timestamps by default
    ...config
  };
  if (!collection.slug) {
    throw new Error("Collection must have a slug");
  }
  if (!collection.fields || collection.fields.length === 0) {
    throw new Error(`Collection "${collection.slug}" must have at least one field`);
  }
  if (!/^[a-z][a-z0-9-]*$/.test(collection.slug)) {
    throw new Error(
      `Collection slug "${collection.slug}" must be kebab-case (lowercase letters, numbers, and hyphens, starting with a letter)`
    );
  }
  return collection;
}
// libs/core/src/lib/fields/field-builders.ts
function text(name, options = {}) {
  return {
    name,
    type: "text",
    ...options
  };
}
function number(name, options = {}) {
  return {
    name,
    type: "number",
    ...options
  };
}
function json(name, options = {}) {
  return {
    name,
    type: "json",
    ...options
  };
}

// libs/core/src/lib/collections/media.collection.ts
var MediaCollection = defineCollection({
  slug: "media",
  labels: {
    singular: "Media",
    plural: "Media"
  },
  upload: {
    mimeTypes: ["image/*", "application/pdf", "video/*", "audio/*"]
  },
  admin: {
    useAsTitle: "filename",
    defaultColumns: ["filename", "mimeType", "filesize", "createdAt"]
  },
  fields: [
    text("filename", {
      required: true,
      label: "Filename",
      description: "Original filename of the uploaded file"
    }),
    text("mimeType", {
      required: true,
      label: "MIME Type",
      description: "File MIME type (e.g., image/jpeg, application/pdf)"
    }),
    number("filesize", {
      label: "File Size",
      description: "File size in bytes"
    }),
    text("path", {
      label: "Storage Path",
      description: "Path/key where the file is stored",
      admin: {
        hidden: true
      }
    }),
    text("url", {
      label: "URL",
      description: "Public URL to access the file"
    }),
    text("alt", {
      label: "Alt Text",
      description: "Alternative text for accessibility"
    }),
    number("width", {
      label: "Width",
      description: "Image width in pixels (for images only)"
    }),
    number("height", {
      label: "Height",
      description: "Image height in pixels (for images only)"
    }),
    json("focalPoint", {
      label: "Focal Point",
      description: "Focal point coordinates for image cropping",
      admin: {
        hidden: true
      }
    })
  ],
  access: {
    // Media is readable by anyone by default
    read: () => true,
    // Only authenticated users can create/update/delete
    create: ({ req }) => !!req?.user,
    update: ({ req }) => !!req?.user,
    delete: ({ req }) => !!req?.user
  }
});
// libs/core/src/lib/migrations.ts
function resolveMigrationMode(mode) {
  if (mode === "push" || mode === "migrate")
    return mode;
  const env = process.env["NODE_ENV"];
  if (env === "production")
    return "migrate";
  return "push";
}
function resolveMigrationConfig(config) {
  if (!config)
    return void 0;
  const mode = resolveMigrationMode(config.mode);
  return {
    ...config,
    directory: config.directory ?? "./migrations",
    mode,
    cloneTest: config.cloneTest ?? mode === "migrate",
    dangerDetection: config.dangerDetection ?? true,
    autoApply: config.autoApply ?? mode === "push"
  };
}

// libs/migrations/src/lib/loader/migration-loader.ts
import { readdirSync, existsSync } from "node:fs";
import { join } from "node:path";
import { pathToFileURL } from "node:url";
var MIGRATION_FILE_PATTERN = /^\d{14}_.+\.ts$/;
function isMigrationFile(value) {
  if (typeof value !== "object" || value === null)
    return false;
  const obj = value;
  return "meta" in obj && typeof obj["meta"] === "object" && obj["meta"] !== null && "up" in obj && typeof obj["up"] === "function" && "down" in obj && typeof obj["down"] === "function";
}
function validateMigrationModule(mod, filePath) {
  const file = mod["default"] ?? mod;
  if (!isMigrationFile(file)) {
    if (typeof file !== "object" || file === null) {
      throw new Error(`Migration file ${filePath} does not export a valid module`);
    }
    if (!("meta" in file) || typeof file["meta"] !== "object") {
      throw new Error(`Migration file ${filePath} is missing a valid 'meta' export`);
    }
    if (!("up" in file) || typeof file["up"] !== "function") {
      throw new Error(`Migration file ${filePath} is missing an 'up' function export`);
    }
    if (!("down" in file) || typeof file["down"] !== "function") {
      throw new Error(`Migration file ${filePath} is missing a 'down' function export`);
    }
    throw new Error(`Migration file ${filePath} does not conform to MigrationFile interface`);
  }
  return file;
}
async function loadMigrationsFromDisk(directory) {
  if (!existsSync(directory))
    return [];
  const files = readdirSync(directory).filter((f) => MIGRATION_FILE_PATTERN.test(f)).sort();
  if (files.length === 0)
    return [];
  const migrations = [];
  for (const filename of files) {
    const filePath = join(directory, filename);
    const fileUrl = pathToFileURL(filePath).href;
    const mod = await import(fileUrl);
    const file = validateMigrationModule(mod, filePath);
    const name = filename.replace(/\.ts$/, "");
    migrations.push({ name, file });
  }
  return migrations;
}

// libs/migrations/src/lib/migration.types.ts
var MIGRATION_TRACKING_TABLE = "_momentum_migrations";

// libs/migrations/src/lib/tracking/migration-tracker.ts
async function ensureTrackingTable(db, dialect) {
  if (dialect === "postgresql") {
    await db.execute(`
      CREATE TABLE IF NOT EXISTS "${MIGRATION_TRACKING_TABLE}" (
        "id" VARCHAR(36) PRIMARY KEY,
        "name" VARCHAR(255) NOT NULL UNIQUE,
        "batch" INTEGER NOT NULL,
        "checksum" VARCHAR(64) NOT NULL,
        "appliedAt" TIMESTAMPTZ NOT NULL,
        "executionMs" INTEGER NOT NULL
      )
    `);
  } else {
    await db.execute(`
      CREATE TABLE IF NOT EXISTS "${MIGRATION_TRACKING_TABLE}" (
        "id" TEXT PRIMARY KEY,
        "name" TEXT NOT NULL UNIQUE,
        "batch" INTEGER NOT NULL,
        "checksum" TEXT NOT NULL,
        "appliedAt" TEXT NOT NULL,
        "executionMs" INTEGER NOT NULL
      )
    `);
  }
}
async function getAppliedMigrations(db) {
  const rows = await db.query(
    `SELECT * FROM "${MIGRATION_TRACKING_TABLE}" ORDER BY "batch" ASC, "name" ASC`
  );
  return rows.map(toTrackingRecord);
}
async function getNextBatchNumber(db) {
  const rows = await db.query(
    `SELECT MAX("batch") as max_batch FROM "${MIGRATION_TRACKING_TABLE}"`
  );
  const maxBatch = rows[0]?.max_batch;
  return (typeof maxBatch === "number" ? maxBatch : 0) + 1;
}
async function recordMigration(db, record, dialect) {
  const id = generateUUID();
  const full = { id, ...record };
  if (dialect === "postgresql") {
    await db.execute(
      `INSERT INTO "${MIGRATION_TRACKING_TABLE}" ("id", "name", "batch", "checksum", "appliedAt", "executionMs")
       VALUES ($1, $2, $3, $4, $5, $6)`,
      [full.id, full.name, full.batch, full.checksum, full.appliedAt, full.executionMs]
    );
  } else {
    await db.execute(
      `INSERT INTO "${MIGRATION_TRACKING_TABLE}" ("id", "name", "batch", "checksum", "appliedAt", "executionMs")
       VALUES (?, ?, ?, ?, ?, ?)`,
      [full.id, full.name, full.batch, full.checksum, full.appliedAt, full.executionMs]
    );
  }
  return full;
}
function toTrackingRecord(row2) {
  return {
    id: String(row2["id"]),
    name: String(row2["name"]),
    batch: Number(row2["batch"]),
    checksum: String(row2["checksum"]),
    appliedAt: String(row2["appliedAt"]),
    executionMs: Number(row2["executionMs"])
  };
}
function generateUUID() {
  const crypto = __require("node:crypto");
  return crypto.randomUUID();
}
// libs/migrations/src/lib/danger/danger-detector.ts
function detectDangers(operations, dialect) {
  const warnings = [];
  for (let i = 0; i < operations.length; i++) {
    const op = operations[i];
    warnings.push(...checkOperation(op, i, operations, dialect));
  }
  const severityOrder = { error: 0, warning: 1, info: 2 };
  warnings.sort((a, b) => severityOrder[a.severity] - severityOrder[b.severity]);
  return {
    warnings,
    hasErrors: warnings.some((w) => w.severity === "error"),
    hasWarnings: warnings.some((w) => w.severity === "warning")
  };
}
function checkOperation(op, index, _allOps, dialect) {
  const warnings = [];
  switch (op.type) {
    case "dropTable":
      warnings.push({
        severity: "error",
        operation: op,
        operationIndex: index,
        message: `Dropping table "${op.table}" will permanently delete all data.`,
        suggestion: 'Consider renaming the table with a deprecation prefix (e.g., "_deprecated_") and scheduling deletion after verifying no data is needed.'
      });
      break;
    case "dropColumn":
      warnings.push({
        severity: "warning",
        operation: op,
        operationIndex: index,
        message: `Dropping column "${op.table}"."${op.column}" will permanently delete all values in this column.`,
        suggestion: "Before dropping, verify the column data is either migrated elsewhere or truly unneeded. Consider a backup or data export first."
      });
      break;
    case "alterColumnType":
      warnings.push(...checkTypeChange(op, index, dialect));
      break;
    case "alterColumnNullable":
      if (!op.nullable) {
        warnings.push({
          severity: "warning",
          operation: op,
          operationIndex: index,
          message: `Setting "${op.table}"."${op.column}" to NOT NULL may fail if existing rows contain NULL values.`,
          suggestion: "First backfill NULL values with a default (e.g., UPDATE table SET column = 'default' WHERE column IS NULL), then add the NOT NULL constraint."
        });
      }
      break;
    case "addColumn":
      if (!op.nullable && !op.defaultValue) {
        warnings.push({
          severity: "error",
          operation: op,
          operationIndex: index,
          message: `Adding NOT NULL column "${op.table}"."${op.column}" without a default value will fail if the table has existing rows.`,
          suggestion: "Either add a DEFAULT value, make the column nullable first and backfill, or add the column as nullable, backfill, then alter to NOT NULL."
        });
      }
      break;
    case "renameColumn":
      warnings.push({
        severity: "warning",
        operation: op,
        operationIndex: index,
        message: `Renaming "${op.table}"."${op.from}" to "${op.to}" may break application code that references the old name.`,
        suggestion: "Deploy application code changes to use the new column name before or alongside the migration. Consider a phased approach: add new column, migrate data, update code, then drop old column."
      });
      break;
    case "renameTable":
      warnings.push({
        severity: "warning",
        operation: op,
        operationIndex: index,
        message: `Renaming table "${op.from}" to "${op.to}" may break application code and queries.`,
        suggestion: "Update application code to use the new table name before or alongside the migration."
      });
      break;
    case "addForeignKey":
      if (dialect === "postgresql") {
        warnings.push({
          severity: "info",
          operation: op,
          operationIndex: index,
          message: `Adding foreign key "${op.constraintName}" acquires an ACCESS EXCLUSIVE lock on the referenced table.`,
          suggestion: "On large tables, consider adding the FK constraint with NOT VALID first, then validating separately: ALTER TABLE ... ADD CONSTRAINT ... NOT VALID; ALTER TABLE ... VALIDATE CONSTRAINT ..."
        });
      }
      break;
    case "createIndex":
      if (dialect === "postgresql" && !isCreateIndexConcurrent(op)) {
        warnings.push({
          severity: "info",
          operation: op,
          operationIndex: index,
          message: `Creating index "${op.indexName}" will lock "${op.table}" for writes during index creation.`,
          suggestion: "For large tables, consider CREATE INDEX CONCURRENTLY to avoid blocking writes (requires running outside a transaction)."
        });
      }
      break;
  }
  return warnings;
}
function checkTypeChange(op, index, dialect) {
  const warnings = [];
  if (dialect === "sqlite") {
    warnings.push({
      severity: "error",
      operation: op,
      operationIndex: index,
      message: `SQLite does not support ALTER COLUMN TYPE. Changing "${op.table}"."${op.column}" from ${op.fromType} to ${op.toType} requires a table rebuild.`,
      suggestion: "Create a new table with the desired schema, copy data, drop old table, and rename new table. Use a raw SQL migration for this."
    });
    return warnings;
  }
  if (isLossyTypeChange(op.fromType, op.toType)) {
    warnings.push({
      severity: "warning",
      operation: op,
      operationIndex: index,
      message: `Changing "${op.table}"."${op.column}" from ${op.fromType} to ${op.toType} may cause data loss or cast errors.`,
      suggestion: "Test the type conversion on a clone database first. Consider adding a USING clause with explicit cast logic."
    });
  }
  if (isTableRewriteType(op.fromType, op.toType)) {
    warnings.push({
      severity: "info",
      operation: op,
      operationIndex: index,
      message: `Changing "${op.table}"."${op.column}" from ${op.fromType} to ${op.toType} may require a table rewrite on large tables.`,
      suggestion: "On large tables, this can take significant time and lock the table. Consider running during low-traffic periods or using a phased approach."
    });
  }
  return warnings;
}
function isLossyTypeChange(from, to) {
  const fromUpper = from.toUpperCase();
  const toUpper = to.toUpperCase();
  if (isTextType(fromUpper) && isNumericType(toUpper))
    return true;
  if (fromUpper === "NUMERIC" && (toUpper === "INTEGER" || toUpper === "SMALLINT"))
    return true;
  if (fromUpper === "BIGINT" && (toUpper === "INTEGER" || toUpper === "SMALLINT"))
    return true;
  if (fromUpper === "DOUBLE PRECISION" && toUpper === "REAL")
    return true;
  if ((fromUpper === "JSONB" || fromUpper === "JSON") && !fromUpper.includes("JSON"))
    return true;
  if (fromUpper.includes("TIMESTAMP") && toUpper === "DATE")
    return true;
  const fromLength = extractLength(fromUpper);
  const toLength = extractLength(toUpper);
  if (fromLength && toLength && toLength < fromLength)
    return true;
  return false;
}
function isTableRewriteType(from, to) {
  const fromUpper = from.toUpperCase();
  const toUpper = to.toUpperCase();
  if (fromUpper.startsWith("VARCHAR") && toUpper === "TEXT")
    return false;
  if (fromUpper === "TEXT" && toUpper.startsWith("VARCHAR"))
    return true;
  if (isNumericType(fromUpper) !== isNumericType(toUpper))
    return true;
  return false;
}
function isTextType(type) {
  return type === "TEXT" || type.startsWith("VARCHAR") || type.startsWith("CHAR");
}
function isNumericType(type) {
  return ["INTEGER", "BIGINT", "SMALLINT", "NUMERIC", "REAL", "DOUBLE PRECISION", "FLOAT"].includes(
    type
  );
}
function extractLength(type) {
  const match = type.match(/\((\d+)\)/);
  return match ? parseInt(match[1], 10) : null;
}
function isCreateIndexConcurrent(_op) {
  return false;
}
// libs/migrations/src/lib/runner/migrate-runner.ts
import { createHash } from "node:crypto";
async function runMigrations(options) {
  const { migrations, dialect, tracker, buildContext, skipDangerDetection, log } = options;
  const noop = {
    info: () => {
    },
    warn: () => {
    }
  };
  const logger = log ?? noop;
  await ensureTrackingTable(tracker, dialect);
  const applied = await getAppliedMigrations(tracker);
  const appliedNames = new Set(applied.map((m) => m.name));
  const pending = migrations.filter((m) => !appliedNames.has(m.name));
  if (pending.length === 0) {
    logger.info("No pending migrations.");
    return {
      batch: 0,
      results: [],
      successCount: 0,
      failCount: 0,
      dangers: null
    };
  }
  let dangers = null;
  if (!skipDangerDetection) {
    const allOps = pending.flatMap((m) => {
      const ops = m.file.meta.operations;
      if (!ops)
        return [];
      return ops.filter((op) => typeof op.type === "string").map(toMigrationOperation);
    });
    if (allOps.length > 0) {
      dangers = detectDangers(allOps, dialect);
      if (dangers.hasErrors) {
        logger.warn("Dangerous operations detected. Migration blocked.");
        return {
          batch: 0,
          results: [],
          successCount: 0,
          failCount: 0,
          dangers
        };
      }
    }
  }
  const batch = await getNextBatchNumber(tracker);
  logger.info(`Running ${pending.length} migration(s) in batch ${batch}...`);
  const results = [];
  const ctx = buildContext();
  for (const migration of pending) {
    const start = Date.now();
    try {
      await migration.file.up(ctx);
      const executionMs = Date.now() - start;
      const checksum = computeMigrationChecksum(migration);
      await recordMigration(
        tracker,
        {
          name: migration.name,
          batch,
          checksum,
          appliedAt: (/* @__PURE__ */ new Date()).toISOString(),
          executionMs
        },
        dialect
      );
      results.push({ name: migration.name, success: true, executionMs });
      logger.info(` OK: ${migration.name} (${executionMs}ms)`);
    } catch (err) {
      const executionMs = Date.now() - start;
      const errMsg = err instanceof Error ? err.message : String(err);
      const errorCode = extractErrorCode(err);
      results.push({ name: migration.name, success: false, executionMs, error: errMsg, errorCode });
      logger.warn(` FAILED: ${migration.name} \u2014 ${errMsg}`);
      break;
    }
  }
  const successCount = results.filter((r) => r.success).length;
  const failCount = results.filter((r) => !r.success).length;
  logger.info(`Batch ${batch}: ${successCount} applied, ${failCount} failed.`);
  return { batch, results, successCount, failCount, dangers };
}
function extractErrorCode(err) {
  if (err !== null && typeof err === "object" && "code" in err && typeof err.code === "string") {
    return err.code;
  }
  return void 0;
}
function toMigrationOperation(op) {
  return op;
}
function computeMigrationChecksum(migration) {
  const content = JSON.stringify(migration.file.meta) + migration.file.up.toString() + migration.file.down.toString();
  return createHash("sha256").update(content).digest("hex");
}
// libs/migrations/src/lib/runner/clone-test-apply.ts
async function cloneTestApply(options) {
  const {
    migrations,
    dialect,
    tracker,
    buildContext,
    db,
    buildCloneTracker,
    buildCloneContext,
    testOnly,
    skipDangerDetection,
    log
  } = options;
  const noop = { info: () => {
  }, warn: () => {
  } };
  const logger = log ?? noop;
  const timestamp = Date.now();
  const cloneName = `_mig_clone_${timestamp}`;
  let cloneCleanedUp = false;
  const result = {
    phase: "clone",
    cloneResult: null,
    applyResult: null,
    dangers: null,
    cloneCleanedUp: false,
    cloneName,
    suggestions: []
  };
  try {
    logger.info(`Cloning database to "${cloneName}"...`);
    await db.cloneDatabase(cloneName);
    logger.info("Clone created successfully.");
    result.phase = "test";
    logger.info("Running migrations on clone...");
    const cloneTracker = buildCloneTracker(cloneName);
    const cloneContext = buildCloneContext(cloneName);
    const cloneResult = await runMigrations({
      migrations,
      dialect,
      tracker: cloneTracker,
      buildContext: () => cloneContext,
      skipDangerDetection,
      log: {
        info: (msg) => logger.info(`[clone] ${msg}`),
        warn: (msg) => logger.warn(`[clone] ${msg}`)
      }
    });
    result.cloneResult = cloneResult;
    result.dangers = cloneResult.dangers;
    if (cloneResult.failCount > 0) {
      const suggestions = generateFixSuggestions(cloneResult);
      result.suggestions = suggestions;
      result.error = `Migration failed on clone: ${cloneResult.results.find((r) => !r.success)?.error}`;
      logger.warn("Migration failed on clone. Suggestions:");
      for (const suggestion of suggestions) {
        logger.warn(` - ${suggestion}`);
      }
      await cleanupClone(db, cloneName, logger);
      result.cloneCleanedUp = true;
      cloneCleanedUp = true;
      return result;
    }
    logger.info(`Clone test passed: ${cloneResult.successCount} migration(s) applied.`);
    await cleanupClone(db, cloneName, logger);
    result.cloneCleanedUp = true;
    cloneCleanedUp = true;
    if (testOnly) {
      result.phase = "skipped";
      logger.info("Test-only mode: skipping real database apply.");
      return result;
    }
    result.phase = "apply";
    logger.info("Applying migrations to real database...");
    const applyResult = await runMigrations({
      migrations,
      dialect,
      tracker,
      buildContext,
      skipDangerDetection: true,
      // Already validated on clone
      log: logger
    });
    result.applyResult = applyResult;
    if (applyResult.failCount > 0) {
      result.error = `Migration failed on real database: ${applyResult.results.find((r) => !r.success)?.error}`;
      return result;
    }
    result.phase = "complete";
    logger.info(
      `Pipeline complete: ${applyResult.successCount} migration(s) applied to real database.`
    );
    return result;
  } catch (err) {
    const errMsg = err instanceof Error ? err.message : String(err);
    result.error = errMsg;
    logger.warn(`Pipeline error: ${errMsg}`);
    if (!cloneCleanedUp) {
      await cleanupClone(db, cloneName, logger);
      result.cloneCleanedUp = true;
    }
    return result;
  }
}
async function cleanupClone(db, cloneName, logger) {
  try {
    await db.dropClone(cloneName);
    logger.info(`Clone "${cloneName}" cleaned up.`);
  } catch (err) {
    const errMsg = err instanceof Error ? err.message : String(err);
    logger.warn(`Failed to clean up clone "${cloneName}": ${errMsg}`);
  }
}
var SQLSTATE_SUGGESTIONS = {
  // Class 23 — Integrity Constraint Violation
  "23502": "NOT NULL constraint violation. Backfill NULL values before adding the constraint: Use ctx.data.backfill(table, column, defaultValue) in a prior migration step.",
  "23505": "Unique constraint violation. Use ctx.data.dedup() to remove duplicates before adding the unique constraint.",
  "23503": "Foreign key constraint violation. Ensure referenced data exists before adding the constraint. Consider using ctx.data.backfill() to populate references, or add the FK with NOT VALID first.",
  // Class 42 — Syntax Error or Access Rule Violation
  "42P07": "The table already exists. Check if a previous migration already created it, or use IF NOT EXISTS in your DDL.",
  "42701": "The column already exists. Check if a previous migration already created it, or use IF NOT EXISTS in your DDL.",
  "42P01": "Referenced table does not exist. Check migration ordering \u2014 the table must be created before it can be referenced.",
  "42703": "Referenced column does not exist. Check migration ordering \u2014 the column must be created before it can be referenced.",
  // Class 22 — Data Exception
  "22P02": "Type conversion error. The data contains values that cannot be converted to the target type. Use ctx.data.transform() to clean up values before altering the column type.",
  "42804": "Type conversion error. Add an explicit USING clause for the type change, or use ctx.data.transform() to convert values before altering the column type."
};
function generateFixSuggestions(result) {
  const suggestions = [];
  const failedMigration = result.results.find((r) => !r.success);
  if (!failedMigration)
    return suggestions;
  if (failedMigration.errorCode) {
    const codeSuggestion = SQLSTATE_SUGGESTIONS[failedMigration.errorCode];
    if (codeSuggestion) {
      suggestions.push(codeSuggestion);
      return suggestions;
    }
  }
  const error = failedMigration.error ?? "";
  if (error.includes("NOT NULL") && error.includes("contains null")) {
    suggestions.push(
      "Backfill NULL values before adding NOT NULL constraint: Use ctx.data.backfill(table, column, defaultValue) in a prior migration step."
    );
  }
  if (error.includes("already exists")) {
    suggestions.push(
      "The column or table already exists. Check if a previous migration already created it, or use IF NOT EXISTS in your DDL."
    );
  }
  if (error.includes("violates foreign key")) {
    suggestions.push(
      "Foreign key constraint violation. Ensure referenced data exists before adding the constraint. Consider using ctx.data.backfill() to populate references, or add the FK with NOT VALID first."
    );
  }
  if (error.includes("does not exist")) {
    suggestions.push(
      "Referenced table or column does not exist. Check migration ordering \u2014 the table/column must be created before it can be referenced."
    );
  }
  if (error.includes("unique constraint") || error.includes("duplicate key")) {
    suggestions.push(
      "Unique constraint violation. Use ctx.data.dedup() to remove duplicates before adding the unique constraint."
    );
  }
  if (error.includes("type") && (error.includes("cast") || error.includes("convert"))) {
    suggestions.push(
      "Type conversion error. Add an explicit USING clause for the type change, or use ctx.data.transform() to convert values before altering the column type."
    );
  }
  if (suggestions.length === 0) {
    suggestions.push(
      `Migration "${failedMigration.name}" failed with: ${error}. Review the migration SQL and test on a development database.`
    );
  }
  return suggestions;
}

// libs/migrations/src/cli/shared.ts
import { pathToFileURL as pathToFileURL2 } from "node:url";
// libs/migrations/src/lib/helpers/data-helpers.ts
function createDataHelpers(db, dialect) {
  const ph = (index) => dialect === "postgresql" ? `$${index}` : "?";
  return {
    async backfill(table, column, value, options) {
      const where = options?.where ? ` AND (${options.where})` : "";
      const batchSize = options?.batchSize ?? 1e3;
      let totalAffected = 0;
      if (dialect === "postgresql") {
        let affected;
        do {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${ph(1)}
             WHERE ctid IN (
               SELECT ctid FROM "${table}"
               WHERE "${column}" IS NULL${where}
               LIMIT ${batchSize}
             )`,
            [value]
          );
          totalAffected += affected;
        } while (affected >= batchSize);
      } else {
        let affected;
        do {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${ph(1)}
             WHERE rowid IN (
               SELECT rowid FROM "${table}"
               WHERE "${column}" IS NULL${where}
               LIMIT ${batchSize}
             )`,
            [value]
          );
          totalAffected += affected;
        } while (affected >= batchSize);
      }
      return totalAffected;
    },
    async transform(table, column, sqlExpression, options) {
      const where = options?.where ? ` WHERE ${options.where}` : "";
      const batchSize = options?.batchSize ?? 0;
      if (batchSize <= 0) {
        return db.execute(
          `UPDATE "${table}" SET "${column}" = ${sqlExpression}${where}`
        );
      }
      let totalAffected = 0;
      let affected;
      do {
        if (dialect === "postgresql") {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${sqlExpression}
             WHERE ctid IN (
               SELECT ctid FROM "${table}"${where}
               LIMIT ${batchSize}
             )`
          );
        } else {
          affected = await db.execute(
            `UPDATE "${table}" SET "${column}" = ${sqlExpression}
             WHERE rowid IN (
               SELECT rowid FROM "${table}"${where}
               LIMIT ${batchSize}
             )`
          );
        }
        totalAffected += affected;
      } while (affected >= batchSize);
      return totalAffected;
    },
    async renameColumn(table, from, to, columnType) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${to}" ${columnType}`
      );
      await db.execute(
        `UPDATE "${table}" SET "${to}" = "${from}"`
      );
      await db.execute(
        `ALTER TABLE "${table}" DROP COLUMN "${from}"`
      );
    },
    async splitColumn(table, _sourceColumn, targets) {
      for (const target of targets) {
        await db.execute(
          `ALTER TABLE "${table}" ADD COLUMN "${target.name}" ${target.type}`
        );
        await db.execute(
          `UPDATE "${table}" SET "${target.name}" = ${target.expression}`
        );
      }
    },
    async mergeColumns(table, _sourceColumns, targetColumn, targetType, mergeExpression) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${targetColumn}" ${targetType}`
      );
      await db.execute(
        `UPDATE "${table}" SET "${targetColumn}" = ${mergeExpression}`
      );
    },
    async copyData(sourceTable, targetTable, columnMapping, options) {
      const targetCols = [];
      const sourceCols = [];
      for (const [target, source] of Object.entries(columnMapping)) {
        targetCols.push(`"${target}"`);
        if (typeof source === "string") {
          sourceCols.push(`"${source}"`);
        } else {
          sourceCols.push(source.expression);
        }
      }
      const where = options?.where ? ` WHERE ${options.where}` : "";
      const affected = await db.execute(
        `INSERT INTO "${targetTable}" (${targetCols.join(", ")})
         SELECT ${sourceCols.join(", ")} FROM "${sourceTable}"${where}`
      );
      return affected;
    },
    async columnToJson(table, sourceColumn, jsonColumn, jsonKey) {
      if (dialect === "postgresql") {
        await db.execute(
          `UPDATE "${table}" SET "${jsonColumn}" = COALESCE("${jsonColumn}", '{}'::jsonb) || jsonb_build_object('${jsonKey}', "${sourceColumn}")`
        );
      } else {
        await db.execute(
          `UPDATE "${table}" SET "${jsonColumn}" = json_set(COALESCE("${jsonColumn}", '{}'), '$.${jsonKey}', "${sourceColumn}")`
        );
      }
    },
    async jsonToColumn(table, jsonColumn, jsonKey, targetColumn, targetType) {
      await db.execute(
        `ALTER TABLE "${table}" ADD COLUMN "${targetColumn}" ${targetType}`
      );
      if (dialect === "postgresql") {
        await db.execute(
          `UPDATE "${table}" SET "${targetColumn}" = "${jsonColumn}"->>'${jsonKey}'`
        );
      } else {
        await db.execute(
          `UPDATE "${table}" SET "${targetColumn}" = json_extract("${jsonColumn}", '$.${jsonKey}')`
        );
      }
    },
    async dedup(table, columns, keepStrategy = "latest") {
      const colList = columns.map((c) => `"${c}"`).join(", ");
      let orderBy;
      switch (keepStrategy) {
        case "earliest":
          orderBy = '"createdAt" ASC';
          break;
        case "first":
          orderBy = dialect === "postgresql" ? "ctid ASC" : "rowid ASC";
          break;
        default:
          orderBy = '"createdAt" DESC';
          break;
      }
      if (dialect === "postgresql") {
        return db.execute(
          `DELETE FROM "${table}" WHERE ctid NOT IN (
            SELECT DISTINCT ON (${colList}) ctid
            FROM "${table}"
            ORDER BY ${colList}, ${orderBy}
          )`
        );
      }
      return db.execute(
        `DELETE FROM "${table}" WHERE rowid NOT IN (
          SELECT MIN(rowid) FROM "${table}"
          GROUP BY ${colList}
        )`
      );
    }
  };
}
// libs/migrations/src/cli/shared.ts
function isResolvedConfig(value) {
  return typeof value === "object" && value !== null && "collections" in value && "db" in value;
}
async function loadMomentumConfig(configPath) {
  const configUrl = pathToFileURL2(configPath).href;
  const mod = await import(configUrl);
  const raw = mod["default"] ?? mod;
  if (!isResolvedConfig(raw)) {
    throw new Error(`Config at ${configPath} is not a valid ResolvedMomentumConfig`);
  }
  if (!raw.db?.adapter) {
    throw new Error(`Config at ${configPath} is missing db.adapter`);
  }
  if (!raw.collections || raw.collections.length === 0) {
    throw new Error(`Config at ${configPath} has no collections`);
  }
  return raw;
}
function resolveDialect(adapter) {
  if (!adapter.dialect) {
    throw new Error(
      "DatabaseAdapter.dialect is not set. Ensure your adapter factory (postgresAdapter/sqliteAdapter) sets the dialect property."
    );
  }
  return adapter.dialect;
}
function buildTrackerFromAdapter(adapter) {
  if (!adapter.queryRaw || !adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw and executeRaw for migration tracking");
  }
  const queryRaw = adapter.queryRaw.bind(adapter);
  const executeRaw = adapter.executeRaw.bind(adapter);
  return {
    async query(sql, params) {
      return queryRaw(sql, params);
    },
    async execute(sql, params) {
      return executeRaw(sql, params);
    }
  };
}
function buildContextFromAdapter(adapter, dialect) {
  if (!adapter.queryRaw || !adapter.executeRaw) {
    throw new Error("DatabaseAdapter must implement queryRaw and executeRaw for migration context");
  }
  const queryRaw = adapter.queryRaw.bind(adapter);
  const executeRaw = adapter.executeRaw.bind(adapter);
  const dataDb = {
    async execute(sql, params) {
      return executeRaw(sql, params);
    },
    async query(sql, params) {
      return queryRaw(sql, params);
    }
  };
  const helpers = createDataHelpers(dataDb, dialect);
  return {
    async sql(query, params) {
      await executeRaw(query, params);
    },
    async query(sql, params) {
      return queryRaw(sql, params);
    },
    data: helpers,
    dialect,
    log: {
      info: (msg) => {
        console.warn(`[migration] ${msg}`);
      },
      warn: (msg) => {
        console.warn(`[migration:warn] ${msg}`);
      }
    }
  };
}
function buildCloneDbFromAdapter(adapter) {
  if (!adapter.cloneDatabase || !adapter.dropClone) {
    throw new Error("DatabaseAdapter must implement cloneDatabase and dropClone for clone-test-apply");
  }
  return {
    cloneDatabase: adapter.cloneDatabase.bind(adapter),
    dropClone: adapter.dropClone.bind(adapter)
  };
}
function parseMigrationArgs(args) {
  const configPath = args.find((a) => !a.startsWith("--"));
  if (!configPath) {
    throw new Error("Usage: npx tsx <command>.ts <configPath> [options]");
  }
  let name;
  const nameIdx = args.indexOf("--name");
  if (nameIdx !== -1 && args[nameIdx + 1]) {
    name = args[nameIdx + 1];
  }
  return {
    configPath,
    name,
    dryRun: args.includes("--dry-run"),
    testOnly: args.includes("--test-only"),
    skipCloneTest: args.includes("--skip-clone-test")
  };
}
// libs/migrations/src/cli/run.ts
async function main() {
  const args = parseMigrationArgs(process.argv.slice(2));
  const config = await loadMomentumConfig(resolve(args.configPath));
  const adapter = config.db.adapter;
  const dialect = resolveDialect(adapter);
  const migrationConfig = resolveMigrationConfig(config.migrations ?? {});
  if (!migrationConfig) {
    console.warn("No migration config found.");
    process.exit(1);
  }
  const directory = resolve(migrationConfig.directory);
  const migrations = await loadMigrationsFromDisk(directory);
  if (migrations.length === 0) {
    console.warn("No migration files found in", directory);
    return;
  }
  const tracker = buildTrackerFromAdapter(adapter);
  const buildContext = () => buildContextFromAdapter(adapter, dialect);
  const log = {
    info: (msg) => console.warn(`[migration] ${msg}`),
    warn: (msg) => console.warn(`[migration:warn] ${msg}`)
  };
  const useCloneTest = !args.skipCloneTest && migrationConfig.cloneTest && dialect === "postgresql";
  if (useCloneTest) {
    const db = buildCloneDbFromAdapter(adapter);
    const result = await cloneTestApply({
      migrations,
      dialect,
      tracker,
      buildContext,
      db,
      buildCloneTracker: (_cloneName) => tracker,
      buildCloneContext: (_cloneName) => buildContext(),
      testOnly: args.testOnly,
      log
    });
    const cloneSuccess = result.cloneResult ? result.cloneResult.failCount === 0 : false;
    console.warn(`
Clone test: ${cloneSuccess ? "PASSED" : "FAILED"}`);
    if (!cloneSuccess && result.error) {
      console.error(`Clone error: ${result.error}`);
      if (result.suggestions.length > 0) {
        console.warn(`Suggestion: ${result.suggestions[0]}`);
      }
    }
    if (result.applyResult) {
      console.warn(
        `Applied: ${result.applyResult.successCount} migration(s) in batch ${result.applyResult.batch}`
      );
      if (result.applyResult.failCount > 0) {
        console.error(`Failed: ${result.applyResult.failCount} migration(s)`);
      }
    }
    if (!cloneSuccess) {
      process.exit(1);
    }
  } else {
    const result = await runMigrations({
      migrations,
      dialect,
      tracker,
      buildContext,
      log
    });
    console.warn(`
Applied: ${result.successCount} migration(s) in batch ${result.batch}`);
    if (result.failCount > 0) {
      console.error(`Failed: ${result.failCount} migration(s)`);
      for (const r of result.results) {
        if (!r.success) {
          console.error(` ${r.name}: ${r.error}`);
        }
      }
      process.exit(1);
    }
  }
}
main().catch((err) => {
  console.error("Migration run failed:", err);
  process.exit(1);
});
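Note on the migration file format this CLI consumes: loadMigrationsFromDisk only picks up files in the configured migrations directory whose names match /^\d{14}_.+\.ts$/, and validateMigrationModule requires the module (or its default export) to provide a meta object plus up and down functions. The sketch below is a hypothetical example of such a file, not part of this package; the table and column names are invented, meta.operations is optional (entries with a string type are fed to detectDangers before anything runs), and the untyped ctx simply mirrors the context built by buildContextFromAdapter (sql, query, data, dialect, log).

// migrations/20240101120000_add_caption_to_media.ts (hypothetical sketch, not shipped in this package)
export default {
  meta: {
    // Optional: used only for danger detection before the migration is applied.
    operations: [{ type: "addColumn", table: "media", column: "caption", nullable: true }]
  },
  async up(ctx: any) {
    // ctx.sql executes raw SQL through the configured adapter.
    await ctx.sql(`ALTER TABLE "media" ADD COLUMN "caption" TEXT`);
    // ctx.data exposes the data helpers bundled above (backfill, transform, dedup, ...).
    await ctx.data.backfill("media", "caption", "");
  },
  async down(ctx: any) {
    await ctx.sql(`ALTER TABLE "media" DROP COLUMN "caption"`);
  }
};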