@javalabs/prisma-client 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +220 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +34 -0
- package/dist/index.js.map +1 -0
- package/dist/prisma-factory.service.d.ts +9 -0
- package/dist/prisma-factory.service.js +47 -0
- package/dist/prisma-factory.service.js.map +1 -0
- package/dist/prisma.module.d.ts +2 -0
- package/dist/prisma.module.js +23 -0
- package/dist/prisma.module.js.map +1 -0
- package/dist/prisma.service.d.ts +6 -0
- package/dist/prisma.service.js +27 -0
- package/dist/prisma.service.js.map +1 -0
- package/dist/scripts/create-tenant-schemas.d.ts +1 -0
- package/dist/scripts/create-tenant-schemas.js +117 -0
- package/dist/scripts/create-tenant-schemas.js.map +1 -0
- package/dist/scripts/data-migration/batch-migrator.d.ts +25 -0
- package/dist/scripts/data-migration/batch-migrator.js +333 -0
- package/dist/scripts/data-migration/batch-migrator.js.map +1 -0
- package/dist/scripts/data-migration/data-transformer.d.ts +17 -0
- package/dist/scripts/data-migration/data-transformer.js +242 -0
- package/dist/scripts/data-migration/data-transformer.js.map +1 -0
- package/dist/scripts/data-migration/db-connector.d.ts +7 -0
- package/dist/scripts/data-migration/db-connector.js +58 -0
- package/dist/scripts/data-migration/db-connector.js.map +1 -0
- package/dist/scripts/data-migration/dependency-manager.d.ts +9 -0
- package/dist/scripts/data-migration/dependency-manager.js +86 -0
- package/dist/scripts/data-migration/dependency-manager.js.map +1 -0
- package/dist/scripts/data-migration/dependency-resolver.d.ts +18 -0
- package/dist/scripts/data-migration/dependency-resolver.js +251 -0
- package/dist/scripts/data-migration/dependency-resolver.js.map +1 -0
- package/dist/scripts/data-migration/entity-discovery.d.ts +11 -0
- package/dist/scripts/data-migration/entity-discovery.js +152 -0
- package/dist/scripts/data-migration/entity-discovery.js.map +1 -0
- package/dist/scripts/data-migration/foreign-key-manager.d.ts +17 -0
- package/dist/scripts/data-migration/foreign-key-manager.js +70 -0
- package/dist/scripts/data-migration/foreign-key-manager.js.map +1 -0
- package/dist/scripts/data-migration/migration-phases.d.ts +5 -0
- package/dist/scripts/data-migration/migration-phases.js +55 -0
- package/dist/scripts/data-migration/migration-phases.js.map +1 -0
- package/dist/scripts/data-migration/migration-tool.d.ts +29 -0
- package/dist/scripts/data-migration/migration-tool.js +250 -0
- package/dist/scripts/data-migration/migration-tool.js.map +1 -0
- package/dist/scripts/data-migration/phase-generator.d.ts +15 -0
- package/dist/scripts/data-migration/phase-generator.js +187 -0
- package/dist/scripts/data-migration/phase-generator.js.map +1 -0
- package/dist/scripts/data-migration/schema-utils.d.ts +18 -0
- package/dist/scripts/data-migration/schema-utils.js +164 -0
- package/dist/scripts/data-migration/schema-utils.js.map +1 -0
- package/dist/scripts/data-migration/tenant-migrator.d.ts +15 -0
- package/dist/scripts/data-migration/tenant-migrator.js +110 -0
- package/dist/scripts/data-migration/tenant-migrator.js.map +1 -0
- package/dist/scripts/data-migration/typecast-manager.d.ts +5 -0
- package/dist/scripts/data-migration/typecast-manager.js +35 -0
- package/dist/scripts/data-migration/typecast-manager.js.map +1 -0
- package/dist/scripts/data-migration/types.d.ts +34 -0
- package/dist/scripts/data-migration/types.js +3 -0
- package/dist/scripts/data-migration/types.js.map +1 -0
- package/dist/scripts/data-migration.d.ts +22 -0
- package/dist/scripts/data-migration.js +593 -0
- package/dist/scripts/data-migration.js.map +1 -0
- package/dist/scripts/drop-database.d.ts +10 -0
- package/dist/scripts/drop-database.js +81 -0
- package/dist/scripts/drop-database.js.map +1 -0
- package/dist/scripts/error-handler.d.ts +12 -0
- package/dist/scripts/error-handler.js +82 -0
- package/dist/scripts/error-handler.js.map +1 -0
- package/dist/scripts/fix-data-types.d.ts +10 -0
- package/dist/scripts/fix-data-types.js +185 -0
- package/dist/scripts/fix-data-types.js.map +1 -0
- package/dist/scripts/fix-enum-values.d.ts +17 -0
- package/dist/scripts/fix-enum-values.js +234 -0
- package/dist/scripts/fix-enum-values.js.map +1 -0
- package/dist/scripts/fix-schema-discrepancies.d.ts +21 -0
- package/dist/scripts/fix-schema-discrepancies.js +240 -0
- package/dist/scripts/fix-schema-discrepancies.js.map +1 -0
- package/dist/scripts/migrate-schema-structure.d.ts +1 -0
- package/dist/scripts/migrate-schema-structure.js +76 -0
- package/dist/scripts/migrate-schema-structure.js.map +1 -0
- package/dist/scripts/post-migration-validator.d.ts +21 -0
- package/dist/scripts/post-migration-validator.js +341 -0
- package/dist/scripts/post-migration-validator.js.map +1 -0
- package/dist/scripts/pre-migration-validator.d.ts +25 -0
- package/dist/scripts/pre-migration-validator.js +491 -0
- package/dist/scripts/pre-migration-validator.js.map +1 -0
- package/dist/scripts/reset-database.d.ts +17 -0
- package/dist/scripts/reset-database.js +202 -0
- package/dist/scripts/reset-database.js.map +1 -0
- package/dist/scripts/retry-failed-migrations.d.ts +14 -0
- package/dist/scripts/retry-failed-migrations.js +301 -0
- package/dist/scripts/retry-failed-migrations.js.map +1 -0
- package/dist/scripts/run-migration.d.ts +1 -0
- package/dist/scripts/run-migration.js +525 -0
- package/dist/scripts/run-migration.js.map +1 -0
- package/dist/scripts/schema-sync.d.ts +1 -0
- package/dist/scripts/schema-sync.js +85 -0
- package/dist/scripts/schema-sync.js.map +1 -0
- package/dist/scripts/sync-enum-types.d.ts +13 -0
- package/dist/scripts/sync-enum-types.js +139 -0
- package/dist/scripts/sync-enum-types.js.map +1 -0
- package/dist/scripts/sync-enum-values.d.ts +20 -0
- package/dist/scripts/sync-enum-values.js +336 -0
- package/dist/scripts/sync-enum-values.js.map +1 -0
- package/dist/scripts/truncate-database.d.ts +10 -0
- package/dist/scripts/truncate-database.js +100 -0
- package/dist/scripts/truncate-database.js.map +1 -0
- package/dist/scripts/verify-migration-setup.d.ts +11 -0
- package/dist/scripts/verify-migration-setup.js +120 -0
- package/dist/scripts/verify-migration-setup.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/migration-config-public.json +95 -0
- package/migration-config.json +95 -0
- package/package.json +33 -0
- package/prisma/migrations/migration_lock.toml +3 -0
- package/prisma/schema.prisma +360 -0
- package/src/index.ts +23 -0
- package/src/prisma-factory.service.ts +41 -0
- package/src/prisma.module.ts +10 -0
- package/src/prisma.service.ts +17 -0
- package/src/scripts/create-tenant-schemas.ts +146 -0
- package/src/scripts/data-migration/batch-migrator.ts +569 -0
- package/src/scripts/data-migration/data-transformer.ts +377 -0
- package/src/scripts/data-migration/db-connector.ts +67 -0
- package/src/scripts/data-migration/dependency-resolver.ts +319 -0
- package/src/scripts/data-migration/entity-discovery.ts +197 -0
- package/src/scripts/data-migration/foreign-key-manager.ts +95 -0
- package/src/scripts/data-migration/migration-tool.ts +357 -0
- package/src/scripts/data-migration/schema-utils.ts +186 -0
- package/src/scripts/data-migration/tenant-migrator.ts +194 -0
- package/src/scripts/data-migration/typecast-manager.ts +38 -0
- package/src/scripts/data-migration/types.ts +40 -0
- package/src/scripts/drop-database.ts +105 -0
- package/src/scripts/dump-source-db.sh +62 -0
- package/src/scripts/dumps/source_dump_20250413_112626.sql +1527 -0
- package/src/scripts/error-handler.ts +118 -0
- package/src/scripts/fix-data-types.ts +242 -0
- package/src/scripts/fix-enum-values.ts +357 -0
- package/src/scripts/fix-schema-discrepancies.ts +318 -0
- package/src/scripts/migrate-schema-structure.ts +90 -0
- package/src/scripts/post-migration-validator.ts +427 -0
- package/src/scripts/pre-migration-validator.ts +611 -0
- package/src/scripts/reset-database.ts +264 -0
- package/src/scripts/retry-failed-migrations.ts +416 -0
- package/src/scripts/run-migration.ts +691 -0
- package/src/scripts/schema-sync.ts +129 -0
- package/src/scripts/sync-enum-types.ts +171 -0
- package/src/scripts/sync-enum-values.ts +563 -0
- package/src/scripts/truncate-database.ts +124 -0
- package/src/scripts/verify-migration-setup.ts +136 -0
- package/tsconfig.json +18 -0

package/src/scripts/run-migration.ts
@@ -0,0 +1,691 @@
import { Command } from "commander";
import * as dotenv from "dotenv";
import { exec } from "child_process";
import { promisify } from "util";
import { SchemaDiscrepancyFixer } from "./fix-schema-discrepancies";
import { EnumSynchronizer } from "./sync-enum-types";
import { PrismaClient } from "@prisma/client";

dotenv.config();

const execAsync = promisify(exec);
const program = new Command();

// Add this function after the imports
async function testDatabaseConnection(
  url: string,
  name: string
): Promise<boolean> {
  const { Pool } = await import("pg");
  const pool = new Pool({ connectionString: url });

  try {
    console.log(`Testing connection to ${name} database...`);
    const client = await pool.connect();
    const result = await client.query("SELECT NOW()");
    client.release();

    console.log(`Successfully connected to ${name} database`);
    return true;
  } catch (error) {
    console.error(`Failed to connect to ${name} database: ${error.message}`);
    return false;
  } finally {
    await pool.end();
  }
}

// Modify the migrate command action to include the connection test
async function initializeDatabase(options: any) {
  try {
    const { Pool } = await import("pg");
    const pool = new Pool({ connectionString: process.env.DATABASE_URL });

    // Check if database is already initialized
    const tablesExist = await checkIfTablesExist(pool);

    if (!tablesExist) {
      // Only create tables if they don't exist
      console.log("Database empty, creating initial schema...");
      await execAsync("npx prisma db push --skip-generate");
    } else {
      console.log("Database schema already exists, skipping schema creation");
      // Create baseline if it doesn't exist
      await execAsync(
        "npx prisma migrate reset --force --skip-generate --skip-seed"
      );
    }

    if (!options.skipEnumSync) {
      console.log("Synchronizing enum types...");
      const enumSynchronizer = new EnumSynchronizer(
        process.env.SOURCE_DATABASE_URL,
        process.env.DATABASE_URL
      );
      await enumSynchronizer.synchronizeEnums();
    }

    // Proceed with data migration if not skipped
    if (!options.skipDataMigration) {
      console.log("Starting data migration...");
      const { DataMigrationTool } = await import(
        "./data-migration/migration-tool"
      );
      const migrationTool = new DataMigrationTool({
        sourcePool: new Pool({
          connectionString: process.env.SOURCE_DATABASE_URL,
        }),
        targetPool: pool,
        sourcePrisma: new PrismaClient({
          datasources: { db: { url: process.env.SOURCE_DATABASE_URL } },
        }),
        targetPrisma: new PrismaClient({
          datasources: { db: { url: process.env.DATABASE_URL } },
        }),
      });

      await migrationTool.migrateData({
        publicOnly: options.mode === "public-only",
      });
    }

    await pool.end();
    return true;
  } catch (error) {
    console.error("Error initializing database:", error);
    return false;
  }
}

// Helper function to check if tables exist
async function checkIfTablesExist(pool: any): Promise<boolean> {
  try {
    const result = await pool.query(`
      SELECT COUNT(*)
      FROM information_schema.tables
      WHERE table_schema = 'public'
      AND table_type = 'BASE TABLE'
    `);

    return parseInt(result.rows[0].count) > 0;
  } catch (error) {
    console.error("Error checking tables:", error);
    return false;
  }
}

// Modify the existing action handler
program
  .command("migrate")
  .description("Run the data migration process")
  .option("-s, --source <url>", "Source database connection URL")
  .option("-t, --target <url>", "Target database connection URL")
  .option(
    "-m, --mode <mode>",
    "Migration mode: 'multi-tenant' or 'public-only'",
    "multi-tenant"
  )
  .option("-d, --dry-run", "Perform a dry run without making changes", false)
  .option("--skip-schema-creation", "Skip the schema creation step", false)
  .option("--skip-schema-migration", "Skip the schema migration step", false)
  .option("--skip-data-migration", "Skip the data migration step", false)
  .option("--skip-validation", "Skip pre and post validation steps", false)
  .option("--skip-enum-sync", "Skip enum synchronization step", false)
  .option("--auto-fix-schema", "Automatically fix schema discrepancies", false)
  .option("--force", "Force migration even if validation fails", false)
  .option("-y, --yes", "Automatically answer yes to all prompts", false)
  .action(async (options) => {
    // Set environment variables for the migration
    if (options.source) {
      process.env.SOURCE_DATABASE_URL = options.source;
    }

    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.SOURCE_DATABASE_URL) {
      console.error(
        "Source database URL is required. Use --source or set SOURCE_DATABASE_URL environment variable."
      );
      process.exit(1);
    }

    if (!process.env.DATABASE_URL) {
      console.error(
        "Target database URL is required. Use --target or set DATABASE_URL environment variable."
      );
      process.exit(1);
    }

    if (options.dryRun) {
      console.log(
        "Performing dry run - no changes will be made to the database"
      );
      process.env.DRY_RUN = "true";
    }

    const sourceConnected = await testDatabaseConnection(
      process.env.SOURCE_DATABASE_URL,
      "source"
    );
    const targetConnected = await testDatabaseConnection(
      process.env.DATABASE_URL,
      "target"
    );

    if (!sourceConnected || !targetConnected) {
      console.error(
        "Database connection test failed. Please check your connection strings and try again."
      );
      process.exit(1);
    }

    try {
      // Initialize database if empty
      await initializeDatabase(options);

      // Step 0: Pre-migration validation
      if (!options.skipValidation) {
        console.log("Step 0: Running pre-migration validation...");
        const { PreMigrationValidator } = await import(
          "./pre-migration-validator"
        );
        const validator = new PreMigrationValidator();
        const validationResult = await validator.validate();

        if (!validationResult.success && !options.force) {
          console.log(
            `Pre-migration validation found ${validationResult.issueCount} issues.`
          );
          console.log(
            `Check the full report at: ${validationResult.reportPath}`
          );

          // If --yes or --force is set, continue automatically
          if (options.yes) {
            console.log(
              "Continuing with migration despite issues (--yes flag provided)"
            );
          } else if (!options.force) {
            // Ask the user whether to continue despite the issues
            const readline = require("readline").createInterface({
              input: process.stdin,
              output: process.stdout,
            });

            const answer = await new Promise<string>((resolve) => {
              readline.question(
                "Continue with migration despite issues? (y/N) ",
                (ans: string) => {
                  resolve(ans);
                  readline.close();
                }
              );
            });

            if (answer.toLowerCase() !== "y") {
              console.log("Migration aborted by user.");
              process.exit(1);
            }
          }
        } else {
          console.log("Pre-migration validation successful!");
        }

        // Automatically fix schema discrepancies if requested
        if (options.autoFixSchema && validationResult.reportPath) {
          console.log("Automatically fixing schema discrepancies...");
          const fixer = new SchemaDiscrepancyFixer(
            process.env.SOURCE_DATABASE_URL,
            process.env.DATABASE_URL,
            validationResult.reportPath
          );
          await fixer.fixDiscrepancies();
        }
      }

      // Step 1: Migrate schema structure first
      if (!options.skipSchemaMigration) {
        console.log("Step 1: Migrating schema structure...");
        if (options.mode === "multi-tenant") {
          await execAsync("node dist/scripts/migrate-schema-structure.js");
        } else {
          const { Pool } = await import("pg");
          const pool = new Pool({ connectionString: process.env.DATABASE_URL });
          const tablesExist = await checkIfTablesExist(pool);
          if (!tablesExist) {
            await execAsync("npx prisma migrate deploy");
          }
        }
      }

      // Step 2: Now synchronize enums
      if (!options.skipEnumSync) {
        console.log("Step 2: Synchronizing enum types...");
        const { EnumSynchronizer } = await import("./sync-enum-types");
        const synchronizer = new EnumSynchronizer();
        await synchronizer.synchronizeEnums();
        console.log("Enum types synchronized successfully!");

        console.log("Step 2.1: Synchronizing enum values...");
        const { EnumValueSynchronizer } = await import("./sync-enum-values");
        const valueSynchronizer = new EnumValueSynchronizer();
        await valueSynchronizer.synchronizeEnumValues();
        console.log("Enum values synchronized successfully!");
      }

      // Step 3: Migrate data with transformation
      // In the migrate command action
      if (!options.skipDataMigration) {
        console.log("Step 3: Migrating data with transformation...");
        const { DataMigrationTool } = await import(
          "./data-migration/migration-tool"
        );
        const { PrismaClient } = await import("@prisma/client");
        const { Pool } = await import("pg");

        // Create connections with increased pool size and timeout
        const connections = {
          sourcePool: new Pool({
            connectionString: process.env.SOURCE_DATABASE_URL,
            max: 20,
            idleTimeoutMillis: 30000,
          }),
          targetPool: new Pool({
            connectionString: process.env.DATABASE_URL,
            max: 20,
            idleTimeoutMillis: 30000,
          }),
          sourcePrisma: new PrismaClient({
            datasources: { db: { url: process.env.SOURCE_DATABASE_URL } },
            log: ["error", "warn"],
          }),
          targetPrisma: new PrismaClient({
            datasources: { db: { url: process.env.DATABASE_URL } },
            log: ["error", "warn"],
          }),
        };

        try {
          // Disable foreign key constraints before migration
          await connections.targetPool.query(
            "SET session_replication_role = 'replica';"
          );

          const migrationTool = new DataMigrationTool(connections);

          if (options.mode === "public-only") {
            await migrationTool.migrateData({
              publicOnly: true,
              targetSchema: "public",
              sourceSchema: "public",
            });
          } else {
            await migrationTool.migrateData({
              forceSingleTenant: false,
              multiTenant: true,
            });
          }
        } finally {
          // Re-enable foreign key constraints
          await connections.targetPool.query(
            "SET session_replication_role = 'origin';"
          );

          // Close connections
          await connections.sourcePool.end();
          await connections.targetPool.end();
          await connections.sourcePrisma.$disconnect();
          await connections.targetPrisma.$disconnect();
        }
      }

      // Step 4: Fix enum values if needed
      console.log("Step 4: Fixing enum values...");
      const { EnumFixer } = await import("./fix-enum-values");
      const enumFixer = new EnumFixer(process.env.DATABASE_URL);
      await enumFixer.fixEnumValues();
      console.log("Enum values fixed successfully!");

      // Step 5: Fix data types if needed
      console.log("Step 5: Fixing data type issues...");
      const { DataTypeFixer } = await import("./fix-data-types");
      const fixer = new DataTypeFixer();
      await fixer.fixDataTypes();
      console.log("Data type fixing completed successfully!");

      // Step 6: Post-migration validation
      if (!options.skipValidation) {
        console.log("Step 6: Running post-migration validation...");
        const { PostMigrationValidator } = await import(
          "./post-migration-validator"
        );
        const validator = new PostMigrationValidator();
        const result = await validator.validate();

        if (result.success) {
          console.log("Post-migration validation successful!");
        } else {
          console.log(
            `Post-migration validation found ${result.issueCount} issues.`
          );
          console.log(`Check the full report at: ${result.reportPath}`);
          console.log(
            "You may need to run 'retry-failed' command to fix these issues."
          );
        }
      }

      console.log("Migration process completed successfully!");
    } catch (error) {
      console.error("Error running migration:", error);
      process.exit(1);
    }
  });

program
  .command("fix-data-types")
  .description("Fix data type issues in the migrated data")
  .option("-t, --target <url>", "Target database connection URL")
  .action(async (options) => {
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.DATABASE_URL) {
      console.error(
        "Target database URL is required. Use --target or set DATABASE_URL environment variable."
      );
      process.exit(1);
    }

    try {
      console.log("Fixing data type issues...");
      const { DataTypeFixer } = await import("./fix-data-types");
      const fixer = new DataTypeFixer();
      await fixer.fixDataTypes();
      console.log("Data type fixing completed successfully!");
    } catch (error) {
      console.error("Error fixing data types:", error);
      process.exit(1);
    }
  });

program
  .command("truncate-database")
  .description(
    "Truncate (empty) all tables in the database without dropping the database"
  )
  .option("-t, --target <url>", "Target database connection URL")
  .action(async (options) => {
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.DATABASE_URL) {
      console.error(
        "Target database URL is required. Use --target or set DATABASE_URL environment variable."
      );
      process.exit(1);
    }

    try {
      console.log("Truncating database...");
      const { DatabaseTruncateTool } = await import("./truncate-database");
      const truncateTool = new DatabaseTruncateTool(process.env.DATABASE_URL);
      await truncateTool.truncateDatabase();
      console.log("Database truncation completed successfully!");
    } catch (error) {
      console.error("Error truncating database:", error);
      process.exit(1);
    }
  });

program
  .command("retry-failed")
  .description("Retry failed migrations from an error log file")
  .argument("<error-log-path>", "Path to the error log file")
  .option("-t, --target <url>", "Target database connection URL")
  .action(async (errorLogPath, options) => {
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.DATABASE_URL) {
      console.error(
        "Target database URL is required. Use --target or set DATABASE_URL environment variable."
      );
      process.exit(1);
    }

    try {
      console.log(`Retrying failed migrations from log: ${errorLogPath}`);
      const { FailedMigrationRetry } = await import(
        "./retry-failed-migrations"
      );
      const retryTool = new FailedMigrationRetry(errorLogPath);
      await retryTool.retryFailedMigrations();
      console.log("Retry process completed!");
    } catch (error) {
      console.error("Error retrying failed migrations:", error);
      process.exit(1);
    }
  });

program
  .command("fix-enum-values")
  .description("Fix invalid enum values in the database")
  .option("-t, --target <url>", "Target database connection URL")
  .action(async (options) => {
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.DATABASE_URL) {
      console.error(
        "Target database URL is required. Use --target or set DATABASE_URL environment variable."
      );
      process.exit(1);
    }

    try {
      console.log("Fixing enum values...");
      const { EnumFixer } = await import("./fix-enum-values");
      const enumFixer = new EnumFixer(process.env.DATABASE_URL);
      await enumFixer.fixEnumValues();
      console.log("Enum values fixed successfully!");
    } catch (error) {
      console.error("Error fixing enum values:", error);
      process.exit(1);
    }
  });

program
  .command("pre-validate")
  .description("Validate database before migration")
  .option("-s, --source <url>", "Source database connection URL")
  .option("-t, --target <url>", "Target database connection URL")
  .option("-y, --yes", "Automatically answer yes to all prompts", false)
  .action(async (options) => {
    if (options.source) {
      process.env.SOURCE_DATABASE_URL = options.source;
    }
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.SOURCE_DATABASE_URL || !process.env.DATABASE_URL) {
      console.error(
        "Source and target database URLs are required. Use --source and --target or set SOURCE_DATABASE_URL and DATABASE_URL environment variables."
      );
      process.exit(1);
    }

    try {
      console.log("Validating database before migration...");
      const { PreMigrationValidator } = await import(
        "./pre-migration-validator"
      );
      const validator = new PreMigrationValidator();
      const result = await validator.validate();

      if (result.success) {
        console.log("Pre-migration validation successful!");
        process.exit(0);
      } else {
        console.log(
          `Pre-migration validation found ${result.issueCount} issues.`
        );
        console.log(`Check the full report at: ${result.reportPath}`);

        // If --yes is set, continue automatically
        if (options.yes) {
          console.log(
            "Continuing with migration despite issues (--yes flag provided)"
          );
          process.exit(0);
        }

        // Work around the readline issue by wrapping it in a promise
        const readline = require("readline").createInterface({
          input: process.stdin,
          output: process.stdout,
        });

        const answer = await new Promise<string>((resolve) => {
          readline.question(
            "Continue with migration despite issues? (y/N) ",
            (ans: string) => {
              resolve(ans);
              readline.close();
            }
          );
        });

        if (answer.toLowerCase() === "y") {
          console.log("Continuing with migration despite issues.");
          process.exit(0);
        } else {
          console.log("Migration aborted by user.");
          process.exit(1);
        }
      }
    } catch (error) {
      console.error("Error during pre-migration validation:", error);
      process.exit(1);
    }
  });

program
  .command("sync-enums")
  .description("Synchronize enum types between databases")
  .option("-s, --source <url>", "Source database connection URL")
  .option("-t, --target <url>", "Target database connection URL")
  .action(async (options) => {
    if (options.source) {
      process.env.SOURCE_DATABASE_URL = options.source;
    }
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.SOURCE_DATABASE_URL || !process.env.DATABASE_URL) {
      console.error(
        "Source and target database URLs are required. Use --source and --target or set SOURCE_DATABASE_URL and DATABASE_URL environment variables."
      );
      process.exit(1);
    }

    try {
      console.log("Synchronizing enum types...");
      const { EnumSynchronizer } = await import("./sync-enum-types");
      const synchronizer = new EnumSynchronizer();
      await synchronizer.synchronizeEnums();
      console.log("Enum types synchronized successfully!");
    } catch (error) {
      console.error("Error synchronizing enum types:", error);
      process.exit(1);
    }
  });

program
  .command("post-validate")
  .description("Validate database after migration")
  .option("-s, --source <url>", "Source database connection URL")
  .option("-t, --target <url>", "Target database connection URL")
  .action(async (options) => {
    if (options.source) {
      process.env.SOURCE_DATABASE_URL = options.source;
    }
    if (options.target) {
      process.env.DATABASE_URL = options.target;
    }

    if (!process.env.SOURCE_DATABASE_URL || !process.env.DATABASE_URL) {
      console.error(
        "Source and target database URLs are required. Use --source and --target or set SOURCE_DATABASE_URL and DATABASE_URL environment variables."
      );
      process.exit(1);
    }

    try {
      console.log("Validating database after migration...");
      const { PostMigrationValidator } = await import(
        "./post-migration-validator"
      );
      const validator = new PostMigrationValidator();
      const result = await validator.validate();

      if (result.success) {
        console.log("Post-migration validation successful!");
      } else {
        console.log(
          `Post-migration validation found ${result.issueCount} issues.`
        );
        console.log(`Check the full report at: ${result.reportPath}`);
      }
    } catch (error) {
      console.error("Error during post-migration validation:", error);
      process.exit(1);
    }
  });

program
  .command("verify")
  .description("Verify migration setup and connections")
  .action(async () => {
    try {
      const { MigrationSetupVerifier } = await import(
        "./verify-migration-setup"
      );
      const verifier = new MigrationSetupVerifier();
      await verifier.verifySetup();
    } catch (error) {
      console.error("Verification failed:", error);
      process.exit(1);
    }
  });

program
  .command("fix-schema")
  .description("Fix schema discrepancies between source and target databases")
  .option("-r, --report <path>", "Path to pre-migration report file")
  .action(async (options) => {
    try {
      const fixer = new SchemaDiscrepancyFixer(
        process.env.SOURCE_DATABASE_URL,
        process.env.DATABASE_URL,
        options.report
      );

      await fixer.fixDiscrepancies();
      console.log("Schema discrepancies fixed successfully");
    } catch (error) {
      console.error("Error fixing schema discrepancies:", error.message);
      process.exit(1);
    }
  });

// Remove the unused run() function or call it if needed
program.parse();