forge-sql-orm 1.0.31 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/README.md +216 -695
  2. package/dist/ForgeSQLORM.js +526 -564
  3. package/dist/ForgeSQLORM.js.map +1 -1
  4. package/dist/ForgeSQLORM.mjs +527 -554
  5. package/dist/ForgeSQLORM.mjs.map +1 -1
  6. package/dist/core/ForgeSQLCrudOperations.d.ts +101 -130
  7. package/dist/core/ForgeSQLCrudOperations.d.ts.map +1 -1
  8. package/dist/core/ForgeSQLORM.d.ts +11 -10
  9. package/dist/core/ForgeSQLORM.d.ts.map +1 -1
  10. package/dist/core/ForgeSQLQueryBuilder.d.ts +275 -111
  11. package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
  12. package/dist/core/ForgeSQLSelectOperations.d.ts +65 -22
  13. package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
  14. package/dist/core/SystemTables.d.ts +59 -0
  15. package/dist/core/SystemTables.d.ts.map +1 -0
  16. package/dist/index.d.ts +1 -2
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/utils/sqlUtils.d.ts +53 -6
  19. package/dist/utils/sqlUtils.d.ts.map +1 -1
  20. package/dist-cli/cli.js +461 -397
  21. package/dist-cli/cli.js.map +1 -1
  22. package/dist-cli/cli.mjs +461 -397
  23. package/dist-cli/cli.mjs.map +1 -1
  24. package/package.json +21 -27
  25. package/src/core/ForgeSQLCrudOperations.ts +360 -473
  26. package/src/core/ForgeSQLORM.ts +38 -79
  27. package/src/core/ForgeSQLQueryBuilder.ts +255 -132
  28. package/src/core/ForgeSQLSelectOperations.ts +185 -72
  29. package/src/core/SystemTables.ts +7 -0
  30. package/src/index.ts +1 -2
  31. package/src/utils/sqlUtils.ts +164 -22
  32. package/dist/core/ComplexQuerySchemaBuilder.d.ts +0 -38
  33. package/dist/core/ComplexQuerySchemaBuilder.d.ts.map +0 -1
  34. package/dist/knex/index.d.ts +0 -4
  35. package/dist/knex/index.d.ts.map +0 -1
  36. package/src/core/ComplexQuerySchemaBuilder.ts +0 -63
  37. package/src/knex/index.ts +0 -4
package/dist-cli/cli.js CHANGED
@@ -6,92 +6,134 @@ const inquirer = require("inquirer");
6
6
  const fs = require("fs");
7
7
  const path = require("path");
8
8
  require("reflect-metadata");
9
- const mysql = require("@mikro-orm/mysql");
10
- const entityGenerator = require("@mikro-orm/entity-generator");
11
- const regenerateIndexFile = (outputPath) => {
12
- const entitiesDir = path.resolve(outputPath);
13
- const indexPath = path.join(entitiesDir, "index.ts");
14
- const entityFiles = fs.readdirSync(entitiesDir).filter((file) => file.endsWith(".ts") && file !== "index.ts");
15
- const imports = entityFiles.map((file) => {
16
- const entityName = path.basename(file, ".ts");
17
- return `import { ${entityName} } from "./${entityName}";`;
18
- });
19
- const indexContent = `${imports.join("\n")}
9
+ const child_process = require("child_process");
10
+ const mysql = require("mysql2/promise");
11
+ require("moment");
12
+ const uniqueConstraint = require("drizzle-orm/mysql-core/unique-constraint");
13
+ function replaceMySQLTypes(schemaContent) {
14
+ const imports = `import { mySqlDateTimeString, mySqlTimeString, mySqlDateString, mySqlTimestampString } from "forge-sql-orm";
20
15
 
21
- export default [${entityFiles.map((file) => path.basename(file, ".ts")).join(", ")}];
22
16
  `;
23
- fs.writeFileSync(indexPath, indexContent, "utf8");
24
- console.log(`✅ Updated index.ts with ${entityFiles.length} entities.`);
25
- };
17
+ let modifiedContent = schemaContent.replace(/datetime\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(['"]([^'"]+)['"]\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlDateTimeString()").replace(/time\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlTimeString('$1')").replace(/time\(['"]([^'"]+)['"]\)/g, "mySqlTimeString('$1')").replace(/time\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlTimeString()").replace(/date\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlDateString('$1')").replace(/date\(['"]([^'"]+)['"]\)/g, "mySqlDateString('$1')").replace(/date\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlDateString()").replace(/timestamp\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(['"]([^'"]+)['"]\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlTimestampString()");
18
+ if (!modifiedContent.includes("import { mySqlDateTimeString")) {
19
+ modifiedContent = imports + modifiedContent;
20
+ }
21
+ return modifiedContent;
22
+ }
26
23
  const generateModels = async (options) => {
27
24
  try {
28
- const ormConfig = mysql.defineConfig({
29
- host: options.host,
30
- port: options.port,
31
- user: options.user,
32
- password: options.password,
33
- dbName: options.dbName,
34
- namingStrategy: mysql.MongoNamingStrategy,
35
- discovery: { warnWhenNoEntities: false },
36
- extensions: [entityGenerator.EntityGenerator],
37
- debug: true
38
- });
39
- const orm = mysql.MikroORM.initSync(ormConfig);
40
- console.log(`✅ Connected to ${options.dbName} at ${options.host}:${options.port}`);
41
- const onCreatingVersionField = async (metadatas) => {
42
- metadatas.forEach((m) => {
43
- if (options.versionField) {
44
- const versionFieldName = Object.keys(m.properties).find((p) => {
45
- return p === options.versionField || m.properties[p]?.name === options.versionField || m.properties[p]?.fieldNames?.find((f) => f === options.versionField);
46
- });
47
- if (versionFieldName) {
48
- const property = m.properties[versionFieldName];
49
- if (property.type !== "datetime" && property.type !== "integer" && property.type !== "decimal") {
25
+ const sql = await child_process.execSync(
26
+ `npx drizzle-kit pull --dialect mysql --url mysql://${options.user}:${options.password}@${options.host}:${options.port}/${options.dbName} --out ${options.output}`,
27
+ { encoding: "utf-8" }
28
+ );
29
+ const metaDir = path.join(options.output, "meta");
30
+ const additionalMetadata = {};
31
+ if (fs.existsSync(metaDir)) {
32
+ const snapshotFile = path.join(metaDir, "0000_snapshot.json");
33
+ if (fs.existsSync(snapshotFile)) {
34
+ const snapshotData = JSON.parse(fs.readFileSync(snapshotFile, "utf-8"));
35
+ for (const [tableName, tableData] of Object.entries(snapshotData.tables)) {
36
+ const table = tableData;
37
+ const versionField = Object.entries(table.columns).find(
38
+ ([_, col]) => col.name.toLowerCase() === options.versionField
39
+ );
40
+ if (versionField) {
41
+ const [_, col] = versionField;
42
+ const fieldType = col.type;
43
+ const isSupportedType = fieldType === "datetime" || fieldType === "timestamp" || fieldType === "int" || fieldType === "number" || fieldType === "decimal";
44
+ if (!col.notNull) {
45
+ console.warn(`Version field "${col.name}" in table ${tableName} is nullable. Versioning may not work correctly.`);
46
+ } else if (!isSupportedType) {
50
47
  console.warn(
51
- `Version field "${property.name}" can be only datetime or integer Table ${m.tableName} but now is "${property.type}"`
48
+ `Version field "${col.name}" in table ${tableName} has unsupported type "${fieldType}". Only datetime, timestamp, int, and decimal types are supported for versioning. Versioning will be skipped.`
52
49
  );
53
- return;
50
+ } else {
51
+ additionalMetadata[tableName] = {
52
+ tableName,
53
+ versionField: {
54
+ fieldName: col.name
55
+ }
56
+ };
54
57
  }
55
- if (property.primary) {
56
- console.warn(
57
- `Version field "${property.name}" can not be primary key Table ${m.tableName}`
58
- );
59
- return;
60
- }
61
- if (property.nullable) {
62
- console.warn(
63
- `Version field "${property.name}" should not be nullable Table ${m.tableName}`
64
- );
65
- return;
66
- }
67
- property.version = true;
68
58
  }
69
59
  }
70
- });
71
- };
72
- await orm.entityGenerator.generate({
73
- entitySchema: true,
74
- bidirectionalRelations: true,
75
- identifiedReferences: false,
76
- forceUndefined: true,
77
- undefinedDefaults: true,
78
- useCoreBaseEntity: false,
79
- onlyPurePivotTables: false,
80
- outputPurePivotTables: false,
81
- scalarPropertiesForRelations: "always",
82
- save: true,
83
- path: options.output,
84
- onInitialMetadata: onCreatingVersionField
85
- });
86
- regenerateIndexFile(options.output);
87
- console.log(`✅ Entities generated at: ${options.output}`);
60
+ }
61
+ }
62
+ const versionMetadataContent = `/**
63
+ * This file was auto-generated by forge-sql-orm
64
+ * Generated at: ${(/* @__PURE__ */ new Date()).toISOString()}
65
+ *
66
+ * DO NOT EDIT THIS FILE MANUALLY
67
+ * Any changes will be overwritten on next generation
68
+ */
69
+
70
+
71
+ export * from "./relations";
72
+ export * from "./schema";
73
+
74
+ export interface VersionFieldMetadata {
75
+ fieldName: string;
76
+ }
77
+
78
+ export interface TableMetadata {
79
+ tableName: string;
80
+ versionField: VersionFieldMetadata;
81
+ }
82
+
83
+ export type AdditionalMetadata = Record<string, TableMetadata>;
84
+
85
+ export const additionalMetadata: AdditionalMetadata = ${JSON.stringify(additionalMetadata, null, 2)};
86
+ `;
87
+ fs.writeFileSync(path.join(options.output, "index.ts"), versionMetadataContent);
88
+ const schemaPath = path.join(options.output, "schema.ts");
89
+ if (fs.existsSync(schemaPath)) {
90
+ const schemaContent = fs.readFileSync(schemaPath, "utf-8");
91
+ const modifiedContent = replaceMySQLTypes(schemaContent);
92
+ fs.writeFileSync(schemaPath, modifiedContent);
93
+ console.log(`✅ Updated schema types in: ${schemaPath}`);
94
+ }
95
+ const migrationDir = path.join(options.output, "migrations");
96
+ if (fs.existsSync(migrationDir)) {
97
+ fs.rmSync(migrationDir, { recursive: true, force: true });
98
+ console.log(`✅ Removed: ${migrationDir}`);
99
+ }
100
+ if (fs.existsSync(metaDir)) {
101
+ const journalFile = path.join(metaDir, "_journal.json");
102
+ if (fs.existsSync(journalFile)) {
103
+ const journalData = JSON.parse(fs.readFileSync(journalFile, "utf-8"));
104
+ for (const entry of journalData.entries) {
105
+ const sqlFile = path.join(options.output, `${entry.tag}.sql`);
106
+ if (fs.existsSync(sqlFile)) {
107
+ fs.rmSync(sqlFile, { force: true });
108
+ console.log(`✅ Removed SQL file: ${entry.tag}.sql`);
109
+ }
110
+ }
111
+ }
112
+ fs.rmSync(metaDir, { recursive: true, force: true });
113
+ console.log(`✅ Removed: ${metaDir}`);
114
+ }
115
+ console.log(`✅ Successfully generated models and version metadata`);
88
116
  process.exit(0);
89
117
  } catch (error) {
90
- console.error(`❌ Error generating entities:`, error);
118
+ console.error(`❌ Error during model generation:`, error);
119
+ process.exit(1);
120
+ }
121
+ };
122
+ const loadMigrationVersion$1 = async (migrationPath) => {
123
+ try {
124
+ const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
125
+ if (!fs.existsSync(migrationCountFilePath)) {
126
+ return 0;
127
+ }
128
+ const { MIGRATION_VERSION } = await import(migrationCountFilePath);
129
+ console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
130
+ return MIGRATION_VERSION;
131
+ } catch (error) {
132
+ console.error(`❌ Error loading migrationCount:`, error);
91
133
  process.exit(1);
92
134
  }
93
135
  };
94
- function cleanSQLStatement$2(sql) {
136
+ function cleanSQLStatement(sql) {
95
137
  sql = sql.replace(/create\s+table\s+(\w+)/gi, "create table if not exists $1");
96
138
  sql = sql.replace(/create\s+index\s+(\w+)/gi, "create index if not exists $1");
97
139
  sql = sql.replace(/alter\s+table\s+(\w+)\s+add\s+index\s+(\w+)/gi, "alter table $1 add index if not exists $2");
@@ -101,7 +143,7 @@ function cleanSQLStatement$2(sql) {
101
143
  function generateMigrationFile$2(createStatements, version) {
102
144
  const versionPrefix = `v${version}_MIGRATION`;
103
145
  const migrationLines = createStatements.map(
104
- (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement$2(stmt)}")`
146
+ (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement(stmt).replace(/\s+/g, " ")}")`
105
147
  // eslint-disable-line no-useless-escape
106
148
  ).join("\n");
107
149
  return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -143,41 +185,12 @@ export default async (
143
185
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
144
186
  console.log(`✅ Migration index file created: ${indexFilePath}`);
145
187
  }
146
- const extractCreateStatements$1 = (schema) => {
147
- const statements = schema.split(";").map((s) => s.trim());
188
+ const extractCreateStatements = (schema) => {
189
+ const statements = schema.split(/--> statement-breakpoint|;/).map((s) => s.trim()).filter((s) => s.length > 0);
148
190
  return statements.filter(
149
- (stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && (stmt.includes("add index") || stmt.includes("add constraint")) || stmt.startsWith("primary")
191
+ (stmt) => stmt.toLowerCase().startsWith("create table") || stmt.toLowerCase().startsWith("alter table") || stmt.toLowerCase().includes("add index") || stmt.toLowerCase().includes("add unique index") || stmt.toLowerCase().includes("add constraint")
150
192
  );
151
193
  };
152
- const loadEntities$2 = async (entitiesPath) => {
153
- try {
154
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
155
- if (!fs.existsSync(indexFilePath)) {
156
- console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
157
- process.exit(1);
158
- }
159
- const { default: entities } = await import(indexFilePath);
160
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
161
- return entities;
162
- } catch (error) {
163
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
164
- process.exit(1);
165
- }
166
- };
167
- const loadMigrationVersion$1 = async (migrationPath) => {
168
- try {
169
- const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
170
- if (!fs.existsSync(migrationCountFilePath)) {
171
- return 0;
172
- }
173
- const { MIGRATION_VERSION } = await import(migrationCountFilePath);
174
- console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
175
- return MIGRATION_VERSION;
176
- } catch (error) {
177
- console.error(`❌ Error loading migrationCount:`, error);
178
- process.exit(1);
179
- }
180
- };
181
194
  const createMigration = async (options) => {
182
195
  try {
183
196
  let version = await loadMigrationVersion$1(options.output);
@@ -190,19 +203,20 @@ const createMigration = async (options) => {
190
203
  }
191
204
  }
192
205
  version = 1;
193
- const entities = await loadEntities$2(options.entitiesPath);
194
- const orm = mysql.MikroORM.initSync({
195
- host: options.host,
196
- port: options.port,
197
- user: options.user,
198
- password: options.password,
199
- dbName: options.dbName,
200
- entities
201
- });
202
- const createSchemaSQL = await orm.schema.getCreateSchemaSQL({ wrap: true });
203
- const statements = extractCreateStatements$1(createSchemaSQL);
204
- const migrationFile = generateMigrationFile$2(statements, version);
205
- saveMigrationFiles$2(migrationFile, version, options.output);
206
+ await child_process.execSync(
207
+ `npx drizzle-kit generate --name=init --dialect mysql --out ${options.output} --schema ${options.entitiesPath}`,
208
+ { encoding: "utf-8" }
209
+ );
210
+ const initSqlFile = path.join(options.output, "0000_init.sql");
211
+ const sql = fs.readFileSync(initSqlFile, "utf-8");
212
+ const createStatements = extractCreateStatements(sql);
213
+ const migrationFile = generateMigrationFile$2(createStatements, 1);
214
+ saveMigrationFiles$2(migrationFile, 1, options.output);
215
+ fs.rmSync(initSqlFile, { force: true });
216
+ console.log(`✅ Removed SQL file: ${initSqlFile}`);
217
+ let metaDir = path.join(options.output, "meta");
218
+ fs.rmSync(metaDir, { recursive: true, force: true });
219
+ console.log(`✅ Removed: ${metaDir}`);
206
220
  console.log(`✅ Migration successfully created!`);
207
221
  process.exit(0);
208
222
  } catch (error) {
@@ -210,13 +224,58 @@ const createMigration = async (options) => {
210
224
  process.exit(1);
211
225
  }
212
226
  };
213
- function cleanSQLStatement$1(sql) {
214
- return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
227
+ function getTableMetadata(table) {
228
+ const symbols = Object.getOwnPropertySymbols(table);
229
+ const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
230
+ const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
231
+ const extraSymbol = symbols.find((s) => s.toString().includes("ExtraConfigBuilder"));
232
+ const builders = {
233
+ indexes: [],
234
+ checks: [],
235
+ foreignKeys: [],
236
+ primaryKeys: [],
237
+ uniqueConstraints: [],
238
+ extras: []
239
+ };
240
+ if (extraSymbol) {
241
+ const extraConfigBuilder = table[extraSymbol];
242
+ if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
243
+ const configBuilderData = extraConfigBuilder(table);
244
+ if (configBuilderData) {
245
+ const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
246
+ (item) => item.value || item
247
+ );
248
+ configBuilders.forEach((builder) => {
249
+ if (!builder?.constructor) return;
250
+ const builderName = builder.constructor.name.toLowerCase();
251
+ const builderMap = {
252
+ indexbuilder: builders.indexes,
253
+ checkbuilder: builders.checks,
254
+ foreignkeybuilder: builders.foreignKeys,
255
+ primarykeybuilder: builders.primaryKeys,
256
+ uniqueconstraintbuilder: builders.uniqueConstraints
257
+ };
258
+ for (const [type, array] of Object.entries(builderMap)) {
259
+ if (builderName.includes(type)) {
260
+ array.push(builder);
261
+ break;
262
+ }
263
+ }
264
+ builders.extras.push(builder);
265
+ });
266
+ }
267
+ }
268
+ }
269
+ return {
270
+ tableName: nameSymbol ? table[nameSymbol] : "",
271
+ columns: columnsSymbol ? table[columnsSymbol] : {},
272
+ ...builders
273
+ };
215
274
  }
216
275
  function generateMigrationFile$1(createStatements, version) {
217
276
  const versionPrefix = `v${version}_MIGRATION`;
218
277
  const migrationLines = createStatements.map(
219
- (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement$1(stmt)}")`
278
+ (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${stmt}")`
220
279
  // eslint-disable-line no-useless-escape
221
280
  ).join("\n");
222
281
  return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -226,6 +285,18 @@ export default (migrationRunner: MigrationRunner): MigrationRunner => {
226
285
  ${migrationLines};
227
286
  };`;
228
287
  }
288
+ function filterWithPreviousMigration(newStatements, prevVersion, outputDir) {
289
+ const prevMigrationPath = path.join(outputDir, `migrationV${prevVersion}.ts`);
290
+ if (!fs.existsSync(prevMigrationPath)) {
291
+ return newStatements.map((s) => s.replace(/\s+/g, " "));
292
+ }
293
+ const prevContent = fs.readFileSync(prevMigrationPath, "utf-8");
294
+ const prevStatements = prevContent.split("\n").filter((line) => line.includes(".enqueue(")).map((line) => {
295
+ const match = line.match(/\.enqueue\([^,]+,\s*"([^"]+)"/);
296
+ return match ? match[1].replace(/\s+/g, " ").trim() : "";
297
+ });
298
+ return newStatements.filter((s) => !prevStatements.includes(s.replace(/\s+/g, " "))).map((s) => s.replace(/\s+/g, " "));
299
+ }
229
300
  function saveMigrationFiles$1(migrationCode, version, outputDir) {
230
301
  if (!fs.existsSync(outputDir)) {
231
302
  fs.mkdirSync(outputDir, { recursive: true });
@@ -257,28 +328,8 @@ export default async (
257
328
  console.log(`✅ Migration file created: ${migrationFilePath}`);
258
329
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
259
330
  console.log(`✅ Migration index file created: ${indexFilePath}`);
331
+ return true;
260
332
  }
261
- const extractCreateStatements = (schema) => {
262
- const statements = schema.split(";").map((s) => s.trim());
263
- return statements.filter(
264
- (stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && stmt.includes("add index") || stmt.startsWith("alter table") && stmt.includes("add") && !stmt.includes("foreign") || stmt.startsWith("alter table") && stmt.includes("modify") && !stmt.includes("foreign")
265
- );
266
- };
267
- const loadEntities$1 = async (entitiesPath) => {
268
- try {
269
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
270
- if (!fs.existsSync(indexFilePath)) {
271
- console.error(`❌ Error: index.ts not found in ${entitiesPath}`);
272
- process.exit(1);
273
- }
274
- const { default: entities } = await import(indexFilePath);
275
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
276
- return entities;
277
- } catch (error) {
278
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
279
- process.exit(1);
280
- }
281
- };
282
333
  const loadMigrationVersion = async (migrationPath) => {
283
334
  try {
284
335
  const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
@@ -296,9 +347,183 @@ const loadMigrationVersion = async (migrationPath) => {
296
347
  process.exit(1);
297
348
  }
298
349
  };
350
+ async function getDatabaseSchema(connection, dbName) {
351
+ const [columns] = await connection.execute(`
352
+ SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA
353
+ FROM INFORMATION_SCHEMA.COLUMNS
354
+ WHERE TABLE_SCHEMA = ?
355
+ `, [dbName]);
356
+ const [indexes] = await connection.execute(`
357
+ SELECT TABLE_NAME, INDEX_NAME, COLUMN_NAME, NON_UNIQUE
358
+ FROM INFORMATION_SCHEMA.STATISTICS
359
+ WHERE TABLE_SCHEMA = ?
360
+ ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX
361
+ `, [dbName]);
362
+ const [foreignKeys] = await connection.execute(`
363
+ SELECT
364
+ TABLE_NAME,
365
+ COLUMN_NAME,
366
+ CONSTRAINT_NAME,
367
+ REFERENCED_TABLE_NAME,
368
+ REFERENCED_COLUMN_NAME
369
+ FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
370
+ WHERE TABLE_SCHEMA = ?
371
+ AND REFERENCED_TABLE_NAME IS NOT NULL
372
+ `, [dbName]);
373
+ const schema = {};
374
+ columns.forEach((row) => {
375
+ if (!schema[row.TABLE_NAME]) {
376
+ schema[row.TABLE_NAME] = {
377
+ columns: {},
378
+ indexes: {},
379
+ foreignKeys: {}
380
+ };
381
+ }
382
+ schema[row.TABLE_NAME].columns[row.COLUMN_NAME] = row;
383
+ });
384
+ indexes.forEach((row) => {
385
+ if (!schema[row.TABLE_NAME].indexes[row.INDEX_NAME]) {
386
+ schema[row.TABLE_NAME].indexes[row.INDEX_NAME] = {
387
+ columns: [],
388
+ unique: !row.NON_UNIQUE
389
+ };
390
+ }
391
+ schema[row.TABLE_NAME].indexes[row.INDEX_NAME].columns.push(row.COLUMN_NAME);
392
+ });
393
+ foreignKeys.forEach((row) => {
394
+ if (!schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME]) {
395
+ schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME] = {
396
+ column: row.COLUMN_NAME,
397
+ referencedTable: row.REFERENCED_TABLE_NAME,
398
+ referencedColumn: row.REFERENCED_COLUMN_NAME
399
+ };
400
+ }
401
+ });
402
+ return schema;
403
+ }
404
+ function normalizeMySQLType(mysqlType) {
405
+ let normalized = mysqlType.replace(/\([^)]*\)/, "").toLowerCase();
406
+ normalized = normalized.replace(/^mysql/, "");
407
+ return normalized;
408
+ }
409
+ function getForeignKeyName(fk) {
410
+ return fk.getName();
411
+ }
412
+ function getIndexName(index) {
413
+ return index.name;
414
+ }
415
+ function getUniqueConstraintName(uc) {
416
+ return uc.name;
417
+ }
418
+ function getIndexColumns(index) {
419
+ return index.columns.map((col) => col.name);
420
+ }
421
+ function generateSchemaChanges(drizzleSchema, dbSchema, schemaModule) {
422
+ const changes = [];
423
+ for (const [tableName, dbTable] of Object.entries(dbSchema)) {
424
+ const drizzleColumns = drizzleSchema[tableName];
425
+ if (!drizzleColumns) {
426
+ const columns = Object.entries(dbTable.columns).map(([colName, col]) => {
427
+ const type = col.COLUMN_TYPE;
428
+ const nullable = col.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
429
+ const autoIncrement = col.EXTRA.includes("auto_increment") ? "AUTO_INCREMENT" : "";
430
+ return `\`${colName}\` ${type} ${nullable} ${autoIncrement}`.trim();
431
+ }).join(",\n ");
432
+ changes.push(`CREATE TABLE if not exists \`${tableName}\` (
433
+ ${columns}
434
+ );`);
435
+ for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
436
+ if (indexName === "PRIMARY") {
437
+ continue;
438
+ }
439
+ const isForeignKeyIndex = dbIndex.columns.some((colName) => {
440
+ const column = dbTable.columns[colName];
441
+ return column && column.COLUMN_KEY === "MUL" && column.EXTRA.includes("foreign key");
442
+ });
443
+ if (isForeignKeyIndex) {
444
+ continue;
445
+ }
446
+ const columns2 = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
447
+ const unique = dbIndex.unique ? "UNIQUE " : "";
448
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns2});`);
449
+ }
450
+ for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
451
+ changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
452
+ }
453
+ continue;
454
+ }
455
+ for (const [colName, dbCol] of Object.entries(dbTable.columns)) {
456
+ const drizzleCol = Object.values(drizzleColumns).find((c) => c.name === colName);
457
+ if (!drizzleCol) {
458
+ const type = dbCol.COLUMN_TYPE;
459
+ const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
460
+ changes.push(`ALTER TABLE \`${tableName}\` ADD COLUMN \`${colName}\` ${type} ${nullable};`);
461
+ continue;
462
+ }
463
+ const normalizedDbType = normalizeMySQLType(dbCol.COLUMN_TYPE);
464
+ const normalizedDrizzleType = normalizeMySQLType(drizzleCol.getSQLType());
465
+ if (normalizedDbType !== normalizedDrizzleType) {
466
+ const type = dbCol.COLUMN_TYPE;
467
+ const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
468
+ changes.push(`ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${colName}\` ${type} ${nullable};`);
469
+ }
470
+ }
471
+ const table = Object.values(schemaModule).find((t) => {
472
+ const metadata = getTableMetadata(t);
473
+ return metadata.tableName === tableName;
474
+ });
475
+ if (table) {
476
+ const metadata = getTableMetadata(table);
477
+ for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
478
+ if (indexName === "PRIMARY") {
479
+ continue;
480
+ }
481
+ const isForeignKeyIndex = metadata.foreignKeys.some((fk) => getForeignKeyName(fk) === indexName);
482
+ if (isForeignKeyIndex) {
483
+ continue;
484
+ }
485
+ const existsUniqIndex = metadata.uniqueConstraints.find((uc) => getUniqueConstraintName(uc) === indexName);
486
+ let drizzleIndex = metadata.indexes.find((i) => getIndexName(i) === indexName);
487
+ if (!drizzleIndex && existsUniqIndex) {
488
+ drizzleIndex = existsUniqIndex;
489
+ }
490
+ if (!drizzleIndex) {
491
+ const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
492
+ const unique = dbIndex.unique ? "UNIQUE " : "";
493
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
494
+ continue;
495
+ }
496
+ const dbColumns = dbIndex.columns.join(", ");
497
+ const drizzleColumns2 = getIndexColumns(drizzleIndex).join(", ");
498
+ if (dbColumns !== drizzleColumns2 || dbIndex.unique !== drizzleIndex instanceof uniqueConstraint.UniqueConstraintBuilder) {
499
+ changes.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
500
+ const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
501
+ const unique = dbIndex.unique ? "UNIQUE " : "";
502
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
503
+ }
504
+ }
505
+ for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
506
+ const drizzleFK = metadata.foreignKeys.find((fk) => getForeignKeyName(fk) === fkName);
507
+ if (!drizzleFK) {
508
+ changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
509
+ continue;
510
+ }
511
+ }
512
+ for (const drizzleForeignKey of metadata.foreignKeys) {
513
+ const isDbFk = Object.keys(dbTable.foreignKeys).find((fk) => fk === getForeignKeyName(drizzleForeignKey));
514
+ if (!isDbFk) {
515
+ const fkName = getForeignKeyName(drizzleForeignKey);
516
+ changes.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
517
+ }
518
+ }
519
+ }
520
+ }
521
+ return changes;
522
+ }
299
523
  const updateMigration = async (options) => {
300
524
  try {
301
525
  let version = await loadMigrationVersion(options.output);
526
+ const prevVersion = version;
302
527
  if (version < 1) {
303
528
  console.log(
304
529
  `⚠️ Initial migration not found. Run "npx forge-sql-orm migrations:create" first.`
@@ -306,247 +531,81 @@ const updateMigration = async (options) => {
306
531
  process.exit(0);
307
532
  }
308
533
  version += 1;
309
- const entities = await loadEntities$1(options.entitiesPath);
310
- const orm = mysql.MikroORM.initSync({
534
+ const connection = await mysql.createConnection({
311
535
  host: options.host,
312
536
  port: options.port,
313
537
  user: options.user,
314
538
  password: options.password,
315
- dbName: options.dbName,
316
- entities,
317
- debug: true
539
+ database: options.dbName
318
540
  });
319
- const createSchemaSQL = await orm.schema.getUpdateSchemaMigrationSQL({ wrap: true });
320
- const statements = extractCreateStatements(createSchemaSQL?.down || "");
321
- if (statements.length) {
322
- const migrationFile = generateMigrationFile$1(statements, version);
323
- saveMigrationFiles$1(migrationFile, version, options.output);
324
- console.log(`✅ Migration successfully updated!`);
325
- process.exit(0);
326
- } else {
327
- console.log(`⚠️ No new migration changes detected.`);
328
- process.exit(0);
329
- }
330
- } catch (error) {
331
- console.error(`❌ Error during migration update:`, error);
332
- process.exit(1);
333
- }
334
- };
335
- const PATCHES = [
336
- // 🗑️ Remove unused dialects (mssql, postgres, sqlite) in MikroORM
337
- {
338
- file: "node_modules/@mikro-orm/knex/MonkeyPatchable.d.ts",
339
- deleteLines: [
340
- /^.*mssql.*$/gim,
341
- /^.*MsSql.*$/gim,
342
- /^\s*Postgres.*$/gm,
343
- /^.*Sqlite3.*$/gm,
344
- /^.*BetterSqlite3.*$/gim
345
- ],
346
- description: "Removing unused dialects from MonkeyPatchable.d.ts"
347
- },
348
- {
349
- file: "node_modules/@mikro-orm/knex/MonkeyPatchable.js",
350
- deleteLines: [
351
- /^.*mssql.*$/gim,
352
- /^.*MsSql.*$/gim,
353
- /^.*postgres.*$/gim,
354
- /^.*sqlite.*$/gim,
355
- /^.*Sqlite.*$/gim
356
- ],
357
- description: "Removing unused dialects from MonkeyPatchable.js"
358
- },
359
- {
360
- file: "node_modules/@mikro-orm/knex/dialects/index.js",
361
- deleteLines: [/^.*mssql.*$/gim, /^.*MsSql.*$/gim, /^.*postgresql.*$/gim, /^.*sqlite.*$/gim],
362
- description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
363
- },
364
- {
365
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/mssql",
366
- description: "Removing mssql dialect from MikroORM"
367
- },
368
- {
369
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/postgresql",
370
- description: "Removing postgresql dialect from MikroORM"
371
- },
372
- {
373
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/sqlite",
374
- description: "Removing sqlite dialect from MikroORM"
375
- },
376
- {
377
- deleteFolder: "node_modules/@mikro-orm/mysql/node_modules",
378
- description: "Removing node_modules from @mikro-orm/mysql"
379
- },
380
- {
381
- deleteFolder: "node_modules/@mikro-orm/knex/node_modules",
382
- description: "Removing node_modules from @mikro-orm/knex"
383
- },
384
- {
385
- deleteFolder: "node_modules/@mikro-orm/core/node_modules",
386
- description: "Removing sqlite dialect from MikroORM"
387
- },
388
- // 🔄 Fix Webpack `Critical dependency: the request of a dependency is an expression`
389
- {
390
- file: "node_modules/@mikro-orm/core/utils/Configuration.js",
391
- search: /dynamicImportProvider:\s*\/\* istanbul ignore next \*\/\s*\(id\) => import\(id\),/g,
392
- replace: "dynamicImportProvider: /* istanbul ignore next */ () => Promise.resolve({}),",
393
- description: "Fixing dynamic imports in MikroORM Configuration"
394
- },
395
- {
396
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
397
- search: /static dynamicImportProvider = \(id\) => import\(id\);/g,
398
- replace: "static dynamicImportProvider = () => Promise.resolve({});",
399
- description: "Fixing dynamic imports in MikroORM Utils.js"
400
- },
401
- // 🛑 Remove deprecated `require.extensions` usage
402
- {
403
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
404
- search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
405
- replace: ";",
406
- description: "Removing deprecated `require.extensions` check in MikroORM"
407
- },
408
- // 🛠️ Patch Knex to remove `Migrator` and `Seeder`
409
- {
410
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
411
- deleteLines: [
412
- /^const \{ Migrator \} = require\('\.\.\/migrations\/migrate\/Migrator'\);$/gm,
413
- /^const Seeder = require\('\.\.\/migrations\/seed\/Seeder'\);$/gm
414
- ],
415
- description: "Removing `Migrator` and `Seeder` requires from make-knex.js"
416
- },
417
- {
418
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
419
- search: /\sreturn new Migrator\(this\);/g,
420
- replace: "return null;",
421
- description: "Replacing `return new Migrator(this);` with `return null;`"
422
- },
423
- {
424
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
425
- search: /\sreturn new Seeder\(this\);/g,
426
- replace: "return null;",
427
- description: "Replacing `return new Seeder(this);` with `return null;`"
428
- },
429
- {
430
- file: "node_modules/knex/lib/dialects/index.js",
431
- deleteLines: [
432
- /^.*mssql.*$/gim,
433
- /^.*MsSql.*$/gim,
434
- /^.*postgresql.*$/gim,
435
- /^.*sqlite.*$/gim,
436
- /^.*oracle.*$/gim,
437
- /^.*oracledb.*$/gim,
438
- /^.*pgnative.*$/gim,
439
- /^.*postgres.*$/gim,
440
- /^.*redshift.*$/gim,
441
- /^.*sqlite3.*$/gim,
442
- /^.*cockroachdb.*$/gim
443
- ],
444
- description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
445
- },
446
- {
447
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
448
- search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
449
- replace: ";",
450
- // Replaces with semicolon to keep syntax valid
451
- description: "Removing deprecated `require.extensions` check from MikroORM"
452
- },
453
- {
454
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
455
- search: /^.*extensions.*$/gim,
456
- replace: "{",
457
- // Replaces with semicolon to keep syntax valid
458
- description: "Removing deprecated `require.extensions` check from MikroORM"
459
- },
460
- {
461
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
462
- search: /^.*package.json.*$/gim,
463
- replace: "return 0;",
464
- // Replaces with semicolon to keep syntax valid
465
- description: "Removing deprecated `require.extensions` check from MikroORM"
466
- },
467
- {
468
- file: "node_modules/@mikro-orm/knex/dialects/mysql/index.js",
469
- deleteLines: [/^.*MariaDbKnexDialect.*$/gim],
470
- description: "Removing MariaDbKnexDialect"
471
- }
472
- ];
473
- function runPostInstallPatch() {
474
- console.log("🔧 Applying MikroORM & Knex patches...");
475
- PATCHES.forEach(
476
- ({ file, search, replace, deleteLines, deleteFile, deleteFolder, description }) => {
477
- if (file) {
478
- const filePath = path.resolve(file);
479
- if (fs.existsSync(filePath)) {
480
- let content = fs.readFileSync(filePath, "utf8");
481
- let originalContent = content;
482
- if (search && replace) {
483
- if (typeof search === "string" ? content.includes(search) : search.test(content)) {
484
- content = content.replace(search, replace);
485
- console.log(`[PATCHED] ${description}`);
486
- }
487
- }
488
- if (deleteLines) {
489
- deleteLines.forEach((pattern) => {
490
- content = content.split("\n").filter((line) => !pattern.test(line)).join("\n");
491
- });
492
- if (content !== originalContent) {
493
- console.log(`[CLEANED] Removed matching lines in ${file}`);
494
- }
495
- }
496
- if (content !== originalContent) {
497
- fs.writeFileSync(filePath, content, "utf8");
498
- }
499
- if (content.trim() === "") {
500
- fs.unlinkSync(filePath);
501
- console.log(`[REMOVED] ${filePath} (file is now empty)`);
502
- }
503
- } else {
504
- console.warn(`[WARNING] File not found: ${file}`);
505
- }
541
+ try {
542
+ const dbSchema = await getDatabaseSchema(connection, options.dbName);
543
+ const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
544
+ if (!fs.existsSync(schemaPath)) {
545
+ throw new Error(`Schema file not found at: ${schemaPath}`);
546
+ }
547
+ const schemaModule = await import(schemaPath);
548
+ if (!schemaModule) {
549
+ throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
506
550
  }
507
- if (deleteFile) {
508
- const deleteFilePath = path.resolve(deleteFile);
509
- if (fs.existsSync(deleteFilePath)) {
510
- fs.unlinkSync(deleteFilePath);
511
- console.log(`[DELETED] ${deleteFilePath} ${description}`);
512
- } else {
513
- console.log(`[SKIPPED] ${deleteFilePath} ${description}`);
551
+ const drizzleSchema = {};
552
+ const tables = Object.values(schemaModule);
553
+ tables.forEach((table) => {
554
+ const metadata = getTableMetadata(table);
555
+ if (metadata.tableName) {
556
+ const columns = {};
557
+ Object.entries(metadata.columns).forEach(([name, column]) => {
558
+ columns[name] = {
559
+ type: column.dataType,
560
+ notNull: column.notNull,
561
+ autoincrement: column.autoincrement,
562
+ columnType: column.columnType,
563
+ name: column.name,
564
+ getSQLType: () => column.getSQLType()
565
+ };
566
+ });
567
+ drizzleSchema[metadata.tableName] = columns;
514
568
  }
569
+ });
570
+ if (Object.keys(drizzleSchema).length === 0) {
571
+ throw new Error(`No valid tables found in schema at: ${schemaPath}`);
515
572
  }
516
- if (deleteFolder) {
517
- const deleteFolderPath = path.resolve(deleteFolder);
518
- if (fs.existsSync(deleteFolderPath)) {
519
- fs.rmSync(deleteFolderPath, { recursive: true, force: true });
520
- console.log(`[DELETED] ${deleteFolderPath} ${description}`);
521
- } else {
522
- console.log(`[SKIPPED] ${deleteFolderPath} ${description}`);
573
+ console.log("Found tables:", Object.keys(drizzleSchema));
574
+ const createStatements = filterWithPreviousMigration(generateSchemaChanges(drizzleSchema, dbSchema, schemaModule), prevVersion, options.output);
575
+ if (createStatements.length) {
576
+ const migrationFile = generateMigrationFile$1(createStatements, version);
577
+ if (saveMigrationFiles$1(migrationFile, version, options.output)) {
578
+ console.log(`✅ Migration successfully updated!`);
523
579
  }
580
+ process.exit(0);
581
+ } else {
582
+ console.log(`⚠️ No new migration changes detected.`);
583
+ process.exit(0);
524
584
  }
585
+ } finally {
586
+ await connection.end();
525
587
  }
526
- );
527
- console.log("🎉 MikroORM & Knex patching completed!");
528
- }
588
+ } catch (error) {
589
+ console.error(`❌ Error during migration update:`, error);
590
+ process.exit(1);
591
+ }
592
+ };
529
593
  function generateMigrationUUID(version) {
530
594
  const now = /* @__PURE__ */ new Date();
531
595
  const timestamp = now.getTime();
532
596
  return `MIGRATION_V${version}_${timestamp}`;
533
597
  }
534
- function cleanSQLStatement(sql) {
535
- return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
536
- }
537
598
  function generateMigrationFile(createStatements, version) {
538
599
  const uniqId = generateMigrationUUID(version);
539
600
  const migrationLines = createStatements.map(
540
- (stmt, index) => ` .enqueue("${uniqId}_${index}", "${cleanSQLStatement(stmt)}")`
601
+ (stmt, index) => ` .enqueue("${uniqId}_${index}", "${stmt}")`
541
602
  // eslint-disable-line no-useless-escape
542
603
  ).join("\n");
543
- const clearMigrationsLine = ` .enqueue("${uniqId}", "DELETE FROM __migrations")`;
544
604
  return `import { MigrationRunner } from "@forge/sql/out/migration";
545
605
 
546
606
  export default (migrationRunner: MigrationRunner): MigrationRunner => {
547
607
  return migrationRunner
548
- ${migrationLines}
549
- ${clearMigrationsLine};
608
+ ${migrationLines};
550
609
  };`;
551
610
  }
552
611
  function saveMigrationFiles(migrationCode, version, outputDir) {
@@ -581,42 +640,54 @@ export default async (
581
640
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
582
641
  console.log(`✅ Migration index file created: ${indexFilePath}`);
583
642
  }
584
- const extractDropStatements = (schema) => {
585
- const statements = schema.split(";").map((s) => s.trim());
586
- return statements.filter((s) => {
587
- return s.toLowerCase().startsWith("drop");
588
- });
589
- };
590
- const loadEntities = async (entitiesPath) => {
591
- try {
592
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
593
- if (!fs.existsSync(indexFilePath)) {
594
- console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
595
- process.exit(1);
596
- }
597
- const { default: entities } = await import(indexFilePath);
598
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
599
- return entities;
600
- } catch (error) {
601
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
602
- process.exit(1);
603
- }
604
- };
605
643
  const dropMigration = async (options) => {
606
644
  try {
607
645
  const version = 1;
608
- const entities = await loadEntities(options.entitiesPath);
609
- const orm = mysql.MikroORM.initSync({
610
- host: options.host,
611
- port: options.port,
612
- user: options.user,
613
- password: options.password,
614
- dbName: options.dbName,
615
- entities
646
+ const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
647
+ if (!fs.existsSync(schemaPath)) {
648
+ throw new Error(`Schema file not found at: ${schemaPath}`);
649
+ }
650
+ const schemaModule = await import(schemaPath);
651
+ if (!schemaModule) {
652
+ throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
653
+ }
654
+ const drizzleSchema = {};
655
+ const tables = Object.values(schemaModule);
656
+ tables.forEach((table) => {
657
+ const symbols = Object.getOwnPropertySymbols(table);
658
+ const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
659
+ const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
660
+ const indexesSymbol = symbols.find((s) => s.toString().includes("Indexes"));
661
+ const foreignKeysSymbol = symbols.find((s) => s.toString().includes("ForeignKeys"));
662
+ if (table && nameSymbol && columnsSymbol) {
663
+ drizzleSchema[table[nameSymbol]] = {
664
+ // @ts-ignore
665
+ columns: table[columnsSymbol],
666
+ // @ts-ignore
667
+ indexes: indexesSymbol ? table[indexesSymbol] || {} : {},
668
+ // @ts-ignore
669
+ foreignKeys: foreignKeysSymbol ? table[foreignKeysSymbol] || {} : {}
670
+ };
671
+ }
616
672
  });
617
- const dropSchemaSQL = await orm.schema.getDropSchemaSQL({ wrap: true });
618
- const statements = extractDropStatements(dropSchemaSQL);
619
- const migrationFile = generateMigrationFile(statements, version);
673
+ if (Object.keys(drizzleSchema).length === 0) {
674
+ throw new Error(`No valid tables found in schema at: ${schemaPath}`);
675
+ }
676
+ console.log("Found tables:", Object.keys(drizzleSchema));
677
+ const dropStatements = [];
678
+ for (const [tableName, tableInfo] of Object.entries(drizzleSchema)) {
679
+ for (const fk of Object.values(tableInfo.foreignKeys)) {
680
+ const fkName = fk.getName();
681
+ dropStatements.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
682
+ }
683
+ for (const [indexName, index] of Object.entries(tableInfo.indexes)) {
684
+ if (indexName === "PRIMARY") continue;
685
+ dropStatements.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
686
+ }
687
+ dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
688
+ }
689
+ dropStatements.push(`DELETE FROM __migrations;`);
690
+ const migrationFile = generateMigrationFile(dropStatements, version);
620
691
  saveMigrationFiles(migrationFile, version, options.output);
621
692
  console.log(`✅ Migration successfully created!`);
622
693
  process.exit(0);
@@ -796,12 +867,5 @@ program.command("migrations:drop").description("Generate a migration to drop all
796
867
  );
797
868
  await dropMigration(config);
798
869
  });
799
- program.command("patch:mikroorm").description("Patch MikroORM and Knex dependencies to work properly with Forge").action(async () => {
800
- console.log("Running MikroORM patch...");
801
- await runPostInstallPatch();
802
- await runPostInstallPatch();
803
- await runPostInstallPatch();
804
- console.log("✅ MikroORM patch applied successfully!");
805
- });
806
870
  program.parse(process.argv);
807
871
  //# sourceMappingURL=cli.js.map