forge-sql-orm 1.0.31 → 1.1.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/README.md +216 -695
  2. package/dist/ForgeSQLORM.js +538 -567
  3. package/dist/ForgeSQLORM.js.map +1 -1
  4. package/dist/ForgeSQLORM.mjs +536 -554
  5. package/dist/ForgeSQLORM.mjs.map +1 -1
  6. package/dist/core/ForgeSQLCrudOperations.d.ts +101 -130
  7. package/dist/core/ForgeSQLCrudOperations.d.ts.map +1 -1
  8. package/dist/core/ForgeSQLORM.d.ts +11 -10
  9. package/dist/core/ForgeSQLORM.d.ts.map +1 -1
  10. package/dist/core/ForgeSQLQueryBuilder.d.ts +271 -113
  11. package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
  12. package/dist/core/ForgeSQLSelectOperations.d.ts +65 -22
  13. package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
  14. package/dist/core/SystemTables.d.ts +59 -0
  15. package/dist/core/SystemTables.d.ts.map +1 -0
  16. package/dist/index.d.ts +1 -2
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/utils/sqlUtils.d.ts +53 -6
  19. package/dist/utils/sqlUtils.d.ts.map +1 -1
  20. package/dist-cli/cli.js +471 -397
  21. package/dist-cli/cli.js.map +1 -1
  22. package/dist-cli/cli.mjs +471 -397
  23. package/dist-cli/cli.mjs.map +1 -1
  24. package/package.json +21 -22
  25. package/src/core/ForgeSQLCrudOperations.ts +360 -473
  26. package/src/core/ForgeSQLORM.ts +38 -79
  27. package/src/core/ForgeSQLQueryBuilder.ts +250 -133
  28. package/src/core/ForgeSQLSelectOperations.ts +182 -72
  29. package/src/core/SystemTables.ts +7 -0
  30. package/src/index.ts +1 -2
  31. package/src/utils/sqlUtils.ts +155 -23
  32. package/dist/core/ComplexQuerySchemaBuilder.d.ts +0 -38
  33. package/dist/core/ComplexQuerySchemaBuilder.d.ts.map +0 -1
  34. package/dist/knex/index.d.ts +0 -4
  35. package/dist/knex/index.d.ts.map +0 -1
  36. package/src/core/ComplexQuerySchemaBuilder.ts +0 -63
  37. package/src/knex/index.ts +0 -4
package/dist-cli/cli.mjs CHANGED
@@ -5,92 +5,138 @@ import inquirer from "inquirer";
5
5
  import fs from "fs";
6
6
  import path from "path";
7
7
  import "reflect-metadata";
8
- import { defineConfig, MongoNamingStrategy, MikroORM } from "@mikro-orm/mysql";
9
- import { EntityGenerator } from "@mikro-orm/entity-generator";
10
- const regenerateIndexFile = (outputPath) => {
11
- const entitiesDir = path.resolve(outputPath);
12
- const indexPath = path.join(entitiesDir, "index.ts");
13
- const entityFiles = fs.readdirSync(entitiesDir).filter((file) => file.endsWith(".ts") && file !== "index.ts");
14
- const imports = entityFiles.map((file) => {
15
- const entityName = path.basename(file, ".ts");
16
- return `import { ${entityName} } from "./${entityName}";`;
17
- });
18
- const indexContent = `${imports.join("\n")}
8
+ import { execSync } from "child_process";
9
+ import mysql from "mysql2/promise";
10
+ import "moment";
11
+ import { PrimaryKeyBuilder } from "drizzle-orm/mysql-core/primary-keys";
12
+ import { IndexBuilder } from "drizzle-orm/mysql-core/indexes";
13
+ import { CheckBuilder } from "drizzle-orm/mysql-core/checks";
14
+ import { ForeignKeyBuilder } from "drizzle-orm/mysql-core/foreign-keys";
15
+ import { UniqueConstraintBuilder } from "drizzle-orm/mysql-core/unique-constraint";
16
+ function replaceMySQLTypes(schemaContent) {
17
+ const imports = `import { mySqlDateTimeString, mySqlTimeString, mySqlDateString, mySqlTimestampString } from "forge-sql-orm";
19
18
 
20
- export default [${entityFiles.map((file) => path.basename(file, ".ts")).join(", ")}];
21
19
  `;
22
- fs.writeFileSync(indexPath, indexContent, "utf8");
23
- console.log(`✅ Updated index.ts with ${entityFiles.length} entities.`);
24
- };
20
+ let modifiedContent = schemaContent.replace(/datetime\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(['"]([^'"]+)['"]\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlDateTimeString()").replace(/datetime\(\)/g, "mySqlDateTimeString()").replace(/time\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlTimeString('$1')").replace(/time\(['"]([^'"]+)['"]\)/g, "mySqlTimeString('$1')").replace(/time\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlTimeString()").replace(/time\(\)/g, "mySqlTimeString()").replace(/date\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlDateString('$1')").replace(/date\(['"]([^'"]+)['"]\)/g, "mySqlDateString('$1')").replace(/date\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlDateString()").replace(/date\(\)/g, "mySqlDateString()").replace(/timestamp\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(['"]([^'"]+)['"]\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlTimestampString()").replace(/timestamp\(\)/g, "mySqlTimestampString()");
21
+ if (!modifiedContent.includes("import { mySqlDateTimeString")) {
22
+ modifiedContent = imports + modifiedContent;
23
+ }
24
+ return modifiedContent;
25
+ }
25
26
  const generateModels = async (options) => {
26
27
  try {
27
- const ormConfig = defineConfig({
28
- host: options.host,
29
- port: options.port,
30
- user: options.user,
31
- password: options.password,
32
- dbName: options.dbName,
33
- namingStrategy: MongoNamingStrategy,
34
- discovery: { warnWhenNoEntities: false },
35
- extensions: [EntityGenerator],
36
- debug: true
37
- });
38
- const orm = MikroORM.initSync(ormConfig);
39
- console.log(`✅ Connected to ${options.dbName} at ${options.host}:${options.port}`);
40
- const onCreatingVersionField = async (metadatas) => {
41
- metadatas.forEach((m) => {
42
- if (options.versionField) {
43
- const versionFieldName = Object.keys(m.properties).find((p) => {
44
- return p === options.versionField || m.properties[p]?.name === options.versionField || m.properties[p]?.fieldNames?.find((f) => f === options.versionField);
45
- });
46
- if (versionFieldName) {
47
- const property = m.properties[versionFieldName];
48
- if (property.type !== "datetime" && property.type !== "integer" && property.type !== "decimal") {
49
- console.warn(
50
- `Version field "${property.name}" can be only datetime or integer Table ${m.tableName} but now is "${property.type}"`
51
- );
52
- return;
53
- }
54
- if (property.primary) {
55
- console.warn(
56
- `Version field "${property.name}" can not be primary key Table ${m.tableName}`
57
- );
58
- return;
59
- }
60
- if (property.nullable) {
28
+ const sql = await execSync(
29
+ `npx drizzle-kit pull --dialect mysql --url mysql://${options.user}:${options.password}@${options.host}:${options.port}/${options.dbName} --out ${options.output}`,
30
+ { encoding: "utf-8" }
31
+ );
32
+ const metaDir = path.join(options.output, "meta");
33
+ const additionalMetadata = {};
34
+ if (fs.existsSync(metaDir)) {
35
+ const snapshotFile = path.join(metaDir, "0000_snapshot.json");
36
+ if (fs.existsSync(snapshotFile)) {
37
+ const snapshotData = JSON.parse(fs.readFileSync(snapshotFile, "utf-8"));
38
+ for (const [tableName, tableData] of Object.entries(snapshotData.tables)) {
39
+ const table = tableData;
40
+ const versionField = Object.entries(table.columns).find(
41
+ ([_, col]) => col.name.toLowerCase() === options.versionField
42
+ );
43
+ if (versionField) {
44
+ const [_, col] = versionField;
45
+ const fieldType = col.type;
46
+ const isSupportedType = fieldType === "datetime" || fieldType === "timestamp" || fieldType === "int" || fieldType === "number" || fieldType === "decimal";
47
+ if (!col.notNull) {
48
+ console.warn(`Version field "${col.name}" in table ${tableName} is nullable. Versioning may not work correctly.`);
49
+ } else if (!isSupportedType) {
61
50
  console.warn(
62
- `Version field "${property.name}" should not be nullable Table ${m.tableName}`
51
+ `Version field "${col.name}" in table ${tableName} has unsupported type "${fieldType}". Only datetime, timestamp, int, and decimal types are supported for versioning. Versioning will be skipped.`
63
52
  );
64
- return;
53
+ } else {
54
+ additionalMetadata[tableName] = {
55
+ tableName,
56
+ versionField: {
57
+ fieldName: col.name
58
+ }
59
+ };
65
60
  }
66
- property.version = true;
67
61
  }
68
62
  }
69
- });
70
- };
71
- await orm.entityGenerator.generate({
72
- entitySchema: true,
73
- bidirectionalRelations: true,
74
- identifiedReferences: false,
75
- forceUndefined: true,
76
- undefinedDefaults: true,
77
- useCoreBaseEntity: false,
78
- onlyPurePivotTables: false,
79
- outputPurePivotTables: false,
80
- scalarPropertiesForRelations: "always",
81
- save: true,
82
- path: options.output,
83
- onInitialMetadata: onCreatingVersionField
84
- });
85
- regenerateIndexFile(options.output);
86
- console.log(`✅ Entities generated at: ${options.output}`);
63
+ }
64
+ }
65
+ const versionMetadataContent = `/**
66
+ * This file was auto-generated by forge-sql-orm
67
+ * Generated at: ${(/* @__PURE__ */ new Date()).toISOString()}
68
+ *
69
+ * DO NOT EDIT THIS FILE MANUALLY
70
+ * Any changes will be overwritten on next generation
71
+ */
72
+
73
+
74
+ export * from "./relations";
75
+ export * from "./schema";
76
+
77
+ export interface VersionFieldMetadata {
78
+ fieldName: string;
79
+ }
80
+
81
+ export interface TableMetadata {
82
+ tableName: string;
83
+ versionField: VersionFieldMetadata;
84
+ }
85
+
86
+ export type AdditionalMetadata = Record<string, TableMetadata>;
87
+
88
+ export const additionalMetadata: AdditionalMetadata = ${JSON.stringify(additionalMetadata, null, 2)};
89
+ `;
90
+ fs.writeFileSync(path.join(options.output, "index.ts"), versionMetadataContent);
91
+ const schemaPath = path.join(options.output, "schema.ts");
92
+ if (fs.existsSync(schemaPath)) {
93
+ const schemaContent = fs.readFileSync(schemaPath, "utf-8");
94
+ const modifiedContent = replaceMySQLTypes(schemaContent);
95
+ fs.writeFileSync(schemaPath, modifiedContent);
96
+ console.log(`✅ Updated schema types in: ${schemaPath}`);
97
+ }
98
+ const migrationDir = path.join(options.output, "migrations");
99
+ if (fs.existsSync(migrationDir)) {
100
+ fs.rmSync(migrationDir, { recursive: true, force: true });
101
+ console.log(`✅ Removed: ${migrationDir}`);
102
+ }
103
+ if (fs.existsSync(metaDir)) {
104
+ const journalFile = path.join(metaDir, "_journal.json");
105
+ if (fs.existsSync(journalFile)) {
106
+ const journalData = JSON.parse(fs.readFileSync(journalFile, "utf-8"));
107
+ for (const entry of journalData.entries) {
108
+ const sqlFile = path.join(options.output, `${entry.tag}.sql`);
109
+ if (fs.existsSync(sqlFile)) {
110
+ fs.rmSync(sqlFile, { force: true });
111
+ console.log(`✅ Removed SQL file: ${entry.tag}.sql`);
112
+ }
113
+ }
114
+ }
115
+ fs.rmSync(metaDir, { recursive: true, force: true });
116
+ console.log(`✅ Removed: ${metaDir}`);
117
+ }
118
+ console.log(`✅ Successfully generated models and version metadata`);
87
119
  process.exit(0);
88
120
  } catch (error) {
89
- console.error(`❌ Error generating entities:`, error);
121
+ console.error(`❌ Error during model generation:`, error);
90
122
  process.exit(1);
91
123
  }
92
124
  };
93
- function cleanSQLStatement$2(sql) {
125
+ const loadMigrationVersion$1 = async (migrationPath) => {
126
+ try {
127
+ const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
128
+ if (!fs.existsSync(migrationCountFilePath)) {
129
+ return 0;
130
+ }
131
+ const { MIGRATION_VERSION } = await import(migrationCountFilePath);
132
+ console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
133
+ return MIGRATION_VERSION;
134
+ } catch (error) {
135
+ console.error(`❌ Error loading migrationCount:`, error);
136
+ process.exit(1);
137
+ }
138
+ };
139
+ function cleanSQLStatement(sql) {
94
140
  sql = sql.replace(/create\s+table\s+(\w+)/gi, "create table if not exists $1");
95
141
  sql = sql.replace(/create\s+index\s+(\w+)/gi, "create index if not exists $1");
96
142
  sql = sql.replace(/alter\s+table\s+(\w+)\s+add\s+index\s+(\w+)/gi, "alter table $1 add index if not exists $2");
@@ -100,7 +146,7 @@ function cleanSQLStatement$2(sql) {
100
146
  function generateMigrationFile$2(createStatements, version) {
101
147
  const versionPrefix = `v${version}_MIGRATION`;
102
148
  const migrationLines = createStatements.map(
103
- (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement$2(stmt)}")`
149
+ (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement(stmt).replace(/\s+/g, " ")}")`
104
150
  // eslint-disable-line no-useless-escape
105
151
  ).join("\n");
106
152
  return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -142,41 +188,12 @@ export default async (
142
188
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
143
189
  console.log(`✅ Migration index file created: ${indexFilePath}`);
144
190
  }
145
- const extractCreateStatements$1 = (schema) => {
146
- const statements = schema.split(";").map((s) => s.trim());
191
+ const extractCreateStatements = (schema) => {
192
+ const statements = schema.split(/--> statement-breakpoint|;/).map((s) => s.trim()).filter((s) => s.length > 0);
147
193
  return statements.filter(
148
- (stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && (stmt.includes("add index") || stmt.includes("add constraint")) || stmt.startsWith("primary")
194
+ (stmt) => stmt.toLowerCase().startsWith("create table") || stmt.toLowerCase().startsWith("alter table") || stmt.toLowerCase().includes("add index") || stmt.toLowerCase().includes("add unique index") || stmt.toLowerCase().includes("add constraint")
149
195
  );
150
196
  };
151
- const loadEntities$2 = async (entitiesPath) => {
152
- try {
153
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
154
- if (!fs.existsSync(indexFilePath)) {
155
- console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
156
- process.exit(1);
157
- }
158
- const { default: entities } = await import(indexFilePath);
159
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
160
- return entities;
161
- } catch (error) {
162
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
163
- process.exit(1);
164
- }
165
- };
166
- const loadMigrationVersion$1 = async (migrationPath) => {
167
- try {
168
- const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
169
- if (!fs.existsSync(migrationCountFilePath)) {
170
- return 0;
171
- }
172
- const { MIGRATION_VERSION } = await import(migrationCountFilePath);
173
- console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
174
- return MIGRATION_VERSION;
175
- } catch (error) {
176
- console.error(`❌ Error loading migrationCount:`, error);
177
- process.exit(1);
178
- }
179
- };
180
197
  const createMigration = async (options) => {
181
198
  try {
182
199
  let version = await loadMigrationVersion$1(options.output);
@@ -189,19 +206,20 @@ const createMigration = async (options) => {
189
206
  }
190
207
  }
191
208
  version = 1;
192
- const entities = await loadEntities$2(options.entitiesPath);
193
- const orm = MikroORM.initSync({
194
- host: options.host,
195
- port: options.port,
196
- user: options.user,
197
- password: options.password,
198
- dbName: options.dbName,
199
- entities
200
- });
201
- const createSchemaSQL = await orm.schema.getCreateSchemaSQL({ wrap: true });
202
- const statements = extractCreateStatements$1(createSchemaSQL);
203
- const migrationFile = generateMigrationFile$2(statements, version);
204
- saveMigrationFiles$2(migrationFile, version, options.output);
209
+ await execSync(
210
+ `npx drizzle-kit generate --name=init --dialect mysql --out ${options.output} --schema ${options.entitiesPath}`,
211
+ { encoding: "utf-8" }
212
+ );
213
+ const initSqlFile = path.join(options.output, "0000_init.sql");
214
+ const sql = fs.readFileSync(initSqlFile, "utf-8");
215
+ const createStatements = extractCreateStatements(sql);
216
+ const migrationFile = generateMigrationFile$2(createStatements, 1);
217
+ saveMigrationFiles$2(migrationFile, 1, options.output);
218
+ fs.rmSync(initSqlFile, { force: true });
219
+ console.log(`✅ Removed SQL file: ${initSqlFile}`);
220
+ let metaDir = path.join(options.output, "meta");
221
+ fs.rmSync(metaDir, { recursive: true, force: true });
222
+ console.log(`✅ Removed: ${metaDir}`);
205
223
  console.log(`✅ Migration successfully created!`);
206
224
  process.exit(0);
207
225
  } catch (error) {
@@ -209,13 +227,64 @@ const createMigration = async (options) => {
209
227
  process.exit(1);
210
228
  }
211
229
  };
212
- function cleanSQLStatement$1(sql) {
213
- return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
230
+ function getTableMetadata(table) {
231
+ const symbols = Object.getOwnPropertySymbols(table);
232
+ const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
233
+ const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
234
+ const extraSymbol = symbols.find((s) => s.toString().includes("ExtraConfigBuilder"));
235
+ const foreignKeysSymbol = symbols.find((s) => s.toString().includes("MySqlInlineForeignKeys)"));
236
+ const builders = {
237
+ indexes: [],
238
+ checks: [],
239
+ foreignKeys: [],
240
+ primaryKeys: [],
241
+ uniqueConstraints: [],
242
+ extras: []
243
+ };
244
+ if (foreignKeysSymbol) {
245
+ const foreignKeys = table[foreignKeysSymbol];
246
+ if (foreignKeys) {
247
+ for (const foreignKey of foreignKeys) {
248
+ builders.foreignKeys.push(foreignKey);
249
+ }
250
+ }
251
+ }
252
+ if (extraSymbol) {
253
+ const extraConfigBuilder = table[extraSymbol];
254
+ if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
255
+ const configBuilders = extraConfigBuilder(table);
256
+ let configBuildersArray = [];
257
+ if (!Array.isArray(configBuilders)) {
258
+ configBuildersArray = Object.values(configBuilders);
259
+ } else {
260
+ configBuildersArray = configBuilders;
261
+ }
262
+ configBuildersArray.forEach((builder) => {
263
+ if (builder instanceof IndexBuilder) {
264
+ builders.indexes.push(builder);
265
+ } else if (builder instanceof CheckBuilder) {
266
+ builders.checks.push(builder);
267
+ } else if (builder instanceof ForeignKeyBuilder) {
268
+ builders.foreignKeys.push(builder);
269
+ } else if (builder instanceof PrimaryKeyBuilder) {
270
+ builders.primaryKeys.push(builder);
271
+ } else if (builder instanceof UniqueConstraintBuilder) {
272
+ builders.uniqueConstraints.push(builder);
273
+ }
274
+ builders.extras.push(builder);
275
+ });
276
+ }
277
+ }
278
+ return {
279
+ tableName: nameSymbol ? table[nameSymbol] : "",
280
+ columns: columnsSymbol ? table[columnsSymbol] : {},
281
+ ...builders
282
+ };
214
283
  }
215
284
  function generateMigrationFile$1(createStatements, version) {
216
285
  const versionPrefix = `v${version}_MIGRATION`;
217
286
  const migrationLines = createStatements.map(
218
- (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement$1(stmt)}")`
287
+ (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${stmt}")`
219
288
  // eslint-disable-line no-useless-escape
220
289
  ).join("\n");
221
290
  return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -225,6 +294,18 @@ export default (migrationRunner: MigrationRunner): MigrationRunner => {
225
294
  ${migrationLines};
226
295
  };`;
227
296
  }
297
+ function filterWithPreviousMigration(newStatements, prevVersion, outputDir) {
298
+ const prevMigrationPath = path.join(outputDir, `migrationV${prevVersion}.ts`);
299
+ if (!fs.existsSync(prevMigrationPath)) {
300
+ return newStatements.map((s) => s.replace(/\s+/g, " "));
301
+ }
302
+ const prevContent = fs.readFileSync(prevMigrationPath, "utf-8");
303
+ const prevStatements = prevContent.split("\n").filter((line) => line.includes(".enqueue(")).map((line) => {
304
+ const match = line.match(/\.enqueue\([^,]+,\s*"([^"]+)"/);
305
+ return match ? match[1].replace(/\s+/g, " ").trim() : "";
306
+ });
307
+ return newStatements.filter((s) => !prevStatements.includes(s.replace(/\s+/g, " "))).map((s) => s.replace(/\s+/g, " "));
308
+ }
228
309
  function saveMigrationFiles$1(migrationCode, version, outputDir) {
229
310
  if (!fs.existsSync(outputDir)) {
230
311
  fs.mkdirSync(outputDir, { recursive: true });
@@ -256,28 +337,8 @@ export default async (
256
337
  console.log(`✅ Migration file created: ${migrationFilePath}`);
257
338
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
258
339
  console.log(`✅ Migration index file created: ${indexFilePath}`);
340
+ return true;
259
341
  }
260
- const extractCreateStatements = (schema) => {
261
- const statements = schema.split(";").map((s) => s.trim());
262
- return statements.filter(
263
- (stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && stmt.includes("add index") || stmt.startsWith("alter table") && stmt.includes("add") && !stmt.includes("foreign") || stmt.startsWith("alter table") && stmt.includes("modify") && !stmt.includes("foreign")
264
- );
265
- };
266
- const loadEntities$1 = async (entitiesPath) => {
267
- try {
268
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
269
- if (!fs.existsSync(indexFilePath)) {
270
- console.error(`❌ Error: index.ts not found in ${entitiesPath}`);
271
- process.exit(1);
272
- }
273
- const { default: entities } = await import(indexFilePath);
274
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
275
- return entities;
276
- } catch (error) {
277
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
278
- process.exit(1);
279
- }
280
- };
281
342
  const loadMigrationVersion = async (migrationPath) => {
282
343
  try {
283
344
  const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
@@ -295,9 +356,183 @@ const loadMigrationVersion = async (migrationPath) => {
295
356
  process.exit(1);
296
357
  }
297
358
  };
359
+ async function getDatabaseSchema(connection, dbName) {
360
+ const [columns] = await connection.execute(`
361
+ SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA
362
+ FROM INFORMATION_SCHEMA.COLUMNS
363
+ WHERE TABLE_SCHEMA = ?
364
+ `, [dbName]);
365
+ const [indexes] = await connection.execute(`
366
+ SELECT TABLE_NAME, INDEX_NAME, COLUMN_NAME, NON_UNIQUE
367
+ FROM INFORMATION_SCHEMA.STATISTICS
368
+ WHERE TABLE_SCHEMA = ?
369
+ ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX
370
+ `, [dbName]);
371
+ const [foreignKeys] = await connection.execute(`
372
+ SELECT
373
+ TABLE_NAME,
374
+ COLUMN_NAME,
375
+ CONSTRAINT_NAME,
376
+ REFERENCED_TABLE_NAME,
377
+ REFERENCED_COLUMN_NAME
378
+ FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
379
+ WHERE TABLE_SCHEMA = ?
380
+ AND REFERENCED_TABLE_NAME IS NOT NULL
381
+ `, [dbName]);
382
+ const schema = {};
383
+ columns.forEach((row) => {
384
+ if (!schema[row.TABLE_NAME]) {
385
+ schema[row.TABLE_NAME] = {
386
+ columns: {},
387
+ indexes: {},
388
+ foreignKeys: {}
389
+ };
390
+ }
391
+ schema[row.TABLE_NAME].columns[row.COLUMN_NAME] = row;
392
+ });
393
+ indexes.forEach((row) => {
394
+ if (!schema[row.TABLE_NAME].indexes[row.INDEX_NAME]) {
395
+ schema[row.TABLE_NAME].indexes[row.INDEX_NAME] = {
396
+ columns: [],
397
+ unique: !row.NON_UNIQUE
398
+ };
399
+ }
400
+ schema[row.TABLE_NAME].indexes[row.INDEX_NAME].columns.push(row.COLUMN_NAME);
401
+ });
402
+ foreignKeys.forEach((row) => {
403
+ if (!schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME]) {
404
+ schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME] = {
405
+ column: row.COLUMN_NAME,
406
+ referencedTable: row.REFERENCED_TABLE_NAME,
407
+ referencedColumn: row.REFERENCED_COLUMN_NAME
408
+ };
409
+ }
410
+ });
411
+ return schema;
412
+ }
413
+ function normalizeMySQLType(mysqlType) {
414
+ let normalized = mysqlType.replace(/\([^)]*\)/, "").toLowerCase();
415
+ normalized = normalized.replace(/^mysql/, "");
416
+ return normalized;
417
+ }
418
+ function getForeignKeyName(fk) {
419
+ return fk.getName();
420
+ }
421
+ function getIndexName(index) {
422
+ return index.name;
423
+ }
424
+ function getUniqueConstraintName(uc) {
425
+ return uc.name;
426
+ }
427
+ function getIndexColumns(index) {
428
+ return index.columns.map((col) => col.name);
429
+ }
430
+ function generateSchemaChanges(drizzleSchema, dbSchema, schemaModule) {
431
+ const changes = [];
432
+ for (const [tableName, dbTable] of Object.entries(dbSchema)) {
433
+ const drizzleColumns = drizzleSchema[tableName];
434
+ if (!drizzleColumns) {
435
+ const columns = Object.entries(dbTable.columns).map(([colName, col]) => {
436
+ const type = col.COLUMN_TYPE;
437
+ const nullable = col.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
438
+ const autoIncrement = col.EXTRA.includes("auto_increment") ? "AUTO_INCREMENT" : "";
439
+ return `\`${colName}\` ${type} ${nullable} ${autoIncrement}`.trim();
440
+ }).join(",\n ");
441
+ changes.push(`CREATE TABLE if not exists \`${tableName}\` (
442
+ ${columns}
443
+ );`);
444
+ for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
445
+ if (indexName === "PRIMARY") {
446
+ continue;
447
+ }
448
+ const isForeignKeyIndex = dbIndex.columns.some((colName) => {
449
+ const column = dbTable.columns[colName];
450
+ return column && column.COLUMN_KEY === "MUL" && column.EXTRA.includes("foreign key");
451
+ });
452
+ if (isForeignKeyIndex) {
453
+ continue;
454
+ }
455
+ const columns2 = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
456
+ const unique = dbIndex.unique ? "UNIQUE " : "";
457
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns2});`);
458
+ }
459
+ for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
460
+ changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
461
+ }
462
+ continue;
463
+ }
464
+ for (const [colName, dbCol] of Object.entries(dbTable.columns)) {
465
+ const drizzleCol = Object.values(drizzleColumns).find((c) => c.name === colName);
466
+ if (!drizzleCol) {
467
+ const type = dbCol.COLUMN_TYPE;
468
+ const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
469
+ changes.push(`ALTER TABLE \`${tableName}\` ADD COLUMN \`${colName}\` ${type} ${nullable};`);
470
+ continue;
471
+ }
472
+ const normalizedDbType = normalizeMySQLType(dbCol.COLUMN_TYPE);
473
+ const normalizedDrizzleType = normalizeMySQLType(drizzleCol.getSQLType());
474
+ if (normalizedDbType !== normalizedDrizzleType) {
475
+ const type = dbCol.COLUMN_TYPE;
476
+ const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
477
+ changes.push(`ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${colName}\` ${type} ${nullable};`);
478
+ }
479
+ }
480
+ const table = Object.values(schemaModule).find((t) => {
481
+ const metadata = getTableMetadata(t);
482
+ return metadata.tableName === tableName;
483
+ });
484
+ if (table) {
485
+ const metadata = getTableMetadata(table);
486
+ for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
487
+ if (indexName === "PRIMARY") {
488
+ continue;
489
+ }
490
+ const isForeignKeyIndex = metadata.foreignKeys.some((fk) => getForeignKeyName(fk) === indexName);
491
+ if (isForeignKeyIndex) {
492
+ continue;
493
+ }
494
+ const existsUniqIndex = metadata.uniqueConstraints.find((uc) => getUniqueConstraintName(uc) === indexName);
495
+ let drizzleIndex = metadata.indexes.find((i) => getIndexName(i) === indexName);
496
+ if (!drizzleIndex && existsUniqIndex) {
497
+ drizzleIndex = existsUniqIndex;
498
+ }
499
+ if (!drizzleIndex) {
500
+ const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
501
+ const unique = dbIndex.unique ? "UNIQUE " : "";
502
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
503
+ continue;
504
+ }
505
+ const dbColumns = dbIndex.columns.join(", ");
506
+ const drizzleColumns2 = getIndexColumns(drizzleIndex).join(", ");
507
+ if (dbColumns !== drizzleColumns2 || dbIndex.unique !== drizzleIndex instanceof UniqueConstraintBuilder) {
508
+ changes.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
509
+ const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
510
+ const unique = dbIndex.unique ? "UNIQUE " : "";
511
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
512
+ }
513
+ }
514
+ for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
515
+ const drizzleFK = metadata.foreignKeys.find((fk) => getForeignKeyName(fk) === fkName);
516
+ if (!drizzleFK) {
517
+ changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
518
+ continue;
519
+ }
520
+ }
521
+ for (const drizzleForeignKey of metadata.foreignKeys) {
522
+ const isDbFk = Object.keys(dbTable.foreignKeys).find((fk) => fk === getForeignKeyName(drizzleForeignKey));
523
+ if (!isDbFk) {
524
+ const fkName = getForeignKeyName(drizzleForeignKey);
525
+ changes.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
526
+ }
527
+ }
528
+ }
529
+ }
530
+ return changes;
531
+ }
298
532
  const updateMigration = async (options) => {
299
533
  try {
300
534
  let version = await loadMigrationVersion(options.output);
535
+ const prevVersion = version;
301
536
  if (version < 1) {
302
537
  console.log(
303
538
  `⚠️ Initial migration not found. Run "npx forge-sql-orm migrations:create" first.`
@@ -305,247 +540,81 @@ const updateMigration = async (options) => {
305
540
  process.exit(0);
306
541
  }
307
542
  version += 1;
308
- const entities = await loadEntities$1(options.entitiesPath);
309
- const orm = MikroORM.initSync({
543
+ const connection = await mysql.createConnection({
310
544
  host: options.host,
311
545
  port: options.port,
312
546
  user: options.user,
313
547
  password: options.password,
314
- dbName: options.dbName,
315
- entities,
316
- debug: true
548
+ database: options.dbName
317
549
  });
318
- const createSchemaSQL = await orm.schema.getUpdateSchemaMigrationSQL({ wrap: true });
319
- const statements = extractCreateStatements(createSchemaSQL?.down || "");
320
- if (statements.length) {
321
- const migrationFile = generateMigrationFile$1(statements, version);
322
- saveMigrationFiles$1(migrationFile, version, options.output);
323
- console.log(`✅ Migration successfully updated!`);
324
- process.exit(0);
325
- } else {
326
- console.log(`⚠️ No new migration changes detected.`);
327
- process.exit(0);
328
- }
329
- } catch (error) {
330
- console.error(`❌ Error during migration update:`, error);
331
- process.exit(1);
332
- }
333
- };
334
- const PATCHES = [
335
- // 🗑️ Remove unused dialects (mssql, postgres, sqlite) in MikroORM
336
- {
337
- file: "node_modules/@mikro-orm/knex/MonkeyPatchable.d.ts",
338
- deleteLines: [
339
- /^.*mssql.*$/gim,
340
- /^.*MsSql.*$/gim,
341
- /^\s*Postgres.*$/gm,
342
- /^.*Sqlite3.*$/gm,
343
- /^.*BetterSqlite3.*$/gim
344
- ],
345
- description: "Removing unused dialects from MonkeyPatchable.d.ts"
346
- },
347
- {
348
- file: "node_modules/@mikro-orm/knex/MonkeyPatchable.js",
349
- deleteLines: [
350
- /^.*mssql.*$/gim,
351
- /^.*MsSql.*$/gim,
352
- /^.*postgres.*$/gim,
353
- /^.*sqlite.*$/gim,
354
- /^.*Sqlite.*$/gim
355
- ],
356
- description: "Removing unused dialects from MonkeyPatchable.js"
357
- },
358
- {
359
- file: "node_modules/@mikro-orm/knex/dialects/index.js",
360
- deleteLines: [/^.*mssql.*$/gim, /^.*MsSql.*$/gim, /^.*postgresql.*$/gim, /^.*sqlite.*$/gim],
361
- description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
362
- },
363
- {
364
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/mssql",
365
- description: "Removing mssql dialect from MikroORM"
366
- },
367
- {
368
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/postgresql",
369
- description: "Removing postgresql dialect from MikroORM"
370
- },
371
- {
372
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/sqlite",
373
- description: "Removing sqlite dialect from MikroORM"
374
- },
375
- {
376
- deleteFolder: "node_modules/@mikro-orm/mysql/node_modules",
377
- description: "Removing node_modules from @mikro-orm/mysql"
378
- },
379
- {
380
- deleteFolder: "node_modules/@mikro-orm/knex/node_modules",
381
- description: "Removing node_modules from @mikro-orm/knex"
382
- },
383
- {
384
- deleteFolder: "node_modules/@mikro-orm/core/node_modules",
385
- description: "Removing sqlite dialect from MikroORM"
386
- },
387
- // 🔄 Fix Webpack `Critical dependency: the request of a dependency is an expression`
388
- {
389
- file: "node_modules/@mikro-orm/core/utils/Configuration.js",
390
- search: /dynamicImportProvider:\s*\/\* istanbul ignore next \*\/\s*\(id\) => import\(id\),/g,
391
- replace: "dynamicImportProvider: /* istanbul ignore next */ () => Promise.resolve({}),",
392
- description: "Fixing dynamic imports in MikroORM Configuration"
393
- },
394
- {
395
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
396
- search: /static dynamicImportProvider = \(id\) => import\(id\);/g,
397
- replace: "static dynamicImportProvider = () => Promise.resolve({});",
398
- description: "Fixing dynamic imports in MikroORM Utils.js"
399
- },
400
- // 🛑 Remove deprecated `require.extensions` usage
401
- {
402
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
403
- search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
404
- replace: ";",
405
- description: "Removing deprecated `require.extensions` check in MikroORM"
406
- },
407
- // 🛠️ Patch Knex to remove `Migrator` and `Seeder`
408
- {
409
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
410
- deleteLines: [
411
- /^const \{ Migrator \} = require\('\.\.\/migrations\/migrate\/Migrator'\);$/gm,
412
- /^const Seeder = require\('\.\.\/migrations\/seed\/Seeder'\);$/gm
413
- ],
414
- description: "Removing `Migrator` and `Seeder` requires from make-knex.js"
415
- },
416
- {
417
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
418
- search: /\sreturn new Migrator\(this\);/g,
419
- replace: "return null;",
420
- description: "Replacing `return new Migrator(this);` with `return null;`"
421
- },
422
- {
423
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
424
- search: /\sreturn new Seeder\(this\);/g,
425
- replace: "return null;",
426
- description: "Replacing `return new Seeder(this);` with `return null;`"
427
- },
428
- {
429
- file: "node_modules/knex/lib/dialects/index.js",
430
- deleteLines: [
431
- /^.*mssql.*$/gim,
432
- /^.*MsSql.*$/gim,
433
- /^.*postgresql.*$/gim,
434
- /^.*sqlite.*$/gim,
435
- /^.*oracle.*$/gim,
436
- /^.*oracledb.*$/gim,
437
- /^.*pgnative.*$/gim,
438
- /^.*postgres.*$/gim,
439
- /^.*redshift.*$/gim,
440
- /^.*sqlite3.*$/gim,
441
- /^.*cockroachdb.*$/gim
442
- ],
443
- description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
444
- },
445
- {
446
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
447
- search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
448
- replace: ";",
449
- // Replaces with semicolon to keep syntax valid
450
- description: "Removing deprecated `require.extensions` check from MikroORM"
451
- },
452
- {
453
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
454
- search: /^.*extensions.*$/gim,
455
- replace: "{",
456
- // Replaces with semicolon to keep syntax valid
457
- description: "Removing deprecated `require.extensions` check from MikroORM"
458
- },
459
- {
460
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
461
- search: /^.*package.json.*$/gim,
462
- replace: "return 0;",
463
- // Replaces with semicolon to keep syntax valid
464
- description: "Removing deprecated `require.extensions` check from MikroORM"
465
- },
466
- {
467
- file: "node_modules/@mikro-orm/knex/dialects/mysql/index.js",
468
- deleteLines: [/^.*MariaDbKnexDialect.*$/gim],
469
- description: "Removing MariaDbKnexDialect"
470
- }
471
- ];
472
- function runPostInstallPatch() {
473
- console.log("🔧 Applying MikroORM & Knex patches...");
474
- PATCHES.forEach(
475
- ({ file, search, replace, deleteLines, deleteFile, deleteFolder, description }) => {
476
- if (file) {
477
- const filePath = path.resolve(file);
478
- if (fs.existsSync(filePath)) {
479
- let content = fs.readFileSync(filePath, "utf8");
480
- let originalContent = content;
481
- if (search && replace) {
482
- if (typeof search === "string" ? content.includes(search) : search.test(content)) {
483
- content = content.replace(search, replace);
484
- console.log(`[PATCHED] ${description}`);
485
- }
486
- }
487
- if (deleteLines) {
488
- deleteLines.forEach((pattern) => {
489
- content = content.split("\n").filter((line) => !pattern.test(line)).join("\n");
490
- });
491
- if (content !== originalContent) {
492
- console.log(`[CLEANED] Removed matching lines in ${file}`);
493
- }
494
- }
495
- if (content !== originalContent) {
496
- fs.writeFileSync(filePath, content, "utf8");
497
- }
498
- if (content.trim() === "") {
499
- fs.unlinkSync(filePath);
500
- console.log(`[REMOVED] ${filePath} (file is now empty)`);
501
- }
502
- } else {
503
- console.warn(`[WARNING] File not found: ${file}`);
504
- }
550
+ try {
551
+ const dbSchema = await getDatabaseSchema(connection, options.dbName);
552
+ const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
553
+ if (!fs.existsSync(schemaPath)) {
554
+ throw new Error(`Schema file not found at: ${schemaPath}`);
555
+ }
556
+ const schemaModule = await import(schemaPath);
557
+ if (!schemaModule) {
558
+ throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
505
559
  }
506
- if (deleteFile) {
507
- const deleteFilePath = path.resolve(deleteFile);
508
- if (fs.existsSync(deleteFilePath)) {
509
- fs.unlinkSync(deleteFilePath);
510
- console.log(`[DELETED] ${deleteFilePath} ${description}`);
511
- } else {
512
- console.log(`[SKIPPED] ${deleteFilePath} ${description}`);
560
+ const drizzleSchema = {};
561
+ const tables = Object.values(schemaModule);
562
+ tables.forEach((table) => {
563
+ const metadata = getTableMetadata(table);
564
+ if (metadata.tableName) {
565
+ const columns = {};
566
+ Object.entries(metadata.columns).forEach(([name, column]) => {
567
+ columns[name] = {
568
+ type: column.dataType,
569
+ notNull: column.notNull,
570
+ autoincrement: column.autoincrement,
571
+ columnType: column.columnType,
572
+ name: column.name,
573
+ getSQLType: () => column.getSQLType()
574
+ };
575
+ });
576
+ drizzleSchema[metadata.tableName] = columns;
513
577
  }
578
+ });
579
+ if (Object.keys(drizzleSchema).length === 0) {
580
+ throw new Error(`No valid tables found in schema at: ${schemaPath}`);
514
581
  }
515
- if (deleteFolder) {
516
- const deleteFolderPath = path.resolve(deleteFolder);
517
- if (fs.existsSync(deleteFolderPath)) {
518
- fs.rmSync(deleteFolderPath, { recursive: true, force: true });
519
- console.log(`[DELETED] ${deleteFolderPath} ${description}`);
520
- } else {
521
- console.log(`[SKIPPED] ${deleteFolderPath} ${description}`);
582
+ console.log("Found tables:", Object.keys(drizzleSchema));
583
+ const createStatements = filterWithPreviousMigration(generateSchemaChanges(drizzleSchema, dbSchema, schemaModule), prevVersion, options.output);
584
+ if (createStatements.length) {
585
+ const migrationFile = generateMigrationFile$1(createStatements, version);
586
+ if (saveMigrationFiles$1(migrationFile, version, options.output)) {
587
+ console.log(`✅ Migration successfully updated!`);
522
588
  }
589
+ process.exit(0);
590
+ } else {
591
+ console.log(`⚠️ No new migration changes detected.`);
592
+ process.exit(0);
523
593
  }
594
+ } finally {
595
+ await connection.end();
524
596
  }
525
- );
526
- console.log("🎉 MikroORM & Knex patching completed!");
527
- }
597
+ } catch (error) {
598
+ console.error(`❌ Error during migration update:`, error);
599
+ process.exit(1);
600
+ }
601
+ };
528
602
  function generateMigrationUUID(version) {
529
603
  const now = /* @__PURE__ */ new Date();
530
604
  const timestamp = now.getTime();
531
605
  return `MIGRATION_V${version}_${timestamp}`;
532
606
  }
533
- function cleanSQLStatement(sql) {
534
- return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
535
- }
536
607
  function generateMigrationFile(createStatements, version) {
537
608
  const uniqId = generateMigrationUUID(version);
538
609
  const migrationLines = createStatements.map(
539
- (stmt, index) => ` .enqueue("${uniqId}_${index}", "${cleanSQLStatement(stmt)}")`
610
+ (stmt, index) => ` .enqueue("${uniqId}_${index}", "${stmt}")`
540
611
  // eslint-disable-line no-useless-escape
541
612
  ).join("\n");
542
- const clearMigrationsLine = ` .enqueue("${uniqId}", "DELETE FROM __migrations")`;
543
613
  return `import { MigrationRunner } from "@forge/sql/out/migration";
544
614
 
545
615
  export default (migrationRunner: MigrationRunner): MigrationRunner => {
546
616
  return migrationRunner
547
- ${migrationLines}
548
- ${clearMigrationsLine};
617
+ ${migrationLines};
549
618
  };`;
550
619
  }
551
620
  function saveMigrationFiles(migrationCode, version, outputDir) {
@@ -580,42 +649,54 @@ export default async (
580
649
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
581
650
  console.log(`✅ Migration index file created: ${indexFilePath}`);
582
651
  }
583
- const extractDropStatements = (schema) => {
584
- const statements = schema.split(";").map((s) => s.trim());
585
- return statements.filter((s) => {
586
- return s.toLowerCase().startsWith("drop");
587
- });
588
- };
589
- const loadEntities = async (entitiesPath) => {
590
- try {
591
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
592
- if (!fs.existsSync(indexFilePath)) {
593
- console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
594
- process.exit(1);
595
- }
596
- const { default: entities } = await import(indexFilePath);
597
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
598
- return entities;
599
- } catch (error) {
600
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
601
- process.exit(1);
602
- }
603
- };
604
652
  const dropMigration = async (options) => {
605
653
  try {
606
654
  const version = 1;
607
- const entities = await loadEntities(options.entitiesPath);
608
- const orm = MikroORM.initSync({
609
- host: options.host,
610
- port: options.port,
611
- user: options.user,
612
- password: options.password,
613
- dbName: options.dbName,
614
- entities
655
+ const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
656
+ if (!fs.existsSync(schemaPath)) {
657
+ throw new Error(`Schema file not found at: ${schemaPath}`);
658
+ }
659
+ const schemaModule = await import(schemaPath);
660
+ if (!schemaModule) {
661
+ throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
662
+ }
663
+ const drizzleSchema = {};
664
+ const tables = Object.values(schemaModule);
665
+ tables.forEach((table) => {
666
+ const symbols = Object.getOwnPropertySymbols(table);
667
+ const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
668
+ const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
669
+ const indexesSymbol = symbols.find((s) => s.toString().includes("Indexes"));
670
+ const foreignKeysSymbol = symbols.find((s) => s.toString().includes("ForeignKeys"));
671
+ if (table && nameSymbol && columnsSymbol) {
672
+ drizzleSchema[table[nameSymbol]] = {
673
+ // @ts-ignore
674
+ columns: table[columnsSymbol],
675
+ // @ts-ignore
676
+ indexes: indexesSymbol ? table[indexesSymbol] || {} : {},
677
+ // @ts-ignore
678
+ foreignKeys: foreignKeysSymbol ? table[foreignKeysSymbol] || {} : {}
679
+ };
680
+ }
615
681
  });
616
- const dropSchemaSQL = await orm.schema.getDropSchemaSQL({ wrap: true });
617
- const statements = extractDropStatements(dropSchemaSQL);
618
- const migrationFile = generateMigrationFile(statements, version);
682
+ if (Object.keys(drizzleSchema).length === 0) {
683
+ throw new Error(`No valid tables found in schema at: ${schemaPath}`);
684
+ }
685
+ console.log("Found tables:", Object.keys(drizzleSchema));
686
+ const dropStatements = [];
687
+ for (const [tableName, tableInfo] of Object.entries(drizzleSchema)) {
688
+ for (const fk of Object.values(tableInfo.foreignKeys)) {
689
+ const fkName = fk.getName();
690
+ dropStatements.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
691
+ }
692
+ for (const [indexName, index] of Object.entries(tableInfo.indexes)) {
693
+ if (indexName === "PRIMARY") continue;
694
+ dropStatements.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
695
+ }
696
+ dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
697
+ }
698
+ dropStatements.push(`DELETE FROM __migrations;`);
699
+ const migrationFile = generateMigrationFile(dropStatements, version);
619
700
  saveMigrationFiles(migrationFile, version, options.output);
620
701
  console.log(`✅ Migration successfully created!`);
621
702
  process.exit(0);
@@ -795,12 +876,5 @@ program.command("migrations:drop").description("Generate a migration to drop all
795
876
  );
796
877
  await dropMigration(config);
797
878
  });
798
- program.command("patch:mikroorm").description("Patch MikroORM and Knex dependencies to work properly with Forge").action(async () => {
799
- console.log("Running MikroORM patch...");
800
- await runPostInstallPatch();
801
- await runPostInstallPatch();
802
- await runPostInstallPatch();
803
- console.log("✅ MikroORM patch applied successfully!");
804
- });
805
879
  program.parse(process.argv);
806
880
  //# sourceMappingURL=cli.mjs.map