forge-sql-orm 1.0.30 → 1.1.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/README.md +242 -661
  2. package/dist/ForgeSQLORM.js +541 -568
  3. package/dist/ForgeSQLORM.js.map +1 -1
  4. package/dist/ForgeSQLORM.mjs +539 -555
  5. package/dist/ForgeSQLORM.mjs.map +1 -1
  6. package/dist/core/ForgeSQLCrudOperations.d.ts +101 -130
  7. package/dist/core/ForgeSQLCrudOperations.d.ts.map +1 -1
  8. package/dist/core/ForgeSQLORM.d.ts +11 -10
  9. package/dist/core/ForgeSQLORM.d.ts.map +1 -1
  10. package/dist/core/ForgeSQLQueryBuilder.d.ts +271 -113
  11. package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
  12. package/dist/core/ForgeSQLSelectOperations.d.ts +65 -22
  13. package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
  14. package/dist/core/SystemTables.d.ts +59 -0
  15. package/dist/core/SystemTables.d.ts.map +1 -0
  16. package/dist/index.d.ts +1 -2
  17. package/dist/index.d.ts.map +1 -1
  18. package/dist/utils/sqlUtils.d.ts +53 -6
  19. package/dist/utils/sqlUtils.d.ts.map +1 -1
  20. package/dist-cli/cli.js +561 -360
  21. package/dist-cli/cli.js.map +1 -1
  22. package/dist-cli/cli.mjs +561 -360
  23. package/dist-cli/cli.mjs.map +1 -1
  24. package/package.json +26 -26
  25. package/src/core/ForgeSQLCrudOperations.ts +360 -473
  26. package/src/core/ForgeSQLORM.ts +40 -78
  27. package/src/core/ForgeSQLQueryBuilder.ts +250 -133
  28. package/src/core/ForgeSQLSelectOperations.ts +182 -72
  29. package/src/core/SystemTables.ts +7 -0
  30. package/src/index.ts +1 -2
  31. package/src/utils/sqlUtils.ts +155 -23
  32. package/dist/core/ComplexQuerySchemaBuilder.d.ts +0 -38
  33. package/dist/core/ComplexQuerySchemaBuilder.d.ts.map +0 -1
  34. package/dist/knex/index.d.ts +0 -4
  35. package/dist/knex/index.d.ts.map +0 -1
  36. package/src/core/ComplexQuerySchemaBuilder.ts +0 -63
  37. package/src/knex/index.ts +0 -4
package/dist-cli/cli.js CHANGED
@@ -6,98 +6,148 @@ const inquirer = require("inquirer");
6
6
  const fs = require("fs");
7
7
  const path = require("path");
8
8
  require("reflect-metadata");
9
- const mysql = require("@mikro-orm/mysql");
10
- const entityGenerator = require("@mikro-orm/entity-generator");
11
- const regenerateIndexFile = (outputPath) => {
12
- const entitiesDir = path.resolve(outputPath);
13
- const indexPath = path.join(entitiesDir, "index.ts");
14
- const entityFiles = fs.readdirSync(entitiesDir).filter((file) => file.endsWith(".ts") && file !== "index.ts");
15
- const imports = entityFiles.map((file) => {
16
- const entityName = path.basename(file, ".ts");
17
- return `import { ${entityName} } from "./${entityName}";`;
18
- });
19
- const indexContent = `${imports.join("\n")}
9
+ const child_process = require("child_process");
10
+ const mysql = require("mysql2/promise");
11
+ require("moment");
12
+ const primaryKeys = require("drizzle-orm/mysql-core/primary-keys");
13
+ const indexes = require("drizzle-orm/mysql-core/indexes");
14
+ const checks = require("drizzle-orm/mysql-core/checks");
15
+ const foreignKeys = require("drizzle-orm/mysql-core/foreign-keys");
16
+ const uniqueConstraint = require("drizzle-orm/mysql-core/unique-constraint");
17
+ function replaceMySQLTypes(schemaContent) {
18
+ const imports = `import { mySqlDateTimeString, mySqlTimeString, mySqlDateString, mySqlTimestampString } from "forge-sql-orm";
20
19
 
21
- export default [${entityFiles.map((file) => path.basename(file, ".ts")).join(", ")}];
22
20
  `;
23
- fs.writeFileSync(indexPath, indexContent, "utf8");
24
- console.log(`✅ Updated index.ts with ${entityFiles.length} entities.`);
25
- };
21
+ let modifiedContent = schemaContent.replace(/datetime\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(['"]([^'"]+)['"]\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlDateTimeString()").replace(/datetime\(\)/g, "mySqlDateTimeString()").replace(/time\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlTimeString('$1')").replace(/time\(['"]([^'"]+)['"]\)/g, "mySqlTimeString('$1')").replace(/time\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlTimeString()").replace(/time\(\)/g, "mySqlTimeString()").replace(/date\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlDateString('$1')").replace(/date\(['"]([^'"]+)['"]\)/g, "mySqlDateString('$1')").replace(/date\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlDateString()").replace(/date\(\)/g, "mySqlDateString()").replace(/timestamp\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(['"]([^'"]+)['"]\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(\s*{\s*mode:\s*['"]([^'"]+)['"]\s*}\s*\)/g, "mySqlTimestampString()").replace(/timestamp\(\)/g, "mySqlTimestampString()");
22
+ if (!modifiedContent.includes("import { mySqlDateTimeString")) {
23
+ modifiedContent = imports + modifiedContent;
24
+ }
25
+ return modifiedContent;
26
+ }
26
27
  const generateModels = async (options) => {
27
28
  try {
28
- const ormConfig = mysql.defineConfig({
29
- host: options.host,
30
- port: options.port,
31
- user: options.user,
32
- password: options.password,
33
- dbName: options.dbName,
34
- namingStrategy: mysql.MongoNamingStrategy,
35
- discovery: { warnWhenNoEntities: false },
36
- extensions: [entityGenerator.EntityGenerator],
37
- debug: true
38
- });
39
- const orm = mysql.MikroORM.initSync(ormConfig);
40
- console.log(`✅ Connected to ${options.dbName} at ${options.host}:${options.port}`);
41
- const onCreatingVersionField = async (metadatas) => {
42
- metadatas.forEach((m) => {
43
- if (options.versionField) {
44
- const versionFieldName = Object.keys(m.properties).find((p) => {
45
- return p === options.versionField || m.properties[p]?.name === options.versionField || m.properties[p]?.fieldNames?.find((f) => f === options.versionField);
46
- });
47
- if (versionFieldName) {
48
- const property = m.properties[versionFieldName];
49
- if (property.type !== "datetime" && property.type !== "integer" && property.type !== "decimal") {
50
- console.warn(
51
- `Version field "${property.name}" can be only datetime or integer Table ${m.tableName} but now is "${property.type}"`
52
- );
53
- return;
54
- }
55
- if (property.primary) {
56
- console.warn(
57
- `Version field "${property.name}" can not be primary key Table ${m.tableName}`
58
- );
59
- return;
60
- }
61
- if (property.nullable) {
29
+ const sql = await child_process.execSync(
30
+ `npx drizzle-kit pull --dialect mysql --url mysql://${options.user}:${options.password}@${options.host}:${options.port}/${options.dbName} --out ${options.output}`,
31
+ { encoding: "utf-8" }
32
+ );
33
+ const metaDir = path.join(options.output, "meta");
34
+ const additionalMetadata = {};
35
+ if (fs.existsSync(metaDir)) {
36
+ const snapshotFile = path.join(metaDir, "0000_snapshot.json");
37
+ if (fs.existsSync(snapshotFile)) {
38
+ const snapshotData = JSON.parse(fs.readFileSync(snapshotFile, "utf-8"));
39
+ for (const [tableName, tableData] of Object.entries(snapshotData.tables)) {
40
+ const table = tableData;
41
+ const versionField = Object.entries(table.columns).find(
42
+ ([_, col]) => col.name.toLowerCase() === options.versionField
43
+ );
44
+ if (versionField) {
45
+ const [_, col] = versionField;
46
+ const fieldType = col.type;
47
+ const isSupportedType = fieldType === "datetime" || fieldType === "timestamp" || fieldType === "int" || fieldType === "number" || fieldType === "decimal";
48
+ if (!col.notNull) {
49
+ console.warn(`Version field "${col.name}" in table ${tableName} is nullable. Versioning may not work correctly.`);
50
+ } else if (!isSupportedType) {
62
51
  console.warn(
63
- `Version field "${property.name}" should not be nullable Table ${m.tableName}`
52
+ `Version field "${col.name}" in table ${tableName} has unsupported type "${fieldType}". Only datetime, timestamp, int, and decimal types are supported for versioning. Versioning will be skipped.`
64
53
  );
65
- return;
54
+ } else {
55
+ additionalMetadata[tableName] = {
56
+ tableName,
57
+ versionField: {
58
+ fieldName: col.name
59
+ }
60
+ };
66
61
  }
67
- property.version = true;
68
62
  }
69
63
  }
70
- });
71
- };
72
- await orm.entityGenerator.generate({
73
- entitySchema: true,
74
- bidirectionalRelations: true,
75
- identifiedReferences: false,
76
- forceUndefined: true,
77
- undefinedDefaults: true,
78
- useCoreBaseEntity: false,
79
- onlyPurePivotTables: false,
80
- outputPurePivotTables: false,
81
- scalarPropertiesForRelations: "always",
82
- save: true,
83
- path: options.output,
84
- onInitialMetadata: onCreatingVersionField
85
- });
86
- regenerateIndexFile(options.output);
87
- console.log(`✅ Entities generated at: ${options.output}`);
64
+ }
65
+ }
66
+ const versionMetadataContent = `/**
67
+ * This file was auto-generated by forge-sql-orm
68
+ * Generated at: ${(/* @__PURE__ */ new Date()).toISOString()}
69
+ *
70
+ * DO NOT EDIT THIS FILE MANUALLY
71
+ * Any changes will be overwritten on next generation
72
+ */
73
+
74
+
75
+ export * from "./relations";
76
+ export * from "./schema";
77
+
78
+ export interface VersionFieldMetadata {
79
+ fieldName: string;
80
+ }
81
+
82
+ export interface TableMetadata {
83
+ tableName: string;
84
+ versionField: VersionFieldMetadata;
85
+ }
86
+
87
+ export type AdditionalMetadata = Record<string, TableMetadata>;
88
+
89
+ export const additionalMetadata: AdditionalMetadata = ${JSON.stringify(additionalMetadata, null, 2)};
90
+ `;
91
+ fs.writeFileSync(path.join(options.output, "index.ts"), versionMetadataContent);
92
+ const schemaPath = path.join(options.output, "schema.ts");
93
+ if (fs.existsSync(schemaPath)) {
94
+ const schemaContent = fs.readFileSync(schemaPath, "utf-8");
95
+ const modifiedContent = replaceMySQLTypes(schemaContent);
96
+ fs.writeFileSync(schemaPath, modifiedContent);
97
+ console.log(`✅ Updated schema types in: ${schemaPath}`);
98
+ }
99
+ const migrationDir = path.join(options.output, "migrations");
100
+ if (fs.existsSync(migrationDir)) {
101
+ fs.rmSync(migrationDir, { recursive: true, force: true });
102
+ console.log(`✅ Removed: ${migrationDir}`);
103
+ }
104
+ if (fs.existsSync(metaDir)) {
105
+ const journalFile = path.join(metaDir, "_journal.json");
106
+ if (fs.existsSync(journalFile)) {
107
+ const journalData = JSON.parse(fs.readFileSync(journalFile, "utf-8"));
108
+ for (const entry of journalData.entries) {
109
+ const sqlFile = path.join(options.output, `${entry.tag}.sql`);
110
+ if (fs.existsSync(sqlFile)) {
111
+ fs.rmSync(sqlFile, { force: true });
112
+ console.log(`✅ Removed SQL file: ${entry.tag}.sql`);
113
+ }
114
+ }
115
+ }
116
+ fs.rmSync(metaDir, { recursive: true, force: true });
117
+ console.log(`✅ Removed: ${metaDir}`);
118
+ }
119
+ console.log(`✅ Successfully generated models and version metadata`);
88
120
  process.exit(0);
89
121
  } catch (error) {
90
- console.error(`❌ Error generating entities:`, error);
122
+ console.error(`❌ Error during model generation:`, error);
91
123
  process.exit(1);
92
124
  }
93
125
  };
94
- function cleanSQLStatement$1(sql) {
126
+ const loadMigrationVersion$1 = async (migrationPath) => {
127
+ try {
128
+ const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
129
+ if (!fs.existsSync(migrationCountFilePath)) {
130
+ return 0;
131
+ }
132
+ const { MIGRATION_VERSION } = await import(migrationCountFilePath);
133
+ console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
134
+ return MIGRATION_VERSION;
135
+ } catch (error) {
136
+ console.error(`❌ Error loading migrationCount:`, error);
137
+ process.exit(1);
138
+ }
139
+ };
140
+ function cleanSQLStatement(sql) {
141
+ sql = sql.replace(/create\s+table\s+(\w+)/gi, "create table if not exists $1");
142
+ sql = sql.replace(/create\s+index\s+(\w+)/gi, "create index if not exists $1");
143
+ sql = sql.replace(/alter\s+table\s+(\w+)\s+add\s+index\s+(\w+)/gi, "alter table $1 add index if not exists $2");
144
+ sql = sql.replace(/alter\s+table\s+(\w+)\s+add\s+constraint\s+(\w+)/gi, "alter table $1 add constraint if not exists $2");
95
145
  return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
96
146
  }
97
- function generateMigrationFile$1(createStatements, version) {
147
+ function generateMigrationFile$2(createStatements, version) {
98
148
  const versionPrefix = `v${version}_MIGRATION`;
99
149
  const migrationLines = createStatements.map(
100
- (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement$1(stmt)}")`
150
+ (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement(stmt).replace(/\s+/g, " ")}")`
101
151
  // eslint-disable-line no-useless-escape
102
152
  ).join("\n");
103
153
  return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -107,7 +157,7 @@ export default (migrationRunner: MigrationRunner): MigrationRunner => {
107
157
  ${migrationLines};
108
158
  };`;
109
159
  }
110
- function saveMigrationFiles$1(migrationCode, version, outputDir) {
160
+ function saveMigrationFiles$2(migrationCode, version, outputDir) {
111
161
  if (!fs.existsSync(outputDir)) {
112
162
  fs.mkdirSync(outputDir, { recursive: true });
113
163
  }
@@ -139,62 +189,38 @@ export default async (
139
189
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
140
190
  console.log(`✅ Migration index file created: ${indexFilePath}`);
141
191
  }
142
- const extractCreateStatements$1 = (schema) => {
143
- const statements = schema.split(";").map((s) => s.trim());
192
+ const extractCreateStatements = (schema) => {
193
+ const statements = schema.split(/--> statement-breakpoint|;/).map((s) => s.trim()).filter((s) => s.length > 0);
144
194
  return statements.filter(
145
- (stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && stmt.includes("add index") || stmt.startsWith("primary")
195
+ (stmt) => stmt.toLowerCase().startsWith("create table") || stmt.toLowerCase().startsWith("alter table") || stmt.toLowerCase().includes("add index") || stmt.toLowerCase().includes("add unique index") || stmt.toLowerCase().includes("add constraint")
146
196
  );
147
197
  };
148
- const loadEntities$1 = async (entitiesPath) => {
149
- try {
150
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
151
- if (!fs.existsSync(indexFilePath)) {
152
- console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
153
- process.exit(1);
154
- }
155
- const { default: entities } = await import(indexFilePath);
156
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
157
- return entities;
158
- } catch (error) {
159
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
160
- process.exit(1);
161
- }
162
- };
163
- const loadMigrationVersion$1 = async (migrationPath) => {
164
- try {
165
- const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
166
- if (!fs.existsSync(migrationCountFilePath)) {
167
- return 0;
168
- }
169
- const { MIGRATION_VERSION } = await import(migrationCountFilePath);
170
- console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
171
- return MIGRATION_VERSION;
172
- } catch (error) {
173
- console.error(`❌ Error loading migrationCount:`, error);
174
- process.exit(1);
175
- }
176
- };
177
198
  const createMigration = async (options) => {
178
199
  try {
179
200
  let version = await loadMigrationVersion$1(options.output);
180
201
  if (version > 0) {
181
- console.error(`❌ Error: Migration has already been created.`);
182
- process.exit(1);
202
+ if (options.force) {
203
+ console.warn(`⚠️ Warning: Migration already exists. Creating new migration with force flag...`);
204
+ } else {
205
+ console.error(`❌ Error: Migration has already been created. Use --force flag to override.`);
206
+ process.exit(1);
207
+ }
183
208
  }
184
209
  version = 1;
185
- const entities = await loadEntities$1(options.entitiesPath);
186
- const orm = mysql.MikroORM.initSync({
187
- host: options.host,
188
- port: options.port,
189
- user: options.user,
190
- password: options.password,
191
- dbName: options.dbName,
192
- entities
193
- });
194
- const createSchemaSQL = await orm.schema.getCreateSchemaSQL({ wrap: true });
195
- const statements = extractCreateStatements$1(createSchemaSQL);
196
- const migrationFile = generateMigrationFile$1(statements, version);
197
- saveMigrationFiles$1(migrationFile, version, options.output);
210
+ await child_process.execSync(
211
+ `npx drizzle-kit generate --name=init --dialect mysql --out ${options.output} --schema ${options.entitiesPath}`,
212
+ { encoding: "utf-8" }
213
+ );
214
+ const initSqlFile = path.join(options.output, "0000_init.sql");
215
+ const sql = fs.readFileSync(initSqlFile, "utf-8");
216
+ const createStatements = extractCreateStatements(sql);
217
+ const migrationFile = generateMigrationFile$2(createStatements, 1);
218
+ saveMigrationFiles$2(migrationFile, 1, options.output);
219
+ fs.rmSync(initSqlFile, { force: true });
220
+ console.log(`✅ Removed SQL file: ${initSqlFile}`);
221
+ let metaDir = path.join(options.output, "meta");
222
+ fs.rmSync(metaDir, { recursive: true, force: true });
223
+ console.log(`✅ Removed: ${metaDir}`);
198
224
  console.log(`✅ Migration successfully created!`);
199
225
  process.exit(0);
200
226
  } catch (error) {
@@ -202,13 +228,64 @@ const createMigration = async (options) => {
202
228
  process.exit(1);
203
229
  }
204
230
  };
205
- function cleanSQLStatement(sql) {
206
- return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
231
+ function getTableMetadata(table) {
232
+ const symbols = Object.getOwnPropertySymbols(table);
233
+ const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
234
+ const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
235
+ const extraSymbol = symbols.find((s) => s.toString().includes("ExtraConfigBuilder"));
236
+ const foreignKeysSymbol = symbols.find((s) => s.toString().includes("MySqlInlineForeignKeys)"));
237
+ const builders = {
238
+ indexes: [],
239
+ checks: [],
240
+ foreignKeys: [],
241
+ primaryKeys: [],
242
+ uniqueConstraints: [],
243
+ extras: []
244
+ };
245
+ if (foreignKeysSymbol) {
246
+ const foreignKeys2 = table[foreignKeysSymbol];
247
+ if (foreignKeys2) {
248
+ for (const foreignKey of foreignKeys2) {
249
+ builders.foreignKeys.push(foreignKey);
250
+ }
251
+ }
252
+ }
253
+ if (extraSymbol) {
254
+ const extraConfigBuilder = table[extraSymbol];
255
+ if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
256
+ const configBuilders = extraConfigBuilder(table);
257
+ let configBuildersArray = [];
258
+ if (!Array.isArray(configBuilders)) {
259
+ configBuildersArray = Object.values(configBuilders);
260
+ } else {
261
+ configBuildersArray = configBuilders;
262
+ }
263
+ configBuildersArray.forEach((builder) => {
264
+ if (builder instanceof indexes.IndexBuilder) {
265
+ builders.indexes.push(builder);
266
+ } else if (builder instanceof checks.CheckBuilder) {
267
+ builders.checks.push(builder);
268
+ } else if (builder instanceof foreignKeys.ForeignKeyBuilder) {
269
+ builders.foreignKeys.push(builder);
270
+ } else if (builder instanceof primaryKeys.PrimaryKeyBuilder) {
271
+ builders.primaryKeys.push(builder);
272
+ } else if (builder instanceof uniqueConstraint.UniqueConstraintBuilder) {
273
+ builders.uniqueConstraints.push(builder);
274
+ }
275
+ builders.extras.push(builder);
276
+ });
277
+ }
278
+ }
279
+ return {
280
+ tableName: nameSymbol ? table[nameSymbol] : "",
281
+ columns: columnsSymbol ? table[columnsSymbol] : {},
282
+ ...builders
283
+ };
207
284
  }
208
- function generateMigrationFile(createStatements, version) {
285
+ function generateMigrationFile$1(createStatements, version) {
209
286
  const versionPrefix = `v${version}_MIGRATION`;
210
287
  const migrationLines = createStatements.map(
211
- (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement(stmt)}")`
288
+ (stmt, index) => ` .enqueue("${versionPrefix}${index}", "${stmt}")`
212
289
  // eslint-disable-line no-useless-escape
213
290
  ).join("\n");
214
291
  return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -218,7 +295,19 @@ export default (migrationRunner: MigrationRunner): MigrationRunner => {
218
295
  ${migrationLines};
219
296
  };`;
220
297
  }
221
- function saveMigrationFiles(migrationCode, version, outputDir) {
298
+ function filterWithPreviousMigration(newStatements, prevVersion, outputDir) {
299
+ const prevMigrationPath = path.join(outputDir, `migrationV${prevVersion}.ts`);
300
+ if (!fs.existsSync(prevMigrationPath)) {
301
+ return newStatements.map((s) => s.replace(/\s+/g, " "));
302
+ }
303
+ const prevContent = fs.readFileSync(prevMigrationPath, "utf-8");
304
+ const prevStatements = prevContent.split("\n").filter((line) => line.includes(".enqueue(")).map((line) => {
305
+ const match = line.match(/\.enqueue\([^,]+,\s*"([^"]+)"/);
306
+ return match ? match[1].replace(/\s+/g, " ").trim() : "";
307
+ });
308
+ return newStatements.filter((s) => !prevStatements.includes(s.replace(/\s+/g, " "))).map((s) => s.replace(/\s+/g, " "));
309
+ }
310
+ function saveMigrationFiles$1(migrationCode, version, outputDir) {
222
311
  if (!fs.existsSync(outputDir)) {
223
312
  fs.mkdirSync(outputDir, { recursive: true });
224
313
  }
@@ -249,28 +338,8 @@ export default async (
249
338
  console.log(`✅ Migration file created: ${migrationFilePath}`);
250
339
  console.log(`✅ Migration count file updated: ${migrationCountPath}`);
251
340
  console.log(`✅ Migration index file created: ${indexFilePath}`);
341
+ return true;
252
342
  }
253
- const extractCreateStatements = (schema) => {
254
- const statements = schema.split(";").map((s) => s.trim());
255
- return statements.filter(
256
- (stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && stmt.includes("add index") || stmt.startsWith("alter table") && stmt.includes("add") && !stmt.includes("foreign") || stmt.startsWith("alter table") && stmt.includes("modify") && !stmt.includes("foreign")
257
- );
258
- };
259
- const loadEntities = async (entitiesPath) => {
260
- try {
261
- const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
262
- if (!fs.existsSync(indexFilePath)) {
263
- console.error(`❌ Error: index.ts not found in ${entitiesPath}`);
264
- process.exit(1);
265
- }
266
- const { default: entities } = await import(indexFilePath);
267
- console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
268
- return entities;
269
- } catch (error) {
270
- console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
271
- process.exit(1);
272
- }
273
- };
274
343
  const loadMigrationVersion = async (migrationPath) => {
275
344
  try {
276
345
  const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
@@ -288,9 +357,183 @@ const loadMigrationVersion = async (migrationPath) => {
288
357
  process.exit(1);
289
358
  }
290
359
  };
360
+ async function getDatabaseSchema(connection, dbName) {
361
+ const [columns] = await connection.execute(`
362
+ SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA
363
+ FROM INFORMATION_SCHEMA.COLUMNS
364
+ WHERE TABLE_SCHEMA = ?
365
+ `, [dbName]);
366
+ const [indexes2] = await connection.execute(`
367
+ SELECT TABLE_NAME, INDEX_NAME, COLUMN_NAME, NON_UNIQUE
368
+ FROM INFORMATION_SCHEMA.STATISTICS
369
+ WHERE TABLE_SCHEMA = ?
370
+ ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX
371
+ `, [dbName]);
372
+ const [foreignKeys2] = await connection.execute(`
373
+ SELECT
374
+ TABLE_NAME,
375
+ COLUMN_NAME,
376
+ CONSTRAINT_NAME,
377
+ REFERENCED_TABLE_NAME,
378
+ REFERENCED_COLUMN_NAME
379
+ FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
380
+ WHERE TABLE_SCHEMA = ?
381
+ AND REFERENCED_TABLE_NAME IS NOT NULL
382
+ `, [dbName]);
383
+ const schema = {};
384
+ columns.forEach((row) => {
385
+ if (!schema[row.TABLE_NAME]) {
386
+ schema[row.TABLE_NAME] = {
387
+ columns: {},
388
+ indexes: {},
389
+ foreignKeys: {}
390
+ };
391
+ }
392
+ schema[row.TABLE_NAME].columns[row.COLUMN_NAME] = row;
393
+ });
394
+ indexes2.forEach((row) => {
395
+ if (!schema[row.TABLE_NAME].indexes[row.INDEX_NAME]) {
396
+ schema[row.TABLE_NAME].indexes[row.INDEX_NAME] = {
397
+ columns: [],
398
+ unique: !row.NON_UNIQUE
399
+ };
400
+ }
401
+ schema[row.TABLE_NAME].indexes[row.INDEX_NAME].columns.push(row.COLUMN_NAME);
402
+ });
403
+ foreignKeys2.forEach((row) => {
404
+ if (!schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME]) {
405
+ schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME] = {
406
+ column: row.COLUMN_NAME,
407
+ referencedTable: row.REFERENCED_TABLE_NAME,
408
+ referencedColumn: row.REFERENCED_COLUMN_NAME
409
+ };
410
+ }
411
+ });
412
+ return schema;
413
+ }
414
+ function normalizeMySQLType(mysqlType) {
415
+ let normalized = mysqlType.replace(/\([^)]*\)/, "").toLowerCase();
416
+ normalized = normalized.replace(/^mysql/, "");
417
+ return normalized;
418
+ }
419
+ function getForeignKeyName(fk) {
420
+ return fk.getName();
421
+ }
422
+ function getIndexName(index) {
423
+ return index.name;
424
+ }
425
+ function getUniqueConstraintName(uc) {
426
+ return uc.name;
427
+ }
428
+ function getIndexColumns(index) {
429
+ return index.columns.map((col) => col.name);
430
+ }
431
+ function generateSchemaChanges(drizzleSchema, dbSchema, schemaModule) {
432
+ const changes = [];
433
+ for (const [tableName, dbTable] of Object.entries(dbSchema)) {
434
+ const drizzleColumns = drizzleSchema[tableName];
435
+ if (!drizzleColumns) {
436
+ const columns = Object.entries(dbTable.columns).map(([colName, col]) => {
437
+ const type = col.COLUMN_TYPE;
438
+ const nullable = col.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
439
+ const autoIncrement = col.EXTRA.includes("auto_increment") ? "AUTO_INCREMENT" : "";
440
+ return `\`${colName}\` ${type} ${nullable} ${autoIncrement}`.trim();
441
+ }).join(",\n ");
442
+ changes.push(`CREATE TABLE if not exists \`${tableName}\` (
443
+ ${columns}
444
+ );`);
445
+ for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
446
+ if (indexName === "PRIMARY") {
447
+ continue;
448
+ }
449
+ const isForeignKeyIndex = dbIndex.columns.some((colName) => {
450
+ const column = dbTable.columns[colName];
451
+ return column && column.COLUMN_KEY === "MUL" && column.EXTRA.includes("foreign key");
452
+ });
453
+ if (isForeignKeyIndex) {
454
+ continue;
455
+ }
456
+ const columns2 = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
457
+ const unique = dbIndex.unique ? "UNIQUE " : "";
458
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns2});`);
459
+ }
460
+ for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
461
+ changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
462
+ }
463
+ continue;
464
+ }
465
+ for (const [colName, dbCol] of Object.entries(dbTable.columns)) {
466
+ const drizzleCol = Object.values(drizzleColumns).find((c) => c.name === colName);
467
+ if (!drizzleCol) {
468
+ const type = dbCol.COLUMN_TYPE;
469
+ const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
470
+ changes.push(`ALTER TABLE \`${tableName}\` ADD COLUMN \`${colName}\` ${type} ${nullable};`);
471
+ continue;
472
+ }
473
+ const normalizedDbType = normalizeMySQLType(dbCol.COLUMN_TYPE);
474
+ const normalizedDrizzleType = normalizeMySQLType(drizzleCol.getSQLType());
475
+ if (normalizedDbType !== normalizedDrizzleType) {
476
+ const type = dbCol.COLUMN_TYPE;
477
+ const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
478
+ changes.push(`ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${colName}\` ${type} ${nullable};`);
479
+ }
480
+ }
481
+ const table = Object.values(schemaModule).find((t) => {
482
+ const metadata = getTableMetadata(t);
483
+ return metadata.tableName === tableName;
484
+ });
485
+ if (table) {
486
+ const metadata = getTableMetadata(table);
487
+ for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
488
+ if (indexName === "PRIMARY") {
489
+ continue;
490
+ }
491
+ const isForeignKeyIndex = metadata.foreignKeys.some((fk) => getForeignKeyName(fk) === indexName);
492
+ if (isForeignKeyIndex) {
493
+ continue;
494
+ }
495
+ const existsUniqIndex = metadata.uniqueConstraints.find((uc) => getUniqueConstraintName(uc) === indexName);
496
+ let drizzleIndex = metadata.indexes.find((i) => getIndexName(i) === indexName);
497
+ if (!drizzleIndex && existsUniqIndex) {
498
+ drizzleIndex = existsUniqIndex;
499
+ }
500
+ if (!drizzleIndex) {
501
+ const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
502
+ const unique = dbIndex.unique ? "UNIQUE " : "";
503
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
504
+ continue;
505
+ }
506
+ const dbColumns = dbIndex.columns.join(", ");
507
+ const drizzleColumns2 = getIndexColumns(drizzleIndex).join(", ");
508
+ if (dbColumns !== drizzleColumns2 || dbIndex.unique !== drizzleIndex instanceof uniqueConstraint.UniqueConstraintBuilder) {
509
+ changes.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
510
+ const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
511
+ const unique = dbIndex.unique ? "UNIQUE " : "";
512
+ changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
513
+ }
514
+ }
515
+ for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
516
+ const drizzleFK = metadata.foreignKeys.find((fk) => getForeignKeyName(fk) === fkName);
517
+ if (!drizzleFK) {
518
+ changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
519
+ continue;
520
+ }
521
+ }
522
+ for (const drizzleForeignKey of metadata.foreignKeys) {
523
+ const isDbFk = Object.keys(dbTable.foreignKeys).find((fk) => fk === getForeignKeyName(drizzleForeignKey));
524
+ if (!isDbFk) {
525
+ const fkName = getForeignKeyName(drizzleForeignKey);
526
+ changes.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
527
+ }
528
+ }
529
+ }
530
+ }
531
+ return changes;
532
+ }
291
533
  const updateMigration = async (options) => {
292
534
  try {
293
535
  let version = await loadMigrationVersion(options.output);
536
+ const prevVersion = version;
294
537
  if (version < 1) {
295
538
  console.log(
296
539
  `⚠️ Initial migration not found. Run "npx forge-sql-orm migrations:create" first.`
@@ -298,226 +541,171 @@ const updateMigration = async (options) => {
298
541
  process.exit(0);
299
542
  }
300
543
  version += 1;
301
- const entities = await loadEntities(options.entitiesPath);
302
- const orm = mysql.MikroORM.initSync({
544
+ const connection = await mysql.createConnection({
303
545
  host: options.host,
304
546
  port: options.port,
305
547
  user: options.user,
306
548
  password: options.password,
307
- dbName: options.dbName,
308
- entities,
309
- debug: true
549
+ database: options.dbName
310
550
  });
311
- const createSchemaSQL = await orm.schema.getUpdateSchemaMigrationSQL({ wrap: true });
312
- const statements = extractCreateStatements(createSchemaSQL?.down || "");
313
- if (statements.length) {
314
- const migrationFile = generateMigrationFile(statements, version);
315
- saveMigrationFiles(migrationFile, version, options.output);
316
- console.log(`✅ Migration successfully updated!`);
317
- process.exit(0);
318
- } else {
319
- console.log(`⚠️ No new migration changes detected.`);
320
- process.exit(0);
551
+ try {
552
+ const dbSchema = await getDatabaseSchema(connection, options.dbName);
553
+ const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
554
+ if (!fs.existsSync(schemaPath)) {
555
+ throw new Error(`Schema file not found at: ${schemaPath}`);
556
+ }
557
+ const schemaModule = await import(schemaPath);
558
+ if (!schemaModule) {
559
+ throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
560
+ }
561
+ const drizzleSchema = {};
562
+ const tables = Object.values(schemaModule);
563
+ tables.forEach((table) => {
564
+ const metadata = getTableMetadata(table);
565
+ if (metadata.tableName) {
566
+ const columns = {};
567
+ Object.entries(metadata.columns).forEach(([name, column]) => {
568
+ columns[name] = {
569
+ type: column.dataType,
570
+ notNull: column.notNull,
571
+ autoincrement: column.autoincrement,
572
+ columnType: column.columnType,
573
+ name: column.name,
574
+ getSQLType: () => column.getSQLType()
575
+ };
576
+ });
577
+ drizzleSchema[metadata.tableName] = columns;
578
+ }
579
+ });
580
+ if (Object.keys(drizzleSchema).length === 0) {
581
+ throw new Error(`No valid tables found in schema at: ${schemaPath}`);
582
+ }
583
+ console.log("Found tables:", Object.keys(drizzleSchema));
584
+ const createStatements = filterWithPreviousMigration(generateSchemaChanges(drizzleSchema, dbSchema, schemaModule), prevVersion, options.output);
585
+ if (createStatements.length) {
586
+ const migrationFile = generateMigrationFile$1(createStatements, version);
587
+ if (saveMigrationFiles$1(migrationFile, version, options.output)) {
588
+ console.log(`✅ Migration successfully updated!`);
589
+ }
590
+ process.exit(0);
591
+ } else {
592
+ console.log(`⚠️ No new migration changes detected.`);
593
+ process.exit(0);
594
+ }
595
+ } finally {
596
+ await connection.end();
321
597
  }
322
598
  } catch (error) {
323
599
  console.error(`❌ Error during migration update:`, error);
324
600
  process.exit(1);
325
601
  }
326
602
  };
327
- const PATCHES = [
328
- // 🗑️ Remove unused dialects (mssql, postgres, sqlite) in MikroORM
329
- {
330
- file: "node_modules/@mikro-orm/knex/MonkeyPatchable.d.ts",
331
- deleteLines: [
332
- /^.*mssql.*$/gim,
333
- /^.*MsSql.*$/gim,
334
- /^\s*Postgres.*$/gm,
335
- /^.*Sqlite3.*$/gm,
336
- /^.*BetterSqlite3.*$/gim
337
- ],
338
- description: "Removing unused dialects from MonkeyPatchable.d.ts"
339
- },
340
- {
341
- file: "node_modules/@mikro-orm/knex/MonkeyPatchable.js",
342
- deleteLines: [
343
- /^.*mssql.*$/gim,
344
- /^.*MsSql.*$/gim,
345
- /^.*postgres.*$/gim,
346
- /^.*sqlite.*$/gim,
347
- /^.*Sqlite.*$/gim
348
- ],
349
- description: "Removing unused dialects from MonkeyPatchable.js"
350
- },
351
- {
352
- file: "node_modules/@mikro-orm/knex/dialects/index.js",
353
- deleteLines: [/^.*mssql.*$/gim, /^.*MsSql.*$/gim, /^.*postgresql.*$/gim, /^.*sqlite.*$/gim],
354
- description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
355
- },
356
- {
357
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/mssql",
358
- description: "Removing mssql dialect from MikroORM"
359
- },
360
- {
361
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/postgresql",
362
- description: "Removing postgresql dialect from MikroORM"
363
- },
364
- {
365
- deleteFolder: "node_modules/@mikro-orm/knex/dialects/sqlite",
366
- description: "Removing sqlite dialect from MikroORM"
367
- },
368
- {
369
- deleteFolder: "node_modules/@mikro-orm/mysql/node_modules",
370
- description: "Removing node_modules from @mikro-orm/mysql"
371
- },
372
- {
373
- deleteFolder: "node_modules/@mikro-orm/knex/node_modules",
374
- description: "Removing node_modules from @mikro-orm/knex"
375
- },
376
- {
377
- deleteFolder: "node_modules/@mikro-orm/core/node_modules",
378
- description: "Removing sqlite dialect from MikroORM"
379
- },
380
- // 🔄 Fix Webpack `Critical dependency: the request of a dependency is an expression`
381
- {
382
- file: "node_modules/@mikro-orm/core/utils/Configuration.js",
383
- search: /dynamicImportProvider:\s*\/\* istanbul ignore next \*\/\s*\(id\) => import\(id\),/g,
384
- replace: "dynamicImportProvider: /* istanbul ignore next */ () => Promise.resolve({}),",
385
- description: "Fixing dynamic imports in MikroORM Configuration"
386
- },
387
- {
388
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
389
- search: /static dynamicImportProvider = \(id\) => import\(id\);/g,
390
- replace: "static dynamicImportProvider = () => Promise.resolve({});",
391
- description: "Fixing dynamic imports in MikroORM Utils.js"
392
- },
393
- // 🛑 Remove deprecated `require.extensions` usage
394
- {
395
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
396
- search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
397
- replace: ";",
398
- description: "Removing deprecated `require.extensions` check in MikroORM"
399
- },
400
- // 🛠️ Patch Knex to remove `Migrator` and `Seeder`
401
- {
402
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
403
- deleteLines: [
404
- /^const \{ Migrator \} = require\('\.\.\/migrations\/migrate\/Migrator'\);$/gm,
405
- /^const Seeder = require\('\.\.\/migrations\/seed\/Seeder'\);$/gm
406
- ],
407
- description: "Removing `Migrator` and `Seeder` requires from make-knex.js"
408
- },
409
- {
410
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
411
- search: /\sreturn new Migrator\(this\);/g,
412
- replace: "return null;",
413
- description: "Replacing `return new Migrator(this);` with `return null;`"
414
- },
415
- {
416
- file: "node_modules/knex/lib/knex-builder/make-knex.js",
417
- search: /\sreturn new Seeder\(this\);/g,
418
- replace: "return null;",
419
- description: "Replacing `return new Seeder(this);` with `return null;`"
420
- },
421
- {
422
- file: "node_modules/knex/lib/dialects/index.js",
423
- deleteLines: [
424
- /^.*mssql.*$/gim,
425
- /^.*MsSql.*$/gim,
426
- /^.*postgresql.*$/gim,
427
- /^.*sqlite.*$/gim,
428
- /^.*oracle.*$/gim,
429
- /^.*oracledb.*$/gim,
430
- /^.*pgnative.*$/gim,
431
- /^.*postgres.*$/gim,
432
- /^.*redshift.*$/gim,
433
- /^.*sqlite3.*$/gim,
434
- /^.*cockroachdb.*$/gim
435
- ],
436
- description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
437
- },
438
- {
439
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
440
- search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
441
- replace: ";",
442
- // Replaces with semicolon to keep syntax valid
443
- description: "Removing deprecated `require.extensions` check from MikroORM"
444
- },
445
- {
446
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
447
- search: /^.*extensions.*$/gim,
448
- replace: "{",
449
- // Replaces with semicolon to keep syntax valid
450
- description: "Removing deprecated `require.extensions` check from MikroORM"
451
- },
452
- {
453
- file: "node_modules/@mikro-orm/core/utils/Utils.js",
454
- search: /^.*package.json.*$/gim,
455
- replace: "return 0;",
456
- // Replaces with semicolon to keep syntax valid
457
- description: "Removing deprecated `require.extensions` check from MikroORM"
458
- },
459
- {
460
- file: "node_modules/@mikro-orm/knex/dialects/mysql/index.js",
461
- deleteLines: [/^.*MariaDbKnexDialect.*$/gim],
462
- description: "Removing MariaDbKnexDialect"
603
+ function generateMigrationUUID(version) {
604
+ const now = /* @__PURE__ */ new Date();
605
+ const timestamp = now.getTime();
606
+ return `MIGRATION_V${version}_${timestamp}`;
607
+ }
608
+ function generateMigrationFile(createStatements, version) {
609
+ const uniqId = generateMigrationUUID(version);
610
+ const migrationLines = createStatements.map(
611
+ (stmt, index) => ` .enqueue("${uniqId}_${index}", "${stmt}")`
612
+ // eslint-disable-line no-useless-escape
613
+ ).join("\n");
614
+ return `import { MigrationRunner } from "@forge/sql/out/migration";
615
+
616
+ export default (migrationRunner: MigrationRunner): MigrationRunner => {
617
+ return migrationRunner
618
+ ${migrationLines};
619
+ };`;
620
+ }
621
+ function saveMigrationFiles(migrationCode, version, outputDir) {
622
+ if (!fs.existsSync(outputDir)) {
623
+ fs.mkdirSync(outputDir, { recursive: true });
463
624
  }
464
- ];
465
- function runPostInstallPatch() {
466
- console.log("🔧 Applying MikroORM & Knex patches...");
467
- PATCHES.forEach(
468
- ({ file, search, replace, deleteLines, deleteFile, deleteFolder, description }) => {
469
- if (file) {
470
- const filePath = path.resolve(file);
471
- if (fs.existsSync(filePath)) {
472
- let content = fs.readFileSync(filePath, "utf8");
473
- let originalContent = content;
474
- if (search && replace) {
475
- if (typeof search === "string" ? content.includes(search) : search.test(content)) {
476
- content = content.replace(search, replace);
477
- console.log(`[PATCHED] ${description}`);
478
- }
479
- }
480
- if (deleteLines) {
481
- deleteLines.forEach((pattern) => {
482
- content = content.split("\n").filter((line) => !pattern.test(line)).join("\n");
483
- });
484
- if (content !== originalContent) {
485
- console.log(`[CLEANED] Removed matching lines in ${file}`);
486
- }
487
- }
488
- if (content !== originalContent) {
489
- fs.writeFileSync(filePath, content, "utf8");
490
- }
491
- if (content.trim() === "") {
492
- fs.unlinkSync(filePath);
493
- console.log(`[REMOVED] ${filePath} (file is now empty)`);
494
- }
495
- } else {
496
- console.warn(`[WARNING] File not found: ${file}`);
497
- }
625
+ const migrationFilePath = path.join(outputDir, `migrationV${version}.ts`);
626
+ const migrationCountPath = path.join(outputDir, `migrationCount.ts`);
627
+ const indexFilePath = path.join(outputDir, `index.ts`);
628
+ fs.writeFileSync(migrationFilePath, migrationCode);
629
+ fs.writeFileSync(migrationCountPath, `export const MIGRATION_VERSION = ${version};`);
630
+ const indexFileContent = `import { MigrationRunner } from "@forge/sql/out/migration";
631
+ import { MIGRATION_VERSION } from "./migrationCount";
632
+
633
+ export type MigrationType = (
634
+ migrationRunner: MigrationRunner,
635
+ ) => MigrationRunner;
636
+
637
+ export default async (
638
+ migrationRunner: MigrationRunner,
639
+ ): Promise<MigrationRunner> => {
640
+ for (let i = 1; i <= MIGRATION_VERSION; i++) {
641
+ const migrations = (await import(\`./migrationV\${i}\`)) as {
642
+ default: MigrationType;
643
+ };
644
+ migrations.default(migrationRunner);
645
+ }
646
+ return migrationRunner;
647
+ };`;
648
+ fs.writeFileSync(indexFilePath, indexFileContent);
649
+ console.log(`✅ Migration file created: ${migrationFilePath}`);
650
+ console.log(`✅ Migration count file updated: ${migrationCountPath}`);
651
+ console.log(`✅ Migration index file created: ${indexFilePath}`);
652
+ }
653
+ const dropMigration = async (options) => {
654
+ try {
655
+ const version = 1;
656
+ const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
657
+ if (!fs.existsSync(schemaPath)) {
658
+ throw new Error(`Schema file not found at: ${schemaPath}`);
659
+ }
660
+ const schemaModule = await import(schemaPath);
661
+ if (!schemaModule) {
662
+ throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
663
+ }
664
+ const drizzleSchema = {};
665
+ const tables = Object.values(schemaModule);
666
+ tables.forEach((table) => {
667
+ const symbols = Object.getOwnPropertySymbols(table);
668
+ const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
669
+ const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
670
+ const indexesSymbol = symbols.find((s) => s.toString().includes("Indexes"));
671
+ const foreignKeysSymbol = symbols.find((s) => s.toString().includes("ForeignKeys"));
672
+ if (table && nameSymbol && columnsSymbol) {
673
+ drizzleSchema[table[nameSymbol]] = {
674
+ // @ts-ignore
675
+ columns: table[columnsSymbol],
676
+ // @ts-ignore
677
+ indexes: indexesSymbol ? table[indexesSymbol] || {} : {},
678
+ // @ts-ignore
679
+ foreignKeys: foreignKeysSymbol ? table[foreignKeysSymbol] || {} : {}
680
+ };
498
681
  }
499
- if (deleteFile) {
500
- const deleteFilePath = path.resolve(deleteFile);
501
- if (fs.existsSync(deleteFilePath)) {
502
- fs.unlinkSync(deleteFilePath);
503
- console.log(`[DELETED] ${deleteFilePath} ${description}`);
504
- } else {
505
- console.log(`[SKIPPED] ${deleteFilePath} ${description}`);
506
- }
682
+ });
683
+ if (Object.keys(drizzleSchema).length === 0) {
684
+ throw new Error(`No valid tables found in schema at: ${schemaPath}`);
685
+ }
686
+ console.log("Found tables:", Object.keys(drizzleSchema));
687
+ const dropStatements = [];
688
+ for (const [tableName, tableInfo] of Object.entries(drizzleSchema)) {
689
+ for (const fk of Object.values(tableInfo.foreignKeys)) {
690
+ const fkName = fk.getName();
691
+ dropStatements.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
507
692
  }
508
- if (deleteFolder) {
509
- const deleteFolderPath = path.resolve(deleteFolder);
510
- if (fs.existsSync(deleteFolderPath)) {
511
- fs.rmSync(deleteFolderPath, { recursive: true, force: true });
512
- console.log(`[DELETED] ${deleteFolderPath} ${description}`);
513
- } else {
514
- console.log(`[SKIPPED] ${deleteFolderPath} ${description}`);
515
- }
693
+ for (const [indexName, index] of Object.entries(tableInfo.indexes)) {
694
+ if (indexName === "PRIMARY") continue;
695
+ dropStatements.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
516
696
  }
697
+ dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
517
698
  }
518
- );
519
- console.log("🎉 MikroORM & Knex patching completed!");
520
- }
699
+ dropStatements.push(`DELETE FROM __migrations;`);
700
+ const migrationFile = generateMigrationFile(dropStatements, version);
701
+ saveMigrationFiles(migrationFile, version, options.output);
702
+ console.log(`✅ Migration successfully created!`);
703
+ process.exit(0);
704
+ } catch (error) {
705
+ console.error(`❌ Error during migration creation:`, error);
706
+ process.exit(1);
707
+ }
708
+ };
521
709
  const ENV_PATH = path.resolve(process.cwd(), ".env");
522
710
  dotenv.config({ path: ENV_PATH });
523
711
  const saveEnvFile = (config) => {
@@ -631,12 +819,13 @@ program.command("generate:model").description("Generate MikroORM models from the
631
819
  );
632
820
  await generateModels(config);
633
821
  });
634
- program.command("migrations:create").description("Generate an initial migration for the entire database.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for migrations").option("--entitiesPath <string>", "Path to the folder containing entities").option("--saveEnv", "Save configuration to .env file").action(async (cmd) => {
822
+ program.command("migrations:create").description("Generate an initial migration for the entire database.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for migrations").option("--entitiesPath <string>", "Path to the folder containing entities").option("--force", "Force creation even if migrations exist").option("--saveEnv", "Save configuration to .env file").action(async (cmd) => {
635
823
  const config = await getConfig(
636
824
  cmd,
637
825
  "./database/migration",
638
826
  () => ({
639
- entitiesPath: cmd.entitiesPath || process.env.FORGE_SQL_ORM_ENTITIESPATH
827
+ entitiesPath: cmd.entitiesPath || process.env.FORGE_SQL_ORM_ENTITIESPATH,
828
+ force: cmd.force || false
640
829
  }),
641
830
  (cfg, questions) => {
642
831
  if (!cfg.entitiesPath)
@@ -669,12 +858,24 @@ program.command("migrations:update").description("Generate a migration to update
669
858
  );
670
859
  await updateMigration(config);
671
860
  });
672
- program.command("patch:mikroorm").description("Patch MikroORM and Knex dependencies to work properly with Forge").action(async () => {
673
- console.log("Running MikroORM patch...");
674
- await runPostInstallPatch();
675
- await runPostInstallPatch();
676
- await runPostInstallPatch();
677
- console.log("✅ MikroORM patch applied successfully!");
861
+ program.command("migrations:drop").description("Generate a migration to drop all tables and clear migrations history.").option("--host <string>", "Database host").option("--port <number>", "Database port").option("--user <string>", "Database user").option("--password <string>", "Database password").option("--dbName <string>", "Database name").option("--output <string>", "Output path for migrations").option("--entitiesPath <string>", "Path to the folder containing entities").option("--saveEnv", "Save configuration to .env file").action(async (cmd) => {
862
+ const config = await getConfig(
863
+ cmd,
864
+ "./database/migration",
865
+ () => ({
866
+ entitiesPath: cmd.entitiesPath || process.env.FORGE_SQL_ORM_ENTITIESPATH
867
+ }),
868
+ (cfg, questions) => {
869
+ if (!cfg.entitiesPath)
870
+ questions.push({
871
+ type: "input",
872
+ name: "entitiesPath",
873
+ message: "Enter the path to entities:",
874
+ default: "./database/entities"
875
+ });
876
+ }
877
+ );
878
+ await dropMigration(config);
678
879
  });
679
880
  program.parse(process.argv);
680
881
  //# sourceMappingURL=cli.js.map