forge-sql-orm 1.0.31 → 2.0.0
This diff shows the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two released versions.
- package/README.md +216 -695
- package/dist/ForgeSQLORM.js +526 -564
- package/dist/ForgeSQLORM.js.map +1 -1
- package/dist/ForgeSQLORM.mjs +527 -554
- package/dist/ForgeSQLORM.mjs.map +1 -1
- package/dist/core/ForgeSQLCrudOperations.d.ts +101 -130
- package/dist/core/ForgeSQLCrudOperations.d.ts.map +1 -1
- package/dist/core/ForgeSQLORM.d.ts +11 -10
- package/dist/core/ForgeSQLORM.d.ts.map +1 -1
- package/dist/core/ForgeSQLQueryBuilder.d.ts +275 -111
- package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
- package/dist/core/ForgeSQLSelectOperations.d.ts +65 -22
- package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
- package/dist/core/SystemTables.d.ts +59 -0
- package/dist/core/SystemTables.d.ts.map +1 -0
- package/dist/index.d.ts +1 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/utils/sqlUtils.d.ts +53 -6
- package/dist/utils/sqlUtils.d.ts.map +1 -1
- package/dist-cli/cli.js +461 -397
- package/dist-cli/cli.js.map +1 -1
- package/dist-cli/cli.mjs +461 -397
- package/dist-cli/cli.mjs.map +1 -1
- package/package.json +21 -27
- package/src/core/ForgeSQLCrudOperations.ts +360 -473
- package/src/core/ForgeSQLORM.ts +38 -79
- package/src/core/ForgeSQLQueryBuilder.ts +255 -132
- package/src/core/ForgeSQLSelectOperations.ts +185 -72
- package/src/core/SystemTables.ts +7 -0
- package/src/index.ts +1 -2
- package/src/utils/sqlUtils.ts +164 -22
- package/dist/core/ComplexQuerySchemaBuilder.d.ts +0 -38
- package/dist/core/ComplexQuerySchemaBuilder.d.ts.map +0 -1
- package/dist/knex/index.d.ts +0 -4
- package/dist/knex/index.d.ts.map +0 -1
- package/src/core/ComplexQuerySchemaBuilder.ts +0 -63
- package/src/knex/index.ts +0 -4
package/dist-cli/cli.mjs
CHANGED
@@ -5,92 +5,134 @@ import inquirer from "inquirer";
 import fs from "fs";
 import path from "path";
 import "reflect-metadata";
-import {
-import
-
-
-
-const
-const imports = entityFiles.map((file) => {
-const entityName = path.basename(file, ".ts");
-return `import { ${entityName} } from "./${entityName}";`;
-});
-const indexContent = `${imports.join("\n")}
+import { execSync } from "child_process";
+import mysql from "mysql2/promise";
+import "moment";
+import { UniqueConstraintBuilder } from "drizzle-orm/mysql-core/unique-constraint";
+function replaceMySQLTypes(schemaContent) {
+const imports = `import { mySqlDateTimeString, mySqlTimeString, mySqlDateString, mySqlTimestampString } from "forge-sql-orm";
 
-export default [${entityFiles.map((file) => path.basename(file, ".ts")).join(", ")}];
 `;
-
-
-
+let modifiedContent = schemaContent.replace(/datetime\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(['"]([^'"]+)['"]\)/g, "mySqlDateTimeString('$1')").replace(/datetime\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlDateTimeString()").replace(/time\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlTimeString('$1')").replace(/time\(['"]([^'"]+)['"]\)/g, "mySqlTimeString('$1')").replace(/time\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlTimeString()").replace(/date\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlDateString('$1')").replace(/date\(['"]([^'"]+)['"]\)/g, "mySqlDateString('$1')").replace(/date\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlDateString()").replace(/timestamp\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(['"]([^'"]+)['"]\)/g, "mySqlTimestampString('$1')").replace(/timestamp\(\s*{\s*mode:\s*['"]string['"]\s*}\s*\)/g, "mySqlTimestampString()");
+if (!modifiedContent.includes("import { mySqlDateTimeString")) {
+modifiedContent = imports + modifiedContent;
+}
+return modifiedContent;
+}
 const generateModels = async (options) => {
 try {
-const
-host
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if (
+const sql = await execSync(
+`npx drizzle-kit pull --dialect mysql --url mysql://${options.user}:${options.password}@${options.host}:${options.port}/${options.dbName} --out ${options.output}`,
+{ encoding: "utf-8" }
+);
+const metaDir = path.join(options.output, "meta");
+const additionalMetadata = {};
+if (fs.existsSync(metaDir)) {
+const snapshotFile = path.join(metaDir, "0000_snapshot.json");
+if (fs.existsSync(snapshotFile)) {
+const snapshotData = JSON.parse(fs.readFileSync(snapshotFile, "utf-8"));
+for (const [tableName, tableData] of Object.entries(snapshotData.tables)) {
+const table = tableData;
+const versionField = Object.entries(table.columns).find(
+([_, col]) => col.name.toLowerCase() === options.versionField
+);
+if (versionField) {
+const [_, col] = versionField;
+const fieldType = col.type;
+const isSupportedType = fieldType === "datetime" || fieldType === "timestamp" || fieldType === "int" || fieldType === "number" || fieldType === "decimal";
+if (!col.notNull) {
+console.warn(`Version field "${col.name}" in table ${tableName} is nullable. Versioning may not work correctly.`);
+} else if (!isSupportedType) {
 console.warn(
-`Version field "${
+`Version field "${col.name}" in table ${tableName} has unsupported type "${fieldType}". Only datetime, timestamp, int, and decimal types are supported for versioning. Versioning will be skipped.`
 );
-
+} else {
+additionalMetadata[tableName] = {
+tableName,
+versionField: {
+fieldName: col.name
+}
+};
 }
-if (property.primary) {
-console.warn(
-`Version field "${property.name}" can not be primary key Table ${m.tableName}`
-);
-return;
-}
-if (property.nullable) {
-console.warn(
-`Version field "${property.name}" should not be nullable Table ${m.tableName}`
-);
-return;
-}
-property.version = true;
 }
 }
-}
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+}
+}
+const versionMetadataContent = `/**
+* This file was auto-generated by forge-sql-orm
+* Generated at: ${(/* @__PURE__ */ new Date()).toISOString()}
+*
+* DO NOT EDIT THIS FILE MANUALLY
+* Any changes will be overwritten on next generation
+*/
+
+
+export * from "./relations";
+export * from "./schema";
+
+export interface VersionFieldMetadata {
+fieldName: string;
+}
+
+export interface TableMetadata {
+tableName: string;
+versionField: VersionFieldMetadata;
+}
+
+export type AdditionalMetadata = Record<string, TableMetadata>;
+
+export const additionalMetadata: AdditionalMetadata = ${JSON.stringify(additionalMetadata, null, 2)};
+`;
+fs.writeFileSync(path.join(options.output, "index.ts"), versionMetadataContent);
+const schemaPath = path.join(options.output, "schema.ts");
+if (fs.existsSync(schemaPath)) {
+const schemaContent = fs.readFileSync(schemaPath, "utf-8");
+const modifiedContent = replaceMySQLTypes(schemaContent);
+fs.writeFileSync(schemaPath, modifiedContent);
+console.log(`✅ Updated schema types in: ${schemaPath}`);
+}
+const migrationDir = path.join(options.output, "migrations");
+if (fs.existsSync(migrationDir)) {
+fs.rmSync(migrationDir, { recursive: true, force: true });
+console.log(`✅ Removed: ${migrationDir}`);
+}
+if (fs.existsSync(metaDir)) {
+const journalFile = path.join(metaDir, "_journal.json");
+if (fs.existsSync(journalFile)) {
+const journalData = JSON.parse(fs.readFileSync(journalFile, "utf-8"));
+for (const entry of journalData.entries) {
+const sqlFile = path.join(options.output, `${entry.tag}.sql`);
+if (fs.existsSync(sqlFile)) {
+fs.rmSync(sqlFile, { force: true });
+console.log(`✅ Removed SQL file: ${entry.tag}.sql`);
+}
+}
+}
+fs.rmSync(metaDir, { recursive: true, force: true });
+console.log(`✅ Removed: ${metaDir}`);
+}
+console.log(`✅ Successfully generated models and version metadata`);
 process.exit(0);
 } catch (error) {
-console.error(`❌ Error
+console.error(`❌ Error during model generation:`, error);
+process.exit(1);
+}
+};
+const loadMigrationVersion$1 = async (migrationPath) => {
+try {
+const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
+if (!fs.existsSync(migrationCountFilePath)) {
+return 0;
+}
+const { MIGRATION_VERSION } = await import(migrationCountFilePath);
+console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
+return MIGRATION_VERSION;
+} catch (error) {
+console.error(`❌ Error loading migrationCount:`, error);
 process.exit(1);
 }
 };
-function cleanSQLStatement
+function cleanSQLStatement(sql) {
 sql = sql.replace(/create\s+table\s+(\w+)/gi, "create table if not exists $1");
 sql = sql.replace(/create\s+index\s+(\w+)/gi, "create index if not exists $1");
 sql = sql.replace(/alter\s+table\s+(\w+)\s+add\s+index\s+(\w+)/gi, "alter table $1 add index if not exists $2");
@@ -100,7 +142,7 @@ function cleanSQLStatement$2(sql) {
 function generateMigrationFile$2(createStatements, version) {
 const versionPrefix = `v${version}_MIGRATION`;
 const migrationLines = createStatements.map(
-(stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement
+(stmt, index) => ` .enqueue("${versionPrefix}${index}", "${cleanSQLStatement(stmt).replace(/\s+/g, " ")}")`
 // eslint-disable-line no-useless-escape
 ).join("\n");
 return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -142,41 +184,12 @@ export default async (
 console.log(`✅ Migration count file updated: ${migrationCountPath}`);
 console.log(`✅ Migration index file created: ${indexFilePath}`);
 }
-const extractCreateStatements
-const statements = schema.split(
+const extractCreateStatements = (schema) => {
+const statements = schema.split(/--> statement-breakpoint|;/).map((s) => s.trim()).filter((s) => s.length > 0);
 return statements.filter(
-(stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table")
+(stmt) => stmt.toLowerCase().startsWith("create table") || stmt.toLowerCase().startsWith("alter table") || stmt.toLowerCase().includes("add index") || stmt.toLowerCase().includes("add unique index") || stmt.toLowerCase().includes("add constraint")
 );
 };
-const loadEntities$2 = async (entitiesPath) => {
-try {
-const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
-if (!fs.existsSync(indexFilePath)) {
-console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
-process.exit(1);
-}
-const { default: entities } = await import(indexFilePath);
-console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
-return entities;
-} catch (error) {
-console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
-process.exit(1);
-}
-};
-const loadMigrationVersion$1 = async (migrationPath) => {
-try {
-const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
-if (!fs.existsSync(migrationCountFilePath)) {
-return 0;
-}
-const { MIGRATION_VERSION } = await import(migrationCountFilePath);
-console.log(`✅ Current migration version: ${MIGRATION_VERSION}`);
-return MIGRATION_VERSION;
-} catch (error) {
-console.error(`❌ Error loading migrationCount:`, error);
-process.exit(1);
-}
-};
 const createMigration = async (options) => {
 try {
 let version = await loadMigrationVersion$1(options.output);
@@ -189,19 +202,20 @@ const createMigration = async (options) => {
 }
 }
 version = 1;
-
-
-
-
-
-
-
-
-
-
-
-
-
+await execSync(
+`npx drizzle-kit generate --name=init --dialect mysql --out ${options.output} --schema ${options.entitiesPath}`,
+{ encoding: "utf-8" }
+);
+const initSqlFile = path.join(options.output, "0000_init.sql");
+const sql = fs.readFileSync(initSqlFile, "utf-8");
+const createStatements = extractCreateStatements(sql);
+const migrationFile = generateMigrationFile$2(createStatements, 1);
+saveMigrationFiles$2(migrationFile, 1, options.output);
+fs.rmSync(initSqlFile, { force: true });
+console.log(`✅ Removed SQL file: ${initSqlFile}`);
+let metaDir = path.join(options.output, "meta");
+fs.rmSync(metaDir, { recursive: true, force: true });
+console.log(`✅ Removed: ${metaDir}`);
 console.log(`✅ Migration successfully created!`);
 process.exit(0);
 } catch (error) {
@@ -209,13 +223,58 @@ const createMigration = async (options) => {
 process.exit(1);
 }
 };
-function
-
+function getTableMetadata(table) {
+const symbols = Object.getOwnPropertySymbols(table);
+const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
+const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
+const extraSymbol = symbols.find((s) => s.toString().includes("ExtraConfigBuilder"));
+const builders = {
+indexes: [],
+checks: [],
+foreignKeys: [],
+primaryKeys: [],
+uniqueConstraints: [],
+extras: []
+};
+if (extraSymbol) {
+const extraConfigBuilder = table[extraSymbol];
+if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
+const configBuilderData = extraConfigBuilder(table);
+if (configBuilderData) {
+const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
+(item) => item.value || item
+);
+configBuilders.forEach((builder) => {
+if (!builder?.constructor) return;
+const builderName = builder.constructor.name.toLowerCase();
+const builderMap = {
+indexbuilder: builders.indexes,
+checkbuilder: builders.checks,
+foreignkeybuilder: builders.foreignKeys,
+primarykeybuilder: builders.primaryKeys,
+uniqueconstraintbuilder: builders.uniqueConstraints
+};
+for (const [type, array] of Object.entries(builderMap)) {
+if (builderName.includes(type)) {
+array.push(builder);
+break;
+}
+}
+builders.extras.push(builder);
+});
+}
+}
+}
+return {
+tableName: nameSymbol ? table[nameSymbol] : "",
+columns: columnsSymbol ? table[columnsSymbol] : {},
+...builders
+};
 }
 function generateMigrationFile$1(createStatements, version) {
 const versionPrefix = `v${version}_MIGRATION`;
 const migrationLines = createStatements.map(
-(stmt, index) => ` .enqueue("${versionPrefix}${index}", "${
+(stmt, index) => ` .enqueue("${versionPrefix}${index}", "${stmt}")`
 // eslint-disable-line no-useless-escape
 ).join("\n");
 return `import { MigrationRunner } from "@forge/sql/out/migration";
@@ -225,6 +284,18 @@ export default (migrationRunner: MigrationRunner): MigrationRunner => {
 ${migrationLines};
 };`;
 }
+function filterWithPreviousMigration(newStatements, prevVersion, outputDir) {
+const prevMigrationPath = path.join(outputDir, `migrationV${prevVersion}.ts`);
+if (!fs.existsSync(prevMigrationPath)) {
+return newStatements.map((s) => s.replace(/\s+/g, " "));
+}
+const prevContent = fs.readFileSync(prevMigrationPath, "utf-8");
+const prevStatements = prevContent.split("\n").filter((line) => line.includes(".enqueue(")).map((line) => {
+const match = line.match(/\.enqueue\([^,]+,\s*"([^"]+)"/);
+return match ? match[1].replace(/\s+/g, " ").trim() : "";
+});
+return newStatements.filter((s) => !prevStatements.includes(s.replace(/\s+/g, " "))).map((s) => s.replace(/\s+/g, " "));
+}
 function saveMigrationFiles$1(migrationCode, version, outputDir) {
 if (!fs.existsSync(outputDir)) {
 fs.mkdirSync(outputDir, { recursive: true });
@@ -256,28 +327,8 @@ export default async (
 console.log(`✅ Migration file created: ${migrationFilePath}`);
 console.log(`✅ Migration count file updated: ${migrationCountPath}`);
 console.log(`✅ Migration index file created: ${indexFilePath}`);
+return true;
 }
-const extractCreateStatements = (schema) => {
-const statements = schema.split(";").map((s) => s.trim());
-return statements.filter(
-(stmt) => stmt.startsWith("create table") || stmt.startsWith("alter table") && stmt.includes("add index") || stmt.startsWith("alter table") && stmt.includes("add") && !stmt.includes("foreign") || stmt.startsWith("alter table") && stmt.includes("modify") && !stmt.includes("foreign")
-);
-};
-const loadEntities$1 = async (entitiesPath) => {
-try {
-const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
-if (!fs.existsSync(indexFilePath)) {
-console.error(`❌ Error: index.ts not found in ${entitiesPath}`);
-process.exit(1);
-}
-const { default: entities } = await import(indexFilePath);
-console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
-return entities;
-} catch (error) {
-console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
-process.exit(1);
-}
-};
 const loadMigrationVersion = async (migrationPath) => {
 try {
 const migrationCountFilePath = path.resolve(path.join(migrationPath, "migrationCount.ts"));
@@ -295,9 +346,183 @@ const loadMigrationVersion = async (migrationPath) => {
 process.exit(1);
 }
 };
+async function getDatabaseSchema(connection, dbName) {
+const [columns] = await connection.execute(`
+SELECT TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_KEY, EXTRA
+FROM INFORMATION_SCHEMA.COLUMNS
+WHERE TABLE_SCHEMA = ?
+`, [dbName]);
+const [indexes] = await connection.execute(`
+SELECT TABLE_NAME, INDEX_NAME, COLUMN_NAME, NON_UNIQUE
+FROM INFORMATION_SCHEMA.STATISTICS
+WHERE TABLE_SCHEMA = ?
+ORDER BY TABLE_NAME, INDEX_NAME, SEQ_IN_INDEX
+`, [dbName]);
+const [foreignKeys] = await connection.execute(`
+SELECT
+TABLE_NAME,
+COLUMN_NAME,
+CONSTRAINT_NAME,
+REFERENCED_TABLE_NAME,
+REFERENCED_COLUMN_NAME
+FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
+WHERE TABLE_SCHEMA = ?
+AND REFERENCED_TABLE_NAME IS NOT NULL
+`, [dbName]);
+const schema = {};
+columns.forEach((row) => {
+if (!schema[row.TABLE_NAME]) {
+schema[row.TABLE_NAME] = {
+columns: {},
+indexes: {},
+foreignKeys: {}
+};
+}
+schema[row.TABLE_NAME].columns[row.COLUMN_NAME] = row;
+});
+indexes.forEach((row) => {
+if (!schema[row.TABLE_NAME].indexes[row.INDEX_NAME]) {
+schema[row.TABLE_NAME].indexes[row.INDEX_NAME] = {
+columns: [],
+unique: !row.NON_UNIQUE
+};
+}
+schema[row.TABLE_NAME].indexes[row.INDEX_NAME].columns.push(row.COLUMN_NAME);
+});
+foreignKeys.forEach((row) => {
+if (!schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME]) {
+schema[row.TABLE_NAME].foreignKeys[row.CONSTRAINT_NAME] = {
+column: row.COLUMN_NAME,
+referencedTable: row.REFERENCED_TABLE_NAME,
+referencedColumn: row.REFERENCED_COLUMN_NAME
+};
+}
+});
+return schema;
+}
+function normalizeMySQLType(mysqlType) {
+let normalized = mysqlType.replace(/\([^)]*\)/, "").toLowerCase();
+normalized = normalized.replace(/^mysql/, "");
+return normalized;
+}
+function getForeignKeyName(fk) {
+return fk.getName();
+}
+function getIndexName(index) {
+return index.name;
+}
+function getUniqueConstraintName(uc) {
+return uc.name;
+}
+function getIndexColumns(index) {
+return index.columns.map((col) => col.name);
+}
+function generateSchemaChanges(drizzleSchema, dbSchema, schemaModule) {
+const changes = [];
+for (const [tableName, dbTable] of Object.entries(dbSchema)) {
+const drizzleColumns = drizzleSchema[tableName];
+if (!drizzleColumns) {
+const columns = Object.entries(dbTable.columns).map(([colName, col]) => {
+const type = col.COLUMN_TYPE;
+const nullable = col.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
+const autoIncrement = col.EXTRA.includes("auto_increment") ? "AUTO_INCREMENT" : "";
+return `\`${colName}\` ${type} ${nullable} ${autoIncrement}`.trim();
+}).join(",\n ");
+changes.push(`CREATE TABLE if not exists \`${tableName}\` (
+${columns}
+);`);
+for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
+if (indexName === "PRIMARY") {
+continue;
+}
+const isForeignKeyIndex = dbIndex.columns.some((colName) => {
+const column = dbTable.columns[colName];
+return column && column.COLUMN_KEY === "MUL" && column.EXTRA.includes("foreign key");
+});
+if (isForeignKeyIndex) {
+continue;
+}
+const columns2 = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
+const unique = dbIndex.unique ? "UNIQUE " : "";
+changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns2});`);
+}
+for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
+changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
+}
+continue;
+}
+for (const [colName, dbCol] of Object.entries(dbTable.columns)) {
+const drizzleCol = Object.values(drizzleColumns).find((c) => c.name === colName);
+if (!drizzleCol) {
+const type = dbCol.COLUMN_TYPE;
+const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
+changes.push(`ALTER TABLE \`${tableName}\` ADD COLUMN \`${colName}\` ${type} ${nullable};`);
+continue;
+}
+const normalizedDbType = normalizeMySQLType(dbCol.COLUMN_TYPE);
+const normalizedDrizzleType = normalizeMySQLType(drizzleCol.getSQLType());
+if (normalizedDbType !== normalizedDrizzleType) {
+const type = dbCol.COLUMN_TYPE;
+const nullable = dbCol.IS_NULLABLE === "YES" ? "NULL" : "NOT NULL";
+changes.push(`ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${colName}\` ${type} ${nullable};`);
+}
+}
+const table = Object.values(schemaModule).find((t) => {
+const metadata = getTableMetadata(t);
+return metadata.tableName === tableName;
+});
+if (table) {
+const metadata = getTableMetadata(table);
+for (const [indexName, dbIndex] of Object.entries(dbTable.indexes)) {
+if (indexName === "PRIMARY") {
+continue;
+}
+const isForeignKeyIndex = metadata.foreignKeys.some((fk) => getForeignKeyName(fk) === indexName);
+if (isForeignKeyIndex) {
+continue;
+}
+const existsUniqIndex = metadata.uniqueConstraints.find((uc) => getUniqueConstraintName(uc) === indexName);
+let drizzleIndex = metadata.indexes.find((i) => getIndexName(i) === indexName);
+if (!drizzleIndex && existsUniqIndex) {
+drizzleIndex = existsUniqIndex;
+}
+if (!drizzleIndex) {
+const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
+const unique = dbIndex.unique ? "UNIQUE " : "";
+changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
+continue;
+}
+const dbColumns = dbIndex.columns.join(", ");
+const drizzleColumns2 = getIndexColumns(drizzleIndex).join(", ");
+if (dbColumns !== drizzleColumns2 || dbIndex.unique !== drizzleIndex instanceof UniqueConstraintBuilder) {
+changes.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
+const columns = dbIndex.columns.map((col) => `\`${col}\``).join(", ");
+const unique = dbIndex.unique ? "UNIQUE " : "";
+changes.push(`CREATE if not exists ${unique}INDEX \`${indexName}\` ON \`${tableName}\` (${columns});`);
+}
+}
+for (const [fkName, dbFK] of Object.entries(dbTable.foreignKeys)) {
+const drizzleFK = metadata.foreignKeys.find((fk) => getForeignKeyName(fk) === fkName);
+if (!drizzleFK) {
+changes.push(`ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${fkName}\` FOREIGN KEY (\`${dbFK.column}\`) REFERENCES \`${dbFK.referencedTable}\` (\`${dbFK.referencedColumn}\`);`);
+continue;
+}
+}
+for (const drizzleForeignKey of metadata.foreignKeys) {
+const isDbFk = Object.keys(dbTable.foreignKeys).find((fk) => fk === getForeignKeyName(drizzleForeignKey));
+if (!isDbFk) {
+const fkName = getForeignKeyName(drizzleForeignKey);
+changes.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
+}
+}
+}
+}
+return changes;
+}
 const updateMigration = async (options) => {
 try {
 let version = await loadMigrationVersion(options.output);
+const prevVersion = version;
 if (version < 1) {
 console.log(
 `⚠️ Initial migration not found. Run "npx forge-sql-orm migrations:create" first.`
@@ -305,247 +530,81 @@ const updateMigration = async (options) => {
 process.exit(0);
 }
 version += 1;
-const
-const orm = MikroORM.initSync({
+const connection = await mysql.createConnection({
 host: options.host,
 port: options.port,
 user: options.user,
 password: options.password,
-
-entities,
-debug: true
+database: options.dbName
 });
-
-
-
-
-
-
-
-
-
-process.exit(0);
-}
-} catch (error) {
-console.error(`❌ Error during migration update:`, error);
-process.exit(1);
-}
-};
-const PATCHES = [
-// 🗑️ Remove unused dialects (mssql, postgres, sqlite) in MikroORM
-{
-file: "node_modules/@mikro-orm/knex/MonkeyPatchable.d.ts",
-deleteLines: [
-/^.*mssql.*$/gim,
-/^.*MsSql.*$/gim,
-/^\s*Postgres.*$/gm,
-/^.*Sqlite3.*$/gm,
-/^.*BetterSqlite3.*$/gim
-],
-description: "Removing unused dialects from MonkeyPatchable.d.ts"
-},
-{
-file: "node_modules/@mikro-orm/knex/MonkeyPatchable.js",
-deleteLines: [
-/^.*mssql.*$/gim,
-/^.*MsSql.*$/gim,
-/^.*postgres.*$/gim,
-/^.*sqlite.*$/gim,
-/^.*Sqlite.*$/gim
-],
-description: "Removing unused dialects from MonkeyPatchable.js"
-},
-{
-file: "node_modules/@mikro-orm/knex/dialects/index.js",
-deleteLines: [/^.*mssql.*$/gim, /^.*MsSql.*$/gim, /^.*postgresql.*$/gim, /^.*sqlite.*$/gim],
-description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
-},
-{
-deleteFolder: "node_modules/@mikro-orm/knex/dialects/mssql",
-description: "Removing mssql dialect from MikroORM"
-},
-{
-deleteFolder: "node_modules/@mikro-orm/knex/dialects/postgresql",
-description: "Removing postgresql dialect from MikroORM"
-},
-{
-deleteFolder: "node_modules/@mikro-orm/knex/dialects/sqlite",
-description: "Removing sqlite dialect from MikroORM"
-},
-{
-deleteFolder: "node_modules/@mikro-orm/mysql/node_modules",
-description: "Removing node_modules from @mikro-orm/mysql"
-},
-{
-deleteFolder: "node_modules/@mikro-orm/knex/node_modules",
-description: "Removing node_modules from @mikro-orm/knex"
-},
-{
-deleteFolder: "node_modules/@mikro-orm/core/node_modules",
-description: "Removing sqlite dialect from MikroORM"
-},
-// 🔄 Fix Webpack `Critical dependency: the request of a dependency is an expression`
-{
-file: "node_modules/@mikro-orm/core/utils/Configuration.js",
-search: /dynamicImportProvider:\s*\/\* istanbul ignore next \*\/\s*\(id\) => import\(id\),/g,
-replace: "dynamicImportProvider: /* istanbul ignore next */ () => Promise.resolve({}),",
-description: "Fixing dynamic imports in MikroORM Configuration"
-},
-{
-file: "node_modules/@mikro-orm/core/utils/Utils.js",
-search: /static dynamicImportProvider = \(id\) => import\(id\);/g,
-replace: "static dynamicImportProvider = () => Promise.resolve({});",
-description: "Fixing dynamic imports in MikroORM Utils.js"
-},
-// 🛑 Remove deprecated `require.extensions` usage
-{
-file: "node_modules/@mikro-orm/core/utils/Utils.js",
-search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
-replace: ";",
-description: "Removing deprecated `require.extensions` check in MikroORM"
-},
-// 🛠️ Patch Knex to remove `Migrator` and `Seeder`
-{
-file: "node_modules/knex/lib/knex-builder/make-knex.js",
-deleteLines: [
-/^const \{ Migrator \} = require\('\.\.\/migrations\/migrate\/Migrator'\);$/gm,
-/^const Seeder = require\('\.\.\/migrations\/seed\/Seeder'\);$/gm
-],
-description: "Removing `Migrator` and `Seeder` requires from make-knex.js"
-},
-{
-file: "node_modules/knex/lib/knex-builder/make-knex.js",
-search: /\sreturn new Migrator\(this\);/g,
-replace: "return null;",
-description: "Replacing `return new Migrator(this);` with `return null;`"
-},
-{
-file: "node_modules/knex/lib/knex-builder/make-knex.js",
-search: /\sreturn new Seeder\(this\);/g,
-replace: "return null;",
-description: "Replacing `return new Seeder(this);` with `return null;`"
-},
-{
-file: "node_modules/knex/lib/dialects/index.js",
-deleteLines: [
-/^.*mssql.*$/gim,
-/^.*MsSql.*$/gim,
-/^.*postgresql.*$/gim,
-/^.*sqlite.*$/gim,
-/^.*oracle.*$/gim,
-/^.*oracledb.*$/gim,
-/^.*pgnative.*$/gim,
-/^.*postgres.*$/gim,
-/^.*redshift.*$/gim,
-/^.*sqlite3.*$/gim,
-/^.*cockroachdb.*$/gim
-],
-description: "Removing unused dialects from @mikro-orm/knex/dialects/index.js"
-},
-{
-file: "node_modules/@mikro-orm/core/utils/Utils.js",
-search: /\s\|\|\s*\(require\.extensions\s*&&\s*!!require\.extensions\['\.ts'\]\);\s*/g,
-replace: ";",
-// Replaces with semicolon to keep syntax valid
-description: "Removing deprecated `require.extensions` check from MikroORM"
-},
-{
-file: "node_modules/@mikro-orm/core/utils/Utils.js",
-search: /^.*extensions.*$/gim,
-replace: "{",
-// Replaces with semicolon to keep syntax valid
-description: "Removing deprecated `require.extensions` check from MikroORM"
-},
-{
-file: "node_modules/@mikro-orm/core/utils/Utils.js",
-search: /^.*package.json.*$/gim,
-replace: "return 0;",
-// Replaces with semicolon to keep syntax valid
-description: "Removing deprecated `require.extensions` check from MikroORM"
-},
-{
-file: "node_modules/@mikro-orm/knex/dialects/mysql/index.js",
-deleteLines: [/^.*MariaDbKnexDialect.*$/gim],
-description: "Removing MariaDbKnexDialect"
-}
-];
-function runPostInstallPatch() {
-console.log("🔧 Applying MikroORM & Knex patches...");
-PATCHES.forEach(
-({ file, search, replace, deleteLines, deleteFile, deleteFolder, description }) => {
-if (file) {
-const filePath = path.resolve(file);
-if (fs.existsSync(filePath)) {
-let content = fs.readFileSync(filePath, "utf8");
-let originalContent = content;
-if (search && replace) {
-if (typeof search === "string" ? content.includes(search) : search.test(content)) {
-content = content.replace(search, replace);
-console.log(`[PATCHED] ${description}`);
-}
-}
-if (deleteLines) {
-deleteLines.forEach((pattern) => {
-content = content.split("\n").filter((line) => !pattern.test(line)).join("\n");
-});
-if (content !== originalContent) {
-console.log(`[CLEANED] Removed matching lines in ${file}`);
-}
-}
-if (content !== originalContent) {
-fs.writeFileSync(filePath, content, "utf8");
-}
-if (content.trim() === "") {
-fs.unlinkSync(filePath);
-console.log(`[REMOVED] ${filePath} (file is now empty)`);
-}
-} else {
-console.warn(`[WARNING] File not found: ${file}`);
-}
+try {
+const dbSchema = await getDatabaseSchema(connection, options.dbName);
+const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
+if (!fs.existsSync(schemaPath)) {
+throw new Error(`Schema file not found at: ${schemaPath}`);
+}
+const schemaModule = await import(schemaPath);
+if (!schemaModule) {
+throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
 }
-
-
-
-
-
-
-
+const drizzleSchema = {};
+const tables = Object.values(schemaModule);
+tables.forEach((table) => {
+const metadata = getTableMetadata(table);
+if (metadata.tableName) {
+const columns = {};
+Object.entries(metadata.columns).forEach(([name, column]) => {
+columns[name] = {
+type: column.dataType,
+notNull: column.notNull,
+autoincrement: column.autoincrement,
+columnType: column.columnType,
+name: column.name,
+getSQLType: () => column.getSQLType()
+};
+});
+drizzleSchema[metadata.tableName] = columns;
 }
+});
+if (Object.keys(drizzleSchema).length === 0) {
+throw new Error(`No valid tables found in schema at: ${schemaPath}`);
 }
-
-
-
-
-
-
-console.log(`[SKIPPED] ${deleteFolderPath} ${description}`);
+console.log("Found tables:", Object.keys(drizzleSchema));
+const createStatements = filterWithPreviousMigration(generateSchemaChanges(drizzleSchema, dbSchema, schemaModule), prevVersion, options.output);
+if (createStatements.length) {
+const migrationFile = generateMigrationFile$1(createStatements, version);
+if (saveMigrationFiles$1(migrationFile, version, options.output)) {
+console.log(`✅ Migration successfully updated!`);
 }
+process.exit(0);
+} else {
+console.log(`⚠️ No new migration changes detected.`);
+process.exit(0);
 }
+} finally {
+await connection.end();
 }
-)
-
-
+} catch (error) {
+console.error(`❌ Error during migration update:`, error);
+process.exit(1);
+}
+};
 function generateMigrationUUID(version) {
 const now = /* @__PURE__ */ new Date();
 const timestamp = now.getTime();
 return `MIGRATION_V${version}_${timestamp}`;
 }
-function cleanSQLStatement(sql) {
-return sql.replace(/\s+default\s+character\s+set\s+utf8mb4\s+engine\s*=\s*InnoDB;?/gi, "").trim();
-}
 function generateMigrationFile(createStatements, version) {
 const uniqId = generateMigrationUUID(version);
 const migrationLines = createStatements.map(
-(stmt, index) => ` .enqueue("${uniqId}_${index}", "${
+(stmt, index) => ` .enqueue("${uniqId}_${index}", "${stmt}")`
 // eslint-disable-line no-useless-escape
 ).join("\n");
-const clearMigrationsLine = ` .enqueue("${uniqId}", "DELETE FROM __migrations")`;
 return `import { MigrationRunner } from "@forge/sql/out/migration";
 
 export default (migrationRunner: MigrationRunner): MigrationRunner => {
 return migrationRunner
-${migrationLines}
-${clearMigrationsLine};
+${migrationLines};
 };`;
 }
 function saveMigrationFiles(migrationCode, version, outputDir) {
@@ -580,42 +639,54 @@ export default async (
 console.log(`✅ Migration count file updated: ${migrationCountPath}`);
 console.log(`✅ Migration index file created: ${indexFilePath}`);
 }
-const extractDropStatements = (schema) => {
-const statements = schema.split(";").map((s) => s.trim());
-return statements.filter((s) => {
-return s.toLowerCase().startsWith("drop");
-});
-};
-const loadEntities = async (entitiesPath) => {
-try {
-const indexFilePath = path.resolve(path.join(entitiesPath, "index.ts"));
-if (!fs.existsSync(indexFilePath)) {
-console.error(`❌ Error: index.ts not found in ${indexFilePath}`);
-process.exit(1);
-}
-const { default: entities } = await import(indexFilePath);
-console.log(`✅ Loaded ${entities.length} entities from ${entitiesPath}`);
-return entities;
-} catch (error) {
-console.error(`❌ Error loading index.ts from ${entitiesPath}:`, error);
-process.exit(1);
-}
-};
 const dropMigration = async (options) => {
 try {
 const version = 1;
-const
-
-
-
-
-
-
-
+const schemaPath = path.resolve(options.entitiesPath, "schema.ts");
+if (!fs.existsSync(schemaPath)) {
+throw new Error(`Schema file not found at: ${schemaPath}`);
+}
+const schemaModule = await import(schemaPath);
+if (!schemaModule) {
+throw new Error(`Invalid schema file at: ${schemaPath}. Schema must export tables.`);
+}
+const drizzleSchema = {};
+const tables = Object.values(schemaModule);
+tables.forEach((table) => {
+const symbols = Object.getOwnPropertySymbols(table);
+const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
+const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
+const indexesSymbol = symbols.find((s) => s.toString().includes("Indexes"));
+const foreignKeysSymbol = symbols.find((s) => s.toString().includes("ForeignKeys"));
+if (table && nameSymbol && columnsSymbol) {
+drizzleSchema[table[nameSymbol]] = {
+// @ts-ignore
+columns: table[columnsSymbol],
+// @ts-ignore
+indexes: indexesSymbol ? table[indexesSymbol] || {} : {},
+// @ts-ignore
+foreignKeys: foreignKeysSymbol ? table[foreignKeysSymbol] || {} : {}
+};
+}
 });
-
-
-
+if (Object.keys(drizzleSchema).length === 0) {
+throw new Error(`No valid tables found in schema at: ${schemaPath}`);
+}
+console.log("Found tables:", Object.keys(drizzleSchema));
+const dropStatements = [];
+for (const [tableName, tableInfo] of Object.entries(drizzleSchema)) {
+for (const fk of Object.values(tableInfo.foreignKeys)) {
+const fkName = fk.getName();
+dropStatements.push(`ALTER TABLE \`${tableName}\` DROP FOREIGN KEY \`${fkName}\`;`);
+}
+for (const [indexName, index] of Object.entries(tableInfo.indexes)) {
+if (indexName === "PRIMARY") continue;
+dropStatements.push(`DROP INDEX \`${indexName}\` ON \`${tableName}\`;`);
+}
+dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
+}
+dropStatements.push(`DELETE FROM __migrations;`);
+const migrationFile = generateMigrationFile(dropStatements, version);
 saveMigrationFiles(migrationFile, version, options.output);
 console.log(`✅ Migration successfully created!`);
 process.exit(0);
@@ -795,12 +866,5 @@ program.command("migrations:drop").description("Generate a migration to drop all
 );
 await dropMigration(config);
 });
-program.command("patch:mikroorm").description("Patch MikroORM and Knex dependencies to work properly with Forge").action(async () => {
-console.log("Running MikroORM patch...");
-await runPostInstallPatch();
-await runPostInstallPatch();
-await runPostInstallPatch();
-console.log("✅ MikroORM patch applied successfully!");
-});
 program.parse(process.argv);
 //# sourceMappingURL=cli.mjs.map
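Note on the rewritten codegen flow (illustrative, not part of the diff): the new `generate:model` path shells out to `npx drizzle-kit pull` and then post-processes the generated `schema.ts` with the `replaceMySQLTypes` pass shown above, rewriting drizzle's `datetime`/`time`/`date`/`timestamp` helpers into the string-mode wrappers exported by forge-sql-orm. Below is a minimal TypeScript sketch of that idea; the sample schema string is hypothetical and only the first `datetime` rule from the diff is reproduced, whereas the real pass also covers the `time`, `date`, and `timestamp` variants.

// Minimal sketch of the replaceMySQLTypes idea from the diff above.
// The input is a hypothetical fragment of drizzle-kit output.
const sampleSchema = `
  createdAt: datetime('created_at', { mode: 'string' }),
`;

function rewriteDatetime(schemaContent: string): string {
  // Same regex as the first rule in the diff: datetime('col', { mode: 'string' })
  // becomes mySqlDateTimeString('col').
  return schemaContent.replace(
    /datetime\(['"]([^'"]+)['"],\s*{\s*mode:\s*['"]string['"]\s*}\)/g,
    "mySqlDateTimeString('$1')",
  );
}

console.log(rewriteDatetime(sampleSchema));
// -> createdAt: mySqlDateTimeString('created_at'),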