dbtasker 2.5.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +14 -1
- package/addcolumn.js +264 -0
- package/altercolumn.js +595 -0
- package/dbop.js +41 -51
- package/dropcolumn.js +174 -0
- package/function.js +567 -104
- package/index.js +98 -32
- package/package.json +1 -1
- package/tableop.js +273 -0
- package/validation.js +224 -159
- package/columnop.js +0 -748
- package/tables.js +0 -0
package/function.js
CHANGED
@@ -17,32 +17,32 @@ function isNumber(str) {
   }
   return !isNaN(str) && str.trim() !== "";
 }
-function getDateTime(
+function getDateTime(separator = "/") {
   const today = new Date();
   const formattedDateTime =
     today.getFullYear() +
-
+    separator +
     (today.getMonth() + 1).toString().padStart(2, "0") +
-
+    separator +
     today.getDate().toString().padStart(2, "0") +
     " " +
     today.getHours().toString().padStart(2, "0") +
-
+    separator +
     today.getMinutes().toString().padStart(2, "0") +
-
+    separator +
     today.getSeconds().toString().padStart(2, "0");

   const formatedDate =
     today.getFullYear() +
-
+    separator +
     (today.getMonth() + 1).toString().padStart(2, "0") +
-
+    separator +
     today.getDate().toString().padStart(2, "0");
   const formatedTime =
     today.getHours().toString().padStart(2, "0") +
-
+    separator +
     today.getMinutes().toString().padStart(2, "0") +
-
+    separator +
     today.getSeconds().toString().padStart(2, "0");
   return {
     year: today.getFullYear(),
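getDateTime() now takes an optional separator (default "/") used between the date and time components. A minimal usage sketch of the new signature; only the year field of the returned object is visible in this hunk, so the rest of its shape is assumed unchanged from 2.x:

    // internal helper in package/function.js
    const stamp = getDateTime();      // builds "YYYY/MM/DD HH/mm/ss"-style strings internally
    const dashed = getDateTime("-");  // same, but with "-" between components
    console.log(dashed.year);         // numeric year, e.g. 2025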
@@ -98,25 +98,25 @@ async function getCharsetAndCollations(config) {
   }
 }
 async function isCharsetCollationValid(config, charset, collation) {
-
+  let connection;

-
-
+  try {
+    connection = await mysql.createConnection(config);

-
+    const [rows] = await connection.execute(`
       SELECT 1
       FROM information_schema.COLLATIONS
       WHERE COLLATION_NAME = ?
       AND CHARACTER_SET_NAME = ?
     `, [collation, charset]);

-
+    return rows.length > 0;

-
-
-
-
-
+  } catch (err) {
+    return null;
+  } finally {
+    if (connection) await connection.end();
+  }
 }
 async function getMySQLEngines(config) {
   let connection;
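The rewritten helper now opens its own connection, checks the pair against information_schema.COLLATIONS, and returns true/false, or null when the lookup itself fails. A minimal call sketch, assuming a standard mysql2 config object:

    // true  -> the collation belongs to the charset
    // false -> unknown combination
    // null  -> connection/query error
    const ok = await isCharsetCollationValid(
      { host: "localhost", user: "root", password: "secret" }, // illustrative config
      "utf8mb4",
      "utf8mb4_unicode_ci"
    );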
@@ -209,6 +209,7 @@ async function dropDatabase(config, databaseName) {
   try {
     // Connect to server without specifying database
     connection = await mysql.createConnection({
+      port: config.port,
       host: config.host,
       user: config.user,
       password: config.password
@@ -223,13 +224,11 @@ async function dropDatabase(config, databaseName) {
     );

     if (rows.length === 0) {
-      console.log(`Database '${databaseName}' does not exist.`);
       return false;
     }

     // Drop the database
     await connection.query(`DROP DATABASE \`${databaseName}\``);
-    console.log(`Database '${databaseName}' dropped successfully.`);
     return true;
   } catch (err) {
     console.error("Error dropping database:", err.message);
@@ -460,19 +459,19 @@ function isValidColumnName(name) {

   return true;
 }
-function createloopname(text,
+function createloopname(text, separator = "_") {
   if (!isJsonObject(text)) {
     return null;
   }
-
+  separator = separator.toString();
   if (text.loop === null) {
     return text.name;
   } else if (['year', 'years'].includes(text.loop)) {
-    return text.name +
+    return text.name + separator + getDateTime().year + separator;
   } else if (['month', 'months'].includes(text.loop)) {
-    return text.name +
+    return text.name + separator + getDateTime().year + separator + getDateTime().month + separator;
   } else if (['day', 'days'].includes(text.loop)) {
-    return text.name +
+    return text.name + separator + getDateTime().year + separator + getDateTime().month + separator + getDateTime().day + separator;
   } else {
     return false;
   }
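createloopname() now threads the configurable separator through every generated segment. A small illustration of the new behaviour, assuming getDateTime() returns zero-padded month/day fields (consistent with the padStart calls earlier in this file) and a current date of 2025-07-04:

    createloopname({ name: "sales", loop: "month" });  // -> "sales_2025_07_"
    createloopname({ name: "sales", loop: null });     // -> "sales"
    createloopname({ name: "sales", loop: "weekly" }); // -> false (unsupported loop keyword)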
@@ -497,7 +496,7 @@ function getloop(text) {
     return { name: text, loop: null }
   }
 }
-function perseTableNameWithLoop(text,
+function perseTableNameWithLoop(text, separator = "_") {
   if (typeof text !== 'string') return null;
   text = text.trim();
   let gtlp = getloop(text);
@@ -511,7 +510,7 @@ function perseTableNameWithLoop(text, seperator = "_") {
   } else if (gtlp === null) {
     return false;
   } else {
-    const loopname = createloopname(gtlp,
+    const loopname = createloopname(gtlp, separator);
     if (isValidTableName(loopname)) {
       return { name: gtlp.name, loop: gtlp.loop, loopname: loopname }
     } else {
@@ -519,7 +518,7 @@ function perseTableNameWithLoop(text, seperator = "_") {
     }
   }
 }
-function perseDatabaseNameWithLoop(text,
+function perseDatabaseNameWithLoop(text, separator = "_") {
   if (typeof text !== 'string') return false;
   text = text.trim();
   let gtlp = getloop(text);
@@ -533,7 +532,7 @@ function perseDatabaseNameWithLoop(text, seperator = "_") {
   } else if (gtlp === null) {
     return false;
   } else {
-    const loopname = createloopname(gtlp,
+    const loopname = createloopname(gtlp, separator);
     if (isValidDatabaseName(loopname)) {
       return { name: gtlp.name, loop: gtlp.loop, loopname: loopname }
     } else {
@@ -542,54 +541,53 @@ function perseDatabaseNameWithLoop(text, seperator = "_") {
   }
 }
 function reverseLoopName(text) {
-  if (typeof text !== "string") return text;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          y += text[i];
-        }
-        return [y + "(month)", y + "(months)"];
-      }
-      return text;
-    }
-  } else if (a[a.length - 1].length === 4) {
-    const year = new Date().getFullYear();
-    if (Number(a[a.length - 1]) <= year) {
-      let y = "";
-      for (let i = 0; i < text.length - 6; i++) {
-        y += text[i];
-      }
-      return [y + "(year)", y + "(years)"];
-    }
-    return text;
-  }
-  return text;
+  if (typeof text !== "string" || text.length === 0) return text;
+
+  const nowYear = new Date().getFullYear();
+  const sep = text[text.length - 1]; // The separator "_"
+  const core = text.slice(0, -1); // Remove trailing separator
+
+  // This Regex looks for the structure:
+  // [sep] + YEAR + optional([sep]+MONTH) + optional([sep]+DAY)
+  const dateRegex = new RegExp(`[${sep}](\\d{4})(?:[${sep}](\\d{2}))?(?:[${sep}](\\d{2}))?$`);
+  const match = core.match(dateRegex);
+
+  if (!match) return text;
+
+  const [fullMatch, year, month, day] = match;
+
+  // Validate Year
+  if (parseInt(year) > nowYear) return text;
+
+  // Determine granularity based on what was provided in the string
+  let type = "year";
+  if (day) {
+    type = "day";
+  } else if (month) {
+    type = "month";
   }
-
+
+  // To get the "previous form":
+  // We take the core and remove the entire date block (fullMatch).
+  // Then we add the separator back to match your "base + _" rule.
+  const basePart = core.slice(0, core.length - fullMatch.length);
+  const baseWithSep = basePart;
+
+  return [
+    `${baseWithSep}(${type})`,
+    `${baseWithSep}(${type}s)`,
+    `(${type})${baseWithSep}`,
+    `(${type}s)${baseWithSep}`
+  ];
 }



+
+
+
+
+
 async function getLastSavedFile(directory) {
   try {
     // Read the directory
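The rewritten reverseLoopName() parses the trailing date block with a single regex and returns all four naming variants instead of two. Worked examples traced through the new implementation (assuming the current year is 2024 or later):

    reverseLoopName("logs_2024_");    // ["logs(year)", "logs(years)", "(year)logs", "(years)logs"]
    reverseLoopName("logs_2024_07_"); // ["logs(month)", "logs(months)", "(month)logs", "(months)logs"]
    reverseLoopName("plain_table");   // "plain_table" (no date suffix, returned unchanged)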
@@ -781,6 +779,13 @@ function removefromarray(arr, text = "") {
   }
   return arr
 }
+function isSameArray(arr1, arr2) {
+  if (arr1.length !== arr2.length) return false;
+  for (let i = 0; i < arr1.length; i++) {
+    if (!arr1.includes(arr2[i])) return false;
+  }
+  return true;
+}
 function isJsonObject(value) {
   return typeof value === "object" && value !== null && !Array.isArray(value);
 }
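isSameArray() compares two arrays by length and membership rather than by position, so element order does not matter. A quick sketch of the resulting behaviour:

    isSameArray(["id", "name"], ["name", "id"]);  // true  (same members, different order)
    isSameArray(["id", "name"], ["id", "email"]); // false (different members)
    isSameArray(["id"], ["id", "name"]);          // false (different length)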
@@ -946,7 +951,6 @@ async function getColumnDetails(config, dbName, tableName, columnName) {
         IS_NULLABLE,
         COLUMN_DEFAULT,
         EXTRA,
-        COLUMN_KEY,
         CHARACTER_SET_NAME,
         COLLATION_NAME,
         COLUMN_COMMENT
@@ -961,33 +965,42 @@ async function getColumnDetails(config, dbName, tableName, columnName) {
     if (!cols.length) return false;
     const c = cols[0];

-    // 2. Parse ENUM / SET
+    // 2. Parse ENUM / SET / lengths
     let length_value = null;
-    // DECIMAL / FLOAT / DOUBLE ONLY
     if (["decimal", "float", "double"].includes(c.DATA_TYPE)) {
       length_value =
         c.NUMERIC_SCALE !== null
           ? [c.NUMERIC_PRECISION, c.NUMERIC_SCALE]
           : c.NUMERIC_PRECISION;
-    }
-
-    // INTEGER TYPES → no length_value
-    else if (
-      ["tinyint", "smallint", "mediumint", "int", "bigint"].includes(c.DATA_TYPE)
-    ) {
+    } else if (["tinyint", "smallint", "mediumint", "int", "bigint"].includes(c.DATA_TYPE)) {
       length_value = null;
-    }
-    else if (c.DATA_TYPE === "enum" || c.DATA_TYPE === "set") {
+    } else if (c.DATA_TYPE === "enum" || c.DATA_TYPE === "set") {
       length_value = c.COLUMN_TYPE
         .slice(c.DATA_TYPE.length + 1, -1)
         .split(",")
         .map(v => v.trim().replace(/^'(.*)'$/, "$1"));
-    }
-    // CHAR / VARCHAR
-    else if (c.CHARACTER_MAXIMUM_LENGTH !== null) {
+    } else if (c.CHARACTER_MAXIMUM_LENGTH !== null) {
       length_value = c.CHARACTER_MAXIMUM_LENGTH;
     }

+    // 3. Index metadata from STATISTICS
+    const [idx] = await connection.execute(
+      `
+      SELECT INDEX_NAME, NON_UNIQUE, SEQ_IN_INDEX
+      FROM INFORMATION_SCHEMA.STATISTICS
+      WHERE TABLE_SCHEMA = ?
+        AND TABLE_NAME = ?
+        AND COLUMN_NAME = ?
+      ORDER BY INDEX_NAME, SEQ_IN_INDEX
+      `,
+      [dbName, tableName, columnName]
+    );
+
+    let index = "";
+    if (idx.some(i => i.INDEX_NAME === "PRIMARY")) index = "PRIMARY KEY";
+    else if (idx.some(i => i.NON_UNIQUE === 0)) index = "UNIQUE";
+    else if (idx.length) index = "KEY";
+
     return {
       columntype: c.DATA_TYPE.toUpperCase(),
       length_value,
@@ -996,10 +1009,7 @@ async function getColumnDetails(config, dbName, tableName, columnName) {
       nulls: c.IS_NULLABLE === "YES",
       defaults: c.COLUMN_DEFAULT,
       autoincrement: c.EXTRA.includes("auto_increment"),
-      index
-        c.COLUMN_KEY === "PRI" ? "PRIMARY KEY" :
-        c.COLUMN_KEY === "UNI" ? "UNIQUE" :
-        c.COLUMN_KEY === "MUL" ? "KEY" : "",
+      index,
       _charset_: c.CHARACTER_SET_NAME,
       _collate_: c.COLLATION_NAME,
       comment: c.COLUMN_COMMENT
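With COLUMN_KEY dropped from the SELECT, the index field of getColumnDetails() is now derived from INFORMATION_SCHEMA.STATISTICS (previous hunk). A sketch of the result for a primary-key column, showing only the fields visible in these hunks with illustrative values:

    // hypothetical result for users.id
    {
      columntype: "INT",
      length_value: null,
      nulls: false,
      defaults: null,
      autoincrement: true,
      index: "PRIMARY KEY",   // one of "PRIMARY KEY" | "UNIQUE" | "KEY" | ""
      _charset_: null,
      _collate_: null,
      comment: ""
    }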
@@ -1012,6 +1022,267 @@ async function getColumnDetails(config, dbName, tableName, columnName) {
     if (connection) await connection.end();
   }
 }
+async function inspectColumnConstraint(config, database, table, column, options = {}) {
+  const loose = options.loose !== false; // default true: include composite constraints that contain the column
+
+  if (!database || !table || !column) {
+    throw new Error('database, table and column are required');
+  }
+
+  // simple identifier checks to avoid injection via identifiers
+  const validIdent = s => typeof s === 'string' && /^[A-Za-z0-9$_]+$/.test(s);
+  if (!validIdent(database) || !validIdent(table) || !validIdent(column)) {
+    throw new Error('Invalid database/table/column name');
+  }
+
+  const conn = await mysql.createConnection(config);
+  try {
+    // 1) Find constraints (from KEY_COLUMN_USAGE) that include the specific column in this table
+    const [kcuRows] = await conn.execute(
+      `SELECT CONSTRAINT_NAME, COLUMN_NAME, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME, ORDINAL_POSITION
+       FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
+       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND COLUMN_NAME = ?
+       ORDER BY CONSTRAINT_NAME, ORDINAL_POSITION`,
+      [database, table, column]
+    );
+
+    // If no constraint rows found, constraints list is empty
+    const constraints = [];
+    const constraintNames = Array.from(new Set(kcuRows.map(r => r.CONSTRAINT_NAME)));
+
+    if (constraintNames.length > 0) {
+      // Build constraint -> columns and referenced columns
+      const consMap = new Map();
+      for (const r of kcuRows) {
+        const name = r.CONSTRAINT_NAME;
+        if (!consMap.has(name)) {
+          consMap.set(name, {
+            constraintName: name,
+            columns: [],
+            referencedTableSchema: r.REFERENCED_TABLE_SCHEMA || null,
+            referencedTable: r.REFERENCED_TABLE_NAME || null,
+            referencedColumns: []
+          });
+        }
+        const entry = consMap.get(name);
+        entry.columns.push(r.COLUMN_NAME);
+        if (r.REFERENCED_COLUMN_NAME) entry.referencedColumns.push(r.REFERENCED_COLUMN_NAME);
+      }
+
+      // Get constraint types for these constraint names
+      const placeholders = constraintNames.map(() => '?').join(',');
+      const [tcRows] = await conn.execute(
+        `SELECT CONSTRAINT_NAME, CONSTRAINT_TYPE
+         FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS
+         WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND CONSTRAINT_NAME IN (${placeholders})`,
+        [database, table, ...constraintNames]
+      );
+      const typeByName = new Map(tcRows.map(r => [r.CONSTRAINT_NAME, r.CONSTRAINT_TYPE]));
+
+      // If there are foreign keys, fetch their ON DELETE/ON UPDATE rules
+      const fkNames = tcRows.filter(r => r.CONSTRAINT_TYPE === 'FOREIGN KEY').map(r => r.CONSTRAINT_NAME);
+      const fkRules = new Map();
+      if (fkNames.length > 0) {
+        const fkPlaceholders = fkNames.map(() => '?').join(',');
+        const [rcRows] = await conn.execute(
+          `SELECT CONSTRAINT_NAME, DELETE_RULE, UPDATE_RULE
+           FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS
+           WHERE CONSTRAINT_SCHEMA = ? AND CONSTRAINT_NAME IN (${fkPlaceholders})`,
+          [database, ...fkNames]
+        );
+        for (const r of rcRows) fkRules.set(r.CONSTRAINT_NAME, { deleteRule: r.DELETE_RULE, updateRule: r.UPDATE_RULE });
+      }
+
+      // Compose final constraint objects
+      for (const [name, info] of consMap.entries()) {
+        const ctype = typeByName.get(name) || null; // could be null for some implicitly created indexes
+        const isPrimary = ctype === 'PRIMARY KEY';
+        const isUnique = ctype === 'UNIQUE' || isPrimary;
+        const isForeignKey = ctype === 'FOREIGN KEY';
+        const rule = isForeignKey ? fkRules.get(name) || {} : {};
+
+        // Apply strict/loose filtering:
+        if (loose || (info.columns.length === 1 && info.columns[0] === column)) {
+          constraints.push({
+            constraintName: name,
+            constraintType: ctype,
+            columns: info.columns,
+            isPrimary,
+            isUnique,
+            isForeignKey,
+            referencedTableSchema: info.referencedTableSchema,
+            referencedTable: info.referencedTable,
+            referencedColumns: info.referencedColumns,
+            deleteRule: rule.deleteRule || null,
+            updateRule: rule.updateRule || null
+          });
+        }
+      }
+    }
+
+    // 2) Indexes: find index names that include the column, then assemble full column lists for those indexes
+    const [idxNameRows] = await conn.execute(
+      `SELECT DISTINCT INDEX_NAME
+       FROM INFORMATION_SCHEMA.STATISTICS
+       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND COLUMN_NAME = ?`,
+      [database, table, column]
+    );
+
+    const indexes = [];
+    if (idxNameRows.length > 0) {
+      const idxNames = idxNameRows.map(r => r.INDEX_NAME);
+      const placeholders2 = idxNames.map(() => '?').join(',');
+      // fetch full index definitions for those indexes
+      const [idxRows] = await conn.execute(
+        `SELECT INDEX_NAME, NON_UNIQUE, COLUMN_NAME, SEQ_IN_INDEX
+         FROM INFORMATION_SCHEMA.STATISTICS
+         WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND INDEX_NAME IN (${placeholders2})
+         ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
+        [database, table, ...idxNames]
+      );
+
+      const idxMap = new Map();
+      for (const r of idxRows) {
+        const iname = r.INDEX_NAME;
+        if (!idxMap.has(iname)) {
+          idxMap.set(iname, { indexName: iname, nonUnique: Number(r.NON_UNIQUE), indexColumns: [] });
+        }
+        idxMap.get(iname).indexColumns.push(r.COLUMN_NAME);
+      }
+      // filter by loose/strict: if strict, only include indexes where column list is exactly [column]
+      for (const idx of Array.from(idxMap.values())) {
+        if (loose || (idx.indexColumns.length === 1 && idx.indexColumns[0] === column)) {
+          indexes.push(idx);
+        }
+      }
+    }
+
+    // 3) CHECK constraints: search check clauses for the column (only checks defined on this table)
+    const [checkRows] = await conn.execute(
+      `SELECT tc.CONSTRAINT_NAME, cc.CHECK_CLAUSE
+       FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
+       JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS cc
+         ON tc.CONSTRAINT_SCHEMA = cc.CONSTRAINT_SCHEMA
+        AND tc.CONSTRAINT_NAME = cc.CONSTRAINT_NAME
+       WHERE tc.TABLE_SCHEMA = ? AND tc.TABLE_NAME = ? AND tc.CONSTRAINT_TYPE = 'CHECK'`,
+      [database, table]
+    );
+
+    const checks = [];
+    if (checkRows.length > 0) {
+      // simple text search to see if check clause mentions the column (best-effort)
+      const colPattern = new RegExp('(^|[^A-Za-z0-9_`])' + column.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1') + '($|[^A-Za-z0-9_`])', 'i');
+      for (const r of checkRows) {
+        const clause = r.CHECK_CLAUSE || '';
+        if (loose) {
+          if (colPattern.test(clause) || clause.includes('`' + column + '`')) {
+            checks.push({ constraintName: r.CONSTRAINT_NAME, checkClause: clause });
+          }
+        } else {
+          // strict: only include if the clause explicitly mentions the exact column token (best-effort)
+          if (clause.includes('`' + column + '`') || colPattern.test(clause)) {
+            checks.push({ constraintName: r.CONSTRAINT_NAME, checkClause: clause });
+          }
+        }
+      }
+    }
+
+    const found = constraints.length > 0 || indexes.length > 0 || checks.length > 0;
+    return { found, constraints, indexes, checks };
+  } catch (err) {
+    console.error(err.message);
+    return null;
+  } finally {
+    await conn.end();
+  }
+}
+async function _fetchIndexes(conn, database, tableName) {
+  const sql = `
+    SELECT INDEX_NAME, SEQ_IN_INDEX, COLUMN_NAME, NON_UNIQUE
+    FROM information_schema.STATISTICS
+    WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
+    ORDER BY INDEX_NAME, SEQ_IN_INDEX
+  `;
+  const [rows] = await conn.execute(sql, [database, tableName]);
+
+  const map = new Map();
+  for (const r of rows) {
+    const name = r.INDEX_NAME;
+    if (!map.has(name)) map.set(name, { indexName: name, columns: [], nonUnique: Boolean(r.NON_UNIQUE) });
+    map.get(name).columns.push(r.COLUMN_NAME);
+  }
+
+  return Array.from(map.values());
+}
+async function checkIndexExists(config, database, tableName, indexKey) {
+  if (!config || !database || !tableName || indexKey === undefined || indexKey === null) {
+    throw new Error('config, database, tableName and indexKey are required');
+  }
+
+  const conn = await mysql.createConnection(config);
+  try {
+    const indexes = await _fetchIndexes(conn, database, tableName);
+    if (!indexes.length) {
+      return { found: false, matches: [] };
+    }
+
+    // Normalize input
+    let wantIndexName = null;
+    let wantCols = null;
+
+    if (Array.isArray(indexKey)) {
+      wantCols = indexKey.map(c => String(c).trim()).filter(Boolean);
+    } else if (typeof indexKey === 'string') {
+      const s = indexKey.trim();
+      if (s.indexOf(',') !== -1) {
+        wantCols = s.split(',').map(x => x.replace(/`/g, '').trim()).filter(Boolean);
+      } else {
+        // try index name first, but also allow single-column match
+        wantIndexName = s.replace(/`/g, '');
+        wantCols = [s.replace(/`/g, '')];
+      }
+    } else {
+      wantCols = [String(indexKey)];
+    }
+
+    const lowerWantName = wantIndexName ? wantIndexName.toLowerCase() : null;
+    const lowerWantCols = wantCols ? wantCols.map(c => c.toLowerCase()) : null;
+
+    const matches = [];
+
+    for (const idx of indexes) {
+      const idxNameLower = String(idx.indexName).toLowerCase();
+      const idxColsLower = idx.columns.map(c => String(c).toLowerCase());
+
+      // If user provided an index name and it matches exactly -> match
+      if (lowerWantName && idxNameLower === lowerWantName) {
+        matches.push({ indexName: idx.indexName, columns: idx.columns.slice(), nonUnique: idx.nonUnique });
+        continue;
+      }
+
+      // Otherwise check leftmost-prefix column match (only if wantCols provided)
+      if (lowerWantCols && lowerWantCols.length > 0) {
+        if (idxColsLower.length >= lowerWantCols.length) {
+          let ok = true;
+          for (let i = 0; i < lowerWantCols.length; i++) {
+            if (idxColsLower[i] !== lowerWantCols[i]) { ok = false; break; }
+          }
+          if (ok) {
+            matches.push({ indexName: idx.indexName, columns: idx.columns.slice(), nonUnique: idx.nonUnique });
+            continue;
+          }
+        }
+      }
+    }
+
+    return { found: matches.length > 0, matches };
+  } catch (err) {
+    console.error("Error in checkIndexExists:", err.message);
+    return null;
+  } finally {
+    await conn.end();
+  }
+}
 async function columnHasKey(config, databaseName, tableName, columnName) {
   let connection;
   try {
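Both new helpers open their own connection from config and return null on errors. A minimal usage sketch; database, table, and column names are illustrative only:

    // everything (key/unique/foreign-key constraints, indexes, CHECKs) touching orders.customer_id
    const info = await inspectColumnConstraint(config, "shop", "orders", "customer_id");
    // info -> { found, constraints: [...], indexes: [...], checks: [...] } or null on error

    // strict mode: only single-column constraints/indexes on exactly this column
    await inspectColumnConstraint(config, "shop", "orders", "customer_id", { loose: false });

    // checkIndexExists accepts an index name, a column array, or a "col1,col2" string
    await checkIndexExists(config, "shop", "orders", ["customer_id"]);
    // -> { found: true, matches: [{ indexName, columns, nonUnique }] }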
@@ -1142,7 +1413,124 @@ async function getAllForeignKeyDetails(config, databaseName, tableName) {
     if (connection) await connection.end();
   }
 }
-async function
+async function findReferencingFromColumns(config, database, parentTable, parentColumn) {
+  const conn = await mysql.createConnection(config);
+  try {
+    // Build KEY_COLUMN_USAGE query. If parentColumn is provided, include it in filter.
+    let kcuSql = `
+      SELECT
+        kcu.CONSTRAINT_SCHEMA AS constraint_schema,
+        kcu.TABLE_SCHEMA AS child_schema,
+        kcu.TABLE_NAME AS child_table,
+        kcu.CONSTRAINT_NAME AS fk_name,
+        kcu.COLUMN_NAME AS child_column,
+        kcu.ORDINAL_POSITION AS ordinal_position,
+        kcu.REFERENCED_TABLE_SCHEMA AS referenced_schema,
+        kcu.REFERENCED_TABLE_NAME AS referenced_table,
+        kcu.REFERENCED_COLUMN_NAME AS referenced_column
+      FROM information_schema.KEY_COLUMN_USAGE kcu
+      WHERE kcu.REFERENCED_TABLE_SCHEMA = ?
+        AND kcu.REFERENCED_TABLE_NAME = ?
+    `;
+    const params = [database, parentTable];
+    if (parentColumn) {
+      kcuSql += ` AND kcu.REFERENCED_COLUMN_NAME = ?`;
+      params.push(parentColumn);
+    }
+    kcuSql += ` ORDER BY kcu.CONSTRAINT_NAME, kcu.ORDINAL_POSITION;`;
+
+    const [kcuRows] = await conn.execute(kcuSql, params);
+
+    if (!kcuRows.length) return [];
+
+    // Get update/delete rules for the involved constraints from REFERENTIAL_CONSTRAINTS
+    // Build list of unique (constraint_schema, constraint_name) pairs
+    const uniqueKeys = new Set();
+    for (const r of kcuRows) {
+      uniqueKeys.add(`${r.constraint_schema}||${r.fk_name}`);
+    }
+    // Prepare placeholders and params for referential constraints query
+    const rcWhereParts = [];
+    const rcParams = [];
+    for (const key of uniqueKeys) {
+      const [schema, name] = key.split('||');
+      rcWhereParts.push('(CONSTRAINT_SCHEMA = ? AND CONSTRAINT_NAME = ?)');
+      rcParams.push(schema, name);
+    }
+    const rcSql = `
+      SELECT CONSTRAINT_SCHEMA, CONSTRAINT_NAME, UPDATE_RULE, DELETE_RULE
+      FROM information_schema.REFERENTIAL_CONSTRAINTS
+      WHERE ${rcWhereParts.join(' OR ')};
+    `;
+    const [rcRows] = rcParams.length ? await conn.execute(rcSql, rcParams) : [[]];
+
+    // Map rc by schema+name
+    const rcMap = new Map();
+    for (const rc of rcRows) {
+      rcMap.set(`${rc.CONSTRAINT_SCHEMA}||${rc.CONSTRAINT_NAME}`, {
+        update_rule: rc.UPDATE_RULE,
+        delete_rule: rc.DELETE_RULE
+      });
+    }
+
+    // Group kcuRows by constraint (schema + fk_name + child_table) and collect ordered columns
+    const grouped = new Map();
+    for (const row of kcuRows) {
+      const key = `${row.child_schema}||${row.child_table}||${row.fk_name}`;
+      if (!grouped.has(key)) {
+        grouped.set(key, {
+          fk_name: row.fk_name,
+          child_schema: row.child_schema,
+          child_table: row.child_table,
+          child_columns: [],
+          referenced_schema: row.referenced_schema,
+          referenced_table: row.referenced_table,
+          referenced_columns: [],
+          ordinal_positions: []
+        });
+      }
+      const g = grouped.get(key);
+      g.child_columns.push(row.child_column);
+      g.referenced_columns.push(row.referenced_column);
+      g.ordinal_positions.push(row.ordinal_position);
+    }
+
+    // Build final array and attach update/delete rules if available.
+    const result = [];
+    for (const [key, g] of grouped.entries()) {
+      const [schema, , fk_name] = key.split('||');
+      const rcKey = `${schema}||${fk_name}`;
+      const rc = rcMap.get(rcKey) || {};
+      // Ensure columns are ordered by ordinal_position (we already fetched in order,
+      // but let's be defensive and sort if needed)
+      // (we'll build an array of tuples and sort)
+      const tuples = g.ordinal_positions.map((ord, i) => ({ ord: Number(ord), child: g.child_columns[i], ref: g.referenced_columns[i] }));
+      tuples.sort((a, b) => a.ord - b.ord);
+      const child_columns = tuples.map(t => t.child);
+      const referenced_columns = tuples.map(t => t.ref);
+
+      result.push({
+        fk_name: g.fk_name,
+        child_schema: g.child_schema,
+        child_table: g.child_table,
+        child_columns,
+        referenced_schema: g.referenced_schema,
+        referenced_table: g.referenced_table,
+        referenced_columns,
+        update_rule: rc.update_rule || null,
+        delete_rule: rc.delete_rule || null
+      });
+    }
+
+    return result;
+  } catch (err) {
+    console.error("Error in findReferencingColumns:", err.message);
+    return null;
+  } finally {
+    await conn.end();
+  }
+}
+async function addForeignKeyWithIndex(config, dbName, tableName, columnName, refTable, refColumn, options = {}) {
   const {
     onDelete = "RESTRICT",
     onUpdate = "RESTRICT"
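findReferencingFromColumns() lists every foreign key in database that points at parentTable, optionally narrowed to one parent column, with composite-key columns kept in ordinal order per constraint. A minimal call sketch with illustrative names:

    // all FKs referencing shop.customers
    const refs = await findReferencingFromColumns(config, "shop", "customers");

    // only FKs referencing customers.id
    const refsToId = await findReferencingFromColumns(config, "shop", "customers", "id");
    // each entry: { fk_name, child_schema, child_table, child_columns,
    //               referenced_schema, referenced_table, referenced_columns,
    //               update_rule, delete_rule }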
@@ -1152,16 +1540,24 @@ async function addForeignKeyWithIndex(config, dbname, tableName, columnName, ref
   const fkName = `fk_${tableName}_${refTable}_${columnName}`;

   let connection;
+
   try {
-    connection = await mysql.createConnection({
+    connection = await mysql.createConnection({
+      ...config,
+      database: dbName
+    });

-    //
-
-
-
-
+    // Add index (ignore if already exists)
+    try {
+      await connection.query(`
+        ALTER TABLE \`${tableName}\`
+        ADD INDEX \`${indexName}\` (\`${columnName}\`)
+      `);
+    } catch (_) {
+      // index probably exists — safe to ignore
+    }

-    //
+    // Add foreign key constraint
     await connection.query(`
       ALTER TABLE \`${tableName}\`
       ADD CONSTRAINT \`${fkName}\`
@@ -1173,14 +1569,75 @@ async function addForeignKeyWithIndex(config, dbname, tableName, columnName, ref

     return true;
   } catch (err) {
-    const
-
-
-
+    const message = err?.message || "";
+
+    console.error("FK add error:", message);
+
+    if (message.toLowerCase().includes("duplicate")) {
+      return false; // already exists
     }
+
     return null;
   } finally {
-    if (connection)
+    if (connection) {
+      await connection.end();
+    }
+  }
+}
+async function addForeignKey(config, data) {
+  const {
+    database,
+    table,
+    column,
+    refTable,
+    refColumn,
+    onDelete = "RESTRICT",
+    onUpdate = "RESTRICT"
+  } = data;
+
+  const connection = await mysql.createConnection({
+    ...config,
+    database
+  });
+
+  try {
+    // 1. Ensure column index exists (required for FK)
+    const [indexes] = await connection.execute(
+      `
+      SHOW INDEX
+      FROM \`${table}\`
+      WHERE Column_name = ?
+      `,
+      [column]
+    );
+
+    if (indexes.length === 0) {
+      await connection.execute(
+        `ALTER TABLE \`${table}\` ADD INDEX (\`${column}\`)`
+      );
+    }
+
+    // 2. Generate constraint name
+    const constraintName = `fk_${table}_${column}_${refTable}_${refColumn}`;
+
+    // 3. Add foreign key
+    const sql = `
+      ALTER TABLE \`${table}\`
+      ADD CONSTRAINT \`${constraintName}\`
+      FOREIGN KEY (\`${column}\`)
+      REFERENCES \`${refTable}\` (\`${refColumn}\`)
+      ON DELETE ${onDelete}
+      ON UPDATE ${onUpdate}
+    `;
+
+    await connection.execute(sql);
+
+    return {
+      success: true,
+      constraint: constraintName
+    };
+  } finally {
+    await connection.end();
   }
 }
 async function removeForeignKeyFromColumn(config, databaseName, tableName, columnName) {
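The new addForeignKey() takes a single data object, creates a supporting index when the column has none, and returns the generated constraint name. Unlike most helpers in this file it does not swallow errors, so callers should catch. A minimal call sketch with illustrative names:

    try {
      const res = await addForeignKey(config, {
        database: "shop",
        table: "orders",
        column: "customer_id",
        refTable: "customers",
        refColumn: "id",
        onDelete: "CASCADE"   // onUpdate defaults to "RESTRICT"
      });
      // res -> { success: true, constraint: "fk_orders_customer_id_customers_id" }
    } catch (err) {
      // duplicate constraint, missing table, etc.
    }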
@@ -1377,6 +1834,7 @@ async function runQuery(config, databaseName, queryText) {
   let connection;
   try {
     if (!queryText || typeof queryText !== "string") return null;
+    console.log("Database:", cstyler.hex("#00d9ffff")(databaseName), "Running query: ", cstyler.green(queryText));

     connection = await mysql.createConnection({
       ...config,
@@ -1401,6 +1859,7 @@ module.exports = {
   isNumber,
   getDateTime,
   removefromarray,
+  isSameArray,
   getMySQLVersion,
   isMySQL578OrAbove,
   isValidMySQLConfig,
@@ -1428,10 +1887,14 @@ module.exports = {
   getColumnNames,
   getDatabaseCharsetAndCollation,
   getColumnDetails,
+  inspectColumnConstraint,
+  checkIndexExists,
   columnHasKey,
   getForeignKeyDetails,
   getAllForeignKeyDetails,
+  findReferencingFromColumns,
   addForeignKeyWithIndex,
+  addForeignKey,
   removeForeignKeyFromColumn,
   removeForeignKeyConstraintFromColumn,
   columnExists,