forge-sql-orm 2.0.17 → 2.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +95 -4
- package/dist/ForgeSQLORM.js +382 -60
- package/dist/ForgeSQLORM.js.map +1 -1
- package/dist/ForgeSQLORM.mjs +382 -60
- package/dist/ForgeSQLORM.mjs.map +1 -1
- package/dist/core/ForgeSQLAnalyseOperations.d.ts +250 -0
- package/dist/core/ForgeSQLAnalyseOperations.d.ts.map +1 -0
- package/dist/core/ForgeSQLCrudOperations.d.ts +1 -1
- package/dist/core/ForgeSQLCrudOperations.d.ts.map +1 -1
- package/dist/core/ForgeSQLORM.d.ts +12 -2
- package/dist/core/ForgeSQLORM.d.ts.map +1 -1
- package/dist/core/ForgeSQLQueryBuilder.d.ts +112 -21
- package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
- package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
- package/dist/core/SystemTables.d.ts +167 -0
- package/dist/core/SystemTables.d.ts.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/utils/forgeDriverProxy.d.ts +11 -0
- package/dist/utils/forgeDriverProxy.d.ts.map +1 -0
- package/dist/utils/sqlHints.d.ts +21 -0
- package/dist/utils/sqlHints.d.ts.map +1 -0
- package/dist/utils/sqlUtils.d.ts +2 -8
- package/dist/utils/sqlUtils.d.ts.map +1 -1
- package/dist/webtriggers/applyMigrationsWebTrigger.d.ts.map +1 -1
- package/dist/webtriggers/dropMigrationWebTrigger.d.ts +2 -4
- package/dist/webtriggers/dropMigrationWebTrigger.d.ts.map +1 -1
- package/package.json +4 -12
- package/src/core/ForgeSQLAnalyseOperations.ts +461 -0
- package/src/core/ForgeSQLCrudOperations.ts +15 -8
- package/src/core/ForgeSQLORM.ts +46 -9
- package/src/core/ForgeSQLQueryBuilder.ts +129 -32
- package/src/core/ForgeSQLSelectOperations.ts +4 -6
- package/src/core/SystemTables.ts +175 -0
- package/src/index.ts +1 -0
- package/src/utils/forgeDriverProxy.ts +27 -0
- package/src/utils/sqlHints.ts +63 -0
- package/src/utils/sqlUtils.ts +36 -32
- package/src/webtriggers/applyMigrationsWebTrigger.ts +32 -16
- package/src/webtriggers/dropMigrationWebTrigger.ts +5 -6
- package/src/webtriggers/fetchSchemaWebTrigger.ts +2 -10
package/dist/ForgeSQLORM.js
CHANGED
@@ -9,16 +9,23 @@ const mysqlCore = require("drizzle-orm/mysql-core");
 const moment$1 = require("moment/moment.js");
 const table = require("drizzle-orm/table");
 const parseDateTime = (value, format) => {
+  let result;
   const m = moment(value, format, true);
   if (!m.isValid()) {
-
+    const momentDate = moment(value);
+    if (momentDate.isValid()) {
+      result = momentDate.toDate();
+    } else {
+      result = new Date(value);
+    }
+  } else {
+    result = m.toDate();
+  }
+  if (isNaN(result.getTime())) {
+    result = new Date(value);
   }
-  return
+  return result;
 };
-function extractAlias(query) {
-  const match = query.match(/\bas\s+(['"`]?)([\w*]+)\1$/i);
-  return match ? match[2] : query;
-}
 function getPrimaryKeys(table2) {
   const { columns, primaryKeys } = getTableMetadata(table2);
   const columnPrimaryKeys = Object.entries(columns).filter(([, column]) => column.primary);
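The reworked parseDateTime now falls through three stages: strict parsing against the expected format, lenient moment parsing, then the native Date constructor (the bundled version adds a final isNaN guard that falls back to new Date once more). A minimal sketch of that fallback order, written as a standalone helper for illustration rather than the bundled code itself:

```typescript
import moment from "moment";

// Illustrative restatement of the new fallback chain in parseDateTime.
function parseDateTimeSketch(value: string, format: string): Date {
  const strict = moment(value, format, true); // 1. strict match against the expected format
  if (strict.isValid()) return strict.toDate();
  const lenient = moment(value);              // 2. lenient moment parsing
  if (lenient.isValid()) return lenient.toDate();
  return new Date(value);                     // 3. last resort: native Date constructor
}

// "2024-01-05T10:15:00.000" matches strictly; "2024/01/05" only resolves via the fallbacks.
```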
@@ -57,7 +64,7 @@ function processForeignKeys(table2, foreignKeysSymbol, extraSymbol) {
   const configBuilderData = extraConfigBuilder(table2);
   if (configBuilderData) {
     const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
-      (item) => item.value
+      (item) => item.value ?? item
     );
     configBuilders.forEach((builder) => {
       if (!builder?.constructor) return;
@@ -92,7 +99,7 @@ function getTableMetadata(table2) {
   const configBuilderData = extraConfigBuilder(table2);
   if (configBuilderData) {
     const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
-      (item) => item.value
+      (item) => item.value ?? item
     );
     configBuilders.forEach((builder) => {
       if (!builder?.constructor) return;
@@ -122,13 +129,9 @@ function getTableMetadata(table2) {
 }
 function generateDropTableStatements(tables) {
   const dropStatements = [];
-  tables.forEach((
-
-    if (tableMetadata.tableName) {
-      dropStatements.push(`DROP TABLE IF EXISTS \`${tableMetadata.tableName}\`;`);
-    }
+  tables.forEach((tableName) => {
+    dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
   });
-  dropStatements.push(`DELETE FROM __migrations;`);
   return dropStatements;
 }
 function mapSelectTableToAlias(table2, uniqPrefix, aliasMap) {
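generateDropTableStatements now takes plain table names (as produced by the getTables() helper added later in this file) instead of table metadata, and no longer appends the DELETE FROM __migrations statement itself. A hedged sketch of what it yields for a hypothetical table list:

```typescript
// Hypothetical table names; the real list comes from `SHOW TABLES` via getTables().
const tables = ["users", "orders"];

const dropStatements = tables.map(
  (tableName) => `DROP TABLE IF EXISTS \`${tableName}\`;`,
);
// ["DROP TABLE IF EXISTS `users`;", "DROP TABLE IF EXISTS `orders`;"]
```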
@@ -186,9 +189,9 @@ function getAliasFromDrizzleAlias(value) {
   const aliasNameChunk = queryChunks[queryChunks.length - 2];
   if (sql.isSQLWrapper(aliasNameChunk) && "queryChunks" in aliasNameChunk) {
     const aliasNameChunkSql = aliasNameChunk;
-    if (aliasNameChunkSql
+    if (aliasNameChunkSql.queryChunks?.length === 1 && aliasNameChunkSql.queryChunks[0]) {
       const queryChunksStringChunc = aliasNameChunkSql.queryChunks[0];
-      if (
+      if ("value" in queryChunksStringChunc) {
         const values = queryChunksStringChunc.value;
         if (values && values.length === 1) {
           return values[0];
@@ -240,7 +243,10 @@ function applyFromDriverTransform(rows, selections, aliasMap) {
   });
 }
 function processNullBranches(obj) {
-  if (obj === null || typeof obj !== "object"
+  if (obj === null || typeof obj !== "object") {
+    return obj;
+  }
+  if (obj.constructor && obj.constructor.name !== "Object") {
     return obj;
   }
   const result = {};
@@ -250,7 +256,7 @@ function processNullBranches(obj) {
       result[key] = null;
       continue;
     }
-    if (typeof value === "object"
+    if (typeof value === "object") {
       const processed = processNullBranches(value);
       result[key] = processed;
       if (processed !== null) {
@@ -438,7 +444,7 @@ class ForgeSQLCrudOperations {
     let fieldName = versionMetadata.fieldName;
     let versionField = columns[versionMetadata.fieldName];
     if (!versionField) {
-      const find = Object.entries(columns).find(([
+      const find = Object.entries(columns).find(([, c]) => c.name === versionMetadata.fieldName);
       if (find) {
         fieldName = find[0];
         versionField = find[1];
@@ -500,12 +506,20 @@ class ForgeSQLCrudOperations {
    */
   prepareModelWithVersion(model, versionMetadata, columns) {
     if (!versionMetadata || !columns) return model;
-
+    let fieldName = versionMetadata.fieldName;
+    let versionField = columns[versionMetadata.fieldName];
+    if (!versionField) {
+      const find = Object.entries(columns).find(([, c]) => c.name === versionMetadata.fieldName);
+      if (find) {
+        fieldName = find[0];
+        versionField = find[1];
+      }
+    }
     if (!versionField) return model;
     const modelWithVersion = { ...model };
     const fieldType = versionField.getSQLType();
     const versionValue = fieldType === "datetime" || fieldType === "timestamp" ? /* @__PURE__ */ new Date() : 1;
-    modelWithVersion[
+    modelWithVersion[fieldName] = versionValue;
     return modelWithVersion;
   }
   /**
@@ -592,9 +606,8 @@ class ForgeSQLSelectOperations {
    */
   async executeRawSQL(query, params) {
     if (this.options.logRawSqlQuery) {
-
-
-      );
+      const paramsStr = params ? `, with params: ${JSON.stringify(params)}` : "";
+      console.debug(`Executing with SQL ${query}${paramsStr}`);
     }
     const sqlStatement = sql$1.sql.prepare(query);
     if (params) {
@@ -616,7 +629,7 @@ class ForgeSQLSelectOperations {
     }
     if (this.options.logRawSqlQuery) {
       console.debug(
-        `Executing Update with SQL ${query}` + params ? `, with params: ${JSON.stringify(params)}` : ""
+        `Executing Update with SQL ${query}` + (params ? `, with params: ${JSON.stringify(params)}` : "")
      );
     }
     const updateQueryResponseResults = await sqlStatement.execute();
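The update-logging change is an operator-precedence fix: without parentheses, `+` binds tighter than `?:`, so the whole concatenation became the ternary's condition and the SQL text was never printed. A minimal sketch of the difference:

```typescript
const query = "UPDATE users SET name = ?";
const params = ["bob"];

// Before: parsed as (`Executing Update with SQL ${query}` + params) ? suffix : ""
const before = `Executing Update with SQL ${query}` + params ? `, with params: ${JSON.stringify(params)}` : "";
// before === ", with params: [\"bob\"]"  — the query text is lost

// After: the ternary is parenthesised, so the suffix is appended to the message
const after = `Executing Update with SQL ${query}` + (params ? `, with params: ${JSON.stringify(params)}` : "");
// after === "Executing Update with SQL UPDATE users SET name = ?, with params: [\"bob\"]"
```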
@@ -648,6 +661,45 @@ const forgeDriver = async (query, params, method) => {
     throw error;
   }
 };
+function injectSqlHints(query, hints) {
+  if (!hints) {
+    return query;
+  }
+  const normalizedQuery = query.trim().toUpperCase();
+  let queryHints;
+  if (normalizedQuery.startsWith("SELECT")) {
+    queryHints = hints.select;
+  } else if (normalizedQuery.startsWith("INSERT")) {
+    queryHints = hints.insert;
+  } else if (normalizedQuery.startsWith("UPDATE")) {
+    queryHints = hints.update;
+  } else if (normalizedQuery.startsWith("DELETE")) {
+    queryHints = hints.delete;
+  }
+  if (!queryHints || queryHints.length === 0) {
+    return query;
+  }
+  const hintsString = queryHints.join(" ");
+  if (normalizedQuery.startsWith("SELECT")) {
+    return `SELECT /*+ ${hintsString} */ ${query.substring(6)}`;
+  } else if (normalizedQuery.startsWith("INSERT")) {
+    return `INSERT /*+ ${hintsString} */ ${query.substring(6)}`;
+  } else if (normalizedQuery.startsWith("UPDATE")) {
+    return `UPDATE /*+ ${hintsString} */ ${query.substring(6)}`;
+  } else if (normalizedQuery.startsWith("DELETE")) {
+    return `DELETE /*+ ${hintsString} */ ${query.substring(6)}`;
+  }
+  return query;
+}
+function createForgeDriverProxy(options, logRawSqlQuery) {
+  return async (query, params, method) => {
+    const modifiedQuery = injectSqlHints(query, options);
+    if (options && logRawSqlQuery && modifiedQuery !== query) {
+      console.warn("modified query: " + modifiedQuery);
+    }
+    return forgeDriver(modifiedQuery, params, method);
+  };
+}
 function createAliasedSelectBuilder(db, fields, selectFn) {
   const { selections, aliasMap } = mapSelectFieldsWithAlias(fields);
   const builder = selectFn(selections);
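The new driver proxy splices optimizer hints, configured per statement type, right after the leading keyword as a `/*+ ... */` comment before the statement reaches forgeDriver. A hedged sketch of the option shape and the resulting rewrite: the `hints` structure is inferred from injectSqlHints above, the hint strings are illustrative, and the public constructor is assumed to forward these options to the implementation (which reads `newOptions.hints` and `newOptions.logRawSqlQuery`):

```typescript
import ForgeSQLORM from "forge-sql-orm";

// Hypothetical configuration, keyed by statement type as injectSqlHints expects.
const orm = new ForgeSQLORM({
  logRawSqlQuery: true,
  hints: {
    select: ["MAX_EXECUTION_TIME(10000)"],
    update: ["NO_INDEX_MERGE()"],
  },
});

// With these hints active the proxy rewrites, for example,
//   "SELECT id, name FROM users WHERE id = ?"
// into
//   "SELECT /*+ MAX_EXECUTION_TIME(10000) */ id, name FROM users WHERE id = ?"
// and warns about the rewrite when raw SQL logging is enabled.
```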
@@ -691,11 +743,239 @@ function patchDbWithSelectAliased(db) {
   };
   return db;
 }
+class ForgeSQLAnalyseOperation {
+  forgeOperations;
+  /**
+   * Creates a new instance of ForgeSQLAnalizeOperation.
+   * @param {ForgeSqlOperation} forgeOperations - The ForgeSQL operations instance
+   */
+  constructor(forgeOperations) {
+    this.forgeOperations = forgeOperations;
+    this.mapToCamelCaseClusterStatement = this.mapToCamelCaseClusterStatement.bind(this);
+  }
+  /**
+   * Executes EXPLAIN on a raw SQL query.
+   * @param {string} query - The SQL query to analyze
+   * @param {unknown[]} bindParams - The query parameters
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explainRaw(query, bindParams) {
+    const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ${query}`, bindParams);
+    return results.map((row) => ({
+      id: row.id,
+      estRows: row.estRows,
+      actRows: row.actRows,
+      task: row.task,
+      accessObject: row["access object"],
+      executionInfo: row["execution info"],
+      operatorInfo: row["operator info"],
+      memory: row.memory,
+      disk: row.disk
+    }));
+  }
+  /**
+   * Executes EXPLAIN on a Drizzle query.
+   * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explain(query) {
+    const { sql: sql2, params } = query.toSQL();
+    return this.explainRaw(sql2, params);
+  }
+  /**
+   * Executes EXPLAIN ANALYZE on a raw SQL query.
+   * @param {string} query - The SQL query to analyze
+   * @param {unknown[]} bindParams - The query parameters
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explainAnalyzeRaw(query, bindParams) {
+    const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ANALYZE ${query}`, bindParams);
+    return results.map((row) => ({
+      id: row.id,
+      estRows: row.estRows,
+      actRows: row.actRows,
+      task: row.task,
+      accessObject: row["access object"],
+      executionInfo: row["execution info"],
+      operatorInfo: row["operator info"],
+      memory: row.memory,
+      disk: row.disk
+    }));
+  }
+  /**
+   * Executes EXPLAIN ANALYZE on a Drizzle query.
+   * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explainAnalyze(query) {
+    const { sql: sql2, params } = query.toSQL();
+    return this.explainAnalyzeRaw(sql2, params);
+  }
+  /**
+   * Decodes a query execution plan from its string representation.
+   * @param {string} input - The raw execution plan string
+   * @returns {ExplainAnalyzeRow[]} The decoded execution plan rows
+   */
+  decodedPlan(input) {
+    if (!input) {
+      return [];
+    }
+    const lines = input.trim().split("\n");
+    if (lines.length < 2) return [];
+    const headersRaw = lines[0].split(" ").map((h) => h.trim()).filter(Boolean);
+    const headers = headersRaw.map((h) => {
+      return h.replace(/\s+/g, " ").replace(/[-\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "").replace(/^./, (s) => s.toLowerCase());
+    });
+    return lines.slice(1).map((line) => {
+      const values = line.split(" ").map((s) => s.trim()).filter(Boolean);
+      const row = {};
+      headers.forEach((key, i) => {
+        row[key] = values[i] ?? "";
+      });
+      return row;
+    });
+  }
+  /**
+   * Normalizes a raw slow query row into a more structured format.
+   * @param {SlowQueryRaw} row - The raw slow query data
+   * @returns {SlowQueryNormalized} The normalized slow query data
+   */
+  normalizeSlowQuery(row) {
+    return {
+      time: row.Time,
+      txnStartTs: row.Txn_start_ts,
+      user: row.User,
+      host: row.Host,
+      connId: row.Conn_ID,
+      db: row.DB,
+      query: row.Query,
+      digest: row.Digest,
+      queryTime: row.Query_time,
+      compileTime: row.Compile_time,
+      optimizeTime: row.Optimize_time,
+      processTime: row.Process_time,
+      waitTime: row.Wait_time,
+      parseTime: row.Parse_time,
+      rewriteTime: row.Rewrite_time,
+      copTime: row.Cop_time,
+      copProcAvg: row.Cop_proc_avg,
+      copProcMax: row.Cop_proc_max,
+      copProcP90: row.Cop_proc_p90,
+      copProcAddr: row.Cop_proc_addr,
+      copWaitAvg: row.Cop_wait_avg,
+      copWaitMax: row.Cop_wait_max,
+      copWaitP90: row.Cop_wait_p90,
+      copWaitAddr: row.Cop_wait_addr,
+      memMax: row.Mem_max,
+      diskMax: row.Disk_max,
+      totalKeys: row.Total_keys,
+      processKeys: row.Process_keys,
+      requestCount: row.Request_count,
+      kvTotal: row.KV_total,
+      pdTotal: row.PD_total,
+      resultRows: row.Result_rows,
+      rocksdbBlockCacheHitCount: row.Rocksdb_block_cache_hit_count,
+      rocksdbBlockReadCount: row.Rocksdb_block_read_count,
+      rocksdbBlockReadByte: row.Rocksdb_block_read_byte,
+      plan: row.Plan,
+      binaryPlan: row.Binary_plan,
+      planDigest: row.Plan_digest,
+      parsedPlan: this.decodedPlan(row.Plan)
+    };
+  }
+  /**
+   * Builds a SQL query for retrieving cluster statement history.
+   * @param {string[]} tables - The tables to analyze
+   * @param {Date} [from] - The start date for the analysis
+   * @param {Date} [to] - The end date for the analysis
+   * @returns {string} The SQL query for cluster statement history
+   */
+  buildClusterStatementQuery(tables, from, to) {
+    const formatDateTime = (date) => moment(date).format("YYYY-MM-DDTHH:mm:ss.SSS");
+    const tableConditions = tables.map((table2) => `TABLE_NAMES LIKE CONCAT(SCHEMA_NAME, '.', '%', '${table2}', '%')`).join(" OR ");
+    const timeConditions = [];
+    if (from) {
+      timeConditions.push(`SUMMARY_BEGIN_TIME >= '${formatDateTime(from)}'`);
+    }
+    if (to) {
+      timeConditions.push(`SUMMARY_END_TIME <= '${formatDateTime(to)}'`);
+    }
+    let whereClauses;
+    if (tableConditions?.length) {
+      whereClauses = [tableConditions ? `(${tableConditions})` : "", ...timeConditions];
+    } else {
+      whereClauses = timeConditions;
+    }
+    return `
+      SELECT *
+      FROM (
+        SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY
+        UNION ALL
+        SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+      ) AS combined
+      ${whereClauses?.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""}
+    `;
+  }
+  /**
+   * Retrieves and analyzes slow queries from the database.
+   * @returns {Promise<SlowQueryNormalized[]>} The normalized slow query data
+   */
+  async analyzeSlowQueries() {
+    const results = await this.forgeOperations.fetch().executeRawSQL(`
+      SELECT *
+      FROM information_schema.slow_query
+      ORDER BY time DESC
+    `);
+    return results.map((row) => this.normalizeSlowQuery(row));
+  }
+  /**
+   * Converts a cluster statement row to camelCase format.
+   * @param {Record<string, any>} input - The input row data
+   * @returns {ClusterStatementRowCamelCase} The converted row data
+   */
+  mapToCamelCaseClusterStatement(input) {
+    if (!input) {
+      return {};
+    }
+    const result = {};
+    result.parsedPlan = this.decodedPlan(input["PLAN"] ?? "");
+    for (const key in input) {
+      const camelKey = key.toLowerCase().replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
+      result[camelKey] = input[key];
+    }
+    return result;
+  }
+  /**
+   * Analyzes query history for specific tables using raw table names.
+   * @param {string[]} tables - The table names to analyze
+   * @param {Date} [fromDate] - The start date for the analysis
+   * @param {Date} [toDate] - The end date for the analysis
+   * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
+   */
+  async analyzeQueriesHistoryRaw(tables, fromDate, toDate) {
+    const results = await this.forgeOperations.fetch().executeRawSQL(
+      this.buildClusterStatementQuery(tables ?? [], fromDate, toDate)
+    );
+    return results.map((r) => this.mapToCamelCaseClusterStatement(r));
+  }
+  /**
+   * Analyzes query history for specific tables using Drizzle table objects.
+   * @param {AnyMySqlTable[]} tables - The Drizzle table objects to analyze
+   * @param {Date} [fromDate] - The start date for the analysis
+   * @param {Date} [toDate] - The end date for the analysis
+   * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
+   */
+  async analyzeQueriesHistory(tables, fromDate, toDate) {
+    const tableNames = tables?.map((table$1) => table.getTableName(table$1)) ?? [];
+    return this.analyzeQueriesHistoryRaw(tableNames, fromDate, toDate);
+  }
+}
 class ForgeSQLORMImpl {
   static instance = null;
   drizzle;
   crudOperations;
   fetchOperations;
+  analyzeOperations;
   /**
    * Private constructor to enforce singleton behavior.
    * @param options - Options for configuring ForgeSQL ORM behavior.
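The new analysis operations become reachable through the analyze() accessors added in the hunks below. A hedged usage sketch for EXPLAIN and EXPLAIN ANALYZE: the default-export construction follows the package README, the table name is illustrative, and the stub query object simply satisfies the documented `{ toSQL: () => Query }` parameter shape:

```typescript
import ForgeSQLORM from "forge-sql-orm";

const orm = new ForgeSQLORM();

// Raw variant: plain SQL plus bind parameters.
const plan = await orm.analyze().explainRaw("SELECT * FROM users WHERE id = ?", [1]);

// Drizzle variant: anything exposing toSQL() is accepted; a literal stub keeps the sketch self-contained.
const query = { toSQL: () => ({ sql: "SELECT * FROM users WHERE id = ?", params: [1] }) };
const analyzed = await orm.analyze().explainAnalyze(query);
analyzed.forEach((row) => console.log(row.id, row.estRows, row.actRows, row.executionInfo));
```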
@@ -709,25 +989,32 @@ class ForgeSQLORMImpl {
       if (newOptions.logRawSqlQuery) {
         console.debug("Initializing ForgeSQLORM...");
       }
+      const proxiedDriver = createForgeDriverProxy(newOptions.hints, newOptions.logRawSqlQuery);
       this.drizzle = patchDbWithSelectAliased(
-        mysqlProxy.drizzle(
+        mysqlProxy.drizzle(proxiedDriver, { logger: newOptions.logRawSqlQuery })
       );
       this.crudOperations = new ForgeSQLCrudOperations(this, newOptions);
       this.fetchOperations = new ForgeSQLSelectOperations(newOptions);
+      this.analyzeOperations = new ForgeSQLAnalyseOperation(this);
     } catch (error) {
       console.error("ForgeSQLORM initialization failed:", error);
       throw error;
     }
   }
+  /**
+   * Create the modify operations instance.
+   * @returns modify operations.
+   */
+  modify() {
+    return this.crudOperations;
+  }
   /**
    * Returns the singleton instance of ForgeSQLORMImpl.
    * @param options - Options for configuring ForgeSQL ORM behavior.
    * @returns The singleton instance of ForgeSQLORMImpl.
    */
   static getInstance(options) {
-
-      ForgeSQLORMImpl.instance = new ForgeSQLORMImpl(options);
-    }
+    ForgeSQLORMImpl.instance ??= new ForgeSQLORMImpl(options);
     return ForgeSQLORMImpl.instance;
   }
   /**
@@ -735,7 +1022,7 @@ class ForgeSQLORMImpl {
    * @returns CRUD operations.
    */
   crud() {
-    return this.
+    return this.modify();
   }
   /**
    * Retrieves the fetch operations instance.
@@ -744,6 +1031,9 @@ class ForgeSQLORMImpl {
   fetch() {
     return this.fetchOperations;
   }
+  analyze() {
+    return this.analyzeOperations;
+  }
   /**
    * Returns a Drizzle query builder instance.
    *
@@ -831,7 +1121,7 @@ class ForgeSQLORM {
    *
    * @template TSelection - The type of the selected fields
    * @param {TSelection} fields - Object containing the fields to select, with table schemas as values
-   * @returns {MySqlSelectBuilder<TSelection,
+   * @returns {MySqlSelectBuilder<TSelection, MySqlRemotePreparedQueryHKT>} A distinct select query builder with unique field aliases
    * @throws {Error} If fields parameter is empty
    * @example
    * ```typescript
@@ -849,7 +1139,14 @@ class ForgeSQLORM {
    * @returns CRUD operations.
    */
   crud() {
-    return this.ormInstance.
+    return this.ormInstance.modify();
+  }
+  /**
+   * Proxies the `modify` method from `ForgeSQLORMImpl`.
+   * @returns Modify operations.
+   */
+  modify() {
+    return this.ormInstance.modify();
   }
   /**
    * Proxies the `fetch` method from `ForgeSQLORMImpl`.
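crud() is now a thin alias: both the implementation and the public wrapper delegate to the new modify() accessor, so existing callers keep working while new code can use the clearer name. A small hedged sketch of that equivalence:

```typescript
import ForgeSQLORM from "forge-sql-orm";

const orm = new ForgeSQLORM();

// Both accessors resolve to the same ForgeSQLCrudOperations instance;
// crud() is kept for compatibility and simply delegates to modify().
console.log(orm.crud() === orm.modify()); // true
```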
@@ -858,6 +1155,13 @@ class ForgeSQLORM {
   fetch() {
     return this.ormInstance.fetch();
   }
+  /**
+   * Provides query analysis capabilities including EXPLAIN ANALYZE and slow query analysis.
+   * @returns {SchemaAnalyzeForgeSql} Interface for analyzing query performance
+   */
+  analyze() {
+    return this.ormInstance.analyze();
+  }
   /**
    * Returns a Drizzle query builder instance.
    *
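Beyond EXPLAIN, the analyze() surface exposes slow-query and statement-history helpers backed by TiDB's information_schema tables. A hedged usage sketch: the table name and time range are illustrative, and the camelCased result keys (digestText, execCount, and so on) follow TiDB's statements-summary column names as mapped by mapToCamelCaseClusterStatement:

```typescript
import ForgeSQLORM from "forge-sql-orm";

const orm = new ForgeSQLORM();

// Rows from information_schema.slow_query, normalized to camelCase (queryTime, digest, parsedPlan, ...).
const slow = await orm.analyze().analyzeSlowQueries();
slow.slice(0, 5).forEach((q) => console.log(q.queryTime, q.digest, q.query));

// Statement-summary history for particular tables over the last 24 hours.
const since = new Date(Date.now() - 24 * 60 * 60 * 1000);
const history = await orm.analyze().analyzeQueriesHistoryRaw(["users"], since, new Date());
history.forEach((h) => console.log(h.digestText, h.execCount));
```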
@@ -888,7 +1192,7 @@ const forgeTimestampString = mysqlCore.customType({
     return "timestamp";
   },
   toDriver(value) {
-    return moment$1(value).format("YYYY-MM-DDTHH:mm:ss.SSS");
+    return moment$1(new Date(value)).format("YYYY-MM-DDTHH:mm:ss.SSS");
   },
   fromDriver(value) {
     const format = "YYYY-MM-DDTHH:mm:ss.SSS";
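forgeTimestampString's toDriver now routes its input through new Date() before formatting, so both Date objects and date strings serialize to the millisecond-precision format the driver expects. A small hedged sketch of declaring a column with this custom type (table and column names are hypothetical):

```typescript
import { mysqlTable, varchar } from "drizzle-orm/mysql-core";
import { forgeTimestampString } from "forge-sql-orm";

// Hypothetical schema; createdAt can be written from either a Date or an ISO string.
export const auditLog = mysqlTable("audit_log", {
  id: varchar("id", { length: 36 }).primaryKey(),
  createdAt: forgeTimestampString("created_at").notNull(),
});
```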
@@ -918,8 +1222,19 @@ const forgeTimeString = mysqlCore.customType({
     return parseDateTime(value, "HH:mm:ss.SSS");
   }
 });
-
+const migrations = mysqlCore.mysqlTable("__migrations", {
+  id: mysqlCore.bigint("id", { mode: "number" }).primaryKey().autoincrement(),
+  name: mysqlCore.varchar("name", { length: 255 }).notNull(),
+  migratedAt: mysqlCore.timestamp("migratedAt").defaultNow().notNull()
+});
+async function getTables() {
+  const tables = await sql$1.sql.executeDDL("SHOW TABLES");
+  return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
+}
+const forgeSystemTables = [migrations];
+async function dropSchemaMigrations() {
   try {
+    const tables = await getTables();
     const dropStatements = generateDropTableStatements(tables);
     for (const statement of dropStatements) {
       console.warn(statement);
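The __migrations table schema, getTables(), and the forgeSystemTables list now live at module scope and are exported, and dropSchemaMigrations discovers the tables itself via SHOW TABLES rather than taking them as a parameter. A hedged sketch of wiring these exports into Forge web trigger handlers (handler names and the surrounding manifest setup are assumptions):

```typescript
import { dropSchemaMigrations, getTables } from "forge-sql-orm";

// List user tables: plain names straight from `SHOW TABLES`.
export const fetchTables = async () => {
  const tables = await getTables();
  console.log("Tables:", tables);
};

// Drop every discovered table; per the warning it logs, all data is permanently deleted.
export const dropAllTables = async () => dropSchemaMigrations();
```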
@@ -930,32 +1245,41 @@ async function dropSchemaMigrations(tables) {
       "⚠️ All data in these tables has been permanently deleted. This operation cannot be undone."
     );
   } catch (error) {
+    console.error(error);
     const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
     return getHttpResponse(500, errorMessage);
   }
 }
 const applySchemaMigrations = async (migration) => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  try {
+    if (typeof migration !== "function") {
+      throw new Error("migration is not a function");
+    }
+    console.log("Provisioning the database");
+    await sql$1.sql._provision();
+    console.info("Running schema migrations");
+    const migrations2 = await migration(sql$1.migrationRunner);
+    const successfulMigrations = await migrations2.run();
+    console.info("Migrations applied:", successfulMigrations);
+    const migrationList = await sql$1.migrationRunner.list();
+    const migrationHistory = Array.isArray(migrationList) && migrationList.length > 0 ? migrationList.map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n") : "No migrations found";
+    console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
+    return {
+      headers: { "Content-Type": ["application/json"] },
+      statusCode: 200,
+      statusText: "OK",
+      body: "Migrations successfully executed"
+    };
+  } catch (error) {
+    console.error("Error during migration:", error);
+    return {
+      headers: { "Content-Type": ["application/json"] },
+      statusCode: 500,
+      statusText: "Internal Server Error",
+      body: error instanceof Error ? error.message : "Unknown error during migration"
+    };
+  }
 };
-const migrations = mysqlCore.mysqlTable("__migrations", {
-  id: mysqlCore.bigint("id", { mode: "number" }).primaryKey().autoincrement(),
-  name: mysqlCore.varchar("name", { length: 255 }).notNull(),
-  migratedAt: mysqlCore.timestamp("migratedAt").defaultNow().notNull()
-});
-const forgeSystemTables = [migrations];
 async function fetchSchemaWebTrigger() {
   try {
     const tables = await getTables();
@@ -968,14 +1292,10 @@ async function fetchSchemaWebTrigger() {
     return getHttpResponse(500, errorMessage);
   }
 }
-async function getTables() {
-  const tables = await sql$1.sql.executeDDL("SHOW TABLES");
-  return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
-}
 async function generateCreateTableStatements(tables) {
   const statements = [];
   for (const table2 of tables) {
-    const createTableResult = await sql$1.sql.executeDDL(`SHOW CREATE TABLE ${table2}`);
+    const createTableResult = await sql$1.sql.executeDDL(`SHOW CREATE TABLE "${table2}"`);
     const createTableStatements = createTableResult.rows.filter((row) => !isSystemTable(row.Table)).map((row) => formatCreateTableStatement(row["Create Table"]));
     statements.push(...createTableStatements);
   }
@@ -1010,19 +1330,21 @@ exports.applyFromDriverTransform = applyFromDriverTransform;
 exports.applySchemaMigrations = applySchemaMigrations;
 exports.default = ForgeSQLORM;
 exports.dropSchemaMigrations = dropSchemaMigrations;
-exports.extractAlias = extractAlias;
 exports.fetchSchemaWebTrigger = fetchSchemaWebTrigger;
 exports.forgeDateString = forgeDateString;
 exports.forgeDateTimeString = forgeDateTimeString;
 exports.forgeDriver = forgeDriver;
+exports.forgeSystemTables = forgeSystemTables;
 exports.forgeTimeString = forgeTimeString;
 exports.forgeTimestampString = forgeTimestampString;
 exports.generateDropTableStatements = generateDropTableStatements;
 exports.getHttpResponse = getHttpResponse;
 exports.getPrimaryKeys = getPrimaryKeys;
 exports.getTableMetadata = getTableMetadata;
+exports.getTables = getTables;
 exports.mapSelectAllFieldsToAlias = mapSelectAllFieldsToAlias;
 exports.mapSelectFieldsWithAlias = mapSelectFieldsWithAlias;
+exports.migrations = migrations;
 exports.parseDateTime = parseDateTime;
 exports.patchDbWithSelectAliased = patchDbWithSelectAliased;
 //# sourceMappingURL=ForgeSQLORM.js.map