forge-sql-orm 2.0.18 → 2.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +95 -4
- package/dist/ForgeSQLORM.js +314 -49
- package/dist/ForgeSQLORM.js.map +1 -1
- package/dist/ForgeSQLORM.mjs +314 -49
- package/dist/ForgeSQLORM.mjs.map +1 -1
- package/dist/core/ForgeSQLAnalyseOperations.d.ts +250 -0
- package/dist/core/ForgeSQLAnalyseOperations.d.ts.map +1 -0
- package/dist/core/ForgeSQLORM.d.ts +12 -2
- package/dist/core/ForgeSQLORM.d.ts.map +1 -1
- package/dist/core/ForgeSQLQueryBuilder.d.ts +105 -9
- package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
- package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
- package/dist/core/SystemTables.d.ts +167 -0
- package/dist/core/SystemTables.d.ts.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/utils/sqlUtils.d.ts +2 -2
- package/dist/utils/sqlUtils.d.ts.map +1 -1
- package/dist/webtriggers/applyMigrationsWebTrigger.d.ts.map +1 -1
- package/dist/webtriggers/dropMigrationWebTrigger.d.ts +2 -4
- package/dist/webtriggers/dropMigrationWebTrigger.d.ts.map +1 -1
- package/package.json +4 -12
- package/src/core/ForgeSQLAnalyseOperations.ts +461 -0
- package/src/core/ForgeSQLORM.ts +43 -7
- package/src/core/ForgeSQLQueryBuilder.ts +121 -18
- package/src/core/ForgeSQLSelectOperations.ts +4 -6
- package/src/core/SystemTables.ts +175 -0
- package/src/index.ts +1 -0
- package/src/utils/forgeDriverProxy.ts +1 -1
- package/src/utils/sqlUtils.ts +10 -16
- package/src/webtriggers/applyMigrationsWebTrigger.ts +32 -16
- package/src/webtriggers/dropMigrationWebTrigger.ts +5 -6
- package/src/webtriggers/fetchSchemaWebTrigger.ts +2 -10
package/README.md
CHANGED
@@ -1,11 +1,23 @@
 # Forge SQL ORM
 
+[](https://www.npmjs.com/package/forge-sql-orm)
+[](https://www.npmjs.com/package/forge-sql-orm)
+[](https://www.npmjs.com/package/forge-sql-orm-cli)
+[](https://www.npmjs.com/package/forge-sql-orm-cli)
+
+[](https://github.com/vzakharchenko/forge-sql-orm/blob/master/LICENSE)
+
 [](https://github.com/vzakharchenko/forge-sql-orm/actions/workflows/node.js.yml)
+[](https://coveralls.io/github/vzakharchenko/forge-sql-orm?branch=master)
+[](https://sonarcloud.io/summary/new_code?id=vzakharchenko_forge-sql-orm)
+[](https://deepscan.io/dashboard#view=project&tid=26652&pid=29272&bid=940614)
+
 
 **Forge-SQL-ORM** is an ORM designed for working with [@forge/sql](https://developer.atlassian.com/platform/forge/storage-reference/sql-tutorial/) in **Atlassian Forge**. It is built on top of [Drizzle ORM](https://orm.drizzle.team) and provides advanced capabilities for working with relational databases inside Forge.
 
 ## Key Features
 - ✅ **Custom Drizzle Driver** for direct integration with @forge/sql
+- ✅ **Type-Safe Query Building**: Write SQL queries with full TypeScript support
 - ✅ **Supports complex SQL queries** with joins and filtering using Drizzle ORM
 - ✅ **Schema migration support**, allowing automatic schema evolution
 - ✅ **Automatic entity generation** from MySQL/tidb databases
@@ -14,7 +26,7 @@
 - ✅ **Schema Fetching** Development-only web trigger to retrieve current database schema and generate SQL statements for schema recreation
 - ✅ **Ready-to-use Migration Triggers** Built-in web triggers for applying migrations, dropping tables (development-only), and fetching schema (development-only) with proper error handling and security controls
 - ✅ **Optimistic Locking** Ensures data consistency by preventing conflicts when multiple users update the same record
-- ✅ **
+- ✅ **Query Plan Analysis**: Detailed execution plan analysis and optimization insights (Performance analysis and Troubleshooting only)
 
 ## Usage Approaches
 
@@ -587,15 +599,14 @@ This trigger allows you to completely reset your database schema. It's useful fo
 - Testing scenarios requiring a clean database
 - Resetting the database before applying new migrations
 
-**Important**: The trigger will
+**Important**: The trigger will drop all tables including migration.
 
 ```typescript
 // Example usage in your Forge app
 import { dropSchemaMigrations } from "forge-sql-orm";
-import * as schema from "./entities/schema";
 
 export const dropMigrations = () => {
-  return dropSchemaMigrations(
+  return dropSchemaMigrations();
 };
 ```
 
@@ -671,6 +682,86 @@ SET foreign_key_checks = 1;
 - Use these triggers as part of your deployment pipeline
 - Monitor the execution logs in the Forge Developer Console
 
+## Query Analysis and Performance Optimization
+
+⚠️ **IMPORTANT NOTE**: The query analysis features described below are experimental and should be used only for troubleshooting purposes. These features rely on TiDB's `information_schema` and `performance_schema` which may change in future updates. As of April 2025, these features are available but their future availability is not guaranteed.
+
+### About Atlassian's Built-in Analysis Tools
+
+Atlassian already provides comprehensive query analysis tools in the development console, including:
+- Basic query performance metrics
+- Slow query tracking (queries over 500ms)
+- Basic execution statistics
+- Query history and patterns
+
+Our analysis tools are designed to complement these built-in features by providing additional insights directly from TiDB's system schemas. However, they should be used with caution and only for troubleshooting purposes.
+
+### Usage Guidelines
+
+1. **Development and Troubleshooting Only**
+   - These tools should not be used in production code
+   - Intended only for development and debugging
+   - Use for identifying and fixing performance issues
+
+2. **Schema Stability**
+   - Features rely on TiDB's `information_schema` and `performance_schema`
+   - Schema structure may change in future TiDB updates
+   - No guarantee of long-term availability
+
+3. **Current Availability (April 2025)**
+   - `information_schema` based analysis is currently functional
+   - Query plan analysis is available
+   - Performance metrics collection is working
+
+### Available Analysis Tools
+
+```typescript
+import ForgeSQL from "forge-sql-orm";
+
+const forgeSQL = new ForgeSQL();
+const analyzeForgeSql = forgeSQL.analyze();
+```
+
+#### Query Plan Analysis
+
+⚠️ **For Troubleshooting Only**: This feature should only be used during development and debugging sessions.
+
+```typescript
+// Example usage for troubleshooting a specific query
+const forgeSQL = new ForgeSQL();
+const analyzeForgeSql = forgeSQL.analyze();
+
+// Analyze a Drizzle query
+const plan = await analyzeForgeSql.explain(
+  forgeSQL.select({
+    table1: testEntityJoin1,
+    table2: { name: testEntityJoin2.name, email: testEntityJoin2.email },
+    count: rawSql<number>`COUNT(*)`,
+    table3: {
+      table12: testEntityJoin1.name,
+      table22: testEntityJoin2.email,
+      table32: testEntity.id
+    },
+  })
+    .from(testEntityJoin1)
+    .innerJoin(testEntityJoin2, eq(testEntityJoin1.id, testEntityJoin2.id))
+);
+
+// Analyze a raw SQL query
+const rawPlan = await analyzeForgeSql.explainRaw(
+  "SELECT * FROM users WHERE id = ?",
+  [1]
+);
+```
+
+This analysis helps you understand:
+- How the database executes your query
+- Which indexes are being used
+- Estimated vs actual row counts
+- Resource usage at each step
+- Potential performance bottlenecks
+
+
 ## License
 This project is licensed under the **MIT License**.
 Feel free to use it for commercial and personal projects.
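The added README section above only demonstrates `explain` and `explainRaw`, while the bundled `ForgeSQLAnalyseOperation` class (see the dist diff below) also exposes `explainAnalyze`, `explainAnalyzeRaw`, `analyzeSlowQueries`, and `analyzeQueriesHistory`. A minimal, hedged sketch of how those extra methods might be called, assuming the same `forgeSQL.analyze()` entry point and a hypothetical `users` Drizzle table:

```typescript
import ForgeSQL from "forge-sql-orm";
import { users } from "./entities/schema"; // hypothetical Drizzle table, for illustration only

const forgeSQL = new ForgeSQL();
const analyze = forgeSQL.analyze();

// EXPLAIN ANALYZE actually executes the statement and returns per-operator rows
// (estRows, actRows, executionInfo, memory, disk, ...).
const analyzed = await analyze.explainAnalyzeRaw("SELECT * FROM users WHERE id = ?", [1]);

// Recent entries from information_schema.slow_query, normalized to camelCase fields.
const slowQueries = await analyze.analyzeSlowQueries();

// Statement-summary history for specific tables over an optional time window.
const history = await analyze.analyzeQueriesHistory(
  [users],
  new Date(Date.now() - 60 * 60 * 1000), // from: one hour ago
  new Date(),                            // to: now
);
```

As with the README example, these calls are intended for development-time troubleshooting only.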
package/dist/ForgeSQLORM.js
CHANGED
@@ -64,7 +64,7 @@ function processForeignKeys(table2, foreignKeysSymbol, extraSymbol) {
   const configBuilderData = extraConfigBuilder(table2);
   if (configBuilderData) {
     const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
-      (item) => item.value
+      (item) => item.value ?? item
     );
     configBuilders.forEach((builder) => {
       if (!builder?.constructor) return;
@@ -99,7 +99,7 @@ function getTableMetadata(table2) {
   const configBuilderData = extraConfigBuilder(table2);
   if (configBuilderData) {
     const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
-      (item) => item.value
+      (item) => item.value ?? item
     );
     configBuilders.forEach((builder) => {
       if (!builder?.constructor) return;
@@ -129,13 +129,9 @@ function getTableMetadata(table2) {
 }
 function generateDropTableStatements(tables) {
   const dropStatements = [];
-  tables.forEach((
-
-    if (tableMetadata.tableName) {
-      dropStatements.push(`DROP TABLE IF EXISTS \`${tableMetadata.tableName}\`;`);
-    }
+  tables.forEach((tableName) => {
+    dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
   });
-  dropStatements.push(`DELETE FROM __migrations;`);
   return dropStatements;
 }
 function mapSelectTableToAlias(table2, uniqPrefix, aliasMap) {
@@ -193,9 +189,9 @@ function getAliasFromDrizzleAlias(value) {
   const aliasNameChunk = queryChunks[queryChunks.length - 2];
   if (sql.isSQLWrapper(aliasNameChunk) && "queryChunks" in aliasNameChunk) {
     const aliasNameChunkSql = aliasNameChunk;
-    if (aliasNameChunkSql
+    if (aliasNameChunkSql.queryChunks?.length === 1 && aliasNameChunkSql.queryChunks[0]) {
       const queryChunksStringChunc = aliasNameChunkSql.queryChunks[0];
-      if (
+      if ("value" in queryChunksStringChunc) {
         const values = queryChunksStringChunc.value;
         if (values && values.length === 1) {
           return values[0];
@@ -247,7 +243,7 @@ function applyFromDriverTransform(rows, selections, aliasMap) {
   });
 }
 function processNullBranches(obj) {
-  if (obj === null || typeof obj !== "object"
+  if (obj === null || typeof obj !== "object") {
     return obj;
   }
   if (obj.constructor && obj.constructor.name !== "Object") {
@@ -260,7 +256,7 @@ function processNullBranches(obj) {
       result[key] = null;
       continue;
     }
-    if (typeof value === "object"
+    if (typeof value === "object") {
       const processed = processNullBranches(value);
       result[key] = processed;
       if (processed !== null) {
@@ -610,9 +606,8 @@ class ForgeSQLSelectOperations {
    */
   async executeRawSQL(query, params) {
     if (this.options.logRawSqlQuery) {
-
-
-      );
+      const paramsStr = params ? `, with params: ${JSON.stringify(params)}` : "";
+      console.debug(`Executing with SQL ${query}${paramsStr}`);
     }
     const sqlStatement = sql$1.sql.prepare(query);
     if (params) {
@@ -634,7 +629,7 @@ class ForgeSQLSelectOperations {
     }
     if (this.options.logRawSqlQuery) {
       console.debug(
-        `Executing Update with SQL ${query}` + params ? `, with params: ${JSON.stringify(params)}` : ""
+        `Executing Update with SQL ${query}` + (params ? `, with params: ${JSON.stringify(params)}` : "")
       );
     }
     const updateQueryResponseResults = await sqlStatement.execute();
@@ -699,7 +694,7 @@ function injectSqlHints(query, hints) {
 function createForgeDriverProxy(options, logRawSqlQuery) {
   return async (query, params, method) => {
     const modifiedQuery = injectSqlHints(query, options);
-    if (options && logRawSqlQuery) {
+    if (options && logRawSqlQuery && modifiedQuery !== query) {
       console.warn("modified query: " + modifiedQuery);
     }
     return forgeDriver(modifiedQuery, params, method);
@@ -748,11 +743,239 @@ function patchDbWithSelectAliased(db) {
   };
   return db;
 }
+class ForgeSQLAnalyseOperation {
+  forgeOperations;
+  /**
+   * Creates a new instance of ForgeSQLAnalizeOperation.
+   * @param {ForgeSqlOperation} forgeOperations - The ForgeSQL operations instance
+   */
+  constructor(forgeOperations) {
+    this.forgeOperations = forgeOperations;
+    this.mapToCamelCaseClusterStatement = this.mapToCamelCaseClusterStatement.bind(this);
+  }
+  /**
+   * Executes EXPLAIN on a raw SQL query.
+   * @param {string} query - The SQL query to analyze
+   * @param {unknown[]} bindParams - The query parameters
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explainRaw(query, bindParams) {
+    const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ${query}`, bindParams);
+    return results.map((row) => ({
+      id: row.id,
+      estRows: row.estRows,
+      actRows: row.actRows,
+      task: row.task,
+      accessObject: row["access object"],
+      executionInfo: row["execution info"],
+      operatorInfo: row["operator info"],
+      memory: row.memory,
+      disk: row.disk
+    }));
+  }
+  /**
+   * Executes EXPLAIN on a Drizzle query.
+   * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explain(query) {
+    const { sql: sql2, params } = query.toSQL();
+    return this.explainRaw(sql2, params);
+  }
+  /**
+   * Executes EXPLAIN ANALYZE on a raw SQL query.
+   * @param {string} query - The SQL query to analyze
+   * @param {unknown[]} bindParams - The query parameters
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explainAnalyzeRaw(query, bindParams) {
+    const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ANALYZE ${query}`, bindParams);
+    return results.map((row) => ({
+      id: row.id,
+      estRows: row.estRows,
+      actRows: row.actRows,
+      task: row.task,
+      accessObject: row["access object"],
+      executionInfo: row["execution info"],
+      operatorInfo: row["operator info"],
+      memory: row.memory,
+      disk: row.disk
+    }));
+  }
+  /**
+   * Executes EXPLAIN ANALYZE on a Drizzle query.
+   * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
+   * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+   */
+  async explainAnalyze(query) {
+    const { sql: sql2, params } = query.toSQL();
+    return this.explainAnalyzeRaw(sql2, params);
+  }
+  /**
+   * Decodes a query execution plan from its string representation.
+   * @param {string} input - The raw execution plan string
+   * @returns {ExplainAnalyzeRow[]} The decoded execution plan rows
+   */
+  decodedPlan(input) {
+    if (!input) {
+      return [];
+    }
+    const lines = input.trim().split("\n");
+    if (lines.length < 2) return [];
+    const headersRaw = lines[0].split(" ").map((h) => h.trim()).filter(Boolean);
+    const headers = headersRaw.map((h) => {
+      return h.replace(/\s+/g, " ").replace(/[-\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "").replace(/^./, (s) => s.toLowerCase());
+    });
+    return lines.slice(1).map((line) => {
+      const values = line.split(" ").map((s) => s.trim()).filter(Boolean);
+      const row = {};
+      headers.forEach((key, i) => {
+        row[key] = values[i] ?? "";
+      });
+      return row;
+    });
+  }
+  /**
+   * Normalizes a raw slow query row into a more structured format.
+   * @param {SlowQueryRaw} row - The raw slow query data
+   * @returns {SlowQueryNormalized} The normalized slow query data
+   */
+  normalizeSlowQuery(row) {
+    return {
+      time: row.Time,
+      txnStartTs: row.Txn_start_ts,
+      user: row.User,
+      host: row.Host,
+      connId: row.Conn_ID,
+      db: row.DB,
+      query: row.Query,
+      digest: row.Digest,
+      queryTime: row.Query_time,
+      compileTime: row.Compile_time,
+      optimizeTime: row.Optimize_time,
+      processTime: row.Process_time,
+      waitTime: row.Wait_time,
+      parseTime: row.Parse_time,
+      rewriteTime: row.Rewrite_time,
+      copTime: row.Cop_time,
+      copProcAvg: row.Cop_proc_avg,
+      copProcMax: row.Cop_proc_max,
+      copProcP90: row.Cop_proc_p90,
+      copProcAddr: row.Cop_proc_addr,
+      copWaitAvg: row.Cop_wait_avg,
+      copWaitMax: row.Cop_wait_max,
+      copWaitP90: row.Cop_wait_p90,
+      copWaitAddr: row.Cop_wait_addr,
+      memMax: row.Mem_max,
+      diskMax: row.Disk_max,
+      totalKeys: row.Total_keys,
+      processKeys: row.Process_keys,
+      requestCount: row.Request_count,
+      kvTotal: row.KV_total,
+      pdTotal: row.PD_total,
+      resultRows: row.Result_rows,
+      rocksdbBlockCacheHitCount: row.Rocksdb_block_cache_hit_count,
+      rocksdbBlockReadCount: row.Rocksdb_block_read_count,
+      rocksdbBlockReadByte: row.Rocksdb_block_read_byte,
+      plan: row.Plan,
+      binaryPlan: row.Binary_plan,
+      planDigest: row.Plan_digest,
+      parsedPlan: this.decodedPlan(row.Plan)
+    };
+  }
+  /**
+   * Builds a SQL query for retrieving cluster statement history.
+   * @param {string[]} tables - The tables to analyze
+   * @param {Date} [from] - The start date for the analysis
+   * @param {Date} [to] - The end date for the analysis
+   * @returns {string} The SQL query for cluster statement history
+   */
+  buildClusterStatementQuery(tables, from, to) {
+    const formatDateTime = (date) => moment(date).format("YYYY-MM-DDTHH:mm:ss.SSS");
+    const tableConditions = tables.map((table2) => `TABLE_NAMES LIKE CONCAT(SCHEMA_NAME, '.', '%', '${table2}', '%')`).join(" OR ");
+    const timeConditions = [];
+    if (from) {
+      timeConditions.push(`SUMMARY_BEGIN_TIME >= '${formatDateTime(from)}'`);
+    }
+    if (to) {
+      timeConditions.push(`SUMMARY_END_TIME <= '${formatDateTime(to)}'`);
+    }
+    let whereClauses;
+    if (tableConditions?.length) {
+      whereClauses = [tableConditions ? `(${tableConditions})` : "", ...timeConditions];
+    } else {
+      whereClauses = timeConditions;
+    }
+    return `
+      SELECT *
+      FROM (
+        SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY
+        UNION ALL
+        SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+      ) AS combined
+      ${whereClauses?.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""}
+    `;
+  }
+  /**
+   * Retrieves and analyzes slow queries from the database.
+   * @returns {Promise<SlowQueryNormalized[]>} The normalized slow query data
+   */
+  async analyzeSlowQueries() {
+    const results = await this.forgeOperations.fetch().executeRawSQL(`
+      SELECT *
+      FROM information_schema.slow_query
+      ORDER BY time DESC
+    `);
+    return results.map((row) => this.normalizeSlowQuery(row));
+  }
+  /**
+   * Converts a cluster statement row to camelCase format.
+   * @param {Record<string, any>} input - The input row data
+   * @returns {ClusterStatementRowCamelCase} The converted row data
+   */
+  mapToCamelCaseClusterStatement(input) {
+    if (!input) {
+      return {};
+    }
+    const result = {};
+    result.parsedPlan = this.decodedPlan(input["PLAN"] ?? "");
+    for (const key in input) {
+      const camelKey = key.toLowerCase().replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
+      result[camelKey] = input[key];
+    }
+    return result;
+  }
+  /**
+   * Analyzes query history for specific tables using raw table names.
+   * @param {string[]} tables - The table names to analyze
+   * @param {Date} [fromDate] - The start date for the analysis
+   * @param {Date} [toDate] - The end date for the analysis
+   * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
+   */
+  async analyzeQueriesHistoryRaw(tables, fromDate, toDate) {
+    const results = await this.forgeOperations.fetch().executeRawSQL(
+      this.buildClusterStatementQuery(tables ?? [], fromDate, toDate)
+    );
+    return results.map((r) => this.mapToCamelCaseClusterStatement(r));
+  }
+  /**
+   * Analyzes query history for specific tables using Drizzle table objects.
+   * @param {AnyMySqlTable[]} tables - The Drizzle table objects to analyze
+   * @param {Date} [fromDate] - The start date for the analysis
+   * @param {Date} [toDate] - The end date for the analysis
+   * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
+   */
+  async analyzeQueriesHistory(tables, fromDate, toDate) {
+    const tableNames = tables?.map((table$1) => table.getTableName(table$1)) ?? [];
+    return this.analyzeQueriesHistoryRaw(tableNames, fromDate, toDate);
+  }
+}
 class ForgeSQLORMImpl {
   static instance = null;
   drizzle;
   crudOperations;
   fetchOperations;
+  analyzeOperations;
   /**
    * Private constructor to enforce singleton behavior.
    * @param options - Options for configuring ForgeSQL ORM behavior.
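For orientation, a short sketch of consuming the rows produced by the `analyzeSlowQueries`/`normalizeSlowQuery` pair in the hunk above; the field names follow the mapping in that class, while the threshold, the logging, and the decoded plan column names (`id`, `task`, `estRows`) are illustrative assumptions, not part of the diff:

```typescript
import ForgeSQL from "forge-sql-orm";

const forgeSQL = new ForgeSQL();

// Illustrative post-processing of the normalized slow-query rows.
const slowQueries = await forgeSQL.analyze().analyzeSlowQueries();
for (const q of slowQueries) {
  // queryTime comes from TiDB's Query_time column (seconds); 0.5 is an arbitrary example threshold.
  if (Number(q.queryTime) > 0.5) {
    console.warn(`slow query (${q.queryTime}s, mem ${q.memMax}): ${q.query}`);
    // parsedPlan is the plan text decoded by decodedPlan(), one object per plan row;
    // the keys depend on the plan header (typically id, task, estRows, ...).
    q.parsedPlan.forEach((step) => console.warn(step.id, step.task, step.estRows));
  }
}
```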
@@ -772,20 +995,26 @@ class ForgeSQLORMImpl {
       );
       this.crudOperations = new ForgeSQLCrudOperations(this, newOptions);
       this.fetchOperations = new ForgeSQLSelectOperations(newOptions);
+      this.analyzeOperations = new ForgeSQLAnalyseOperation(this);
     } catch (error) {
       console.error("ForgeSQLORM initialization failed:", error);
       throw error;
     }
   }
+  /**
+   * Create the modify operations instance.
+   * @returns modify operations.
+   */
+  modify() {
+    return this.crudOperations;
+  }
   /**
    * Returns the singleton instance of ForgeSQLORMImpl.
    * @param options - Options for configuring ForgeSQL ORM behavior.
    * @returns The singleton instance of ForgeSQLORMImpl.
    */
   static getInstance(options) {
-
-      ForgeSQLORMImpl.instance = new ForgeSQLORMImpl(options);
-    }
+    ForgeSQLORMImpl.instance ??= new ForgeSQLORMImpl(options);
     return ForgeSQLORMImpl.instance;
   }
   /**
@@ -793,7 +1022,7 @@ class ForgeSQLORMImpl {
    * @returns CRUD operations.
    */
   crud() {
-    return this.
+    return this.modify();
   }
   /**
    * Retrieves the fetch operations instance.
@@ -802,6 +1031,9 @@ class ForgeSQLORMImpl {
   fetch() {
     return this.fetchOperations;
   }
+  analyze() {
+    return this.analyzeOperations;
+  }
   /**
    * Returns a Drizzle query builder instance.
    *
@@ -889,7 +1121,7 @@ class ForgeSQLORM {
    *
    * @template TSelection - The type of the selected fields
    * @param {TSelection} fields - Object containing the fields to select, with table schemas as values
-   * @returns {MySqlSelectBuilder<TSelection,
+   * @returns {MySqlSelectBuilder<TSelection, MySqlRemotePreparedQueryHKT>} A distinct select query builder with unique field aliases
    * @throws {Error} If fields parameter is empty
    * @example
    * ```typescript
@@ -907,7 +1139,14 @@ class ForgeSQLORM {
    * @returns CRUD operations.
    */
   crud() {
-    return this.ormInstance.
+    return this.ormInstance.modify();
+  }
+  /**
+   * Proxies the `modify` method from `ForgeSQLORMImpl`.
+   * @returns Modify operations.
+   */
+  modify() {
+    return this.ormInstance.modify();
   }
   /**
    * Proxies the `fetch` method from `ForgeSQLORMImpl`.
@@ -916,6 +1155,13 @@ class ForgeSQLORM {
   fetch() {
     return this.ormInstance.fetch();
   }
+  /**
+   * Provides query analysis capabilities including EXPLAIN ANALYZE and slow query analysis.
+   * @returns {SchemaAnalyzeForgeSql} Interface for analyzing query performance
+   */
+  analyze() {
+    return this.ormInstance.analyze();
+  }
   /**
    * Returns a Drizzle query builder instance.
    *
@@ -976,8 +1222,19 @@ const forgeTimeString = mysqlCore.customType({
     return parseDateTime(value, "HH:mm:ss.SSS");
   }
 });
-
+const migrations = mysqlCore.mysqlTable("__migrations", {
+  id: mysqlCore.bigint("id", { mode: "number" }).primaryKey().autoincrement(),
+  name: mysqlCore.varchar("name", { length: 255 }).notNull(),
+  migratedAt: mysqlCore.timestamp("migratedAt").defaultNow().notNull()
+});
+async function getTables() {
+  const tables = await sql$1.sql.executeDDL("SHOW TABLES");
+  return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
+}
+const forgeSystemTables = [migrations];
+async function dropSchemaMigrations() {
   try {
+    const tables = await getTables();
     const dropStatements = generateDropTableStatements(tables);
     for (const statement of dropStatements) {
       console.warn(statement);
@@ -988,32 +1245,41 @@ async function dropSchemaMigrations(tables) {
       "⚠️ All data in these tables has been permanently deleted. This operation cannot be undone."
     );
   } catch (error) {
+    console.error(error);
     const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
     return getHttpResponse(500, errorMessage);
   }
 }
 const applySchemaMigrations = async (migration) => {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  try {
+    if (typeof migration !== "function") {
+      throw new Error("migration is not a function");
+    }
+    console.log("Provisioning the database");
+    await sql$1.sql._provision();
+    console.info("Running schema migrations");
+    const migrations2 = await migration(sql$1.migrationRunner);
+    const successfulMigrations = await migrations2.run();
+    console.info("Migrations applied:", successfulMigrations);
+    const migrationList = await sql$1.migrationRunner.list();
+    const migrationHistory = Array.isArray(migrationList) && migrationList.length > 0 ? migrationList.map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n") : "No migrations found";
+    console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
+    return {
+      headers: { "Content-Type": ["application/json"] },
+      statusCode: 200,
+      statusText: "OK",
+      body: "Migrations successfully executed"
+    };
+  } catch (error) {
+    console.error("Error during migration:", error);
+    return {
+      headers: { "Content-Type": ["application/json"] },
+      statusCode: 500,
+      statusText: "Internal Server Error",
+      body: error instanceof Error ? error.message : "Unknown error during migration"
+    };
+  }
 };
-const migrations = mysqlCore.mysqlTable("__migrations", {
-  id: mysqlCore.bigint("id", { mode: "number" }).primaryKey().autoincrement(),
-  name: mysqlCore.varchar("name", { length: 255 }).notNull(),
-  migratedAt: mysqlCore.timestamp("migratedAt").defaultNow().notNull()
-});
-const forgeSystemTables = [migrations];
 async function fetchSchemaWebTrigger() {
   try {
     const tables = await getTables();
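The rewritten `applySchemaMigrations` above expects a user-supplied function that receives the `@forge/sql` migration runner, returns it with steps enqueued, and is then executed via `.run()`. A hedged wiring sketch, assuming `applySchemaMigrations` is exported by the package as the README's migration-trigger docs suggest; the step name and DDL are hypothetical:

```typescript
import { applySchemaMigrations } from "forge-sql-orm";
import { migrationRunner } from "@forge/sql";

// Hypothetical migration definition: enqueue DDL steps on the runner the trigger passes in.
const migration = async (runner: typeof migrationRunner) =>
  runner.enqueue(
    "v001_create_users",
    "CREATE TABLE IF NOT EXISTS users (id INT PRIMARY KEY, name VARCHAR(255));",
  );

// Web trigger handler: provisions the database, runs pending migrations,
// and returns the HTTP-style response object built in the try/catch above.
export const handler = () => applySchemaMigrations(migration);
```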
@@ -1026,14 +1292,10 @@ async function fetchSchemaWebTrigger() {
     return getHttpResponse(500, errorMessage);
   }
 }
-async function getTables() {
-  const tables = await sql$1.sql.executeDDL("SHOW TABLES");
-  return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
-}
 async function generateCreateTableStatements(tables) {
   const statements = [];
   for (const table2 of tables) {
-    const createTableResult = await sql$1.sql.executeDDL(`SHOW CREATE TABLE ${table2}`);
+    const createTableResult = await sql$1.sql.executeDDL(`SHOW CREATE TABLE "${table2}"`);
     const createTableStatements = createTableResult.rows.filter((row) => !isSystemTable(row.Table)).map((row) => formatCreateTableStatement(row["Create Table"]));
     statements.push(...createTableStatements);
   }
@@ -1072,14 +1334,17 @@ exports.fetchSchemaWebTrigger = fetchSchemaWebTrigger;
 exports.forgeDateString = forgeDateString;
 exports.forgeDateTimeString = forgeDateTimeString;
 exports.forgeDriver = forgeDriver;
+exports.forgeSystemTables = forgeSystemTables;
 exports.forgeTimeString = forgeTimeString;
 exports.forgeTimestampString = forgeTimestampString;
 exports.generateDropTableStatements = generateDropTableStatements;
 exports.getHttpResponse = getHttpResponse;
 exports.getPrimaryKeys = getPrimaryKeys;
 exports.getTableMetadata = getTableMetadata;
+exports.getTables = getTables;
 exports.mapSelectAllFieldsToAlias = mapSelectAllFieldsToAlias;
 exports.mapSelectFieldsWithAlias = mapSelectFieldsWithAlias;
+exports.migrations = migrations;
 exports.parseDateTime = parseDateTime;
 exports.patchDbWithSelectAliased = patchDbWithSelectAliased;
 //# sourceMappingURL=ForgeSQLORM.js.map
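The newly exported helpers at the end of the bundle (`getTables`, `forgeSystemTables`, `migrations`) make the system-table metadata reusable outside the web triggers. A brief sketch, assuming these named exports and Drizzle's `getTableName` helper:

```typescript
import { getTables, forgeSystemTables, migrations } from "forge-sql-orm";
import { getTableName } from "drizzle-orm";

// Tables that forge-sql-orm manages itself (currently just the __migrations table).
const systemTableNames = forgeSystemTables.map((t) => getTableName(t));

// SHOW TABLES via @forge/sql, with the system tables filtered out.
const allTables = await getTables();
const userTables = allTables.filter((name) => !systemTableNames.includes(String(name)));

console.log("migrations table:", getTableName(migrations)); // "__migrations"
console.log("user tables:", userTables);
```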