forge-sql-orm 2.0.18 → 2.0.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/README.md +95 -4
  2. package/dist/ForgeSQLORM.js +315 -49
  3. package/dist/ForgeSQLORM.js.map +1 -1
  4. package/dist/ForgeSQLORM.mjs +315 -49
  5. package/dist/ForgeSQLORM.mjs.map +1 -1
  6. package/dist/core/ForgeSQLAnalyseOperations.d.ts +250 -0
  7. package/dist/core/ForgeSQLAnalyseOperations.d.ts.map +1 -0
  8. package/dist/core/ForgeSQLORM.d.ts +12 -2
  9. package/dist/core/ForgeSQLORM.d.ts.map +1 -1
  10. package/dist/core/ForgeSQLQueryBuilder.d.ts +105 -9
  11. package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
  12. package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
  13. package/dist/core/SystemTables.d.ts +167 -0
  14. package/dist/core/SystemTables.d.ts.map +1 -1
  15. package/dist/index.d.ts +1 -0
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/utils/sqlUtils.d.ts +2 -2
  18. package/dist/utils/sqlUtils.d.ts.map +1 -1
  19. package/dist/webtriggers/applyMigrationsWebTrigger.d.ts.map +1 -1
  20. package/dist/webtriggers/dropMigrationWebTrigger.d.ts +2 -4
  21. package/dist/webtriggers/dropMigrationWebTrigger.d.ts.map +1 -1
  22. package/package.json +11 -19
  23. package/src/core/ForgeSQLAnalyseOperations.ts +462 -0
  24. package/src/core/ForgeSQLORM.ts +43 -7
  25. package/src/core/ForgeSQLQueryBuilder.ts +121 -18
  26. package/src/core/ForgeSQLSelectOperations.ts +4 -6
  27. package/src/core/SystemTables.ts +175 -0
  28. package/src/index.ts +1 -0
  29. package/src/utils/forgeDriverProxy.ts +1 -1
  30. package/src/utils/sqlUtils.ts +10 -16
  31. package/src/webtriggers/applyMigrationsWebTrigger.ts +32 -16
  32. package/src/webtriggers/dropMigrationWebTrigger.ts +5 -6
  33. package/src/webtriggers/fetchSchemaWebTrigger.ts +2 -10
package/README.md CHANGED
@@ -1,11 +1,23 @@
  # Forge SQL ORM
 
+ [![npm version](https://img.shields.io/npm/v/forge-sql-orm)](https://www.npmjs.com/package/forge-sql-orm)
+ [![npm downloads](https://img.shields.io/npm/dm/forge-sql-orm)](https://www.npmjs.com/package/forge-sql-orm)
+ [![npm version (CLI)](https://img.shields.io/npm/v/forge-sql-orm-cli?label=cli)](https://www.npmjs.com/package/forge-sql-orm-cli)
+ [![npm downloads (CLI)](https://img.shields.io/npm/dm/forge-sql-orm-cli?label=cli%20downloads)](https://www.npmjs.com/package/forge-sql-orm-cli)
+
+ [![License](https://img.shields.io/github/license/vzakharchenko/forge-sql-orm)](https://github.com/vzakharchenko/forge-sql-orm/blob/master/LICENSE)
+
  [![forge-sql-orm CI](https://github.com/vzakharchenko/forge-sql-orm/actions/workflows/node.js.yml/badge.svg)](https://github.com/vzakharchenko/forge-sql-orm/actions/workflows/node.js.yml)
+ [![Coverage Status](https://coveralls.io/repos/github/vzakharchenko/forge-sql-orm/badge.svg?branch=master)](https://coveralls.io/github/vzakharchenko/forge-sql-orm?branch=master)
+ [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=vzakharchenko_forge-sql-orm&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=vzakharchenko_forge-sql-orm)
+ [![DeepScan grade](https://deepscan.io/api/teams/26652/projects/29272/branches/940614/badge/grade.svg)](https://deepscan.io/dashboard#view=project&tid=26652&pid=29272&bid=940614)
+
 
  **Forge-SQL-ORM** is an ORM designed for working with [@forge/sql](https://developer.atlassian.com/platform/forge/storage-reference/sql-tutorial/) in **Atlassian Forge**. It is built on top of [Drizzle ORM](https://orm.drizzle.team) and provides advanced capabilities for working with relational databases inside Forge.
 
  ## Key Features
  - ✅ **Custom Drizzle Driver** for direct integration with @forge/sql
+ - ✅ **Type-Safe Query Building**: Write SQL queries with full TypeScript support
  - ✅ **Supports complex SQL queries** with joins and filtering using Drizzle ORM
  - ✅ **Schema migration support**, allowing automatic schema evolution
  - ✅ **Automatic entity generation** from MySQL/TiDB databases
@@ -14,7 +26,7 @@
  - ✅ **Schema Fetching** Development-only web trigger to retrieve current database schema and generate SQL statements for schema recreation
  - ✅ **Ready-to-use Migration Triggers** Built-in web triggers for applying migrations, dropping tables (development-only), and fetching schema (development-only) with proper error handling and security controls
  - ✅ **Optimistic Locking** Ensures data consistency by preventing conflicts when multiple users update the same record
- - ✅ **Type Safety** Full TypeScript support with proper type inference
+ - ✅ **Query Plan Analysis**: Detailed execution plan analysis and optimization insights (performance analysis and troubleshooting only)
 
  ## Usage Approaches
 
@@ -587,15 +599,14 @@ This trigger allows you to completely reset your database schema. It's useful fo
  - Testing scenarios requiring a clean database
  - Resetting the database before applying new migrations
 
- **Important**: The trigger will only drop tables that are defined in your models. Any tables that exist in the database but are not defined in your models will remain untouched.
+ **Important**: The trigger will drop all tables, including the migrations table (`__migrations`).
 
  ```typescript
  // Example usage in your Forge app
  import { dropSchemaMigrations } from "forge-sql-orm";
- import * as schema from "./entities/schema";
 
  export const dropMigrations = () => {
- return dropSchemaMigrations(Object.values(schema));
+ return dropSchemaMigrations();
  };
  ```
 
@@ -671,6 +682,86 @@ SET foreign_key_checks = 1;
  - Use these triggers as part of your deployment pipeline
  - Monitor the execution logs in the Forge Developer Console
 
+ ## Query Analysis and Performance Optimization
+
+ ⚠️ **IMPORTANT NOTE**: The query analysis features described below are experimental and should be used only for troubleshooting purposes. These features rely on TiDB's `information_schema` and `performance_schema`, which may change in future updates. As of April 2025, these features are available, but their future availability is not guaranteed.
+
+ ### About Atlassian's Built-in Analysis Tools
+
+ Atlassian already provides comprehensive query analysis tools in the development console, including:
+ - Basic query performance metrics
+ - Slow query tracking (queries over 500ms)
+ - Basic execution statistics
+ - Query history and patterns
+
+ Our analysis tools are designed to complement these built-in features by providing additional insights directly from TiDB's system schemas. However, they should be used with caution and only for troubleshooting purposes.
+
+ ### Usage Guidelines
+
+ 1. **Development and Troubleshooting Only**
+ - These tools should not be used in production code
+ - Intended only for development and debugging
+ - Use for identifying and fixing performance issues
+
+ 2. **Schema Stability**
+ - Features rely on TiDB's `information_schema` and `performance_schema`
+ - Schema structure may change in future TiDB updates
+ - No guarantee of long-term availability
+
+ 3. **Current Availability (April 2025)**
+ - `information_schema` based analysis is currently functional
+ - Query plan analysis is available
+ - Performance metrics collection is working
+
+ ### Available Analysis Tools
+
+ ```typescript
+ import ForgeSQL from "forge-sql-orm";
+
+ const forgeSQL = new ForgeSQL();
+ const analyzeForgeSql = forgeSQL.analyze();
+ ```
+
+ #### Query Plan Analysis
+
+ ⚠️ **For Troubleshooting Only**: This feature should only be used during development and debugging sessions.
+
+ ```typescript
+ // Example usage for troubleshooting a specific query
+ const forgeSQL = new ForgeSQL();
+ const analyzeForgeSql = forgeSQL.analyze();
+
+ // Analyze a Drizzle query
+ const plan = await analyzeForgeSql.explain(
+ forgeSQL.select({
+ table1: testEntityJoin1,
+ table2: { name: testEntityJoin2.name, email: testEntityJoin2.email },
+ count: rawSql<number>`COUNT(*)`,
+ table3: {
+ table12: testEntityJoin1.name,
+ table22: testEntityJoin2.email,
+ table32: testEntity.id
+ },
+ })
+ .from(testEntityJoin1)
+ .innerJoin(testEntityJoin2, eq(testEntityJoin1.id, testEntityJoin2.id))
+ );
+
+ // Analyze a raw SQL query
+ const rawPlan = await analyzeForgeSql.explainRaw(
+ "SELECT * FROM users WHERE id = ?",
+ [1]
+ );
+ ```
+
+ This analysis helps you understand:
+ - How the database executes your query
+ - Which indexes are being used
+ - Estimated vs actual row counts
+ - Resource usage at each step
+ - Potential performance bottlenecks
+
+
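In addition to `explain`/`explainRaw`, this release adds `explainAnalyze`, `analyzeSlowQueries`, and `analyzeQueriesHistory` (see the `ForgeSQLAnalyseOperation` class later in this diff). A minimal sketch of how they might be called, assuming the same `ForgeSQL` setup as above and a hypothetical `users` Drizzle schema:

```typescript
import ForgeSQL from "forge-sql-orm";
import { users } from "./entities/schema"; // hypothetical Drizzle table

const forgeSQL = new ForgeSQL();
const analyze = forgeSQL.analyze();

// EXPLAIN ANALYZE executes the statement, so only run it against test data.
const analyzed = await analyze.explainAnalyze(
  forgeSQL.select({ user: users }).from(users),
);

// Slow queries recorded by TiDB, newest first, with the text plan already parsed.
const slow = await analyze.analyzeSlowQueries();
slow.forEach((q) => console.log(q.query, q.queryTime, q.parsedPlan));

// Statement history touching specific tables over the last 24 hours.
const dayAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
const history = await analyze.analyzeQueriesHistory([users], dayAgo, new Date());
```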
  ## License
  This project is licensed under the **MIT License**.
  Feel free to use it for commercial and personal projects.
package/dist/ForgeSQLORM.js CHANGED
@@ -64,7 +64,7 @@ function processForeignKeys(table2, foreignKeysSymbol, extraSymbol) {
  const configBuilderData = extraConfigBuilder(table2);
  if (configBuilderData) {
  const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
- (item) => item.value || item
+ (item) => item.value ?? item
  );
  configBuilders.forEach((builder) => {
  if (!builder?.constructor) return;
@@ -99,7 +99,7 @@ function getTableMetadata(table2) {
  const configBuilderData = extraConfigBuilder(table2);
  if (configBuilderData) {
  const configBuilders = Array.isArray(configBuilderData) ? configBuilderData : Object.values(configBuilderData).map(
- (item) => item.value || item
+ (item) => item.value ?? item
  );
  configBuilders.forEach((builder) => {
  if (!builder?.constructor) return;
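The `||` → `??` change above matters when `item.value` is a legitimate falsy value such as `0`, `""`, or `false`: `||` falls back to the whole item for any falsy value, while `??` only falls back for `null`/`undefined`. A quick illustration:

```typescript
const item = { value: 0 };

// `||` treats every falsy value as missing and returns the fallback.
const withOr = item.value || item; // -> { value: 0 }

// `??` only falls back when the value is null or undefined.
const withNullish = item.value ?? item; // -> 0
```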
@@ -129,13 +129,9 @@ function getTableMetadata(table2) {
  }
  function generateDropTableStatements(tables) {
  const dropStatements = [];
- tables.forEach((table2) => {
- const tableMetadata = getTableMetadata(table2);
- if (tableMetadata.tableName) {
- dropStatements.push(`DROP TABLE IF EXISTS \`${tableMetadata.tableName}\`;`);
- }
+ tables.forEach((tableName) => {
+ dropStatements.push(`DROP TABLE IF EXISTS \`${tableName}\`;`);
  });
- dropStatements.push(`DELETE FROM __migrations;`);
  return dropStatements;
  }
  function mapSelectTableToAlias(table2, uniqPrefix, aliasMap) {
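With this change `generateDropTableStatements` receives plain table names (as returned by `getTables()` further down in this diff) instead of Drizzle table objects, and the separate `DELETE FROM __migrations;` statement is gone because the `__migrations` table is now dropped along with everything else. Roughly:

```typescript
generateDropTableStatements(["users", "__migrations"]);
// -> ["DROP TABLE IF EXISTS `users`;", "DROP TABLE IF EXISTS `__migrations`;"]
```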
@@ -193,9 +189,9 @@ function getAliasFromDrizzleAlias(value) {
  const aliasNameChunk = queryChunks[queryChunks.length - 2];
  if (sql.isSQLWrapper(aliasNameChunk) && "queryChunks" in aliasNameChunk) {
  const aliasNameChunkSql = aliasNameChunk;
- if (aliasNameChunkSql && aliasNameChunkSql.queryChunks.length === 1) {
+ if (aliasNameChunkSql.queryChunks?.length === 1 && aliasNameChunkSql.queryChunks[0]) {
  const queryChunksStringChunc = aliasNameChunkSql.queryChunks[0];
- if (queryChunksStringChunc && "value" in queryChunksStringChunc) {
+ if ("value" in queryChunksStringChunc) {
  const values = queryChunksStringChunc.value;
  if (values && values.length === 1) {
  return values[0];
@@ -247,7 +243,7 @@ function applyFromDriverTransform(rows, selections, aliasMap) {
  });
  }
  function processNullBranches(obj) {
- if (obj === null || typeof obj !== "object" || obj === void 0) {
+ if (obj === null || typeof obj !== "object") {
  return obj;
  }
  if (obj.constructor && obj.constructor.name !== "Object") {
@@ -260,7 +256,7 @@ function processNullBranches(obj) {
  result[key] = null;
  continue;
  }
- if (typeof value === "object" && value !== null && value !== void 0) {
+ if (typeof value === "object") {
  const processed = processNullBranches(value);
  result[key] = processed;
  if (processed !== null) {
@@ -610,9 +606,8 @@ class ForgeSQLSelectOperations {
  */
  async executeRawSQL(query, params) {
  if (this.options.logRawSqlQuery) {
- console.debug(
- `Executing with SQL ${query}` + params ? `, with params: ${JSON.stringify(params)}` : ""
- );
+ const paramsStr = params ? `, with params: ${JSON.stringify(params)}` : "";
+ console.debug(`Executing with SQL ${query}${paramsStr}`);
  }
  const sqlStatement = sql$1.sql.prepare(query);
  if (params) {
@@ -634,7 +629,7 @@
  }
  if (this.options.logRawSqlQuery) {
  console.debug(
- `Executing Update with SQL ${query}` + params ? `, with params: ${JSON.stringify(params)}` : ""
+ `Executing Update with SQL ${query}` + (params ? `, with params: ${JSON.stringify(params)}` : "")
  );
  }
  const updateQueryResponseResults = await sqlStatement.execute();
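Both logging changes fix the same operator-precedence bug: `+` binds tighter than the conditional operator, so the original expression used the concatenated (and always truthy) string as the ternary condition and logged only the params suffix. For example:

```typescript
const query = "SELECT 1";
const params = undefined;

// Before: (`...${query}` + params) is evaluated first and is always truthy,
// so only the suffix branch ever reaches the log.
console.debug(`Executing with SQL ${query}` + params ? `, with params: ${JSON.stringify(params)}` : "");
// logs: ", with params: undefined"

// After: parenthesize the conditional (or build the suffix separately).
console.debug(`Executing with SQL ${query}` + (params ? `, with params: ${JSON.stringify(params)}` : ""));
// logs: "Executing with SQL SELECT 1"
```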
@@ -699,7 +694,7 @@ function injectSqlHints(query, hints) {
  function createForgeDriverProxy(options, logRawSqlQuery) {
  return async (query, params, method) => {
  const modifiedQuery = injectSqlHints(query, options);
- if (options && logRawSqlQuery) {
+ if (options && logRawSqlQuery && modifiedQuery !== query) {
  console.warn("modified query: " + modifiedQuery);
  }
  return forgeDriver(modifiedQuery, params, method);
@@ -748,11 +743,240 @@ function patchDbWithSelectAliased(db) {
  };
  return db;
  }
+ class ForgeSQLAnalyseOperation {
+ forgeOperations;
+ /**
+ * Creates a new instance of ForgeSQLAnalyseOperation.
+ * @param {ForgeSqlOperation} forgeOperations - The ForgeSQL operations instance
+ */
+ constructor(forgeOperations) {
+ this.forgeOperations = forgeOperations;
+ this.mapToCamelCaseClusterStatement = this.mapToCamelCaseClusterStatement.bind(this);
+ }
+ /**
+ * Executes EXPLAIN on a raw SQL query.
+ * @param {string} query - The SQL query to analyze
+ * @param {unknown[]} bindParams - The query parameters
+ * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+ */
+ async explainRaw(query, bindParams) {
+ const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ${query}`, bindParams);
+ return results.map((row) => ({
+ id: row.id,
+ estRows: row.estRows,
+ actRows: row.actRows,
+ task: row.task,
+ accessObject: row["access object"],
+ executionInfo: row["execution info"],
+ operatorInfo: row["operator info"],
+ memory: row.memory,
+ disk: row.disk
+ }));
+ }
+ /**
+ * Executes EXPLAIN on a Drizzle query.
+ * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
+ * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+ */
+ async explain(query) {
+ const { sql: sql2, params } = query.toSQL();
+ return this.explainRaw(sql2, params);
+ }
+ /**
+ * Executes EXPLAIN ANALYZE on a raw SQL query.
+ * @param {string} query - The SQL query to analyze
+ * @param {unknown[]} bindParams - The query parameters
+ * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+ */
+ async explainAnalyzeRaw(query, bindParams) {
+ const results = await this.forgeOperations.fetch().executeRawSQL(`EXPLAIN ANALYZE ${query}`, bindParams);
+ return results.map((row) => ({
+ id: row.id,
+ estRows: row.estRows,
+ actRows: row.actRows,
+ task: row.task,
+ accessObject: row["access object"],
+ executionInfo: row["execution info"],
+ operatorInfo: row["operator info"],
+ memory: row.memory,
+ disk: row.disk
+ }));
+ }
+ /**
+ * Executes EXPLAIN ANALYZE on a Drizzle query.
+ * @param {{ toSQL: () => Query }} query - The Drizzle query to analyze
+ * @returns {Promise<ExplainAnalyzeRow[]>} The execution plan analysis results
+ */
+ async explainAnalyze(query) {
+ const { sql: sql2, params } = query.toSQL();
+ return this.explainAnalyzeRaw(sql2, params);
+ }
+ /**
+ * Decodes a query execution plan from its string representation.
+ * @param {string} input - The raw execution plan string
+ * @returns {ExplainAnalyzeRow[]} The decoded execution plan rows
+ */
+ decodedPlan(input) {
+ if (!input) {
+ return [];
+ }
+ const lines = input.trim().split("\n");
+ if (lines.length < 2) return [];
+ const headersRaw = lines[0].split(" ").map((h) => h.trim()).filter(Boolean);
+ const headers = headersRaw.map((h) => {
+ return h.replace(/\s+/g, " ").replace(/[-\s]+(.)?/g, (_, c) => c ? c.toUpperCase() : "").replace(/^./, (s) => s.toLowerCase());
+ });
+ return lines.slice(1).map((line) => {
+ const values = line.split(" ").map((s) => s.trim()).filter(Boolean);
+ const row = {};
+ headers.forEach((key, i) => {
+ row[key] = values[i] ?? "";
+ });
+ return row;
+ });
+ }
+ /**
+ * Normalizes a raw slow query row into a more structured format.
+ * @param {SlowQueryRaw} row - The raw slow query data
+ * @returns {SlowQueryNormalized} The normalized slow query data
+ */
+ normalizeSlowQuery(row) {
+ return {
+ time: row.Time,
+ txnStartTs: row.Txn_start_ts,
+ user: row.User,
+ host: row.Host,
+ connId: row.Conn_ID,
+ db: row.DB,
+ query: row.Query,
+ digest: row.Digest,
+ queryTime: row.Query_time,
+ compileTime: row.Compile_time,
+ optimizeTime: row.Optimize_time,
+ processTime: row.Process_time,
+ waitTime: row.Wait_time,
+ parseTime: row.Parse_time,
+ rewriteTime: row.Rewrite_time,
+ copTime: row.Cop_time,
+ copProcAvg: row.Cop_proc_avg,
+ copProcMax: row.Cop_proc_max,
+ copProcP90: row.Cop_proc_p90,
+ copProcAddr: row.Cop_proc_addr,
+ copWaitAvg: row.Cop_wait_avg,
+ copWaitMax: row.Cop_wait_max,
+ copWaitP90: row.Cop_wait_p90,
+ copWaitAddr: row.Cop_wait_addr,
+ memMax: row.Mem_max,
+ diskMax: row.Disk_max,
+ totalKeys: row.Total_keys,
+ processKeys: row.Process_keys,
+ requestCount: row.Request_count,
+ kvTotal: row.KV_total,
+ pdTotal: row.PD_total,
+ resultRows: row.Result_rows,
+ rocksdbBlockCacheHitCount: row.Rocksdb_block_cache_hit_count,
+ rocksdbBlockReadCount: row.Rocksdb_block_read_count,
+ rocksdbBlockReadByte: row.Rocksdb_block_read_byte,
+ plan: row.Plan,
+ binaryPlan: row.Binary_plan,
+ planDigest: row.Plan_digest,
+ parsedPlan: this.decodedPlan(row.Plan)
+ };
+ }
+ /**
+ * Builds a SQL query for retrieving cluster statement history.
+ * @param {string[]} tables - The tables to analyze
+ * @param {Date} [from] - The start date for the analysis
+ * @param {Date} [to] - The end date for the analysis
+ * @returns {string} The SQL query for cluster statement history
+ */
+ buildClusterStatementQuery(tables, from, to) {
+ const formatDateTime = (date) => moment(date).format("YYYY-MM-DDTHH:mm:ss.SSS");
+ const tableConditions = tables.map((table2) => `TABLE_NAMES LIKE CONCAT(SCHEMA_NAME, '.', '%', '${table2}', '%')`).join(" OR ");
+ const timeConditions = [];
+ if (from) {
+ timeConditions.push(`SUMMARY_BEGIN_TIME >= '${formatDateTime(from)}'`);
+ }
+ if (to) {
+ timeConditions.push(`SUMMARY_END_TIME <= '${formatDateTime(to)}'`);
+ }
+ let whereClauses;
+ if (tableConditions?.length) {
+ whereClauses = [tableConditions ? `(${tableConditions})` : "", ...timeConditions];
+ } else {
+ whereClauses = timeConditions;
+ }
+ return `
+ SELECT *
+ FROM (
+ SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY
+ UNION ALL
+ SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+ ) AS combined
+ ${whereClauses?.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : ""}
+ `;
+ }
+ /**
+ * Retrieves and analyzes slow queries from the database.
+ * @returns {Promise<SlowQueryNormalized[]>} The normalized slow query data
+ */
+ // CLUSTER_SLOW_QUERY STATISTICS
+ async analyzeSlowQueries() {
+ const results = await this.forgeOperations.fetch().executeRawSQL(`
+ SELECT *
+ FROM information_schema.slow_query
+ ORDER BY time DESC
+ `);
+ return results.map((row) => this.normalizeSlowQuery(row));
+ }
+ /**
+ * Converts a cluster statement row to camelCase format.
+ * @param {Record<string, any>} input - The input row data
+ * @returns {ClusterStatementRowCamelCase} The converted row data
+ */
+ mapToCamelCaseClusterStatement(input) {
+ if (!input) {
+ return {};
+ }
+ const result = {};
+ result.parsedPlan = this.decodedPlan(input["PLAN"] ?? "");
+ for (const key in input) {
+ const camelKey = key.toLowerCase().replace(/_([a-z])/g, (_, letter) => letter.toUpperCase());
+ result[camelKey] = input[key];
+ }
+ return result;
+ }
+ /**
+ * Analyzes query history for specific tables using raw table names.
+ * @param {string[]} tables - The table names to analyze
+ * @param {Date} [fromDate] - The start date for the analysis
+ * @param {Date} [toDate] - The end date for the analysis
+ * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
+ */
+ async analyzeQueriesHistoryRaw(tables, fromDate, toDate) {
+ const results = await this.forgeOperations.fetch().executeRawSQL(
+ this.buildClusterStatementQuery(tables ?? [], fromDate, toDate)
+ );
+ return results.map((r) => this.mapToCamelCaseClusterStatement(r));
+ }
+ /**
+ * Analyzes query history for specific tables using Drizzle table objects.
+ * @param {AnyMySqlTable[]} tables - The Drizzle table objects to analyze
+ * @param {Date} [fromDate] - The start date for the analysis
+ * @param {Date} [toDate] - The end date for the analysis
+ * @returns {Promise<ClusterStatementRowCamelCase[]>} The analyzed query history
+ */
+ async analyzeQueriesHistory(tables, fromDate, toDate) {
+ const tableNames = tables?.map((table$1) => table.getTableName(table$1)) ?? [];
+ return this.analyzeQueriesHistoryRaw(tableNames, fromDate, toDate);
+ }
+ }
  class ForgeSQLORMImpl {
  static instance = null;
  drizzle;
  crudOperations;
  fetchOperations;
+ analyzeOperations;
  /**
  * Private constructor to enforce singleton behavior.
  * @param options - Options for configuring ForgeSQL ORM behavior.
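The key normalization used by `mapToCamelCaseClusterStatement` above is a lowercase-then-camelCase pass over each column name; a standalone sketch of the same transformation (column names taken from the query builder above):

```typescript
const toCamel = (key: string): string =>
  key.toLowerCase().replace(/_([a-z])/g, (_, letter: string) => letter.toUpperCase());

toCamel("SUMMARY_BEGIN_TIME"); // -> "summaryBeginTime"
toCamel("SUMMARY_END_TIME");   // -> "summaryEndTime"
toCamel("TABLE_NAMES");        // -> "tableNames"
```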
@@ -772,20 +996,26 @@ class ForgeSQLORMImpl {
  );
  this.crudOperations = new ForgeSQLCrudOperations(this, newOptions);
  this.fetchOperations = new ForgeSQLSelectOperations(newOptions);
+ this.analyzeOperations = new ForgeSQLAnalyseOperation(this);
  } catch (error) {
  console.error("ForgeSQLORM initialization failed:", error);
  throw error;
  }
  }
+ /**
+ * Create the modify operations instance.
+ * @returns modify operations.
+ */
+ modify() {
+ return this.crudOperations;
+ }
  /**
  * Returns the singleton instance of ForgeSQLORMImpl.
  * @param options - Options for configuring ForgeSQL ORM behavior.
  * @returns The singleton instance of ForgeSQLORMImpl.
  */
  static getInstance(options) {
- if (!ForgeSQLORMImpl.instance) {
- ForgeSQLORMImpl.instance = new ForgeSQLORMImpl(options);
- }
+ ForgeSQLORMImpl.instance ??= new ForgeSQLORMImpl(options);
  return ForgeSQLORMImpl.instance;
  }
  /**
@@ -793,7 +1023,7 @@ class ForgeSQLORMImpl {
  * @returns CRUD operations.
  */
  crud() {
- return this.crudOperations;
+ return this.modify();
  }
  /**
  * Retrieves the fetch operations instance.
@@ -802,6 +1032,9 @@
  fetch() {
  return this.fetchOperations;
  }
+ analyze() {
+ return this.analyzeOperations;
+ }
  /**
  * Returns a Drizzle query builder instance.
  *
@@ -889,7 +1122,7 @@ class ForgeSQLORM {
  *
  * @template TSelection - The type of the selected fields
  * @param {TSelection} fields - Object containing the fields to select, with table schemas as values
- * @returns {MySqlSelectBuilder<TSelection, MySql2PreparedQueryHKT>} A distinct select query builder with unique field aliases
+ * @returns {MySqlSelectBuilder<TSelection, MySqlRemotePreparedQueryHKT>} A distinct select query builder with unique field aliases
  * @throws {Error} If fields parameter is empty
  * @example
  * ```typescript
@@ -907,7 +1140,14 @@
  * @returns CRUD operations.
  */
  crud() {
- return this.ormInstance.crud();
+ return this.ormInstance.modify();
+ }
+ /**
+ * Proxies the `modify` method from `ForgeSQLORMImpl`.
+ * @returns Modify operations.
+ */
+ modify() {
+ return this.ormInstance.modify();
  }
  /**
  * Proxies the `fetch` method from `ForgeSQLORMImpl`.
@@ -916,6 +1156,13 @@
  fetch() {
  return this.ormInstance.fetch();
  }
+ /**
+ * Provides query analysis capabilities including EXPLAIN ANALYZE and slow query analysis.
+ * @returns {SchemaAnalyzeForgeSql} Interface for analyzing query performance
+ */
+ analyze() {
+ return this.ormInstance.analyze();
+ }
  /**
  * Returns a Drizzle query builder instance.
  *
@@ -976,8 +1223,19 @@ const forgeTimeString = mysqlCore.customType({
  return parseDateTime(value, "HH:mm:ss.SSS");
  }
  });
- async function dropSchemaMigrations(tables) {
+ const migrations = mysqlCore.mysqlTable("__migrations", {
+ id: mysqlCore.bigint("id", { mode: "number" }).primaryKey().autoincrement(),
+ name: mysqlCore.varchar("name", { length: 255 }).notNull(),
+ migratedAt: mysqlCore.timestamp("migratedAt").defaultNow().notNull()
+ });
+ async function getTables() {
+ const tables = await sql$1.sql.executeDDL("SHOW TABLES");
+ return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
+ }
+ const forgeSystemTables = [migrations];
+ async function dropSchemaMigrations() {
  try {
+ const tables = await getTables();
  const dropStatements = generateDropTableStatements(tables);
  for (const statement of dropStatements) {
  console.warn(statement);
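`migrations` (the Drizzle schema of the `__migrations` system table), `getTables`, and `forgeSystemTables` are also re-exported from the package (see the exports hunk at the end of this diff), so application code can reuse them. A minimal sketch, assuming the usual `ForgeSQL` setup from the README:

```typescript
import ForgeSQL, { getTables, migrations } from "forge-sql-orm";

// Every table currently in the Forge SQL database (runs SHOW TABLES underneath).
const tableNames = await getTables();
console.log(tableNames);

// Because `migrations` is a regular Drizzle table schema, the applied
// migration history can be queried like any other table.
const forgeSQL = new ForgeSQL();
const applied = await forgeSQL.select({ migration: migrations }).from(migrations);
```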
@@ -988,32 +1246,41 @@ async function dropSchemaMigrations(tables) {
  "⚠️ All data in these tables has been permanently deleted. This operation cannot be undone."
  );
  } catch (error) {
+ console.error(error);
  const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
  return getHttpResponse(500, errorMessage);
  }
  }
  const applySchemaMigrations = async (migration) => {
- console.log("Provisioning the database");
- await sql$1.sql._provision();
- console.info("Running schema migrations");
- const migrations2 = await migration(sql$1.migrationRunner);
- const successfulMigrations = await migrations2.run();
- console.info("Migrations applied:", successfulMigrations);
- const migrationHistory = (await sql$1.migrationRunner.list()).map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n");
- console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
- return {
- headers: { "Content-Type": ["application/json"] },
- statusCode: 200,
- statusText: "OK",
- body: "Migrations successfully executed"
- };
+ try {
+ if (typeof migration !== "function") {
+ throw new Error("migration is not a function");
+ }
+ console.log("Provisioning the database");
+ await sql$1.sql._provision();
+ console.info("Running schema migrations");
+ const migrations2 = await migration(sql$1.migrationRunner);
+ const successfulMigrations = await migrations2.run();
+ console.info("Migrations applied:", successfulMigrations);
+ const migrationList = await sql$1.migrationRunner.list();
+ const migrationHistory = Array.isArray(migrationList) && migrationList.length > 0 ? migrationList.map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n") : "No migrations found";
+ console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
+ return {
+ headers: { "Content-Type": ["application/json"] },
+ statusCode: 200,
+ statusText: "OK",
+ body: "Migrations successfully executed"
+ };
+ } catch (error) {
+ console.error("Error during migration:", error);
+ return {
+ headers: { "Content-Type": ["application/json"] },
+ statusCode: 500,
+ statusText: "Internal Server Error",
+ body: error instanceof Error ? error.message : "Unknown error during migration"
+ };
+ }
  };
- const migrations = mysqlCore.mysqlTable("__migrations", {
- id: mysqlCore.bigint("id", { mode: "number" }).primaryKey().autoincrement(),
- name: mysqlCore.varchar("name", { length: 255 }).notNull(),
- migratedAt: mysqlCore.timestamp("migratedAt").defaultNow().notNull()
- });
- const forgeSystemTables = [migrations];
  async function fetchSchemaWebTrigger() {
  try {
  const tables = await getTables();
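`applySchemaMigrations` now validates its argument and converts failures into a 500 response instead of letting the trigger throw. A minimal sketch of a Forge web trigger wired to it, assuming the handler is imported from the package entry point and that the migration callback uses @forge/sql's chainable `migrationRunner.enqueue`:

```typescript
import { applySchemaMigrations } from "forge-sql-orm";

// Queue the schema changes for this deployment; enqueue() returns the runner,
// so applySchemaMigrations can call run() on whatever the callback returns.
const migration = (runner) =>
  runner.enqueue(
    "v001_create_users",
    "CREATE TABLE IF NOT EXISTS users (id INT PRIMARY KEY, name VARCHAR(255))",
  );

// Web trigger handler: resolves to a 200 response on success,
// or a 500 response carrying the error message.
export const applyMigrations = () => applySchemaMigrations(migration);
```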
@@ -1026,14 +1293,10 @@ async function fetchSchemaWebTrigger() {
  return getHttpResponse(500, errorMessage);
  }
  }
- async function getTables() {
- const tables = await sql$1.sql.executeDDL("SHOW TABLES");
- return tables.rows.flatMap((tableInfo) => Object.values(tableInfo));
- }
  async function generateCreateTableStatements(tables) {
  const statements = [];
  for (const table2 of tables) {
- const createTableResult = await sql$1.sql.executeDDL(`SHOW CREATE TABLE ${table2}`);
+ const createTableResult = await sql$1.sql.executeDDL(`SHOW CREATE TABLE "${table2}"`);
  const createTableStatements = createTableResult.rows.filter((row) => !isSystemTable(row.Table)).map((row) => formatCreateTableStatement(row["Create Table"]));
  statements.push(...createTableStatements);
  }
@@ -1072,14 +1335,17 @@ exports.fetchSchemaWebTrigger = fetchSchemaWebTrigger;
  exports.forgeDateString = forgeDateString;
  exports.forgeDateTimeString = forgeDateTimeString;
  exports.forgeDriver = forgeDriver;
+ exports.forgeSystemTables = forgeSystemTables;
  exports.forgeTimeString = forgeTimeString;
  exports.forgeTimestampString = forgeTimestampString;
  exports.generateDropTableStatements = generateDropTableStatements;
  exports.getHttpResponse = getHttpResponse;
  exports.getPrimaryKeys = getPrimaryKeys;
  exports.getTableMetadata = getTableMetadata;
+ exports.getTables = getTables;
  exports.mapSelectAllFieldsToAlias = mapSelectAllFieldsToAlias;
  exports.mapSelectFieldsWithAlias = mapSelectFieldsWithAlias;
+ exports.migrations = migrations;
  exports.parseDateTime = parseDateTime;
  exports.patchDbWithSelectAliased = patchDbWithSelectAliased;
  //# sourceMappingURL=ForgeSQLORM.js.map