forge-sql-orm 2.1.5 → 2.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +135 -53
  2. package/dist/ForgeSQLORM.js +572 -231
  3. package/dist/ForgeSQLORM.js.map +1 -1
  4. package/dist/ForgeSQLORM.mjs +572 -231
  5. package/dist/ForgeSQLORM.mjs.map +1 -1
  6. package/dist/core/ForgeSQLORM.d.ts +91 -3
  7. package/dist/core/ForgeSQLORM.d.ts.map +1 -1
  8. package/dist/core/ForgeSQLQueryBuilder.d.ts +89 -2
  9. package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
  10. package/dist/core/SystemTables.d.ts +3654 -0
  11. package/dist/core/SystemTables.d.ts.map +1 -1
  12. package/dist/lib/drizzle/extensions/additionalActions.d.ts +2 -2
  13. package/dist/lib/drizzle/extensions/additionalActions.d.ts.map +1 -1
  14. package/dist/utils/forgeDriver.d.ts +61 -14
  15. package/dist/utils/forgeDriver.d.ts.map +1 -1
  16. package/dist/utils/metadataContextUtils.d.ts +1 -1
  17. package/dist/utils/metadataContextUtils.d.ts.map +1 -1
  18. package/dist/utils/requestTypeContextUtils.d.ts +8 -0
  19. package/dist/utils/requestTypeContextUtils.d.ts.map +1 -0
  20. package/dist/webtriggers/topSlowestStatementLastHourTrigger.d.ts +90 -65
  21. package/dist/webtriggers/topSlowestStatementLastHourTrigger.d.ts.map +1 -1
  22. package/package.json +9 -9
  23. package/src/core/ForgeSQLCrudOperations.ts +3 -3
  24. package/src/core/ForgeSQLORM.ts +334 -124
  25. package/src/core/ForgeSQLQueryBuilder.ts +116 -20
  26. package/src/core/ForgeSQLSelectOperations.ts +2 -2
  27. package/src/core/SystemTables.ts +16 -0
  28. package/src/lib/drizzle/extensions/additionalActions.ts +24 -22
  29. package/src/utils/cacheContextUtils.ts +2 -2
  30. package/src/utils/cacheUtils.ts +12 -12
  31. package/src/utils/forgeDriver.ts +219 -40
  32. package/src/utils/forgeDriverProxy.ts +2 -2
  33. package/src/utils/metadataContextUtils.ts +11 -13
  34. package/src/utils/requestTypeContextUtils.ts +11 -0
  35. package/src/utils/sqlUtils.ts +1 -1
  36. package/src/webtriggers/applyMigrationsWebTrigger.ts +9 -9
  37. package/src/webtriggers/clearCacheSchedulerTrigger.ts +1 -1
  38. package/src/webtriggers/dropMigrationWebTrigger.ts +2 -2
  39. package/src/webtriggers/dropTablesMigrationWebTrigger.ts +2 -2
  40. package/src/webtriggers/fetchSchemaWebTrigger.ts +1 -1
  41. package/src/webtriggers/topSlowestStatementLastHourTrigger.ts +511 -308
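The headline additions in this release are a DDL execution path (`executeDDL` / `executeDDLActions`), an operation-type context and timeout in the Forge SQL driver, the `STATEMENTS_SUMMARY` / `STATEMENTS_SUMMARY_HISTORY` system tables, and a reworked `topSlowestStatementLastHourTrigger`. A minimal usage sketch based on the JSDoc added in this diff; the `forgeSQL` instance and table names are illustrative:

```typescript
// Sketch only: assumes an existing ForgeSQLORM instance named `forgeSQL`
// (construction is unchanged by this release); table names are illustrative.

// DDL statements run through executeDDL, which sets the "DDL" operation-type
// context and skips caching.
await forgeSQL.executeDDL(
  "CREATE TABLE users (id INT PRIMARY KEY AUTO_INCREMENT, name VARCHAR(255) NOT NULL)",
);

// Arbitrary queries can be wrapped in the DDL context, e.g. reads against the
// INFORMATION_SCHEMA statement-summary tables exposed in this version.
const slowQueries = await forgeSQL.executeDDLActions(async () =>
  forgeSQL.execute(
    "SELECT * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY WHERE AVG_LATENCY > 1000000",
  ),
);
```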
@@ -1035,7 +1035,7 @@ class ForgeSQLSelectOperations {
  }
  }
  const metadataQueryContext = new node_async_hooks.AsyncLocalStorage();
- async function saveMetaDataInContextContext(metadata) {
+ async function saveMetaDataToContext(metadata) {
  const context = metadataQueryContext.getStore();
  if (context && metadata) {
  context.totalResponseSize += metadata.responseSize;
@@ -1046,26 +1046,101 @@ async function saveMetaDataInContextContext(metadata) {
  async function getLastestMetadata() {
  return metadataQueryContext.getStore();
  }
- const forgeDriver = async (query, params, method) => {
- if (method == "execute") {
- const sqlStatement = sql$1.sql.prepare(query);
- if (params) {
- sqlStatement.bindParams(...params);
+ const operationTypeQueryContext = new node_async_hooks.AsyncLocalStorage();
+ async function getOperationType() {
+ return operationTypeQueryContext.getStore()?.operationType ?? "DML";
+ }
+ function isUpdateQueryResponse(obj) {
+ return obj !== null && typeof obj === "object" && typeof obj.affectedRows === "number" && typeof obj.insertId === "number";
+ }
+ async function withTimeout$1(promise, timeoutMs = 1e4) {
+ let timeoutId;
+ const timeoutPromise = new Promise((_, reject) => {
+ timeoutId = setTimeout(() => {
+ reject(
+ new Error(
+ `Atlassian @forge/sql did not return a response within ${timeoutMs}ms (${timeoutMs / 1e3} seconds), so the request is blocked. Possible causes: slow query, network issues, or exceeding Forge SQL limits.`
+ )
+ );
+ }, timeoutMs);
+ });
+ try {
+ return await Promise.race([promise, timeoutPromise]);
+ } finally {
+ if (timeoutId) {
+ clearTimeout(timeoutId);
  }
- const updateQueryResponseResults = await sqlStatement.execute();
- let result = updateQueryResponseResults.rows;
- return { ...result, rows: [result] };
- } else {
- const sqlStatement = await sql$1.sql.prepare(query);
- if (params) {
- await sqlStatement.bindParams(...params);
+ }
+ }
+ function inlineParams(sql2, params) {
+ let i = 0;
+ return sql2.replace(/\?/g, () => {
+ const val = params[i++];
+ if (val === null) return "NULL";
+ if (typeof val === "number") return val.toString();
+ return `'${String(val).replace(/'/g, "''")}'`;
+ });
+ }
+ async function processDDLResult(method, result) {
+ if (result.metadata) {
+ await saveMetaDataToContext(result.metadata);
+ }
+ if (!result?.rows) {
+ return { rows: [] };
+ }
+ if (isUpdateQueryResponse(result.rows)) {
+ const oneRow = result.rows;
+ return { ...oneRow, rows: [oneRow] };
+ }
+ if (Array.isArray(result.rows)) {
+ if (method === "execute") {
+ return { rows: result.rows };
+ } else {
+ const rows = result.rows.map((r) => Object.values(r));
+ return { rows };
  }
- const result = await sqlStatement.execute();
- await saveMetaDataInContextContext(result.metadata);
- let rows;
- rows = result.rows.map((r) => Object.values(r));
- return { rows };
  }
+ return { rows: [] };
+ }
+ async function processExecuteMethod(query, params) {
+ const sqlStatement = sql$1.sql.prepare(query);
+ if (params) {
+ sqlStatement.bindParams(...params);
+ }
+ const result = await withTimeout$1(sqlStatement.execute());
+ await saveMetaDataToContext(result.metadata);
+ if (!result?.rows) {
+ return { rows: [] };
+ }
+ if (isUpdateQueryResponse(result.rows)) {
+ const oneRow = result.rows;
+ return { ...oneRow, rows: [oneRow] };
+ }
+ return { rows: result.rows };
+ }
+ async function processAllMethod(query, params) {
+ const sqlStatement = await sql$1.sql.prepare(query);
+ if (params) {
+ await sqlStatement.bindParams(...params);
+ }
+ const result = await withTimeout$1(sqlStatement.execute());
+ await saveMetaDataToContext(result.metadata);
+ if (!result?.rows) {
+ return { rows: [] };
+ }
+ const rows = result.rows.map((r) => Object.values(r));
+ return { rows };
+ }
+ const forgeDriver = async (query, params, method) => {
+ const operationType = await getOperationType();
+ if (operationType === "DDL") {
+ const result = await withTimeout$1(sql$1.sql.executeDDL(inlineParams(query, params)));
+ return await processDDLResult(method, result);
+ }
+ if (method === "execute") {
+ return await processExecuteMethod(query, params ?? []);
+ }
+ return await processAllMethod(query, params ?? []);
  };
  function injectSqlHints(query, hints) {
  if (!hints) {
@@ -1139,7 +1214,7 @@ async function handleSuccessfulExecution(rows, onfulfilled, table2, options, isC
  if (isCached && !cacheApplicationContext.getStore()) {
  await clearCache(table2, options);
  }
- const result = onfulfilled?.(rows);
+ const result = onfulfilled ? onfulfilled(rows) : rows;
  return result;
  } catch (error) {
  if (shouldClearCacheOnError(error)) {
@@ -1210,11 +1285,11 @@ async function handleCachedQuery(target, options, cacheTtl, selections, aliasMap
  try {
  const localCached = await getQueryLocalCacheQuery(target, options);
  if (localCached) {
- return onfulfilled?.(localCached);
+ return onfulfilled ? onfulfilled(localCached) : localCached;
  }
  const cacheResult = await getFromCache(target, options);
  if (cacheResult) {
- return onfulfilled?.(cacheResult);
+ return onfulfilled ? onfulfilled(cacheResult) : cacheResult;
  }
  const rows = await target.execute();
  const transformed = applyFromDriverTransform(rows, selections, aliasMap);
@@ -1222,23 +1297,29 @@ async function handleCachedQuery(target, options, cacheTtl, selections, aliasMap
  await setCacheResult(target, options, transformed, cacheTtl).catch((cacheError) => {
  console.warn("Cache set error:", cacheError);
  });
- return onfulfilled?.(transformed);
+ return onfulfilled ? onfulfilled(transformed) : transformed;
  } catch (error) {
- return onrejected?.(error);
+ if (onrejected) {
+ return onrejected(error);
+ }
+ throw error;
  }
  }
  async function handleNonCachedQuery(target, options, selections, aliasMap, onfulfilled, onrejected) {
  try {
  const localCached = await getQueryLocalCacheQuery(target, options);
  if (localCached) {
- return onfulfilled?.(localCached);
+ return onfulfilled ? onfulfilled(localCached) : localCached;
  }
  const rows = await target.execute();
  const transformed = applyFromDriverTransform(rows, selections, aliasMap);
  await saveQueryLocalCacheQuery(target, transformed, options);
- return onfulfilled?.(transformed);
+ return onfulfilled ? onfulfilled(transformed) : transformed;
  } catch (error) {
- return onrejected?.(error);
+ if (onrejected) {
+ return onrejected(error);
+ }
+ throw error;
  }
  }
  function createAliasedSelectBuilder(db, fields, selectFn, useCache, options, cacheTtl) {
@@ -1307,10 +1388,8 @@ function createRawQueryExecutor(db, options, useGlobalCache = false) {
  return async function(query, cacheTtl) {
  let sql$12;
  if (sql.isSQLWrapper(query)) {
- const sqlWrapper = query;
- sql$12 = sqlWrapper.getSQL().toQuery(
- db.dialect
- );
+ const dialect = db.dialect;
+ sql$12 = dialect.sqlToQuery(query);
  } else {
  sql$12 = {
  sql: query,
@@ -1859,19 +1938,26 @@ class ForgeSQLORMImpl {
  * ```
  */
  async executeWithMetadata(query, onMetadata) {
- return metadataQueryContext.run({
- totalDbExecutionTime: 0,
- totalResponseSize: 0
- }, async () => {
- try {
- return await query();
- } finally {
- const metadata = await getLastestMetadata();
- if (metadata && metadata.lastMetadata) {
- await onMetadata(metadata.totalDbExecutionTime, metadata.totalResponseSize, metadata.lastMetadata);
+ return metadataQueryContext.run(
+ {
+ totalDbExecutionTime: 0,
+ totalResponseSize: 0
+ },
+ async () => {
+ try {
+ return await query();
+ } finally {
+ const metadata = await getLastestMetadata();
+ if (metadata && metadata.lastMetadata) {
+ await onMetadata(
+ metadata.totalDbExecutionTime,
+ metadata.totalResponseSize,
+ metadata.lastMetadata
+ );
+ }
  }
  }
- });
+ );
  }
  /**
  * Executes operations within a cache context that collects cache eviction events.
@@ -2254,6 +2340,97 @@ class ForgeSQLORMImpl {
  execute(query) {
  return this.drizzle.executeQuery(query);
  }
+ /**
+ * Executes a Data Definition Language (DDL) SQL query.
+ * DDL operations include CREATE, ALTER, DROP, TRUNCATE, and other schema modification statements.
+ *
+ * This method is specifically designed for DDL operations and provides:
+ * - Proper operation type context for DDL queries
+ * - No caching (DDL operations should not be cached)
+ * - Direct execution without query optimization
+ *
+ * @template T - The expected return type of the query result
+ * @param query - The DDL SQL query to execute (SQLWrapper or string)
+ * @returns Promise with query results
+ * @throws {Error} If the DDL operation fails
+ *
+ * @example
+ * ```typescript
+ * // Create a new table
+ * await forgeSQL.executeDDL(`
+ * CREATE TABLE users (
+ * id INT PRIMARY KEY AUTO_INCREMENT,
+ * name VARCHAR(255) NOT NULL,
+ * email VARCHAR(255) UNIQUE
+ * )
+ * `);
+ *
+ * // Alter table structure
+ * await forgeSQL.executeDDL(sql`
+ * ALTER TABLE users
+ * ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ * `);
+ *
+ * // Drop a table
+ * await forgeSQL.executeDDL("DROP TABLE IF EXISTS old_users");
+ * ```
+ */
+ async executeDDL(query) {
+ return this.executeDDLActions(async () => this.drizzle.executeQuery(query));
+ }
+ /**
+ * Executes a series of actions within a DDL operation context.
+ * This method provides a way to execute regular SQL queries that should be treated
+ * as DDL operations, ensuring proper operation type context for performance monitoring.
+ *
+ * This method is useful for:
+ * - Executing regular SQL queries in DDL context for monitoring purposes
+ * - Wrapping non-DDL operations that should be treated as DDL for analysis
+ * - Ensuring proper operation type context for complex workflows
+ * - Maintaining DDL operation context across multiple function calls
+ *
+ * @template T - The return type of the actions function
+ * @param actions - Function containing SQL operations to execute in DDL context
+ * @returns Promise that resolves to the return value of the actions function
+ *
+ * @example
+ * ```typescript
+ * // Execute regular SQL queries in DDL context for monitoring
+ * await forgeSQL.executeDDLActions(async () => {
+ * const slowQueries = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * WHERE AVG_LATENCY > 1000000
+ * `);
+ * return slowQueries;
+ * });
+ *
+ * // Execute complex analysis queries in DDL context
+ * const result = await forgeSQL.executeDDLActions(async () => {
+ * const tableInfo = await forgeSQL.execute("SHOW TABLES");
+ * const performanceData = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+ * WHERE SUMMARY_END_TIME > DATE_SUB(NOW(), INTERVAL 1 HOUR)
+ * `);
+ * return { tableInfo, performanceData };
+ * });
+ *
+ * // Execute monitoring queries with error handling
+ * try {
+ * await forgeSQL.executeDDLActions(async () => {
+ * const metrics = await forgeSQL.execute(`
+ * SELECT COUNT(*) as query_count
+ * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * `);
+ * console.log(`Total queries: ${metrics[0].query_count}`);
+ * });
+ * } catch (error) {
+ * console.error("Monitoring query failed:", error);
+ * }
+ * ```
+ */
+ async executeDDLActions(actions) {
+ return operationTypeQueryContext.run({ operationType: "DDL" }, async () => actions());
+ }
  /**
  * Executes a raw SQL query with both local and global cache support.
  * This method provides comprehensive caching for raw SQL queries:
@@ -2601,7 +2778,98 @@ class ForgeSQLORM {
  * ```
  */
  execute(query) {
- return this.ormInstance.getDrizzleQueryBuilder().executeQuery(query);
+ return this.ormInstance.execute(query);
+ }
+ /**
+ * Executes a Data Definition Language (DDL) SQL query.
+ * DDL operations include CREATE, ALTER, DROP, TRUNCATE, and other schema modification statements.
+ *
+ * This method is specifically designed for DDL operations and provides:
+ * - Proper operation type context for DDL queries
+ * - No caching (DDL operations should not be cached)
+ * - Direct execution without query optimization
+ *
+ * @template T - The expected return type of the query result
+ * @param query - The DDL SQL query to execute (SQLWrapper or string)
+ * @returns Promise with query results
+ * @throws {Error} If the DDL operation fails
+ *
+ * @example
+ * ```typescript
+ * // Create a new table
+ * await forgeSQL.executeDDL(`
+ * CREATE TABLE users (
+ * id INT PRIMARY KEY AUTO_INCREMENT,
+ * name VARCHAR(255) NOT NULL,
+ * email VARCHAR(255) UNIQUE
+ * )
+ * `);
+ *
+ * // Alter table structure
+ * await forgeSQL.executeDDL(sql`
+ * ALTER TABLE users
+ * ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ * `);
+ *
+ * // Drop a table
+ * await forgeSQL.executeDDL("DROP TABLE IF EXISTS old_users");
+ * ```
+ */
+ executeDDL(query) {
+ return this.ormInstance.executeDDL(query);
+ }
+ /**
+ * Executes a series of actions within a DDL operation context.
+ * This method provides a way to execute regular SQL queries that should be treated
+ * as DDL operations, ensuring proper operation type context for performance monitoring.
+ *
+ * This method is useful for:
+ * - Executing regular SQL queries in DDL context for monitoring purposes
+ * - Wrapping non-DDL operations that should be treated as DDL for analysis
+ * - Ensuring proper operation type context for complex workflows
+ * - Maintaining DDL operation context across multiple function calls
+ *
+ * @template T - The return type of the actions function
+ * @param actions - Function containing SQL operations to execute in DDL context
+ * @returns Promise that resolves to the return value of the actions function
+ *
+ * @example
+ * ```typescript
+ * // Execute regular SQL queries in DDL context for monitoring
+ * await forgeSQL.executeDDLActions(async () => {
+ * const slowQueries = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * WHERE AVG_LATENCY > 1000000
+ * `);
+ * return slowQueries;
+ * });
+ *
+ * // Execute complex analysis queries in DDL context
+ * const result = await forgeSQL.executeDDLActions(async () => {
+ * const tableInfo = await forgeSQL.execute("SHOW TABLES");
+ * const performanceData = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+ * WHERE SUMMARY_END_TIME > DATE_SUB(NOW(), INTERVAL 1 HOUR)
+ * `);
+ * return { tableInfo, performanceData };
+ * });
+ *
+ * // Execute monitoring queries with error handling
+ * try {
+ * await forgeSQL.executeDDLActions(async () => {
+ * const metrics = await forgeSQL.execute(`
+ * SELECT COUNT(*) as query_count
+ * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * `);
+ * console.log(`Total queries: ${metrics[0].query_count}`);
+ * });
+ * } catch (error) {
+ * console.error("Monitoring query failed:", error);
+ * }
+ * ```
+ */
+ executeDDLActions(actions) {
+ return this.ormInstance.executeDDLActions(actions);
  }
  /**
  * Executes a raw SQL query with both local and global cache support.
@@ -2622,7 +2890,7 @@ class ForgeSQLORM {
  * ```
  */
  executeCacheable(query, cacheTtl) {
- return this.ormInstance.getDrizzleQueryBuilder().executeQueryCacheable(query, cacheTtl);
+ return this.ormInstance.executeCacheable(query, cacheTtl);
  }
  /**
  * Creates a Common Table Expression (CTE) builder for complex queries.
@@ -2632,7 +2900,7 @@ class ForgeSQLORM {
  * @example
  * ```typescript
  * const withQuery = forgeSQL.$with('userStats').as(
- * forgeSQL.select({ userId: users.id, count: sql<number>`count(*)` })
+ * forgeSQL.getDrizzleQueryBuilder().select({ userId: users.id, count: sql<number>`count(*)` })
  * .from(users)
  * .groupBy(users.id)
  * );
@@ -2650,7 +2918,7 @@ class ForgeSQLORM {
  * @example
  * ```typescript
  * const withQuery = forgeSQL.$with('userStats').as(
- * forgeSQL.select({ userId: users.id, count: sql<number>`count(*)` })
+ * forgeSQL.getDrizzleQueryBuilder().select({ userId: users.id, count: sql<number>`count(*)` })
  * .from(users)
  * .groupBy(users.id)
  * );
@@ -3141,6 +3409,14 @@ const clusterStatementsSummaryHistory = informationSchema.table(
  "CLUSTER_STATEMENTS_SUMMARY_HISTORY",
  createClusterStatementsSummarySchema()
  );
+ const statementsSummaryHistory = informationSchema.table(
+ "STATEMENTS_SUMMARY_HISTORY",
+ createClusterStatementsSummarySchema()
+ );
+ const statementsSummary = informationSchema.table(
+ "STATEMENTS_SUMMARY",
+ createClusterStatementsSummarySchema()
+ );
  const clusterStatementsSummary = informationSchema.table(
  "CLUSTER_STATEMENTS_SUMMARY",
  createClusterStatementsSummarySchema()
@@ -3173,12 +3449,12 @@ const applySchemaMigrations = async (migration) => {
  if (typeof migration !== "function") {
  throw new Error("migration is not a function");
  }
- console.log("Provisioning the database");
+ console.debug("Provisioning the database");
  await sql$1.sql._provision();
- console.info("Running schema migrations");
+ console.debug("Running schema migrations");
  const migrations2 = await migration(sql$1.migrationRunner);
  const successfulMigrations = await migrations2.run();
- console.info("Migrations applied:", successfulMigrations);
+ console.debug("Migrations applied:", successfulMigrations);
  const migrationList = await sql$1.migrationRunner.list();
  let migrationHistory = "No migrations found";
  if (Array.isArray(migrationList) && migrationList.length > 0) {
@@ -3187,7 +3463,7 @@ const applySchemaMigrations = async (migration) => {
  );
  migrationHistory = sortedMigrations.map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n");
  }
- console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
+ console.debug("Migrations history:\nid, name, migrated_at\n", migrationHistory);
  return {
  headers: { "Content-Type": ["application/json"] },
  statusCode: 200,
@@ -3294,196 +3570,258 @@ const clearCacheSchedulerTrigger = async (options) => {
  };
  const DEFAULT_MEMORY_THRESHOLD = 8 * 1024 * 1024;
  const DEFAULT_TIMEOUT = 300;
- const topSlowestStatementLastHourTrigger = async (orm, options) => {
- if (!orm) {
- return {
- statusCode: 500,
- headers: { "Content-Type": ["application/json"] },
- body: JSON.stringify({
- success: false,
- message: "ORM instance is required",
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ const DEFAULT_TOP_N = 1;
+ const DEFAULT_HOURS = 1;
+ const DEFAULT_TABLES = "CLUSTER_SUMMARY_AND_HISTORY";
+ const MAX_QUERY_TIMEOUT_MS = 3e3;
+ const MAX_SQL_LENGTH = 1e3;
+ const RETRY_ATTEMPTS = 2;
+ const RETRY_BASE_DELAY_MS = 1e3;
+ const nsToMs = (value) => {
+ const n = Number(value);
+ return Number.isFinite(n) ? n / 1e6 : NaN;
+ };
+ const bytesToMB = (value) => {
+ const n = Number(value);
+ return Number.isFinite(n) ? n / (1024 * 1024) : NaN;
+ };
+ const jsonSafeStringify = (value) => JSON.stringify(value, (_k, v) => typeof v === "bigint" ? v.toString() : v);
+ const sanitizeSQL = (sql2, maxLen = MAX_SQL_LENGTH) => {
+ let s = sql2;
+ s = s.replace(/--[^\n\r]*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
+ s = s.replace(/'(?:\\'|[^'])*'/g, "?");
+ s = s.replace(/\b-?\d+(?:\.\d+)?\b/g, "?");
+ s = s.replace(/\s+/g, " ").trim();
+ if (s.length > maxLen) {
+ s = s.slice(0, maxLen) + " …[truncated]";
+ }
+ return s;
+ };
+ const withTimeout = async (promise, ms) => {
+ let timer;
+ try {
+ return await Promise.race([
+ promise,
+ new Promise((_resolve, reject) => {
+ timer = setTimeout(() => reject(new Error(`TIMEOUT:${ms}`)), ms);
  })
- };
+ ]);
+ } finally {
+ if (timer) clearTimeout(timer);
  }
- let newOptions = options ?? {
- warnThresholdMs: DEFAULT_TIMEOUT,
- memoryThresholdBytes: DEFAULT_MEMORY_THRESHOLD,
- showPlan: false
- };
- const nsToMs = (v) => {
- const n = Number(v);
- return Number.isFinite(n) ? n / 1e6 : NaN;
- };
- const bytesToMB = (v) => {
- const n = Number(v);
- return Number.isFinite(n) ? n / (1024 * 1024) : NaN;
- };
- const jsonSafeStringify = (value) => JSON.stringify(value, (_k, v) => typeof v === "bigint" ? v.toString() : v);
- function sanitizeSQL(sql2, maxLen = 1e3) {
- let s = sql2;
- s = s.replace(/--[^\n\r]*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
- s = s.replace(/'(?:\\'|[^'])*'/g, "?");
- s = s.replace(/\b-?\d+(?:\.\d+)?\b/g, "?");
- s = s.replace(/\s+/g, " ").trim();
- if (s.length > maxLen) {
- s = s.slice(0, maxLen) + " …[truncated]";
- }
- return s;
- }
- const TOP_N = 1;
- try {
- const summaryHistory = clusterStatementsSummaryHistory;
- const summary = clusterStatementsSummary;
- const selectShape = (t) => ({
- digest: t.digest,
- stmtType: t.stmtType,
- schemaName: t.schemaName,
- execCount: t.execCount,
- avgLatencyNs: t.avgLatency,
- maxLatencyNs: t.maxLatency,
- minLatencyNs: t.minLatency,
- avgProcessTimeNs: t.avgProcessTime,
- avgWaitTimeNs: t.avgWaitTime,
- avgBackoffTimeNs: t.avgBackoffTime,
- avgTotalKeys: t.avgTotalKeys,
- firstSeen: t.firstSeen,
- lastSeen: t.lastSeen,
- planInCache: t.planInCache,
- planCacheHits: t.planCacheHits,
- digestText: t.digestText,
- plan: t.plan,
- avgMemBytes: t.avgMem,
- maxMemBytes: t.maxMem
- });
- const lastHourFilterHistory = drizzleOrm.gte(
- summaryHistory.summaryEndTime,
- drizzleOrm.sql`DATE_SUB(NOW(), INTERVAL 1 HOUR)`
- );
- const lastHourFilterSummary = drizzleOrm.gte(
- summary.summaryEndTime,
- drizzleOrm.sql`DATE_SUB(NOW(), INTERVAL 1 HOUR)`
- );
- const qHistory = orm.getDrizzleQueryBuilder().select(selectShape(summaryHistory)).from(summaryHistory).where(lastHourFilterHistory);
- const qSummary = orm.getDrizzleQueryBuilder().select(selectShape(summary)).from(summary).where(lastHourFilterSummary);
- const combined = mysqlCore.unionAll(qHistory, qSummary).as("combined");
- const thresholdNs = Math.floor((newOptions.warnThresholdMs ?? DEFAULT_TIMEOUT) * 1e6);
- const memoryThresholdBytes = newOptions.memoryThresholdBytes ?? DEFAULT_MEMORY_THRESHOLD;
- const grouped = orm.getDrizzleQueryBuilder().select({
- digest: combined.digest,
- stmtType: combined.stmtType,
- schemaName: combined.schemaName,
- execCount: drizzleOrm.sql`SUM(${combined.execCount})`.as("execCount"),
- avgLatencyNs: drizzleOrm.sql`MAX(${combined.avgLatencyNs})`.as("avgLatencyNs"),
- maxLatencyNs: drizzleOrm.sql`MAX(${combined.maxLatencyNs})`.as("maxLatencyNs"),
- minLatencyNs: drizzleOrm.sql`MIN(${combined.minLatencyNs})`.as("minLatencyNs"),
- avgProcessTimeNs: drizzleOrm.sql`MAX(${combined.avgProcessTimeNs})`.as("avgProcessTimeNs"),
- avgWaitTimeNs: drizzleOrm.sql`MAX(${combined.avgWaitTimeNs})`.as("avgWaitTimeNs"),
- avgBackoffTimeNs: drizzleOrm.sql`MAX(${combined.avgBackoffTimeNs})`.as("avgBackoffTimeNs"),
- avgMemBytes: drizzleOrm.sql`MAX(${combined.avgMemBytes})`.as("avgMemBytes"),
- maxMemBytes: drizzleOrm.sql`MAX(${combined.maxMemBytes})`.as("maxMemBytes"),
- avgTotalKeys: drizzleOrm.sql`MAX(${combined.avgTotalKeys})`.as("avgTotalKeys"),
- firstSeen: drizzleOrm.sql`MIN(${combined.firstSeen})`.as("firstSeen"),
- lastSeen: drizzleOrm.sql`MAX(${combined.lastSeen})`.as("lastSeen"),
- planInCache: drizzleOrm.sql`MAX(${combined.planInCache})`.as("planInCache"),
- planCacheHits: drizzleOrm.sql`SUM(${combined.planCacheHits})`.as("planCacheHits"),
- // Prefer a non-empty sample text/plan via MAX; acceptable for de-dup
- digestText: drizzleOrm.sql`MAX(${combined.digestText})`.as("digestText"),
- plan: drizzleOrm.sql`MAX(${combined.plan})`.as("plan")
- }).from(combined).where(
- drizzleOrm.sql`COALESCE(${combined.digest}, '') <> '' AND COALESCE(${combined.digestText}, '') <> '' AND COALESCE(${combined.stmtType}, '') NOT IN ('Use','Set','Show')`
- ).groupBy(combined.digest, combined.stmtType, combined.schemaName).as("grouped");
- const rows = await orm.getDrizzleQueryBuilder().select({
- digest: grouped.digest,
- stmtType: grouped.stmtType,
- schemaName: grouped.schemaName,
- execCount: grouped.execCount,
- avgLatencyNs: grouped.avgLatencyNs,
- maxLatencyNs: grouped.maxLatencyNs,
- minLatencyNs: grouped.minLatencyNs,
- avgProcessTimeNs: grouped.avgProcessTimeNs,
- avgWaitTimeNs: grouped.avgWaitTimeNs,
- avgBackoffTimeNs: grouped.avgBackoffTimeNs,
- avgMemBytes: grouped.avgMemBytes,
- maxMemBytes: grouped.maxMemBytes,
- avgTotalKeys: grouped.avgTotalKeys,
- firstSeen: grouped.firstSeen,
- lastSeen: grouped.lastSeen,
- planInCache: grouped.planInCache,
- planCacheHits: grouped.planCacheHits,
- digestText: grouped.digestText,
- plan: grouped.plan
- }).from(grouped).where(
- drizzleOrm.sql`${grouped.avgLatencyNs} > ${thresholdNs} OR ${grouped.avgMemBytes} > ${memoryThresholdBytes}`
- ).orderBy(drizzleOrm.desc(grouped.avgLatencyNs)).limit(formatLimitOffset(TOP_N));
- const formatted = rows.map((r, i) => ({
- rank: i + 1,
- // 1-based rank in the top N
- digest: r.digest,
- stmtType: r.stmtType,
- schemaName: r.schemaName,
- execCount: r.execCount,
- avgLatencyMs: nsToMs(r.avgLatencyNs),
- // Convert ns to ms for readability
- maxLatencyMs: nsToMs(r.maxLatencyNs),
- minLatencyMs: nsToMs(r.minLatencyNs),
- avgProcessTimeMs: nsToMs(r.avgProcessTimeNs),
- avgWaitTimeMs: nsToMs(r.avgWaitTimeNs),
- avgBackoffTimeMs: nsToMs(r.avgBackoffTimeNs),
- avgMemMB: bytesToMB(r.avgMemBytes),
- maxMemMB: bytesToMB(r.maxMemBytes),
- avgMemBytes: r.avgMemBytes,
- maxMemBytes: r.maxMemBytes,
- avgTotalKeys: r.avgTotalKeys,
- firstSeen: r.firstSeen,
- lastSeen: r.lastSeen,
- planInCache: r.planInCache,
- planCacheHits: r.planCacheHits,
- digestText: sanitizeSQL(r.digestText),
- plan: newOptions.showPlan ? r.plan : void 0
- }));
- for (const f of formatted) {
+ };
+ const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+ const executeWithRetries = async (task, label) => {
+ let attempt = 0;
+ let delay = RETRY_BASE_DELAY_MS;
+ while (true) {
+ try {
+ attempt++;
+ return await task();
+ } catch (error) {
+ const msg = String(error?.message ?? error);
+ const isTimeout = msg.startsWith("TIMEOUT:");
+ if (attempt > RETRY_ATTEMPTS) throw error;
  console.warn(
- `${f.rank}. ${f.stmtType} avg=${f.avgLatencyMs?.toFixed?.(2)}ms max=${f.maxLatencyMs?.toFixed?.(2)}ms mem≈${f.avgMemMB?.toFixed?.(2)}MB(max ${f.maxMemMB?.toFixed?.(2)}MB) exec=${f.execCount}
- digest=${f.digest}
- sql=${(f.digestText || "").slice(0, 300)}${f.digestText && f.digestText.length > 300 ? "…" : ""}`
+ `${label}: attempt ${attempt} failed${isTimeout ? " (timeout)" : ""}; retrying in ${delay}ms...`,
+ error
  );
- if (newOptions.showPlan && f.plan) {
- console.warn(` full plan:
- ${f.plan}`);
- }
+ await sleep(delay);
+ delay *= 2;
  }
- return {
- headers: { "Content-Type": ["application/json"] },
- statusCode: 200,
- statusText: "OK",
- body: jsonSafeStringify({
- success: true,
- window: "last_1h",
- top: TOP_N,
- warnThresholdMs: newOptions.warnThresholdMs,
- memoryThresholdBytes: newOptions.memoryThresholdBytes,
- showPlan: newOptions.showPlan,
- rows: formatted,
- generatedAt: (/* @__PURE__ */ new Date()).toISOString()
- })
- };
+ }
+ };
+ const createErrorResponse = (message, error) => ({
+ headers: { "Content-Type": ["application/json"] },
+ statusCode: 500,
+ statusText: "Internal Server Error",
+ body: jsonSafeStringify({
+ success: false,
+ message,
+ error: error?.cause?.context?.debug?.sqlMessage ?? error?.cause?.message ?? error?.message,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ })
+ });
+ const createSuccessResponse = (formatted, options) => ({
+ headers: { "Content-Type": ["application/json"] },
+ statusCode: 200,
+ statusText: "OK",
+ body: jsonSafeStringify({
+ success: true,
+ window: `last_${options.hours}h`,
+ top: options.topN,
+ warnThresholdMs: options.warnThresholdMs,
+ memoryThresholdBytes: options.memoryThresholdBytes,
+ showPlan: options.showPlan,
+ rows: formatted,
+ generatedAt: (/* @__PURE__ */ new Date()).toISOString()
+ })
+ });
+ const createSelectShape = (table2) => ({
+ digest: table2.digest,
+ stmtType: table2.stmtType,
+ schemaName: table2.schemaName,
+ execCount: table2.execCount,
+ avgLatencyNs: table2.avgLatency,
+ maxLatencyNs: table2.maxLatency,
+ minLatencyNs: table2.minLatency,
+ avgProcessTimeNs: table2.avgProcessTime,
+ avgWaitTimeNs: table2.avgWaitTime,
+ avgBackoffTimeNs: table2.avgBackoffTime,
+ avgTotalKeys: table2.avgTotalKeys,
+ firstSeen: table2.firstSeen,
+ lastSeen: table2.lastSeen,
+ planInCache: table2.planInCache,
+ planCacheHits: table2.planCacheHits,
+ digestText: table2.digestText,
+ plan: table2.plan,
+ avgMemBytes: table2.avgMem,
+ maxMemBytes: table2.maxMem
+ });
+ const buildCombinedQuery = (orm, options) => {
+ const summaryHistory = statementsSummary;
+ const summary = statementsSummary;
+ const summaryHistoryCluster = clusterStatementsSummaryHistory;
+ const summaryCluster = clusterStatementsSummary;
+ const lastHoursFilter = (table2) => drizzleOrm.gte(table2.summaryEndTime, drizzleOrm.sql`DATE_SUB(NOW(), INTERVAL ${options.hours} HOUR)`);
+ const qHistory = orm.getDrizzleQueryBuilder().select(createSelectShape(summaryHistory)).from(summaryHistory).where(lastHoursFilter(summaryHistory));
+ const qSummary = orm.getDrizzleQueryBuilder().select(createSelectShape(summary)).from(summary).where(lastHoursFilter(summary));
+ const qHistoryCluster = orm.getDrizzleQueryBuilder().select(createSelectShape(summaryHistoryCluster)).from(summaryHistoryCluster).where(lastHoursFilter(summaryHistoryCluster));
+ const qSummaryCluster = orm.getDrizzleQueryBuilder().select(createSelectShape(summaryCluster)).from(summaryCluster).where(lastHoursFilter(summaryCluster));
+ switch (options.tables) {
+ case "SUMMARY_AND_HISTORY":
+ return mysqlCore.unionAll(qHistory, qSummary).as("combined");
+ case "CLUSTER_SUMMARY_AND_HISTORY":
+ return mysqlCore.unionAll(qHistoryCluster, qSummaryCluster).as("combined");
+ default:
+ throw new Error(`Unsupported table configuration: ${options.tables}`);
+ }
+ };
+ const buildGroupedQuery = (orm, combined) => {
+ return orm.getDrizzleQueryBuilder().select({
+ digest: combined.digest,
+ stmtType: combined.stmtType,
+ schemaName: combined.schemaName,
+ execCount: drizzleOrm.sql`SUM(${combined.execCount})`.as("execCount"),
+ avgLatencyNs: drizzleOrm.sql`MAX(${combined.avgLatencyNs})`.as("avgLatencyNs"),
+ maxLatencyNs: drizzleOrm.sql`MAX(${combined.maxLatencyNs})`.as("maxLatencyNs"),
+ minLatencyNs: drizzleOrm.sql`MIN(${combined.minLatencyNs})`.as("minLatencyNs"),
+ avgProcessTimeNs: drizzleOrm.sql`MAX(${combined.avgProcessTimeNs})`.as("avgProcessTimeNs"),
+ avgWaitTimeNs: drizzleOrm.sql`MAX(${combined.avgWaitTimeNs})`.as("avgWaitTimeNs"),
+ avgBackoffTimeNs: drizzleOrm.sql`MAX(${combined.avgBackoffTimeNs})`.as("avgBackoffTimeNs"),
+ avgMemBytes: drizzleOrm.sql`MAX(${combined.avgMemBytes})`.as("avgMemBytes"),
+ maxMemBytes: drizzleOrm.sql`MAX(${combined.maxMemBytes})`.as("maxMemBytes"),
+ avgTotalKeys: drizzleOrm.sql`MAX(${combined.avgTotalKeys})`.as("avgTotalKeys"),
+ firstSeen: drizzleOrm.sql`MIN(${combined.firstSeen})`.as("firstSeen"),
+ lastSeen: drizzleOrm.sql`MAX(${combined.lastSeen})`.as("lastSeen"),
+ planInCache: drizzleOrm.sql`MAX(${combined.planInCache})`.as("planInCache"),
+ planCacheHits: drizzleOrm.sql`SUM(${combined.planCacheHits})`.as("planCacheHits"),
+ digestText: drizzleOrm.sql`MAX(${combined.digestText})`.as("digestText"),
+ plan: drizzleOrm.sql`MAX(${combined.plan})`.as("plan")
+ }).from(combined).where(
+ drizzleOrm.sql`COALESCE(${combined.digest}, '') <> '' AND COALESCE(${combined.digestText}, '') <> '' AND COALESCE(${combined.stmtType}, '') NOT IN ('Use','Set','Show')`
+ ).groupBy(combined.digest, combined.stmtType, combined.schemaName).as("grouped");
+ };
+ const buildFinalQuery = (orm, grouped, options) => {
+ const thresholdNs = Math.floor(options.warnThresholdMs * 1e6);
+ const memoryThresholdBytes = options.memoryThresholdBytes;
+ const query = orm.getDrizzleQueryBuilder().select({
+ digest: grouped.digest,
+ stmtType: grouped.stmtType,
+ schemaName: grouped.schemaName,
+ execCount: grouped.execCount,
+ avgLatencyNs: grouped.avgLatencyNs,
+ maxLatencyNs: grouped.maxLatencyNs,
+ minLatencyNs: grouped.minLatencyNs,
+ avgProcessTimeNs: grouped.avgProcessTimeNs,
+ avgWaitTimeNs: grouped.avgWaitTimeNs,
+ avgBackoffTimeNs: grouped.avgBackoffTimeNs,
+ avgMemBytes: grouped.avgMemBytes,
+ maxMemBytes: grouped.maxMemBytes,
+ avgTotalKeys: grouped.avgTotalKeys,
+ firstSeen: grouped.firstSeen,
+ lastSeen: grouped.lastSeen,
+ planInCache: grouped.planInCache,
+ planCacheHits: grouped.planCacheHits,
+ digestText: grouped.digestText,
+ plan: grouped.plan
+ }).from(grouped).where(
+ drizzleOrm.sql`${grouped.avgLatencyNs} > ${thresholdNs} OR ${grouped.avgMemBytes} > ${memoryThresholdBytes}`
+ ).orderBy(drizzleOrm.desc(grouped.avgLatencyNs)).limit(formatLimitOffset(options.topN));
+ if (options.operationType === "DDL") {
+ return orm.executeDDLActions(async () => await query);
+ }
+ return query;
+ };
+ const formatQueryResults = (rows, options) => {
+ return rows.map((row, index) => ({
+ rank: index + 1,
+ digest: row.digest,
+ stmtType: row.stmtType,
+ schemaName: row.schemaName,
+ execCount: row.execCount,
+ avgLatencyMs: nsToMs(row.avgLatencyNs),
+ maxLatencyMs: nsToMs(row.maxLatencyNs),
+ minLatencyMs: nsToMs(row.minLatencyNs),
+ avgProcessTimeMs: nsToMs(row.avgProcessTimeNs),
+ avgWaitTimeMs: nsToMs(row.avgWaitTimeNs),
+ avgBackoffTimeMs: nsToMs(row.avgBackoffTimeNs),
+ avgMemMB: bytesToMB(row.avgMemBytes),
+ maxMemMB: bytesToMB(row.maxMemBytes),
+ avgMemBytes: row.avgMemBytes,
+ maxMemBytes: row.maxMemBytes,
+ avgTotalKeys: row.avgTotalKeys,
+ firstSeen: row.firstSeen,
+ lastSeen: row.lastSeen,
+ planInCache: row.planInCache,
+ planCacheHits: row.planCacheHits,
+ digestText: options.operationType === "DDL" ? row.digestText : sanitizeSQL(row.digestText),
+ plan: options.showPlan ? row.plan : void 0
+ }));
+ };
+ const logQueryResults = (formatted, options) => {
+ for (const result of formatted) {
+ console.warn(
+ `${result.rank}. ${result.stmtType} avg=${result.avgLatencyMs?.toFixed?.(2)}ms max=${result.maxLatencyMs?.toFixed?.(2)}ms mem≈${result.avgMemMB?.toFixed?.(2)}MB(max ${result.maxMemMB?.toFixed?.(2)}MB) exec=${result.execCount}
+ digest=${result.digest}
+ sql=${(result.digestText || "").slice(0, 300)}${result.digestText && result.digestText.length > 300 ? "…" : ""}`
+ );
+ if (options.showPlan && result.plan) {
+ console.warn(` full plan:
+ ${result.plan}`);
+ }
+ }
+ };
+ const topSlowestStatementLastHourTrigger = async (orm, options) => {
+ if (!orm) {
+ return createErrorResponse("ORM instance is required");
+ }
+ const mergedOptions = {
+ warnThresholdMs: options?.warnThresholdMs ?? DEFAULT_TIMEOUT,
+ memoryThresholdBytes: options?.memoryThresholdBytes ?? DEFAULT_MEMORY_THRESHOLD,
+ showPlan: options?.showPlan ?? false,
+ operationType: options?.operationType ?? "DML",
+ topN: options?.topN ?? DEFAULT_TOP_N,
+ hours: options?.hours ?? DEFAULT_HOURS,
+ tables: options?.tables ?? DEFAULT_TABLES
+ };
+ try {
+ const combined = buildCombinedQuery(orm, mergedOptions);
+ const grouped = buildGroupedQuery(orm, combined);
+ const finalQuery = buildFinalQuery(orm, grouped, mergedOptions);
+ const rows = await executeWithRetries(
+ () => withTimeout(finalQuery, MAX_QUERY_TIMEOUT_MS),
+ "topSlowestStatementLastHourTrigger"
+ );
+ const formatted = formatQueryResults(rows, mergedOptions);
+ logQueryResults(formatted, mergedOptions);
+ return createSuccessResponse(formatted, mergedOptions);
  } catch (error) {
- console.error(
- "Error in topSlowestStatementLastHourTrigger:",
+ console.warn(
+ "Error in topSlowestStatementLastHourTrigger (one-off errors can be ignored; if it recurs, investigate):",
  error?.cause?.context?.debug?.sqlMessage ?? error?.cause ?? error
  );
- return {
- headers: { "Content-Type": ["application/json"] },
- statusCode: 500,
- statusText: "Internal Server Error",
- body: jsonSafeStringify({
- success: false,
- message: "Failed to fetch or log slow queries",
- error: error?.cause?.context?.debug?.sqlMessage ?? error?.cause?.message,
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
- })
- };
+ return createErrorResponse("Failed to fetch or log slow queries", error);
  }
  };
  const getHttpResponse = (statusCode, body) => {
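The reworked trigger above now takes an extended options object whose defaults are visible in `mergedOptions`. A hedged sketch of how a scheduled web trigger handler might invoke it; the handler name and the `forgeSQL` instance are illustrative:

```typescript
// Sketch only: option names and defaults mirror mergedOptions above;
// the handler name and forgeSQL instance are illustrative.
export const slowQueryMonitor = async () =>
  topSlowestStatementLastHourTrigger(forgeSQL, {
    warnThresholdMs: 300,                   // DEFAULT_TIMEOUT
    memoryThresholdBytes: 8 * 1024 * 1024,  // DEFAULT_MEMORY_THRESHOLD
    showPlan: false,
    operationType: "DML",                   // "DDL" routes the query through executeDDLActions
    topN: 1,                                // DEFAULT_TOP_N
    hours: 1,                               // DEFAULT_HOURS
    tables: "CLUSTER_SUMMARY_AND_HISTORY",  // or "SUMMARY_AND_HISTORY"
  });
```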
@@ -3524,6 +3862,7 @@ exports.getHttpResponse = getHttpResponse;
  exports.getPrimaryKeys = getPrimaryKeys;
  exports.getTableMetadata = getTableMetadata;
  exports.getTables = getTables;
+ exports.isUpdateQueryResponse = isUpdateQueryResponse;
  exports.mapSelectAllFieldsToAlias = mapSelectAllFieldsToAlias;
  exports.mapSelectFieldsWithAlias = mapSelectFieldsWithAlias;
  exports.migrations = migrations;
@@ -3531,5 +3870,7 @@ exports.nextVal = nextVal;
  exports.parseDateTime = parseDateTime;
  exports.patchDbWithSelectAliased = patchDbWithSelectAliased;
  exports.slowQuery = slowQuery;
+ exports.statementsSummary = statementsSummary;
+ exports.statementsSummaryHistory = statementsSummaryHistory;
  exports.topSlowestStatementLastHourTrigger = topSlowestStatementLastHourTrigger;
  //# sourceMappingURL=ForgeSQLORM.js.map