forge-sql-orm 2.1.5 → 2.1.7

This diff compares the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (41)
  1. package/README.md +135 -53
  2. package/dist/ForgeSQLORM.js +572 -231
  3. package/dist/ForgeSQLORM.js.map +1 -1
  4. package/dist/ForgeSQLORM.mjs +572 -231
  5. package/dist/ForgeSQLORM.mjs.map +1 -1
  6. package/dist/core/ForgeSQLORM.d.ts +91 -3
  7. package/dist/core/ForgeSQLORM.d.ts.map +1 -1
  8. package/dist/core/ForgeSQLQueryBuilder.d.ts +89 -2
  9. package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
  10. package/dist/core/SystemTables.d.ts +3654 -0
  11. package/dist/core/SystemTables.d.ts.map +1 -1
  12. package/dist/lib/drizzle/extensions/additionalActions.d.ts +2 -2
  13. package/dist/lib/drizzle/extensions/additionalActions.d.ts.map +1 -1
  14. package/dist/utils/forgeDriver.d.ts +61 -14
  15. package/dist/utils/forgeDriver.d.ts.map +1 -1
  16. package/dist/utils/metadataContextUtils.d.ts +1 -1
  17. package/dist/utils/metadataContextUtils.d.ts.map +1 -1
  18. package/dist/utils/requestTypeContextUtils.d.ts +8 -0
  19. package/dist/utils/requestTypeContextUtils.d.ts.map +1 -0
  20. package/dist/webtriggers/topSlowestStatementLastHourTrigger.d.ts +90 -65
  21. package/dist/webtriggers/topSlowestStatementLastHourTrigger.d.ts.map +1 -1
  22. package/package.json +9 -9
  23. package/src/core/ForgeSQLCrudOperations.ts +3 -3
  24. package/src/core/ForgeSQLORM.ts +334 -124
  25. package/src/core/ForgeSQLQueryBuilder.ts +116 -20
  26. package/src/core/ForgeSQLSelectOperations.ts +2 -2
  27. package/src/core/SystemTables.ts +16 -0
  28. package/src/lib/drizzle/extensions/additionalActions.ts +24 -22
  29. package/src/utils/cacheContextUtils.ts +2 -2
  30. package/src/utils/cacheUtils.ts +12 -12
  31. package/src/utils/forgeDriver.ts +219 -40
  32. package/src/utils/forgeDriverProxy.ts +2 -2
  33. package/src/utils/metadataContextUtils.ts +11 -13
  34. package/src/utils/requestTypeContextUtils.ts +11 -0
  35. package/src/utils/sqlUtils.ts +1 -1
  36. package/src/webtriggers/applyMigrationsWebTrigger.ts +9 -9
  37. package/src/webtriggers/clearCacheSchedulerTrigger.ts +1 -1
  38. package/src/webtriggers/dropMigrationWebTrigger.ts +2 -2
  39. package/src/webtriggers/dropTablesMigrationWebTrigger.ts +2 -2
  40. package/src/webtriggers/fetchSchemaWebTrigger.ts +1 -1
  41. package/src/webtriggers/topSlowestStatementLastHourTrigger.ts +511 -308
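
The headline additions in this release are driver-level DDL support and a request timeout around @forge/sql calls: the ORM gains executeDDL and executeDDLActions, the driver routes DDL statements through sql.executeDDL with inlined parameters, and new statementsSummary / statementsSummaryHistory system tables are exported. A minimal usage sketch based on the JSDoc added in the diff below; the `forgeSQL` instance and the `audit_log` table are illustrative assumptions, not part of the package:

```typescript
// Sketch only: `forgeSQL` stands for an already-constructed forge-sql-orm instance.
// executeDDL runs schema statements in a DDL operation context (never cached).
await forgeSQL.executeDDL(`
  CREATE TABLE IF NOT EXISTS audit_log (
    id INT PRIMARY KEY AUTO_INCREMENT,
    message VARCHAR(255) NOT NULL
  )
`);

// executeDDLActions wraps arbitrary queries in the same DDL context, which this
// release uses for INFORMATION_SCHEMA monitoring queries.
const tables = await forgeSQL.executeDDLActions(async () => forgeSQL.execute("SHOW TABLES"));
```
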
@@ -1016,7 +1016,7 @@ class ForgeSQLSelectOperations {
  }
  }
  const metadataQueryContext = new AsyncLocalStorage();
- async function saveMetaDataInContextContext(metadata) {
+ async function saveMetaDataToContext(metadata) {
  const context = metadataQueryContext.getStore();
  if (context && metadata) {
  context.totalResponseSize += metadata.responseSize;
@@ -1027,26 +1027,101 @@ async function saveMetaDataInContextContext(metadata) {
  async function getLastestMetadata() {
  return metadataQueryContext.getStore();
  }
- const forgeDriver = async (query, params, method) => {
- if (method == "execute") {
- const sqlStatement = sql$1.prepare(query);
- if (params) {
- sqlStatement.bindParams(...params);
+ const operationTypeQueryContext = new AsyncLocalStorage();
+ async function getOperationType() {
+ return operationTypeQueryContext.getStore()?.operationType ?? "DML";
+ }
+ function isUpdateQueryResponse(obj) {
+ return obj !== null && typeof obj === "object" && typeof obj.affectedRows === "number" && typeof obj.insertId === "number";
+ }
+ async function withTimeout$1(promise, timeoutMs = 1e4) {
+ let timeoutId;
+ const timeoutPromise = new Promise((_, reject) => {
+ timeoutId = setTimeout(() => {
+ reject(
+ new Error(
+ `Atlassian @forge/sql did not return a response within ${timeoutMs}ms (${timeoutMs / 1e3} seconds), so the request is blocked. Possible causes: slow query, network issues, or exceeding Forge SQL limits.`
+ )
+ );
+ }, timeoutMs);
+ });
+ try {
+ return await Promise.race([promise, timeoutPromise]);
+ } finally {
+ if (timeoutId) {
+ clearTimeout(timeoutId);
  }
- const updateQueryResponseResults = await sqlStatement.execute();
- let result = updateQueryResponseResults.rows;
- return { ...result, rows: [result] };
- } else {
- const sqlStatement = await sql$1.prepare(query);
- if (params) {
- await sqlStatement.bindParams(...params);
+ }
+ }
+ function inlineParams(sql2, params) {
+ let i = 0;
+ return sql2.replace(/\?/g, () => {
+ const val = params[i++];
+ if (val === null) return "NULL";
+ if (typeof val === "number") return val.toString();
+ return `'${String(val).replace(/'/g, "''")}'`;
+ });
+ }
+ async function processDDLResult(method, result) {
+ if (result.metadata) {
+ await saveMetaDataToContext(result.metadata);
+ }
+ if (!result?.rows) {
+ return { rows: [] };
+ }
+ if (isUpdateQueryResponse(result.rows)) {
+ const oneRow = result.rows;
+ return { ...oneRow, rows: [oneRow] };
+ }
+ if (Array.isArray(result.rows)) {
+ if (method === "execute") {
+ return { rows: result.rows };
+ } else {
+ const rows = result.rows.map((r) => Object.values(r));
+ return { rows };
  }
- const result = await sqlStatement.execute();
- await saveMetaDataInContextContext(result.metadata);
- let rows;
- rows = result.rows.map((r) => Object.values(r));
- return { rows };
  }
+ return { rows: [] };
+ }
+ async function processExecuteMethod(query, params) {
+ const sqlStatement = sql$1.prepare(query);
+ if (params) {
+ sqlStatement.bindParams(...params);
+ }
+ const result = await withTimeout$1(sqlStatement.execute());
+ await saveMetaDataToContext(result.metadata);
+ if (!result?.rows) {
+ return { rows: [] };
+ }
+ if (isUpdateQueryResponse(result.rows)) {
+ const oneRow = result.rows;
+ return { ...oneRow, rows: [oneRow] };
+ }
+ return { rows: result.rows };
+ }
+ async function processAllMethod(query, params) {
+ const sqlStatement = await sql$1.prepare(query);
+ if (params) {
+ await sqlStatement.bindParams(...params);
+ }
+ const result = await withTimeout$1(sqlStatement.execute());
+ await saveMetaDataToContext(result.metadata);
+ if (!result?.rows) {
+ return { rows: [] };
+ }
+ const rows = result.rows.map((r) => Object.values(r));
+ return { rows };
+ }
+ const forgeDriver = async (query, params, method) => {
+ const operationType = await getOperationType();
+ if (operationType === "DDL") {
+ const result = await withTimeout$1(sql$1.executeDDL(inlineParams(query, params)));
+ return await processDDLResult(method, result);
+ }
+ if (method === "execute") {
+ return await processExecuteMethod(query, params ?? []);
+ }
+ return await processAllMethod(query, params ?? []);
  };
  function injectSqlHints(query, hints) {
  if (!hints) {
@@ -1120,7 +1195,7 @@ async function handleSuccessfulExecution(rows, onfulfilled, table, options, isCa
  if (isCached && !cacheApplicationContext.getStore()) {
  await clearCache(table, options);
  }
- const result = onfulfilled?.(rows);
+ const result = onfulfilled ? onfulfilled(rows) : rows;
  return result;
  } catch (error) {
  if (shouldClearCacheOnError(error)) {
@@ -1191,11 +1266,11 @@ async function handleCachedQuery(target, options, cacheTtl, selections, aliasMap
  try {
  const localCached = await getQueryLocalCacheQuery(target, options);
  if (localCached) {
- return onfulfilled?.(localCached);
+ return onfulfilled ? onfulfilled(localCached) : localCached;
  }
  const cacheResult = await getFromCache(target, options);
  if (cacheResult) {
- return onfulfilled?.(cacheResult);
+ return onfulfilled ? onfulfilled(cacheResult) : cacheResult;
  }
  const rows = await target.execute();
  const transformed = applyFromDriverTransform(rows, selections, aliasMap);
@@ -1203,23 +1278,29 @@ async function handleCachedQuery(target, options, cacheTtl, selections, aliasMap
  await setCacheResult(target, options, transformed, cacheTtl).catch((cacheError) => {
  console.warn("Cache set error:", cacheError);
  });
- return onfulfilled?.(transformed);
+ return onfulfilled ? onfulfilled(transformed) : transformed;
  } catch (error) {
- return onrejected?.(error);
+ if (onrejected) {
+ return onrejected(error);
+ }
+ throw error;
  }
  }
  async function handleNonCachedQuery(target, options, selections, aliasMap, onfulfilled, onrejected) {
  try {
  const localCached = await getQueryLocalCacheQuery(target, options);
  if (localCached) {
- return onfulfilled?.(localCached);
+ return onfulfilled ? onfulfilled(localCached) : localCached;
  }
  const rows = await target.execute();
  const transformed = applyFromDriverTransform(rows, selections, aliasMap);
  await saveQueryLocalCacheQuery(target, transformed, options);
- return onfulfilled?.(transformed);
+ return onfulfilled ? onfulfilled(transformed) : transformed;
  } catch (error) {
- return onrejected?.(error);
+ if (onrejected) {
+ return onrejected(error);
+ }
+ throw error;
  }
  }
  function createAliasedSelectBuilder(db, fields, selectFn, useCache, options, cacheTtl) {
@@ -1288,10 +1369,8 @@ function createRawQueryExecutor(db, options, useGlobalCache = false) {
  return async function(query, cacheTtl) {
  let sql2;
  if (isSQLWrapper(query)) {
- const sqlWrapper = query;
- sql2 = sqlWrapper.getSQL().toQuery(
- db.dialect
- );
+ const dialect = db.dialect;
+ sql2 = dialect.sqlToQuery(query);
  } else {
  sql2 = {
  sql: query,
@@ -1840,19 +1919,26 @@ class ForgeSQLORMImpl {
  * ```
  */
  async executeWithMetadata(query, onMetadata) {
- return metadataQueryContext.run({
- totalDbExecutionTime: 0,
- totalResponseSize: 0
- }, async () => {
- try {
- return await query();
- } finally {
- const metadata = await getLastestMetadata();
- if (metadata && metadata.lastMetadata) {
- await onMetadata(metadata.totalDbExecutionTime, metadata.totalResponseSize, metadata.lastMetadata);
+ return metadataQueryContext.run(
+ {
+ totalDbExecutionTime: 0,
+ totalResponseSize: 0
+ },
+ async () => {
+ try {
+ return await query();
+ } finally {
+ const metadata = await getLastestMetadata();
+ if (metadata && metadata.lastMetadata) {
+ await onMetadata(
+ metadata.totalDbExecutionTime,
+ metadata.totalResponseSize,
+ metadata.lastMetadata
+ );
+ }
  }
  }
- });
+ );
  }
  /**
  * Executes operations within a cache context that collects cache eviction events.
@@ -2235,6 +2321,97 @@ class ForgeSQLORMImpl {
  execute(query) {
  return this.drizzle.executeQuery(query);
  }
+ /**
+ * Executes a Data Definition Language (DDL) SQL query.
+ * DDL operations include CREATE, ALTER, DROP, TRUNCATE, and other schema modification statements.
+ *
+ * This method is specifically designed for DDL operations and provides:
+ * - Proper operation type context for DDL queries
+ * - No caching (DDL operations should not be cached)
+ * - Direct execution without query optimization
+ *
+ * @template T - The expected return type of the query result
+ * @param query - The DDL SQL query to execute (SQLWrapper or string)
+ * @returns Promise with query results
+ * @throws {Error} If the DDL operation fails
+ *
+ * @example
+ * ```typescript
+ * // Create a new table
+ * await forgeSQL.executeDDL(`
+ * CREATE TABLE users (
+ * id INT PRIMARY KEY AUTO_INCREMENT,
+ * name VARCHAR(255) NOT NULL,
+ * email VARCHAR(255) UNIQUE
+ * )
+ * `);
+ *
+ * // Alter table structure
+ * await forgeSQL.executeDDL(sql`
+ * ALTER TABLE users
+ * ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ * `);
+ *
+ * // Drop a table
+ * await forgeSQL.executeDDL("DROP TABLE IF EXISTS old_users");
+ * ```
+ */
+ async executeDDL(query) {
+ return this.executeDDLActions(async () => this.drizzle.executeQuery(query));
+ }
+ /**
+ * Executes a series of actions within a DDL operation context.
+ * This method provides a way to execute regular SQL queries that should be treated
+ * as DDL operations, ensuring proper operation type context for performance monitoring.
+ *
+ * This method is useful for:
+ * - Executing regular SQL queries in DDL context for monitoring purposes
+ * - Wrapping non-DDL operations that should be treated as DDL for analysis
+ * - Ensuring proper operation type context for complex workflows
+ * - Maintaining DDL operation context across multiple function calls
+ *
+ * @template T - The return type of the actions function
+ * @param actions - Function containing SQL operations to execute in DDL context
+ * @returns Promise that resolves to the return value of the actions function
+ *
+ * @example
+ * ```typescript
+ * // Execute regular SQL queries in DDL context for monitoring
+ * await forgeSQL.executeDDLActions(async () => {
+ * const slowQueries = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * WHERE AVG_LATENCY > 1000000
+ * `);
+ * return slowQueries;
+ * });
+ *
+ * // Execute complex analysis queries in DDL context
+ * const result = await forgeSQL.executeDDLActions(async () => {
+ * const tableInfo = await forgeSQL.execute("SHOW TABLES");
+ * const performanceData = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+ * WHERE SUMMARY_END_TIME > DATE_SUB(NOW(), INTERVAL 1 HOUR)
+ * `);
+ * return { tableInfo, performanceData };
+ * });
+ *
+ * // Execute monitoring queries with error handling
+ * try {
+ * await forgeSQL.executeDDLActions(async () => {
+ * const metrics = await forgeSQL.execute(`
+ * SELECT COUNT(*) as query_count
+ * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * `);
+ * console.log(`Total queries: ${metrics[0].query_count}`);
+ * });
+ * } catch (error) {
+ * console.error("Monitoring query failed:", error);
+ * }
+ * ```
+ */
+ async executeDDLActions(actions) {
+ return operationTypeQueryContext.run({ operationType: "DDL" }, async () => actions());
+ }
  /**
  * Executes a raw SQL query with both local and global cache support.
  * This method provides comprehensive caching for raw SQL queries:
@@ -2582,7 +2759,98 @@ class ForgeSQLORM {
  * ```
  */
  execute(query) {
- return this.ormInstance.getDrizzleQueryBuilder().executeQuery(query);
+ return this.ormInstance.execute(query);
+ }
+ /**
+ * Executes a Data Definition Language (DDL) SQL query.
+ * DDL operations include CREATE, ALTER, DROP, TRUNCATE, and other schema modification statements.
+ *
+ * This method is specifically designed for DDL operations and provides:
+ * - Proper operation type context for DDL queries
+ * - No caching (DDL operations should not be cached)
+ * - Direct execution without query optimization
+ *
+ * @template T - The expected return type of the query result
+ * @param query - The DDL SQL query to execute (SQLWrapper or string)
+ * @returns Promise with query results
+ * @throws {Error} If the DDL operation fails
+ *
+ * @example
+ * ```typescript
+ * // Create a new table
+ * await forgeSQL.executeDDL(`
+ * CREATE TABLE users (
+ * id INT PRIMARY KEY AUTO_INCREMENT,
+ * name VARCHAR(255) NOT NULL,
+ * email VARCHAR(255) UNIQUE
+ * )
+ * `);
+ *
+ * // Alter table structure
+ * await forgeSQL.executeDDL(sql`
+ * ALTER TABLE users
+ * ADD COLUMN created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ * `);
+ *
+ * // Drop a table
+ * await forgeSQL.executeDDL("DROP TABLE IF EXISTS old_users");
+ * ```
+ */
+ executeDDL(query) {
+ return this.ormInstance.executeDDL(query);
+ }
+ /**
+ * Executes a series of actions within a DDL operation context.
+ * This method provides a way to execute regular SQL queries that should be treated
+ * as DDL operations, ensuring proper operation type context for performance monitoring.
+ *
+ * This method is useful for:
+ * - Executing regular SQL queries in DDL context for monitoring purposes
+ * - Wrapping non-DDL operations that should be treated as DDL for analysis
+ * - Ensuring proper operation type context for complex workflows
+ * - Maintaining DDL operation context across multiple function calls
+ *
+ * @template T - The return type of the actions function
+ * @param actions - Function containing SQL operations to execute in DDL context
+ * @returns Promise that resolves to the return value of the actions function
+ *
+ * @example
+ * ```typescript
+ * // Execute regular SQL queries in DDL context for monitoring
+ * await forgeSQL.executeDDLActions(async () => {
+ * const slowQueries = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * WHERE AVG_LATENCY > 1000000
+ * `);
+ * return slowQueries;
+ * });
+ *
+ * // Execute complex analysis queries in DDL context
+ * const result = await forgeSQL.executeDDLActions(async () => {
+ * const tableInfo = await forgeSQL.execute("SHOW TABLES");
+ * const performanceData = await forgeSQL.execute(`
+ * SELECT * FROM INFORMATION_SCHEMA.CLUSTER_STATEMENTS_SUMMARY_HISTORY
+ * WHERE SUMMARY_END_TIME > DATE_SUB(NOW(), INTERVAL 1 HOUR)
+ * `);
+ * return { tableInfo, performanceData };
+ * });
+ *
+ * // Execute monitoring queries with error handling
+ * try {
+ * await forgeSQL.executeDDLActions(async () => {
+ * const metrics = await forgeSQL.execute(`
+ * SELECT COUNT(*) as query_count
+ * FROM INFORMATION_SCHEMA.STATEMENTS_SUMMARY
+ * `);
+ * console.log(`Total queries: ${metrics[0].query_count}`);
+ * });
+ * } catch (error) {
+ * console.error("Monitoring query failed:", error);
+ * }
+ * ```
+ */
+ executeDDLActions(actions) {
+ return this.ormInstance.executeDDLActions(actions);
  }
  /**
  * Executes a raw SQL query with both local and global cache support.
@@ -2603,7 +2871,7 @@ class ForgeSQLORM {
  * ```
  */
  executeCacheable(query, cacheTtl) {
- return this.ormInstance.getDrizzleQueryBuilder().executeQueryCacheable(query, cacheTtl);
+ return this.ormInstance.executeCacheable(query, cacheTtl);
  }
  /**
  * Creates a Common Table Expression (CTE) builder for complex queries.
@@ -2613,7 +2881,7 @@ class ForgeSQLORM {
  * @example
  * ```typescript
  * const withQuery = forgeSQL.$with('userStats').as(
- * forgeSQL.select({ userId: users.id, count: sql<number>`count(*)` })
+ * forgeSQL.getDrizzleQueryBuilder().select({ userId: users.id, count: sql<number>`count(*)` })
  * .from(users)
  * .groupBy(users.id)
  * );
@@ -2631,7 +2899,7 @@ class ForgeSQLORM {
  * @example
  * ```typescript
  * const withQuery = forgeSQL.$with('userStats').as(
- * forgeSQL.select({ userId: users.id, count: sql<number>`count(*)` })
+ * forgeSQL.getDrizzleQueryBuilder().select({ userId: users.id, count: sql<number>`count(*)` })
  * .from(users)
  * .groupBy(users.id)
  * );
@@ -3122,6 +3390,14 @@ const clusterStatementsSummaryHistory = informationSchema.table(
  "CLUSTER_STATEMENTS_SUMMARY_HISTORY",
  createClusterStatementsSummarySchema()
  );
+ const statementsSummaryHistory = informationSchema.table(
+ "STATEMENTS_SUMMARY_HISTORY",
+ createClusterStatementsSummarySchema()
+ );
+ const statementsSummary = informationSchema.table(
+ "STATEMENTS_SUMMARY",
+ createClusterStatementsSummarySchema()
+ );
  const clusterStatementsSummary = informationSchema.table(
  "CLUSTER_STATEMENTS_SUMMARY",
  createClusterStatementsSummarySchema()
@@ -3154,12 +3430,12 @@ const applySchemaMigrations = async (migration) => {
  if (typeof migration !== "function") {
  throw new Error("migration is not a function");
  }
- console.log("Provisioning the database");
+ console.debug("Provisioning the database");
  await sql$1._provision();
- console.info("Running schema migrations");
+ console.debug("Running schema migrations");
  const migrations2 = await migration(migrationRunner);
  const successfulMigrations = await migrations2.run();
- console.info("Migrations applied:", successfulMigrations);
+ console.debug("Migrations applied:", successfulMigrations);
  const migrationList = await migrationRunner.list();
  let migrationHistory = "No migrations found";
  if (Array.isArray(migrationList) && migrationList.length > 0) {
@@ -3168,7 +3444,7 @@ const applySchemaMigrations = async (migration) => {
  );
  migrationHistory = sortedMigrations.map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toUTCString()}`).join("\n");
  }
- console.info("Migrations history:\nid, name, migrated_at\n", migrationHistory);
+ console.debug("Migrations history:\nid, name, migrated_at\n", migrationHistory);
  return {
  headers: { "Content-Type": ["application/json"] },
  statusCode: 200,
@@ -3275,196 +3551,258 @@ const clearCacheSchedulerTrigger = async (options) => {
  };
  const DEFAULT_MEMORY_THRESHOLD = 8 * 1024 * 1024;
  const DEFAULT_TIMEOUT = 300;
- const topSlowestStatementLastHourTrigger = async (orm, options) => {
- if (!orm) {
- return {
- statusCode: 500,
- headers: { "Content-Type": ["application/json"] },
- body: JSON.stringify({
- success: false,
- message: "ORM instance is required",
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ const DEFAULT_TOP_N = 1;
+ const DEFAULT_HOURS = 1;
+ const DEFAULT_TABLES = "CLUSTER_SUMMARY_AND_HISTORY";
+ const MAX_QUERY_TIMEOUT_MS = 3e3;
+ const MAX_SQL_LENGTH = 1e3;
+ const RETRY_ATTEMPTS = 2;
+ const RETRY_BASE_DELAY_MS = 1e3;
+ const nsToMs = (value) => {
+ const n = Number(value);
+ return Number.isFinite(n) ? n / 1e6 : NaN;
+ };
+ const bytesToMB = (value) => {
+ const n = Number(value);
+ return Number.isFinite(n) ? n / (1024 * 1024) : NaN;
+ };
+ const jsonSafeStringify = (value) => JSON.stringify(value, (_k, v) => typeof v === "bigint" ? v.toString() : v);
+ const sanitizeSQL = (sql2, maxLen = MAX_SQL_LENGTH) => {
+ let s = sql2;
+ s = s.replace(/--[^\n\r]*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
+ s = s.replace(/'(?:\\'|[^'])*'/g, "?");
+ s = s.replace(/\b-?\d+(?:\.\d+)?\b/g, "?");
+ s = s.replace(/\s+/g, " ").trim();
+ if (s.length > maxLen) {
+ s = s.slice(0, maxLen) + " …[truncated]";
+ }
+ return s;
+ };
+ const withTimeout = async (promise, ms) => {
+ let timer;
+ try {
+ return await Promise.race([
+ promise,
+ new Promise((_resolve, reject) => {
+ timer = setTimeout(() => reject(new Error(`TIMEOUT:${ms}`)), ms);
  })
- };
+ ]);
+ } finally {
+ if (timer) clearTimeout(timer);
  }
- let newOptions = options ?? {
- warnThresholdMs: DEFAULT_TIMEOUT,
- memoryThresholdBytes: DEFAULT_MEMORY_THRESHOLD,
- showPlan: false
- };
- const nsToMs = (v) => {
- const n = Number(v);
- return Number.isFinite(n) ? n / 1e6 : NaN;
- };
- const bytesToMB = (v) => {
- const n = Number(v);
- return Number.isFinite(n) ? n / (1024 * 1024) : NaN;
- };
- const jsonSafeStringify = (value) => JSON.stringify(value, (_k, v) => typeof v === "bigint" ? v.toString() : v);
- function sanitizeSQL(sql2, maxLen = 1e3) {
- let s = sql2;
- s = s.replace(/--[^\n\r]*/g, "").replace(/\/\*[\s\S]*?\*\//g, "");
- s = s.replace(/'(?:\\'|[^'])*'/g, "?");
- s = s.replace(/\b-?\d+(?:\.\d+)?\b/g, "?");
- s = s.replace(/\s+/g, " ").trim();
- if (s.length > maxLen) {
- s = s.slice(0, maxLen) + " …[truncated]";
- }
- return s;
- }
- const TOP_N = 1;
- try {
- const summaryHistory = clusterStatementsSummaryHistory;
- const summary = clusterStatementsSummary;
- const selectShape = (t) => ({
- digest: t.digest,
- stmtType: t.stmtType,
- schemaName: t.schemaName,
- execCount: t.execCount,
- avgLatencyNs: t.avgLatency,
- maxLatencyNs: t.maxLatency,
- minLatencyNs: t.minLatency,
- avgProcessTimeNs: t.avgProcessTime,
- avgWaitTimeNs: t.avgWaitTime,
- avgBackoffTimeNs: t.avgBackoffTime,
- avgTotalKeys: t.avgTotalKeys,
- firstSeen: t.firstSeen,
- lastSeen: t.lastSeen,
- planInCache: t.planInCache,
- planCacheHits: t.planCacheHits,
- digestText: t.digestText,
- plan: t.plan,
- avgMemBytes: t.avgMem,
- maxMemBytes: t.maxMem
- });
- const lastHourFilterHistory = gte(
- summaryHistory.summaryEndTime,
- sql`DATE_SUB(NOW(), INTERVAL 1 HOUR)`
- );
- const lastHourFilterSummary = gte(
- summary.summaryEndTime,
- sql`DATE_SUB(NOW(), INTERVAL 1 HOUR)`
- );
- const qHistory = orm.getDrizzleQueryBuilder().select(selectShape(summaryHistory)).from(summaryHistory).where(lastHourFilterHistory);
- const qSummary = orm.getDrizzleQueryBuilder().select(selectShape(summary)).from(summary).where(lastHourFilterSummary);
- const combined = unionAll(qHistory, qSummary).as("combined");
- const thresholdNs = Math.floor((newOptions.warnThresholdMs ?? DEFAULT_TIMEOUT) * 1e6);
- const memoryThresholdBytes = newOptions.memoryThresholdBytes ?? DEFAULT_MEMORY_THRESHOLD;
- const grouped = orm.getDrizzleQueryBuilder().select({
- digest: combined.digest,
- stmtType: combined.stmtType,
- schemaName: combined.schemaName,
- execCount: sql`SUM(${combined.execCount})`.as("execCount"),
- avgLatencyNs: sql`MAX(${combined.avgLatencyNs})`.as("avgLatencyNs"),
- maxLatencyNs: sql`MAX(${combined.maxLatencyNs})`.as("maxLatencyNs"),
- minLatencyNs: sql`MIN(${combined.minLatencyNs})`.as("minLatencyNs"),
- avgProcessTimeNs: sql`MAX(${combined.avgProcessTimeNs})`.as("avgProcessTimeNs"),
- avgWaitTimeNs: sql`MAX(${combined.avgWaitTimeNs})`.as("avgWaitTimeNs"),
- avgBackoffTimeNs: sql`MAX(${combined.avgBackoffTimeNs})`.as("avgBackoffTimeNs"),
- avgMemBytes: sql`MAX(${combined.avgMemBytes})`.as("avgMemBytes"),
- maxMemBytes: sql`MAX(${combined.maxMemBytes})`.as("maxMemBytes"),
- avgTotalKeys: sql`MAX(${combined.avgTotalKeys})`.as("avgTotalKeys"),
- firstSeen: sql`MIN(${combined.firstSeen})`.as("firstSeen"),
- lastSeen: sql`MAX(${combined.lastSeen})`.as("lastSeen"),
- planInCache: sql`MAX(${combined.planInCache})`.as("planInCache"),
- planCacheHits: sql`SUM(${combined.planCacheHits})`.as("planCacheHits"),
- // Prefer a non-empty sample text/plan via MAX; acceptable for de-dup
- digestText: sql`MAX(${combined.digestText})`.as("digestText"),
- plan: sql`MAX(${combined.plan})`.as("plan")
- }).from(combined).where(
- sql`COALESCE(${combined.digest}, '') <> '' AND COALESCE(${combined.digestText}, '') <> '' AND COALESCE(${combined.stmtType}, '') NOT IN ('Use','Set','Show')`
- ).groupBy(combined.digest, combined.stmtType, combined.schemaName).as("grouped");
- const rows = await orm.getDrizzleQueryBuilder().select({
- digest: grouped.digest,
- stmtType: grouped.stmtType,
- schemaName: grouped.schemaName,
- execCount: grouped.execCount,
- avgLatencyNs: grouped.avgLatencyNs,
- maxLatencyNs: grouped.maxLatencyNs,
- minLatencyNs: grouped.minLatencyNs,
- avgProcessTimeNs: grouped.avgProcessTimeNs,
- avgWaitTimeNs: grouped.avgWaitTimeNs,
- avgBackoffTimeNs: grouped.avgBackoffTimeNs,
- avgMemBytes: grouped.avgMemBytes,
- maxMemBytes: grouped.maxMemBytes,
- avgTotalKeys: grouped.avgTotalKeys,
- firstSeen: grouped.firstSeen,
- lastSeen: grouped.lastSeen,
- planInCache: grouped.planInCache,
- planCacheHits: grouped.planCacheHits,
- digestText: grouped.digestText,
- plan: grouped.plan
- }).from(grouped).where(
- sql`${grouped.avgLatencyNs} > ${thresholdNs} OR ${grouped.avgMemBytes} > ${memoryThresholdBytes}`
- ).orderBy(desc(grouped.avgLatencyNs)).limit(formatLimitOffset(TOP_N));
- const formatted = rows.map((r, i) => ({
- rank: i + 1,
- // 1-based rank in the top N
- digest: r.digest,
- stmtType: r.stmtType,
- schemaName: r.schemaName,
- execCount: r.execCount,
- avgLatencyMs: nsToMs(r.avgLatencyNs),
- // Convert ns to ms for readability
- maxLatencyMs: nsToMs(r.maxLatencyNs),
- minLatencyMs: nsToMs(r.minLatencyNs),
- avgProcessTimeMs: nsToMs(r.avgProcessTimeNs),
- avgWaitTimeMs: nsToMs(r.avgWaitTimeNs),
- avgBackoffTimeMs: nsToMs(r.avgBackoffTimeNs),
- avgMemMB: bytesToMB(r.avgMemBytes),
- maxMemMB: bytesToMB(r.maxMemBytes),
- avgMemBytes: r.avgMemBytes,
- maxMemBytes: r.maxMemBytes,
- avgTotalKeys: r.avgTotalKeys,
- firstSeen: r.firstSeen,
- lastSeen: r.lastSeen,
- planInCache: r.planInCache,
- planCacheHits: r.planCacheHits,
- digestText: sanitizeSQL(r.digestText),
- plan: newOptions.showPlan ? r.plan : void 0
- }));
- for (const f of formatted) {
+ };
+ const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+ const executeWithRetries = async (task, label) => {
+ let attempt = 0;
+ let delay = RETRY_BASE_DELAY_MS;
+ while (true) {
+ try {
+ attempt++;
+ return await task();
+ } catch (error) {
+ const msg = String(error?.message ?? error);
+ const isTimeout = msg.startsWith("TIMEOUT:");
+ if (attempt > RETRY_ATTEMPTS) throw error;
  console.warn(
- `${f.rank}. ${f.stmtType} avg=${f.avgLatencyMs?.toFixed?.(2)}ms max=${f.maxLatencyMs?.toFixed?.(2)}ms mem≈${f.avgMemMB?.toFixed?.(2)}MB(max ${f.maxMemMB?.toFixed?.(2)}MB) exec=${f.execCount}
- digest=${f.digest}
- sql=${(f.digestText || "").slice(0, 300)}${f.digestText && f.digestText.length > 300 ? "…" : ""}`
+ `${label}: attempt ${attempt} failed${isTimeout ? " (timeout)" : ""}; retrying in ${delay}ms...`,
+ error
  );
- if (newOptions.showPlan && f.plan) {
- console.warn(` full plan:
- ${f.plan}`);
- }
+ await sleep(delay);
+ delay *= 2;
  }
- return {
- headers: { "Content-Type": ["application/json"] },
- statusCode: 200,
- statusText: "OK",
- body: jsonSafeStringify({
- success: true,
- window: "last_1h",
- top: TOP_N,
- warnThresholdMs: newOptions.warnThresholdMs,
- memoryThresholdBytes: newOptions.memoryThresholdBytes,
- showPlan: newOptions.showPlan,
- rows: formatted,
- generatedAt: (/* @__PURE__ */ new Date()).toISOString()
- })
- };
+ }
+ };
+ const createErrorResponse = (message, error) => ({
+ headers: { "Content-Type": ["application/json"] },
+ statusCode: 500,
+ statusText: "Internal Server Error",
+ body: jsonSafeStringify({
+ success: false,
+ message,
+ error: error?.cause?.context?.debug?.sqlMessage ?? error?.cause?.message ?? error?.message,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ })
+ });
+ const createSuccessResponse = (formatted, options) => ({
+ headers: { "Content-Type": ["application/json"] },
+ statusCode: 200,
+ statusText: "OK",
+ body: jsonSafeStringify({
+ success: true,
+ window: `last_${options.hours}h`,
+ top: options.topN,
+ warnThresholdMs: options.warnThresholdMs,
+ memoryThresholdBytes: options.memoryThresholdBytes,
+ showPlan: options.showPlan,
+ rows: formatted,
+ generatedAt: (/* @__PURE__ */ new Date()).toISOString()
+ })
+ });
+ const createSelectShape = (table) => ({
+ digest: table.digest,
+ stmtType: table.stmtType,
+ schemaName: table.schemaName,
+ execCount: table.execCount,
+ avgLatencyNs: table.avgLatency,
+ maxLatencyNs: table.maxLatency,
+ minLatencyNs: table.minLatency,
+ avgProcessTimeNs: table.avgProcessTime,
+ avgWaitTimeNs: table.avgWaitTime,
+ avgBackoffTimeNs: table.avgBackoffTime,
+ avgTotalKeys: table.avgTotalKeys,
+ firstSeen: table.firstSeen,
+ lastSeen: table.lastSeen,
+ planInCache: table.planInCache,
+ planCacheHits: table.planCacheHits,
+ digestText: table.digestText,
+ plan: table.plan,
+ avgMemBytes: table.avgMem,
+ maxMemBytes: table.maxMem
+ });
+ const buildCombinedQuery = (orm, options) => {
+ const summaryHistory = statementsSummary;
+ const summary = statementsSummary;
+ const summaryHistoryCluster = clusterStatementsSummaryHistory;
+ const summaryCluster = clusterStatementsSummary;
+ const lastHoursFilter = (table) => gte(table.summaryEndTime, sql`DATE_SUB(NOW(), INTERVAL ${options.hours} HOUR)`);
+ const qHistory = orm.getDrizzleQueryBuilder().select(createSelectShape(summaryHistory)).from(summaryHistory).where(lastHoursFilter(summaryHistory));
+ const qSummary = orm.getDrizzleQueryBuilder().select(createSelectShape(summary)).from(summary).where(lastHoursFilter(summary));
+ const qHistoryCluster = orm.getDrizzleQueryBuilder().select(createSelectShape(summaryHistoryCluster)).from(summaryHistoryCluster).where(lastHoursFilter(summaryHistoryCluster));
+ const qSummaryCluster = orm.getDrizzleQueryBuilder().select(createSelectShape(summaryCluster)).from(summaryCluster).where(lastHoursFilter(summaryCluster));
+ switch (options.tables) {
+ case "SUMMARY_AND_HISTORY":
+ return unionAll(qHistory, qSummary).as("combined");
+ case "CLUSTER_SUMMARY_AND_HISTORY":
+ return unionAll(qHistoryCluster, qSummaryCluster).as("combined");
+ default:
+ throw new Error(`Unsupported table configuration: ${options.tables}`);
+ }
+ };
+ const buildGroupedQuery = (orm, combined) => {
+ return orm.getDrizzleQueryBuilder().select({
+ digest: combined.digest,
+ stmtType: combined.stmtType,
+ schemaName: combined.schemaName,
+ execCount: sql`SUM(${combined.execCount})`.as("execCount"),
+ avgLatencyNs: sql`MAX(${combined.avgLatencyNs})`.as("avgLatencyNs"),
+ maxLatencyNs: sql`MAX(${combined.maxLatencyNs})`.as("maxLatencyNs"),
+ minLatencyNs: sql`MIN(${combined.minLatencyNs})`.as("minLatencyNs"),
+ avgProcessTimeNs: sql`MAX(${combined.avgProcessTimeNs})`.as("avgProcessTimeNs"),
+ avgWaitTimeNs: sql`MAX(${combined.avgWaitTimeNs})`.as("avgWaitTimeNs"),
+ avgBackoffTimeNs: sql`MAX(${combined.avgBackoffTimeNs})`.as("avgBackoffTimeNs"),
+ avgMemBytes: sql`MAX(${combined.avgMemBytes})`.as("avgMemBytes"),
+ maxMemBytes: sql`MAX(${combined.maxMemBytes})`.as("maxMemBytes"),
+ avgTotalKeys: sql`MAX(${combined.avgTotalKeys})`.as("avgTotalKeys"),
+ firstSeen: sql`MIN(${combined.firstSeen})`.as("firstSeen"),
+ lastSeen: sql`MAX(${combined.lastSeen})`.as("lastSeen"),
+ planInCache: sql`MAX(${combined.planInCache})`.as("planInCache"),
+ planCacheHits: sql`SUM(${combined.planCacheHits})`.as("planCacheHits"),
+ digestText: sql`MAX(${combined.digestText})`.as("digestText"),
+ plan: sql`MAX(${combined.plan})`.as("plan")
+ }).from(combined).where(
+ sql`COALESCE(${combined.digest}, '') <> '' AND COALESCE(${combined.digestText}, '') <> '' AND COALESCE(${combined.stmtType}, '') NOT IN ('Use','Set','Show')`
+ ).groupBy(combined.digest, combined.stmtType, combined.schemaName).as("grouped");
+ };
+ const buildFinalQuery = (orm, grouped, options) => {
+ const thresholdNs = Math.floor(options.warnThresholdMs * 1e6);
+ const memoryThresholdBytes = options.memoryThresholdBytes;
+ const query = orm.getDrizzleQueryBuilder().select({
+ digest: grouped.digest,
+ stmtType: grouped.stmtType,
+ schemaName: grouped.schemaName,
+ execCount: grouped.execCount,
+ avgLatencyNs: grouped.avgLatencyNs,
+ maxLatencyNs: grouped.maxLatencyNs,
+ minLatencyNs: grouped.minLatencyNs,
+ avgProcessTimeNs: grouped.avgProcessTimeNs,
+ avgWaitTimeNs: grouped.avgWaitTimeNs,
+ avgBackoffTimeNs: grouped.avgBackoffTimeNs,
+ avgMemBytes: grouped.avgMemBytes,
+ maxMemBytes: grouped.maxMemBytes,
+ avgTotalKeys: grouped.avgTotalKeys,
+ firstSeen: grouped.firstSeen,
+ lastSeen: grouped.lastSeen,
+ planInCache: grouped.planInCache,
+ planCacheHits: grouped.planCacheHits,
+ digestText: grouped.digestText,
+ plan: grouped.plan
+ }).from(grouped).where(
+ sql`${grouped.avgLatencyNs} > ${thresholdNs} OR ${grouped.avgMemBytes} > ${memoryThresholdBytes}`
+ ).orderBy(desc(grouped.avgLatencyNs)).limit(formatLimitOffset(options.topN));
+ if (options.operationType === "DDL") {
+ return orm.executeDDLActions(async () => await query);
+ }
+ return query;
+ };
+ const formatQueryResults = (rows, options) => {
+ return rows.map((row, index) => ({
+ rank: index + 1,
+ digest: row.digest,
+ stmtType: row.stmtType,
+ schemaName: row.schemaName,
+ execCount: row.execCount,
+ avgLatencyMs: nsToMs(row.avgLatencyNs),
+ maxLatencyMs: nsToMs(row.maxLatencyNs),
+ minLatencyMs: nsToMs(row.minLatencyNs),
+ avgProcessTimeMs: nsToMs(row.avgProcessTimeNs),
+ avgWaitTimeMs: nsToMs(row.avgWaitTimeNs),
+ avgBackoffTimeMs: nsToMs(row.avgBackoffTimeNs),
+ avgMemMB: bytesToMB(row.avgMemBytes),
+ maxMemMB: bytesToMB(row.maxMemBytes),
+ avgMemBytes: row.avgMemBytes,
+ maxMemBytes: row.maxMemBytes,
+ avgTotalKeys: row.avgTotalKeys,
+ firstSeen: row.firstSeen,
+ lastSeen: row.lastSeen,
+ planInCache: row.planInCache,
+ planCacheHits: row.planCacheHits,
+ digestText: options.operationType === "DDL" ? row.digestText : sanitizeSQL(row.digestText),
+ plan: options.showPlan ? row.plan : void 0
+ }));
+ };
+ const logQueryResults = (formatted, options) => {
+ for (const result of formatted) {
+ console.warn(
+ `${result.rank}. ${result.stmtType} avg=${result.avgLatencyMs?.toFixed?.(2)}ms max=${result.maxLatencyMs?.toFixed?.(2)}ms mem≈${result.avgMemMB?.toFixed?.(2)}MB(max ${result.maxMemMB?.toFixed?.(2)}MB) exec=${result.execCount}
+ digest=${result.digest}
+ sql=${(result.digestText || "").slice(0, 300)}${result.digestText && result.digestText.length > 300 ? "…" : ""}`
+ );
+ if (options.showPlan && result.plan) {
+ console.warn(` full plan:
+ ${result.plan}`);
+ }
+ }
+ };
+ const topSlowestStatementLastHourTrigger = async (orm, options) => {
+ if (!orm) {
+ return createErrorResponse("ORM instance is required");
+ }
+ const mergedOptions = {
+ warnThresholdMs: options?.warnThresholdMs ?? DEFAULT_TIMEOUT,
+ memoryThresholdBytes: options?.memoryThresholdBytes ?? DEFAULT_MEMORY_THRESHOLD,
+ showPlan: options?.showPlan ?? false,
+ operationType: options?.operationType ?? "DML",
+ topN: options?.topN ?? DEFAULT_TOP_N,
+ hours: options?.hours ?? DEFAULT_HOURS,
+ tables: options?.tables ?? DEFAULT_TABLES
+ };
+ try {
+ const combined = buildCombinedQuery(orm, mergedOptions);
+ const grouped = buildGroupedQuery(orm, combined);
+ const finalQuery = buildFinalQuery(orm, grouped, mergedOptions);
+ const rows = await executeWithRetries(
+ () => withTimeout(finalQuery, MAX_QUERY_TIMEOUT_MS),
+ "topSlowestStatementLastHourTrigger"
+ );
+ const formatted = formatQueryResults(rows, mergedOptions);
+ logQueryResults(formatted, mergedOptions);
+ return createSuccessResponse(formatted, mergedOptions);
  } catch (error) {
- console.error(
- "Error in topSlowestStatementLastHourTrigger:",
+ console.warn(
+ "Error in topSlowestStatementLastHourTrigger (one-off errors can be ignored; if it recurs, investigate):",
  error?.cause?.context?.debug?.sqlMessage ?? error?.cause ?? error
  );
- return {
- headers: { "Content-Type": ["application/json"] },
- statusCode: 500,
- statusText: "Internal Server Error",
- body: jsonSafeStringify({
- success: false,
- message: "Failed to fetch or log slow queries",
- error: error?.cause?.context?.debug?.sqlMessage ?? error?.cause?.message,
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
- })
- };
+ return createErrorResponse("Failed to fetch or log slow queries", error);
  }
  };
  const getHttpResponse = (statusCode, body) => {
@@ -3506,6 +3844,7 @@ export {
  getPrimaryKeys,
  getTableMetadata,
  getTables,
+ isUpdateQueryResponse,
  mapSelectAllFieldsToAlias,
  mapSelectFieldsWithAlias,
  migrations,
@@ -3513,6 +3852,8 @@ export {
  parseDateTime,
  patchDbWithSelectAliased,
  slowQuery,
+ statementsSummary,
+ statementsSummaryHistory,
  topSlowestStatementLastHourTrigger
  };
  //# sourceMappingURL=ForgeSQLORM.mjs.map
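
The slow-statement web trigger was also reworked: it now accepts topN, hours, tables, and operationType alongside the existing thresholds, retries failed runs with exponential backoff, and caps each query at 3 seconds. A hedged invocation sketch; it assumes the trigger is importable from the package root and that `forgeSQL` is an existing ORM instance (neither is shown in this diff):

```typescript
import { topSlowestStatementLastHourTrigger } from "forge-sql-orm";

// `forgeSQL` is assumed to be an existing ORM instance created elsewhere.
export const slowQueryReport = async () =>
  topSlowestStatementLastHourTrigger(forgeSQL, {
    warnThresholdMs: 300,                  // DEFAULT_TIMEOUT in this diff
    memoryThresholdBytes: 8 * 1024 * 1024, // DEFAULT_MEMORY_THRESHOLD
    showPlan: false,
    operationType: "DML",                  // "DDL" routes the query through executeDDLActions
    topN: 5,                               // DEFAULT_TOP_N is 1
    hours: 1,                              // DEFAULT_HOURS
    tables: "CLUSTER_SUMMARY_AND_HISTORY", // or "SUMMARY_AND_HISTORY"
  });
```
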