forge-sql-orm 2.1.14 → 2.1.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +294 -20
- package/dist/core/ForgeSQLORM.d.ts +16 -7
- package/dist/core/ForgeSQLORM.d.ts.map +1 -1
- package/dist/core/ForgeSQLORM.js +73 -15
- package/dist/core/ForgeSQLORM.js.map +1 -1
- package/dist/core/ForgeSQLQueryBuilder.d.ts +15 -7
- package/dist/core/ForgeSQLQueryBuilder.d.ts.map +1 -1
- package/dist/core/ForgeSQLQueryBuilder.js.map +1 -1
- package/dist/core/ForgeSQLSelectOperations.d.ts +2 -1
- package/dist/core/ForgeSQLSelectOperations.d.ts.map +1 -1
- package/dist/core/ForgeSQLSelectOperations.js.map +1 -1
- package/dist/core/Rovo.d.ts +40 -0
- package/dist/core/Rovo.d.ts.map +1 -1
- package/dist/core/Rovo.js +164 -138
- package/dist/core/Rovo.js.map +1 -1
- package/dist/index.d.ts +1 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -2
- package/dist/index.js.map +1 -1
- package/dist/lib/drizzle/extensions/additionalActions.d.ts.map +1 -1
- package/dist/lib/drizzle/extensions/additionalActions.js +72 -22
- package/dist/lib/drizzle/extensions/additionalActions.js.map +1 -1
- package/dist/utils/cacheTableUtils.d.ts +11 -0
- package/dist/utils/cacheTableUtils.d.ts.map +1 -0
- package/dist/utils/cacheTableUtils.js +450 -0
- package/dist/utils/cacheTableUtils.js.map +1 -0
- package/dist/utils/cacheUtils.d.ts.map +1 -1
- package/dist/utils/cacheUtils.js +3 -22
- package/dist/utils/cacheUtils.js.map +1 -1
- package/dist/utils/forgeDriver.d.ts +3 -2
- package/dist/utils/forgeDriver.d.ts.map +1 -1
- package/dist/utils/forgeDriver.js +24 -27
- package/dist/utils/forgeDriver.js.map +1 -1
- package/dist/utils/metadataContextUtils.d.ts +27 -1
- package/dist/utils/metadataContextUtils.d.ts.map +1 -1
- package/dist/utils/metadataContextUtils.js +237 -10
- package/dist/utils/metadataContextUtils.js.map +1 -1
- package/dist/utils/sqlUtils.d.ts +1 -0
- package/dist/utils/sqlUtils.d.ts.map +1 -1
- package/dist/utils/sqlUtils.js +217 -119
- package/dist/utils/sqlUtils.js.map +1 -1
- package/dist/webtriggers/applyMigrationsWebTrigger.js +1 -1
- package/dist/webtriggers/index.d.ts +1 -0
- package/dist/webtriggers/index.d.ts.map +1 -1
- package/dist/webtriggers/index.js +1 -0
- package/dist/webtriggers/index.js.map +1 -1
- package/dist/webtriggers/topSlowestStatementLastHourTrigger.d.ts +60 -0
- package/dist/webtriggers/topSlowestStatementLastHourTrigger.d.ts.map +1 -0
- package/dist/webtriggers/topSlowestStatementLastHourTrigger.js +55 -0
- package/dist/webtriggers/topSlowestStatementLastHourTrigger.js.map +1 -0
- package/package.json +11 -10
- package/src/core/ForgeSQLORM.ts +78 -14
- package/src/core/ForgeSQLQueryBuilder.ts +15 -5
- package/src/core/ForgeSQLSelectOperations.ts +2 -1
- package/src/core/Rovo.ts +209 -167
- package/src/index.ts +1 -3
- package/src/lib/drizzle/extensions/additionalActions.ts +98 -42
- package/src/utils/cacheTableUtils.ts +511 -0
- package/src/utils/cacheUtils.ts +3 -25
- package/src/utils/forgeDriver.ts +38 -29
- package/src/utils/metadataContextUtils.ts +290 -10
- package/src/utils/sqlUtils.ts +298 -142
- package/src/webtriggers/applyMigrationsWebTrigger.ts +1 -1
- package/src/webtriggers/index.ts +1 -0
- package/src/webtriggers/topSlowestStatementLastHourTrigger.ts +69 -0
package/src/utils/sqlUtils.ts
CHANGED
@@ -100,37 +100,37 @@ export const parseDateTime = (value: string | Date, format: string): Date => {
 };
 
 /**
- *
- * @param value - Date object, ISO/RFC2822/SQL/HTTP string, or timestamp (number|string).
- * @param format - DateTime format string (Luxon format tokens).
- * @returns Formatted date string.
- * @throws Error if value cannot be parsed as a valid date.
+ * Parses a string value into DateTime using multiple format parsers
  */
-export function formatDateTime(
-  value: Date | string | number,
-  format: string,
-  isTimeStamp: boolean,
-): string {
+function parseStringToDateTime(value: string): DateTime | null {
+  const parsers = [DateTime.fromISO, DateTime.fromRFC2822, DateTime.fromSQL, DateTime.fromHTTP];
+
+  for (const parser of parsers) {
+    const dt = parser(value);
+    if (dt.isValid) {
+      return dt;
+    }
+  }
+
+  // Try parsing as number string
+  const parsed = Number(value);
+  if (!Number.isNaN(parsed)) {
+    return DateTime.fromMillis(parsed);
+  }
+
+  return null;
+}
+
+/**
+ * Converts a value to DateTime
+ */
+function valueToDateTime(value: Date | string | number): DateTime {
   let dt: DateTime | null = null;
 
   if (value instanceof Date) {
     dt = DateTime.fromJSDate(value);
   } else if (typeof value === "string") {
-    for (const parser of [
-      DateTime.fromISO,
-      DateTime.fromRFC2822,
-      DateTime.fromSQL,
-      DateTime.fromHTTP,
-    ]) {
-      dt = parser(value);
-      if (dt.isValid) break;
-    }
-    if (!dt?.isValid) {
-      const parsed = Number(value);
-      if (!Number.isNaN(parsed)) {
-        dt = DateTime.fromMillis(parsed);
-      }
-    }
+    dt = parseStringToDateTime(value);
   } else if (typeof value === "number") {
     dt = DateTime.fromMillis(value);
   } else {
@@ -140,20 +140,47 @@ export function formatDateTime(
   if (!dt?.isValid) {
     throw new Error("Invalid Date");
   }
+
+  return dt;
+}
+
+/**
+ * Validates timestamp range for Atlassian Forge compatibility
+ */
+function validateTimestampRange(dt: DateTime): void {
   const minDate = DateTime.fromSeconds(1);
   const maxDate = DateTime.fromMillis(2147483647 * 1000); // 2038-01-19 03:14:07.999 UTC
 
+  if (dt < minDate) {
+    throw new Error(
+      "Atlassian Forge does not support zero or negative timestamps. Allowed range: from '1970-01-01 00:00:01.000000' to '2038-01-19 03:14:07.999999'.",
+    );
+  }
+
+  if (dt > maxDate) {
+    throw new Error(
+      "Atlassian Forge does not support timestamps beyond 2038-01-19 03:14:07.999999. Please use a smaller date within the supported range.",
+    );
+  }
+}
+
+/**
+ * Helper function to validate and format a date-like value using Luxon DateTime.
+ * @param value - Date object, ISO/RFC2822/SQL/HTTP string, or timestamp (number|string).
+ * @param format - DateTime format string (Luxon format tokens).
+ * @param isTimeStamp - Whether to validate timestamp range
+ * @returns Formatted date string.
+ * @throws Error if value cannot be parsed as a valid date.
+ */
+export function formatDateTime(
+  value: Date | string | number,
+  format: string,
+  isTimeStamp: boolean,
+): string {
+  const dt = valueToDateTime(value);
+
   if (isTimeStamp) {
-    if (dt < minDate) {
-      throw new Error(
-        "Atlassian Forge does not support zero or negative timestamps. Allowed range: from '1970-01-01 00:00:01.000000' to '2038-01-19 03:14:07.999999'.",
-      );
-    }
-    if (dt > maxDate) {
-      throw new Error(
-        "Atlassian Forge does not support timestamps beyond 2038-01-19 03:14:07.999999. Please use a smaller date within the supported range.",
-      );
-    }
+    validateTimestampRange(dt);
   }
 
   return dt.toFormat(format);
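
The two hunks above split the old formatDateTime into parseStringToDateTime, valueToDateTime, and validateTimestampRange. A minimal sketch of how the exported helper is expected to behave after the refactor; the import path and the sample values are assumptions, not taken from this diff:

// Illustrative sketch only — "forge-sql-orm" as the import path for formatDateTime
// is an assumption; adjust to wherever the package actually re-exports it.
import { formatDateTime } from "forge-sql-orm";

// Date objects, parseable strings, and millisecond timestamps all flow through valueToDateTime():
formatDateTime(new Date("2024-05-01T10:00:00Z"), "yyyy-MM-dd HH:mm:ss", false);
formatDateTime("2024-05-01T10:00:00Z", "yyyy-MM-dd", false); // ISO string via parseStringToDateTime()
formatDateTime("1714557600000", "yyyy-MM-dd", false); // numeric string falls back to DateTime.fromMillis()

// With isTimeStamp = true, validateTimestampRange() enforces Forge's supported window
// ('1970-01-01 00:00:01' .. '2038-01-19 03:14:07') and throws outside of it:
formatDateTime(new Date("2040-01-01T00:00:00Z"), "yyyy-MM-dd HH:mm:ss", true); // throws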
@@ -169,10 +196,7 @@ export function getPrimaryKeys<T extends AnyMySqlTable>(table: T): [string, AnyC
   const { columns, primaryKeys } = getTableMetadata(table);
 
   // First try to find primary keys in columns
-  const columnPrimaryKeys = Object.entries(columns).filter(([, column]) => column.primary) as [
-    string,
-    AnyColumn,
-  ][];
+  const columnPrimaryKeys = Object.entries(columns).filter(([, column]) => column.primary);
 
   if (columnPrimaryKeys.length > 0) {
     return columnPrimaryKeys;
@@ -199,6 +223,85 @@ export function getPrimaryKeys<T extends AnyMySqlTable>(table: T): [string, AnyC
   return [];
 }
 
+/**
+ * Processes foreign keys from foreignKeysSymbol
+ */
+function processForeignKeysFromSymbol(
+  table: AnyMySqlTable,
+  foreignKeysSymbol: symbol,
+): ForeignKeyBuilder[] {
+  const foreignKeys: ForeignKeyBuilder[] = [];
+  // @ts-ignore
+  const fkArray: any[] = table[foreignKeysSymbol];
+
+  if (!fkArray) {
+    return foreignKeys;
+  }
+
+  for (const fk of fkArray) {
+    if (fk.reference) {
+      const item = fk.reference(fk);
+      foreignKeys.push(item);
+    }
+  }
+
+  return foreignKeys;
+}
+
+/**
+ * Extracts config builders from config builder data
+ */
+function extractConfigBuilders(configBuilderData: any): any[] {
+  if (Array.isArray(configBuilderData)) {
+    return configBuilderData;
+  }
+
+  return Object.values(configBuilderData).map((item) => (item as ConfigBuilderData).value ?? item);
+}
+
+/**
+ * Checks if a builder is a ForeignKeyBuilder
+ */
+function isForeignKeyBuilder(builder: any): boolean {
+  if (!builder?.constructor) {
+    return false;
+  }
+
+  const builderName = builder.constructor.name.toLowerCase();
+  return builderName.includes("foreignkeybuilder");
+}
+
+/**
+ * Processes foreign keys from extraSymbol
+ */
+function processForeignKeysFromExtra(
+  table: AnyMySqlTable,
+  extraSymbol: symbol,
+): ForeignKeyBuilder[] {
+  const foreignKeys: ForeignKeyBuilder[] = [];
+  // @ts-ignore
+  const extraConfigBuilder = table[extraSymbol];
+
+  if (!extraConfigBuilder || typeof extraConfigBuilder !== "function") {
+    return foreignKeys;
+  }
+
+  const configBuilderData = extraConfigBuilder(table);
+  if (!configBuilderData) {
+    return foreignKeys;
+  }
+
+  const configBuilders = extractConfigBuilders(configBuilderData);
+
+  for (const builder of configBuilders) {
+    if (isForeignKeyBuilder(builder)) {
+      foreignKeys.push(builder);
+    }
+  }
+
+  return foreignKeys;
+}
+
 /**
  * Processes foreign keys from both foreignKeysSymbol and extraSymbol
  * @param table - The table schema
@@ -215,57 +318,117 @@ function processForeignKeys(
 
   // Process foreign keys from foreignKeysSymbol
   if (foreignKeysSymbol) {
-    // @ts-ignore
-    const fkArray: any[] = table[foreignKeysSymbol];
-    if (fkArray) {
-      for (const fk of fkArray) {
-        if (fk.reference) {
-          const item = fk.reference(fk);
-          foreignKeys.push(item);
-        }
-      }
-    }
+    foreignKeys.push(...processForeignKeysFromSymbol(table, foreignKeysSymbol));
   }
 
   // Process foreign keys from extraSymbol
   if (extraSymbol) {
-    // @ts-ignore
-    const extraConfigBuilder = table[extraSymbol];
-    if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
-      const configBuilderData = extraConfigBuilder(table);
-      if (configBuilderData) {
-        const configBuilders = Array.isArray(configBuilderData)
-          ? configBuilderData
-          : Object.values(configBuilderData).map(
-              (item) => (item as ConfigBuilderData).value ?? item,
-            );
-
-        for (const builder of configBuilders) {
-          if (!builder?.constructor) continue;
-
-          const builderName = builder.constructor.name.toLowerCase();
-          if (builderName.includes("foreignkeybuilder")) {
-            foreignKeys.push(builder);
-          }
-        }
-      }
-    }
+    foreignKeys.push(...processForeignKeysFromExtra(table, extraSymbol));
   }
 
   return foreignKeys;
 }
 
+/**
+ * Extracts symbols from table schema.
+ * @param table - The table schema
+ * @returns Object containing relevant symbols
+ */
+function extractTableSymbols(table: AnyMySqlTable) {
+  const symbols = Object.getOwnPropertySymbols(table);
+  return {
+    nameSymbol: symbols.find((s) => s.toString().includes("Name")),
+    columnsSymbol: symbols.find((s) => s.toString().includes("Columns")),
+    foreignKeysSymbol: symbols.find((s) => s.toString().includes("ForeignKeys)")),
+    extraSymbol: symbols.find((s) => s.toString().includes("ExtraConfigBuilder")),
+  };
+}
+
+/**
+ * Maps builder to appropriate array based on its type.
+ * @param builder - The builder object
+ * @param builders - The builders object containing all arrays
+ * @returns True if builder was added to a specific array, false otherwise
+ */
+function addBuilderToTypedArray(
+  builder: any,
+  builders: {
+    indexes: AnyIndexBuilder[];
+    checks: CheckBuilder[];
+    primaryKeys: PrimaryKeyBuilder[];
+    uniqueConstraints: UniqueConstraintBuilder[];
+  },
+): boolean {
+  if (!builder?.constructor) {
+    return false;
+  }
+
+  const builderName = builder.constructor.name.toLowerCase();
+  const builderMap = {
+    indexbuilder: builders.indexes,
+    checkbuilder: builders.checks,
+    primarykeybuilder: builders.primaryKeys,
+    uniqueconstraintbuilder: builders.uniqueConstraints,
+  };
+
+  for (const [type, array] of Object.entries(builderMap)) {
+    if (builderName.includes(type)) {
+      array.push(builder);
+      return true;
+    }
+  }
+
+  return false;
+}
+
+/**
+ * Processes extra configuration builders and adds them to the builders object.
+ * @param table - The table schema
+ * @param extraSymbol - The extra symbol from table
+ * @param builders - The builders object to populate
+ */
+function processExtraConfigBuilders(
+  table: AnyMySqlTable,
+  extraSymbol: symbol | undefined,
+  builders: {
+    indexes: AnyIndexBuilder[];
+    checks: CheckBuilder[];
+    foreignKeys: ForeignKeyBuilder[];
+    primaryKeys: PrimaryKeyBuilder[];
+    uniqueConstraints: UniqueConstraintBuilder[];
+    extras: any[];
+  },
+): void {
+  if (!extraSymbol) {
+    return;
+  }
+
+  // @ts-ignore
+  const extraConfigBuilder = table[extraSymbol];
+  if (!extraConfigBuilder || typeof extraConfigBuilder !== "function") {
+    return;
+  }
+
+  const configBuilderData = extraConfigBuilder(table);
+  if (!configBuilderData) {
+    return;
+  }
+
+  const configBuilders = extractConfigBuilders(configBuilderData);
+
+  for (const builder of configBuilders) {
+    addBuilderToTypedArray(builder, builders);
+    builders.extras.push(builder);
+  }
+}
+
 /**
  * Extracts table metadata from the schema.
  * @param {AnyMySqlTable} table - The table schema
  * @returns {MetadataInfo} Object containing table metadata
  */
 export function getTableMetadata(table: AnyMySqlTable): MetadataInfo {
-  const symbols = Object.getOwnPropertySymbols(table);
-  const nameSymbol = symbols.find((s) => s.toString().includes("Name"));
-  const columnsSymbol = symbols.find((s) => s.toString().includes("Columns"));
-  const foreignKeysSymbol = symbols.find((s) => s.toString().includes("ForeignKeys)"));
-  const extraSymbol = symbols.find((s) => s.toString().includes("ExtraConfigBuilder"));
+  const { nameSymbol, columnsSymbol, foreignKeysSymbol, extraSymbol } = extractTableSymbols(table);
 
   // Initialize builders arrays
   const builders = {
@@ -281,47 +444,7 @@ export function getTableMetadata(table: AnyMySqlTable): MetadataInfo {
   builders.foreignKeys = processForeignKeys(table, foreignKeysSymbol, extraSymbol);
 
   // Process extra configuration if available
-  if (extraSymbol) {
-    // @ts-ignore
-    const extraConfigBuilder = table[extraSymbol];
-    if (extraConfigBuilder && typeof extraConfigBuilder === "function") {
-      const configBuilderData = extraConfigBuilder(table);
-      if (configBuilderData) {
-        // Convert configBuilderData to array if it's an object
-        const configBuilders = Array.isArray(configBuilderData)
-          ? configBuilderData
-          : Object.values(configBuilderData).map(
-              (item) => (item as ConfigBuilderData).value ?? item,
-            );
-
-        // Process each builder
-        for (const builder of configBuilders) {
-          if (!builder?.constructor) continue;
-
-          const builderName = builder.constructor.name.toLowerCase();
-
-          // Map builder types to their corresponding arrays
-          const builderMap = {
-            indexbuilder: builders.indexes,
-            checkbuilder: builders.checks,
-            primarykeybuilder: builders.primaryKeys,
-            uniqueconstraintbuilder: builders.uniqueConstraints,
-          };
-
-          // Add builder to appropriate array if it matches any type
-          for (const [type, array] of Object.entries(builderMap)) {
-            if (builderName.includes(type)) {
-              array.push(builder);
-              break;
-            }
-          }
-
-          // Always add to extras array
-          builders.extras.push(builder);
-        }
-      }
-    }
-  }
+  processExtraConfigBuilders(table, extraSymbol, builders);
 
   return {
     tableName: nameSymbol ? (table as any)[nameSymbol] : "",
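
For orientation, a minimal sketch of how the refactored metadata helpers are consumed; the `users` table definition and the `forge-sql-orm` import path are assumptions, not taken from this diff:

import { int, mysqlTable, varchar } from "drizzle-orm/mysql-core";
// Assumed re-export path for the helpers shown in the hunks above.
import { getPrimaryKeys, getTableMetadata } from "forge-sql-orm";

// Hypothetical Drizzle table definition.
const users = mysqlTable("users", {
  id: int("id").primaryKey(),
  name: varchar("name", { length: 255 }),
});

// getTableMetadata() now locates Drizzle's internal symbols via extractTableSymbols()
// and fills the builder arrays through processExtraConfigBuilders().
const { tableName, columns } = getTableMetadata(users);

// getPrimaryKeys() first filters columns flagged as primary (see the earlier hunk),
// so for this table it should yield the ["id", users.id] pair.
const primaryKeyPairs = getPrimaryKeys(users);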
@@ -372,7 +495,7 @@ function mapSelectTableToAlias(
   const { columns, tableName } = getTableMetadata(table);
   const selectionsTableFields: Record<string, unknown> = {};
   for (const name of Object.keys(columns)) {
-    const column = columns[name]
+    const column = columns[name];
     const uniqName = `a_${uniqPrefix}_${tableName}_${column.name}`.toLowerCase();
     const fieldAlias = sql.raw(uniqName);
     selectionsTableFields[name] = sql`${column} as \`${fieldAlias}\``;
@@ -419,38 +542,71 @@ export function mapSelectFieldsWithAlias<TSelection extends SelectedFields>(
   }
   const aliasMap: AliasColumnMap = {};
   const selections: any = {};
-  for (
-    const [name, fields1] = Object.entries(fields)[i];
+  for (const [name, fields1] of Object.entries(fields)) {
     mapSelectAllFieldsToAlias(selections, name, name, fields1, aliasMap);
   }
   return { selections, aliasMap };
 }
 
-  … (15 lines of the old inline alias-extraction implementation are not captured in this diff view)
-              return values[0];
-            }
-          }
-        }
-      }
-    }
+/**
+ * Checks if value is a SQL object with queryChunks
+ */
+function isSQLValue(value: unknown): value is SQL {
+  return (
+    value !== null && typeof value === "object" && isSQLWrapper(value) && "queryChunks" in value
+  );
+}
+
+/**
+ * Extracts the alias name chunk from query chunks if it exists and is a SQL object
+ */
+function getAliasNameChunk(queryChunks: any[]): SQL | undefined {
+  if (queryChunks.length <= 3) {
+    return undefined;
   }
+
+  const aliasNameChunk = queryChunks.at(-2);
+  if (isSQLWrapper(aliasNameChunk) && "queryChunks" in aliasNameChunk) {
+    return aliasNameChunk as SQL;
+  }
+
+  return undefined;
+}
+
+/**
+ * Extracts string value from a SQL chunk if it contains a single string value
+ */
+function extractStringValueFromChunk(chunk: SQL): string | undefined {
+  if (chunk.queryChunks?.length !== 1 || !chunk.queryChunks[0]) {
+    return undefined;
+  }
+
+  const stringChunk = chunk.queryChunks[0];
+  if (!("value" in stringChunk)) {
+    return undefined;
+  }
+
+  const values = (stringChunk as StringChunk).value;
+  if (values?.length === 1) {
+    return values[0];
+  }
+
   return undefined;
 }
 
+function getAliasFromDrizzleAlias(value: unknown): string | undefined {
+  if (!isSQLValue(value)) {
+    return undefined;
+  }
+
+  const aliasNameChunk = getAliasNameChunk(value.queryChunks);
+  if (!aliasNameChunk) {
+    return undefined;
+  }
+
+  return extractStringValueFromChunk(aliasNameChunk);
+}
+
 function transformValue(
   value: unknown,
   alias: string,
@@ -504,7 +660,7 @@ export function applyFromDriverTransform<T, TSelection>(
       row as Record<string, unknown>,
       selections as Record<string, unknown>,
       aliasMap,
-    )
+    );
 
     return processNullBranches(transformed) as unknown as T;
   });
@@ -772,5 +928,5 @@ export function withTidbHint<
 >(column: AnyMySqlColumn<TPartial>): AnyMySqlColumn<TPartial> {
   // We lie a bit to TypeScript here: at runtime this is a new SQL fragment,
   // but returning TExpr keeps the column type info in downstream inference.
-  return sql`/*+ SET_VAR(tidb_session_alias=${sql.raw(
+  return sql`/*+ SET_VAR(tidb_session_alias=${sql.raw(SESSION_ALIAS_NAME_ORM)}) */ ${column}` as unknown as AnyMySqlColumn<TPartial>;
 }
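
withTidbHint above now builds the whole hinted fragment on one line; it still tags a column expression with a tidb_session_alias optimizer hint. A rough usage sketch — the `users` table and the import path are assumptions, and the commented select call only indicates where a hinted column could be passed:

import { int, mysqlTable } from "drizzle-orm/mysql-core";
// Assumed re-export path for the helper changed in the hunk above.
import { withTidbHint } from "forge-sql-orm";

const users = mysqlTable("users", { id: int("id").primaryKey() });

// The wrapper keeps the column's inferred type while prepending the
// /*+ SET_VAR(tidb_session_alias=...) */ hint to the emitted SQL.
const hintedId = withTidbHint(users.id);

// e.g. anywhere an AnyMySqlColumn is accepted, such as a select field map:
// db.select({ id: hintedId }).from(users);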

package/src/webtriggers/applyMigrationsWebTrigger.ts
CHANGED
@@ -50,7 +50,7 @@ export const applySchemaMigrations = async (
   );
 
   migrationHistory = sortedMigrations
-    .map((y) => `${y.id}, ${y.name}, ${y.migratedAt.
+    .map((y) => `${y.id}, ${y.name}, ${y.migratedAt.toISOString()}`)
     .join("\n");
   }
   // eslint-disable-next-line no-console

package/src/webtriggers/index.ts
CHANGED
@@ -4,6 +4,7 @@ export * from "./fetchSchemaWebTrigger";
 export * from "./dropTablesMigrationWebTrigger";
 export * from "./clearCacheSchedulerTrigger";
 export * from "./slowQuerySchedulerTrigger";
+export * from "./topSlowestStatementLastHourTrigger";
 
 export interface TriggerResponse<BODY> {
   body?: BODY;

package/src/webtriggers/topSlowestStatementLastHourTrigger.ts
ADDED
@@ -0,0 +1,69 @@
+import { ForgeSqlOperation } from "../core/ForgeSQLQueryBuilder";
+import { slowQuerySchedulerTrigger, TriggerResponse } from "./";
+import { OperationType } from "../utils/requestTypeContextUtils";
+
+export interface TriggerOptions {
+  warnThresholdMs?: number;
+  memoryThresholdBytes?: number;
+  showPlan?: boolean;
+  operationType?: OperationType;
+  topN?: number;
+  hours?: number;
+  tables?: "SUMMARY_AND_HISTORY" | "CLUSTER_SUMMARY_AND_HISTORY";
+}
+
+/**
+ * @deprecated This function is deprecated and will be removed in a future version.
+ *
+ * This function was previously a complex implementation that directly queried
+ * CLUSTER_STATEMENTS_SUMMARY tables to analyze query performance. However, this approach
+ * had reliability issues with long-running functions where metadata could be evicted
+ * before the function completes.
+ *
+ * The recommended replacement is to use the new observability system with `executeWithMetadata`:
+ * - **TopSlowest mode** (default): Deterministic logging of SQL digests executed in resolvers
+ * - **SummaryTable mode** (optional): Uses CLUSTER_STATEMENTS_SUMMARY with a short memory window
+ * - Automatic fallback mechanisms for long-running functions
+ * - More reliable post-mortem diagnostics for Timeout and OOM errors
+ *
+ * Note: `slowQuerySchedulerTrigger` is a different function that analyzes TiDB's slow query log
+ * and is not a direct replacement for this function.
+ *
+ * For more details on the improvements and migration path, see:
+ * https://community.developer.atlassian.com/t/practical-sql-observability-for-forge-apps-with-forge-sql-orm/123456
+ *
+ * @param orm - ForgeSQL ORM instance
+ * @param options - Configuration options (currently passed to slowQuerySchedulerTrigger as a temporary wrapper)
+ * @returns Promise<TriggerResponse<string>> - HTTP response with query results or error
+ *
+ * @example
+ * ```typescript
+ * // Old usage (deprecated):
+ * await topSlowestStatementLastHourTrigger(forgeSQL, { hours: 1 });
+ *
+ * // New usage (recommended - use executeWithMetadata in your resolvers):
+ * await forgeSQL.executeWithMetadata(
+ *   async () => {
+ *     // your resolver logic
+ *   },
+ *   async (totalDbTime, totalResponseSize, printPlan) => {
+ *     // custom observability logic
+ *     if (totalDbTime > 1000) await printPlan();
+ *   },
+ *   {
+ *     mode: "TopSlowest",
+ *     topQueries: 1,
+ *     showSlowestPlans: true
+ *   }
+ * );
+ * ```
+ */
+export const topSlowestStatementLastHourTrigger = async (
+  orm: ForgeSqlOperation,
+  options?: TriggerOptions,
+): Promise<TriggerResponse<string>> => {
+  return slowQuerySchedulerTrigger(
+    orm,
+    options ? { timeout: 3000, hours: options.hours ?? 1 } : { timeout: 3000, hours: 1 },
+  );
+};
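
To close the loop on the deprecation note above, a minimal sketch of keeping the old wrapper wired up while resolvers migrate — the import paths and the way the ForgeSqlOperation instance is obtained are assumptions, not taken from the package:

// Assumed import paths; adjust to the package's actual entry points.
import type { ForgeSqlOperation } from "forge-sql-orm";
import { topSlowestStatementLastHourTrigger, TriggerResponse } from "forge-sql-orm/webtriggers";

// Per this diff, the deprecated wrapper simply delegates to slowQuerySchedulerTrigger
// with { timeout: 3000, hours: options?.hours ?? 1 }, so an existing web trigger keeps
// working while resolvers move to executeWithMetadata (see the @example in the JSDoc above).
export const makeSlowStatementsHandler =
  (orm: ForgeSqlOperation) =>
  async (): Promise<TriggerResponse<string>> =>
    topSlowestStatementLastHourTrigger(orm, { hours: 1 });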