@oneuptime/common 10.0.29 → 10.0.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -16,6 +16,12 @@ import LogAggregationService, {
16
16
  HistogramRequest,
17
17
  FacetValue,
18
18
  FacetRequest,
19
+ AnalyticsRequest,
20
+ AnalyticsChartType,
21
+ AnalyticsAggregation,
22
+ AnalyticsTimeseriesRow,
23
+ AnalyticsTopItem,
24
+ AnalyticsTableRow,
19
25
  } from "../Services/LogAggregationService";
20
26
  import ObjectID from "../../Types/ObjectID";
21
27
  import OneUptimeDate from "../../Types/Date";
@@ -264,6 +270,146 @@ router.post(
264
270
  },
265
271
  );
266
272
 
273
+ // --- Log Analytics Endpoint ---
274
+
275
+ router.post(
276
+ "/telemetry/logs/analytics",
277
+ UserMiddleware.getUserMiddleware,
278
+ async (
279
+ req: ExpressRequest,
280
+ res: ExpressResponse,
281
+ next: NextFunction,
282
+ ): Promise<void> => {
283
+ try {
284
+ const databaseProps: DatabaseCommonInteractionProps =
285
+ await CommonAPI.getDatabaseCommonInteractionProps(req);
286
+
287
+ if (!databaseProps?.tenantId) {
288
+ return Response.sendErrorResponse(
289
+ req,
290
+ res,
291
+ new BadDataException("Invalid Project ID"),
292
+ );
293
+ }
294
+
295
+ const body: JSONObject = req.body as JSONObject;
296
+
297
+ const chartType: AnalyticsChartType =
298
+ (body["chartType"] as AnalyticsChartType) || "timeseries";
299
+
300
+ if (!["timeseries", "toplist", "table"].includes(chartType)) {
301
+ return Response.sendErrorResponse(
302
+ req,
303
+ res,
304
+ new BadDataException("Invalid chartType"),
305
+ );
306
+ }
307
+
308
+ const aggregation: AnalyticsAggregation =
309
+ (body["aggregation"] as AnalyticsAggregation) || "count";
310
+
311
+ if (!["count", "unique"].includes(aggregation)) {
312
+ return Response.sendErrorResponse(
313
+ req,
314
+ res,
315
+ new BadDataException("Invalid aggregation"),
316
+ );
317
+ }
318
+
319
+ const startTime: Date = body["startTime"]
320
+ ? OneUptimeDate.fromString(body["startTime"] as string)
321
+ : OneUptimeDate.addRemoveHours(OneUptimeDate.getCurrentDate(), -1);
322
+
323
+ const endTime: Date = body["endTime"]
324
+ ? OneUptimeDate.fromString(body["endTime"] as string)
325
+ : OneUptimeDate.getCurrentDate();
326
+
327
+ const bucketSizeInMinutes: number =
328
+ (body["bucketSizeInMinutes"] as number) ||
329
+ computeDefaultBucketSize(startTime, endTime);
330
+
331
+ const serviceIds: Array<ObjectID> | undefined = body["serviceIds"]
332
+ ? (body["serviceIds"] as Array<string>).map((id: string) => {
333
+ return new ObjectID(id);
334
+ })
335
+ : undefined;
336
+
337
+ const severityTexts: Array<string> | undefined = body["severityTexts"]
338
+ ? (body["severityTexts"] as Array<string>)
339
+ : undefined;
340
+
341
+ const bodySearchText: string | undefined = body["bodySearchText"]
342
+ ? (body["bodySearchText"] as string)
343
+ : undefined;
344
+
345
+ const traceIds: Array<string> | undefined = body["traceIds"]
346
+ ? (body["traceIds"] as Array<string>)
347
+ : undefined;
348
+
349
+ const spanIds: Array<string> | undefined = body["spanIds"]
350
+ ? (body["spanIds"] as Array<string>)
351
+ : undefined;
352
+
353
+ const groupBy: Array<string> | undefined = body["groupBy"]
354
+ ? (body["groupBy"] as Array<string>)
355
+ : undefined;
356
+
357
+ const aggregationField: string | undefined = body["aggregationField"]
358
+ ? (body["aggregationField"] as string)
359
+ : undefined;
360
+
361
+ const limit: number | undefined = body["limit"]
362
+ ? (body["limit"] as number)
363
+ : undefined;
364
+
365
+ const request: AnalyticsRequest = {
366
+ projectId: databaseProps.tenantId,
367
+ startTime,
368
+ endTime,
369
+ bucketSizeInMinutes,
370
+ chartType,
371
+ groupBy,
372
+ aggregation,
373
+ aggregationField,
374
+ serviceIds,
375
+ severityTexts,
376
+ bodySearchText,
377
+ traceIds,
378
+ spanIds,
379
+ limit,
380
+ };
381
+
382
+ if (chartType === "timeseries") {
383
+ const data: Array<AnalyticsTimeseriesRow> =
384
+ await LogAggregationService.getAnalyticsTimeseries(request);
385
+
386
+ return Response.sendJsonObjectResponse(req, res, {
387
+ data: data as unknown as JSONObject,
388
+ });
389
+ }
390
+
391
+ if (chartType === "toplist") {
392
+ const data: Array<AnalyticsTopItem> =
393
+ await LogAggregationService.getAnalyticsTopList(request);
394
+
395
+ return Response.sendJsonObjectResponse(req, res, {
396
+ data: data as unknown as JSONObject,
397
+ });
398
+ }
399
+
400
+ // table
401
+ const data: Array<AnalyticsTableRow> =
402
+ await LogAggregationService.getAnalyticsTable(request);
403
+
404
+ return Response.sendJsonObjectResponse(req, res, {
405
+ data: data as unknown as JSONObject,
406
+ });
407
+ } catch (err: unknown) {
408
+ next(err);
409
+ }
410
+ },
411
+ );
412
+
267
413
  // --- Helpers ---
268
414
 
269
415
  function computeDefaultBucketSize(startTime: Date, endTime: Date): number {
@@ -44,6 +44,42 @@ export interface FacetRequest {
44
44
  spanIds?: Array<string> | undefined;
45
45
  }
46
46
 
47
/** Chart shapes supported by the log analytics endpoint. */
export type AnalyticsChartType = "timeseries" | "toplist" | "table";

/** How matching rows are aggregated: plain row count, or distinct values of a field. */
export type AnalyticsAggregation = "count" | "unique";

/** Parameters for a single log analytics query. */
export interface AnalyticsRequest {
  // Tenant scope; every query is filtered by this project.
  projectId: ObjectID;
  // Inclusive lower bound on log time.
  startTime: Date;
  // Inclusive upper bound on log time.
  endTime: Date;
  // Timeseries bucket width; not used by toplist/table queries.
  bucketSizeInMinutes: number;
  chartType: AnalyticsChartType;
  // Top-level columns or attribute keys. Required (non-empty) for
  // toplist/table; optional for timeseries.
  groupBy?: Array<string> | undefined;
  aggregation: AnalyticsAggregation;
  // Field for "unique" aggregation; ignored for "count".
  aggregationField?: string | undefined;
  // Optional filters, combined with the time/project bounds.
  serviceIds?: Array<ObjectID> | undefined;
  severityTexts?: Array<string> | undefined;
  bodySearchText?: string | undefined;
  traceIds?: Array<string> | undefined;
  spanIds?: Array<string> | undefined;
  // Max rows for toplist/table; service default applies when omitted.
  limit?: number | undefined;
}

/** One time bucket of a timeseries result. */
export interface AnalyticsTimeseriesRow {
  // Bucket start time, as serialized by the database.
  time: string;
  // Aggregated value for the bucket.
  count: number;
  // groupBy key -> value for this row (empty when no groupBy was given).
  groupValues: Record<string, string>;
}

/** One entry of a top-list result. */
export interface AnalyticsTopItem {
  // Value of the first groupBy dimension.
  value: string;
  // Aggregated value for this group.
  count: number;
}

/** One row of a table result. */
export interface AnalyticsTableRow {
  // groupBy key -> value for this row.
  groupValues: Record<string, string>;
  // Aggregated value for this group combination.
  count: number;
}
82
+
47
83
  export class LogAggregationService {
48
84
  private static readonly DEFAULT_FACET_LIMIT: number = 500;
49
85
  private static readonly TABLE_NAME: string = "LogItem";
@@ -197,6 +233,357 @@ export class LogAggregationService {
197
233
  return statement;
198
234
  }
199
235
 
236
  // Default row cap for top-list and table analytics queries when the
  // request does not specify a limit.
  private static readonly DEFAULT_ANALYTICS_LIMIT: number = 10;
  // Upper bound on groupBy dimensions, to keep result cardinality bounded.
  private static readonly MAX_GROUP_BY_DIMENSIONS: number = 2;
238
+
239
+ @CaptureSpan()
240
+ public static async getAnalyticsTimeseries(
241
+ request: AnalyticsRequest,
242
+ ): Promise<Array<AnalyticsTimeseriesRow>> {
243
+ const statement: Statement =
244
+ LogAggregationService.buildAnalyticsTimeseriesStatement(request);
245
+
246
+ const dbResult: Results = await LogDatabaseService.executeQuery(statement);
247
+ const response: DbJSONResponse = await dbResult.json<{
248
+ data?: Array<JSONObject>;
249
+ }>();
250
+
251
+ const rows: Array<JSONObject> = response.data || [];
252
+ const groupByKeys: Array<string> = request.groupBy || [];
253
+
254
+ return rows.map((row: JSONObject): AnalyticsTimeseriesRow => {
255
+ const groupValues: Record<string, string> = {};
256
+
257
+ for (const key of groupByKeys) {
258
+ const alias: string = LogAggregationService.groupByAlias(key);
259
+ groupValues[key] = String(row[alias] || "");
260
+ }
261
+
262
+ return {
263
+ time: String(row["bucket"] || ""),
264
+ count: Number(row["cnt"] || 0),
265
+ groupValues,
266
+ };
267
+ });
268
+ }
269
+
270
+ @CaptureSpan()
271
+ public static async getAnalyticsTopList(
272
+ request: AnalyticsRequest,
273
+ ): Promise<Array<AnalyticsTopItem>> {
274
+ if (!request.groupBy || request.groupBy.length === 0) {
275
+ throw new BadDataException(
276
+ "groupBy with at least one dimension is required for top list",
277
+ );
278
+ }
279
+
280
+ const statement: Statement =
281
+ LogAggregationService.buildAnalyticsTopListStatement(request);
282
+
283
+ const dbResult: Results = await LogDatabaseService.executeQuery(statement);
284
+ const response: DbJSONResponse = await dbResult.json<{
285
+ data?: Array<JSONObject>;
286
+ }>();
287
+
288
+ const rows: Array<JSONObject> = response.data || [];
289
+
290
+ return rows
291
+ .map((row: JSONObject): AnalyticsTopItem => {
292
+ return {
293
+ value: String(row["val"] || ""),
294
+ count: Number(row["cnt"] || 0),
295
+ };
296
+ })
297
+ .filter((item: AnalyticsTopItem): boolean => {
298
+ return item.value.length > 0;
299
+ });
300
+ }
301
+
302
+ @CaptureSpan()
303
+ public static async getAnalyticsTable(
304
+ request: AnalyticsRequest,
305
+ ): Promise<Array<AnalyticsTableRow>> {
306
+ if (!request.groupBy || request.groupBy.length === 0) {
307
+ throw new BadDataException(
308
+ "groupBy with at least one dimension is required for table",
309
+ );
310
+ }
311
+
312
+ const statement: Statement =
313
+ LogAggregationService.buildAnalyticsTableStatement(request);
314
+
315
+ const dbResult: Results = await LogDatabaseService.executeQuery(statement);
316
+ const response: DbJSONResponse = await dbResult.json<{
317
+ data?: Array<JSONObject>;
318
+ }>();
319
+
320
+ const rows: Array<JSONObject> = response.data || [];
321
+ const groupByKeys: Array<string> = request.groupBy;
322
+
323
+ return rows.map((row: JSONObject): AnalyticsTableRow => {
324
+ const groupValues: Record<string, string> = {};
325
+
326
+ for (const key of groupByKeys) {
327
+ const alias: string = LogAggregationService.groupByAlias(key);
328
+ groupValues[key] = String(row[alias] || "");
329
+ }
330
+
331
+ return {
332
+ groupValues,
333
+ count: Number(row["cnt"] || 0),
334
+ };
335
+ });
336
+ }
337
+
338
+ private static groupByAlias(key: string): string {
339
+ if (LogAggregationService.isTopLevelColumn(key)) {
340
+ return key;
341
+ }
342
+
343
+ // For attribute keys, use a sanitized alias
344
+ return `attr_${key.replace(/[^a-zA-Z0-9_]/g, "_")}`;
345
+ }
346
+
347
  /**
   * Appends one SELECT expression per groupBy key to the statement.
   *
   * Top-level columns are stringified and aliased with their own name.
   * Attribute keys are read from the `attributes` JSON column with the key
   * bound as a query parameter, and aliased via groupByAlias(), which
   * sanitizes the alias to [a-zA-Z0-9_] so direct interpolation is safe.
   *
   * Throws (via validateFacetKey) when a key is not an allowed facet.
   */
  private static appendGroupBySelect(
    statement: Statement,
    groupByKeys: Array<string>,
  ): void {
    for (const key of groupByKeys) {
      LogAggregationService.validateFacetKey(key);

      if (LogAggregationService.isTopLevelColumn(key)) {
        statement.append(`, toString(${key}) AS ${key}`);
      } else {
        const alias: string = LogAggregationService.groupByAlias(key);
        // NOTE(review): JSONExtractRaw returns the raw JSON value (string
        // values keep their quotes) — confirm this is the intended display
        // form vs JSONExtractString. Also assumes the SQL tag emits plain
        // string interpolations like ${alias} as raw text, not as bound
        // parameters — verify against Statement's implementation.
        statement.append(
          SQL`, JSONExtractRaw(attributes, ${{
            type: TableColumnType.Text,
            value: key,
          }}) AS ${alias}`,
        );
      }
    }
  }
367
+
368
+ private static appendGroupByClause(
369
+ statement: Statement,
370
+ groupByKeys: Array<string>,
371
+ ): void {
372
+ for (const key of groupByKeys) {
373
+ if (LogAggregationService.isTopLevelColumn(key)) {
374
+ statement.append(`, ${key}`);
375
+ } else {
376
+ const alias: string = LogAggregationService.groupByAlias(key);
377
+ statement.append(`, ${alias}`);
378
+ }
379
+ }
380
+ }
381
+
382
+ private static getAggregationExpression(request: AnalyticsRequest): string {
383
+ if (request.aggregation === "unique" && request.aggregationField) {
384
+ LogAggregationService.validateFacetKey(request.aggregationField);
385
+
386
+ if (LogAggregationService.isTopLevelColumn(request.aggregationField)) {
387
+ return `uniqExact(${request.aggregationField})`;
388
+ }
389
+
390
+ return `uniqExact(JSONExtractRaw(attributes, '${request.aggregationField.replace(/'/g, "\\'")}'))`;
391
+ }
392
+
393
+ return "count()";
394
+ }
395
+
396
+ private static validateGroupBy(groupBy: Array<string> | undefined): void {
397
+ if (!groupBy) {
398
+ return;
399
+ }
400
+
401
+ if (groupBy.length > LogAggregationService.MAX_GROUP_BY_DIMENSIONS) {
402
+ throw new BadDataException(
403
+ `groupBy supports at most ${LogAggregationService.MAX_GROUP_BY_DIMENSIONS} dimensions`,
404
+ );
405
+ }
406
+
407
+ for (const key of groupBy) {
408
+ LogAggregationService.validateFacetKey(key);
409
+ }
410
+ }
411
+
412
  /**
   * Builds the bucketed timeseries query: one row per (time bucket [, group
   * combination]) with the bucket start aliased `bucket` and the aggregate
   * aliased `cnt`.
   *
   * Append order is load-bearing: SELECT bucket -> aggregate -> optional
   * group columns -> FROM/WHERE with project + time bounds -> shared
   * filters -> GROUP BY bucket (+ group aliases) -> ORDER BY bucket.
   *
   * NOTE(review): TABLE_NAME is a plain-string interpolation inside the SQL
   * tag — assumes the tag emits string interpolations as raw text rather
   * than bound parameters; this matches its use elsewhere in this class.
   */
  private static buildAnalyticsTimeseriesStatement(
    request: AnalyticsRequest,
  ): Statement {
    LogAggregationService.validateGroupBy(request.groupBy);

    // Bucket width is bound as a parameter, in seconds.
    const intervalSeconds: number = request.bucketSizeInMinutes * 60;
    const aggExpr: string =
      LogAggregationService.getAggregationExpression(request);

    const statement: Statement = SQL`
        SELECT
            toStartOfInterval(time, INTERVAL ${{
              type: TableColumnType.Number,
              value: intervalSeconds,
            }} SECOND) AS bucket`;

    statement.append(`, ${aggExpr} AS cnt`);

    if (request.groupBy && request.groupBy.length > 0) {
      LogAggregationService.appendGroupBySelect(statement, request.groupBy);
    }

    statement.append(
      SQL`
        FROM ${LogAggregationService.TABLE_NAME}
        WHERE projectId = ${{
          type: TableColumnType.ObjectID,
          value: request.projectId,
        }}
        AND time >= ${{
          type: TableColumnType.Date,
          value: request.startTime,
        }}
        AND time <= ${{
          type: TableColumnType.Date,
          value: request.endTime,
        }}`,
    );

    // Service / severity / body / trace / span filters shared with the other
    // analytics queries.
    LogAggregationService.appendCommonFilters(statement, request);

    statement.append(" GROUP BY bucket");

    if (request.groupBy && request.groupBy.length > 0) {
      LogAggregationService.appendGroupByClause(statement, request.groupBy);
    }

    statement.append(" ORDER BY bucket ASC");

    return statement;
  }
463
+
464
  /**
   * Builds the top-list query: distinct values of the FIRST groupBy key
   * (aliased `val`) with their aggregate (aliased `cnt`), ordered by `cnt`
   * descending and capped by the request limit (default
   * DEFAULT_ANALYTICS_LIMIT).
   *
   * Callers guarantee groupBy has at least one entry (getAnalyticsTopList
   * throws otherwise); only the first key is used here, and it is checked
   * against the facet whitelist before being interpolated.
   */
  private static buildAnalyticsTopListStatement(
    request: AnalyticsRequest,
  ): Statement {
    const groupByKey: string = request.groupBy![0]!;
    LogAggregationService.validateFacetKey(groupByKey);

    const limit: number =
      request.limit ?? LogAggregationService.DEFAULT_ANALYTICS_LIMIT;
    const aggExpr: string =
      LogAggregationService.getAggregationExpression(request);

    const isTopLevel: boolean =
      LogAggregationService.isTopLevelColumn(groupByKey);

    const statement: Statement = new Statement();

    if (isTopLevel) {
      // Top-level column names pass the whitelist check above, so direct
      // interpolation is safe here.
      statement.append(
        `SELECT toString(${groupByKey}) AS val, ${aggExpr} AS cnt FROM ${LogAggregationService.TABLE_NAME}`,
      );
    } else {
      // Attribute keys are bound as parameters; the value is extracted as
      // raw JSON (string values keep their quotes).
      statement.append(`SELECT JSONExtractRaw(attributes, `);
      statement.append(
        SQL`${{
          type: TableColumnType.Text,
          value: groupByKey,
        }}`,
      );
      statement.append(
        `) AS val, ${aggExpr} AS cnt FROM ${LogAggregationService.TABLE_NAME}`,
      );
    }

    statement.append(
      SQL` WHERE projectId = ${{
        type: TableColumnType.ObjectID,
        value: request.projectId,
      }} AND time >= ${{
        type: TableColumnType.Date,
        value: request.startTime,
      }} AND time <= ${{
        type: TableColumnType.Date,
        value: request.endTime,
      }}`,
    );

    // Only count rows that actually carry the attribute, so logs missing the
    // key do not collapse into one empty bucket.
    if (!isTopLevel) {
      statement.append(
        SQL` AND JSONHas(attributes, ${{
          type: TableColumnType.Text,
          value: groupByKey,
        }}) = 1`,
      );
    }

    LogAggregationService.appendCommonFilters(statement, request);

    statement.append(
      SQL` GROUP BY val ORDER BY cnt DESC LIMIT ${{
        type: TableColumnType.Number,
        value: limit,
      }}`,
    );

    return statement;
  }
530
+
531
+ private static buildAnalyticsTableStatement(
532
+ request: AnalyticsRequest,
533
+ ): Statement {
534
+ LogAggregationService.validateGroupBy(request.groupBy);
535
+
536
+ const groupByKeys: Array<string> = request.groupBy!;
537
+ const limit: number =
538
+ request.limit ?? LogAggregationService.DEFAULT_ANALYTICS_LIMIT;
539
+ const aggExpr: string =
540
+ LogAggregationService.getAggregationExpression(request);
541
+
542
+ const statement: Statement = new Statement();
543
+ statement.append(`SELECT ${aggExpr} AS cnt`);
544
+
545
+ LogAggregationService.appendGroupBySelect(statement, groupByKeys);
546
+
547
+ statement.append(
548
+ SQL`
549
+ FROM ${LogAggregationService.TABLE_NAME}
550
+ WHERE projectId = ${{
551
+ type: TableColumnType.ObjectID,
552
+ value: request.projectId,
553
+ }}
554
+ AND time >= ${{
555
+ type: TableColumnType.Date,
556
+ value: request.startTime,
557
+ }}
558
+ AND time <= ${{
559
+ type: TableColumnType.Date,
560
+ value: request.endTime,
561
+ }}`,
562
+ );
563
+
564
+ LogAggregationService.appendCommonFilters(statement, request);
565
+
566
+ // Build GROUP BY from aliases
567
+ const aliases: Array<string> = groupByKeys.map((key: string) => {
568
+ if (LogAggregationService.isTopLevelColumn(key)) {
569
+ return key;
570
+ }
571
+
572
+ return LogAggregationService.groupByAlias(key);
573
+ });
574
+
575
+ statement.append(` GROUP BY ${aliases.join(", ")}`);
576
+
577
+ statement.append(
578
+ SQL` ORDER BY cnt DESC LIMIT ${{
579
+ type: TableColumnType.Number,
580
+ value: limit,
581
+ }}`,
582
+ );
583
+
584
+ return statement;
585
+ }
586
+
200
587
  private static appendCommonFilters(
201
588
  statement: Statement,
202
589
  request: Pick<
@@ -138,6 +138,21 @@ export default class OnTriggerBaseModel<
138
138
  select = JSONFunctions.parse(select) as Select<TBaseModel>;
139
139
  }
140
140
 
141
+ /*
142
+ * Convert string "true"/"false" values to booleans in select.
143
+ * Workflow arguments may pass select values as strings (e.g. "true")
144
+ * which causes TypeORM to iterate string characters as property indices.
145
+ */
146
+ if (select && typeof select === "object") {
147
+ for (const key in select) {
148
+ if ((select as any)[key] === "true") {
149
+ (select as any)[key] = true;
150
+ } else if ((select as any)[key] === "false") {
151
+ (select as any)[key] = false;
152
+ }
153
+ }
154
+ }
155
+
141
156
  const model: TBaseModel | null = await this.service!.findOneById({
142
157
  id: new ObjectID(data["_id"].toString()),
143
158
  props: {