@checkstack/healthcheck-backend 0.8.2 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +66 -0
- package/drizzle/0009_late_argent.sql +1 -0
- package/drizzle/meta/0009_snapshot.json +426 -0
- package/drizzle/meta/_journal.json +7 -0
- package/package.json +10 -8
- package/src/aggregation-utils.test.ts +276 -1
- package/src/aggregation-utils.ts +223 -42
- package/src/aggregation.test.ts +23 -12
- package/src/availability.test.ts +108 -97
- package/src/index.ts +6 -3
- package/src/queue-executor.test.ts +5 -3
- package/src/queue-executor.ts +38 -1
- package/src/realtime-aggregation.test.ts +466 -0
- package/src/realtime-aggregation.ts +289 -0
- package/src/retention-job.ts +13 -165
- package/src/schema.ts +2 -0
- package/src/service-ordering.test.ts +6 -6
- package/src/service-pause.test.ts +1 -1
- package/src/service.ts +71 -57
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
import { describe, it, expect, mock, beforeEach } from "bun:test";
|
|
2
|
+
import {
|
|
3
|
+
getHourBucketStart,
|
|
4
|
+
serializeTDigest,
|
|
5
|
+
deserializeTDigest,
|
|
6
|
+
incrementHourlyAggregate,
|
|
7
|
+
} from "./realtime-aggregation";
|
|
8
|
+
import { TDigest } from "tdigest";
|
|
9
|
+
|
|
10
|
+
describe("getHourBucketStart", () => {
|
|
11
|
+
it("floors timestamp to the hour", () => {
|
|
12
|
+
const input = new Date("2024-01-15T10:35:42.123Z");
|
|
13
|
+
const result = getHourBucketStart(input);
|
|
14
|
+
expect(result.getTime()).toBe(
|
|
15
|
+
new Date("2024-01-15T10:00:00.000Z").getTime(),
|
|
16
|
+
);
|
|
17
|
+
});
|
|
18
|
+
|
|
19
|
+
it("returns same time if already at hour boundary", () => {
|
|
20
|
+
const input = new Date("2024-01-15T10:00:00.000Z");
|
|
21
|
+
const result = getHourBucketStart(input);
|
|
22
|
+
expect(result.getTime()).toBe(input.getTime());
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
it("handles midnight correctly", () => {
|
|
26
|
+
const input = new Date("2024-01-15T00:30:00.000Z");
|
|
27
|
+
const result = getHourBucketStart(input);
|
|
28
|
+
expect(result.getTime()).toBe(
|
|
29
|
+
new Date("2024-01-15T00:00:00.000Z").getTime(),
|
|
30
|
+
);
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
it("handles end of day correctly", () => {
|
|
34
|
+
const input = new Date("2024-01-15T23:59:59.999Z");
|
|
35
|
+
const result = getHourBucketStart(input);
|
|
36
|
+
expect(result.getTime()).toBe(
|
|
37
|
+
new Date("2024-01-15T23:00:00.000Z").getTime(),
|
|
38
|
+
);
|
|
39
|
+
});
|
|
40
|
+
});
|
|
41
|
+
|
|
42
|
+
// Round-trip tests for serializeTDigest/deserializeTDigest: a digest must
// survive serialization with its percentile estimates approximately intact,
// and a deserialized digest must accept further pushes.
describe("t-digest serialization", () => {
  it("serializes and deserializes empty t-digest", () => {
    const original = new TDigest();
    const serialized = serializeTDigest(original);
    const restored = deserializeTDigest(serialized);

    // Empty digest returns empty array and restores to zero centroids
    expect(serialized.length).toBe(0);
    expect(restored.size()).toBe(0);
  });

  it("serializes and deserializes t-digest with values", () => {
    const original = new TDigest();
    original.push(100);
    original.push(200);
    original.push(300);

    const serialized = serializeTDigest(original);
    const restored = deserializeTDigest(serialized);

    // Restored should give similar percentiles - median should be ~200
    const median = restored.percentile(0.5);
    expect(typeof median).toBe("number");
    // toBeCloseTo(200, -1) passes when |median - 200| < 10^1 / 2 = 5
    expect(median).toBeCloseTo(200, -1);
  });

  it("preserves p95 accuracy after serialization", () => {
    const original = new TDigest();
    // Add 100 values from 1 to 100
    for (let i = 1; i <= 100; i++) {
      original.push(i);
    }

    const serialized = serializeTDigest(original);
    const restored = deserializeTDigest(serialized);

    // p95 should be approximately 95; a loose [90, 100] window allows for
    // t-digest's approximation error at the tail
    const p95 = restored.percentile(0.95);
    expect(typeof p95).toBe("number");
    expect(p95).toBeGreaterThanOrEqual(90);
    expect(p95).toBeLessThanOrEqual(100);
  });

  it("handles incremental updates correctly", () => {
    // First batch
    const digest1 = new TDigest();
    digest1.push(100);
    digest1.push(110);

    // Serialize, deserialize, add more — the restored digest must remain
    // writable, mirroring how hourly aggregation resumes a stored state
    const serialized1 = serializeTDigest(digest1);
    const digest2 = deserializeTDigest(serialized1);
    digest2.push(120);
    digest2.push(130);

    // Should have values incorporated - median should be around 110-120
    const median = digest2.percentile(0.5);
    expect(typeof median).toBe("number");
    expect(median).toBeGreaterThanOrEqual(105);
    expect(median).toBeLessThanOrEqual(125);
  });
});
|
|
104
|
+
|
|
105
|
+
// Tests for incrementHourlyAggregate: status counters, latency min/max/sum,
// t-digest accumulation, and hour-bucket assignment, driven through a mocked
// fluent query builder (select().from().where().limit() and
// insert().values().onConflictDoUpdate() — presumably mirroring the drizzle
// API used by the real code; verify against realtime-aggregation.ts).
describe("incrementHourlyAggregate", () => {
  // Mock database
  let mockDb: ReturnType<typeof createMockDb>;
  // Captures every payload passed to insert(...).values(...)
  let insertedValues: unknown[];
  // Row returned by the mocked select; null simulates "no aggregate yet".
  // NOTE: the select chain reads this via closure, so tests may mutate it
  // after createMockDb() has run.
  let existingAggregate: {
    tdigestState: number[] | null;
    minLatencyMs: number | null;
    maxLatencyMs: number | null;
  } | null = null;

  // Builds a fresh mock db and resets the captured insert payloads.
  function createMockDb() {
    insertedValues = [];

    const mockInsertChain = {
      values: mock((values: unknown) => {
        insertedValues.push(values);
        return {
          onConflictDoUpdate: mock(() => Promise.resolve()),
        };
      }),
    };

    const mockSelectChain = {
      from: mock(() => ({
        where: mock(() => ({
          // Resolves to the current existingAggregate (closure), or []
          limit: mock(() =>
            Promise.resolve(existingAggregate ? [existingAggregate] : []),
          ),
        })),
      })),
    };

    return {
      select: mock(() => mockSelectChain),
      insert: mock(() => mockInsertChain),
    };
  }

  beforeEach(() => {
    existingAggregate = null;
    mockDb = createMockDb();
  });

  it("creates new aggregate when none exists", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 150,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    expect(mockDb.insert).toHaveBeenCalled();
    expect(insertedValues).toHaveLength(1);

    // First run seeds every counter from scratch
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.systemId).toBe("sys-1");
    expect(inserted.configurationId).toBe("config-1");
    expect(inserted.bucketSize).toBe("hourly");
    expect(inserted.runCount).toBe(1);
    expect(inserted.healthyCount).toBe(1);
    expect(inserted.degradedCount).toBe(0);
    expect(inserted.unhealthyCount).toBe(0);
    // Single data point: sum == min == max
    expect(inserted.latencySumMs).toBe(150);
    expect(inserted.minLatencyMs).toBe(150);
    expect(inserted.maxLatencyMs).toBe(150);
  });

  it("increments counts for unhealthy status", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "unhealthy",
      latencyMs: 500,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // Only the unhealthy counter moves
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.healthyCount).toBe(0);
    expect(inserted.degradedCount).toBe(0);
    expect(inserted.unhealthyCount).toBe(1);
  });

  it("increments counts for degraded status", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "degraded",
      latencyMs: 300,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // Only the degraded counter moves
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.healthyCount).toBe(0);
    expect(inserted.degradedCount).toBe(1);
    expect(inserted.unhealthyCount).toBe(0);
  });

  it("handles undefined latency", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: undefined,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // No latency recorded -> all latency fields omitted from the payload
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.latencySumMs).toBeUndefined();
    expect(inserted.minLatencyMs).toBeUndefined();
    expect(inserted.maxLatencyMs).toBeUndefined();
    expect(inserted.tdigestState).toBeUndefined();
  });

  it("updates min/max when existing aggregate has values", async () => {
    // Set up existing aggregate, then rebuild the mock so insert capture
    // starts clean for this test
    existingAggregate = {
      tdigestState: serializeTDigest(new TDigest()),
      minLatencyMs: 100,
      maxLatencyMs: 200,
    };
    mockDb = createMockDb();

    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 50, // Lower than existing min
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.minLatencyMs).toBe(50); // Updated to lower value
    expect(inserted.maxLatencyMs).toBe(200); // Unchanged
  });

  it("includes t-digest state for p95 calculation", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 150,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // A latency sample must produce both a serialized digest and a p95
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.tdigestState).toBeDefined();
    expect(Array.isArray(inserted.tdigestState)).toBe(true);
    expect(inserted.p95LatencyMs).toBeDefined();
  });

  it("uses correct bucket start time", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 150,
      runTimestamp: new Date("2024-01-15T10:35:42Z"),
    });

    // bucketStart must be floored to the containing hour
    const inserted = insertedValues[0] as Record<string, unknown>;
    const bucketStart = inserted.bucketStart as Date;
    expect(bucketStart.getTime()).toBe(
      new Date("2024-01-15T10:00:00Z").getTime(),
    );
  });

  it("updates max when new latency is higher", async () => {
    existingAggregate = {
      tdigestState: serializeTDigest(new TDigest()),
      minLatencyMs: 100,
      maxLatencyMs: 200,
    };
    mockDb = createMockDb();

    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 500, // Higher than existing max
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.minLatencyMs).toBe(100); // Unchanged
    expect(inserted.maxLatencyMs).toBe(500); // Updated to higher value
  });

  it("handles zero latency correctly", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 0,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // Zero is a valid latency, not "missing" — fields must be 0, not undefined
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.latencySumMs).toBe(0);
    expect(inserted.minLatencyMs).toBe(0);
    expect(inserted.maxLatencyMs).toBe(0);
  });

  it("handles very large latency values", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 1_000_000, // 1000 seconds
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.latencySumMs).toBe(1_000_000);
  });

  it("accumulates t-digest state across multiple runs", async () => {
    // First run: pre-existing digest already holds 100, 200, 300
    const digest1 = new TDigest();
    digest1.push(100);
    digest1.push(200);
    digest1.push(300);

    existingAggregate = {
      tdigestState: serializeTDigest(digest1),
      minLatencyMs: 100,
      maxLatencyMs: 300,
    };
    mockDb = createMockDb();

    // Second run adds a new value
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 150,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.tdigestState).toBeDefined();

    // Deserialize and verify all 4 values are incorporated
    const restoredDigest = deserializeTDigest(
      inserted.tdigestState as number[],
    );
    const size = restoredDigest.size();
    expect(size).toBeGreaterThanOrEqual(1); // At least some centroids

    // Median should be around 175 (average of 100, 150, 200, 300); the wide
    // [100, 300] window tolerates t-digest approximation
    const median = restoredDigest.percentile(0.5);
    expect(median).toBeGreaterThanOrEqual(100);
    expect(median).toBeLessThanOrEqual(300);
  });

  it("handles runs at exact hour boundary", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 100,
      runTimestamp: new Date("2024-01-15T10:00:00.000Z"), // Exact hour
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    const bucketStart = inserted.bucketStart as Date;
    expect(bucketStart.getTime()).toBe(
      new Date("2024-01-15T10:00:00.000Z").getTime(),
    );
  });

  it("handles runs at last millisecond of hour", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 100,
      runTimestamp: new Date("2024-01-15T10:59:59.999Z"), // Last ms of hour
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    const bucketStart = inserted.bucketStart as Date;
    // Should still be in the 10:00 bucket
    expect(bucketStart.getTime()).toBe(
      new Date("2024-01-15T10:00:00.000Z").getTime(),
    );
  });

  it("places runs after hour boundary in next hour", async () => {
    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 100,
      runTimestamp: new Date("2024-01-15T11:00:00.001Z"), // 1ms into next hour
    });

    const inserted = insertedValues[0] as Record<string, unknown>;
    const bucketStart = inserted.bucketStart as Date;
    expect(bucketStart.getTime()).toBe(
      new Date("2024-01-15T11:00:00.000Z").getTime(),
    );
  });

  it("handles existing aggregate with null tdigestState", async () => {
    // Row exists but has never stored a digest (e.g. legacy data)
    existingAggregate = {
      tdigestState: null,
      minLatencyMs: 100,
      maxLatencyMs: 200,
    };
    mockDb = createMockDb();

    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 150,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // A fresh digest must be created rather than crashing on null
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.tdigestState).toBeDefined();
    expect(Array.isArray(inserted.tdigestState)).toBe(true);
  });

  it("handles existing aggregate with null min/max latency", async () => {
    existingAggregate = {
      tdigestState: [],
      minLatencyMs: null,
      maxLatencyMs: null,
    };
    mockDb = createMockDb();

    await incrementHourlyAggregate({
      db: mockDb as never,
      systemId: "sys-1",
      configurationId: "config-1",
      status: "healthy",
      latencyMs: 150,
      runTimestamp: new Date("2024-01-15T10:35:00Z"),
    });

    // Null min/max must be replaced by the new sample, not compared against
    const inserted = insertedValues[0] as Record<string, unknown>;
    expect(inserted.minLatencyMs).toBe(150);
    expect(inserted.maxLatencyMs).toBe(150);
  });
});
|