observability-toolkit 1.3.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102) hide show
  1. package/README.md +52 -3
  2. package/dist/backends/index.d.ts +108 -1
  3. package/dist/backends/index.d.ts.map +1 -1
  4. package/dist/backends/local-jsonl-boolean-search.test.d.ts +2 -0
  5. package/dist/backends/local-jsonl-boolean-search.test.d.ts.map +1 -0
  6. package/dist/backends/local-jsonl-boolean-search.test.js +154 -0
  7. package/dist/backends/local-jsonl-boolean-search.test.js.map +1 -0
  8. package/dist/backends/local-jsonl.d.ts +73 -2
  9. package/dist/backends/local-jsonl.d.ts.map +1 -1
  10. package/dist/backends/local-jsonl.js +768 -103
  11. package/dist/backends/local-jsonl.js.map +1 -1
  12. package/dist/backends/local-jsonl.test.js +3326 -271
  13. package/dist/backends/local-jsonl.test.js.map +1 -1
  14. package/dist/backends/signoz-api.d.ts +13 -1
  15. package/dist/backends/signoz-api.d.ts.map +1 -1
  16. package/dist/backends/signoz-api.integration.test.d.ts +8 -0
  17. package/dist/backends/signoz-api.integration.test.d.ts.map +1 -0
  18. package/dist/backends/signoz-api.integration.test.js +137 -0
  19. package/dist/backends/signoz-api.integration.test.js.map +1 -0
  20. package/dist/backends/signoz-api.js +384 -42
  21. package/dist/backends/signoz-api.js.map +1 -1
  22. package/dist/backends/signoz-api.test.js +821 -325
  23. package/dist/backends/signoz-api.test.js.map +1 -1
  24. package/dist/lib/cache.d.ts +20 -0
  25. package/dist/lib/cache.d.ts.map +1 -0
  26. package/dist/lib/cache.js +63 -0
  27. package/dist/lib/cache.js.map +1 -0
  28. package/dist/lib/constants.d.ts +27 -0
  29. package/dist/lib/constants.d.ts.map +1 -1
  30. package/dist/lib/constants.js +70 -0
  31. package/dist/lib/constants.js.map +1 -1
  32. package/dist/lib/constants.test.js +183 -1
  33. package/dist/lib/constants.test.js.map +1 -1
  34. package/dist/lib/file-utils.d.ts +53 -1
  35. package/dist/lib/file-utils.d.ts.map +1 -1
  36. package/dist/lib/file-utils.js +142 -3
  37. package/dist/lib/file-utils.js.map +1 -1
  38. package/dist/lib/file-utils.test.js +228 -1
  39. package/dist/lib/file-utils.test.js.map +1 -1
  40. package/dist/lib/indexer.d.ts +78 -0
  41. package/dist/lib/indexer.d.ts.map +1 -0
  42. package/dist/lib/indexer.js +277 -0
  43. package/dist/lib/indexer.js.map +1 -0
  44. package/dist/lib/indexer.test.d.ts +2 -0
  45. package/dist/lib/indexer.test.d.ts.map +1 -0
  46. package/dist/lib/indexer.test.js +392 -0
  47. package/dist/lib/indexer.test.js.map +1 -0
  48. package/dist/lib/otlp-export.d.ts +178 -0
  49. package/dist/lib/otlp-export.d.ts.map +1 -0
  50. package/dist/lib/otlp-export.js +382 -0
  51. package/dist/lib/otlp-export.js.map +1 -0
  52. package/dist/server.js +48 -65
  53. package/dist/server.js.map +1 -1
  54. package/dist/server.test.d.ts +5 -0
  55. package/dist/server.test.d.ts.map +1 -0
  56. package/dist/server.test.js +547 -0
  57. package/dist/server.test.js.map +1 -0
  58. package/dist/tools/context-stats.test.js +126 -0
  59. package/dist/tools/context-stats.test.js.map +1 -1
  60. package/dist/tools/get-trace-url.d.ts.map +1 -1
  61. package/dist/tools/get-trace-url.js +5 -1
  62. package/dist/tools/get-trace-url.js.map +1 -1
  63. package/dist/tools/get-trace-url.test.js +12 -6
  64. package/dist/tools/get-trace-url.test.js.map +1 -1
  65. package/dist/tools/health-check.d.ts +9 -2
  66. package/dist/tools/health-check.d.ts.map +1 -1
  67. package/dist/tools/health-check.js +66 -27
  68. package/dist/tools/health-check.js.map +1 -1
  69. package/dist/tools/health-check.test.js +89 -96
  70. package/dist/tools/health-check.test.js.map +1 -1
  71. package/dist/tools/query-llm-events.d.ts +2 -2
  72. package/dist/tools/query-llm-events.js +3 -3
  73. package/dist/tools/query-llm-events.js.map +1 -1
  74. package/dist/tools/query-logs.d.ts +12 -10
  75. package/dist/tools/query-logs.d.ts.map +1 -1
  76. package/dist/tools/query-logs.js +5 -5
  77. package/dist/tools/query-logs.js.map +1 -1
  78. package/dist/tools/query-logs.test.js +22 -2
  79. package/dist/tools/query-logs.test.js.map +1 -1
  80. package/dist/tools/query-metrics.d.ts +12 -14
  81. package/dist/tools/query-metrics.d.ts.map +1 -1
  82. package/dist/tools/query-metrics.js +11 -13
  83. package/dist/tools/query-metrics.js.map +1 -1
  84. package/dist/tools/query-metrics.test.js +134 -96
  85. package/dist/tools/query-metrics.test.js.map +1 -1
  86. package/dist/tools/query-traces.d.ts +16 -17
  87. package/dist/tools/query-traces.d.ts.map +1 -1
  88. package/dist/tools/query-traces.js +11 -14
  89. package/dist/tools/query-traces.js.map +1 -1
  90. package/dist/tools/query-traces.test.js +144 -48
  91. package/dist/tools/query-traces.test.js.map +1 -1
  92. package/dist/tools/setup-claudeignore.d.ts +36 -10
  93. package/dist/tools/setup-claudeignore.d.ts.map +1 -1
  94. package/dist/tools/setup-claudeignore.js +193 -33
  95. package/dist/tools/setup-claudeignore.js.map +1 -1
  96. package/dist/tools/setup-claudeignore.test.js +286 -41
  97. package/dist/tools/setup-claudeignore.test.js.map +1 -1
  98. package/dist/tools/signoz.integration.test.d.ts +8 -0
  99. package/dist/tools/signoz.integration.test.d.ts.map +1 -0
  100. package/dist/tools/signoz.integration.test.js +141 -0
  101. package/dist/tools/signoz.integration.test.js.map +1 -0
  102. package/package.json +6 -3
@@ -3,7 +3,8 @@ import * as assert from 'node:assert';
3
3
  import * as fs from 'fs';
4
4
  import * as path from 'path';
5
5
  import * as os from 'os';
6
- import { LocalJsonlBackend } from './local-jsonl.js';
6
+ import { LocalJsonlBackend, MultiDirectoryBackend } from './local-jsonl.js';
7
+ import { buildAndWriteIndex } from '../lib/indexer.js';
7
8
  /**
8
9
  * Test utilities for creating temp test fixtures
9
10
  */
@@ -191,367 +192,3421 @@ describe('LocalJsonlBackend', () => {
191
192
  assert.strictEqual(results[1].kind, 'SERVER');
192
193
  assert.strictEqual(results[2].kind, 'CLIENT');
193
194
  });
194
- it('should return empty array when no files found', async () => {
195
- // No files created - tempDir is empty
195
+ it('should convert status code number to string', async () => {
196
+ const today = getTestDate();
197
+ const mockSpans = [
198
+ { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0], status: { code: 0 } },
199
+ { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000000, 0], status: { code: 1 } },
200
+ { traceId: 'trace3', spanId: 'span3', name: 'op3', startTime: [1700000000, 0], status: { code: 2, message: 'Test error' } },
201
+ ];
202
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
196
203
  const results = await backend.queryTraces({});
197
- assert.strictEqual(results.length, 0);
204
+ assert.strictEqual(results[0].statusCode, 'UNSET');
205
+ assert.strictEqual(results[0].status?.code, 0);
206
+ assert.strictEqual(results[1].statusCode, 'OK');
207
+ assert.strictEqual(results[1].status?.code, 1);
208
+ assert.strictEqual(results[2].statusCode, 'ERROR');
209
+ assert.strictEqual(results[2].status?.code, 2);
210
+ assert.strictEqual(results[2].status?.message, 'Test error');
198
211
  });
199
- });
200
- describe('queryLogs', () => {
201
- it('should read and normalize log records from JSONL files', async () => {
212
+ it('should handle spans without status', async () => {
202
213
  const today = getTestDate();
203
- const mockLogs = [
214
+ const mockSpans = [
215
+ { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
216
+ ];
217
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
218
+ const results = await backend.queryTraces({});
219
+ assert.strictEqual(results[0].statusCode, undefined);
220
+ assert.strictEqual(results[0].status, undefined);
221
+ });
222
+ it('should extract instrumentationScope from spans', async () => {
223
+ const today = getTestDate();
224
+ const mockSpans = [
204
225
  {
205
- timestamp: '2026-01-28T10:00:00.000Z',
206
- severityText: 'ERROR',
207
- body: 'Connection failed',
208
226
  traceId: 'trace1',
209
227
  spanId: 'span1',
210
- resource: { serviceName: 'api-service' },
211
- attributes: { 'error.type': 'timeout' },
228
+ name: 'http-request',
229
+ startTime: [1700000000, 0],
230
+ instrumentationScope: {
231
+ name: '@opentelemetry/instrumentation-http',
232
+ version: '0.48.0',
233
+ schemaUrl: 'https://opentelemetry.io/schemas/1.21.0',
234
+ },
235
+ },
236
+ {
237
+ traceId: 'trace2',
238
+ spanId: 'span2',
239
+ name: 'custom-span',
240
+ startTime: [1700000000, 0],
241
+ instrumentationScope: {
242
+ name: 'custom-hooks',
243
+ },
244
+ },
245
+ {
246
+ traceId: 'trace3',
247
+ spanId: 'span3',
248
+ name: 'no-scope',
249
+ startTime: [1700000000, 0],
212
250
  },
213
251
  ];
214
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
215
- const results = await backend.queryLogs({});
216
- assert.strictEqual(results.length, 1);
217
- assert.strictEqual(results[0].severity, 'ERROR');
218
- assert.strictEqual(results[0].body, 'Connection failed');
219
- assert.strictEqual(results[0].attributes?.['service.name'], 'api-service');
220
- assert.strictEqual(results[0].attributes?.['error.type'], 'timeout');
252
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
253
+ const results = await backend.queryTraces({});
254
+ assert.strictEqual(results.length, 3);
255
+ // First span: full scope
256
+ assert.strictEqual(results[0].instrumentationScope?.name, '@opentelemetry/instrumentation-http');
257
+ assert.strictEqual(results[0].instrumentationScope?.version, '0.48.0');
258
+ assert.strictEqual(results[0].instrumentationScope?.schemaUrl, 'https://opentelemetry.io/schemas/1.21.0');
259
+ // Second span: name only
260
+ assert.strictEqual(results[1].instrumentationScope?.name, 'custom-hooks');
261
+ assert.strictEqual(results[1].instrumentationScope?.version, undefined);
262
+ // Third span: no scope
263
+ assert.strictEqual(results[2].instrumentationScope, undefined);
221
264
  });
222
- it('should handle timestamp as ISO string', async () => {
265
+ it('should extract span links from spans', async () => {
223
266
  const today = getTestDate();
224
- const mockLogs = [
267
+ const mockSpans = [
225
268
  {
226
- timestamp: '2026-01-28T10:00:00.123Z',
227
- body: 'Test log',
269
+ traceId: 'trace1',
270
+ spanId: 'span1',
271
+ name: 'batch-processor',
272
+ startTime: [1700000000, 0],
273
+ links: [
274
+ {
275
+ context: { traceId: 'trace-upstream-1', spanId: 'span-upstream-1' },
276
+ attributes: { 'link.type': 'producer' },
277
+ },
278
+ {
279
+ context: { traceId: 'trace-upstream-2', spanId: 'span-upstream-2' },
280
+ },
281
+ ],
282
+ },
283
+ {
284
+ traceId: 'trace2',
285
+ spanId: 'span2',
286
+ name: 'single-link',
287
+ startTime: [1700000000, 0],
288
+ links: [
289
+ {
290
+ context: { traceId: 'trace-parent', spanId: 'span-parent' },
291
+ attributes: { 'link.reason': 'causal' },
292
+ },
293
+ ],
294
+ },
295
+ {
296
+ traceId: 'trace3',
297
+ spanId: 'span3',
298
+ name: 'no-links',
299
+ startTime: [1700000000, 0],
228
300
  },
229
301
  ];
230
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
231
- const results = await backend.queryLogs({});
232
- assert.strictEqual(results[0].timestamp, '2026-01-28T10:00:00.123Z');
302
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
303
+ const results = await backend.queryTraces({});
304
+ assert.strictEqual(results.length, 3);
305
+ // First span: multiple links
306
+ assert.strictEqual(results[0].links?.length, 2);
307
+ assert.strictEqual(results[0].links?.[0].traceId, 'trace-upstream-1');
308
+ assert.strictEqual(results[0].links?.[0].spanId, 'span-upstream-1');
309
+ assert.strictEqual(results[0].links?.[0].attributes?.['link.type'], 'producer');
310
+ assert.strictEqual(results[0].links?.[1].traceId, 'trace-upstream-2');
311
+ assert.strictEqual(results[0].links?.[1].spanId, 'span-upstream-2');
312
+ assert.strictEqual(results[0].links?.[1].attributes, undefined);
313
+ // Second span: single link with attributes
314
+ assert.strictEqual(results[1].links?.length, 1);
315
+ assert.strictEqual(results[1].links?.[0].traceId, 'trace-parent');
316
+ assert.strictEqual(results[1].links?.[0].attributes?.['link.reason'], 'causal');
317
+ // Third span: no links
318
+ assert.strictEqual(results[2].links, undefined);
233
319
  });
234
- it('should convert timestamp from [seconds, nanoseconds] array', async () => {
320
+ it('should filter out invalid span links with missing context', async () => {
235
321
  const today = getTestDate();
236
- const mockLogs = [
322
+ const mockSpans = [
237
323
  {
238
- timestamp: [1700000000, 123456789], // seconds + nanoseconds
239
- body: 'Test log',
324
+ traceId: 'trace1',
325
+ spanId: 'span1',
326
+ name: 'mixed-links',
327
+ startTime: [1700000000, 0],
328
+ links: [
329
+ {
330
+ context: { traceId: 'valid-trace', spanId: 'valid-span' },
331
+ },
332
+ {
333
+ context: { traceId: 'missing-span-id' },
334
+ },
335
+ {
336
+ context: { spanId: 'missing-trace-id' },
337
+ },
338
+ {
339
+ // No context at all
340
+ attributes: { 'orphan': true },
341
+ },
342
+ ],
240
343
  },
241
344
  ];
242
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
243
- const results = await backend.queryLogs({});
244
- // Verify it's a valid ISO string
245
- assert.match(results[0].timestamp, /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
345
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
346
+ const results = await backend.queryTraces({});
347
+ assert.strictEqual(results.length, 1);
348
+ // Only the valid link should be included
349
+ assert.strictEqual(results[0].links?.length, 1);
350
+ assert.strictEqual(results[0].links?.[0].traceId, 'valid-trace');
351
+ assert.strictEqual(results[0].links?.[0].spanId, 'valid-span');
246
352
  });
247
- it('should filter logs by severity (case-insensitive)', async () => {
353
+ it('should set links to undefined when all links are invalid', async () => {
248
354
  const today = getTestDate();
249
- const mockLogs = [
250
- { timestamp: '2026-01-28T10:00:00Z', severityText: 'ERROR', body: 'Error 1' },
251
- { timestamp: '2026-01-28T10:01:00Z', severity: 'WARN', body: 'Warning 1' },
252
- { timestamp: '2026-01-28T10:02:00Z', severity: 'error', body: 'Error 2' },
253
- { timestamp: '2026-01-28T10:03:00Z', severity: 'INFO', body: 'Info 1' },
355
+ const mockSpans = [
356
+ {
357
+ traceId: 'trace1',
358
+ spanId: 'span1',
359
+ name: 'all-invalid-links',
360
+ startTime: [1700000000, 0],
361
+ links: [
362
+ { context: { traceId: 'missing-span' } },
363
+ { context: { spanId: 'missing-trace' } },
364
+ ],
365
+ },
254
366
  ];
255
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
256
- const results = await backend.queryLogs({ severity: 'ERROR' });
257
- assert.strictEqual(results.length, 2);
258
- assert.ok(results.every(l => l.severity.toUpperCase() === 'ERROR'));
367
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
368
+ const results = await backend.queryTraces({});
369
+ assert.strictEqual(results.length, 1);
370
+ assert.strictEqual(results[0].links, undefined);
259
371
  });
260
- it('should filter logs by traceId', async () => {
372
+ it('should return empty array when no files found', async () => {
373
+ // No files created - tempDir is empty
374
+ const results = await backend.queryTraces({});
375
+ assert.strictEqual(results.length, 0);
376
+ });
377
+ it('should filter spans by attributeFilter with string value', async () => {
261
378
  const today = getTestDate();
262
- const mockLogs = [
263
- { timestamp: '2026-01-28T10:00:00Z', body: 'Log 1', traceId: 'trace1' },
264
- { timestamp: '2026-01-28T10:01:00Z', body: 'Log 2', traceId: 'trace2' },
265
- { timestamp: '2026-01-28T10:02:00Z', body: 'Log 3', traceId: 'trace1' },
379
+ const mockSpans = [
380
+ {
381
+ traceId: 'trace1',
382
+ spanId: 'span1',
383
+ name: 'hook:session-start',
384
+ startTime: [1700000000, 0],
385
+ attributes: { 'hook.name': 'session-start', 'hook.type': 'session' },
386
+ },
387
+ {
388
+ traceId: 'trace2',
389
+ spanId: 'span2',
390
+ name: 'hook:mcp-pre-tool',
391
+ startTime: [1700000000, 0],
392
+ attributes: { 'hook.name': 'mcp-pre-tool', 'mcp.server': 'signoz' },
393
+ },
394
+ {
395
+ traceId: 'trace3',
396
+ spanId: 'span3',
397
+ name: 'hook:post-tool',
398
+ startTime: [1700000000, 0],
399
+ attributes: { 'hook.name': 'post-tool', 'mcp.server': 'webresearch' },
400
+ },
266
401
  ];
267
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
268
- const results = await backend.queryLogs({ traceId: 'trace1' });
269
- assert.strictEqual(results.length, 2);
270
- assert.ok(results.every(l => l.traceId === 'trace1'));
402
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
403
+ const results = await backend.queryTraces({
404
+ attributeFilter: { 'hook.name': 'session-start' },
405
+ });
406
+ assert.strictEqual(results.length, 1);
407
+ assert.strictEqual(results[0].traceId, 'trace1');
271
408
  });
272
- it('should filter logs by search text (case-insensitive substring)', async () => {
409
+ it('should filter spans by attributeFilter with multiple attributes', async () => {
273
410
  const today = getTestDate();
274
- const mockLogs = [
275
- { timestamp: '2026-01-28T10:00:00Z', body: 'Connection timeout' },
276
- { timestamp: '2026-01-28T10:01:00Z', body: 'Database query failed' },
277
- { timestamp: '2026-01-28T10:02:00Z', body: 'Connection reset by peer' },
411
+ const mockSpans = [
412
+ {
413
+ traceId: 'trace1',
414
+ spanId: 'span1',
415
+ name: 'mcp-call',
416
+ startTime: [1700000000, 0],
417
+ attributes: { 'mcp.server': 'signoz', 'mcp.success': true },
418
+ },
419
+ {
420
+ traceId: 'trace2',
421
+ spanId: 'span2',
422
+ name: 'mcp-call',
423
+ startTime: [1700000000, 0],
424
+ attributes: { 'mcp.server': 'signoz', 'mcp.success': false },
425
+ },
426
+ {
427
+ traceId: 'trace3',
428
+ spanId: 'span3',
429
+ name: 'mcp-call',
430
+ startTime: [1700000000, 0],
431
+ attributes: { 'mcp.server': 'webresearch', 'mcp.success': true },
432
+ },
278
433
  ];
279
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
280
- const results = await backend.queryLogs({ search: 'CONNECTION' });
281
- assert.strictEqual(results.length, 2);
282
- assert.ok(results.every(l => l.body.toLowerCase().includes('connection')));
434
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
435
+ const results = await backend.queryTraces({
436
+ attributeFilter: { 'mcp.server': 'signoz', 'mcp.success': true },
437
+ });
438
+ assert.strictEqual(results.length, 1);
439
+ assert.strictEqual(results[0].traceId, 'trace1');
283
440
  });
284
- it('should use severityText if available, fallback to severity', async () => {
441
+ it('should filter spans by attributeFilter with number value', async () => {
285
442
  const today = getTestDate();
286
- const mockLogs = [
287
- { timestamp: '2026-01-28T10:00:00Z', body: 'Log 1', severityText: 'CUSTOM' },
288
- { timestamp: '2026-01-28T10:01:00Z', body: 'Log 2', severity: 'WARN' },
289
- { timestamp: '2026-01-28T10:02:00Z', body: 'Log 3' }, // no severity
443
+ const mockSpans = [
444
+ {
445
+ traceId: 'trace1',
446
+ spanId: 'span1',
447
+ name: 'http-request',
448
+ startTime: [1700000000, 0],
449
+ attributes: { 'http.status_code': 200 },
450
+ },
451
+ {
452
+ traceId: 'trace2',
453
+ spanId: 'span2',
454
+ name: 'http-request',
455
+ startTime: [1700000000, 0],
456
+ attributes: { 'http.status_code': 500 },
457
+ },
290
458
  ];
291
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
292
- const results = await backend.queryLogs({});
293
- assert.strictEqual(results[0].severity, 'CUSTOM');
294
- assert.strictEqual(results[1].severity, 'WARN');
295
- assert.strictEqual(results[2].severity, 'INFO'); // default
459
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
460
+ const results = await backend.queryTraces({
461
+ attributeFilter: { 'http.status_code': 200 },
462
+ });
463
+ assert.strictEqual(results.length, 1);
464
+ assert.strictEqual(results[0].traceId, 'trace1');
296
465
  });
297
- it('should apply limit and offset to log results', async () => {
466
+ it('should filter spans by attributeFilter with boolean value', async () => {
298
467
  const today = getTestDate();
299
- const mockLogs = Array.from({ length: 200 }, (_, i) => ({
300
- timestamp: new Date(Date.now() + i * 1000).toISOString(),
301
- body: `Log ${i}`,
302
- }));
303
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
304
- const results = await backend.queryLogs({ limit: 50, offset: 75 });
305
- assert.strictEqual(results.length, 50);
306
- assert.strictEqual(results[0].body, 'Log 75');
468
+ const mockSpans = [
469
+ {
470
+ traceId: 'trace1',
471
+ spanId: 'span1',
472
+ name: 'agent-call',
473
+ startTime: [1700000000, 0],
474
+ attributes: { 'agent.is_background': true },
475
+ },
476
+ {
477
+ traceId: 'trace2',
478
+ spanId: 'span2',
479
+ name: 'agent-call',
480
+ startTime: [1700000000, 0],
481
+ attributes: { 'agent.is_background': false },
482
+ },
483
+ ];
484
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
485
+ const results = await backend.queryTraces({
486
+ attributeFilter: { 'agent.is_background': false },
487
+ });
488
+ assert.strictEqual(results.length, 1);
489
+ assert.strictEqual(results[0].traceId, 'trace2');
307
490
  });
308
- it('should handle empty body field', async () => {
491
+ it('should return empty array when attributeFilter matches nothing', async () => {
309
492
  const today = getTestDate();
310
- const mockLogs = [
311
- { timestamp: '2026-01-28T10:00:00Z', body: 'Normal log' },
312
- { timestamp: '2026-01-28T10:01:00Z' }, // missing body
493
+ const mockSpans = [
494
+ {
495
+ traceId: 'trace1',
496
+ spanId: 'span1',
497
+ name: 'op1',
498
+ startTime: [1700000000, 0],
499
+ attributes: { 'hook.name': 'session-start' },
500
+ },
313
501
  ];
314
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
315
- const results = await backend.queryLogs({});
316
- assert.strictEqual(results.length, 2);
317
- assert.strictEqual(results[1].body, '');
502
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
503
+ const results = await backend.queryTraces({
504
+ attributeFilter: { 'hook.name': 'nonexistent' },
505
+ });
506
+ assert.strictEqual(results.length, 0);
318
507
  });
319
- });
320
- describe('queryMetrics', () => {
321
- it('should read and normalize metric data points from JSONL files', async () => {
508
+ it('should combine attributeFilter with other filters', async () => {
322
509
  const today = getTestDate();
323
- const mockMetrics = [
510
+ const mockSpans = [
324
511
  {
325
- timestamp: '2026-01-28T10:00:00Z',
326
- name: 'http.requests.total',
327
- value: 100,
328
- type: 'counter',
329
- unit: 'requests',
330
- resource: { serviceName: 'api-gateway' },
331
- attributes: { 'http.method': 'GET', 'http.status_code': 200 },
512
+ traceId: 'trace1',
513
+ spanId: 'span1',
514
+ name: 'hook:mcp-pre-tool',
515
+ startTime: [1700000000, 0],
516
+ endTime: [1700000000, 500000000], // 500ms
517
+ attributes: { 'mcp.server': 'signoz' },
518
+ },
519
+ {
520
+ traceId: 'trace2',
521
+ spanId: 'span2',
522
+ name: 'hook:mcp-pre-tool',
523
+ startTime: [1700000000, 0],
524
+ endTime: [1700000002, 0], // 2000ms
525
+ attributes: { 'mcp.server': 'signoz' },
526
+ },
527
+ {
528
+ traceId: 'trace3',
529
+ spanId: 'span3',
530
+ name: 'hook:mcp-pre-tool',
531
+ startTime: [1700000000, 0],
532
+ endTime: [1700000000, 500000000], // 500ms
533
+ attributes: { 'mcp.server': 'webresearch' },
332
534
  },
333
535
  ];
334
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
335
- const results = await backend.queryMetrics({});
536
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
537
+ const results = await backend.queryTraces({
538
+ spanName: 'mcp',
539
+ minDurationMs: 1000,
540
+ attributeFilter: { 'mcp.server': 'signoz' },
541
+ });
336
542
  assert.strictEqual(results.length, 1);
337
- assert.strictEqual(results[0].name, 'http.requests.total');
338
- assert.strictEqual(results[0].value, 100);
339
- assert.strictEqual(results[0].unit, 'requests');
340
- assert.strictEqual(results[0].attributes?.['service.name'], 'api-gateway');
341
- assert.strictEqual(results[0].attributes?.['http.method'], 'GET');
543
+ assert.strictEqual(results[0].traceId, 'trace2');
342
544
  });
343
- it('should filter metrics by name substring', async () => {
545
+ it('should exclude spans matching excludeSpanName', async () => {
344
546
  const today = getTestDate();
345
- const mockMetrics = [
346
- { timestamp: '2026-01-28T10:00:00Z', name: 'http.requests.total', value: 100, type: 'counter' },
347
- { timestamp: '2026-01-28T10:01:00Z', name: 'http.request.duration', value: 150, type: 'histogram' },
348
- { timestamp: '2026-01-28T10:02:00Z', name: 'memory.usage', value: 512, type: 'gauge' },
547
+ const mockSpans = [
548
+ { traceId: 'trace1', spanId: 'span1', name: 'http-request', startTime: [1700000000, 0] },
549
+ { traceId: 'trace2', spanId: 'span2', name: 'db-query', startTime: [1700000000, 0] },
550
+ { traceId: 'trace3', spanId: 'span3', name: 'http-response', startTime: [1700000000, 0] },
349
551
  ];
350
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
351
- const results = await backend.queryMetrics({ metricName: 'http' });
352
- assert.strictEqual(results.length, 2);
353
- assert.ok(results.every(m => m.name.includes('http')));
552
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
553
+ const results = await backend.queryTraces({ excludeSpanName: 'http' });
554
+ assert.strictEqual(results.length, 1);
555
+ assert.strictEqual(results[0].name, 'db-query');
354
556
  });
355
- it('should apply limit and offset to metric results', async () => {
557
+ it('should filter spans by spanNameRegex', async () => {
356
558
  const today = getTestDate();
357
- const mockMetrics = Array.from({ length: 150 }, (_, i) => ({
358
- timestamp: new Date(Date.now() + i * 1000).toISOString(),
359
- name: `metric.${i}`,
360
- value: i * 10,
361
- type: 'gauge',
362
- }));
363
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
364
- const results = await backend.queryMetrics({ limit: 50, offset: 30 });
365
- assert.strictEqual(results.length, 50);
366
- assert.strictEqual(results[0].name, 'metric.30');
559
+ const mockSpans = [
560
+ { traceId: 'trace1', spanId: 'span1', name: 'hook:session-start', startTime: [1700000000, 0] },
561
+ { traceId: 'trace2', spanId: 'span2', name: 'hook:session-end', startTime: [1700000000, 0] },
562
+ { traceId: 'trace3', spanId: 'span3', name: 'mcp-call', startTime: [1700000000, 0] },
563
+ { traceId: 'trace4', spanId: 'span4', name: 'hook:pre-tool', startTime: [1700000000, 0] },
564
+ ];
565
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
566
+ // Match spans starting with 'hook:session'
567
+ const results = await backend.queryTraces({ spanNameRegex: '^hook:session' });
568
+ assert.strictEqual(results.length, 2);
569
+ assert.ok(results.some(s => s.name === 'hook:session-start'));
570
+ assert.ok(results.some(s => s.name === 'hook:session-end'));
367
571
  });
368
- it('should aggregate metrics with sum function', async () => {
572
+ it('should filter spans by spanNameRegex with complex pattern', async () => {
369
573
  const today = getTestDate();
370
- const mockMetrics = [
371
- { timestamp: '2026-01-28T10:00:00Z', name: 'requests', value: 100, type: 'counter' },
372
- { timestamp: '2026-01-28T10:01:00Z', name: 'requests', value: 150, type: 'counter' },
373
- { timestamp: '2026-01-28T10:02:00Z', name: 'requests', value: 200, type: 'counter' },
574
+ const mockSpans = [
575
+ { traceId: 'trace1', spanId: 'span1', name: 'api-v1-users-get', startTime: [1700000000, 0] },
576
+ { traceId: 'trace2', spanId: 'span2', name: 'api-v2-users-get', startTime: [1700000000, 0] },
577
+ { traceId: 'trace3', spanId: 'span3', name: 'api-v1-orders-post', startTime: [1700000000, 0] },
578
+ { traceId: 'trace4', spanId: 'span4', name: 'internal-process', startTime: [1700000000, 0] },
374
579
  ];
375
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
376
- const results = await backend.queryMetrics({ aggregation: 'sum' });
377
- assert.strictEqual(results.length, 1);
378
- assert.strictEqual(results[0].value, 450);
580
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
581
+ // Match spans with api-v[12]-.*-get pattern
582
+ const results = await backend.queryTraces({ spanNameRegex: 'api-v[12]-.*-get' });
583
+ assert.strictEqual(results.length, 2);
584
+ assert.ok(results.some(s => s.name === 'api-v1-users-get'));
585
+ assert.ok(results.some(s => s.name === 'api-v2-users-get'));
379
586
  });
380
- it('should aggregate metrics with avg function', async () => {
587
+ it('should handle invalid spanNameRegex gracefully', async () => {
381
588
  const today = getTestDate();
382
- const mockMetrics = [
383
- { timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 100, type: 'histogram' },
384
- { timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 200, type: 'histogram' },
385
- { timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 300, type: 'histogram' },
589
+ const mockSpans = [
590
+ { traceId: 'trace1', spanId: 'span1', name: 'test-span', startTime: [1700000000, 0] },
386
591
  ];
387
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
388
- const results = await backend.queryMetrics({ aggregation: 'avg' });
592
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
593
+ // Invalid regex pattern - should be skipped (all spans returned)
594
+ const results = await backend.queryTraces({ spanNameRegex: '[invalid(' });
595
+ // Invalid regex is skipped, so all spans should be returned
389
596
  assert.strictEqual(results.length, 1);
390
- assert.strictEqual(results[0].value, 200);
597
+ assert.strictEqual(results[0].name, 'test-span');
391
598
  });
392
- it('should aggregate metrics with min function', async () => {
599
+ it('should combine spanNameRegex with spanName filter', async () => {
393
600
  const today = getTestDate();
394
- const mockMetrics = [
395
- { timestamp: '2026-01-28T10:00:00Z', name: 'response_time', value: 150, type: 'gauge' },
396
- { timestamp: '2026-01-28T10:01:00Z', name: 'response_time', value: 50, type: 'gauge' },
397
- { timestamp: '2026-01-28T10:02:00Z', name: 'response_time', value: 200, type: 'gauge' },
601
+ const mockSpans = [
602
+ { traceId: 'trace1', spanId: 'span1', name: 'hook:mcp-pre-tool', startTime: [1700000000, 0] },
603
+ { traceId: 'trace2', spanId: 'span2', name: 'hook:mcp-post-tool', startTime: [1700000000, 0] },
604
+ { traceId: 'trace3', spanId: 'span3', name: 'hook:session-start', startTime: [1700000000, 0] },
605
+ { traceId: 'trace4', spanId: 'span4', name: 'mcp-call', startTime: [1700000000, 0] },
398
606
  ];
399
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
400
- const results = await backend.queryMetrics({ aggregation: 'min' });
401
- assert.strictEqual(results.length, 1);
402
- assert.strictEqual(results[0].value, 50);
607
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
608
+ // spanName filters first (substring), then regex narrows down
609
+ const results = await backend.queryTraces({
610
+ spanName: 'hook',
611
+ spanNameRegex: 'mcp',
612
+ });
613
+ assert.strictEqual(results.length, 2);
614
+ assert.ok(results.some(s => s.name === 'hook:mcp-pre-tool'));
615
+ assert.ok(results.some(s => s.name === 'hook:mcp-post-tool'));
403
616
  });
404
- it('should aggregate metrics with max function', async () => {
617
+ it('should combine spanNameRegex with excludeSpanName', async () => {
405
618
  const today = getTestDate();
406
- const mockMetrics = [
407
- { timestamp: '2026-01-28T10:00:00Z', name: 'memory', value: 512, type: 'gauge' },
408
- { timestamp: '2026-01-28T10:01:00Z', name: 'memory', value: 256, type: 'gauge' },
409
- { timestamp: '2026-01-28T10:02:00Z', name: 'memory', value: 1024, type: 'gauge' },
619
+ const mockSpans = [
620
+ { traceId: 'trace1', spanId: 'span1', name: 'hook:mcp-pre-tool', startTime: [1700000000, 0] },
621
+ { traceId: 'trace2', spanId: 'span2', name: 'hook:mcp-post-tool', startTime: [1700000000, 0] },
622
+ { traceId: 'trace3', spanId: 'span3', name: 'hook:session-start', startTime: [1700000000, 0] },
410
623
  ];
411
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
412
- const results = await backend.queryMetrics({ aggregation: 'max' });
624
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
625
+ // Regex matches all hook:mcp-*, exclude post-tool
626
+ const results = await backend.queryTraces({
627
+ spanNameRegex: '^hook:mcp-',
628
+ excludeSpanName: 'post-tool',
629
+ });
413
630
  assert.strictEqual(results.length, 1);
414
- assert.strictEqual(results[0].value, 1024);
631
+ assert.strictEqual(results[0].name, 'hook:mcp-pre-tool');
415
632
  });
416
- it('should aggregate metrics with count function', async () => {
633
+ it('should filter spans by attributeExists - all must exist', async () => {
417
634
  const today = getTestDate();
418
- const mockMetrics = [
419
- { timestamp: '2026-01-28T10:00:00Z', name: 'events', value: 10, type: 'counter' },
420
- { timestamp: '2026-01-28T10:01:00Z', name: 'events', value: 20, type: 'counter' },
421
- { timestamp: '2026-01-28T10:02:00Z', name: 'events', value: 30, type: 'counter' },
635
+ const mockSpans = [
636
+ {
637
+ traceId: 'trace1',
638
+ spanId: 'span1',
639
+ name: 'op1',
640
+ startTime: [1700000000, 0],
641
+ attributes: { 'http.method': 'GET', 'http.status_code': 200 },
642
+ },
643
+ {
644
+ traceId: 'trace2',
645
+ spanId: 'span2',
646
+ name: 'op2',
647
+ startTime: [1700000000, 0],
648
+ attributes: { 'http.method': 'POST' }, // missing http.status_code
649
+ },
650
+ {
651
+ traceId: 'trace3',
652
+ spanId: 'span3',
653
+ name: 'op3',
654
+ startTime: [1700000000, 0],
655
+ attributes: { 'db.system': 'postgres' }, // missing both
656
+ },
422
657
  ];
423
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
424
- const results = await backend.queryMetrics({ aggregation: 'count' });
658
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
659
+ const results = await backend.queryTraces({
660
+ attributeExists: ['http.method', 'http.status_code'],
661
+ });
425
662
  assert.strictEqual(results.length, 1);
426
- assert.strictEqual(results[0].value, 3);
663
+ assert.strictEqual(results[0].traceId, 'trace1');
427
664
  });
428
- it('should aggregate metrics grouped by attributes', async () => {
665
+ it('should filter spans by attributeNotExists - exclude if any exist', async () => {
429
666
  const today = getTestDate();
430
- const mockMetrics = [
667
+ const mockSpans = [
431
668
  {
432
- timestamp: '2026-01-28T10:00:00Z',
433
- name: 'http.requests',
434
- value: 100,
435
- type: 'counter',
436
- attributes: { method: 'GET' },
669
+ traceId: 'trace1',
670
+ spanId: 'span1',
671
+ name: 'op1',
672
+ startTime: [1700000000, 0],
673
+ attributes: { 'http.method': 'GET', 'error.message': 'timeout' },
437
674
  },
438
675
  {
439
- timestamp: '2026-01-28T10:01:00Z',
440
- name: 'http.requests',
441
- value: 50,
442
- type: 'counter',
443
- attributes: { method: 'POST' },
676
+ traceId: 'trace2',
677
+ spanId: 'span2',
678
+ name: 'op2',
679
+ startTime: [1700000000, 0],
680
+ attributes: { 'http.method': 'POST' }, // no error attributes
444
681
  },
445
682
  {
446
- timestamp: '2026-01-28T10:02:00Z',
447
- name: 'http.requests',
448
- value: 200,
449
- type: 'counter',
450
- attributes: { method: 'GET' },
683
+ traceId: 'trace3',
684
+ spanId: 'span3',
685
+ name: 'op3',
686
+ startTime: [1700000000, 0],
687
+ attributes: { 'http.method': 'GET', 'error.type': 'network' },
451
688
  },
452
689
  ];
453
- writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
454
- const results = await backend.queryMetrics({ aggregation: 'sum', groupBy: ['method'] });
455
- assert.strictEqual(results.length, 2);
456
- const getMetric = results.find(m => m.attributes?.method === 'GET');
457
- const postMetric = results.find(m => m.attributes?.method === 'POST');
458
- assert.strictEqual(getMetric?.value, 300);
459
- assert.strictEqual(postMetric?.value, 50);
460
- });
461
- it('should return empty array when no metrics found', async () => {
462
- // No files created
463
- const results = await backend.queryMetrics({});
464
- assert.strictEqual(results.length, 0);
465
- });
466
- });
467
- describe('healthCheck', () => {
468
- it('should return error when telemetry directory does not exist', async () => {
469
- const nonExistentBackend = new LocalJsonlBackend('/nonexistent/telemetry');
470
- const result = await nonExistentBackend.healthCheck();
471
- assert.strictEqual(result.status, 'error');
472
- assert.match(result.message || '', /not found/);
473
- });
474
- it('should return ok when directory exists with no files', async () => {
475
- // tempDir exists but has no files
476
- const result = await backend.healthCheck();
477
- assert.strictEqual(result.status, 'ok');
478
- assert.match(result.message || '', /No telemetry files/);
690
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
691
+ const results = await backend.queryTraces({
692
+ attributeNotExists: ['error.message', 'error.type'],
693
+ });
694
+ assert.strictEqual(results.length, 1);
695
+ assert.strictEqual(results[0].traceId, 'trace2');
479
696
  });
480
- it('should return ok with found files message', async () => {
697
+ it('should combine spanName with excludeSpanName', async () => {
481
698
  const today = getTestDate();
482
- // Create both traces and logs files
483
- writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), []);
484
- writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), []);
485
- const result = await backend.healthCheck();
486
- assert.strictEqual(result.status, 'ok');
487
- assert.match(result.message || '', /traces.*logs/);
488
- });
489
- });
490
- describe('date range filtering', () => {
491
- it('should filter files by startDate and endDate', async () => {
492
- // Create files for multiple dates
493
- writeJsonlFile(path.join(tempDir, 'traces-2026-01-26.jsonl'), [
494
- { traceId: 'trace-26', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
495
- ]);
496
- writeJsonlFile(path.join(tempDir, 'traces-2026-01-27.jsonl'), [
497
- { traceId: 'trace-27', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
498
- ]);
499
- writeJsonlFile(path.join(tempDir, 'traces-2026-01-28.jsonl'), [
500
- { traceId: 'trace-28', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
501
- ]);
502
- writeJsonlFile(path.join(tempDir, 'traces-2026-01-29.jsonl'), [
503
- { traceId: 'trace-29', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
504
- ]);
505
- const results = await backend.queryTraces({ startDate: '2026-01-27', endDate: '2026-01-28' });
506
- assert.strictEqual(results.length, 2);
507
- const traceIds = results.map(r => r.traceId);
508
- assert.ok(traceIds.includes('trace-27'));
509
- assert.ok(traceIds.includes('trace-28'));
510
- assert.ok(!traceIds.includes('trace-26'));
511
- assert.ok(!traceIds.includes('trace-29'));
699
+ const mockSpans = [
700
+ { traceId: 'trace1', spanId: 'span1', name: 'http-request-external', startTime: [1700000000, 0] },
701
+ { traceId: 'trace2', spanId: 'span2', name: 'http-request-internal', startTime: [1700000000, 0] },
702
+ { traceId: 'trace3', spanId: 'span3', name: 'db-query', startTime: [1700000000, 0] },
703
+ ];
704
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
705
+ const results = await backend.queryTraces({
706
+ spanName: 'http',
707
+ excludeSpanName: 'internal',
708
+ });
709
+ assert.strictEqual(results.length, 1);
710
+ assert.strictEqual(results[0].name, 'http-request-external');
512
711
  });
513
- it('should use today as default when no date range specified', async () => {
712
+ it('should combine attributeExists with attributeFilter', async () => {
514
713
  const today = getTestDate();
515
- const yesterday = new Date(Date.now() - 86400000).toISOString().split('T')[0];
516
- // Create file for today and yesterday
517
- writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), [
518
- { traceId: 'today-trace', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
519
- ]);
520
- writeJsonlFile(path.join(tempDir, `traces-${yesterday}.jsonl`), [
521
- { traceId: 'yesterday-trace', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
522
- ]);
523
- // Query with no date range should only get today's data
524
- const results = await backend.queryTraces({});
714
+ const mockSpans = [
715
+ {
716
+ traceId: 'trace1',
717
+ spanId: 'span1',
718
+ name: 'op1',
719
+ startTime: [1700000000, 0],
720
+ attributes: { 'http.method': 'GET', 'http.status_code': 200 },
721
+ },
722
+ {
723
+ traceId: 'trace2',
724
+ spanId: 'span2',
725
+ name: 'op2',
726
+ startTime: [1700000000, 0],
727
+ attributes: { 'http.method': 'POST', 'http.status_code': 500 },
728
+ },
729
+ {
730
+ traceId: 'trace3',
731
+ spanId: 'span3',
732
+ name: 'op3',
733
+ startTime: [1700000000, 0],
734
+ attributes: { 'http.method': 'GET' }, // missing status_code
735
+ },
736
+ ];
737
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
738
+ const results = await backend.queryTraces({
739
+ attributeFilter: { 'http.method': 'GET' },
740
+ attributeExists: ['http.status_code'],
741
+ });
525
742
  assert.strictEqual(results.length, 1);
526
- assert.strictEqual(results[0].traceId, 'today-trace');
743
+ assert.strictEqual(results[0].traceId, 'trace1');
527
744
  });
528
- });
529
- describe('error handling', () => {
530
- it('should handle JSONL parsing errors gracefully', async () => {
745
+ it('should filter spans by numericFilter with gt operator', async () => {
531
746
  const today = getTestDate();
532
- const filePath = path.join(tempDir, `traces-${today}.jsonl`);
533
- // Write malformed JSON
534
- fs.writeFileSync(filePath, 'not valid json\n{"traceId":"t1","spanId":"s1","name":"op"}\n', 'utf-8');
535
- const results = await backend.queryTraces({});
536
- // Should skip the malformed line and parse the valid one
537
- assert.strictEqual(results.length, 1);
538
- assert.strictEqual(results[0].traceId, 't1');
747
+ const mockSpans = [
748
+ {
749
+ traceId: 'trace1',
750
+ spanId: 'span1',
751
+ name: 'http-request',
752
+ startTime: [1700000000, 0],
753
+ attributes: { 'http.status_code': 200 },
754
+ },
755
+ {
756
+ traceId: 'trace2',
757
+ spanId: 'span2',
758
+ name: 'http-request',
759
+ startTime: [1700000000, 0],
760
+ attributes: { 'http.status_code': 500 },
761
+ },
762
+ {
763
+ traceId: 'trace3',
764
+ spanId: 'span3',
765
+ name: 'http-request',
766
+ startTime: [1700000000, 0],
767
+ attributes: { 'http.status_code': 300 },
768
+ },
769
+ ];
770
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
771
+ const results = await backend.queryTraces({
772
+ numericFilter: [{ attribute: 'http.status_code', operator: 'gt', value: 299 }],
773
+ });
774
+ assert.strictEqual(results.length, 2);
775
+ assert.ok(results.some(s => s.traceId === 'trace2'));
776
+ assert.ok(results.some(s => s.traceId === 'trace3'));
539
777
  });
540
- it('should skip spans with invalid time calculations', async () => {
778
+ it('should filter spans by numericFilter with gte operator', async () => {
541
779
  const today = getTestDate();
542
780
  const mockSpans = [
543
781
  {
544
782
  traceId: 'trace1',
545
783
  spanId: 'span1',
546
- name: 'op1',
784
+ name: 'http-request',
547
785
  startTime: [1700000000, 0],
548
- endTime: [1700000000, 0],
786
+ attributes: { 'http.status_code': 200 },
787
+ },
788
+ {
789
+ traceId: 'trace2',
790
+ spanId: 'span2',
791
+ name: 'http-request',
792
+ startTime: [1700000000, 0],
793
+ attributes: { 'http.status_code': 300 },
549
794
  },
550
795
  ];
551
796
  writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
552
- const results = await backend.queryTraces({});
797
+ const results = await backend.queryTraces({
798
+ numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 300 }],
799
+ });
553
800
  assert.strictEqual(results.length, 1);
554
- assert.strictEqual(results[0].durationMs, 0);
801
+ assert.strictEqual(results[0].traceId, 'trace2');
802
+ });
803
+ it('should filter spans by numericFilter with lt operator', async () => {
804
+ const today = getTestDate();
805
+ const mockSpans = [
806
+ {
807
+ traceId: 'trace1',
808
+ spanId: 'span1',
809
+ name: 'http-request',
810
+ startTime: [1700000000, 0],
811
+ attributes: { 'http.status_code': 200 },
812
+ },
813
+ {
814
+ traceId: 'trace2',
815
+ spanId: 'span2',
816
+ name: 'http-request',
817
+ startTime: [1700000000, 0],
818
+ attributes: { 'http.status_code': 500 },
819
+ },
820
+ ];
821
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
822
+ const results = await backend.queryTraces({
823
+ numericFilter: [{ attribute: 'http.status_code', operator: 'lt', value: 300 }],
824
+ });
825
+ assert.strictEqual(results.length, 1);
826
+ assert.strictEqual(results[0].traceId, 'trace1');
827
+ });
828
+ it('should filter spans by numericFilter with lte operator', async () => {
829
+ const today = getTestDate();
830
+ const mockSpans = [
831
+ {
832
+ traceId: 'trace1',
833
+ spanId: 'span1',
834
+ name: 'http-request',
835
+ startTime: [1700000000, 0],
836
+ attributes: { 'http.status_code': 200 },
837
+ },
838
+ {
839
+ traceId: 'trace2',
840
+ spanId: 'span2',
841
+ name: 'http-request',
842
+ startTime: [1700000000, 0],
843
+ attributes: { 'http.status_code': 300 },
844
+ },
845
+ ];
846
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
847
+ const results = await backend.queryTraces({
848
+ numericFilter: [{ attribute: 'http.status_code', operator: 'lte', value: 200 }],
849
+ });
850
+ assert.strictEqual(results.length, 1);
851
+ assert.strictEqual(results[0].traceId, 'trace1');
852
+ });
853
+ it('should filter spans by numericFilter with eq operator', async () => {
854
+ const today = getTestDate();
855
+ const mockSpans = [
856
+ {
857
+ traceId: 'trace1',
858
+ spanId: 'span1',
859
+ name: 'http-request',
860
+ startTime: [1700000000, 0],
861
+ attributes: { 'http.status_code': 200 },
862
+ },
863
+ {
864
+ traceId: 'trace2',
865
+ spanId: 'span2',
866
+ name: 'http-request',
867
+ startTime: [1700000000, 0],
868
+ attributes: { 'http.status_code': 500 },
869
+ },
870
+ ];
871
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
872
+ const results = await backend.queryTraces({
873
+ numericFilter: [{ attribute: 'http.status_code', operator: 'eq', value: 200 }],
874
+ });
875
+ assert.strictEqual(results.length, 1);
876
+ assert.strictEqual(results[0].traceId, 'trace1');
877
+ });
878
+ it('should filter spans by multiple numericFilter conditions (AND logic)', async () => {
879
+ const today = getTestDate();
880
+ const mockSpans = [
881
+ {
882
+ traceId: 'trace1',
883
+ spanId: 'span1',
884
+ name: 'http-request',
885
+ startTime: [1700000000, 0],
886
+ attributes: { 'http.status_code': 200, 'http.response_size': 1000 },
887
+ },
888
+ {
889
+ traceId: 'trace2',
890
+ spanId: 'span2',
891
+ name: 'http-request',
892
+ startTime: [1700000000, 0],
893
+ attributes: { 'http.status_code': 200, 'http.response_size': 5000 },
894
+ },
895
+ {
896
+ traceId: 'trace3',
897
+ spanId: 'span3',
898
+ name: 'http-request',
899
+ startTime: [1700000000, 0],
900
+ attributes: { 'http.status_code': 500, 'http.response_size': 100 },
901
+ },
902
+ ];
903
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
904
+ const results = await backend.queryTraces({
905
+ numericFilter: [
906
+ { attribute: 'http.status_code', operator: 'lt', value: 300 },
907
+ { attribute: 'http.response_size', operator: 'gt', value: 2000 },
908
+ ],
909
+ });
910
+ assert.strictEqual(results.length, 1);
911
+ assert.strictEqual(results[0].traceId, 'trace2');
912
+ });
913
+ it('should skip spans when numericFilter attribute is missing', async () => {
914
+ const today = getTestDate();
915
+ const mockSpans = [
916
+ {
917
+ traceId: 'trace1',
918
+ spanId: 'span1',
919
+ name: 'http-request',
920
+ startTime: [1700000000, 0],
921
+ attributes: { 'http.status_code': 200 },
922
+ },
923
+ {
924
+ traceId: 'trace2',
925
+ spanId: 'span2',
926
+ name: 'http-request',
927
+ startTime: [1700000000, 0],
928
+ attributes: { 'other.attr': 'value' }, // missing http.status_code
929
+ },
930
+ ];
931
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
932
+ const results = await backend.queryTraces({
933
+ numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 100 }],
934
+ });
935
+ assert.strictEqual(results.length, 1);
936
+ assert.strictEqual(results[0].traceId, 'trace1');
937
+ });
938
+ it('should skip spans when numericFilter attribute is not a number', async () => {
939
+ const today = getTestDate();
940
+ const mockSpans = [
941
+ {
942
+ traceId: 'trace1',
943
+ spanId: 'span1',
944
+ name: 'http-request',
945
+ startTime: [1700000000, 0],
946
+ attributes: { 'http.status_code': 200 },
947
+ },
948
+ {
949
+ traceId: 'trace2',
950
+ spanId: 'span2',
951
+ name: 'http-request',
952
+ startTime: [1700000000, 0],
953
+ attributes: { 'http.status_code': '200' }, // string, not number
954
+ },
955
+ ];
956
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
957
+ const results = await backend.queryTraces({
958
+ numericFilter: [{ attribute: 'http.status_code', operator: 'eq', value: 200 }],
959
+ });
960
+ assert.strictEqual(results.length, 1);
961
+ assert.strictEqual(results[0].traceId, 'trace1');
962
+ });
963
+ it('should combine numericFilter with other filters', async () => {
964
+ const today = getTestDate();
965
+ const mockSpans = [
966
+ {
967
+ traceId: 'trace1',
968
+ spanId: 'span1',
969
+ name: 'http-request',
970
+ startTime: [1700000000, 0],
971
+ attributes: { 'http.status_code': 500, 'http.method': 'GET' },
972
+ },
973
+ {
974
+ traceId: 'trace2',
975
+ spanId: 'span2',
976
+ name: 'http-request',
977
+ startTime: [1700000000, 0],
978
+ attributes: { 'http.status_code': 500, 'http.method': 'POST' },
979
+ },
980
+ {
981
+ traceId: 'trace3',
982
+ spanId: 'span3',
983
+ name: 'http-request',
984
+ startTime: [1700000000, 0],
985
+ attributes: { 'http.status_code': 200, 'http.method': 'GET' },
986
+ },
987
+ ];
988
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
989
+ const results = await backend.queryTraces({
990
+ attributeFilter: { 'http.method': 'GET' },
991
+ numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 400 }],
992
+ });
993
+ assert.strictEqual(results.length, 1);
994
+ assert.strictEqual(results[0].traceId, 'trace1');
995
+ });
996
+ });
997
+ describe('queryLogs', () => {
998
+ it('should read and normalize log records from JSONL files', async () => {
999
+ const today = getTestDate();
1000
+ const mockLogs = [
1001
+ {
1002
+ timestamp: '2026-01-28T10:00:00.000Z',
1003
+ severityText: 'ERROR',
1004
+ body: 'Connection failed',
1005
+ traceId: 'trace1',
1006
+ spanId: 'span1',
1007
+ resource: { serviceName: 'api-service' },
1008
+ attributes: { 'error.type': 'timeout' },
1009
+ },
1010
+ ];
1011
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1012
+ const results = await backend.queryLogs({});
1013
+ assert.strictEqual(results.length, 1);
1014
+ assert.strictEqual(results[0].severity, 'ERROR');
1015
+ assert.strictEqual(results[0].body, 'Connection failed');
1016
+ assert.strictEqual(results[0].attributes?.['service.name'], 'api-service');
1017
+ assert.strictEqual(results[0].attributes?.['error.type'], 'timeout');
1018
+ });
1019
+ it('should handle timestamp as ISO string', async () => {
1020
+ const today = getTestDate();
1021
+ const mockLogs = [
1022
+ {
1023
+ timestamp: '2026-01-28T10:00:00.123Z',
1024
+ body: 'Test log',
1025
+ },
1026
+ ];
1027
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1028
+ const results = await backend.queryLogs({});
1029
+ assert.strictEqual(results[0].timestamp, '2026-01-28T10:00:00.123Z');
1030
+ });
1031
+ it('should convert timestamp from [seconds, nanoseconds] array', async () => {
1032
+ const today = getTestDate();
1033
+ const mockLogs = [
1034
+ {
1035
+ timestamp: [1700000000, 123456789], // seconds + nanoseconds
1036
+ body: 'Test log',
1037
+ },
1038
+ ];
1039
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1040
+ const results = await backend.queryLogs({});
1041
+ // Verify it's a valid ISO string
1042
+ assert.match(results[0].timestamp, /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
1043
+ });
1044
+ it('should filter logs by severity (case-insensitive)', async () => {
1045
+ const today = getTestDate();
1046
+ const mockLogs = [
1047
+ { timestamp: '2026-01-28T10:00:00Z', severityText: 'ERROR', body: 'Error 1' },
1048
+ { timestamp: '2026-01-28T10:01:00Z', severity: 'WARN', body: 'Warning 1' },
1049
+ { timestamp: '2026-01-28T10:02:00Z', severity: 'error', body: 'Error 2' },
1050
+ { timestamp: '2026-01-28T10:03:00Z', severity: 'INFO', body: 'Info 1' },
1051
+ ];
1052
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1053
+ const results = await backend.queryLogs({ severity: 'ERROR' });
1054
+ assert.strictEqual(results.length, 2);
1055
+ assert.ok(results.every(l => l.severity.toUpperCase() === 'ERROR'));
1056
+ });
1057
+ it('should filter logs by traceId', async () => {
1058
+ const today = getTestDate();
1059
+ const mockLogs = [
1060
+ { timestamp: '2026-01-28T10:00:00Z', body: 'Log 1', traceId: 'trace1' },
1061
+ { timestamp: '2026-01-28T10:01:00Z', body: 'Log 2', traceId: 'trace2' },
1062
+ { timestamp: '2026-01-28T10:02:00Z', body: 'Log 3', traceId: 'trace1' },
1063
+ ];
1064
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1065
+ const results = await backend.queryLogs({ traceId: 'trace1' });
1066
+ assert.strictEqual(results.length, 2);
1067
+ assert.ok(results.every(l => l.traceId === 'trace1'));
1068
+ });
1069
+ it('should filter logs by search text (case-insensitive substring)', async () => {
1070
+ const today = getTestDate();
1071
+ const mockLogs = [
1072
+ { timestamp: '2026-01-28T10:00:00Z', body: 'Connection timeout' },
1073
+ { timestamp: '2026-01-28T10:01:00Z', body: 'Database query failed' },
1074
+ { timestamp: '2026-01-28T10:02:00Z', body: 'Connection reset by peer' },
1075
+ ];
1076
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1077
+ const results = await backend.queryLogs({ search: 'CONNECTION' });
1078
+ assert.strictEqual(results.length, 2);
1079
+ assert.ok(results.every(l => l.body.toLowerCase().includes('connection')));
1080
+ });
1081
+ it('should use severityText if available, fallback to severity', async () => {
1082
+ const today = getTestDate();
1083
+ const mockLogs = [
1084
+ { timestamp: '2026-01-28T10:00:00Z', body: 'Log 1', severityText: 'CUSTOM' },
1085
+ { timestamp: '2026-01-28T10:01:00Z', body: 'Log 2', severity: 'WARN' },
1086
+ { timestamp: '2026-01-28T10:02:00Z', body: 'Log 3' }, // no severity
1087
+ ];
1088
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1089
+ const results = await backend.queryLogs({});
1090
+ assert.strictEqual(results[0].severity, 'CUSTOM');
1091
+ assert.strictEqual(results[1].severity, 'WARN');
1092
+ assert.strictEqual(results[2].severity, 'INFO'); // default
1093
+ });
1094
+ it('should extract instrumentationScope from logs', async () => {
1095
+ const today = getTestDate();
1096
+ const mockLogs = [
1097
+ {
1098
+ timestamp: '2026-01-28T10:00:00Z',
1099
+ body: 'HTTP request received',
1100
+ instrumentationScope: {
1101
+ name: '@opentelemetry/instrumentation-http',
1102
+ version: '0.48.0',
1103
+ schemaUrl: 'https://opentelemetry.io/schemas/1.21.0',
1104
+ },
1105
+ },
1106
+ {
1107
+ timestamp: '2026-01-28T10:01:00Z',
1108
+ body: 'Custom log from hooks',
1109
+ instrumentationScope: {
1110
+ name: 'custom-hooks',
1111
+ },
1112
+ },
1113
+ {
1114
+ timestamp: '2026-01-28T10:02:00Z',
1115
+ body: 'Log without scope',
1116
+ },
1117
+ ];
1118
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1119
+ const results = await backend.queryLogs({});
1120
+ assert.strictEqual(results.length, 3);
1121
+ // First log: full scope
1122
+ assert.strictEqual(results[0].instrumentationScope?.name, '@opentelemetry/instrumentation-http');
1123
+ assert.strictEqual(results[0].instrumentationScope?.version, '0.48.0');
1124
+ assert.strictEqual(results[0].instrumentationScope?.schemaUrl, 'https://opentelemetry.io/schemas/1.21.0');
1125
+ // Second log: name only
1126
+ assert.strictEqual(results[1].instrumentationScope?.name, 'custom-hooks');
1127
+ assert.strictEqual(results[1].instrumentationScope?.version, undefined);
1128
+ // Third log: no scope
1129
+ assert.strictEqual(results[2].instrumentationScope, undefined);
1130
+ });
1131
+ it('should apply limit and offset to log results', async () => {
1132
+ const today = getTestDate();
1133
+ const mockLogs = Array.from({ length: 200 }, (_, i) => ({
1134
+ timestamp: new Date(Date.now() + i * 1000).toISOString(),
1135
+ body: `Log ${i}`,
1136
+ }));
1137
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1138
+ const results = await backend.queryLogs({ limit: 50, offset: 75 });
1139
+ assert.strictEqual(results.length, 50);
1140
+ assert.strictEqual(results[0].body, 'Log 75');
1141
+ });
1142
+ it('should handle empty body field', async () => {
1143
+ const today = getTestDate();
1144
+ const mockLogs = [
1145
+ { timestamp: '2026-01-28T10:00:00Z', body: 'Normal log' },
1146
+ { timestamp: '2026-01-28T10:01:00Z' }, // missing body
1147
+ ];
1148
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1149
+ const results = await backend.queryLogs({});
1150
+ assert.strictEqual(results.length, 2);
1151
+ assert.strictEqual(results[1].body, '');
1152
+ });
1153
+ it('should set severityNumber based on severity text', async () => {
1154
+ const today = getTestDate();
1155
+ const mockLogs = [
1156
+ { timestamp: '2026-01-28T10:00:00Z', severityText: 'TRACE', body: 'Trace log' },
1157
+ { timestamp: '2026-01-28T10:01:00Z', severityText: 'DEBUG', body: 'Debug log' },
1158
+ { timestamp: '2026-01-28T10:02:00Z', severityText: 'INFO', body: 'Info log' },
1159
+ { timestamp: '2026-01-28T10:03:00Z', severityText: 'WARN', body: 'Warn log' },
1160
+ { timestamp: '2026-01-28T10:04:00Z', severityText: 'ERROR', body: 'Error log' },
1161
+ { timestamp: '2026-01-28T10:05:00Z', severityText: 'FATAL', body: 'Fatal log' },
1162
+ ];
1163
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1164
+ const results = await backend.queryLogs({});
1165
+ assert.strictEqual(results.length, 6);
1166
+ assert.strictEqual(results[0].severityNumber, 1); // TRACE
1167
+ assert.strictEqual(results[1].severityNumber, 5); // DEBUG
1168
+ assert.strictEqual(results[2].severityNumber, 9); // INFO
1169
+ assert.strictEqual(results[3].severityNumber, 13); // WARN
1170
+ assert.strictEqual(results[4].severityNumber, 17); // ERROR
1171
+ assert.strictEqual(results[5].severityNumber, 21); // FATAL
1172
+ });
1173
+ it('should handle lowercase severity when setting severityNumber', async () => {
1174
+ const today = getTestDate();
1175
+ const mockLogs = [
1176
+ { timestamp: '2026-01-28T10:00:00Z', severity: 'error', body: 'Lowercase error' },
1177
+ { timestamp: '2026-01-28T10:01:00Z', severity: 'warn', body: 'Lowercase warn' },
1178
+ { timestamp: '2026-01-28T10:02:00Z', severity: 'info', body: 'Lowercase info' },
1179
+ ];
1180
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1181
+ const results = await backend.queryLogs({});
1182
+ assert.strictEqual(results.length, 3);
1183
+ assert.strictEqual(results[0].severityNumber, 17); // error -> ERROR -> 17
1184
+ assert.strictEqual(results[1].severityNumber, 13); // warn -> WARN -> 13
1185
+ assert.strictEqual(results[2].severityNumber, 9); // info -> INFO -> 9
1186
+ });
1187
+ it('should set severityNumber to undefined for unknown severity levels', async () => {
1188
+ const today = getTestDate();
1189
+ const mockLogs = [
1190
+ { timestamp: '2026-01-28T10:00:00Z', severityText: 'CUSTOM', body: 'Custom severity' },
1191
+ { timestamp: '2026-01-28T10:01:00Z', severityText: 'VERBOSE', body: 'Verbose severity' },
1192
+ ];
1193
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1194
+ const results = await backend.queryLogs({});
1195
+ assert.strictEqual(results.length, 2);
1196
+ assert.strictEqual(results[0].severityNumber, undefined);
1197
+ assert.strictEqual(results[1].severityNumber, undefined);
1198
+ });
1199
+ it('should set severityNumber to 9 (INFO) when severity defaults', async () => {
1200
+ const today = getTestDate();
1201
+ const mockLogs = [
1202
+ { timestamp: '2026-01-28T10:00:00Z', body: 'No severity specified' },
1203
+ ];
1204
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1205
+ const results = await backend.queryLogs({});
1206
+ assert.strictEqual(results.length, 1);
1207
+ assert.strictEqual(results[0].severity, 'INFO');
1208
+ assert.strictEqual(results[0].severityNumber, 9);
1209
+ });
1210
+ it('should exclude logs matching excludeSearch', async () => {
1211
+ const today = getTestDate();
1212
+ const mockLogs = [
1213
+ { timestamp: '2026-01-28T10:00:00Z', body: 'Connection failed' },
1214
+ { timestamp: '2026-01-28T10:01:00Z', body: 'Request successful' },
1215
+ { timestamp: '2026-01-28T10:02:00Z', body: 'Connection timeout' },
1216
+ ];
1217
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1218
+ const results = await backend.queryLogs({ excludeSearch: 'connection' });
1219
+ assert.strictEqual(results.length, 1);
1220
+ assert.strictEqual(results[0].body, 'Request successful');
1221
+ });
1222
+ it('should combine search with excludeSearch', async () => {
1223
+ const today = getTestDate();
1224
+ const mockLogs = [
1225
+ { timestamp: '2026-01-28T10:00:00Z', body: 'User login successful' },
1226
+ { timestamp: '2026-01-28T10:01:00Z', body: 'User login failed' },
1227
+ { timestamp: '2026-01-28T10:02:00Z', body: 'System startup' },
1228
+ { timestamp: '2026-01-28T10:03:00Z', body: 'User logout' },
1229
+ ];
1230
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1231
+ const results = await backend.queryLogs({
1232
+ search: 'user',
1233
+ excludeSearch: 'failed',
1234
+ });
1235
+ assert.strictEqual(results.length, 2);
1236
+ assert.ok(results.some(l => l.body === 'User login successful'));
1237
+ assert.ok(results.some(l => l.body === 'User logout'));
1238
+ });
1239
+ it('should filter logs by attributeExists - all must exist', async () => {
1240
+ const today = getTestDate();
1241
+ const mockLogs = [
1242
+ {
1243
+ timestamp: '2026-01-28T10:00:00Z',
1244
+ body: 'Log 1',
1245
+ attributes: { 'request.id': 'abc', 'user.id': '123' },
1246
+ },
1247
+ {
1248
+ timestamp: '2026-01-28T10:01:00Z',
1249
+ body: 'Log 2',
1250
+ attributes: { 'request.id': 'def' }, // missing user.id
1251
+ },
1252
+ {
1253
+ timestamp: '2026-01-28T10:02:00Z',
1254
+ body: 'Log 3',
1255
+ attributes: { 'other.attr': 'value' }, // missing both
1256
+ },
1257
+ ];
1258
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1259
+ const results = await backend.queryLogs({
1260
+ attributeExists: ['request.id', 'user.id'],
1261
+ });
1262
+ assert.strictEqual(results.length, 1);
1263
+ assert.strictEqual(results[0].body, 'Log 1');
1264
+ });
1265
+ it('should filter logs by attributeNotExists - exclude if any exist', async () => {
1266
+ const today = getTestDate();
1267
+ const mockLogs = [
1268
+ {
1269
+ timestamp: '2026-01-28T10:00:00Z',
1270
+ body: 'Log with error',
1271
+ attributes: { 'request.id': 'abc', 'error.message': 'timeout' },
1272
+ },
1273
+ {
1274
+ timestamp: '2026-01-28T10:01:00Z',
1275
+ body: 'Clean log',
1276
+ attributes: { 'request.id': 'def' },
1277
+ },
1278
+ {
1279
+ timestamp: '2026-01-28T10:02:00Z',
1280
+ body: 'Log with exception',
1281
+ attributes: { 'request.id': 'ghi', 'exception.type': 'NullPointer' },
1282
+ },
1283
+ ];
1284
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1285
+ const results = await backend.queryLogs({
1286
+ attributeNotExists: ['error.message', 'exception.type'],
1287
+ });
1288
+ assert.strictEqual(results.length, 1);
1289
+ assert.strictEqual(results[0].body, 'Clean log');
1290
+ });
1291
+ it('should combine search with attribute filters', async () => {
1292
+ const today = getTestDate();
1293
+ const mockLogs = [
1294
+ {
1295
+ timestamp: '2026-01-28T10:00:00Z',
1296
+ body: 'API request completed',
1297
+ attributes: { 'request.id': 'abc', 'http.status_code': 200 },
1298
+ },
1299
+ {
1300
+ timestamp: '2026-01-28T10:01:00Z',
1301
+ body: 'API request failed',
1302
+ attributes: { 'request.id': 'def' }, // missing status_code
1303
+ },
1304
+ {
1305
+ timestamp: '2026-01-28T10:02:00Z',
1306
+ body: 'Database query completed',
1307
+ attributes: { 'request.id': 'ghi', 'http.status_code': 200 },
1308
+ },
1309
+ ];
1310
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1311
+ const results = await backend.queryLogs({
1312
+ search: 'API',
1313
+ attributeExists: ['http.status_code'],
1314
+ });
1315
+ assert.strictEqual(results.length, 1);
1316
+ assert.strictEqual(results[0].body, 'API request completed');
1317
+ });
1318
+ it('should filter logs by numericFilter with gt operator', async () => {
1319
+ const today = getTestDate();
1320
+ const mockLogs = [
1321
+ {
1322
+ timestamp: '2026-01-28T10:00:00Z',
1323
+ body: 'Request completed',
1324
+ attributes: { 'http.status_code': 200 },
1325
+ },
1326
+ {
1327
+ timestamp: '2026-01-28T10:01:00Z',
1328
+ body: 'Request failed',
1329
+ attributes: { 'http.status_code': 500 },
1330
+ },
1331
+ {
1332
+ timestamp: '2026-01-28T10:02:00Z',
1333
+ body: 'Request redirected',
1334
+ attributes: { 'http.status_code': 302 },
1335
+ },
1336
+ ];
1337
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1338
+ const results = await backend.queryLogs({
1339
+ numericFilter: [{ attribute: 'http.status_code', operator: 'gt', value: 299 }],
1340
+ });
1341
+ assert.strictEqual(results.length, 2);
1342
+ assert.ok(results.some(l => l.body === 'Request failed'));
1343
+ assert.ok(results.some(l => l.body === 'Request redirected'));
1344
+ });
1345
+ it('should filter logs by numericFilter with lt operator', async () => {
1346
+ const today = getTestDate();
1347
+ const mockLogs = [
1348
+ {
1349
+ timestamp: '2026-01-28T10:00:00Z',
1350
+ body: 'Small response',
1351
+ attributes: { 'response.size': 100 },
1352
+ },
1353
+ {
1354
+ timestamp: '2026-01-28T10:01:00Z',
1355
+ body: 'Large response',
1356
+ attributes: { 'response.size': 5000 },
1357
+ },
1358
+ ];
1359
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1360
+ const results = await backend.queryLogs({
1361
+ numericFilter: [{ attribute: 'response.size', operator: 'lt', value: 1000 }],
1362
+ });
1363
+ assert.strictEqual(results.length, 1);
1364
+ assert.strictEqual(results[0].body, 'Small response');
1365
+ });
1366
+ it('should filter logs by multiple numericFilter conditions', async () => {
1367
+ const today = getTestDate();
1368
+ const mockLogs = [
1369
+ {
1370
+ timestamp: '2026-01-28T10:00:00Z',
1371
+ body: 'Fast small response',
1372
+ attributes: { 'response.size': 100, 'response.time_ms': 50 },
1373
+ },
1374
+ {
1375
+ timestamp: '2026-01-28T10:01:00Z',
1376
+ body: 'Slow large response',
1377
+ attributes: { 'response.size': 5000, 'response.time_ms': 2000 },
1378
+ },
1379
+ {
1380
+ timestamp: '2026-01-28T10:02:00Z',
1381
+ body: 'Fast large response',
1382
+ attributes: { 'response.size': 5000, 'response.time_ms': 100 },
1383
+ },
1384
+ ];
1385
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1386
+ const results = await backend.queryLogs({
1387
+ numericFilter: [
1388
+ { attribute: 'response.size', operator: 'gte', value: 1000 },
1389
+ { attribute: 'response.time_ms', operator: 'lte', value: 500 },
1390
+ ],
1391
+ });
1392
+ assert.strictEqual(results.length, 1);
1393
+ assert.strictEqual(results[0].body, 'Fast large response');
1394
+ });
1395
+ it('should skip logs when numericFilter attribute is missing or not a number', async () => {
1396
+ const today = getTestDate();
1397
+ const mockLogs = [
1398
+ {
1399
+ timestamp: '2026-01-28T10:00:00Z',
1400
+ body: 'Log with numeric',
1401
+ attributes: { 'count': 100 },
1402
+ },
1403
+ {
1404
+ timestamp: '2026-01-28T10:01:00Z',
1405
+ body: 'Log with string',
1406
+ attributes: { 'count': 'one hundred' },
1407
+ },
1408
+ {
1409
+ timestamp: '2026-01-28T10:02:00Z',
1410
+ body: 'Log without count',
1411
+ attributes: { 'other': 'value' },
1412
+ },
1413
+ ];
1414
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1415
+ const results = await backend.queryLogs({
1416
+ numericFilter: [{ attribute: 'count', operator: 'gte', value: 50 }],
1417
+ });
1418
+ assert.strictEqual(results.length, 1);
1419
+ assert.strictEqual(results[0].body, 'Log with numeric');
1420
+ });
1421
+ it('should combine numericFilter with search and severity', async () => {
1422
+ const today = getTestDate();
1423
+ const mockLogs = [
1424
+ {
1425
+ timestamp: '2026-01-28T10:00:00Z',
1426
+ body: 'API Error: rate limit',
1427
+ severity: 'ERROR',
1428
+ attributes: { 'http.status_code': 429, 'retry_count': 3 },
1429
+ },
1430
+ {
1431
+ timestamp: '2026-01-28T10:01:00Z',
1432
+ body: 'API Error: server error',
1433
+ severity: 'ERROR',
1434
+ attributes: { 'http.status_code': 500, 'retry_count': 1 },
1435
+ },
1436
+ {
1437
+ timestamp: '2026-01-28T10:02:00Z',
1438
+ body: 'API Error: timeout',
1439
+ severity: 'WARN',
1440
+ attributes: { 'http.status_code': 408, 'retry_count': 5 },
1441
+ },
1442
+ ];
1443
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1444
+ const results = await backend.queryLogs({
1445
+ search: 'API Error',
1446
+ severity: 'ERROR',
1447
+ numericFilter: [{ attribute: 'retry_count', operator: 'gt', value: 2 }],
1448
+ });
1449
+ assert.strictEqual(results.length, 1);
1450
+ assert.strictEqual(results[0].body, 'API Error: rate limit');
1451
+ });
1452
+ it('should extract fields from JSON log body using dot notation', async () => {
1453
+ const today = getTestDate();
1454
+ const mockLogs = [
1455
+ {
1456
+ timestamp: '2026-01-28T10:00:00Z',
1457
+ body: '{"user":{"id":"user123","name":"Alice"},"request":{"method":"POST","path":"/api/v1/users"}}',
1458
+ },
1459
+ {
1460
+ timestamp: '2026-01-28T10:01:00Z',
1461
+ body: '{"user":{"id":"user456"},"request":{"method":"GET"}}',
1462
+ },
1463
+ ];
1464
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1465
+ const results = await backend.queryLogs({
1466
+ extractFields: ['user.id', 'request.method'],
1467
+ });
1468
+ assert.strictEqual(results.length, 2);
1469
+ assert.deepStrictEqual(results[0].extractedFields, {
1470
+ 'user.id': 'user123',
1471
+ 'request.method': 'POST',
1472
+ });
1473
+ assert.deepStrictEqual(results[1].extractedFields, {
1474
+ 'user.id': 'user456',
1475
+ 'request.method': 'GET',
1476
+ });
1477
+ });
1478
+ it('should handle missing fields gracefully during extraction', async () => {
1479
+ const today = getTestDate();
1480
+ const mockLogs = [
1481
+ {
1482
+ timestamp: '2026-01-28T10:00:00Z',
1483
+ body: '{"user":{"id":"user123"},"status":"ok"}',
1484
+ },
1485
+ ];
1486
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1487
+ const results = await backend.queryLogs({
1488
+ extractFields: ['user.id', 'user.name', 'nonexistent.path'],
1489
+ });
1490
+ assert.strictEqual(results.length, 1);
1491
+ // Only user.id exists, user.name and nonexistent.path are undefined
1492
+ assert.deepStrictEqual(results[0].extractedFields, {
1493
+ 'user.id': 'user123',
1494
+ });
1495
+ });
1496
+ it('should not extract fields from non-JSON log bodies', async () => {
1497
+ const today = getTestDate();
1498
+ const mockLogs = [
1499
+ {
1500
+ timestamp: '2026-01-28T10:00:00Z',
1501
+ body: 'Plain text log message',
1502
+ },
1503
+ {
1504
+ timestamp: '2026-01-28T10:01:00Z',
1505
+ body: '[2026-01-28] INFO: Starting service',
1506
+ },
1507
+ ];
1508
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1509
+ const results = await backend.queryLogs({
1510
+ extractFields: ['user.id', 'message'],
1511
+ });
1512
+ assert.strictEqual(results.length, 2);
1513
+ assert.strictEqual(results[0].extractedFields, undefined);
1514
+ assert.strictEqual(results[1].extractedFields, undefined);
1515
+ });
1516
+ it('should not extract fields when extractFields is empty', async () => {
1517
+ const today = getTestDate();
1518
+ const mockLogs = [
1519
+ {
1520
+ timestamp: '2026-01-28T10:00:00Z',
1521
+ body: '{"user":{"id":"user123"}}',
1522
+ },
1523
+ ];
1524
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1525
+ const results = await backend.queryLogs({
1526
+ extractFields: [],
1527
+ });
1528
+ assert.strictEqual(results.length, 1);
1529
+ assert.strictEqual(results[0].extractedFields, undefined);
1530
+ });
1531
+ it('should extract deeply nested fields', async () => {
1532
+ const today = getTestDate();
1533
+ const mockLogs = [
1534
+ {
1535
+ timestamp: '2026-01-28T10:00:00Z',
1536
+ body: '{"response":{"data":{"items":[1,2,3],"meta":{"count":3}}}}',
1537
+ },
1538
+ ];
1539
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1540
+ const results = await backend.queryLogs({
1541
+ extractFields: ['response.data.meta.count', 'response.data.items'],
1542
+ });
1543
+ assert.strictEqual(results.length, 1);
1544
+ assert.deepStrictEqual(results[0].extractedFields, {
1545
+ 'response.data.meta.count': 3,
1546
+ 'response.data.items': [1, 2, 3],
1547
+ });
1548
+ });
1549
+ it('should handle invalid JSON gracefully during extraction', async () => {
1550
+ const today = getTestDate();
1551
+ const mockLogs = [
1552
+ {
1553
+ timestamp: '2026-01-28T10:00:00Z',
1554
+ body: '{invalid json',
1555
+ },
1556
+ ];
1557
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1558
+ const results = await backend.queryLogs({
1559
+ extractFields: ['user.id'],
1560
+ });
1561
+ assert.strictEqual(results.length, 1);
1562
+ assert.strictEqual(results[0].extractedFields, undefined);
1563
+ });
1564
+ it('should return undefined extractedFields when no fields match', async () => {
1565
+ const today = getTestDate();
1566
+ const mockLogs = [
1567
+ {
1568
+ timestamp: '2026-01-28T10:00:00Z',
1569
+ body: '{"foo":"bar"}',
1570
+ },
1571
+ ];
1572
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
1573
+ const results = await backend.queryLogs({
1574
+ extractFields: ['nonexistent.field', 'another.missing'],
1575
+ });
1576
+ assert.strictEqual(results.length, 1);
1577
+ assert.strictEqual(results[0].extractedFields, undefined);
1578
+ });
1579
+ });
1580
+ describe('queryMetrics', () => {
1581
+ it('should read and normalize metric data points from JSONL files', async () => {
1582
+ const today = getTestDate();
1583
+ const mockMetrics = [
1584
+ {
1585
+ timestamp: '2026-01-28T10:00:00Z',
1586
+ name: 'http.requests.total',
1587
+ value: 100,
1588
+ type: 'counter',
1589
+ unit: 'requests',
1590
+ resource: { serviceName: 'api-gateway' },
1591
+ attributes: { 'http.method': 'GET', 'http.status_code': 200 },
1592
+ },
1593
+ ];
1594
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1595
+ const results = await backend.queryMetrics({});
1596
+ assert.strictEqual(results.length, 1);
1597
+ assert.strictEqual(results[0].name, 'http.requests.total');
1598
+ assert.strictEqual(results[0].value, 100);
1599
+ assert.strictEqual(results[0].unit, 'requests');
1600
+ assert.strictEqual(results[0].attributes?.['service.name'], 'api-gateway');
1601
+ assert.strictEqual(results[0].attributes?.['http.method'], 'GET');
1602
+ });
1603
+ it('should filter metrics by name substring', async () => {
1604
+ const today = getTestDate();
1605
+ const mockMetrics = [
1606
+ { timestamp: '2026-01-28T10:00:00Z', name: 'http.requests.total', value: 100, type: 'counter' },
1607
+ { timestamp: '2026-01-28T10:01:00Z', name: 'http.request.duration', value: 150, type: 'histogram' },
1608
+ { timestamp: '2026-01-28T10:02:00Z', name: 'memory.usage', value: 512, type: 'gauge' },
1609
+ ];
1610
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1611
+ const results = await backend.queryMetrics({ metricName: 'http' });
1612
+ assert.strictEqual(results.length, 2);
1613
+ assert.ok(results.every(m => m.name.includes('http')));
1614
+ });
1615
+ it('should apply limit and offset to metric results', async () => {
1616
+ const today = getTestDate();
1617
+ const mockMetrics = Array.from({ length: 150 }, (_, i) => ({
1618
+ timestamp: new Date(Date.now() + i * 1000).toISOString(),
1619
+ name: `metric.${i}`,
1620
+ value: i * 10,
1621
+ type: 'gauge',
1622
+ }));
1623
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1624
+ const results = await backend.queryMetrics({ limit: 50, offset: 30 });
1625
+ assert.strictEqual(results.length, 50);
1626
+ assert.strictEqual(results[0].name, 'metric.30');
1627
+ });
1628
+ it('should aggregate metrics with sum function', async () => {
1629
+ const today = getTestDate();
1630
+ const mockMetrics = [
1631
+ { timestamp: '2026-01-28T10:00:00Z', name: 'requests', value: 100, type: 'counter' },
1632
+ { timestamp: '2026-01-28T10:01:00Z', name: 'requests', value: 150, type: 'counter' },
1633
+ { timestamp: '2026-01-28T10:02:00Z', name: 'requests', value: 200, type: 'counter' },
1634
+ ];
1635
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1636
+ const results = await backend.queryMetrics({ aggregation: 'sum' });
1637
+ assert.strictEqual(results.length, 1);
1638
+ assert.strictEqual(results[0].value, 450);
1639
+ });
1640
+ it('should aggregate metrics with avg function', async () => {
1641
+ const today = getTestDate();
1642
+ const mockMetrics = [
1643
+ { timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 100, type: 'histogram' },
1644
+ { timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 200, type: 'histogram' },
1645
+ { timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 300, type: 'histogram' },
1646
+ ];
1647
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1648
+ const results = await backend.queryMetrics({ aggregation: 'avg' });
1649
+ assert.strictEqual(results.length, 1);
1650
+ assert.strictEqual(results[0].value, 200);
1651
+ });
1652
+ it('should aggregate metrics with min function', async () => {
1653
+ const today = getTestDate();
1654
+ const mockMetrics = [
1655
+ { timestamp: '2026-01-28T10:00:00Z', name: 'response_time', value: 150, type: 'gauge' },
1656
+ { timestamp: '2026-01-28T10:01:00Z', name: 'response_time', value: 50, type: 'gauge' },
1657
+ { timestamp: '2026-01-28T10:02:00Z', name: 'response_time', value: 200, type: 'gauge' },
1658
+ ];
1659
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1660
+ const results = await backend.queryMetrics({ aggregation: 'min' });
1661
+ assert.strictEqual(results.length, 1);
1662
+ assert.strictEqual(results[0].value, 50);
1663
+ });
1664
+ it('should aggregate metrics with max function', async () => {
1665
+ const today = getTestDate();
1666
+ const mockMetrics = [
1667
+ { timestamp: '2026-01-28T10:00:00Z', name: 'memory', value: 512, type: 'gauge' },
1668
+ { timestamp: '2026-01-28T10:01:00Z', name: 'memory', value: 256, type: 'gauge' },
1669
+ { timestamp: '2026-01-28T10:02:00Z', name: 'memory', value: 1024, type: 'gauge' },
1670
+ ];
1671
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1672
+ const results = await backend.queryMetrics({ aggregation: 'max' });
1673
+ assert.strictEqual(results.length, 1);
1674
+ assert.strictEqual(results[0].value, 1024);
1675
+ });
1676
+ it('should aggregate metrics with count function', async () => {
1677
+ const today = getTestDate();
1678
+ const mockMetrics = [
1679
+ { timestamp: '2026-01-28T10:00:00Z', name: 'events', value: 10, type: 'counter' },
1680
+ { timestamp: '2026-01-28T10:01:00Z', name: 'events', value: 20, type: 'counter' },
1681
+ { timestamp: '2026-01-28T10:02:00Z', name: 'events', value: 30, type: 'counter' },
1682
+ ];
1683
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1684
+ const results = await backend.queryMetrics({ aggregation: 'count' });
1685
+ assert.strictEqual(results.length, 1);
1686
+ assert.strictEqual(results[0].value, 3);
1687
+ });
1688
+ it('should aggregate metrics with p50 (median) function', async () => {
1689
+ const today = getTestDate();
1690
+ const mockMetrics = [
1691
+ { timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 10, type: 'histogram' },
1692
+ { timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 20, type: 'histogram' },
1693
+ { timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 30, type: 'histogram' },
1694
+ { timestamp: '2026-01-28T10:03:00Z', name: 'latency', value: 40, type: 'histogram' },
1695
+ { timestamp: '2026-01-28T10:04:00Z', name: 'latency', value: 50, type: 'histogram' },
1696
+ ];
1697
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1698
+ const results = await backend.queryMetrics({ aggregation: 'p50' });
1699
+ assert.strictEqual(results.length, 1);
1700
+ assert.strictEqual(results[0].value, 30); // median of [10, 20, 30, 40, 50]
1701
+ });
1702
+ it('should aggregate metrics with p95 function', async () => {
1703
+ const today = getTestDate();
1704
+ // Create 100 data points for a more realistic p95 calculation
1705
+ const mockMetrics = Array.from({ length: 100 }, (_, i) => ({
1706
+ timestamp: `2026-01-28T10:${String(Math.floor(i / 60)).padStart(2, '0')}:${String(i % 60).padStart(2, '0')}Z`,
1707
+ name: 'response_time',
1708
+ value: i + 1, // values 1-100
1709
+ type: 'histogram',
1710
+ }));
1711
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1712
+ const results = await backend.queryMetrics({ aggregation: 'p95' });
1713
+ assert.strictEqual(results.length, 1);
1714
+ assert.strictEqual(results[0].value, 95); // 95th percentile of 1-100
1715
+ });
1716
+ it('should aggregate metrics with p99 function', async () => {
1717
+ const today = getTestDate();
1718
+ // Create 100 data points for a more realistic p99 calculation
1719
+ const mockMetrics = Array.from({ length: 100 }, (_, i) => ({
1720
+ timestamp: `2026-01-28T10:${String(Math.floor(i / 60)).padStart(2, '0')}:${String(i % 60).padStart(2, '0')}Z`,
1721
+ name: 'response_time',
1722
+ value: i + 1, // values 1-100
1723
+ type: 'histogram',
1724
+ }));
1725
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1726
+ const results = await backend.queryMetrics({ aggregation: 'p99' });
1727
+ assert.strictEqual(results.length, 1);
1728
+ assert.strictEqual(results[0].value, 99); // 99th percentile of 1-100
1729
+ });
1730
+ it('should handle p50 with even number of values', async () => {
1731
+ const today = getTestDate();
1732
+ const mockMetrics = [
1733
+ { timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 10, type: 'histogram' },
1734
+ { timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 20, type: 'histogram' },
1735
+ { timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 30, type: 'histogram' },
1736
+ { timestamp: '2026-01-28T10:03:00Z', name: 'latency', value: 40, type: 'histogram' },
1737
+ ];
1738
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1739
+ const results = await backend.queryMetrics({ aggregation: 'p50' });
1740
+ assert.strictEqual(results.length, 1);
1741
+ assert.strictEqual(results[0].value, 20); // ceil(0.5 * 4) - 1 = 1, sorted[1] = 20
1742
+ });
1743
+ it('should handle percentile with single value', async () => {
1744
+ const today = getTestDate();
1745
+ const mockMetrics = [
1746
+ { timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 42, type: 'histogram' },
1747
+ ];
1748
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1749
+ const results = await backend.queryMetrics({ aggregation: 'p95' });
1750
+ assert.strictEqual(results.length, 1);
1751
+ assert.strictEqual(results[0].value, 42); // single value is the only percentile
1752
+ });
1753
+ it('should calculate percentiles with groupBy', async () => {
1754
+ const today = getTestDate();
1755
+ const mockMetrics = [
1756
+ { timestamp: '2026-01-28T10:00:00Z', name: 'latency', value: 10, type: 'histogram', attributes: { endpoint: '/api/users' } },
1757
+ { timestamp: '2026-01-28T10:01:00Z', name: 'latency', value: 20, type: 'histogram', attributes: { endpoint: '/api/users' } },
1758
+ { timestamp: '2026-01-28T10:02:00Z', name: 'latency', value: 30, type: 'histogram', attributes: { endpoint: '/api/users' } },
1759
+ { timestamp: '2026-01-28T10:03:00Z', name: 'latency', value: 100, type: 'histogram', attributes: { endpoint: '/api/orders' } },
1760
+ { timestamp: '2026-01-28T10:04:00Z', name: 'latency', value: 200, type: 'histogram', attributes: { endpoint: '/api/orders' } },
1761
+ { timestamp: '2026-01-28T10:05:00Z', name: 'latency', value: 300, type: 'histogram', attributes: { endpoint: '/api/orders' } },
1762
+ ];
1763
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1764
+ const results = await backend.queryMetrics({ aggregation: 'p50', groupBy: ['endpoint'] });
1765
+ assert.strictEqual(results.length, 2);
1766
+ const usersMetric = results.find(m => m.attributes?.endpoint === '/api/users');
1767
+ const ordersMetric = results.find(m => m.attributes?.endpoint === '/api/orders');
1768
+ assert.strictEqual(usersMetric?.value, 20); // median of [10, 20, 30]
1769
+ assert.strictEqual(ordersMetric?.value, 200); // median of [100, 200, 300]
1770
+ });
1771
+ it('should aggregate metrics grouped by attributes', async () => {
1772
+ const today = getTestDate();
1773
+ const mockMetrics = [
1774
+ {
1775
+ timestamp: '2026-01-28T10:00:00Z',
1776
+ name: 'http.requests',
1777
+ value: 100,
1778
+ type: 'counter',
1779
+ attributes: { method: 'GET' },
1780
+ },
1781
+ {
1782
+ timestamp: '2026-01-28T10:01:00Z',
1783
+ name: 'http.requests',
1784
+ value: 50,
1785
+ type: 'counter',
1786
+ attributes: { method: 'POST' },
1787
+ },
1788
+ {
1789
+ timestamp: '2026-01-28T10:02:00Z',
1790
+ name: 'http.requests',
1791
+ value: 200,
1792
+ type: 'counter',
1793
+ attributes: { method: 'GET' },
1794
+ },
1795
+ ];
1796
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1797
+ const results = await backend.queryMetrics({ aggregation: 'sum', groupBy: ['method'] });
1798
+ assert.strictEqual(results.length, 2);
1799
+ const getMetric = results.find(m => m.attributes?.method === 'GET');
1800
+ const postMetric = results.find(m => m.attributes?.method === 'POST');
1801
+ assert.strictEqual(getMetric?.value, 300);
1802
+ assert.strictEqual(postMetric?.value, 50);
1803
+ });
1804
+ it('should return empty array when no metrics found', async () => {
1805
+ // No files created
1806
+ const results = await backend.queryMetrics({});
1807
+ assert.strictEqual(results.length, 0);
1808
+ });
1809
+ it('should aggregate metrics by time bucket with 1m buckets', async () => {
1810
+ const today = getTestDate();
1811
+ const mockMetrics = [
1812
+ // First minute bucket: 10:00:00 - 10:00:59
1813
+ { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
1814
+ { timestamp: `${today}T10:00:30Z`, name: 'requests', value: 20, type: 'counter' },
1815
+ { timestamp: `${today}T10:00:45Z`, name: 'requests', value: 30, type: 'counter' },
1816
+ // Second minute bucket: 10:01:00 - 10:01:59
1817
+ { timestamp: `${today}T10:01:00Z`, name: 'requests', value: 100, type: 'counter' },
1818
+ { timestamp: `${today}T10:01:30Z`, name: 'requests', value: 200, type: 'counter' },
1819
+ // Third minute bucket: 10:02:00 - 10:02:59
1820
+ { timestamp: `${today}T10:02:15Z`, name: 'requests', value: 50, type: 'counter' },
1821
+ ];
1822
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1823
+ const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: '1m' });
1824
+ assert.strictEqual(results.length, 3);
1825
+ // Results should be sorted by timestamp
1826
+ assert.strictEqual(results[0].value, 60); // 10 + 20 + 30
1827
+ assert.strictEqual(results[1].value, 300); // 100 + 200
1828
+ assert.strictEqual(results[2].value, 50); // 50
1829
+ // Timestamps should be floored to bucket boundaries
1830
+ assert.strictEqual(results[0].timestamp, `${today}T10:00:00.000Z`);
1831
+ assert.strictEqual(results[1].timestamp, `${today}T10:01:00.000Z`);
1832
+ assert.strictEqual(results[2].timestamp, `${today}T10:02:00.000Z`);
1833
+ });
1834
+ it('should aggregate metrics by time bucket with 5m buckets', async () => {
1835
+ const today = getTestDate();
1836
+ const mockMetrics = [
1837
+ // First 5-minute bucket: 10:00:00 - 10:04:59
1838
+ { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
1839
+ { timestamp: `${today}T10:02:00Z`, name: 'requests', value: 20, type: 'counter' },
1840
+ { timestamp: `${today}T10:04:00Z`, name: 'requests', value: 30, type: 'counter' },
1841
+ // Second 5-minute bucket: 10:05:00 - 10:09:59
1842
+ { timestamp: `${today}T10:05:00Z`, name: 'requests', value: 100, type: 'counter' },
1843
+ { timestamp: `${today}T10:08:00Z`, name: 'requests', value: 200, type: 'counter' },
1844
+ ];
1845
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1846
+ const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: '5m' });
1847
+ assert.strictEqual(results.length, 2);
1848
+ assert.strictEqual(results[0].value, 60); // 10 + 20 + 30
1849
+ assert.strictEqual(results[1].value, 300); // 100 + 200
1850
+ });
1851
+ it('should aggregate metrics by time bucket with 1h buckets', async () => {
1852
+ const today = getTestDate();
1853
+ const mockMetrics = [
1854
+ // First hour bucket: 10:00:00 - 10:59:59
1855
+ { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
1856
+ { timestamp: `${today}T10:30:00Z`, name: 'requests', value: 20, type: 'counter' },
1857
+ // Second hour bucket: 11:00:00 - 11:59:59
1858
+ { timestamp: `${today}T11:00:00Z`, name: 'requests', value: 100, type: 'counter' },
1859
+ { timestamp: `${today}T11:45:00Z`, name: 'requests', value: 200, type: 'counter' },
1860
+ ];
1861
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1862
+ const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: '1h' });
1863
+ assert.strictEqual(results.length, 2);
1864
+ assert.strictEqual(results[0].value, 30); // 10 + 20
1865
+ assert.strictEqual(results[1].value, 300); // 100 + 200
1866
+ });
1867
+ it('should aggregate metrics by time bucket with 1d buckets', async () => {
1868
+ // Create metrics files for two days
1869
+ writeJsonlFile(path.join(tempDir, 'metrics-2026-01-28.jsonl'), [
1870
+ { timestamp: '2026-01-28T10:00:00Z', name: 'requests', value: 100, type: 'counter' },
1871
+ { timestamp: '2026-01-28T20:00:00Z', name: 'requests', value: 200, type: 'counter' },
1872
+ ]);
1873
+ writeJsonlFile(path.join(tempDir, 'metrics-2026-01-29.jsonl'), [
1874
+ { timestamp: '2026-01-29T08:00:00Z', name: 'requests', value: 300, type: 'counter' },
1875
+ { timestamp: '2026-01-29T16:00:00Z', name: 'requests', value: 400, type: 'counter' },
1876
+ ]);
1877
+ const results = await backend.queryMetrics({
1878
+ aggregation: 'sum',
1879
+ timeBucket: '1d',
1880
+ startDate: '2026-01-28',
1881
+ endDate: '2026-01-29',
1882
+ });
1883
+ assert.strictEqual(results.length, 2);
1884
+ assert.strictEqual(results[0].value, 300); // 100 + 200
1885
+ assert.strictEqual(results[1].value, 700); // 300 + 400
1886
+ });
1887
+ it('should combine time bucket with groupBy', async () => {
1888
+ const today = getTestDate();
1889
+ const mockMetrics = [
1890
+ // First minute, method=GET
1891
+ { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter', attributes: { method: 'GET' } },
1892
+ { timestamp: `${today}T10:00:30Z`, name: 'requests', value: 20, type: 'counter', attributes: { method: 'GET' } },
1893
+ // First minute, method=POST
1894
+ { timestamp: `${today}T10:00:15Z`, name: 'requests', value: 5, type: 'counter', attributes: { method: 'POST' } },
1895
+ // Second minute, method=GET
1896
+ { timestamp: `${today}T10:01:00Z`, name: 'requests', value: 100, type: 'counter', attributes: { method: 'GET' } },
1897
+ // Second minute, method=POST
1898
+ { timestamp: `${today}T10:01:30Z`, name: 'requests', value: 50, type: 'counter', attributes: { method: 'POST' } },
1899
+ ];
1900
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
1901
+ const results = await backend.queryMetrics({
1902
+ aggregation: 'sum',
1903
+ timeBucket: '1m',
1904
+ groupBy: ['method'],
1905
+ });
1906
+ assert.strictEqual(results.length, 4);
1907
+ // First bucket GET
1908
+ const bucket1GET = results.find(m => m.timestamp === `${today}T10:00:00.000Z` && m.attributes?.method === 'GET');
1909
+ assert.strictEqual(bucket1GET?.value, 30); // 10 + 20
1910
+ // First bucket POST
1911
+ const bucket1POST = results.find(m => m.timestamp === `${today}T10:00:00.000Z` && m.attributes?.method === 'POST');
1912
+ assert.strictEqual(bucket1POST?.value, 5);
1913
+ // Second bucket GET
1914
+ const bucket2GET = results.find(m => m.timestamp === `${today}T10:01:00.000Z` && m.attributes?.method === 'GET');
1915
+ assert.strictEqual(bucket2GET?.value, 100);
1916
+ // Second bucket POST
1917
+ const bucket2POST = results.find(m => m.timestamp === `${today}T10:01:00.000Z` && m.attributes?.method === 'POST');
1918
+ assert.strictEqual(bucket2POST?.value, 50);
1919
+ });
1920
it('should use avg aggregation with time buckets', async () => {
  const today = getTestDate();
  // Three samples land in the 10:00 bucket and two in the 10:01 bucket.
  const samples = [
    { timestamp: `${today}T10:00:00Z`, name: 'latency', value: 100, type: 'histogram' },
    { timestamp: `${today}T10:00:30Z`, name: 'latency', value: 200, type: 'histogram' },
    { timestamp: `${today}T10:00:45Z`, name: 'latency', value: 300, type: 'histogram' },
    { timestamp: `${today}T10:01:00Z`, name: 'latency', value: 500, type: 'histogram' },
    { timestamp: `${today}T10:01:30Z`, name: 'latency', value: 700, type: 'histogram' },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), samples);
  const buckets = await backend.queryMetrics({ aggregation: 'avg', timeBucket: '1m' });
  assert.strictEqual(buckets.length, 2);
  assert.strictEqual(buckets[0].value, 200); // mean of 100, 200, 300
  assert.strictEqual(buckets[1].value, 600); // mean of 500, 700
});
1937
it('should ignore invalid time bucket format', async () => {
  const today = getTestDate();
  const records = [
    { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 10, type: 'counter' },
    { timestamp: `${today}T10:00:30Z`, name: 'requests', value: 20, type: 'counter' },
    { timestamp: `${today}T10:01:00Z`, name: 'requests', value: 30, type: 'counter' },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), records);
  // An unparseable bucket spec is silently ignored, so every record
  // collapses into a single aggregate group.
  const results = await backend.queryMetrics({ aggregation: 'sum', timeBucket: 'invalid' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].value, 60); // 10 + 20 + 30
});
1950
it('should calculate rate of change per second', async () => {
  const today = getTestDate();
  const counterSeries = [
    { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 100, type: 'counter' },
    { timestamp: `${today}T10:00:30Z`, name: 'requests', value: 200, type: 'counter' },
    { timestamp: `${today}T10:01:00Z`, name: 'requests', value: 400, type: 'counter' },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), counterSeries);
  const results = await backend.queryMetrics({ aggregation: 'rate' });
  assert.strictEqual(results.length, 1);
  // (last - first) / elapsed = (400 - 100) / 60s = 5 per second
  assert.strictEqual(results[0].value, 5);
});
1963
it('should calculate rate with timeBucket', async () => {
  const today = getTestDate();
  const counterSeries = [
    // 10:00 bucket (10:00:00 - 10:00:59)
    { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 0, type: 'counter' },
    { timestamp: `${today}T10:00:30Z`, name: 'requests', value: 60, type: 'counter' },
    // 10:01 bucket (10:01:00 - 10:01:59)
    { timestamp: `${today}T10:01:00Z`, name: 'requests', value: 100, type: 'counter' },
    { timestamp: `${today}T10:01:30Z`, name: 'requests', value: 250, type: 'counter' },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), counterSeries);
  const results = await backend.queryMetrics({ aggregation: 'rate', timeBucket: '1m' });
  assert.strictEqual(results.length, 2);
  assert.strictEqual(results[0].value, 2); // (60 - 0) / 30s
  assert.strictEqual(results[1].value, 5); // (250 - 100) / 30s
});
1981
it('should return rate of 0 for single data point', async () => {
  const today = getTestDate();
  // A rate needs at least two samples; a lone sample must yield 0, not NaN.
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), [
    { timestamp: `${today}T10:00:00Z`, name: 'requests', value: 100, type: 'counter' },
  ]);
  const results = await backend.queryMetrics({ aggregation: 'rate' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].value, 0);
});
1991
it('should return rate of 0 when all timestamps are the same', async () => {
  const today = getTestDate();
  // Three samples at the exact same instant — elapsed time is zero.
  const sameInstant = [100, 200, 300].map(value => ({
    timestamp: `${today}T10:00:00Z`,
    name: 'requests',
    value,
    type: 'counter',
  }));
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), sameInstant);
  const results = await backend.queryMetrics({ aggregation: 'rate' });
  assert.strictEqual(results.length, 1);
  // The backend must guard the division by zero and report 0.
  assert.strictEqual(results[0].value, 0);
});
2004
it('should read and normalize histogram metrics with bucket distribution', async () => {
  const today = getTestDate();
  const mockMetrics = [
    {
      // NOTE(review): literal date while the file is named with `today` —
      // presumably the backend selects files by filename, not record
      // timestamp; confirm against queryMetrics' date filtering.
      timestamp: '2026-01-28T10:00:00Z',
      name: 'http.request.duration',
      value: 150, // typically the sum/count average or similar aggregate
      type: 'histogram',
      unit: 'ms',
      resource: { serviceName: 'api-gateway' },
      attributes: { 'http.method': 'GET' },
      histogram: {
        // Cumulative bucket counts: each bucket's count includes all
        // lower buckets (asserted below as "cumulative counts").
        buckets: [
          { le: 50, count: 10 },
          { le: 100, count: 25 },
          { le: 250, count: 45 },
          { le: 500, count: 48 },
          { le: Infinity, count: 50 },
        ],
        sum: 7500,
        count: 50,
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].name, 'http.request.duration');
  assert.strictEqual(results[0].value, 150);
  assert.strictEqual(results[0].unit, 'ms');
  // resource.serviceName is flattened into attributes['service.name'].
  assert.strictEqual(results[0].attributes?.['service.name'], 'api-gateway');
  assert.strictEqual(results[0].attributes?.['http.method'], 'GET');
  // Verify histogram data is present
  assert.ok(results[0].histogram, 'Histogram data should be present');
  assert.strictEqual(results[0].histogram?.sum, 7500);
  assert.strictEqual(results[0].histogram?.count, 50);
  assert.strictEqual(results[0].histogram?.buckets.length, 5);
  // Verify bucket boundaries and cumulative counts
  assert.strictEqual(results[0].histogram?.buckets[0].le, 50);
  assert.strictEqual(results[0].histogram?.buckets[0].count, 10);
  assert.strictEqual(results[0].histogram?.buckets[1].le, 100);
  assert.strictEqual(results[0].histogram?.buckets[1].count, 25);
  assert.strictEqual(results[0].histogram?.buckets[2].le, 250);
  assert.strictEqual(results[0].histogram?.buckets[2].count, 45);
});
2049
it('should handle histogram metrics without histogram data (non-histogram type)', async () => {
  const today = getTestDate();
  const counterOnly = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'http.requests.total',
      value: 100,
      type: 'counter',
      unit: 'requests',
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), counterOnly);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  const [metric] = results;
  assert.strictEqual(metric.name, 'http.requests.total');
  assert.strictEqual(metric.value, 100);
  // A plain counter carries no histogram payload.
  assert.strictEqual(metric.histogram, undefined);
});
2067
it('should handle mixed metric types including histograms', async () => {
  const today = getTestDate();
  // One counter, one histogram, one gauge — all at the same instant.
  const mockMetrics = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'http.requests.total',
      value: 100,
      type: 'counter',
    },
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'http.request.duration',
      value: 150,
      type: 'histogram',
      histogram: {
        buckets: [
          { le: 100, count: 20 },
          { le: 500, count: 80 },
          { le: Infinity, count: 100 },
        ],
        sum: 15000,
        count: 100,
      },
    },
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'memory.usage',
      value: 512,
      type: 'gauge',
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 3);
  const counter = results.find(m => m.name === 'http.requests.total');
  const histogram = results.find(m => m.name === 'http.request.duration');
  const gauge = results.find(m => m.name === 'memory.usage');
  // Only the histogram-typed record should expose a histogram payload.
  assert.ok(counter, 'Counter metric should be present');
  assert.strictEqual(counter?.histogram, undefined);
  assert.ok(histogram, 'Histogram metric should be present');
  assert.ok(histogram?.histogram, 'Histogram should have histogram data');
  assert.strictEqual(histogram?.histogram?.count, 100);
  assert.ok(gauge, 'Gauge metric should be present');
  assert.strictEqual(gauge?.histogram, undefined);
});
2112
it('should preserve histogram data when filtering by metric name', async () => {
  const today = getTestDate();
  // Two distinct histogram metrics; the query should return exactly one
  // of them with its histogram payload intact.
  const mockMetrics = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'api.latency',
      value: 200,
      type: 'histogram',
      histogram: {
        buckets: [
          { le: 100, count: 5 },
          { le: 500, count: 15 },
          { le: 1000, count: 20 },
        ],
        sum: 4000,
        count: 20,
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'db.query.duration',
      value: 50,
      type: 'histogram',
      histogram: {
        buckets: [
          { le: 10, count: 30 },
          { le: 50, count: 80 },
          { le: 100, count: 100 },
        ],
        sum: 3500,
        count: 100,
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
  const results = await backend.queryMetrics({ metricName: 'api.latency' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].name, 'api.latency');
  // The surviving record keeps the full histogram of the matched metric.
  assert.ok(results[0].histogram);
  assert.strictEqual(results[0].histogram?.sum, 4000);
  assert.strictEqual(results[0].histogram?.count, 20);
  assert.strictEqual(results[0].histogram?.buckets.length, 3);
});
2155
it('should ignore histogram field when metric type is not histogram', async () => {
  const today = getTestDate();
  // Edge case: a metric that has histogram field but type is not 'histogram'
  const mockMetrics = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'malformed.metric',
      value: 100,
      type: 'gauge', // Not histogram type
      histogram: {
        // This should be ignored since type != 'histogram'
        buckets: [{ le: 100, count: 10 }],
        sum: 500,
        count: 10,
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  // The record itself is still returned — only the histogram payload is dropped.
  assert.strictEqual(results[0].name, 'malformed.metric');
  assert.strictEqual(results[0].value, 100);
  // Histogram data should NOT be included since type is 'gauge', not 'histogram'
  assert.strictEqual(results[0].histogram, undefined);
});
2180
it('should normalize aggregationTemporality from numeric OTel values', async () => {
  const today = getTestDate();
  // OTel encodes temporality as an enum: 0=UNSPECIFIED, 1=DELTA, 2=CUMULATIVE.
  const records = [
    { timestamp: '2026-01-28T10:00:00Z', name: 'http.requests.delta', value: 100, type: 'counter', aggregationTemporality: 1 },
    { timestamp: '2026-01-28T10:01:00Z', name: 'http.requests.cumulative', value: 500, type: 'counter', aggregationTemporality: 2 },
    { timestamp: '2026-01-28T10:02:00Z', name: 'http.requests.unspecified', value: 50, type: 'counter', aggregationTemporality: 0 },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), records);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 3);
  const byName = (name) => results.find(m => m.name === name);
  assert.strictEqual(byName('http.requests.delta')?.aggregationTemporality, 'DELTA');
  assert.strictEqual(byName('http.requests.cumulative')?.aggregationTemporality, 'CUMULATIVE');
  assert.strictEqual(byName('http.requests.unspecified')?.aggregationTemporality, 'UNSPECIFIED');
});
2215
it('should normalize aggregationTemporality from string values', async () => {
  const today = getTestDate();
  // String inputs in any letter case normalize to the canonical uppercase form.
  const records = [
    { timestamp: '2026-01-28T10:00:00Z', name: 'requests.delta', value: 100, type: 'counter', aggregationTemporality: 'delta' },
    { timestamp: '2026-01-28T10:01:00Z', name: 'requests.cumulative', value: 500, type: 'counter', aggregationTemporality: 'CUMULATIVE' },
    { timestamp: '2026-01-28T10:02:00Z', name: 'requests.unspecified', value: 50, type: 'counter', aggregationTemporality: 'Unspecified' },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), records);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 3);
  const byName = (name) => results.find(m => m.name === name);
  assert.strictEqual(byName('requests.delta')?.aggregationTemporality, 'DELTA');
  assert.strictEqual(byName('requests.cumulative')?.aggregationTemporality, 'CUMULATIVE');
  assert.strictEqual(byName('requests.unspecified')?.aggregationTemporality, 'UNSPECIFIED');
});
2250
it('should return undefined aggregationTemporality when not provided', async () => {
  const today = getTestDate();
  // A gauge record that omits the aggregationTemporality field entirely.
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), [
    { timestamp: '2026-01-28T10:00:00Z', name: 'gauge.metric', value: 42, type: 'gauge' },
  ]);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].aggregationTemporality, undefined);
});
2266
it('should handle invalid aggregationTemporality string values', async () => {
  const today = getTestDate();
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), [
    { timestamp: '2026-01-28T10:00:00Z', name: 'metric.invalid', value: 100, type: 'counter', aggregationTemporality: 'invalid_value' },
  ]);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  // Unrecognized strings fall back to UNSPECIFIED rather than being dropped.
  assert.strictEqual(results[0].aggregationTemporality, 'UNSPECIFIED');
});
2283
it('should handle unknown numeric aggregationTemporality values', async () => {
  const today = getTestDate();
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), [
    { timestamp: '2026-01-28T10:00:00Z', name: 'metric.unknown', value: 100, type: 'counter', aggregationTemporality: 99 },
  ]);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  // Numbers outside the known enum (0/1/2) normalize to UNSPECIFIED.
  assert.strictEqual(results[0].aggregationTemporality, 'UNSPECIFIED');
});
2300
it('should read and normalize metrics with exemplars', async () => {
  const today = getTestDate();
  // A histogram carrying two exemplars: one high-latency sample tagged
  // with an error status, one normal sample without attributes.
  const mockMetrics = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'http.request.duration',
      value: 150,
      type: 'histogram',
      unit: 'ms',
      histogram: {
        buckets: [
          { le: 100, count: 10 },
          { le: 500, count: 45 },
          { le: Infinity, count: 50 },
        ],
        sum: 7500,
        count: 50,
      },
      exemplars: [
        {
          timestamp: '2026-01-28T10:00:00.123Z',
          value: 450,
          traceId: 'abc123def456',
          spanId: 'span789',
          attributes: { 'http.status_code': 500 },
        },
        {
          timestamp: '2026-01-28T10:00:00.456Z',
          value: 95,
          traceId: 'xyz789abc123',
          spanId: 'span456',
        },
      ],
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].name, 'http.request.duration');
  assert.ok(results[0].exemplars, 'Exemplars should be present');
  assert.strictEqual(results[0].exemplars?.length, 2);
  // Verify first exemplar (high latency with error)
  const highLatencyExemplar = results[0].exemplars?.[0];
  assert.strictEqual(highLatencyExemplar?.value, 450);
  assert.strictEqual(highLatencyExemplar?.traceId, 'abc123def456');
  assert.strictEqual(highLatencyExemplar?.spanId, 'span789');
  assert.strictEqual(highLatencyExemplar?.attributes?.['http.status_code'], 500);
  // Verify second exemplar
  const normalExemplar = results[0].exemplars?.[1];
  assert.strictEqual(normalExemplar?.value, 95);
  assert.strictEqual(normalExemplar?.traceId, 'xyz789abc123');
});
2352
it('should normalize exemplar timestamps from [seconds, nanoseconds] format', async () => {
  const today = getTestDate();
  const withHrTime = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'api.latency',
      value: 200,
      type: 'histogram',
      exemplars: [
        {
          // hrtime-style pair instead of an ISO string
          timestamp: [1738062000, 123000000],
          value: 350,
          traceId: 'trace123',
          spanId: 'span456',
        },
      ],
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), withHrTime);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  const exemplars = results[0].exemplars;
  assert.ok(exemplars);
  assert.strictEqual(exemplars?.length, 1);
  // The [seconds, nanos] pair must come back as an ISO-8601 string.
  assert.ok(exemplars?.[0].timestamp.includes('T'));
  assert.strictEqual(exemplars?.[0].value, 350);
});
2379
it('should handle exemplars without optional fields', async () => {
  const today = getTestDate();
  // An exemplar that carries only `value` — no timestamp, traceId, or spanId.
  const mockMetrics = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'counter.metric',
      value: 100,
      type: 'counter',
      exemplars: [
        {
          value: 1, // Only required field
        },
      ],
    },
  ];
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  assert.ok(results[0].exemplars);
  assert.strictEqual(results[0].exemplars?.length, 1);
  assert.strictEqual(results[0].exemplars?.[0].value, 1);
  // Timestamp should default to metric timestamp
  assert.strictEqual(results[0].exemplars?.[0].timestamp, '2026-01-28T10:00:00Z');
  // Absent optional identifiers stay undefined rather than becoming empty strings.
  assert.strictEqual(results[0].exemplars?.[0].traceId, undefined);
  assert.strictEqual(results[0].exemplars?.[0].spanId, undefined);
});
2405
it('should handle metrics without exemplars', async () => {
  const today = getTestDate();
  // A plain counter record with no exemplars field at all.
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), [
    { timestamp: '2026-01-28T10:00:00Z', name: 'simple.counter', value: 42, type: 'counter' },
  ]);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].exemplars, undefined);
});
2421
it('should handle empty exemplars array', async () => {
  const today = getTestDate();
  writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), [
    { timestamp: '2026-01-28T10:00:00Z', name: 'empty.exemplars', value: 100, type: 'counter', exemplars: [] },
  ]);
  const results = await backend.queryMetrics({});
  assert.strictEqual(results.length, 1);
  // An empty exemplars array is normalized away rather than returned as [].
  assert.strictEqual(results[0].exemplars, undefined);
});
2438
+ });
2439
// Coverage for queryLLMEvents: JSONL ingestion, filtering by event name /
// model / provider / free-text search, pagination, and date-range handling.
describe('queryLLMEvents', () => {
  it('should read and normalize LLM events from JSONL files', async () => {
    const today = getTestDate();
    const mockEvents = [
      {
        timestamp: '2026-01-28T10:00:00.000Z',
        name: 'llm.completion',
        attributes: {
          'gen_ai.request.model': 'claude-3-opus',
          'gen_ai.system': 'anthropic',
          'gen_ai.usage.input_tokens': 100,
          'gen_ai.usage.output_tokens': 50,
          'duration_ms': 1500,
          'success': true,
        },
      },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({});
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].name, 'llm.completion');
    assert.strictEqual(results[0].attributes['gen_ai.request.model'], 'claude-3-opus');
    assert.strictEqual(results[0].attributes['gen_ai.system'], 'anthropic');
    assert.strictEqual(results[0].attributes['gen_ai.usage.input_tokens'], 100);
  });
  it('should filter events by eventName substring', async () => {
    const today = getTestDate();
    const mockEvents = [
      { timestamp: '2026-01-28T10:00:00Z', name: 'llm.completion', attributes: {} },
      { timestamp: '2026-01-28T10:01:00Z', name: 'llm.embedding', attributes: {} },
      { timestamp: '2026-01-28T10:02:00Z', name: 'tool.execution', attributes: {} },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    // Substring match: 'llm' matches the first two events, not 'tool.execution'.
    const results = await backend.queryLLMEvents({ eventName: 'llm' });
    assert.strictEqual(results.length, 2);
    assert.ok(results.every(e => e.name.includes('llm')));
  });
  it('should filter events by model', async () => {
    const today = getTestDate();
    // The model filter matches both the gen_ai.request.model attribute
    // and the plain `model` attribute.
    const mockEvents = [
      {
        timestamp: '2026-01-28T10:00:00Z',
        name: 'llm.completion',
        attributes: { 'gen_ai.request.model': 'claude-3-opus' },
      },
      {
        timestamp: '2026-01-28T10:01:00Z',
        name: 'llm.completion',
        attributes: { 'gen_ai.request.model': 'gpt-4' },
      },
      {
        timestamp: '2026-01-28T10:02:00Z',
        name: 'llm.completion',
        attributes: { model: 'claude-3-opus' }, // alternate attribute name
      },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({ model: 'claude-3-opus' });
    assert.strictEqual(results.length, 2);
  });
  it('should filter events by provider', async () => {
    const today = getTestDate();
    // Likewise, the provider filter matches gen_ai.system or `provider`.
    const mockEvents = [
      {
        timestamp: '2026-01-28T10:00:00Z',
        name: 'llm.completion',
        attributes: { 'gen_ai.system': 'anthropic' },
      },
      {
        timestamp: '2026-01-28T10:01:00Z',
        name: 'llm.completion',
        attributes: { 'gen_ai.system': 'openai' },
      },
      {
        timestamp: '2026-01-28T10:02:00Z',
        name: 'llm.completion',
        attributes: { provider: 'anthropic' }, // alternate attribute name
      },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({ provider: 'anthropic' });
    assert.strictEqual(results.length, 2);
  });
  it('should filter events by search text in attributes', async () => {
    const today = getTestDate();
    const mockEvents = [
      {
        timestamp: '2026-01-28T10:00:00Z',
        name: 'llm.completion',
        attributes: { prompt: 'Write a function to calculate fibonacci' },
      },
      {
        timestamp: '2026-01-28T10:01:00Z',
        name: 'llm.completion',
        attributes: { prompt: 'Explain quantum computing' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({ search: 'fibonacci' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes.prompt, 'Write a function to calculate fibonacci');
  });
  it('should filter events by search text in event name', async () => {
    const today = getTestDate();
    const mockEvents = [
      { timestamp: '2026-01-28T10:00:00Z', name: 'llm.completion.streaming', attributes: {} },
      { timestamp: '2026-01-28T10:01:00Z', name: 'llm.completion', attributes: {} },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({ search: 'streaming' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].name, 'llm.completion.streaming');
  });
  it('should apply limit and offset to LLM event results', async () => {
    const today = getTestDate();
    // 100 events with strictly increasing timestamps so ordering is stable.
    const mockEvents = Array.from({ length: 100 }, (_, i) => ({
      timestamp: new Date(Date.now() + i * 1000).toISOString(),
      name: `event-${i}`,
      attributes: { index: i },
    }));
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({ limit: 20, offset: 50 });
    assert.strictEqual(results.length, 20);
    assert.strictEqual(results[0].name, 'event-50');
  });
  it('should filter events by date range', async () => {
    // Create files for multiple dates
    writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-26.jsonl'), [
      { timestamp: '2026-01-26T10:00:00Z', name: 'event-26', attributes: {} },
    ]);
    writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-27.jsonl'), [
      { timestamp: '2026-01-27T10:00:00Z', name: 'event-27', attributes: {} },
    ]);
    writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-28.jsonl'), [
      { timestamp: '2026-01-28T10:00:00Z', name: 'event-28', attributes: {} },
    ]);
    // A single-day range (start == end) selects exactly that day's file.
    const results = await backend.queryLLMEvents({
      startDate: '2026-01-27',
      endDate: '2026-01-27',
    });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].name, 'event-27');
  });
  it('should skip events with missing required fields', async () => {
    const today = getTestDate();
    const mockEvents = [
      { timestamp: '2026-01-28T10:00:00Z', name: 'valid-event', attributes: {} },
      { timestamp: '2026-01-28T10:01:00Z', attributes: {} }, // missing name
      { name: 'no-timestamp', attributes: {} }, // missing timestamp
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    const results = await backend.queryLLMEvents({});
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].name, 'valid-event');
  });
  it('should return empty array when no LLM event files found', async () => {
    const results = await backend.queryLLMEvents({});
    assert.strictEqual(results.length, 0);
  });
  it('should combine multiple filters', async () => {
    const today = getTestDate();
    const mockEvents = [
      {
        timestamp: '2026-01-28T10:00:00Z',
        name: 'llm.completion',
        attributes: { 'gen_ai.request.model': 'claude-3-opus', 'gen_ai.system': 'anthropic' },
      },
      {
        timestamp: '2026-01-28T10:01:00Z',
        name: 'llm.completion',
        attributes: { 'gen_ai.request.model': 'gpt-4', 'gen_ai.system': 'openai' },
      },
      {
        timestamp: '2026-01-28T10:02:00Z',
        name: 'llm.embedding',
        attributes: { 'gen_ai.request.model': 'claude-3-opus', 'gen_ai.system': 'anthropic' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
    // All three filters must hold simultaneously (AND semantics).
    const results = await backend.queryLLMEvents({
      eventName: 'completion',
      model: 'claude-3-opus',
      provider: 'anthropic',
    });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].name, 'llm.completion');
  });
});
2627
// healthCheck reports an error for a missing telemetry directory and an
// 'ok' status (with a descriptive message) otherwise.
describe('healthCheck', () => {
  it('should return error when telemetry directory does not exist', async () => {
    const missingDirBackend = new LocalJsonlBackend('/nonexistent/telemetry');
    const health = await missingDirBackend.healthCheck();
    assert.strictEqual(health.status, 'error');
    assert.match(health.message || '', /not found/);
  });
  it('should return ok when directory exists with no files', async () => {
    // tempDir exists but contains nothing yet.
    const health = await backend.healthCheck();
    assert.strictEqual(health.status, 'ok');
    assert.match(health.message || '', /No telemetry files/);
  });
  it('should return ok with found files message', async () => {
    const today = getTestDate();
    // Empty trace and log files are enough to be reported as present.
    writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), []);
    writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), []);
    const health = await backend.healthCheck();
    assert.strictEqual(health.status, 'ok');
    assert.match(health.message || '', /traces.*logs/);
  });
  it('should include llm-events in health check message', async () => {
    const today = getTestDate();
    // All three telemetry file types present.
    writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), []);
    writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), []);
    writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), []);
    const health = await backend.healthCheck();
    assert.strictEqual(health.status, 'ok');
    assert.match(health.message || '', /llm-events/);
  });
});
2660
describe('date range filtering', () => {
  it('should filter files by startDate and endDate', async () => {
    // One trace file per calendar day; spans differ only by trace id.
    for (const day of ['26', '27', '28', '29']) {
      writeJsonlFile(path.join(tempDir, `traces-2026-01-${day}.jsonl`), [
        { traceId: `trace-${day}`, spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
      ]);
    }
    const results = await backend.queryTraces({ startDate: '2026-01-27', endDate: '2026-01-28' });
    assert.strictEqual(results.length, 2);
    const ids = results.map(r => r.traceId);
    // Inclusive on both ends; days outside the range are excluded.
    assert.ok(ids.includes('trace-27'));
    assert.ok(ids.includes('trace-28'));
    assert.ok(!ids.includes('trace-26'));
    assert.ok(!ids.includes('trace-29'));
  });
  it('should use today as default when no date range specified', async () => {
    const today = getTestDate();
    const yesterday = new Date(Date.now() - 86400000).toISOString().split('T')[0];
    writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), [
      { traceId: 'today-trace', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
    ]);
    writeJsonlFile(path.join(tempDir, `traces-${yesterday}.jsonl`), [
      { traceId: 'yesterday-trace', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
    ]);
    // With no explicit range, only today's file should be read.
    const results = await backend.queryTraces({});
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].traceId, 'today-trace');
  });
});
2699
+ describe('error handling', () => {
2700
+ it('should handle JSONL parsing errors gracefully', async () => {
2701
+ const today = getTestDate();
2702
+ const filePath = path.join(tempDir, `traces-${today}.jsonl`);
2703
+ // Write malformed JSON
2704
+ fs.writeFileSync(filePath, 'not valid json\n{"traceId":"t1","spanId":"s1","name":"op"}\n', 'utf-8');
2705
+ const results = await backend.queryTraces({});
2706
+ // Should skip the malformed line and parse the valid one
2707
+ assert.strictEqual(results.length, 1);
2708
+ assert.strictEqual(results[0].traceId, 't1');
2709
+ });
2710
+ it('should skip spans with invalid time calculations', async () => {
2711
+ const today = getTestDate();
2712
+ const mockSpans = [
2713
+ {
2714
+ traceId: 'trace1',
2715
+ spanId: 'span1',
2716
+ name: 'op1',
2717
+ startTime: [1700000000, 0],
2718
+ endTime: [1700000000, 0],
2719
+ },
2720
+ ];
2721
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
2722
+ const results = await backend.queryTraces({});
2723
+ assert.strictEqual(results.length, 1);
2724
+ assert.strictEqual(results[0].durationMs, 0);
2725
+ });
2726
+ });
2727
+ });
2728
+ describe('insertSortedBounded helper', () => {
2729
+ // Test the insertSortedBounded function by observing its behavior
2730
+ // through MultiDirectoryBackend's query methods
2731
+ it('should maintain sorted order by timestamp in traces', async () => {
2732
+ const projectDir = createTempDir();
2733
+ try {
2734
+ const today = getTestDate();
2735
+ const localTelemetry = path.join(projectDir, 'telemetry');
2736
+ fs.mkdirSync(localTelemetry, { recursive: true });
2737
+ // Create traces with various timestamps
2738
+ writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), [
2739
+ { traceId: 'sorted-test-1', spanId: 's1', name: 'SortedTestOp', startTime: [1700000005, 0] },
2740
+ { traceId: 'sorted-test-2', spanId: 's2', name: 'SortedTestOp', startTime: [1700000001, 0] },
2741
+ { traceId: 'sorted-test-3', spanId: 's3', name: 'SortedTestOp', startTime: [1700000009, 0] },
2742
+ { traceId: 'sorted-test-4', spanId: 's4', name: 'SortedTestOp', startTime: [1700000003, 0] },
2743
+ { traceId: 'sorted-test-5', spanId: 's5', name: 'SortedTestOp', startTime: [1700000007, 0] },
2744
+ ]);
2745
+ const backend = new MultiDirectoryBackend(projectDir);
2746
+ const results = await backend.queryTraces({ spanName: 'SortedTestOp' });
2747
+ // Should be sorted by timestamp descending (most recent first)
2748
+ assert.strictEqual(results.length, 5);
2749
+ assert.strictEqual(results[0].traceId, 'sorted-test-3'); // 1700000009
2750
+ assert.strictEqual(results[1].traceId, 'sorted-test-5'); // 1700000007
2751
+ assert.strictEqual(results[2].traceId, 'sorted-test-1'); // 1700000005
2752
+ assert.strictEqual(results[3].traceId, 'sorted-test-4'); // 1700000003
2753
+ assert.strictEqual(results[4].traceId, 'sorted-test-2'); // 1700000001
2754
+ }
2755
+ finally {
2756
+ removeTempDir(projectDir);
2757
+ }
2758
+ });
2759
+ it('should limit results to maxSize efficiently', async () => {
2760
+ const projectDir = createTempDir();
2761
+ try {
2762
+ const today = getTestDate();
2763
+ const localTelemetry = path.join(projectDir, 'telemetry');
2764
+ fs.mkdirSync(localTelemetry, { recursive: true });
2765
+ // Create 20 traces
2766
+ writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), Array.from({ length: 20 }, (_, i) => ({
2767
+ traceId: `bounded-test-${i}`,
2768
+ spanId: `s${i}`,
2769
+ name: 'BoundedTestOp',
2770
+ startTime: [1700000000 + i, 0],
2771
+ })));
2772
+ const backend = new MultiDirectoryBackend(projectDir);
2773
+ const results = await backend.queryTraces({ spanName: 'BoundedTestOp', limit: 5 });
2774
+ // Should return only 5 results
2775
+ assert.strictEqual(results.length, 5);
2776
+ // Results should be sorted by timestamp descending
2777
+ // LocalJsonlBackend returns first 5 found (0-4), then sorted
2778
+ assert.strictEqual(results[0].traceId, 'bounded-test-4'); // highest of 0-4
2779
+ assert.strictEqual(results[4].traceId, 'bounded-test-0'); // lowest of 0-4
2780
+ }
2781
+ finally {
2782
+ removeTempDir(projectDir);
2783
+ }
2784
+ });
2785
+ it('should maintain sorted order in logs', async () => {
2786
+ const projectDir = createTempDir();
2787
+ try {
2788
+ const today = getTestDate();
2789
+ const localTelemetry = path.join(projectDir, 'telemetry');
2790
+ fs.mkdirSync(localTelemetry, { recursive: true });
2791
+ writeJsonlFile(path.join(localTelemetry, `logs-${today}.jsonl`), [
2792
+ { timestamp: `${today}T10:00:00Z`, body: 'SortedLogTest_A' },
2793
+ { timestamp: `${today}T12:00:00Z`, body: 'SortedLogTest_B' },
2794
+ { timestamp: `${today}T08:00:00Z`, body: 'SortedLogTest_C' },
2795
+ { timestamp: `${today}T14:00:00Z`, body: 'SortedLogTest_D' },
2796
+ ]);
2797
+ const backend = new MultiDirectoryBackend(projectDir);
2798
+ const results = await backend.queryLogs({ search: 'SortedLogTest' });
2799
+ // Should be sorted by timestamp descending
2800
+ assert.strictEqual(results.length, 4);
2801
+ assert.strictEqual(results[0].body, 'SortedLogTest_D'); // 14:00
2802
+ assert.strictEqual(results[1].body, 'SortedLogTest_B'); // 12:00
2803
+ assert.strictEqual(results[2].body, 'SortedLogTest_A'); // 10:00
2804
+ assert.strictEqual(results[3].body, 'SortedLogTest_C'); // 08:00
2805
+ }
2806
+ finally {
2807
+ removeTempDir(projectDir);
2808
+ }
2809
+ });
2810
+ it('should maintain sorted order in LLM events', async () => {
2811
+ const projectDir = createTempDir();
2812
+ try {
2813
+ const today = getTestDate();
2814
+ const localTelemetry = path.join(projectDir, 'telemetry');
2815
+ fs.mkdirSync(localTelemetry, { recursive: true });
2816
+ writeJsonlFile(path.join(localTelemetry, `llm-events-${today}.jsonl`), [
2817
+ { timestamp: `${today}T09:00:00Z`, name: 'SortedLLMTest.A', attributes: {} },
2818
+ { timestamp: `${today}T15:00:00Z`, name: 'SortedLLMTest.B', attributes: {} },
2819
+ { timestamp: `${today}T11:00:00Z`, name: 'SortedLLMTest.C', attributes: {} },
2820
+ ]);
2821
+ const backend = new MultiDirectoryBackend(projectDir);
2822
+ const results = await backend.queryLLMEvents({ eventName: 'SortedLLMTest' });
2823
+ // Should be sorted by timestamp descending
2824
+ assert.strictEqual(results.length, 3);
2825
+ assert.strictEqual(results[0].name, 'SortedLLMTest.B'); // 15:00
2826
+ assert.strictEqual(results[1].name, 'SortedLLMTest.C'); // 11:00
2827
+ assert.strictEqual(results[2].name, 'SortedLLMTest.A'); // 09:00
2828
+ }
2829
+ finally {
2830
+ removeTempDir(projectDir);
2831
+ }
2832
+ });
2833
+ });
2834
+ describe('streaming JSONL optimization', () => {
2835
+ it('should handle large files with streaming', async () => {
2836
+ const tempDir = createTempDir();
2837
+ try {
2838
+ const today = getTestDate();
2839
+ // Create a file with many records
2840
+ const spans = Array.from({ length: 500 }, (_, i) => ({
2841
+ traceId: `stream-test-${i}`,
2842
+ spanId: `span-${i}`,
2843
+ name: 'StreamTestOp',
2844
+ startTime: [1700000000 + i, 0],
2845
+ }));
2846
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), spans);
2847
+ const backend = new LocalJsonlBackend(tempDir);
2848
+ const results = await backend.queryTraces({ spanName: 'StreamTestOp', limit: 50 });
2849
+ // Should return limited results without loading all into memory
2850
+ assert.strictEqual(results.length, 50);
2851
+ }
2852
+ finally {
2853
+ removeTempDir(tempDir);
2854
+ }
2855
+ });
2856
+ it('should terminate early when limit is reached', async () => {
2857
+ const tempDir = createTempDir();
2858
+ try {
2859
+ const today = getTestDate();
2860
+ // Create file with many records
2861
+ const logs = Array.from({ length: 1000 }, (_, i) => ({
2862
+ timestamp: new Date(Date.now() - i * 1000).toISOString(),
2863
+ body: `StreamingLog_${i}`,
2864
+ severity: 'INFO',
2865
+ }));
2866
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), logs);
2867
+ const backend = new LocalJsonlBackend(tempDir);
2868
+ const start = Date.now();
2869
+ const results = await backend.queryLogs({ search: 'StreamingLog', limit: 10 });
2870
+ const elapsed = Date.now() - start;
2871
+ // Should return quickly with early termination
2872
+ assert.strictEqual(results.length, 10);
2873
+ // Processing should be fast due to early termination
2874
+ assert.ok(elapsed < 1000, `Query took too long: ${elapsed}ms`);
2875
+ }
2876
+ finally {
2877
+ removeTempDir(tempDir);
2878
+ }
2879
+ });
2880
+ });
2881
+ describe('QueryCache', () => {
2882
+ let tempDir;
2883
+ let backend;
2884
+ beforeEach(() => {
2885
+ tempDir = createTempDir();
2886
+ backend = new LocalJsonlBackend(tempDir);
2887
+ });
2888
+ afterEach(() => {
2889
+ removeTempDir(tempDir);
2890
+ });
2891
+ describe('caching behavior', () => {
2892
+ it('should return cached results on second query with same options', async () => {
2893
+ const today = getTestDate();
2894
+ const mockSpans = [
2895
+ { traceId: 'cache-test-1', spanId: 'span1', name: 'test-op', startTime: [1700000000, 0] },
2896
+ ];
2897
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
2898
+ // First query - should read from file
2899
+ const result1 = await backend.queryTraces({ spanName: 'test-op' });
2900
+ assert.strictEqual(result1.length, 1);
2901
+ // Modify the file - but cache should return old result
2902
+ const newSpans = [
2903
+ { traceId: 'cache-test-1', spanId: 'span1', name: 'test-op', startTime: [1700000000, 0] },
2904
+ { traceId: 'cache-test-2', spanId: 'span2', name: 'test-op', startTime: [1700000001, 0] },
2905
+ ];
2906
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), newSpans);
2907
+ // Second query with same options - should return cached result
2908
+ const result2 = await backend.queryTraces({ spanName: 'test-op' });
2909
+ assert.strictEqual(result2.length, 1, 'Should return cached result, not new file contents');
2910
+ });
2911
+ it('should return fresh results when query options differ', async () => {
2912
+ const today = getTestDate();
2913
+ const mockSpans = [
2914
+ { traceId: 'cache-test-1', spanId: 'span1', name: 'alpha-op', startTime: [1700000000, 0] },
2915
+ { traceId: 'cache-test-2', spanId: 'span2', name: 'beta-op', startTime: [1700000000, 0] },
2916
+ ];
2917
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
2918
+ const result1 = await backend.queryTraces({ spanName: 'alpha' });
2919
+ assert.strictEqual(result1.length, 1);
2920
+ const result2 = await backend.queryTraces({ spanName: 'beta' });
2921
+ assert.strictEqual(result2.length, 1);
2922
+ assert.strictEqual(result2[0].name, 'beta-op');
2923
+ });
2924
+ it('should clear cache when clearCache is called', async () => {
2925
+ const today = getTestDate();
2926
+ const mockSpans = [
2927
+ { traceId: 'cache-test-1', spanId: 'span1', name: 'clear-test', startTime: [1700000000, 0] },
2928
+ ];
2929
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
2930
+ // First query
2931
+ const result1 = await backend.queryTraces({ spanName: 'clear-test' });
2932
+ assert.strictEqual(result1.length, 1);
2933
+ // Modify the file
2934
+ const newSpans = [
2935
+ { traceId: 'cache-test-1', spanId: 'span1', name: 'clear-test', startTime: [1700000000, 0] },
2936
+ { traceId: 'cache-test-2', spanId: 'span2', name: 'clear-test', startTime: [1700000001, 0] },
2937
+ ];
2938
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), newSpans);
2939
+ // Clear cache
2940
+ backend.clearCache();
2941
+ // Query again - should read fresh data
2942
+ const result2 = await backend.queryTraces({ spanName: 'clear-test' });
2943
+ assert.strictEqual(result2.length, 2, 'Should return fresh results after cache clear');
2944
+ });
2945
+ it('should cache logs query results', async () => {
2946
+ const today = getTestDate();
2947
+ const mockLogs = [
2948
+ { timestamp: `${today}T10:00:00Z`, body: 'Test log message', severity: 'INFO' },
2949
+ ];
2950
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
2951
+ const result1 = await backend.queryLogs({ search: 'Test' });
2952
+ assert.strictEqual(result1.length, 1);
2953
+ // Add another log to file
2954
+ const newLogs = [
2955
+ { timestamp: `${today}T10:00:00Z`, body: 'Test log message', severity: 'INFO' },
2956
+ { timestamp: `${today}T11:00:00Z`, body: 'Another Test log', severity: 'INFO' },
2957
+ ];
2958
+ writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), newLogs);
2959
+ // Should return cached result
2960
+ const result2 = await backend.queryLogs({ search: 'Test' });
2961
+ assert.strictEqual(result2.length, 1, 'Should return cached logs result');
2962
+ });
2963
+ it('should cache metrics query results', async () => {
2964
+ const today = getTestDate();
2965
+ const mockMetrics = [
2966
+ { timestamp: `${today}T10:00:00Z`, name: 'test.metric', value: 100, type: 'gauge' },
2967
+ ];
2968
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
2969
+ const result1 = await backend.queryMetrics({ metricName: 'test.metric' });
2970
+ assert.strictEqual(result1.length, 1);
2971
+ // Add another metric to file
2972
+ const newMetrics = [
2973
+ { timestamp: `${today}T10:00:00Z`, name: 'test.metric', value: 100, type: 'gauge' },
2974
+ { timestamp: `${today}T11:00:00Z`, name: 'test.metric', value: 200, type: 'gauge' },
2975
+ ];
2976
+ writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), newMetrics);
2977
+ // Should return cached result
2978
+ const result2 = await backend.queryMetrics({ metricName: 'test.metric' });
2979
+ assert.strictEqual(result2.length, 1, 'Should return cached metrics result');
2980
+ });
2981
+ it('should cache LLM events query results', async () => {
2982
+ const today = getTestDate();
2983
+ const mockEvents = [
2984
+ { timestamp: `${today}T10:00:00Z`, name: 'llm.call', attributes: { model: 'claude' } },
2985
+ ];
2986
+ writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
2987
+ const result1 = await backend.queryLLMEvents({ eventName: 'llm.call' });
2988
+ assert.strictEqual(result1.length, 1);
2989
+ // Add another event to file
2990
+ const newEvents = [
2991
+ { timestamp: `${today}T10:00:00Z`, name: 'llm.call', attributes: { model: 'claude' } },
2992
+ { timestamp: `${today}T11:00:00Z`, name: 'llm.call', attributes: { model: 'gpt' } },
2993
+ ];
2994
+ writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), newEvents);
2995
+ // Should return cached result
2996
+ const result2 = await backend.queryLLMEvents({ eventName: 'llm.call' });
2997
+ assert.strictEqual(result2.length, 1, 'Should return cached LLM events result');
2998
+ });
2999
+ });
3000
+ describe('indexed queries', () => {
3001
+ it('should use index for trace queries when available', async () => {
3002
+ const today = getTestDate();
3003
+ const mockSpans = [
3004
+ { traceId: 'trace1', spanId: 'span1', name: 'user-create', startTime: [1700000000, 0], resource: { serviceName: 'service-a' } },
3005
+ { traceId: 'trace2', spanId: 'span2', name: 'db-query', startTime: [1700000001, 0], resource: { serviceName: 'service-b' } },
3006
+ { traceId: 'trace1', spanId: 'span3', name: 'user-update', startTime: [1700000002, 0], resource: { serviceName: 'service-a' } },
3007
+ ];
3008
+ const filePath = path.join(tempDir, `traces-${today}.jsonl`);
3009
+ writeJsonlFile(filePath, mockSpans);
3010
+ // Build index for the file
3011
+ await buildAndWriteIndex(filePath, 'traces');
3012
+ // Query should use the index
3013
+ const results = await backend.queryTraces({ traceId: 'trace1' });
3014
+ assert.strictEqual(results.length, 2);
3015
+ assert.ok(results.every(s => s.traceId === 'trace1'));
3016
+ });
3017
+ it('should fall back to full scan when no index exists', async () => {
3018
+ const today = getTestDate();
3019
+ const mockSpans = [
3020
+ { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
3021
+ { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
3022
+ ];
3023
+ writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
3024
+ // No index built - should still work via full scan
3025
+ const results = await backend.queryTraces({ traceId: 'trace1' });
3026
+ assert.strictEqual(results.length, 1);
3027
+ assert.strictEqual(results[0].traceId, 'trace1');
3028
+ });
3029
+ it('should use index for log queries when available', async () => {
3030
+ const today = getTestDate();
3031
+ const mockLogs = [
3032
+ { timestamp: `${today}T10:00:00Z`, severityText: 'ERROR', body: 'Error occurred', traceId: 'trace1' },
3033
+ { timestamp: `${today}T10:01:00Z`, severityText: 'INFO', body: 'Info message', traceId: 'trace2' },
3034
+ { timestamp: `${today}T10:02:00Z`, severityText: 'ERROR', body: 'Another error', traceId: 'trace3' },
3035
+ ];
3036
+ const filePath = path.join(tempDir, `logs-${today}.jsonl`);
3037
+ writeJsonlFile(filePath, mockLogs);
3038
+ // Build index for the file
3039
+ await buildAndWriteIndex(filePath, 'logs');
3040
+ // Query should use the index
3041
+ const results = await backend.queryLogs({ severity: 'ERROR' });
3042
+ assert.strictEqual(results.length, 2);
3043
+ assert.ok(results.every(l => l.severity === 'ERROR'));
3044
+ });
3045
+ it('should use index for metric queries when available', async () => {
3046
+ const today = getTestDate();
3047
+ const mockMetrics = [
3048
+ { timestamp: `${today}T10:00:00Z`, name: 'http.request.duration', value: 100 },
3049
+ { timestamp: `${today}T10:01:00Z`, name: 'db.query.count', value: 50 },
3050
+ { timestamp: `${today}T10:02:00Z`, name: 'http.request.size', value: 1024 },
3051
+ ];
3052
+ const filePath = path.join(tempDir, `metrics-${today}.jsonl`);
3053
+ writeJsonlFile(filePath, mockMetrics);
3054
+ // Build index for the file
3055
+ await buildAndWriteIndex(filePath, 'metrics');
3056
+ // Query should use the index
3057
+ const results = await backend.queryMetrics({ metricName: 'http' });
3058
+ assert.strictEqual(results.length, 2);
3059
+ assert.ok(results.every(m => m.name.includes('http')));
3060
+ });
3061
+ it('should apply non-indexable filters after index lookup', async () => {
3062
+ const today = getTestDate();
3063
+ const mockSpans = [
3064
+ {
3065
+ traceId: 'trace1',
3066
+ spanId: 'span1',
3067
+ name: 'user-create',
3068
+ startTime: [1700000000, 0],
3069
+ endTime: [1700000000, 500000000], // 500ms duration
3070
+ },
3071
+ {
3072
+ traceId: 'trace1',
3073
+ spanId: 'span2',
3074
+ name: 'user-update',
3075
+ startTime: [1700000000, 0],
3076
+ endTime: [1700000002, 0], // 2s duration
3077
+ },
3078
+ ];
3079
+ const filePath = path.join(tempDir, `traces-${today}.jsonl`);
3080
+ writeJsonlFile(filePath, mockSpans);
3081
+ // Build index
3082
+ await buildAndWriteIndex(filePath, 'traces');
3083
+ // Query with both indexable (traceId) and non-indexable (minDurationMs) filters
3084
+ const results = await backend.queryTraces({ traceId: 'trace1', minDurationMs: 1000 });
3085
+ assert.strictEqual(results.length, 1);
3086
+ assert.strictEqual(results[0].name, 'user-update');
3087
+ });
3088
+ it('should work correctly when index is stale', async () => {
3089
+ const today = getTestDate();
3090
+ const filePath = path.join(tempDir, `traces-${today}.jsonl`);
3091
+ // Create initial file and index
3092
+ const initialSpans = [
3093
+ { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
3094
+ ];
3095
+ writeJsonlFile(filePath, initialSpans);
3096
+ await buildAndWriteIndex(filePath, 'traces');
3097
+ // Modify the file to make index stale
3098
+ await new Promise(resolve => setTimeout(resolve, 10));
3099
+ const updatedSpans = [
3100
+ { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
3101
+ { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
3102
+ ];
3103
+ writeJsonlFile(filePath, updatedSpans);
3104
+ // Query should fall back to full scan and find both spans
3105
+ const results = await backend.queryTraces({});
3106
+ assert.strictEqual(results.length, 2);
3107
+ });
3108
+ it('should respect useIndexes=false constructor option', async () => {
3109
+ const today = getTestDate();
3110
+ const mockSpans = [
3111
+ { traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
3112
+ { traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
3113
+ ];
3114
+ const filePath = path.join(tempDir, `traces-${today}.jsonl`);
3115
+ writeJsonlFile(filePath, mockSpans);
3116
+ await buildAndWriteIndex(filePath, 'traces');
3117
+ // Create backend with indexes disabled
3118
+ const noIndexBackend = new LocalJsonlBackend(tempDir, false);
3119
+ // Should still work via full scan
3120
+ const results = await noIndexBackend.queryTraces({ traceId: 'trace1' });
3121
+ assert.strictEqual(results.length, 1);
3122
+ assert.strictEqual(results[0].traceId, 'trace1');
3123
+ });
3124
+ });
3125
+ });
3126
+ describe('MultiDirectoryBackend', () => {
3127
+ let projectDir;
3128
+ beforeEach(() => {
3129
+ // Create a project directory that will have local telemetry subdirectories
3130
+ projectDir = createTempDir();
3131
+ });
3132
+ afterEach(() => {
3133
+ removeTempDir(projectDir);
3134
+ });
3135
+ describe('constructor and getDirectories', () => {
3136
+ it('should return directories when local telemetry dirs exist', () => {
3137
+ // Create local telemetry directory
3138
+ const localTelemetry = path.join(projectDir, 'telemetry');
3139
+ fs.mkdirSync(localTelemetry, { recursive: true });
3140
+ const backend = new MultiDirectoryBackend(projectDir);
3141
+ const dirs = backend.getDirectories();
3142
+ assert.ok(Array.isArray(dirs));
3143
+ // Should include the local telemetry directory
3144
+ const localDir = dirs.find(d => d.source === 'local' && d.path === localTelemetry);
3145
+ assert.ok(localDir);
3146
+ });
3147
+ it('should have name property set to multi-directory', () => {
3148
+ const backend = new MultiDirectoryBackend(projectDir);
3149
+ assert.strictEqual(backend.name, 'multi-directory');
3150
+ });
3151
+ it('should detect .telemetry local directory', () => {
3152
+ const localTelemetry = path.join(projectDir, '.telemetry');
3153
+ fs.mkdirSync(localTelemetry, { recursive: true });
3154
+ const backend = new MultiDirectoryBackend(projectDir);
3155
+ const dirs = backend.getDirectories();
3156
+ const localDir = dirs.find(d => d.path === localTelemetry);
3157
+ assert.ok(localDir);
3158
+ assert.strictEqual(localDir?.source, 'local');
3159
+ });
3160
+ it('should detect .claude/telemetry local directory', () => {
3161
+ const localTelemetry = path.join(projectDir, '.claude', 'telemetry');
3162
+ fs.mkdirSync(localTelemetry, { recursive: true });
3163
+ const backend = new MultiDirectoryBackend(projectDir);
3164
+ const dirs = backend.getDirectories();
3165
+ const localDir = dirs.find(d => d.path === localTelemetry);
3166
+ assert.ok(localDir);
3167
+ assert.strictEqual(localDir?.source, 'local');
3168
+ });
3169
+ });
3170
+ describe('queryTraces', () => {
3171
+ it('should query traces from local telemetry directory', async () => {
3172
+ const today = getTestDate();
3173
+ const localTelemetry = path.join(projectDir, 'telemetry');
3174
+ fs.mkdirSync(localTelemetry, { recursive: true });
3175
+ // Create traces in local directory with unique IDs
3176
+ writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), [
3177
+ { traceId: 'multidir-unique-test-abc123-1', spanId: 'span1', name: 'MultiDirUniqueOp_XYZ', startTime: [1700000000, 0] },
3178
+ { traceId: 'multidir-unique-test-abc123-2', spanId: 'span2', name: 'MultiDirUniqueOp_XYZ', startTime: [1700000002, 0] },
3179
+ ]);
3180
+ const backend = new MultiDirectoryBackend(projectDir);
3181
+ // Use spanName filter to find our specific test traces
3182
+ const results = await backend.queryTraces({ spanName: 'MultiDirUniqueOp_XYZ' });
3183
+ assert.strictEqual(results.length, 2);
3184
+ assert.ok(results.some(t => t.traceId === 'multidir-unique-test-abc123-1'), 'Should find test trace 1');
3185
+ assert.ok(results.some(t => t.traceId === 'multidir-unique-test-abc123-2'), 'Should find test trace 2');
3186
+ });
3187
+ it('should merge and sort traces by timestamp', async () => {
3188
+ const today = getTestDate();
3189
+ // Create two local telemetry directories
3190
+ const localTelemetry1 = path.join(projectDir, 'telemetry');
3191
+ const localTelemetry2 = path.join(projectDir, '.telemetry');
3192
+ fs.mkdirSync(localTelemetry1, { recursive: true });
3193
+ fs.mkdirSync(localTelemetry2, { recursive: true });
3194
+ // Create traces with different timestamps and unique operation name
3195
+ writeJsonlFile(path.join(localTelemetry1, `traces-${today}.jsonl`), [
3196
+ { traceId: 'multidir-sort-old-xyz789', spanId: 'span1', name: 'MultiDirSortOp_ABC', startTime: [1700000000, 0] },
3197
+ ]);
3198
+ writeJsonlFile(path.join(localTelemetry2, `traces-${today}.jsonl`), [
3199
+ { traceId: 'multidir-sort-new-xyz789', spanId: 'span2', name: 'MultiDirSortOp_ABC', startTime: [1700000010, 0] },
3200
+ ]);
3201
+ const backend = new MultiDirectoryBackend(projectDir);
3202
+ const results = await backend.queryTraces({ spanName: 'MultiDirSortOp_ABC' });
3203
+ assert.strictEqual(results.length, 2);
3204
+ // Newer trace should be first (sorted by startTimeUnixNano descending)
3205
+ assert.strictEqual(results[0].traceId, 'multidir-sort-new-xyz789');
3206
+ assert.strictEqual(results[1].traceId, 'multidir-sort-old-xyz789');
3207
+ });
3208
+ it('should respect limit parameter', async () => {
3209
+ const today = getTestDate();
3210
+ const localTelemetry = path.join(projectDir, 'telemetry');
3211
+ fs.mkdirSync(localTelemetry, { recursive: true });
3212
+ // Create many traces with unique prefix
3213
+ writeJsonlFile(path.join(localTelemetry, `traces-${today}.jsonl`), Array.from({ length: 150 }, (_, i) => ({
3214
+ traceId: `multidir-limit-trace-${i}`,
3215
+ spanId: `span${i}`,
3216
+ name: `MultiDirLimitOp_${i}`,
3217
+ startTime: [1700000000 + i, 0],
3218
+ })));
3219
+ const backend = new MultiDirectoryBackend(projectDir);
3220
+ const results = await backend.queryTraces({ spanName: 'MultiDirLimitOp', limit: 50 });
3221
+ assert.ok(results.length <= 50);
3222
+ });
3223
+ });
3224
+ describe('queryLogs', () => {
3225
+ it('should query logs from local telemetry directory', async () => {
3226
+ const today = getTestDate();
3227
+ const localTelemetry = path.join(projectDir, 'telemetry');
3228
+ fs.mkdirSync(localTelemetry, { recursive: true });
3229
+ writeJsonlFile(path.join(localTelemetry, `logs-${today}.jsonl`), [
3230
+ { timestamp: `${today}T10:00:00Z`, body: 'MultiDirUniqueTestLog_ABC123_1', severity: 'INFO' },
3231
+ { timestamp: `${today}T11:00:00Z`, body: 'MultiDirUniqueTestLog_ABC123_2', severity: 'ERROR' },
3232
+ ]);
3233
+ const backend = new MultiDirectoryBackend(projectDir);
3234
+ // Use search filter to find our specific test logs
3235
+ const results = await backend.queryLogs({ search: 'MultiDirUniqueTestLog_ABC123' });
3236
+ assert.strictEqual(results.length, 2);
3237
+ assert.ok(results.some(l => l.body === 'MultiDirUniqueTestLog_ABC123_1'), 'Should find test log 1');
3238
+ assert.ok(results.some(l => l.body === 'MultiDirUniqueTestLog_ABC123_2'), 'Should find test log 2');
3239
+ });
3240
+ it('should sort logs by timestamp descending', async () => {
3241
+ const today = getTestDate();
3242
+ const localTelemetry = path.join(projectDir, 'telemetry');
3243
+ fs.mkdirSync(localTelemetry, { recursive: true });
3244
+ writeJsonlFile(path.join(localTelemetry, `logs-${today}.jsonl`), [
3245
+ { timestamp: `${today}T08:00:00Z`, body: 'MultiDirSortTest_Early', severity: 'INFO' },
3246
+ { timestamp: `${today}T12:00:00Z`, body: 'MultiDirSortTest_Late', severity: 'INFO' },
3247
+ ]);
3248
+ const backend = new MultiDirectoryBackend(projectDir);
3249
+ const results = await backend.queryLogs({ search: 'MultiDirSortTest' });
3250
+ assert.strictEqual(results.length, 2);
3251
+ // Later log should come first (sorted descending)
3252
+ assert.strictEqual(results[0].body, 'MultiDirSortTest_Late');
3253
+ assert.strictEqual(results[1].body, 'MultiDirSortTest_Early');
3254
+ });
3255
+ });
3256
+ describe('queryMetrics', () => {
3257
+ it('should query metrics from local telemetry directory', async () => {
3258
+ const today = getTestDate();
3259
+ const localTelemetry = path.join(projectDir, 'telemetry');
3260
+ fs.mkdirSync(localTelemetry, { recursive: true });
3261
+ writeJsonlFile(path.join(localTelemetry, `metrics-${today}.jsonl`), [
3262
+ { timestamp: `${today}T10:00:00Z`, name: 'multidir.unique.xyz789.metric1', value: 100, type: 'gauge' },
3263
+ { timestamp: `${today}T11:00:00Z`, name: 'multidir.unique.xyz789.metric2', value: 200, type: 'gauge' },
3264
+ ]);
3265
+ const backend = new MultiDirectoryBackend(projectDir);
3266
+ // Use metricName filter to find our specific test metrics
3267
+ const results = await backend.queryMetrics({ metricName: 'multidir.unique.xyz789' });
3268
+ assert.strictEqual(results.length, 2);
3269
+ assert.ok(results.some(m => m.name === 'multidir.unique.xyz789.metric1'), 'Should find test metric 1');
3270
+ assert.ok(results.some(m => m.name === 'multidir.unique.xyz789.metric2'), 'Should find test metric 2');
3271
+ });
3272
+ it('should respect limit parameter', async () => {
3273
+ const today = getTestDate();
3274
+ const localTelemetry = path.join(projectDir, 'telemetry');
3275
+ fs.mkdirSync(localTelemetry, { recursive: true });
3276
+ writeJsonlFile(path.join(localTelemetry, `metrics-${today}.jsonl`), Array.from({ length: 150 }, (_, i) => ({
3277
+ timestamp: `${today}T10:${String(Math.floor(i / 60) % 60).padStart(2, '0')}:${String(i % 60).padStart(2, '0')}Z`,
3278
+ name: `multidir.limit.abc123.metric-${i}`,
3279
+ value: i * 10,
3280
+ type: 'gauge',
3281
+ })));
3282
+ const backend = new MultiDirectoryBackend(projectDir);
3283
+ const results = await backend.queryMetrics({ metricName: 'multidir.limit.abc123', limit: 50 });
3284
+ assert.ok(results.length <= 50);
3285
+ });
3286
+ });
3287
describe('queryLLMEvents', () => {
    // Writes the given events to today's llm-events JSONL file under the
    // project-local telemetry directory, creating the directory if needed.
    const seedLlmEvents = (today, events) => {
        const telemetryDir = path.join(projectDir, 'telemetry');
        fs.mkdirSync(telemetryDir, { recursive: true });
        writeJsonlFile(path.join(telemetryDir, `llm-events-${today}.jsonl`), events);
    };
    it('should query LLM events from local telemetry directory', async () => {
        const today = getTestDate();
        seedLlmEvents(today, [
            { timestamp: `${today}T10:00:00Z`, name: 'multidir.unique.xyz456.llm.event1', attributes: { model: 'claude' } },
            { timestamp: `${today}T11:00:00Z`, name: 'multidir.unique.xyz456.llm.event2', attributes: { model: 'gpt' } },
        ]);
        const backend = new MultiDirectoryBackend(projectDir);
        // Use eventName filter to find our specific test events
        const results = await backend.queryLLMEvents({ eventName: 'multidir.unique.xyz456' });
        assert.strictEqual(results.length, 2);
        assert.ok(results.some(e => e.name === 'multidir.unique.xyz456.llm.event1'), 'Should find test event 1');
        assert.ok(results.some(e => e.name === 'multidir.unique.xyz456.llm.event2'), 'Should find test event 2');
    });
    it('should sort LLM events by timestamp descending', async () => {
        const today = getTestDate();
        seedLlmEvents(today, [
            { timestamp: `${today}T08:00:00Z`, name: 'multidir.sort.abc789.early', attributes: {} },
            { timestamp: `${today}T12:00:00Z`, name: 'multidir.sort.abc789.late', attributes: {} },
        ]);
        const backend = new MultiDirectoryBackend(projectDir);
        const results = await backend.queryLLMEvents({ eventName: 'multidir.sort.abc789' });
        assert.strictEqual(results.length, 2);
        // Newest first: the 12:00 event must precede the 08:00 event.
        assert.strictEqual(results[0].name, 'multidir.sort.abc789.late');
        assert.strictEqual(results[1].name, 'multidir.sort.abc789.early');
    });
});
3319
describe('healthCheck', () => {
    // Ensures the project-local telemetry directory exists and returns a
    // backend constructed over the project directory.
    const backendWithLocalTelemetry = () => {
        fs.mkdirSync(path.join(projectDir, 'telemetry'), { recursive: true });
        return new MultiDirectoryBackend(projectDir);
    };
    it('should return error when no directories found', async () => {
        // Create a backend with cwd that has no telemetry directories
        // AND ensure global telemetry doesn't exist for this test
        const emptyProject = createTempDir();
        try {
            const backend = new MultiDirectoryBackend(emptyProject);
            // Only test error condition if we actually have no directories
            if (backend.getDirectories().length === 0) {
                const health = await backend.healthCheck();
                assert.strictEqual(health.status, 'error');
                assert.ok(health.message?.includes('No telemetry directories'));
            }
        } finally {
            removeTempDir(emptyProject);
        }
    });
    it('should return ok when local telemetry directory exists', async () => {
        const today = getTestDate();
        const telemetryDir = path.join(projectDir, 'telemetry');
        fs.mkdirSync(telemetryDir, { recursive: true });
        // Create some telemetry files
        writeJsonlFile(path.join(telemetryDir, `traces-${today}.jsonl`), []);
        const backend = new MultiDirectoryBackend(projectDir);
        const health = await backend.healthCheck();
        assert.strictEqual(health.status, 'ok');
        assert.ok(health.directories);
        assert.ok(health.directories.length > 0);
    });
    it('should include directory statuses in health response', async () => {
        const health = await backendWithLocalTelemetry().healthCheck();
        // Every reported directory entry carries path, source, and status.
        for (const dir of health.directories ?? []) {
            assert.ok(dir.path);
            assert.ok(dir.source);
            assert.ok(dir.status);
        }
    });
    it('should report correct directory count in message', async () => {
        const health = await backendWithLocalTelemetry().healthCheck();
        // Matches both "directory" and "directories".
        assert.ok(health.message?.includes('telemetry director'));
    });
});
3370
+ });
3371
/**
 * OTLP Export Tests
 *
 * Exercises LocalJsonlBackend's OTLP JSON exporters (exportTracesOTLP,
 * exportLogsOTLP, exportMetricsOTLP) against JSONL fixtures written into a
 * per-test temp directory.
 */
describe('OTLP Export', () => {
    // Fresh temp dir and backend for each test; cleaned up in afterEach.
    let tempDir;
    let backend;
    beforeEach(() => {
        tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'otlp-export-test-'));
        backend = new LocalJsonlBackend(tempDir);
    });
    afterEach(() => {
        try {
            fs.rmSync(tempDir, { recursive: true, force: true });
        }
        catch {
            // Ignore cleanup errors
        }
    });
    describe('exportTracesOTLP', () => {
        it('should export traces in OTLP JSON format', async () => {
            const today = getTestDate();
            // Two spans of one trace; startTime/endTime use [seconds, nanos]
            // pairs (HrTime-style) — consistent across all trace fixtures here.
            const mockSpans = [
                {
                    traceId: 'trace1',
                    spanId: 'span1',
                    parentSpanId: undefined,
                    name: 'root-span',
                    kind: 1, // SERVER
                    startTime: [1700000000, 0],
                    endTime: [1700000001, 500000000],
                    status: { code: 1, message: 'OK' },
                    resource: { serviceName: 'test-service', serviceVersion: '1.0.0' },
                    attributes: { 'http.method': 'GET', 'http.status_code': 200 },
                },
                {
                    traceId: 'trace1',
                    spanId: 'span2',
                    parentSpanId: 'span1',
                    name: 'db-query',
                    kind: 2, // CLIENT
                    startTime: [1700000000, 100000000],
                    endTime: [1700000000, 500000000],
                    status: { code: 1 },
                    resource: { serviceName: 'test-service' },
                    attributes: { 'db.system': 'postgresql' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
            const otlp = await backend.exportTracesOTLP({});
            // Both spans share one service, so a single resourceSpans entry
            // with service.name set as a resource attribute.
            assert.ok(otlp.resourceSpans);
            assert.strictEqual(otlp.resourceSpans.length, 1);
            const resourceSpan = otlp.resourceSpans[0];
            assert.ok(resourceSpan.resource);
            assert.ok(resourceSpan.resource.attributes.some((a) => a.key === 'service.name' && a.value.stringValue === 'test-service'));
            assert.ok(resourceSpan.scopeSpans);
            assert.strictEqual(resourceSpan.scopeSpans.length, 1);
            const scopeSpan = resourceSpan.scopeSpans[0];
            assert.strictEqual(scopeSpan.spans.length, 2);
            // Span-level fields survive the conversion unchanged.
            const rootSpan = scopeSpan.spans.find((s) => s.name === 'root-span');
            assert.ok(rootSpan);
            assert.strictEqual(rootSpan.traceId, 'trace1');
            assert.strictEqual(rootSpan.spanId, 'span1');
            assert.strictEqual(rootSpan.kind, 1); // SERVER
            assert.strictEqual(rootSpan.status?.code, 1);
        });
        it('should group spans by service name in OTLP export', async () => {
            const today = getTestDate();
            // Same trace, two different services — expect two resourceSpans.
            const mockSpans = [
                {
                    traceId: 'trace1',
                    spanId: 'span1',
                    name: 'service-a-op',
                    startTime: [1700000000, 0],
                    resource: { serviceName: 'service-a' },
                },
                {
                    traceId: 'trace1',
                    spanId: 'span2',
                    name: 'service-b-op',
                    startTime: [1700000000, 0],
                    resource: { serviceName: 'service-b' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
            const otlp = await backend.exportTracesOTLP({});
            assert.strictEqual(otlp.resourceSpans.length, 2);
            const serviceNames = otlp.resourceSpans.map((rs) => rs.resource.attributes.find((a) => a.key === 'service.name')?.value.stringValue);
            assert.ok(serviceNames.includes('service-a'));
            assert.ok(serviceNames.includes('service-b'));
        });
    });
    describe('exportLogsOTLP', () => {
        it('should export logs in OTLP JSON format', async () => {
            const today = getTestDate();
            // One ERROR log with trace correlation, one plain INFO log.
            const mockLogs = [
                {
                    timestamp: '2024-01-15T10:00:00.000Z',
                    severityText: 'ERROR',
                    body: 'Database connection failed',
                    traceId: 'trace1',
                    spanId: 'span1',
                    resource: { serviceName: 'test-service' },
                    attributes: { 'error.type': 'ConnectionError' },
                },
                {
                    timestamp: '2024-01-15T10:00:01.000Z',
                    severityText: 'INFO',
                    body: 'Request processed',
                    resource: { serviceName: 'test-service' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `logs-${today}.jsonl`), mockLogs);
            const otlp = await backend.exportLogsOTLP({});
            assert.ok(otlp.resourceLogs);
            assert.strictEqual(otlp.resourceLogs.length, 1);
            const resourceLog = otlp.resourceLogs[0];
            assert.ok(resourceLog.resource);
            assert.ok(resourceLog.resource.attributes.some((a) => a.key === 'service.name' && a.value.stringValue === 'test-service'));
            assert.ok(resourceLog.scopeLogs);
            const scopeLog = resourceLog.scopeLogs[0];
            assert.strictEqual(scopeLog.logRecords.length, 2);
            const errorLog = scopeLog.logRecords.find((l) => l.severityText === 'ERROR');
            assert.ok(errorLog);
            // Body becomes an AnyValue ({ stringValue }); trace/span IDs and
            // the numeric severity are carried over.
            assert.ok(errorLog.body?.stringValue?.includes('Database connection failed'));
            assert.strictEqual(errorLog.traceId, 'trace1');
            assert.strictEqual(errorLog.spanId, 'span1');
            assert.strictEqual(errorLog.severityNumber, 17); // ERROR severity number
        });
    });
    describe('exportMetricsOTLP', () => {
        it('should export gauge metrics in OTLP JSON format', async () => {
            const today = getTestDate();
            const mockMetrics = [
                {
                    timestamp: '2024-01-15T10:00:00.000Z',
                    name: 'cpu.utilization',
                    value: 75.5,
                    type: 'gauge',
                    unit: 'percent',
                    resource: { serviceName: 'test-service' },
                    attributes: { 'host.name': 'server1' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
            const otlp = await backend.exportMetricsOTLP({});
            assert.ok(otlp.resourceMetrics);
            assert.strictEqual(otlp.resourceMetrics.length, 1);
            const resourceMetric = otlp.resourceMetrics[0];
            assert.ok(resourceMetric.scopeMetrics);
            const metric = resourceMetric.scopeMetrics[0].metrics[0];
            assert.strictEqual(metric.name, 'cpu.utilization');
            assert.strictEqual(metric.unit, 'percent');
            // type: 'gauge' maps to a gauge data-point set with asDouble.
            assert.ok(metric.gauge);
            assert.strictEqual(metric.gauge.dataPoints.length, 1);
            assert.strictEqual(metric.gauge.dataPoints[0].asDouble, 75.5);
        });
        it('should export counter metrics with aggregation temporality', async () => {
            const today = getTestDate();
            const mockMetrics = [
                {
                    timestamp: '2024-01-15T10:00:00.000Z',
                    name: 'http.requests',
                    value: 100,
                    type: 'counter',
                    unit: 'requests',
                    aggregationTemporality: 2, // CUMULATIVE
                    resource: { serviceName: 'test-service' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
            const otlp = await backend.exportMetricsOTLP({});
            const metric = otlp.resourceMetrics[0].scopeMetrics[0].metrics[0];
            // Counters map to a monotonic sum, preserving temporality.
            assert.ok(metric.sum);
            assert.strictEqual(metric.sum.aggregationTemporality, 2); // CUMULATIVE
            assert.strictEqual(metric.sum.isMonotonic, true);
        });
        it('should export histogram metrics in OTLP JSON format', async () => {
            const today = getTestDate();
            const mockMetrics = [
                {
                    timestamp: '2024-01-15T10:00:00.000Z',
                    name: 'http.request.duration',
                    value: 250,
                    type: 'histogram',
                    unit: 'ms',
                    histogram: {
                        // Cumulative bucket counts; final bucket is the
                        // +Inf overflow bucket.
                        buckets: [
                            { le: 10, count: 5 },
                            { le: 50, count: 15 },
                            { le: 100, count: 25 },
                            { le: 500, count: 45 },
                            { le: Infinity, count: 50 },
                        ],
                        sum: 12500,
                        count: 50,
                    },
                    aggregationTemporality: 2,
                    resource: { serviceName: 'test-service' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
            const otlp = await backend.exportMetricsOTLP({});
            const metric = otlp.resourceMetrics[0].scopeMetrics[0].metrics[0];
            assert.strictEqual(metric.name, 'http.request.duration');
            assert.ok(metric.histogram);
            assert.strictEqual(metric.histogram.aggregationTemporality, 2);
            const dataPoint = metric.histogram.dataPoints[0];
            // count is serialized as a string (64-bit field in OTLP JSON).
            assert.strictEqual(dataPoint.count, '50');
            assert.strictEqual(dataPoint.sum, 12500);
            assert.strictEqual(dataPoint.bucketCounts.length, 5);
        });
        it('should include exemplars in OTLP metric export', async () => {
            const today = getTestDate();
            const mockMetrics = [
                {
                    timestamp: '2024-01-15T10:00:00.000Z',
                    name: 'http.latency',
                    value: 150,
                    type: 'gauge',
                    unit: 'ms',
                    // Exemplar links this data point back to a specific span.
                    exemplars: [
                        {
                            timestamp: '2024-01-15T10:00:00.000Z',
                            value: 150,
                            traceId: 'trace123',
                            spanId: 'span456',
                        },
                    ],
                    resource: { serviceName: 'test-service' },
                },
            ];
            writeJsonlFile(path.join(tempDir, `metrics-${today}.jsonl`), mockMetrics);
            const otlp = await backend.exportMetricsOTLP({});
            const dataPoint = otlp.resourceMetrics[0].scopeMetrics[0].metrics[0].gauge?.dataPoints[0];
            assert.ok(dataPoint?.exemplars);
            assert.strictEqual(dataPoint.exemplars.length, 1);
            assert.strictEqual(dataPoint.exemplars[0].traceId, 'trace123');
            assert.strictEqual(dataPoint.exemplars[0].spanId, 'span456');
            assert.strictEqual(dataPoint.exemplars[0].asDouble, 150);
        });
    });
});