observability-toolkit 1.8.2 → 1.8.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -0
- package/dist/backends/index.d.ts +43 -0
- package/dist/backends/index.d.ts.map +1 -1
- package/dist/backends/index.js +41 -0
- package/dist/backends/index.js.map +1 -1
- package/dist/backends/index.test.d.ts +5 -0
- package/dist/backends/index.test.d.ts.map +1 -0
- package/dist/backends/index.test.js +156 -0
- package/dist/backends/index.test.js.map +1 -0
- package/dist/backends/local-jsonl-boolean-search.test.js +8 -5
- package/dist/backends/local-jsonl-boolean-search.test.js.map +1 -1
- package/dist/backends/local-jsonl-logs.test.d.ts +2 -0
- package/dist/backends/local-jsonl-logs.test.d.ts.map +1 -0
- package/dist/backends/local-jsonl-logs.test.js +603 -0
- package/dist/backends/local-jsonl-logs.test.js.map +1 -0
- package/dist/backends/local-jsonl-traces.test.d.ts +2 -0
- package/dist/backends/local-jsonl-traces.test.d.ts.map +1 -0
- package/dist/backends/local-jsonl-traces.test.js +1723 -0
- package/dist/backends/local-jsonl-traces.test.js.map +1 -0
- package/dist/backends/local-jsonl.test.js +290 -21
- package/dist/backends/local-jsonl.test.js.map +1 -1
- package/dist/lib/constants.d.ts +43 -0
- package/dist/lib/constants.d.ts.map +1 -1
- package/dist/lib/constants.js +154 -24
- package/dist/lib/constants.js.map +1 -1
- package/dist/lib/constants.test.js +156 -7
- package/dist/lib/constants.test.js.map +1 -1
- package/dist/lib/edge-cases.test.d.ts +11 -0
- package/dist/lib/edge-cases.test.d.ts.map +1 -0
- package/dist/lib/edge-cases.test.js +634 -0
- package/dist/lib/edge-cases.test.js.map +1 -0
- package/dist/lib/error-sanitizer.d.ts.map +1 -1
- package/dist/lib/error-sanitizer.js +33 -23
- package/dist/lib/error-sanitizer.js.map +1 -1
- package/dist/lib/error-sanitizer.test.js +27 -0
- package/dist/lib/error-sanitizer.test.js.map +1 -1
- package/dist/lib/file-utils.test.js +3 -3
- package/dist/lib/file-utils.test.js.map +1 -1
- package/dist/lib/indexer.test.js +138 -5
- package/dist/lib/indexer.test.js.map +1 -1
- package/dist/lib/input-validator.d.ts +5 -0
- package/dist/lib/input-validator.d.ts.map +1 -1
- package/dist/lib/input-validator.js +5 -0
- package/dist/lib/input-validator.js.map +1 -1
- package/dist/lib/query-sanitizer.d.ts +51 -3
- package/dist/lib/query-sanitizer.d.ts.map +1 -1
- package/dist/lib/query-sanitizer.js +105 -31
- package/dist/lib/query-sanitizer.js.map +1 -1
- package/dist/lib/query-sanitizer.test.js +102 -1
- package/dist/lib/query-sanitizer.test.js.map +1 -1
- package/dist/lib/server-utils.d.ts +80 -0
- package/dist/lib/server-utils.d.ts.map +1 -0
- package/dist/lib/server-utils.js +141 -0
- package/dist/lib/server-utils.js.map +1 -0
- package/dist/lib/shared-schemas.d.ts +59 -0
- package/dist/lib/shared-schemas.d.ts.map +1 -0
- package/dist/lib/shared-schemas.js +58 -0
- package/dist/lib/shared-schemas.js.map +1 -0
- package/dist/lib/shared-schemas.test.d.ts +5 -0
- package/dist/lib/shared-schemas.test.d.ts.map +1 -0
- package/dist/lib/shared-schemas.test.js +106 -0
- package/dist/lib/shared-schemas.test.js.map +1 -0
- package/dist/lib/toon-encoder.d.ts +21 -0
- package/dist/lib/toon-encoder.d.ts.map +1 -0
- package/dist/lib/toon-encoder.js +46 -0
- package/dist/lib/toon-encoder.js.map +1 -0
- package/dist/server.d.ts +1 -49
- package/dist/server.d.ts.map +1 -1
- package/dist/server.js +152 -162
- package/dist/server.js.map +1 -1
- package/dist/server.test.js +168 -7
- package/dist/server.test.js.map +1 -1
- package/dist/test-helpers/env-utils.d.ts +65 -0
- package/dist/test-helpers/env-utils.d.ts.map +1 -0
- package/dist/test-helpers/env-utils.js +94 -0
- package/dist/test-helpers/env-utils.js.map +1 -0
- package/dist/test-helpers/file-utils.d.ts +67 -0
- package/dist/test-helpers/file-utils.d.ts.map +1 -1
- package/dist/test-helpers/file-utils.js +165 -2
- package/dist/test-helpers/file-utils.js.map +1 -1
- package/dist/test-helpers/index.d.ts +10 -0
- package/dist/test-helpers/index.d.ts.map +1 -0
- package/dist/test-helpers/index.js +28 -0
- package/dist/test-helpers/index.js.map +1 -0
- package/dist/test-helpers/mock-backends.d.ts +113 -2
- package/dist/test-helpers/mock-backends.d.ts.map +1 -1
- package/dist/test-helpers/mock-backends.js +199 -3
- package/dist/test-helpers/mock-backends.js.map +1 -1
- package/dist/test-helpers/mock-backends.test.d.ts +5 -0
- package/dist/test-helpers/mock-backends.test.d.ts.map +1 -0
- package/dist/test-helpers/mock-backends.test.js +368 -0
- package/dist/test-helpers/mock-backends.test.js.map +1 -0
- package/dist/test-helpers/schema-validators.d.ts +32 -0
- package/dist/test-helpers/schema-validators.d.ts.map +1 -0
- package/dist/test-helpers/schema-validators.js +125 -0
- package/dist/test-helpers/schema-validators.js.map +1 -0
- package/dist/test-helpers/test-data-builders.d.ts +223 -0
- package/dist/test-helpers/test-data-builders.d.ts.map +1 -0
- package/dist/test-helpers/test-data-builders.js +288 -0
- package/dist/test-helpers/test-data-builders.js.map +1 -0
- package/dist/test-helpers/test-data-builders.test.d.ts +2 -0
- package/dist/test-helpers/test-data-builders.test.d.ts.map +1 -0
- package/dist/test-helpers/test-data-builders.test.js +306 -0
- package/dist/test-helpers/test-data-builders.test.js.map +1 -0
- package/dist/test-helpers/tool-validators.d.ts +28 -0
- package/dist/test-helpers/tool-validators.d.ts.map +1 -0
- package/dist/test-helpers/tool-validators.js +56 -0
- package/dist/test-helpers/tool-validators.js.map +1 -0
- package/dist/tools/context-stats.d.ts +1 -0
- package/dist/tools/context-stats.d.ts.map +1 -1
- package/dist/tools/context-stats.js +9 -5
- package/dist/tools/context-stats.js.map +1 -1
- package/dist/tools/context-stats.test.js +24 -10
- package/dist/tools/context-stats.test.js.map +1 -1
- package/dist/tools/get-trace-url.js +2 -2
- package/dist/tools/get-trace-url.js.map +1 -1
- package/dist/tools/health-check.js +2 -2
- package/dist/tools/health-check.js.map +1 -1
- package/dist/tools/query-evaluations.d.ts +21 -18
- package/dist/tools/query-evaluations.d.ts.map +1 -1
- package/dist/tools/query-evaluations.js +17 -17
- package/dist/tools/query-evaluations.js.map +1 -1
- package/dist/tools/query-evaluations.test.js +7 -17
- package/dist/tools/query-evaluations.test.js.map +1 -1
- package/dist/tools/query-llm-events.d.ts +19 -15
- package/dist/tools/query-llm-events.d.ts.map +1 -1
- package/dist/tools/query-llm-events.js +31 -15
- package/dist/tools/query-llm-events.js.map +1 -1
- package/dist/tools/query-llm-events.test.js +271 -9
- package/dist/tools/query-llm-events.test.js.map +1 -1
- package/dist/tools/query-logs.d.ts +22 -22
- package/dist/tools/query-logs.d.ts.map +1 -1
- package/dist/tools/query-logs.js +9 -9
- package/dist/tools/query-logs.js.map +1 -1
- package/dist/tools/query-logs.test.js +19 -72
- package/dist/tools/query-logs.test.js.map +1 -1
- package/dist/tools/query-metrics.d.ts +14 -14
- package/dist/tools/query-metrics.d.ts.map +1 -1
- package/dist/tools/query-metrics.js +9 -9
- package/dist/tools/query-metrics.js.map +1 -1
- package/dist/tools/query-metrics.test.js +12 -25
- package/dist/tools/query-metrics.test.js.map +1 -1
- package/dist/tools/query-traces.d.ts +28 -28
- package/dist/tools/query-traces.d.ts.map +1 -1
- package/dist/tools/query-traces.js +18 -18
- package/dist/tools/query-traces.js.map +1 -1
- package/dist/tools/query-traces.test.js +58 -54
- package/dist/tools/query-traces.test.js.map +1 -1
- package/dist/tools/setup-claudeignore.js +7 -7
- package/dist/tools/setup-claudeignore.js.map +1 -1
- package/dist/tools/setup-claudeignore.test.js +4 -25
- package/dist/tools/setup-claudeignore.test.js.map +1 -1
- package/package.json +3 -2
|
@@ -0,0 +1,1723 @@
|
|
|
1
|
+
import { describe, it, before, after, beforeEach } from 'node:test';
|
|
2
|
+
import * as assert from 'node:assert';
|
|
3
|
+
import * as path from 'path';
|
|
4
|
+
import { LocalJsonlBackend } from './local-jsonl.js';
|
|
5
|
+
import { getSharedTempDir, clearTempDir, removeSharedTempDir, writeJsonlFile, getTestDate } from '../test-helpers/file-utils.js';
|
|
6
|
+
describe('LocalJsonlBackend', () => {
|
|
7
|
+
let tempDir;
|
|
8
|
+
let backend;
|
|
9
|
+
before(() => {
|
|
10
|
+
tempDir = getSharedTempDir('LocalJsonlBackend-Traces');
|
|
11
|
+
});
|
|
12
|
+
beforeEach(() => {
|
|
13
|
+
clearTempDir(tempDir);
|
|
14
|
+
backend = new LocalJsonlBackend(tempDir);
|
|
15
|
+
});
|
|
16
|
+
after(() => {
|
|
17
|
+
removeSharedTempDir('LocalJsonlBackend-Traces');
|
|
18
|
+
});
|
|
19
|
+
describe('queryTraces', () => {
|
|
20
|
+
it('should read and normalize trace spans from JSONL files', async () => {
|
|
21
|
+
const today = getTestDate();
|
|
22
|
+
const mockSpans = [
|
|
23
|
+
{
|
|
24
|
+
traceId: 'trace1',
|
|
25
|
+
spanId: 'span1',
|
|
26
|
+
name: 'test-operation',
|
|
27
|
+
startTime: [1700000000, 0],
|
|
28
|
+
endTime: [1700000001, 500000000],
|
|
29
|
+
resource: { serviceName: 'test-service', serviceVersion: '1.0.0' },
|
|
30
|
+
attributes: { 'custom.attr': 'value1' },
|
|
31
|
+
},
|
|
32
|
+
];
|
|
33
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
34
|
+
const results = await backend.queryTraces({});
|
|
35
|
+
assert.strictEqual(results.length, 1);
|
|
36
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
37
|
+
assert.strictEqual(results[0].spanId, 'span1');
|
|
38
|
+
assert.strictEqual(results[0].name, 'test-operation');
|
|
39
|
+
assert.strictEqual(results[0].attributes?.['service.name'], 'test-service');
|
|
40
|
+
assert.strictEqual(results[0].attributes?.['service.version'], '1.0.0');
|
|
41
|
+
assert.strictEqual(results[0].attributes?.['custom.attr'], 'value1');
|
|
42
|
+
});
|
|
43
|
+
it('should filter spans by traceId', async () => {
|
|
44
|
+
const today = getTestDate();
|
|
45
|
+
const mockSpans = [
|
|
46
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
47
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000000, 0] },
|
|
48
|
+
{ traceId: 'trace1', spanId: 'span3', name: 'op3', startTime: [1700000000, 0] },
|
|
49
|
+
];
|
|
50
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
51
|
+
const results = await backend.queryTraces({ traceId: 'trace1' });
|
|
52
|
+
assert.strictEqual(results.length, 2);
|
|
53
|
+
assert.ok(results.every(s => s.traceId === 'trace1'));
|
|
54
|
+
});
|
|
55
|
+
it('should filter spans by spanName substring', async () => {
|
|
56
|
+
const today = getTestDate();
|
|
57
|
+
const mockSpans = [
|
|
58
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'user-create', startTime: [1700000000, 0] },
|
|
59
|
+
{ traceId: 'trace1', spanId: 'span2', name: 'user-update', startTime: [1700000000, 0] },
|
|
60
|
+
{ traceId: 'trace1', spanId: 'span3', name: 'db-query', startTime: [1700000000, 0] },
|
|
61
|
+
];
|
|
62
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
63
|
+
const results = await backend.queryTraces({ spanName: 'user' });
|
|
64
|
+
assert.strictEqual(results.length, 2);
|
|
65
|
+
assert.ok(results.every(s => s.name.includes('user')));
|
|
66
|
+
});
|
|
67
|
+
it('should filter spans by duration range', async () => {
|
|
68
|
+
const today = getTestDate();
|
|
69
|
+
const mockSpans = [
|
|
70
|
+
{
|
|
71
|
+
traceId: 'trace1',
|
|
72
|
+
spanId: 'span1',
|
|
73
|
+
name: 'fast-op',
|
|
74
|
+
startTime: [1700000000, 0],
|
|
75
|
+
endTime: [1700000000, 500000000], // 0.5s
|
|
76
|
+
},
|
|
77
|
+
{
|
|
78
|
+
traceId: 'trace1',
|
|
79
|
+
spanId: 'span2',
|
|
80
|
+
name: 'medium-op',
|
|
81
|
+
startTime: [1700000000, 0],
|
|
82
|
+
endTime: [1700000002, 0], // 2s
|
|
83
|
+
},
|
|
84
|
+
{
|
|
85
|
+
traceId: 'trace1',
|
|
86
|
+
spanId: 'span3',
|
|
87
|
+
name: 'slow-op',
|
|
88
|
+
startTime: [1700000000, 0],
|
|
89
|
+
endTime: [1700000010, 0], // 10s
|
|
90
|
+
},
|
|
91
|
+
];
|
|
92
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
93
|
+
const results = await backend.queryTraces({ minDurationMs: 1000, maxDurationMs: 5000 });
|
|
94
|
+
assert.strictEqual(results.length, 1);
|
|
95
|
+
assert.strictEqual(results[0].name, 'medium-op');
|
|
96
|
+
});
|
|
97
|
+
it('should filter spans by serviceName', async () => {
|
|
98
|
+
const today = getTestDate();
|
|
99
|
+
const mockSpans = [
|
|
100
|
+
{
|
|
101
|
+
traceId: 'trace1',
|
|
102
|
+
spanId: 'span1',
|
|
103
|
+
name: 'op1',
|
|
104
|
+
startTime: [1700000000, 0],
|
|
105
|
+
resource: { serviceName: 'service-a' },
|
|
106
|
+
},
|
|
107
|
+
{
|
|
108
|
+
traceId: 'trace1',
|
|
109
|
+
spanId: 'span2',
|
|
110
|
+
name: 'op2',
|
|
111
|
+
startTime: [1700000000, 0],
|
|
112
|
+
resource: { serviceName: 'service-b' },
|
|
113
|
+
},
|
|
114
|
+
];
|
|
115
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
116
|
+
const results = await backend.queryTraces({ serviceName: 'service-a' });
|
|
117
|
+
assert.strictEqual(results.length, 1);
|
|
118
|
+
assert.strictEqual(results[0].attributes?.['service.name'], 'service-a');
|
|
119
|
+
});
|
|
120
|
+
it('should apply limit and offset to results', async () => {
|
|
121
|
+
const today = getTestDate();
|
|
122
|
+
const mockSpans = Array.from({ length: 150 }, (_, i) => ({
|
|
123
|
+
traceId: `trace${i}`,
|
|
124
|
+
spanId: `span${i}`,
|
|
125
|
+
name: `op${i}`,
|
|
126
|
+
startTime: [1700000000, 0],
|
|
127
|
+
}));
|
|
128
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
129
|
+
const results = await backend.queryTraces({ limit: 50, offset: 25 });
|
|
130
|
+
assert.strictEqual(results.length, 50);
|
|
131
|
+
assert.strictEqual(results[0].traceId, 'trace25');
|
|
132
|
+
});
|
|
133
|
+
it('should skip invalid spans (missing required fields)', async () => {
|
|
134
|
+
const today = getTestDate();
|
|
135
|
+
const mockSpans = [
|
|
136
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
137
|
+
{ traceId: 'trace2', spanId: 'span2', startTime: [1700000000, 0] }, // missing name
|
|
138
|
+
{ spanId: 'span3', name: 'op3', startTime: [1700000000, 0] }, // missing traceId
|
|
139
|
+
{ traceId: 'trace4', name: 'op4', startTime: [1700000000, 0] }, // missing spanId
|
|
140
|
+
];
|
|
141
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
142
|
+
const results = await backend.queryTraces({});
|
|
143
|
+
assert.strictEqual(results.length, 1);
|
|
144
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
145
|
+
});
|
|
146
|
+
it('should convert duration from [seconds, nanoseconds] array', async () => {
|
|
147
|
+
const today = getTestDate();
|
|
148
|
+
const mockSpans = [
|
|
149
|
+
{
|
|
150
|
+
traceId: 'trace1',
|
|
151
|
+
spanId: 'span1',
|
|
152
|
+
name: 'op1',
|
|
153
|
+
startTime: [1700000000, 0],
|
|
154
|
+
duration: [2, 500000000], // 2.5 seconds
|
|
155
|
+
},
|
|
156
|
+
];
|
|
157
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
158
|
+
const results = await backend.queryTraces({});
|
|
159
|
+
assert.strictEqual(results.length, 1);
|
|
160
|
+
assert.strictEqual(results[0].durationMs, 2500);
|
|
161
|
+
});
|
|
162
|
+
it('should convert span kind number to string', async () => {
|
|
163
|
+
const today = getTestDate();
|
|
164
|
+
const mockSpans = [
|
|
165
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'op1', kind: 0, startTime: [1700000000, 0] },
|
|
166
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'op2', kind: 1, startTime: [1700000000, 0] },
|
|
167
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'op3', kind: 2, startTime: [1700000000, 0] },
|
|
168
|
+
];
|
|
169
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
170
|
+
const results = await backend.queryTraces({});
|
|
171
|
+
assert.strictEqual(results[0].kind, 'INTERNAL');
|
|
172
|
+
assert.strictEqual(results[1].kind, 'SERVER');
|
|
173
|
+
assert.strictEqual(results[2].kind, 'CLIENT');
|
|
174
|
+
});
|
|
175
|
+
it('should convert status code number to string', async () => {
|
|
176
|
+
const today = getTestDate();
|
|
177
|
+
const mockSpans = [
|
|
178
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0], status: { code: 0 } },
|
|
179
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000000, 0], status: { code: 1 } },
|
|
180
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'op3', startTime: [1700000000, 0], status: { code: 2, message: 'Test error' } },
|
|
181
|
+
];
|
|
182
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
183
|
+
const results = await backend.queryTraces({});
|
|
184
|
+
assert.strictEqual(results[0].statusCode, 'UNSET');
|
|
185
|
+
assert.strictEqual(results[0].status?.code, 0);
|
|
186
|
+
assert.strictEqual(results[1].statusCode, 'OK');
|
|
187
|
+
assert.strictEqual(results[1].status?.code, 1);
|
|
188
|
+
assert.strictEqual(results[2].statusCode, 'ERROR');
|
|
189
|
+
assert.strictEqual(results[2].status?.code, 2);
|
|
190
|
+
assert.strictEqual(results[2].status?.message, 'Test error');
|
|
191
|
+
});
|
|
192
|
+
it('should handle spans without status', async () => {
|
|
193
|
+
const today = getTestDate();
|
|
194
|
+
const mockSpans = [
|
|
195
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
196
|
+
];
|
|
197
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
198
|
+
const results = await backend.queryTraces({});
|
|
199
|
+
assert.strictEqual(results[0].statusCode, undefined);
|
|
200
|
+
assert.strictEqual(results[0].status, undefined);
|
|
201
|
+
});
|
|
202
|
+
it('should extract instrumentationScope from spans', async () => {
|
|
203
|
+
const today = getTestDate();
|
|
204
|
+
const mockSpans = [
|
|
205
|
+
{
|
|
206
|
+
traceId: 'trace1',
|
|
207
|
+
spanId: 'span1',
|
|
208
|
+
name: 'http-request',
|
|
209
|
+
startTime: [1700000000, 0],
|
|
210
|
+
instrumentationScope: {
|
|
211
|
+
name: '@opentelemetry/instrumentation-http',
|
|
212
|
+
version: '0.48.0',
|
|
213
|
+
schemaUrl: 'https://opentelemetry.io/schemas/1.21.0',
|
|
214
|
+
},
|
|
215
|
+
},
|
|
216
|
+
{
|
|
217
|
+
traceId: 'trace2',
|
|
218
|
+
spanId: 'span2',
|
|
219
|
+
name: 'custom-span',
|
|
220
|
+
startTime: [1700000000, 0],
|
|
221
|
+
instrumentationScope: {
|
|
222
|
+
name: 'custom-hooks',
|
|
223
|
+
},
|
|
224
|
+
},
|
|
225
|
+
{
|
|
226
|
+
traceId: 'trace3',
|
|
227
|
+
spanId: 'span3',
|
|
228
|
+
name: 'no-scope',
|
|
229
|
+
startTime: [1700000000, 0],
|
|
230
|
+
},
|
|
231
|
+
];
|
|
232
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
233
|
+
const results = await backend.queryTraces({});
|
|
234
|
+
assert.strictEqual(results.length, 3);
|
|
235
|
+
// First span: full scope
|
|
236
|
+
assert.strictEqual(results[0].instrumentationScope?.name, '@opentelemetry/instrumentation-http');
|
|
237
|
+
assert.strictEqual(results[0].instrumentationScope?.version, '0.48.0');
|
|
238
|
+
assert.strictEqual(results[0].instrumentationScope?.schemaUrl, 'https://opentelemetry.io/schemas/1.21.0');
|
|
239
|
+
// Second span: name only
|
|
240
|
+
assert.strictEqual(results[1].instrumentationScope?.name, 'custom-hooks');
|
|
241
|
+
assert.strictEqual(results[1].instrumentationScope?.version, undefined);
|
|
242
|
+
// Third span: no scope
|
|
243
|
+
assert.strictEqual(results[2].instrumentationScope, undefined);
|
|
244
|
+
});
|
|
245
|
+
it('should extract span links from spans', async () => {
|
|
246
|
+
const today = getTestDate();
|
|
247
|
+
const mockSpans = [
|
|
248
|
+
{
|
|
249
|
+
traceId: 'trace1',
|
|
250
|
+
spanId: 'span1',
|
|
251
|
+
name: 'batch-processor',
|
|
252
|
+
startTime: [1700000000, 0],
|
|
253
|
+
links: [
|
|
254
|
+
{
|
|
255
|
+
context: { traceId: 'trace-upstream-1', spanId: 'span-upstream-1' },
|
|
256
|
+
attributes: { 'link.type': 'producer' },
|
|
257
|
+
},
|
|
258
|
+
{
|
|
259
|
+
context: { traceId: 'trace-upstream-2', spanId: 'span-upstream-2' },
|
|
260
|
+
},
|
|
261
|
+
],
|
|
262
|
+
},
|
|
263
|
+
{
|
|
264
|
+
traceId: 'trace2',
|
|
265
|
+
spanId: 'span2',
|
|
266
|
+
name: 'single-link',
|
|
267
|
+
startTime: [1700000000, 0],
|
|
268
|
+
links: [
|
|
269
|
+
{
|
|
270
|
+
context: { traceId: 'trace-parent', spanId: 'span-parent' },
|
|
271
|
+
attributes: { 'link.reason': 'causal' },
|
|
272
|
+
},
|
|
273
|
+
],
|
|
274
|
+
},
|
|
275
|
+
{
|
|
276
|
+
traceId: 'trace3',
|
|
277
|
+
spanId: 'span3',
|
|
278
|
+
name: 'no-links',
|
|
279
|
+
startTime: [1700000000, 0],
|
|
280
|
+
},
|
|
281
|
+
];
|
|
282
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
283
|
+
const results = await backend.queryTraces({});
|
|
284
|
+
assert.strictEqual(results.length, 3);
|
|
285
|
+
// First span: multiple links
|
|
286
|
+
assert.strictEqual(results[0].links?.length, 2);
|
|
287
|
+
assert.strictEqual(results[0].links?.[0].traceId, 'trace-upstream-1');
|
|
288
|
+
assert.strictEqual(results[0].links?.[0].spanId, 'span-upstream-1');
|
|
289
|
+
assert.strictEqual(results[0].links?.[0].attributes?.['link.type'], 'producer');
|
|
290
|
+
assert.strictEqual(results[0].links?.[1].traceId, 'trace-upstream-2');
|
|
291
|
+
assert.strictEqual(results[0].links?.[1].spanId, 'span-upstream-2');
|
|
292
|
+
assert.strictEqual(results[0].links?.[1].attributes, undefined);
|
|
293
|
+
// Second span: single link with attributes
|
|
294
|
+
assert.strictEqual(results[1].links?.length, 1);
|
|
295
|
+
assert.strictEqual(results[1].links?.[0].traceId, 'trace-parent');
|
|
296
|
+
assert.strictEqual(results[1].links?.[0].attributes?.['link.reason'], 'causal');
|
|
297
|
+
// Third span: no links
|
|
298
|
+
assert.strictEqual(results[2].links, undefined);
|
|
299
|
+
});
|
|
300
|
+
it('should filter out invalid span links with missing context', async () => {
|
|
301
|
+
const today = getTestDate();
|
|
302
|
+
const mockSpans = [
|
|
303
|
+
{
|
|
304
|
+
traceId: 'trace1',
|
|
305
|
+
spanId: 'span1',
|
|
306
|
+
name: 'mixed-links',
|
|
307
|
+
startTime: [1700000000, 0],
|
|
308
|
+
links: [
|
|
309
|
+
{
|
|
310
|
+
context: { traceId: 'valid-trace', spanId: 'valid-span' },
|
|
311
|
+
},
|
|
312
|
+
{
|
|
313
|
+
context: { traceId: 'missing-span-id' },
|
|
314
|
+
},
|
|
315
|
+
{
|
|
316
|
+
context: { spanId: 'missing-trace-id' },
|
|
317
|
+
},
|
|
318
|
+
{
|
|
319
|
+
// No context at all
|
|
320
|
+
attributes: { 'orphan': true },
|
|
321
|
+
},
|
|
322
|
+
],
|
|
323
|
+
},
|
|
324
|
+
];
|
|
325
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
326
|
+
const results = await backend.queryTraces({});
|
|
327
|
+
assert.strictEqual(results.length, 1);
|
|
328
|
+
// Only the valid link should be included
|
|
329
|
+
assert.strictEqual(results[0].links?.length, 1);
|
|
330
|
+
assert.strictEqual(results[0].links?.[0].traceId, 'valid-trace');
|
|
331
|
+
assert.strictEqual(results[0].links?.[0].spanId, 'valid-span');
|
|
332
|
+
});
|
|
333
|
+
it('should set links to undefined when all links are invalid', async () => {
|
|
334
|
+
const today = getTestDate();
|
|
335
|
+
const mockSpans = [
|
|
336
|
+
{
|
|
337
|
+
traceId: 'trace1',
|
|
338
|
+
spanId: 'span1',
|
|
339
|
+
name: 'all-invalid-links',
|
|
340
|
+
startTime: [1700000000, 0],
|
|
341
|
+
links: [
|
|
342
|
+
{ context: { traceId: 'missing-span' } },
|
|
343
|
+
{ context: { spanId: 'missing-trace' } },
|
|
344
|
+
],
|
|
345
|
+
},
|
|
346
|
+
];
|
|
347
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
348
|
+
const results = await backend.queryTraces({});
|
|
349
|
+
assert.strictEqual(results.length, 1);
|
|
350
|
+
assert.strictEqual(results[0].links, undefined);
|
|
351
|
+
});
|
|
352
|
+
it('should return empty array when no files found', async () => {
|
|
353
|
+
// No files created - tempDir is empty
|
|
354
|
+
const results = await backend.queryTraces({});
|
|
355
|
+
assert.strictEqual(results.length, 0);
|
|
356
|
+
});
|
|
357
|
+
it('should filter spans by attributeFilter with string value', async () => {
|
|
358
|
+
const today = getTestDate();
|
|
359
|
+
const mockSpans = [
|
|
360
|
+
{
|
|
361
|
+
traceId: 'trace1',
|
|
362
|
+
spanId: 'span1',
|
|
363
|
+
name: 'hook:session-start',
|
|
364
|
+
startTime: [1700000000, 0],
|
|
365
|
+
attributes: { 'hook.name': 'session-start', 'hook.type': 'session' },
|
|
366
|
+
},
|
|
367
|
+
{
|
|
368
|
+
traceId: 'trace2',
|
|
369
|
+
spanId: 'span2',
|
|
370
|
+
name: 'hook:mcp-pre-tool',
|
|
371
|
+
startTime: [1700000000, 0],
|
|
372
|
+
attributes: { 'hook.name': 'mcp-pre-tool', 'mcp.server': 'signoz' },
|
|
373
|
+
},
|
|
374
|
+
{
|
|
375
|
+
traceId: 'trace3',
|
|
376
|
+
spanId: 'span3',
|
|
377
|
+
name: 'hook:post-tool',
|
|
378
|
+
startTime: [1700000000, 0],
|
|
379
|
+
attributes: { 'hook.name': 'post-tool', 'mcp.server': 'webresearch' },
|
|
380
|
+
},
|
|
381
|
+
];
|
|
382
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
383
|
+
const results = await backend.queryTraces({
|
|
384
|
+
attributeFilter: { 'hook.name': 'session-start' },
|
|
385
|
+
});
|
|
386
|
+
assert.strictEqual(results.length, 1);
|
|
387
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
388
|
+
});
|
|
389
|
+
it('should filter spans by attributeFilter with multiple attributes', async () => {
|
|
390
|
+
const today = getTestDate();
|
|
391
|
+
const mockSpans = [
|
|
392
|
+
{
|
|
393
|
+
traceId: 'trace1',
|
|
394
|
+
spanId: 'span1',
|
|
395
|
+
name: 'mcp-call',
|
|
396
|
+
startTime: [1700000000, 0],
|
|
397
|
+
attributes: { 'mcp.server': 'signoz', 'mcp.success': true },
|
|
398
|
+
},
|
|
399
|
+
{
|
|
400
|
+
traceId: 'trace2',
|
|
401
|
+
spanId: 'span2',
|
|
402
|
+
name: 'mcp-call',
|
|
403
|
+
startTime: [1700000000, 0],
|
|
404
|
+
attributes: { 'mcp.server': 'signoz', 'mcp.success': false },
|
|
405
|
+
},
|
|
406
|
+
{
|
|
407
|
+
traceId: 'trace3',
|
|
408
|
+
spanId: 'span3',
|
|
409
|
+
name: 'mcp-call',
|
|
410
|
+
startTime: [1700000000, 0],
|
|
411
|
+
attributes: { 'mcp.server': 'webresearch', 'mcp.success': true },
|
|
412
|
+
},
|
|
413
|
+
];
|
|
414
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
415
|
+
const results = await backend.queryTraces({
|
|
416
|
+
attributeFilter: { 'mcp.server': 'signoz', 'mcp.success': true },
|
|
417
|
+
});
|
|
418
|
+
assert.strictEqual(results.length, 1);
|
|
419
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
420
|
+
});
|
|
421
|
+
it('should filter spans by attributeFilter with number value', async () => {
|
|
422
|
+
const today = getTestDate();
|
|
423
|
+
const mockSpans = [
|
|
424
|
+
{
|
|
425
|
+
traceId: 'trace1',
|
|
426
|
+
spanId: 'span1',
|
|
427
|
+
name: 'http-request',
|
|
428
|
+
startTime: [1700000000, 0],
|
|
429
|
+
attributes: { 'http.status_code': 200 },
|
|
430
|
+
},
|
|
431
|
+
{
|
|
432
|
+
traceId: 'trace2',
|
|
433
|
+
spanId: 'span2',
|
|
434
|
+
name: 'http-request',
|
|
435
|
+
startTime: [1700000000, 0],
|
|
436
|
+
attributes: { 'http.status_code': 500 },
|
|
437
|
+
},
|
|
438
|
+
];
|
|
439
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
440
|
+
const results = await backend.queryTraces({
|
|
441
|
+
attributeFilter: { 'http.status_code': 200 },
|
|
442
|
+
});
|
|
443
|
+
assert.strictEqual(results.length, 1);
|
|
444
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
445
|
+
});
|
|
446
|
+
it('should filter spans by attributeFilter with boolean value', async () => {
|
|
447
|
+
const today = getTestDate();
|
|
448
|
+
const mockSpans = [
|
|
449
|
+
{
|
|
450
|
+
traceId: 'trace1',
|
|
451
|
+
spanId: 'span1',
|
|
452
|
+
name: 'agent-call',
|
|
453
|
+
startTime: [1700000000, 0],
|
|
454
|
+
attributes: { 'agent.is_background': true },
|
|
455
|
+
},
|
|
456
|
+
{
|
|
457
|
+
traceId: 'trace2',
|
|
458
|
+
spanId: 'span2',
|
|
459
|
+
name: 'agent-call',
|
|
460
|
+
startTime: [1700000000, 0],
|
|
461
|
+
attributes: { 'agent.is_background': false },
|
|
462
|
+
},
|
|
463
|
+
];
|
|
464
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
465
|
+
const results = await backend.queryTraces({
|
|
466
|
+
attributeFilter: { 'agent.is_background': false },
|
|
467
|
+
});
|
|
468
|
+
assert.strictEqual(results.length, 1);
|
|
469
|
+
assert.strictEqual(results[0].traceId, 'trace2');
|
|
470
|
+
});
|
|
471
|
+
it('should return empty array when attributeFilter matches nothing', async () => {
|
|
472
|
+
const today = getTestDate();
|
|
473
|
+
const mockSpans = [
|
|
474
|
+
{
|
|
475
|
+
traceId: 'trace1',
|
|
476
|
+
spanId: 'span1',
|
|
477
|
+
name: 'op1',
|
|
478
|
+
startTime: [1700000000, 0],
|
|
479
|
+
attributes: { 'hook.name': 'session-start' },
|
|
480
|
+
},
|
|
481
|
+
];
|
|
482
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
483
|
+
const results = await backend.queryTraces({
|
|
484
|
+
attributeFilter: { 'hook.name': 'nonexistent' },
|
|
485
|
+
});
|
|
486
|
+
assert.strictEqual(results.length, 0);
|
|
487
|
+
});
|
|
488
|
+
it('should combine attributeFilter with other filters', async () => {
|
|
489
|
+
const today = getTestDate();
|
|
490
|
+
const mockSpans = [
|
|
491
|
+
{
|
|
492
|
+
traceId: 'trace1',
|
|
493
|
+
spanId: 'span1',
|
|
494
|
+
name: 'hook:mcp-pre-tool',
|
|
495
|
+
startTime: [1700000000, 0],
|
|
496
|
+
endTime: [1700000000, 500000000], // 500ms
|
|
497
|
+
attributes: { 'mcp.server': 'signoz' },
|
|
498
|
+
},
|
|
499
|
+
{
|
|
500
|
+
traceId: 'trace2',
|
|
501
|
+
spanId: 'span2',
|
|
502
|
+
name: 'hook:mcp-pre-tool',
|
|
503
|
+
startTime: [1700000000, 0],
|
|
504
|
+
endTime: [1700000002, 0], // 2000ms
|
|
505
|
+
attributes: { 'mcp.server': 'signoz' },
|
|
506
|
+
},
|
|
507
|
+
{
|
|
508
|
+
traceId: 'trace3',
|
|
509
|
+
spanId: 'span3',
|
|
510
|
+
name: 'hook:mcp-pre-tool',
|
|
511
|
+
startTime: [1700000000, 0],
|
|
512
|
+
endTime: [1700000000, 500000000], // 500ms
|
|
513
|
+
attributes: { 'mcp.server': 'webresearch' },
|
|
514
|
+
},
|
|
515
|
+
];
|
|
516
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
517
|
+
const results = await backend.queryTraces({
|
|
518
|
+
spanName: 'mcp',
|
|
519
|
+
minDurationMs: 1000,
|
|
520
|
+
attributeFilter: { 'mcp.server': 'signoz' },
|
|
521
|
+
});
|
|
522
|
+
assert.strictEqual(results.length, 1);
|
|
523
|
+
assert.strictEqual(results[0].traceId, 'trace2');
|
|
524
|
+
});
|
|
525
|
+
it('should exclude spans matching excludeSpanName', async () => {
|
|
526
|
+
const today = getTestDate();
|
|
527
|
+
const mockSpans = [
|
|
528
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'http-request', startTime: [1700000000, 0] },
|
|
529
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'db-query', startTime: [1700000000, 0] },
|
|
530
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'http-response', startTime: [1700000000, 0] },
|
|
531
|
+
];
|
|
532
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
533
|
+
const results = await backend.queryTraces({ excludeSpanName: 'http' });
|
|
534
|
+
assert.strictEqual(results.length, 1);
|
|
535
|
+
assert.strictEqual(results[0].name, 'db-query');
|
|
536
|
+
});
|
|
537
|
+
it('should filter spans by spanNameRegex', async () => {
|
|
538
|
+
const today = getTestDate();
|
|
539
|
+
const mockSpans = [
|
|
540
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'hook:session-start', startTime: [1700000000, 0] },
|
|
541
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'hook:session-end', startTime: [1700000000, 0] },
|
|
542
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'mcp-call', startTime: [1700000000, 0] },
|
|
543
|
+
{ traceId: 'trace4', spanId: 'span4', name: 'hook:pre-tool', startTime: [1700000000, 0] },
|
|
544
|
+
];
|
|
545
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
546
|
+
// Match spans starting with 'hook:session'
|
|
547
|
+
const results = await backend.queryTraces({ spanNameRegex: '^hook:session' });
|
|
548
|
+
assert.strictEqual(results.length, 2);
|
|
549
|
+
assert.ok(results.some(s => s.name === 'hook:session-start'));
|
|
550
|
+
assert.ok(results.some(s => s.name === 'hook:session-end'));
|
|
551
|
+
});
|
|
552
|
+
it('should filter spans by spanNameRegex with complex pattern', async () => {
|
|
553
|
+
const today = getTestDate();
|
|
554
|
+
const mockSpans = [
|
|
555
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'api-v1-users-get', startTime: [1700000000, 0] },
|
|
556
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'api-v2-users-get', startTime: [1700000000, 0] },
|
|
557
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'api-v1-orders-post', startTime: [1700000000, 0] },
|
|
558
|
+
{ traceId: 'trace4', spanId: 'span4', name: 'internal-process', startTime: [1700000000, 0] },
|
|
559
|
+
];
|
|
560
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
561
|
+
// Match spans with api-v[12]-.*-get pattern
|
|
562
|
+
const results = await backend.queryTraces({ spanNameRegex: 'api-v[12]-.*-get' });
|
|
563
|
+
assert.strictEqual(results.length, 2);
|
|
564
|
+
assert.ok(results.some(s => s.name === 'api-v1-users-get'));
|
|
565
|
+
assert.ok(results.some(s => s.name === 'api-v2-users-get'));
|
|
566
|
+
});
|
|
567
|
+
it('should handle invalid spanNameRegex gracefully', async () => {
|
|
568
|
+
const today = getTestDate();
|
|
569
|
+
const mockSpans = [
|
|
570
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'test-span', startTime: [1700000000, 0] },
|
|
571
|
+
];
|
|
572
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
573
|
+
// Invalid regex pattern - should be skipped (all spans returned)
|
|
574
|
+
const results = await backend.queryTraces({ spanNameRegex: '[invalid(' });
|
|
575
|
+
// Invalid regex is skipped, so all spans should be returned
|
|
576
|
+
assert.strictEqual(results.length, 1);
|
|
577
|
+
assert.strictEqual(results[0].name, 'test-span');
|
|
578
|
+
});
|
|
579
|
+
it('should combine spanNameRegex with spanName filter', async () => {
|
|
580
|
+
const today = getTestDate();
|
|
581
|
+
const mockSpans = [
|
|
582
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'hook:mcp-pre-tool', startTime: [1700000000, 0] },
|
|
583
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'hook:mcp-post-tool', startTime: [1700000000, 0] },
|
|
584
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'hook:session-start', startTime: [1700000000, 0] },
|
|
585
|
+
{ traceId: 'trace4', spanId: 'span4', name: 'mcp-call', startTime: [1700000000, 0] },
|
|
586
|
+
];
|
|
587
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
588
|
+
// spanName filters first (substring), then regex narrows down
|
|
589
|
+
const results = await backend.queryTraces({
|
|
590
|
+
spanName: 'hook',
|
|
591
|
+
spanNameRegex: 'mcp',
|
|
592
|
+
});
|
|
593
|
+
assert.strictEqual(results.length, 2);
|
|
594
|
+
assert.ok(results.some(s => s.name === 'hook:mcp-pre-tool'));
|
|
595
|
+
assert.ok(results.some(s => s.name === 'hook:mcp-post-tool'));
|
|
596
|
+
});
|
|
597
|
+
it('should combine spanNameRegex with excludeSpanName', async () => {
|
|
598
|
+
const today = getTestDate();
|
|
599
|
+
const mockSpans = [
|
|
600
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'hook:mcp-pre-tool', startTime: [1700000000, 0] },
|
|
601
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'hook:mcp-post-tool', startTime: [1700000000, 0] },
|
|
602
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'hook:session-start', startTime: [1700000000, 0] },
|
|
603
|
+
];
|
|
604
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
605
|
+
// Regex matches all hook:mcp-*, exclude post-tool
|
|
606
|
+
const results = await backend.queryTraces({
|
|
607
|
+
spanNameRegex: '^hook:mcp-',
|
|
608
|
+
excludeSpanName: 'post-tool',
|
|
609
|
+
});
|
|
610
|
+
assert.strictEqual(results.length, 1);
|
|
611
|
+
assert.strictEqual(results[0].name, 'hook:mcp-pre-tool');
|
|
612
|
+
});
|
|
613
|
+
it('should filter spans by attributeExists - all must exist', async () => {
|
|
614
|
+
const today = getTestDate();
|
|
615
|
+
const mockSpans = [
|
|
616
|
+
{
|
|
617
|
+
traceId: 'trace1',
|
|
618
|
+
spanId: 'span1',
|
|
619
|
+
name: 'op1',
|
|
620
|
+
startTime: [1700000000, 0],
|
|
621
|
+
attributes: { 'http.method': 'GET', 'http.status_code': 200 },
|
|
622
|
+
},
|
|
623
|
+
{
|
|
624
|
+
traceId: 'trace2',
|
|
625
|
+
spanId: 'span2',
|
|
626
|
+
name: 'op2',
|
|
627
|
+
startTime: [1700000000, 0],
|
|
628
|
+
attributes: { 'http.method': 'POST' }, // missing http.status_code
|
|
629
|
+
},
|
|
630
|
+
{
|
|
631
|
+
traceId: 'trace3',
|
|
632
|
+
spanId: 'span3',
|
|
633
|
+
name: 'op3',
|
|
634
|
+
startTime: [1700000000, 0],
|
|
635
|
+
attributes: { 'db.system': 'postgres' }, // missing both
|
|
636
|
+
},
|
|
637
|
+
];
|
|
638
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
639
|
+
const results = await backend.queryTraces({
|
|
640
|
+
attributeExists: ['http.method', 'http.status_code'],
|
|
641
|
+
});
|
|
642
|
+
assert.strictEqual(results.length, 1);
|
|
643
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
644
|
+
});
|
|
645
|
+
it('should filter spans by attributeNotExists - exclude if any exist', async () => {
|
|
646
|
+
const today = getTestDate();
|
|
647
|
+
const mockSpans = [
|
|
648
|
+
{
|
|
649
|
+
traceId: 'trace1',
|
|
650
|
+
spanId: 'span1',
|
|
651
|
+
name: 'op1',
|
|
652
|
+
startTime: [1700000000, 0],
|
|
653
|
+
attributes: { 'http.method': 'GET', 'error.message': 'timeout' },
|
|
654
|
+
},
|
|
655
|
+
{
|
|
656
|
+
traceId: 'trace2',
|
|
657
|
+
spanId: 'span2',
|
|
658
|
+
name: 'op2',
|
|
659
|
+
startTime: [1700000000, 0],
|
|
660
|
+
attributes: { 'http.method': 'POST' }, // no error attributes
|
|
661
|
+
},
|
|
662
|
+
{
|
|
663
|
+
traceId: 'trace3',
|
|
664
|
+
spanId: 'span3',
|
|
665
|
+
name: 'op3',
|
|
666
|
+
startTime: [1700000000, 0],
|
|
667
|
+
attributes: { 'http.method': 'GET', 'error.type': 'network' },
|
|
668
|
+
},
|
|
669
|
+
];
|
|
670
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
671
|
+
const results = await backend.queryTraces({
|
|
672
|
+
attributeNotExists: ['error.message', 'error.type'],
|
|
673
|
+
});
|
|
674
|
+
assert.strictEqual(results.length, 1);
|
|
675
|
+
assert.strictEqual(results[0].traceId, 'trace2');
|
|
676
|
+
});
|
|
677
|
+
it('should combine spanName with excludeSpanName', async () => {
|
|
678
|
+
const today = getTestDate();
|
|
679
|
+
const mockSpans = [
|
|
680
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'http-request-external', startTime: [1700000000, 0] },
|
|
681
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'http-request-internal', startTime: [1700000000, 0] },
|
|
682
|
+
{ traceId: 'trace3', spanId: 'span3', name: 'db-query', startTime: [1700000000, 0] },
|
|
683
|
+
];
|
|
684
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
685
|
+
const results = await backend.queryTraces({
|
|
686
|
+
spanName: 'http',
|
|
687
|
+
excludeSpanName: 'internal',
|
|
688
|
+
});
|
|
689
|
+
assert.strictEqual(results.length, 1);
|
|
690
|
+
assert.strictEqual(results[0].name, 'http-request-external');
|
|
691
|
+
});
|
|
692
|
+
it('should combine attributeExists with attributeFilter', async () => {
|
|
693
|
+
const today = getTestDate();
|
|
694
|
+
const mockSpans = [
|
|
695
|
+
{
|
|
696
|
+
traceId: 'trace1',
|
|
697
|
+
spanId: 'span1',
|
|
698
|
+
name: 'op1',
|
|
699
|
+
startTime: [1700000000, 0],
|
|
700
|
+
attributes: { 'http.method': 'GET', 'http.status_code': 200 },
|
|
701
|
+
},
|
|
702
|
+
{
|
|
703
|
+
traceId: 'trace2',
|
|
704
|
+
spanId: 'span2',
|
|
705
|
+
name: 'op2',
|
|
706
|
+
startTime: [1700000000, 0],
|
|
707
|
+
attributes: { 'http.method': 'POST', 'http.status_code': 500 },
|
|
708
|
+
},
|
|
709
|
+
{
|
|
710
|
+
traceId: 'trace3',
|
|
711
|
+
spanId: 'span3',
|
|
712
|
+
name: 'op3',
|
|
713
|
+
startTime: [1700000000, 0],
|
|
714
|
+
attributes: { 'http.method': 'GET' }, // missing status_code
|
|
715
|
+
},
|
|
716
|
+
];
|
|
717
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
718
|
+
const results = await backend.queryTraces({
|
|
719
|
+
attributeFilter: { 'http.method': 'GET' },
|
|
720
|
+
attributeExists: ['http.status_code'],
|
|
721
|
+
});
|
|
722
|
+
assert.strictEqual(results.length, 1);
|
|
723
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
724
|
+
});
|
|
725
|
+
it('should filter spans by numericFilter with gt operator', async () => {
|
|
726
|
+
const today = getTestDate();
|
|
727
|
+
const mockSpans = [
|
|
728
|
+
{
|
|
729
|
+
traceId: 'trace1',
|
|
730
|
+
spanId: 'span1',
|
|
731
|
+
name: 'http-request',
|
|
732
|
+
startTime: [1700000000, 0],
|
|
733
|
+
attributes: { 'http.status_code': 200 },
|
|
734
|
+
},
|
|
735
|
+
{
|
|
736
|
+
traceId: 'trace2',
|
|
737
|
+
spanId: 'span2',
|
|
738
|
+
name: 'http-request',
|
|
739
|
+
startTime: [1700000000, 0],
|
|
740
|
+
attributes: { 'http.status_code': 500 },
|
|
741
|
+
},
|
|
742
|
+
{
|
|
743
|
+
traceId: 'trace3',
|
|
744
|
+
spanId: 'span3',
|
|
745
|
+
name: 'http-request',
|
|
746
|
+
startTime: [1700000000, 0],
|
|
747
|
+
attributes: { 'http.status_code': 300 },
|
|
748
|
+
},
|
|
749
|
+
];
|
|
750
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
751
|
+
const results = await backend.queryTraces({
|
|
752
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'gt', value: 299 }],
|
|
753
|
+
});
|
|
754
|
+
assert.strictEqual(results.length, 2);
|
|
755
|
+
assert.ok(results.some(s => s.traceId === 'trace2'));
|
|
756
|
+
assert.ok(results.some(s => s.traceId === 'trace3'));
|
|
757
|
+
});
|
|
758
|
+
it('should filter spans by numericFilter with gte operator', async () => {
|
|
759
|
+
const today = getTestDate();
|
|
760
|
+
const mockSpans = [
|
|
761
|
+
{
|
|
762
|
+
traceId: 'trace1',
|
|
763
|
+
spanId: 'span1',
|
|
764
|
+
name: 'http-request',
|
|
765
|
+
startTime: [1700000000, 0],
|
|
766
|
+
attributes: { 'http.status_code': 200 },
|
|
767
|
+
},
|
|
768
|
+
{
|
|
769
|
+
traceId: 'trace2',
|
|
770
|
+
spanId: 'span2',
|
|
771
|
+
name: 'http-request',
|
|
772
|
+
startTime: [1700000000, 0],
|
|
773
|
+
attributes: { 'http.status_code': 300 },
|
|
774
|
+
},
|
|
775
|
+
];
|
|
776
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
777
|
+
const results = await backend.queryTraces({
|
|
778
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 300 }],
|
|
779
|
+
});
|
|
780
|
+
assert.strictEqual(results.length, 1);
|
|
781
|
+
assert.strictEqual(results[0].traceId, 'trace2');
|
|
782
|
+
});
|
|
783
|
+
it('should filter spans by numericFilter with lt operator', async () => {
|
|
784
|
+
const today = getTestDate();
|
|
785
|
+
const mockSpans = [
|
|
786
|
+
{
|
|
787
|
+
traceId: 'trace1',
|
|
788
|
+
spanId: 'span1',
|
|
789
|
+
name: 'http-request',
|
|
790
|
+
startTime: [1700000000, 0],
|
|
791
|
+
attributes: { 'http.status_code': 200 },
|
|
792
|
+
},
|
|
793
|
+
{
|
|
794
|
+
traceId: 'trace2',
|
|
795
|
+
spanId: 'span2',
|
|
796
|
+
name: 'http-request',
|
|
797
|
+
startTime: [1700000000, 0],
|
|
798
|
+
attributes: { 'http.status_code': 500 },
|
|
799
|
+
},
|
|
800
|
+
];
|
|
801
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
802
|
+
const results = await backend.queryTraces({
|
|
803
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'lt', value: 300 }],
|
|
804
|
+
});
|
|
805
|
+
assert.strictEqual(results.length, 1);
|
|
806
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
807
|
+
});
|
|
808
|
+
it('should filter spans by numericFilter with lte operator', async () => {
|
|
809
|
+
const today = getTestDate();
|
|
810
|
+
const mockSpans = [
|
|
811
|
+
{
|
|
812
|
+
traceId: 'trace1',
|
|
813
|
+
spanId: 'span1',
|
|
814
|
+
name: 'http-request',
|
|
815
|
+
startTime: [1700000000, 0],
|
|
816
|
+
attributes: { 'http.status_code': 200 },
|
|
817
|
+
},
|
|
818
|
+
{
|
|
819
|
+
traceId: 'trace2',
|
|
820
|
+
spanId: 'span2',
|
|
821
|
+
name: 'http-request',
|
|
822
|
+
startTime: [1700000000, 0],
|
|
823
|
+
attributes: { 'http.status_code': 300 },
|
|
824
|
+
},
|
|
825
|
+
];
|
|
826
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
827
|
+
const results = await backend.queryTraces({
|
|
828
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'lte', value: 200 }],
|
|
829
|
+
});
|
|
830
|
+
assert.strictEqual(results.length, 1);
|
|
831
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
832
|
+
});
|
|
833
|
+
it('should filter spans by numericFilter with eq operator', async () => {
|
|
834
|
+
const today = getTestDate();
|
|
835
|
+
const mockSpans = [
|
|
836
|
+
{
|
|
837
|
+
traceId: 'trace1',
|
|
838
|
+
spanId: 'span1',
|
|
839
|
+
name: 'http-request',
|
|
840
|
+
startTime: [1700000000, 0],
|
|
841
|
+
attributes: { 'http.status_code': 200 },
|
|
842
|
+
},
|
|
843
|
+
{
|
|
844
|
+
traceId: 'trace2',
|
|
845
|
+
spanId: 'span2',
|
|
846
|
+
name: 'http-request',
|
|
847
|
+
startTime: [1700000000, 0],
|
|
848
|
+
attributes: { 'http.status_code': 500 },
|
|
849
|
+
},
|
|
850
|
+
];
|
|
851
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
852
|
+
const results = await backend.queryTraces({
|
|
853
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'eq', value: 200 }],
|
|
854
|
+
});
|
|
855
|
+
assert.strictEqual(results.length, 1);
|
|
856
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
857
|
+
});
|
|
858
|
+
it('should filter spans by multiple numericFilter conditions (AND logic)', async () => {
|
|
859
|
+
const today = getTestDate();
|
|
860
|
+
const mockSpans = [
|
|
861
|
+
{
|
|
862
|
+
traceId: 'trace1',
|
|
863
|
+
spanId: 'span1',
|
|
864
|
+
name: 'http-request',
|
|
865
|
+
startTime: [1700000000, 0],
|
|
866
|
+
attributes: { 'http.status_code': 200, 'http.response_size': 1000 },
|
|
867
|
+
},
|
|
868
|
+
{
|
|
869
|
+
traceId: 'trace2',
|
|
870
|
+
spanId: 'span2',
|
|
871
|
+
name: 'http-request',
|
|
872
|
+
startTime: [1700000000, 0],
|
|
873
|
+
attributes: { 'http.status_code': 200, 'http.response_size': 5000 },
|
|
874
|
+
},
|
|
875
|
+
{
|
|
876
|
+
traceId: 'trace3',
|
|
877
|
+
spanId: 'span3',
|
|
878
|
+
name: 'http-request',
|
|
879
|
+
startTime: [1700000000, 0],
|
|
880
|
+
attributes: { 'http.status_code': 500, 'http.response_size': 100 },
|
|
881
|
+
},
|
|
882
|
+
];
|
|
883
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
884
|
+
const results = await backend.queryTraces({
|
|
885
|
+
numericFilter: [
|
|
886
|
+
{ attribute: 'http.status_code', operator: 'lt', value: 300 },
|
|
887
|
+
{ attribute: 'http.response_size', operator: 'gt', value: 2000 },
|
|
888
|
+
],
|
|
889
|
+
});
|
|
890
|
+
assert.strictEqual(results.length, 1);
|
|
891
|
+
assert.strictEqual(results[0].traceId, 'trace2');
|
|
892
|
+
});
|
|
893
|
+
it('should skip spans when numericFilter attribute is missing', async () => {
|
|
894
|
+
const today = getTestDate();
|
|
895
|
+
const mockSpans = [
|
|
896
|
+
{
|
|
897
|
+
traceId: 'trace1',
|
|
898
|
+
spanId: 'span1',
|
|
899
|
+
name: 'http-request',
|
|
900
|
+
startTime: [1700000000, 0],
|
|
901
|
+
attributes: { 'http.status_code': 200 },
|
|
902
|
+
},
|
|
903
|
+
{
|
|
904
|
+
traceId: 'trace2',
|
|
905
|
+
spanId: 'span2',
|
|
906
|
+
name: 'http-request',
|
|
907
|
+
startTime: [1700000000, 0],
|
|
908
|
+
attributes: { 'other.attr': 'value' }, // missing http.status_code
|
|
909
|
+
},
|
|
910
|
+
];
|
|
911
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
912
|
+
const results = await backend.queryTraces({
|
|
913
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 100 }],
|
|
914
|
+
});
|
|
915
|
+
assert.strictEqual(results.length, 1);
|
|
916
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
917
|
+
});
|
|
918
|
+
it('should skip spans when numericFilter attribute is not a number', async () => {
|
|
919
|
+
const today = getTestDate();
|
|
920
|
+
const mockSpans = [
|
|
921
|
+
{
|
|
922
|
+
traceId: 'trace1',
|
|
923
|
+
spanId: 'span1',
|
|
924
|
+
name: 'http-request',
|
|
925
|
+
startTime: [1700000000, 0],
|
|
926
|
+
attributes: { 'http.status_code': 200 },
|
|
927
|
+
},
|
|
928
|
+
{
|
|
929
|
+
traceId: 'trace2',
|
|
930
|
+
spanId: 'span2',
|
|
931
|
+
name: 'http-request',
|
|
932
|
+
startTime: [1700000000, 0],
|
|
933
|
+
attributes: { 'http.status_code': '200' }, // string, not number
|
|
934
|
+
},
|
|
935
|
+
];
|
|
936
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
937
|
+
const results = await backend.queryTraces({
|
|
938
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'eq', value: 200 }],
|
|
939
|
+
});
|
|
940
|
+
assert.strictEqual(results.length, 1);
|
|
941
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
942
|
+
});
|
|
943
|
+
it('should combine numericFilter with other filters', async () => {
|
|
944
|
+
const today = getTestDate();
|
|
945
|
+
const mockSpans = [
|
|
946
|
+
{
|
|
947
|
+
traceId: 'trace1',
|
|
948
|
+
spanId: 'span1',
|
|
949
|
+
name: 'http-request',
|
|
950
|
+
startTime: [1700000000, 0],
|
|
951
|
+
attributes: { 'http.status_code': 500, 'http.method': 'GET' },
|
|
952
|
+
},
|
|
953
|
+
{
|
|
954
|
+
traceId: 'trace2',
|
|
955
|
+
spanId: 'span2',
|
|
956
|
+
name: 'http-request',
|
|
957
|
+
startTime: [1700000000, 0],
|
|
958
|
+
attributes: { 'http.status_code': 500, 'http.method': 'POST' },
|
|
959
|
+
},
|
|
960
|
+
{
|
|
961
|
+
traceId: 'trace3',
|
|
962
|
+
spanId: 'span3',
|
|
963
|
+
name: 'http-request',
|
|
964
|
+
startTime: [1700000000, 0],
|
|
965
|
+
attributes: { 'http.status_code': 200, 'http.method': 'GET' },
|
|
966
|
+
},
|
|
967
|
+
];
|
|
968
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
969
|
+
const results = await backend.queryTraces({
|
|
970
|
+
attributeFilter: { 'http.method': 'GET' },
|
|
971
|
+
numericFilter: [{ attribute: 'http.status_code', operator: 'gte', value: 400 }],
|
|
972
|
+
});
|
|
973
|
+
assert.strictEqual(results.length, 1);
|
|
974
|
+
assert.strictEqual(results[0].traceId, 'trace1');
|
|
975
|
+
});
|
|
976
|
+
it('should complete queries with timing (timing is logged for slow queries)', async () => {
|
|
977
|
+
// This test verifies that query timing is active and doesn't break normal queries
|
|
978
|
+
// Timing warnings are logged for queries > 500ms
|
|
979
|
+
const today = getTestDate();
|
|
980
|
+
const mockSpans = [
|
|
981
|
+
{ traceId: 'trace1', spanId: 'span1', name: 'op1', startTime: [1700000000, 0] },
|
|
982
|
+
{ traceId: 'trace2', spanId: 'span2', name: 'op2', startTime: [1700000001, 0] },
|
|
983
|
+
];
|
|
984
|
+
writeJsonlFile(path.join(tempDir, `traces-${today}.jsonl`), mockSpans);
|
|
985
|
+
// Query should complete successfully with timing active
|
|
986
|
+
const results = await backend.queryTraces({});
|
|
987
|
+
assert.strictEqual(results.length, 2);
|
|
988
|
+
});
|
|
989
|
+
});
|
|
990
|
+
describe('queryLLMEvents', () => {
|
|
991
|
+
it('should read and normalize LLM events from JSONL files', async () => {
|
|
992
|
+
const today = getTestDate();
|
|
993
|
+
const mockEvents = [
|
|
994
|
+
{
|
|
995
|
+
timestamp: '2026-01-28T10:00:00.000Z',
|
|
996
|
+
name: 'llm.completion',
|
|
997
|
+
attributes: {
|
|
998
|
+
'gen_ai.request.model': 'claude-3-opus',
|
|
999
|
+
'gen_ai.system': 'anthropic',
|
|
1000
|
+
'gen_ai.usage.input_tokens': 100,
|
|
1001
|
+
'gen_ai.usage.output_tokens': 50,
|
|
1002
|
+
'duration_ms': 1500,
|
|
1003
|
+
'success': true,
|
|
1004
|
+
},
|
|
1005
|
+
},
|
|
1006
|
+
];
|
|
1007
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1008
|
+
const results = await backend.queryLLMEvents({});
|
|
1009
|
+
assert.strictEqual(results.length, 1);
|
|
1010
|
+
assert.strictEqual(results[0].name, 'llm.completion');
|
|
1011
|
+
assert.strictEqual(results[0].attributes['gen_ai.request.model'], 'claude-3-opus');
|
|
1012
|
+
assert.strictEqual(results[0].attributes['gen_ai.system'], 'anthropic');
|
|
1013
|
+
assert.strictEqual(results[0].attributes['gen_ai.usage.input_tokens'], 100);
|
|
1014
|
+
});
|
|
1015
|
+
it('should filter events by eventName substring', async () => {
|
|
1016
|
+
const today = getTestDate();
|
|
1017
|
+
const mockEvents = [
|
|
1018
|
+
{ timestamp: '2026-01-28T10:00:00Z', name: 'llm.completion', attributes: {} },
|
|
1019
|
+
{ timestamp: '2026-01-28T10:01:00Z', name: 'llm.embedding', attributes: {} },
|
|
1020
|
+
{ timestamp: '2026-01-28T10:02:00Z', name: 'tool.execution', attributes: {} },
|
|
1021
|
+
];
|
|
1022
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1023
|
+
const results = await backend.queryLLMEvents({ eventName: 'llm' });
|
|
1024
|
+
assert.strictEqual(results.length, 2);
|
|
1025
|
+
assert.ok(results.every(e => e.name.includes('llm')));
|
|
1026
|
+
});
|
|
1027
|
+
it('should filter events by model', async () => {
|
|
1028
|
+
const today = getTestDate();
|
|
1029
|
+
const mockEvents = [
|
|
1030
|
+
{
|
|
1031
|
+
timestamp: '2026-01-28T10:00:00Z',
|
|
1032
|
+
name: 'llm.completion',
|
|
1033
|
+
attributes: { 'gen_ai.request.model': 'claude-3-opus' },
|
|
1034
|
+
},
|
|
1035
|
+
{
|
|
1036
|
+
timestamp: '2026-01-28T10:01:00Z',
|
|
1037
|
+
name: 'llm.completion',
|
|
1038
|
+
attributes: { 'gen_ai.request.model': 'gpt-4' },
|
|
1039
|
+
},
|
|
1040
|
+
{
|
|
1041
|
+
timestamp: '2026-01-28T10:02:00Z',
|
|
1042
|
+
name: 'llm.completion',
|
|
1043
|
+
attributes: { model: 'claude-3-opus' }, // alternate attribute name
|
|
1044
|
+
},
|
|
1045
|
+
];
|
|
1046
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1047
|
+
const results = await backend.queryLLMEvents({ model: 'claude-3-opus' });
|
|
1048
|
+
assert.strictEqual(results.length, 2);
|
|
1049
|
+
});
|
|
1050
|
+
it('should filter events by provider', async () => {
|
|
1051
|
+
const today = getTestDate();
|
|
1052
|
+
const mockEvents = [
|
|
1053
|
+
{
|
|
1054
|
+
timestamp: '2026-01-28T10:00:00Z',
|
|
1055
|
+
name: 'llm.completion',
|
|
1056
|
+
attributes: { 'gen_ai.system': 'anthropic' },
|
|
1057
|
+
},
|
|
1058
|
+
{
|
|
1059
|
+
timestamp: '2026-01-28T10:01:00Z',
|
|
1060
|
+
name: 'llm.completion',
|
|
1061
|
+
attributes: { 'gen_ai.system': 'openai' },
|
|
1062
|
+
},
|
|
1063
|
+
{
|
|
1064
|
+
timestamp: '2026-01-28T10:02:00Z',
|
|
1065
|
+
name: 'llm.completion',
|
|
1066
|
+
attributes: { provider: 'anthropic' }, // alternate attribute name
|
|
1067
|
+
},
|
|
1068
|
+
];
|
|
1069
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1070
|
+
const results = await backend.queryLLMEvents({ provider: 'anthropic' });
|
|
1071
|
+
assert.strictEqual(results.length, 2);
|
|
1072
|
+
});
|
|
1073
|
+
it('should filter events by search text in attributes', async () => {
|
|
1074
|
+
const today = getTestDate();
|
|
1075
|
+
const mockEvents = [
|
|
1076
|
+
{
|
|
1077
|
+
timestamp: '2026-01-28T10:00:00Z',
|
|
1078
|
+
name: 'llm.completion',
|
|
1079
|
+
attributes: { prompt: 'Write a function to calculate fibonacci' },
|
|
1080
|
+
},
|
|
1081
|
+
{
|
|
1082
|
+
timestamp: '2026-01-28T10:01:00Z',
|
|
1083
|
+
name: 'llm.completion',
|
|
1084
|
+
attributes: { prompt: 'Explain quantum computing' },
|
|
1085
|
+
},
|
|
1086
|
+
];
|
|
1087
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1088
|
+
const results = await backend.queryLLMEvents({ search: 'fibonacci' });
|
|
1089
|
+
assert.strictEqual(results.length, 1);
|
|
1090
|
+
assert.strictEqual(results[0].attributes.prompt, 'Write a function to calculate fibonacci');
|
|
1091
|
+
});
|
|
1092
|
+
it('should filter events by search text in event name', async () => {
|
|
1093
|
+
const today = getTestDate();
|
|
1094
|
+
const mockEvents = [
|
|
1095
|
+
{ timestamp: '2026-01-28T10:00:00Z', name: 'llm.completion.streaming', attributes: {} },
|
|
1096
|
+
{ timestamp: '2026-01-28T10:01:00Z', name: 'llm.completion', attributes: {} },
|
|
1097
|
+
];
|
|
1098
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1099
|
+
const results = await backend.queryLLMEvents({ search: 'streaming' });
|
|
1100
|
+
assert.strictEqual(results.length, 1);
|
|
1101
|
+
assert.strictEqual(results[0].name, 'llm.completion.streaming');
|
|
1102
|
+
});
|
|
1103
|
+
it('should apply limit and offset to LLM event results', async () => {
|
|
1104
|
+
const today = getTestDate();
|
|
1105
|
+
const mockEvents = Array.from({ length: 100 }, (_, i) => ({
|
|
1106
|
+
timestamp: new Date(Date.now() + i * 1000).toISOString(),
|
|
1107
|
+
name: `event-${i}`,
|
|
1108
|
+
attributes: { index: i },
|
|
1109
|
+
}));
|
|
1110
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1111
|
+
const results = await backend.queryLLMEvents({ limit: 20, offset: 50 });
|
|
1112
|
+
assert.strictEqual(results.length, 20);
|
|
1113
|
+
assert.strictEqual(results[0].name, 'event-50');
|
|
1114
|
+
});
|
|
1115
|
+
it('should filter events by date range', async () => {
|
|
1116
|
+
// Create files for multiple dates
|
|
1117
|
+
writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-26.jsonl'), [
|
|
1118
|
+
{ timestamp: '2026-01-26T10:00:00Z', name: 'event-26', attributes: {} },
|
|
1119
|
+
]);
|
|
1120
|
+
writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-27.jsonl'), [
|
|
1121
|
+
{ timestamp: '2026-01-27T10:00:00Z', name: 'event-27', attributes: {} },
|
|
1122
|
+
]);
|
|
1123
|
+
writeJsonlFile(path.join(tempDir, 'llm-events-2026-01-28.jsonl'), [
|
|
1124
|
+
{ timestamp: '2026-01-28T10:00:00Z', name: 'event-28', attributes: {} },
|
|
1125
|
+
]);
|
|
1126
|
+
const results = await backend.queryLLMEvents({
|
|
1127
|
+
startDate: '2026-01-27',
|
|
1128
|
+
endDate: '2026-01-27',
|
|
1129
|
+
});
|
|
1130
|
+
assert.strictEqual(results.length, 1);
|
|
1131
|
+
assert.strictEqual(results[0].name, 'event-27');
|
|
1132
|
+
});
|
|
1133
|
+
it('should skip events with missing required fields', async () => {
|
|
1134
|
+
const today = getTestDate();
|
|
1135
|
+
const mockEvents = [
|
|
1136
|
+
{ timestamp: '2026-01-28T10:00:00Z', name: 'valid-event', attributes: {} },
|
|
1137
|
+
{ timestamp: '2026-01-28T10:01:00Z', attributes: {} }, // missing name
|
|
1138
|
+
{ name: 'no-timestamp', attributes: {} }, // missing timestamp
|
|
1139
|
+
];
|
|
1140
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1141
|
+
const results = await backend.queryLLMEvents({});
|
|
1142
|
+
assert.strictEqual(results.length, 1);
|
|
1143
|
+
assert.strictEqual(results[0].name, 'valid-event');
|
|
1144
|
+
});
|
|
1145
|
+
it('should return empty array when no LLM event files found', async () => {
|
|
1146
|
+
const results = await backend.queryLLMEvents({});
|
|
1147
|
+
assert.strictEqual(results.length, 0);
|
|
1148
|
+
});
|
|
1149
|
+
it('should combine multiple filters', async () => {
|
|
1150
|
+
const today = getTestDate();
|
|
1151
|
+
const mockEvents = [
|
|
1152
|
+
{
|
|
1153
|
+
timestamp: '2026-01-28T10:00:00Z',
|
|
1154
|
+
name: 'llm.completion',
|
|
1155
|
+
attributes: { 'gen_ai.request.model': 'claude-3-opus', 'gen_ai.system': 'anthropic' },
|
|
1156
|
+
},
|
|
1157
|
+
{
|
|
1158
|
+
timestamp: '2026-01-28T10:01:00Z',
|
|
1159
|
+
name: 'llm.completion',
|
|
1160
|
+
attributes: { 'gen_ai.request.model': 'gpt-4', 'gen_ai.system': 'openai' },
|
|
1161
|
+
},
|
|
1162
|
+
{
|
|
1163
|
+
timestamp: '2026-01-28T10:02:00Z',
|
|
1164
|
+
name: 'llm.embedding',
|
|
1165
|
+
attributes: { 'gen_ai.request.model': 'claude-3-opus', 'gen_ai.system': 'anthropic' },
|
|
1166
|
+
},
|
|
1167
|
+
];
|
|
1168
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1169
|
+
const results = await backend.queryLLMEvents({
|
|
1170
|
+
eventName: 'completion',
|
|
1171
|
+
model: 'claude-3-opus',
|
|
1172
|
+
provider: 'anthropic',
|
|
1173
|
+
});
|
|
1174
|
+
assert.strictEqual(results.length, 1);
|
|
1175
|
+
assert.strictEqual(results[0].name, 'llm.completion');
|
|
1176
|
+
});
|
|
1177
|
+
it('should use OTel GenAI provider fallback: gen_ai.provider.name -> gen_ai.system -> provider', async () => {
|
|
1178
|
+
const today = getTestDate();
|
|
1179
|
+
const mockEvents = [
|
|
1180
|
+
{
|
|
1181
|
+
timestamp: '2026-01-28T10:00:00Z',
|
|
1182
|
+
name: 'llm.completion',
|
|
1183
|
+
attributes: { 'gen_ai.provider.name': 'anthropic-new' }, // should match
|
|
1184
|
+
},
|
|
1185
|
+
{
|
|
1186
|
+
timestamp: '2026-01-28T10:01:00Z',
|
|
1187
|
+
name: 'llm.completion',
|
|
1188
|
+
attributes: { 'gen_ai.system': 'anthropic-new', 'provider': 'legacy' }, // should match via gen_ai.system
|
|
1189
|
+
},
|
|
1190
|
+
{
|
|
1191
|
+
timestamp: '2026-01-28T10:02:00Z',
|
|
1192
|
+
name: 'llm.completion',
|
|
1193
|
+
attributes: { 'provider': 'anthropic-new' }, // should match via provider fallback
|
|
1194
|
+
},
|
|
1195
|
+
{
|
|
1196
|
+
timestamp: '2026-01-28T10:03:00Z',
|
|
1197
|
+
name: 'llm.completion',
|
|
1198
|
+
attributes: { 'gen_ai.provider.name': 'other-provider' }, // should NOT match
|
|
1199
|
+
},
|
|
1200
|
+
];
|
|
1201
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1202
|
+
const results = await backend.queryLLMEvents({ provider: 'anthropic-new' });
|
|
1203
|
+
assert.strictEqual(results.length, 3);
|
|
1204
|
+
});
|
|
1205
|
+
it('should filter OpenAI events by provider', async () => {
|
|
1206
|
+
const today = getTestDate();
|
|
1207
|
+
const mockEvents = [
|
|
1208
|
+
{
|
|
1209
|
+
timestamp: '2026-01-28T10:00:00Z',
|
|
1210
|
+
name: 'llm.completion',
|
|
1211
|
+
attributes: {
|
|
1212
|
+
'gen_ai.provider.name': 'openai',
|
|
1213
|
+
'gen_ai.request.model': 'gpt-4o',
|
|
1214
|
+
'gen_ai.usage.input_tokens': 500,
|
|
1215
|
+
},
|
|
1216
|
+
},
|
|
1217
|
+
{
|
|
1218
|
+
timestamp: '2026-01-28T10:01:00Z',
|
|
1219
|
+
name: 'llm.completion',
|
|
1220
|
+
attributes: {
|
|
1221
|
+
'gen_ai.provider.name': 'anthropic',
|
|
1222
|
+
'gen_ai.request.model': 'claude-3-opus',
|
|
1223
|
+
},
|
|
1224
|
+
},
|
|
1225
|
+
];
|
|
1226
|
+
writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);
|
|
1227
|
+
const results = await backend.queryLLMEvents({ provider: 'openai' });
|
|
1228
|
+
assert.strictEqual(results.length, 1);
|
|
1229
|
+
assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'gpt-4o');
|
|
1230
|
+
});
|
|
1231
|
+
it('should filter Google Gemini events by provider', async () => {
  const dateStamp = getTestDate();
  // Two Google variants (gemini / vertex_ai) plus an OpenAI event; each
  // provider query must isolate exactly its own event.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'gcp.gemini',
        'gen_ai.request.model': 'gemini-1.5-pro',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'gcp.vertex_ai',
        'gen_ai.request.model': 'gemini-pro',
      },
    },
    {
      timestamp: '2026-01-28T10:02:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'openai',
        'gen_ai.request.model': 'gpt-4',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const geminiResults = await backend.queryLLMEvents({ provider: 'gcp.gemini' });
  assert.strictEqual(geminiResults.length, 1);
  assert.strictEqual(geminiResults[0].attributes?.['gen_ai.request.model'], 'gemini-1.5-pro');

  const vertexResults = await backend.queryLLMEvents({ provider: 'gcp.vertex_ai' });
  assert.strictEqual(vertexResults.length, 1);
});
|
|
1266
|
+
it('should filter Mistral AI events by provider', async () => {
  const dateStamp = getTestDate();
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'mistral_ai',
        'gen_ai.request.model': 'mistral-large',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'anthropic',
        'gen_ai.request.model': 'claude-3',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  // Only the mistral_ai event should survive the provider filter.
  const results = await backend.queryLLMEvents({ provider: 'mistral_ai' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'mistral-large');
});
|
|
1291
|
+
it('should filter AWS Bedrock events by provider', async () => {
  const dateStamp = getTestDate();
  // Bedrock hosts Anthropic models under vendor-prefixed model IDs; filtering
  // must key off the provider attribute, not the model string.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'aws.bedrock',
        'gen_ai.request.model': 'anthropic.claude-3-sonnet-20240229-v1:0',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'anthropic',
        'gen_ai.request.model': 'claude-3-sonnet',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ provider: 'aws.bedrock' });
  assert.strictEqual(results.length, 1);
  assert.ok(results[0].attributes?.['gen_ai.request.model']?.toString().includes('anthropic.claude'));
});
|
|
1316
|
+
it('should filter Cohere events by provider', async () => {
  const dateStamp = getTestDate();
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'cohere',
        'gen_ai.request.model': 'command-r-plus',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'openai',
        'gen_ai.request.model': 'gpt-4',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ provider: 'cohere' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'command-r-plus');
});
|
|
1341
|
+
it('should filter Groq events by provider', async () => {
  const dateStamp = getTestDate();
  // Groq and Together both serve Llama-family models; provider filtering must
  // distinguish them even though the model names look similar.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'groq',
        'gen_ai.request.model': 'llama-3.3-70b',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'together_ai',
        'gen_ai.request.model': 'llama-3-70b',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const groqResults = await backend.queryLLMEvents({ provider: 'groq' });
  assert.strictEqual(groqResults.length, 1);
  assert.strictEqual(groqResults[0].attributes?.['gen_ai.request.model'], 'llama-3.3-70b');

  const togetherResults = await backend.queryLLMEvents({ provider: 'together_ai' });
  assert.strictEqual(togetherResults.length, 1);
});
|
|
1368
|
+
it('should filter Ollama local model events by provider', async () => {
  const dateStamp = getTestDate();
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'ollama',
        'gen_ai.request.model': 'llama3:8b',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'openai',
        'gen_ai.request.model': 'gpt-4',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ provider: 'ollama' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'llama3:8b');
});
|
|
1393
|
+
it('should filter custom/internal provider events', async () => {
  const dateStamp = getTestDate();
  // Provider filtering is not limited to well-known vendors; arbitrary
  // provider names must work too.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'custom-internal-llm',
        'gen_ai.request.model': 'internal-model-v2',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'anthropic',
        'gen_ai.request.model': 'claude-3',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ provider: 'custom-internal-llm' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'internal-model-v2');
});
|
|
1418
|
+
it('should combine provider and model filters', async () => {
  const dateStamp = getTestDate();
  // Both filters must apply conjunctively: a matching model name under a
  // different provider must be excluded.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'openai',
        'gen_ai.request.model': 'gpt-4o',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'openai',
        'gen_ai.request.model': 'gpt-4-turbo',
      },
    },
    {
      timestamp: '2026-01-28T10:02:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'anthropic',
        'gen_ai.request.model': 'gpt-4o', // Same model name, different provider
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ provider: 'openai', model: 'gpt-4o' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].attributes?.['gen_ai.provider.name'], 'openai');
  assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'gpt-4o');
});
|
|
1452
|
+
it('should return empty array when provider has no events', async () => {
  const dateStamp = getTestDate();
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.completion',
      attributes: {
        'gen_ai.provider.name': 'anthropic',
        'gen_ai.request.model': 'claude-3',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  // A provider with no matching events yields an empty result, not an error.
  const results = await backend.queryLLMEvents({ provider: 'nonexistent-provider' });
  assert.strictEqual(results.length, 0);
});
|
|
1468
|
+
it('should filter events by operationName', async () => {
  const dateStamp = getTestDate();
  // Three distinct gen_ai.operation.name values; only 'chat' should match.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.chat',
      attributes: { 'gen_ai.operation.name': 'chat' },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.embedding',
      attributes: { 'gen_ai.operation.name': 'embeddings' },
    },
    {
      timestamp: '2026-01-28T10:02:00Z',
      name: 'agent.invoke',
      attributes: { 'gen_ai.operation.name': 'invoke_agent' },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ operationName: 'chat' });
  assert.strictEqual(results.length, 1);
  assert.strictEqual(results[0].name, 'llm.chat');
});
|
|
1492
|
+
it('should filter events by conversationId', async () => {
  const dateStamp = getTestDate();
  // Two events share conv-abc123; one belongs to a different conversation.
  const events = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.chat',
      attributes: { 'gen_ai.conversation.id': 'conv-abc123' },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.chat',
      attributes: { 'gen_ai.conversation.id': 'conv-xyz789' },
    },
    {
      timestamp: '2026-01-28T10:02:00Z',
      name: 'llm.chat',
      attributes: { 'gen_ai.conversation.id': 'conv-abc123' },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${dateStamp}.jsonl`), events);

  const results = await backend.queryLLMEvents({ conversationId: 'conv-abc123' });
  assert.strictEqual(results.length, 2);
});
|
|
1515
|
+
it('should combine OTel GenAI filters with other filters', async () => {
  const today = getTestDate();
  // Both events match operationName and conversationId; only the model filter
  // narrows the result to a single event.
  const mockEvents = [
    {
      timestamp: '2026-01-28T10:00:00Z',
      name: 'llm.chat',
      attributes: {
        'gen_ai.operation.name': 'chat',
        'gen_ai.conversation.id': 'conv-abc123',
        'gen_ai.request.model': 'claude-3-opus',
      },
    },
    {
      timestamp: '2026-01-28T10:01:00Z',
      name: 'llm.chat',
      attributes: {
        'gen_ai.operation.name': 'chat',
        'gen_ai.conversation.id': 'conv-abc123',
        'gen_ai.request.model': 'gpt-4',
      },
    },
  ];
  writeJsonlFile(path.join(tempDir, `llm-events-${today}.jsonl`), mockEvents);

  const results = await backend.queryLLMEvents({
    operationName: 'chat',
    conversationId: 'conv-abc123',
    model: 'claude-3-opus',
  });
  assert.strictEqual(results.length, 1);
  // Use optional chaining like the sibling tests so a missing attributes
  // object reports as an assertion failure rather than a TypeError.
  assert.strictEqual(results[0].attributes?.['gen_ai.request.model'], 'claude-3-opus');
});
|
|
1546
|
+
});
|
|
1547
|
+
describe('queryTraces OTel GenAI agent/tool filters', () => {
  it('should filter traces by agentId', async () => {
    const dateStamp = getTestDate();
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'agent.invoke',
        startTime: [1700000000, 0],
        attributes: { 'gen_ai.agent.id': 'agent-001' },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'agent.invoke',
        startTime: [1700000001, 0],
        attributes: { 'gen_ai.agent.id': 'agent-002' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({ agentId: 'agent-001' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.agent.id'], 'agent-001');
  });

  it('should filter traces by agentName', async () => {
    const dateStamp = getTestDate();
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'agent.invoke',
        startTime: [1700000000, 0],
        attributes: { 'gen_ai.agent.name': 'Explore' },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'agent.invoke',
        startTime: [1700000001, 0],
        attributes: { 'gen_ai.agent.name': 'Plan' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({ agentName: 'Explore' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.agent.name'], 'Explore');
  });

  it('should filter traces by toolName', async () => {
    const dateStamp = getTestDate();
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'tool.execute',
        startTime: [1700000000, 0],
        attributes: { 'gen_ai.tool.name': 'Read' },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'tool.execute',
        startTime: [1700000001, 0],
        attributes: { 'gen_ai.tool.name': 'Write' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({ toolName: 'Read' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.tool.name'], 'Read');
  });

  it('should filter traces by toolCallId', async () => {
    const dateStamp = getTestDate();
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'tool.execute',
        startTime: [1700000000, 0],
        attributes: { 'gen_ai.tool.call.id': 'toolu_abc123' },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'tool.execute',
        startTime: [1700000001, 0],
        attributes: { 'gen_ai.tool.call.id': 'toolu_xyz789' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({ toolCallId: 'toolu_abc123' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.tool.call.id'], 'toolu_abc123');
  });

  it('should filter traces by toolType', async () => {
    const dateStamp = getTestDate();
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'tool.execute',
        startTime: [1700000000, 0],
        attributes: { 'gen_ai.tool.type': 'function' },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'tool.execute',
        startTime: [1700000001, 0],
        attributes: { 'gen_ai.tool.type': 'mcp' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({ toolType: 'function' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.tool.type'], 'function');
  });

  it('should filter traces by operationName', async () => {
    const dateStamp = getTestDate();
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'llm.call',
        startTime: [1700000000, 0],
        attributes: { 'gen_ai.operation.name': 'chat' },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'tool.execute',
        startTime: [1700000001, 0],
        attributes: { 'gen_ai.operation.name': 'execute_tool' },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({ operationName: 'chat' });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.operation.name'], 'chat');
  });

  it('should combine agent/tool filters with other trace filters', async () => {
    const dateStamp = getTestDate();
    // Both spans share the agent name and span-name prefix; the toolName
    // filter is what narrows the result to one span.
    const spans = [
      {
        traceId: 'trace1',
        spanId: 'span1',
        name: 'agent.explore',
        startTime: [1700000000, 0],
        duration: [0, 100000000], // 100ms
        attributes: {
          'gen_ai.agent.name': 'Explore',
          'gen_ai.tool.name': 'Grep',
        },
      },
      {
        traceId: 'trace1',
        spanId: 'span2',
        name: 'agent.explore',
        startTime: [1700000001, 0],
        duration: [0, 200000000], // 200ms
        attributes: {
          'gen_ai.agent.name': 'Explore',
          'gen_ai.tool.name': 'Read',
        },
      },
    ];
    writeJsonlFile(path.join(tempDir, `traces-${dateStamp}.jsonl`), spans);

    const results = await backend.queryTraces({
      agentName: 'Explore',
      toolName: 'Grep',
      spanName: 'agent',
    });
    assert.strictEqual(results.length, 1);
    assert.strictEqual(results[0].attributes?.['gen_ai.tool.name'], 'Grep');
  });
});
|
|
1722
|
+
});
|
|
1723
|
+
//# sourceMappingURL=local-jsonl-traces.test.js.map
|