@comprehend/telemetry-node 0.1.4 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +112 -27
- package/dist/ComprehendDevSpanProcessor.d.ts +10 -6
- package/dist/ComprehendDevSpanProcessor.js +154 -87
- package/dist/ComprehendDevSpanProcessor.test.js +270 -449
- package/dist/ComprehendMetricsExporter.d.ts +18 -0
- package/dist/ComprehendMetricsExporter.js +178 -0
- package/dist/ComprehendMetricsExporter.test.d.ts +1 -0
- package/dist/ComprehendMetricsExporter.test.js +266 -0
- package/dist/ComprehendSDK.d.ts +18 -0
- package/dist/ComprehendSDK.js +56 -0
- package/dist/ComprehendSDK.test.d.ts +1 -0
- package/dist/ComprehendSDK.test.js +126 -0
- package/dist/WebSocketConnection.d.ts +23 -3
- package/dist/WebSocketConnection.js +106 -12
- package/dist/WebSocketConnection.test.js +236 -169
- package/dist/index.d.ts +3 -1
- package/dist/index.js +5 -1
- package/dist/util.d.ts +2 -0
- package/dist/util.js +7 -0
- package/dist/wire-protocol.d.ts +168 -28
- package/package.json +3 -1
- package/src/ComprehendDevSpanProcessor.test.ts +311 -507
- package/src/ComprehendDevSpanProcessor.ts +178 -105
- package/src/ComprehendMetricsExporter.test.ts +334 -0
- package/src/ComprehendMetricsExporter.ts +225 -0
- package/src/ComprehendSDK.test.ts +160 -0
- package/src/ComprehendSDK.ts +63 -0
- package/src/WebSocketConnection.test.ts +286 -205
- package/src/WebSocketConnection.ts +135 -13
- package/src/index.ts +3 -2
- package/src/util.ts +6 -0
- package/src/wire-protocol.ts +204 -29
- package/src/sql-analyzer.test.ts +0 -599
- package/src/sql-analyzer.ts +0 -439
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { ExportResult } from '@opentelemetry/core';
|
|
2
|
+
import { AggregationTemporality, InstrumentType, PushMetricExporter, ResourceMetrics } from '@opentelemetry/sdk-metrics';
|
|
3
|
+
import { CustomMetricSpecification } from './wire-protocol';
|
|
4
|
+
import { WebSocketConnection } from './WebSocketConnection';
|
|
5
|
+
/**
 * PushMetricExporter implementation that forwards OpenTelemetry metrics over a
 * shared Comprehend WebSocketConnection instead of an OTLP endpoint.
 */
export declare class ComprehendMetricsExporter implements PushMetricExporter {
    private readonly connection;
    private customCumulativeSpecs;
    private customTimeSeriesSpecs;
    constructor(connection: WebSocketConnection);
    /** Replace the active server-configured custom metric specifications. */
    updateCustomMetrics(specs: CustomMetricSpecification[]): void;
    /** Export one batch of resource metrics; always invokes resultCallback. */
    export(metrics: ResourceMetrics, resultCallback: (result: ExportResult) => void): void;
    private sendTimeSeriesData;
    private sendCumulativeData;
    private getServiceSubject;
    /** Aggregation temporality preference reported to the SDK's metric reader. */
    selectAggregationTemporality(instrumentType: InstrumentType): AggregationTemporality;
    forceFlush(): Promise<void>;
    shutdown(): Promise<void>;
}
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ComprehendMetricsExporter = void 0;
|
|
4
|
+
const core_1 = require("@opentelemetry/core");
|
|
5
|
+
const sdk_metrics_1 = require("@opentelemetry/sdk-metrics");
|
|
6
|
+
const sha2_1 = require("@noble/hashes/sha2");
|
|
7
|
+
const utils_1 = require("@noble/hashes/utils");
|
|
8
|
+
/**
 * Well-known Node.js runtime metric names that we forward as timeseries (gauge) metrics.
 * Sources:
 * - @opentelemetry/instrumentation-runtime-node (process.runtime.nodejs.*, nodejs.*)
 * - OTel semconv: process metrics (process.cpu.utilization, process.uptime)
 * - OTel semconv: V8 JS metrics (v8js.memory.heap.*)
 *
 * Metrics whose descriptor name is in this set are sent to the service-derived
 * subject; anything not listed here (and not covered by a custom spec) is dropped.
 */
const KNOWN_GAUGE_METRICS = new Set([
    // Node.js runtime instrumentation metrics (@opentelemetry/instrumentation-runtime-node)
    'process.runtime.nodejs.memory.heap.total',
    'process.runtime.nodejs.memory.heap.used',
    'process.runtime.nodejs.memory.rss',
    'process.runtime.nodejs.memory.array_buffers',
    'process.runtime.nodejs.memory.external',
    'nodejs.eventloop.delay.min',
    'nodejs.eventloop.delay.max',
    'nodejs.eventloop.delay.mean',
    'nodejs.eventloop.delay.p50',
    'nodejs.eventloop.delay.p99',
    'nodejs.eventloop.utilization',
    'nodejs.active_handles.total',
    // OTel semconv process metrics
    'process.cpu.utilization',
    'process.uptime',
    // OTel semconv V8 JS runtime metrics
    'v8js.memory.heap.limit',
    'v8js.memory.heap.used',
    'v8js.memory.heap.space.available_size',
    'v8js.memory.heap.space.physical_size',
    // Host metrics (@opentelemetry/host-metrics)
    'process.memory.usage',
]);
/** Well-known counter/cumulative metrics, forwarded as 'cumulative' wire events. */
const KNOWN_CUMULATIVE_METRICS = new Set([
    'process.cpu.time',
    'process.memory.virtual',
]);
|
|
45
|
+
/**
 * A PushMetricExporter that forwards OpenTelemetry metrics to the Comprehend
 * backend over an existing WebSocketConnection.
 *
 * Behaviour:
 * - Histogram / exponential-histogram data points are dropped entirely (the
 *   wire protocol has no histogram shape).
 * - Well-known gauge metrics become 'timeseries' events and well-known
 *   counters become 'cumulative' events, addressed to a subject hash derived
 *   from the resource's service identity (skipped when no service.name).
 * - Server-configured custom metrics (see updateCustomMetrics) are sent to
 *   their own subject with their attribute allow-list applied.
 */
class ComprehendMetricsExporter {
    constructor(connection) {
        this.customCumulativeSpecs = [];
        this.customTimeSeriesSpecs = [];
        this.connection = connection;
    }
    /** Replace the active custom metric specifications (server-pushed config). */
    updateCustomMetrics(specs) {
        this.customCumulativeSpecs = specs.filter((s) => s.type === 'cumulative');
        this.customTimeSeriesSpecs = specs.filter((s) => s.type === 'timeseries');
    }
    /**
     * Export one batch of resource metrics.
     * Always invokes resultCallback: SUCCESS when the batch was processed
     * (even if every metric was skipped), FAILED on an unexpected throw.
     */
    export(metrics, resultCallback) {
        try {
            const serviceSubject = this.getServiceSubject(metrics);
            for (const scopeMetrics of metrics.scopeMetrics) {
                for (const metric of scopeMetrics.metrics) {
                    const name = metric.descriptor.name;
                    const unit = metric.descriptor.unit;
                    // Skip histogram types entirely — not representable on the wire.
                    if (metric.dataPointType === sdk_metrics_1.DataPointType.HISTOGRAM ||
                        metric.dataPointType === sdk_metrics_1.DataPointType.EXPONENTIAL_HISTOGRAM) {
                        continue;
                    }
                    // Standard well-known metrics need a resolvable service subject.
                    if (KNOWN_GAUGE_METRICS.has(name) && serviceSubject) {
                        this.sendTimeSeriesData(serviceSubject, name, unit, metric);
                        continue;
                    }
                    if (KNOWN_CUMULATIVE_METRICS.has(name) && serviceSubject) {
                        this.sendCumulativeData(serviceSubject, name, unit, metric);
                        continue;
                    }
                    // Custom specs carry their own subject and attribute allow-list.
                    const cumulativeSpec = this.customCumulativeSpecs.find(s => s.id === name);
                    if (cumulativeSpec) {
                        this.sendCumulativeData(cumulativeSpec.subject, name, unit, metric, cumulativeSpec.attributes);
                        continue;
                    }
                    const timeSeriesSpec = this.customTimeSeriesSpecs.find(s => s.id === name);
                    if (timeSeriesSpec) {
                        this.sendTimeSeriesData(timeSeriesSpec.subject, name, unit, metric, timeSeriesSpec.attributes);
                    }
                }
            }
            resultCallback({ code: core_1.ExportResultCode.SUCCESS });
        }
        catch {
            resultCallback({ code: core_1.ExportResultCode.FAILED });
        }
    }
    /** Forward a metric's data points as a 'timeseries' wire message. */
    sendTimeSeriesData(subject, name, unit, metric, filterAttributes) {
        this.sendMetricData('timeseries', subject, name, unit, metric, filterAttributes);
    }
    /** Forward a metric's data points as a 'cumulative' wire message. */
    sendCumulativeData(subject, name, unit, metric, filterAttributes) {
        this.sendMetricData('cumulative', subject, name, unit, metric, filterAttributes);
    }
    /**
     * Shared implementation for both wire events (the two paths were previously
     * duplicated verbatim): convert each data point to the wire shape and send
     * a single message when there is at least one point.
     */
    sendMetricData(event, subject, name, unit, metric, filterAttributes) {
        const data = [];
        for (const dp of metric.dataPoints) {
            data.push({
                subject,
                type: name,
                timestamp: hrTimeFromOtel(dp.endTime),
                value: dp.value,
                unit,
                attributes: extractAttributes(dp.attributes, filterAttributes),
            });
        }
        if (data.length > 0) {
            this.connection.sendMessage({
                event,
                seq: this.connection.nextSeq(),
                data,
            });
        }
    }
    /**
     * Derive the stable subject hash for the service described by the resource
     * attributes, or null when no service.name is present.
     */
    getServiceSubject(metrics) {
        const attrs = metrics.resource.attributes;
        const name = attrs['service.name'];
        if (!name)
            return null;
        const namespace = attrs['service.namespace'];
        const environment = attrs['deployment.environment'];
        const idString = `service:${name}:${namespace ?? ''}:${environment ?? ''}`;
        return hashIdString(idString);
    }
    /** DELTA for synchronous counters/histograms, CUMULATIVE for everything else. */
    selectAggregationTemporality(instrumentType) {
        switch (instrumentType) {
            case sdk_metrics_1.InstrumentType.COUNTER:
            case sdk_metrics_1.InstrumentType.HISTOGRAM:
                return sdk_metrics_1.AggregationTemporality.DELTA;
            default:
                return sdk_metrics_1.AggregationTemporality.CUMULATIVE;
        }
    }
    /** Nothing is buffered locally, so flushing is a no-op. */
    async forceFlush() {
    }
    /** The shared connection is owned by the SDK, so shutdown is a no-op here. */
    async shutdown() {
    }
}
|
|
158
|
+
exports.ComprehendMetricsExporter = ComprehendMetricsExporter;
|
|
159
|
+
/** SHA-256 the identity string and render the digest as lowercase hex. */
function hashIdString(idString) {
    const digest = (0, sha2_1.sha256)((0, utils_1.utf8ToBytes)(idString));
    let hex = '';
    for (const byte of digest) {
        hex += byte.toString(16).padStart(2, '0');
    }
    return hex;
}
|
|
164
|
+
/**
 * Copy attribute entries into a plain object, dropping undefined values and —
 * when an allow-list is provided — any key not present in that list.
 */
function extractAttributes(attrs, filterKeys) {
    const kept = Object.entries(attrs).filter(([key, value]) =>
        value !== undefined && (!filterKeys || filterKeys.includes(key)));
    return Object.fromEntries(kept);
}
|
|
175
|
+
/** Convert OTel HrTime to our wire protocol HrTime (same format: [seconds, nanoseconds]). */
function hrTimeFromOtel(time) {
    // The two representations are structurally identical, so this is a pass-through;
    // it exists only to keep the OTel/wire-protocol boundary explicit at call sites.
    return time;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const core_1 = require("@opentelemetry/core");
|
|
4
|
+
const sdk_metrics_1 = require("@opentelemetry/sdk-metrics");
|
|
5
|
+
const ComprehendMetricsExporter_1 = require("./ComprehendMetricsExporter");
|
|
6
|
+
jest.mock('./WebSocketConnection');
|
|
7
|
+
// Test suite for ComprehendMetricsExporter: verifies routing of well-known and
// custom metrics to 'timeseries'/'cumulative' wire messages over a mocked
// WebSocketConnection, plus temporality selection and lifecycle no-ops.
describe('ComprehendMetricsExporter', () => {
    let exporter;
    let mockConnection;
    let sentMessages;
    let seqCounter;
    beforeEach(() => {
        sentMessages = [];
        seqCounter = 1;
        // Minimal stand-in for WebSocketConnection: records every message sent
        // and hands out monotonically increasing sequence numbers.
        mockConnection = {
            sendMessage: jest.fn((message) => {
                sentMessages.push(message);
            }),
            nextSeq: jest.fn(() => seqCounter++),
            close: jest.fn(),
            setProcessContext: jest.fn(),
        };
        exporter = new ComprehendMetricsExporter_1.ComprehendMetricsExporter(mockConnection);
    });
    // Builds a plain-object ResourceMetrics shape from a compact description of
    // metrics; defaults the resource to a service named 'test-service'.
    function createResourceMetrics(metrics, resourceAttributes = { 'service.name': 'test-service' }) {
        return ({
            resource: { attributes: resourceAttributes },
            scopeMetrics: [{
                    scope: { name: 'test-scope' },
                    metrics: metrics.map(m => ({
                        descriptor: {
                            name: m.name,
                            description: '',
                            unit: m.unit ?? '',
                            valueType: 0,
                        },
                        dataPointType: m.dataPointType,
                        dataPoints: m.dataPoints.map(dp => ({
                            startTime: dp.startTime ?? [0, 0],
                            endTime: dp.endTime,
                            attributes: dp.attributes,
                            value: dp.value,
                        })),
                        ...(m.isMonotonic !== undefined ? { isMonotonic: m.isMonotonic } : {}),
                        aggregationTemporality: sdk_metrics_1.AggregationTemporality.CUMULATIVE,
                    })),
                }],
        });
    }
    describe('Standard Gauge Metrics', () => {
        it('should forward known gauge metrics as timeseries', () => {
            const rm = createResourceMetrics([{
                    name: 'process.runtime.nodejs.memory.heap.used',
                    unit: 'By',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{
                            endTime: [1700000000, 0],
                            attributes: {},
                            value: 52428800,
                        }],
                }]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            expect(callback).toHaveBeenCalledWith({ code: core_1.ExportResultCode.SUCCESS });
            const tsMsg = sentMessages.find(m => m.event === 'timeseries');
            expect(tsMsg).toBeDefined();
            expect(tsMsg.data).toHaveLength(1);
            expect(tsMsg.data[0]).toMatchObject({
                type: 'process.runtime.nodejs.memory.heap.used',
                value: 52428800,
                unit: 'By',
            });
        });
        it('should forward multiple known gauge metrics', () => {
            const rm = createResourceMetrics([
                {
                    name: 'process.runtime.nodejs.memory.heap.total',
                    unit: 'By',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{ endTime: [1700000000, 0], attributes: {}, value: 104857600 }],
                },
                {
                    name: 'nodejs.eventloop.delay.mean',
                    unit: 'ms',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{ endTime: [1700000000, 0], attributes: {}, value: 1.5 }],
                },
            ]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            // One wire message per metric, not one per batch.
            const tsMessages = sentMessages.filter(m => m.event === 'timeseries');
            expect(tsMessages).toHaveLength(2);
        });
    });
    describe('Unknown Metrics', () => {
        it('should ignore unknown metric names', () => {
            const rm = createResourceMetrics([{
                    name: 'some.unknown.metric',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: 42 }],
                }]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            expect(sentMessages).toHaveLength(0);
            // Skipping metrics is still a successful export.
            expect(callback).toHaveBeenCalledWith({ code: core_1.ExportResultCode.SUCCESS });
        });
    });
    describe('Histogram Metrics', () => {
        it('should skip histogram data point types', () => {
            const rm = createResourceMetrics([{
                    name: 'process.runtime.nodejs.memory.heap.used',
                    dataPointType: sdk_metrics_1.DataPointType.HISTOGRAM,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: { buckets: {} } }],
                }]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            expect(sentMessages).toHaveLength(0);
        });
        it('should skip exponential histogram data point types', () => {
            const rm = createResourceMetrics([{
                    name: 'process.runtime.nodejs.memory.heap.used',
                    dataPointType: sdk_metrics_1.DataPointType.EXPONENTIAL_HISTOGRAM,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: {} }],
                }]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            expect(sentMessages).toHaveLength(0);
        });
    });
    describe('Custom Metrics', () => {
        it('should forward custom cumulative metrics', () => {
            exporter.updateCustomMetrics([
                { type: 'cumulative', id: 'app.requests.total', attributes: ['method', 'status'], subject: 'custom-sub' },
            ]);
            const rm = createResourceMetrics([{
                    name: 'app.requests.total',
                    unit: '1',
                    dataPointType: sdk_metrics_1.DataPointType.SUM,
                    dataPoints: [{
                            endTime: [1700000000, 0],
                            attributes: { method: 'GET', status: '200', ignored: 'yes' },
                            value: 150,
                        }],
                }]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            const cumMsg = sentMessages.find(m => m.event === 'cumulative');
            expect(cumMsg).toBeDefined();
            expect(cumMsg.data[0]).toMatchObject({
                subject: 'custom-sub',
                type: 'app.requests.total',
                value: 150,
            });
            // Should only include filtered attributes
            expect(cumMsg.data[0].attributes).toEqual({ method: 'GET', status: '200' });
        });
        it('should forward custom timeseries metrics', () => {
            exporter.updateCustomMetrics([
                { type: 'timeseries', id: 'app.cpu.usage', attributes: ['core'], subject: 'cpu-sub' },
            ]);
            const rm = createResourceMetrics([{
                    name: 'app.cpu.usage',
                    unit: '%',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{
                            endTime: [1700000000, 0],
                            attributes: { core: '0' },
                            value: 45.2,
                        }],
                }]);
            const callback = jest.fn();
            exporter.export(rm, callback);
            const tsMsg = sentMessages.find(m => m.event === 'timeseries');
            expect(tsMsg).toBeDefined();
            expect(tsMsg.data[0]).toMatchObject({
                subject: 'cpu-sub',
                type: 'app.cpu.usage',
                value: 45.2,
            });
        });
        it('should update custom metric specs when updateCustomMetrics is called', () => {
            exporter.updateCustomMetrics([
                { type: 'cumulative', id: 'old.metric', attributes: [], subject: 's1' },
            ]);
            // Replace with new specs
            exporter.updateCustomMetrics([
                { type: 'timeseries', id: 'new.metric', attributes: [], subject: 's2' },
            ]);
            const rmOld = createResourceMetrics([{
                    name: 'old.metric',
                    dataPointType: sdk_metrics_1.DataPointType.SUM,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: 1 }],
                }]);
            const rmNew = createResourceMetrics([{
                    name: 'new.metric',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: 2 }],
                }]);
            const callback = jest.fn();
            exporter.export(rmOld, callback);
            expect(sentMessages).toHaveLength(0); // old spec no longer active
            exporter.export(rmNew, callback);
            expect(sentMessages).toHaveLength(1);
        });
    });
    describe('Service Subject Hashing', () => {
        it('should derive service subject from resource attributes', () => {
            const rm = createResourceMetrics([{
                    name: 'process.runtime.nodejs.memory.heap.used',
                    unit: 'By',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: 100 }],
                }], {
                'service.name': 'my-svc',
                'service.namespace': 'ns',
                'deployment.environment': 'prod',
            });
            const callback = jest.fn();
            exporter.export(rm, callback);
            const tsMsg = sentMessages.find(m => m.event === 'timeseries');
            expect(tsMsg.data[0].subject).toBeDefined();
            expect(typeof tsMsg.data[0].subject).toBe('string');
            expect(tsMsg.data[0].subject.length).toBe(64); // SHA256 hex
        });
        it('should skip known metrics when no service.name in resource', () => {
            const rm = createResourceMetrics([{
                    name: 'process.runtime.nodejs.memory.heap.used',
                    dataPointType: sdk_metrics_1.DataPointType.GAUGE,
                    dataPoints: [{ endTime: [0, 0], attributes: {}, value: 100 }],
                }], {}); // No service.name
            const callback = jest.fn();
            exporter.export(rm, callback);
            expect(sentMessages).toHaveLength(0);
        });
    });
    describe('Aggregation Temporality', () => {
        it('should return DELTA for COUNTER', () => {
            expect(exporter.selectAggregationTemporality(sdk_metrics_1.InstrumentType.COUNTER))
                .toBe(sdk_metrics_1.AggregationTemporality.DELTA);
        });
        it('should return DELTA for HISTOGRAM', () => {
            expect(exporter.selectAggregationTemporality(sdk_metrics_1.InstrumentType.HISTOGRAM))
                .toBe(sdk_metrics_1.AggregationTemporality.DELTA);
        });
        it('should return CUMULATIVE for OBSERVABLE_GAUGE', () => {
            expect(exporter.selectAggregationTemporality(sdk_metrics_1.InstrumentType.OBSERVABLE_GAUGE))
                .toBe(sdk_metrics_1.AggregationTemporality.CUMULATIVE);
        });
        it('should return CUMULATIVE for OBSERVABLE_COUNTER', () => {
            expect(exporter.selectAggregationTemporality(sdk_metrics_1.InstrumentType.OBSERVABLE_COUNTER))
                .toBe(sdk_metrics_1.AggregationTemporality.CUMULATIVE);
        });
        it('should return CUMULATIVE for GAUGE', () => {
            expect(exporter.selectAggregationTemporality(sdk_metrics_1.InstrumentType.GAUGE))
                .toBe(sdk_metrics_1.AggregationTemporality.CUMULATIVE);
        });
    });
    describe('Lifecycle', () => {
        it('should resolve forceFlush', async () => {
            await expect(exporter.forceFlush()).resolves.toBeUndefined();
        });
        it('should resolve shutdown', async () => {
            await expect(exporter.shutdown()).resolves.toBeUndefined();
        });
    });
});
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { ComprehendDevSpanProcessor } from './ComprehendDevSpanProcessor';
|
|
2
|
+
import { ComprehendMetricsExporter } from './ComprehendMetricsExporter';
|
|
3
|
+
/** Logging callback used when `debug` is supplied as a function. */
export type LogFn = (message: string) => void;
/**
 * Top-level SDK facade: owns the single WebSocketConnection and lazily creates
 * the span processor and metrics exporter that share it.
 */
export declare class ComprehendSDK {
    private readonly connection;
    private spanProcessor;
    private metricsExporter;
    private customMetricListeners;
    constructor(options: {
        organization: string;
        token: string;
        debug?: boolean | LogFn;
    });
    private distributeCustomMetrics;
    /** Lazily create (once) and return the span processor bound to the shared connection. */
    getSpanProcessor(): ComprehendDevSpanProcessor;
    /** Lazily create (once) and return the metrics exporter bound to the shared connection. */
    getMetricsExporter(): ComprehendMetricsExporter;
    /** Shut down any created consumers, then close the shared connection. */
    shutdown(): Promise<void>;
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ComprehendSDK = void 0;
|
|
4
|
+
const WebSocketConnection_1 = require("./WebSocketConnection");
|
|
5
|
+
const ComprehendDevSpanProcessor_1 = require("./ComprehendDevSpanProcessor");
|
|
6
|
+
const ComprehendMetricsExporter_1 = require("./ComprehendMetricsExporter");
|
|
7
|
+
/**
 * Facade that wires a single WebSocketConnection to the dev span processor and
 * the metrics exporter, and fans server-pushed custom metric specifications
 * out to both.
 */
class ComprehendSDK {
    /**
     * @param options.organization Organization identifier for the connection.
     * @param options.token Auth token for the connection.
     * @param options.debug true → log via console.log; false/undefined → no
     *        logging; a function → used as the logger directly.
     */
    constructor(options) {
        this.spanProcessor = null;
        this.metricsExporter = null;
        this.customMetricListeners = [];
        // Most recent custom metric specs. Replayed to consumers that are
        // created after the specs arrived (previously a late getSpanProcessor/
        // getMetricsExporter call silently missed the current configuration).
        this.lastCustomMetricSpecs = null;
        const logger = options.debug === true ? console.log
            : options.debug === false ? undefined
                : options.debug;
        this.connection = new WebSocketConnection_1.WebSocketConnection({
            organization: options.organization,
            token: options.token,
            logger,
            onAuthorized: (ack) => {
                this.distributeCustomMetrics(ack.customMetrics);
            },
            onCustomMetricChange: (specs) => {
                this.distributeCustomMetrics(specs);
            },
        });
    }
    /** Remember the latest specs and broadcast them to all registered listeners. */
    distributeCustomMetrics(specs) {
        this.lastCustomMetricSpecs = specs;
        for (const listener of this.customMetricListeners) {
            listener(specs);
        }
    }
    /** Lazily create (once) the span processor bound to the shared connection. */
    getSpanProcessor() {
        if (!this.spanProcessor) {
            this.spanProcessor = new ComprehendDevSpanProcessor_1.ComprehendDevSpanProcessor(this.connection);
            this.customMetricListeners.push((specs) => this.spanProcessor.updateCustomMetrics(specs));
            // Replay specs that arrived before this consumer existed.
            if (this.lastCustomMetricSpecs) {
                this.spanProcessor.updateCustomMetrics(this.lastCustomMetricSpecs);
            }
        }
        return this.spanProcessor;
    }
    /** Lazily create (once) the metrics exporter bound to the shared connection. */
    getMetricsExporter() {
        if (!this.metricsExporter) {
            this.metricsExporter = new ComprehendMetricsExporter_1.ComprehendMetricsExporter(this.connection);
            this.customMetricListeners.push((specs) => this.metricsExporter.updateCustomMetrics(specs));
            // Replay specs that arrived before this consumer existed.
            if (this.lastCustomMetricSpecs) {
                this.metricsExporter.updateCustomMetrics(this.lastCustomMetricSpecs);
            }
        }
        return this.metricsExporter;
    }
    /** Shut down any created consumers, then close the shared connection. */
    async shutdown() {
        if (this.spanProcessor) {
            await this.spanProcessor.shutdown();
        }
        if (this.metricsExporter) {
            await this.metricsExporter.shutdown();
        }
        this.connection.close();
    }
}
|
|
56
|
+
exports.ComprehendSDK = ComprehendSDK;
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const ComprehendSDK_1 = require("./ComprehendSDK");
|
|
4
|
+
const WebSocketConnection_1 = require("./WebSocketConnection");
|
|
5
|
+
const ComprehendDevSpanProcessor_1 = require("./ComprehendDevSpanProcessor");
|
|
6
|
+
const ComprehendMetricsExporter_1 = require("./ComprehendMetricsExporter");
|
|
7
|
+
// All collaborators are mocked so the suite exercises only the SDK's wiring:
// connection construction, lazy singletons, spec fan-out, and shutdown order.
jest.mock('./WebSocketConnection');
jest.mock('./ComprehendDevSpanProcessor');
jest.mock('./ComprehendMetricsExporter');
const MockedWebSocketConnection = WebSocketConnection_1.WebSocketConnection;
const MockedSpanProcessor = ComprehendDevSpanProcessor_1.ComprehendDevSpanProcessor;
const MockedMetricsExporter = ComprehendMetricsExporter_1.ComprehendMetricsExporter;
describe('ComprehendSDK', () => {
    let capturedOnAuthorized;
    let capturedOnCustomMetricChange;
    let mockConnectionInstance;
    beforeEach(() => {
        jest.clearAllMocks();
        capturedOnAuthorized = undefined;
        capturedOnCustomMetricChange = undefined;
        mockConnectionInstance = null;
        // Capture the callbacks the SDK registers so tests can fire them manually.
        MockedWebSocketConnection.mockImplementation((options) => {
            capturedOnAuthorized = options.onAuthorized;
            capturedOnCustomMetricChange = options.onCustomMetricChange;
            mockConnectionInstance = {
                sendMessage: jest.fn(),
                setProcessContext: jest.fn(),
                nextSeq: jest.fn(),
                close: jest.fn(),
            };
            return mockConnectionInstance;
        });
        MockedSpanProcessor.mockImplementation(() => ({
            onStart: jest.fn(),
            onEnd: jest.fn(),
            forceFlush: jest.fn().mockResolvedValue(undefined),
            shutdown: jest.fn().mockResolvedValue(undefined),
            updateCustomMetrics: jest.fn(),
        }));
        MockedMetricsExporter.mockImplementation(() => ({
            export: jest.fn(),
            forceFlush: jest.fn().mockResolvedValue(undefined),
            shutdown: jest.fn().mockResolvedValue(undefined),
            selectAggregationTemporality: jest.fn(),
            updateCustomMetrics: jest.fn(),
        }));
    });
    it('should create a WebSocketConnection with the correct options', () => {
        new ComprehendSDK_1.ComprehendSDK({ organization: 'test-org', token: 'test-token' });
        expect(MockedWebSocketConnection).toHaveBeenCalledTimes(1);
        const opts = MockedWebSocketConnection.mock.calls[0][0];
        expect(opts.organization).toBe('test-org');
        expect(opts.token).toBe('test-token');
    });
    it('should pass console.log as logger when debug is true', () => {
        new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok', debug: true });
        const opts = MockedWebSocketConnection.mock.calls[0][0];
        expect(opts.logger).toBe(console.log);
    });
    it('should pass custom logger function', () => {
        const myLogger = jest.fn();
        new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok', debug: myLogger });
        const opts = MockedWebSocketConnection.mock.calls[0][0];
        expect(opts.logger).toBe(myLogger);
    });
    it('should return the same span processor on multiple calls', () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        const proc1 = sdk.getSpanProcessor();
        const proc2 = sdk.getSpanProcessor();
        expect(proc1).toBe(proc2);
        expect(MockedSpanProcessor).toHaveBeenCalledTimes(1);
    });
    it('should return the same metrics exporter on multiple calls', () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        const exp1 = sdk.getMetricsExporter();
        const exp2 = sdk.getMetricsExporter();
        expect(exp1).toBe(exp2);
        expect(MockedMetricsExporter).toHaveBeenCalledTimes(1);
    });
    it('should share the same WebSocketConnection between processor and exporter', () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        sdk.getSpanProcessor();
        sdk.getMetricsExporter();
        // Both should receive the same connection instance
        expect(MockedSpanProcessor).toHaveBeenCalledWith(mockConnectionInstance);
        expect(MockedMetricsExporter).toHaveBeenCalledWith(mockConnectionInstance);
    });
    it('should distribute custom metrics from onAuthorized to processor and exporter', () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        const processor = sdk.getSpanProcessor();
        const exporter = sdk.getMetricsExporter();
        const specs = [
            { type: 'span', rule: { kind: 'type', value: 'server' }, subject: 's1' },
            { type: 'cumulative', id: 'm1', attributes: ['a'], subject: 's2' },
        ];
        // Simulate the auth callback
        capturedOnAuthorized({ type: 'ack-authorized', customMetrics: specs });
        expect(processor.updateCustomMetrics).toHaveBeenCalledWith(specs);
        expect(exporter.updateCustomMetrics).toHaveBeenCalledWith(specs);
    });
    it('should distribute custom metrics from onCustomMetricChange', () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        const processor = sdk.getSpanProcessor();
        const exporter = sdk.getMetricsExporter();
        const specs = [
            { type: 'timeseries', id: 'new-metric', attributes: [], subject: 's3' },
        ];
        capturedOnCustomMetricChange(specs);
        expect(processor.updateCustomMetrics).toHaveBeenCalledWith(specs);
        expect(exporter.updateCustomMetrics).toHaveBeenCalledWith(specs);
    });
    it('should shutdown processor, exporter, and connection', async () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        const processor = sdk.getSpanProcessor();
        const exporter = sdk.getMetricsExporter();
        await sdk.shutdown();
        expect(processor.shutdown).toHaveBeenCalled();
        expect(exporter.shutdown).toHaveBeenCalled();
        expect(mockConnectionInstance.close).toHaveBeenCalled();
    });
    it('should shutdown cleanly even without processor or exporter created', async () => {
        const sdk = new ComprehendSDK_1.ComprehendSDK({ organization: 'org', token: 'tok' });
        await expect(sdk.shutdown()).resolves.toBeUndefined();
        expect(mockConnectionInstance.close).toHaveBeenCalled();
    });
});
|