@agoric/telemetry 0.6.3-other-dev-3eb1a1d.0 → 0.6.3-other-dev-d15096d.0.d15096d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +29 -28
- package/src/context-aware-slog-file.js +7 -2
- package/src/context-aware-slog.js +11 -7
- package/src/flight-recorder.js +39 -19
- package/src/frcat-entrypoint.js +2 -2
- package/src/index.js +29 -26
- package/src/make-slog-sender.js +129 -109
- package/src/otel-and-flight-recorder.js +5 -1
- package/src/otel-context-aware-slog.js +12 -6
- package/src/otel-metrics.js +230 -0
- package/src/otel-trace.js +11 -1
- package/src/prometheus.js +22 -0
- package/src/serialize-slog-obj.js +32 -4
- package/src/slog-file.js +5 -1
- package/src/slog-sender-pipe-entrypoint.js +74 -69
- package/src/slog-sender-pipe.js +96 -111
- package/src/slog-to-otel.js +12 -6
- package/test/flight-recorder.test.js +42 -11
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@agoric/telemetry",
-  "version": "0.6.3-other-dev-3eb1a1d.0",
+  "version": "0.6.3-other-dev-d15096d.0.d15096d",
   "description": "Agoric's telemetry implementation",
   "type": "module",
   "repository": "https://github.com/Agoric/agoric-sdk",
@@ -8,12 +8,12 @@
   "scripts": {
     "build": "exit 0",
     "test": "ava",
-    "test:c8": "c8 --all $C8_OPTIONS ava",
+    "test:c8": "c8 --all ${C8_OPTIONS:-} ava",
     "test:xs": "exit 0",
     "lint-fix": "yarn lint:eslint --fix",
-    "lint": "run-s --continue-on-error lint:*",
-    "lint:types": "tsc",
-    "lint:eslint": "eslint ."
+    "lint": "yarn run -T run-s --continue-on-error 'lint:*'",
+    "lint:types": "yarn run -T tsc",
+    "lint:eslint": "yarn run -T eslint ."
   },
   "bin": {
     "frcat": "./src/frcat-entrypoint.js"
@@ -22,38 +22,39 @@
   "author": "Agoric",
   "license": "Apache-2.0",
   "dependencies": {
-    "@agoric/internal": "0.3.3-other-dev-…
-    "@agoric/store": "0.9.3-other-dev-…
-    "@endo/errors": "^1.2.…
-    "@endo/init": "^1.1.…
-    "@endo/marshal": "^1.…
-    "@endo/…
-    "@…
-    "@opentelemetry/api…
-    "@opentelemetry/…
-    "@opentelemetry/exporter-…
-    "@opentelemetry/exporter-…
-    "@opentelemetry/…
-    "@opentelemetry/…
-    "@opentelemetry/sdk-…
-    "@opentelemetry/sdk-…
-    "@opentelemetry/…
+    "@agoric/internal": "0.3.3-other-dev-d15096d.0.d15096d",
+    "@agoric/store": "0.9.3-other-dev-d15096d.0.d15096d",
+    "@endo/errors": "^1.2.13",
+    "@endo/init": "^1.1.12",
+    "@endo/marshal": "^1.8.0",
+    "@endo/promise-kit": "^1.1.13",
+    "@endo/stream": "^1.2.13",
+    "@opentelemetry/api": "~1.9.0",
+    "@opentelemetry/api-logs": "0.57.1",
+    "@opentelemetry/exporter-logs-otlp-http": "0.57.1",
+    "@opentelemetry/exporter-prometheus": "~0.57.1",
+    "@opentelemetry/exporter-trace-otlp-http": "0.57.1",
+    "@opentelemetry/resources": "~1.30.1",
+    "@opentelemetry/sdk-logs": "0.57.1",
+    "@opentelemetry/sdk-metrics": "~1.30.1",
+    "@opentelemetry/sdk-trace-base": "~1.30.1",
+    "@opentelemetry/semantic-conventions": "~1.28.0",
     "anylogger": "^0.21.0",
-    "better-sqlite3": "^…
+    "better-sqlite3": "^10.1.0",
     "tmp": "^0.2.1"
   },
   "devDependencies": {
-    "@endo/lockdown": "^1.0.…
-    "@endo/ses-ava": "^1.2…
+    "@endo/lockdown": "^1.0.18",
+    "@endo/ses-ava": "^1.3.2",
     "ava": "^5.3.0",
-    "c8": "^10.1.…
+    "c8": "^10.1.3",
     "tmp": "^0.2.1"
   },
   "publishConfig": {
     "access": "public"
   },
   "engines": {
-    "node": "^…
+    "node": "^20.9 || ^22.11"
   },
   "ava": {
     "files": [
@@ -66,7 +67,7 @@
     "workerThreads": false
   },
   "typeCoverage": {
-    "atLeast": …
+    "atLeast": 89.28
   },
-  "gitHead": "…
+  "gitHead": "d15096dc4ff8b96e9b6cd11954c20d3a9efbb393"
 }
package/src/context-aware-slog-file.js
CHANGED

@@ -5,7 +5,12 @@ import { makeContextualSlogProcessor } from './context-aware-slog.js';
 import { serializeSlogObj } from './serialize-slog-obj.js';
 
 /**
- * @…
+ * @import {MakeSlogSenderOptions} from './index.js';
+ * @import {Slog} from './context-aware-slog.js';
+ */
+
+/**
+ * @param {MakeSlogSenderOptions} options
  */
 export const makeSlogSender = async options => {
   const { CHAIN_ID, CONTEXTUAL_SLOGFILE } = options.env || {};
@@ -26,7 +31,7 @@ export const makeSlogSender = async options => {
   });
 
   /**
-   * @param {…
+   * @param {Slog} slog
    */
   const slogSender = slog => {
     const contextualizedSlog = contextualSlogProcessor(slog);
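For orientation, a minimal sketch of driving this sender; the `CHAIN_ID` and `CONTEXTUAL_SLOGFILE` names come from the destructuring above, while the values, the slog-entry fields, and the behavior when `CONTEXTUAL_SLOGFILE` is unset are assumptions:

```js
// Sketch only: assumes makeSlogSender resolves to undefined when no contextual
// slogfile is configured, and that a slog entry carries { type, time, monotime }.
import { makeSlogSender } from '@agoric/telemetry/src/context-aware-slog-file.js';

const sender = await makeSlogSender({
  env: {
    CHAIN_ID: 'agoriclocal', // hypothetical chain id
    CONTEXTUAL_SLOGFILE: '/tmp/contextual-slogfile.jsonl', // hypothetical path
  },
});
sender?.({ type: 'cosmic-swingset-begin-block', time: 1700000000, monotime: 1.5 });
await sender?.forceFlush?.();
```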
package/src/context-aware-slog.js
CHANGED

@@ -135,9 +135,9 @@ export const makeContextualSlogProcessor = (
 
   /**
    * @param {Slog} slog
-   * @returns {{ attributes: T & LogAttributes, body: Partial<Slog>; …
+   * @returns {{ attributes: T & LogAttributes, body: Partial<Slog>; time: Slog['time'] }}
    */
-  const slogProcessor = ({ monotime, time…
+  const slogProcessor = ({ monotime, time, ...body }) => {
     const finalBody = { ...body };
 
     /** @type {{'crank.syscallNum'?: Slog['syscallNum']}} */
@@ -219,7 +219,7 @@ export const makeContextualSlogProcessor = (
 
         triggerContext = {
           'run.num': undefined,
-          'run.id': `${triggerType}-${finalBody.…
+          'run.id': `${triggerType}-${finalBody.blockHeight}`,
           'run.trigger.type': triggerType,
           'run.trigger.time': finalBody.blockTime,
           'run.trigger.blockHeight': finalBody.blockHeight,
@@ -321,13 +321,13 @@ export const makeContextualSlogProcessor = (
 
     const logAttributes = {
       ...staticContext,
-      'process.uptime': monotime,
       ...initContext, // Optional prelude
       ...blockContext, // Block is the first level of execution nesting
       ...triggerContext, // run and trigger info is nested next
       ...crankContext, // Finally cranks are the last level of nesting
       ...replayContext, // Replay is a substitute for crank context during vat page in
       ...eventLogAttributes,
+      'process.uptime': monotime,
     };
 
     /**
@@ -356,7 +356,11 @@ export const makeContextualSlogProcessor = (
       // eslint-disable-next-line no-restricted-syntax
       case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: {
         assert(!!triggerContext);
-        persistContext(…
+        persistContext(
+          finalBody.remainingBeans && finalBody.remainingBeans > 0
+            ? {}
+            : triggerContext,
+        );
         triggerContext = null;
         break;
       }
@@ -373,9 +377,9 @@ export const makeContextualSlogProcessor = (
     }
 
     return {
-      attributes: /** @type {T & LogAttributes} */ (logAttributes),
       body: finalBody,
-      …
+      attributes: /** @type {T & LogAttributes} */ (logAttributes),
+      time,
     };
   };
 
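Per the hunks above, the processor now returns the slog entry's original `time` alongside `attributes` and `body`, and `process.uptime` is spread last in `logAttributes` so the later context spreads can no longer shadow it. A small consumer sketch; only the return shape comes from this diff, and the processor is assumed to have been constructed elsewhere:

```js
// Sketch: consuming the updated return shape of a contextual slog processor.
// `contextualSlogProcessor` is assumed to come from makeContextualSlogProcessor(...);
// its constructor arguments are not part of this diff.
const toLogRecord = (contextualSlogProcessor, slog) => {
  const { attributes, body, time } = contextualSlogProcessor(slog);
  return {
    timestamp: time, // the slog entry's own time, now passed through
    uptime: attributes['process.uptime'], // spread last, so contexts cannot shadow it
    type: body.type,
  };
};
```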
package/src/flight-recorder.js
CHANGED

@@ -2,12 +2,16 @@
 /* eslint-env node */
 /// <reference types="ses" />
 
-import fs from 'node:fs';
 import fsp from 'node:fs/promises';
 import path from 'node:path';
 import { Fail } from '@endo/errors';
 import { serializeSlogObj } from './serialize-slog-obj.js';
 
+/**
+ * @import {EReturn} from '@endo/far';
+ * @import {MakeSlogSender} from './index.js';
+ */
+
 export const DEFAULT_CBUF_SIZE = 100 * 1024 * 1024;
 export const DEFAULT_CBUF_FILE = 'flight-recorder.bin';
 export const SLOG_MAGIC = 0x41472d534c4f4721n; // 'AG-SLOG!'
@@ -35,6 +39,9 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
     }
     throw e;
   });
+
+  // Use the default size if not provided and file doesn't exist.
+  circularBufferSize = circularBufferSize || stbuf?.size || DEFAULT_CBUF_SIZE;
   const arenaSize = BigInt(circularBufferSize - I_ARENA_START);
 
   if (stbuf && stbuf.size >= I_ARENA_START) {
@@ -63,7 +70,7 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
   return arenaSize;
 };
 
-/** @typedef {…
+/** @typedef {EReturn<typeof makeSimpleCircularBuffer>} CircularBuffer */
 
 /**
  *
@@ -72,8 +79,8 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
  * @param {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => void} readRecord
  * @param {(record: Uint8Array, firstWriteLength: number, circEnd: bigint) => Promise<void>} writeRecord
  */
-function finishCircularBuffer(arenaSize, header, readRecord, writeRecord) {
-  const readCircBuf = (outbuf, offset = 0) => {
+function makeCircBufMethods(arenaSize, header, readRecord, writeRecord) {
+  const readCircBuf = async (outbuf, offset = 0) => {
     offset + outbuf.byteLength <= arenaSize ||
       Fail`Reading past end of circular buffer`;
 
@@ -95,7 +102,7 @@ function finishCircularBuffer(arenaSize, header, readRecord, writeRecord) {
       // The data is contiguous, like ---AAABBB---
       return { done: true, value: undefined };
     }
-    readRecord(outbuf, readStart, firstReadLength);
+    await readRecord(outbuf, readStart, firstReadLength);
     return { done: false, value: outbuf };
   };
 
@@ -139,9 +146,10 @@ function finishCircularBuffer(arenaSize, header, readRecord, writeRecord) {
 
     // Advance the start pointer until we have space to write the record.
     let overlap = BigInt(record.byteLength) - capacity;
+    await null;
     while (overlap > 0n) {
       const startRecordLength = new Uint8Array(RECORD_HEADER_SIZE);
-      const { done } = readCircBuf(startRecordLength);
+      const { done } = await readCircBuf(startRecordLength);
       if (done) {
         break;
       }
@@ -217,20 +225,22 @@ export const makeSimpleCircularBuffer = async ({
   arenaSize === hdrArenaSize ||
     Fail`${filename} arena size mismatch; wanted ${arenaSize}, got ${hdrArenaSize}`;
 
-  /** @type {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => void} */
-  const readRecord = (outbuf, readStart, firstReadLength) => {
-    const bytesRead = …
+  /** @type {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => Promise<void>} */
+  const readRecord = async (outbuf, readStart, firstReadLength) => {
+    const { bytesRead } = await file.read(outbuf, {
      length: firstReadLength,
      position: Number(readStart) + I_ARENA_START,
    });
    assert.equal(bytesRead, firstReadLength, 'Too few bytes read');
 
    if (bytesRead < outbuf.byteLength) {
-      …
+      const length = outbuf.byteLength - firstReadLength;
+      const { bytesRead: bytesRead2 } = await file.read(outbuf, {
        offset: firstReadLength,
-        length…
+        length,
        position: I_ARENA_START,
      });
+      assert.equal(bytesRead2, length, 'Too few bytes read');
    }
  };
 
@@ -265,25 +275,35 @@ export const makeSimpleCircularBuffer = async ({
     await file.write(headerBuffer, undefined, undefined, 0);
   };
 
-  return …
+  return {
+    fileHandle: file,
+    ...makeCircBufMethods(arenaSize, header, readRecord, writeRecord),
+  };
 };
 
 /**
  *
- * @param {Pick<…
+ * @param {Pick<CircularBuffer, 'fileHandle' | 'writeCircBuf'>} circBuf
  */
-export const makeSlogSenderFromBuffer = ({ writeCircBuf }) => {
-  /** @type {Promise<void>} */
+export const makeSlogSenderFromBuffer = ({ fileHandle, writeCircBuf }) => {
+  /** @type {Promise<void> | undefined} */
   let toWrite = Promise.resolve();
   const writeJSON = (obj, serialized = serializeSlogObj(obj)) => {
     // Prepend a newline so that the file can be more easily manipulated.
     const data = new TextEncoder().encode(`\n${serialized}`);
     // console.log('have obj', obj, data);
-    toWrite = toWrite…
+    toWrite = toWrite?.then(() => writeCircBuf(data));
   };
   return Object.assign(writeJSON, {
     forceFlush: async () => {
       await toWrite;
+      await fileHandle.datasync();
+    },
+    shutdown: async () => {
+      const lastWritten = toWrite;
+      toWrite = undefined;
+      await lastWritten;
+      await fileHandle.close();
     },
     usesJsonObject: true,
   });
@@ -292,9 +312,9 @@ export const makeSlogSenderFromBuffer = ({ writeCircBuf }) => {
 /**
  * Loaded dynamically by makeSlogSender()
  *
- * @type {…
+ * @type {MakeSlogSender}
  */
 export const makeSlogSender = async opts => {
-  const { writeCircBuf } = await makeSimpleCircularBuffer(opts);
-  return makeSlogSenderFromBuffer({ writeCircBuf });
+  const { fileHandle, writeCircBuf } = await makeSimpleCircularBuffer(opts);
+  return makeSlogSenderFromBuffer({ fileHandle, writeCircBuf });
 };
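The buffer-backed sender now exposes the underlying file handle, fsyncs it on `forceFlush()`, and adds a `shutdown()` that drains queued writes before closing the handle. A usage sketch; the options passed to `makeSlogSender` are placeholders (consult `MakeSlogSenderOptions`), while the returned methods and their behavior come from the hunks above:

```js
// Sketch only: exercising the new forceFlush()/shutdown() lifecycle of the
// flight-recorder slog sender. The options object is a hypothetical placeholder.
import { makeSlogSender } from '@agoric/telemetry/src/flight-recorder.js';

const slogSender = await makeSlogSender({ env: process.env });
slogSender({ type: 'heartbeat', time: Date.now() / 1000, monotime: 0 });
await slogSender.forceFlush(); // awaits queued writes, then datasync()s the file
await slogSender.shutdown();   // drains the queue, drops it, and closes the file handle
// After shutdown(), further writes are no-ops because `toWrite` is left undefined.
```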
package/src/frcat-entrypoint.js
CHANGED

@@ -22,7 +22,7 @@ const main = async () => {
   let offset = 0;
   for (;;) {
     const lenBuf = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
-    const { done } = readCircBuf(lenBuf, offset);
+    const { done } = await readCircBuf(lenBuf, offset);
     if (done) {
       break;
     }
@@ -30,7 +30,7 @@ const main = async () => {
     const dv = new DataView(lenBuf.buffer);
     const len = Number(dv.getBigUint64(0));
 
-    const { done: done2, value: buf } = readCircBuf(
+    const { done: done2, value: buf } = await readCircBuf(
       new Uint8Array(len),
       offset,
     );
package/src/index.js
CHANGED

@@ -4,6 +4,12 @@ import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';
 import { Resource } from '@opentelemetry/resources';
 import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
 
+/**
+ * @import {MakeSlogSenderOptions} from './index.js';
+ * @import {ResourceAttributes} from '@opentelemetry/resources';
+ * @import {View} from '@opentelemetry/sdk-metrics';
+ */
+
 export * from './make-slog-sender.js';
 
 /**
@@ -14,7 +20,7 @@ export * from './make-slog-sender.js';
  * }} SlogSender
  */
 /**
- * @typedef {(opts: …
+ * @typedef {(opts: MakeSlogSenderOptions) => Promise<SlogSender | undefined>} MakeSlogSender
  */
 /**
  * @typedef {MakeSlogSenderCommonOptions & Record<string, unknown>} MakeSlogSenderOptions
@@ -34,7 +40,10 @@ export const tryFlushSlogSender = async (
   slogSender,
   { env = {}, log } = {},
 ) => {
-  await …
+  await null;
+  try {
+    await slogSender?.forceFlush?.();
+  } catch (err) {
     log?.('Failed to flush slog sender', err);
     if (err.errors) {
       for (const error of err.errors) {
@@ -44,7 +53,7 @@ export const tryFlushSlogSender = async (
     if (env.SLOGSENDER_FAIL_ON_ERROR) {
       throw err;
     }
-  }
+  }
 };
 
 export const getResourceAttributes = ({
@@ -53,7 +62,7 @@ export const getResourceAttributes = ({
 }) => {
   const { OTEL_RESOURCE_ATTRIBUTES, SDK_REVISION } = env;
 
-  /** @type {…
+  /** @type {ResourceAttributes} */
   const resourceAttributes = {};
   if (SDK_REVISION) {
     // Detect testnet-load-generator target revision.
@@ -81,43 +90,37 @@ export const getResourceAttributes = ({
 
 /**
  * @typedef {object} Powers
- * @property {…
+ * @property {Pick<Console, 'warn'>} console
  * @property {NodeJS.ProcessEnv} env
- * @property {…
+ * @property {View[]} views
  * @property {string} [serviceName]
  */
 
 /**
- * @param {Partial<Powers>} …
+ * @param {Partial<Powers>} powers
  */
-const getPrometheusMeterProvider = ({
+export const getPrometheusMeterProvider = ({
   console = globalThis.console,
   env = process.env,
   views,
   ...rest
 } = {}) => {
-  const { OTEL_EXPORTER_PROMETHEUS_PORT } = env;
-  …
-  …
-  …
-  }
+  const { OTEL_EXPORTER_PROMETHEUS_HOST, OTEL_EXPORTER_PROMETHEUS_PORT } = env;
+
+  // The opt-in signal is a non-empty OTEL_EXPORTER_PROMETHEUS_PORT.
+  if (!OTEL_EXPORTER_PROMETHEUS_PORT) return;
 
   const resource = new Resource(getResourceAttributes({ env, ...rest }));
 
-  const …
-  …
-  …
+  const { DEFAULT_OPTIONS } = PrometheusExporter;
+  const host = OTEL_EXPORTER_PROMETHEUS_HOST || DEFAULT_OPTIONS.host;
+  const port = +OTEL_EXPORTER_PROMETHEUS_PORT || DEFAULT_OPTIONS.port;
+  const url = `http://${host || '0.0.0.0'}:${port}${DEFAULT_OPTIONS.endpoint}`;
 
-  const …
-  …
-  …
-  …
-    () => {
-      console.warn(
-        `Prometheus scrape endpoint: http://0.0.0.0:${port}${PrometheusExporter.DEFAULT_OPTIONS.endpoint}`,
-      );
-    },
-  );
+  const options = { host, port, appendTimestamp: true };
+  const exporter = new PrometheusExporter(options, () => {
+    console.warn(`Prometheus scrape endpoint: ${url}`);
+  });
 
   const provider = new MeterProvider({ resource, views });
   provider.addMetricReader(exporter);