@agoric/telemetry 0.6.3-other-dev-3eb1a1d.0 → 0.6.3-other-dev-fbe72e7.0.fbe72e7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +29 -28
- package/src/context-aware-slog.js +11 -7
- package/src/flight-recorder.js +37 -18
- package/src/frcat-entrypoint.js +2 -2
- package/src/index.js +20 -23
- package/src/make-slog-sender.js +124 -107
- package/src/otel-context-aware-slog.js +2 -2
- package/src/otel-metrics.js +229 -0
- package/src/otel-trace.js +11 -1
- package/src/prometheus.js +18 -0
- package/src/serialize-slog-obj.js +32 -4
- package/src/slog-sender-pipe-entrypoint.js +64 -67
- package/src/slog-sender-pipe.js +86 -110
- package/test/flight-recorder.test.js +42 -11
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@agoric/telemetry",
-  "version": "0.6.3-other-dev-
+  "version": "0.6.3-other-dev-fbe72e7.0.fbe72e7",
   "description": "Agoric's telemetry implementation",
   "type": "module",
   "repository": "https://github.com/Agoric/agoric-sdk",
@@ -8,12 +8,12 @@
   "scripts": {
     "build": "exit 0",
     "test": "ava",
-    "test:c8": "c8 --all $C8_OPTIONS ava",
+    "test:c8": "c8 --all ${C8_OPTIONS:-} ava",
     "test:xs": "exit 0",
     "lint-fix": "yarn lint:eslint --fix",
-    "lint": "run-s --continue-on-error lint:*",
-    "lint:types": "tsc",
-    "lint:eslint": "eslint ."
+    "lint": "yarn run -T run-s --continue-on-error 'lint:*'",
+    "lint:types": "yarn run -T tsc",
+    "lint:eslint": "yarn run -T eslint ."
   },
   "bin": {
     "frcat": "./src/frcat-entrypoint.js"
@@ -22,38 +22,39 @@
   "author": "Agoric",
   "license": "Apache-2.0",
   "dependencies": {
-    "@agoric/internal": "0.3.3-other-dev-
-    "@agoric/store": "0.9.3-other-dev-
-    "@endo/errors": "^1.2.
-    "@endo/init": "^1.1.
-    "@endo/marshal": "^1.
-    "@endo/
-    "@
-    "@opentelemetry/api
-    "@opentelemetry/
-    "@opentelemetry/exporter-
-    "@opentelemetry/exporter-
-    "@opentelemetry/
-    "@opentelemetry/
-    "@opentelemetry/sdk-
-    "@opentelemetry/sdk-
-    "@opentelemetry/
+    "@agoric/internal": "0.3.3-other-dev-fbe72e7.0.fbe72e7",
+    "@agoric/store": "0.9.3-other-dev-fbe72e7.0.fbe72e7",
+    "@endo/errors": "^1.2.13",
+    "@endo/init": "^1.1.12",
+    "@endo/marshal": "^1.8.0",
+    "@endo/promise-kit": "^1.1.13",
+    "@endo/stream": "^1.2.13",
+    "@opentelemetry/api": "~1.9.0",
+    "@opentelemetry/api-logs": "0.57.1",
+    "@opentelemetry/exporter-logs-otlp-http": "0.57.1",
+    "@opentelemetry/exporter-prometheus": "~0.57.1",
+    "@opentelemetry/exporter-trace-otlp-http": "0.57.1",
+    "@opentelemetry/resources": "~1.30.1",
+    "@opentelemetry/sdk-logs": "0.57.1",
+    "@opentelemetry/sdk-metrics": "~1.30.1",
+    "@opentelemetry/sdk-trace-base": "~1.30.1",
+    "@opentelemetry/semantic-conventions": "~1.28.0",
     "anylogger": "^0.21.0",
-    "better-sqlite3": "^
+    "better-sqlite3": "^10.1.0",
     "tmp": "^0.2.1"
   },
   "devDependencies": {
-    "@endo/lockdown": "^1.0.
-    "@endo/ses-ava": "^1.2
+    "@endo/lockdown": "^1.0.18",
+    "@endo/ses-ava": "^1.3.2",
     "ava": "^5.3.0",
-    "c8": "^10.1.
+    "c8": "^10.1.3",
     "tmp": "^0.2.1"
   },
   "publishConfig": {
     "access": "public"
   },
   "engines": {
-    "node": "^
+    "node": "^20.9 || ^22.11"
   },
   "ava": {
     "files": [
@@ -66,7 +67,7 @@
     "workerThreads": false
   },
   "typeCoverage": {
-    "atLeast":
+    "atLeast": 89.28
   },
-  "gitHead": "
+  "gitHead": "fbe72e72107f9997f788674e668c660d92ec4492"
 }

package/src/context-aware-slog.js
CHANGED

@@ -135,9 +135,9 @@ export const makeContextualSlogProcessor = (

  /**
   * @param {Slog} slog
-  * @returns {{ attributes: T & LogAttributes, body: Partial<Slog>;
+  * @returns {{ attributes: T & LogAttributes, body: Partial<Slog>; time: Slog['time'] }}
   */
-  const slogProcessor = ({ monotime, time
+  const slogProcessor = ({ monotime, time, ...body }) => {
    const finalBody = { ...body };

    /** @type {{'crank.syscallNum'?: Slog['syscallNum']}} */
@@ -219,7 +219,7 @@ export const makeContextualSlogProcessor = (

        triggerContext = {
          'run.num': undefined,
-          'run.id': `${triggerType}-${finalBody.
+          'run.id': `${triggerType}-${finalBody.blockHeight}`,
          'run.trigger.type': triggerType,
          'run.trigger.time': finalBody.blockTime,
          'run.trigger.blockHeight': finalBody.blockHeight,
@@ -321,13 +321,13 @@ export const makeContextualSlogProcessor = (

    const logAttributes = {
      ...staticContext,
-      'process.uptime': monotime,
      ...initContext, // Optional prelude
      ...blockContext, // Block is the first level of execution nesting
      ...triggerContext, // run and trigger info is nested next
      ...crankContext, // Finally cranks are the last level of nesting
      ...replayContext, // Replay is a substitute for crank context during vat page in
      ...eventLogAttributes,
+      'process.uptime': monotime,
    };

    /**
@@ -356,7 +356,11 @@ export const makeContextualSlogProcessor = (
      // eslint-disable-next-line no-restricted-syntax
      case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: {
        assert(!!triggerContext);
-        persistContext(
+        persistContext(
+          finalBody.remainingBeans && finalBody.remainingBeans > 0
+            ? {}
+            : triggerContext,
+        );
        triggerContext = null;
        break;
      }
@@ -373,9 +377,9 @@ export const makeContextualSlogProcessor = (
    }

    return {
-      attributes: /** @type {T & LogAttributes} */ (logAttributes),
      body: finalBody,
-
+      attributes: /** @type {T & LogAttributes} */ (logAttributes),
+      time,
    };
  };
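
For orientation, a minimal consumption sketch of the processor's updated return shape. The factory arguments, deep import path, and the slog-entry fields below are illustrative assumptions; only the { attributes, body, time } shape and the 'process.uptime' placement come from the hunks above.

// Hypothetical usage sketch; not part of the published diff.
import { makeContextualSlogProcessor } from '@agoric/telemetry/src/context-aware-slog.js';

// Assumption: the factory accepts a static-context record of log attributes.
const processSlog = makeContextualSlogProcessor({ 'chain.id': 'agoriclocal' });

const { attributes, body, time } = processSlog({
  type: 'cosmic-swingset-run-finish', // illustrative slog entry
  time: 1700000000.123,
  monotime: 12.345,
  blockHeight: 42,
  blockTime: 1700000000,
  remainingBeans: 0,
});

// `time` is now returned alongside attributes and body, and 'process.uptime'
// (monotime) is applied after the nested context spreads.
console.log(time, attributes['process.uptime'], body.blockHeight);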
|
package/src/flight-recorder.js
CHANGED

@@ -2,12 +2,15 @@
 /* eslint-env node */
 /// <reference types="ses" />

-import fs from 'node:fs';
 import fsp from 'node:fs/promises';
 import path from 'node:path';
 import { Fail } from '@endo/errors';
 import { serializeSlogObj } from './serialize-slog-obj.js';

+/**
+ * @import {EReturn} from '@endo/far';
+ */
+
 export const DEFAULT_CBUF_SIZE = 100 * 1024 * 1024;
 export const DEFAULT_CBUF_FILE = 'flight-recorder.bin';
 export const SLOG_MAGIC = 0x41472d534c4f4721n; // 'AG-SLOG!'
@@ -35,6 +38,9 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
    }
    throw e;
  });
+
+  // Use the default size if not provided and file doesn't exist.
+  circularBufferSize = circularBufferSize || stbuf?.size || DEFAULT_CBUF_SIZE;
  const arenaSize = BigInt(circularBufferSize - I_ARENA_START);

  if (stbuf && stbuf.size >= I_ARENA_START) {
@@ -63,7 +69,7 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
  return arenaSize;
};

-/** @typedef {
+/** @typedef {EReturn<typeof makeSimpleCircularBuffer>} CircularBuffer */

/**
 *
@@ -72,8 +78,8 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
 * @param {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => void} readRecord
 * @param {(record: Uint8Array, firstWriteLength: number, circEnd: bigint) => Promise<void>} writeRecord
 */
-function finishCircularBuffer(arenaSize, header, readRecord, writeRecord) {
-  const readCircBuf = (outbuf, offset = 0) => {
+function makeCircBufMethods(arenaSize, header, readRecord, writeRecord) {
+  const readCircBuf = async (outbuf, offset = 0) => {
    offset + outbuf.byteLength <= arenaSize ||
      Fail`Reading past end of circular buffer`;

@@ -95,7 +101,7 @@ function finishCircularBuffer(arenaSize, header, readRecord, writeRecord) {
      // The data is contiguous, like ---AAABBB---
      return { done: true, value: undefined };
    }
-    readRecord(outbuf, readStart, firstReadLength);
+    await readRecord(outbuf, readStart, firstReadLength);
    return { done: false, value: outbuf };
  };

@@ -139,9 +145,10 @@ function finishCircularBuffer(arenaSize, header, readRecord, writeRecord) {

    // Advance the start pointer until we have space to write the record.
    let overlap = BigInt(record.byteLength) - capacity;
+    await null;
    while (overlap > 0n) {
      const startRecordLength = new Uint8Array(RECORD_HEADER_SIZE);
-      const { done } = readCircBuf(startRecordLength);
+      const { done } = await readCircBuf(startRecordLength);
      if (done) {
        break;
      }
@@ -217,20 +224,22 @@ export const makeSimpleCircularBuffer = async ({
  arenaSize === hdrArenaSize ||
    Fail`${filename} arena size mismatch; wanted ${arenaSize}, got ${hdrArenaSize}`;

-  /** @type {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => void} */
-  const readRecord = (outbuf, readStart, firstReadLength) => {
-    const bytesRead =
+  /** @type {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => Promise<void>} */
+  const readRecord = async (outbuf, readStart, firstReadLength) => {
+    const { bytesRead } = await file.read(outbuf, {
      length: firstReadLength,
      position: Number(readStart) + I_ARENA_START,
    });
    assert.equal(bytesRead, firstReadLength, 'Too few bytes read');

    if (bytesRead < outbuf.byteLength) {
-
+      const length = outbuf.byteLength - firstReadLength;
+      const { bytesRead: bytesRead2 } = await file.read(outbuf, {
        offset: firstReadLength,
-        length
+        length,
        position: I_ARENA_START,
      });
+      assert.equal(bytesRead2, length, 'Too few bytes read');
    }
  };

@@ -265,25 +274,35 @@ export const makeSimpleCircularBuffer = async ({
    await file.write(headerBuffer, undefined, undefined, 0);
  };

-  return
+  return {
+    fileHandle: file,
+    ...makeCircBufMethods(arenaSize, header, readRecord, writeRecord),
+  };
};

/**
 *
- * @param {Pick<
+ * @param {Pick<CircularBuffer, 'fileHandle' | 'writeCircBuf'>} circBuf
 */
-export const makeSlogSenderFromBuffer = ({ writeCircBuf }) => {
-  /** @type {Promise<void>} */
+export const makeSlogSenderFromBuffer = ({ fileHandle, writeCircBuf }) => {
+  /** @type {Promise<void> | undefined} */
  let toWrite = Promise.resolve();
  const writeJSON = (obj, serialized = serializeSlogObj(obj)) => {
    // Prepend a newline so that the file can be more easily manipulated.
    const data = new TextEncoder().encode(`\n${serialized}`);
    // console.log('have obj', obj, data);
-    toWrite = toWrite
+    toWrite = toWrite?.then(() => writeCircBuf(data));
  };
  return Object.assign(writeJSON, {
    forceFlush: async () => {
      await toWrite;
+      await fileHandle.datasync();
+    },
+    shutdown: async () => {
+      const lastWritten = toWrite;
+      toWrite = undefined;
+      await lastWritten;
+      await fileHandle.close();
    },
    usesJsonObject: true,
  });
@@ -295,6 +314,6 @@ export const makeSlogSenderFromBuffer = ({ writeCircBuf }) => {
 * @type {import('./index.js').MakeSlogSender}
 */
export const makeSlogSender = async opts => {
-  const { writeCircBuf } = await makeSimpleCircularBuffer(opts);
-  return makeSlogSenderFromBuffer({ writeCircBuf });
+  const { fileHandle, writeCircBuf } = await makeSimpleCircularBuffer(opts);
+  return makeSlogSenderFromBuffer({ fileHandle, writeCircBuf });
};
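
A short usage sketch of the sender's new lifecycle, assuming the deep import path and an empty options bag (defaults) are acceptable; the forceFlush and shutdown behavior follows the hunks above.

// Sketch only: consult makeSimpleCircularBuffer for the real option names.
import { makeSlogSender } from '@agoric/telemetry/src/flight-recorder.js';

const slogSender = await makeSlogSender({});
slogSender({ type: 'example-slog-entry', time: 0, monotime: 0 }); // queued write
await slogSender.forceFlush(); // waits for queued writes, then datasync()s the buffer file
await slogSender.shutdown(); // stops accepting writes, drains, and closes the file handle
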
package/src/frcat-entrypoint.js
CHANGED

@@ -22,7 +22,7 @@ const main = async () => {
  let offset = 0;
  for (;;) {
    const lenBuf = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
-    const { done } = readCircBuf(lenBuf, offset);
+    const { done } = await readCircBuf(lenBuf, offset);
    if (done) {
      break;
    }
@@ -30,7 +30,7 @@ const main = async () => {
    const dv = new DataView(lenBuf.buffer);
    const len = Number(dv.getBigUint64(0));

-    const { done: done2, value: buf } = readCircBuf(
+    const { done: done2, value: buf } = await readCircBuf(
      new Uint8Array(len),
      offset,
    );
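
For reference, a small helper sketch of the framing frcat iterates over: an 8-byte big-endian length header followed by that many payload bytes. The helper name and offset bookkeeping are illustrative; readCircBuf is the (now async) reader produced by the flight recorder.

// Illustrative helper; not part of the published diff.
const readOneRecord = async (readCircBuf, offset) => {
  const lenBuf = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
  const { done } = await readCircBuf(lenBuf, offset);
  if (done) return undefined; // no more records
  const len = Number(new DataView(lenBuf.buffer).getBigUint64(0));
  const { done: done2, value: buf } = await readCircBuf(
    new Uint8Array(len),
    offset + lenBuf.byteLength,
  );
  if (done2) return undefined;
  return {
    json: new TextDecoder().decode(buf),
    nextOffset: offset + lenBuf.byteLength + len,
  };
};
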
package/src/index.js
CHANGED

@@ -34,7 +34,10 @@ export const tryFlushSlogSender = async (
  slogSender,
  { env = {}, log } = {},
) => {
-  await
+  await null;
+  try {
+    await slogSender?.forceFlush?.();
+  } catch (err) {
    log?.('Failed to flush slog sender', err);
    if (err.errors) {
      for (const error of err.errors) {
@@ -44,7 +47,7 @@ export const tryFlushSlogSender = async (
    if (env.SLOGSENDER_FAIL_ON_ERROR) {
      throw err;
    }
-  }
+  }
};

export const getResourceAttributes = ({
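
A quick usage note on the reworked flush path, as a hedged sketch (the import path and the fake sender below are assumptions): flush failures are logged through the optional log power and swallowed unless SLOGSENDER_FAIL_ON_ERROR is set in the env.

// Hypothetical demonstration of the error-handling contract.
import { tryFlushSlogSender } from '@agoric/telemetry/src/index.js';

const slogSender = Object.assign(() => {}, {
  forceFlush: async () => {
    throw Error('flush failed');
  },
});

// Logs the failure and resolves; set env.SLOGSENDER_FAIL_ON_ERROR to rethrow instead.
await tryFlushSlogSender(slogSender, { env: {}, log: console.warn });
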
@@ -81,43 +84,37 @@

/**
 * @typedef {object} Powers
- * @property {
+ * @property {Pick<Console, 'warn'>} console
 * @property {NodeJS.ProcessEnv} env
 * @property {import('@opentelemetry/sdk-metrics').View[]} views
 * @property {string} [serviceName]
 */

/**
- * @param {Partial<Powers>}
+ * @param {Partial<Powers>} powers
 */
-const getPrometheusMeterProvider = ({
+export const getPrometheusMeterProvider = ({
  console = globalThis.console,
  env = process.env,
  views,
  ...rest
} = {}) => {
-  const { OTEL_EXPORTER_PROMETHEUS_PORT } = env;
-
-
-
-  }
+  const { OTEL_EXPORTER_PROMETHEUS_HOST, OTEL_EXPORTER_PROMETHEUS_PORT } = env;
+
+  // The opt-in signal is a non-empty OTEL_EXPORTER_PROMETHEUS_PORT.
+  if (!OTEL_EXPORTER_PROMETHEUS_PORT) return;

  const resource = new Resource(getResourceAttributes({ env, ...rest }));

-  const
-
-
+  const { DEFAULT_OPTIONS } = PrometheusExporter;
+  const host = OTEL_EXPORTER_PROMETHEUS_HOST || DEFAULT_OPTIONS.host;
+  const port = +OTEL_EXPORTER_PROMETHEUS_PORT || DEFAULT_OPTIONS.port;
+  const url = `http://${host || '0.0.0.0'}:${port}${DEFAULT_OPTIONS.endpoint}`;

-  const
-
-
-
-    () => {
-      console.warn(
-        `Prometheus scrape endpoint: http://0.0.0.0:${port}${PrometheusExporter.DEFAULT_OPTIONS.endpoint}`,
-      );
-    },
-  );
+  const options = { host, port, appendTimestamp: true };
+  const exporter = new PrometheusExporter(options, () => {
+    console.warn(`Prometheus scrape endpoint: ${url}`);
+  });

  const provider = new MeterProvider({ resource, views });
  provider.addMetricReader(exporter);