@agoric/telemetry 0.6.3-u19.2 → 0.6.3-u20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -18
- package/package.json +11 -11
- package/src/flight-recorder.js +8 -3
- package/src/index.js +15 -22
- package/src/make-slog-sender.js +124 -107
- package/src/otel-metrics.js +223 -0
- package/src/prometheus.js +18 -0
- package/src/serialize-slog-obj.js +32 -4
- package/src/slog-sender-pipe-entrypoint.js +64 -67
- package/src/slog-sender-pipe.js +75 -111
- package/test/flight-recorder.test.js +31 -2
package/CHANGELOG.md
CHANGED
@@ -3,45 +3,35 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
-### [0.6.3-
-
-**Note:** Version bump only for package @agoric/telemetry
-
-
-
-
-
-### [0.6.3-u19.1](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.3-u19.0...@agoric/telemetry@0.6.3-u19.1) (2025-03-03)
-
-
-### Bug Fixes
-
-* Properly synchronize slog sender termination ([2fc342c](https://github.com/Agoric/agoric-sdk/commit/2fc342c180e296208d077a1d4799da139d3b7848))
-
-
-
-### [0.6.3-u19.0](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.2...@agoric/telemetry@0.6.3-u19.0) (2025-02-24)
+### [0.6.3-u20.0](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.2...@agoric/telemetry@0.6.3-u20.0) (2025-04-16)
 
 
 ### Features
 
+* Add the Prometheus slog sender module and load it per OTEL_EXPORTER_PROMETHEUS_PORT ([1dc1827](https://github.com/Agoric/agoric-sdk/commit/1dc182783ce191f0ba2131cb1f7b3042f287737a)), closes [#11045](https://github.com/Agoric/agoric-sdk/issues/11045)
 * **cosmic-swingset:** add JS upgrade plan handler stub ([655133e](https://github.com/Agoric/agoric-sdk/commit/655133ed909b5d632dc033e992214a7b6a1b5ab1))
+* **internal:** Add helper `unprefixedProperties` for environment variable consumption ([878fecf](https://github.com/Agoric/agoric-sdk/commit/878fecf4f5153fa80f48a27a8b79e67943b2d199))
 * simple CircularBuffer with fs offsets ([8d9cb7a](https://github.com/Agoric/agoric-sdk/commit/8d9cb7abe96e8905f5aaa0927e02914ef09279c4))
 * **telemetry:** context aware slog support new triggers ([03965d9](https://github.com/Agoric/agoric-sdk/commit/03965d90b86cf75ce7f6677861e3a0aa8ac70710))
 * **telemetry:** ingest-slog explicitly supports `-` for stdin ([63367c4](https://github.com/Agoric/agoric-sdk/commit/63367c4aaf9bafbd6553a1f4cb808c96bc90845a))
 * **telemetry:** ingest-slog throttle and flush per block ([2134944](https://github.com/Agoric/agoric-sdk/commit/21349448b3b9379a9da43218a59a7e7eaf4f5a9e))
+* **telemetry:** Update slog sender JSON serialization of error instances ([5db996d](https://github.com/Agoric/agoric-sdk/commit/5db996d99830e61fad6eed373e2fb2dc810d662e))
 * use writeSync slogSender ([47a2add](https://github.com/Agoric/agoric-sdk/commit/47a2adda72a5377eda181a425130cdc5a7fd7ff5))
 
 
 ### Bug Fixes
 
 * ensure script main rejections exit with error ([abdab87](https://github.com/Agoric/agoric-sdk/commit/abdab879014a5c3124ebd0e9246995ac6b1ce6e5))
+* Properly synchronize slog sender termination ([f83c01d](https://github.com/Agoric/agoric-sdk/commit/f83c01d89d80798e0922acdb498fcc7250560977))
 * **telemetry:** add missing slog type ([1aec8d0](https://github.com/Agoric/agoric-sdk/commit/1aec8d05036f6b3c3e3730339d1829da6b4a9051))
 * **telemetry:** avoid polluting stdout in ingest-slog ([d4b8dfa](https://github.com/Agoric/agoric-sdk/commit/d4b8dfa91155789f7ceda5cc3cef06019b9527e7))
 * **telemetry:** Empty context persisted when remaining beans are negative after run finish ([#10635](https://github.com/Agoric/agoric-sdk/issues/10635)) ([ad4e83e](https://github.com/Agoric/agoric-sdk/commit/ad4e83e0b6dff9716da91fd65d367d3acad1772e))
 * **telemetry:** event name typo ([9e19321](https://github.com/Agoric/agoric-sdk/commit/9e19321ea8fed32d445d44169b32f5d94a93d61e))
+* **telemetry:** flight-recorder ignores write after shutdown ([3d2bcb3](https://github.com/Agoric/agoric-sdk/commit/3d2bcb3c56ac24a0f991200b223e6af8514dc5b8))
 * **telemetry:** handle new trigger slog events ([d32cb7e](https://github.com/Agoric/agoric-sdk/commit/d32cb7e9f406c25399321dc32e827b5018c38b69))
 * **telemetry:** ingest-slog avoid writing progress file for stdin ([62589ca](https://github.com/Agoric/agoric-sdk/commit/62589ca7b6d4aaa9eb7042f95ec7aec633db27f9))
+* **telemetry:** initialize empty flight-recorders ([0908258](https://github.com/Agoric/agoric-sdk/commit/0908258c159a18f2bace0f76fa25c485c0460d15))
+* **telemetry:** Launch a slog sender subprocess with the correct environment ([1a60955](https://github.com/Agoric/agoric-sdk/commit/1a60955181f4e8b02b3b0d5a2f213d4cb051d7d3))
 * **telemetry:** otel correctly pop upgrade span ([0ffdf00](https://github.com/Agoric/agoric-sdk/commit/0ffdf001bc8cbdc94081fedfeb4d2376902f4ffc)), closes [#8272](https://github.com/Agoric/agoric-sdk/issues/8272) [#9569](https://github.com/Agoric/agoric-sdk/issues/9569)
 * **telemetry:** silence slogfile write errors ([91089d7](https://github.com/Agoric/agoric-sdk/commit/91089d7273ef3d41555b34d84471120d45602497))
 * **telemetry:** timer-poll run.id ([#10672](https://github.com/Agoric/agoric-sdk/issues/10672)) ([3b478fb](https://github.com/Agoric/agoric-sdk/commit/3b478fb9e3fe7ded8dec1e83bab68760571f9071)), closes [#10357](https://github.com/Agoric/agoric-sdk/issues/10357) [#10357](https://github.com/Agoric/agoric-sdk/issues/10357)
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@agoric/telemetry",
-  "version": "0.6.3-
+  "version": "0.6.3-u20.0",
   "description": "Agoric's telemetry implementation",
   "type": "module",
   "repository": "https://github.com/Agoric/agoric-sdk",
@@ -22,12 +22,12 @@
   "author": "Agoric",
   "license": "Apache-2.0",
   "dependencies": {
-    "@agoric/internal": "^0.4.0-
-    "@agoric/store": "^0.9.3-
-    "@endo/errors": "^1.2.
-    "@endo/init": "^1.1.
-    "@endo/marshal": "^1.6.
-    "@endo/stream": "^1.2.
+    "@agoric/internal": "^0.4.0-u20.0",
+    "@agoric/store": "^0.9.3-u20.0",
+    "@endo/errors": "^1.2.10",
+    "@endo/init": "^1.1.9",
+    "@endo/marshal": "^1.6.4",
+    "@endo/stream": "^1.2.10",
     "@opentelemetry/api": "~1.9.0",
     "@opentelemetry/api-logs": "0.57.1",
     "@opentelemetry/exporter-logs-otlp-http": "0.57.1",
@@ -43,8 +43,8 @@
     "tmp": "^0.2.1"
   },
   "devDependencies": {
-    "@endo/lockdown": "^1.0.
-    "@endo/ses-ava": "^1.2.
+    "@endo/lockdown": "^1.0.15",
+    "@endo/ses-ava": "^1.2.10",
    "ava": "^5.3.0",
    "c8": "^10.1.2",
    "tmp": "^0.2.1"
@@ -66,7 +66,7 @@
     "workerThreads": false
   },
   "typeCoverage": {
-    "atLeast": 88.
+    "atLeast": 88.87
   },
-  "gitHead": "
+  "gitHead": "8e4207fa19dabf76c1f91f8779b5b5b93570ecea"
 }
package/src/flight-recorder.js
CHANGED
@@ -39,6 +39,9 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
     }
     throw e;
   });
+
+  // Use the default size if not provided and file doesn't exist.
+  circularBufferSize = circularBufferSize || stbuf?.size || DEFAULT_CBUF_SIZE;
   const arenaSize = BigInt(circularBufferSize - I_ARENA_START);
 
   if (stbuf && stbuf.size >= I_ARENA_START) {
@@ -280,20 +283,22 @@ export const makeSimpleCircularBuffer = async ({
  * @param {Pick<CircularBuffer, 'fileHandle' | 'writeCircBuf'>} circBuf
  */
 export const makeSlogSenderFromBuffer = ({ fileHandle, writeCircBuf }) => {
-  /** @type {Promise<void>} */
+  /** @type {Promise<void> | undefined} */
   let toWrite = Promise.resolve();
   const writeJSON = (obj, serialized = serializeSlogObj(obj)) => {
     // Prepend a newline so that the file can be more easily manipulated.
     const data = new TextEncoder().encode(`\n${serialized}`);
     // console.log('have obj', obj, data);
-    toWrite = toWrite
+    toWrite = toWrite?.then(() => writeCircBuf(data));
   };
   return Object.assign(writeJSON, {
     forceFlush: async () => {
       await toWrite;
     },
     shutdown: async () => {
-      …
+      const lastWritten = toWrite;
+      toWrite = undefined;
+      await lastWritten;
       await fileHandle.close();
     },
     usesJsonObject: true,
package/src/index.js
CHANGED
@@ -84,44 +84,37 @@ export const getResourceAttributes = ({
 
 /**
  * @typedef {object} Powers
- * @property {
+ * @property {Pick<Console, 'warn'>} console
  * @property {NodeJS.ProcessEnv} env
  * @property {import('@opentelemetry/sdk-metrics').View[]} views
  * @property {string} [serviceName]
  */
 
 /**
- * @param {Partial<Powers>}
+ * @param {Partial<Powers>} powers
  */
-const getPrometheusMeterProvider = ({
+export const getPrometheusMeterProvider = ({
   console = globalThis.console,
   env = process.env,
   views,
   ...rest
 } = {}) => {
-  const { OTEL_EXPORTER_PROMETHEUS_PORT } = env;
-  …
-  }
+  const { OTEL_EXPORTER_PROMETHEUS_HOST, OTEL_EXPORTER_PROMETHEUS_PORT } = env;
+
+  // The opt-in signal is a non-empty OTEL_EXPORTER_PROMETHEUS_PORT.
+  if (!OTEL_EXPORTER_PROMETHEUS_PORT) return;
 
   const resource = new Resource(getResourceAttributes({ env, ...rest }));
 
-  const
-  …
+  const { DEFAULT_OPTIONS } = PrometheusExporter;
+  const host = OTEL_EXPORTER_PROMETHEUS_HOST || DEFAULT_OPTIONS.host;
+  const port = +OTEL_EXPORTER_PROMETHEUS_PORT || DEFAULT_OPTIONS.port;
+  const url = `http://${host || '0.0.0.0'}:${port}${DEFAULT_OPTIONS.endpoint}`;
 
-  const
-  …
-    },
-    () => {
-      console.warn(
-        `Prometheus scrape endpoint: http://0.0.0.0:${port}${PrometheusExporter.DEFAULT_OPTIONS.endpoint}`,
-      );
-    },
-  );
+  const options = { host, port, appendTimestamp: true };
+  const exporter = new PrometheusExporter(options, () => {
+    console.warn(`Prometheus scrape endpoint: ${url}`);
+  });
 
   const provider = new MeterProvider({ resource, views });
   provider.addMetricReader(exporter);
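The exported `getPrometheusMeterProvider` opts in only when `OTEL_EXPORTER_PROMETHEUS_PORT` is non-empty, optionally honoring `OTEL_EXPORTER_PROMETHEUS_HOST`. A minimal sketch of how a consumer might call it; the import specifier, environment values, and service name below are illustrative assumptions, not defaults from this package:

```js
// Sketch only: the import specifier and all values here are assumptions.
import { getPrometheusMeterProvider } from '@agoric/telemetry';

const meterProvider = getPrometheusMeterProvider({
  env: {
    OTEL_EXPORTER_PROMETHEUS_HOST: '127.0.0.1', // hypothetical; exporter default if omitted
    OTEL_EXPORTER_PROMETHEUS_PORT: '9464', // the opt-in signal; omit to disable
  },
  serviceName: 'follower-node', // hypothetical
});
// Returns undefined when OTEL_EXPORTER_PROMETHEUS_PORT is empty; otherwise the
// provider already has the Prometheus exporter attached as a metric reader.
```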
package/src/make-slog-sender.js
CHANGED
@@ -1,13 +1,13 @@
 import path from 'path';
 import tmp from 'tmp';
-import { PromiseAllOrErrors } from '@agoric/internal';
+import { PromiseAllOrErrors, unprefixedProperties } from '@agoric/internal';
 import { serializeSlogObj } from './serialize-slog-obj.js';
 
+export const DEFAULT_SLOGSENDER_AGENT = 'self';
 export const DEFAULT_SLOGSENDER_MODULE =
   '@agoric/telemetry/src/flight-recorder.js';
 export const SLOGFILE_SENDER_MODULE = '@agoric/telemetry/src/slog-file.js';
-
-export const DEFAULT_SLOGSENDER_AGENT = 'self';
+export const PROMETHEUS_SENDER_MODULE = '@agoric/telemetry/src/prometheus.js';
 
 /** @import {SlogSender} from './index.js' */
 
@@ -19,6 +19,23 @@ export const DEFAULT_SLOGSENDER_AGENT = 'self';
 const filterTruthy = arr => /** @type {any[]} */ (arr.filter(Boolean));
 
 /**
+ * Create an aggregate slog sender that fans out inbound slog entries to modules
+ * as indicated by variables in the supplied `env` option. The SLOGSENDER value
+ * (or a default DEFAULT_SLOGSENDER_MODULE defined above) is split on commas
+ * into a list of module identifiers and adjusted by automatic insertions (a
+ * non-empty SLOGFILE value inserts DEFAULT_SLOGSENDER_AGENT defined above), and
+ * then each identifier is dynamically `import`ed for its own `makeSlogSender`
+ * export, which is invoked with a non-empty `stateDir` option and a modified
+ * `env` in which SLOGSENDER_AGENT_* variables have overridden their unprefixed
+ * equivalents to produce a subordinate slog sender.
+ * Subordinate slog senders remain isolated from each other, and any errors from
+ * them are caught and held until the next `forceFlush()` without disrupting
+ * any remaining slog entry fanout.
+ * If SLOGSENDER_AGENT is 'process', 'slog-sender-pipe.js' is used to load the
+ * subordinates in a child process rather than the main process.
+ * When there are no subordinates, the return value will be `undefined` rather
+ * than a slog sender function.
+ *
  * @type {import('./index.js').MakeSlogSender}
  */
 export const makeSlogSender = async (opts = {}) => {
@@ -26,95 +43,95 @@ export const makeSlogSender = async (opts = {}) => {
   const {
     SLOGSENDER = DEFAULT_SLOGSENDER_MODULE,
     SLOGSENDER_AGENT = DEFAULT_SLOGSENDER_AGENT,
+    // While cosmic-swingset/kernel code includes its own Prometheus metrics
+    // export, that trumps a slog sender module doing so.
+    // This extraction can be removed when that changes, but in the meantime,
+    // opt-in is only by SLOGSENDER_AGENT_OTEL_EXPORTER_PROMETHEUS_PORT.
+    OTEL_EXPORTER_PROMETHEUS_PORT: _prometheusExportPort,
     ...otherEnv
   } = env;
 
   const agentEnv = {
     ...otherEnv,
-    ...
-      Object.entries(otherEnv)
-        .filter(([k]) => k.match(/^(?:SLOGSENDER_AGENT_)+/)) // narrow to SLOGSENDER_AGENT_ prefixes.
-        .map(([k, v]) => [k.replace(/^(?:SLOGSENDER_AGENT_)+/, ''), v]), // Rewrite SLOGSENDER_AGENT_ to un-prefixed version.
-    ),
+    ...unprefixedProperties(otherEnv, 'SLOGSENDER_AGENT_'),
   };
 
-  const slogSenderModules =
-  …
+  const slogSenderModules = new Set();
+  if (agentEnv.OTEL_EXPORTER_PROMETHEUS_PORT) {
+    slogSenderModules.add(PROMETHEUS_SENDER_MODULE);
+  }
+  if (agentEnv.SLOGFILE) {
+    slogSenderModules.add(SLOGFILE_SENDER_MODULE);
+  }
+  for (const moduleIdentifier of filterTruthy(SLOGSENDER.split(','))) {
+    if (moduleIdentifier.startsWith('-')) {
+      // Opt out of an automatically-included sender.
+      slogSenderModules.delete(moduleIdentifier.slice(1));
+    } else if (moduleIdentifier.startsWith('.')) {
+      // Resolve relative to the current working directory.
+      slogSenderModules.add(path.resolve(moduleIdentifier));
+    } else {
+      slogSenderModules.add(moduleIdentifier);
+    }
+  }
+
+  if (!slogSenderModules.size) {
     return undefined;
   }
 
-  …
-      );
-  }
-      case 'worker':
-      default:
-        console.warn(`Unknown SLOGSENDER_AGENT=${SLOGSENDER_AGENT}`);
+  if (SLOGSENDER_AGENT === 'process') {
+    console.warn('Loading slog sender in subprocess');
+    return import('./slog-sender-pipe.js').then(async module =>
+      module.makeSlogSender({
+        env: {
+          ...agentEnv,
+          SLOGSENDER,
+          SLOGSENDER_AGENT: 'self',
+        },
+        stateDir: stateDirOption,
+        ...otherOpts,
+      }),
+    );
+  } else if (SLOGSENDER_AGENT && SLOGSENDER_AGENT !== 'self') {
+    console.warn(
+      `Unknown SLOGSENDER_AGENT=${SLOGSENDER_AGENT}; defaulting to 'self'`,
+    );
   }
 
   if (SLOGSENDER) {
     console.warn('Loading slog sender modules:', ...slogSenderModules);
   }
 
-  …
-  }
-
-          return /** @type {const} */ ([maker, moduleIdentifier]);
-        },
-      )
-      .catch(err => {
+  /** @type {Map<import('./index.js').MakeSlogSender, string>} */
+  const makerMap = new Map();
+  await Promise.all(
+    [...slogSenderModules].map(async moduleIdentifier => {
+      await null;
+      try {
+        const module = await import(moduleIdentifier);
+        const { makeSlogSender: maker } = module;
+        if (typeof maker !== 'function') {
+          throw Error(`No 'makeSlogSender' function exported by module`);
+        } else if (maker === makeSlogSender) {
+          throw Error(`Cannot recursively load 'makeSlogSender' aggregator`);
+        }
+        const isReplacing = makerMap.get(maker);
+        if (isReplacing) {
          console.warn(
-            `
-            err,
+            `The slog sender from ${moduleIdentifier} matches the one from ${isReplacing}.`,
          );
-  …
+        }
+        makerMap.set(maker, moduleIdentifier);
+      } catch (err) {
+        console.warn(
+          `Failed to load slog sender from ${moduleIdentifier}.`,
+          err,
+        );
+      }
+    }),
+  );
 
-  if (!
+  if (!makerMap.size) {
     return undefined;
   }
 
@@ -122,11 +139,11 @@ export const makeSlogSender = async (opts = {}) => {
 
   if (stateDir === undefined) {
     stateDir = tmp.dirSync().name;
-    console.warn(`Using ${stateDir} for stateDir`);
+    console.warn(`Using ${stateDir} for slog sender stateDir`);
   }
 
   const senders = await Promise.all(
-    …
+    [...makerMap.entries()].map(async ([maker, moduleIdentifier]) =>
       maker({
         ...otherOpts,
         stateDir,
@@ -137,37 +154,37 @@ export const makeSlogSender = async (opts = {}) => {
 
   if (!senders.length) {
     return undefined;
-  } else {
-    // Optimize creating a JSON serialization only if needed
-    // by any of the sender modules
-    const hasSenderUsingJsonObj = senders.some(
-      ({ usesJsonObject = true }) => usesJsonObject,
-    );
-    const getJsonObj = hasSenderUsingJsonObj
-      ? serializeSlogObj
-      : () => undefined;
-    const sendErrors = [];
-    /** @type {SlogSender} */
-    const slogSender = (slogObj, jsonObj = getJsonObj(slogObj)) => {
-      for (const sender of senders) {
-        try {
-          sender(slogObj, jsonObj);
-        } catch (err) {
-          sendErrors.push(err);
-        }
-      }
-    };
-    return Object.assign(slogSender, {
-      forceFlush: async () => {
-        await PromiseAllOrErrors([
-          ...senders.map(sender => sender.forceFlush?.()),
-          ...sendErrors.splice(0).map(err => Promise.reject(err)),
-        ]);
-      },
-      shutdown: async () => {
-        await PromiseAllOrErrors(senders.map(sender => sender.shutdown?.()));
-      },
-      usesJsonObject: hasSenderUsingJsonObj,
-    });
   }
+
+  // Optimize creating a JSON serialization only if needed
+  // by at least one of the senders.
+  const hasSenderUsingJsonObj = senders.some(
+    ({ usesJsonObject = true }) => usesJsonObject,
+  );
+  const getJsonObj = hasSenderUsingJsonObj ? serializeSlogObj : () => undefined;
+
+  const sendErrors = [];
+
+  /** @type {SlogSender} */
+  const slogSender = (slogObj, jsonObj = getJsonObj(slogObj)) => {
+    for (const sender of senders) {
+      try {
+        sender(slogObj, jsonObj);
+      } catch (err) {
+        sendErrors.push(err);
+      }
+    }
+  };
+  return Object.assign(slogSender, {
+    forceFlush: async () => {
+      await PromiseAllOrErrors([
+        ...senders.map(sender => sender.forceFlush?.()),
+        ...sendErrors.splice(0).map(err => Promise.reject(err)),
+      ]);
+    },
+    shutdown: async () => {
+      await PromiseAllOrErrors(senders.map(sender => sender.shutdown?.()));
+    },
+    usesJsonObject: hasSenderUsingJsonObj,
+  });
 };
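The JSDoc added above describes how the aggregate sender is driven entirely by environment variables. A hedged sketch of a configuration exercising those rules; the module path, state directory, meter name, and port below are illustrative, not defaults:

```js
// Sketch only: values are illustrative, and './my-slog-sender.js' is a
// hypothetical module exporting its own `makeSlogSender`.
import { makeSlogSender } from '@agoric/telemetry/src/make-slog-sender.js';

const slogSender = await makeSlogSender({
  stateDir: '/var/lib/agd/telemetry', // a tmp dir is created when omitted
  otelMeterName: 'example-meter', // hypothetical; used by the Prometheus sender
  env: {
    // Comma-separated module identifiers; './'-relative entries resolve against
    // the current working directory, and a leading '-' opts out of an
    // automatically-inserted module.
    SLOGSENDER: './my-slog-sender.js',
    // A non-empty SLOGFILE also inserts the slog-file sender module.
    SLOGFILE: '/tmp/chain.slog',
    // 'process' loads the subordinate senders in a child process via
    // slog-sender-pipe.js; the default 'self' loads them in this process.
    SLOGSENDER_AGENT: 'process',
    // SLOGSENDER_AGENT_* variables override their unprefixed equivalents in the
    // environment seen by subordinate senders (here enabling the Prometheus
    // sender module).
    SLOGSENDER_AGENT_OTEL_EXPORTER_PROMETHEUS_PORT: '9464',
  },
});
if (slogSender) {
  // Each slog entry fans out to every subordinate sender; sender errors are
  // held and surfaced at the next forceFlush().
  slogSender({ type: 'cosmic-swingset-begin-block', blockHeight: 123 });
  await slogSender.forceFlush?.();
  await slogSender.shutdown?.();
}
```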
package/src/otel-metrics.js
ADDED
@@ -0,0 +1,223 @@
+import { q, Fail } from '@endo/errors';
+
+import * as ActionType from '@agoric/internal/src/action-types.js';
+import { objectMapMutable } from '@agoric/internal/src/js-utils.js';
+import {
+  HISTOGRAM_METRICS,
+  BLOCK_HISTOGRAM_METRICS,
+  KERNEL_STATS_METRICS,
+  makeQueueMetrics,
+} from '@agoric/internal/src/metrics.js';
+
+/**
+ * @import {MeterProvider, MetricOptions, ObservableCounter, ObservableUpDownCounter} from '@opentelemetry/api';
+ * @import {TotalMap} from '@agoric/internal';
+ */
+
+const knownActionTypes = new Set(Object.values(ActionType.QueuedActionType));
+
+/** @param {import('./index.js').MakeSlogSenderOptions & {otelMeterName: string, otelMeterProvider?: MeterProvider}} opts */
+export const makeSlogSender = async (opts = /** @type {any} */ ({})) => {
+  const { otelMeterName, otelMeterProvider } = opts;
+  if (!otelMeterName) throw Fail`OTel meter name is required`;
+  if (!otelMeterProvider) return;
+
+  const otelMeter = otelMeterProvider.getMeter(otelMeterName);
+
+  const processedInboundActionCounter = otelMeter.createCounter(
+    'cosmic_swingset_inbound_actions',
+    { description: 'Processed inbound action counts by type' },
+  );
+  const histograms = {
+    ...objectMapMutable(HISTOGRAM_METRICS, (desc, name) => {
+      const { boundaries, ...options } = desc;
+      const advice = boundaries && { explicitBucketBoundaries: boundaries };
+      return otelMeter.createHistogram(name, { ...options, advice });
+    }),
+    ...objectMapMutable(BLOCK_HISTOGRAM_METRICS, (desc, name) =>
+      otelMeter.createHistogram(name, desc),
+    ),
+  };
+
+  const inboundQueueMetrics = makeQueueMetrics({
+    otelMeter,
+    namePrefix: 'cosmic_swingset_inbound_queue',
+    descPrefix: 'inbound queue',
+    console,
+  });
+
+  // Values for KERNEL_STATS_METRICS could be built up locally by observing slog
+  // entries, but they are all collectively reported in "kernel-stats"
+  // (@see {@link ../../cosmic-swingset/src/kernel-stats.js exportKernelStats})
+  // and for now we just reflect that, which requires implementation as async
+  // ("observable") instruments rather than synchronous ones.
+  /** @typedef {string} KernelStatsKey */
+  /** @typedef {string} KernelMetricName */
+  /** @type {TotalMap<KernelStatsKey, number>} */
+  const kernelStats = new Map();
+  /** @type {Map<KernelMetricName, ObservableCounter | ObservableUpDownCounter>} */
+  const kernelStatsCounters = new Map();
+  for (const meta of KERNEL_STATS_METRICS) {
+    const { key, name, sub, metricType, ...options } = meta;
+    kernelStats.set(key, 0);
+    if (metricType === 'gauge') {
+      kernelStats.set(`${key}Up`, 0);
+      kernelStats.set(`${key}Down`, 0);
+      kernelStats.set(`${key}Max`, 0);
+    } else if (metricType !== 'counter') {
+      Fail`Unknown metric type ${q(metricType)} for key ${q(key)} name ${q(name)}`;
+    }
+    let counter = kernelStatsCounters.get(name);
+    if (!counter) {
+      counter =
+        metricType === 'counter'
+          ? otelMeter.createObservableCounter(name, options)
+          : otelMeter.createObservableUpDownCounter(name, options);
+      kernelStatsCounters.set(name, counter);
+    }
+    const attributes = sub ? { [sub.dimension]: sub.value } : {};
+    counter.addCallback(observer => {
+      observer.observe(kernelStats.get(key), attributes);
+    });
+  }
+  const expectedKernelStats = new Set(kernelStats.keys());
+
+  /**
+   * @typedef {object} LazyStats
+   * @property {string} namePrefix
+   * @property {MetricOptions} options
+   * @property {Set<string>} keys
+   * @property {Record<string, number>} data
+   */
+  /** @type {(namePrefix: string, description: string) => LazyStats} */
+  const makeLazyStats = (namePrefix, description) => {
+    return { namePrefix, options: { description }, keys: new Set(), data: {} };
+  };
+  const dynamicAfterCommitStatsCounters = {
+    memoryUsage: makeLazyStats(
+      'memoryUsage_',
+      'kernel process memory statistic',
+    ),
+    heapStats: makeLazyStats('heapStats_', 'v8 kernel heap statistic'),
+  };
+
+  const slogSender = ({ type: slogType, ...slogObj }) => {
+    switch (slogType) {
+      // Consume cosmic-swingset block lifecycle slog entries.
+      case 'cosmic-swingset-init': {
+        const { inboundQueueInitialLengths: lengths } = slogObj;
+        inboundQueueMetrics.initLengths(lengths);
+        break;
+      }
+      case 'cosmic-swingset-begin-block': {
+        const {
+          interBlockSeconds,
+          afterCommitHangoverSeconds,
+          blockLagSeconds,
+        } = slogObj;
+
+        Number.isFinite(interBlockSeconds) &&
+          histograms.interBlockSeconds.record(interBlockSeconds);
+        histograms.afterCommitHangoverSeconds.record(
+          afterCommitHangoverSeconds,
+        );
+        Number.isFinite(blockLagSeconds) &&
+          histograms.blockLagSeconds.record(blockLagSeconds);
+        break;
+      }
+      case 'cosmic-swingset-run-finish': {
+        histograms.swingset_block_processing_seconds.record(slogObj.seconds);
+        break;
+      }
+      case 'cosmic-swingset-end-block-finish': {
+        const { inboundQueueStartLengths, processedActionCounts } = slogObj;
+        inboundQueueMetrics.updateLengths(inboundQueueStartLengths);
+        for (const processedActionRecord of processedActionCounts) {
+          const { count, phase, type: actionType } = processedActionRecord;
+          if (!knownActionTypes.has(actionType)) {
+            console.warn('Unknown inbound action type', actionType);
+          }
+          processedInboundActionCounter.add(count, { actionType });
+          inboundQueueMetrics.decLength(phase);
+        }
+        break;
+      }
+      case 'cosmic-swingset-commit-block-finish': {
+        const {
+          runSeconds,
+          chainTime,
+          saveTime,
+          cosmosCommitSeconds,
+          fullSaveTime,
+        } = slogObj;
+        histograms.swingsetRunSeconds.record(runSeconds);
+        histograms.swingsetChainSaveSeconds.record(chainTime);
+        histograms.swingsetCommitSeconds.record(saveTime);
+        histograms.cosmosCommitSeconds.record(cosmosCommitSeconds);
+        histograms.fullCommitSeconds.record(fullSaveTime);
+        break;
+      }
+
+      // Consume Swingset kernel slog entries.
+      case 'vat-startup-finish': {
+        histograms.swingset_vat_startup.record(slogObj.seconds * 1000);
+        break;
+      }
+      case 'crank-finish': {
+        const { crankType, messageType, seconds } = slogObj;
+        // TODO: Reflect crankType/messageType as proper dimensional attributes.
+        // For now, we're going for parity with direct metrics.
+        if (crankType !== 'routing' && messageType !== 'create-vat') {
+          histograms.swingset_crank_processing_time.record(seconds * 1000);
+        }
+        break;
+      }
+
+      // Consume miscellaneous slog entries.
+      case 'kernel-stats': {
+        const { stats } = slogObj;
+        const notYetFoundKernelStats = new Set(expectedKernelStats);
+        for (const [key, value] of Object.entries(stats)) {
+          notYetFoundKernelStats.delete(key);
+          if (!kernelStats.has(key)) {
+            console.warn('Unexpected SwingSet kernel statistic', key);
+          }
+          kernelStats.set(key, value);
+        }
+        if (notYetFoundKernelStats.size) {
+          console.warn('Expected SwingSet kernel statistics not found', [
+            ...notYetFoundKernelStats,
+          ]);
+        }
+        break;
+      }
+      case 'cosmic-swingset-after-commit-stats': {
+        const dynamicCounterEntries = Object.entries(
+          dynamicAfterCommitStatsCounters,
+        );
+        for (const [slogKey, meta] of dynamicCounterEntries) {
+          const { namePrefix, options, keys } = meta;
+          meta.data = slogObj[slogKey] || {};
+          const newKeys = Object.keys(meta.data).filter(key => !keys.has(key));
+          for (const key of newKeys) {
+            keys.add(key);
+            const name = `${namePrefix}${key}`;
+            const gauge = otelMeter.createObservableUpDownCounter(
+              name,
+              options,
+            );
+            gauge.addCallback(observer => {
+              observer.observe(meta.data[key]);
+            });
+          }
+        }
+        break;
+      }
+      default:
+        break;
+    }
+  };
+  return Object.assign(slogSender, {
+    usesJsonObject: false,
+  });
+};
package/src/prometheus.js
ADDED
@@ -0,0 +1,18 @@
+import { Fail } from '@endo/errors';
+
+import { getPrometheusMeterProvider } from './index.js';
+import { makeSlogSender as makeOtelMetricsSender } from './otel-metrics.js';
+
+/** @param {import('./index.js').MakeSlogSenderOptions & {otelMeterName?: string}} opts */
+export const makeSlogSender = async (opts = {}) => {
+  const { env, otelMeterName, serviceName } = opts;
+  if (!otelMeterName) throw Fail`OTel meter name is required`;
+  const otelMeterProvider = getPrometheusMeterProvider({
+    console,
+    env,
+    serviceName,
+  });
+  if (!otelMeterProvider) return;
+
+  return makeOtelMetricsSender({ ...opts, otelMeterName, otelMeterProvider });
+};
package/src/serialize-slog-obj.js
CHANGED
@@ -1,4 +1,32 @@
-…
-  )
+const { hasOwn } = Object;
+
+const replacer = (_key, value) => {
+  switch (typeof value) {
+    case 'object': {
+      if (value instanceof Error) {
+        // Represent each error as a serialization-friendly
+        // { errorType, message, cause?, errors?, stack? } object
+        // (itself subject to recursive replacement, particularly in `cause` and
+        // `errors`).
+        const obj = { errorType: value.name, message: value.message };
+        if (hasOwn(value, 'cause')) obj.cause = value.cause;
+        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+        // @ts-ignore TS2339 property "errors" is only on AggregateError
+        if (hasOwn(value, 'errors')) obj.errors = value.errors;
+        const stack = value.stack;
+        if (stack) obj.stack = stack;
+        return obj;
+      }
+      break;
+    }
+    case 'bigint':
+      // Represent each bigint as a JSON-serializable number, accepting the
+      // possible loss of precision.
+      return Number(value);
+    default:
+      break;
+  }
+  return value;
+};
+
+export const serializeSlogObj = slogObj => JSON.stringify(slogObj, replacer);
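For reference, the rewritten replacer turns `Error` instances into plain objects and bigints into numbers, so a slog entry carrying both serializes roughly as follows (a sketch; the stack text varies by platform and lockdown settings):

```js
// Sketch only: the printed output is approximate.
import { serializeSlogObj } from '@agoric/telemetry/src/serialize-slog-obj.js';

const failure = Error('disk full', { cause: 'ENOSPC' });
console.log(serializeSlogObj({ type: 'example', beans: 123n, failure }));
// {"type":"example","beans":123,"failure":{"errorType":"Error",
//  "message":"disk full","cause":"ENOSPC","stack":"Error: disk full\n    at ..."}}
```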
package/src/slog-sender-pipe-entrypoint.js
CHANGED
@@ -1,32 +1,34 @@
 /* eslint-env node */
+/**
+ * @file Run as a child process of {@link ./slog-sender-pipe.js} to isolate an
+ * aggregate slog sender (@see {@link ./make-slog-sender.js}). Communicates
+ * with its parent via Node.js IPC with advanced (structured clone)
+ * serialization.
+ * https://nodejs.org/docs/latest/api/child_process.html#advanced-serialization
+ */
+
 import '@endo/init';
 
 import anylogger from 'anylogger';
+import { Fail } from '@endo/errors';
 import { makeShutdown } from '@agoric/internal/src/node/shutdown.js';
 
 import { makeSlogSender } from './make-slog-sender.js';
 
 const logger = anylogger('slog-sender-pipe-entrypoint');
 
-/** @type {(msg: import('./slog-sender-pipe.js').
+/** @type {(msg: import('./slog-sender-pipe.js').PipeAPIReply) => void} */
 const send = Function.prototype.bind.call(process.send, process);
 
 /**
- * @typedef {
- * @
- * @
- …
- * @typedef {
- * @
- * @property {object} obj
+ * @typedef {{type: 'init', options: import('./index.js').MakeSlogSenderOptions }} InitMessage
+ * @typedef {{type: 'flush' }} FlushMessage
+ * @typedef {{type: 'send', obj: Record<string, unknown> }} SendMessage
+ *
+ * @typedef {InitMessage | FlushMessage} PipeAPIResponsefulMessage
+ * @typedef {SendMessage} PipeAPIResponselessMessage
+ * @typedef {PipeAPIResponsefulMessage | PipeAPIResponselessMessage} PipeAPIMessage
  */
-/**
- * @typedef {object} FlushMessage
- * @property {'flush'} type
- */
-/** @typedef {InitMessage | FlushMessage} SlogSenderPipeWaitMessages */
-/** @typedef {SlogSenderPipeWaitMessages | SendMessage } SlogSenderPipeMessages */
 
 const main = async () => {
   /** @type {import('./index.js').SlogSender | undefined} */
@@ -44,9 +46,7 @@ const main = async () => {
 
   /** @param {import('./index.js').MakeSlogSenderOptions} opts */
   const init = async ({ env, ...otherOpts } = {}) => {
-    …
-      assert.fail('Already initialized');
-    }
+    !slogSender || Fail`Already initialized`;
 
     slogSender = await makeSlogSender({
       ...otherOpts,
@@ -57,9 +57,7 @@ const main = async () => {
   };
 
   const flush = async () => {
-    if (!slogSender)
-      assert.fail('No sender available');
-    }
+    if (!slogSender) throw Fail`No sender available`;
 
     await slogSender.forceFlush?.();
   };
@@ -77,56 +75,55 @@ const main = async () => {
     return AggregateError(sendErrors.splice(0));
   };
 
-  …
-  }
+  /** @param {PipeAPIMessage} msg */
+  const onMessage = msg => {
+    if (!msg || typeof msg !== 'object') {
+      logger.warn('Received invalid message', msg);
+      return;
+    }
 
-    …
-    }
+    switch (msg.type) {
+      case 'init': {
+        void init(msg.options).then(
+          hasSender => {
+            send({ type: 'initReply', hasSender });
+          },
+          error => {
+            send({ type: 'initReply', hasSender: false, error });
+          },
+        );
+        break;
+      }
+      case 'flush': {
+        void flush().then(
+          () => {
+            send({ type: 'flushReply', error: generateFlushError() });
+          },
+          error => {
+            send({ type: 'flushReply', error: generateFlushError(error) });
+          },
+        );
+        break;
+      }
+      case 'send': {
+        if (!slogSender) {
+          logger.warn('Received send with no sender available');
+        } else {
+          try {
+            slogSender(harden(msg.obj));
+          } catch (e) {
+            sendErrors.push(e);
          }
-        break;
-      }
-      default: {
-        // @ts-expect-error exhaustive type check
-        logger.warn('received unknown message type', msg.type);
        }
+        break;
+      }
+      default: {
+        // @ts-expect-error exhaustive type check
+        logger.warn('Received unknown message type', msg.type);
      }
-  }
-
+    }
+  };
+  process.on('message', onMessage);
 };
 
 process.exitCode = 1;
package/src/slog-sender-pipe.js
CHANGED
@@ -1,7 +1,17 @@
+/**
+ * @file Export a `makeSlogSender` that spawns a
+ * {@link ./slog-sender-pipe-entrypoint.js} child process to which it forwards
+ * all slog entries via Node.js IPC with advanced (structured clone)
+ * serialization.
+ * https://nodejs.org/docs/latest/api/child_process.html#advanced-serialization
+ */
+
 import { fork } from 'child_process';
 import path from 'path';
+import { promisify } from 'util';
 import anylogger from 'anylogger';
 
+import { q, Fail } from '@endo/errors';
 import { makeQueue } from '@endo/stream';
 
 import { makeShutdown } from '@agoric/internal/src/node/shutdown.js';
@@ -10,6 +20,8 @@ const dirname = path.dirname(new URL(import.meta.url).pathname);
 
 const logger = anylogger('slog-sender-pipe');
 
+const sink = () => {};
+
 /**
  * @template {any[]} T
  * @template R
@@ -23,168 +35,120 @@ const withMutex = operation => {
   return async (...args) => {
     await mutex.get();
     const result = operation(...args);
-    mutex.put(
-      result.then(
-        () => {},
-        () => {},
-      ),
-    );
+    mutex.put(result.then(sink, sink));
     return result;
   };
 };
 
 /**
- * @
- * @
- * @property {boolean} hasSender
- * @property {Error} [error]
+ * @template [P=unknown]
+ * @typedef {{ type: string, error?: Error } & P} PipeReply
 */
+
 /**
- * @typedef {
- *
- *
+ * @typedef {{
+ *   init: {
+ *     message: import('./slog-sender-pipe-entrypoint.js').InitMessage;
+ *     reply: PipeReply<{ hasSender: boolean }>;
+ *   };
+ *   flush: {
+ *     message: import('./slog-sender-pipe-entrypoint.js').FlushMessage;
+ *     reply: PipeReply<{}>;
+ *   };
+ * }} SlogSenderPipeAPI
+ *
+ * @typedef {keyof SlogSenderPipeAPI} PipeAPICommand
+ * @typedef {SlogSenderPipeAPI[PipeAPICommand]["reply"]} PipeAPIReply
 */
-/** @typedef {SlogSenderInitReply | SlogSenderFlushReply} SlogSenderPipeWaitReplies */
 
-/** @param {import('.').MakeSlogSenderOptions}
-export const makeSlogSender = async
+/** @param {import('.').MakeSlogSenderOptions} options */
+export const makeSlogSender = async options => {
+  const { env = {} } = options;
   const { registerShutdown } = makeShutdown();
+
   const cp = fork(path.join(dirname, 'slog-sender-pipe-entrypoint.js'), [], {
-    stdio: ['
+    stdio: ['ignore', 'inherit', 'inherit', 'ipc'],
     serialization: 'advanced',
+    env,
   });
   // logger.log('done fork');
+  /** @type {(msg: Record<string, unknown> & {type: string}) => Promise<void>} */
+  const rawSend = promisify(cp.send.bind(cp));
+  const pipeSend = withMutex(rawSend);
 
-  …
-  /**
-   * @template {{type: string}} T
-   * @param {T} msg
-   */
-  msg =>
-    /** @type {Promise<void>} */ (
-      new Promise((resolve, reject) => {
-        cp.send(msg, err => {
-          if (err) {
-            reject(err);
-          } else {
-            resolve();
-          }
-        });
-      })
-    ),
-  );
-
-  /**
-   * @typedef {{
-   *   init: {
-   *     message: import('./slog-sender-pipe-entrypoint.js').InitMessage;
-   *     reply: SlogSenderInitReply;
-   *   };
-   *   flush: {
-   *     message: import('./slog-sender-pipe-entrypoint.js').FlushMessage;
-   *     reply: SlogSenderFlushReply;
-   *   };
-   * }} SlogSenderWaitMessagesAndReplies
-   */
-
-  /** @typedef {keyof SlogSenderWaitMessagesAndReplies} SendWaitCommands */
-  /**
-   * @template {SlogSenderPipeWaitReplies} T
-   * @typedef {Omit<T, 'type' | 'error'>} ReplyPayload
-   */
-
-  /** @type {import('@endo/stream').AsyncQueue<SlogSenderPipeWaitReplies>} */
+  /** @type {import('@endo/stream').AsyncQueue<PipeAPIReply>} */
   const sendWaitQueue = makeQueue();
-  /** @type {
+  /** @type {PipeAPICommand | undefined} */
   let sendWaitType;
 
   const sendWaitReply = withMutex(
     /**
-     * @template {
+     * @template {PipeAPICommand} T
     * @param {T} type
-     * @param {Omit<
-     * @returns {Promise<
+     * @param {Omit<SlogSenderPipeAPI[T]["message"], 'type'>} payload
+     * @returns {Promise<Omit<SlogSenderPipeAPI[T]["reply"], keyof PipeReply>>}
     */
    async (type, payload) => {
-      !sendWaitType ||
+      !sendWaitType || Fail`Invalid mutex state`;
 
       const msg = { ...payload, type };
 
       sendWaitType = type;
-      …
-        return rest;
-      },
-    )
-    .finally(() => {
-      sendWaitType = undefined;
-    });
-    },
-  );
-
-  cp.on(
-    'message',
-    /** @param { SlogSenderPipeWaitReplies } msg */
-    msg => {
-      // logger.log('received', msg);
-      if (
-        !msg ||
-        typeof msg !== 'object' ||
-        msg.type !== `${sendWaitType}Reply`
-      ) {
-        logger.warn('Received unexpected message', msg);
-        return;
+      await null;
+      try {
+        await pipeSend(msg);
+        /** @type {SlogSenderPipeAPI[T]["reply"]} */
+        const reply = await sendWaitQueue.get();
+        const { type: replyType, error, ...rest } = reply;
+        replyType === `${type}Reply` ||
+          Fail`Unexpected reply type ${q(replyType)}`;
+        if (error) throw error;
+        return rest;
+      } finally {
+        sendWaitType = undefined;
      }
-
-      sendWaitQueue.put(msg);
    },
  );
 
-  …
+  /** @param {PipeReply} msg */
+  const onMessage = msg => {
+    // logger.log('received', msg);
+    if (!msg || msg.type !== `${sendWaitType}Reply`) {
+      logger.warn('Received unexpected message', msg);
+      return;
+    }
+
+    sendWaitQueue.put(msg);
+  };
+  cp.on('message', onMessage);
 
-  const
-  …
+  const flush = async () => {
+    await sendWaitReply('flush', {});
  };
 
  const shutdown = async () => {
    // logger.log('shutdown');
-    if (!cp.connected)
-      return;
-    }
+    if (!cp.connected) return;
 
    await flush();
    cp.disconnect();
  };
  registerShutdown(shutdown);
 
-  const { hasSender } = await init
+  const { hasSender } = await sendWaitReply('init', { options }).catch(err => {
    cp.disconnect();
    throw err;
  });
-
  if (!hasSender) {
    cp.disconnect();
    return undefined;
  }
 
-  const slogSender =
+  const slogSender = obj => {
+    void pipeSend({ type: 'send', obj }).catch(sink);
+  };
  return Object.assign(slogSender, {
-    forceFlush:
-      await flush();
-    },
+    forceFlush: flush,
    shutdown,
    usesJsonObject: false,
  });
package/test/flight-recorder.test.js
CHANGED
@@ -31,8 +31,25 @@ const bufferTests = test.macro(
       circularBufferSize: BUFFER_SIZE,
       circularBufferFilename: tmpFile,
     });
-    const
-    …
+    const realSlogSender = makeSlogSenderFromBuffer({
+      fileHandle,
+      writeCircBuf,
+    });
+    let wasShutdown = false;
+    const shutdown = () => {
+      if (wasShutdown) return;
+      wasShutdown = true;
+
+      return realSlogSender.shutdown();
+    };
+    t.teardown(shutdown);
+    // To verify lack of attempted mutation by the consumer, send only hardened
+    // entries.
+    /** @type {typeof realSlogSender} */
+    const slogSender = Object.assign(
+      (obj, serialized) => realSlogSender(harden(obj), serialized),
+      realSlogSender,
+    );
     slogSender({ type: 'start' });
     await slogSender.forceFlush();
     t.is(fs.readFileSync(tmpFile, { encoding: 'utf8' }).length, BUFFER_SIZE);
@@ -83,6 +100,18 @@ const bufferTests = test.macro(
     slogSender(null, 'PRE-SERIALIZED');
     await slogSender.forceFlush();
     t.truthy(fs.readFileSync(tmpFile).includes('PRE-SERIALIZED'));
+
+    slogSender(null, 'PRE_SHUTDOWN');
+    const shutdownP = shutdown();
+    slogSender(null, 'POST_SHUTDOWN');
+    await shutdownP;
+    slogSender(null, 'SHUTDOWN_COMPLETED');
+
+    const finalContent = fs.readFileSync(tmpFile);
+
+    t.truthy(finalContent.includes('PRE_SHUTDOWN'));
+    t.falsy(finalContent.includes('POST_SHUTDOWN'));
+    t.falsy(finalContent.includes('SHUTDOWN_COMPLETED'));
   },
 );
 