@agoric/telemetry 0.6.3-u19.2 → 0.6.3-u21.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -3,45 +3,39 @@
  All notable changes to this project will be documented in this file.
  See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

- ### [0.6.3-u19.2](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.3-u19.1...@agoric/telemetry@0.6.3-u19.2) (2025-03-13)
-
- **Note:** Version bump only for package @agoric/telemetry
-
-
-
-
-
- ### [0.6.3-u19.1](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.3-u19.0...@agoric/telemetry@0.6.3-u19.1) (2025-03-03)
-
-
- ### Bug Fixes
-
- * Properly synchronize slog sender termination ([2fc342c](https://github.com/Agoric/agoric-sdk/commit/2fc342c180e296208d077a1d4799da139d3b7848))
-
-
-
- ### [0.6.3-u19.0](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.2...@agoric/telemetry@0.6.3-u19.0) (2025-02-24)
+ ### [0.6.3-u21.0](https://github.com/Agoric/agoric-sdk/compare/@agoric/telemetry@0.6.2...@agoric/telemetry@0.6.3-u21.0) (2025-06-19)


  ### Features

+ * Add the Prometheus slog sender module and load it per OTEL_EXPORTER_PROMETHEUS_PORT ([1dc1827](https://github.com/Agoric/agoric-sdk/commit/1dc182783ce191f0ba2131cb1f7b3042f287737a)), closes [#11045](https://github.com/Agoric/agoric-sdk/issues/11045)
  * **cosmic-swingset:** add JS upgrade plan handler stub ([655133e](https://github.com/Agoric/agoric-sdk/commit/655133ed909b5d632dc033e992214a7b6a1b5ab1))
+ * **internal:** Add helper `unprefixedProperties` for environment variable consumption ([878fecf](https://github.com/Agoric/agoric-sdk/commit/878fecf4f5153fa80f48a27a8b79e67943b2d199))
  * simple CircularBuffer with fs offsets ([8d9cb7a](https://github.com/Agoric/agoric-sdk/commit/8d9cb7abe96e8905f5aaa0927e02914ef09279c4))
  * **telemetry:** context aware slog support new triggers ([03965d9](https://github.com/Agoric/agoric-sdk/commit/03965d90b86cf75ce7f6677861e3a0aa8ac70710))
  * **telemetry:** ingest-slog explicitly supports `-` for stdin ([63367c4](https://github.com/Agoric/agoric-sdk/commit/63367c4aaf9bafbd6553a1f4cb808c96bc90845a))
  * **telemetry:** ingest-slog throttle and flush per block ([2134944](https://github.com/Agoric/agoric-sdk/commit/21349448b3b9379a9da43218a59a7e7eaf4f5a9e))
+ * **telemetry:** Update slog sender JSON serialization of error instances ([5db996d](https://github.com/Agoric/agoric-sdk/commit/5db996d99830e61fad6eed373e2fb2dc810d662e))
  * use writeSync slogSender ([47a2add](https://github.com/Agoric/agoric-sdk/commit/47a2adda72a5377eda181a425130cdc5a7fd7ff5))


  ### Bug Fixes

  * ensure script main rejections exit with error ([abdab87](https://github.com/Agoric/agoric-sdk/commit/abdab879014a5c3124ebd0e9246995ac6b1ce6e5))
+ * Properly synchronize slog sender termination ([f83c01d](https://github.com/Agoric/agoric-sdk/commit/f83c01d89d80798e0922acdb498fcc7250560977))
  * **telemetry:** add missing slog type ([1aec8d0](https://github.com/Agoric/agoric-sdk/commit/1aec8d05036f6b3c3e3730339d1829da6b4a9051))
+ * **telemetry:** async flight recorder read ([b7a19dd](https://github.com/Agoric/agoric-sdk/commit/b7a19dd9c106d9b31e6f9188f5d4df0bbb5132bf))
  * **telemetry:** avoid polluting stdout in ingest-slog ([d4b8dfa](https://github.com/Agoric/agoric-sdk/commit/d4b8dfa91155789f7ceda5cc3cef06019b9527e7))
  * **telemetry:** Empty context persisted when remaining beans are negative after run finish ([#10635](https://github.com/Agoric/agoric-sdk/issues/10635)) ([ad4e83e](https://github.com/Agoric/agoric-sdk/commit/ad4e83e0b6dff9716da91fd65d367d3acad1772e))
  * **telemetry:** event name typo ([9e19321](https://github.com/Agoric/agoric-sdk/commit/9e19321ea8fed32d445d44169b32f5d94a93d61e))
+ * **telemetry:** Extend shutdown logic for slog-sender-pipe and otel-metrics ([7b8ccc8](https://github.com/Agoric/agoric-sdk/commit/7b8ccc82e641e5d11ccc6b8aebe524f75af829fe)), closes [#11175](https://github.com/Agoric/agoric-sdk/issues/11175)
+ * **telemetry:** flight recorder flush does sync ([d270202](https://github.com/Agoric/agoric-sdk/commit/d2702028d77c06f3b4de91ca711a3c45c685a477))
+ * **telemetry:** flight-recorder check second read size ([bfbacb2](https://github.com/Agoric/agoric-sdk/commit/bfbacb2b9f8de36f8f66b8cba8a88603fb7225e2))
+ * **telemetry:** flight-recorder ignores write after shutdown ([3d2bcb3](https://github.com/Agoric/agoric-sdk/commit/3d2bcb3c56ac24a0f991200b223e6af8514dc5b8))
  * **telemetry:** handle new trigger slog events ([d32cb7e](https://github.com/Agoric/agoric-sdk/commit/d32cb7e9f406c25399321dc32e827b5018c38b69))
  * **telemetry:** ingest-slog avoid writing progress file for stdin ([62589ca](https://github.com/Agoric/agoric-sdk/commit/62589ca7b6d4aaa9eb7042f95ec7aec633db27f9))
+ * **telemetry:** initialize empty flight-recorders ([0908258](https://github.com/Agoric/agoric-sdk/commit/0908258c159a18f2bace0f76fa25c485c0460d15))
+ * **telemetry:** Launch a slog sender subprocess with the correct environment ([1a60955](https://github.com/Agoric/agoric-sdk/commit/1a60955181f4e8b02b3b0d5a2f213d4cb051d7d3))
  * **telemetry:** otel correctly pop upgrade span ([0ffdf00](https://github.com/Agoric/agoric-sdk/commit/0ffdf001bc8cbdc94081fedfeb4d2376902f4ffc)), closes [#8272](https://github.com/Agoric/agoric-sdk/issues/8272) [#9569](https://github.com/Agoric/agoric-sdk/issues/9569)
  * **telemetry:** silence slogfile write errors ([91089d7](https://github.com/Agoric/agoric-sdk/commit/91089d7273ef3d41555b34d84471120d45602497))
  * **telemetry:** timer-poll run.id ([#10672](https://github.com/Agoric/agoric-sdk/issues/10672)) ([3b478fb](https://github.com/Agoric/agoric-sdk/commit/3b478fb9e3fe7ded8dec1e83bab68760571f9071)), closes [#10357](https://github.com/Agoric/agoric-sdk/issues/10357) [#10357](https://github.com/Agoric/agoric-sdk/issues/10357)
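The Prometheus-related entries above are connected: the new sender module is only loaded when the slog sender aggregator sees a Prometheus port in its agent environment (src/make-slog-sender.js below), while the exporter configuration itself lives in src/index.js. A minimal opt-in sketch, with an assumed import path and illustrative values rather than anything taken from this diff:

// Sketch only; the import path and values are assumptions.
import { makeSlogSender } from '@agoric/telemetry/src/make-slog-sender.js';

const slogSender = await makeSlogSender({
  env: {
    ...process.env,
    // The aggregator deliberately drops a bare OTEL_EXPORTER_PROMETHEUS_PORT;
    // the SLOGSENDER_AGENT_-prefixed form is its opt-in signal.
    SLOGSENDER_AGENT_OTEL_EXPORTER_PROMETHEUS_PORT: '9464',
  },
});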
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@agoric/telemetry",
-   "version": "0.6.3-u19.2",
+   "version": "0.6.3-u21.0",
    "description": "Agoric's telemetry implementation",
    "type": "module",
    "repository": "https://github.com/Agoric/agoric-sdk",
@@ -11,9 +11,9 @@
      "test:c8": "c8 --all $C8_OPTIONS ava",
      "test:xs": "exit 0",
      "lint-fix": "yarn lint:eslint --fix",
-     "lint": "run-s --continue-on-error lint:*",
-     "lint:types": "tsc",
-     "lint:eslint": "eslint ."
+     "lint": "yarn run -T run-s --continue-on-error 'lint:*'",
+     "lint:types": "yarn run -T tsc",
+     "lint:eslint": "yarn run -T eslint ."
    },
    "bin": {
      "frcat": "./src/frcat-entrypoint.js"
@@ -22,12 +22,13 @@
    "author": "Agoric",
    "license": "Apache-2.0",
    "dependencies": {
-     "@agoric/internal": "^0.4.0-u19.2",
-     "@agoric/store": "^0.9.3-u19.0",
-     "@endo/errors": "^1.2.9",
-     "@endo/init": "^1.1.8",
-     "@endo/marshal": "^1.6.3",
-     "@endo/stream": "^1.2.9",
+     "@agoric/internal": "workspace:*",
+     "@agoric/store": "workspace:*",
+     "@endo/errors": "^1.2.10",
+     "@endo/init": "^1.1.9",
+     "@endo/marshal": "^1.6.4",
+     "@endo/promise-kit": "^1.1.10",
+     "@endo/stream": "^1.2.10",
      "@opentelemetry/api": "~1.9.0",
      "@opentelemetry/api-logs": "0.57.1",
      "@opentelemetry/exporter-logs-otlp-http": "0.57.1",
@@ -39,21 +40,21 @@
      "@opentelemetry/sdk-trace-base": "~1.30.1",
      "@opentelemetry/semantic-conventions": "~1.28.0",
      "anylogger": "^0.21.0",
-     "better-sqlite3": "^9.1.1",
+     "better-sqlite3": "^10.1.0",
      "tmp": "^0.2.1"
    },
    "devDependencies": {
-     "@endo/lockdown": "^1.0.14",
-     "@endo/ses-ava": "^1.2.9",
+     "@endo/lockdown": "^1.0.15",
+     "@endo/ses-ava": "^1.2.10",
      "ava": "^5.3.0",
-     "c8": "^10.1.2",
+     "c8": "^10.1.3",
      "tmp": "^0.2.1"
    },
    "publishConfig": {
      "access": "public"
    },
    "engines": {
-     "node": "^18.12 || ^20.9"
+     "node": "^20.9 || ^22.11"
    },
    "ava": {
      "files": [
@@ -66,7 +67,7 @@
      "workerThreads": false
    },
    "typeCoverage": {
-     "atLeast": 88.88
+     "atLeast": 88.83
    },
-   "gitHead": "f0ae74b84cb6de3724bfdcd18b4bea7e8199dee1"
+   "gitHead": "e4dd46857133403d584bcf822a81817b355532f9"
  }
package/src/flight-recorder.js CHANGED
@@ -2,7 +2,6 @@
  /* eslint-env node */
  /// <reference types="ses" />

- import fs from 'node:fs';
  import fsp from 'node:fs/promises';
  import path from 'node:path';
  import { Fail } from '@endo/errors';
@@ -39,6 +38,9 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
      }
      throw e;
    });
+
+   // Use the default size if not provided and file doesn't exist.
+   circularBufferSize = circularBufferSize || stbuf?.size || DEFAULT_CBUF_SIZE;
    const arenaSize = BigInt(circularBufferSize - I_ARENA_START);

    if (stbuf && stbuf.size >= I_ARENA_START) {
@@ -77,7 +79,7 @@ const initializeCircularBuffer = async (bufferFile, circularBufferSize) => {
   * @param {(record: Uint8Array, firstWriteLength: number, circEnd: bigint) => Promise<void>} writeRecord
   */
  function makeCircBufMethods(arenaSize, header, readRecord, writeRecord) {
-   const readCircBuf = (outbuf, offset = 0) => {
+   const readCircBuf = async (outbuf, offset = 0) => {
      offset + outbuf.byteLength <= arenaSize ||
        Fail`Reading past end of circular buffer`;

@@ -99,7 +101,7 @@ function makeCircBufMethods(arenaSize, header, readRecord, writeRecord) {
        // The data is contiguous, like ---AAABBB---
        return { done: true, value: undefined };
      }
-     readRecord(outbuf, readStart, firstReadLength);
+     await readRecord(outbuf, readStart, firstReadLength);
      return { done: false, value: outbuf };
    };

@@ -143,9 +145,10 @@ function makeCircBufMethods(arenaSize, header, readRecord, writeRecord) {

      // Advance the start pointer until we have space to write the record.
      let overlap = BigInt(record.byteLength) - capacity;
+     await null;
      while (overlap > 0n) {
        const startRecordLength = new Uint8Array(RECORD_HEADER_SIZE);
-       const { done } = readCircBuf(startRecordLength);
+       const { done } = await readCircBuf(startRecordLength);
        if (done) {
          break;
        }
@@ -221,20 +224,22 @@ export const makeSimpleCircularBuffer = async ({
    arenaSize === hdrArenaSize ||
      Fail`${filename} arena size mismatch; wanted ${arenaSize}, got ${hdrArenaSize}`;

-   /** @type {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => void} */
-   const readRecord = (outbuf, readStart, firstReadLength) => {
-     const bytesRead = fs.readSync(file.fd, outbuf, {
+   /** @type {(outbuf: Uint8Array, readStart: number, firstReadLength: number) => Promise<void>} */
+   const readRecord = async (outbuf, readStart, firstReadLength) => {
+     const { bytesRead } = await file.read(outbuf, {
        length: firstReadLength,
        position: Number(readStart) + I_ARENA_START,
      });
      assert.equal(bytesRead, firstReadLength, 'Too few bytes read');

      if (bytesRead < outbuf.byteLength) {
-       fs.readSync(file.fd, outbuf, {
+       const length = outbuf.byteLength - firstReadLength;
+       const { bytesRead: bytesRead2 } = await file.read(outbuf, {
          offset: firstReadLength,
-         length: outbuf.byteLength - firstReadLength,
+         length,
          position: I_ARENA_START,
        });
+       assert.equal(bytesRead2, length, 'Too few bytes read');
      }
    };

@@ -280,20 +285,23 @@ export const makeSimpleCircularBuffer = async ({
   * @param {Pick<CircularBuffer, 'fileHandle' | 'writeCircBuf'>} circBuf
   */
  export const makeSlogSenderFromBuffer = ({ fileHandle, writeCircBuf }) => {
-   /** @type {Promise<void>} */
+   /** @type {Promise<void> | undefined} */
    let toWrite = Promise.resolve();
    const writeJSON = (obj, serialized = serializeSlogObj(obj)) => {
      // Prepend a newline so that the file can be more easily manipulated.
      const data = new TextEncoder().encode(`\n${serialized}`);
      // console.log('have obj', obj, data);
-     toWrite = toWrite.then(() => writeCircBuf(data));
+     toWrite = toWrite?.then(() => writeCircBuf(data));
    };
    return Object.assign(writeJSON, {
      forceFlush: async () => {
        await toWrite;
+       await fileHandle.datasync();
      },
      shutdown: async () => {
-       await toWrite;
+       const lastWritten = toWrite;
+       toWrite = undefined;
+       await lastWritten;
        await fileHandle.close();
      },
      usesJsonObject: true,
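The makeSlogSenderFromBuffer changes above serialize writes through a promise chain, datasync the file on forceFlush, and clear the chain on shutdown so that later writes are dropped, which matches the "flight-recorder ignores write after shutdown" fix in the changelog. A generic sketch of that queue pattern, using hypothetical names and no flight-recorder specifics:

// Sketch of the pattern, not the package API; `writeData` is any async write function.
const makeSerializedWriter = writeData => {
  /** @type {Promise<void> | undefined} */
  let tail = Promise.resolve();
  return {
    write: data => {
      // After shutdown, `tail` is undefined and the write is silently dropped.
      tail = tail?.then(() => writeData(data));
    },
    flush: async () => {
      await tail;
    },
    shutdown: async () => {
      const pending = tail;
      tail = undefined;
      await pending;
    },
  };
};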
package/src/frcat-entrypoint.js CHANGED
@@ -22,7 +22,7 @@ const main = async () => {
    let offset = 0;
    for (;;) {
      const lenBuf = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
-     const { done } = readCircBuf(lenBuf, offset);
+     const { done } = await readCircBuf(lenBuf, offset);
      if (done) {
        break;
      }
@@ -30,7 +30,7 @@
      const dv = new DataView(lenBuf.buffer);
      const len = Number(dv.getBigUint64(0));

-     const { done: done2, value: buf } = readCircBuf(
+     const { done: done2, value: buf } = await readCircBuf(
        new Uint8Array(len),
        offset,
      );
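Both the flight-recorder readRecord above and this frcat loop deal with records that may wrap past the end of the circular arena, which is why a single logical read can become two file reads. A standalone sketch of that split, using stand-in names rather than the package's internals:

// Sketch with stand-in names: a logical read of `recordLength` bytes starting at
// `readStart` within an arena of `arenaSize` bytes, stored at `arenaFileOffset`
// in the backing file, splits into at most two file reads.
const splitWrappedRead = (readStart, recordLength, arenaSize, arenaFileOffset) => {
  const firstReadLength = Math.min(recordLength, arenaSize - readStart);
  const reads = [
    { position: arenaFileOffset + readStart, offset: 0, length: firstReadLength },
  ];
  if (firstReadLength < recordLength) {
    // The remainder continues from the beginning of the arena.
    reads.push({
      position: arenaFileOffset,
      offset: firstReadLength,
      length: recordLength - firstReadLength,
    });
  }
  return reads;
};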
package/src/index.js CHANGED
@@ -84,44 +84,37 @@ export const getResourceAttributes = ({

  /**
   * @typedef {object} Powers
-  * @property {{ warn: Console['warn'] }} console
+  * @property {Pick<Console, 'warn'>} console
   * @property {NodeJS.ProcessEnv} env
   * @property {import('@opentelemetry/sdk-metrics').View[]} views
   * @property {string} [serviceName]
   */

  /**
-  * @param {Partial<Powers>} param0
+  * @param {Partial<Powers>} powers
   */
- const getPrometheusMeterProvider = ({
+ export const getPrometheusMeterProvider = ({
    console = globalThis.console,
    env = process.env,
    views,
    ...rest
  } = {}) => {
-   const { OTEL_EXPORTER_PROMETHEUS_PORT } = env;
-   if (!OTEL_EXPORTER_PROMETHEUS_PORT) {
-     // No Prometheus config, so don't install.
-     return undefined;
-   }
+   const { OTEL_EXPORTER_PROMETHEUS_HOST, OTEL_EXPORTER_PROMETHEUS_PORT } = env;
+
+   // The opt-in signal is a non-empty OTEL_EXPORTER_PROMETHEUS_PORT.
+   if (!OTEL_EXPORTER_PROMETHEUS_PORT) return;

    const resource = new Resource(getResourceAttributes({ env, ...rest }));

-   const port =
-     parseInt(OTEL_EXPORTER_PROMETHEUS_PORT || '', 10) ||
-     PrometheusExporter.DEFAULT_OPTIONS.port;
+   const { DEFAULT_OPTIONS } = PrometheusExporter;
+   const host = OTEL_EXPORTER_PROMETHEUS_HOST || DEFAULT_OPTIONS.host;
+   const port = +OTEL_EXPORTER_PROMETHEUS_PORT || DEFAULT_OPTIONS.port;
+   const url = `http://${host || '0.0.0.0'}:${port}${DEFAULT_OPTIONS.endpoint}`;

-   const exporter = new PrometheusExporter(
-     {
-       port,
-       appendTimestamp: true,
-     },
-     () => {
-       console.warn(
-         `Prometheus scrape endpoint: http://0.0.0.0:${port}${PrometheusExporter.DEFAULT_OPTIONS.endpoint}`,
-       );
-     },
-   );
+   const options = { host, port, appendTimestamp: true };
+   const exporter = new PrometheusExporter(options, () => {
+     console.warn(`Prometheus scrape endpoint: ${url}`);
+   });

    const provider = new MeterProvider({ resource, views });
    provider.addMetricReader(exporter);
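Since getPrometheusMeterProvider is now exported and also honors OTEL_EXPORTER_PROMETHEUS_HOST, a caller could obtain a meter provider roughly as follows. The import specifier is an assumption, and the early return when the port variable is unset is visible above:

// Sketch; the import specifier is assumed, not confirmed by this diff.
import { getPrometheusMeterProvider } from '@agoric/telemetry';

const meterProvider = getPrometheusMeterProvider({
  env: {
    OTEL_EXPORTER_PROMETHEUS_HOST: '127.0.0.1', // optional; exporter default otherwise
    OTEL_EXPORTER_PROMETHEUS_PORT: '9464',
  },
  views: [],
});
// Undefined when the port variable is unset; otherwise the exporter callback logs
// something like "Prometheus scrape endpoint: http://127.0.0.1:9464/metrics".
const meter = meterProvider?.getMeter('example-meter');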
package/src/make-slog-sender.js CHANGED
@@ -1,13 +1,13 @@
  import path from 'path';
  import tmp from 'tmp';
- import { PromiseAllOrErrors } from '@agoric/internal';
+ import { PromiseAllOrErrors, unprefixedProperties } from '@agoric/internal';
  import { serializeSlogObj } from './serialize-slog-obj.js';

+ export const DEFAULT_SLOGSENDER_AGENT = 'self';
  export const DEFAULT_SLOGSENDER_MODULE =
    '@agoric/telemetry/src/flight-recorder.js';
  export const SLOGFILE_SENDER_MODULE = '@agoric/telemetry/src/slog-file.js';
-
- export const DEFAULT_SLOGSENDER_AGENT = 'self';
+ export const PROMETHEUS_SENDER_MODULE = '@agoric/telemetry/src/prometheus.js';

  /** @import {SlogSender} from './index.js' */

@@ -19,6 +19,23 @@ export const DEFAULT_SLOGSENDER_AGENT = 'self';
  const filterTruthy = arr => /** @type {any[]} */ (arr.filter(Boolean));

  /**
+  * Create an aggregate slog sender that fans out inbound slog entries to modules
+  * as indicated by variables in the supplied `env` option. The SLOGSENDER value
+  * (or a default DEFAULT_SLOGSENDER_MODULE defined above) is split on commas
+  * into a list of module identifiers and adjusted by automatic insertions (a
+  * non-empty SLOGFILE value inserts DEFAULT_SLOGSENDER_AGENT defined above), and
+  * then each identifier is dynamically `import`ed for its own `makeSlogSender`
+  * export, which is invoked with a non-empty `stateDir` option and a modified
+  * `env` in which SLOGSENDER_AGENT_* variables have overridden their unprefixed
+  * equivalents to produce a subordinate slog sender.
+  * Subordinate slog senders remain isolated from each other, and any errors from
+  * them are caught and held until the next `forceFlush()` without disrupting
+  * any remaining slog entry fanout.
+  * If SLOGSENDER_AGENT is 'process', 'slog-sender-pipe.js' is used to load the
+  * subordinates in a child process rather than the main process.
+  * When there are no subordinates, the return value will be `undefined` rather
+  * than a slog sender function.
+  *
   * @type {import('./index.js').MakeSlogSender}
   */
  export const makeSlogSender = async (opts = {}) => {
@@ -26,95 +43,95 @@ export const makeSlogSender = async (opts = {}) => {
    const {
      SLOGSENDER = DEFAULT_SLOGSENDER_MODULE,
      SLOGSENDER_AGENT = DEFAULT_SLOGSENDER_AGENT,
+     // While cosmic-swingset/kernel code includes its own Prometheus metrics
+     // export, that trumps a slog sender module doing so.
+     // This extraction can be removed when that changes, but in the meantime,
+     // opt-in is only by SLOGSENDER_AGENT_OTEL_EXPORTER_PROMETHEUS_PORT.
+     OTEL_EXPORTER_PROMETHEUS_PORT: _prometheusExportPort,
      ...otherEnv
    } = env;

    const agentEnv = {
      ...otherEnv,
-     ...Object.fromEntries(
-       Object.entries(otherEnv)
-         .filter(([k]) => k.match(/^(?:SLOGSENDER_AGENT_)+/)) // narrow to SLOGSENDER_AGENT_ prefixes.
-         .map(([k, v]) => [k.replace(/^(?:SLOGSENDER_AGENT_)+/, ''), v]), // Rewrite SLOGSENDER_AGENT_ to un-prefixed version.
-     ),
+     ...unprefixedProperties(otherEnv, 'SLOGSENDER_AGENT_'),
    };

-   const slogSenderModules = [
-     ...new Set([
-       ...(agentEnv.SLOGFILE ? [SLOGFILE_SENDER_MODULE] : []),
-       ...SLOGSENDER.split(',')
-         .filter(Boolean)
-         .map(modulePath =>
-           modulePath.startsWith('.')
-             ? // Resolve relative to the current working directory.
-               path.resolve(modulePath)
-             : modulePath,
-         ),
-     ]),
-   ];
-
-   if (!slogSenderModules.length) {
+   const slogSenderModules = new Set();
+   if (agentEnv.OTEL_EXPORTER_PROMETHEUS_PORT) {
+     slogSenderModules.add(PROMETHEUS_SENDER_MODULE);
+   }
+   if (agentEnv.SLOGFILE) {
+     slogSenderModules.add(SLOGFILE_SENDER_MODULE);
+   }
+   for (const moduleIdentifier of filterTruthy(SLOGSENDER.split(','))) {
+     if (moduleIdentifier.startsWith('-')) {
+       // Opt out of an automatically-included sender.
+       slogSenderModules.delete(moduleIdentifier.slice(1));
+     } else if (moduleIdentifier.startsWith('.')) {
+       // Resolve relative to the current working directory.
+       slogSenderModules.add(path.resolve(moduleIdentifier));
+     } else {
+       slogSenderModules.add(moduleIdentifier);
+     }
+   }
+
+   if (!slogSenderModules.size) {
      return undefined;
    }

-   switch (SLOGSENDER_AGENT) {
-     case '':
-     case 'self':
-       break;
-     case 'process': {
-       console.warn('Loading slog sender in subprocess');
-       return import('./slog-sender-pipe.js').then(
-         async ({ makeSlogSender: makeSogSenderPipe }) =>
-           makeSogSenderPipe({
-             env: {
-               ...agentEnv,
-               SLOGSENDER,
-               SLOGSENDER_AGENT: 'self',
-             },
-             stateDir: stateDirOption,
-             ...otherOpts,
-           }),
-       );
-     }
-     case 'worker':
-     default:
-       console.warn(`Unknown SLOGSENDER_AGENT=${SLOGSENDER_AGENT}`);
+   if (SLOGSENDER_AGENT === 'process') {
+     console.warn('Loading slog sender in subprocess');
+     return import('./slog-sender-pipe.js').then(async module =>
+       module.makeSlogSender({
+         env: {
+           ...agentEnv,
+           SLOGSENDER,
+           SLOGSENDER_AGENT: 'self',
+         },
+         stateDir: stateDirOption,
+         ...otherOpts,
+       }),
+     );
+   } else if (SLOGSENDER_AGENT && SLOGSENDER_AGENT !== 'self') {
+     console.warn(
+       `Unknown SLOGSENDER_AGENT=${SLOGSENDER_AGENT}; defaulting to 'self'`,
+     );
    }

    if (SLOGSENDER) {
      console.warn('Loading slog sender modules:', ...slogSenderModules);
    }

-   const makersInfo = await Promise.all(
-     slogSenderModules.map(async moduleIdentifier =>
-       import(moduleIdentifier)
-         .then(
-           /** @param {{makeSlogSender: import('./index.js').MakeSlogSender}} module */ ({
-             makeSlogSender: maker,
-           }) => {
-             if (typeof maker !== 'function') {
-               return Promise.reject(
-                 Error(`No 'makeSlogSender' function exported by module`),
-               );
-             } else if (maker === makeSlogSender) {
-               return Promise.reject(
-                 Error(`Cannot recursively load 'makeSlogSender' aggregator`),
-               );
-             }
-
-             return /** @type {const} */ ([maker, moduleIdentifier]);
-           },
-         )
-         .catch(err => {
+   /** @type {Map<import('./index.js').MakeSlogSender, string>} */
+   const makerMap = new Map();
+   await Promise.all(
+     [...slogSenderModules].map(async moduleIdentifier => {
+       await null;
+       try {
+         const module = await import(moduleIdentifier);
+         const { makeSlogSender: maker } = module;
+         if (typeof maker !== 'function') {
+           throw Error(`No 'makeSlogSender' function exported by module`);
+         } else if (maker === makeSlogSender) {
+           throw Error(`Cannot recursively load 'makeSlogSender' aggregator`);
+         }
+         const isReplacing = makerMap.get(maker);
+         if (isReplacing) {
            console.warn(
-             `Failed to load slog sender from ${moduleIdentifier}.`,
-             err,
+             `The slog sender from ${moduleIdentifier} matches the one from ${isReplacing}.`,
            );
-           return undefined;
-         }),
-     ),
-   ).then(makerEntries => [...new Map(filterTruthy(makerEntries)).entries()]);
+         }
+         makerMap.set(maker, moduleIdentifier);
+       } catch (err) {
+         console.warn(
+           `Failed to load slog sender from ${moduleIdentifier}.`,
+           err,
+         );
+       }
+     }),
+   );

-   if (!makersInfo.length) {
+   if (!makerMap.size) {
      return undefined;
    }

@@ -122,11 +139,11 @@ export const makeSlogSender = async (opts = {}) => {

    if (stateDir === undefined) {
      stateDir = tmp.dirSync().name;
-     console.warn(`Using ${stateDir} for stateDir`);
+     console.warn(`Using ${stateDir} for slog sender stateDir`);
    }

    const senders = await Promise.all(
-     makersInfo.map(async ([maker, moduleIdentifier]) =>
+     [...makerMap.entries()].map(async ([maker, moduleIdentifier]) =>
        maker({
          ...otherOpts,
          stateDir,
@@ -137,37 +154,37 @@ export const makeSlogSender = async (opts = {}) => {

    if (!senders.length) {
      return undefined;
-   } else {
-     // Optimize creating a JSON serialization only if needed
-     // by any of the sender modules
-     const hasSenderUsingJsonObj = senders.some(
-       ({ usesJsonObject = true }) => usesJsonObject,
-     );
-     const getJsonObj = hasSenderUsingJsonObj
-       ? serializeSlogObj
-       : () => undefined;
-     const sendErrors = [];
-     /** @type {SlogSender} */
-     const slogSender = (slogObj, jsonObj = getJsonObj(slogObj)) => {
-       for (const sender of senders) {
-         try {
-           sender(slogObj, jsonObj);
-         } catch (err) {
-           sendErrors.push(err);
-         }
-       }
-     };
-     return Object.assign(slogSender, {
-       forceFlush: async () => {
-         await PromiseAllOrErrors([
-           ...senders.map(sender => sender.forceFlush?.()),
-           ...sendErrors.splice(0).map(err => Promise.reject(err)),
-         ]);
-       },
-       shutdown: async () => {
-         await PromiseAllOrErrors(senders.map(sender => sender.shutdown?.()));
-       },
-       usesJsonObject: hasSenderUsingJsonObj,
-     });
    }
+
+   // Optimize creating a JSON serialization only if needed
+   // by at least one of the senders.
+   const hasSenderUsingJsonObj = senders.some(
+     ({ usesJsonObject = true }) => usesJsonObject,
+   );
+   const getJsonObj = hasSenderUsingJsonObj ? serializeSlogObj : () => undefined;
+
+   const sendErrors = [];
+
+   /** @type {SlogSender} */
+   const slogSender = (slogObj, jsonObj = getJsonObj(slogObj)) => {
+     for (const sender of senders) {
+       try {
+         sender(slogObj, jsonObj);
+       } catch (err) {
+         sendErrors.push(err);
+       }
+     }
+   };
+   return Object.assign(slogSender, {
+     forceFlush: async () => {
+       await PromiseAllOrErrors([
+         ...senders.map(sender => sender.forceFlush?.()),
+         ...sendErrors.splice(0).map(err => Promise.reject(err)),
+       ]);
+     },
+     shutdown: async () => {
+       await PromiseAllOrErrors(senders.map(sender => sender.shutdown?.()));
+     },
+     usesJsonObject: hasSenderUsingJsonObj,
+   });
  };
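Taken together, the rewritten aggregator is driven entirely by its env option: SLOGSENDER lists comma-separated module identifiers, a leading '-' removes an automatically added sender, SLOGSENDER_AGENT chooses in-process ('self') or subprocess ('process') loading, and SLOGSENDER_AGENT_-prefixed variables override their unprefixed counterparts for the subordinate senders. A usage sketch with hypothetical paths and values (the import path is assumed):

// Sketch; paths and values are illustrative, not taken from this diff.
import { makeSlogSender } from '@agoric/telemetry/src/make-slog-sender.js';

const slogSender = await makeSlogSender({
  stateDir: '/var/lib/agoric/slog-state', // hypothetical; a tmp dir is used if omitted
  env: {
    // Load the default flight recorder plus a local module resolved from cwd.
    SLOGSENDER: '@agoric/telemetry/src/flight-recorder.js,./my-local-sender.js',
    // 'process' loads the subordinate senders via slog-sender-pipe.js in a child process.
    SLOGSENDER_AGENT: 'process',
    // Seen by subordinates as SLOGFILE, which adds the slog-file sender automatically.
    SLOGSENDER_AGENT_SLOGFILE: '/tmp/chain.slog',
  },
});
slogSender?.({ type: 'example-event', time: Date.now() });
await slogSender?.forceFlush?.(); // rejects if any sender threw since the last flush
await slogSender?.shutdown?.();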