@agoric/telemetry 0.6.3-other-dev-1f26562.0 → 0.6.3-other-dev-3eb1a1d.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,131 @@
+ /* eslint-env node */
+ import { logs, SeverityNumber } from '@opentelemetry/api-logs';
+ import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http';
+ import { Resource } from '@opentelemetry/resources';
+ import {
+   LoggerProvider,
+   SimpleLogRecordProcessor,
+ } from '@opentelemetry/sdk-logs';
+ import { readFileSync, writeFileSync } from 'fs';
+ import { makeContextualSlogProcessor } from './context-aware-slog.js';
+ import { getResourceAttributes } from './index.js';
+ import { serializeSlogObj } from './serialize-slog-obj.js';
+
+ const DEFAULT_CONTEXT_FILE = 'slog-context.json';
+ const FILE_ENCODING = 'utf8';
+
+ /**
+  * @param {string} filePath
+  */
+ export const getContextFilePersistenceUtils = filePath => {
+   console.warn(`Using file ${filePath} for slogger context`);
+
+   return {
+     /**
+      * @param {import('./context-aware-slog.js').Context} context
+      */
+     persistContext: context => {
+       try {
+         writeFileSync(filePath, serializeSlogObj(context), FILE_ENCODING);
+       } catch (err) {
+         console.error('Error writing context to file: ', err);
+       }
+     },
+
+     /**
+      * @returns {import('./context-aware-slog.js').Context | null}
+      */
+     restoreContext: () => {
+       try {
+         return JSON.parse(readFileSync(filePath, FILE_ENCODING));
+       } catch (parseErr) {
+         console.error('Error reading context from file: ', parseErr);
+         return null;
+       }
+     },
+   };
+ };
+
+ /**
+  * @param {import('./index.js').MakeSlogSenderOptions} options
+  */
+ export const makeSlogSender = async options => {
+   const { CHAIN_ID, OTEL_EXPORTER_OTLP_ENDPOINT } = options.env || {};
+   if (!(OTEL_EXPORTER_OTLP_ENDPOINT && options.stateDir))
+     return console.error(
+       'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" and "stateDir"',
+     );
+
+   const loggerProvider = new LoggerProvider({
+     resource: new Resource(getResourceAttributes(options)),
+   });
+
+   const otelLogExporter = new OTLPLogExporter({ keepAlive: true });
+   const logRecordProcessor = new SimpleLogRecordProcessor(otelLogExporter);
+
+   loggerProvider.addLogRecordProcessor(logRecordProcessor);
+
+   logs.setGlobalLoggerProvider(loggerProvider);
+   const logger = logs.getLogger('default');
+
+   const persistenceUtils = getContextFilePersistenceUtils(
+     process.env.SLOG_CONTEXT_FILE_PATH ||
+       `${options.stateDir}/${DEFAULT_CONTEXT_FILE}`,
+   );
+
+   const contextualSlogProcessor = makeContextualSlogProcessor(
+     { 'chain-id': CHAIN_ID },
+     persistenceUtils,
+   );
+
+   /**
+    * @param {import('./context-aware-slog.js').Slog} slog
+    */
+   const slogSender = slog => {
+     const { timestamp, ...logRecord } = contextualSlogProcessor(slog);
+
+     const [secondsStr, fractionStr] = String(timestamp).split('.');
+     const seconds = parseInt(secondsStr, 10);
+     const nanoSeconds = parseInt(
+       (fractionStr || String(0)).padEnd(9, String(0)).slice(0, 9),
+       10,
+     );
+
+     logger.emit({
+       ...JSON.parse(serializeSlogObj(logRecord)),
+       severityNumber: SeverityNumber.INFO,
+       timestamp: [seconds, nanoSeconds],
+     });
+   };
+
+   const shutdown = async () => {
+     await Promise.resolve();
+     const errors = [];
+
+     try {
+       await logRecordProcessor.shutdown();
+     } catch (err) {
+       errors.push(err);
+     }
+
+     try {
+       await otelLogExporter.forceFlush();
+     } catch (err) {
+       errors.push(err);
+     }
+
+     switch (errors.length) {
+       case 0:
+         return;
+       case 1:
+         throw errors[0];
+       default:
+         throw AggregateError(errors);
+     }
+   };
+
+   return Object.assign(slogSender, {
+     forceFlush: () => otelLogExporter.forceFlush(),
+     shutdown,
+   });
+ };
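For orientation only, a minimal sketch of how the new sender above might be wired up. It is not part of the published diff; the import path and the sample slog event shape are assumptions, while env.OTEL_EXPORTER_OTLP_ENDPOINT, env.CHAIN_ID, and stateDir are the inputs the code above actually checks.

// Hypothetical usage sketch; the import path below is assumed, not shown in the diff.
import { makeSlogSender } from '@agoric/telemetry/src/context-aware-slog-sender.js';

// Resolves to undefined (after logging an error) when OTEL_EXPORTER_OTLP_ENDPOINT or
// stateDir is missing, otherwise to a sender with forceFlush() and shutdown().
const slogSender = await makeSlogSender({
  env: process.env, // expected to carry OTEL_EXPORTER_OTLP_ENDPOINT and CHAIN_ID
  stateDir: '/tmp/agd-telemetry', // assumed path; slog-context.json is persisted here
});

// Example slog entry (shape assumed); its timestamp is emitted as [seconds, nanoseconds].
slogSender?.({ type: 'cosmic-swingset-begin-block', blockHeight: 100, blockTime: 1714060800 });
await slogSender?.forceFlush();
await slogSender?.shutdown();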
package/src/slog-file.js CHANGED
@@ -11,7 +11,7 @@ export const makeSlogSender = async ({ env: { SLOGFILE } = {} } = {}) => {
 
   const slogSender = (slogObj, jsonObj = serializeSlogObj(slogObj)) => {
     // eslint-disable-next-line prefer-template
-    void stream.write(jsonObj + '\n');
+    stream.write(jsonObj + '\n').catch(() => {});
   };
 
   return Object.assign(slogSender, {
@@ -1,7 +1,6 @@
- /* global process */
+ /* eslint-env node */
  import '@endo/init';
 
- import { makeAggregateError } from '@agoric/internal';
  import anylogger from 'anylogger';
  import { makeShutdown } from '@agoric/internal/src/node/shutdown.js';
 
@@ -75,7 +74,7 @@ const main = async () => {
       sendErrors.unshift(actualFlushError);
     }
 
-    return makeAggregateError(sendErrors.splice(0));
+    return AggregateError(sendErrors.splice(0));
   };
 
   process.on(
@@ -130,7 +129,13 @@ const main = async () => {
     );
 };
 
- main().catch(e => {
-   logger.error(e);
-   process.exitCode = 1;
- });
+ process.exitCode = 1;
+ main().then(
+   () => {
+     process.exitCode = 0;
+   },
+   err => {
+     logger.error('Failed with', err);
+     process.exit(process.exitCode || 1);
+   },
+ );
@@ -6,8 +6,7 @@ import { makeQueue } from '@endo/stream';
 
  import { makeShutdown } from '@agoric/internal/src/node/shutdown.js';
 
- const filename = new URL(import.meta.url).pathname;
- const dirname = path.dirname(filename);
+ const dirname = path.dirname(new URL(import.meta.url).pathname);
 
  const logger = anylogger('slog-sender-pipe');
 
@@ -78,11 +77,11 @@ export const makeSlogSender = async opts => {
   /**
    * @typedef {{
    *   init: {
-   *     message: import('./slog-sender-pipe-entrypoint').InitMessage;
+   *     message: import('./slog-sender-pipe-entrypoint.js').InitMessage;
    *     reply: SlogSenderInitReply;
    *   };
    *   flush: {
-   *     message: import('./slog-sender-pipe-entrypoint').FlushMessage;
+   *     message: import('./slog-sender-pipe-entrypoint.js').FlushMessage;
    *     reply: SlogSenderFlushReply;
    *   };
    * }} SlogSenderWaitMessagesAndReplies
@@ -1,7 +1,7 @@
  import otel, { SpanStatusCode } from '@opentelemetry/api';
 
+ import { Fail, q } from '@endo/errors';
  import { makeMarshal, Remotable } from '@endo/marshal';
- import { Fail, q } from '@agoric/assert';
 
  import { makeLegacyMap } from '@agoric/store';
  import {
@@ -14,11 +14,8 @@ import {
 
  // diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.VERBOSE);
 
- /** @typedef {import('@opentelemetry/api').Span} Span */
- /** @typedef {import('@opentelemetry/api').Link} SpanLink */
- /** @typedef {import('@opentelemetry/api').SpanContext} SpanContext */
- /** @typedef {import('@opentelemetry/api').SpanOptions} SpanOptions */
- /** @typedef {import('@opentelemetry/api').SpanAttributes} SpanAttributes */
+ /** @import {Span, Link as SpanLink} from '@opentelemetry/api' */
+ /** @import {SpanContext, SpanOptions} from '@opentelemetry/api' */
 
  const { assign } = Object;
 
@@ -54,9 +51,9 @@ const serializeInto = (value, prefix, target = {}, depth = 3) => {
    } else {
      const proto = Object.getPrototypeOf(value);
      if (proto == null || proto === Object.prototype) {
-       Object.entries(value).forEach(([key, nested]) =>
-         serializeInto(nested, `${prefix}.${key}`, target, depth),
-       );
+       for (const [key, nested] of Object.entries(value)) {
+         serializeInto(nested, `${prefix}.${key}`, target, depth);
+       }
        return target;
      }
    }
@@ -142,7 +139,10 @@ export const makeSlogToOtelKit = (tracer, overrideAttrs = {}) => {
    serializeBodyFormat: 'smallcaps',
  });
 
- /** @param {import('@agoric/swingset-vat').SwingSetCapData} data */
+ /**
+  * @param {import('@agoric/swingset-vat').SwingSetCapData} data
+  * @returns {any}
+  */
  const unserialize = data => {
    try {
      const body = rawUnserialize(data);
@@ -915,7 +915,7 @@ export const makeSlogToOtelKit = (tracer, overrideAttrs = {}) => {
        break;
      }
      case 'cosmic-swingset-upgrade-finish': {
-       spans.pop(['slogAttrs.blockHeight', slogAttrs.blockHeight]);
+       spans.pop(['upgrade', slogAttrs.blockHeight]);
        dbTransactionManager.end();
        break;
      }
@@ -971,6 +971,21 @@ export const makeSlogToOtelKit = (tracer, overrideAttrs = {}) => {
        spans.pop('bridge-inbound');
        break;
      }
+     case 'cosmic-swingset-timer-poll': {
+       spans.push(['timer-poll', slogAttrs.blockTime]);
+       spans.pop('timer-poll');
+       break;
+     }
+     case 'cosmic-swingset-inject-kernel-upgrade-events': {
+       spans.push('kernel-upgrade-events');
+       spans.pop('kernel-upgrade-events');
+       break;
+     }
+     case 'cosmic-swingset-install-bundle': {
+       spans.push(['install-bundle', slogAttrs.endoZipBase64Sha512]);
+       spans.pop('install-bundle');
+       break;
+     }
      case 'cosmic-swingset-end-block-start': {
        // Add `end-block` as an event onto the encompassing `block` span
        spans.top()?.addEvent('end-block-action', cleanAttrs(slogAttrs), now);
@@ -0,0 +1,83 @@
+ import fs from 'node:fs';
+ import tmp from 'tmp';
+ import { test } from './prepare-test-env-ava.js';
+
+ import {
+   makeSimpleCircularBuffer,
+   makeSlogSenderFromBuffer,
+ } from '../src/flight-recorder.js';
+
+ // Factored this way to support multiple implementations, which at one point there were
+ const bufferTests = test.macro(
+   /**
+    *
+    * @param {*} t
+    * @param {{makeBuffer: Function}} input
+    */
+   async (t, input) => {
+     const BUFFER_SIZE = 512;
+
+     const { name: tmpFile, removeCallback } = tmp.fileSync();
+     const { readCircBuf, writeCircBuf } = await input.makeBuffer({
+       circularBufferSize: BUFFER_SIZE,
+       circularBufferFilename: tmpFile,
+     });
+     const slogSender = makeSlogSenderFromBuffer({ writeCircBuf });
+     slogSender({ type: 'start' });
+     await slogSender.forceFlush();
+     t.is(fs.readFileSync(tmpFile, { encoding: 'utf8' }).length, BUFFER_SIZE);
+
+     const len0 = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
+     const { done: done0 } = readCircBuf(len0);
+     t.false(done0, 'readCircBuf should not be done');
+     const dv0 = new DataView(len0.buffer);
+     const buf0 = new Uint8Array(Number(dv0.getBigUint64(0)));
+     const { done: done0b } = readCircBuf(buf0, len0.byteLength);
+     t.false(done0b, 'readCircBuf should not be done');
+     const buf0Str = new TextDecoder().decode(buf0);
+     t.is(buf0Str, `\n{"type":"start"}`, `start compare failed`);
+
+     const last = 500;
+     for (let i = 0; i < last; i += 1) {
+       slogSender({ type: 'iteration', iteration: i });
+       await slogSender.forceFlush();
+       t.is(
+         fs.readFileSync(tmpFile, { encoding: 'utf8' }).length,
+         BUFFER_SIZE,
+         `iteration ${i} length mismatch`,
+       );
+     }
+
+     let offset = 0;
+     const len1 = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
+     for (let i = 490; i < last; i += 1) {
+       const { done: done1 } = readCircBuf(len1, offset);
+       offset += len1.byteLength;
+       t.false(done1, `readCircBuf ${i} should not be done`);
+       const dv1 = new DataView(len1.buffer);
+       const buf1 = new Uint8Array(Number(dv1.getBigUint64(0)));
+       const { done: done1b } = readCircBuf(buf1, offset);
+       offset += buf1.byteLength;
+       t.false(done1b, `readCircBuf ${i} should not be done`);
+       const buf1Str = new TextDecoder().decode(buf1);
+       t.is(
+         buf1Str,
+         `\n{"type":"iteration","iteration":${i}}`,
+         `iteration ${i} compare failed`,
+       );
+     }
+
+     const { done: done2 } = readCircBuf(len1, offset);
+     t.assert(done2, `readCircBuf ${last} should be done`);
+
+     slogSender(null, 'PRE-SERIALIZED');
+     await slogSender.forceFlush();
+     t.truthy(fs.readFileSync(tmpFile).includes('PRE-SERIALIZED'));
+     // console.log({ tmpFile });
+     removeCallback();
+   },
+ );
+
+ test('simple', bufferTests, {
+   makeBuffer: makeSimpleCircularBuffer,
+ });
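The record framing that the test above decodes (an 8-byte length prefix, read with getBigUint64 and therefore big-endian by default, followed by the serialized slog entry) can be captured in a small helper. This is an illustrative sketch, not code from the package; readCircBuf(outBuf, offset) is the only API it relies on.

// Illustrative helper (not in the package): read one length-prefixed record
// from the circular buffer, mirroring the decoding steps in the test above.
const readRecord = (readCircBuf, offset = 0) => {
  const lenBuf = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
  if (readCircBuf(lenBuf, offset).done) return { done: true };
  // 8-byte length prefix, big-endian per DataView's default.
  const length = Number(new DataView(lenBuf.buffer).getBigUint64(0));
  const dataBuf = new Uint8Array(length);
  if (readCircBuf(dataBuf, offset + lenBuf.byteLength).done) return { done: true };
  return {
    done: false,
    record: new TextDecoder().decode(dataBuf), // e.g. '\n{"type":"start"}'
    nextOffset: offset + lenBuf.byteLength + length,
  };
};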
@@ -1,5 +1,3 @@
- import '@endo/init';
-
  import { wrapTest } from '@endo/ses-ava';
  import rawTest from 'ava';
 
@@ -1,6 +1,7 @@
  // This file can contain .js-specific Typescript compiler config.
  {
    "extends": "../../tsconfig.json",
+   "compilerOptions": {},
    "include": [
      "*.js",
      "scripts",
@@ -1,53 +0,0 @@
- import tmp from 'tmp';
- import { test } from './prepare-test-env-ava.js';
-
- import { makeMemoryMappedCircularBuffer } from '../src/flight-recorder.js';
-
- test('flight-recorder sanity', async t => {
-   const { name: tmpFile, removeCallback } = tmp.fileSync();
-   const { writeJSON: slogSender, readCircBuf } =
-     await makeMemoryMappedCircularBuffer({
-       circularBufferSize: 512,
-       circularBufferFilename: tmpFile,
-     });
-   slogSender({ type: 'start' });
-
-   const len0 = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
-   const { done: done0 } = readCircBuf(len0);
-   t.false(done0, 'readCircBuf should not be done');
-   const dv0 = new DataView(len0.buffer);
-   const buf0 = new Uint8Array(Number(dv0.getBigUint64(0)));
-   const { done: done0b } = readCircBuf(buf0, len0.byteLength);
-   t.false(done0b, 'readCircBuf should not be done');
-   const buf0Str = new TextDecoder().decode(buf0);
-   t.is(buf0Str, `\n{"type":"start"}`, `start compare failed`);
-
-   const last = 500;
-   for (let i = 0; i < last; i += 1) {
-     slogSender({ type: 'iteration', iteration: i });
-   }
-
-   let offset = 0;
-   const len1 = new Uint8Array(BigUint64Array.BYTES_PER_ELEMENT);
-   for (let i = 490; i < last; i += 1) {
-     const { done: done1 } = readCircBuf(len1, offset);
-     offset += len1.byteLength;
-     t.false(done1, `readCircBuf ${i} should not be done`);
-     const dv1 = new DataView(len1.buffer);
-     const buf1 = new Uint8Array(Number(dv1.getBigUint64(0)));
-     const { done: done1b } = readCircBuf(buf1, offset);
-     offset += buf1.byteLength;
-     t.false(done1b, `readCircBuf ${i} should not be done`);
-     const buf1Str = new TextDecoder().decode(buf1);
-     t.is(
-       buf1Str,
-       `\n{"type":"iteration","iteration":${i}}`,
-       `iteration ${i} compare failed`,
-     );
-   }
-
-   const { done: done2 } = readCircBuf(len1, offset);
-   t.assert(done2, `readCircBuf ${last} should be done`);
-   // console.log({ tmpFile });
-   removeCallback();
- });