@lodestar/beacon-node 1.37.0 → 1.38.0-dev.255e56fb68

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +1 -1
  2. package/lib/api/impl/beacon/state/utils.d.ts.map +1 -1
  3. package/lib/api/impl/beacon/state/utils.js +5 -3
  4. package/lib/api/impl/beacon/state/utils.js.map +1 -1
  5. package/lib/chain/blocks/importBlock.d.ts.map +1 -1
  6. package/lib/chain/blocks/importBlock.js +4 -4
  7. package/lib/chain/blocks/importBlock.js.map +1 -1
  8. package/lib/chain/blocks/index.d.ts.map +1 -1
  9. package/lib/chain/blocks/index.js +2 -1
  10. package/lib/chain/blocks/index.js.map +1 -1
  11. package/lib/chain/blocks/types.d.ts +5 -1
  12. package/lib/chain/blocks/types.d.ts.map +1 -1
  13. package/lib/chain/blocks/verifyBlock.d.ts +2 -0
  14. package/lib/chain/blocks/verifyBlock.d.ts.map +1 -1
  15. package/lib/chain/blocks/verifyBlock.js +18 -10
  16. package/lib/chain/blocks/verifyBlock.js.map +1 -1
  17. package/lib/chain/blocks/verifyBlocksSignatures.d.ts +2 -2
  18. package/lib/chain/blocks/verifyBlocksSignatures.d.ts.map +1 -1
  19. package/lib/chain/blocks/verifyBlocksSignatures.js +2 -2
  20. package/lib/chain/blocks/verifyBlocksSignatures.js.map +1 -1
  21. package/lib/chain/opPools/aggregatedAttestationPool.d.ts.map +1 -1
  22. package/lib/chain/opPools/aggregatedAttestationPool.js +3 -1
  23. package/lib/chain/opPools/aggregatedAttestationPool.js.map +1 -1
  24. package/lib/network/gossip/encoding.d.ts +4 -2
  25. package/lib/network/gossip/encoding.d.ts.map +1 -1
  26. package/lib/network/gossip/encoding.js +24 -9
  27. package/lib/network/gossip/encoding.js.map +1 -1
  28. package/lib/network/gossip/gossipsub.d.ts.map +1 -1
  29. package/lib/network/gossip/gossipsub.js +6 -5
  30. package/lib/network/gossip/gossipsub.js.map +1 -1
  31. package/lib/network/gossip/metrics.d.ts +8 -0
  32. package/lib/network/gossip/metrics.d.ts.map +1 -1
  33. package/lib/network/gossip/metrics.js +12 -0
  34. package/lib/network/gossip/metrics.js.map +1 -1
  35. package/package.json +15 -15
  36. package/src/api/impl/beacon/state/utils.ts +5 -3
  37. package/src/chain/blocks/importBlock.ts +5 -4
  38. package/src/chain/blocks/index.ts +2 -1
  39. package/src/chain/blocks/types.ts +5 -1
  40. package/src/chain/blocks/verifyBlock.ts +34 -11
  41. package/src/chain/blocks/verifyBlocksSignatures.ts +3 -2
  42. package/src/chain/opPools/aggregatedAttestationPool.ts +5 -1
  43. package/src/network/gossip/encoding.ts +26 -9
  44. package/src/network/gossip/gossipsub.ts +8 -6
  45. package/src/network/gossip/metrics.ts +12 -0

package/src/chain/blocks/verifyBlock.ts

@@ -7,7 +7,7 @@ import {
    computeEpochAtSlot,
    isStateValidatorsNodesPopulated,
  } from "@lodestar/state-transition";
- import {bellatrix, deneb} from "@lodestar/types";
+ import {IndexedAttestation, bellatrix, deneb} from "@lodestar/types";
  import {Logger, toRootHex} from "@lodestar/utils";
  import type {BeaconChain} from "../chain.js";
  import {BlockError, BlockErrorCode} from "../errors/index.js";
@@ -47,6 +47,7 @@ export async function verifyBlocksInEpoch(
    proposerBalanceDeltas: number[];
    segmentExecStatus: SegmentExecStatus;
    dataAvailabilityStatuses: DataAvailabilityStatus[];
+   indexedAttestationsByBlock: IndexedAttestation[][];
  }> {
    const blocks = blockInputs.map((blockInput) => blockInput.getBlock());
    const lastBlock = blocks.at(-1);
@@ -65,6 +66,9 @@ export async function verifyBlocksInEpoch(
      }
    }

+   // All blocks are in the same epoch
+   const fork = this.config.getForkSeq(block0.message.slot);
+
    // TODO: Skip in process chain segment
    // Retrieve preState from cache (regen)
    const preState0 = await this.regen
@@ -92,6 +96,24 @@ export async function verifyBlocksInEpoch(
    const abortController = new AbortController();

    try {
+     // Start execution payload verification first (async request to execution client)
+     const verifyExecutionPayloadsPromise =
+       opts.skipVerifyExecutionPayload !== true
+         ? verifyBlocksExecutionPayload(this, parentBlock, blockInputs, preState0, abortController.signal, opts)
+         : Promise.resolve({
+             execAborted: null,
+             executionStatuses: blocks.map((_blk) => ExecutionStatus.Syncing),
+             mergeBlockFound: null,
+           } as SegmentExecStatus);
+
+     // Store indexed attestations for each block to avoid recomputing them during import
+     const indexedAttestationsByBlock: IndexedAttestation[][] = [];
+     for (const [i, block] of blocks.entries()) {
+       indexedAttestationsByBlock[i] = block.message.body.attestations.map((attestation) =>
+         preState0.epochCtx.getIndexedAttestation(fork, attestation)
+       );
+     }
+
      // batch all I/O operations to reduce overhead
      const [
        segmentExecStatus,
@@ -99,14 +121,7 @@
        {postStates, proposerBalanceDeltas, verifyStateTime},
        {verifySignaturesTime},
      ] = await Promise.all([
-       // Execution payloads
-       opts.skipVerifyExecutionPayload !== true
-         ? verifyBlocksExecutionPayload(this, parentBlock, blockInputs, preState0, abortController.signal, opts)
-         : Promise.resolve({
-             execAborted: null,
-             executionStatuses: blocks.map((_blk) => ExecutionStatus.Syncing),
-             mergeBlockFound: null,
-           } as SegmentExecStatus),
+       verifyExecutionPayloadsPromise,

        // data availability for the blobs
        verifyBlocksDataAvailability(blockInputs, abortController.signal),
@@ -127,7 +142,15 @@

        // All signatures at once
        opts.skipVerifyBlockSignatures !== true
-         ? verifyBlocksSignatures(this.bls, this.logger, this.metrics, preState0, blocks, opts)
+         ? verifyBlocksSignatures(
+             this.bls,
+             this.logger,
+             this.metrics,
+             preState0,
+             blocks,
+             indexedAttestationsByBlock,
+             opts
+           )
          : Promise.resolve({verifySignaturesTime: Date.now()}),

        // ideally we want to only persist blocks after verifying them however the reality is there are
@@ -222,7 +245,7 @@ export async function verifyBlocksInEpoch(
        );
      }

-     return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus};
+     return {postStates, dataAvailabilityStatuses, proposerBalanceDeltas, segmentExecStatus, indexedAttestationsByBlock};
    } finally {
      abortController.abort();
    }
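
The verifyBlock.ts changes above follow a simple overlap pattern: the execution-engine request is fired first, the CPU-bound attestation indexing runs while that request is in flight, and both results are then joined in Promise.all, with the indexed attestations kept so that block import does not recompute them. Below is a minimal standalone sketch of that pattern; Block, ExecStatus, IndexedAtt and the three helper functions are hypothetical stand-ins, not Lodestar APIs.

type Block = {attestations: unknown[]};
type ExecStatus = "Valid" | "Invalid" | "Syncing";
type IndexedAtt = {attestingIndices: number[]};

// Stand-in for the async round trip to the execution client
const requestExecutionVerification = async (blocks: Block[]): Promise<ExecStatus[]> =>
  blocks.map(() => "Syncing" as ExecStatus);
// Stand-in for epochCtx.getIndexedAttestation over one block's attestations
const indexAttestations = (block: Block): IndexedAtt[] => block.attestations.map(() => ({attestingIndices: []}));
// Stand-in for BLS verification that consumes the precomputed indexed attestations
const verifySignatures = async (_blocks: Block[], _indexed: IndexedAtt[][]): Promise<boolean> => true;

async function verifySegment(blocks: Block[]) {
  // 1. Kick off the network-bound request first so it runs while we do CPU work
  const execPromise = requestExecutionVerification(blocks);

  // 2. Index attestations once; the result feeds signature verification here and
  //    is returned so the import step can reuse it instead of recomputing
  const indexedAttestationsByBlock = blocks.map((block) => indexAttestations(block));

  // 3. Join both; only now do we await the execution client response
  const [executionStatuses, signaturesValid] = await Promise.all([
    execPromise,
    verifySignatures(blocks, indexedAttestationsByBlock),
  ]);

  return {executionStatuses, signaturesValid, indexedAttestationsByBlock};
}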

package/src/chain/blocks/verifyBlocksSignatures.ts

@@ -1,5 +1,5 @@
  import {CachedBeaconStateAllForks, getBlockSignatureSets} from "@lodestar/state-transition";
- import {SignedBeaconBlock} from "@lodestar/types";
+ import {IndexedAttestation, SignedBeaconBlock} from "@lodestar/types";
  import {Logger} from "@lodestar/utils";
  import {Metrics} from "../../metrics/metrics.js";
  import {nextEventLoop} from "../../util/eventLoop.js";
@@ -20,6 +20,7 @@ export async function verifyBlocksSignatures(
    metrics: Metrics | null,
    preState0: CachedBeaconStateAllForks,
    blocks: SignedBeaconBlock[],
+   indexedAttestationsByBlock: IndexedAttestation[][],
    opts: ImportBlockOpts
  ): Promise<{verifySignaturesTime: number}> {
    const isValidPromises: Promise<boolean>[] = [];
@@ -37,7 +38,7 @@
        : //
          // Verify signatures per block to track which block is invalid
          bls.verifySignatureSets(
-           getBlockSignatureSets(preState0, block, {
+           getBlockSignatureSets(preState0, block, indexedAttestationsByBlock[i], {
              skipProposerSignature: opts.validProposerSignature,
            })
          );

package/src/chain/opPools/aggregatedAttestationPool.ts

@@ -22,6 +22,7 @@ import {
  import {
    CachedBeaconStateAllForks,
    CachedBeaconStateAltair,
+   CachedBeaconStateGloas,
    CachedBeaconStatePhase0,
    EffectiveBalanceIncrements,
    RootCache,
@@ -486,7 +487,10 @@ export class AggregatedAttestationPool {
          consolidation.attData,
          inclusionDistance,
          stateEpoch,
-         rootCache
+         rootCache,
+         ForkSeq[fork] >= ForkSeq.gloas
+           ? (state as CachedBeaconStateGloas).executionPayloadAvailability.toBoolArray()
+           : null
        );

        const weight =

package/src/network/gossip/encoding.ts

@@ -1,13 +1,14 @@
  import {Message} from "@libp2p/interface";
  // snappyjs is better for compression for smaller payloads
- import {compress, uncompress} from "snappyjs";
  import xxhashFactory from "xxhash-wasm";
  import {digest} from "@chainsafe/as-sha256";
  import {RPC} from "@chainsafe/libp2p-gossipsub/message";
  import {DataTransform} from "@chainsafe/libp2p-gossipsub/types";
+ import snappyWasm from "@chainsafe/snappy-wasm";
  import {ForkName} from "@lodestar/params";
  import {intToBytes} from "@lodestar/utils";
  import {MESSAGE_DOMAIN_VALID_SNAPPY} from "./constants.js";
+ import {Eth2GossipsubMetrics} from "./metrics.js";
  import {GossipTopicCache, getGossipSSZType} from "./topic.js";

  // Load WASM
@@ -16,6 +17,10 @@ const xxhash = await xxhashFactory();
  // Use salt to prevent msgId from being mined for collisions
  const h64Seed = BigInt(Math.floor(Math.random() * 1e9));

+ // create singleton snappy encoder + decoder
+ const encoder = new snappyWasm.Encoder();
+ const decoder = new snappyWasm.Decoder();
+
  // Shared buffer to convert msgId to string
  const sharedMsgIdBuf = Buffer.alloc(20);

@@ -70,7 +75,8 @@ export function msgIdFn(gossipTopicCache: GossipTopicCache, msg: Message): Uint8
  export class DataTransformSnappy implements DataTransform {
    constructor(
      private readonly gossipTopicCache: GossipTopicCache,
-     private readonly maxSizePerMessage: number
+     private readonly maxSizePerMessage: number,
+     private readonly metrics: Eth2GossipsubMetrics | null
    ) {}

    /**
@@ -80,13 +86,15 @@ export class DataTransformSnappy implements DataTransform {
     * - `outboundTransform()`: compress snappy payload
     */
    inboundTransform(topicStr: string, data: Uint8Array): Uint8Array {
-     const uncompressedData = uncompress(data, this.maxSizePerMessage);
+     // check uncompressed data length before we actually decompress
+     const uncompressedDataLength = snappyWasm.decompress_len(data);
+     if (uncompressedDataLength > this.maxSizePerMessage) {
+       throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} > ${this.maxSizePerMessage}`);
+     }

-     // check uncompressed data length before we extract beacon block root, slot or
-     // attestation data at later steps
-     const uncompressedDataLength = uncompressedData.length;
      const topic = this.gossipTopicCache.getTopic(topicStr);
      const sszType = getGossipSSZType(topic);
+     this.metrics?.dataTransform.inbound.inc({type: topic.type});

      if (uncompressedDataLength < sszType.minSize) {
        throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} < ${sszType.minSize}`);
@@ -95,6 +103,10 @@ export class DataTransformSnappy implements DataTransform {
        throw Error(`ssz_snappy decoded data length ${uncompressedDataLength} > ${sszType.maxSize}`);
      }

+     // Only after sanity length checks, we can decompress the data
+     // Using Buffer.alloc() instead of Buffer.allocUnsafe() to mitigate high GC pressure observed in some environments
+     const uncompressedData = Buffer.alloc(uncompressedDataLength);
+     decoder.decompress_into(data, uncompressedData);
      return uncompressedData;
    }

@@ -102,11 +114,16 @@
     * Takes the data to be published (a topic and associated data) transforms the data. The
     * transformed data will then be used to create a `RawGossipsubMessage` to be sent to peers.
     */
-   outboundTransform(_topicStr: string, data: Uint8Array): Uint8Array {
+   outboundTransform(topicStr: string, data: Uint8Array): Uint8Array {
+     const topic = this.gossipTopicCache.getTopic(topicStr);
+     this.metrics?.dataTransform.outbound.inc({type: topic.type});
      if (data.length > this.maxSizePerMessage) {
        throw Error(`ssz_snappy encoded data length ${data.length} > ${this.maxSizePerMessage}`);
      }
-     // No need to parse topic, everything is snappy compressed
-     return compress(data);
+
+     // Using Buffer.alloc() instead of Buffer.allocUnsafe() to mitigate high GC pressure observed in some environments
+     const compressedData = Buffer.alloc(snappyWasm.max_compress_len(data.length));
+     const compressedLen = encoder.compress_into(data, compressedData);
+     return compressedData.subarray(0, compressedLen);
    }
  }
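
The new inboundTransform splits decoding into two steps: read the uncompressed length from the raw snappy header with decompress_len, reject payloads that exceed the size bounds before allocating anything, and only then decompress into a buffer sized exactly to that length. A stripped-down round trip using the same @chainsafe/snappy-wasm calls as the diff above could look like the sketch below; the 1 MiB limit is an arbitrary example, not a Lodestar constant.

import snappyWasm from "@chainsafe/snappy-wasm";

// Reusable singleton codec instances, mirroring encoding.ts above
const encoder = new snappyWasm.Encoder();
const decoder = new snappyWasm.Decoder();

const MAX_UNCOMPRESSED_SIZE = 1024 * 1024; // example bound only, not a Lodestar constant

function compressPayload(data: Uint8Array): Uint8Array {
  // Allocate the worst-case output size, then trim to the bytes actually written
  const out = Buffer.alloc(snappyWasm.max_compress_len(data.length));
  const written = encoder.compress_into(data, out);
  return out.subarray(0, written);
}

function decompressPayload(data: Uint8Array): Uint8Array {
  // Raw snappy stores the uncompressed length up front, so this check is cheap
  // and runs before any large allocation or decompression work
  const len = snappyWasm.decompress_len(data);
  if (len > MAX_UNCOMPRESSED_SIZE) {
    throw Error(`uncompressed length ${len} > ${MAX_UNCOMPRESSED_SIZE}`);
  }
  const out = Buffer.alloc(len);
  decoder.decompress_into(data, out);
  return out;
}

Buffer.alloc() rather than Buffer.allocUnsafe() is a deliberate choice here, per the comments in the diff, trading the cost of zero-filling for the lower GC pressure observed in some environments.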

package/src/network/gossip/gossipsub.ts

@@ -89,6 +89,13 @@ export class Eth2Gossipsub extends GossipSub {
    const gossipTopicCache = new GossipTopicCache(config);

    const scoreParams = computeGossipPeerScoreParams({config, eth2Context: modules.eth2Context});
+   let metrics: Eth2GossipsubMetrics | null = null;
+   if (metricsRegister) {
+     metrics = createEth2GossipsubMetrics(metricsRegister);
+     metrics.gossipMesh.peersByType.addCollect(() =>
+       this.onScrapeLodestarMetrics(metrics as Eth2GossipsubMetrics, networkConfig)
+     );
+   }

    // Gossipsub parameters defined here:
    // https://github.com/ethereum/consensus-specs/blob/v1.1.10/specs/phase0/p2p-interface.md#the-gossip-domain-gossipsub
@@ -116,7 +123,7 @@
      fastMsgIdFn: fastMsgIdFn,
      msgIdFn: msgIdFn.bind(msgIdFn, gossipTopicCache),
      msgIdToStrFn: msgIdToStrFn,
-     dataTransform: new DataTransformSnappy(gossipTopicCache, config.MAX_PAYLOAD_SIZE),
+     dataTransform: new DataTransformSnappy(gossipTopicCache, config.MAX_PAYLOAD_SIZE, metrics),
      metricsRegister: metricsRegister as MetricsRegister | null,
      metricsTopicStrToLabel: metricsRegister
        ? getMetricsTopicStrToLabel(networkConfig, {disableLightClientServer: opts.disableLightClientServer ?? false})
@@ -141,11 +148,6 @@
    this.events = events;
    this.gossipTopicCache = gossipTopicCache;

-   if (metricsRegister) {
-     const metrics = createEth2GossipsubMetrics(metricsRegister);
-     metrics.gossipMesh.peersByType.addCollect(() => this.onScrapeLodestarMetrics(metrics, networkConfig));
-   }
-
    this.addEventListener("gossipsub:message", this.onGossipsubMessage.bind(this));
    this.events.on(NetworkEvent.gossipMessageValidationResult, this.onValidationResult.bind(this));


package/src/network/gossip/metrics.ts

@@ -67,5 +67,17 @@ export function createEth2GossipsubMetrics(register: RegistryMetricCreator) {
          labelNames: ["subnet", "boundary"],
        }),
      },
+     dataTransform: {
+       inbound: register.counter<{type: GossipType}>({
+         name: "lodestar_gossip_data_transform_inbound_total",
+         help: "Total number of inbound data transforms by gossip type",
+         labelNames: ["type"],
+       }),
+       outbound: register.counter<{type: GossipType}>({
+         name: "lodestar_gossip_data_transform_outbound_total",
+         help: "Total number of outbound data transforms by gossip type",
+         labelNames: ["type"],
+       }),
+     },
    };
  }
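
The register.counter<{type: GossipType}>(...) entries added here create labeled Prometheus counters that DataTransformSnappy increments once per message via .inc({type: topic.type}). For reference, the same counter shape with plain prom-client looks roughly like this; it is a generic illustration (with "beacon_attestation" as an example label value), not Lodestar's RegistryMetricCreator code.

import {Counter, Registry} from "prom-client";

const registry = new Registry();

// Same shape as lodestar_gossip_data_transform_inbound_total above
const inboundTransforms = new Counter({
  name: "lodestar_gossip_data_transform_inbound_total",
  help: "Total number of inbound data transforms by gossip type",
  labelNames: ["type"] as const,
  registers: [registry],
});

// Incremented once per decoded gossip message, keyed by topic type
inboundTransforms.inc({type: "beacon_attestation"});

// Renders the exposition text a Prometheus scrape would collect
registry.metrics().then((text) => console.log(text));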