@lodestar/beacon-node 1.41.0-rc.0 → 1.41.0-rc.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/lib/api/impl/beacon/blocks/index.d.ts.map +1 -1
  2. package/lib/api/impl/beacon/blocks/index.js +9 -0
  3. package/lib/api/impl/beacon/blocks/index.js.map +1 -1
  4. package/lib/chain/blocks/importBlock.js +1 -1
  5. package/lib/chain/blocks/importBlock.js.map +1 -1
  6. package/lib/chain/chain.d.ts.map +1 -1
  7. package/lib/chain/chain.js +4 -2
  8. package/lib/chain/chain.js.map +1 -1
  9. package/lib/chain/produceBlock/produceBlockBody.d.ts.map +1 -1
  10. package/lib/chain/produceBlock/produceBlockBody.js +9 -1
  11. package/lib/chain/produceBlock/produceBlockBody.js.map +1 -1
  12. package/lib/chain/validation/dataColumnSidecar.d.ts +2 -1
  13. package/lib/chain/validation/dataColumnSidecar.d.ts.map +1 -1
  14. package/lib/chain/validation/dataColumnSidecar.js +124 -107
  15. package/lib/chain/validation/dataColumnSidecar.js.map +1 -1
  16. package/lib/metrics/metrics/beacon.d.ts +2 -1
  17. package/lib/metrics/metrics/beacon.d.ts.map +1 -1
  18. package/lib/metrics/metrics/beacon.js +9 -3
  19. package/lib/metrics/metrics/beacon.js.map +1 -1
  20. package/lib/metrics/metrics/lodestar.d.ts +5 -5
  21. package/lib/metrics/metrics/lodestar.d.ts.map +1 -1
  22. package/lib/metrics/metrics/lodestar.js +16 -14
  23. package/lib/metrics/metrics/lodestar.js.map +1 -1
  24. package/lib/sync/range/range.d.ts.map +1 -1
  25. package/lib/sync/range/range.js +1 -0
  26. package/lib/sync/range/range.js.map +1 -1
  27. package/lib/sync/utils/downloadByRange.d.ts +6 -3
  28. package/lib/sync/utils/downloadByRange.d.ts.map +1 -1
  29. package/lib/sync/utils/downloadByRange.js +6 -5
  30. package/lib/sync/utils/downloadByRange.js.map +1 -1
  31. package/lib/sync/utils/downloadByRoot.js +1 -1
  32. package/lib/sync/utils/downloadByRoot.js.map +1 -1
  33. package/lib/util/dataColumns.d.ts.map +1 -1
  34. package/lib/util/dataColumns.js +5 -1
  35. package/lib/util/dataColumns.js.map +1 -1
  36. package/lib/util/execution.d.ts.map +1 -1
  37. package/lib/util/execution.js +17 -8
  38. package/lib/util/execution.js.map +1 -1
  39. package/package.json +17 -17
  40. package/src/api/impl/beacon/blocks/index.ts +11 -0
  41. package/src/chain/blocks/importBlock.ts +1 -1
  42. package/src/chain/chain.ts +4 -2
  43. package/src/chain/produceBlock/produceBlockBody.ts +8 -1
  44. package/src/chain/validation/dataColumnSidecar.ts +146 -126
  45. package/src/metrics/metrics/beacon.ts +9 -3
  46. package/src/metrics/metrics/lodestar.ts +16 -14
  47. package/src/sync/range/range.ts +1 -0
  48. package/src/sync/utils/downloadByRange.ts +12 -3
  49. package/src/sync/utils/downloadByRoot.ts +1 -1
  50. package/src/util/dataColumns.ts +6 -2
  51. package/src/util/execution.ts +23 -12
@@ -12,6 +12,7 @@ import {
12
12
  } from "@lodestar/state-transition";
13
13
  import {DataColumnSidecar, Root, Slot, SubnetID, fulu, ssz} from "@lodestar/types";
14
14
  import {byteArrayEquals, toRootHex, verifyMerkleBranch} from "@lodestar/utils";
15
+ import {BeaconMetrics} from "../../metrics/metrics/beacon.js";
15
16
  import {Metrics} from "../../metrics/metrics.js";
16
17
  import {kzg} from "../../util/kzg.js";
17
18
  import {
@@ -177,6 +178,7 @@ export async function validateGossipDataColumnSidecar(
177
178
  });
178
179
  }
179
180
 
181
+ // single data column is being verified here
180
182
  const kzgProofTimer = metrics?.peerDas.dataColumnSidecarKzgProofsVerificationTime.startTimer();
181
183
  // 11) [REJECT] The sidecar's column data is valid as verified by verify_data_column_sidecar_kzg_proofs
182
184
  try {
@@ -297,159 +299,177 @@ export async function validateBlockDataColumnSidecars(
297
299
  blockSlot: Slot,
298
300
  blockRoot: Root,
299
301
  blockBlobCount: number,
300
- dataColumnSidecars: fulu.DataColumnSidecars
302
+ dataColumnSidecars: fulu.DataColumnSidecars,
303
+ metrics?: BeaconMetrics["peerDas"] | null
301
304
  ): Promise<void> {
302
- if (dataColumnSidecars.length === 0) {
303
- return;
304
- }
305
+ metrics?.dataColumnSidecarProcessingRequests.inc(dataColumnSidecars.length);
306
+ const verificationTimer = metrics?.dataColumnSidecarGossipVerificationTime.startTimer();
307
+ try {
308
+ if (dataColumnSidecars.length === 0) {
309
+ return;
310
+ }
305
311
 
306
- if (blockBlobCount === 0) {
307
- throw new DataColumnSidecarValidationError(
308
- {
309
- code: DataColumnSidecarErrorCode.INCORRECT_SIDECAR_COUNT,
310
- slot: blockSlot,
311
- expected: 0,
312
- actual: dataColumnSidecars.length,
313
- },
314
- "Block has no blob commitments but data column sidecars were provided"
315
- );
316
- }
317
- // Hash the first sidecar block header and compare the rest via (cheaper) equality
318
- const firstSidecarSignedBlockHeader = dataColumnSidecars[0].signedBlockHeader;
319
- const firstSidecarBlockHeader = firstSidecarSignedBlockHeader.message;
320
- const firstBlockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(firstSidecarBlockHeader);
321
- if (!byteArrayEquals(blockRoot, firstBlockRoot)) {
322
- throw new DataColumnSidecarValidationError(
323
- {
324
- code: DataColumnSidecarErrorCode.INCORRECT_BLOCK,
325
- slot: blockSlot,
326
- columnIndex: 0,
327
- expected: toRootHex(blockRoot),
328
- actual: toRootHex(firstBlockRoot),
329
- },
330
- "DataColumnSidecar doesn't match corresponding block"
331
- );
332
- }
312
+ if (blockBlobCount === 0) {
313
+ throw new DataColumnSidecarValidationError(
314
+ {
315
+ code: DataColumnSidecarErrorCode.INCORRECT_SIDECAR_COUNT,
316
+ slot: blockSlot,
317
+ expected: 0,
318
+ actual: dataColumnSidecars.length,
319
+ },
320
+ "Block has no blob commitments but data column sidecars were provided"
321
+ );
322
+ }
323
+ // Hash the first sidecar block header and compare the rest via (cheaper) equality
324
+ const firstSidecarSignedBlockHeader = dataColumnSidecars[0].signedBlockHeader;
325
+ const firstSidecarBlockHeader = firstSidecarSignedBlockHeader.message;
326
+ const firstBlockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(firstSidecarBlockHeader);
327
+ if (!byteArrayEquals(blockRoot, firstBlockRoot)) {
328
+ throw new DataColumnSidecarValidationError(
329
+ {
330
+ code: DataColumnSidecarErrorCode.INCORRECT_BLOCK,
331
+ slot: blockSlot,
332
+ columnIndex: 0,
333
+ expected: toRootHex(blockRoot),
334
+ actual: toRootHex(firstBlockRoot),
335
+ },
336
+ "DataColumnSidecar doesn't match corresponding block"
337
+ );
338
+ }
339
+
340
+ if (chain !== null) {
341
+ const rootHex = toRootHex(blockRoot);
342
+ const slot = firstSidecarSignedBlockHeader.message.slot;
343
+ const signature = firstSidecarSignedBlockHeader.signature;
344
+ if (!chain.seenBlockInputCache.isVerifiedProposerSignature(slot, rootHex, signature)) {
345
+ const signatureSet = getBlockHeaderProposerSignatureSetByHeaderSlot(
346
+ chain.config,
347
+ firstSidecarSignedBlockHeader
348
+ );
349
+
350
+ if (
351
+ !(await chain.bls.verifySignatureSets([signatureSet], {
352
+ verifyOnMainThread: true,
353
+ }))
354
+ ) {
355
+ throw new DataColumnSidecarValidationError({
356
+ code: DataColumnSidecarErrorCode.PROPOSAL_SIGNATURE_INVALID,
357
+ blockRoot: rootHex,
358
+ slot: blockSlot,
359
+ index: dataColumnSidecars[0].index,
360
+ });
361
+ }
362
+
363
+ chain.seenBlockInputCache.markVerifiedProposerSignature(slot, rootHex, signature);
364
+ }
365
+ }
333
366
 
334
- if (chain !== null) {
335
- const rootHex = toRootHex(blockRoot);
336
- const slot = firstSidecarSignedBlockHeader.message.slot;
337
- const signature = firstSidecarSignedBlockHeader.signature;
338
- if (!chain.seenBlockInputCache.isVerifiedProposerSignature(slot, rootHex, signature)) {
339
- const signatureSet = getBlockHeaderProposerSignatureSetByHeaderSlot(chain.config, firstSidecarSignedBlockHeader);
367
+ const commitments: Uint8Array[] = [];
368
+ const cellIndices: number[] = [];
369
+ const cells: Uint8Array[] = [];
370
+ const proofs: Uint8Array[] = [];
371
+ for (let i = 0; i < dataColumnSidecars.length; i++) {
372
+ const columnSidecar = dataColumnSidecars[i];
340
373
 
341
374
  if (
342
- !(await chain.bls.verifySignatureSets([signatureSet], {
343
- verifyOnMainThread: true,
344
- }))
375
+ i !== 0 &&
376
+ !ssz.phase0.SignedBeaconBlockHeader.equals(firstSidecarSignedBlockHeader, columnSidecar.signedBlockHeader)
345
377
  ) {
346
378
  throw new DataColumnSidecarValidationError({
347
- code: DataColumnSidecarErrorCode.PROPOSAL_SIGNATURE_INVALID,
348
- blockRoot: rootHex,
379
+ code: DataColumnSidecarErrorCode.INCORRECT_HEADER_ROOT,
349
380
  slot: blockSlot,
350
- index: dataColumnSidecars[0].index,
381
+ expected: toRootHex(blockRoot),
382
+ actual: toRootHex(ssz.phase0.BeaconBlockHeader.hashTreeRoot(columnSidecar.signedBlockHeader.message)),
351
383
  });
352
384
  }
353
385
 
354
- chain.seenBlockInputCache.markVerifiedProposerSignature(slot, rootHex, signature);
355
- }
356
- }
386
+ if (columnSidecar.index >= NUMBER_OF_COLUMNS) {
387
+ throw new DataColumnSidecarValidationError(
388
+ {
389
+ code: DataColumnSidecarErrorCode.INVALID_INDEX,
390
+ slot: blockSlot,
391
+ columnIndex: columnSidecar.index,
392
+ },
393
+ "DataColumnSidecar has invalid index"
394
+ );
395
+ }
357
396
 
358
- const commitments: Uint8Array[] = [];
359
- const cellIndices: number[] = [];
360
- const cells: Uint8Array[] = [];
361
- const proofs: Uint8Array[] = [];
362
- for (let i = 0; i < dataColumnSidecars.length; i++) {
363
- const columnSidecar = dataColumnSidecars[i];
397
+ if (columnSidecar.column.length !== blockBlobCount) {
398
+ throw new DataColumnSidecarValidationError({
399
+ code: DataColumnSidecarErrorCode.INCORRECT_CELL_COUNT,
400
+ slot: blockSlot,
401
+ columnIndex: columnSidecar.index,
402
+ expected: blockBlobCount,
403
+ actual: columnSidecar.column.length,
404
+ });
405
+ }
364
406
 
365
- if (
366
- i !== 0 &&
367
- !ssz.phase0.SignedBeaconBlockHeader.equals(firstSidecarSignedBlockHeader, columnSidecar.signedBlockHeader)
368
- ) {
369
- throw new DataColumnSidecarValidationError({
370
- code: DataColumnSidecarErrorCode.INCORRECT_HEADER_ROOT,
371
- slot: blockSlot,
372
- expected: toRootHex(blockRoot),
373
- actual: toRootHex(ssz.phase0.BeaconBlockHeader.hashTreeRoot(columnSidecar.signedBlockHeader.message)),
374
- });
375
- }
407
+ if (columnSidecar.column.length !== columnSidecar.kzgCommitments.length) {
408
+ throw new DataColumnSidecarValidationError({
409
+ code: DataColumnSidecarErrorCode.INCORRECT_KZG_COMMITMENTS_COUNT,
410
+ slot: blockSlot,
411
+ columnIndex: columnSidecar.index,
412
+ expected: columnSidecar.column.length,
413
+ actual: columnSidecar.kzgCommitments.length,
414
+ });
415
+ }
376
416
 
377
- if (columnSidecar.index >= NUMBER_OF_COLUMNS) {
378
- throw new DataColumnSidecarValidationError(
379
- {
380
- code: DataColumnSidecarErrorCode.INVALID_INDEX,
417
+ if (columnSidecar.column.length !== columnSidecar.kzgProofs.length) {
418
+ throw new DataColumnSidecarValidationError({
419
+ code: DataColumnSidecarErrorCode.INCORRECT_KZG_PROOF_COUNT,
381
420
  slot: blockSlot,
382
421
  columnIndex: columnSidecar.index,
383
- },
384
- "DataColumnSidecar has invalid index"
385
- );
386
- }
422
+ expected: columnSidecar.column.length,
423
+ actual: columnSidecar.kzgProofs.length,
424
+ });
425
+ }
387
426
 
388
- if (columnSidecar.column.length !== blockBlobCount) {
389
- throw new DataColumnSidecarValidationError({
390
- code: DataColumnSidecarErrorCode.INCORRECT_CELL_COUNT,
391
- slot: blockSlot,
392
- columnIndex: columnSidecar.index,
393
- expected: blockBlobCount,
394
- actual: columnSidecar.column.length,
395
- });
396
- }
427
+ const inclusionProofTimer = metrics?.dataColumnSidecarInclusionProofVerificationTime.startTimer();
428
+ const validInclusionProof = verifyDataColumnSidecarInclusionProof(columnSidecar);
429
+ inclusionProofTimer?.();
430
+ if (!validInclusionProof) {
431
+ throw new DataColumnSidecarValidationError(
432
+ {
433
+ code: DataColumnSidecarErrorCode.INCLUSION_PROOF_INVALID,
434
+ slot: blockSlot,
435
+ columnIndex: columnSidecar.index,
436
+ },
437
+ "DataColumnSidecar has invalid inclusion proof"
438
+ );
439
+ }
397
440
 
398
- if (columnSidecar.column.length !== columnSidecar.kzgCommitments.length) {
399
- throw new DataColumnSidecarValidationError({
400
- code: DataColumnSidecarErrorCode.INCORRECT_KZG_COMMITMENTS_COUNT,
401
- slot: blockSlot,
402
- columnIndex: columnSidecar.index,
403
- expected: columnSidecar.column.length,
404
- actual: columnSidecar.kzgCommitments.length,
405
- });
441
+ commitments.push(...columnSidecar.kzgCommitments);
442
+ cellIndices.push(...Array.from({length: columnSidecar.column.length}, () => columnSidecar.index));
443
+ cells.push(...columnSidecar.column);
444
+ proofs.push(...columnSidecar.kzgProofs);
406
445
  }
407
446
 
408
- if (columnSidecar.column.length !== columnSidecar.kzgProofs.length) {
409
- throw new DataColumnSidecarValidationError({
410
- code: DataColumnSidecarErrorCode.INCORRECT_KZG_PROOF_COUNT,
411
- slot: blockSlot,
412
- columnIndex: columnSidecar.index,
413
- expected: columnSidecar.column.length,
414
- actual: columnSidecar.kzgProofs.length,
415
- });
447
+ let reason: string | undefined;
448
+ // batch verification for the cases: downloadByRange and downloadByRoot
449
+ const kzgVerificationTimer = metrics?.kzgVerificationDataColumnBatchTime.startTimer();
450
+ try {
451
+ const valid = await kzg.asyncVerifyCellKzgProofBatch(commitments, cellIndices, cells, proofs);
452
+ if (!valid) {
453
+ reason = "Invalid KZG proof batch";
454
+ }
455
+ } catch (e) {
456
+ reason = (e as Error).message;
457
+ } finally {
458
+ kzgVerificationTimer?.();
416
459
  }
417
-
418
- if (!verifyDataColumnSidecarInclusionProof(columnSidecar)) {
460
+ if (reason !== undefined) {
419
461
  throw new DataColumnSidecarValidationError(
420
462
  {
421
- code: DataColumnSidecarErrorCode.INCLUSION_PROOF_INVALID,
463
+ code: DataColumnSidecarErrorCode.INVALID_KZG_PROOF_BATCH,
422
464
  slot: blockSlot,
423
- columnIndex: columnSidecar.index,
465
+ reason,
424
466
  },
425
- "DataColumnSidecar has invalid inclusion proof"
467
+ "DataColumnSidecar has invalid KZG proof batch"
426
468
  );
427
469
  }
428
-
429
- commitments.push(...columnSidecar.kzgCommitments);
430
- cellIndices.push(...Array.from({length: columnSidecar.column.length}, () => columnSidecar.index));
431
- cells.push(...columnSidecar.column);
432
- proofs.push(...columnSidecar.kzgProofs);
433
- }
434
-
435
- let reason: string | undefined;
436
- try {
437
- const valid = await kzg.asyncVerifyCellKzgProofBatch(commitments, cellIndices, cells, proofs);
438
- if (!valid) {
439
- reason = "Invalid KZG proof batch";
440
- }
441
- } catch (e) {
442
- reason = (e as Error).message;
443
- }
444
- if (reason !== undefined) {
445
- throw new DataColumnSidecarValidationError(
446
- {
447
- code: DataColumnSidecarErrorCode.INVALID_KZG_PROOF_BATCH,
448
- slot: blockSlot,
449
- reason,
450
- },
451
- "DataColumnSidecar has invalid KZG proof batch"
452
- );
470
+ metrics?.dataColumnSidecarProcessingSuccesses.inc();
471
+ } finally {
472
+ verificationTimer?.();
453
473
  }
454
474
  }
455
475
 
@@ -333,11 +333,13 @@ export function createBeaconMetrics(register: RegistryMetricCreator) {
333
333
  help: "Time taken to verify data_column sidecar inclusion proof",
334
334
  buckets: [0.002, 0.004, 0.006, 0.008, 0.01, 0.05, 1, 2],
335
335
  }),
336
+ // single verification
336
337
  dataColumnSidecarKzgProofsVerificationTime: register.histogram({
337
338
  name: "beacon_data_column_sidecar_kzg_proofs_verification_seconds",
338
- help: "Time taken to verify data_column sidecar kzg proofs",
339
+ help: "Time taken to verify single data_column sidecar kzg proofs",
339
340
  buckets: [0.01, 0.02, 0.03, 0.04, 0.05, 0.1, 0.2, 0.5, 1],
340
341
  }),
342
+ // batch verification
341
343
  kzgVerificationDataColumnBatchTime: register.histogram({
342
344
  name: "beacon_kzg_verification_data_column_batch_seconds",
343
345
  help: "Runtime of batched data column kzg verification",
@@ -361,10 +363,14 @@ export function createBeaconMetrics(register: RegistryMetricCreator) {
361
363
  help: "Duration of engine_getBlobsV2 requests",
362
364
  buckets: [0.01, 0.05, 0.1, 0.5, 1, 2.5, 5, 7.5],
363
365
  }),
364
- targetCustodyGroupCount: register.gauge({
365
- name: "beacon_target_custody_group_count",
366
+ custodyGroupCount: register.gauge({
367
+ name: "beacon_custody_groups",
366
368
  help: "Total number of custody groups within a node",
367
369
  }),
370
+ custodyGroupsBackfilled: register.gauge({
371
+ name: "beacon_custody_groups_backfilled",
372
+ help: "Total number of custody groups backfilled by a node",
373
+ }),
368
374
  reconstructedColumns: register.counter({
369
375
  name: "beacon_data_availability_reconstructed_columns_total",
370
376
  help: "Total count of reconstructed columns",
@@ -836,20 +836,23 @@ export function createLodestarMetrics(
836
836
  buckets: [0.5, 1, 2, 4, 6, 12],
837
837
  }),
838
838
  },
839
- recoverDataColumnSidecars: {
840
- recoverTime: register.histogram({
841
- name: "lodestar_recover_data_column_sidecar_recover_time_seconds",
842
- help: "Time elapsed to recover data column sidecar",
843
- buckets: [0.5, 1.0, 1.5, 2],
839
+ // recovery in the case of specific blob rows required
840
+ recoverBlobSidecars: {
841
+ blobsReconstructed: register.counter({
842
+ name: "lodestar_blobs_reconstructed_total",
843
+ help: "Total count of reconstructed blobs",
844
+ }),
845
+ reconstructionTime: register.histogram({
846
+ name: "lodestar_blob_reconstruction_seconds",
847
+ help: "Time taken to reconstruct blobs",
848
+ buckets: [0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 2, 5],
844
849
  }),
850
+ },
851
+ recoverDataColumnSidecars: {
845
852
  custodyBeforeReconstruction: register.gauge({
846
853
  name: "lodestar_data_columns_in_custody_before_reconstruction",
847
854
  help: "Number of data columns in custody before reconstruction",
848
855
  }),
849
- numberOfColumnsRecovered: register.gauge({
850
- name: "lodestar_recover_data_column_sidecar_recovered_columns_total",
851
- help: "Total number of columns that were recovered",
852
- }),
853
856
  reconstructionResult: register.counter<{result: DataColumnReconstructionCode}>({
854
857
  name: "lodestar_data_column_sidecars_reconstruction_result",
855
858
  help: "Data column sidecars reconstruction result",
@@ -857,6 +860,10 @@ export function createLodestarMetrics(
857
860
  }),
858
861
  },
859
862
  dataColumns: {
863
+ alreadyAdded: register.counter({
864
+ name: "lodestar_data_column_sidecar_already_added",
865
+ help: "Total number of columns that were already added by other sources while waiting",
866
+ }),
860
867
  bySource: register.gauge<{source: BlockInputSource}>({
861
868
  name: "lodestar_data_columns_by_source",
862
869
  help: "Number of received data columns by source",
@@ -912,11 +919,6 @@ export function createLodestarMetrics(
912
919
  help: "Total number of imported blobs by source",
913
920
  labelNames: ["blobsSource"],
914
921
  }),
915
- columnsBySource: register.gauge<{source: BlockInputSource}>({
916
- name: "lodestar_import_columns_by_source_total",
917
- help: "Total number of imported columns (sampled columns) by source",
918
- labelNames: ["source"],
919
- }),
920
922
  notOverrideFcuReason: register.counter<{reason: NotReorgedReason}>({
921
923
  name: "lodestar_import_block_not_override_fcu_reason_total",
922
924
  help: "Reason why the fcu call is not suppressed during block import",
@@ -206,6 +206,7 @@ export class RangeSync extends (EventEmitter as {new (): RangeSyncEmitter}) {
206
206
  logger: this.logger,
207
207
  peerIdStr: peer.peerId,
208
208
  batchBlocks,
209
+ peerDasMetrics: this.chain.metrics?.peerDas,
209
210
  ...batch.getRequestsForPeer(peer),
210
211
  });
211
212
  const cached = cacheByRangeResponses({
@@ -12,6 +12,7 @@ import {
12
12
  import {SeenBlockInput} from "../../chain/seenCache/seenGossipBlockInput.js";
13
13
  import {validateBlockBlobSidecars} from "../../chain/validation/blobSidecar.js";
14
14
  import {validateBlockDataColumnSidecars} from "../../chain/validation/dataColumnSidecar.js";
15
+ import {BeaconMetrics} from "../../metrics/metrics/beacon.js";
15
16
  import {INetwork} from "../../network/index.js";
16
17
  import {getBlobKzgCommitments} from "../../util/dataColumns.js";
17
18
  import {PeerIdStr} from "../../util/peerId.js";
@@ -35,6 +36,7 @@ export type DownloadAndCacheByRangeProps = DownloadByRangeRequests & {
35
36
  logger: Logger;
36
37
  peerIdStr: string;
37
38
  batchBlocks?: IBlockInput[];
39
+ peerDasMetrics?: BeaconMetrics["peerDas"] | null;
38
40
  };
39
41
 
40
42
  export type CacheByRangeResponsesProps = {
@@ -196,6 +198,7 @@ export async function downloadByRange({
196
198
  blocksRequest,
197
199
  blobsRequest,
198
200
  columnsRequest,
201
+ peerDasMetrics,
199
202
  }: DownloadAndCacheByRangeProps): Promise<WarnResult<ValidatedResponses, DownloadByRangeError>> {
200
203
  let response: DownloadByRangeResponses;
201
204
  try {
@@ -220,6 +223,7 @@ export async function downloadByRange({
220
223
  blocksRequest,
221
224
  blobsRequest,
222
225
  columnsRequest,
226
+ peerDasMetrics,
223
227
  ...response,
224
228
  });
225
229
 
@@ -290,10 +294,12 @@ export async function validateResponses({
290
294
  blocks,
291
295
  blobSidecars,
292
296
  columnSidecars,
297
+ peerDasMetrics,
293
298
  }: DownloadByRangeRequests &
294
299
  DownloadByRangeResponses & {
295
300
  config: ChainForkConfig;
296
301
  batchBlocks?: IBlockInput[];
302
+ peerDasMetrics?: BeaconMetrics["peerDas"] | null;
297
303
  }): Promise<WarnResult<ValidatedResponses, DownloadByRangeError>> {
298
304
  // Blocks are always required for blob/column validation
299
305
  // If a blocksRequest is provided, blocks have just been downloaded
@@ -372,7 +378,8 @@ export async function validateResponses({
372
378
  config,
373
379
  columnsRequest,
374
380
  blocksForDataValidation,
375
- columnSidecars
381
+ columnSidecars,
382
+ peerDasMetrics
376
383
  );
377
384
  validatedResponses.validatedColumnSidecars = validatedColumnSidecarsResult.result;
378
385
  warnings = validatedColumnSidecarsResult.warnings;
@@ -608,7 +615,8 @@ export async function validateColumnsByRangeResponse(
608
615
  config: ChainForkConfig,
609
616
  request: fulu.DataColumnSidecarsByRangeRequest,
610
617
  blocks: ValidatedBlock[],
611
- columnSidecars: fulu.DataColumnSidecars
618
+ columnSidecars: fulu.DataColumnSidecars,
619
+ peerDasMetrics?: BeaconMetrics["peerDas"] | null
612
620
  ): Promise<WarnResult<ValidatedColumnSidecars[], DownloadByRangeError>> {
613
621
  const warnings: DownloadByRangeError[] = [];
614
622
 
@@ -764,7 +772,8 @@ export async function validateColumnsByRangeResponse(
764
772
  slot,
765
773
  blockRoot,
766
774
  blobCount,
767
- columnSidecars
775
+ columnSidecars,
776
+ peerDasMetrics
768
777
  ).then(() => ({
769
778
  blockRoot,
770
779
  columnSidecars,
@@ -440,7 +440,7 @@ export async function fetchAndValidateColumns({
440
440
  );
441
441
  }
442
442
 
443
- await validateBlockDataColumnSidecars(chain, slot, blockRoot, blobCount, columnSidecars);
443
+ await validateBlockDataColumnSidecars(chain, slot, blockRoot, blobCount, columnSidecars, chain?.metrics?.peerDas);
444
444
 
445
445
  return {result: columnSidecars, warnings: warnings.length > 0 ? warnings : null};
446
446
  }
@@ -425,7 +425,8 @@ export async function recoverDataColumnSidecars(
425
425
  partialSidecars.set(columnSidecar.index, columnSidecar);
426
426
  }
427
427
 
428
- const timer = metrics?.recoverDataColumnSidecars.recoverTime.startTimer();
428
+ const timer = metrics?.peerDas.dataColumnsReconstructionTime.startTimer();
429
+
429
430
  // if this function throws, we catch at the consumer side
430
431
  const fullSidecars = await dataColumnMatrixRecovery(partialSidecars).catch(() => null);
431
432
  timer?.();
@@ -435,6 +436,7 @@ export async function recoverDataColumnSidecars(
435
436
 
436
437
  if (blockInput.getAllColumns().length === NUMBER_OF_COLUMNS) {
437
438
  // either gossip or getBlobsV2 resolved availability while we were recovering
439
+ metrics?.dataColumns.alreadyAdded.inc(fullSidecars.length);
438
440
  return DataColumnReconstructionCode.SuccessLate;
439
441
  }
440
442
 
@@ -458,8 +460,10 @@ export async function recoverDataColumnSidecars(
458
460
  sidecarsToPublish.push(columnSidecar);
459
461
  }
460
462
  }
463
+ metrics?.peerDas.reconstructedColumns.inc(sidecarsToPublish.length);
464
+ metrics?.dataColumns.bySource.inc({source: BlockInputSource.recovery}, sidecarsToPublish.length);
461
465
  emitter.emit(ChainEvent.publishDataColumns, sidecarsToPublish);
462
-
466
+ // TODO: Can we record dataColumns.sentPeersPerSubnet metric somehow
463
467
  return DataColumnReconstructionCode.SuccessResolved;
464
468
  }
465
469
 
@@ -173,16 +173,21 @@ export async function getDataColumnSidecarsFromExecution(
173
173
  }
174
174
 
175
175
  let dataColumnSidecars: fulu.DataColumnSidecars;
176
- const cellsAndProofs = await getCellsAndProofs(blobs);
177
- if (blockInput.hasBlock()) {
178
- dataColumnSidecars = getDataColumnSidecarsFromBlock(
179
- config,
180
- blockInput.getBlock() as fulu.SignedBeaconBlock,
181
- cellsAndProofs
182
- );
183
- } else {
184
- const firstSidecar = blockInput.getAllColumns()[0];
185
- dataColumnSidecars = getDataColumnSidecarsFromColumnSidecar(firstSidecar, cellsAndProofs);
176
+ const compTimer = metrics?.peerDas.dataColumnSidecarComputationTime.startTimer();
177
+ try {
178
+ const cellsAndProofs = await getCellsAndProofs(blobs);
179
+ if (blockInput.hasBlock()) {
180
+ dataColumnSidecars = getDataColumnSidecarsFromBlock(
181
+ config,
182
+ blockInput.getBlock() as fulu.SignedBeaconBlock,
183
+ cellsAndProofs
184
+ );
185
+ } else {
186
+ const firstSidecar = blockInput.getAllColumns()[0];
187
+ dataColumnSidecars = getDataColumnSidecarsFromColumnSidecar(firstSidecar, cellsAndProofs);
188
+ }
189
+ } finally {
190
+ compTimer?.();
186
191
  }
187
192
 
188
193
  // Publish columns if and only if subscribed to them
@@ -191,13 +196,15 @@ export async function getDataColumnSidecarsFromExecution(
191
196
 
192
197
  // for columns that we already seen, it will be ignored through `ignoreDuplicatePublishError` gossip option
193
198
  emitter.emit(ChainEvent.publishDataColumns, sampledColumns);
199
+ // TODO: Can we record dataColumns.sentPeersPerSubnet metric here somehow
194
200
 
195
201
  // add all sampled columns to the block input, even if we didn't sample them
196
202
  const seenTimestampSec = Date.now() / 1000;
203
+ let alreadyAddedColumnsCount = 0;
197
204
  for (const columnSidecar of sampledColumns) {
198
205
  if (blockInput.hasColumn(columnSidecar.index)) {
199
206
  // columns may have been added while waiting
200
- // TODO(fulu): add metrics for this condition
207
+ alreadyAddedColumnsCount++;
201
208
  continue;
202
209
  }
203
210
 
@@ -217,7 +224,11 @@ export async function getDataColumnSidecarsFromExecution(
217
224
  });
218
225
  }
219
226
  }
227
+ metrics?.dataColumns.alreadyAdded.inc(alreadyAddedColumnsCount);
220
228
 
221
- metrics?.dataColumns.bySource.inc({source: BlockInputSource.engine}, previouslyMissingColumns.length);
229
+ metrics?.dataColumns.bySource.inc(
230
+ {source: BlockInputSource.engine},
231
+ previouslyMissingColumns.length - alreadyAddedColumnsCount
232
+ );
222
233
  return DataColumnEngineResult.SuccessResolved;
223
234
  }