@aztec/prover-node 0.0.1-commit.b655e406 → 0.0.1-commit.d1f2d6c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/dest/actions/download-epoch-proving-job.d.ts +4 -4
  2. package/dest/actions/index.d.ts +1 -1
  3. package/dest/actions/rerun-epoch-proving-job.d.ts +3 -2
  4. package/dest/actions/rerun-epoch-proving-job.d.ts.map +1 -1
  5. package/dest/actions/rerun-epoch-proving-job.js +3 -1
  6. package/dest/actions/upload-epoch-proof-failure.d.ts +1 -1
  7. package/dest/bin/run-failed-epoch.d.ts +1 -1
  8. package/dest/config.d.ts +5 -4
  9. package/dest/config.d.ts.map +1 -1
  10. package/dest/config.js +4 -3
  11. package/dest/factory.d.ts +2 -4
  12. package/dest/factory.d.ts.map +1 -1
  13. package/dest/factory.js +20 -15
  14. package/dest/index.d.ts +2 -1
  15. package/dest/index.d.ts.map +1 -1
  16. package/dest/index.js +1 -0
  17. package/dest/job/epoch-proving-job-data.d.ts +8 -6
  18. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  19. package/dest/job/epoch-proving-job-data.js +25 -18
  20. package/dest/job/epoch-proving-job.d.ts +5 -12
  21. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  22. package/dest/job/epoch-proving-job.js +481 -97
  23. package/dest/metrics.d.ts +4 -3
  24. package/dest/metrics.d.ts.map +1 -1
  25. package/dest/metrics.js +22 -98
  26. package/dest/monitors/epoch-monitor.d.ts +3 -2
  27. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  28. package/dest/monitors/epoch-monitor.js +3 -11
  29. package/dest/monitors/index.d.ts +1 -1
  30. package/dest/prover-node-publisher.d.ts +9 -7
  31. package/dest/prover-node-publisher.d.ts.map +1 -1
  32. package/dest/prover-node-publisher.js +44 -38
  33. package/dest/prover-node.d.ts +9 -8
  34. package/dest/prover-node.d.ts.map +1 -1
  35. package/dest/prover-node.js +430 -49
  36. package/dest/prover-publisher-factory.d.ts +4 -2
  37. package/dest/prover-publisher-factory.d.ts.map +1 -1
  38. package/dest/test/index.d.ts +1 -1
  39. package/dest/test/index.d.ts.map +1 -1
  40. package/package.json +26 -25
  41. package/src/actions/rerun-epoch-proving-job.ts +3 -2
  42. package/src/bin/run-failed-epoch.ts +1 -1
  43. package/src/config.ts +6 -4
  44. package/src/factory.ts +30 -17
  45. package/src/index.ts +1 -0
  46. package/src/job/epoch-proving-job-data.ts +31 -25
  47. package/src/job/epoch-proving-job.ts +107 -97
  48. package/src/metrics.ts +28 -83
  49. package/src/monitors/epoch-monitor.ts +5 -11
  50. package/src/prover-node-publisher.ts +64 -53
  51. package/src/prover-node.ts +47 -43
  52. package/src/prover-publisher-factory.ts +3 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aztec/prover-node",
3
- "version": "0.0.1-commit.b655e406",
3
+ "version": "0.0.1-commit.d1f2d6c",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
@@ -11,8 +11,8 @@
11
11
  "../package.common.json"
12
12
  ],
13
13
  "scripts": {
14
- "build": "yarn clean && tsc -b",
15
- "build:dev": "tsc -b --watch",
14
+ "build": "yarn clean && ../scripts/tsc.sh",
15
+ "build:dev": "../scripts/tsc.sh --watch",
16
16
  "clean": "rm -rf ./dest .tsbuildinfo",
17
17
  "bb": "node --no-warnings ./dest/bb/index.js",
18
18
  "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --maxWorkers=${JEST_MAX_WORKERS:-8}",
@@ -56,36 +56,37 @@
56
56
  ]
57
57
  },
58
58
  "dependencies": {
59
- "@aztec/archiver": "0.0.1-commit.b655e406",
60
- "@aztec/bb-prover": "0.0.1-commit.b655e406",
61
- "@aztec/blob-lib": "0.0.1-commit.b655e406",
62
- "@aztec/blob-sink": "0.0.1-commit.b655e406",
63
- "@aztec/constants": "0.0.1-commit.b655e406",
64
- "@aztec/epoch-cache": "0.0.1-commit.b655e406",
65
- "@aztec/ethereum": "0.0.1-commit.b655e406",
66
- "@aztec/foundation": "0.0.1-commit.b655e406",
67
- "@aztec/kv-store": "0.0.1-commit.b655e406",
68
- "@aztec/l1-artifacts": "0.0.1-commit.b655e406",
69
- "@aztec/node-keystore": "0.0.1-commit.b655e406",
70
- "@aztec/node-lib": "0.0.1-commit.b655e406",
71
- "@aztec/noir-protocol-circuits-types": "0.0.1-commit.b655e406",
72
- "@aztec/p2p": "0.0.1-commit.b655e406",
73
- "@aztec/protocol-contracts": "0.0.1-commit.b655e406",
74
- "@aztec/prover-client": "0.0.1-commit.b655e406",
75
- "@aztec/sequencer-client": "0.0.1-commit.b655e406",
76
- "@aztec/simulator": "0.0.1-commit.b655e406",
77
- "@aztec/stdlib": "0.0.1-commit.b655e406",
78
- "@aztec/telemetry-client": "0.0.1-commit.b655e406",
79
- "@aztec/world-state": "0.0.1-commit.b655e406",
59
+ "@aztec/archiver": "0.0.1-commit.d1f2d6c",
60
+ "@aztec/bb-prover": "0.0.1-commit.d1f2d6c",
61
+ "@aztec/blob-client": "0.0.1-commit.d1f2d6c",
62
+ "@aztec/blob-lib": "0.0.1-commit.d1f2d6c",
63
+ "@aztec/constants": "0.0.1-commit.d1f2d6c",
64
+ "@aztec/epoch-cache": "0.0.1-commit.d1f2d6c",
65
+ "@aztec/ethereum": "0.0.1-commit.d1f2d6c",
66
+ "@aztec/foundation": "0.0.1-commit.d1f2d6c",
67
+ "@aztec/kv-store": "0.0.1-commit.d1f2d6c",
68
+ "@aztec/l1-artifacts": "0.0.1-commit.d1f2d6c",
69
+ "@aztec/node-keystore": "0.0.1-commit.d1f2d6c",
70
+ "@aztec/node-lib": "0.0.1-commit.d1f2d6c",
71
+ "@aztec/noir-protocol-circuits-types": "0.0.1-commit.d1f2d6c",
72
+ "@aztec/p2p": "0.0.1-commit.d1f2d6c",
73
+ "@aztec/protocol-contracts": "0.0.1-commit.d1f2d6c",
74
+ "@aztec/prover-client": "0.0.1-commit.d1f2d6c",
75
+ "@aztec/sequencer-client": "0.0.1-commit.d1f2d6c",
76
+ "@aztec/simulator": "0.0.1-commit.d1f2d6c",
77
+ "@aztec/stdlib": "0.0.1-commit.d1f2d6c",
78
+ "@aztec/telemetry-client": "0.0.1-commit.d1f2d6c",
79
+ "@aztec/world-state": "0.0.1-commit.d1f2d6c",
80
80
  "source-map-support": "^0.5.21",
81
81
  "tslib": "^2.4.0",
82
- "viem": "npm:@spalladino/viem@2.38.2-eip7594.0"
82
+ "viem": "npm:@aztec/viem@2.38.2"
83
83
  },
84
84
  "devDependencies": {
85
85
  "@jest/globals": "^30.0.0",
86
86
  "@types/jest": "^30.0.0",
87
87
  "@types/node": "^22.15.17",
88
88
  "@types/source-map-support": "^0.5.10",
89
+ "@typescript/native-preview": "7.0.0-dev.20260113.1",
89
90
  "jest": "^30.0.0",
90
91
  "jest-mock-extended": "^4.0.0",
91
92
  "ts-node": "^10.9.1",
@@ -1,4 +1,5 @@
1
1
  import { createArchiverStore } from '@aztec/archiver';
2
+ import type { L1ContractsConfig } from '@aztec/ethereum/config';
2
3
  import type { Logger } from '@aztec/foundation/log';
3
4
  import type { DataStoreConfig } from '@aztec/kv-store/config';
4
5
  import { type ProverClientConfig, createProverClient } from '@aztec/prover-client';
@@ -21,7 +22,7 @@ import { ProverNodeJobMetrics } from '../metrics.js';
21
22
  export async function rerunEpochProvingJob(
22
23
  localPath: string,
23
24
  log: Logger,
24
- config: DataStoreConfig & ProverBrokerConfig & ProverClientConfig,
25
+ config: DataStoreConfig & ProverBrokerConfig & ProverClientConfig & Pick<L1ContractsConfig, 'aztecEpochDuration'>,
25
26
  ) {
26
27
  const jobData = deserializeEpochProvingJobData(readFileSync(localPath));
27
28
  log.info(`Loaded proving job data for epoch ${jobData.epochNumber}`);
@@ -29,7 +30,7 @@ export async function rerunEpochProvingJob(
29
30
  const telemetry = getTelemetryClient();
30
31
  const metrics = new ProverNodeJobMetrics(telemetry.getMeter('prover-job'), telemetry.getTracer('prover-job'));
31
32
  const worldState = await createWorldState(config);
32
- const archiver = await createArchiverStore(config);
33
+ const archiver = await createArchiverStore(config, { epochDuration: config.aztecEpochDuration });
33
34
  const publicProcessorFactory = new PublicProcessorFactory(archiver);
34
35
 
35
36
  const publisher = { submitEpochProof: () => Promise.resolve(true) };
@@ -1,5 +1,5 @@
1
1
  /* eslint-disable no-console */
2
- import type { L1ContractAddresses } from '@aztec/ethereum';
2
+ import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
3
3
  import { EthAddress } from '@aztec/foundation/eth-address';
4
4
  import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc';
5
5
  import { createLogger } from '@aztec/foundation/log';
package/src/config.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config';
2
2
  import type { ACVMConfig, BBConfig } from '@aztec/bb-prover/config';
3
- import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum';
3
+ import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum/config';
4
4
  import {
5
5
  type ConfigMappingsType,
6
6
  booleanConfigHelper,
@@ -8,7 +8,9 @@ import {
8
8
  numberConfigHelper,
9
9
  } from '@aztec/foundation/config';
10
10
  import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
11
- import { type KeyStore, type KeyStoreConfig, ethPrivateKeySchema, keyStoreConfigMappings } from '@aztec/node-keystore';
11
+ import { type KeyStoreConfig, keyStoreConfigMappings } from '@aztec/node-keystore/config';
12
+ import { ethPrivateKeySchema } from '@aztec/node-keystore/schemas';
13
+ import type { KeyStore } from '@aztec/node-keystore/types';
12
14
  import { type SharedNodeConfig, sharedNodeConfigMappings } from '@aztec/node-lib/config';
13
15
  import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config';
14
16
  import {
@@ -16,7 +18,7 @@ import {
16
18
  type ProverBrokerConfig,
17
19
  proverAgentConfigMappings,
18
20
  proverBrokerConfigMappings,
19
- } from '@aztec/prover-client/broker';
21
+ } from '@aztec/prover-client/broker/config';
20
22
  import { type ProverClientUserConfig, bbConfigMappings, proverClientConfigMappings } from '@aztec/prover-client/config';
21
23
  import {
22
24
  type PublisherConfig,
@@ -163,7 +165,7 @@ function createKeyStoreFromWeb3Signer(config: ProverNodeConfig): KeyStore | unde
163
165
  function createKeyStoreFromPublisherKeys(config: ProverNodeConfig): KeyStore | undefined {
164
166
  // Extract the publisher keys from the provided config.
165
167
  const publisherKeys = config.publisherPrivateKeys
166
- ? config.publisherPrivateKeys.map(k => ethPrivateKeySchema.parse(k.getValue()))
168
+ ? config.publisherPrivateKeys.map((k: { getValue: () => string }) => ethPrivateKeySchema.parse(k.getValue()))
167
169
  : [];
168
170
 
169
171
  // There must be at least 1.
package/src/factory.ts CHANGED
@@ -1,15 +1,21 @@
1
1
  import { type Archiver, createArchiver } from '@aztec/archiver';
2
2
  import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
3
- import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
3
+ import { createBlobClientWithFileStores } from '@aztec/blob-client/client';
4
4
  import { EpochCache } from '@aztec/epoch-cache';
5
- import { L1TxUtils, PublisherManager, RollupContract, createEthereumChain } from '@aztec/ethereum';
5
+ import { createEthereumChain } from '@aztec/ethereum/chain';
6
+ import { RollupContract } from '@aztec/ethereum/contracts';
7
+ import { L1TxUtils } from '@aztec/ethereum/l1-tx-utils';
8
+ import { PublisherManager } from '@aztec/ethereum/publisher-manager';
6
9
  import { pick } from '@aztec/foundation/collection';
7
10
  import { type Logger, createLogger } from '@aztec/foundation/log';
8
11
  import { DateProvider } from '@aztec/foundation/timer';
9
12
  import type { DataStoreConfig } from '@aztec/kv-store/config';
10
13
  import { type KeyStoreConfig, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore';
11
14
  import { trySnapshotSync } from '@aztec/node-lib/actions';
12
- import { createL1TxUtilsFromEthSignerWithStore } from '@aztec/node-lib/factories';
15
+ import {
16
+ createForwarderL1TxUtilsFromEthSigner,
17
+ createL1TxUtilsFromEthSignerWithStore,
18
+ } from '@aztec/node-lib/factories';
13
19
  import { NodeRpcTxSource, createP2PClient } from '@aztec/p2p';
14
20
  import { type ProverClientConfig, createProverClient } from '@aztec/prover-client';
15
21
  import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
@@ -33,7 +39,6 @@ export type ProverNodeDeps = {
33
39
  aztecNodeTxProvider?: Pick<AztecNode, 'getTxsByHash'>;
34
40
  archiver?: Archiver;
35
41
  publisherFactory?: ProverPublisherFactory;
36
- blobSinkClient?: BlobSinkClientInterface;
37
42
  broker?: ProvingJobBroker;
38
43
  l1TxUtils?: L1TxUtils;
39
44
  dateProvider?: DateProvider;
@@ -50,8 +55,7 @@ export async function createProverNode(
50
55
  const config = { ...userConfig };
51
56
  const telemetry = deps.telemetry ?? getTelemetryClient();
52
57
  const dateProvider = deps.dateProvider ?? new DateProvider();
53
- const blobSinkClient =
54
- deps.blobSinkClient ?? createBlobSinkClient(config, { logger: createLogger('prover-node:blob-sink:client') });
58
+ const blobClient = await createBlobClientWithFileStores(config, createLogger('prover-node:blob-client:client'));
55
59
  const log = deps.log ?? createLogger('prover-node');
56
60
 
57
61
  // Build a key store from file if given or from environment otherwise
@@ -102,12 +106,11 @@ export async function createProverNode(
102
106
 
103
107
  const archiver =
104
108
  deps.archiver ??
105
- (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
109
+ (await createArchiver(config, { blobClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
106
110
  log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`);
107
111
 
108
- const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false };
109
112
  const worldStateSynchronizer = await createWorldStateSynchronizer(
110
- worldStateConfig,
113
+ config,
111
114
  archiver,
112
115
  options.prefilledPublicData,
113
116
  telemetry,
@@ -123,7 +126,7 @@ export async function createProverNode(
123
126
 
124
127
  const publicClient = createPublicClient({
125
128
  chain: chain.chainInfo,
126
- transport: fallback(config.l1RpcUrls.map((url: string) => http(url))),
129
+ transport: fallback(config.l1RpcUrls.map((url: string) => http(url, { batch: false }))),
127
130
  pollingInterval: config.viemPollingIntervalMS,
128
131
  });
129
132
 
@@ -131,12 +134,20 @@ export async function createProverNode(
131
134
 
132
135
  const l1TxUtils = deps.l1TxUtils
133
136
  ? [deps.l1TxUtils]
134
- : await createL1TxUtilsFromEthSignerWithStore(
135
- publicClient,
136
- proverSigners.signers,
137
- { ...config, scope: 'prover' },
138
- { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
139
- );
137
+ : config.publisherForwarderAddress
138
+ ? await createForwarderL1TxUtilsFromEthSigner(
139
+ publicClient,
140
+ proverSigners.signers,
141
+ config.publisherForwarderAddress,
142
+ { ...config, scope: 'prover' },
143
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
144
+ )
145
+ : await createL1TxUtilsFromEthSignerWithStore(
146
+ publicClient,
147
+ proverSigners.signers,
148
+ { ...config, scope: 'prover' },
149
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
150
+ );
140
151
 
141
152
  const publisherFactory =
142
153
  deps.publisherFactory ??
@@ -148,7 +159,9 @@ export async function createProverNode(
148
159
 
149
160
  const proofVerifier = new QueuedIVCVerifier(
150
161
  config,
151
- config.realProofs ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(),
162
+ config.realProofs || config.debugForceTxProofVerification
163
+ ? await BBCircuitVerifier.new(config)
164
+ : new TestCircuitVerifier(config.proverTestVerificationDelayMs),
152
165
  );
153
166
 
154
167
  const p2pClient = await createP2PClient(
package/src/index.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  export * from './actions/index.js';
2
2
  export * from './config.js';
3
3
  export * from './factory.js';
4
+ export * from './monitors/index.js';
4
5
  export * from './prover-node-publisher.js';
5
6
  export * from './prover-node.js';
@@ -1,49 +1,55 @@
1
- import { Fr } from '@aztec/foundation/fields';
1
+ import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
2
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
3
- import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
4
+ import { CommitteeAttestation } from '@aztec/stdlib/block';
5
+ import { Checkpoint } from '@aztec/stdlib/checkpoint';
4
6
  import { BlockHeader, Tx } from '@aztec/stdlib/tx';
5
7
 
6
8
  /** All data from an epoch used in proving. */
7
9
  export type EpochProvingJobData = {
8
- epochNumber: bigint;
9
- blocks: L2Block[];
10
+ epochNumber: EpochNumber;
11
+ checkpoints: Checkpoint[];
10
12
  txs: Map<string, Tx>;
11
- l1ToL2Messages: Record<number, Fr[]>;
13
+ l1ToL2Messages: Record<CheckpointNumber, Fr[]>;
12
14
  previousBlockHeader: BlockHeader;
13
15
  attestations: CommitteeAttestation[];
14
16
  };
15
17
 
16
18
  export function validateEpochProvingJobData(data: EpochProvingJobData) {
17
- if (data.blocks.length > 0 && data.previousBlockHeader.getBlockNumber() + 1 !== data.blocks[0].number) {
19
+ if (data.checkpoints.length === 0) {
20
+ throw new Error('No checkpoints to prove');
21
+ }
22
+
23
+ const firstBlockNumber = data.checkpoints[0].blocks[0].number;
24
+ const previousBlockNumber = data.previousBlockHeader.getBlockNumber();
25
+ if (previousBlockNumber + 1 !== firstBlockNumber) {
18
26
  throw new Error(
19
- `Initial block number ${
20
- data.blocks[0].number
21
- } does not match previous block header ${data.previousBlockHeader.getBlockNumber()}`,
27
+ `Initial block number ${firstBlockNumber} does not match previous block header ${previousBlockNumber}`,
22
28
  );
23
29
  }
24
30
 
25
- for (const blockNumber of data.blocks.map(block => block.number)) {
26
- if (!(blockNumber in data.l1ToL2Messages)) {
27
- throw new Error(`Missing L1 to L2 messages for block number ${blockNumber}`);
31
+ for (const checkpoint of data.checkpoints) {
32
+ if (!(checkpoint.number in data.l1ToL2Messages)) {
33
+ throw new Error(`Missing L1 to L2 messages for checkpoint number ${checkpoint.number}`);
28
34
  }
29
35
  }
30
36
  }
31
37
 
32
38
  export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer {
33
- const blocks = data.blocks.map(block => block.toBuffer());
39
+ const checkpoints = data.checkpoints.map(checkpoint => checkpoint.toBuffer());
34
40
  const txs = Array.from(data.txs.values()).map(tx => tx.toBuffer());
35
- const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages]) => [
36
- Number(blockNumber),
41
+ const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([checkpointNumber, messages]) => [
42
+ Number(checkpointNumber),
37
43
  messages.length,
38
44
  ...messages,
39
45
  ]);
40
46
  const attestations = data.attestations.map(attestation => attestation.toBuffer());
41
47
 
42
48
  return serializeToBuffer(
43
- Number(data.epochNumber),
49
+ data.epochNumber,
44
50
  data.previousBlockHeader,
45
- blocks.length,
46
- ...blocks,
51
+ checkpoints.length,
52
+ ...checkpoints,
47
53
  txs.length,
48
54
  ...txs,
49
55
  l1ToL2Messages.length,
@@ -55,22 +61,22 @@ export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer
55
61
 
56
62
  export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData {
57
63
  const reader = BufferReader.asReader(buf);
58
- const epochNumber = BigInt(reader.readNumber());
64
+ const epochNumber = EpochNumber(reader.readNumber());
59
65
  const previousBlockHeader = reader.readObject(BlockHeader);
60
- const blocks = reader.readVector(L2Block);
66
+ const checkpoints = reader.readVector(Checkpoint);
61
67
  const txArray = reader.readVector(Tx);
62
68
 
63
- const l1ToL2MessageBlockCount = reader.readNumber();
69
+ const l1ToL2MessageCheckpointCount = reader.readNumber();
64
70
  const l1ToL2Messages: Record<number, Fr[]> = {};
65
- for (let i = 0; i < l1ToL2MessageBlockCount; i++) {
66
- const blockNumber = reader.readNumber();
71
+ for (let i = 0; i < l1ToL2MessageCheckpointCount; i++) {
72
+ const checkpointNumber = CheckpointNumber(reader.readNumber());
67
73
  const messages = reader.readVector(Fr);
68
- l1ToL2Messages[blockNumber] = messages;
74
+ l1ToL2Messages[checkpointNumber] = messages;
69
75
  }
70
76
 
71
77
  const attestations = reader.readVector(CommitteeAttestation);
72
78
 
73
79
  const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
74
80
 
75
- return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages, attestations };
81
+ return { epochNumber, previousBlockHeader, checkpoints, txs, l1ToL2Messages, attestations };
76
82
  }