@aztec/prover-node 0.0.1-commit.fce3e4f → 0.0.1-commit.fffb133c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dest/actions/download-epoch-proving-job.d.ts +1 -1
  2. package/dest/actions/rerun-epoch-proving-job.d.ts +3 -2
  3. package/dest/actions/rerun-epoch-proving-job.d.ts.map +1 -1
  4. package/dest/actions/rerun-epoch-proving-job.js +3 -1
  5. package/dest/config.d.ts +5 -4
  6. package/dest/config.d.ts.map +1 -1
  7. package/dest/config.js +4 -3
  8. package/dest/factory.d.ts +2 -4
  9. package/dest/factory.d.ts.map +1 -1
  10. package/dest/factory.js +20 -15
  11. package/dest/index.d.ts +2 -1
  12. package/dest/index.d.ts.map +1 -1
  13. package/dest/index.js +1 -0
  14. package/dest/job/epoch-proving-job-data.d.ts +7 -6
  15. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  16. package/dest/job/epoch-proving-job-data.js +24 -18
  17. package/dest/job/epoch-proving-job.d.ts +3 -3
  18. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  19. package/dest/job/epoch-proving-job.js +478 -99
  20. package/dest/metrics.d.ts +4 -3
  21. package/dest/metrics.d.ts.map +1 -1
  22. package/dest/metrics.js +21 -97
  23. package/dest/monitors/epoch-monitor.d.ts +1 -1
  24. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  25. package/dest/monitors/epoch-monitor.js +3 -11
  26. package/dest/prover-node-publisher.d.ts +7 -6
  27. package/dest/prover-node-publisher.d.ts.map +1 -1
  28. package/dest/prover-node-publisher.js +41 -36
  29. package/dest/prover-node.d.ts +4 -4
  30. package/dest/prover-node.d.ts.map +1 -1
  31. package/dest/prover-node.js +427 -45
  32. package/dest/prover-publisher-factory.d.ts +4 -2
  33. package/dest/prover-publisher-factory.d.ts.map +1 -1
  34. package/package.json +25 -25
  35. package/src/actions/rerun-epoch-proving-job.ts +3 -2
  36. package/src/bin/run-failed-epoch.ts +1 -1
  37. package/src/config.ts +6 -4
  38. package/src/factory.ts +28 -17
  39. package/src/index.ts +1 -0
  40. package/src/job/epoch-proving-job-data.ts +28 -23
  41. package/src/job/epoch-proving-job.ts +102 -97
  42. package/src/metrics.ts +27 -82
  43. package/src/monitors/epoch-monitor.ts +3 -10
  44. package/src/prover-node-publisher.ts +60 -50
  45. package/src/prover-node.ts +36 -32
  46. package/src/prover-publisher-factory.ts +3 -1
@@ -1,4 +1,5 @@
1
1
  import { createArchiverStore } from '@aztec/archiver';
2
+ import type { L1ContractsConfig } from '@aztec/ethereum/config';
2
3
  import type { Logger } from '@aztec/foundation/log';
3
4
  import type { DataStoreConfig } from '@aztec/kv-store/config';
4
5
  import { type ProverClientConfig, createProverClient } from '@aztec/prover-client';
@@ -21,7 +22,7 @@ import { ProverNodeJobMetrics } from '../metrics.js';
21
22
  export async function rerunEpochProvingJob(
22
23
  localPath: string,
23
24
  log: Logger,
24
- config: DataStoreConfig & ProverBrokerConfig & ProverClientConfig,
25
+ config: DataStoreConfig & ProverBrokerConfig & ProverClientConfig & Pick<L1ContractsConfig, 'aztecEpochDuration'>,
25
26
  ) {
26
27
  const jobData = deserializeEpochProvingJobData(readFileSync(localPath));
27
28
  log.info(`Loaded proving job data for epoch ${jobData.epochNumber}`);
@@ -29,7 +30,7 @@ export async function rerunEpochProvingJob(
29
30
  const telemetry = getTelemetryClient();
30
31
  const metrics = new ProverNodeJobMetrics(telemetry.getMeter('prover-job'), telemetry.getTracer('prover-job'));
31
32
  const worldState = await createWorldState(config);
32
- const archiver = await createArchiverStore(config);
33
+ const archiver = await createArchiverStore(config, { epochDuration: config.aztecEpochDuration });
33
34
  const publicProcessorFactory = new PublicProcessorFactory(archiver);
34
35
 
35
36
  const publisher = { submitEpochProof: () => Promise.resolve(true) };
@@ -1,5 +1,5 @@
1
1
  /* eslint-disable no-console */
2
- import type { L1ContractAddresses } from '@aztec/ethereum';
2
+ import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
3
3
  import { EthAddress } from '@aztec/foundation/eth-address';
4
4
  import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc';
5
5
  import { createLogger } from '@aztec/foundation/log';
package/src/config.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config';
2
2
  import type { ACVMConfig, BBConfig } from '@aztec/bb-prover/config';
3
- import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum';
3
+ import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum/config';
4
4
  import {
5
5
  type ConfigMappingsType,
6
6
  booleanConfigHelper,
@@ -8,7 +8,9 @@ import {
8
8
  numberConfigHelper,
9
9
  } from '@aztec/foundation/config';
10
10
  import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
11
- import { type KeyStore, type KeyStoreConfig, ethPrivateKeySchema, keyStoreConfigMappings } from '@aztec/node-keystore';
11
+ import { type KeyStoreConfig, keyStoreConfigMappings } from '@aztec/node-keystore/config';
12
+ import { ethPrivateKeySchema } from '@aztec/node-keystore/schemas';
13
+ import type { KeyStore } from '@aztec/node-keystore/types';
12
14
  import { type SharedNodeConfig, sharedNodeConfigMappings } from '@aztec/node-lib/config';
13
15
  import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config';
14
16
  import {
@@ -16,7 +18,7 @@ import {
16
18
  type ProverBrokerConfig,
17
19
  proverAgentConfigMappings,
18
20
  proverBrokerConfigMappings,
19
- } from '@aztec/prover-client/broker';
21
+ } from '@aztec/prover-client/broker/config';
20
22
  import { type ProverClientUserConfig, bbConfigMappings, proverClientConfigMappings } from '@aztec/prover-client/config';
21
23
  import {
22
24
  type PublisherConfig,
@@ -163,7 +165,7 @@ function createKeyStoreFromWeb3Signer(config: ProverNodeConfig): KeyStore | unde
163
165
  function createKeyStoreFromPublisherKeys(config: ProverNodeConfig): KeyStore | undefined {
164
166
  // Extract the publisher keys from the provided config.
165
167
  const publisherKeys = config.publisherPrivateKeys
166
- ? config.publisherPrivateKeys.map(k => ethPrivateKeySchema.parse(k.getValue()))
168
+ ? config.publisherPrivateKeys.map((k: { getValue: () => string }) => ethPrivateKeySchema.parse(k.getValue()))
167
169
  : [];
168
170
 
169
171
  // There must be at least 1.
package/src/factory.ts CHANGED
@@ -1,15 +1,21 @@
1
1
  import { type Archiver, createArchiver } from '@aztec/archiver';
2
2
  import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
3
- import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
3
+ import { createBlobClientWithFileStores } from '@aztec/blob-client/client';
4
4
  import { EpochCache } from '@aztec/epoch-cache';
5
- import { L1TxUtils, PublisherManager, RollupContract, createEthereumChain } from '@aztec/ethereum';
5
+ import { createEthereumChain } from '@aztec/ethereum/chain';
6
+ import { RollupContract } from '@aztec/ethereum/contracts';
7
+ import { L1TxUtils } from '@aztec/ethereum/l1-tx-utils';
8
+ import { PublisherManager } from '@aztec/ethereum/publisher-manager';
6
9
  import { pick } from '@aztec/foundation/collection';
7
10
  import { type Logger, createLogger } from '@aztec/foundation/log';
8
11
  import { DateProvider } from '@aztec/foundation/timer';
9
12
  import type { DataStoreConfig } from '@aztec/kv-store/config';
10
13
  import { type KeyStoreConfig, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore';
11
14
  import { trySnapshotSync } from '@aztec/node-lib/actions';
12
- import { createL1TxUtilsFromEthSignerWithStore } from '@aztec/node-lib/factories';
15
+ import {
16
+ createForwarderL1TxUtilsFromEthSigner,
17
+ createL1TxUtilsFromEthSignerWithStore,
18
+ } from '@aztec/node-lib/factories';
13
19
  import { NodeRpcTxSource, createP2PClient } from '@aztec/p2p';
14
20
  import { type ProverClientConfig, createProverClient } from '@aztec/prover-client';
15
21
  import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
@@ -33,7 +39,6 @@ export type ProverNodeDeps = {
33
39
  aztecNodeTxProvider?: Pick<AztecNode, 'getTxsByHash'>;
34
40
  archiver?: Archiver;
35
41
  publisherFactory?: ProverPublisherFactory;
36
- blobSinkClient?: BlobSinkClientInterface;
37
42
  broker?: ProvingJobBroker;
38
43
  l1TxUtils?: L1TxUtils;
39
44
  dateProvider?: DateProvider;
@@ -50,8 +55,7 @@ export async function createProverNode(
50
55
  const config = { ...userConfig };
51
56
  const telemetry = deps.telemetry ?? getTelemetryClient();
52
57
  const dateProvider = deps.dateProvider ?? new DateProvider();
53
- const blobSinkClient =
54
- deps.blobSinkClient ?? createBlobSinkClient(config, { logger: createLogger('prover-node:blob-sink:client') });
58
+ const blobClient = await createBlobClientWithFileStores(config, createLogger('prover-node:blob-client:client'));
55
59
  const log = deps.log ?? createLogger('prover-node');
56
60
 
57
61
  // Build a key store from file if given or from environment otherwise
@@ -102,12 +106,11 @@ export async function createProverNode(
102
106
 
103
107
  const archiver =
104
108
  deps.archiver ??
105
- (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
109
+ (await createArchiver(config, { blobClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
106
110
  log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`);
107
111
 
108
- const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false };
109
112
  const worldStateSynchronizer = await createWorldStateSynchronizer(
110
- worldStateConfig,
113
+ config,
111
114
  archiver,
112
115
  options.prefilledPublicData,
113
116
  telemetry,
@@ -123,7 +126,7 @@ export async function createProverNode(
123
126
 
124
127
  const publicClient = createPublicClient({
125
128
  chain: chain.chainInfo,
126
- transport: fallback(config.l1RpcUrls.map((url: string) => http(url))),
129
+ transport: fallback(config.l1RpcUrls.map((url: string) => http(url, { batch: false }))),
127
130
  pollingInterval: config.viemPollingIntervalMS,
128
131
  });
129
132
 
@@ -131,12 +134,20 @@ export async function createProverNode(
131
134
 
132
135
  const l1TxUtils = deps.l1TxUtils
133
136
  ? [deps.l1TxUtils]
134
- : await createL1TxUtilsFromEthSignerWithStore(
135
- publicClient,
136
- proverSigners.signers,
137
- { ...config, scope: 'prover' },
138
- { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
139
- );
137
+ : config.publisherForwarderAddress
138
+ ? await createForwarderL1TxUtilsFromEthSigner(
139
+ publicClient,
140
+ proverSigners.signers,
141
+ config.publisherForwarderAddress,
142
+ { ...config, scope: 'prover' },
143
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
144
+ )
145
+ : await createL1TxUtilsFromEthSignerWithStore(
146
+ publicClient,
147
+ proverSigners.signers,
148
+ { ...config, scope: 'prover' },
149
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
150
+ );
140
151
 
141
152
  const publisherFactory =
142
153
  deps.publisherFactory ??
@@ -148,7 +159,7 @@ export async function createProverNode(
148
159
 
149
160
  const proofVerifier = new QueuedIVCVerifier(
150
161
  config,
151
- config.realProofs
162
+ config.realProofs || config.debugForceTxProofVerification
152
163
  ? await BBCircuitVerifier.new(config)
153
164
  : new TestCircuitVerifier(config.proverTestVerificationDelayMs),
154
165
  );
package/src/index.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  export * from './actions/index.js';
2
2
  export * from './config.js';
3
3
  export * from './factory.js';
4
+ export * from './monitors/index.js';
4
5
  export * from './prover-node-publisher.js';
5
6
  export * from './prover-node.js';
@@ -1,40 +1,45 @@
1
- import { EpochNumber } from '@aztec/foundation/branded-types';
2
- import { Fr } from '@aztec/foundation/fields';
1
+ import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
4
- import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
4
+ import { CommitteeAttestation } from '@aztec/stdlib/block';
5
+ import { Checkpoint } from '@aztec/stdlib/checkpoint';
5
6
  import { BlockHeader, Tx } from '@aztec/stdlib/tx';
6
7
 
7
8
  /** All data from an epoch used in proving. */
8
9
  export type EpochProvingJobData = {
9
10
  epochNumber: EpochNumber;
10
- blocks: L2Block[];
11
+ checkpoints: Checkpoint[];
11
12
  txs: Map<string, Tx>;
12
- l1ToL2Messages: Record<number, Fr[]>;
13
+ l1ToL2Messages: Record<CheckpointNumber, Fr[]>;
13
14
  previousBlockHeader: BlockHeader;
14
15
  attestations: CommitteeAttestation[];
15
16
  };
16
17
 
17
18
  export function validateEpochProvingJobData(data: EpochProvingJobData) {
18
- if (data.blocks.length > 0 && data.previousBlockHeader.getBlockNumber() + 1 !== data.blocks[0].number) {
19
+ if (data.checkpoints.length === 0) {
20
+ throw new Error('No checkpoints to prove');
21
+ }
22
+
23
+ const firstBlockNumber = data.checkpoints[0].blocks[0].number;
24
+ const previousBlockNumber = data.previousBlockHeader.getBlockNumber();
25
+ if (previousBlockNumber + 1 !== firstBlockNumber) {
19
26
  throw new Error(
20
- `Initial block number ${
21
- data.blocks[0].number
22
- } does not match previous block header ${data.previousBlockHeader.getBlockNumber()}`,
27
+ `Initial block number ${firstBlockNumber} does not match previous block header ${previousBlockNumber}`,
23
28
  );
24
29
  }
25
30
 
26
- for (const blockNumber of data.blocks.map(block => block.number)) {
27
- if (!(blockNumber in data.l1ToL2Messages)) {
28
- throw new Error(`Missing L1 to L2 messages for block number ${blockNumber}`);
31
+ for (const checkpoint of data.checkpoints) {
32
+ if (!(checkpoint.number in data.l1ToL2Messages)) {
33
+ throw new Error(`Missing L1 to L2 messages for checkpoint number ${checkpoint.number}`);
29
34
  }
30
35
  }
31
36
  }
32
37
 
33
38
  export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer {
34
- const blocks = data.blocks.map(block => block.toBuffer());
39
+ const checkpoints = data.checkpoints.map(checkpoint => checkpoint.toBuffer());
35
40
  const txs = Array.from(data.txs.values()).map(tx => tx.toBuffer());
36
- const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages]) => [
37
- Number(blockNumber),
41
+ const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([checkpointNumber, messages]) => [
42
+ Number(checkpointNumber),
38
43
  messages.length,
39
44
  ...messages,
40
45
  ]);
@@ -43,8 +48,8 @@ export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer
43
48
  return serializeToBuffer(
44
49
  data.epochNumber,
45
50
  data.previousBlockHeader,
46
- blocks.length,
47
- ...blocks,
51
+ checkpoints.length,
52
+ ...checkpoints,
48
53
  txs.length,
49
54
  ...txs,
50
55
  l1ToL2Messages.length,
@@ -58,20 +63,20 @@ export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData
58
63
  const reader = BufferReader.asReader(buf);
59
64
  const epochNumber = EpochNumber(reader.readNumber());
60
65
  const previousBlockHeader = reader.readObject(BlockHeader);
61
- const blocks = reader.readVector(L2Block);
66
+ const checkpoints = reader.readVector(Checkpoint);
62
67
  const txArray = reader.readVector(Tx);
63
68
 
64
- const l1ToL2MessageBlockCount = reader.readNumber();
69
+ const l1ToL2MessageCheckpointCount = reader.readNumber();
65
70
  const l1ToL2Messages: Record<number, Fr[]> = {};
66
- for (let i = 0; i < l1ToL2MessageBlockCount; i++) {
67
- const blockNumber = reader.readNumber();
71
+ for (let i = 0; i < l1ToL2MessageCheckpointCount; i++) {
72
+ const checkpointNumber = CheckpointNumber(reader.readNumber());
68
73
  const messages = reader.readVector(Fr);
69
- l1ToL2Messages[blockNumber] = messages;
74
+ l1ToL2Messages[checkpointNumber] = messages;
70
75
  }
71
76
 
72
77
  const attestations = reader.readVector(CommitteeAttestation);
73
78
 
74
79
  const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
75
80
 
76
- return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages, attestations };
81
+ return { epochNumber, previousBlockHeader, checkpoints, txs, l1ToL2Messages, attestations };
77
82
  }
@@ -1,8 +1,8 @@
1
1
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
2
2
  import { asyncPool } from '@aztec/foundation/async-pool';
3
- import { EpochNumber } from '@aztec/foundation/branded-types';
3
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
4
4
  import { padArrayEnd } from '@aztec/foundation/collection';
5
- import { Fr } from '@aztec/foundation/fields';
5
+ import { Fr } from '@aztec/foundation/curves/bn254';
6
6
  import { createLogger } from '@aztec/foundation/log';
7
7
  import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
8
8
  import { Timer } from '@aztec/foundation/timer';
@@ -12,6 +12,7 @@ import { buildFinalBlobChallenges } from '@aztec/prover-client/helpers';
12
12
  import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/server';
13
13
  import { PublicSimulatorConfig } from '@aztec/stdlib/avm';
14
14
  import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
15
+ import type { Checkpoint } from '@aztec/stdlib/checkpoint';
15
16
  import {
16
17
  type EpochProver,
17
18
  type EpochProvingJobState,
@@ -91,8 +92,8 @@ export class EpochProvingJob implements Traceable {
91
92
  return this.data.epochNumber;
92
93
  }
93
94
 
94
- private get blocks() {
95
- return this.data.blocks;
95
+ private get checkpoints() {
96
+ return this.data.checkpoints;
96
97
  }
97
98
 
98
99
  private get txs() {
@@ -117,13 +118,21 @@ export class EpochProvingJob implements Traceable {
117
118
 
118
119
  const attestations = this.attestations.map(attestation => attestation.toViem());
119
120
  const epochNumber = this.epochNumber;
120
- const epochSizeBlocks = this.blocks.length;
121
- const epochSizeTxs = this.blocks.reduce((total, current) => total + current.body.txEffects.length, 0);
122
- const [fromBlock, toBlock] = [this.blocks[0].number, this.blocks.at(-1)!.number];
123
- this.log.info(`Starting epoch ${epochNumber} proving job with blocks ${fromBlock} to ${toBlock}`, {
121
+ const epochSizeCheckpoints = this.checkpoints.length;
122
+ const epochSizeBlocks = this.checkpoints.reduce((accum, checkpoint) => accum + checkpoint.blocks.length, 0);
123
+ const epochSizeTxs = this.checkpoints.reduce(
124
+ (accum, checkpoint) =>
125
+ accum + checkpoint.blocks.reduce((accumC, block) => accumC + block.body.txEffects.length, 0),
126
+ 0,
127
+ );
128
+ const fromCheckpoint = this.checkpoints[0].number;
129
+ const toCheckpoint = this.checkpoints.at(-1)!.number;
130
+ const fromBlock = this.checkpoints[0].blocks[0].number;
131
+ const toBlock = this.checkpoints.at(-1)!.blocks.at(-1)!.number;
132
+ this.log.info(`Starting epoch ${epochNumber} proving job with checkpoints ${fromCheckpoint} to ${toCheckpoint}`, {
124
133
  fromBlock,
125
134
  toBlock,
126
- epochSizeBlocks,
135
+ epochSizeTxs,
127
136
  epochNumber,
128
137
  uuid: this.uuid,
129
138
  });
@@ -134,86 +143,93 @@ export class EpochProvingJob implements Traceable {
134
143
  this.runPromise = promise;
135
144
 
136
145
  try {
137
- const blobFieldsPerCheckpoint = this.blocks.map(block => block.getCheckpointBlobFields());
146
+ const blobFieldsPerCheckpoint = this.checkpoints.map(checkpoint => checkpoint.toBlobFields());
138
147
  const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);
139
148
 
140
- // TODO(#17027): Enable multiple blocks per checkpoint.
141
- // Total number of checkpoints equals number of blocks because we currently build a checkpoint with only one block.
142
- const totalNumCheckpoints = epochSizeBlocks;
143
-
144
- this.prover.startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges);
149
+ this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges);
145
150
  await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values()));
146
151
 
147
- await asyncPool(this.config.parallelBlockLimit ?? 32, this.blocks, async block => {
148
- this.checkState();
152
+ // Everything in the epoch should have the same chainId and version.
153
+ const { chainId, version } = this.checkpoints[0].blocks[0].header.globalVariables;
149
154
 
150
- const globalVariables = block.header.globalVariables;
151
- const txs = this.getTxs(block);
152
- const l1ToL2Messages = this.getL1ToL2Messages(block);
153
- const previousHeader = this.getBlockHeader(block.number - 1)!;
154
-
155
- this.log.verbose(`Starting processing block ${block.number}`, {
156
- number: block.number,
157
- blockHash: (await block.hash()).toString(),
158
- lastArchive: block.header.lastArchive.root,
159
- noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
160
- nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
161
- publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
162
- previousHeader: previousHeader.hash(),
163
- uuid: this.uuid,
164
- ...globalVariables,
165
- });
155
+ const previousBlockHeaders = this.gatherPreviousBlockHeaders();
166
156
 
157
+ await asyncPool(this.config.parallelBlockLimit ?? 32, this.checkpoints, async checkpoint => {
158
+ this.checkState();
159
+
160
+ const checkpointIndex = checkpoint.number - fromCheckpoint;
167
161
  const checkpointConstants = CheckpointConstantData.from({
168
- chainId: globalVariables.chainId,
169
- version: globalVariables.version,
162
+ chainId,
163
+ version,
170
164
  vkTreeRoot: getVKTreeRoot(),
171
165
  protocolContractsHash: protocolContractsHash,
172
166
  proverId: this.prover.getProverId().toField(),
173
- slotNumber: globalVariables.slotNumber,
174
- coinbase: globalVariables.coinbase,
175
- feeRecipient: globalVariables.feeRecipient,
176
- gasFees: globalVariables.gasFees,
167
+ slotNumber: checkpoint.header.slotNumber,
168
+ coinbase: checkpoint.header.coinbase,
169
+ feeRecipient: checkpoint.header.feeRecipient,
170
+ gasFees: checkpoint.header.gasFees,
171
+ });
172
+ const previousHeader = previousBlockHeaders[checkpointIndex];
173
+ const l1ToL2Messages = this.getL1ToL2Messages(checkpoint);
174
+
175
+ this.log.verbose(`Starting processing checkpoint ${checkpoint.number}`, {
176
+ number: checkpoint.number,
177
+ checkpointHash: checkpoint.hash().toString(),
178
+ lastArchive: checkpoint.header.lastArchiveRoot,
179
+ previousHeader: previousHeader.hash(),
180
+ uuid: this.uuid,
177
181
  });
178
182
 
179
- // TODO(#17027): Enable multiple blocks per checkpoint.
180
- // Each checkpoint has only one block.
181
- const totalNumBlocks = 1;
182
- const checkpointIndex = block.number - fromBlock;
183
183
  await this.prover.startNewCheckpoint(
184
184
  checkpointIndex,
185
185
  checkpointConstants,
186
186
  l1ToL2Messages,
187
- totalNumBlocks,
187
+ checkpoint.blocks.length,
188
188
  previousHeader,
189
189
  );
190
190
 
191
- // Start block proving
192
- await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
191
+ for (const block of checkpoint.blocks) {
192
+ const globalVariables = block.header.globalVariables;
193
+ const txs = this.getTxs(block);
194
+
195
+ this.log.verbose(`Starting processing block ${block.number}`, {
196
+ number: block.number,
197
+ blockHash: (await block.hash()).toString(),
198
+ lastArchive: block.header.lastArchive.root,
199
+ noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
200
+ nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
201
+ publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
202
+ ...globalVariables,
203
+ numTxs: txs.length,
204
+ });
193
205
 
194
- // Process public fns
195
- const db = await this.createFork(block.number - 1, l1ToL2Messages);
196
- const config = PublicSimulatorConfig.from({
197
- proverId: this.prover.getProverId().toField(),
198
- skipFeeEnforcement: false,
199
- collectDebugLogs: false,
200
- collectHints: true,
201
- maxDebugLogMemoryReads: 0,
202
- collectStatistics: false,
203
- });
204
- const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
205
- const processed = await this.processTxs(publicProcessor, txs);
206
- await this.prover.addTxs(processed);
207
- await db.close();
208
- this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
209
- blockNumber: block.number,
210
- blockHash: (await block.hash()).toString(),
211
- uuid: this.uuid,
212
- });
206
+ // Start block proving
207
+ await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
208
+
209
+ // Process public fns
210
+ const db = await this.createFork(BlockNumber(block.number - 1), l1ToL2Messages);
211
+ const config = PublicSimulatorConfig.from({
212
+ proverId: this.prover.getProverId().toField(),
213
+ skipFeeEnforcement: false,
214
+ collectDebugLogs: false,
215
+ collectHints: true,
216
+ collectPublicInputs: true,
217
+ collectStatistics: false,
218
+ });
219
+ const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
220
+ const processed = await this.processTxs(publicProcessor, txs);
221
+ await this.prover.addTxs(processed);
222
+ await db.close();
223
+ this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
224
+ blockNumber: block.number,
225
+ blockHash: (await block.hash()).toString(),
226
+ uuid: this.uuid,
227
+ });
213
228
 
214
- // Mark block as completed to pad it
215
- const expectedBlockHeader = block.getBlockHeader();
216
- await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
229
+ // Mark block as completed to pad it
230
+ const expectedBlockHeader = block.header;
231
+ await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
232
+ }
217
233
  });
218
234
 
219
235
  const executionTime = timer.ms();
@@ -226,16 +242,16 @@ export class EpochProvingJob implements Traceable {
226
242
 
227
243
  if (this.config.skipSubmitProof) {
228
244
  this.log.info(
229
- `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`,
245
+ `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`,
230
246
  );
231
247
  this.state = 'completed';
232
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
248
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
233
249
  return;
234
250
  }
235
251
 
236
252
  const success = await this.publisher.submitEpochProof({
237
- fromBlock,
238
- toBlock,
253
+ fromCheckpoint,
254
+ toCheckpoint,
239
255
  epochNumber,
240
256
  publicInputs,
241
257
  proof,
@@ -246,12 +262,12 @@ export class EpochProvingJob implements Traceable {
246
262
  throw new Error('Failed to submit epoch proof to L1');
247
263
  }
248
264
 
249
- this.log.info(`Submitted proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`, {
265
+ this.log.info(`Submitted proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`, {
250
266
  epochNumber,
251
267
  uuid: this.uuid,
252
268
  });
253
269
  this.state = 'completed';
254
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
270
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
255
271
  } catch (err: any) {
256
272
  if (err && err.name === 'HaltExecutionError') {
257
273
  this.log.warn(`Halted execution of epoch ${epochNumber} prover job`, {
@@ -277,7 +293,7 @@ export class EpochProvingJob implements Traceable {
277
293
  * Create a new db fork for tx processing, inserting all L1 to L2.
278
294
  * REFACTOR: The prover already spawns a db fork of its own for each block, so we may be able to do away with just one fork.
279
295
  */
280
- private async createFork(blockNumber: number, l1ToL2Messages: Fr[]) {
296
+ private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[]) {
281
297
  const db = await this.dbProvider.fork(blockNumber);
282
298
  const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
283
299
  l1ToL2Messages,
@@ -346,11 +362,12 @@ export class EpochProvingJob implements Traceable {
346
362
  const intervalMs = Math.ceil((await l2BlockSource.getL1Constants()).ethereumSlotDuration / 2) * 1000;
347
363
  this.epochCheckPromise = new RunningPromise(
348
364
  async () => {
349
- const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
350
- const blockHashes = await Promise.all(blocks.map(block => block.hash()));
351
- const thisBlockHashes = await Promise.all(this.blocks.map(block => block.hash()));
365
+ const blockHeaders = await l2BlockSource.getCheckpointedBlockHeadersForEpoch(this.epochNumber);
366
+ const blockHashes = await Promise.all(blockHeaders.map(async header => (await header.hash()).toField()));
367
+ const thisBlocks = this.checkpoints.flatMap(checkpoint => checkpoint.blocks);
368
+ const thisBlockHashes = await Promise.all(thisBlocks.map(block => block.hash()));
352
369
  if (
353
- blocks.length !== this.blocks.length ||
370
+ blockHeaders.length !== thisBlocks.length ||
354
371
  !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))
355
372
  ) {
356
373
  this.log.warn('Epoch blocks changed underfoot', {
@@ -368,30 +385,18 @@ export class EpochProvingJob implements Traceable {
368
385
  this.log.verbose(`Scheduled epoch check for epoch ${this.epochNumber} every ${intervalMs}ms`);
369
386
  }
370
387
 
371
- /* Returns the header for the given block number based on the epoch proving job data. */
372
- private getBlockHeader(blockNumber: number) {
373
- const block = this.blocks.find(b => b.number === blockNumber);
374
- if (block) {
375
- return block.getBlockHeader();
376
- }
377
-
378
- if (blockNumber === Number(this.data.previousBlockHeader.getBlockNumber())) {
379
- return this.data.previousBlockHeader;
380
- }
381
-
382
- throw new Error(
383
- `Block header not found for block number ${blockNumber} (got ${this.blocks
384
- .map(b => b.number)
385
- .join(', ')} and previous header ${this.data.previousBlockHeader.getBlockNumber()})`,
386
- );
388
+ /* Returns the last block header in the previous checkpoint for all checkpoints in the epoch */
389
+ private gatherPreviousBlockHeaders() {
390
+ const lastBlocks = this.checkpoints.map(checkpoint => checkpoint.blocks.at(-1)!);
391
+ return [this.data.previousBlockHeader, ...lastBlocks.map(block => block.header).slice(0, -1)];
387
392
  }
388
393
 
389
394
  private getTxs(block: L2Block): Tx[] {
390
395
  return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
391
396
  }
392
397
 
393
- private getL1ToL2Messages(block: L2Block) {
394
- return this.data.l1ToL2Messages[block.number];
398
+ private getL1ToL2Messages(checkpoint: Checkpoint) {
399
+ return this.data.l1ToL2Messages[checkpoint.number];
395
400
  }
396
401
 
397
402
  private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise<ProcessedTx[]> {