@aztec/prover-node 3.0.0-nightly.20251202 → 3.0.0-nightly.20251204

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
  import { asyncPool } from '@aztec/foundation/async-pool';
- import { EpochNumber } from '@aztec/foundation/branded-types';
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
  import { padArrayEnd } from '@aztec/foundation/collection';
  import { Fr } from '@aztec/foundation/fields';
  import { createLogger } from '@aztec/foundation/log';
@@ -11,7 +11,8 @@ import { protocolContractsHash } from '@aztec/protocol-contracts';
  import { buildFinalBlobChallenges } from '@aztec/prover-client/helpers';
  import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/server';
  import { PublicSimulatorConfig } from '@aztec/stdlib/avm';
- import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
+ import type { L2BlockNew, L2BlockSource } from '@aztec/stdlib/block';
+ import type { Checkpoint } from '@aztec/stdlib/checkpoint';
  import {
  type EpochProver,
  type EpochProvingJobState,
@@ -91,8 +92,8 @@ export class EpochProvingJob implements Traceable {
  return this.data.epochNumber;
  }

- private get blocks() {
- return this.data.blocks;
+ private get checkpoints() {
+ return this.data.checkpoints;
  }

  private get txs() {
@@ -117,13 +118,21 @@ export class EpochProvingJob implements Traceable {

  const attestations = this.attestations.map(attestation => attestation.toViem());
  const epochNumber = this.epochNumber;
- const epochSizeBlocks = this.blocks.length;
- const epochSizeTxs = this.blocks.reduce((total, current) => total + current.body.txEffects.length, 0);
- const [fromBlock, toBlock] = [this.blocks[0].number, this.blocks.at(-1)!.number];
- this.log.info(`Starting epoch ${epochNumber} proving job with blocks ${fromBlock} to ${toBlock}`, {
+ const epochSizeCheckpoints = this.checkpoints.length;
+ const epochSizeBlocks = this.checkpoints.reduce((accum, checkpoint) => accum + checkpoint.blocks.length, 0);
+ const epochSizeTxs = this.checkpoints.reduce(
+ (accum, checkpoint) =>
+ accum + checkpoint.blocks.reduce((accumC, block) => accumC + block.body.txEffects.length, 0),
+ 0,
+ );
+ const fromCheckpoint = this.checkpoints[0].number;
+ const toCheckpoint = this.checkpoints.at(-1)!.number;
+ const fromBlock = this.checkpoints[0].blocks[0].number;
+ const toBlock = this.checkpoints.at(-1)!.blocks.at(-1)!.number;
+ this.log.info(`Starting epoch ${epochNumber} proving job with checkpoints ${fromCheckpoint} to ${toCheckpoint}`, {
  fromBlock,
  toBlock,
- epochSizeBlocks,
+ epochSizeTxs,
  epochNumber,
  uuid: this.uuid,
  });
@@ -134,86 +143,93 @@ export class EpochProvingJob implements Traceable {
  this.runPromise = promise;

  try {
- const blobFieldsPerCheckpoint = this.blocks.map(block => block.getCheckpointBlobFields());
+ const blobFieldsPerCheckpoint = this.checkpoints.map(checkpoint => checkpoint.toBlobFields());
  const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);

- // TODO(#17027): Enable multiple blocks per checkpoint.
- // Total number of checkpoints equals number of blocks because we currently build a checkpoint with only one block.
- const totalNumCheckpoints = epochSizeBlocks;
-
- this.prover.startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges);
+ this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges);
  await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values()));

- await asyncPool(this.config.parallelBlockLimit ?? 32, this.blocks, async block => {
- this.checkState();
+ // Everything in the epoch should have the same chainId and version.
+ const { chainId, version } = this.checkpoints[0].blocks[0].header.globalVariables;

- const globalVariables = block.header.globalVariables;
- const txs = this.getTxs(block);
- const l1ToL2Messages = this.getL1ToL2Messages(block);
- const previousHeader = this.getBlockHeader(block.number - 1)!;
-
- this.log.verbose(`Starting processing block ${block.number}`, {
- number: block.number,
- blockHash: (await block.hash()).toString(),
- lastArchive: block.header.lastArchive.root,
- noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
- nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
- publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
- previousHeader: previousHeader.hash(),
- uuid: this.uuid,
- ...globalVariables,
- });
+ const previousBlockHeaders = this.gatherPreviousBlockHeaders();

+ await asyncPool(this.config.parallelBlockLimit ?? 32, this.checkpoints, async checkpoint => {
+ this.checkState();
+
+ const checkpointIndex = checkpoint.number - fromCheckpoint;
  const checkpointConstants = CheckpointConstantData.from({
- chainId: globalVariables.chainId,
- version: globalVariables.version,
+ chainId,
+ version,
  vkTreeRoot: getVKTreeRoot(),
  protocolContractsHash: protocolContractsHash,
  proverId: this.prover.getProverId().toField(),
- slotNumber: globalVariables.slotNumber,
- coinbase: globalVariables.coinbase,
- feeRecipient: globalVariables.feeRecipient,
- gasFees: globalVariables.gasFees,
+ slotNumber: checkpoint.header.slotNumber,
+ coinbase: checkpoint.header.coinbase,
+ feeRecipient: checkpoint.header.feeRecipient,
+ gasFees: checkpoint.header.gasFees,
+ });
+ const previousHeader = previousBlockHeaders[checkpointIndex];
+ const l1ToL2Messages = this.getL1ToL2Messages(checkpoint);
+
+ this.log.verbose(`Starting processing checkpoint ${checkpoint.number}`, {
+ number: checkpoint.number,
+ checkpointHash: checkpoint.hash().toString(),
+ lastArchive: checkpoint.header.lastArchiveRoot,
+ previousHeader: previousHeader.hash(),
+ uuid: this.uuid,
  });

- // TODO(#17027): Enable multiple blocks per checkpoint.
- // Each checkpoint has only one block.
- const totalNumBlocks = 1;
- const checkpointIndex = block.number - fromBlock;
  await this.prover.startNewCheckpoint(
  checkpointIndex,
  checkpointConstants,
  l1ToL2Messages,
- totalNumBlocks,
+ checkpoint.blocks.length,
  previousHeader,
  );

- // Start block proving
- await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
+ for (const block of checkpoint.blocks) {
+ const globalVariables = block.header.globalVariables;
+ const txs = this.getTxs(block);
+
+ this.log.verbose(`Starting processing block ${block.number}`, {
+ number: block.number,
+ blockHash: (await block.hash()).toString(),
+ lastArchive: block.header.lastArchive.root,
+ noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
+ nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
+ publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
+ ...globalVariables,
+ numTxs: txs.length,
+ });

- // Process public fns
- const db = await this.createFork(block.number - 1, l1ToL2Messages);
- const config = PublicSimulatorConfig.from({
- proverId: this.prover.getProverId().toField(),
- skipFeeEnforcement: false,
- collectDebugLogs: false,
- collectHints: true,
- maxDebugLogMemoryReads: 0,
- collectStatistics: false,
- });
- const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
- const processed = await this.processTxs(publicProcessor, txs);
- await this.prover.addTxs(processed);
- await db.close();
- this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
- blockNumber: block.number,
- blockHash: (await block.hash()).toString(),
- uuid: this.uuid,
- });
+ // Start block proving
+ await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
+
+ // Process public fns
+ const db = await this.createFork(BlockNumber(block.number - 1), l1ToL2Messages);
+ const config = PublicSimulatorConfig.from({
+ proverId: this.prover.getProverId().toField(),
+ skipFeeEnforcement: false,
+ collectDebugLogs: false,
+ collectHints: true,
+ maxDebugLogMemoryReads: 0,
+ collectStatistics: false,
+ });
+ const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
+ const processed = await this.processTxs(publicProcessor, txs);
+ await this.prover.addTxs(processed);
+ await db.close();
+ this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
+ blockNumber: block.number,
+ blockHash: (await block.hash()).toString(),
+ uuid: this.uuid,
+ });

- // Mark block as completed to pad it
- const expectedBlockHeader = block.getBlockHeader();
- await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
+ // Mark block as completed to pad it
+ const expectedBlockHeader = block.header;
+ await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
+ }
  });

  const executionTime = timer.ms();
@@ -226,16 +242,16 @@ export class EpochProvingJob implements Traceable {

  if (this.config.skipSubmitProof) {
  this.log.info(
- `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`,
+ `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`,
  );
  this.state = 'completed';
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
  return;
  }

  const success = await this.publisher.submitEpochProof({
- fromBlock,
- toBlock,
+ fromCheckpoint,
+ toCheckpoint,
  epochNumber,
  publicInputs,
  proof,
@@ -246,12 +262,12 @@ export class EpochProvingJob implements Traceable {
  throw new Error('Failed to submit epoch proof to L1');
  }

- this.log.info(`Submitted proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`, {
+ this.log.info(`Submitted proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`, {
  epochNumber,
  uuid: this.uuid,
  });
  this.state = 'completed';
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
  } catch (err: any) {
  if (err && err.name === 'HaltExecutionError') {
  this.log.warn(`Halted execution of epoch ${epochNumber} prover job`, {
@@ -277,7 +293,7 @@ export class EpochProvingJob implements Traceable {
  * Create a new db fork for tx processing, inserting all L1 to L2.
  * REFACTOR: The prover already spawns a db fork of its own for each block, so we may be able to do away with just one fork.
  */
- private async createFork(blockNumber: number, l1ToL2Messages: Fr[]) {
+ private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[]) {
  const db = await this.dbProvider.fork(blockNumber);
  const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
  l1ToL2Messages,
@@ -348,11 +364,9 @@ export class EpochProvingJob implements Traceable {
  async () => {
  const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
  const blockHashes = await Promise.all(blocks.map(block => block.hash()));
- const thisBlockHashes = await Promise.all(this.blocks.map(block => block.hash()));
- if (
- blocks.length !== this.blocks.length ||
- !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))
- ) {
+ const thisBlocks = this.checkpoints.flatMap(checkpoint => checkpoint.blocks);
+ const thisBlockHashes = await Promise.all(thisBlocks.map(block => block.hash()));
+ if (blocks.length !== thisBlocks.length || !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))) {
  this.log.warn('Epoch blocks changed underfoot', {
  uuid: this.uuid,
  epochNumber: this.epochNumber,
@@ -368,30 +382,18 @@ export class EpochProvingJob implements Traceable {
  this.log.verbose(`Scheduled epoch check for epoch ${this.epochNumber} every ${intervalMs}ms`);
  }

- /* Returns the header for the given block number based on the epoch proving job data. */
- private getBlockHeader(blockNumber: number) {
- const block = this.blocks.find(b => b.number === blockNumber);
- if (block) {
- return block.getBlockHeader();
- }
-
- if (blockNumber === Number(this.data.previousBlockHeader.getBlockNumber())) {
- return this.data.previousBlockHeader;
- }
-
- throw new Error(
- `Block header not found for block number ${blockNumber} (got ${this.blocks
- .map(b => b.number)
- .join(', ')} and previous header ${this.data.previousBlockHeader.getBlockNumber()})`,
- );
+ /* Returns the last block header in the previous checkpoint for all checkpoints in the epoch */
+ private gatherPreviousBlockHeaders() {
+ const lastBlocks = this.checkpoints.map(checkpoint => checkpoint.blocks.at(-1)!);
+ return [this.data.previousBlockHeader, ...lastBlocks.map(block => block.header).slice(0, -1)];
  }

- private getTxs(block: L2Block): Tx[] {
+ private getTxs(block: L2BlockNew): Tx[] {
  return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
  }

- private getL1ToL2Messages(block: L2Block) {
- return this.data.l1ToL2Messages[block.number];
+ private getL1ToL2Messages(checkpoint: Checkpoint) {
+ return this.data.l1ToL2Messages[checkpoint.number];
  }

  private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise<ProcessedTx[]> {
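
The change above moves the proving loop from iterating blocks directly to iterating checkpoints that each contain one or more blocks. A minimal sketch of that nesting, using simplified stand-in types rather than the package's own Checkpoint and L2BlockNew classes:

```ts
// Sketch only: simplified stand-in types that mirror the checkpoint -> blocks nesting
// used by the proving job above. Not the package's real Checkpoint/L2BlockNew shapes.
interface SketchBlock {
  number: number;
  txCount: number;
}

interface SketchCheckpoint {
  number: number;
  blocks: SketchBlock[];
}

// Same traversal order as the asyncPool body above: one checkpoint at a time,
// then every block inside that checkpoint.
function describeEpoch(checkpoints: SketchCheckpoint[]): string[] {
  return checkpoints.flatMap(checkpoint =>
    checkpoint.blocks.map(
      block => `checkpoint ${checkpoint.number} -> block ${block.number} (${block.txCount} txs)`,
    ),
  );
}

// Example: an epoch with two checkpoints and three blocks in total.
console.log(
  describeEpoch([
    { number: 10, blocks: [{ number: 100, txCount: 2 }] },
    { number: 11, blocks: [{ number: 101, txCount: 1 }, { number: 102, txCount: 4 }] },
  ]),
);
```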
package/src/metrics.ts CHANGED
@@ -21,6 +21,7 @@ import { formatEther, formatUnits } from 'viem';
  export class ProverNodeJobMetrics {
  proverEpochExecutionDuration: Histogram;
  provingJobDuration: Histogram;
+ provingJobCheckpoints: Gauge;
  provingJobBlocks: Gauge;
  provingJobTransactions: Gauge;

@@ -39,6 +40,10 @@ export class ProverNodeJobMetrics {
  unit: 's',
  valueType: ValueType.DOUBLE,
  });
+ this.provingJobCheckpoints = this.meter.createGauge(Metrics.PROVER_NODE_JOB_CHECKPOINTS, {
+ description: 'Number of checkpoints in a proven epoch',
+ valueType: ValueType.INT,
+ });
  this.provingJobBlocks = this.meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS, {
  description: 'Number of blocks in a proven epoch',
  valueType: ValueType.INT,
@@ -49,9 +54,16 @@ export class ProverNodeJobMetrics {
  });
  }

- public recordProvingJob(executionTimeMs: number, totalTimeMs: number, numBlocks: number, numTxs: number) {
+ public recordProvingJob(
+ executionTimeMs: number,
+ totalTimeMs: number,
+ numCheckpoints: number,
+ numBlocks: number,
+ numTxs: number,
+ ) {
  this.proverEpochExecutionDuration.record(Math.ceil(executionTimeMs));
  this.provingJobDuration.record(totalTimeMs / 1000);
+ this.provingJobCheckpoints.record(Math.floor(numCheckpoints));
  this.provingJobBlocks.record(Math.floor(numBlocks));
  this.provingJobTransactions.record(Math.floor(numTxs));
  }
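
For context, recordProvingJob now takes a checkpoint count between the timing values and the block/tx counts, matching the call sites in the proving job above. A minimal sketch of the new five-argument shape, modeled as a standalone function type because constructing ProverNodeJobMetrics needs an OpenTelemetry meter:

```ts
// Sketch only: models the expanded recordProvingJob signature shown above.
type RecordProvingJob = (
  executionTimeMs: number,
  totalTimeMs: number,
  numCheckpoints: number,
  numBlocks: number,
  numTxs: number,
) => void;

// Illustrative values; in the job these come from executionTime, timer.ms(),
// epochSizeCheckpoints, epochSizeBlocks and epochSizeTxs.
const record: RecordProvingJob = (executionTimeMs, totalTimeMs, numCheckpoints, numBlocks, numTxs) =>
  console.log({ executionTimeMs, totalTimeMs, numCheckpoints, numBlocks, numTxs });

record(120_000, 150_000, 4, 4, 37);
```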
@@ -1,4 +1,4 @@
- import { EpochNumber } from '@aztec/foundation/branded-types';
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
  import { createLogger } from '@aztec/foundation/log';
  import { RunningPromise } from '@aztec/foundation/running-promise';
  import { sleep } from '@aztec/foundation/sleep';
@@ -105,7 +105,7 @@ export class EpochMonitor implements Traceable {

  private async getEpochNumberToProve() {
  const lastBlockProven = await this.l2BlockSource.getProvenBlockNumber();
- const firstBlockToProve = lastBlockProven + 1;
+ const firstBlockToProve = BlockNumber(lastBlockProven + 1);
  const firstBlockHeaderToProve = await this.l2BlockSource.getBlockHeader(firstBlockToProve);
  if (!firstBlockHeaderToProve) {
  return { epochToProve: undefined, blockNumber: firstBlockToProve };
@@ -87,15 +87,15 @@ export class ProverNodePublisher {

  public async submitEpochProof(args: {
  epochNumber: EpochNumber;
- fromBlock: number;
- toBlock: number;
+ fromCheckpoint: CheckpointNumber;
+ toCheckpoint: CheckpointNumber;
  publicInputs: RootRollupPublicInputs;
  proof: Proof;
  batchedBlobInputs: BatchedBlob;
  attestations: ViemCommitteeAttestation[];
  }): Promise<boolean> {
- const { epochNumber, fromBlock, toBlock } = args;
- const ctx = { epochNumber, fromBlock, toBlock };
+ const { epochNumber, fromCheckpoint, toCheckpoint } = args;
+ const ctx = { epochNumber, fromCheckpoint, toCheckpoint };

  if (!this.interrupted) {
  const timer = new Timer();
@@ -139,45 +139,48 @@ export class ProverNodePublisher {
  this.log.error(`Rollup.submitEpochProof tx status failed ${txReceipt.transactionHash}`, undefined, ctx);
  }

- this.log.verbose('L2 block data syncing interrupted', ctx);
+ this.log.verbose('Checkpoint data syncing interrupted', ctx);
  return false;
  }

  private async validateEpochProofSubmission(args: {
- fromBlock: number;
- toBlock: number;
+ fromCheckpoint: CheckpointNumber;
+ toCheckpoint: CheckpointNumber;
  publicInputs: RootRollupPublicInputs;
  proof: Proof;
  batchedBlobInputs: BatchedBlob;
  attestations: ViemCommitteeAttestation[];
  }) {
- const { fromBlock, toBlock, publicInputs, batchedBlobInputs } = args;
+ const { fromCheckpoint, toCheckpoint, publicInputs, batchedBlobInputs } = args;

- // Check that the block numbers match the expected epoch to be proven
- // TODO: These are checkpoint numbers, not block numbers. Fix when we decouple them properly.
+ // Check that the checkpoint numbers match the expected epoch to be proven
  const { pending, proven } = await this.rollupContract.getTips();
- // Don't publish if proven is beyond our toBlock, pointless to do so
- if (proven > BigInt(toBlock)) {
- throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as proven block is ${proven}`);
+ // Don't publish if proven is beyond our toCheckpoint, pointless to do so
+ if (proven > BigInt(toCheckpoint)) {
+ throw new Error(
+ `Cannot submit epoch proof for ${fromCheckpoint}-${toCheckpoint} as proven checkpoint is ${proven}`,
+ );
  }
- // toBlock can't be greater than pending
- if (toBlock > pending) {
- throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as pending block is ${pending}`);
+ // toCheckpoint can't be greater than pending
+ if (toCheckpoint > pending) {
+ throw new Error(
+ `Cannot submit epoch proof for ${fromCheckpoint}-${toCheckpoint} as pending checkpoint is ${pending}`,
+ );
  }

- // Check the archive for the immediate block before the epoch
- const blockLog = await this.rollupContract.getCheckpoint(CheckpointNumber.fromBlockNumber(fromBlock - 1));
- if (publicInputs.previousArchiveRoot.toString() !== blockLog.archive) {
+ // Check the archive for the immediate checkpoint before the epoch
+ const checkpointLog = await this.rollupContract.getCheckpoint(CheckpointNumber(fromCheckpoint - 1));
+ if (publicInputs.previousArchiveRoot.toString() !== checkpointLog.archive) {
  throw new Error(
- `Previous archive root mismatch: ${publicInputs.previousArchiveRoot.toString()} !== ${blockLog.archive}`,
+ `Previous archive root mismatch: ${publicInputs.previousArchiveRoot.toString()} !== ${checkpointLog.archive}`,
  );
  }

- // Check the archive for the last block in the epoch
- const endBlockLog = await this.rollupContract.getCheckpoint(CheckpointNumber.fromBlockNumber(toBlock));
- if (publicInputs.endArchiveRoot.toString() !== endBlockLog.archive) {
+ // Check the archive for the last checkpoint in the epoch
+ const endCheckpointLog = await this.rollupContract.getCheckpoint(toCheckpoint);
+ if (publicInputs.endArchiveRoot.toString() !== endCheckpointLog.archive) {
  throw new Error(
- `End archive root mismatch: ${publicInputs.endArchiveRoot.toString()} !== ${endBlockLog.archive}`,
+ `End archive root mismatch: ${publicInputs.endArchiveRoot.toString()} !== ${endCheckpointLog.archive}`,
  );
  }

@@ -204,8 +207,8 @@ export class ProverNodePublisher {
  }

  private async sendSubmitEpochProofTx(args: {
- fromBlock: number;
- toBlock: number;
+ fromCheckpoint: CheckpointNumber;
+ toCheckpoint: CheckpointNumber;
  publicInputs: RootRollupPublicInputs;
  proof: Proof;
  batchedBlobInputs: BatchedBlob;
@@ -215,8 +218,8 @@ export class ProverNodePublisher {

  this.log.info(`Submitting epoch proof to L1 rollup contract`, {
  proofSize: args.proof.withoutPublicInputs().length,
- fromBlock: args.fromBlock,
- toBlock: args.toBlock,
+ fromCheckpoint: args.fromCheckpoint,
+ toCheckpoint: args.toCheckpoint,
  });
  const data = encodeFunctionData({
  abi: RollupAbi,
@@ -245,16 +248,16 @@ export class ProverNodePublisher {
  }

  private getEpochProofPublicInputsArgs(args: {
- fromBlock: number;
- toBlock: number;
+ fromCheckpoint: CheckpointNumber;
+ toCheckpoint: CheckpointNumber;
  publicInputs: RootRollupPublicInputs;
  batchedBlobInputs: BatchedBlob;
  attestations: ViemCommitteeAttestation[];
  }) {
  // Returns arguments for EpochProofLib.sol -> getEpochProofPublicInputs()
  return [
- BigInt(args.fromBlock) /*_start*/,
- BigInt(args.toBlock) /*_end*/,
+ BigInt(args.fromCheckpoint) /*_start*/,
+ BigInt(args.toCheckpoint) /*_end*/,
  {
  previousArchive: args.publicInputs.previousArchiveRoot.toString(),
  endArchive: args.publicInputs.endArchiveRoot.toString(),
@@ -270,8 +273,8 @@ export class ProverNodePublisher {
  }

  private getSubmitEpochProofArgs(args: {
- fromBlock: number;
- toBlock: number;
+ fromCheckpoint: CheckpointNumber;
+ toCheckpoint: CheckpointNumber;
  publicInputs: RootRollupPublicInputs;
  proof: Proof;
  batchedBlobInputs: BatchedBlob;
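
For context, submitEpochProof and its helpers now take a checkpoint range instead of a block range, and the range is forwarded to the rollup contract as the _start/_end bigints. A minimal sketch of that argument shape, with CheckpointNumber modeled as a plain number for illustration:

```ts
// Sketch only: models the checkpoint-ranged submission args introduced above.
// In the package, fromCheckpoint/toCheckpoint are branded CheckpointNumber values.
interface EpochProofRangeSketch {
  epochNumber: number;
  fromCheckpoint: number;
  toCheckpoint: number;
}

// Mirrors getEpochProofPublicInputsArgs above: the contract receives the range as bigints.
function toRollupRangeArgs(args: EpochProofRangeSketch): [bigint, bigint] {
  return [BigInt(args.fromCheckpoint) /* _start */, BigInt(args.toCheckpoint) /* _end */];
}

console.log(toRollupRangeArgs({ epochNumber: 7, fromCheckpoint: 21, toCheckpoint: 24 }));
```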
@@ -1,6 +1,6 @@
  import type { Archiver } from '@aztec/archiver';
  import type { RollupContract } from '@aztec/ethereum';
- import { EpochNumber } from '@aztec/foundation/branded-types';
+ import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
  import { assertRequired, compact, pick, sum } from '@aztec/foundation/collection';
  import { memoize } from '@aztec/foundation/decorators';
  import type { Fr } from '@aztec/foundation/fields';
@@ -9,7 +9,8 @@ import { DateProvider } from '@aztec/foundation/timer';
  import type { DataStoreConfig } from '@aztec/kv-store/config';
  import type { P2PClient } from '@aztec/p2p';
  import { PublicProcessorFactory } from '@aztec/simulator/server';
- import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
+ import type { L2BlockSource } from '@aztec/stdlib/block';
+ import type { Checkpoint } from '@aztec/stdlib/checkpoint';
  import type { ChainConfig } from '@aztec/stdlib/config';
  import type { ContractDataSource } from '@aztec/stdlib/contract';
  import { getProofSubmissionDeadlineTimestamp } from '@aztec/stdlib/epoch-helpers';
@@ -271,10 +272,13 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable

  // Gather all data for this epoch
  const epochData = await this.gatherEpochData(epochNumber);
-
- const fromBlock = epochData.blocks[0].number;
- const toBlock = epochData.blocks.at(-1)!.number;
- this.log.verbose(`Creating proving job for epoch ${epochNumber} for block range ${fromBlock} to ${toBlock}`);
+ const fromCheckpoint = epochData.checkpoints[0].number;
+ const toCheckpoint = epochData.checkpoints.at(-1)!.number;
+ const fromBlock = epochData.checkpoints[0].blocks[0].number;
+ const toBlock = epochData.checkpoints.at(-1)!.blocks.at(-1)!.number;
+ this.log.verbose(
+ `Creating proving job for epoch ${epochNumber} for checkpoint range ${fromCheckpoint} to ${toCheckpoint} and block range ${fromBlock} to ${toBlock}`,
+ );

  // Fast forward world state to right before the target block and get a fork
  await this.worldState.syncImmediate(toBlock);
@@ -289,7 +293,6 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
  // Set deadline for this job to run. It will abort if it takes too long.
  const deadlineTs = getProofSubmissionDeadlineTimestamp(epochNumber, await this.getL1Constants());
  const deadline = new Date(Number(deadlineTs) * 1000);
-
  const job = this.doCreateEpochProvingJob(epochData, deadline, publicProcessorFactory, this.publisher, opts);
  this.jobs.set(job.getId(), job);
  return job;
@@ -302,28 +305,30 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable

  @trackSpan('ProverNode.gatherEpochData', epochNumber => ({ [Attributes.EPOCH_NUMBER]: epochNumber }))
  private async gatherEpochData(epochNumber: EpochNumber): Promise<EpochProvingJobData> {
- const blocks = await this.gatherBlocks(epochNumber);
- const txArray = await this.gatherTxs(epochNumber, blocks);
+ const checkpoints = await this.gatherCheckpoints(epochNumber);
+ const txArray = await this.gatherTxs(epochNumber, checkpoints);
  const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
- const l1ToL2Messages = await this.gatherMessages(epochNumber, blocks);
- const previousBlockHeader = await this.gatherPreviousBlockHeader(epochNumber, blocks[0]);
- const [lastBlock] = await this.l2BlockSource.getPublishedBlocks(blocks.at(-1)!.number, 1);
- const attestations = lastBlock?.attestations ?? [];
+ const l1ToL2Messages = await this.gatherMessages(epochNumber, checkpoints);
+ const [firstBlock] = checkpoints[0].blocks;
+ const previousBlockHeader = await this.gatherPreviousBlockHeader(epochNumber, firstBlock.number - 1);
+ const [lastPublishedCheckpoint] = await this.l2BlockSource.getPublishedCheckpoints(checkpoints.at(-1)!.number, 1);
+ const attestations = lastPublishedCheckpoint?.attestations ?? [];

- return { blocks, txs, l1ToL2Messages, epochNumber, previousBlockHeader, attestations };
+ return { checkpoints, txs, l1ToL2Messages, epochNumber, previousBlockHeader, attestations };
  }

- private async gatherBlocks(epochNumber: EpochNumber) {
- const blocks = await this.l2BlockSource.getBlocksForEpoch(epochNumber);
- if (blocks.length === 0) {
+ private async gatherCheckpoints(epochNumber: EpochNumber) {
+ const checkpoints = await this.l2BlockSource.getCheckpointsForEpoch(epochNumber);
+ if (checkpoints.length === 0) {
  throw new EmptyEpochError(epochNumber);
  }
- return blocks;
+ return checkpoints;
  }

- private async gatherTxs(epochNumber: EpochNumber, blocks: L2Block[]) {
+ private async gatherTxs(epochNumber: EpochNumber, checkpoints: Checkpoint[]) {
  const deadline = new Date(this.dateProvider.now() + this.config.txGatheringTimeoutMs);
  const txProvider = this.p2pClient.getTxProvider();
+ const blocks = checkpoints.flatMap(checkpoint => checkpoint.blocks);
  const txsByBlock = await Promise.all(blocks.map(block => txProvider.getTxsForBlock(block, { deadline })));
  const txs = txsByBlock.map(({ txs }) => txs).flat();
  const missingTxs = txsByBlock.map(({ missingTxs }) => missingTxs).flat();
@@ -336,25 +341,26 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
  throw new Error(`Txs not found for epoch ${epochNumber}: ${missingTxs.map(hash => hash.toString()).join(', ')}`);
  }

- private async gatherMessages(epochNumber: EpochNumber, blocks: L2Block[]) {
- const messages = await Promise.all(blocks.map(b => this.l1ToL2MessageSource.getL1ToL2Messages(b.number)));
+ private async gatherMessages(epochNumber: EpochNumber, checkpoints: Checkpoint[]) {
+ const messages = await Promise.all(
+ checkpoints.map(c => this.l1ToL2MessageSource.getL1ToL2MessagesForCheckpoint(c.number)),
+ );
  const messageCount = sum(messages.map(m => m.length));
  this.log.verbose(`Gathered all ${messageCount} messages for epoch ${epochNumber}`, { epochNumber });
- const messagesByBlock: Record<number, Fr[]> = {};
- for (let i = 0; i < blocks.length; i++) {
- messagesByBlock[blocks[i].number] = messages[i];
+ const messagesByCheckpoint: Record<CheckpointNumber, Fr[]> = {};
+ for (let i = 0; i < checkpoints.length; i++) {
+ messagesByCheckpoint[checkpoints[i].number] = messages[i];
  }
- return messagesByBlock;
+ return messagesByCheckpoint;
  }

- private async gatherPreviousBlockHeader(epochNumber: EpochNumber, initialBlock: L2Block) {
- const previousBlockNumber = initialBlock.number - 1;
+ private async gatherPreviousBlockHeader(epochNumber: EpochNumber, previousBlockNumber: number) {
  const header = await (previousBlockNumber === 0
  ? this.worldState.getCommitted().getInitialHeader()
- : this.l2BlockSource.getBlockHeader(previousBlockNumber));
+ : this.l2BlockSource.getBlockHeader(BlockNumber(previousBlockNumber)));

  if (!header) {
- throw new Error(`Previous block header ${initialBlock.number} not found for proving epoch ${epochNumber}`);
+ throw new Error(`Previous block header ${previousBlockNumber} not found for proving epoch ${epochNumber}`);
  }

  this.log.verbose(`Gathered previous block header ${header.getBlockNumber()} for epoch ${epochNumber}`);