@aztec/prover-node 4.0.0-nightly.20250907 → 4.0.0-nightly.20260107

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dest/actions/download-epoch-proving-job.d.ts +4 -4
  2. package/dest/actions/index.d.ts +1 -1
  3. package/dest/actions/rerun-epoch-proving-job.d.ts +2 -2
  4. package/dest/actions/upload-epoch-proof-failure.d.ts +1 -1
  5. package/dest/bin/run-failed-epoch.d.ts +1 -1
  6. package/dest/bin/run-failed-epoch.js +1 -1
  7. package/dest/config.d.ts +7 -4
  8. package/dest/config.d.ts.map +1 -1
  9. package/dest/config.js +21 -15
  10. package/dest/factory.d.ts +2 -4
  11. package/dest/factory.d.ts.map +1 -1
  12. package/dest/factory.js +24 -13
  13. package/dest/index.d.ts +1 -1
  14. package/dest/job/epoch-proving-job-data.d.ts +8 -6
  15. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  16. package/dest/job/epoch-proving-job-data.js +25 -18
  17. package/dest/job/epoch-proving-job.d.ts +11 -16
  18. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  19. package/dest/job/epoch-proving-job.js +493 -77
  20. package/dest/metrics.d.ts +4 -3
  21. package/dest/metrics.d.ts.map +1 -1
  22. package/dest/metrics.js +9 -3
  23. package/dest/monitors/epoch-monitor.d.ts +5 -2
  24. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  25. package/dest/monitors/epoch-monitor.js +393 -10
  26. package/dest/monitors/index.d.ts +1 -1
  27. package/dest/prover-node-publisher.d.ts +9 -10
  28. package/dest/prover-node-publisher.d.ts.map +1 -1
  29. package/dest/prover-node-publisher.js +51 -53
  30. package/dest/prover-node.d.ts +8 -7
  31. package/dest/prover-node.d.ts.map +1 -1
  32. package/dest/prover-node.js +435 -50
  33. package/dest/prover-publisher-factory.d.ts +6 -2
  34. package/dest/prover-publisher-factory.d.ts.map +1 -1
  35. package/dest/prover-publisher-factory.js +6 -0
  36. package/dest/test/index.d.ts +1 -1
  37. package/dest/test/index.d.ts.map +1 -1
  38. package/package.json +26 -25
  39. package/src/bin/run-failed-epoch.ts +2 -2
  40. package/src/config.ts +33 -30
  41. package/src/factory.ts +26 -20
  42. package/src/job/epoch-proving-job-data.ts +31 -25
  43. package/src/job/epoch-proving-job.ts +138 -82
  44. package/src/metrics.ts +16 -4
  45. package/src/monitors/epoch-monitor.ts +15 -5
  46. package/src/prover-node-publisher.ts +74 -73
  47. package/src/prover-node.ts +52 -45
  48. package/src/prover-publisher-factory.ts +12 -1
package/src/job/epoch-proving-job.ts CHANGED
@@ -1,19 +1,25 @@
- import { BatchedBlob, Blob } from '@aztec/blob-lib';
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
  import { asyncPool } from '@aztec/foundation/async-pool';
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
  import { padArrayEnd } from '@aztec/foundation/collection';
- import { Fr } from '@aztec/foundation/fields';
+ import { Fr } from '@aztec/foundation/curves/bn254';
  import { createLogger } from '@aztec/foundation/log';
  import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
  import { Timer } from '@aztec/foundation/timer';
+ import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
+ import { protocolContractsHash } from '@aztec/protocol-contracts';
+ import { buildFinalBlobChallenges } from '@aztec/prover-client/helpers';
  import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/server';
- import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
+ import { PublicSimulatorConfig } from '@aztec/stdlib/avm';
+ import type { L2BlockNew, L2BlockSource } from '@aztec/stdlib/block';
+ import type { Checkpoint } from '@aztec/stdlib/checkpoint';
  import {
  type EpochProver,
  type EpochProvingJobState,
  EpochProvingJobTerminalState,
  type ForkMerkleTreeOperations,
  } from '@aztec/stdlib/interfaces/server';
+ import { CheckpointConstantData } from '@aztec/stdlib/rollup';
  import { MerkleTreeId } from '@aztec/stdlib/trees';
  import type { ProcessedTx, Tx } from '@aztec/stdlib/tx';
  import { Attributes, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';
@@ -24,6 +30,12 @@ import type { ProverNodeJobMetrics } from '../metrics.js';
  import type { ProverNodePublisher } from '../prover-node-publisher.js';
  import { type EpochProvingJobData, validateEpochProvingJobData } from './epoch-proving-job-data.js';

+ export type EpochProvingJobOptions = {
+ parallelBlockLimit?: number;
+ skipEpochCheck?: boolean;
+ skipSubmitProof?: boolean;
+ };
+
  /**
  * Job that grabs a range of blocks from the unfinalized chain from L1, gets their txs given their hashes,
  * re-executes their public calls, generates a rollup proof, and submits it to L1. This job will update the
@@ -49,7 +61,7 @@ export class EpochProvingJob implements Traceable {
  private l2BlockSource: L2BlockSource | undefined,
  private metrics: ProverNodeJobMetrics,
  private deadline: Date | undefined,
- private config: { parallelBlockLimit?: number; skipEpochCheck?: boolean },
+ private config: EpochProvingJobOptions,
  ) {
  validateEpochProvingJobData(data);
  this.uuid = crypto.randomUUID();
@@ -64,7 +76,7 @@ export class EpochProvingJob implements Traceable {
  return this.state;
  }

- public getEpochNumber(): bigint {
+ public getEpochNumber(): EpochNumber {
  return this.data.epochNumber;
  }

@@ -80,8 +92,8 @@ export class EpochProvingJob implements Traceable {
  return this.data.epochNumber;
  }

- private get blocks() {
- return this.data.blocks;
+ private get checkpoints() {
+ return this.data.checkpoints;
  }

  private get txs() {
@@ -96,7 +108,7 @@ export class EpochProvingJob implements Traceable {
  * Proves the given epoch and submits the proof to L1.
  */
  @trackSpan('EpochProvingJob.run', function () {
- return { [Attributes.EPOCH_NUMBER]: Number(this.data.epochNumber) };
+ return { [Attributes.EPOCH_NUMBER]: this.data.epochNumber };
  })
  public async run() {
  this.scheduleDeadlineStop();
@@ -105,14 +117,22 @@ export class EpochProvingJob implements Traceable {
  }

  const attestations = this.attestations.map(attestation => attestation.toViem());
- const epochNumber = Number(this.epochNumber);
- const epochSizeBlocks = this.blocks.length;
- const epochSizeTxs = this.blocks.reduce((total, current) => total + current.body.txEffects.length, 0);
- const [fromBlock, toBlock] = [this.blocks[0].number, this.blocks.at(-1)!.number];
- this.log.info(`Starting epoch ${epochNumber} proving job with blocks ${fromBlock} to ${toBlock}`, {
+ const epochNumber = this.epochNumber;
+ const epochSizeCheckpoints = this.checkpoints.length;
+ const epochSizeBlocks = this.checkpoints.reduce((accum, checkpoint) => accum + checkpoint.blocks.length, 0);
+ const epochSizeTxs = this.checkpoints.reduce(
+ (accum, checkpoint) =>
+ accum + checkpoint.blocks.reduce((accumC, block) => accumC + block.body.txEffects.length, 0),
+ 0,
+ );
+ const fromCheckpoint = this.checkpoints[0].number;
+ const toCheckpoint = this.checkpoints.at(-1)!.number;
+ const fromBlock = this.checkpoints[0].blocks[0].number;
+ const toBlock = this.checkpoints.at(-1)!.blocks.at(-1)!.number;
+ this.log.info(`Starting epoch ${epochNumber} proving job with checkpoints ${fromCheckpoint} to ${toCheckpoint}`, {
  fromBlock,
  toBlock,
- epochSizeBlocks,
+ epochSizeTxs,
  epochNumber,
  uuid: this.uuid,
  });
@@ -123,51 +143,93 @@ export class EpochProvingJob implements Traceable {
  this.runPromise = promise;

  try {
- const allBlobs = (
- await Promise.all(this.blocks.map(async block => await Blob.getBlobsPerBlock(block.body.toBlobFields())))
- ).flat();
+ const blobFieldsPerCheckpoint = this.checkpoints.map(checkpoint => checkpoint.toBlobFields());
+ const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);
+
+ this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges);
+ await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values()));

- const finalBlobBatchingChallenges = await BatchedBlob.precomputeBatchedBlobChallenges(allBlobs);
- this.prover.startNewEpoch(epochNumber, fromBlock, epochSizeBlocks, finalBlobBatchingChallenges);
- await this.prover.startTubeCircuits(Array.from(this.txs.values()));
+ // Everything in the epoch should have the same chainId and version.
+ const { chainId, version } = this.checkpoints[0].blocks[0].header.globalVariables;

- await asyncPool(this.config.parallelBlockLimit ?? 32, this.blocks, async block => {
+ const previousBlockHeaders = this.gatherPreviousBlockHeaders();
+
+ await asyncPool(this.config.parallelBlockLimit ?? 32, this.checkpoints, async checkpoint => {
  this.checkState();

- const globalVariables = block.header.globalVariables;
- const txs = this.getTxs(block);
- const l1ToL2Messages = this.getL1ToL2Messages(block);
- const previousHeader = this.getBlockHeader(block.number - 1)!;
-
- this.log.verbose(`Starting processing block ${block.number}`, {
- number: block.number,
- blockHash: (await block.hash()).toString(),
- lastArchive: block.header.lastArchive.root,
- noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
- nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
- publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
- previousHeader: previousHeader.hash(),
- uuid: this.uuid,
- ...globalVariables,
+ const checkpointIndex = checkpoint.number - fromCheckpoint;
+ const checkpointConstants = CheckpointConstantData.from({
+ chainId,
+ version,
+ vkTreeRoot: getVKTreeRoot(),
+ protocolContractsHash: protocolContractsHash,
+ proverId: this.prover.getProverId().toField(),
+ slotNumber: checkpoint.header.slotNumber,
+ coinbase: checkpoint.header.coinbase,
+ feeRecipient: checkpoint.header.feeRecipient,
+ gasFees: checkpoint.header.gasFees,
  });
+ const previousHeader = previousBlockHeaders[checkpointIndex];
+ const l1ToL2Messages = this.getL1ToL2Messages(checkpoint);

- // Start block proving
- await this.prover.startNewBlock(globalVariables, l1ToL2Messages, previousHeader);
-
- // Process public fns
- const db = await this.createFork(block.number - 1, l1ToL2Messages);
- const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, true);
- const processed = await this.processTxs(publicProcessor, txs);
- await this.prover.addTxs(processed);
- await db.close();
- this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
- blockNumber: block.number,
- blockHash: (await block.hash()).toString(),
+ this.log.verbose(`Starting processing checkpoint ${checkpoint.number}`, {
+ number: checkpoint.number,
+ checkpointHash: checkpoint.hash().toString(),
+ lastArchive: checkpoint.header.lastArchiveRoot,
+ previousHeader: previousHeader.hash(),
  uuid: this.uuid,
  });

- // Mark block as completed to pad it
- await this.prover.setBlockCompleted(block.number, block.header);
+ await this.prover.startNewCheckpoint(
+ checkpointIndex,
+ checkpointConstants,
+ l1ToL2Messages,
+ checkpoint.blocks.length,
+ previousHeader,
+ );
+
+ for (const block of checkpoint.blocks) {
+ const globalVariables = block.header.globalVariables;
+ const txs = this.getTxs(block);
+
+ this.log.verbose(`Starting processing block ${block.number}`, {
+ number: block.number,
+ blockHash: (await block.hash()).toString(),
+ lastArchive: block.header.lastArchive.root,
+ noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
+ nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
+ publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
+ ...globalVariables,
+ numTxs: txs.length,
+ });
+
+ // Start block proving
+ await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
+
+ // Process public fns
+ const db = await this.createFork(BlockNumber(block.number - 1), l1ToL2Messages);
+ const config = PublicSimulatorConfig.from({
+ proverId: this.prover.getProverId().toField(),
+ skipFeeEnforcement: false,
+ collectDebugLogs: false,
+ collectHints: true,
+ collectPublicInputs: true,
+ collectStatistics: false,
+ });
+ const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
+ const processed = await this.processTxs(publicProcessor, txs);
+ await this.prover.addTxs(processed);
+ await db.close();
+ this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
+ blockNumber: block.number,
+ blockHash: (await block.hash()).toString(),
+ uuid: this.uuid,
+ });
+
+ // Mark block as completed to pad it
+ const expectedBlockHeader = block.header;
+ await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
+ }
  });

  const executionTime = timer.ms();
@@ -178,9 +240,18 @@ export class EpochProvingJob implements Traceable {

  this.progressState('publishing-proof');

+ if (this.config.skipSubmitProof) {
+ this.log.info(
+ `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`,
+ );
+ this.state = 'completed';
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
+ return;
+ }
+
  const success = await this.publisher.submitEpochProof({
- fromBlock,
- toBlock,
+ fromCheckpoint,
+ toCheckpoint,
  epochNumber,
  publicInputs,
  proof,
@@ -191,12 +262,12 @@ export class EpochProvingJob implements Traceable {
  throw new Error('Failed to submit epoch proof to L1');
  }

- this.log.info(`Submitted proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`, {
+ this.log.info(`Submitted proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`, {
  epochNumber,
  uuid: this.uuid,
  });
  this.state = 'completed';
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
  } catch (err: any) {
  if (err && err.name === 'HaltExecutionError') {
  this.log.warn(`Halted execution of epoch ${epochNumber} prover job`, {
@@ -222,9 +293,9 @@ export class EpochProvingJob implements Traceable {
  * Create a new db fork for tx processing, inserting all L1 to L2.
  * REFACTOR: The prover already spawns a db fork of its own for each block, so we may be able to do away with just one fork.
  */
- private async createFork(blockNumber: number, l1ToL2Messages: Fr[]) {
+ private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[]) {
  const db = await this.dbProvider.fork(blockNumber);
- const l1ToL2MessagesPadded = padArrayEnd(
+ const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
  l1ToL2Messages,
  Fr.ZERO,
  NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
@@ -232,7 +303,7 @@ export class EpochProvingJob implements Traceable {
  );
  this.log.verbose(`Creating fork at ${blockNumber} with ${l1ToL2Messages.length} L1 to L2 messages`, {
  blockNumber,
- l1ToL2Messages: l1ToL2MessagesPadded.map(m => m.toString()),
+ l1ToL2Messages: l1ToL2Messages.map(m => m.toString()),
  });
  await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
  return db;
@@ -252,7 +323,6 @@ export class EpochProvingJob implements Traceable {
  public async stop(state: EpochProvingJobTerminalState = 'stopped') {
  this.state = state;
  this.prover.cancel();
- // TODO(palla/prover): Stop the publisher as well
  if (this.runPromise) {
  await this.runPromise;
  }
@@ -294,11 +364,9 @@ export class EpochProvingJob implements Traceable {
  async () => {
  const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
  const blockHashes = await Promise.all(blocks.map(block => block.hash()));
- const thisBlockHashes = await Promise.all(this.blocks.map(block => block.hash()));
- if (
- blocks.length !== this.blocks.length ||
- !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))
- ) {
+ const thisBlocks = this.checkpoints.flatMap(checkpoint => checkpoint.blocks);
+ const thisBlockHashes = await Promise.all(thisBlocks.map(block => block.hash()));
+ if (blocks.length !== thisBlocks.length || !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))) {
  this.log.warn('Epoch blocks changed underfoot', {
  uuid: this.uuid,
  epochNumber: this.epochNumber,
@@ -314,30 +382,18 @@ export class EpochProvingJob implements Traceable {
  this.log.verbose(`Scheduled epoch check for epoch ${this.epochNumber} every ${intervalMs}ms`);
  }

- /* Returns the header for the given block number based on the epoch proving job data. */
- private getBlockHeader(blockNumber: number) {
- const block = this.blocks.find(b => b.number === blockNumber);
- if (block) {
- return block.header;
- }
-
- if (blockNumber === Number(this.data.previousBlockHeader.getBlockNumber())) {
- return this.data.previousBlockHeader;
- }
-
- throw new Error(
- `Block header not found for block number ${blockNumber} (got ${this.blocks
- .map(b => b.number)
- .join(', ')} and previous header ${this.data.previousBlockHeader.getBlockNumber()})`,
- );
+ /* Returns the last block header in the previous checkpoint for all checkpoints in the epoch */
+ private gatherPreviousBlockHeaders() {
+ const lastBlocks = this.checkpoints.map(checkpoint => checkpoint.blocks.at(-1)!);
+ return [this.data.previousBlockHeader, ...lastBlocks.map(block => block.header).slice(0, -1)];
  }

- private getTxs(block: L2Block): Tx[] {
+ private getTxs(block: L2BlockNew): Tx[] {
  return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
  }

- private getL1ToL2Messages(block: L2Block) {
- return this.data.l1ToL2Messages[block.number];
+ private getL1ToL2Messages(checkpoint: Checkpoint) {
+ return this.data.l1ToL2Messages[checkpoint.number];
  }

  private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise<ProcessedTx[]> {
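The run loop above now batches work per checkpoint and takes its configuration through the exported EpochProvingJobOptions type instead of an inline object. As an illustration only (not code from the package), the options bag handed to the job's constructor would look like this, with hypothetical values:

  // Hypothetical values for the options type exported by epoch-proving-job.ts above.
  const options: { parallelBlockLimit?: number; skipEpochCheck?: boolean; skipSubmitProof?: boolean } = {
    parallelBlockLimit: 16, // cap on checkpoints proven concurrently; the job defaults to 32
    skipEpochCheck: false,  // keep the periodic check that the epoch's blocks have not changed underfoot
    skipSubmitProof: true,  // prove the epoch but drop the proof instead of publishing it to L1
  };

With skipSubmitProof set, run() records metrics and marks the job completed without calling publisher.submitEpochProof, as the -178,9 hunk shows.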
package/src/metrics.ts CHANGED
@@ -1,4 +1,4 @@
- import type { RollupContract } from '@aztec/ethereum';
+ import type { RollupContract } from '@aztec/ethereum/contracts';
  import type { EthAddress } from '@aztec/foundation/eth-address';
  import { createLogger } from '@aztec/foundation/log';
  import type { L1PublishProofStats, L1PublishStats } from '@aztec/stdlib/stats';
@@ -21,6 +21,7 @@ import { formatEther, formatUnits } from 'viem';
  export class ProverNodeJobMetrics {
  proverEpochExecutionDuration: Histogram;
  provingJobDuration: Histogram;
+ provingJobCheckpoints: Gauge;
  provingJobBlocks: Gauge;
  provingJobTransactions: Gauge;

@@ -39,6 +40,10 @@
  unit: 's',
  valueType: ValueType.DOUBLE,
  });
+ this.provingJobCheckpoints = this.meter.createGauge(Metrics.PROVER_NODE_JOB_CHECKPOINTS, {
+ description: 'Number of checkpoints in a proven epoch',
+ valueType: ValueType.INT,
+ });
  this.provingJobBlocks = this.meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS, {
  description: 'Number of blocks in a proven epoch',
  valueType: ValueType.INT,
@@ -49,9 +54,16 @@
  });
  }

- public recordProvingJob(executionTimeMs: number, totalTimeMs: number, numBlocks: number, numTxs: number) {
+ public recordProvingJob(
+ executionTimeMs: number,
+ totalTimeMs: number,
+ numCheckpoints: number,
+ numBlocks: number,
+ numTxs: number,
+ ) {
  this.proverEpochExecutionDuration.record(Math.ceil(executionTimeMs));
  this.provingJobDuration.record(totalTimeMs / 1000);
+ this.provingJobCheckpoints.record(Math.floor(numCheckpoints));
  this.provingJobBlocks.record(Math.floor(numBlocks));
  this.provingJobTransactions.record(Math.floor(numTxs));
  }
@@ -91,13 +103,13 @@ export class ProverNodeRewardsMetrics {
  }

  private observe = async (observer: BatchObservableResult): Promise<void> => {
- const epoch = await this.rollup.getEpochNumber();
+ const epoch = await this.rollup.getCurrentEpochNumber();

  if (epoch > this.proofSubmissionEpochs) {
  // look at the prev epoch so that we get an accurate value, after proof submission window has closed
  // For example, if proof submission window is 1 epoch, and we are in epoch 2, we should be looking at epoch 0.
  // Similarly, if the proof submission window is 0, and we are in epoch 1, we should be looking at epoch 0.
- const closedEpoch = epoch - BigInt(this.proofSubmissionEpochs) - 1n;
+ const closedEpoch = BigInt(epoch) - BigInt(this.proofSubmissionEpochs) - 1n;
  const rewards = await this.rollup.getSpecificProverRewardsForEpoch(closedEpoch, this.coinbase);

  const fmt = parseFloat(formatUnits(rewards, 18));
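recordProvingJob gains a numCheckpoints argument, recorded on the new checkpoints gauge alongside the existing block and transaction gauges. A hedged sketch of a call site follows; the metrics object declaration and the numbers are illustrative, not taken from the package:

  // Sketch only: the widened recordProvingJob signature shown in the -49,9 hunk above.
  declare const metrics: {
    recordProvingJob(executionTimeMs: number, totalTimeMs: number, numCheckpoints: number, numBlocks: number, numTxs: number): void;
  };
  metrics.recordProvingJob(
    120_000, // executionTimeMs: execution plus proving time
    150_000, // totalTimeMs: includes proof publication
    4,       // numCheckpoints: recorded via the provingJobCheckpoints gauge
    32,      // numBlocks
    900,     // numTxs
  );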
package/src/monitors/epoch-monitor.ts CHANGED
@@ -1,5 +1,7 @@
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
  import { createLogger } from '@aztec/foundation/log';
  import { RunningPromise } from '@aztec/foundation/running-promise';
+ import { sleep } from '@aztec/foundation/sleep';
  import type { L2BlockSource } from '@aztec/stdlib/block';
  import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
  import {
@@ -11,7 +13,7 @@ import {
  } from '@aztec/telemetry-client';

  export interface EpochMonitorHandler {
- handleEpochReadyToProve(epochNumber: bigint): Promise<boolean>;
+ handleEpochReadyToProve(epochNumber: EpochNumber): Promise<boolean>;
  }

  /**
@@ -31,21 +33,24 @@ export class EpochMonitor implements Traceable {
  public readonly tracer: Tracer;

  private handler: EpochMonitorHandler | undefined;
- private latestEpochNumber: bigint | undefined;
+ private latestEpochNumber: EpochNumber | undefined;

  constructor(
  private readonly l2BlockSource: L2BlockSource,
  private readonly l1Constants: Pick<L1RollupConstants, 'epochDuration'>,
- private options: { pollingIntervalMs: number },
+ private options: { pollingIntervalMs: number; provingDelayMs?: number },
  telemetry: TelemetryClient = getTelemetryClient(),
  ) {
  this.tracer = telemetry.getTracer('EpochMonitor');
  this.runningPromise = new RunningPromise(this.work.bind(this), this.log, this.options.pollingIntervalMs);
+ if (this.options.provingDelayMs) {
+ this.log.warn(`Prover node epoch monitor running with delay of ${this.options.provingDelayMs}ms`);
+ }
  }

  public static async create(
  l2BlockSource: L2BlockSource,
- options: { pollingIntervalMs: number },
+ options: { pollingIntervalMs: number; provingDelayMs?: number },
  telemetry: TelemetryClient = getTelemetryClient(),
  ): Promise<EpochMonitor> {
  const l1Constants = await l2BlockSource.getL1Constants();
@@ -87,6 +92,11 @@
  return;
  }

+ if (this.options.provingDelayMs) {
+ this.log.debug(`Waiting ${this.options.provingDelayMs}ms before proving epoch ${epochToProve}`);
+ await sleep(this.options.provingDelayMs);
+ }
+
  this.log.debug(`Epoch ${epochToProve} is ready to be proven`);
  if (await this.handler?.handleEpochReadyToProve(epochToProve)) {
  this.latestEpochNumber = epochToProve;
@@ -95,7 +105,7 @@

  private async getEpochNumberToProve() {
  const lastBlockProven = await this.l2BlockSource.getProvenBlockNumber();
- const firstBlockToProve = lastBlockProven + 1;
+ const firstBlockToProve = BlockNumber(lastBlockProven + 1);
  const firstBlockHeaderToProve = await this.l2BlockSource.getBlockHeader(firstBlockToProve);
  if (!firstBlockHeaderToProve) {
  return { epochToProve: undefined, blockNumber: firstBlockToProve };
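The EpochMonitor options now carry an optional provingDelayMs, which the work loop sleeps for before announcing an epoch as ready to prove. A hedged example of the options shape, with illustrative values only:

  // Illustrative only: the options accepted by the EpochMonitor constructor and create() after this change.
  const monitorOptions: { pollingIntervalMs: number; provingDelayMs?: number } = {
    pollingIntervalMs: 10_000, // how often the monitor polls for a provable epoch
    provingDelayMs: 60_000,    // optional pause before handleEpochReadyToProve is invoked
  };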