@aztec/prover-node 0.0.1-commit.9b94fc1 → 0.0.1-commit.c7c42ec

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dest/config.d.ts +5 -4
  2. package/dest/config.d.ts.map +1 -1
  3. package/dest/config.js +4 -3
  4. package/dest/factory.d.ts +4 -4
  5. package/dest/factory.d.ts.map +1 -1
  6. package/dest/factory.js +11 -7
  7. package/dest/job/epoch-proving-job-data.d.ts +7 -6
  8. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  9. package/dest/job/epoch-proving-job-data.js +24 -18
  10. package/dest/job/epoch-proving-job.d.ts +3 -3
  11. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  12. package/dest/job/epoch-proving-job.js +89 -84
  13. package/dest/metrics.d.ts +4 -3
  14. package/dest/metrics.d.ts.map +1 -1
  15. package/dest/metrics.js +7 -1
  16. package/dest/monitors/epoch-monitor.d.ts +1 -1
  17. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  18. package/dest/monitors/epoch-monitor.js +2 -1
  19. package/dest/prover-node-publisher.d.ts +7 -6
  20. package/dest/prover-node-publisher.d.ts.map +1 -1
  21. package/dest/prover-node-publisher.js +40 -35
  22. package/dest/prover-node.d.ts +3 -3
  23. package/dest/prover-node.d.ts.map +1 -1
  24. package/dest/prover-node.js +30 -26
  25. package/dest/prover-publisher-factory.d.ts +4 -2
  26. package/dest/prover-publisher-factory.d.ts.map +1 -1
  27. package/package.json +24 -24
  28. package/src/bin/run-failed-epoch.ts +1 -1
  29. package/src/config.ts +6 -4
  30. package/src/factory.ts +11 -8
  31. package/src/job/epoch-proving-job-data.ts +28 -23
  32. package/src/job/epoch-proving-job.ts +102 -100
  33. package/src/metrics.ts +14 -2
  34. package/src/monitors/epoch-monitor.ts +2 -2
  35. package/src/prover-node-publisher.ts +58 -49
  36. package/src/prover-node.ts +36 -32
  37. package/src/prover-publisher-factory.ts +3 -1
package/src/factory.ts CHANGED
@@ -1,8 +1,11 @@
1
1
  import { type Archiver, createArchiver } from '@aztec/archiver';
2
2
  import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
3
- import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
3
+ import { type BlobClientInterface, createBlobClient } from '@aztec/blob-client/client';
4
4
  import { EpochCache } from '@aztec/epoch-cache';
5
- import { L1TxUtils, PublisherManager, RollupContract, createEthereumChain } from '@aztec/ethereum';
5
+ import { createEthereumChain } from '@aztec/ethereum/chain';
6
+ import { RollupContract } from '@aztec/ethereum/contracts';
7
+ import { L1TxUtils } from '@aztec/ethereum/l1-tx-utils';
8
+ import { PublisherManager } from '@aztec/ethereum/publisher-manager';
6
9
  import { pick } from '@aztec/foundation/collection';
7
10
  import { type Logger, createLogger } from '@aztec/foundation/log';
8
11
  import { DateProvider } from '@aztec/foundation/timer';
@@ -33,7 +36,7 @@ export type ProverNodeDeps = {
33
36
  aztecNodeTxProvider?: Pick<AztecNode, 'getTxsByHash'>;
34
37
  archiver?: Archiver;
35
38
  publisherFactory?: ProverPublisherFactory;
36
- blobSinkClient?: BlobSinkClientInterface;
39
+ blobClient?: BlobClientInterface;
37
40
  broker?: ProvingJobBroker;
38
41
  l1TxUtils?: L1TxUtils;
39
42
  dateProvider?: DateProvider;
@@ -50,8 +53,8 @@ export async function createProverNode(
50
53
  const config = { ...userConfig };
51
54
  const telemetry = deps.telemetry ?? getTelemetryClient();
52
55
  const dateProvider = deps.dateProvider ?? new DateProvider();
53
- const blobSinkClient =
54
- deps.blobSinkClient ?? createBlobSinkClient(config, { logger: createLogger('prover-node:blob-sink:client') });
56
+ const blobClient =
57
+ deps.blobClient ?? createBlobClient(config, { logger: createLogger('prover-node:blob-client:client') });
55
58
  const log = deps.log ?? createLogger('prover-node');
56
59
 
57
60
  // Build a key store from file if given or from environment otherwise
@@ -102,7 +105,7 @@ export async function createProverNode(
102
105
 
103
106
  const archiver =
104
107
  deps.archiver ??
105
- (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
108
+ (await createArchiver(config, { blobClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
106
109
  log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`);
107
110
 
108
111
  const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false };
@@ -123,7 +126,7 @@ export async function createProverNode(
123
126
 
124
127
  const publicClient = createPublicClient({
125
128
  chain: chain.chainInfo,
126
- transport: fallback(config.l1RpcUrls.map((url: string) => http(url))),
129
+ transport: fallback(config.l1RpcUrls.map((url: string) => http(url, { batch: false }))),
127
130
  pollingInterval: config.viemPollingIntervalMS,
128
131
  });
129
132
 
@@ -148,7 +151,7 @@ export async function createProverNode(
148
151
 
149
152
  const proofVerifier = new QueuedIVCVerifier(
150
153
  config,
151
- config.realProofs
154
+ config.realProofs || config.debugForceTxProofVerification
152
155
  ? await BBCircuitVerifier.new(config)
153
156
  : new TestCircuitVerifier(config.proverTestVerificationDelayMs),
154
157
  );
@@ -1,40 +1,45 @@
1
- import { EpochNumber } from '@aztec/foundation/branded-types';
2
- import { Fr } from '@aztec/foundation/fields';
1
+ import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
4
- import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
4
+ import { CommitteeAttestation } from '@aztec/stdlib/block';
5
+ import { Checkpoint } from '@aztec/stdlib/checkpoint';
5
6
  import { BlockHeader, Tx } from '@aztec/stdlib/tx';
6
7
 
7
8
  /** All data from an epoch used in proving. */
8
9
  export type EpochProvingJobData = {
9
10
  epochNumber: EpochNumber;
10
- blocks: L2Block[];
11
+ checkpoints: Checkpoint[];
11
12
  txs: Map<string, Tx>;
12
- l1ToL2Messages: Record<number, Fr[]>;
13
+ l1ToL2Messages: Record<CheckpointNumber, Fr[]>;
13
14
  previousBlockHeader: BlockHeader;
14
15
  attestations: CommitteeAttestation[];
15
16
  };
16
17
 
17
18
  export function validateEpochProvingJobData(data: EpochProvingJobData) {
18
- if (data.blocks.length > 0 && data.previousBlockHeader.getBlockNumber() + 1 !== data.blocks[0].number) {
19
+ if (data.checkpoints.length === 0) {
20
+ throw new Error('No checkpoints to prove');
21
+ }
22
+
23
+ const firstBlockNumber = data.checkpoints[0].blocks[0].number;
24
+ const previousBlockNumber = data.previousBlockHeader.getBlockNumber();
25
+ if (previousBlockNumber + 1 !== firstBlockNumber) {
19
26
  throw new Error(
20
- `Initial block number ${
21
- data.blocks[0].number
22
- } does not match previous block header ${data.previousBlockHeader.getBlockNumber()}`,
27
+ `Initial block number ${firstBlockNumber} does not match previous block header ${previousBlockNumber}`,
23
28
  );
24
29
  }
25
30
 
26
- for (const blockNumber of data.blocks.map(block => block.number)) {
27
- if (!(blockNumber in data.l1ToL2Messages)) {
28
- throw new Error(`Missing L1 to L2 messages for block number ${blockNumber}`);
31
+ for (const checkpoint of data.checkpoints) {
32
+ if (!(checkpoint.number in data.l1ToL2Messages)) {
33
+ throw new Error(`Missing L1 to L2 messages for checkpoint number ${checkpoint.number}`);
29
34
  }
30
35
  }
31
36
  }
32
37
 
33
38
  export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer {
34
- const blocks = data.blocks.map(block => block.toBuffer());
39
+ const checkpoints = data.checkpoints.map(checkpoint => checkpoint.toBuffer());
35
40
  const txs = Array.from(data.txs.values()).map(tx => tx.toBuffer());
36
- const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages]) => [
37
- Number(blockNumber),
41
+ const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([checkpointNumber, messages]) => [
42
+ Number(checkpointNumber),
38
43
  messages.length,
39
44
  ...messages,
40
45
  ]);
@@ -43,8 +48,8 @@ export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer
43
48
  return serializeToBuffer(
44
49
  data.epochNumber,
45
50
  data.previousBlockHeader,
46
- blocks.length,
47
- ...blocks,
51
+ checkpoints.length,
52
+ ...checkpoints,
48
53
  txs.length,
49
54
  ...txs,
50
55
  l1ToL2Messages.length,
@@ -58,20 +63,20 @@ export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData
58
63
  const reader = BufferReader.asReader(buf);
59
64
  const epochNumber = EpochNumber(reader.readNumber());
60
65
  const previousBlockHeader = reader.readObject(BlockHeader);
61
- const blocks = reader.readVector(L2Block);
66
+ const checkpoints = reader.readVector(Checkpoint);
62
67
  const txArray = reader.readVector(Tx);
63
68
 
64
- const l1ToL2MessageBlockCount = reader.readNumber();
69
+ const l1ToL2MessageCheckpointCount = reader.readNumber();
65
70
  const l1ToL2Messages: Record<number, Fr[]> = {};
66
- for (let i = 0; i < l1ToL2MessageBlockCount; i++) {
67
- const blockNumber = reader.readNumber();
71
+ for (let i = 0; i < l1ToL2MessageCheckpointCount; i++) {
72
+ const checkpointNumber = CheckpointNumber(reader.readNumber());
68
73
  const messages = reader.readVector(Fr);
69
- l1ToL2Messages[blockNumber] = messages;
74
+ l1ToL2Messages[checkpointNumber] = messages;
70
75
  }
71
76
 
72
77
  const attestations = reader.readVector(CommitteeAttestation);
73
78
 
74
79
  const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
75
80
 
76
- return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages, attestations };
81
+ return { epochNumber, previousBlockHeader, checkpoints, txs, l1ToL2Messages, attestations };
77
82
  }
@@ -1,8 +1,8 @@
1
1
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
2
2
  import { asyncPool } from '@aztec/foundation/async-pool';
3
- import { EpochNumber } from '@aztec/foundation/branded-types';
3
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
4
4
  import { padArrayEnd } from '@aztec/foundation/collection';
5
- import { Fr } from '@aztec/foundation/fields';
5
+ import { Fr } from '@aztec/foundation/curves/bn254';
6
6
  import { createLogger } from '@aztec/foundation/log';
7
7
  import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
8
8
  import { Timer } from '@aztec/foundation/timer';
@@ -11,7 +11,8 @@ import { protocolContractsHash } from '@aztec/protocol-contracts';
11
11
  import { buildFinalBlobChallenges } from '@aztec/prover-client/helpers';
12
12
  import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/server';
13
13
  import { PublicSimulatorConfig } from '@aztec/stdlib/avm';
14
- import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
14
+ import type { L2BlockNew, L2BlockSource } from '@aztec/stdlib/block';
15
+ import type { Checkpoint } from '@aztec/stdlib/checkpoint';
15
16
  import {
16
17
  type EpochProver,
17
18
  type EpochProvingJobState,
@@ -91,8 +92,8 @@ export class EpochProvingJob implements Traceable {
91
92
  return this.data.epochNumber;
92
93
  }
93
94
 
94
- private get blocks() {
95
- return this.data.blocks;
95
+ private get checkpoints() {
96
+ return this.data.checkpoints;
96
97
  }
97
98
 
98
99
  private get txs() {
@@ -117,13 +118,21 @@ export class EpochProvingJob implements Traceable {
117
118
 
118
119
  const attestations = this.attestations.map(attestation => attestation.toViem());
119
120
  const epochNumber = this.epochNumber;
120
- const epochSizeBlocks = this.blocks.length;
121
- const epochSizeTxs = this.blocks.reduce((total, current) => total + current.body.txEffects.length, 0);
122
- const [fromBlock, toBlock] = [this.blocks[0].number, this.blocks.at(-1)!.number];
123
- this.log.info(`Starting epoch ${epochNumber} proving job with blocks ${fromBlock} to ${toBlock}`, {
121
+ const epochSizeCheckpoints = this.checkpoints.length;
122
+ const epochSizeBlocks = this.checkpoints.reduce((accum, checkpoint) => accum + checkpoint.blocks.length, 0);
123
+ const epochSizeTxs = this.checkpoints.reduce(
124
+ (accum, checkpoint) =>
125
+ accum + checkpoint.blocks.reduce((accumC, block) => accumC + block.body.txEffects.length, 0),
126
+ 0,
127
+ );
128
+ const fromCheckpoint = this.checkpoints[0].number;
129
+ const toCheckpoint = this.checkpoints.at(-1)!.number;
130
+ const fromBlock = this.checkpoints[0].blocks[0].number;
131
+ const toBlock = this.checkpoints.at(-1)!.blocks.at(-1)!.number;
132
+ this.log.info(`Starting epoch ${epochNumber} proving job with checkpoints ${fromCheckpoint} to ${toCheckpoint}`, {
124
133
  fromBlock,
125
134
  toBlock,
126
- epochSizeBlocks,
135
+ epochSizeTxs,
127
136
  epochNumber,
128
137
  uuid: this.uuid,
129
138
  });
@@ -134,86 +143,93 @@ export class EpochProvingJob implements Traceable {
134
143
  this.runPromise = promise;
135
144
 
136
145
  try {
137
- const blobFieldsPerCheckpoint = this.blocks.map(block => block.getCheckpointBlobFields());
146
+ const blobFieldsPerCheckpoint = this.checkpoints.map(checkpoint => checkpoint.toBlobFields());
138
147
  const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);
139
148
 
140
- // TODO(#17027): Enable multiple blocks per checkpoint.
141
- // Total number of checkpoints equals number of blocks because we currently build a checkpoint with only one block.
142
- const totalNumCheckpoints = epochSizeBlocks;
143
-
144
- this.prover.startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges);
149
+ this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges);
145
150
  await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values()));
146
151
 
147
- await asyncPool(this.config.parallelBlockLimit ?? 32, this.blocks, async block => {
148
- this.checkState();
152
+ // Everything in the epoch should have the same chainId and version.
153
+ const { chainId, version } = this.checkpoints[0].blocks[0].header.globalVariables;
149
154
 
150
- const globalVariables = block.header.globalVariables;
151
- const txs = this.getTxs(block);
152
- const l1ToL2Messages = this.getL1ToL2Messages(block);
153
- const previousHeader = this.getBlockHeader(block.number - 1)!;
154
-
155
- this.log.verbose(`Starting processing block ${block.number}`, {
156
- number: block.number,
157
- blockHash: (await block.hash()).toString(),
158
- lastArchive: block.header.lastArchive.root,
159
- noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
160
- nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
161
- publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
162
- previousHeader: previousHeader.hash(),
163
- uuid: this.uuid,
164
- ...globalVariables,
165
- });
155
+ const previousBlockHeaders = this.gatherPreviousBlockHeaders();
166
156
 
157
+ await asyncPool(this.config.parallelBlockLimit ?? 32, this.checkpoints, async checkpoint => {
158
+ this.checkState();
159
+
160
+ const checkpointIndex = checkpoint.number - fromCheckpoint;
167
161
  const checkpointConstants = CheckpointConstantData.from({
168
- chainId: globalVariables.chainId,
169
- version: globalVariables.version,
162
+ chainId,
163
+ version,
170
164
  vkTreeRoot: getVKTreeRoot(),
171
165
  protocolContractsHash: protocolContractsHash,
172
166
  proverId: this.prover.getProverId().toField(),
173
- slotNumber: globalVariables.slotNumber,
174
- coinbase: globalVariables.coinbase,
175
- feeRecipient: globalVariables.feeRecipient,
176
- gasFees: globalVariables.gasFees,
167
+ slotNumber: checkpoint.header.slotNumber,
168
+ coinbase: checkpoint.header.coinbase,
169
+ feeRecipient: checkpoint.header.feeRecipient,
170
+ gasFees: checkpoint.header.gasFees,
171
+ });
172
+ const previousHeader = previousBlockHeaders[checkpointIndex];
173
+ const l1ToL2Messages = this.getL1ToL2Messages(checkpoint);
174
+
175
+ this.log.verbose(`Starting processing checkpoint ${checkpoint.number}`, {
176
+ number: checkpoint.number,
177
+ checkpointHash: checkpoint.hash().toString(),
178
+ lastArchive: checkpoint.header.lastArchiveRoot,
179
+ previousHeader: previousHeader.hash(),
180
+ uuid: this.uuid,
177
181
  });
178
182
 
179
- // TODO(#17027): Enable multiple blocks per checkpoint.
180
- // Each checkpoint has only one block.
181
- const totalNumBlocks = 1;
182
- const checkpointIndex = block.number - fromBlock;
183
183
  await this.prover.startNewCheckpoint(
184
184
  checkpointIndex,
185
185
  checkpointConstants,
186
186
  l1ToL2Messages,
187
- totalNumBlocks,
187
+ checkpoint.blocks.length,
188
188
  previousHeader,
189
189
  );
190
190
 
191
- // Start block proving
192
- await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
191
+ for (const block of checkpoint.blocks) {
192
+ const globalVariables = block.header.globalVariables;
193
+ const txs = this.getTxs(block);
194
+
195
+ this.log.verbose(`Starting processing block ${block.number}`, {
196
+ number: block.number,
197
+ blockHash: (await block.hash()).toString(),
198
+ lastArchive: block.header.lastArchive.root,
199
+ noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
200
+ nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
201
+ publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
202
+ ...globalVariables,
203
+ numTxs: txs.length,
204
+ });
193
205
 
194
- // Process public fns
195
- const db = await this.createFork(block.number - 1, l1ToL2Messages);
196
- const config = PublicSimulatorConfig.from({
197
- proverId: this.prover.getProverId().toField(),
198
- skipFeeEnforcement: false,
199
- collectDebugLogs: false,
200
- collectHints: true,
201
- maxDebugLogMemoryReads: 0,
202
- collectStatistics: false,
203
- });
204
- const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
205
- const processed = await this.processTxs(publicProcessor, txs);
206
- await this.prover.addTxs(processed);
207
- await db.close();
208
- this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
209
- blockNumber: block.number,
210
- blockHash: (await block.hash()).toString(),
211
- uuid: this.uuid,
212
- });
206
+ // Start block proving
207
+ await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
208
+
209
+ // Process public fns
210
+ const db = await this.createFork(BlockNumber(block.number - 1), l1ToL2Messages);
211
+ const config = PublicSimulatorConfig.from({
212
+ proverId: this.prover.getProverId().toField(),
213
+ skipFeeEnforcement: false,
214
+ collectDebugLogs: false,
215
+ collectHints: true,
216
+ collectPublicInputs: true,
217
+ collectStatistics: false,
218
+ });
219
+ const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
220
+ const processed = await this.processTxs(publicProcessor, txs);
221
+ await this.prover.addTxs(processed);
222
+ await db.close();
223
+ this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
224
+ blockNumber: block.number,
225
+ blockHash: (await block.hash()).toString(),
226
+ uuid: this.uuid,
227
+ });
213
228
 
214
- // Mark block as completed to pad it
215
- const expectedBlockHeader = block.getBlockHeader();
216
- await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
229
+ // Mark block as completed to pad it
230
+ const expectedBlockHeader = block.header;
231
+ await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
232
+ }
217
233
  });
218
234
 
219
235
  const executionTime = timer.ms();
@@ -226,16 +242,16 @@ export class EpochProvingJob implements Traceable {
226
242
 
227
243
  if (this.config.skipSubmitProof) {
228
244
  this.log.info(
229
- `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`,
245
+ `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`,
230
246
  );
231
247
  this.state = 'completed';
232
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
248
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
233
249
  return;
234
250
  }
235
251
 
236
252
  const success = await this.publisher.submitEpochProof({
237
- fromBlock,
238
- toBlock,
253
+ fromCheckpoint,
254
+ toCheckpoint,
239
255
  epochNumber,
240
256
  publicInputs,
241
257
  proof,
@@ -246,12 +262,12 @@ export class EpochProvingJob implements Traceable {
246
262
  throw new Error('Failed to submit epoch proof to L1');
247
263
  }
248
264
 
249
- this.log.info(`Submitted proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`, {
265
+ this.log.info(`Submitted proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`, {
250
266
  epochNumber,
251
267
  uuid: this.uuid,
252
268
  });
253
269
  this.state = 'completed';
254
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
270
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
255
271
  } catch (err: any) {
256
272
  if (err && err.name === 'HaltExecutionError') {
257
273
  this.log.warn(`Halted execution of epoch ${epochNumber} prover job`, {
@@ -277,7 +293,7 @@ export class EpochProvingJob implements Traceable {
277
293
  * Create a new db fork for tx processing, inserting all L1 to L2.
278
294
  * REFACTOR: The prover already spawns a db fork of its own for each block, so we may be able to do away with just one fork.
279
295
  */
280
- private async createFork(blockNumber: number, l1ToL2Messages: Fr[]) {
296
+ private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[]) {
281
297
  const db = await this.dbProvider.fork(blockNumber);
282
298
  const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
283
299
  l1ToL2Messages,
@@ -348,11 +364,9 @@ export class EpochProvingJob implements Traceable {
348
364
  async () => {
349
365
  const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
350
366
  const blockHashes = await Promise.all(blocks.map(block => block.hash()));
351
- const thisBlockHashes = await Promise.all(this.blocks.map(block => block.hash()));
352
- if (
353
- blocks.length !== this.blocks.length ||
354
- !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))
355
- ) {
367
+ const thisBlocks = this.checkpoints.flatMap(checkpoint => checkpoint.blocks);
368
+ const thisBlockHashes = await Promise.all(thisBlocks.map(block => block.hash()));
369
+ if (blocks.length !== thisBlocks.length || !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))) {
356
370
  this.log.warn('Epoch blocks changed underfoot', {
357
371
  uuid: this.uuid,
358
372
  epochNumber: this.epochNumber,
@@ -368,30 +382,18 @@ export class EpochProvingJob implements Traceable {
368
382
  this.log.verbose(`Scheduled epoch check for epoch ${this.epochNumber} every ${intervalMs}ms`);
369
383
  }
370
384
 
371
- /* Returns the header for the given block number based on the epoch proving job data. */
372
- private getBlockHeader(blockNumber: number) {
373
- const block = this.blocks.find(b => b.number === blockNumber);
374
- if (block) {
375
- return block.getBlockHeader();
376
- }
377
-
378
- if (blockNumber === Number(this.data.previousBlockHeader.getBlockNumber())) {
379
- return this.data.previousBlockHeader;
380
- }
381
-
382
- throw new Error(
383
- `Block header not found for block number ${blockNumber} (got ${this.blocks
384
- .map(b => b.number)
385
- .join(', ')} and previous header ${this.data.previousBlockHeader.getBlockNumber()})`,
386
- );
385
+ /* Returns the last block header in the previous checkpoint for all checkpoints in the epoch */
386
+ private gatherPreviousBlockHeaders() {
387
+ const lastBlocks = this.checkpoints.map(checkpoint => checkpoint.blocks.at(-1)!);
388
+ return [this.data.previousBlockHeader, ...lastBlocks.map(block => block.header).slice(0, -1)];
387
389
  }
388
390
 
389
- private getTxs(block: L2Block): Tx[] {
391
+ private getTxs(block: L2BlockNew): Tx[] {
390
392
  return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
391
393
  }
392
394
 
393
- private getL1ToL2Messages(block: L2Block) {
394
- return this.data.l1ToL2Messages[block.number];
395
+ private getL1ToL2Messages(checkpoint: Checkpoint) {
396
+ return this.data.l1ToL2Messages[checkpoint.number];
395
397
  }
396
398
 
397
399
  private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise<ProcessedTx[]> {
package/src/metrics.ts CHANGED
@@ -1,4 +1,4 @@
1
- import type { RollupContract } from '@aztec/ethereum';
1
+ import type { RollupContract } from '@aztec/ethereum/contracts';
2
2
  import type { EthAddress } from '@aztec/foundation/eth-address';
3
3
  import { createLogger } from '@aztec/foundation/log';
4
4
  import type { L1PublishProofStats, L1PublishStats } from '@aztec/stdlib/stats';
@@ -21,6 +21,7 @@ import { formatEther, formatUnits } from 'viem';
21
21
  export class ProverNodeJobMetrics {
22
22
  proverEpochExecutionDuration: Histogram;
23
23
  provingJobDuration: Histogram;
24
+ provingJobCheckpoints: Gauge;
24
25
  provingJobBlocks: Gauge;
25
26
  provingJobTransactions: Gauge;
26
27
 
@@ -39,6 +40,10 @@ export class ProverNodeJobMetrics {
39
40
  unit: 's',
40
41
  valueType: ValueType.DOUBLE,
41
42
  });
43
+ this.provingJobCheckpoints = this.meter.createGauge(Metrics.PROVER_NODE_JOB_CHECKPOINTS, {
44
+ description: 'Number of checkpoints in a proven epoch',
45
+ valueType: ValueType.INT,
46
+ });
42
47
  this.provingJobBlocks = this.meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS, {
43
48
  description: 'Number of blocks in a proven epoch',
44
49
  valueType: ValueType.INT,
@@ -49,9 +54,16 @@ export class ProverNodeJobMetrics {
49
54
  });
50
55
  }
51
56
 
52
- public recordProvingJob(executionTimeMs: number, totalTimeMs: number, numBlocks: number, numTxs: number) {
57
+ public recordProvingJob(
58
+ executionTimeMs: number,
59
+ totalTimeMs: number,
60
+ numCheckpoints: number,
61
+ numBlocks: number,
62
+ numTxs: number,
63
+ ) {
53
64
  this.proverEpochExecutionDuration.record(Math.ceil(executionTimeMs));
54
65
  this.provingJobDuration.record(totalTimeMs / 1000);
66
+ this.provingJobCheckpoints.record(Math.floor(numCheckpoints));
55
67
  this.provingJobBlocks.record(Math.floor(numBlocks));
56
68
  this.provingJobTransactions.record(Math.floor(numTxs));
57
69
  }
@@ -1,4 +1,4 @@
1
- import { EpochNumber } from '@aztec/foundation/branded-types';
1
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
2
2
  import { createLogger } from '@aztec/foundation/log';
3
3
  import { RunningPromise } from '@aztec/foundation/running-promise';
4
4
  import { sleep } from '@aztec/foundation/sleep';
@@ -105,7 +105,7 @@ export class EpochMonitor implements Traceable {
105
105
 
106
106
  private async getEpochNumberToProve() {
107
107
  const lastBlockProven = await this.l2BlockSource.getProvenBlockNumber();
108
- const firstBlockToProve = lastBlockProven + 1;
108
+ const firstBlockToProve = BlockNumber(lastBlockProven + 1);
109
109
  const firstBlockHeaderToProve = await this.l2BlockSource.getBlockHeader(firstBlockToProve);
110
110
  if (!firstBlockHeaderToProve) {
111
111
  return { epochToProve: undefined, blockNumber: firstBlockToProve };