@aztec/prover-node 0.0.1-commit.9b94fc1 → 0.0.1-commit.9ee6fcc6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dest/actions/download-epoch-proving-job.d.ts +1 -1
  2. package/dest/actions/rerun-epoch-proving-job.d.ts +4 -3
  3. package/dest/actions/rerun-epoch-proving-job.d.ts.map +1 -1
  4. package/dest/actions/rerun-epoch-proving-job.js +2 -2
  5. package/dest/actions/upload-epoch-proof-failure.d.ts +2 -2
  6. package/dest/actions/upload-epoch-proof-failure.d.ts.map +1 -1
  7. package/dest/bin/run-failed-epoch.js +5 -2
  8. package/dest/config.d.ts +8 -10
  9. package/dest/config.d.ts.map +1 -1
  10. package/dest/config.js +19 -21
  11. package/dest/factory.d.ts +20 -16
  12. package/dest/factory.d.ts.map +1 -1
  13. package/dest/factory.js +47 -62
  14. package/dest/index.d.ts +2 -1
  15. package/dest/index.d.ts.map +1 -1
  16. package/dest/index.js +1 -0
  17. package/dest/job/epoch-proving-job-data.d.ts +7 -6
  18. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  19. package/dest/job/epoch-proving-job-data.js +24 -18
  20. package/dest/job/epoch-proving-job.d.ts +5 -4
  21. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  22. package/dest/job/epoch-proving-job.js +511 -108
  23. package/dest/metrics.d.ts +14 -3
  24. package/dest/metrics.d.ts.map +1 -1
  25. package/dest/metrics.js +54 -97
  26. package/dest/monitors/epoch-monitor.d.ts +1 -1
  27. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  28. package/dest/monitors/epoch-monitor.js +8 -18
  29. package/dest/prover-node-publisher.d.ts +12 -10
  30. package/dest/prover-node-publisher.d.ts.map +1 -1
  31. package/dest/prover-node-publisher.js +46 -40
  32. package/dest/prover-node.d.ts +22 -12
  33. package/dest/prover-node.d.ts.map +1 -1
  34. package/dest/prover-node.js +442 -55
  35. package/dest/prover-publisher-factory.d.ts +10 -6
  36. package/dest/prover-publisher-factory.d.ts.map +1 -1
  37. package/dest/prover-publisher-factory.js +7 -5
  38. package/package.json +26 -25
  39. package/src/actions/rerun-epoch-proving-job.ts +5 -3
  40. package/src/actions/upload-epoch-proof-failure.ts +1 -1
  41. package/src/bin/run-failed-epoch.ts +5 -2
  42. package/src/config.ts +27 -33
  43. package/src/factory.ts +80 -104
  44. package/src/index.ts +1 -0
  45. package/src/job/epoch-proving-job-data.ts +28 -23
  46. package/src/job/epoch-proving-job.ts +151 -111
  47. package/src/metrics.ts +64 -81
  48. package/src/monitors/epoch-monitor.ts +6 -14
  49. package/src/prover-node-publisher.ts +67 -55
  50. package/src/prover-node.ts +54 -42
  51. package/src/prover-publisher-factory.ts +19 -11
@@ -1,40 +1,45 @@
1
- import { EpochNumber } from '@aztec/foundation/branded-types';
2
- import { Fr } from '@aztec/foundation/fields';
1
+ import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
2
+ import { Fr } from '@aztec/foundation/curves/bn254';
3
3
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
4
- import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
4
+ import { CommitteeAttestation } from '@aztec/stdlib/block';
5
+ import { Checkpoint } from '@aztec/stdlib/checkpoint';
5
6
  import { BlockHeader, Tx } from '@aztec/stdlib/tx';
6
7
 
7
8
  /** All data from an epoch used in proving. */
8
9
  export type EpochProvingJobData = {
9
10
  epochNumber: EpochNumber;
10
- blocks: L2Block[];
11
+ checkpoints: Checkpoint[];
11
12
  txs: Map<string, Tx>;
12
- l1ToL2Messages: Record<number, Fr[]>;
13
+ l1ToL2Messages: Record<CheckpointNumber, Fr[]>;
13
14
  previousBlockHeader: BlockHeader;
14
15
  attestations: CommitteeAttestation[];
15
16
  };
16
17
 
17
18
  export function validateEpochProvingJobData(data: EpochProvingJobData) {
18
- if (data.blocks.length > 0 && data.previousBlockHeader.getBlockNumber() + 1 !== data.blocks[0].number) {
19
+ if (data.checkpoints.length === 0) {
20
+ throw new Error('No checkpoints to prove');
21
+ }
22
+
23
+ const firstBlockNumber = data.checkpoints[0].blocks[0].number;
24
+ const previousBlockNumber = data.previousBlockHeader.getBlockNumber();
25
+ if (previousBlockNumber + 1 !== firstBlockNumber) {
19
26
  throw new Error(
20
- `Initial block number ${
21
- data.blocks[0].number
22
- } does not match previous block header ${data.previousBlockHeader.getBlockNumber()}`,
27
+ `Initial block number ${firstBlockNumber} does not match previous block header ${previousBlockNumber}`,
23
28
  );
24
29
  }
25
30
 
26
- for (const blockNumber of data.blocks.map(block => block.number)) {
27
- if (!(blockNumber in data.l1ToL2Messages)) {
28
- throw new Error(`Missing L1 to L2 messages for block number ${blockNumber}`);
31
+ for (const checkpoint of data.checkpoints) {
32
+ if (!(checkpoint.number in data.l1ToL2Messages)) {
33
+ throw new Error(`Missing L1 to L2 messages for checkpoint number ${checkpoint.number}`);
29
34
  }
30
35
  }
31
36
  }
32
37
 
33
38
  export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer {
34
- const blocks = data.blocks.map(block => block.toBuffer());
39
+ const checkpoints = data.checkpoints.map(checkpoint => checkpoint.toBuffer());
35
40
  const txs = Array.from(data.txs.values()).map(tx => tx.toBuffer());
36
- const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages]) => [
37
- Number(blockNumber),
41
+ const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([checkpointNumber, messages]) => [
42
+ Number(checkpointNumber),
38
43
  messages.length,
39
44
  ...messages,
40
45
  ]);
@@ -43,8 +48,8 @@ export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer
43
48
  return serializeToBuffer(
44
49
  data.epochNumber,
45
50
  data.previousBlockHeader,
46
- blocks.length,
47
- ...blocks,
51
+ checkpoints.length,
52
+ ...checkpoints,
48
53
  txs.length,
49
54
  ...txs,
50
55
  l1ToL2Messages.length,
@@ -58,20 +63,20 @@ export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData
58
63
  const reader = BufferReader.asReader(buf);
59
64
  const epochNumber = EpochNumber(reader.readNumber());
60
65
  const previousBlockHeader = reader.readObject(BlockHeader);
61
- const blocks = reader.readVector(L2Block);
66
+ const checkpoints = reader.readVector(Checkpoint);
62
67
  const txArray = reader.readVector(Tx);
63
68
 
64
- const l1ToL2MessageBlockCount = reader.readNumber();
69
+ const l1ToL2MessageCheckpointCount = reader.readNumber();
65
70
  const l1ToL2Messages: Record<number, Fr[]> = {};
66
- for (let i = 0; i < l1ToL2MessageBlockCount; i++) {
67
- const blockNumber = reader.readNumber();
71
+ for (let i = 0; i < l1ToL2MessageCheckpointCount; i++) {
72
+ const checkpointNumber = CheckpointNumber(reader.readNumber());
68
73
  const messages = reader.readVector(Fr);
69
- l1ToL2Messages[blockNumber] = messages;
74
+ l1ToL2Messages[checkpointNumber] = messages;
70
75
  }
71
76
 
72
77
  const attestations = reader.readVector(CommitteeAttestation);
73
78
 
74
79
  const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
75
80
 
76
- return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages, attestations };
81
+ return { epochNumber, previousBlockHeader, checkpoints, txs, l1ToL2Messages, attestations };
77
82
  }
@@ -1,17 +1,19 @@
1
1
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
2
2
  import { asyncPool } from '@aztec/foundation/async-pool';
3
- import { EpochNumber } from '@aztec/foundation/branded-types';
3
+ import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
4
4
  import { padArrayEnd } from '@aztec/foundation/collection';
5
- import { Fr } from '@aztec/foundation/fields';
6
- import { createLogger } from '@aztec/foundation/log';
5
+ import { Fr } from '@aztec/foundation/curves/bn254';
6
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
7
7
  import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
8
8
  import { Timer } from '@aztec/foundation/timer';
9
+ import { AVM_MAX_CONCURRENT_SIMULATIONS } from '@aztec/native';
9
10
  import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
10
11
  import { protocolContractsHash } from '@aztec/protocol-contracts';
11
12
  import { buildFinalBlobChallenges } from '@aztec/prover-client/helpers';
12
13
  import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/server';
13
14
  import { PublicSimulatorConfig } from '@aztec/stdlib/avm';
14
15
  import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
16
+ import type { Checkpoint } from '@aztec/stdlib/checkpoint';
15
17
  import {
16
18
  type EpochProver,
17
19
  type EpochProvingJobState,
@@ -42,7 +44,7 @@ export type EpochProvingJobOptions = {
42
44
  */
43
45
  export class EpochProvingJob implements Traceable {
44
46
  private state: EpochProvingJobState = 'initialized';
45
- private log = createLogger('prover-node:epoch-proving-job');
47
+ private log: Logger;
46
48
  private uuid: string;
47
49
 
48
50
  private runPromise: Promise<void> | undefined;
@@ -61,9 +63,14 @@ export class EpochProvingJob implements Traceable {
61
63
  private metrics: ProverNodeJobMetrics,
62
64
  private deadline: Date | undefined,
63
65
  private config: EpochProvingJobOptions,
66
+ bindings?: LoggerBindings,
64
67
  ) {
65
68
  validateEpochProvingJobData(data);
66
69
  this.uuid = crypto.randomUUID();
70
+ this.log = createLogger('prover-node:epoch-proving-job', {
71
+ ...bindings,
72
+ instanceId: `epoch-${data.epochNumber}`,
73
+ });
67
74
  this.tracer = metrics.tracer;
68
75
  }
69
76
 
@@ -91,8 +98,8 @@ export class EpochProvingJob implements Traceable {
91
98
  return this.data.epochNumber;
92
99
  }
93
100
 
94
- private get blocks() {
95
- return this.data.blocks;
101
+ private get checkpoints() {
102
+ return this.data.checkpoints;
96
103
  }
97
104
 
98
105
  private get txs() {
@@ -117,13 +124,21 @@ export class EpochProvingJob implements Traceable {
117
124
 
118
125
  const attestations = this.attestations.map(attestation => attestation.toViem());
119
126
  const epochNumber = this.epochNumber;
120
- const epochSizeBlocks = this.blocks.length;
121
- const epochSizeTxs = this.blocks.reduce((total, current) => total + current.body.txEffects.length, 0);
122
- const [fromBlock, toBlock] = [this.blocks[0].number, this.blocks.at(-1)!.number];
123
- this.log.info(`Starting epoch ${epochNumber} proving job with blocks ${fromBlock} to ${toBlock}`, {
127
+ const epochSizeCheckpoints = this.checkpoints.length;
128
+ const epochSizeBlocks = this.checkpoints.reduce((accum, checkpoint) => accum + checkpoint.blocks.length, 0);
129
+ const epochSizeTxs = this.checkpoints.reduce(
130
+ (accum, checkpoint) =>
131
+ accum + checkpoint.blocks.reduce((accumC, block) => accumC + block.body.txEffects.length, 0),
132
+ 0,
133
+ );
134
+ const fromCheckpoint = this.checkpoints[0].number;
135
+ const toCheckpoint = this.checkpoints.at(-1)!.number;
136
+ const fromBlock = this.checkpoints[0].blocks[0].number;
137
+ const toBlock = this.checkpoints.at(-1)!.blocks.at(-1)!.number;
138
+ this.log.info(`Starting epoch ${epochNumber} proving job with checkpoints ${fromCheckpoint} to ${toCheckpoint}`, {
124
139
  fromBlock,
125
140
  toBlock,
126
- epochSizeBlocks,
141
+ epochSizeTxs,
127
142
  epochNumber,
128
143
  uuid: this.uuid,
129
144
  });
@@ -134,87 +149,116 @@ export class EpochProvingJob implements Traceable {
134
149
  this.runPromise = promise;
135
150
 
136
151
  try {
137
- const blobFieldsPerCheckpoint = this.blocks.map(block => block.getCheckpointBlobFields());
152
+ const blobTimer = new Timer();
153
+ const blobFieldsPerCheckpoint = this.checkpoints.map(checkpoint => checkpoint.toBlobFields());
138
154
  const finalBlobBatchingChallenges = await buildFinalBlobChallenges(blobFieldsPerCheckpoint);
155
+ this.metrics.recordBlobProcessing(blobTimer.ms());
139
156
 
140
- // TODO(#17027): Enable multiple blocks per checkpoint.
141
- // Total number of checkpoints equals number of blocks because we currently build a checkpoint with only one block.
142
- const totalNumCheckpoints = epochSizeBlocks;
143
-
144
- this.prover.startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges);
157
+ this.prover.startNewEpoch(epochNumber, epochSizeCheckpoints, finalBlobBatchingChallenges);
158
+ const chonkTimer = new Timer();
145
159
  await this.prover.startChonkVerifierCircuits(Array.from(this.txs.values()));
160
+ this.metrics.recordChonkVerifier(chonkTimer.ms());
146
161
 
147
- await asyncPool(this.config.parallelBlockLimit ?? 32, this.blocks, async block => {
148
- this.checkState();
162
+ // Everything in the epoch should have the same chainId and version.
163
+ const { chainId, version } = this.checkpoints[0].blocks[0].header.globalVariables;
149
164
 
150
- const globalVariables = block.header.globalVariables;
151
- const txs = this.getTxs(block);
152
- const l1ToL2Messages = this.getL1ToL2Messages(block);
153
- const previousHeader = this.getBlockHeader(block.number - 1)!;
154
-
155
- this.log.verbose(`Starting processing block ${block.number}`, {
156
- number: block.number,
157
- blockHash: (await block.hash()).toString(),
158
- lastArchive: block.header.lastArchive.root,
159
- noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
160
- nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
161
- publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
162
- previousHeader: previousHeader.hash(),
163
- uuid: this.uuid,
164
- ...globalVariables,
165
- });
165
+ const previousBlockHeaders = this.gatherPreviousBlockHeaders();
166
+
167
+ const allCheckpointsTimer = new Timer();
168
+
169
+ const parallelism = this.config.parallelBlockLimit
170
+ ? this.config.parallelBlockLimit
171
+ : AVM_MAX_CONCURRENT_SIMULATIONS > 0
172
+ ? AVM_MAX_CONCURRENT_SIMULATIONS
173
+ : this.checkpoints.length;
166
174
 
175
+ await asyncPool(parallelism, this.checkpoints, async checkpoint => {
176
+ this.checkState();
177
+ const checkpointTimer = new Timer();
178
+
179
+ const checkpointIndex = checkpoint.number - fromCheckpoint;
167
180
  const checkpointConstants = CheckpointConstantData.from({
168
- chainId: globalVariables.chainId,
169
- version: globalVariables.version,
181
+ chainId,
182
+ version,
170
183
  vkTreeRoot: getVKTreeRoot(),
171
184
  protocolContractsHash: protocolContractsHash,
172
185
  proverId: this.prover.getProverId().toField(),
173
- slotNumber: globalVariables.slotNumber,
174
- coinbase: globalVariables.coinbase,
175
- feeRecipient: globalVariables.feeRecipient,
176
- gasFees: globalVariables.gasFees,
186
+ slotNumber: checkpoint.header.slotNumber,
187
+ coinbase: checkpoint.header.coinbase,
188
+ feeRecipient: checkpoint.header.feeRecipient,
189
+ gasFees: checkpoint.header.gasFees,
190
+ });
191
+ const previousHeader = previousBlockHeaders[checkpointIndex];
192
+ const l1ToL2Messages = this.getL1ToL2Messages(checkpoint);
193
+
194
+ this.log.verbose(`Starting processing checkpoint ${checkpoint.number}`, {
195
+ number: checkpoint.number,
196
+ checkpointHash: checkpoint.hash().toString(),
197
+ lastArchive: checkpoint.header.lastArchiveRoot,
198
+ previousHeader: previousHeader.hash(),
199
+ uuid: this.uuid,
177
200
  });
178
201
 
179
- // TODO(#17027): Enable multiple blocks per checkpoint.
180
- // Each checkpoint has only one block.
181
- const totalNumBlocks = 1;
182
- const checkpointIndex = block.number - fromBlock;
183
202
  await this.prover.startNewCheckpoint(
184
203
  checkpointIndex,
185
204
  checkpointConstants,
186
205
  l1ToL2Messages,
187
- totalNumBlocks,
206
+ checkpoint.blocks.length,
188
207
  previousHeader,
189
208
  );
190
209
 
191
- // Start block proving
192
- await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
210
+ for (let blockIndex = 0; blockIndex < checkpoint.blocks.length; blockIndex++) {
211
+ const blockTimer = new Timer();
212
+ const block = checkpoint.blocks[blockIndex];
213
+ const globalVariables = block.header.globalVariables;
214
+ const txs = this.getTxs(block);
215
+
216
+ this.log.verbose(`Starting processing block ${block.number}`, {
217
+ number: block.number,
218
+ blockHash: (await block.hash()).toString(),
219
+ lastArchive: block.header.lastArchive.root,
220
+ noteHashTreeRoot: block.header.state.partial.noteHashTree.root,
221
+ nullifierTreeRoot: block.header.state.partial.nullifierTree.root,
222
+ publicDataTreeRoot: block.header.state.partial.publicDataTree.root,
223
+ ...globalVariables,
224
+ numTxs: txs.length,
225
+ });
193
226
 
194
- // Process public fns
195
- const db = await this.createFork(block.number - 1, l1ToL2Messages);
196
- const config = PublicSimulatorConfig.from({
197
- proverId: this.prover.getProverId().toField(),
198
- skipFeeEnforcement: false,
199
- collectDebugLogs: false,
200
- collectHints: true,
201
- maxDebugLogMemoryReads: 0,
202
- collectStatistics: false,
203
- });
204
- const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
205
- const processed = await this.processTxs(publicProcessor, txs);
206
- await this.prover.addTxs(processed);
207
- await db.close();
208
- this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
209
- blockNumber: block.number,
210
- blockHash: (await block.hash()).toString(),
211
- uuid: this.uuid,
212
- });
227
+ // Start block proving
228
+ await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
229
+
230
+ // Process public fns. L1 to L2 messages are only inserted for the first block of a checkpoint,
231
+ // as the fork for subsequent blocks already includes them from the previous block's synced state.
232
+ const db = await this.createFork(
233
+ BlockNumber(block.number - 1),
234
+ blockIndex === 0 ? l1ToL2Messages : undefined,
235
+ );
236
+ const config = PublicSimulatorConfig.from({
237
+ proverId: this.prover.getProverId().toField(),
238
+ skipFeeEnforcement: false,
239
+ collectDebugLogs: false,
240
+ collectHints: true,
241
+ collectPublicInputs: true,
242
+ collectStatistics: false,
243
+ });
244
+ const publicProcessor = this.publicProcessorFactory.create(db, globalVariables, config);
245
+ const processed = await this.processTxs(publicProcessor, txs);
246
+ await this.prover.addTxs(processed);
247
+ await db.close();
248
+ this.log.verbose(`Processed all ${txs.length} txs for block ${block.number}`, {
249
+ blockNumber: block.number,
250
+ blockHash: (await block.hash()).toString(),
251
+ uuid: this.uuid,
252
+ });
213
253
 
214
- // Mark block as completed to pad it
215
- const expectedBlockHeader = block.getBlockHeader();
216
- await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
254
+ // Mark block as completed to pad it
255
+ const expectedBlockHeader = block.header;
256
+ await this.prover.setBlockCompleted(block.number, expectedBlockHeader);
257
+ this.metrics.recordBlockProcessing(blockTimer.ms());
258
+ }
259
+ this.metrics.recordCheckpointProcessing(checkpointTimer.ms());
217
260
  });
261
+ this.metrics.recordAllCheckpointsProcessing(allCheckpointsTimer.ms());
218
262
 
219
263
  const executionTime = timer.ms();
220
264
 
@@ -226,16 +270,16 @@ export class EpochProvingJob implements Traceable {
226
270
 
227
271
  if (this.config.skipSubmitProof) {
228
272
  this.log.info(
229
- `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`,
273
+ `Proof publishing is disabled. Dropping valid proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`,
230
274
  );
231
275
  this.state = 'completed';
232
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
276
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
233
277
  return;
234
278
  }
235
279
 
236
280
  const success = await this.publisher.submitEpochProof({
237
- fromBlock,
238
- toBlock,
281
+ fromCheckpoint,
282
+ toCheckpoint,
239
283
  epochNumber,
240
284
  publicInputs,
241
285
  proof,
@@ -246,12 +290,12 @@ export class EpochProvingJob implements Traceable {
246
290
  throw new Error('Failed to submit epoch proof to L1');
247
291
  }
248
292
 
249
- this.log.info(`Submitted proof for epoch ${epochNumber} (blocks ${fromBlock} to ${toBlock})`, {
293
+ this.log.info(`Submitted proof for epoch ${epochNumber} (checkpoints ${fromCheckpoint} to ${toCheckpoint})`, {
250
294
  epochNumber,
251
295
  uuid: this.uuid,
252
296
  });
253
297
  this.state = 'completed';
254
- this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeBlocks, epochSizeTxs);
298
+ this.metrics.recordProvingJob(executionTime, timer.ms(), epochSizeCheckpoints, epochSizeBlocks, epochSizeTxs);
255
299
  } catch (err: any) {
256
300
  if (err && err.name === 'HaltExecutionError') {
257
301
  this.log.warn(`Halted execution of epoch ${epochNumber} prover job`, {
@@ -274,22 +318,29 @@ export class EpochProvingJob implements Traceable {
274
318
  }
275
319
 
276
320
  /**
277
- * Create a new db fork for tx processing, inserting all L1 to L2.
321
+ * Create a new db fork for tx processing, optionally inserting L1 to L2 messages.
322
+ * L1 to L2 messages should only be inserted for the first block in a checkpoint,
323
+ * as subsequent blocks' synced state already includes them.
278
324
  * REFACTOR: The prover already spawns a db fork of its own for each block, so we may be able to do away with just one fork.
279
325
  */
280
- private async createFork(blockNumber: number, l1ToL2Messages: Fr[]) {
326
+ private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[] | undefined) {
327
+ this.log.verbose(`Creating fork at ${blockNumber}`, { blockNumber });
281
328
  const db = await this.dbProvider.fork(blockNumber);
282
- const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
283
- l1ToL2Messages,
284
- Fr.ZERO,
285
- NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
286
- 'Too many L1 to L2 messages',
287
- );
288
- this.log.verbose(`Creating fork at ${blockNumber} with ${l1ToL2Messages.length} L1 to L2 messages`, {
289
- blockNumber,
290
- l1ToL2Messages: l1ToL2Messages.map(m => m.toString()),
291
- });
292
- await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
329
+
330
+ if (l1ToL2Messages !== undefined) {
331
+ this.log.verbose(`Inserting ${l1ToL2Messages.length} L1 to L2 messages in fork`, {
332
+ blockNumber,
333
+ l1ToL2Messages: l1ToL2Messages.map(m => m.toString()),
334
+ });
335
+ const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
336
+ l1ToL2Messages,
337
+ Fr.ZERO,
338
+ NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
339
+ 'Too many L1 to L2 messages',
340
+ );
341
+ await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
342
+ }
343
+
293
344
  return db;
294
345
  }
295
346
 
@@ -346,11 +397,12 @@ export class EpochProvingJob implements Traceable {
346
397
  const intervalMs = Math.ceil((await l2BlockSource.getL1Constants()).ethereumSlotDuration / 2) * 1000;
347
398
  this.epochCheckPromise = new RunningPromise(
348
399
  async () => {
349
- const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
350
- const blockHashes = await Promise.all(blocks.map(block => block.hash()));
351
- const thisBlockHashes = await Promise.all(this.blocks.map(block => block.hash()));
400
+ const blockHeaders = await l2BlockSource.getCheckpointedBlockHeadersForEpoch(this.epochNumber);
401
+ const blockHashes = await Promise.all(blockHeaders.map(header => header.hash()));
402
+ const thisBlocks = this.checkpoints.flatMap(checkpoint => checkpoint.blocks);
403
+ const thisBlockHashes = await Promise.all(thisBlocks.map(block => block.hash()));
352
404
  if (
353
- blocks.length !== this.blocks.length ||
405
+ blockHeaders.length !== thisBlocks.length ||
354
406
  !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))
355
407
  ) {
356
408
  this.log.warn('Epoch blocks changed underfoot', {
@@ -368,30 +420,18 @@ export class EpochProvingJob implements Traceable {
368
420
  this.log.verbose(`Scheduled epoch check for epoch ${this.epochNumber} every ${intervalMs}ms`);
369
421
  }
370
422
 
371
- /* Returns the header for the given block number based on the epoch proving job data. */
372
- private getBlockHeader(blockNumber: number) {
373
- const block = this.blocks.find(b => b.number === blockNumber);
374
- if (block) {
375
- return block.getBlockHeader();
376
- }
377
-
378
- if (blockNumber === Number(this.data.previousBlockHeader.getBlockNumber())) {
379
- return this.data.previousBlockHeader;
380
- }
381
-
382
- throw new Error(
383
- `Block header not found for block number ${blockNumber} (got ${this.blocks
384
- .map(b => b.number)
385
- .join(', ')} and previous header ${this.data.previousBlockHeader.getBlockNumber()})`,
386
- );
423
+ /* Returns the last block header in the previous checkpoint for all checkpoints in the epoch */
424
+ private gatherPreviousBlockHeaders() {
425
+ const lastBlocks = this.checkpoints.map(checkpoint => checkpoint.blocks.at(-1)!);
426
+ return [this.data.previousBlockHeader, ...lastBlocks.map(block => block.header).slice(0, -1)];
387
427
  }
388
428
 
389
429
  private getTxs(block: L2Block): Tx[] {
390
430
  return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
391
431
  }
392
432
 
393
- private getL1ToL2Messages(block: L2Block) {
394
- return this.data.l1ToL2Messages[block.number];
433
+ private getL1ToL2Messages(checkpoint: Checkpoint) {
434
+ return this.data.l1ToL2Messages[checkpoint.number];
395
435
  }
396
436
 
397
437
  private async processTxs(publicProcessor: PublicProcessor, txs: Tx[]): Promise<ProcessedTx[]> {