@aztec/prover-node 3.0.0-nightly.20251201.2 → 3.0.0-nightly.20251203
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/job/epoch-proving-job-data.d.ts +6 -5
- package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
- package/dest/job/epoch-proving-job-data.js +23 -17
- package/dest/job/epoch-proving-job.d.ts +3 -3
- package/dest/job/epoch-proving-job.d.ts.map +1 -1
- package/dest/job/epoch-proving-job.js +87 -83
- package/dest/metrics.d.ts +3 -2
- package/dest/metrics.d.ts.map +1 -1
- package/dest/metrics.js +7 -1
- package/dest/prover-node-publisher.d.ts +4 -4
- package/dest/prover-node-publisher.d.ts.map +1 -1
- package/dest/prover-node-publisher.js +25 -24
- package/dest/prover-node.d.ts +2 -2
- package/dest/prover-node.d.ts.map +1 -1
- package/dest/prover-node.js +27 -24
- package/package.json +22 -22
- package/src/job/epoch-proving-job-data.ts +27 -22
- package/src/job/epoch-proving-job.ts +99 -97
- package/src/metrics.ts +13 -1
- package/src/prover-node-publisher.ts +38 -34
- package/src/prover-node.ts +34 -28
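The hunks below all make the same change: epoch proofs are now described by a checkpoint range (`fromCheckpoint`/`toCheckpoint`, using the branded `CheckpointNumber` type) instead of the fields they replace, and the prover node now gathers `Checkpoint` objects for each epoch. As a reading aid, here is a minimal TypeScript sketch of the new `submitEpochProof` argument shape; the local type definitions and the helper are simplified stand-ins for the package's real exports, not part of this diff.

// Simplified stand-ins for the branded types imported from
// '@aztec/foundation/branded-types' in the diff; the exact brand encoding is an assumption.
type EpochNumber = number & { readonly __brand: 'EpochNumber' };
type CheckpointNumber = number & { readonly __brand: 'CheckpointNumber' };

// Argument shape of ProverNodePublisher.submitEpochProof after this change, with the
// proof/blob/attestation payload types reduced to `unknown` placeholders.
interface SubmitEpochProofArgs {
  epochNumber: EpochNumber;
  fromCheckpoint: CheckpointNumber; // new in this version
  toCheckpoint: CheckpointNumber; // new in this version
  publicInputs: unknown; // RootRollupPublicInputs
  proof: unknown; // Proof
  batchedBlobInputs: unknown; // BatchedBlob
  attestations: unknown[]; // ViemCommitteeAttestation[]
}

// Hypothetical helper mirroring how the publisher destructures its args into a log context.
function toLogContext({ epochNumber, fromCheckpoint, toCheckpoint }: SubmitEpochProofArgs) {
  return { epochNumber, fromCheckpoint, toCheckpoint };
}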
package/src/prover-node-publisher.ts
CHANGED

@@ -2,7 +2,7 @@ import { BatchedBlob, getEthBlobEvaluationInputs } from '@aztec/blob-lib';
 import { AZTEC_MAX_EPOCH_DURATION } from '@aztec/constants';
 import type { L1TxUtils, RollupContract, ViemCommitteeAttestation } from '@aztec/ethereum';
 import { makeTuple } from '@aztec/foundation/array';
-import { EpochNumber } from '@aztec/foundation/branded-types';
+import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
 import { areArraysEqual } from '@aztec/foundation/collection';
 import { EthAddress } from '@aztec/foundation/eth-address';
 import { Fr } from '@aztec/foundation/fields';
@@ -87,15 +87,15 @@ export class ProverNodePublisher {
 
   public async submitEpochProof(args: {
     epochNumber: EpochNumber;
-
-
+    fromCheckpoint: CheckpointNumber;
+    toCheckpoint: CheckpointNumber;
     publicInputs: RootRollupPublicInputs;
     proof: Proof;
     batchedBlobInputs: BatchedBlob;
     attestations: ViemCommitteeAttestation[];
   }): Promise<boolean> {
-    const { epochNumber,
-    const ctx = { epochNumber,
+    const { epochNumber, fromCheckpoint, toCheckpoint } = args;
+    const ctx = { epochNumber, fromCheckpoint, toCheckpoint };
 
     if (!this.interrupted) {
       const timer = new Timer();
@@ -139,44 +139,48 @@ export class ProverNodePublisher {
       this.log.error(`Rollup.submitEpochProof tx status failed ${txReceipt.transactionHash}`, undefined, ctx);
     }
 
-    this.log.verbose('
+    this.log.verbose('Checkpoint data syncing interrupted', ctx);
     return false;
   }
 
   private async validateEpochProofSubmission(args: {
-
-
+    fromCheckpoint: CheckpointNumber;
+    toCheckpoint: CheckpointNumber;
     publicInputs: RootRollupPublicInputs;
     proof: Proof;
     batchedBlobInputs: BatchedBlob;
     attestations: ViemCommitteeAttestation[];
   }) {
-    const {
+    const { fromCheckpoint, toCheckpoint, publicInputs, batchedBlobInputs } = args;
 
-    // Check that the
+    // Check that the checkpoint numbers match the expected epoch to be proven
     const { pending, proven } = await this.rollupContract.getTips();
-    // Don't publish if proven is beyond our
-    if (proven > BigInt(
-      throw new Error(
+    // Don't publish if proven is beyond our toCheckpoint, pointless to do so
+    if (proven > BigInt(toCheckpoint)) {
+      throw new Error(
+        `Cannot submit epoch proof for ${fromCheckpoint}-${toCheckpoint} as proven checkpoint is ${proven}`,
+      );
     }
-    //
-    if (
-      throw new Error(
+    // toCheckpoint can't be greater than pending
+    if (toCheckpoint > pending) {
+      throw new Error(
+        `Cannot submit epoch proof for ${fromCheckpoint}-${toCheckpoint} as pending checkpoint is ${pending}`,
+      );
     }
 
-    // Check the archive for the immediate
-    const
-    if (publicInputs.previousArchiveRoot.toString() !==
+    // Check the archive for the immediate checkpoint before the epoch
+    const checkpointLog = await this.rollupContract.getCheckpoint(CheckpointNumber(fromCheckpoint - 1));
+    if (publicInputs.previousArchiveRoot.toString() !== checkpointLog.archive) {
       throw new Error(
-        `Previous archive root mismatch: ${publicInputs.previousArchiveRoot.toString()} !== ${
+        `Previous archive root mismatch: ${publicInputs.previousArchiveRoot.toString()} !== ${checkpointLog.archive}`,
       );
     }
 
-    // Check the archive for the last
-    const
-    if (publicInputs.endArchiveRoot.toString() !==
+    // Check the archive for the last checkpoint in the epoch
+    const endCheckpointLog = await this.rollupContract.getCheckpoint(toCheckpoint);
+    if (publicInputs.endArchiveRoot.toString() !== endCheckpointLog.archive) {
       throw new Error(
-        `End archive root mismatch: ${publicInputs.endArchiveRoot.toString()} !== ${
+        `End archive root mismatch: ${publicInputs.endArchiveRoot.toString()} !== ${endCheckpointLog.archive}`,
      );
    }
 
@@ -203,8 +207,8 @@ export class ProverNodePublisher {
   }
 
   private async sendSubmitEpochProofTx(args: {
-
-
+    fromCheckpoint: CheckpointNumber;
+    toCheckpoint: CheckpointNumber;
     publicInputs: RootRollupPublicInputs;
     proof: Proof;
     batchedBlobInputs: BatchedBlob;
@@ -214,8 +218,8 @@ export class ProverNodePublisher {
 
     this.log.info(`Submitting epoch proof to L1 rollup contract`, {
       proofSize: args.proof.withoutPublicInputs().length,
-
-
+      fromCheckpoint: args.fromCheckpoint,
+      toCheckpoint: args.toCheckpoint,
     });
     const data = encodeFunctionData({
       abi: RollupAbi,
@@ -244,16 +248,16 @@ export class ProverNodePublisher {
   }
 
   private getEpochProofPublicInputsArgs(args: {
-
-
+    fromCheckpoint: CheckpointNumber;
+    toCheckpoint: CheckpointNumber;
     publicInputs: RootRollupPublicInputs;
     batchedBlobInputs: BatchedBlob;
     attestations: ViemCommitteeAttestation[];
   }) {
     // Returns arguments for EpochProofLib.sol -> getEpochProofPublicInputs()
     return [
-      BigInt(args.
-      BigInt(args.
+      BigInt(args.fromCheckpoint) /*_start*/,
+      BigInt(args.toCheckpoint) /*_end*/,
       {
         previousArchive: args.publicInputs.previousArchiveRoot.toString(),
         endArchive: args.publicInputs.endArchiveRoot.toString(),
@@ -269,8 +273,8 @@ export class ProverNodePublisher {
   }
 
   private getSubmitEpochProofArgs(args: {
-
-
+    fromCheckpoint: CheckpointNumber;
+    toCheckpoint: CheckpointNumber;
     publicInputs: RootRollupPublicInputs;
     proof: Proof;
     batchedBlobInputs: BatchedBlob;
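Before the prover-node.ts changes, it may help to see the two checkpoint-bound checks from validateEpochProofSubmission above in isolation. This is a condensed, self-contained restatement of the logic in that hunk, with the rollup contract's getTips() result simplified to a plain object of bigints; the names here are illustrative, not the package's API.

// Condensed restatement of the checkpoint-bound checks in validateEpochProofSubmission.
// ChainTips and the function name are simplified assumptions for illustration.
type CheckpointNumber = bigint;

interface ChainTips {
  pending: CheckpointNumber;
  proven: CheckpointNumber;
}

function assertCheckpointRangeSubmittable(
  fromCheckpoint: CheckpointNumber,
  toCheckpoint: CheckpointNumber,
  { pending, proven }: ChainTips,
): void {
  // Don't publish if the proven tip is already beyond our toCheckpoint.
  if (proven > toCheckpoint) {
    throw new Error(
      `Cannot submit epoch proof for ${fromCheckpoint}-${toCheckpoint} as proven checkpoint is ${proven}`,
    );
  }
  // toCheckpoint can't be greater than the pending tip.
  if (toCheckpoint > pending) {
    throw new Error(
      `Cannot submit epoch proof for ${fromCheckpoint}-${toCheckpoint} as pending checkpoint is ${pending}`,
    );
  }
}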
package/src/prover-node.ts
CHANGED
@@ -1,6 +1,6 @@
 import type { Archiver } from '@aztec/archiver';
 import type { RollupContract } from '@aztec/ethereum';
-import { EpochNumber } from '@aztec/foundation/branded-types';
+import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
 import { assertRequired, compact, pick, sum } from '@aztec/foundation/collection';
 import { memoize } from '@aztec/foundation/decorators';
 import type { Fr } from '@aztec/foundation/fields';
@@ -9,7 +9,8 @@ import { DateProvider } from '@aztec/foundation/timer';
 import type { DataStoreConfig } from '@aztec/kv-store/config';
 import type { P2PClient } from '@aztec/p2p';
 import { PublicProcessorFactory } from '@aztec/simulator/server';
-import type {
+import type { L2BlockSource } from '@aztec/stdlib/block';
+import type { Checkpoint } from '@aztec/stdlib/checkpoint';
 import type { ChainConfig } from '@aztec/stdlib/config';
 import type { ContractDataSource } from '@aztec/stdlib/contract';
 import { getProofSubmissionDeadlineTimestamp } from '@aztec/stdlib/epoch-helpers';
@@ -271,10 +272,13 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
 
     // Gather all data for this epoch
     const epochData = await this.gatherEpochData(epochNumber);
-
-    const
-    const
-
+    const fromCheckpoint = epochData.checkpoints[0].number;
+    const toCheckpoint = epochData.checkpoints.at(-1)!.number;
+    const fromBlock = epochData.checkpoints[0].blocks[0].number;
+    const toBlock = epochData.checkpoints.at(-1)!.blocks.at(-1)!.number;
+    this.log.verbose(
+      `Creating proving job for epoch ${epochNumber} for checkpoint range ${fromCheckpoint} to ${toCheckpoint} and block range ${fromBlock} to ${toBlock}`,
+    );
 
     // Fast forward world state to right before the target block and get a fork
     await this.worldState.syncImmediate(toBlock);
@@ -289,7 +293,6 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
     // Set deadline for this job to run. It will abort if it takes too long.
     const deadlineTs = getProofSubmissionDeadlineTimestamp(epochNumber, await this.getL1Constants());
     const deadline = new Date(Number(deadlineTs) * 1000);
-
     const job = this.doCreateEpochProvingJob(epochData, deadline, publicProcessorFactory, this.publisher, opts);
     this.jobs.set(job.getId(), job);
     return job;
@@ -302,28 +305,30 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
 
   @trackSpan('ProverNode.gatherEpochData', epochNumber => ({ [Attributes.EPOCH_NUMBER]: epochNumber }))
   private async gatherEpochData(epochNumber: EpochNumber): Promise<EpochProvingJobData> {
-    const
-    const txArray = await this.gatherTxs(epochNumber,
+    const checkpoints = await this.gatherCheckpoints(epochNumber);
+    const txArray = await this.gatherTxs(epochNumber, checkpoints);
     const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
-    const l1ToL2Messages = await this.gatherMessages(epochNumber,
-    const
-    const
-    const
+    const l1ToL2Messages = await this.gatherMessages(epochNumber, checkpoints);
+    const [firstBlock] = checkpoints[0].blocks;
+    const previousBlockHeader = await this.gatherPreviousBlockHeader(epochNumber, firstBlock.number - 1);
+    const [lastPublishedCheckpoint] = await this.l2BlockSource.getPublishedCheckpoints(checkpoints.at(-1)!.number, 1);
+    const attestations = lastPublishedCheckpoint?.attestations ?? [];
 
-    return {
+    return { checkpoints, txs, l1ToL2Messages, epochNumber, previousBlockHeader, attestations };
   }
 
-  private async
-    const
-    if (
+  private async gatherCheckpoints(epochNumber: EpochNumber) {
+    const checkpoints = await this.l2BlockSource.getCheckpointsForEpoch(epochNumber);
+    if (checkpoints.length === 0) {
      throw new EmptyEpochError(epochNumber);
    }
-    return
+    return checkpoints;
  }
 
-  private async gatherTxs(epochNumber: EpochNumber,
+  private async gatherTxs(epochNumber: EpochNumber, checkpoints: Checkpoint[]) {
    const deadline = new Date(this.dateProvider.now() + this.config.txGatheringTimeoutMs);
    const txProvider = this.p2pClient.getTxProvider();
+    const blocks = checkpoints.flatMap(checkpoint => checkpoint.blocks);
    const txsByBlock = await Promise.all(blocks.map(block => txProvider.getTxsForBlock(block, { deadline })));
    const txs = txsByBlock.map(({ txs }) => txs).flat();
    const missingTxs = txsByBlock.map(({ missingTxs }) => missingTxs).flat();
@@ -336,25 +341,26 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
      throw new Error(`Txs not found for epoch ${epochNumber}: ${missingTxs.map(hash => hash.toString()).join(', ')}`);
    }
 
-  private async gatherMessages(epochNumber: EpochNumber,
-    const messages = await Promise.all(
+  private async gatherMessages(epochNumber: EpochNumber, checkpoints: Checkpoint[]) {
+    const messages = await Promise.all(
+      checkpoints.map(c => this.l1ToL2MessageSource.getL1ToL2MessagesForCheckpoint(c.number)),
+    );
    const messageCount = sum(messages.map(m => m.length));
    this.log.verbose(`Gathered all ${messageCount} messages for epoch ${epochNumber}`, { epochNumber });
-    const
-    for (let i = 0; i <
-
+    const messagesByCheckpoint: Record<CheckpointNumber, Fr[]> = {};
+    for (let i = 0; i < checkpoints.length; i++) {
+      messagesByCheckpoint[checkpoints[i].number] = messages[i];
    }
-    return
+    return messagesByCheckpoint;
  }
 
-  private async gatherPreviousBlockHeader(epochNumber: EpochNumber,
-    const previousBlockNumber = initialBlock.number - 1;
+  private async gatherPreviousBlockHeader(epochNumber: EpochNumber, previousBlockNumber: number) {
    const header = await (previousBlockNumber === 0
      ? this.worldState.getCommitted().getInitialHeader()
      : this.l2BlockSource.getBlockHeader(previousBlockNumber));
 
    if (!header) {
-      throw new Error(`Previous block header ${
+      throw new Error(`Previous block header ${previousBlockNumber} not found for proving epoch ${epochNumber}`);
    }
 
    this.log.verbose(`Gathered previous block header ${header.getBlockNumber()} for epoch ${epochNumber}`);
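On the prover-node side, the gatherMessages change above boils down to fetching L1-to-L2 messages once per checkpoint and keying the result by checkpoint number. Below is a minimal standalone sketch of that grouping, with the message source reduced to a callback and Fr fields to strings (both assumptions for illustration).

// Minimal sketch of the per-checkpoint grouping done in gatherMessages.
// Checkpoint, the fetch callback, and string messages are simplified stand-ins.
type CheckpointNumber = number;

interface Checkpoint {
  number: CheckpointNumber;
}

async function groupMessagesByCheckpoint(
  checkpoints: Checkpoint[],
  fetchMessages: (checkpoint: CheckpointNumber) => Promise<string[]>,
): Promise<Record<CheckpointNumber, string[]>> {
  // Fetch every checkpoint's messages in parallel, preserving checkpoint order.
  const messages = await Promise.all(checkpoints.map(c => fetchMessages(c.number)));
  const messagesByCheckpoint: Record<CheckpointNumber, string[]> = {};
  for (let i = 0; i < checkpoints.length; i++) {
    messagesByCheckpoint[checkpoints[i].number] = messages[i];
  }
  return messagesByCheckpoint;
}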