@aztec/prover-node 1.2.0 → 2.0.0-nightly.20250813

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/dest/config.d.ts +2 -2
  2. package/dest/config.d.ts.map +1 -1
  3. package/dest/config.js +5 -2
  4. package/dest/factory.d.ts +8 -6
  5. package/dest/factory.d.ts.map +1 -1
  6. package/dest/factory.js +22 -14
  7. package/dest/job/epoch-proving-job-data.d.ts +3 -2
  8. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  9. package/dest/job/epoch-proving-job-data.js +12 -5
  10. package/dest/job/epoch-proving-job.d.ts +1 -0
  11. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  12. package/dest/job/epoch-proving-job.js +10 -10
  13. package/dest/prover-node-publisher.d.ts +2 -4
  14. package/dest/prover-node-publisher.d.ts.map +1 -1
  15. package/dest/prover-node-publisher.js +4 -2
  16. package/dest/prover-node.d.ts +6 -8
  17. package/dest/prover-node.d.ts.map +1 -1
  18. package/dest/prover-node.js +24 -35
  19. package/package.json +21 -21
  20. package/src/config.ts +6 -4
  21. package/src/factory.ts +46 -23
  22. package/src/job/epoch-proving-job-data.ts +13 -5
  23. package/src/job/epoch-proving-job.ts +11 -8
  24. package/src/prover-node-publisher.ts +14 -6
  25. package/src/prover-node.ts +19 -40
  26. package/dest/prover-coordination/combined-prover-coordination.d.ts +0 -22
  27. package/dest/prover-coordination/combined-prover-coordination.d.ts.map +0 -1
  28. package/dest/prover-coordination/combined-prover-coordination.js +0 -140
  29. package/dest/prover-coordination/config.d.ts +0 -7
  30. package/dest/prover-coordination/config.d.ts.map +0 -1
  31. package/dest/prover-coordination/config.js +0 -12
  32. package/dest/prover-coordination/factory.d.ts +0 -23
  33. package/dest/prover-coordination/factory.d.ts.map +0 -1
  34. package/dest/prover-coordination/factory.js +0 -52
  35. package/dest/prover-coordination/index.d.ts +0 -3
  36. package/dest/prover-coordination/index.d.ts.map +0 -1
  37. package/dest/prover-coordination/index.js +0 -2
  38. package/src/prover-coordination/combined-prover-coordination.ts +0 -164
  39. package/src/prover-coordination/config.ts +0 -18
  40. package/src/prover-coordination/factory.ts +0 -86
  41. package/src/prover-coordination/index.ts +0 -2
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aztec/prover-node",
3
- "version": "1.2.0",
3
+ "version": "2.0.0-nightly.20250813",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
@@ -56,26 +56,26 @@
56
56
  ]
57
57
  },
58
58
  "dependencies": {
59
- "@aztec/archiver": "1.2.0",
60
- "@aztec/bb-prover": "1.2.0",
61
- "@aztec/blob-lib": "1.2.0",
62
- "@aztec/blob-sink": "1.2.0",
63
- "@aztec/constants": "1.2.0",
64
- "@aztec/epoch-cache": "1.2.0",
65
- "@aztec/ethereum": "1.2.0",
66
- "@aztec/foundation": "1.2.0",
67
- "@aztec/kv-store": "1.2.0",
68
- "@aztec/l1-artifacts": "1.2.0",
69
- "@aztec/node-lib": "1.2.0",
70
- "@aztec/noir-protocol-circuits-types": "1.2.0",
71
- "@aztec/p2p": "1.2.0",
72
- "@aztec/protocol-contracts": "1.2.0",
73
- "@aztec/prover-client": "1.2.0",
74
- "@aztec/sequencer-client": "1.2.0",
75
- "@aztec/simulator": "1.2.0",
76
- "@aztec/stdlib": "1.2.0",
77
- "@aztec/telemetry-client": "1.2.0",
78
- "@aztec/world-state": "1.2.0",
59
+ "@aztec/archiver": "2.0.0-nightly.20250813",
60
+ "@aztec/bb-prover": "2.0.0-nightly.20250813",
61
+ "@aztec/blob-lib": "2.0.0-nightly.20250813",
62
+ "@aztec/blob-sink": "2.0.0-nightly.20250813",
63
+ "@aztec/constants": "2.0.0-nightly.20250813",
64
+ "@aztec/epoch-cache": "2.0.0-nightly.20250813",
65
+ "@aztec/ethereum": "2.0.0-nightly.20250813",
66
+ "@aztec/foundation": "2.0.0-nightly.20250813",
67
+ "@aztec/kv-store": "2.0.0-nightly.20250813",
68
+ "@aztec/l1-artifacts": "2.0.0-nightly.20250813",
69
+ "@aztec/node-lib": "2.0.0-nightly.20250813",
70
+ "@aztec/noir-protocol-circuits-types": "2.0.0-nightly.20250813",
71
+ "@aztec/p2p": "2.0.0-nightly.20250813",
72
+ "@aztec/protocol-contracts": "2.0.0-nightly.20250813",
73
+ "@aztec/prover-client": "2.0.0-nightly.20250813",
74
+ "@aztec/sequencer-client": "2.0.0-nightly.20250813",
75
+ "@aztec/simulator": "2.0.0-nightly.20250813",
76
+ "@aztec/stdlib": "2.0.0-nightly.20250813",
77
+ "@aztec/telemetry-client": "2.0.0-nightly.20250813",
78
+ "@aztec/world-state": "2.0.0-nightly.20250813",
79
79
  "source-map-support": "^0.5.21",
80
80
  "tslib": "^2.4.0",
81
81
  "viem": "2.23.7"
package/src/config.ts CHANGED
@@ -26,8 +26,6 @@ import {
26
26
  } from '@aztec/sequencer-client/config';
27
27
  import { type WorldStateConfig, worldStateConfigMappings } from '@aztec/world-state/config';
28
28
 
29
- import { type ProverCoordinationConfig, proverCoordinationConfigMappings } from './prover-coordination/config.js';
30
-
31
29
  export type ProverNodeConfig = ArchiverConfig &
32
30
  ProverClientUserConfig &
33
31
  P2PConfig &
@@ -35,7 +33,6 @@ export type ProverNodeConfig = ArchiverConfig &
35
33
  PublisherConfig &
36
34
  TxSenderConfig &
37
35
  DataStoreConfig &
38
- ProverCoordinationConfig &
39
36
  SharedNodeConfig &
40
37
  SpecificProverNodeConfig &
41
38
  GenesisStateConfig;
@@ -45,6 +42,7 @@ export type SpecificProverNodeConfig = {
45
42
  proverNodePollingIntervalMs: number;
46
43
  proverNodeMaxParallelBlocksPerEpoch: number;
47
44
  proverNodeFailedEpochStore: string | undefined;
45
+ txGatheringTimeoutMs: number;
48
46
  txGatheringIntervalMs: number;
49
47
  txGatheringBatchSize: number;
50
48
  txGatheringMaxParallelRequestsPerNode: number;
@@ -86,6 +84,11 @@ const specificProverNodeConfigMappings: ConfigMappingsType<SpecificProverNodeCon
86
84
  description: 'How many tx requests to make in parallel to each node',
87
85
  ...numberConfigHelper(100),
88
86
  },
87
+ txGatheringTimeoutMs: {
88
+ env: 'PROVER_NODE_TX_GATHERING_TIMEOUT_MS',
89
+ description: 'How long to wait for tx data to be available before giving up',
90
+ ...numberConfigHelper(120_000),
91
+ },
89
92
  };
90
93
 
91
94
  export const proverNodeConfigMappings: ConfigMappingsType<ProverNodeConfig> = {
@@ -96,7 +99,6 @@ export const proverNodeConfigMappings: ConfigMappingsType<ProverNodeConfig> = {
96
99
  ...worldStateConfigMappings,
97
100
  ...getPublisherConfigMappings('PROVER'),
98
101
  ...getTxSenderConfigMappings('PROVER'),
99
- ...proverCoordinationConfigMappings,
100
102
  ...specificProverNodeConfigMappings,
101
103
  ...genesisStateConfigMappings,
102
104
  ...sharedNodeConfigMappings,
package/src/factory.ts CHANGED
@@ -1,51 +1,62 @@
1
1
  import { type Archiver, createArchiver } from '@aztec/archiver';
2
+ import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
2
3
  import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
3
4
  import { EpochCache } from '@aztec/epoch-cache';
4
5
  import { L1TxUtils, RollupContract, createEthereumChain, createExtendedL1Client } from '@aztec/ethereum';
5
6
  import { pick } from '@aztec/foundation/collection';
6
7
  import { type Logger, createLogger } from '@aztec/foundation/log';
8
+ import { DateProvider } from '@aztec/foundation/timer';
7
9
  import type { DataStoreConfig } from '@aztec/kv-store/config';
8
10
  import { trySnapshotSync } from '@aztec/node-lib/actions';
11
+ import { NodeRpcTxSource, createP2PClient } from '@aztec/p2p';
9
12
  import { createProverClient } from '@aztec/prover-client';
10
13
  import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
11
- import type { ProvingJobBroker } from '@aztec/stdlib/interfaces/server';
14
+ import type { AztecNode, ProvingJobBroker } from '@aztec/stdlib/interfaces/server';
15
+ import { P2PClientType } from '@aztec/stdlib/p2p';
12
16
  import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees';
17
+ import { getPackageVersion } from '@aztec/stdlib/update-checker';
13
18
  import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
14
19
  import { createWorldStateSynchronizer } from '@aztec/world-state';
15
20
 
16
21
  import { type ProverNodeConfig, resolveConfig } from './config.js';
17
22
  import { EpochMonitor } from './monitors/epoch-monitor.js';
18
- import type { TxSource } from './prover-coordination/combined-prover-coordination.js';
19
- import { createProverCoordination } from './prover-coordination/factory.js';
20
23
  import { ProverNodePublisher } from './prover-node-publisher.js';
21
24
  import { ProverNode } from './prover-node.js';
22
25
 
26
+ export type ProverNodeDeps = {
27
+ telemetry?: TelemetryClient;
28
+ log?: Logger;
29
+ aztecNodeTxProvider?: Pick<AztecNode, 'getTxsByHash'>;
30
+ archiver?: Archiver;
31
+ publisher?: ProverNodePublisher;
32
+ blobSinkClient?: BlobSinkClientInterface;
33
+ broker?: ProvingJobBroker;
34
+ l1TxUtils?: L1TxUtils;
35
+ dateProvider?: DateProvider;
36
+ };
37
+
23
38
  /** Creates a new prover node given a config. */
24
39
  export async function createProverNode(
25
40
  userConfig: ProverNodeConfig & DataStoreConfig,
26
- deps: {
27
- telemetry?: TelemetryClient;
28
- log?: Logger;
29
- aztecNodeTxProvider?: TxSource;
30
- archiver?: Archiver;
31
- publisher?: ProverNodePublisher;
32
- blobSinkClient?: BlobSinkClientInterface;
33
- broker?: ProvingJobBroker;
34
- l1TxUtils?: L1TxUtils;
35
- } = {},
41
+ deps: ProverNodeDeps = {},
36
42
  options: {
37
43
  prefilledPublicData?: PublicDataTreeLeaf[];
38
44
  } = {},
39
45
  ) {
40
46
  const config = resolveConfig(userConfig);
41
47
  const telemetry = deps.telemetry ?? getTelemetryClient();
48
+ const dateProvider = deps.dateProvider ?? new DateProvider();
42
49
  const blobSinkClient =
43
50
  deps.blobSinkClient ?? createBlobSinkClient(config, { logger: createLogger('prover-node:blob-sink:client') });
44
51
  const log = deps.log ?? createLogger('prover-node');
45
52
 
46
53
  await trySnapshotSync(config, log);
47
54
 
48
- const archiver = deps.archiver ?? (await createArchiver(config, blobSinkClient, { blockUntilSync: true }, telemetry));
55
+ const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config);
56
+
57
+ const archiver =
58
+ deps.archiver ??
59
+ (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
49
60
  log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`);
50
61
 
51
62
  const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false };
@@ -66,20 +77,32 @@ export async function createProverNode(
66
77
 
67
78
  const rollupContract = new RollupContract(l1Client, config.l1Contracts.rollupAddress.toString());
68
79
 
69
- const l1TxUtils = deps.l1TxUtils ?? new L1TxUtils(l1Client, log, config);
80
+ const l1TxUtils = deps.l1TxUtils ?? new L1TxUtils(l1Client, log, deps.dateProvider, config);
70
81
  const publisher = deps.publisher ?? new ProverNodePublisher(config, { telemetry, rollupContract, l1TxUtils });
71
82
 
72
- const epochCache = await EpochCache.create(config.l1Contracts.rollupAddress, config);
83
+ const proofVerifier = new QueuedIVCVerifier(
84
+ config,
85
+ config.realProofs ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(),
86
+ );
73
87
 
74
- // If config.p2pEnabled is true, createProverCoordination will create a p2p client where txs are requested
75
- // If config.proverCoordinationNodeUrls is not empty, createProverCoordination will create set of aztec node clients from which txs are requested
76
- const proverCoordination = await createProverCoordination(config, {
77
- aztecNodeTxProvider: deps.aztecNodeTxProvider,
78
- worldStateSynchronizer,
88
+ const p2pClient = await createP2PClient(
89
+ P2PClientType.Prover,
90
+ config,
79
91
  archiver,
92
+ proofVerifier,
93
+ worldStateSynchronizer,
80
94
  epochCache,
95
+ getPackageVersion() ?? '',
96
+ dateProvider,
81
97
  telemetry,
82
- });
98
+ {
99
+ txCollectionNodeSources: deps.aztecNodeTxProvider
100
+ ? [new NodeRpcTxSource(deps.aztecNodeTxProvider, 'TestNode')]
101
+ : [],
102
+ },
103
+ );
104
+
105
+ await p2pClient.start();
83
106
 
84
107
  const proverNodeConfig = {
85
108
  ...pick(
@@ -110,7 +133,7 @@ export async function createProverNode(
110
133
  archiver,
111
134
  archiver,
112
135
  worldStateSynchronizer,
113
- proverCoordination,
136
+ p2pClient,
114
137
  epochMonitor,
115
138
  proverNodeConfig,
116
139
  telemetry,
@@ -1,15 +1,16 @@
1
1
  import { Fr } from '@aztec/foundation/fields';
2
2
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
3
- import { L2Block } from '@aztec/stdlib/block';
3
+ import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
4
4
  import { BlockHeader, Tx } from '@aztec/stdlib/tx';
5
5
 
6
6
  /** All data from an epoch used in proving. */
7
7
  export type EpochProvingJobData = {
8
8
  epochNumber: bigint;
9
9
  blocks: L2Block[];
10
- txs: Tx[];
10
+ txs: Map<string, Tx>;
11
11
  l1ToL2Messages: Record<number, Fr[]>;
12
12
  previousBlockHeader: BlockHeader;
13
+ attestations: CommitteeAttestation[];
13
14
  };
14
15
 
15
16
  export function validateEpochProvingJobData(data: EpochProvingJobData) {
@@ -30,12 +31,13 @@ export function validateEpochProvingJobData(data: EpochProvingJobData) {
30
31
 
31
32
  export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer {
32
33
  const blocks = data.blocks.map(block => block.toBuffer());
33
- const txs = data.txs.map(tx => tx.toBuffer());
34
+ const txs = Array.from(data.txs.values()).map(tx => tx.toBuffer());
34
35
  const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages]) => [
35
36
  Number(blockNumber),
36
37
  messages.length,
37
38
  ...messages,
38
39
  ]);
40
+ const attestations = data.attestations.map(attestation => attestation.toBuffer());
39
41
 
40
42
  return serializeToBuffer(
41
43
  Number(data.epochNumber),
@@ -46,6 +48,8 @@ export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer
46
48
  ...txs,
47
49
  l1ToL2Messages.length,
48
50
  ...l1ToL2Messages,
51
+ attestations.length,
52
+ ...attestations,
49
53
  );
50
54
  }
51
55
 
@@ -54,7 +58,7 @@ export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData
54
58
  const epochNumber = BigInt(reader.readNumber());
55
59
  const previousBlockHeader = reader.readObject(BlockHeader);
56
60
  const blocks = reader.readVector(L2Block);
57
- const txs = reader.readVector(Tx);
61
+ const txArray = reader.readVector(Tx);
58
62
 
59
63
  const l1ToL2MessageBlockCount = reader.readNumber();
60
64
  const l1ToL2Messages: Record<number, Fr[]> = {};
@@ -64,5 +68,9 @@ export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData
64
68
  l1ToL2Messages[blockNumber] = messages;
65
69
  }
66
70
 
67
- return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages };
71
+ const attestations = reader.readVector(CommitteeAttestation);
72
+
73
+ const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
74
+
75
+ return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages, attestations };
68
76
  }
@@ -84,6 +84,10 @@ export class EpochProvingJob implements Traceable {
84
84
  return this.data.txs;
85
85
  }
86
86
 
87
+ private get attestations() {
88
+ return this.data.attestations;
89
+ }
90
+
87
91
  /**
88
92
  * Proves the given epoch and submits the proof to L1.
89
93
  */
@@ -96,6 +100,7 @@ export class EpochProvingJob implements Traceable {
96
100
  await this.scheduleEpochCheck();
97
101
  }
98
102
 
103
+ const attestations = this.attestations.map(attestation => attestation.toViem());
99
104
  const epochNumber = Number(this.epochNumber);
100
105
  const epochSizeBlocks = this.blocks.length;
101
106
  const epochSizeTxs = this.blocks.reduce((total, current) => total + current.body.txEffects.length, 0);
@@ -120,13 +125,13 @@ export class EpochProvingJob implements Traceable {
120
125
 
121
126
  const finalBlobBatchingChallenges = await BatchedBlob.precomputeBatchedBlobChallenges(allBlobs);
122
127
  this.prover.startNewEpoch(epochNumber, fromBlock, epochSizeBlocks, finalBlobBatchingChallenges);
123
- await this.prover.startTubeCircuits(this.txs);
128
+ await this.prover.startTubeCircuits(Array.from(this.txs.values()));
124
129
 
125
130
  await asyncPool(this.config.parallelBlockLimit ?? 32, this.blocks, async block => {
126
131
  this.checkState();
127
132
 
128
133
  const globalVariables = block.header.globalVariables;
129
- const txs = await this.getTxs(block);
134
+ const txs = this.getTxs(block);
130
135
  const l1ToL2Messages = this.getL1ToL2Messages(block);
131
136
  const previousHeader = this.getBlockHeader(block.number - 1)!;
132
137
 
@@ -168,6 +173,7 @@ export class EpochProvingJob implements Traceable {
168
173
  this.log.info(`Finalised proof for epoch ${epochNumber}`, { epochNumber, uuid: this.uuid, duration: timer.ms() });
169
174
 
170
175
  this.progressState('publishing-proof');
176
+
171
177
  const success = await this.publisher.submitEpochProof({
172
178
  fromBlock,
173
179
  toBlock,
@@ -175,6 +181,7 @@ export class EpochProvingJob implements Traceable {
175
181
  publicInputs,
176
182
  proof,
177
183
  batchedBlobInputs,
184
+ attestations,
178
185
  });
179
186
  if (!success) {
180
187
  throw new Error('Failed to submit epoch proof to L1');
@@ -299,12 +306,8 @@ export class EpochProvingJob implements Traceable {
299
306
  );
300
307
  }
301
308
 
302
- private async getTxs(block: L2Block): Promise<Tx[]> {
303
- const txHashes = block.body.txEffects.map(tx => tx.txHash.toBigInt());
304
- const txsAndHashes = await Promise.all(this.txs.map(async tx => ({ tx, hash: await tx.getTxHash() })));
305
- return txsAndHashes
306
- .filter(txAndHash => txHashes.includes(txAndHash.hash.toBigInt()))
307
- .map(txAndHash => txAndHash.tx);
309
+ private getTxs(block: L2Block): Tx[] {
310
+ return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
308
311
  }
309
312
 
310
313
  private getL1ToL2Messages(block: L2Block) {
@@ -1,6 +1,11 @@
1
1
  import { type BatchedBlob, FinalBlobAccumulatorPublicInputs } from '@aztec/blob-lib';
2
2
  import { AZTEC_MAX_EPOCH_DURATION } from '@aztec/constants';
3
- import type { L1TxUtils, RollupContract } from '@aztec/ethereum';
3
+ import {
4
+ type L1TxUtils,
5
+ type RollupContract,
6
+ RollupContract as RollupContractClass,
7
+ type ViemCommitteeAttestation,
8
+ } from '@aztec/ethereum';
4
9
  import { makeTuple } from '@aztec/foundation/array';
5
10
  import { areArraysEqual } from '@aztec/foundation/collection';
6
11
  import { EthAddress } from '@aztec/foundation/eth-address';
@@ -21,9 +26,6 @@ import { type Hex, type TransactionReceipt, encodeFunctionData } from 'viem';
21
26
 
22
27
  import { ProverNodePublisherMetrics } from './metrics.js';
23
28
 
24
- /**
25
- * Stats for a sent transaction.
26
- */
27
29
  /** Arguments to the submitEpochProof method of the rollup contract */
28
30
  export type L1SubmitEpochProofArgs = {
29
31
  epochSize: number;
@@ -97,6 +99,7 @@ export class ProverNodePublisher {
97
99
  publicInputs: RootRollupPublicInputs;
98
100
  proof: Proof;
99
101
  batchedBlobInputs: BatchedBlob;
102
+ attestations: ViemCommitteeAttestation[];
100
103
  }): Promise<boolean> {
101
104
  const { epochNumber, fromBlock, toBlock } = args;
102
105
  const ctx = { epochNumber, fromBlock, toBlock };
@@ -136,11 +139,11 @@ export class ProverNodePublisher {
136
139
  }
137
140
 
138
141
  this.metrics.recordFailedTx();
139
- this.log.error(`Rollup.submitEpochProof tx status failed: ${txReceipt.transactionHash}`, ctx);
142
+ this.log.error(`Rollup.submitEpochProof tx status failed ${txReceipt.transactionHash}`, undefined, ctx);
140
143
  await this.sleepOrInterrupted();
141
144
  }
142
145
 
143
- this.log.verbose('L2 block data syncing interrupted while processing blocks.', ctx);
146
+ this.log.verbose('L2 block data syncing interrupted', ctx);
144
147
  return false;
145
148
  }
146
149
 
@@ -150,6 +153,7 @@ export class ProverNodePublisher {
150
153
  publicInputs: RootRollupPublicInputs;
151
154
  proof: Proof;
152
155
  batchedBlobInputs: BatchedBlob;
156
+ attestations: ViemCommitteeAttestation[];
153
157
  }) {
154
158
  const { fromBlock, toBlock, publicInputs, batchedBlobInputs } = args;
155
159
 
@@ -207,6 +211,7 @@ export class ProverNodePublisher {
207
211
  publicInputs: RootRollupPublicInputs;
208
212
  proof: Proof;
209
213
  batchedBlobInputs: BatchedBlob;
214
+ attestations: ViemCommitteeAttestation[];
210
215
  }): Promise<TransactionReceipt | undefined> {
211
216
  const txArgs = [this.getSubmitEpochProofArgs(args)] as const;
212
217
 
@@ -246,6 +251,7 @@ export class ProverNodePublisher {
246
251
  toBlock: number;
247
252
  publicInputs: RootRollupPublicInputs;
248
253
  batchedBlobInputs: BatchedBlob;
254
+ attestations: ViemCommitteeAttestation[];
249
255
  }) {
250
256
  // Returns arguments for EpochProofLib.sol -> getEpochProofPublicInputs()
251
257
  return [
@@ -271,6 +277,7 @@ export class ProverNodePublisher {
271
277
  publicInputs: RootRollupPublicInputs;
272
278
  proof: Proof;
273
279
  batchedBlobInputs: BatchedBlob;
280
+ attestations: ViemCommitteeAttestation[];
274
281
  }) {
275
282
  // Returns arguments for EpochProofLib.sol -> submitEpochRootProof()
276
283
  const proofHex: Hex = `0x${args.proof.withoutPublicInputs().toString('hex')}`;
@@ -280,6 +287,7 @@ export class ProverNodePublisher {
280
287
  end: argsArray[1],
281
288
  args: argsArray[2],
282
289
  fees: argsArray[3],
290
+ attestations: RollupContractClass.packAttestations(args.attestations),
283
291
  blobInputs: argsArray[4],
284
292
  proof: proofHex,
285
293
  };
@@ -5,9 +5,9 @@ import { memoize } from '@aztec/foundation/decorators';
5
5
  import { EthAddress } from '@aztec/foundation/eth-address';
6
6
  import type { Fr } from '@aztec/foundation/fields';
7
7
  import { createLogger } from '@aztec/foundation/log';
8
- import { RunningPromise } from '@aztec/foundation/running-promise';
9
8
  import { DateProvider } from '@aztec/foundation/timer';
10
9
  import type { DataStoreConfig } from '@aztec/kv-store/config';
10
+ import type { P2PClient } from '@aztec/p2p';
11
11
  import { PublicProcessorFactory } from '@aztec/simulator/server';
12
12
  import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
13
13
  import type { ChainConfig } from '@aztec/stdlib/config';
@@ -16,7 +16,6 @@ import { getProofSubmissionDeadlineTimestamp } from '@aztec/stdlib/epoch-helpers
16
16
  import {
17
17
  type EpochProverManager,
18
18
  EpochProvingJobTerminalState,
19
- type ProverCoordination,
20
19
  type ProverNodeApi,
21
20
  type Service,
22
21
  type WorldStateSyncStatus,
@@ -24,7 +23,8 @@ import {
24
23
  tryStop,
25
24
  } from '@aztec/stdlib/interfaces/server';
26
25
  import type { L1ToL2MessageSource } from '@aztec/stdlib/messaging';
27
- import type { TxHash } from '@aztec/stdlib/tx';
26
+ import type { P2PClientType } from '@aztec/stdlib/p2p';
27
+ import type { Tx } from '@aztec/stdlib/tx';
28
28
  import {
29
29
  Attributes,
30
30
  L1Metrics,
@@ -61,9 +61,6 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
61
61
  private rewardsMetrics: ProverNodeRewardsMetrics;
62
62
  private l1Metrics: L1Metrics;
63
63
 
64
- private txFetcher: RunningPromise;
65
- private lastBlockNumber: number | undefined;
66
-
67
64
  public readonly tracer: Tracer;
68
65
 
69
66
  constructor(
@@ -73,7 +70,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
73
70
  protected readonly l1ToL2MessageSource: L1ToL2MessageSource,
74
71
  protected readonly contractDataSource: ContractDataSource,
75
72
  protected readonly worldState: WorldStateSynchronizer,
76
- protected readonly coordination: ProverCoordination & Partial<Service>,
73
+ protected readonly p2pClient: Pick<P2PClient<P2PClientType.Prover>, 'getTxProvider'> & Partial<Service>,
77
74
  protected readonly epochsMonitor: EpochMonitor,
78
75
  config: Partial<ProverNodeOptions> = {},
79
76
  protected readonly telemetryClient: TelemetryClient = getTelemetryClient(),
@@ -91,6 +88,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
91
88
  txGatheringIntervalMs: 1_000,
92
89
  txGatheringBatchSize: 10,
93
90
  txGatheringMaxParallelRequestsPerNode: 100,
91
+ txGatheringTimeoutMs: 120_000,
94
92
  proverNodeFailedEpochStore: undefined,
95
93
  ...compact(config),
96
94
  };
@@ -107,8 +105,6 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
107
105
  EthAddress.fromField(this.prover.getProverId()),
108
106
  this.publisher.getRollupContract(),
109
107
  );
110
-
111
- this.txFetcher = new RunningPromise(() => this.checkForTxs(), this.log, this.config.txGatheringIntervalMs);
112
108
  }
113
109
 
114
110
  public getProverId() {
@@ -116,7 +112,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
116
112
  }
117
113
 
118
114
  public getP2P() {
119
- return this.coordination.getP2PClient();
115
+ return this.p2pClient;
120
116
  }
121
117
 
122
118
  /**
@@ -153,7 +149,6 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
153
149
  * starts proving jobs for them.
154
150
  */
155
151
  async start() {
156
- this.txFetcher.start();
157
152
  this.epochsMonitor.start(this);
158
153
  this.l1Metrics.start();
159
154
  await this.rewardsMetrics.start();
@@ -165,14 +160,13 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
165
160
  */
166
161
  async stop() {
167
162
  this.log.info('Stopping ProverNode');
168
- await this.txFetcher.stop();
169
163
  await this.epochsMonitor.stop();
170
164
  await this.prover.stop();
165
+ await tryStop(this.p2pClient);
171
166
  await tryStop(this.l2BlockSource);
172
167
  this.publisher.interrupt();
173
168
  await Promise.all(Array.from(this.jobs.values()).map(job => job.stop()));
174
169
  await this.worldState.stop();
175
- await tryStop(this.coordination);
176
170
  this.l1Metrics.stop();
177
171
  this.rewardsMetrics.stop();
178
172
  await this.telemetryClient.stop();
@@ -307,30 +301,17 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
307
301
  return this.l2BlockSource.getL1Constants();
308
302
  }
309
303
 
310
- /** Monitors for new blocks and requests their txs from the p2p layer to ensure they are available for proving. */
311
- @trackSpan('ProverNode.checkForTxs')
312
- private async checkForTxs() {
313
- const blockNumber = await this.l2BlockSource.getBlockNumber();
314
- if (this.lastBlockNumber === undefined || blockNumber > this.lastBlockNumber) {
315
- const block = await this.l2BlockSource.getBlock(blockNumber);
316
- if (!block) {
317
- return;
318
- }
319
- const txHashes = block.body.txEffects.map(tx => tx.txHash);
320
- this.log.verbose(`Fetching ${txHashes.length} tx hashes for block number ${blockNumber} from coordination`);
321
- await this.coordination.gatherTxs(txHashes); // This stores the txs in the tx pool, no need to persist them here
322
- this.lastBlockNumber = blockNumber;
323
- }
324
- }
325
-
326
304
  @trackSpan('ProverNode.gatherEpochData', epochNumber => ({ [Attributes.EPOCH_NUMBER]: Number(epochNumber) }))
327
305
  private async gatherEpochData(epochNumber: bigint): Promise<EpochProvingJobData> {
328
306
  const blocks = await this.gatherBlocks(epochNumber);
329
- const txs = await this.gatherTxs(epochNumber, blocks);
307
+ const txArray = await this.gatherTxs(epochNumber, blocks);
308
+ const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));
330
309
  const l1ToL2Messages = await this.gatherMessages(epochNumber, blocks);
331
310
  const previousBlockHeader = await this.gatherPreviousBlockHeader(epochNumber, blocks[0]);
311
+ const [lastBlock] = await this.l2BlockSource.getPublishedBlocks(blocks.at(-1)!.number, 1);
312
+ const attestations = lastBlock?.attestations ?? [];
332
313
 
333
- return { blocks, txs, l1ToL2Messages, epochNumber, previousBlockHeader };
314
+ return { blocks, txs, l1ToL2Messages, epochNumber, previousBlockHeader, attestations };
334
315
  }
335
316
 
336
317
  private async gatherBlocks(epochNumber: bigint) {
@@ -342,20 +323,18 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
342
323
  }
343
324
 
344
325
  private async gatherTxs(epochNumber: bigint, blocks: L2Block[]) {
345
- const txsToFind: TxHash[] = blocks.flatMap(block => block.body.txEffects.map(tx => tx.txHash));
346
- const txs = await this.coordination.getTxsByHash(txsToFind);
326
+ const deadline = new Date(this.dateProvider.now() + this.config.txGatheringTimeoutMs);
327
+ const txProvider = this.p2pClient.getTxProvider();
328
+ const txsByBlock = await Promise.all(blocks.map(block => txProvider.getTxsForBlock(block, { deadline })));
329
+ const txs = txsByBlock.map(({ txs }) => txs).flat();
330
+ const missingTxs = txsByBlock.map(({ missingTxs }) => missingTxs).flat();
347
331
 
348
- if (txs.length === txsToFind.length) {
332
+ if (missingTxs.length === 0) {
349
333
  this.log.verbose(`Gathered all ${txs.length} txs for epoch ${epochNumber}`, { epochNumber });
350
334
  return txs;
351
335
  }
352
336
 
353
- const txHashesFound = await Promise.all(txs.map(tx => tx.getTxHash()));
354
- const missingTxHashes = txsToFind
355
- .filter(txHashToFind => !txHashesFound.some(txHashFound => txHashToFind.equals(txHashFound)))
356
- .join(', ');
357
-
358
- throw new Error(`Txs not found for epoch ${epochNumber}: ${missingTxHashes}`);
337
+ throw new Error(`Txs not found for epoch ${epochNumber}: ${missingTxs.map(hash => hash.toString()).join(', ')}`);
359
338
  }
360
339
 
361
340
  private async gatherMessages(epochNumber: bigint, blocks: L2Block[]) {
@@ -1,22 +0,0 @@
1
- import type { P2P } from '@aztec/p2p';
2
- import type { P2PClient, ProverCoordination } from '@aztec/stdlib/interfaces/server';
3
- import { type Tx, TxHash } from '@aztec/stdlib/tx';
4
- export type CombinedCoordinationOptions = {
5
- txGatheringBatchSize: number;
6
- txGatheringMaxParallelRequestsPerNode: number;
7
- };
8
- export interface TxSource {
9
- getTxsByHash(txHashes: TxHash[]): Promise<(Tx | undefined)[]>;
10
- }
11
- export declare class CombinedProverCoordination implements ProverCoordination {
12
- #private;
13
- readonly p2p: P2P | undefined;
14
- readonly aztecNodes: TxSource[];
15
- private readonly options;
16
- private readonly log;
17
- constructor(p2p: P2P | undefined, aztecNodes: TxSource[], options?: CombinedCoordinationOptions, log?: import("@aztec/foundation/log").Logger);
18
- getP2PClient(): P2PClient | undefined;
19
- getTxsByHash(txHashes: TxHash[]): Promise<Tx[]>;
20
- gatherTxs(txHashes: TxHash[]): Promise<void>;
21
- }
22
- //# sourceMappingURL=combined-prover-coordination.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"combined-prover-coordination.d.ts","sourceRoot":"","sources":["../../src/prover-coordination/combined-prover-coordination.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,GAAG,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,SAAS,EAAE,kBAAkB,EAAE,MAAM,iCAAiC,CAAC;AACrF,OAAO,EAAE,KAAK,EAAE,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAEnD,MAAM,MAAM,2BAA2B,GAAG;IAExC,oBAAoB,EAAE,MAAM,CAAC;IAC7B,qCAAqC,EAAE,MAAM,CAAC;CAC/C,CAAC;AASF,MAAM,WAAW,QAAQ;IACvB,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,CAAC,EAAE,GAAG,SAAS,CAAC,EAAE,CAAC,CAAC;CAC/D;AAuCD,qBAAa,0BAA2B,YAAW,kBAAkB;;aAEjD,GAAG,EAAE,GAAG,GAAG,SAAS;aACpB,UAAU,EAAE,QAAQ,EAAE;IACtC,OAAO,CAAC,QAAQ,CAAC,OAAO;IAIxB,OAAO,CAAC,QAAQ,CAAC,GAAG;gBANJ,GAAG,EAAE,GAAG,GAAG,SAAS,EACpB,UAAU,EAAE,QAAQ,EAAE,EACrB,OAAO,GAAE,2BAGzB,EACgB,GAAG,yCAA2D;IAG1E,YAAY,IAAI,SAAS,GAAG,SAAS;IAI/B,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,EAAE,CAAC;IAY/C,SAAS,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CA4E1D"}