@aztec/prover-node 3.0.3 → 4.0.0-devnet.1-patch.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dest/actions/download-epoch-proving-job.d.ts +1 -1
  2. package/dest/actions/rerun-epoch-proving-job.d.ts +3 -2
  3. package/dest/actions/rerun-epoch-proving-job.d.ts.map +1 -1
  4. package/dest/actions/rerun-epoch-proving-job.js +5 -3
  5. package/dest/factory.d.ts +4 -3
  6. package/dest/factory.d.ts.map +1 -1
  7. package/dest/factory.js +24 -17
  8. package/dest/index.d.ts +2 -1
  9. package/dest/index.d.ts.map +1 -1
  10. package/dest/index.js +1 -0
  11. package/dest/job/epoch-proving-job.d.ts +3 -2
  12. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  13. package/dest/job/epoch-proving-job.js +414 -28
  14. package/dest/metrics.d.ts +1 -1
  15. package/dest/metrics.d.ts.map +1 -1
  16. package/dest/metrics.js +26 -100
  17. package/dest/monitors/epoch-monitor.d.ts +1 -1
  18. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  19. package/dest/monitors/epoch-monitor.js +1 -10
  20. package/dest/prover-node-publisher.d.ts +6 -5
  21. package/dest/prover-node-publisher.d.ts.map +1 -1
  22. package/dest/prover-node-publisher.js +11 -10
  23. package/dest/prover-node.d.ts +2 -2
  24. package/dest/prover-node.d.ts.map +1 -1
  25. package/dest/prover-node.js +400 -22
  26. package/dest/prover-publisher-factory.d.ts +4 -2
  27. package/dest/prover-publisher-factory.d.ts.map +1 -1
  28. package/dest/prover-publisher-factory.js +4 -2
  29. package/package.json +23 -23
  30. package/src/actions/rerun-epoch-proving-job.ts +5 -3
  31. package/src/factory.ts +31 -20
  32. package/src/index.ts +1 -0
  33. package/src/job/epoch-proving-job.ts +43 -23
  34. package/src/metrics.ts +20 -83
  35. package/src/monitors/epoch-monitor.ts +1 -8
  36. package/src/prover-node-publisher.ts +13 -10
  37. package/src/prover-node.ts +3 -1
  38. package/src/prover-publisher-factory.ts +11 -5
package/src/factory.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import { type Archiver, createArchiver } from '@aztec/archiver';
2
2
  import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
3
- import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
3
+ import { createBlobClientWithFileStores } from '@aztec/blob-client/client';
4
4
  import { EpochCache } from '@aztec/epoch-cache';
5
5
  import { createEthereumChain } from '@aztec/ethereum/chain';
6
6
  import { RollupContract } from '@aztec/ethereum/contracts';
@@ -12,8 +12,11 @@ import { DateProvider } from '@aztec/foundation/timer';
12
12
  import type { DataStoreConfig } from '@aztec/kv-store/config';
13
13
  import { type KeyStoreConfig, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore';
14
14
  import { trySnapshotSync } from '@aztec/node-lib/actions';
15
- import { createL1TxUtilsFromEthSignerWithStore } from '@aztec/node-lib/factories';
16
- import { NodeRpcTxSource, createP2PClient } from '@aztec/p2p';
15
+ import {
16
+ createForwarderL1TxUtilsFromEthSigner,
17
+ createL1TxUtilsFromEthSignerWithStore,
18
+ } from '@aztec/node-lib/factories';
19
+ import { NodeRpcTxSource, type P2PClientDeps, createP2PClient } from '@aztec/p2p';
17
20
  import { type ProverClientConfig, createProverClient } from '@aztec/prover-client';
18
21
  import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
19
22
  import type { AztecNode, ProvingJobBroker } from '@aztec/stdlib/interfaces/server';
@@ -36,10 +39,10 @@ export type ProverNodeDeps = {
36
39
  aztecNodeTxProvider?: Pick<AztecNode, 'getTxsByHash'>;
37
40
  archiver?: Archiver;
38
41
  publisherFactory?: ProverPublisherFactory;
39
- blobSinkClient?: BlobSinkClientInterface;
40
42
  broker?: ProvingJobBroker;
41
43
  l1TxUtils?: L1TxUtils;
42
44
  dateProvider?: DateProvider;
45
+ p2pClientDeps?: P2PClientDeps<P2PClientType.Prover>;
43
46
  };
44
47
 
45
48
  /** Creates a new prover node given a config. */
@@ -53,8 +56,7 @@ export async function createProverNode(
53
56
  const config = { ...userConfig };
54
57
  const telemetry = deps.telemetry ?? getTelemetryClient();
55
58
  const dateProvider = deps.dateProvider ?? new DateProvider();
56
- const blobSinkClient =
57
- deps.blobSinkClient ?? createBlobSinkClient(config, { logger: createLogger('prover-node:blob-sink:client') });
59
+ const blobClient = await createBlobClientWithFileStores(config, createLogger('prover-node:blob-client:client'));
58
60
  const log = deps.log ?? createLogger('prover-node');
59
61
 
60
62
  // Build a key store from file if given or from environment otherwise
@@ -105,12 +107,11 @@ export async function createProverNode(
105
107
 
106
108
  const archiver =
107
109
  deps.archiver ??
108
- (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
110
+ (await createArchiver(config, { blobClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
109
111
  log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`);
110
112
 
111
- const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false };
112
113
  const worldStateSynchronizer = await createWorldStateSynchronizer(
113
- worldStateConfig,
114
+ config,
114
115
  archiver,
115
116
  options.prefilledPublicData,
116
117
  telemetry,
@@ -126,7 +127,7 @@ export async function createProverNode(
126
127
 
127
128
  const publicClient = createPublicClient({
128
129
  chain: chain.chainInfo,
129
- transport: fallback(config.l1RpcUrls.map((url: string) => http(url))),
130
+ transport: fallback(config.l1RpcUrls.map((url: string) => http(url, { batch: false }))),
130
131
  pollingInterval: config.viemPollingIntervalMS,
131
132
  });
132
133
 
@@ -134,18 +135,26 @@ export async function createProverNode(
134
135
 
135
136
  const l1TxUtils = deps.l1TxUtils
136
137
  ? [deps.l1TxUtils]
137
- : await createL1TxUtilsFromEthSignerWithStore(
138
- publicClient,
139
- proverSigners.signers,
140
- { ...config, scope: 'prover' },
141
- { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
142
- );
138
+ : config.publisherForwarderAddress
139
+ ? await createForwarderL1TxUtilsFromEthSigner(
140
+ publicClient,
141
+ proverSigners.signers,
142
+ config.publisherForwarderAddress,
143
+ { ...config, scope: 'prover' },
144
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
145
+ )
146
+ : await createL1TxUtilsFromEthSignerWithStore(
147
+ publicClient,
148
+ proverSigners.signers,
149
+ { ...config, scope: 'prover' },
150
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
151
+ );
143
152
 
144
153
  const publisherFactory =
145
154
  deps.publisherFactory ??
146
155
  new ProverPublisherFactory(config, {
147
156
  rollupContract,
148
- publisherManager: new PublisherManager(l1TxUtils, config),
157
+ publisherManager: new PublisherManager(l1TxUtils, config, log.getBindings()),
149
158
  telemetry,
150
159
  });
151
160
 
@@ -167,9 +176,11 @@ export async function createProverNode(
167
176
  dateProvider,
168
177
  telemetry,
169
178
  {
170
- txCollectionNodeSources: deps.aztecNodeTxProvider
171
- ? [new NodeRpcTxSource(deps.aztecNodeTxProvider, 'TestNode')]
172
- : [],
179
+ ...deps.p2pClientDeps,
180
+ txCollectionNodeSources: [
181
+ ...(deps.p2pClientDeps?.txCollectionNodeSources ?? []),
182
+ ...(deps.aztecNodeTxProvider ? [new NodeRpcTxSource(deps.aztecNodeTxProvider, 'TestNode')] : []),
183
+ ],
173
184
  },
174
185
  );
175
186
 
package/src/index.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  export * from './actions/index.js';
2
2
  export * from './config.js';
3
3
  export * from './factory.js';
4
+ export * from './monitors/index.js';
4
5
  export * from './prover-node-publisher.js';
5
6
  export * from './prover-node.js';
@@ -3,7 +3,7 @@ import { asyncPool } from '@aztec/foundation/async-pool';
3
3
  import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
4
4
  import { padArrayEnd } from '@aztec/foundation/collection';
5
5
  import { Fr } from '@aztec/foundation/curves/bn254';
6
- import { createLogger } from '@aztec/foundation/log';
6
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
7
7
  import { RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
8
8
  import { Timer } from '@aztec/foundation/timer';
9
9
  import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
@@ -11,7 +11,7 @@ import { protocolContractsHash } from '@aztec/protocol-contracts';
11
11
  import { buildFinalBlobChallenges } from '@aztec/prover-client/helpers';
12
12
  import type { PublicProcessor, PublicProcessorFactory } from '@aztec/simulator/server';
13
13
  import { PublicSimulatorConfig } from '@aztec/stdlib/avm';
14
- import type { L2BlockNew, L2BlockSource } from '@aztec/stdlib/block';
14
+ import type { L2Block, L2BlockSource } from '@aztec/stdlib/block';
15
15
  import type { Checkpoint } from '@aztec/stdlib/checkpoint';
16
16
  import {
17
17
  type EpochProver,
@@ -43,7 +43,7 @@ export type EpochProvingJobOptions = {
43
43
  */
44
44
  export class EpochProvingJob implements Traceable {
45
45
  private state: EpochProvingJobState = 'initialized';
46
- private log = createLogger('prover-node:epoch-proving-job');
46
+ private log: Logger;
47
47
  private uuid: string;
48
48
 
49
49
  private runPromise: Promise<void> | undefined;
@@ -62,9 +62,14 @@ export class EpochProvingJob implements Traceable {
62
62
  private metrics: ProverNodeJobMetrics,
63
63
  private deadline: Date | undefined,
64
64
  private config: EpochProvingJobOptions,
65
+ bindings?: LoggerBindings,
65
66
  ) {
66
67
  validateEpochProvingJobData(data);
67
68
  this.uuid = crypto.randomUUID();
69
+ this.log = createLogger('prover-node:epoch-proving-job', {
70
+ ...bindings,
71
+ instanceId: `epoch-${data.epochNumber}`,
72
+ });
68
73
  this.tracer = metrics.tracer;
69
74
  }
70
75
 
@@ -188,7 +193,8 @@ export class EpochProvingJob implements Traceable {
188
193
  previousHeader,
189
194
  );
190
195
 
191
- for (const block of checkpoint.blocks) {
196
+ for (let blockIndex = 0; blockIndex < checkpoint.blocks.length; blockIndex++) {
197
+ const block = checkpoint.blocks[blockIndex];
192
198
  const globalVariables = block.header.globalVariables;
193
199
  const txs = this.getTxs(block);
194
200
 
@@ -206,8 +212,12 @@ export class EpochProvingJob implements Traceable {
206
212
  // Start block proving
207
213
  await this.prover.startNewBlock(block.number, globalVariables.timestamp, txs.length);
208
214
 
209
- // Process public fns
210
- const db = await this.createFork(BlockNumber(block.number - 1), l1ToL2Messages);
215
+ // Process public fns. L1 to L2 messages are only inserted for the first block of a checkpoint,
216
+ // as the fork for subsequent blocks already includes them from the previous block's synced state.
217
+ const db = await this.createFork(
218
+ BlockNumber(block.number - 1),
219
+ blockIndex === 0 ? l1ToL2Messages : undefined,
220
+ );
211
221
  const config = PublicSimulatorConfig.from({
212
222
  proverId: this.prover.getProverId().toField(),
213
223
  skipFeeEnforcement: false,
@@ -290,22 +300,29 @@ export class EpochProvingJob implements Traceable {
290
300
  }
291
301
 
292
302
  /**
293
- * Create a new db fork for tx processing, inserting all L1 to L2.
303
+ * Create a new db fork for tx processing, optionally inserting L1 to L2 messages.
304
+ * L1 to L2 messages should only be inserted for the first block in a checkpoint,
305
+ * as subsequent blocks' synced state already includes them.
294
306
  * REFACTOR: The prover already spawns a db fork of its own for each block, so we may be able to do away with just one fork.
295
307
  */
296
- private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[]) {
308
+ private async createFork(blockNumber: BlockNumber, l1ToL2Messages: Fr[] | undefined) {
309
+ this.log.verbose(`Creating fork at ${blockNumber}`, { blockNumber });
297
310
  const db = await this.dbProvider.fork(blockNumber);
298
- const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
299
- l1ToL2Messages,
300
- Fr.ZERO,
301
- NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
302
- 'Too many L1 to L2 messages',
303
- );
304
- this.log.verbose(`Creating fork at ${blockNumber} with ${l1ToL2Messages.length} L1 to L2 messages`, {
305
- blockNumber,
306
- l1ToL2Messages: l1ToL2Messages.map(m => m.toString()),
307
- });
308
- await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
311
+
312
+ if (l1ToL2Messages !== undefined) {
313
+ this.log.verbose(`Inserting ${l1ToL2Messages.length} L1 to L2 messages in fork`, {
314
+ blockNumber,
315
+ l1ToL2Messages: l1ToL2Messages.map(m => m.toString()),
316
+ });
317
+ const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
318
+ l1ToL2Messages,
319
+ Fr.ZERO,
320
+ NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
321
+ 'Too many L1 to L2 messages',
322
+ );
323
+ await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
324
+ }
325
+
309
326
  return db;
310
327
  }
311
328
 
@@ -362,11 +379,14 @@ export class EpochProvingJob implements Traceable {
362
379
  const intervalMs = Math.ceil((await l2BlockSource.getL1Constants()).ethereumSlotDuration / 2) * 1000;
363
380
  this.epochCheckPromise = new RunningPromise(
364
381
  async () => {
365
- const blocks = await l2BlockSource.getBlockHeadersForEpoch(this.epochNumber);
366
- const blockHashes = await Promise.all(blocks.map(block => block.hash()));
382
+ const blockHeaders = await l2BlockSource.getCheckpointedBlockHeadersForEpoch(this.epochNumber);
383
+ const blockHashes = await Promise.all(blockHeaders.map(header => header.hash()));
367
384
  const thisBlocks = this.checkpoints.flatMap(checkpoint => checkpoint.blocks);
368
385
  const thisBlockHashes = await Promise.all(thisBlocks.map(block => block.hash()));
369
- if (blocks.length !== thisBlocks.length || !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))) {
386
+ if (
387
+ blockHeaders.length !== thisBlocks.length ||
388
+ !blockHashes.every((block, i) => block.equals(thisBlockHashes[i]))
389
+ ) {
370
390
  this.log.warn('Epoch blocks changed underfoot', {
371
391
  uuid: this.uuid,
372
392
  epochNumber: this.epochNumber,
@@ -388,7 +408,7 @@ export class EpochProvingJob implements Traceable {
388
408
  return [this.data.previousBlockHeader, ...lastBlocks.map(block => block.header).slice(0, -1)];
389
409
  }
390
410
 
391
- private getTxs(block: L2BlockNew): Tx[] {
411
+ private getTxs(block: L2Block): Tx[] {
392
412
  return block.body.txEffects.map(txEffect => this.txs.get(txEffect.txHash.toString())!);
393
413
  }
394
414
 
package/src/metrics.ts CHANGED
@@ -13,7 +13,7 @@ import {
13
13
  type TelemetryClient,
14
14
  type Tracer,
15
15
  type UpDownCounter,
16
- ValueType,
16
+ createUpDownCounterWithDefault,
17
17
  } from '@aztec/telemetry-client';
18
18
 
19
19
  import { formatEther, formatUnits } from 'viem';
@@ -30,28 +30,11 @@ export class ProverNodeJobMetrics {
30
30
  public readonly tracer: Tracer,
31
31
  private logger = createLogger('prover-node:publisher:metrics'),
32
32
  ) {
33
- this.proverEpochExecutionDuration = this.meter.createHistogram(Metrics.PROVER_NODE_EXECUTION_DURATION, {
34
- description: 'Duration of execution of an epoch by the prover',
35
- unit: 'ms',
36
- valueType: ValueType.INT,
37
- });
38
- this.provingJobDuration = this.meter.createHistogram(Metrics.PROVER_NODE_JOB_DURATION, {
39
- description: 'Duration of proving job',
40
- unit: 's',
41
- valueType: ValueType.DOUBLE,
42
- });
43
- this.provingJobCheckpoints = this.meter.createGauge(Metrics.PROVER_NODE_JOB_CHECKPOINTS, {
44
- description: 'Number of checkpoints in a proven epoch',
45
- valueType: ValueType.INT,
46
- });
47
- this.provingJobBlocks = this.meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS, {
48
- description: 'Number of blocks in a proven epoch',
49
- valueType: ValueType.INT,
50
- });
51
- this.provingJobTransactions = this.meter.createGauge(Metrics.PROVER_NODE_JOB_TRANSACTIONS, {
52
- description: 'Number of transactions in a proven epoch',
53
- valueType: ValueType.INT,
54
- });
33
+ this.proverEpochExecutionDuration = this.meter.createHistogram(Metrics.PROVER_NODE_EXECUTION_DURATION);
34
+ this.provingJobDuration = this.meter.createHistogram(Metrics.PROVER_NODE_JOB_DURATION);
35
+ this.provingJobCheckpoints = this.meter.createGauge(Metrics.PROVER_NODE_JOB_CHECKPOINTS);
36
+ this.provingJobBlocks = this.meter.createGauge(Metrics.PROVER_NODE_JOB_BLOCKS);
37
+ this.provingJobTransactions = this.meter.createGauge(Metrics.PROVER_NODE_JOB_TRANSACTIONS);
55
38
  }
56
39
 
57
40
  public recordProvingJob(
@@ -81,15 +64,9 @@ export class ProverNodeRewardsMetrics {
81
64
  private rollup: RollupContract,
82
65
  private logger = createLogger('prover-node:publisher:metrics'),
83
66
  ) {
84
- this.rewards = this.meter.createObservableGauge(Metrics.PROVER_NODE_REWARDS_PER_EPOCH, {
85
- valueType: ValueType.DOUBLE,
86
- description: 'The rewards earned',
87
- });
67
+ this.rewards = this.meter.createObservableGauge(Metrics.PROVER_NODE_REWARDS_PER_EPOCH);
88
68
 
89
- this.accumulatedRewards = this.meter.createUpDownCounter(Metrics.PROVER_NODE_REWARDS_TOTAL, {
90
- valueType: ValueType.DOUBLE,
91
- description: 'The rewards earned (total)',
92
- });
69
+ this.accumulatedRewards = createUpDownCounterWithDefault(this.meter, Metrics.PROVER_NODE_REWARDS_TOTAL);
93
70
  }
94
71
 
95
72
  public async start() {
@@ -150,68 +127,28 @@ export class ProverNodePublisherMetrics {
150
127
  ) {
151
128
  this.meter = client.getMeter(name);
152
129
 
153
- this.gasPrice = this.meter.createHistogram(Metrics.L1_PUBLISHER_GAS_PRICE, {
154
- description: 'The gas price used for transactions',
155
- unit: 'gwei',
156
- valueType: ValueType.DOUBLE,
157
- });
130
+ this.gasPrice = this.meter.createHistogram(Metrics.L1_PUBLISHER_GAS_PRICE);
158
131
 
159
- this.txCount = this.meter.createUpDownCounter(Metrics.L1_PUBLISHER_TX_COUNT, {
160
- description: 'The number of transactions processed',
132
+ this.txCount = createUpDownCounterWithDefault(this.meter, Metrics.L1_PUBLISHER_TX_COUNT, {
133
+ [Attributes.L1_TX_TYPE]: ['submitProof'],
134
+ [Attributes.OK]: [true, false],
161
135
  });
162
136
 
163
- this.txDuration = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_DURATION, {
164
- description: 'The duration of transaction processing',
165
- unit: 'ms',
166
- valueType: ValueType.INT,
167
- });
137
+ this.txDuration = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_DURATION);
168
138
 
169
- this.txGas = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_GAS, {
170
- description: 'The gas consumed by transactions',
171
- unit: 'gas',
172
- valueType: ValueType.INT,
173
- });
139
+ this.txGas = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_GAS);
174
140
 
175
- this.txCalldataSize = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_SIZE, {
176
- description: 'The size of the calldata in transactions',
177
- unit: 'By',
178
- valueType: ValueType.INT,
179
- });
141
+ this.txCalldataSize = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_SIZE);
180
142
 
181
- this.txCalldataGas = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_GAS, {
182
- description: 'The gas consumed by the calldata in transactions',
183
- unit: 'gas',
184
- valueType: ValueType.INT,
185
- });
143
+ this.txCalldataGas = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_CALLDATA_GAS);
186
144
 
187
- this.txBlobDataGasUsed = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_USED, {
188
- description: 'The amount of blob gas used in transactions',
189
- unit: 'gas',
190
- valueType: ValueType.INT,
191
- });
145
+ this.txBlobDataGasUsed = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_USED);
192
146
 
193
- this.txBlobDataGasCost = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_COST, {
194
- description: 'The gas cost of blobs in transactions',
195
- unit: 'gwei',
196
- valueType: ValueType.INT,
197
- });
147
+ this.txBlobDataGasCost = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_BLOBDATA_GAS_COST);
198
148
 
199
- this.txTotalFee = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_TOTAL_FEE, {
200
- description: 'How much L1 tx costs',
201
- unit: 'gwei',
202
- valueType: ValueType.DOUBLE,
203
- advice: {
204
- explicitBucketBoundaries: [
205
- 0.001, 0.002, 0.004, 0.008, 0.01, 0.02, 0.04, 0.08, 0.1, 0.2, 0.4, 0.8, 1, 1.2, 1.4, 1.8, 2,
206
- ],
207
- },
208
- });
149
+ this.txTotalFee = this.meter.createHistogram(Metrics.L1_PUBLISHER_TX_TOTAL_FEE);
209
150
 
210
- this.senderBalance = this.meter.createGauge(Metrics.L1_PUBLISHER_BALANCE, {
211
- unit: 'eth',
212
- description: 'The balance of the sender address',
213
- valueType: ValueType.DOUBLE,
214
- });
151
+ this.senderBalance = this.meter.createGauge(Metrics.L1_PUBLISHER_BALANCE);
215
152
  }
216
153
 
217
154
  recordFailedTx() {
@@ -4,13 +4,7 @@ import { RunningPromise } from '@aztec/foundation/running-promise';
4
4
  import { sleep } from '@aztec/foundation/sleep';
5
5
  import type { L2BlockSource } from '@aztec/stdlib/block';
6
6
  import { type L1RollupConstants, getEpochAtSlot } from '@aztec/stdlib/epoch-helpers';
7
- import {
8
- type TelemetryClient,
9
- type Traceable,
10
- type Tracer,
11
- getTelemetryClient,
12
- trackSpan,
13
- } from '@aztec/telemetry-client';
7
+ import { type TelemetryClient, type Traceable, type Tracer, getTelemetryClient } from '@aztec/telemetry-client';
14
8
 
15
9
  export interface EpochMonitorHandler {
16
10
  handleEpochReadyToProve(epochNumber: EpochNumber): Promise<boolean>;
@@ -73,7 +67,6 @@ export class EpochMonitor implements Traceable {
73
67
  this.log.info('Stopped EpochMonitor');
74
68
  }
75
69
 
76
- @trackSpan('EpochMonitor.work')
77
70
  public async work() {
78
71
  const { epochToProve, blockNumber, slotNumber } = await this.getEpochNumberToProve();
79
72
  this.log.debug(`Epoch to prove: ${epochToProve}`, { blockNumber, slotNumber });
@@ -1,5 +1,5 @@
1
1
  import { BatchedBlob, getEthBlobEvaluationInputs } from '@aztec/blob-lib';
2
- import { AZTEC_MAX_EPOCH_DURATION } from '@aztec/constants';
2
+ import { MAX_CHECKPOINTS_PER_EPOCH } from '@aztec/constants';
3
3
  import type { RollupContract, ViemCommitteeAttestation } from '@aztec/ethereum/contracts';
4
4
  import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils';
5
5
  import { makeTuple } from '@aztec/foundation/array';
@@ -7,7 +7,7 @@ import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
7
7
  import { areArraysEqual } from '@aztec/foundation/collection';
8
8
  import { Fr } from '@aztec/foundation/curves/bn254';
9
9
  import { EthAddress } from '@aztec/foundation/eth-address';
10
- import { createLogger } from '@aztec/foundation/log';
10
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
11
11
  import type { Tuple } from '@aztec/foundation/serialize';
12
12
  import { Timer } from '@aztec/foundation/timer';
13
13
  import { RollupAbi } from '@aztec/l1-artifacts';
@@ -31,7 +31,7 @@ export type L1SubmitEpochProofArgs = {
31
31
  endTimestamp: Fr;
32
32
  outHash: Fr;
33
33
  proverId: Fr;
34
- fees: Tuple<FeeRecipient, typeof AZTEC_MAX_EPOCH_DURATION>;
34
+ fees: Tuple<FeeRecipient, typeof MAX_CHECKPOINTS_PER_EPOCH>;
35
35
  proof: Proof;
36
36
  };
37
37
 
@@ -39,7 +39,7 @@ export class ProverNodePublisher {
39
39
  private interrupted = false;
40
40
  private metrics: ProverNodePublisherMetrics;
41
41
 
42
- protected log = createLogger('prover-node:l1-tx-publisher');
42
+ protected log: Logger;
43
43
 
44
44
  protected rollupContract: RollupContract;
45
45
 
@@ -52,10 +52,12 @@ export class ProverNodePublisher {
52
52
  l1TxUtils: L1TxUtils;
53
53
  telemetry?: TelemetryClient;
54
54
  },
55
+ bindings?: LoggerBindings,
55
56
  ) {
56
57
  const telemetry = deps.telemetry ?? getTelemetryClient();
57
58
 
58
59
  this.metrics = new ProverNodePublisherMetrics(telemetry, 'ProverNode');
60
+ this.log = createLogger('prover-node:l1-tx-publisher', bindings);
59
61
 
60
62
  this.rollupContract = deps.rollupContract;
61
63
  this.l1TxUtils = deps.l1TxUtils;
@@ -172,17 +174,17 @@ export class ProverNodePublisher {
172
174
 
173
175
  // Check the archive for the immediate checkpoint before the epoch
174
176
  const checkpointLog = await this.rollupContract.getCheckpoint(CheckpointNumber(fromCheckpoint - 1));
175
- if (publicInputs.previousArchiveRoot.toString() !== checkpointLog.archive) {
177
+ if (!publicInputs.previousArchiveRoot.equals(checkpointLog.archive)) {
176
178
  throw new Error(
177
- `Previous archive root mismatch: ${publicInputs.previousArchiveRoot.toString()} !== ${checkpointLog.archive}`,
179
+ `Previous archive root mismatch: ${publicInputs.previousArchiveRoot.toString()} !== ${checkpointLog.archive.toString()}`,
178
180
  );
179
181
  }
180
182
 
181
183
  // Check the archive for the last checkpoint in the epoch
182
184
  const endCheckpointLog = await this.rollupContract.getCheckpoint(toCheckpoint);
183
- if (publicInputs.endArchiveRoot.toString() !== endCheckpointLog.archive) {
185
+ if (!publicInputs.endArchiveRoot.equals(endCheckpointLog.archive)) {
184
186
  throw new Error(
185
- `End archive root mismatch: ${publicInputs.endArchiveRoot.toString()} !== ${endCheckpointLog.archive}`,
187
+ `End archive root mismatch: ${publicInputs.endArchiveRoot.toString()} !== ${endCheckpointLog.archive.toString()}`,
186
188
  );
187
189
  }
188
190
 
@@ -200,7 +202,7 @@ export class ProverNodePublisher {
200
202
  );
201
203
  const argsPublicInputs = [...publicInputs.toFields()];
202
204
 
203
- if (!areArraysEqual(rollupPublicInputs.map(Fr.fromHexString), argsPublicInputs, (a, b) => a.equals(b))) {
205
+ if (!areArraysEqual(rollupPublicInputs, argsPublicInputs, (a, b) => a.equals(b))) {
204
206
  const fmt = (inputs: Fr[] | readonly string[]) => inputs.map(x => x.toString()).join(', ');
205
207
  throw new Error(
206
208
  `Root rollup public inputs mismatch:\nRollup: ${fmt(rollupPublicInputs)}\nComputed:${fmt(argsPublicInputs)}`,
@@ -266,9 +268,10 @@ export class ProverNodePublisher {
266
268
  {
267
269
  previousArchive: args.publicInputs.previousArchiveRoot.toString(),
268
270
  endArchive: args.publicInputs.endArchiveRoot.toString(),
271
+ outHash: args.publicInputs.outHash.toString(),
269
272
  proverId: EthAddress.fromField(args.publicInputs.constants.proverId).toString(),
270
273
  } /*_args*/,
271
- makeTuple(AZTEC_MAX_EPOCH_DURATION * 2, i =>
274
+ makeTuple(MAX_CHECKPOINTS_PER_EPOCH * 2, i =>
272
275
  i % 2 === 0
273
276
  ? args.publicInputs.fees[i / 2].recipient.toField().toString()
274
277
  : args.publicInputs.fees[(i - 1) / 2].value.toString(),
@@ -288,6 +288,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
288
288
  this.contractDataSource,
289
289
  this.dateProvider,
290
290
  this.telemetryClient,
291
+ this.log.getBindings(),
291
292
  );
292
293
 
293
294
  // Set deadline for this job to run. It will abort if it takes too long.
@@ -311,7 +312,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
311
312
  const l1ToL2Messages = await this.gatherMessages(epochNumber, checkpoints);
312
313
  const [firstBlock] = checkpoints[0].blocks;
313
314
  const previousBlockHeader = await this.gatherPreviousBlockHeader(epochNumber, firstBlock.number - 1);
314
- const [lastPublishedCheckpoint] = await this.l2BlockSource.getPublishedCheckpoints(checkpoints.at(-1)!.number, 1);
315
+ const [lastPublishedCheckpoint] = await this.l2BlockSource.getCheckpoints(checkpoints.at(-1)!.number, 1);
315
316
  const attestations = lastPublishedCheckpoint?.attestations ?? [];
316
317
 
317
318
  return { checkpoints, txs, l1ToL2Messages, epochNumber, previousBlockHeader, attestations };
@@ -384,6 +385,7 @@ export class ProverNode implements EpochMonitorHandler, ProverNodeApi, Traceable
384
385
  this.jobMetrics,
385
386
  deadline,
386
387
  { parallelBlockLimit, skipSubmitProof: proverNodeDisableProofPublish, ...opts },
388
+ this.log.getBindings(),
387
389
  );
388
390
  }
389
391
 
@@ -1,6 +1,7 @@
1
1
  import type { RollupContract } from '@aztec/ethereum/contracts';
2
2
  import type { L1TxUtils } from '@aztec/ethereum/l1-tx-utils';
3
3
  import type { PublisherManager } from '@aztec/ethereum/publisher-manager';
4
+ import type { LoggerBindings } from '@aztec/foundation/log';
4
5
  import type { PublisherConfig, TxSenderConfig } from '@aztec/sequencer-client';
5
6
  import type { TelemetryClient } from '@aztec/telemetry-client';
6
7
 
@@ -14,6 +15,7 @@ export class ProverPublisherFactory {
14
15
  publisherManager: PublisherManager<L1TxUtils>;
15
16
  telemetry?: TelemetryClient;
16
17
  },
18
+ private bindings?: LoggerBindings,
17
19
  ) {}
18
20
 
19
21
  public async start() {
@@ -30,10 +32,14 @@ export class ProverPublisherFactory {
30
32
  */
31
33
  public async create(): Promise<ProverNodePublisher> {
32
34
  const l1Publisher = await this.deps.publisherManager.getAvailablePublisher();
33
- return new ProverNodePublisher(this.config, {
34
- rollupContract: this.deps.rollupContract,
35
- l1TxUtils: l1Publisher,
36
- telemetry: this.deps.telemetry,
37
- });
35
+ return new ProverNodePublisher(
36
+ this.config,
37
+ {
38
+ rollupContract: this.deps.rollupContract,
39
+ l1TxUtils: l1Publisher,
40
+ telemetry: this.deps.telemetry,
41
+ },
42
+ this.bindings,
43
+ );
38
44
  }
39
45
  }