@aztec/prover-node 4.0.0-nightly.20250907 → 4.0.0-nightly.20260108

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dest/actions/download-epoch-proving-job.d.ts +4 -4
  2. package/dest/actions/index.d.ts +1 -1
  3. package/dest/actions/rerun-epoch-proving-job.d.ts +2 -2
  4. package/dest/actions/upload-epoch-proof-failure.d.ts +1 -1
  5. package/dest/bin/run-failed-epoch.d.ts +1 -1
  6. package/dest/bin/run-failed-epoch.js +1 -1
  7. package/dest/config.d.ts +7 -4
  8. package/dest/config.d.ts.map +1 -1
  9. package/dest/config.js +21 -15
  10. package/dest/factory.d.ts +2 -4
  11. package/dest/factory.d.ts.map +1 -1
  12. package/dest/factory.js +31 -13
  13. package/dest/index.d.ts +1 -1
  14. package/dest/job/epoch-proving-job-data.d.ts +8 -6
  15. package/dest/job/epoch-proving-job-data.d.ts.map +1 -1
  16. package/dest/job/epoch-proving-job-data.js +25 -18
  17. package/dest/job/epoch-proving-job.d.ts +11 -16
  18. package/dest/job/epoch-proving-job.d.ts.map +1 -1
  19. package/dest/job/epoch-proving-job.js +493 -77
  20. package/dest/metrics.d.ts +4 -3
  21. package/dest/metrics.d.ts.map +1 -1
  22. package/dest/metrics.js +9 -3
  23. package/dest/monitors/epoch-monitor.d.ts +5 -2
  24. package/dest/monitors/epoch-monitor.d.ts.map +1 -1
  25. package/dest/monitors/epoch-monitor.js +11 -11
  26. package/dest/monitors/index.d.ts +1 -1
  27. package/dest/prover-node-publisher.d.ts +9 -10
  28. package/dest/prover-node-publisher.d.ts.map +1 -1
  29. package/dest/prover-node-publisher.js +51 -53
  30. package/dest/prover-node.d.ts +8 -7
  31. package/dest/prover-node.d.ts.map +1 -1
  32. package/dest/prover-node.js +435 -50
  33. package/dest/prover-publisher-factory.d.ts +6 -2
  34. package/dest/prover-publisher-factory.d.ts.map +1 -1
  35. package/dest/prover-publisher-factory.js +6 -0
  36. package/dest/test/index.d.ts +1 -1
  37. package/dest/test/index.d.ts.map +1 -1
  38. package/package.json +26 -25
  39. package/src/bin/run-failed-epoch.ts +2 -2
  40. package/src/config.ts +33 -30
  41. package/src/factory.ts +37 -20
  42. package/src/job/epoch-proving-job-data.ts +31 -25
  43. package/src/job/epoch-proving-job.ts +138 -82
  44. package/src/metrics.ts +16 -4
  45. package/src/monitors/epoch-monitor.ts +16 -13
  46. package/src/prover-node-publisher.ts +74 -73
  47. package/src/prover-node.ts +52 -45
  48. package/src/prover-publisher-factory.ts +12 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aztec/prover-node",
- "version": "4.0.0-nightly.20250907",
+ "version": "4.0.0-nightly.20260108",
  "type": "module",
  "exports": {
  ".": "./dest/index.js",
@@ -11,8 +11,8 @@
  "../package.common.json"
  ],
  "scripts": {
- "build": "yarn clean && tsc -b",
- "build:dev": "tsc -b --watch",
+ "build": "yarn clean && ../scripts/tsc.sh",
+ "build:dev": "../scripts/tsc.sh --watch",
  "clean": "rm -rf ./dest .tsbuildinfo",
  "bb": "node --no-warnings ./dest/bb/index.js",
  "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests --maxWorkers=${JEST_MAX_WORKERS:-8}",
@@ -56,36 +56,37 @@
  ]
  },
  "dependencies": {
- "@aztec/archiver": "4.0.0-nightly.20250907",
- "@aztec/bb-prover": "4.0.0-nightly.20250907",
- "@aztec/blob-lib": "4.0.0-nightly.20250907",
- "@aztec/blob-sink": "4.0.0-nightly.20250907",
- "@aztec/constants": "4.0.0-nightly.20250907",
- "@aztec/epoch-cache": "4.0.0-nightly.20250907",
- "@aztec/ethereum": "4.0.0-nightly.20250907",
- "@aztec/foundation": "4.0.0-nightly.20250907",
- "@aztec/kv-store": "4.0.0-nightly.20250907",
- "@aztec/l1-artifacts": "4.0.0-nightly.20250907",
- "@aztec/node-keystore": "4.0.0-nightly.20250907",
- "@aztec/node-lib": "4.0.0-nightly.20250907",
- "@aztec/noir-protocol-circuits-types": "4.0.0-nightly.20250907",
- "@aztec/p2p": "4.0.0-nightly.20250907",
- "@aztec/protocol-contracts": "4.0.0-nightly.20250907",
- "@aztec/prover-client": "4.0.0-nightly.20250907",
- "@aztec/sequencer-client": "4.0.0-nightly.20250907",
- "@aztec/simulator": "4.0.0-nightly.20250907",
- "@aztec/stdlib": "4.0.0-nightly.20250907",
- "@aztec/telemetry-client": "4.0.0-nightly.20250907",
- "@aztec/world-state": "4.0.0-nightly.20250907",
+ "@aztec/archiver": "4.0.0-nightly.20260108",
+ "@aztec/bb-prover": "4.0.0-nightly.20260108",
+ "@aztec/blob-client": "4.0.0-nightly.20260108",
+ "@aztec/blob-lib": "4.0.0-nightly.20260108",
+ "@aztec/constants": "4.0.0-nightly.20260108",
+ "@aztec/epoch-cache": "4.0.0-nightly.20260108",
+ "@aztec/ethereum": "4.0.0-nightly.20260108",
+ "@aztec/foundation": "4.0.0-nightly.20260108",
+ "@aztec/kv-store": "4.0.0-nightly.20260108",
+ "@aztec/l1-artifacts": "4.0.0-nightly.20260108",
+ "@aztec/node-keystore": "4.0.0-nightly.20260108",
+ "@aztec/node-lib": "4.0.0-nightly.20260108",
+ "@aztec/noir-protocol-circuits-types": "4.0.0-nightly.20260108",
+ "@aztec/p2p": "4.0.0-nightly.20260108",
+ "@aztec/protocol-contracts": "4.0.0-nightly.20260108",
+ "@aztec/prover-client": "4.0.0-nightly.20260108",
+ "@aztec/sequencer-client": "4.0.0-nightly.20260108",
+ "@aztec/simulator": "4.0.0-nightly.20260108",
+ "@aztec/stdlib": "4.0.0-nightly.20260108",
+ "@aztec/telemetry-client": "4.0.0-nightly.20260108",
+ "@aztec/world-state": "4.0.0-nightly.20260108",
  "source-map-support": "^0.5.21",
  "tslib": "^2.4.0",
- "viem": "2.23.7"
+ "viem": "npm:@aztec/viem@2.38.2"
  },
  "devDependencies": {
  "@jest/globals": "^30.0.0",
  "@types/jest": "^30.0.0",
  "@types/node": "^22.15.17",
  "@types/source-map-support": "^0.5.10",
+ "@typescript/native-preview": "7.0.0-dev.20251126.1",
  "jest": "^30.0.0",
  "jest-mock-extended": "^4.0.0",
  "ts-node": "^10.9.1",
package/src/bin/run-failed-epoch.ts CHANGED
@@ -1,5 +1,5 @@
  /* eslint-disable no-console */
- import type { L1ContractAddresses } from '@aztec/ethereum';
+ import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
  import { EthAddress } from '@aztec/foundation/eth-address';
  import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc';
  import { createLogger } from '@aztec/foundation/log';
@@ -25,7 +25,7 @@ async function rerunFailedEpoch(provingJobUrl: string, baseLocalDir: string) {
  const config = {
  ...getProverNodeConfigFromEnv(),
  dataDirectory: dataDir,
- dataStoreMapSizeKB: env.dataStoreMapSizeKB ?? 1024 * 1024,
+ dataStoreMapSizeKb: env.dataStoreMapSizeKb ?? 1024 * 1024,
  proverId: env.proverId ?? EthAddress.random(),
  };

package/src/config.ts CHANGED
@@ -1,16 +1,16 @@
  import { type ArchiverConfig, archiverConfigMappings } from '@aztec/archiver/config';
  import type { ACVMConfig, BBConfig } from '@aztec/bb-prover/config';
- import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum';
- import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config';
- import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
+ import { type GenesisStateConfig, genesisStateConfigMappings } from '@aztec/ethereum/config';
  import {
- type EthAccount,
- type EthAddressHex,
- type EthRemoteSignerAccount,
- type KeyStore,
- type KeyStoreConfig,
- keyStoreConfigMappings,
- } from '@aztec/node-keystore';
+ type ConfigMappingsType,
+ booleanConfigHelper,
+ getConfigFromMappings,
+ numberConfigHelper,
+ } from '@aztec/foundation/config';
+ import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config';
+ import { type KeyStoreConfig, keyStoreConfigMappings } from '@aztec/node-keystore/config';
+ import { ethPrivateKeySchema } from '@aztec/node-keystore/schemas';
+ import type { KeyStore } from '@aztec/node-keystore/types';
  import { type SharedNodeConfig, sharedNodeConfigMappings } from '@aztec/node-lib/config';
  import { type P2PConfig, p2pConfigMappings } from '@aztec/p2p/config';
  import {
@@ -18,7 +18,7 @@ import {
  type ProverBrokerConfig,
  proverAgentConfigMappings,
  proverBrokerConfigMappings,
- } from '@aztec/prover-client/broker';
+ } from '@aztec/prover-client/broker/config';
  import { type ProverClientUserConfig, bbConfigMappings, proverClientConfigMappings } from '@aztec/prover-client/config';
  import {
  type PublisherConfig,
@@ -45,6 +45,8 @@ export type SpecificProverNodeConfig = {
  proverNodePollingIntervalMs: number;
  proverNodeMaxParallelBlocksPerEpoch: number;
  proverNodeFailedEpochStore: string | undefined;
+ proverNodeEpochProvingDelayMs: number | undefined;
+ proverNodeDisableProofPublish?: boolean;
  txGatheringTimeoutMs: number;
  txGatheringIntervalMs: number;
  txGatheringBatchSize: number;
@@ -72,6 +74,10 @@ const specificProverNodeConfigMappings: ConfigMappingsType<SpecificProverNodeCon
  description: 'File store where to upload node state when an epoch fails to be proven',
  defaultValue: undefined,
  },
+ proverNodeEpochProvingDelayMs: {
+ description: 'Optional delay in milliseconds to wait before proving a new epoch',
+ defaultValue: undefined,
+ },
  txGatheringIntervalMs: {
  env: 'PROVER_NODE_TX_GATHERING_INTERVAL_MS',
  description: 'How often to check that tx data is available',
@@ -92,6 +98,11 @@ const specificProverNodeConfigMappings: ConfigMappingsType<SpecificProverNodeCon
  description: 'How long to wait for tx data to be available before giving up',
  ...numberConfigHelper(120_000),
  },
+ proverNodeDisableProofPublish: {
+ env: 'PROVER_NODE_DISABLE_PROOF_PUBLISH',
+ description: 'Whether the prover node skips publishing proofs to L1',
+ ...booleanConfigHelper(false),
+ },
  };

  export const proverNodeConfigMappings: ConfigMappingsType<ProverNodeConfig> = {
@@ -125,19 +136,14 @@ export function getProverNodeAgentConfigFromEnv(): ProverAgentConfig & BBConfig
  };
  }

- function createKeyStoreFromWeb3Signer(config: ProverNodeConfig) {
- // See what we have been given for proverId.
- const proverId = config.proverId ? (config.proverId.toString() as EthAddressHex) : undefined;
-
+ function createKeyStoreFromWeb3Signer(config: ProverNodeConfig): KeyStore | undefined {
  // If we don't have a valid prover Id then we can't build a valid key store with remote signers
- if (proverId === undefined) {
+ if (config.proverId === undefined) {
  return undefined;
  }

  // Also, we need at least one publisher address.
- const publishers = config.publisherAddresses
- ? config.publisherAddresses.map(k => k.toChecksumString() as EthRemoteSignerAccount)
- : [];
+ const publishers = config.publisherAddresses ?? [];

  if (publishers.length === 0) {
  return undefined;
@@ -147,7 +153,7 @@ function createKeyStoreFromWeb3Signer(config: ProverNodeConfig) {
  schemaVersion: 1,
  slasher: undefined,
  prover: {
- id: proverId,
+ id: config.proverId,
  publisher: publishers,
  },
  remoteSigner: config.web3SignerUrl,
@@ -156,10 +162,10 @@
  return keyStore;
  }

- function createKeyStoreFromPublisherKeys(config: ProverNodeConfig) {
+ function createKeyStoreFromPublisherKeys(config: ProverNodeConfig): KeyStore | undefined {
  // Extract the publisher keys from the provided config.
  const publisherKeys = config.publisherPrivateKeys
- ? config.publisherPrivateKeys.map(k => k.getValue() as EthAddressHex)
+ ? config.publisherPrivateKeys.map((k: { getValue: () => string }) => ethPrivateKeySchema.parse(k.getValue()))
  : [];

  // There must be at least 1.
@@ -167,9 +173,6 @@ function createKeyStoreFromPublisherKeys(config: ProverNodeConfig) {
  return undefined;
  }

- // Now see what we have been given for proverId.
- const proverId = config.proverId ? (config.proverId.toString() as EthAddressHex) : undefined;
-
  // If we have a valid proverId then create a prover key store of the form { id, publisher: [publisherKeys] }
  // Otherwise create one of the form ("0x12345678....." as EthAccount).

@@ -177,11 +180,11 @@ function createKeyStoreFromPublisherKeys(config: ProverNodeConfig) {
  schemaVersion: 1,
  slasher: undefined,
  prover:
- proverId === undefined
- ? (publisherKeys[0] as EthAccount)
+ config.proverId === undefined
+ ? publisherKeys[0]
  : {
- id: proverId,
- publisher: publisherKeys.map(key => key as EthAccount),
+ id: config.proverId,
+ publisher: publisherKeys,
  },
  remoteSigner: undefined,
  validators: undefined,
@@ -189,7 +192,7 @@ function createKeyStoreFromPublisherKeys(config: ProverNodeConfig) {
  return keyStore;
  }

- export function createKeyStoreForProver(config: ProverNodeConfig) {
+ export function createKeyStoreForProver(config: ProverNodeConfig): KeyStore | undefined {
  if (config.web3SignerUrl !== undefined && config.web3SignerUrl.length > 0) {
  return createKeyStoreFromWeb3Signer(config);
  }
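For context on the two new options: PROVER_NODE_DISABLE_PROOF_PUBLISH is read through booleanConfigHelper(false), while proverNodeEpochProvingDelayMs has no env mapping shown in this diff and simply defaults to undefined. A minimal consumption sketch, assuming it lives inside the prover-node source tree; the surrounding script and the './config.js' import path are illustrative, not part of this diff:

  import { getProverNodeConfigFromEnv } from './config.js';

  // Picks up PROVER_NODE_DISABLE_PROOF_PUBLISH=true as proverNodeDisableProofPublish: true.
  const config = getProverNodeConfigFromEnv();

  if (config.proverNodeDisableProofPublish) {
    // Per the mapping's description: the prover node skips publishing proofs to L1.
  }

  // Optional delay before proving a new epoch; undefined means no delay.
  const provingDelayMs = config.proverNodeEpochProvingDelayMs ?? 0;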
package/src/factory.ts CHANGED
@@ -1,21 +1,21 @@
  import { type Archiver, createArchiver } from '@aztec/archiver';
  import { BBCircuitVerifier, QueuedIVCVerifier, TestCircuitVerifier } from '@aztec/bb-prover';
- import { type BlobSinkClientInterface, createBlobSinkClient } from '@aztec/blob-sink/client';
+ import { createBlobClientWithFileStores } from '@aztec/blob-client/client';
  import { EpochCache } from '@aztec/epoch-cache';
- import {
- type EthSigner,
- L1TxUtils,
- PublisherManager,
- RollupContract,
- createEthereumChain,
- createL1TxUtilsFromEthSigner,
- } from '@aztec/ethereum';
+ import { createEthereumChain } from '@aztec/ethereum/chain';
+ import { RollupContract } from '@aztec/ethereum/contracts';
+ import { L1TxUtils } from '@aztec/ethereum/l1-tx-utils';
+ import { PublisherManager } from '@aztec/ethereum/publisher-manager';
  import { pick } from '@aztec/foundation/collection';
  import { type Logger, createLogger } from '@aztec/foundation/log';
  import { DateProvider } from '@aztec/foundation/timer';
  import type { DataStoreConfig } from '@aztec/kv-store/config';
  import { type KeyStoreConfig, KeystoreManager, loadKeystores, mergeKeystores } from '@aztec/node-keystore';
  import { trySnapshotSync } from '@aztec/node-lib/actions';
+ import {
+ createForwarderL1TxUtilsFromEthSigner,
+ createL1TxUtilsFromEthSignerWithStore,
+ } from '@aztec/node-lib/factories';
  import { NodeRpcTxSource, createP2PClient } from '@aztec/p2p';
  import { type ProverClientConfig, createProverClient } from '@aztec/prover-client';
  import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
@@ -39,7 +39,6 @@ export type ProverNodeDeps = {
  aztecNodeTxProvider?: Pick<AztecNode, 'getTxsByHash'>;
  archiver?: Archiver;
  publisherFactory?: ProverPublisherFactory;
- blobSinkClient?: BlobSinkClientInterface;
  broker?: ProvingJobBroker;
  l1TxUtils?: L1TxUtils;
  dateProvider?: DateProvider;
@@ -56,8 +55,7 @@ export async function createProverNode(
  const config = { ...userConfig };
  const telemetry = deps.telemetry ?? getTelemetryClient();
  const dateProvider = deps.dateProvider ?? new DateProvider();
- const blobSinkClient =
- deps.blobSinkClient ?? createBlobSinkClient(config, { logger: createLogger('prover-node:blob-sink:client') });
+ const blobClient = await createBlobClientWithFileStores(config, createLogger('prover-node:blob-client:client'));
  const log = deps.log ?? createLogger('prover-node');

  // Build a key store from file if given or from environment otherwise
@@ -73,6 +71,8 @@
  }
  }

+ await keyStoreManager?.validateSigners();
+
  // Extract the prover signers from the key store and verify that we have one.
  const proverSigners = keyStoreManager?.createProverSigners();

@@ -86,6 +86,8 @@
  );
  }

+ log.info(`Creating prover with publishers ${proverSigners.signers.map(signer => signer.address.toString()).join()}`);
+
  // Only consider user provided config if it is valid
  const proverIdInUserConfig = config.proverId === undefined || config.proverId.isZero() ? undefined : config.proverId;

@@ -104,7 +106,7 @@

  const archiver =
  deps.archiver ??
- (await createArchiver(config, { blobSinkClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
+ (await createArchiver(config, { blobClient, epochCache, telemetry, dateProvider }, { blockUntilSync: true }));
  log.verbose(`Created archiver and synced to block ${await archiver.getBlockNumber()}`);

  const worldStateConfig = { ...config, worldStateProvenBlocksOnly: false };
@@ -125,7 +127,7 @@

  const publicClient = createPublicClient({
  chain: chain.chainInfo,
- transport: fallback(config.l1RpcUrls.map((url: string) => http(url))),
+ transport: fallback(config.l1RpcUrls.map((url: string) => http(url, { batch: false }))),
  pollingInterval: config.viemPollingIntervalMS,
  });

@@ -133,21 +135,34 @@

  const l1TxUtils = deps.l1TxUtils
  ? [deps.l1TxUtils]
- : proverSigners.signers.map((signer: EthSigner) => {
- return createL1TxUtilsFromEthSigner(publicClient, signer, log, dateProvider, config);
- });
+ : config.publisherForwarderAddress
+ ? await createForwarderL1TxUtilsFromEthSigner(
+ publicClient,
+ proverSigners.signers,
+ config.publisherForwarderAddress,
+ { ...config, scope: 'prover' },
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
+ )
+ : await createL1TxUtilsFromEthSignerWithStore(
+ publicClient,
+ proverSigners.signers,
+ { ...config, scope: 'prover' },
+ { telemetry, logger: log.createChild('l1-tx-utils'), dateProvider },
+ );

  const publisherFactory =
  deps.publisherFactory ??
  new ProverPublisherFactory(config, {
  rollupContract,
- publisherManager: new PublisherManager(l1TxUtils),
+ publisherManager: new PublisherManager(l1TxUtils, config),
  telemetry,
  });

  const proofVerifier = new QueuedIVCVerifier(
  config,
- config.realProofs ? await BBCircuitVerifier.new(config) : new TestCircuitVerifier(),
+ config.realProofs || config.debugForceTxProofVerification
+ ? await BBCircuitVerifier.new(config)
+ : new TestCircuitVerifier(config.proverTestVerificationDelayMs),
  );

  const p2pClient = await createP2PClient(
@@ -175,10 +190,12 @@
  'proverNodeMaxPendingJobs',
  'proverNodeMaxParallelBlocksPerEpoch',
  'proverNodePollingIntervalMs',
+ 'proverNodeEpochProvingDelayMs',
  'txGatheringMaxParallelRequests',
  'txGatheringIntervalMs',
  'txGatheringTimeoutMs',
  'proverNodeFailedEpochStore',
+ 'proverNodeDisableProofPublish',
  'dataDirectory',
  'l1ChainId',
  'rollupVersion',
@@ -187,7 +204,7 @@

  const epochMonitor = await EpochMonitor.create(
  archiver,
- { pollingIntervalMs: config.proverNodePollingIntervalMs },
+ { pollingIntervalMs: config.proverNodePollingIntervalMs, provingDelayMs: config.proverNodeEpochProvingDelayMs },
  telemetry,
  );

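Taken together, the factory changes move blob access to createBlobClientWithFileStores, validate keystore signers up front, and route L1 transactions either through a forwarder (when config.publisherForwarderAddress is set) or through the store-backed signer path. A minimal caller sketch under these changes; the empty deps object and the assumption that createProverNode and getProverNodeConfigFromEnv are re-exported from the package root are illustrative, not confirmed by this diff:

  import { createProverNode, getProverNodeConfigFromEnv } from '@aztec/prover-node';

  const config = getProverNodeConfigFromEnv();

  // All deps (archiver, broker, publisherFactory, l1TxUtils, ...) are optional;
  // anything omitted is constructed by the factory itself. Note there is no
  // longer a blobSinkClient dep: the blob client is now created internally.
  const proverNode = await createProverNode(config, {});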
package/src/job/epoch-proving-job-data.ts CHANGED
@@ -1,49 +1,55 @@
- import { Fr } from '@aztec/foundation/fields';
+ import { CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
+ import { Fr } from '@aztec/foundation/curves/bn254';
  import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
- import { CommitteeAttestation, L2Block } from '@aztec/stdlib/block';
+ import { CommitteeAttestation } from '@aztec/stdlib/block';
+ import { Checkpoint } from '@aztec/stdlib/checkpoint';
  import { BlockHeader, Tx } from '@aztec/stdlib/tx';

  /** All data from an epoch used in proving. */
  export type EpochProvingJobData = {
- epochNumber: bigint;
- blocks: L2Block[];
+ epochNumber: EpochNumber;
+ checkpoints: Checkpoint[];
  txs: Map<string, Tx>;
- l1ToL2Messages: Record<number, Fr[]>;
+ l1ToL2Messages: Record<CheckpointNumber, Fr[]>;
  previousBlockHeader: BlockHeader;
  attestations: CommitteeAttestation[];
  };

  export function validateEpochProvingJobData(data: EpochProvingJobData) {
- if (data.blocks.length > 0 && data.previousBlockHeader.getBlockNumber() + 1 !== data.blocks[0].number) {
+ if (data.checkpoints.length === 0) {
+ throw new Error('No checkpoints to prove');
+ }
+
+ const firstBlockNumber = data.checkpoints[0].blocks[0].number;
+ const previousBlockNumber = data.previousBlockHeader.getBlockNumber();
+ if (previousBlockNumber + 1 !== firstBlockNumber) {
  throw new Error(
- `Initial block number ${
- data.blocks[0].number
- } does not match previous block header ${data.previousBlockHeader.getBlockNumber()}`,
+ `Initial block number ${firstBlockNumber} does not match previous block header ${previousBlockNumber}`,
  );
  }

- for (const blockNumber of data.blocks.map(block => block.number)) {
- if (!(blockNumber in data.l1ToL2Messages)) {
- throw new Error(`Missing L1 to L2 messages for block number ${blockNumber}`);
+ for (const checkpoint of data.checkpoints) {
+ if (!(checkpoint.number in data.l1ToL2Messages)) {
+ throw new Error(`Missing L1 to L2 messages for checkpoint number ${checkpoint.number}`);
  }
  }
  }

  export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer {
- const blocks = data.blocks.map(block => block.toBuffer());
+ const checkpoints = data.checkpoints.map(checkpoint => checkpoint.toBuffer());
  const txs = Array.from(data.txs.values()).map(tx => tx.toBuffer());
- const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([blockNumber, messages]) => [
- Number(blockNumber),
+ const l1ToL2Messages = Object.entries(data.l1ToL2Messages).map(([checkpointNumber, messages]) => [
+ Number(checkpointNumber),
  messages.length,
  ...messages,
  ]);
  const attestations = data.attestations.map(attestation => attestation.toBuffer());

  return serializeToBuffer(
- Number(data.epochNumber),
+ data.epochNumber,
  data.previousBlockHeader,
- blocks.length,
- ...blocks,
+ checkpoints.length,
+ ...checkpoints,
  txs.length,
  ...txs,
  l1ToL2Messages.length,
@@ -55,22 +61,22 @@ export function serializeEpochProvingJobData(data: EpochProvingJobData): Buffer

  export function deserializeEpochProvingJobData(buf: Buffer): EpochProvingJobData {
  const reader = BufferReader.asReader(buf);
- const epochNumber = BigInt(reader.readNumber());
+ const epochNumber = EpochNumber(reader.readNumber());
  const previousBlockHeader = reader.readObject(BlockHeader);
- const blocks = reader.readVector(L2Block);
+ const checkpoints = reader.readVector(Checkpoint);
  const txArray = reader.readVector(Tx);

- const l1ToL2MessageBlockCount = reader.readNumber();
+ const l1ToL2MessageCheckpointCount = reader.readNumber();
  const l1ToL2Messages: Record<number, Fr[]> = {};
- for (let i = 0; i < l1ToL2MessageBlockCount; i++) {
- const blockNumber = reader.readNumber();
+ for (let i = 0; i < l1ToL2MessageCheckpointCount; i++) {
+ const checkpointNumber = CheckpointNumber(reader.readNumber());
  const messages = reader.readVector(Fr);
- l1ToL2Messages[blockNumber] = messages;
+ l1ToL2Messages[checkpointNumber] = messages;
  }

  const attestations = reader.readVector(CommitteeAttestation);

  const txs = new Map<string, Tx>(txArray.map(tx => [tx.getTxHash().toString(), tx]));

- return { epochNumber, previousBlockHeader, blocks, txs, l1ToL2Messages, attestations };
+ return { epochNumber, previousBlockHeader, checkpoints, txs, l1ToL2Messages, attestations };
  }
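The job-data changes replace the flat list of L2 blocks with checkpoints and brand the numeric types, while serialization round-trips the whole structure through a single Buffer. A short usage sketch, assuming a jobData value of type EpochProvingJobData has already been assembled elsewhere:

  import {
    deserializeEpochProvingJobData,
    serializeEpochProvingJobData,
    validateEpochProvingJobData,
  } from './epoch-proving-job-data.js';

  // Throws if there are no checkpoints, if the first block does not follow
  // previousBlockHeader, or if a checkpoint is missing its L1-to-L2 messages entry.
  validateEpochProvingJobData(jobData);

  // Round-trip: epochNumber, previousBlockHeader, checkpoints, txs,
  // per-checkpoint L1-to-L2 messages and attestations survive the Buffer.
  const buf = serializeEpochProvingJobData(jobData);
  const restored = deserializeEpochProvingJobData(buf);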