@aztec/prover-client 3.0.0-canary.a9708bd → 3.0.0-manual.20251030

This diff reflects the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (74)
  1. package/dest/block-factory/light.d.ts +5 -3
  2. package/dest/block-factory/light.d.ts.map +1 -1
  3. package/dest/block-factory/light.js +16 -9
  4. package/dest/config.js +1 -1
  5. package/dest/mocks/fixtures.d.ts +4 -1
  6. package/dest/mocks/fixtures.d.ts.map +1 -1
  7. package/dest/mocks/fixtures.js +31 -3
  8. package/dest/mocks/test_context.d.ts +32 -9
  9. package/dest/mocks/test_context.d.ts.map +1 -1
  10. package/dest/mocks/test_context.js +78 -22
  11. package/dest/orchestrator/block-building-helpers.d.ts +33 -31
  12. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  13. package/dest/orchestrator/block-building-helpers.js +126 -137
  14. package/dest/orchestrator/block-proving-state.d.ts +60 -53
  15. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  16. package/dest/orchestrator/block-proving-state.js +214 -187
  17. package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
  18. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
  19. package/dest/orchestrator/checkpoint-proving-state.js +211 -0
  20. package/dest/orchestrator/epoch-proving-state.d.ts +34 -28
  21. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  22. package/dest/orchestrator/epoch-proving-state.js +128 -84
  23. package/dest/orchestrator/orchestrator.d.ts +31 -30
  24. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  25. package/dest/orchestrator/orchestrator.js +368 -236
  26. package/dest/orchestrator/tx-proving-state.d.ts +11 -9
  27. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  28. package/dest/orchestrator/tx-proving-state.js +26 -23
  29. package/dest/prover-client/server-epoch-prover.d.ts +9 -8
  30. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  31. package/dest/prover-client/server-epoch-prover.js +9 -9
  32. package/dest/proving_broker/broker_prover_facade.d.ts +20 -15
  33. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  34. package/dest/proving_broker/broker_prover_facade.js +36 -21
  35. package/dest/proving_broker/config.d.ts +8 -8
  36. package/dest/proving_broker/config.js +5 -5
  37. package/dest/proving_broker/factory.js +1 -1
  38. package/dest/proving_broker/fixtures.js +1 -1
  39. package/dest/proving_broker/proof_store/index.d.ts +1 -0
  40. package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
  41. package/dest/proving_broker/proof_store/index.js +1 -0
  42. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  43. package/dest/proving_broker/proving_broker.js +29 -18
  44. package/dest/proving_broker/proving_broker_database/persisted.js +5 -5
  45. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  46. package/dest/proving_broker/proving_job_controller.js +38 -18
  47. package/dest/test/mock_prover.d.ts +22 -17
  48. package/dest/test/mock_prover.d.ts.map +1 -1
  49. package/dest/test/mock_prover.js +35 -20
  50. package/package.json +16 -17
  51. package/src/block-factory/light.ts +35 -9
  52. package/src/config.ts +1 -1
  53. package/src/mocks/fixtures.ts +39 -11
  54. package/src/mocks/test_context.ts +137 -31
  55. package/src/orchestrator/block-building-helpers.ts +211 -211
  56. package/src/orchestrator/block-proving-state.ts +235 -245
  57. package/src/orchestrator/checkpoint-proving-state.ts +299 -0
  58. package/src/orchestrator/epoch-proving-state.ts +172 -127
  59. package/src/orchestrator/orchestrator.ts +545 -303
  60. package/src/orchestrator/tx-proving-state.ts +49 -43
  61. package/src/prover-client/server-epoch-prover.ts +28 -18
  62. package/src/proving_broker/broker_prover_facade.ts +157 -86
  63. package/src/proving_broker/config.ts +7 -7
  64. package/src/proving_broker/factory.ts +1 -1
  65. package/src/proving_broker/fixtures.ts +1 -1
  66. package/src/proving_broker/proof_store/index.ts +1 -0
  67. package/src/proving_broker/proving_broker.ts +36 -18
  68. package/src/proving_broker/proving_broker_database/persisted.ts +5 -5
  69. package/src/proving_broker/proving_job_controller.ts +38 -18
  70. package/src/test/mock_prover.ts +142 -60
  71. package/dest/bin/get-proof-inputs.d.ts +0 -2
  72. package/dest/bin/get-proof-inputs.d.ts.map +0 -1
  73. package/dest/bin/get-proof-inputs.js +0 -51
  74. package/src/bin/get-proof-inputs.ts +0 -59
@@ -4,9 +4,8 @@ function _ts_decorate(decorators, target, key, desc) {
  else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
  return c > 3 && r && Object.defineProperty(target, key, r), r;
  }
- import { BlobAccumulatorPublicInputs } from '@aztec/blob-lib';
- import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
- import { padArrayEnd, times } from '@aztec/foundation/collection';
+ import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+ import { padArrayEnd } from '@aztec/foundation/collection';
  import { AbortError } from '@aztec/foundation/error';
  import { Fr } from '@aztec/foundation/fields';
  import { createLogger } from '@aztec/foundation/log';
@@ -14,16 +13,13 @@ import { promiseWithResolvers } from '@aztec/foundation/promise';
  import { assertLength } from '@aztec/foundation/serialize';
  import { pushTestData } from '@aztec/foundation/testing';
  import { elapsed } from '@aztec/foundation/timer';
- import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
  import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
- import { L2Block } from '@aztec/stdlib/block';
- import { BaseParityInputs } from '@aztec/stdlib/parity';
- import { EmptyBlockRootRollupInputs, PrivateBaseRollupInputs, SingleTxBlockRootRollupInputs, TubeInputs } from '@aztec/stdlib/rollup';
+ import { createBlockEndMarker } from '@aztec/stdlib/block';
+ import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
  import { MerkleTreeId } from '@aztec/stdlib/trees';
- import { toNumBlobFields } from '@aztec/stdlib/tx';
  import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
  import { inspect } from 'util';
- import { buildHeaderAndBodyFromTxs, getLastSiblingPath, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
+ import { buildBlockHeaderFromTxs, buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
  import { EpochProvingState } from './epoch-proving-state.js';
  import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
  import { TxProvingState } from './tx-proving-state.js';
@@ -68,51 +64,92 @@ const logger = createLogger('prover-client:orchestrator');
  this.cancel();
  return Promise.resolve();
  }
- startNewEpoch(epochNumber, firstBlockNumber, totalNumBlocks, finalBlobBatchingChallenges) {
+ startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges) {
+ if (this.provingState?.verifyState()) {
+ throw new Error(`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`);
+ }
  const { promise: _promise, resolve, reject } = promiseWithResolvers();
  const promise = _promise.catch((reason)=>({
  status: 'failure',
  reason
  }));
- if (totalNumBlocks <= 0 || !Number.isInteger(totalNumBlocks)) {
- throw new Error(`Invalid number of blocks for epoch (got ${totalNumBlocks})`);
- }
- logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
- this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, finalBlobBatchingChallenges, resolve, reject);
+ logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
+ this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
  this.provingPromise = promise;
  }
+ async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, totalNumBlobFields, headerOfLastBlockInPreviousCheckpoint) {
+ if (!this.provingState) {
+ throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+ }
+ if (!this.provingState.isAcceptingCheckpoints()) {
+ throw new Error(`Epoch not accepting further checkpoints.`);
+ }
+ // Fork world state at the end of the immediately previous block.
+ const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+ const db = await this.dbProvider.fork(lastBlockNumber);
+ const firstBlockNumber = lastBlockNumber + 1;
+ this.dbs.set(firstBlockNumber, db);
+ // Get archive sibling path before any block in this checkpoint lands.
+ const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+ // Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+ const { lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+ this.provingState.startNewCheckpoint(checkpointIndex, constants, totalNumBlocks, totalNumBlobFields, headerOfLastBlockInPreviousCheckpoint, lastArchiveSiblingPath, l1ToL2Messages, lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath);
+ }
  /**
  * Starts off a new block
- * @param globalVariables - The global variables for the block
- * @param l1ToL2Messages - The l1 to l2 messages for the block
- * @returns A proving ticket, containing a promise notifying of proving completion
- */ async startNewBlock(globalVariables, l1ToL2Messages, previousBlockHeader) {
+ * @param blockNumber - The block number
+ * @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+ * block that doesn't have any txs.
+ * @param totalNumTxs - The total number of txs in the block
+ */ async startNewBlock(blockNumber, timestamp, totalNumTxs) {
  if (!this.provingState) {
- throw new Error(`Invalid proving state, call startNewEpoch before starting a block`);
- }
- if (!this.provingState?.isAcceptingBlocks()) {
- throw new Error(`Epoch not accepting further blocks`);
- }
- logger.info(`Starting block ${globalVariables.blockNumber} for slot ${globalVariables.slotNumber.toNumber()}`);
- // Fork world state at the end of the immediately previous block
- const db = await this.dbProvider.fork(globalVariables.blockNumber - 1);
- this.dbs.set(globalVariables.blockNumber, db);
- // we start the block by enqueueing all of the base parity circuits
- const { l1ToL2MessageTreeSnapshot, l1ToL2MessageSubtreeSiblingPath, l1ToL2MessageTreeSnapshotAfterInsertion, baseParityInputs } = await this.prepareBaseParityInputs(l1ToL2Messages, db);
- // Get archive snapshot before this block lands
- const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
- const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
- const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
- const blockProvingState = this.provingState.startNewBlock(globalVariables, l1ToL2Messages, l1ToL2MessageTreeSnapshot, l1ToL2MessageSubtreeSiblingPath, l1ToL2MessageTreeSnapshotAfterInsertion, lastArchive, lastArchiveSiblingPath, newArchiveSiblingPath, previousBlockHeader, this.proverId);
- // Enqueue base parity circuits for the block
- for(let i = 0; i < baseParityInputs.length; i++){
- this.enqueueBaseParityCircuit(blockProvingState, baseParityInputs[i], i);
+ throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
+ }
+ const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+ if (!checkpointProvingState) {
+ throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
+ }
+ if (!checkpointProvingState.isAcceptingBlocks()) {
+ throw new Error(`Checkpoint not accepting further blocks.`);
+ }
+ const constants = checkpointProvingState.constants;
+ logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber.toNumber()}.`);
+ // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+ if (!this.dbs.has(blockNumber)) {
+ // Fork world state at the end of the immediately previous block
+ const db = await this.dbProvider.fork(blockNumber - 1);
+ this.dbs.set(blockNumber, db);
+ }
+ const db = this.dbs.get(blockNumber);
+ // Get archive snapshot and sibling path before any txs in this block lands.
+ const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+ const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+ const blockProvingState = await checkpointProvingState.startNewBlock(blockNumber, timestamp, totalNumTxs, lastArchiveTreeSnapshot, lastArchiveSiblingPath);
+ // Enqueue base parity circuits for the first block in the checkpoint.
+ if (blockProvingState.index === 0) {
+ for(let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++){
+ this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+ }
+ }
+ // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+ // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+ if (totalNumTxs === 0) {
+ const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+ await endSpongeBlob.absorb([
+ createBlockEndMarker(0)
+ ]);
+ blockProvingState.setEndSpongeBlob(endSpongeBlob);
+ // And also try to accumulate the blobs as far as we can:
+ await this.provingState.setBlobAccumulators();
  }
  }
  /**
  * The interface to add simulated transactions to the scheduler. This can only be called once per block.
  * @param txs - The transactions to be proven
  */ async addTxs(txs) {
+ if (!this.provingState) {
+ throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+ }
  if (!txs.length) {
  // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
  // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
@@ -120,16 +157,21 @@ const logger = createLogger('prover-client:orchestrator');
  return;
  }
  const blockNumber = txs[0].globalVariables.blockNumber;
- const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
+ const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber);
  if (!provingState) {
- throw new Error(`Block proving state for ${blockNumber} not found`);
+ throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
+ }
+ if (provingState.totalNumTxs !== txs.length) {
+ throw new Error(`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`);
  }
- if (provingState.totalNumTxs) {
+ if (!provingState.isAcceptingTxs()) {
  throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
  }
- const numBlobFields = toNumBlobFields(txs);
- provingState.startNewBlock(txs.length, numBlobFields);
- logger.info(`Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState.blockNumber}`);
+ logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+ const db = this.dbs.get(blockNumber);
+ const lastArchive = provingState.lastArchiveTreeSnapshot;
+ const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+ const spongeBlobState = provingState.getStartSpongeBlob().clone();
  for (const tx of txs){
  try {
  if (!provingState.verifyState()) {
@@ -137,13 +179,21 @@ const logger = createLogger('prover-client:orchestrator');
  }
  validateTx(tx);
  logger.info(`Received transaction: ${tx.hash}`);
- const [hints, treeSnapshots] = await this.prepareTransaction(tx, provingState);
- const txProvingState = new TxProvingState(tx, hints, treeSnapshots);
+ const startSpongeBlob = spongeBlobState.clone();
+ const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db);
+ if (!provingState.verifyState()) {
+ throw new Error(`Unable to add transaction, preparing base inputs failed`);
+ }
+ await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+ const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
  const txIndex = provingState.addNewTx(txProvingState);
- this.getOrEnqueueTube(provingState, txIndex);
  if (txProvingState.requireAvmProof) {
+ this.getOrEnqueueChonkVerifier(provingState, txIndex);
  logger.debug(`Enqueueing public VM for tx ${txIndex}`);
  this.enqueueVM(provingState, txIndex);
+ } else {
+ logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+ this.enqueueBaseRollup(provingState, txIndex);
  }
  } catch (err) {
  throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -151,21 +201,30 @@ const logger = createLogger('prover-client:orchestrator');
  });
  }
  }
+ await spongeBlobState.absorb([
+ createBlockEndMarker(txs.length)
+ ]);
+ provingState.setEndSpongeBlob(spongeBlobState);
+ // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+ await this.provingState.setBlobAccumulators();
  }
  /**
- * Kickstarts tube circuits for the specified txs. These will be used during epoch proving.
- * Note that if the tube circuits are not started this way, they will be started nontheless after processing.
- */ startTubeCircuits(txs) {
+ * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+ * Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
+ */ startChonkVerifierCircuits(txs) {
  if (!this.provingState?.verifyState()) {
- throw new Error(`Invalid proving state, call startNewEpoch before starting tube circuits`);
+ throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
  }
- for (const tx of txs){
+ const publicTxs = txs.filter((tx)=>tx.data.forPublic);
+ for (const tx of publicTxs){
  const txHash = tx.getTxHash().toString();
- const tubeInputs = new TubeInputs(!!tx.data.forPublic, tx.clientIvcProof);
+ const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
  const tubeProof = promiseWithResolvers();
- logger.debug(`Starting tube circuit for tx ${txHash}`);
- this.doEnqueueTube(txHash, tubeInputs, (proof)=>tubeProof.resolve(proof));
- this.provingState?.cachedTubeProofs.set(txHash, tubeProof.promise);
+ logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+ this.doEnqueueChonkVerifier(txHash, privateInputs, (proof)=>{
+ tubeProof.resolve(proof);
+ });
+ this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
  }
  return Promise.resolve();
  }
@@ -177,57 +236,82 @@ const logger = createLogger('prover-client:orchestrator');
  if (!provingState) {
  throw new Error(`Block proving state for ${blockNumber} not found`);
  }
- if (!provingState.spongeBlobState) {
- // If we are completing an empty block, initialize the provingState.
- // We will have 0 txs and no blob fields.
- provingState.startNewBlock(0, 0);
+ // Abort with specific error for the block if there's one.
+ const error = provingState.getError();
+ if (error) {
+ throw new Error(`Block proving failed: ${error}`);
  }
+ // Abort if the proving state is not valid due to errors occurred elsewhere.
  if (!provingState.verifyState()) {
- throw new Error(`Block proving failed: ${provingState.error}`);
+ throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
+ }
+ if (provingState.isAcceptingTxs()) {
+ throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
  }
  // And build the block header
  logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
- await this.buildBlock(provingState, expectedHeader);
- logger.debug(`Accumulating blobs for ${blockNumber}`);
- await this.provingState?.setBlobAccumulators(blockNumber);
- // If the proofs were faster than the block building, then we need to try the block root rollup again here
- await this.checkAndEnqueueBlockRootRollup(provingState);
- return provingState.block;
- }
- /** Returns the block as built for a given index. */ getBlock(index) {
- const block = this.provingState?.blocks[index]?.block;
- if (!block) {
- throw new Error(`Block at index ${index} not available`);
- }
- return block;
- }
- async buildBlock(provingState, expectedHeader) {
- // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
- const txs = provingState.allTxs.map((a)=>a.processedTx);
+ const header = await this.buildL2BlockHeader(provingState, expectedHeader);
+ await this.verifyBuiltBlockAgainstSyncedState(provingState);
+ return header;
+ }
+ async buildL2BlockHeader(provingState, expectedHeader) {
+ // Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
+ const txs = provingState.getProcessedTxs();
+ const startSpongeBlob = provingState.getStartSpongeBlob();
  // Get db for this block
  const db = this.dbs.get(provingState.blockNumber);
  // Given we've applied every change from this block, now assemble the block header
  // and update the archive tree, so we're ready to start processing the next block
- const { header, body } = await buildHeaderAndBodyFromTxs(txs, provingState.globalVariables, provingState.newL1ToL2Messages, db);
+ const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);
  if (expectedHeader && !header.equals(expectedHeader)) {
  logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
  throw new Error('Block header mismatch');
  }
  logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
  await db.updateArchive(header);
- // Assemble the L2 block
- const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
- const l2Block = new L2Block(newArchive, header, body);
- await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
- logger.verbose(`Orchestrator finalized block ${l2Block.number}`);
- provingState.setBlock(l2Block);
+ provingState.setBuiltBlockHeader(header);
+ return header;
  }
  // Flagged as protected to disable in certain unit tests
- async verifyBuiltBlockAgainstSyncedState(l2Block, newArchive) {
- const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(l2Block.number));
+ async verifyBuiltBlockAgainstSyncedState(provingState) {
+ const builtBlockHeader = provingState.getBuiltBlockHeader();
+ if (!builtBlockHeader) {
+ logger.debug('Block header not built yet, skipping header check.');
+ return;
+ }
+ const output = provingState.getBlockRootRollupOutput();
+ if (!output) {
+ logger.debug('Block root rollup proof not built yet, skipping header check.');
+ return;
+ }
+ const header = await buildHeaderFromCircuitOutputs(output);
+ if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+ logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+ provingState.reject(`Block header hash mismatch.`);
+ return;
+ }
+ // Get db for this block
+ const blockNumber = provingState.blockNumber;
+ const db = this.dbs.get(blockNumber);
+ const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+ const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
  if (!syncedArchive.equals(newArchive)) {
- throw new Error(`Archive tree mismatch for block ${l2Block.number}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+ logger.error(`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+ provingState.reject(`Archive tree mismatch.`);
+ return;
+ }
+ const circuitArchive = output.newArchive;
+ if (!newArchive.equals(circuitArchive)) {
+ logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+ provingState.reject(`New archive mismatch.`);
+ return;
  }
+ // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+ // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+ // but have to make sure it only runs once all operations are completed, otherwise some function here
+ // will attempt to access the fork after it was closed.
+ logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+ void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
  }
  /**
  * Cancel any further proving
@@ -247,14 +331,7 @@ const logger = createLogger('prover-client:orchestrator');
  if (result.status === 'failure') {
  throw new Error(`Epoch proving failed: ${result.reason}`);
  }
- // TODO(MW): Move this? Requires async and don't want to force root methods to be async
- // TODO(MW): EpochProvingState uses this.blocks.filter(b => !!b).length as total blocks, use this below:
- const finalBlock = this.provingState.blocks[this.provingState.totalNumBlocks - 1];
- if (!finalBlock || !finalBlock.endBlobAccumulator) {
- throw new Error(`Epoch's final block not ready for finalize`);
- }
- const finalBatchedBlob = await finalBlock.endBlobAccumulator.finalize();
- this.provingState.setFinalBatchedBlob(finalBatchedBlob);
+ await this.provingState.finalizeBatchedBlob();
  const epochProofResult = this.provingState.getEpochProofResult();
  pushTestData('epochProofResult', {
  proof: epochProofResult.proof.toString(),
@@ -263,24 +340,12 @@ const logger = createLogger('prover-client:orchestrator');
  return epochProofResult;
  }
  /**
- * Starts the proving process for the given transaction and adds it to our state
- * @param tx - The transaction whose proving we wish to commence
- * @param provingState - The proving state being worked on
- */ async prepareTransaction(tx, provingState) {
- const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
- if (!txInputs) {
- // This should not be possible
- throw new Error(`Unable to add transaction, preparing base inputs failed`);
- }
- return txInputs;
- }
- /**
  * Enqueue a job to be scheduled
  * @param provingState - The proving state object being operated on
  * @param jobType - The type of job to be queued
  * @param job - The actual job, returns a promise notifying of the job's completion
  */ deferredProving(provingState, request, callback) {
- if (!provingState?.verifyState()) {
+ if (!provingState.verifyState()) {
  logger.debug(`Not enqueuing job, state no longer valid`);
  return;
  }
@@ -295,7 +360,7 @@ const logger = createLogger('prover-client:orchestrator');
  return;
  }
  const result = await request(controller.signal);
- if (!provingState?.verifyState()) {
+ if (!provingState.verifyState()) {
  logger.debug(`State no longer valid, discarding result`);
  return;
  }
@@ -323,31 +388,26 @@ const logger = createLogger('prover-client:orchestrator');
  // let the callstack unwind before adding the job to the queue
  setImmediate(()=>void safeJob());
  }
- async prepareBaseParityInputs(l1ToL2Messages, db) {
+ async updateL1ToL2MessageTree(l1ToL2Messages, db) {
  const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 'Too many L1 to L2 messages');
- const baseParityInputs = times(NUM_BASE_PARITY_PER_ROOT_PARITY, (i)=>BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()));
- const l1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
- const l1ToL2MessageSubtreeSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH);
+ const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+ const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
  // Update the local trees to include the new l1 to l2 messages
  await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
- const l1ToL2MessageTreeSnapshotAfterInsertion = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+ const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+ const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
  return {
- l1ToL2MessageTreeSnapshot,
- l1ToL2MessageSubtreeSiblingPath,
- l1ToL2MessageTreeSnapshotAfterInsertion,
- baseParityInputs
+ lastL1ToL2MessageTreeSnapshot,
+ lastL1ToL2MessageSubtreeRootSiblingPath,
+ newL1ToL2MessageTreeSnapshot,
+ newL1ToL2MessageSubtreeRootSiblingPath
  };
  }
  // Updates the merkle trees for a transaction. The first enqueued job for a transaction
- async prepareBaseRollupInputs(provingState, tx) {
- if (!provingState.verifyState() || !provingState.spongeBlobState) {
- logger.debug('Not preparing base rollup inputs, state invalid');
- return;
- }
- const db = this.dbs.get(provingState.blockNumber);
+ async prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db) {
  // We build the base rollup inputs using a mock proof and verification key.
- // These will be overwritten later once we have proven the tube circuit and any public kernels
- const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, provingState.globalVariables, provingState.l1ToL2MessageTreeSnapshotAfterInsertion, db, provingState.spongeBlobState));
+ // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
+ const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, this.proverId.toField(), db));
  this.metrics.recordBaseRollupInputs(ms);
  const promises = [
  MerkleTreeId.NOTE_HASH_TREE,
@@ -363,10 +423,6 @@ const logger = createLogger('prover-client:orchestrator');
  obj.key,
  obj.value
  ]));
- if (!provingState.verifyState()) {
- logger.debug(`Discarding proving job, state no longer valid`);
- return;
- }
  return [
  hints,
  treeSnapshots
@@ -379,66 +435,71 @@ const logger = createLogger('prover-client:orchestrator');
  logger.debug('Not running base rollup, state invalid');
  return;
  }
+ if (!provingState.tryStartProvingBase(txIndex)) {
+ logger.debug(`Base rollup for tx ${txIndex} already started.`);
+ return;
+ }
  const txProvingState = provingState.getTxProvingState(txIndex);
  const { processedTx } = txProvingState;
  const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
  logger.debug(`Enqueuing deferred proving base rollup for ${processedTx.hash.toString()}`);
- this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateBaseRollupInputs ? 'getPrivateBaseRollupProof' : 'getPublicBaseRollupProof'}`, {
+ this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateTxBaseRollupPrivateInputs ? 'getPrivateTxBaseRollupProof' : 'getPublicTxBaseRollupProof'}`, {
  [Attributes.TX_HASH]: processedTx.hash.toString(),
  [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
  }, (signal)=>{
- if (inputs instanceof PrivateBaseRollupInputs) {
- return this.prover.getPrivateBaseRollupProof(inputs, signal, provingState.epochNumber);
+ if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+ return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
  } else {
- return this.prover.getPublicBaseRollupProof(inputs, signal, provingState.epochNumber);
+ return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
  }
- }), async (result)=>{
+ }), (result)=>{
  logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
- validatePartialState(result.inputs.end, txProvingState.treeSnapshots);
+ validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
  const leafLocation = provingState.setBaseRollupProof(txIndex, result);
  if (provingState.totalNumTxs === 1) {
- await this.checkAndEnqueueBlockRootRollup(provingState);
+ this.checkAndEnqueueBlockRootRollup(provingState);
  } else {
- await this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
+ this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
  }
  });
  }
- // Enqueues the tube circuit for a given transaction index, or reuses the one already enqueued
- // Once completed, will enqueue the next circuit, either a public kernel or the base rollup
- getOrEnqueueTube(provingState, txIndex) {
+ // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+ // Once completed, will enqueue the the public tx base rollup.
+ getOrEnqueueChonkVerifier(provingState, txIndex) {
  if (!provingState.verifyState()) {
- logger.debug('Not running tube circuit, state invalid');
+ logger.debug('Not running chonk verifier circuit, state invalid');
  return;
  }
  const txProvingState = provingState.getTxProvingState(txIndex);
  const txHash = txProvingState.processedTx.hash.toString();
+ NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
  const handleResult = (result)=>{
- logger.debug(`Got tube proof for tx index: ${txIndex}`, {
+ logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, {
  txHash
  });
- txProvingState.setTubeProof(result);
- this.provingState?.cachedTubeProofs.delete(txHash);
- this.checkAndEnqueueNextTxCircuit(provingState, txIndex);
+ txProvingState.setPublicChonkVerifierProof(result);
+ this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+ this.checkAndEnqueueBaseRollup(provingState, txIndex);
  };
- if (this.provingState?.cachedTubeProofs.has(txHash)) {
- logger.debug(`Tube proof already enqueued for tx index: ${txIndex}`, {
+ if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+ logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, {
  txHash
  });
- void this.provingState.cachedTubeProofs.get(txHash).then(handleResult);
+ void this.provingState.cachedChonkVerifierProofs.get(txHash).then(handleResult);
  return;
  }
- logger.debug(`Enqueuing tube circuit for tx index: ${txIndex}`);
- this.doEnqueueTube(txHash, txProvingState.getTubeInputs(), handleResult);
+ logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+ this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
  }
- doEnqueueTube(txHash, inputs, handler, provingState = this.provingState) {
- if (!provingState?.verifyState()) {
- logger.debug('Not running tube circuit, state invalid');
+ doEnqueueChonkVerifier(txHash, inputs, handler, provingState = this.provingState) {
+ if (!provingState.verifyState()) {
+ logger.debug('Not running chonk verifier circuit, state invalid');
  return;
  }
- this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTubeProof', {
+ this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPublicChonkVerifierProof', {
  [Attributes.TX_HASH]: txHash,
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'tube-circuit'
- }, (signal)=>this.prover.getTubeProof(inputs, signal, this.provingState.epochNumber)), handler);
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public'
+ }, (signal)=>this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber)), handler);
  }
  // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
  // Enqueues the next level of merge if all inputs are available
@@ -447,74 +508,73 @@ const logger = createLogger('prover-client:orchestrator');
  logger.debug('Not running merge rollup. State no longer valid.');
  return;
  }
+ if (!provingState.tryStartProvingMerge(location)) {
+ logger.debug('Merge rollup already started.');
+ return;
+ }
  const inputs = provingState.getMergeRollupInputs(location);
- this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getMergeRollupProof', {
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'merge-rollup'
- }, (signal)=>this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+ this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTxMergeRollupProof', {
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge'
+ }, (signal)=>this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
  provingState.setMergeRollupProof(location, result);
- await this.checkAndEnqueueNextMergeRollup(provingState, location);
+ this.checkAndEnqueueNextMergeRollup(provingState, location);
  });
  }
  // Executes the block root rollup circuit
- async enqueueBlockRootRollup(provingState) {
+ enqueueBlockRootRollup(provingState) {
  if (!provingState.verifyState()) {
  logger.debug('Not running block root rollup, state no longer valid');
  return;
  }
- provingState.blockRootRollupStarted = true;
- const { rollupType, inputs } = await provingState.getBlockRootRollupTypeAndInputs();
- logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs.`);
+ if (!provingState.tryStartProvingBlockRoot()) {
+ logger.debug('Block root rollup already started.');
+ return;
+ }
+ const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
+ logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockRootRollupProof', {
  [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
  }, (signal)=>{
- if (inputs instanceof EmptyBlockRootRollupInputs) {
- return this.prover.getEmptyBlockRootRollupProof(inputs, signal, provingState.epochNumber);
- } else if (inputs instanceof SingleTxBlockRootRollupInputs) {
- return this.prover.getSingleTxBlockRootRollupProof(inputs, signal, provingState.epochNumber);
+ if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+ return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+ } else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+ return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+ } else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+ return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+ } else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+ return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
  } else {
  return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
  }
  }), async (result)=>{
- provingState.setBlockRootRollupProof(result);
- const header = await provingState.buildHeaderFromProvingOutputs();
- if (!(await header.hash()).equals(await provingState.block.header.hash())) {
- logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(provingState.block.header)}`);
- provingState.reject(`Block header hash mismatch.`);
- }
- const dbArchiveRoot = provingState.block.archive.root;
- const circuitArchiveRoot = result.inputs.newArchive.root;
- if (!dbArchiveRoot.equals(circuitArchiveRoot)) {
- logger.error(`New archive root mismatch.\nCircuit: ${result.inputs.newArchive.root}\nComputed: ${dbArchiveRoot}`);
- provingState.reject(`New archive root mismatch.`);
- }
- const endBlobAccumulatorPublicInputs = BlobAccumulatorPublicInputs.fromBatchedBlobAccumulator(provingState.endBlobAccumulator);
- const circuitEndBlobAccumulatorState = result.inputs.blobPublicInputs.endBlobAccumulator;
- if (!circuitEndBlobAccumulatorState.equals(endBlobAccumulatorPublicInputs)) {
- logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(endBlobAccumulatorPublicInputs)}`);
- provingState.reject(`Blob accumulator state mismatch.`);
- }
- logger.debug(`Completed ${rollupType} proof for block ${provingState.block.number}`);
- // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
- const epochProvingState = this.provingState;
- const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
- if (epochProvingState.totalNumBlocks === 1) {
- this.enqueueEpochPadding(epochProvingState);
+ // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+ await this.verifyBuiltBlockAgainstSyncedState(provingState);
+ logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
+ const leafLocation = provingState.setBlockRootRollupProof(result);
+ const checkpointProvingState = provingState.parentCheckpoint;
+ if (checkpointProvingState.totalNumBlocks === 1) {
+ this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
  } else {
- this.checkAndEnqueueNextBlockMergeRollup(epochProvingState, leafLocation);
+ this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
  }
  });
  }
  // Executes the base parity circuit and stores the intermediate state for the root parity circuit
  // Enqueues the root parity circuit if all inputs are available
- enqueueBaseParityCircuit(provingState, inputs, index) {
+ enqueueBaseParityCircuit(checkpointProvingState, provingState, baseParityIndex) {
  if (!provingState.verifyState()) {
  logger.debug('Not running base parity. State no longer valid.');
  return;
  }
+ if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+ logger.warn(`Base parity ${baseParityIndex} already started.`);
+ return;
+ }
+ const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBaseParityProof', {
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity'
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base'
  }, (signal)=>this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber)), (provingOutput)=>{
- provingState.setBaseParityProof(index, provingOutput);
+ provingState.setBaseParityProof(baseParityIndex, provingOutput);
  this.checkAndEnqueueRootParityCircuit(provingState);
  });
  }
@@ -531,12 +591,16 @@ const logger = createLogger('prover-client:orchestrator');
  logger.debug('Not running root parity. State no longer valid.');
  return;
  }
- const inputs = provingState.getRootParityInputs();
+ if (!provingState.tryStartProvingRootParity()) {
+ logger.debug('Root parity already started.');
+ return;
+ }
+ const inputs = provingState.getParityRootInputs();
  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootParityProof', {
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-parity'
- }, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root'
+ }, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), (result)=>{
  provingState.setRootParityProof(result);
- await this.checkAndEnqueueBlockRootRollup(provingState);
+ this.checkAndEnqueueBlockRootRollup(provingState);
  });
  }
  // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
@@ -546,26 +610,90 @@ const logger = createLogger('prover-client:orchestrator');
  logger.debug('Not running block merge rollup. State no longer valid.');
  return;
  }
+ if (!provingState.tryStartProvingBlockMerge(location)) {
+ logger.debug('Block merge rollup already started.');
+ return;
+ }
  const inputs = provingState.getBlockMergeRollupInputs(location);
  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockMergeRollupProof', {
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge-rollup'
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge'
  }, (signal)=>this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
  provingState.setBlockMergeRollupProof(location, result);
  this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
  });
  }
+ enqueueCheckpointRootRollup(provingState) {
+ if (!provingState.verifyState()) {
+ logger.debug('Not running checkpoint root rollup. State no longer valid.');
+ return;
+ }
+ if (!provingState.tryStartProvingCheckpointRoot()) {
+ logger.debug('Checkpoint root rollup already started.');
+ return;
+ }
+ const rollupType = provingState.getCheckpointRootRollupType();
+ logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+ const inputs = provingState.getCheckpointRootRollupInputs();
+ this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointRootRollupProof', {
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
+ }, (signal)=>{
+ if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+ return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+ } else {
+ return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+ }
+ }), (result)=>{
+ const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator().toBlobAccumulator();
+ const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+ if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+ logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(computedEndBlobAccumulatorState)}`);
+ provingState.reject(`Blob accumulator state mismatch.`);
+ return;
+ }
+ logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+ const leafLocation = provingState.setCheckpointRootRollupProof(result);
+ const epochProvingState = provingState.parentEpoch;
+ if (epochProvingState.totalNumCheckpoints === 1) {
+ this.enqueueEpochPadding(epochProvingState);
+ } else {
+ this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+ }
+ });
+ }
+ enqueueCheckpointMergeRollup(provingState, location) {
+ if (!provingState.verifyState()) {
+ logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+ return;
+ }
+ if (!provingState.tryStartProvingCheckpointMerge(location)) {
+ logger.debug('Checkpoint merge rollup already started.');
+ return;
+ }
+ const inputs = provingState.getCheckpointMergeRollupInputs(location);
+ this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointMergeRollupProof', {
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge'
+ }, (signal)=>this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+ logger.debug('Completed proof for checkpoint merge rollup.');
+ provingState.setCheckpointMergeRollupProof(location, result);
+ this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+ });
+ }
  enqueueEpochPadding(provingState) {
  if (!provingState.verifyState()) {
  logger.debug('Not running epoch padding. State no longer valid.');
  return;
  }
+ if (!provingState.tryStartProvingPaddingCheckpoint()) {
+ logger.debug('Padding checkpoint already started.');
+ return;
+ }
  logger.debug('Padding epoch proof with a padding block root proof.');
- const inputs = provingState.getPaddingBlockRootInputs();
- this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPaddingBlockRootRollupProof', {
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'padding-block-root-rollup'
- }, (signal)=>this.prover.getPaddingBlockRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
- logger.debug('Completed proof for padding block root.');
- provingState.setPaddingBlockRootProof(result);
+ const inputs = provingState.getPaddingCheckpointInputs();
+ this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof', {
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding'
+ }, (signal)=>this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+ logger.debug('Completed proof for padding checkpoint.');
+ provingState.setCheckpointPaddingProof(result);
  this.checkAndEnqueueRootRollup(provingState);
  });
  }
@@ -578,7 +706,7 @@ const logger = createLogger('prover-client:orchestrator');
  logger.debug(`Preparing root rollup`);
  const inputs = provingState.getRootRollupInputs();
  this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootRollupProof', {
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup'
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root'
  }, (signal)=>this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
  logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`);
  provingState.setRootRollupProof(result);
@@ -587,36 +715,23 @@ const logger = createLogger('prover-client:orchestrator');
  });
  });
  }
- async checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
+ checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
  if (!provingState.isReadyForMergeRollup(currentLocation)) {
  return;
  }
  const parentLocation = provingState.getParentLocation(currentLocation);
  if (parentLocation.level === 0) {
- await this.checkAndEnqueueBlockRootRollup(provingState);
+ this.checkAndEnqueueBlockRootRollup(provingState);
  } else {
  this.enqueueMergeRollup(provingState, parentLocation);
  }
  }
- async checkAndEnqueueBlockRootRollup(provingState) {
- const blockNumber = provingState.blockNumber;
- // Accumulate as far as we can, in case blocks came in out of order and we are behind:
- await this.provingState?.setBlobAccumulators(blockNumber);
+ checkAndEnqueueBlockRootRollup(provingState) {
  if (!provingState.isReadyForBlockRootRollup()) {
  logger.debug('Not ready for block root rollup');
  return;
  }
- if (provingState.blockRootRollupStarted) {
- logger.debug('Block root rollup already started');
- return;
- }
- // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
- // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
- // but have to make sure it only runs once all operations are completed, otherwise some function here
- // will attempt to access the fork after it was closed.
- logger.debug(`Cleaning up world state fork for ${blockNumber}`);
- void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
- await this.enqueueBlockRootRollup(provingState);
+ this.enqueueBlockRootRollup(provingState);
  }
  checkAndEnqueueNextBlockMergeRollup(provingState, currentLocation) {
  if (!provingState.isReadyForBlockMerge(currentLocation)) {
@@ -624,11 +739,28 @@ const logger = createLogger('prover-client:orchestrator');
  }
  const parentLocation = provingState.getParentLocation(currentLocation);
  if (parentLocation.level === 0) {
- this.checkAndEnqueueRootRollup(provingState);
+ this.checkAndEnqueueCheckpointRootRollup(provingState);
  } else {
  this.enqueueBlockMergeRollup(provingState, parentLocation);
  }
  }
+ checkAndEnqueueCheckpointRootRollup(provingState) {
+ if (!provingState.isReadyForCheckpointRoot()) {
+ return;
+ }
+ this.enqueueCheckpointRootRollup(provingState);
+ }
+ checkAndEnqueueNextCheckpointMergeRollup(provingState, currentLocation) {
+ if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
+ return;
+ }
+ const parentLocation = provingState.getParentLocation(currentLocation);
+ if (parentLocation.level === 0) {
+ this.checkAndEnqueueRootRollup(provingState);
+ } else {
+ this.enqueueCheckpointMergeRollup(provingState, parentLocation);
+ }
+ }
  checkAndEnqueueRootRollup(provingState) {
  if (!provingState.isReadyForRootRollup()) {
  logger.debug('Not ready for root rollup');
@@ -678,22 +810,22 @@ const logger = createLogger('prover-client:orchestrator');
  this.deferredProving(provingState, doAvmProving, (proofAndVk)=>{
  logger.debug(`Proven VM for tx index: ${txIndex}`);
  txProvingState.setAvmProof(proofAndVk);
- this.checkAndEnqueueNextTxCircuit(provingState, txIndex);
+ this.checkAndEnqueueBaseRollup(provingState, txIndex);
  });
  }
- checkAndEnqueueNextTxCircuit(provingState, txIndex) {
+ checkAndEnqueueBaseRollup(provingState, txIndex) {
  const txProvingState = provingState.getTxProvingState(txIndex);
  if (!txProvingState.ready()) {
  return;
  }
- // We must have completed all proving (tube proof and (if required) vm proof are generated), we now move to the base rollup.
+ // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
  logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
  this.enqueueBaseRollup(provingState, txIndex);
  }
  }
  _ts_decorate([
- trackSpan('ProvingOrchestrator.startNewBlock', (globalVariables)=>({
- [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber
+ trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
+ [Attributes.BLOCK_NUMBER]: blockNumber
  }))
  ], ProvingOrchestrator.prototype, "startNewBlock", null);
  _ts_decorate([
@@ -702,15 +834,15 @@ _ts_decorate([
  }))
  ], ProvingOrchestrator.prototype, "addTxs", null);
  _ts_decorate([
- trackSpan('ProvingOrchestrator.startTubeCircuits')
- ], ProvingOrchestrator.prototype, "startTubeCircuits", null);
+ trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+ ], ProvingOrchestrator.prototype, "startChonkVerifierCircuits", null);
  _ts_decorate([
  trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
  [Attributes.BLOCK_NUMBER]: blockNumber
  }))
  ], ProvingOrchestrator.prototype, "setBlockCompleted", null);
  _ts_decorate([
- trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (_, tx)=>({
+ trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (tx)=>({
  [Attributes.TX_HASH]: tx.hash.toString()
  }))
  ], ProvingOrchestrator.prototype, "prepareBaseRollupInputs", null);