@aztec/prover-client 3.0.0-canary.a9708bd → 3.0.0-manual.20251030
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
- package/dest/block-factory/light.d.ts +5 -3
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +16 -9
- package/dest/config.js +1 -1
- package/dest/mocks/fixtures.d.ts +4 -1
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +31 -3
- package/dest/mocks/test_context.d.ts +32 -9
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +78 -22
- package/dest/orchestrator/block-building-helpers.d.ts +33 -31
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +126 -137
- package/dest/orchestrator/block-proving-state.d.ts +60 -53
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +214 -187
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +211 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +34 -28
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +128 -84
- package/dest/orchestrator/orchestrator.d.ts +31 -30
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +368 -236
- package/dest/orchestrator/tx-proving-state.d.ts +11 -9
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +26 -23
- package/dest/prover-client/server-epoch-prover.d.ts +9 -8
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +9 -9
- package/dest/proving_broker/broker_prover_facade.d.ts +20 -15
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +36 -21
- package/dest/proving_broker/config.d.ts +8 -8
- package/dest/proving_broker/config.js +5 -5
- package/dest/proving_broker/factory.js +1 -1
- package/dest/proving_broker/fixtures.js +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +1 -0
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +29 -18
- package/dest/proving_broker/proving_broker_database/persisted.js +5 -5
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +38 -18
- package/dest/test/mock_prover.d.ts +22 -17
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +35 -20
- package/package.json +16 -17
- package/src/block-factory/light.ts +35 -9
- package/src/config.ts +1 -1
- package/src/mocks/fixtures.ts +39 -11
- package/src/mocks/test_context.ts +137 -31
- package/src/orchestrator/block-building-helpers.ts +211 -211
- package/src/orchestrator/block-proving-state.ts +235 -245
- package/src/orchestrator/checkpoint-proving-state.ts +299 -0
- package/src/orchestrator/epoch-proving-state.ts +172 -127
- package/src/orchestrator/orchestrator.ts +545 -303
- package/src/orchestrator/tx-proving-state.ts +49 -43
- package/src/prover-client/server-epoch-prover.ts +28 -18
- package/src/proving_broker/broker_prover_facade.ts +157 -86
- package/src/proving_broker/config.ts +7 -7
- package/src/proving_broker/factory.ts +1 -1
- package/src/proving_broker/fixtures.ts +1 -1
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proving_broker.ts +36 -18
- package/src/proving_broker/proving_broker_database/persisted.ts +5 -5
- package/src/proving_broker/proving_job_controller.ts +38 -18
- package/src/test/mock_prover.ts +142 -60
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/src/bin/get-proof-inputs.ts +0 -59
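The structural change running through the files above is that the orchestrator now proves an epoch as a sequence of checkpoints, each holding one or more blocks (see the new src/orchestrator/checkpoint-proving-state.ts and the reworked orchestrator.ts hunks below). The sketch that follows is not taken from the package: it is a hand-written illustration of the call order implied by the new method signatures in this diff, with all parameter types collapsed to placeholders, so treat the names and argument shapes as assumptions.

// Hypothetical stand-in for the orchestrator surface visible in this diff; not the package's exported type.
interface OrchestratorLike {
  startNewEpoch(epochNumber: number, totalNumCheckpoints: number, finalBlobBatchingChallenges: unknown): void;
  startNewCheckpoint(
    checkpointIndex: number,
    constants: unknown,
    l1ToL2Messages: unknown[],
    totalNumBlocks: number,
    totalNumBlobFields: number,
    headerOfLastBlockInPreviousCheckpoint: unknown,
  ): Promise<void>;
  startNewBlock(blockNumber: number, timestamp: bigint, totalNumTxs: number): Promise<void>;
  addTxs(txs: unknown[]): Promise<void>;
  setBlockCompleted(blockNumber: number, expectedHeader?: unknown): Promise<unknown>;
  finalizeEpoch(): Promise<unknown>;
}

// Drive one epoch containing a single checkpoint with a single block.
// `bigint` stands in for the UInt64 timestamp type used by the real signature.
async function proveSingleBlockEpoch(
  orchestrator: OrchestratorLike,
  opts: {
    epochNumber: number;
    challenges: unknown;
    checkpointConstants: unknown;
    previousBlockHeader: unknown;
    blockNumber: number;
    timestamp: bigint;
    txs: unknown[];
    totalNumBlobFields: number;
  },
) {
  orchestrator.startNewEpoch(opts.epochNumber, /* totalNumCheckpoints */ 1, opts.challenges);
  await orchestrator.startNewCheckpoint(
    /* checkpointIndex */ 0,
    opts.checkpointConstants,
    /* l1ToL2Messages */ [],
    /* totalNumBlocks */ 1,
    opts.totalNumBlobFields,
    opts.previousBlockHeader,
  );
  await orchestrator.startNewBlock(opts.blockNumber, opts.timestamp, opts.txs.length);
  await orchestrator.addTxs(opts.txs);
  await orchestrator.setBlockCompleted(opts.blockNumber);
  return orchestrator.finalizeEpoch();
}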
@@ -1,12 +1,12 @@
-import {
+import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
 import {
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
-
+  L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+  NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
-  type TUBE_PROOF_LENGTH,
 } from '@aztec/constants';
-import { padArrayEnd
+import { padArrayEnd } from '@aztec/foundation/collection';
 import { AbortError } from '@aztec/foundation/error';
 import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
@@ -15,27 +15,33 @@ import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
-import { EthAddress,
+import { EthAddress, createBlockEndMarker } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
   MerkleTreeWriteOperations,
-
+  PublicInputsAndRecursiveProof,
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
-import {
+import type { Proof } from '@aztec/stdlib/proofs';
 import {
   type BaseRollupHints,
-
-
-
-
+  BlockRootEmptyTxFirstRollupPrivateInputs,
+  BlockRootFirstRollupPrivateInputs,
+  BlockRootSingleTxFirstRollupPrivateInputs,
+  BlockRootSingleTxRollupPrivateInputs,
+  CheckpointConstantData,
+  CheckpointRootSingleBlockRollupPrivateInputs,
+  PrivateTxBaseRollupPrivateInputs,
+  PublicChonkVerifierPrivateInputs,
+  PublicChonkVerifierPublicInputs,
+  RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
 import { type AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
-import {
+import type { BlockHeader, ProcessedTx, Tx } from '@aztec/stdlib/tx';
+import type { UInt64 } from '@aztec/stdlib/types';
 import {
   Attributes,
   type TelemetryClient,
@@ -48,8 +54,10 @@ import {
 import { inspect } from 'util';

 import {
-
+  buildBlockHeaderFromTxs,
+  buildHeaderFromCircuitOutputs,
   getLastSiblingPath,
+  getPublicChonkVerifierPrivateInputsFromTx,
   getRootTreeSiblingPath,
   getSubtreeSiblingPath,
   getTreeSnapshot,
@@ -58,6 +66,7 @@ import {
   validateTx,
 } from './block-building-helpers.js';
 import type { BlockProvingState } from './block-proving-state.js';
+import type { CheckpointProvingState } from './checkpoint-proving-state.js';
 import { EpochProvingState, type ProvingResult, type TreeSnapshots } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -110,80 +119,141 @@ export class ProvingOrchestrator implements EpochProver {

   public startNewEpoch(
     epochNumber: number,
-
-    totalNumBlocks: number,
+    totalNumCheckpoints: number,
     finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
   ) {
+    if (this.provingState?.verifyState()) {
+      throw new Error(
+        `Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`,
+      );
+    }
+
     const { promise: _promise, resolve, reject } = promiseWithResolvers<ProvingResult>();
     const promise = _promise.catch((reason): ProvingResult => ({ status: 'failure', reason }));
-
-      throw new Error(`Invalid number of blocks for epoch (got ${totalNumBlocks})`);
-    }
-    logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
+    logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
     this.provingState = new EpochProvingState(
       epochNumber,
-
-      totalNumBlocks,
+      totalNumCheckpoints,
       finalBlobBatchingChallenges,
+      provingState => this.checkAndEnqueueCheckpointRootRollup(provingState),
       resolve,
       reject,
     );
     this.provingPromise = promise;
   }

+  public async startNewCheckpoint(
+    checkpointIndex: number,
+    constants: CheckpointConstantData,
+    l1ToL2Messages: Fr[],
+    totalNumBlocks: number,
+    totalNumBlobFields: number,
+    headerOfLastBlockInPreviousCheckpoint: BlockHeader,
+  ) {
+    if (!this.provingState) {
+      throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+    }
+
+    if (!this.provingState.isAcceptingCheckpoints()) {
+      throw new Error(`Epoch not accepting further checkpoints.`);
+    }
+
+    // Fork world state at the end of the immediately previous block.
+    const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+    const db = await this.dbProvider.fork(lastBlockNumber);
+
+    const firstBlockNumber = lastBlockNumber + 1;
+    this.dbs.set(firstBlockNumber, db);
+
+    // Get archive sibling path before any block in this checkpoint lands.
+    const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+
+    // Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+    const {
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeRootSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeRootSiblingPath,
+    } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+
+    this.provingState.startNewCheckpoint(
+      checkpointIndex,
+      constants,
+      totalNumBlocks,
+      totalNumBlobFields,
+      headerOfLastBlockInPreviousCheckpoint,
+      lastArchiveSiblingPath,
+      l1ToL2Messages,
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeRootSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeRootSiblingPath,
+    );
+  }
+
   /**
    * Starts off a new block
-   * @param
-   * @param
-   *
+   * @param blockNumber - The block number
+   * @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+   * block that doesn't have any txs.
+   * @param totalNumTxs - The total number of txs in the block
    */
-  @trackSpan('ProvingOrchestrator.startNewBlock',
-    [Attributes.BLOCK_NUMBER]:
+  @trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
+    [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async startNewBlock(
+  public async startNewBlock(blockNumber: number, timestamp: UInt64, totalNumTxs: number) {
     if (!this.provingState) {
-      throw new Error(
+      throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
     }

-
-
+    const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+    if (!checkpointProvingState) {
+      throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
     }

-
+    if (!checkpointProvingState.isAcceptingBlocks()) {
+      throw new Error(`Checkpoint not accepting further blocks.`);
+    }

-
-
-    this.dbs.set(globalVariables.blockNumber, db);
+    const constants = checkpointProvingState.constants;
+    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber.toNumber()}.`);

-    //
-
-
-
-
-
-
-
-    // Get archive snapshot before this block lands
-    const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
-    const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+    // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+    if (!this.dbs.has(blockNumber)) {
+      // Fork world state at the end of the immediately previous block
+      const db = await this.dbProvider.fork(blockNumber - 1);
+      this.dbs.set(blockNumber, db);
+    }
+    const db = this.dbs.get(blockNumber)!;

-
-
-
-
-
-
-
+    // Get archive snapshot and sibling path before any txs in this block lands.
+    const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+    const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+
+    const blockProvingState = await checkpointProvingState.startNewBlock(
+      blockNumber,
+      timestamp,
+      totalNumTxs,
+      lastArchiveTreeSnapshot,
       lastArchiveSiblingPath,
-      newArchiveSiblingPath,
-      previousBlockHeader,
-      this.proverId,
     );

-    // Enqueue base parity circuits for the block
-
-
+    // Enqueue base parity circuits for the first block in the checkpoint.
+    if (blockProvingState.index === 0) {
+      for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) {
+        this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+      }
+    }
+
+    // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+    // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+    if (totalNumTxs === 0) {
+      const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+      await endSpongeBlob.absorb([createBlockEndMarker(0)]);
+      blockProvingState.setEndSpongeBlob(endSpongeBlob);
+
+      // And also try to accumulate the blobs as far as we can:
+      await this.provingState.setBlobAccumulators();
     }
   }

@@ -195,28 +265,40 @@ export class ProvingOrchestrator implements EpochProver {
     [Attributes.BLOCK_TXS_COUNT]: txs.length,
   }))
   public async addTxs(txs: ProcessedTx[]): Promise<void> {
+    if (!this.provingState) {
+      throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+    }
+
     if (!txs.length) {
       // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
       // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
       logger.warn(`Provided no txs to orchestrator addTxs.`);
       return;
     }
+
     const blockNumber = txs[0].globalVariables.blockNumber;
-    const provingState = this.provingState
+    const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
     if (!provingState) {
-      throw new Error(`
+      throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
+    }
+
+    if (provingState.totalNumTxs !== txs.length) {
+      throw new Error(
+        `Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`,
+      );
     }

-    if (provingState.
+    if (!provingState.isAcceptingTxs()) {
       throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
     }

-
-
+    logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+
+    const db = this.dbs.get(blockNumber)!;
+    const lastArchive = provingState.lastArchiveTreeSnapshot;
+    const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+    const spongeBlobState = provingState.getStartSpongeBlob().clone();

-    logger.info(
-      `Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState.blockNumber}`,
-    );
     for (const tx of txs) {
       try {
         if (!provingState.verifyState()) {
@@ -227,13 +309,30 @@ export class ProvingOrchestrator implements EpochProver {

         logger.info(`Received transaction: ${tx.hash}`);

-        const
-        const
+        const startSpongeBlob = spongeBlobState.clone();
+        const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
+          tx,
+          lastArchive,
+          newL1ToL2MessageTreeSnapshot,
+          startSpongeBlob,
+          db,
+        );
+
+        if (!provingState.verifyState()) {
+          throw new Error(`Unable to add transaction, preparing base inputs failed`);
+        }
+
+        await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+
+        const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
         const txIndex = provingState.addNewTx(txProvingState);
-        this.getOrEnqueueTube(provingState, txIndex);
         if (txProvingState.requireAvmProof) {
+          this.getOrEnqueueChonkVerifier(provingState, txIndex);
           logger.debug(`Enqueueing public VM for tx ${txIndex}`);
           this.enqueueVM(provingState, txIndex);
+        } else {
+          logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+          this.enqueueBaseRollup(provingState, txIndex);
         }
       } catch (err: any) {
         throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -241,24 +340,40 @@ export class ProvingOrchestrator implements EpochProver {
         });
       }
     }
+
+    await spongeBlobState.absorb([createBlockEndMarker(txs.length)]);
+
+    provingState.setEndSpongeBlob(spongeBlobState);
+
+    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    await this.provingState.setBlobAccumulators();
   }

   /**
-   * Kickstarts
-   * Note that if the
+   * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+   * Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
    */
-  @trackSpan('ProvingOrchestrator.
-  public
+  @trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+  public startChonkVerifierCircuits(txs: Tx[]) {
     if (!this.provingState?.verifyState()) {
-      throw new Error(`
+      throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
     }
-
+    const publicTxs = txs.filter(tx => tx.data.forPublic);
+    for (const tx of publicTxs) {
       const txHash = tx.getTxHash().toString();
-      const
-      const tubeProof =
-
-
-
+      const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
+      const tubeProof =
+        promiseWithResolvers<
+          PublicInputsAndRecursiveProof<
+            PublicChonkVerifierPublicInputs,
+            typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+          >
+        >();
+      logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+      this.doEnqueueChonkVerifier(txHash, privateInputs, proof => {
+        tubeProof.resolve(proof);
+      });
+      this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
     }
     return Promise.resolve();
   }
@@ -270,58 +385,50 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: number) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<
+  public async setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<BlockHeader> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
     }

-    if
-
-
-
+    // Abort with specific error for the block if there's one.
+    const error = provingState.getError();
+    if (error) {
+      throw new Error(`Block proving failed: ${error}`);
     }

+    // Abort if the proving state is not valid due to errors occurred elsewhere.
     if (!provingState.verifyState()) {
-      throw new Error(`
+      throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
+    }
+
+    if (provingState.isAcceptingTxs()) {
+      throw new Error(
+        `Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`,
+      );
     }

     // And build the block header
     logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-    await this.
+    const header = await this.buildL2BlockHeader(provingState, expectedHeader);

-
-    await this.provingState?.setBlobAccumulators(blockNumber);
+    await this.verifyBuiltBlockAgainstSyncedState(provingState);

-
-    await this.checkAndEnqueueBlockRootRollup(provingState);
-    return provingState.block!;
+    return header;
   }

-
-
-    const
-    if (!block) {
-      throw new Error(`Block at index ${index} not available`);
-    }
-    return block;
-  }
+  private async buildL2BlockHeader(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
+    // Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
+    const txs = provingState.getProcessedTxs();

-
-    // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-    const txs = provingState.allTxs.map(a => a.processedTx);
+    const startSpongeBlob = provingState.getStartSpongeBlob();

     // Get db for this block
     const db = this.dbs.get(provingState.blockNumber)!;

     // Given we've applied every change from this block, now assemble the block header
     // and update the archive tree, so we're ready to start processing the next block
-    const
-      txs,
-      provingState.globalVariables,
-      provingState.newL1ToL2Messages,
-      db,
-    );
+    const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);

     if (expectedHeader && !header.equals(expectedHeader)) {
       logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
@@ -333,26 +440,65 @@ export class ProvingOrchestrator implements EpochProver {
       );
     await db.updateArchive(header);

-
-    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const l2Block = new L2Block(newArchive, header, body);
-
-    await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
+    provingState.setBuiltBlockHeader(header);

-
-    provingState.setBlock(l2Block);
+    return header;
   }

   // Flagged as protected to disable in certain unit tests
-  protected async verifyBuiltBlockAgainstSyncedState(
-    const
+  protected async verifyBuiltBlockAgainstSyncedState(provingState: BlockProvingState) {
+    const builtBlockHeader = provingState.getBuiltBlockHeader();
+    if (!builtBlockHeader) {
+      logger.debug('Block header not built yet, skipping header check.');
+      return;
+    }
+
+    const output = provingState.getBlockRootRollupOutput();
+    if (!output) {
+      logger.debug('Block root rollup proof not built yet, skipping header check.');
+      return;
+    }
+    const header = await buildHeaderFromCircuitOutputs(output);
+
+    if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+      logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+      provingState.reject(`Block header hash mismatch.`);
+      return;
+    }
+
+    // Get db for this block
+    const blockNumber = provingState.blockNumber;
+    const db = this.dbs.get(blockNumber)!;
+
+    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+    const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
     if (!syncedArchive.equals(newArchive)) {
-
-        `Archive tree mismatch for block ${
+      logger.error(
+        `Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(
           syncedArchive,
         )} but built ${inspect(newArchive)}`,
       );
+      provingState.reject(`Archive tree mismatch.`);
+      return;
+    }
+
+    const circuitArchive = output.newArchive;
+    if (!newArchive.equals(circuitArchive)) {
+      logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+      provingState.reject(`New archive mismatch.`);
+      return;
     }
+
+    // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+    // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+    // but have to make sure it only runs once all operations are completed, otherwise some function here
+    // will attempt to access the fork after it was closed.
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    void this.dbs
+      .get(blockNumber)
+      ?.close()
+      .then(() => this.dbs.delete(blockNumber))
+      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
   }

   /**
@@ -369,7 +515,11 @@ export class ProvingOrchestrator implements EpochProver {
   /**
    * Returns the proof for the current epoch.
    */
-  public async finalizeEpoch() {
+  public async finalizeEpoch(): Promise<{
+    publicInputs: RootRollupPublicInputs;
+    proof: Proof;
+    batchedBlobInputs: BatchedBlob;
+  }> {
     if (!this.provingState || !this.provingPromise) {
       throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
     }
@@ -379,14 +529,7 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Epoch proving failed: ${result.reason}`);
     }

-
-    // TODO(MW): EpochProvingState uses this.blocks.filter(b => !!b).length as total blocks, use this below:
-    const finalBlock = this.provingState.blocks[this.provingState.totalNumBlocks - 1];
-    if (!finalBlock || !finalBlock.endBlobAccumulator) {
-      throw new Error(`Epoch's final block not ready for finalize`);
-    }
-    const finalBatchedBlob = await finalBlock.endBlobAccumulator.finalize();
-    this.provingState.setFinalBatchedBlob(finalBatchedBlob);
+    await this.provingState.finalizeBatchedBlob();

     const epochProofResult = this.provingState.getEpochProofResult();

@@ -398,20 +541,6 @@ export class ProvingOrchestrator implements EpochProver {
     return epochProofResult;
   }

-  /**
-   * Starts the proving process for the given transaction and adds it to our state
-   * @param tx - The transaction whose proving we wish to commence
-   * @param provingState - The proving state being worked on
-   */
-  private async prepareTransaction(tx: ProcessedTx, provingState: BlockProvingState) {
-    const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-    if (!txInputs) {
-      // This should not be possible
-      throw new Error(`Unable to add transaction, preparing base inputs failed`);
-    }
-    return txInputs;
-  }
-
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -419,11 +548,11 @@ export class ProvingOrchestrator implements EpochProver {
    * @param job - The actual job, returns a promise notifying of the job's completion
    */
   private deferredProving<T>(
-    provingState: EpochProvingState |
+    provingState: EpochProvingState | CheckpointProvingState | BlockProvingState,
     request: (signal: AbortSignal) => Promise<T>,
     callback: (result: T) => void | Promise<void>,
   ) {
-    if (!provingState
+    if (!provingState.verifyState()) {
       logger.debug(`Not enqueuing job, state no longer valid`);
       return;
     }
@@ -441,7 +570,7 @@ export class ProvingOrchestrator implements EpochProver {
       }

       const result = await request(controller.signal);
-      if (!provingState
+      if (!provingState.verifyState()) {
         logger.debug(`State no longer valid, discarding result`);
         return;
       }
@@ -474,60 +603,58 @@ export class ProvingOrchestrator implements EpochProver {
     setImmediate(() => void safeJob());
   }

-  private async
-    const l1ToL2MessagesPadded = padArrayEnd(
+  private async updateL1ToL2MessageTree(l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations) {
+    const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
       l1ToL2Messages,
       Fr.ZERO,
       NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
       'Too many L1 to L2 messages',
     );
-    const baseParityInputs = times(NUM_BASE_PARITY_PER_ROOT_PARITY, i =>
-      BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()),
-    );

-    const
-
-    const l1ToL2MessageSubtreeSiblingPath = assertLength(
+    const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+    const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(
       await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
-
+      L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
     );

     // Update the local trees to include the new l1 to l2 messages
     await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-
+
+    const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+    const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(
+      await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
+      L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+    );

     return {
-
-
-
-
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeRootSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeRootSiblingPath,
     };
   }

   // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs',
+  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', tx => ({
     [Attributes.TX_HASH]: tx.hash.toString(),
   }))
   private async prepareBaseRollupInputs(
-    provingState: BlockProvingState,
     tx: ProcessedTx,
-
-
-
-
-
-
-    const db = this.dbs.get(provingState.blockNumber)!;
-
+    lastArchive: AppendOnlyTreeSnapshot,
+    newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
+    startSpongeBlob: SpongeBlob,
+    db: MerkleTreeWriteOperations,
+  ): Promise<[BaseRollupHints, TreeSnapshots]> {
     // We build the base rollup inputs using a mock proof and verification key.
-    // These will be overwritten later once we have proven the
+    // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
     const [ms, hints] = await elapsed(
       insertSideEffectsAndBuildBaseRollupHints(
         tx,
-
-
+        lastArchive,
+        newL1ToL2MessageTreeSnapshot,
+        startSpongeBlob,
+        this.proverId.toField(),
         db,
-        provingState.spongeBlobState,
       ),
     );

@@ -540,10 +667,6 @@ export class ProvingOrchestrator implements EpochProver {
     );
     const treeSnapshots: TreeSnapshots = new Map((await Promise.all(promises)).map(obj => [obj.key, obj.value]));

-    if (!provingState.verifyState()) {
-      logger.debug(`Discarding proving job, state no longer valid`);
-      return;
-    }
     return [hints, treeSnapshots];
   }

@@ -555,6 +678,11 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

+    if (!provingState.tryStartProvingBase(txIndex)) {
+      logger.debug(`Base rollup for tx ${txIndex} already started.`);
+      return;
+    }
+
     const txProvingState = provingState.getTxProvingState(txIndex);
     const { processedTx } = txProvingState;
     const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
@@ -566,69 +694,81 @@ export class ProvingOrchestrator implements EpochProver {
       wrapCallbackInSpan(
         this.tracer,
         `ProvingOrchestrator.prover.${
-          inputs instanceof
+          inputs instanceof PrivateTxBaseRollupPrivateInputs
+            ? 'getPrivateTxBaseRollupProof'
+            : 'getPublicTxBaseRollupProof'
         }`,
         {
           [Attributes.TX_HASH]: processedTx.hash.toString(),
           [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
         },
         signal => {
-          if (inputs instanceof
-            return this.prover.
+          if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+            return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
           } else {
-            return this.prover.
+            return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
           }
         },
       ),
-
+      result => {
         logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-        validatePartialState(result.inputs.
+        validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
         const leafLocation = provingState.setBaseRollupProof(txIndex, result);
         if (provingState.totalNumTxs === 1) {
-
+          this.checkAndEnqueueBlockRootRollup(provingState);
         } else {
-
+          this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
         }
       },
     );
   }

-  // Enqueues the
-  // Once completed, will enqueue the
-  private
+  // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+  // Once completed, will enqueue the the public tx base rollup.
+  private getOrEnqueueChonkVerifier(provingState: BlockProvingState, txIndex: number) {
     if (!provingState.verifyState()) {
-      logger.debug('Not running
+      logger.debug('Not running chonk verifier circuit, state invalid');
       return;
     }

     const txProvingState = provingState.getTxProvingState(txIndex);
     const txHash = txProvingState.processedTx.hash.toString();
-
-    const handleResult = (
-
-
-
-
+      NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
+    const handleResult = (
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
+    ) => {
+      logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, { txHash });
+      txProvingState.setPublicChonkVerifierProof(result);
+      this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+      this.checkAndEnqueueBaseRollup(provingState, txIndex);
     };

-    if (this.provingState?.
-      logger.debug(`
-      void this.provingState!.
+    if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+      logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, { txHash });
+      void this.provingState!.cachedChonkVerifierProofs.get(txHash)!.then(handleResult);
       return;
     }

-    logger.debug(`Enqueuing
-    this.
+    logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+    this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
   }

-  private
+  private doEnqueueChonkVerifier(
     txHash: string,
-    inputs:
-    handler: (
+    inputs: PublicChonkVerifierPrivateInputs,
+    handler: (
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
+    ) => void,
     provingState: EpochProvingState | BlockProvingState = this.provingState!,
   ) {
-    if (!provingState
-      logger.debug('Not running
+    if (!provingState.verifyState()) {
+      logger.debug('Not running chonk verifier circuit, state invalid');
       return;
     }

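The chonk-verifier plumbing above and the earlier startChonkVerifierCircuits hunk share one caching idea: a proof job can be started ahead of time, its in-flight promise is stored in cachedChonkVerifierProofs keyed by tx hash, and the later per-tx enqueue either chains onto that promise or starts a fresh job, dropping the cache entry once consumed. Below is a self-contained sketch of that pattern; ProofCache, ProofResult and ProveFn are hypothetical names introduced for illustration, not the package's API.

type ProofResult = { txHash: string; proof: Uint8Array };
type ProveFn = (txHash: string) => Promise<ProofResult>;

class ProofCache {
  private readonly pending = new Map<string, Promise<ProofResult>>();

  // Kick off proving early and remember the in-flight promise, mirroring startChonkVerifierCircuits.
  startEarly(txHash: string, prove: ProveFn): void {
    if (!this.pending.has(txHash)) {
      this.pending.set(txHash, prove(txHash));
    }
  }

  // Reuse the in-flight proof if one exists, otherwise enqueue it now,
  // mirroring getOrEnqueueChonkVerifier. The cache entry is removed once consumed.
  getOrEnqueue(txHash: string, prove: ProveFn): Promise<ProofResult> {
    const cached = this.pending.get(txHash);
    if (cached) {
      this.pending.delete(txHash);
      return cached;
    }
    return prove(txHash);
  }
}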
@@ -636,12 +776,12 @@ export class ProvingOrchestrator implements EpochProver {
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getPublicChonkVerifierProof',
         {
           [Attributes.TX_HASH]: txHash,
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: '
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public' satisfies CircuitName,
         },
-        signal => this.prover.
+        signal => this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber),
       ),
       handler,
     );
@@ -655,39 +795,45 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

+    if (!provingState.tryStartProvingMerge(location)) {
+      logger.debug('Merge rollup already started.');
+      return;
+    }
+
     const inputs = provingState.getMergeRollupInputs(location);

     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getTxMergeRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'merge
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge' satisfies CircuitName,
         },
-        signal => this.prover.
+        signal => this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber),
       ),
-
+      result => {
         provingState.setMergeRollupProof(location, result);
-
+        this.checkAndEnqueueNextMergeRollup(provingState, location);
       },
     );
   }

   // Executes the block root rollup circuit
-  private
+  private enqueueBlockRootRollup(provingState: BlockProvingState) {
     if (!provingState.verifyState()) {
       logger.debug('Not running block root rollup, state no longer valid');
       return;
     }

-    provingState.
+    if (!provingState.tryStartProvingBlockRoot()) {
+      logger.debug('Block root rollup already started.');
+      return;
+    }

-    const { rollupType, inputs } =
+    const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();

-    logger.debug(
-      `Enqueuing ${rollupType} for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs.`,
-    );
+    logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);

     this.deferredProving(
       provingState,
@@ -698,56 +844,32 @@ export class ProvingOrchestrator implements EpochProver {
           [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
         },
         signal => {
-          if (inputs instanceof
-            return this.prover.
-          } else if (inputs instanceof
-            return this.prover.
+          if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+            return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+          } else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+            return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+          } else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+            return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+          } else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+            return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
           } else {
             return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
           }
         },
       ),
       async result => {
-
-
-        if (!(await header.hash()).equals(await provingState.block!.header.hash())) {
-          logger.error(
-            `Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(provingState.block!.header)}`,
-          );
-          provingState.reject(`Block header hash mismatch.`);
-        }
-
-        const dbArchiveRoot = provingState.block!.archive.root;
-        const circuitArchiveRoot = result.inputs.newArchive.root;
-        if (!dbArchiveRoot.equals(circuitArchiveRoot)) {
-          logger.error(
-            `New archive root mismatch.\nCircuit: ${result.inputs.newArchive.root}\nComputed: ${dbArchiveRoot}`,
-          );
-          provingState.reject(`New archive root mismatch.`);
-        }
+        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+        await this.verifyBuiltBlockAgainstSyncedState(provingState);

-
-          provingState.endBlobAccumulator!,
-        );
-        const circuitEndBlobAccumulatorState = result.inputs.blobPublicInputs.endBlobAccumulator;
-        if (!circuitEndBlobAccumulatorState.equals(endBlobAccumulatorPublicInputs)) {
-          logger.error(
-            `Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
-              endBlobAccumulatorPublicInputs,
-            )}`,
-          );
-          provingState.reject(`Blob accumulator state mismatch.`);
-        }
+        logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);

-
-
+        const leafLocation = provingState.setBlockRootRollupProof(result);
+        const checkpointProvingState = provingState.parentCheckpoint;

-
-
-        if (epochProvingState.totalNumBlocks === 1) {
-          this.enqueueEpochPadding(epochProvingState);
+        if (checkpointProvingState.totalNumBlocks === 1) {
+          this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
         } else {
-          this.checkAndEnqueueNextBlockMergeRollup(
+          this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
         }
       },
     );
@@ -755,24 +877,35 @@ export class ProvingOrchestrator implements EpochProver {

   // Executes the base parity circuit and stores the intermediate state for the root parity circuit
   // Enqueues the root parity circuit if all inputs are available
-  private enqueueBaseParityCircuit(
+  private enqueueBaseParityCircuit(
+    checkpointProvingState: CheckpointProvingState,
+    provingState: BlockProvingState,
+    baseParityIndex: number,
+  ) {
     if (!provingState.verifyState()) {
       logger.debug('Not running base parity. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+      logger.warn(`Base parity ${baseParityIndex} already started.`);
+      return;
+    }
+
+    const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
+
     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
         'ProvingOrchestrator.prover.getBaseParityProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'base
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base' satisfies CircuitName,
         },
         signal => this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber),
       ),
       provingOutput => {
-        provingState.setBaseParityProof(
+        provingState.setBaseParityProof(baseParityIndex, provingOutput);
         this.checkAndEnqueueRootParityCircuit(provingState);
       },
     );
@@ -794,7 +927,12 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

-
+    if (!provingState.tryStartProvingRootParity()) {
+      logger.debug('Root parity already started.');
+      return;
+    }
+
+    const inputs = provingState.getParityRootInputs();

     this.deferredProving(
       provingState,
@@ -802,25 +940,30 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getRootParityProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root' satisfies CircuitName,
         },
         signal => this.prover.getRootParityProof(inputs, signal, provingState.epochNumber),
       ),
-
+      result => {
         provingState.setRootParityProof(result);
-
+        this.checkAndEnqueueBlockRootRollup(provingState);
       },
     );
   }

   // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
   // Enqueues the next level of merge if all inputs are available
-  private enqueueBlockMergeRollup(provingState:
+  private enqueueBlockMergeRollup(provingState: CheckpointProvingState, location: TreeNodeLocation) {
     if (!provingState.verifyState()) {
       logger.debug('Not running block merge rollup. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingBlockMerge(location)) {
+      logger.debug('Block merge rollup already started.');
+      return;
+    }
+
     const inputs = provingState.getBlockMergeRollupInputs(location);
     this.deferredProving(
       provingState,
@@ -828,7 +971,7 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getBlockMergeRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge' satisfies CircuitName,
         },
         signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber),
       ),
@@ -839,29 +982,125 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

+  private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint root rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointRoot()) {
+      logger.debug('Checkpoint root rollup already started.');
+      return;
+    }
+
+    const rollupType = provingState.getCheckpointRootRollupType();
+
+    logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+
+    const inputs = provingState.getCheckpointRootRollupInputs();
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointRootRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
+        },
+        signal => {
+          if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+            return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+          } else {
+            return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+          }
+        },
+      ),
+      result => {
+        const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator()!.toBlobAccumulator();
+        const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+        if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+          logger.error(
+            `Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
+              computedEndBlobAccumulatorState,
+            )}`,
+          );
+          provingState.reject(`Blob accumulator state mismatch.`);
+          return;
+        }
+
+        logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+
+        const leafLocation = provingState.setCheckpointRootRollupProof(result);
+        const epochProvingState = provingState.parentEpoch;
+
+        if (epochProvingState.totalNumCheckpoints === 1) {
+          this.enqueueEpochPadding(epochProvingState);
+        } else {
+          this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+        }
+      },
+    );
+  }
+
+  private enqueueCheckpointMergeRollup(provingState: EpochProvingState, location: TreeNodeLocation) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointMerge(location)) {
+      logger.debug('Checkpoint merge rollup already started.');
+      return;
+    }
+
+    const inputs = provingState.getCheckpointMergeRollupInputs(location);
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointMergeRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge' satisfies CircuitName,
+        },
+        signal => this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber),
+      ),
+      result => {
+        logger.debug('Completed proof for checkpoint merge rollup.');
+        provingState.setCheckpointMergeRollupProof(location, result);
+        this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+      },
+    );
+  }
+
   private enqueueEpochPadding(provingState: EpochProvingState) {
     if (!provingState.verifyState()) {
       logger.debug('Not running epoch padding. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingPaddingCheckpoint()) {
+      logger.debug('Padding checkpoint already started.');
+      return;
+    }
+
     logger.debug('Padding epoch proof with a padding block root proof.');

-    const inputs = provingState.
+    const inputs = provingState.getPaddingCheckpointInputs();

     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: '
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding' satisfies CircuitName,
         },
-        signal => this.prover.
+        signal => this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber),
       ),
       result => {
-        logger.debug('Completed proof for padding
-        provingState.
+        logger.debug('Completed proof for padding checkpoint.');
+        provingState.setCheckpointPaddingProof(result);
         this.checkAndEnqueueRootRollup(provingState);
       },
     );
@@ -884,7 +1123,7 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getRootRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root' satisfies CircuitName,
         },
         signal => this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber),
       ),
@@ -896,48 +1135,51 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  private
+  private checkAndEnqueueNextMergeRollup(provingState: BlockProvingState, currentLocation: TreeNodeLocation) {
     if (!provingState.isReadyForMergeRollup(currentLocation)) {
       return;
     }

     const parentLocation = provingState.getParentLocation(currentLocation);
     if (parentLocation.level === 0) {
-
+      this.checkAndEnqueueBlockRootRollup(provingState);
     } else {
       this.enqueueMergeRollup(provingState, parentLocation);
     }
   }

-  private
-    const blockNumber = provingState.blockNumber;
-    // Accumulate as far as we can, in case blocks came in out of order and we are behind:
-    await this.provingState?.setBlobAccumulators(blockNumber);
+  private checkAndEnqueueBlockRootRollup(provingState: BlockProvingState) {
     if (!provingState.isReadyForBlockRootRollup()) {
       logger.debug('Not ready for block root rollup');
       return;
     }
-
-
+
+    this.enqueueBlockRootRollup(provingState);
+  }
+
+  private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForBlockMerge(currentLocation)) {
       return;
     }

-
-
-
-
-
-
-
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    const parentLocation = provingState.getParentLocation(currentLocation);
+    if (parentLocation.level === 0) {
+      this.checkAndEnqueueCheckpointRootRollup(provingState);
+    } else {
+      this.enqueueBlockMergeRollup(provingState, parentLocation);
+    }
+  }

-
+  private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.isReadyForCheckpointRoot()) {
+      return;
+    }
+
+    this.enqueueCheckpointRootRollup(provingState);
   }

-  private
-    if (!provingState.
+  private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
       return;
     }

@@ -945,7 +1187,7 @@ export class ProvingOrchestrator implements EpochProver {
     if (parentLocation.level === 0) {
       this.checkAndEnqueueRootRollup(provingState);
     } else {
-      this.
+      this.enqueueCheckpointMergeRollup(provingState, parentLocation);
     }
   }

@@ -1012,17 +1254,17 @@ export class ProvingOrchestrator implements EpochProver {
     this.deferredProving(provingState, doAvmProving, proofAndVk => {
       logger.debug(`Proven VM for tx index: ${txIndex}`);
       txProvingState.setAvmProof(proofAndVk);
-      this.
+      this.checkAndEnqueueBaseRollup(provingState, txIndex);
     });
   }

-  private
+  private checkAndEnqueueBaseRollup(provingState: BlockProvingState, txIndex: number) {
     const txProvingState = provingState.getTxProvingState(txIndex);
     if (!txProvingState.ready()) {
       return;
     }

-    // We must have completed all proving (
+    // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
     logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);

     this.enqueueBaseRollup(provingState, txIndex);