@aztec/prover-client 3.0.0-nightly.20250916 → 3.0.0-nightly.20250918
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/light.d.ts +5 -3
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +16 -9
- package/dest/mocks/fixtures.d.ts +3 -1
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +19 -2
- package/dest/mocks/test_context.d.ts +30 -9
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +68 -15
- package/dest/orchestrator/block-building-helpers.d.ts +16 -14
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +69 -66
- package/dest/orchestrator/block-proving-state.d.ts +53 -46
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +209 -172
- package/dest/orchestrator/checkpoint-proving-state.d.ts +62 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +208 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +32 -25
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +132 -81
- package/dest/orchestrator/orchestrator.d.ts +25 -24
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +318 -190
- package/dest/prover-client/server-epoch-prover.d.ts +8 -7
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +7 -7
- package/dest/proving_broker/broker_prover_facade.d.ts +12 -7
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +30 -15
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +18 -7
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +26 -6
- package/dest/test/mock_prover.d.ts +12 -7
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +25 -10
- package/package.json +15 -15
- package/src/block-factory/light.ts +33 -9
- package/src/mocks/fixtures.ts +25 -7
- package/src/mocks/test_context.ts +113 -21
- package/src/orchestrator/block-building-helpers.ts +107 -93
- package/src/orchestrator/block-proving-state.ts +225 -212
- package/src/orchestrator/checkpoint-proving-state.ts +294 -0
- package/src/orchestrator/epoch-proving-state.ts +169 -121
- package/src/orchestrator/orchestrator.ts +466 -247
- package/src/prover-client/server-epoch-prover.ts +30 -16
- package/src/proving_broker/broker_prover_facade.ts +145 -71
- package/src/proving_broker/proving_broker.ts +24 -6
- package/src/proving_broker/proving_job_controller.ts +26 -6
- package/src/test/mock_prover.ts +105 -28
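The headline change in this release is a new checkpoint layer between the epoch and block proving states (see the new checkpoint-proving-state files above). The diff that follows covers the orchestrator source; its hunk ranges line up with the +466 −247 change to package/src/orchestrator/orchestrator.ts listed above. Based only on the method signatures visible in that diff, the prover-facing lifecycle now runs epoch -> checkpoint -> block -> txs -> finalize, roughly as in the sketch below. The CheckpointedOrchestrator interface and proveOneBlockEpoch helper are illustrative stand-ins, not exports of this package, and the real Aztec argument types are simplified to `unknown`.

// Hypothetical sketch of the reworked orchestrator lifecycle, inferred from the diff below.
// The interface mirrors the new signatures with simplified types; it is not the package's own API surface.
interface CheckpointedOrchestrator {
  startNewEpoch(epochNumber: number, firstCheckpointNumber: unknown, totalNumCheckpoints: number, finalBlobBatchingChallenges: unknown): void;
  startNewCheckpoint(constants: unknown, l1ToL2Messages: unknown[], totalNumBlocks: number, totalNumBlobFields: number, headerOfLastBlockInPreviousCheckpoint: unknown): Promise<void>;
  startNewBlock(blockNumber: number, timestamp: bigint, totalNumTxs: number): Promise<void>;
  addTxs(txs: unknown[]): Promise<void>;
  setBlockCompleted(blockNumber: number, expectedHeader?: unknown): Promise<unknown>;
  finalizeEpoch(): Promise<{ publicInputs: unknown; proof: unknown; batchedBlobInputs: unknown }>;
}

// Drive a minimal epoch: one checkpoint containing one block.
export async function proveOneBlockEpoch(
  orchestrator: CheckpointedOrchestrator,
  input: {
    epochNumber: number;
    firstCheckpointNumber: unknown;
    finalBlobBatchingChallenges: unknown;
    checkpointConstants: unknown;
    l1ToL2Messages: unknown[];
    previousBlockHeader: unknown;
    totalNumBlobFields: number;
    blockNumber: number;
    timestamp: bigint;
    txs: unknown[];
  },
) {
  // The epoch is now sized in checkpoints rather than blocks.
  orchestrator.startNewEpoch(input.epochNumber, input.firstCheckpointNumber, 1, input.finalBlobBatchingChallenges);

  // Opening a checkpoint forks world state and inserts the L1-to-L2 messages.
  await orchestrator.startNewCheckpoint(
    input.checkpointConstants,
    input.l1ToL2Messages,
    1,
    input.totalNumBlobFields,
    input.previousBlockHeader,
  );

  // Blocks are opened inside the checkpoint and then filled with exactly totalNumTxs txs.
  await orchestrator.startNewBlock(input.blockNumber, input.timestamp, input.txs.length);
  await orchestrator.addTxs(input.txs);

  // setBlockCompleted now returns the built block header; finalizeEpoch returns the typed proof result.
  const header = await orchestrator.setBlockCompleted(input.blockNumber);
  const epochProof = await orchestrator.finalizeEpoch();
  return { header, epochProof };
}

The concrete types behind the `unknown` placeholders are visible in the import block at the top of the diff (CheckpointConstantData, BlockHeader, Fr, UInt64, ProcessedTx, RootRollupPublicInputs, Proof, BatchedBlob).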
@@ -1,4 +1,4 @@
-import { BlobAccumulatorPublicInputs, FinalBlobBatchingChallenges } from '@aztec/blob-lib';
+import { BatchedBlob, BlobAccumulatorPublicInputs, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
 import {
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
   L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
@@ -6,7 +6,7 @@ import {
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
 } from '@aztec/constants';
-import { padArrayEnd
+import { padArrayEnd } from '@aztec/foundation/collection';
 import { AbortError } from '@aztec/foundation/error';
 import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
@@ -15,9 +15,8 @@ import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
-import { EthAddress,
+import { EthAddress, createBlockEndMarker } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
@@ -26,17 +25,23 @@ import type {
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
 import type { PrivateToPublicKernelCircuitPublicInputs } from '@aztec/stdlib/kernel';
-import {
+import type { Proof } from '@aztec/stdlib/proofs';
 import {
   type BaseRollupHints,
-
+  BlockRootEmptyTxFirstRollupPrivateInputs,
+  BlockRootFirstRollupPrivateInputs,
+  BlockRootSingleTxFirstRollupPrivateInputs,
+  BlockRootSingleTxRollupPrivateInputs,
+  CheckpointConstantData,
+  CheckpointRootSingleBlockRollupPrivateInputs,
   PrivateBaseRollupInputs,
   PublicTubePrivateInputs,
-
+  RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
 import { type AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
-import {
+import type { BlockHeader, ProcessedTx, Tx } from '@aztec/stdlib/tx';
+import type { UInt64 } from '@aztec/stdlib/types';
 import {
   Attributes,
   type TelemetryClient,
@@ -49,7 +54,8 @@ import {
 import { inspect } from 'util';

 import {
-
+  buildBlockHeaderFromTxs,
+  buildHeaderFromCircuitOutputs,
   getLastSiblingPath,
   getPublicTubePrivateInputsFromTx,
   getRootTreeSiblingPath,
@@ -60,6 +66,7 @@ import {
   validateTx,
 } from './block-building-helpers.js';
 import type { BlockProvingState } from './block-proving-state.js';
+import type { CheckpointProvingState } from './checkpoint-proving-state.js';
 import { EpochProvingState, type ProvingResult, type TreeSnapshots } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -112,80 +119,141 @@ export class ProvingOrchestrator implements EpochProver {

   public startNewEpoch(
     epochNumber: number,
-
-
+    firstCheckpointNumber: Fr,
+    totalNumCheckpoints: number,
     finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
   ) {
+    if (this.provingState?.verifyState()) {
+      throw new Error(
+        `Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`,
+      );
+    }
+
     const { promise: _promise, resolve, reject } = promiseWithResolvers<ProvingResult>();
     const promise = _promise.catch((reason): ProvingResult => ({ status: 'failure', reason }));
-
-      throw new Error(`Invalid number of blocks for epoch (got ${totalNumBlocks})`);
-    }
-    logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
+    logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
     this.provingState = new EpochProvingState(
       epochNumber,
-
-
+      firstCheckpointNumber,
+      totalNumCheckpoints,
       finalBlobBatchingChallenges,
+      provingState => this.checkAndEnqueueCheckpointRootRollup(provingState),
       resolve,
       reject,
     );
     this.provingPromise = promise;
   }

+  public async startNewCheckpoint(
+    constants: CheckpointConstantData,
+    l1ToL2Messages: Fr[],
+    totalNumBlocks: number,
+    totalNumBlobFields: number,
+    headerOfLastBlockInPreviousCheckpoint: BlockHeader,
+  ) {
+    if (!this.provingState) {
+      throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+    }
+
+    if (!this.provingState.isAcceptingCheckpoints()) {
+      throw new Error(`Epoch not accepting further checkpoints.`);
+    }
+
+    // Fork world state at the end of the immediately previous block.
+    const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+    const db = await this.dbProvider.fork(lastBlockNumber);
+
+    const firstBlockNumber = lastBlockNumber + 1;
+    this.dbs.set(firstBlockNumber, db);
+
+    // Get archive sibling path before any block in this checkpoint lands.
+    const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+
+    // Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+    const {
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeSiblingPath,
+    } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+
+    this.provingState.startNewCheckpoint(
+      constants,
+      totalNumBlocks,
+      totalNumBlobFields,
+      headerOfLastBlockInPreviousCheckpoint,
+      lastArchiveSiblingPath,
+      l1ToL2Messages,
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeSiblingPath,
+    );
+  }
+
   /**
    * Starts off a new block
-   * @param
-   * @param
-   *
+   * @param blockNumber - The block number
+   * @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+   * block that doesn't have any txs.
+   * @param totalNumTxs - The total number of txs in the block
    */
-  @trackSpan('ProvingOrchestrator.startNewBlock',
-    [Attributes.BLOCK_NUMBER]:
+  @trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
+    [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async startNewBlock(
+  public async startNewBlock(blockNumber: number, timestamp: UInt64, totalNumTxs: number) {
     if (!this.provingState) {
-      throw new Error(
+      throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
     }

-
-
+    const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+    if (!checkpointProvingState) {
+      throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
     }

-
+    if (!checkpointProvingState.isAcceptingBlocks()) {
+      throw new Error(`Checkpoint not accepting further blocks.`);
+    }

-
-
-    this.dbs.set(globalVariables.blockNumber, db);
+    const constants = checkpointProvingState.constants;
+    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber.toNumber()}.`);

-    //
-
-
-
-
-
-
-
-    // Get archive snapshot before this block lands
-    const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
-    const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+    // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+    if (!this.dbs.has(blockNumber)) {
+      // Fork world state at the end of the immediately previous block
+      const db = await this.dbProvider.fork(blockNumber - 1);
+      this.dbs.set(blockNumber, db);
+    }
+    const db = this.dbs.get(blockNumber)!;

-
-
-
-
-
-
-
+    // Get archive snapshot and sibling path before any txs in this block lands.
+    const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+    const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+
+    const blockProvingState = checkpointProvingState.startNewBlock(
+      blockNumber,
+      timestamp,
+      totalNumTxs,
+      lastArchiveTreeSnapshot,
       lastArchiveSiblingPath,
-      newArchiveSiblingPath,
-      previousBlockHeader,
-      this.proverId,
     );

-    // Enqueue base parity circuits for the block
-
-
+    // Enqueue base parity circuits for the first block in the checkpoint.
+    if (blockProvingState.index === 0) {
+      for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) {
+        this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+      }
+    }
+
+    // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+    // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+    if (totalNumTxs === 0) {
+      const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+      await endSpongeBlob.absorb([createBlockEndMarker(0)]);
+      blockProvingState.setEndSpongeBlob(endSpongeBlob);
+
+      // And also try to accumulate the blobs as far as we can:
+      await this.provingState.setBlobAccumulators();
     }
   }

@@ -197,28 +265,40 @@ export class ProvingOrchestrator implements EpochProver {
     [Attributes.BLOCK_TXS_COUNT]: txs.length,
   }))
   public async addTxs(txs: ProcessedTx[]): Promise<void> {
+    if (!this.provingState) {
+      throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+    }
+
     if (!txs.length) {
       // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
       // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
       logger.warn(`Provided no txs to orchestrator addTxs.`);
       return;
     }
+
     const blockNumber = txs[0].globalVariables.blockNumber;
-    const provingState = this.provingState
+    const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
     if (!provingState) {
-      throw new Error(`
+      throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
+    }
+
+    if (provingState.totalNumTxs !== txs.length) {
+      throw new Error(
+        `Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`,
+      );
     }

-    if (provingState.
+    if (!provingState.isAcceptingTxs()) {
       throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
     }

-
-
+    logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+
+    const db = this.dbs.get(blockNumber)!;
+    const lastArchive = provingState.lastArchiveTreeSnapshot;
+    const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+    const spongeBlobState = provingState.getStartSpongeBlob().clone();

-    logger.info(
-      `Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState.blockNumber}`,
-    );
     for (const tx of txs) {
       try {
         if (!provingState.verifyState()) {
@@ -229,7 +309,21 @@ export class ProvingOrchestrator implements EpochProver {

         logger.info(`Received transaction: ${tx.hash}`);

-        const
+        const startSpongeBlob = spongeBlobState.clone();
+        const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
+          tx,
+          lastArchive,
+          newL1ToL2MessageTreeSnapshot,
+          startSpongeBlob,
+          db,
+        );
+
+        if (!provingState.verifyState()) {
+          throw new Error(`Unable to add transaction, preparing base inputs failed`);
+        }
+
+        await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+
         const txProvingState = new TxProvingState(tx, hints, treeSnapshots);
         const txIndex = provingState.addNewTx(txProvingState);
         if (txProvingState.requireAvmProof) {
@@ -246,6 +340,13 @@ export class ProvingOrchestrator implements EpochProver {
         });
       }
     }
+
+    await spongeBlobState.absorb([createBlockEndMarker(txs.length)]);
+
+    provingState.setEndSpongeBlob(spongeBlobState);
+
+    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    await this.provingState.setBlobAccumulators();
   }

   /**
@@ -255,7 +356,7 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.startTubeCircuits')
   public startTubeCircuits(txs: Tx[]) {
     if (!this.provingState?.verifyState()) {
-      throw new Error(`
+      throw new Error(`Empty epoch proving state. call startNewEpoch before starting tube circuits.`);
     }
     const publicTxs = txs.filter(tx => tx.data.forPublic);
     for (const tx of publicTxs) {
@@ -272,7 +373,7 @@ export class ProvingOrchestrator implements EpochProver {
       this.doEnqueueTube(txHash, privateInputs, proof => {
        tubeProof.resolve(proof);
       });
-      this.provingState
+      this.provingState.cachedTubeProofs.set(txHash, tubeProof.promise);
     }
     return Promise.resolve();
   }
@@ -284,58 +385,50 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: number) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<
+  public async setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<BlockHeader> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
     }

-    if
-
-
-
+    // Abort with specific error for the block if there's one.
+    const error = provingState.getError();
+    if (error) {
+      throw new Error(`Block proving failed: ${error}`);
     }

+    // Abort if the proving state is not valid due to errors occurred elsewhere.
     if (!provingState.verifyState()) {
-      throw new Error(`
+      throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
+    }
+
+    if (provingState.isAcceptingTxs()) {
+      throw new Error(
+        `Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`,
+      );
     }

     // And build the block header
     logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-    await this.
+    const header = await this.buildL2BlockHeader(provingState, expectedHeader);

-
-    await this.provingState?.setBlobAccumulators(blockNumber);
+    await this.verifyBuiltBlockAgainstSyncedState(provingState);

-
-    await this.checkAndEnqueueBlockRootRollup(provingState);
-    return provingState.block!;
+    return header;
   }

-
-
-    const
-    if (!block) {
-      throw new Error(`Block at index ${index} not available`);
-    }
-    return block;
-  }
+  private async buildL2BlockHeader(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
+    // Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
+    const txs = provingState.getProcessedTxs();

-
-    // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-    const txs = provingState.allTxs.map(a => a.processedTx);
+    const startSpongeBlob = provingState.getStartSpongeBlob();

     // Get db for this block
     const db = this.dbs.get(provingState.blockNumber)!;

     // Given we've applied every change from this block, now assemble the block header
     // and update the archive tree, so we're ready to start processing the next block
-    const
-      txs,
-      provingState.globalVariables,
-      provingState.newL1ToL2Messages,
-      db,
-    );
+    const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);

     if (expectedHeader && !header.equals(expectedHeader)) {
       logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
@@ -347,26 +440,65 @@ export class ProvingOrchestrator implements EpochProver {
     );
     await db.updateArchive(header);

-
-    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const l2Block = new L2Block(newArchive, header, body);
-
-    await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
+    provingState.setBuiltBlockHeader(header);

-
-    provingState.setBlock(l2Block);
+    return header;
   }

   // Flagged as protected to disable in certain unit tests
-  protected async verifyBuiltBlockAgainstSyncedState(
-    const
+  protected async verifyBuiltBlockAgainstSyncedState(provingState: BlockProvingState) {
+    const builtBlockHeader = provingState.getBuiltBlockHeader();
+    if (!builtBlockHeader) {
+      logger.debug('Block header not built yet, skipping header check.');
+      return;
+    }
+
+    const output = provingState.getBlockRootRollupOutput();
+    if (!output) {
+      logger.debug('Block root rollup proof not built yet, skipping header check.');
+      return;
+    }
+    const header = await buildHeaderFromCircuitOutputs(output);
+
+    if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+      logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+      provingState.reject(`Block header hash mismatch.`);
+      return;
+    }
+
+    // Get db for this block
+    const blockNumber = provingState.blockNumber;
+    const db = this.dbs.get(blockNumber)!;
+
+    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+    const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
     if (!syncedArchive.equals(newArchive)) {
-
-        `Archive tree mismatch for block ${
+      logger.error(
+        `Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(
           syncedArchive,
         )} but built ${inspect(newArchive)}`,
       );
+      provingState.reject(`Archive tree mismatch.`);
+      return;
+    }
+
+    const circuitArchive = output.newArchive;
+    if (!newArchive.equals(circuitArchive)) {
+      logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+      provingState.reject(`New archive mismatch.`);
+      return;
     }
+
+    // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+    // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+    // but have to make sure it only runs once all operations are completed, otherwise some function here
+    // will attempt to access the fork after it was closed.
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    void this.dbs
+      .get(blockNumber)
+      ?.close()
+      .then(() => this.dbs.delete(blockNumber))
+      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
   }

   /**
@@ -383,7 +515,11 @@ export class ProvingOrchestrator implements EpochProver {
   /**
    * Returns the proof for the current epoch.
    */
-  public async finalizeEpoch() {
+  public async finalizeEpoch(): Promise<{
+    publicInputs: RootRollupPublicInputs;
+    proof: Proof;
+    batchedBlobInputs: BatchedBlob;
+  }> {
     if (!this.provingState || !this.provingPromise) {
       throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
     }
@@ -393,14 +529,7 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Epoch proving failed: ${result.reason}`);
     }

-
-    // TODO(MW): EpochProvingState uses this.blocks.filter(b => !!b).length as total blocks, use this below:
-    const finalBlock = this.provingState.blocks[this.provingState.totalNumBlocks - 1];
-    if (!finalBlock || !finalBlock.endBlobAccumulator) {
-      throw new Error(`Epoch's final block not ready for finalize`);
-    }
-    const finalBatchedBlob = await finalBlock.endBlobAccumulator.finalize();
-    this.provingState.setFinalBatchedBlob(finalBatchedBlob);
+    await this.provingState.finalizeBatchedBlob();

     const epochProofResult = this.provingState.getEpochProofResult();

@@ -412,20 +541,6 @@ export class ProvingOrchestrator implements EpochProver {
     return epochProofResult;
   }

-  /**
-   * Starts the proving process for the given transaction and adds it to our state
-   * @param tx - The transaction whose proving we wish to commence
-   * @param provingState - The proving state being worked on
-   */
-  private async prepareTransaction(tx: ProcessedTx, provingState: BlockProvingState) {
-    const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-    if (!txInputs) {
-      // This should not be possible
-      throw new Error(`Unable to add transaction, preparing base inputs failed`);
-    }
-    return txInputs;
-  }
-
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -433,11 +548,11 @@ export class ProvingOrchestrator implements EpochProver {
    * @param job - The actual job, returns a promise notifying of the job's completion
    */
   private deferredProving<T>(
-    provingState: EpochProvingState |
+    provingState: EpochProvingState | CheckpointProvingState | BlockProvingState,
     request: (signal: AbortSignal) => Promise<T>,
     callback: (result: T) => void | Promise<void>,
   ) {
-    if (!provingState
+    if (!provingState.verifyState()) {
       logger.debug(`Not enqueuing job, state no longer valid`);
       return;
     }
@@ -455,7 +570,7 @@ export class ProvingOrchestrator implements EpochProver {
     }

     const result = await request(controller.signal);
-    if (!provingState
+    if (!provingState.verifyState()) {
       logger.debug(`State no longer valid, discarding result`);
       return;
     }
@@ -488,58 +603,58 @@ export class ProvingOrchestrator implements EpochProver {
     setImmediate(() => void safeJob());
   }

-  private async
+  private async updateL1ToL2MessageTree(l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations) {
     const l1ToL2MessagesPadded = padArrayEnd(
       l1ToL2Messages,
       Fr.ZERO,
       NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
       'Too many L1 to L2 messages',
     );
-    const baseParityInputs = times(NUM_BASE_PARITY_PER_ROOT_PARITY, i =>
-      BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()),
-    );

-    const
-
-    const l1ToL2MessageSubtreeSiblingPath = assertLength(
+    const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+    const lastL1ToL2MessageSubtreeSiblingPath = assertLength(
       await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
       L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
     );

     // Update the local trees to include the new l1 to l2 messages
     await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-
+
+    const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+    const newL1ToL2MessageSubtreeSiblingPath = assertLength(
+      await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
+      L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
+    );

     return {
-
-
-
-
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeSiblingPath,
     };
   }

   // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs',
+  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', tx => ({
     [Attributes.TX_HASH]: tx.hash.toString(),
   }))
   private async prepareBaseRollupInputs(
-    provingState: BlockProvingState,
     tx: ProcessedTx,
-
-
-
-
-
-
-
-
+    lastArchive: AppendOnlyTreeSnapshot,
+    newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
+    startSpongeBlob: SpongeBlob,
+    db: MerkleTreeWriteOperations,
+  ): Promise<[BaseRollupHints, TreeSnapshots]> {
+    // We build the base rollup inputs using a mock proof and verification key.
+    // These will be overwritten later once we have proven the tube circuit and any public kernels
     const [ms, hints] = await elapsed(
       insertSideEffectsAndBuildBaseRollupHints(
         tx,
-
-
+        lastArchive,
+        newL1ToL2MessageTreeSnapshot,
+        startSpongeBlob,
+        this.proverId.toField(),
         db,
-        provingState.spongeBlobState,
       ),
     );

@@ -552,10 +667,6 @@ export class ProvingOrchestrator implements EpochProver {
     );
     const treeSnapshots: TreeSnapshots = new Map((await Promise.all(promises)).map(obj => [obj.key, obj.value]));

-    if (!provingState.verifyState()) {
-      logger.debug(`Discarding proving job, state no longer valid`);
-      return;
-    }
     return [hints, treeSnapshots];
   }

@@ -567,6 +678,11 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

+    if (!provingState.tryStartProvingBase(txIndex)) {
+      logger.debug(`Base rollup for tx ${txIndex} already started.`);
+      return;
+    }
+
     const txProvingState = provingState.getTxProvingState(txIndex);
     const { processedTx } = txProvingState;
     const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
@@ -592,14 +708,14 @@ export class ProvingOrchestrator implements EpochProver {
           }
         },
       ),
-
+      result => {
        logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
        validatePartialState(result.inputs.end, txProvingState.treeSnapshots);
        const leafLocation = provingState.setBaseRollupProof(txIndex, result);
        if (provingState.totalNumTxs === 1) {
-
+          this.checkAndEnqueueBlockRootRollup(provingState);
        } else {
-
+          this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
        }
      },
     );
@@ -649,7 +765,7 @@ export class ProvingOrchestrator implements EpochProver {
     ) => void,
     provingState: EpochProvingState | BlockProvingState = this.provingState!,
   ) {
-    if (!provingState
+    if (!provingState.verifyState()) {
       logger.debug('Not running tube circuit, state invalid');
       return;
     }
@@ -663,7 +779,7 @@ export class ProvingOrchestrator implements EpochProver {
         [Attributes.TX_HASH]: txHash,
         [Attributes.PROTOCOL_CIRCUIT_NAME]: 'public-tube' satisfies CircuitName,
        },
-        signal => this.prover.getPublicTubeProof(inputs, signal,
+        signal => this.prover.getPublicTubeProof(inputs, signal, provingState.epochNumber),
       ),
       handler,
     );
@@ -677,6 +793,11 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

+    if (!provingState.tryStartProvingMerge(location)) {
+      logger.debug('Merge rollup already started.');
+      return;
+    }
+
     const inputs = provingState.getMergeRollupInputs(location);

     this.deferredProving(
@@ -689,27 +810,28 @@ export class ProvingOrchestrator implements EpochProver {
        },
        signal => this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber),
      ),
-
+      result => {
        provingState.setMergeRollupProof(location, result);
-
+        this.checkAndEnqueueNextMergeRollup(provingState, location);
      },
     );
   }

   // Executes the block root rollup circuit
-  private
+  private enqueueBlockRootRollup(provingState: BlockProvingState) {
     if (!provingState.verifyState()) {
       logger.debug('Not running block root rollup, state no longer valid');
       return;
     }

-    provingState.
+    if (!provingState.tryStartProvingBlockRoot()) {
+      logger.debug('Block root rollup already started.');
+      return;
+    }

-    const { rollupType, inputs } =
+    const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();

-    logger.debug(
-      `Enqueuing ${rollupType} for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs.`,
-    );
+    logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);

     this.deferredProving(
       provingState,
@@ -720,56 +842,32 @@ export class ProvingOrchestrator implements EpochProver {
         [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
        },
        signal => {
-          if (inputs instanceof
-            return this.prover.
-          } else if (inputs instanceof
-            return this.prover.
+          if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+            return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+          } else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+            return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+          } else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+            return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+          } else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+            return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
          } else {
            return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
          }
        },
      ),
      async result => {
-
-
-        if (!(await header.hash()).equals(await provingState.block!.header.hash())) {
-          logger.error(
-            `Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(provingState.block!.header)}`,
-          );
-          provingState.reject(`Block header hash mismatch.`);
-        }
+        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+        await this.verifyBuiltBlockAgainstSyncedState(provingState);

-
-        const circuitArchiveRoot = result.inputs.newArchive.root;
-        if (!dbArchiveRoot.equals(circuitArchiveRoot)) {
-          logger.error(
-            `New archive root mismatch.\nCircuit: ${result.inputs.newArchive.root}\nComputed: ${dbArchiveRoot}`,
-          );
-          provingState.reject(`New archive root mismatch.`);
-        }
+        logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);

-        const
-
-        );
-        const circuitEndBlobAccumulatorState = result.inputs.blobPublicInputs.endBlobAccumulator;
-        if (!circuitEndBlobAccumulatorState.equals(endBlobAccumulatorPublicInputs)) {
-          logger.error(
-            `Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
-              endBlobAccumulatorPublicInputs,
-            )}`,
-          );
-          provingState.reject(`Blob accumulator state mismatch.`);
-        }
+        const leafLocation = provingState.setBlockRootRollupProof(result);
+        const checkpointProvingState = provingState.parentCheckpoint;

-
-
-
-        const epochProvingState = this.provingState!;
-        const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
-        if (epochProvingState.totalNumBlocks === 1) {
-          this.enqueueEpochPadding(epochProvingState);
+        if (checkpointProvingState.totalNumBlocks === 1) {
+          this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
        } else {
-          this.checkAndEnqueueNextBlockMergeRollup(
+          this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
        }
      },
     );
@@ -777,12 +875,23 @@ export class ProvingOrchestrator implements EpochProver {

   // Executes the base parity circuit and stores the intermediate state for the root parity circuit
   // Enqueues the root parity circuit if all inputs are available
-  private enqueueBaseParityCircuit(
+  private enqueueBaseParityCircuit(
+    checkpointProvingState: CheckpointProvingState,
+    provingState: BlockProvingState,
+    baseParityIndex: number,
+  ) {
     if (!provingState.verifyState()) {
       logger.debug('Not running base parity. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+      logger.warn(`Base parity ${baseParityIndex} already started.`);
+      return;
+    }
+
+    const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
+
     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
@@ -794,7 +903,7 @@ export class ProvingOrchestrator implements EpochProver {
        signal => this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber),
      ),
      provingOutput => {
-        provingState.setBaseParityProof(
+        provingState.setBaseParityProof(baseParityIndex, provingOutput);
        this.checkAndEnqueueRootParityCircuit(provingState);
      },
     );
@@ -816,6 +925,11 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

+    if (!provingState.tryStartProvingRootParity()) {
+      logger.debug('Root parity already started.');
+      return;
+    }
+
     const inputs = provingState.getRootParityInputs();

     this.deferredProving(
@@ -828,21 +942,26 @@ export class ProvingOrchestrator implements EpochProver {
        },
        signal => this.prover.getRootParityProof(inputs, signal, provingState.epochNumber),
      ),
-
+      result => {
        provingState.setRootParityProof(result);
-
+        this.checkAndEnqueueBlockRootRollup(provingState);
      },
     );
   }

   // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
   // Enqueues the next level of merge if all inputs are available
-  private enqueueBlockMergeRollup(provingState:
+  private enqueueBlockMergeRollup(provingState: CheckpointProvingState, location: TreeNodeLocation) {
     if (!provingState.verifyState()) {
       logger.debug('Not running block merge rollup. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingBlockMerge(location)) {
+      logger.debug('Block merge rollup already started.');
+      return;
+    }
+
     const inputs = provingState.getBlockMergeRollupInputs(location);
     this.deferredProving(
       provingState,
@@ -861,29 +980,126 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

+  private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint root rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointRoot()) {
+      logger.debug('Checkpoint root rollup already started.');
+      return;
+    }
+
+    const rollupType = provingState.getCheckpointRootRollupType();
+
+    logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointRootRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
+        },
+        async signal => {
+          const inputs = await provingState.getCheckpointRootRollupInputs();
+          if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+            return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+          } else {
+            return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+          }
+        },
+      ),
+      result => {
+        const computedEndBlobAccumulatorState = BlobAccumulatorPublicInputs.fromBatchedBlobAccumulator(
+          provingState.getEndBlobAccumulator()!,
+        );
+        const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+        if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+          logger.error(
+            `Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
+              computedEndBlobAccumulatorState,
+            )}`,
+          );
+          provingState.reject(`Blob accumulator state mismatch.`);
+          return;
+        }
+
+        logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+
+        const leafLocation = provingState.setCheckpointRootRollupProof(result);
+        const epochProvingState = provingState.parentEpoch;
+
+        if (epochProvingState.totalNumCheckpoints === 1) {
+          this.enqueueEpochPadding(epochProvingState);
+        } else {
+          this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+        }
+      },
+    );
+  }
+
+  private enqueueCheckpointMergeRollup(provingState: EpochProvingState, location: TreeNodeLocation) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointMerge(location)) {
+      logger.debug('Checkpoint merge rollup already started.');
+      return;
+    }
+
+    const inputs = provingState.getCheckpointMergeRollupInputs(location);
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointMergeRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'checkpoint-merge-rollup' satisfies CircuitName,
+        },
+        signal => this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber),
+      ),
+      result => {
+        logger.debug('Completed proof for checkpoint merge rollup.');
+        provingState.setCheckpointMergeRollupProof(location, result);
+        this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+      },
+    );
+  }
+
   private enqueueEpochPadding(provingState: EpochProvingState) {
     if (!provingState.verifyState()) {
       logger.debug('Not running epoch padding. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingPaddingCheckpoint()) {
+      logger.debug('Padding checkpoint already started.');
+      return;
+    }
+
     logger.debug('Padding epoch proof with a padding block root proof.');

-    const inputs = provingState.
+    const inputs = provingState.getPaddingCheckpointInputs();

     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'padding-
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'checkpoint-padding-rollup' satisfies CircuitName,
         },
-        signal => this.prover.
+        signal => this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber),
       ),
       result => {
-        logger.debug('Completed proof for padding
-        provingState.
+        logger.debug('Completed proof for padding checkpoint.');
+        provingState.setCheckpointPaddingProof(result);
         this.checkAndEnqueueRootRollup(provingState);
       },
     );
@@ -918,48 +1134,51 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  private
+  private checkAndEnqueueNextMergeRollup(provingState: BlockProvingState, currentLocation: TreeNodeLocation) {
     if (!provingState.isReadyForMergeRollup(currentLocation)) {
       return;
     }

     const parentLocation = provingState.getParentLocation(currentLocation);
     if (parentLocation.level === 0) {
-
+      this.checkAndEnqueueBlockRootRollup(provingState);
     } else {
       this.enqueueMergeRollup(provingState, parentLocation);
     }
   }

-  private
-    const blockNumber = provingState.blockNumber;
-    // Accumulate as far as we can, in case blocks came in out of order and we are behind:
-    await this.provingState?.setBlobAccumulators(blockNumber);
+  private checkAndEnqueueBlockRootRollup(provingState: BlockProvingState) {
     if (!provingState.isReadyForBlockRootRollup()) {
       logger.debug('Not ready for block root rollup');
       return;
     }
-
-
+
+    this.enqueueBlockRootRollup(provingState);
+  }
+
+  private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForBlockMerge(currentLocation)) {
       return;
     }

-
-
-
-
-
-
-
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    const parentLocation = provingState.getParentLocation(currentLocation);
+    if (parentLocation.level === 0) {
+      this.checkAndEnqueueCheckpointRootRollup(provingState);
+    } else {
+      this.enqueueBlockMergeRollup(provingState, parentLocation);
+    }
+  }

-
+  private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.isReadyForCheckpointRoot()) {
+      return;
+    }
+
+    this.enqueueCheckpointRootRollup(provingState);
   }

-  private
-    if (!provingState.
+  private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
       return;
     }

@@ -967,7 +1186,7 @@ export class ProvingOrchestrator implements EpochProver {
     if (parentLocation.level === 0) {
       this.checkAndEnqueueRootRollup(provingState);
     } else {
-      this.
+      this.enqueueCheckpointMergeRollup(provingState, parentLocation);
     }
   }
