@aztec/prover-client 4.0.0-nightly.20250907 → 4.0.0-nightly.20260108
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +1 -1
- package/dest/block-factory/light.d.ts +5 -3
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +32 -11
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +2 -2
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +36 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +147 -0
- package/dest/mocks/fixtures.d.ts +5 -5
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +33 -15
- package/dest/mocks/test_context.d.ts +38 -33
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +133 -82
- package/dest/orchestrator/block-building-helpers.d.ts +35 -35
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +151 -187
- package/dest/orchestrator/block-proving-state.d.ts +68 -55
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +273 -185
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +210 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +38 -31
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +128 -84
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +35 -34
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +777 -292
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +0 -9
- package/dest/orchestrator/tx-proving-state.d.ts +12 -10
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +23 -29
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +1 -1
- package/dest/prover-client/server-epoch-prover.d.ts +13 -11
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +9 -9
- package/dest/proving_broker/broker_prover_facade.d.ts +23 -18
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +42 -33
- package/dest/proving_broker/config.d.ts +18 -14
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +12 -6
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/factory.js +1 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +2 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +3 -8
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +1 -16
- package/dest/proving_broker/proving_broker.d.ts +2 -2
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +35 -29
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +5 -3
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +397 -8
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts +3 -2
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +40 -21
- package/dest/proving_broker/rpc.d.ts +4 -4
- package/dest/test/mock_proof_store.d.ts +3 -3
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +23 -19
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +36 -21
- package/package.json +21 -19
- package/src/block-factory/light.ts +40 -17
- package/src/config.ts +2 -2
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +198 -0
- package/src/mocks/fixtures.ts +41 -36
- package/src/mocks/test_context.ts +196 -114
- package/src/orchestrator/block-building-helpers.ts +233 -313
- package/src/orchestrator/block-proving-state.ts +315 -247
- package/src/orchestrator/checkpoint-proving-state.ts +303 -0
- package/src/orchestrator/epoch-proving-state.ts +176 -129
- package/src/orchestrator/orchestrator.ts +558 -348
- package/src/orchestrator/orchestrator_metrics.ts +1 -20
- package/src/orchestrator/tx-proving-state.ts +47 -55
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +4 -11
- package/src/prover-client/server-epoch-prover.ts +30 -21
- package/src/proving_broker/broker_prover_facade.ts +175 -112
- package/src/proving_broker/config.ts +14 -7
- package/src/proving_broker/factory.ts +1 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proving_agent.ts +1 -17
- package/src/proving_broker/proving_broker.ts +42 -27
- package/src/proving_broker/proving_broker_database/memory.ts +2 -1
- package/src/proving_broker/proving_broker_database/persisted.ts +25 -10
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_job_controller.ts +42 -22
- package/src/test/mock_prover.ts +143 -66
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/bin/get-proof-inputs.ts +0 -59
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
@@ -1,41 +1,48 @@
-import {
+import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib/types';
 import {
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
-
+  L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+  NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
-  type TUBE_PROOF_LENGTH,
 } from '@aztec/constants';
-import {
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
+import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import {
-import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
-import { EthAddress, L2Block } from '@aztec/stdlib/block';
+import { EthAddress } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
   MerkleTreeWriteOperations,
-
+  PublicInputsAndRecursiveProof,
+  ReadonlyWorldStateAccess,
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
-import {
+import type { Proof } from '@aztec/stdlib/proofs';
 import {
   type BaseRollupHints,
-
-
-
-
+  BlockRootEmptyTxFirstRollupPrivateInputs,
+  BlockRootFirstRollupPrivateInputs,
+  BlockRootSingleTxFirstRollupPrivateInputs,
+  BlockRootSingleTxRollupPrivateInputs,
+  CheckpointConstantData,
+  CheckpointRootSingleBlockRollupPrivateInputs,
+  PrivateTxBaseRollupPrivateInputs,
+  PublicChonkVerifierPrivateInputs,
+  PublicChonkVerifierPublicInputs,
+  RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
 import { type AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
-import {
+import type { BlockHeader, ProcessedTx, Tx } from '@aztec/stdlib/tx';
+import type { UInt64 } from '@aztec/stdlib/types';
 import {
   Attributes,
   type TelemetryClient,
@@ -48,8 +55,9 @@ import {
 import { inspect } from 'util';
 
 import {
-
+  buildHeaderFromCircuitOutputs,
   getLastSiblingPath,
+  getPublicChonkVerifierPrivateInputsFromTx,
   getRootTreeSiblingPath,
   getSubtreeSiblingPath,
   getTreeSnapshot,
@@ -58,6 +66,7 @@ import {
   validateTx,
 } from './block-building-helpers.js';
 import type { BlockProvingState } from './block-proving-state.js';
+import type { CheckpointProvingState } from './checkpoint-proving-state.js';
 import { EpochProvingState, type ProvingResult, type TreeSnapshots } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -84,10 +93,11 @@ export class ProvingOrchestrator implements EpochProver {
 
   private provingPromise: Promise<ProvingResult> | undefined = undefined;
   private metrics: ProvingOrchestratorMetrics;
-
+  // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
+  private dbs: Map<BlockNumber, MerkleTreeWriteOperations> = new Map();
 
   constructor(
-    private dbProvider: ForkMerkleTreeOperations,
+    private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
     private prover: ServerCircuitProver,
     private readonly proverId: EthAddress,
     telemetryClient: TelemetryClient = getTelemetryClient(),
@@ -109,81 +119,144 @@ export class ProvingOrchestrator implements EpochProver {
   }
 
   public startNewEpoch(
-    epochNumber:
-
-    totalNumBlocks: number,
+    epochNumber: EpochNumber,
+    totalNumCheckpoints: number,
     finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
   ) {
+    if (this.provingState?.verifyState()) {
+      throw new Error(
+        `Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`,
+      );
+    }
+
     const { promise: _promise, resolve, reject } = promiseWithResolvers<ProvingResult>();
     const promise = _promise.catch((reason): ProvingResult => ({ status: 'failure', reason }));
-
-      throw new Error(`Invalid number of blocks for epoch (got ${totalNumBlocks})`);
-    }
-    logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
+    logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
     this.provingState = new EpochProvingState(
       epochNumber,
-
-      totalNumBlocks,
+      totalNumCheckpoints,
       finalBlobBatchingChallenges,
+      provingState => this.checkAndEnqueueCheckpointRootRollup(provingState),
       resolve,
       reject,
     );
     this.provingPromise = promise;
   }
 
+  public async startNewCheckpoint(
+    checkpointIndex: number,
+    constants: CheckpointConstantData,
+    l1ToL2Messages: Fr[],
+    totalNumBlocks: number,
+    headerOfLastBlockInPreviousCheckpoint: BlockHeader,
+  ) {
+    if (!this.provingState) {
+      throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+    }
+
+    if (!this.provingState.isAcceptingCheckpoints()) {
+      throw new Error(`Epoch not accepting further checkpoints.`);
+    }
+
+    // Fork world state at the end of the immediately previous block.
+    const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+    const db = await this.dbProvider.fork(lastBlockNumber);
+
+    const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
+    this.dbs.set(firstBlockNumber, db);
+
+    // Get archive sibling path before any block in this checkpoint lands.
+    const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+
+    // Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+    const {
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeRootSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeRootSiblingPath,
+    } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+
+    this.provingState.startNewCheckpoint(
+      checkpointIndex,
+      constants,
+      totalNumBlocks,
+      headerOfLastBlockInPreviousCheckpoint,
+      lastArchiveSiblingPath,
+      l1ToL2Messages,
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeRootSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeRootSiblingPath,
+    );
+  }
+
   /**
    * Starts off a new block
-   * @param
-   * @param
-   *
+   * @param blockNumber - The block number
+   * @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+   * block that doesn't have any txs.
+   * @param totalNumTxs - The total number of txs in the block
    */
-  @trackSpan('ProvingOrchestrator.startNewBlock',
-    [Attributes.BLOCK_NUMBER]:
+  @trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
+    [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async startNewBlock(
+  public async startNewBlock(blockNumber: BlockNumber, timestamp: UInt64, totalNumTxs: number) {
     if (!this.provingState) {
-      throw new Error(
+      throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
     }
 
-
-
+    const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+    if (!checkpointProvingState) {
+      throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
     }
 
-
+    if (!checkpointProvingState.isAcceptingBlocks()) {
+      throw new Error(`Checkpoint not accepting further blocks.`);
+    }
 
-
-
-    this.dbs.set(globalVariables.blockNumber, db);
+    const constants = checkpointProvingState.constants;
+    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);
 
-    //
-
-
-
-
-
-
-
-    // Get archive snapshot before this block lands
-    const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
-    const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+    // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+    if (!this.dbs.has(blockNumber)) {
+      // Fork world state at the end of the immediately previous block
+      const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
+      this.dbs.set(blockNumber, db);
+    }
+    const db = this.dbs.get(blockNumber)!;
 
-
-
-
-
-
-
-
+    // Get archive snapshot and sibling path before any txs in this block lands.
+    const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+    const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+
+    const blockProvingState = checkpointProvingState.startNewBlock(
+      blockNumber,
+      timestamp,
+      totalNumTxs,
+      lastArchiveTreeSnapshot,
       lastArchiveSiblingPath,
-      newArchiveSiblingPath,
-      previousBlockHeader,
-      this.proverId,
     );
 
-    // Enqueue base parity circuits for the block
-
-
+    // Enqueue base parity circuits for the first block in the checkpoint.
+    if (blockProvingState.index === 0) {
+      for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) {
+        this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+      }
+    }
+
+    // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+    // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+    if (totalNumTxs === 0) {
+      const endState = await db.getStateReference();
+      blockProvingState.setEndState(endState);
+
+      const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+      const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+      await endSpongeBlob.absorb(blockEndBlobFields);
+      blockProvingState.setEndSpongeBlob(endSpongeBlob);
+
+      // And also try to accumulate the blobs as far as we can:
+      await this.provingState.setBlobAccumulators();
     }
   }
 
@@ -195,28 +268,40 @@ export class ProvingOrchestrator implements EpochProver {
     [Attributes.BLOCK_TXS_COUNT]: txs.length,
   }))
   public async addTxs(txs: ProcessedTx[]): Promise<void> {
+    if (!this.provingState) {
+      throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+    }
+
     if (!txs.length) {
       // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
       // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
       logger.warn(`Provided no txs to orchestrator addTxs.`);
       return;
     }
-
-    const
+
+    const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
+    const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
     if (!provingState) {
-      throw new Error(`
+      throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
     }
 
-    if (provingState.totalNumTxs) {
+    if (provingState.totalNumTxs !== txs.length) {
+      throw new Error(
+        `Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`,
+      );
+    }
+
+    if (!provingState.isAcceptingTxs()) {
       throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
     }
 
-
-
+    logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+
+    const db = this.dbs.get(blockNumber)!;
+    const lastArchive = provingState.lastArchiveTreeSnapshot;
+    const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+    const spongeBlobState = provingState.getStartSpongeBlob().clone();
 
-    logger.info(
-      `Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState.blockNumber}`,
-    );
     for (const tx of txs) {
       try {
         if (!provingState.verifyState()) {
@@ -225,15 +310,32 @@ export class ProvingOrchestrator implements EpochProver {
 
         validateTx(tx);
 
-        logger.
+        logger.debug(`Received transaction: ${tx.hash}`);
+
+        const startSpongeBlob = spongeBlobState.clone();
+        const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
+          tx,
+          lastArchive,
+          newL1ToL2MessageTreeSnapshot,
+          startSpongeBlob,
+          db,
+        );
 
-
-
+        if (!provingState.verifyState()) {
+          throw new Error(`Unable to add transaction, preparing base inputs failed`);
+        }
+
+        await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+
+        const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
         const txIndex = provingState.addNewTx(txProvingState);
-        this.getOrEnqueueTube(provingState, txIndex);
         if (txProvingState.requireAvmProof) {
+          this.getOrEnqueueChonkVerifier(provingState, txIndex);
           logger.debug(`Enqueueing public VM for tx ${txIndex}`);
           this.enqueueVM(provingState, txIndex);
+        } else {
+          logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+          this.enqueueBaseRollup(provingState, txIndex);
         }
       } catch (err: any) {
         throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -241,24 +343,44 @@ export class ProvingOrchestrator implements EpochProver {
         });
       }
     }
+
+    const endState = await db.getStateReference();
+    provingState.setEndState(endState);
+
+    const blockEndBlobFields = provingState.getBlockEndBlobFields();
+    await spongeBlobState.absorb(blockEndBlobFields);
+
+    provingState.setEndSpongeBlob(spongeBlobState);
+
+    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    await this.provingState.setBlobAccumulators();
   }
 
   /**
-   * Kickstarts
-   * Note that if the
+   * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+   * Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
   */
-  @trackSpan('ProvingOrchestrator.
-  public
+  @trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+  public startChonkVerifierCircuits(txs: Tx[]) {
     if (!this.provingState?.verifyState()) {
-      throw new Error(`
+      throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
     }
-
+    const publicTxs = txs.filter(tx => tx.data.forPublic);
+    for (const tx of publicTxs) {
       const txHash = tx.getTxHash().toString();
-      const
-      const tubeProof =
-
-
-
+      const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
+      const tubeProof =
+        promiseWithResolvers<
+          PublicInputsAndRecursiveProof<
+            PublicChonkVerifierPublicInputs,
+            typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+          >
+        >();
+      logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+      this.doEnqueueChonkVerifier(txHash, privateInputs, proof => {
+        tubeProof.resolve(proof);
+      });
+      this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
     }
     return Promise.resolve();
   }
@@ -267,92 +389,109 @@ export class ProvingOrchestrator implements EpochProver {
    * Marks the block as completed.
    * Computes the block header and updates the archive tree.
    */
-  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber:
+  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: BlockNumber) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber:
+  public async setBlockCompleted(blockNumber: BlockNumber, expectedHeader?: BlockHeader): Promise<BlockHeader> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
     }
 
-    if
-
-
-
+    // Abort with specific error for the block if there's one.
+    const error = provingState.getError();
+    if (error) {
+      throw new Error(`Block proving failed: ${error}`);
     }
 
+    // Abort if the proving state is not valid due to errors occurred elsewhere.
     if (!provingState.verifyState()) {
-      throw new Error(`
+      throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
     }
 
-
-
-
-
-    logger.debug(`Accumulating blobs for ${blockNumber}`);
-    await this.provingState?.setBlobAccumulators(blockNumber);
-
-    // If the proofs were faster than the block building, then we need to try the block root rollup again here
-    await this.checkAndEnqueueBlockRootRollup(provingState);
-    return provingState.block!;
-  }
-
-  /** Returns the block as built for a given index. */
-  public getBlock(index: number): L2Block {
-    const block = this.provingState?.blocks[index]?.block;
-    if (!block) {
-      throw new Error(`Block at index ${index} not available`);
+    if (provingState.isAcceptingTxs()) {
+      throw new Error(
+        `Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`,
+      );
     }
-    return block;
-  }
-
-  private async buildBlock(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
-    // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-    const txs = provingState.allTxs.map(a => a.processedTx);
-
-    // Get db for this block
-    const db = this.dbs.get(provingState.blockNumber)!;
 
-    // Given we've applied every change from this block, now assemble the block header
-
-    const
-      txs,
-      provingState.globalVariables,
-      provingState.newL1ToL2Messages,
-      db,
-    );
+    // Given we've applied every change from this block, now assemble the block header:
+    logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
+    const header = await provingState.buildBlockHeader();
 
     if (expectedHeader && !header.equals(expectedHeader)) {
       logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
       throw new Error('Block header mismatch');
     }
 
+    // Get db for this block
+    const db = this.dbs.get(provingState.blockNumber)!;
+
+    // Update the archive tree, so we're ready to start processing the next block:
     logger.verbose(
       `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`,
     );
     await db.updateArchive(header);
 
-
-    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-    const l2Block = new L2Block(newArchive, header, body);
-
-    await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
+    await this.verifyBuiltBlockAgainstSyncedState(provingState);
 
-
-    provingState.setBlock(l2Block);
+    return header;
   }
 
   // Flagged as protected to disable in certain unit tests
-  protected async verifyBuiltBlockAgainstSyncedState(
-    const
+  protected async verifyBuiltBlockAgainstSyncedState(provingState: BlockProvingState) {
+    const builtBlockHeader = provingState.getBuiltBlockHeader();
+    if (!builtBlockHeader) {
+      logger.debug('Block header not built yet, skipping header check.');
+      return;
+    }
+
+    const output = provingState.getBlockRootRollupOutput();
+    if (!output) {
+      logger.debug('Block root rollup proof not built yet, skipping header check.');
+      return;
+    }
+    const header = await buildHeaderFromCircuitOutputs(output);
+
+    if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+      logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+      provingState.reject(`Block header hash mismatch.`);
+      return;
+    }
+
+    // Get db for this block
+    const blockNumber = provingState.blockNumber;
+    const db = this.dbs.get(blockNumber)!;
+
+    const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+    const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
     if (!syncedArchive.equals(newArchive)) {
-
-        `Archive tree mismatch for block ${
+      logger.error(
+        `Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(
          syncedArchive,
        )} but built ${inspect(newArchive)}`,
      );
+      provingState.reject(`Archive tree mismatch.`);
+      return;
+    }
+
+    const circuitArchive = output.newArchive;
+    if (!newArchive.equals(circuitArchive)) {
+      logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+      provingState.reject(`New archive mismatch.`);
+      return;
     }
+
+    // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+    // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+    // but have to make sure it only runs once all operations are completed, otherwise some function here
+    // will attempt to access the fork after it was closed.
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    void this.dbs
+      .get(blockNumber)
+      ?.close()
+      .then(() => this.dbs.delete(blockNumber))
+      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
   }
 
   /**
@@ -369,7 +508,11 @@ export class ProvingOrchestrator implements EpochProver {
   /**
    * Returns the proof for the current epoch.
    */
-  public async finalizeEpoch() {
+  public async finalizeEpoch(): Promise<{
+    publicInputs: RootRollupPublicInputs;
+    proof: Proof;
+    batchedBlobInputs: BatchedBlob;
+  }> {
     if (!this.provingState || !this.provingPromise) {
       throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
     }
@@ -379,14 +522,7 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Epoch proving failed: ${result.reason}`);
     }
 
-
-    // TODO(MW): EpochProvingState uses this.blocks.filter(b => !!b).length as total blocks, use this below:
-    const finalBlock = this.provingState.blocks[this.provingState.totalNumBlocks - 1];
-    if (!finalBlock || !finalBlock.endBlobAccumulator) {
-      throw new Error(`Epoch's final block not ready for finalize`);
-    }
-    const finalBatchedBlob = await finalBlock.endBlobAccumulator.finalize();
-    this.provingState.setFinalBatchedBlob(finalBatchedBlob);
+    await this.provingState.finalizeBatchedBlob();
 
     const epochProofResult = this.provingState.getEpochProofResult();
 
@@ -398,20 +534,6 @@ export class ProvingOrchestrator implements EpochProver {
     return epochProofResult;
   }
 
-  /**
-   * Starts the proving process for the given transaction and adds it to our state
-   * @param tx - The transaction whose proving we wish to commence
-   * @param provingState - The proving state being worked on
-   */
-  private async prepareTransaction(tx: ProcessedTx, provingState: BlockProvingState) {
-    const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-    if (!txInputs) {
-      // This should not be possible
-      throw new Error(`Unable to add transaction, preparing base inputs failed`);
-    }
-    return txInputs;
-  }
-
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -419,11 +541,11 @@ export class ProvingOrchestrator implements EpochProver {
    * @param job - The actual job, returns a promise notifying of the job's completion
    */
   private deferredProving<T>(
-    provingState: EpochProvingState |
+    provingState: EpochProvingState | CheckpointProvingState | BlockProvingState,
     request: (signal: AbortSignal) => Promise<T>,
     callback: (result: T) => void | Promise<void>,
   ) {
-    if (!provingState
+    if (!provingState.verifyState()) {
       logger.debug(`Not enqueuing job, state no longer valid`);
       return;
     }
@@ -441,7 +563,7 @@ export class ProvingOrchestrator implements EpochProver {
     }
 
     const result = await request(controller.signal);
-    if (!provingState
+    if (!provingState.verifyState()) {
       logger.debug(`State no longer valid, discarding result`);
       return;
     }
@@ -474,60 +596,58 @@ export class ProvingOrchestrator implements EpochProver {
     setImmediate(() => void safeJob());
   }
 
-  private async
-    const l1ToL2MessagesPadded = padArrayEnd(
+  private async updateL1ToL2MessageTree(l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations) {
+    const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
       l1ToL2Messages,
       Fr.ZERO,
       NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
       'Too many L1 to L2 messages',
     );
-    const baseParityInputs = times(NUM_BASE_PARITY_PER_ROOT_PARITY, i =>
-      BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()),
-    );
 
-    const
-
-    const l1ToL2MessageSubtreeSiblingPath = assertLength(
+    const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+    const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(
       await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
-
+      L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
     );
 
     // Update the local trees to include the new l1 to l2 messages
     await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-
+
+    const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+    const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(
+      await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
+      L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+    );
 
     return {
-
-
-
-
+      lastL1ToL2MessageTreeSnapshot,
+      lastL1ToL2MessageSubtreeRootSiblingPath,
+      newL1ToL2MessageTreeSnapshot,
+      newL1ToL2MessageSubtreeRootSiblingPath,
     };
   }
 
   // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs',
+  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', tx => ({
     [Attributes.TX_HASH]: tx.hash.toString(),
   }))
   private async prepareBaseRollupInputs(
-    provingState: BlockProvingState,
     tx: ProcessedTx,
-
-
-
-
-
-
-    const db = this.dbs.get(provingState.blockNumber)!;
-
+    lastArchive: AppendOnlyTreeSnapshot,
+    newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
+    startSpongeBlob: SpongeBlob,
+    db: MerkleTreeWriteOperations,
+  ): Promise<[BaseRollupHints, TreeSnapshots]> {
     // We build the base rollup inputs using a mock proof and verification key.
-    // These will be overwritten later once we have proven the
+    // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
    const [ms, hints] = await elapsed(
      insertSideEffectsAndBuildBaseRollupHints(
        tx,
-
-
+        lastArchive,
+        newL1ToL2MessageTreeSnapshot,
+        startSpongeBlob,
+        this.proverId.toField(),
        db,
-
-        provingState.spongeBlobState,
      ),
    );
@@ -540,10 +660,6 @@ export class ProvingOrchestrator implements EpochProver {
    );
    const treeSnapshots: TreeSnapshots = new Map((await Promise.all(promises)).map(obj => [obj.key, obj.value]));
 
-    if (!provingState.verifyState()) {
-      logger.debug(`Discarding proving job, state no longer valid`);
-      return;
-    }
    return [hints, treeSnapshots];
  }
 
@@ -555,6 +671,11 @@ export class ProvingOrchestrator implements EpochProver {
      return;
    }
 
+    if (!provingState.tryStartProvingBase(txIndex)) {
+      logger.debug(`Base rollup for tx ${txIndex} already started.`);
+      return;
+    }
+
    const txProvingState = provingState.getTxProvingState(txIndex);
    const { processedTx } = txProvingState;
    const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
@@ -566,69 +687,81 @@ export class ProvingOrchestrator implements EpochProver {
      wrapCallbackInSpan(
        this.tracer,
        `ProvingOrchestrator.prover.${
-          inputs instanceof
+          inputs instanceof PrivateTxBaseRollupPrivateInputs
+            ? 'getPrivateTxBaseRollupProof'
+            : 'getPublicTxBaseRollupProof'
        }`,
        {
          [Attributes.TX_HASH]: processedTx.hash.toString(),
          [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
        },
        signal => {
-          if (inputs instanceof
-            return this.prover.
+          if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+            return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
          } else {
-            return this.prover.
+            return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
          }
        },
      ),
-
+      result => {
        logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-        validatePartialState(result.inputs.
+        validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
        const leafLocation = provingState.setBaseRollupProof(txIndex, result);
        if (provingState.totalNumTxs === 1) {
-
+          this.checkAndEnqueueBlockRootRollup(provingState);
        } else {
-
+          this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
        }
      },
    );
  }
 
-  // Enqueues the
-  // Once completed, will enqueue the
-  private
+  // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+  // Once completed, will enqueue the the public tx base rollup.
+  private getOrEnqueueChonkVerifier(provingState: BlockProvingState, txIndex: number) {
    if (!provingState.verifyState()) {
-      logger.debug('Not running
+      logger.debug('Not running chonk verifier circuit, state invalid');
      return;
    }
 
    const txProvingState = provingState.getTxProvingState(txIndex);
    const txHash = txProvingState.processedTx.hash.toString();
-
-    const handleResult = (
-
-
-
-
+      NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
+    const handleResult = (
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
+    ) => {
+      logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, { txHash });
+      txProvingState.setPublicChonkVerifierProof(result);
+      this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+      this.checkAndEnqueueBaseRollup(provingState, txIndex);
    };
 
-    if (this.provingState?.
-      logger.debug(`
-      void this.provingState!.
+    if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+      logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, { txHash });
+      void this.provingState!.cachedChonkVerifierProofs.get(txHash)!.then(handleResult);
      return;
    }
 
-    logger.debug(`Enqueuing
-    this.
+    logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+    this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
  }
 
-  private
+  private doEnqueueChonkVerifier(
    txHash: string,
-    inputs:
-    handler: (
+    inputs: PublicChonkVerifierPrivateInputs,
+    handler: (
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
+    ) => void,
    provingState: EpochProvingState | BlockProvingState = this.provingState!,
  ) {
-    if (!provingState
-      logger.debug('Not running
+    if (!provingState.verifyState()) {
+      logger.debug('Not running chonk verifier circuit, state invalid');
      return;
    }
 
@@ -636,12 +769,12 @@ export class ProvingOrchestrator implements EpochProver {
      provingState,
      wrapCallbackInSpan(
        this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getPublicChonkVerifierProof',
        {
          [Attributes.TX_HASH]: txHash,
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: '
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public' satisfies CircuitName,
        },
-        signal => this.prover.
+        signal => this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber),
      ),
      handler,
    );
@@ -655,39 +788,45 @@ export class ProvingOrchestrator implements EpochProver {
      return;
    }
 
+    if (!provingState.tryStartProvingMerge(location)) {
+      logger.debug('Merge rollup already started.');
+      return;
+    }
+
    const inputs = provingState.getMergeRollupInputs(location);
 
    this.deferredProving(
      provingState,
      wrapCallbackInSpan(
        this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getTxMergeRollupProof',
        {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'merge
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge' satisfies CircuitName,
        },
-        signal => this.prover.
+        signal => this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber),
      ),
-
+      result => {
        provingState.setMergeRollupProof(location, result);
-
+        this.checkAndEnqueueNextMergeRollup(provingState, location);
      },
    );
  }
 
  // Executes the block root rollup circuit
-  private
+  private enqueueBlockRootRollup(provingState: BlockProvingState) {
    if (!provingState.verifyState()) {
      logger.debug('Not running block root rollup, state no longer valid');
      return;
    }
 
-    provingState.
+    if (!provingState.tryStartProvingBlockRoot()) {
+      logger.debug('Block root rollup already started.');
+      return;
+    }
 
-    const { rollupType, inputs } =
+    const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
 
-    logger.debug(
-      `Enqueuing ${rollupType} for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs.`,
-    );
+    logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
 
    this.deferredProving(
      provingState,
@@ -698,56 +837,32 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
698
837
|
[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
|
|
699
838
|
},
|
|
700
839
|
signal => {
|
|
701
|
-
if (inputs instanceof
|
|
702
|
-
return this.prover.
|
|
703
|
-
} else if (inputs instanceof
|
|
704
|
-
return this.prover.
|
|
840
|
+
if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
|
|
841
|
+
return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
|
|
842
|
+
} else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
|
|
843
|
+
return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
|
|
844
|
+
} else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
|
|
845
|
+
return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
|
|
846
|
+
} else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
|
|
847
|
+
return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
|
|
705
848
|
} else {
|
|
706
849
|
return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
|
|
707
850
|
}
|
|
708
851
|
},
|
|
709
852
|
),
|
|
710
853
|
async result => {
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
if (!(await header.hash()).equals(await provingState.block!.header.hash())) {
|
|
714
|
-
logger.error(
|
|
715
|
-
`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(provingState.block!.header)}`,
|
|
716
|
-
);
|
|
717
|
-
provingState.reject(`Block header hash mismatch.`);
|
|
718
|
-
}
|
|
719
|
-
|
|
720
|
-
const dbArchiveRoot = provingState.block!.archive.root;
|
|
721
|
-
const circuitArchiveRoot = result.inputs.newArchive.root;
|
|
722
|
-
if (!dbArchiveRoot.equals(circuitArchiveRoot)) {
|
|
723
|
-
logger.error(
|
|
724
|
-
`New archive root mismatch.\nCircuit: ${result.inputs.newArchive.root}\nComputed: ${dbArchiveRoot}`,
|
|
725
|
-
);
|
|
726
|
-
provingState.reject(`New archive root mismatch.`);
|
|
727
|
-
}
|
|
854
|
+
// If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
|
|
855
|
+
await this.verifyBuiltBlockAgainstSyncedState(provingState);
|
|
728
856
|
|
|
729
|
-
|
|
730
|
-
provingState.endBlobAccumulator!,
|
|
731
|
-
);
|
|
732
|
-
const circuitEndBlobAccumulatorState = result.inputs.blobPublicInputs.endBlobAccumulator;
|
|
733
|
-
if (!circuitEndBlobAccumulatorState.equals(endBlobAccumulatorPublicInputs)) {
|
|
734
|
-
logger.error(
|
|
735
|
-
`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
|
|
736
|
-
endBlobAccumulatorPublicInputs,
|
|
737
|
-
)}`,
|
|
738
|
-
);
|
|
739
|
-
provingState.reject(`Blob accumulator state mismatch.`);
|
|
740
|
-
}
|
|
857
|
+
logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
|
|
741
858
|
|
|
742
|
-
|
|
743
|
-
|
|
859
|
+
const leafLocation = provingState.setBlockRootRollupProof(result);
|
|
860
|
+
const checkpointProvingState = provingState.parentCheckpoint;
|
|
744
861
|
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
if (epochProvingState.totalNumBlocks === 1) {
|
|
748
|
-
this.enqueueEpochPadding(epochProvingState);
|
|
862
|
+
if (checkpointProvingState.totalNumBlocks === 1) {
|
|
863
|
+
this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
|
|
749
864
|
} else {
|
|
750
|
-
this.checkAndEnqueueNextBlockMergeRollup(
|
|
865
|
+
this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
|
|
751
866
|
}
|
|
752
867
|
},
|
|
753
868
|
);
|
|
@@ -755,24 +870,35 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
755
870
|
|
|
756
871
|
// Executes the base parity circuit and stores the intermediate state for the root parity circuit
|
|
757
872
|
// Enqueues the root parity circuit if all inputs are available
|
|
758
|
-
private enqueueBaseParityCircuit(
|
|
873
|
+
private enqueueBaseParityCircuit(
|
|
874
|
+
checkpointProvingState: CheckpointProvingState,
|
|
875
|
+
provingState: BlockProvingState,
|
|
876
|
+
baseParityIndex: number,
|
|
877
|
+
) {
|
|
759
878
|
if (!provingState.verifyState()) {
|
|
760
879
|
logger.debug('Not running base parity. State no longer valid.');
|
|
761
880
|
return;
|
|
762
881
|
}
|
|
763
882
|
|
|
883
|
+
if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
|
|
884
|
+
logger.warn(`Base parity ${baseParityIndex} already started.`);
|
|
885
|
+
return;
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
|
|
889
|
+
|
|
764
890
|
this.deferredProving(
|
|
765
891
|
provingState,
|
|
766
892
|
wrapCallbackInSpan(
|
|
767
893
|
this.tracer,
|
|
768
894
|
'ProvingOrchestrator.prover.getBaseParityProof',
|
|
769
895
|
{
|
|
770
|
-
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'base
|
|
896
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base' satisfies CircuitName,
|
|
771
897
|
},
|
|
772
898
|
signal => this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber),
|
|
773
899
|
),
|
|
774
900
|
provingOutput => {
|
|
775
|
-
provingState.setBaseParityProof(
|
|
901
|
+
provingState.setBaseParityProof(baseParityIndex, provingOutput);
|
|
776
902
|
this.checkAndEnqueueRootParityCircuit(provingState);
|
|
777
903
|
},
|
|
778
904
|
);
|
|
@@ -794,7 +920,12 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
794
920
|
return;
|
|
795
921
|
}
|
|
796
922
|
|
|
797
|
-
|
|
923
|
+
if (!provingState.tryStartProvingRootParity()) {
|
|
924
|
+
logger.debug('Root parity already started.');
|
|
925
|
+
return;
|
|
926
|
+
}
|
|
927
|
+
|
|
928
|
+
const inputs = provingState.getParityRootInputs();
|
|
798
929
|
|
|
799
930
|
this.deferredProving(
|
|
800
931
|
provingState,
|
|
@@ -802,25 +933,30 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getRootParityProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root' satisfies CircuitName,
         },
         signal => this.prover.getRootParityProof(inputs, signal, provingState.epochNumber),
       ),
-
+      result => {
         provingState.setRootParityProof(result);
-
+        this.checkAndEnqueueBlockRootRollup(provingState);
       },
     );
   }

   // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
   // Enqueues the next level of merge if all inputs are available
-  private enqueueBlockMergeRollup(provingState:
+  private enqueueBlockMergeRollup(provingState: CheckpointProvingState, location: TreeNodeLocation) {
     if (!provingState.verifyState()) {
       logger.debug('Not running block merge rollup. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingBlockMerge(location)) {
+      logger.debug('Block merge rollup already started.');
+      return;
+    }
+
     const inputs = provingState.getBlockMergeRollupInputs(location);
     this.deferredProving(
       provingState,
@@ -828,7 +964,7 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getBlockMergeRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge' satisfies CircuitName,
         },
         signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber),
       ),
@@ -839,29 +975,125 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

+  private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint root rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointRoot()) {
+      logger.debug('Checkpoint root rollup already started.');
+      return;
+    }
+
+    const rollupType = provingState.getCheckpointRootRollupType();
+
+    logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+
+    const inputs = provingState.getCheckpointRootRollupInputs();
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointRootRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
+        },
+        signal => {
+          if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+            return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+          } else {
+            return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+          }
+        },
+      ),
+      result => {
+        const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator()!.toBlobAccumulator();
+        const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+        if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+          logger.error(
+            `Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
+              computedEndBlobAccumulatorState,
+            )}`,
+          );
+          provingState.reject(`Blob accumulator state mismatch.`);
+          return;
+        }
+
+        logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+
+        const leafLocation = provingState.setCheckpointRootRollupProof(result);
+        const epochProvingState = provingState.parentEpoch;
+
+        if (epochProvingState.totalNumCheckpoints === 1) {
+          this.enqueueEpochPadding(epochProvingState);
+        } else {
+          this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+        }
+      },
+    );
+  }
+
+  private enqueueCheckpointMergeRollup(provingState: EpochProvingState, location: TreeNodeLocation) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointMerge(location)) {
+      logger.debug('Checkpoint merge rollup already started.');
+      return;
+    }
+
+    const inputs = provingState.getCheckpointMergeRollupInputs(location);
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointMergeRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge' satisfies CircuitName,
+        },
+        signal => this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber),
+      ),
+      result => {
+        logger.debug('Completed proof for checkpoint merge rollup.');
+        provingState.setCheckpointMergeRollupProof(location, result);
+        this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+      },
+    );
+  }
+
   private enqueueEpochPadding(provingState: EpochProvingState) {
     if (!provingState.verifyState()) {
       logger.debug('Not running epoch padding. State no longer valid.');
       return;
     }

+    if (!provingState.tryStartProvingPaddingCheckpoint()) {
+      logger.debug('Padding checkpoint already started.');
+      return;
+    }
+
     logger.debug('Padding epoch proof with a padding block root proof.');

-    const inputs = provingState.
+    const inputs = provingState.getPaddingCheckpointInputs();

     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: '
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding' satisfies CircuitName,
         },
-        signal => this.prover.
+        signal => this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber),
       ),
       result => {
-        logger.debug('Completed proof for padding
-        provingState.
+        logger.debug('Completed proof for padding checkpoint.');
+        provingState.setCheckpointPaddingProof(result);
         this.checkAndEnqueueRootRollup(provingState);
       },
     );
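
Note: the new checkpoint root callback cross-checks the blob accumulator the orchestrator computed locally against the one returned in the circuit's public inputs, and rejects the proving state on mismatch rather than carrying on. A small sketch of that check, under assumed minimal types:

```ts
// Illustrative only: the shape of the blob-accumulator consistency check above.
interface Accumulator {
  equals(other: Accumulator): boolean;
}

function verifyEndBlobAccumulator(
  computed: Accumulator,
  fromCircuit: Accumulator,
  reject: (reason: string) => void,
): boolean {
  if (!fromCircuit.equals(computed)) {
    // A mismatch means the orchestrator and the circuit disagree on blob contents;
    // fail the whole proving state instead of producing an inconsistent epoch proof.
    reject('Blob accumulator state mismatch.');
    return false;
  }
  return true;
}
```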
@@ -884,7 +1116,7 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getRootRollupProof',
         {
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root' satisfies CircuitName,
         },
         signal => this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber),
       ),
@@ -896,48 +1128,51 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  private
+  private checkAndEnqueueNextMergeRollup(provingState: BlockProvingState, currentLocation: TreeNodeLocation) {
     if (!provingState.isReadyForMergeRollup(currentLocation)) {
       return;
     }

     const parentLocation = provingState.getParentLocation(currentLocation);
     if (parentLocation.level === 0) {
-
+      this.checkAndEnqueueBlockRootRollup(provingState);
     } else {
       this.enqueueMergeRollup(provingState, parentLocation);
     }
   }

-  private
-    const blockNumber = provingState.blockNumber;
-    // Accumulate as far as we can, in case blocks came in out of order and we are behind:
-    await this.provingState?.setBlobAccumulators(blockNumber);
+  private checkAndEnqueueBlockRootRollup(provingState: BlockProvingState) {
     if (!provingState.isReadyForBlockRootRollup()) {
       logger.debug('Not ready for block root rollup');
       return;
     }
-
-
+
+    this.enqueueBlockRootRollup(provingState);
+  }
+
+  private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForBlockMerge(currentLocation)) {
       return;
     }

-
-
-
-
-
-
-
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    const parentLocation = provingState.getParentLocation(currentLocation);
+    if (parentLocation.level === 0) {
+      this.checkAndEnqueueCheckpointRootRollup(provingState);
+    } else {
+      this.enqueueBlockMergeRollup(provingState, parentLocation);
+    }
+  }

-
+  private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.isReadyForCheckpointRoot()) {
+      return;
+    }
+
+    this.enqueueCheckpointRootRollup(provingState);
   }

-  private
-  if (!provingState.
+  private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
       return;
     }

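
Note: the checkAndEnqueue* helpers above all share the same bubble-up shape: once both children of a tree node are proven, either the parent merge circuit is enqueued or, at level 0, control passes to the next layer's root circuit (block root, checkpoint root, or epoch root). A generic sketch of that shape, with hypothetical callbacks:

```ts
// Illustrative only: generic bubble-up over a rollup proof tree.
interface TreeNodeLocation {
  level: number;
  index: number;
}

function bubbleUp(
  currentLocation: TreeNodeLocation,
  isParentReady: (loc: TreeNodeLocation) => boolean,
  getParent: (loc: TreeNodeLocation) => TreeNodeLocation,
  enqueueMerge: (loc: TreeNodeLocation) => void,
  enqueueNextLayerRoot: () => void,
): void {
  if (!isParentReady(currentLocation)) {
    return; // the sibling proof is still outstanding; its completion will retry
  }
  const parent = getParent(currentLocation);
  if (parent.level === 0) {
    enqueueNextLayerRoot();
  } else {
    enqueueMerge(parent);
  }
}
```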
@@ -945,7 +1180,7 @@ export class ProvingOrchestrator implements EpochProver {
     if (parentLocation.level === 0) {
       this.checkAndEnqueueRootRollup(provingState);
     } else {
-      this.
+      this.enqueueCheckpointMergeRollup(provingState, parentLocation);
     }
   }

@@ -972,8 +1207,6 @@ export class ProvingOrchestrator implements EpochProver {

     const txProvingState = provingState.getTxProvingState(txIndex);

-    // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-    // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
     const doAvmProving = wrapCallbackInSpan(
       this.tracer,
       'ProvingOrchestrator.prover.getAvmProof',
@@ -982,47 +1215,24 @@ export class ProvingOrchestrator implements EpochProver {
       },
       async (signal: AbortSignal) => {
         const inputs = txProvingState.getAvmInputs();
-
-        // TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
-        // just keep the next line but removing the second argument (false).
-          return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
-        } catch (err) {
-          if (process.env.AVM_PROVING_STRICT) {
-            logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-            throw err;
-          } else {
-            logger.warn(
-              `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
-              AVM inputs and carrying on. ${inspect(err)}.`,
-            );
-
-            try {
-              this.metrics.incAvmFallback();
-              const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
-              return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
-            } catch (err) {
-              logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
-              throw err;
-            }
-          }
-        }
+        return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
       },
     );

     this.deferredProving(provingState, doAvmProving, proofAndVk => {
       logger.debug(`Proven VM for tx index: ${txIndex}`);
       txProvingState.setAvmProof(proofAndVk);
-      this.
+      this.checkAndEnqueueBaseRollup(provingState, txIndex);
     });
   }

-  private
+  private checkAndEnqueueBaseRollup(provingState: BlockProvingState, txIndex: number) {
     const txProvingState = provingState.getTxProvingState(txIndex);
     if (!txProvingState.ready()) {
       return;
     }

-    // We must have completed all proving (
+    // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
     logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);

     this.enqueueBaseRollup(provingState, txIndex);