@aztec/prover-client 0.0.0-test.1 → 0.0.1-commit.1142ef1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +2 -0
- package/dest/block-factory/index.d.ts.map +1 -0
- package/dest/block-factory/light.d.ts +38 -0
- package/dest/block-factory/light.d.ts.map +1 -0
- package/dest/block-factory/light.js +106 -0
- package/dest/config.d.ts +8 -8
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +12 -2
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +43 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +183 -0
- package/dest/mocks/fixtures.d.ts +8 -8
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +34 -16
- package/dest/mocks/test_context.d.ts +42 -32
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +144 -87
- package/dest/orchestrator/block-building-helpers.d.ts +37 -30
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +170 -189
- package/dest/orchestrator/block-proving-state.d.ts +70 -48
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +282 -177
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +210 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +41 -27
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +143 -73
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +37 -34
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +788 -277
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -1
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -6
- package/dest/orchestrator/tx-proving-state.d.ts +15 -12
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +27 -44
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +5 -5
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +6 -5
- package/dest/prover-client/server-epoch-prover.d.ts +16 -12
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +11 -11
- package/dest/proving_broker/broker_prover_facade.d.ts +25 -17
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +59 -40
- package/dest/proving_broker/config.d.ts +19 -10
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +23 -6
- package/dest/proving_broker/factory.d.ts +2 -2
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +5 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/factory.js +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
- package/dest/proving_broker/proof_store/index.d.ts +2 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +6 -11
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +84 -63
- package/dest/proving_broker/proving_broker.d.ts +13 -4
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +40 -33
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +5 -3
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +401 -11
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +11 -35
- package/dest/proving_broker/proving_job_controller.d.ts +9 -9
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +87 -60
- package/dest/proving_broker/rpc.d.ts +4 -6
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -4
- package/dest/test/mock_proof_store.d.ts +9 -0
- package/dest/test/mock_proof_store.d.ts.map +1 -0
- package/dest/test/mock_proof_store.js +10 -0
- package/dest/test/mock_prover.d.ts +23 -17
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +38 -20
- package/package.json +34 -31
- package/src/block-factory/index.ts +1 -0
- package/src/block-factory/light.ts +136 -0
- package/src/config.ts +25 -9
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +249 -0
- package/src/mocks/fixtures.ts +44 -39
- package/src/mocks/test_context.ts +218 -116
- package/src/orchestrator/block-building-helpers.ts +258 -334
- package/src/orchestrator/block-proving-state.ts +325 -231
- package/src/orchestrator/checkpoint-proving-state.ts +303 -0
- package/src/orchestrator/epoch-proving-state.ts +191 -113
- package/src/orchestrator/orchestrator.ts +587 -318
- package/src/orchestrator/orchestrator_metrics.ts +2 -6
- package/src/orchestrator/tx-proving-state.ts +48 -66
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +20 -25
- package/src/prover-client/server-epoch-prover.ts +40 -22
- package/src/proving_broker/broker_prover_facade.ts +206 -128
- package/src/proving_broker/config.ts +25 -7
- package/src/proving_broker/factory.ts +2 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proof_store/factory.ts +1 -1
- package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
- package/src/proving_broker/proving_agent.ts +90 -64
- package/src/proving_broker/proving_broker.ts +57 -41
- package/src/proving_broker/proving_broker_database/memory.ts +3 -2
- package/src/proving_broker/proving_broker_database/persisted.ts +29 -13
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_broker_instrumentation.ts +10 -35
- package/src/proving_broker/proving_job_controller.ts +92 -81
- package/src/proving_broker/rpc.ts +1 -6
- package/src/test/mock_proof_store.ts +14 -0
- package/src/test/mock_prover.ts +156 -64
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/dest/block_builder/index.d.ts +0 -6
- package/dest/block_builder/index.d.ts.map +0 -1
- package/dest/block_builder/light.d.ts +0 -33
- package/dest/block_builder/light.d.ts.map +0 -1
- package/dest/block_builder/light.js +0 -82
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/bin/get-proof-inputs.ts +0 -59
- package/src/block_builder/index.ts +0 -6
- package/src/block_builder/light.ts +0 -101
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
- /package/dest/{block_builder → block-factory}/index.js +0 -0
|
@@ -1,43 +1,48 @@
|
|
|
1
|
+
import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib/types';
|
|
1
2
|
import {
|
|
2
|
-
AVM_PROOF_LENGTH_IN_FIELDS,
|
|
3
|
-
AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS,
|
|
4
3
|
L1_TO_L2_MSG_SUBTREE_HEIGHT,
|
|
5
|
-
|
|
4
|
+
L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
|
|
5
|
+
NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
|
|
6
6
|
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
|
|
7
7
|
NUM_BASE_PARITY_PER_ROOT_PARITY,
|
|
8
|
-
type TUBE_PROOF_LENGTH,
|
|
9
8
|
} from '@aztec/constants';
|
|
10
|
-
import {
|
|
9
|
+
import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
|
|
10
|
+
import { padArrayEnd } from '@aztec/foundation/collection';
|
|
11
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
11
12
|
import { AbortError } from '@aztec/foundation/error';
|
|
12
|
-
import { Fr } from '@aztec/foundation/fields';
|
|
13
13
|
import { createLogger } from '@aztec/foundation/log';
|
|
14
14
|
import { promiseWithResolvers } from '@aztec/foundation/promise';
|
|
15
15
|
import { assertLength } from '@aztec/foundation/serialize';
|
|
16
16
|
import { pushTestData } from '@aztec/foundation/testing';
|
|
17
17
|
import { elapsed } from '@aztec/foundation/timer';
|
|
18
18
|
import type { TreeNodeLocation } from '@aztec/foundation/trees';
|
|
19
|
-
import {
|
|
20
|
-
import { L2Block } from '@aztec/stdlib/block';
|
|
19
|
+
import { EthAddress } from '@aztec/stdlib/block';
|
|
21
20
|
import type {
|
|
22
21
|
EpochProver,
|
|
23
22
|
ForkMerkleTreeOperations,
|
|
24
23
|
MerkleTreeWriteOperations,
|
|
25
|
-
|
|
24
|
+
PublicInputsAndRecursiveProof,
|
|
25
|
+
ReadonlyWorldStateAccess,
|
|
26
26
|
ServerCircuitProver,
|
|
27
27
|
} from '@aztec/stdlib/interfaces/server';
|
|
28
|
-
import {
|
|
29
|
-
import { makeEmptyRecursiveProof } from '@aztec/stdlib/proofs';
|
|
28
|
+
import type { Proof } from '@aztec/stdlib/proofs';
|
|
30
29
|
import {
|
|
31
30
|
type BaseRollupHints,
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
31
|
+
BlockRootEmptyTxFirstRollupPrivateInputs,
|
|
32
|
+
BlockRootFirstRollupPrivateInputs,
|
|
33
|
+
BlockRootSingleTxFirstRollupPrivateInputs,
|
|
34
|
+
BlockRootSingleTxRollupPrivateInputs,
|
|
35
|
+
CheckpointConstantData,
|
|
36
|
+
CheckpointRootSingleBlockRollupPrivateInputs,
|
|
37
|
+
PrivateTxBaseRollupPrivateInputs,
|
|
38
|
+
PublicChonkVerifierPrivateInputs,
|
|
39
|
+
PublicChonkVerifierPublicInputs,
|
|
40
|
+
RootRollupPublicInputs,
|
|
36
41
|
} from '@aztec/stdlib/rollup';
|
|
37
42
|
import type { CircuitName } from '@aztec/stdlib/stats';
|
|
38
43
|
import { type AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
|
|
39
|
-
import {
|
|
40
|
-
import {
|
|
44
|
+
import type { BlockHeader, ProcessedTx, Tx } from '@aztec/stdlib/tx';
|
|
45
|
+
import type { UInt64 } from '@aztec/stdlib/types';
|
|
41
46
|
import {
|
|
42
47
|
Attributes,
|
|
43
48
|
type TelemetryClient,
|
|
@@ -50,15 +55,18 @@ import {
|
|
|
50
55
|
import { inspect } from 'util';
|
|
51
56
|
|
|
52
57
|
import {
|
|
53
|
-
|
|
54
|
-
|
|
58
|
+
buildHeaderFromCircuitOutputs,
|
|
59
|
+
getLastSiblingPath,
|
|
60
|
+
getPublicChonkVerifierPrivateInputsFromTx,
|
|
55
61
|
getRootTreeSiblingPath,
|
|
56
62
|
getSubtreeSiblingPath,
|
|
57
63
|
getTreeSnapshot,
|
|
64
|
+
insertSideEffectsAndBuildBaseRollupHints,
|
|
58
65
|
validatePartialState,
|
|
59
66
|
validateTx,
|
|
60
67
|
} from './block-building-helpers.js';
|
|
61
68
|
import type { BlockProvingState } from './block-proving-state.js';
|
|
69
|
+
import type { CheckpointProvingState } from './checkpoint-proving-state.js';
|
|
62
70
|
import { EpochProvingState, type ProvingResult, type TreeSnapshots } from './epoch-proving-state.js';
|
|
63
71
|
import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
|
|
64
72
|
import { TxProvingState } from './tx-proving-state.js';
|
|
@@ -85,12 +93,13 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
85
93
|
|
|
86
94
|
private provingPromise: Promise<ProvingResult> | undefined = undefined;
|
|
87
95
|
private metrics: ProvingOrchestratorMetrics;
|
|
88
|
-
|
|
96
|
+
// eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
|
|
97
|
+
private dbs: Map<BlockNumber, MerkleTreeWriteOperations> = new Map();
|
|
89
98
|
|
|
90
99
|
constructor(
|
|
91
|
-
private dbProvider: ForkMerkleTreeOperations,
|
|
100
|
+
private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
|
|
92
101
|
private prover: ServerCircuitProver,
|
|
93
|
-
private readonly proverId:
|
|
102
|
+
private readonly proverId: EthAddress,
|
|
94
103
|
telemetryClient: TelemetryClient = getTelemetryClient(),
|
|
95
104
|
) {
|
|
96
105
|
this.metrics = new ProvingOrchestratorMetrics(telemetryClient, 'ProvingOrchestrator');
|
|
@@ -100,7 +109,7 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
100
109
|
return this.metrics.tracer;
|
|
101
110
|
}
|
|
102
111
|
|
|
103
|
-
public getProverId():
|
|
112
|
+
public getProverId(): EthAddress {
|
|
104
113
|
return this.proverId;
|
|
105
114
|
}
|
|
106
115
|
|
|
@@ -109,64 +118,145 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
109
118
|
return Promise.resolve();
|
|
110
119
|
}
|
|
111
120
|
|
|
112
|
-
public startNewEpoch(
|
|
121
|
+
public startNewEpoch(
|
|
122
|
+
epochNumber: EpochNumber,
|
|
123
|
+
totalNumCheckpoints: number,
|
|
124
|
+
finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
|
|
125
|
+
) {
|
|
126
|
+
if (this.provingState?.verifyState()) {
|
|
127
|
+
throw new Error(
|
|
128
|
+
`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`,
|
|
129
|
+
);
|
|
130
|
+
}
|
|
131
|
+
|
|
113
132
|
const { promise: _promise, resolve, reject } = promiseWithResolvers<ProvingResult>();
|
|
114
133
|
const promise = _promise.catch((reason): ProvingResult => ({ status: 'failure', reason }));
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
134
|
+
logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
|
|
135
|
+
this.provingState = new EpochProvingState(
|
|
136
|
+
epochNumber,
|
|
137
|
+
totalNumCheckpoints,
|
|
138
|
+
finalBlobBatchingChallenges,
|
|
139
|
+
provingState => this.checkAndEnqueueCheckpointRootRollup(provingState),
|
|
140
|
+
resolve,
|
|
141
|
+
reject,
|
|
142
|
+
);
|
|
120
143
|
this.provingPromise = promise;
|
|
121
144
|
}
|
|
122
145
|
|
|
146
|
+
public async startNewCheckpoint(
|
|
147
|
+
checkpointIndex: number,
|
|
148
|
+
constants: CheckpointConstantData,
|
|
149
|
+
l1ToL2Messages: Fr[],
|
|
150
|
+
totalNumBlocks: number,
|
|
151
|
+
headerOfLastBlockInPreviousCheckpoint: BlockHeader,
|
|
152
|
+
) {
|
|
153
|
+
if (!this.provingState) {
|
|
154
|
+
throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
if (!this.provingState.isAcceptingCheckpoints()) {
|
|
158
|
+
throw new Error(`Epoch not accepting further checkpoints.`);
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
// Fork world state at the end of the immediately previous block.
|
|
162
|
+
const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
|
|
163
|
+
const db = await this.dbProvider.fork(lastBlockNumber);
|
|
164
|
+
|
|
165
|
+
const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
|
|
166
|
+
this.dbs.set(firstBlockNumber, db);
|
|
167
|
+
|
|
168
|
+
// Get archive sibling path before any block in this checkpoint lands.
|
|
169
|
+
const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
|
|
170
|
+
|
|
171
|
+
// Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
|
|
172
|
+
const {
|
|
173
|
+
lastL1ToL2MessageTreeSnapshot,
|
|
174
|
+
lastL1ToL2MessageSubtreeRootSiblingPath,
|
|
175
|
+
newL1ToL2MessageTreeSnapshot,
|
|
176
|
+
newL1ToL2MessageSubtreeRootSiblingPath,
|
|
177
|
+
} = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
|
|
178
|
+
|
|
179
|
+
this.provingState.startNewCheckpoint(
|
|
180
|
+
checkpointIndex,
|
|
181
|
+
constants,
|
|
182
|
+
totalNumBlocks,
|
|
183
|
+
headerOfLastBlockInPreviousCheckpoint,
|
|
184
|
+
lastArchiveSiblingPath,
|
|
185
|
+
l1ToL2Messages,
|
|
186
|
+
lastL1ToL2MessageTreeSnapshot,
|
|
187
|
+
lastL1ToL2MessageSubtreeRootSiblingPath,
|
|
188
|
+
newL1ToL2MessageTreeSnapshot,
|
|
189
|
+
newL1ToL2MessageSubtreeRootSiblingPath,
|
|
190
|
+
);
|
|
191
|
+
}
|
|
192
|
+
|
|
123
193
|
/**
|
|
124
194
|
* Starts off a new block
|
|
125
|
-
* @param
|
|
126
|
-
* @param
|
|
127
|
-
*
|
|
195
|
+
* @param blockNumber - The block number
|
|
196
|
+
* @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
|
|
197
|
+
* block that doesn't have any txs.
|
|
198
|
+
* @param totalNumTxs - The total number of txs in the block
|
|
128
199
|
*/
|
|
129
|
-
@trackSpan('ProvingOrchestrator.startNewBlock',
|
|
130
|
-
[Attributes.BLOCK_NUMBER]:
|
|
200
|
+
@trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
|
|
201
|
+
[Attributes.BLOCK_NUMBER]: blockNumber,
|
|
131
202
|
}))
|
|
132
|
-
public async startNewBlock(
|
|
203
|
+
public async startNewBlock(blockNumber: BlockNumber, timestamp: UInt64, totalNumTxs: number) {
|
|
133
204
|
if (!this.provingState) {
|
|
134
|
-
throw new Error(
|
|
205
|
+
throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
|
|
135
206
|
}
|
|
136
207
|
|
|
137
|
-
|
|
138
|
-
|
|
208
|
+
const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
|
|
209
|
+
if (!checkpointProvingState) {
|
|
210
|
+
throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
|
|
139
211
|
}
|
|
140
212
|
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
213
|
+
if (!checkpointProvingState.isAcceptingBlocks()) {
|
|
214
|
+
throw new Error(`Checkpoint not accepting further blocks.`);
|
|
215
|
+
}
|
|
144
216
|
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
this.dbs.set(globalVariables.blockNumber.toNumber(), db);
|
|
217
|
+
const constants = checkpointProvingState.constants;
|
|
218
|
+
logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);
|
|
148
219
|
|
|
149
|
-
//
|
|
150
|
-
|
|
151
|
-
|
|
220
|
+
// Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
|
|
221
|
+
if (!this.dbs.has(blockNumber)) {
|
|
222
|
+
// Fork world state at the end of the immediately previous block
|
|
223
|
+
const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
|
|
224
|
+
this.dbs.set(blockNumber, db);
|
|
225
|
+
}
|
|
226
|
+
const db = this.dbs.get(blockNumber)!;
|
|
227
|
+
|
|
228
|
+
// Get archive snapshot and sibling path before any txs in this block lands.
|
|
229
|
+
const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
|
|
230
|
+
const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
|
|
231
|
+
|
|
232
|
+
const blockProvingState = checkpointProvingState.startNewBlock(
|
|
233
|
+
blockNumber,
|
|
234
|
+
timestamp,
|
|
235
|
+
totalNumTxs,
|
|
236
|
+
lastArchiveTreeSnapshot,
|
|
237
|
+
lastArchiveSiblingPath,
|
|
238
|
+
);
|
|
152
239
|
|
|
153
|
-
//
|
|
154
|
-
|
|
155
|
-
|
|
240
|
+
// Enqueue base parity circuits for the first block in the checkpoint.
|
|
241
|
+
if (blockProvingState.index === 0) {
|
|
242
|
+
for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) {
|
|
243
|
+
this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
|
|
244
|
+
}
|
|
245
|
+
}
|
|
156
246
|
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
lastArchive,
|
|
163
|
-
newArchiveSiblingPath,
|
|
164
|
-
previousBlockHeader,
|
|
165
|
-
);
|
|
247
|
+
// Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
|
|
248
|
+
// We need to set its end sponge blob here, which will become the start sponge blob for the next block.
|
|
249
|
+
if (totalNumTxs === 0) {
|
|
250
|
+
const endState = await db.getStateReference();
|
|
251
|
+
blockProvingState.setEndState(endState);
|
|
166
252
|
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
253
|
+
const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
|
|
254
|
+
const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
|
|
255
|
+
await endSpongeBlob.absorb(blockEndBlobFields);
|
|
256
|
+
blockProvingState.setEndSpongeBlob(endSpongeBlob);
|
|
257
|
+
|
|
258
|
+
// And also try to accumulate the blobs as far as we can:
|
|
259
|
+
await this.provingState.setBlobAccumulators();
|
|
170
260
|
}
|
|
171
261
|
}
|
|
172
262
|
|
|
@@ -178,28 +268,40 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
178
268
|
[Attributes.BLOCK_TXS_COUNT]: txs.length,
|
|
179
269
|
}))
|
|
180
270
|
public async addTxs(txs: ProcessedTx[]): Promise<void> {
|
|
271
|
+
if (!this.provingState) {
|
|
272
|
+
throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
|
|
273
|
+
}
|
|
274
|
+
|
|
181
275
|
if (!txs.length) {
|
|
182
276
|
// To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
|
|
183
277
|
// on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
|
|
184
278
|
logger.warn(`Provided no txs to orchestrator addTxs.`);
|
|
185
279
|
return;
|
|
186
280
|
}
|
|
187
|
-
|
|
188
|
-
const
|
|
281
|
+
|
|
282
|
+
const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
|
|
283
|
+
const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
|
|
189
284
|
if (!provingState) {
|
|
190
|
-
throw new Error(`
|
|
285
|
+
throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
if (provingState.totalNumTxs !== txs.length) {
|
|
289
|
+
throw new Error(
|
|
290
|
+
`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`,
|
|
291
|
+
);
|
|
191
292
|
}
|
|
192
293
|
|
|
193
|
-
if (provingState.
|
|
294
|
+
if (!provingState.isAcceptingTxs()) {
|
|
194
295
|
throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
|
|
195
296
|
}
|
|
196
297
|
|
|
197
|
-
|
|
198
|
-
|
|
298
|
+
logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
|
|
299
|
+
|
|
300
|
+
const db = this.dbs.get(blockNumber)!;
|
|
301
|
+
const lastArchive = provingState.lastArchiveTreeSnapshot;
|
|
302
|
+
const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
|
|
303
|
+
const spongeBlobState = provingState.getStartSpongeBlob().clone();
|
|
199
304
|
|
|
200
|
-
logger.info(
|
|
201
|
-
`Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState.blockNumber}`,
|
|
202
|
-
);
|
|
203
305
|
for (const tx of txs) {
|
|
204
306
|
try {
|
|
205
307
|
if (!provingState.verifyState()) {
|
|
@@ -208,15 +310,32 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
208
310
|
|
|
209
311
|
validateTx(tx);
|
|
210
312
|
|
|
211
|
-
logger.
|
|
313
|
+
logger.debug(`Received transaction: ${tx.hash}`);
|
|
314
|
+
|
|
315
|
+
const startSpongeBlob = spongeBlobState.clone();
|
|
316
|
+
const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
|
|
317
|
+
tx,
|
|
318
|
+
lastArchive,
|
|
319
|
+
newL1ToL2MessageTreeSnapshot,
|
|
320
|
+
startSpongeBlob,
|
|
321
|
+
db,
|
|
322
|
+
);
|
|
212
323
|
|
|
213
|
-
|
|
214
|
-
|
|
324
|
+
if (!provingState.verifyState()) {
|
|
325
|
+
throw new Error(`Unable to add transaction, preparing base inputs failed`);
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
await spongeBlobState.absorb(tx.txEffect.toBlobFields());
|
|
329
|
+
|
|
330
|
+
const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
|
|
215
331
|
const txIndex = provingState.addNewTx(txProvingState);
|
|
216
|
-
this.getOrEnqueueTube(provingState, txIndex);
|
|
217
332
|
if (txProvingState.requireAvmProof) {
|
|
333
|
+
this.getOrEnqueueChonkVerifier(provingState, txIndex);
|
|
218
334
|
logger.debug(`Enqueueing public VM for tx ${txIndex}`);
|
|
219
335
|
this.enqueueVM(provingState, txIndex);
|
|
336
|
+
} else {
|
|
337
|
+
logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
|
|
338
|
+
this.enqueueBaseRollup(provingState, txIndex);
|
|
220
339
|
}
|
|
221
340
|
} catch (err: any) {
|
|
222
341
|
throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
|
|
@@ -224,114 +343,155 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
224
343
|
});
|
|
225
344
|
}
|
|
226
345
|
}
|
|
346
|
+
|
|
347
|
+
const endState = await db.getStateReference();
|
|
348
|
+
provingState.setEndState(endState);
|
|
349
|
+
|
|
350
|
+
const blockEndBlobFields = provingState.getBlockEndBlobFields();
|
|
351
|
+
await spongeBlobState.absorb(blockEndBlobFields);
|
|
352
|
+
|
|
353
|
+
provingState.setEndSpongeBlob(spongeBlobState);
|
|
354
|
+
|
|
355
|
+
// Txs have been added to the block. Now try to accumulate the blobs as far as we can:
|
|
356
|
+
await this.provingState.setBlobAccumulators();
|
|
227
357
|
}
|
|
228
358
|
|
|
229
359
|
/**
|
|
230
|
-
* Kickstarts
|
|
231
|
-
* Note that if the
|
|
360
|
+
* Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
|
|
361
|
+
* Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
|
|
232
362
|
*/
|
|
233
|
-
@trackSpan('ProvingOrchestrator.
|
|
234
|
-
public
|
|
363
|
+
@trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
|
|
364
|
+
public startChonkVerifierCircuits(txs: Tx[]) {
|
|
235
365
|
if (!this.provingState?.verifyState()) {
|
|
236
|
-
throw new Error(`
|
|
366
|
+
throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
|
|
237
367
|
}
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
const
|
|
241
|
-
const
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
368
|
+
const publicTxs = txs.filter(tx => tx.data.forPublic);
|
|
369
|
+
for (const tx of publicTxs) {
|
|
370
|
+
const txHash = tx.getTxHash().toString();
|
|
371
|
+
const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
|
|
372
|
+
const tubeProof =
|
|
373
|
+
promiseWithResolvers<
|
|
374
|
+
PublicInputsAndRecursiveProof<
|
|
375
|
+
PublicChonkVerifierPublicInputs,
|
|
376
|
+
typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
|
|
377
|
+
>
|
|
378
|
+
>();
|
|
379
|
+
logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
|
|
380
|
+
this.doEnqueueChonkVerifier(txHash, privateInputs, proof => {
|
|
381
|
+
tubeProof.resolve(proof);
|
|
382
|
+
});
|
|
383
|
+
this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
|
|
245
384
|
}
|
|
385
|
+
return Promise.resolve();
|
|
246
386
|
}
|
|
247
387
|
|
|
248
388
|
/**
|
|
249
389
|
* Marks the block as completed.
|
|
250
390
|
* Computes the block header and updates the archive tree.
|
|
251
391
|
*/
|
|
252
|
-
@trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber:
|
|
392
|
+
@trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: BlockNumber) => ({
|
|
253
393
|
[Attributes.BLOCK_NUMBER]: blockNumber,
|
|
254
394
|
}))
|
|
255
|
-
public async setBlockCompleted(blockNumber:
|
|
395
|
+
public async setBlockCompleted(blockNumber: BlockNumber, expectedHeader?: BlockHeader): Promise<BlockHeader> {
|
|
256
396
|
const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
|
|
257
397
|
if (!provingState) {
|
|
258
398
|
throw new Error(`Block proving state for ${blockNumber} not found`);
|
|
259
399
|
}
|
|
260
400
|
|
|
261
|
-
if
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
401
|
+
// Abort with specific error for the block if there's one.
|
|
402
|
+
const error = provingState.getError();
|
|
403
|
+
if (error) {
|
|
404
|
+
throw new Error(`Block proving failed: ${error}`);
|
|
265
405
|
}
|
|
266
406
|
|
|
407
|
+
// Abort if the proving state is not valid due to errors occurred elsewhere.
|
|
267
408
|
if (!provingState.verifyState()) {
|
|
268
|
-
throw new Error(`
|
|
409
|
+
throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
|
|
269
410
|
}
|
|
270
411
|
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
// If the proofs were faster than the block building, then we need to try the block root rollup again here
|
|
276
|
-
await this.checkAndEnqueueBlockRootRollup(provingState);
|
|
277
|
-
return provingState.block!;
|
|
278
|
-
}
|
|
279
|
-
|
|
280
|
-
/** Returns the block as built for a given index. */
|
|
281
|
-
public getBlock(index: number): L2Block {
|
|
282
|
-
const block = this.provingState?.blocks[index]?.block;
|
|
283
|
-
if (!block) {
|
|
284
|
-
throw new Error(`Block at index ${index} not available`);
|
|
412
|
+
if (provingState.isAcceptingTxs()) {
|
|
413
|
+
throw new Error(
|
|
414
|
+
`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`,
|
|
415
|
+
);
|
|
285
416
|
}
|
|
286
|
-
return block;
|
|
287
|
-
}
|
|
288
|
-
|
|
289
|
-
private async buildBlock(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
|
|
290
|
-
// Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
|
|
291
|
-
const txs = provingState.allTxs.map(a => a.processedTx);
|
|
292
|
-
|
|
293
|
-
// Get db for this block
|
|
294
|
-
const db = this.dbs.get(provingState.blockNumber)!;
|
|
295
417
|
|
|
296
|
-
// Given we've applied every change from this block, now assemble the block header
|
|
297
|
-
|
|
298
|
-
const
|
|
299
|
-
txs,
|
|
300
|
-
provingState.globalVariables,
|
|
301
|
-
provingState.newL1ToL2Messages,
|
|
302
|
-
db,
|
|
303
|
-
);
|
|
418
|
+
// Given we've applied every change from this block, now assemble the block header:
|
|
419
|
+
logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
|
|
420
|
+
const header = await provingState.buildBlockHeader();
|
|
304
421
|
|
|
305
422
|
if (expectedHeader && !header.equals(expectedHeader)) {
|
|
306
423
|
logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
|
|
307
424
|
throw new Error('Block header mismatch');
|
|
308
425
|
}
|
|
309
426
|
|
|
427
|
+
// Get db for this block
|
|
428
|
+
const db = this.dbs.get(provingState.blockNumber)!;
|
|
429
|
+
|
|
430
|
+
// Update the archive tree, so we're ready to start processing the next block:
|
|
310
431
|
logger.verbose(
|
|
311
432
|
`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`,
|
|
312
433
|
);
|
|
313
434
|
await db.updateArchive(header);
|
|
314
435
|
|
|
315
|
-
|
|
316
|
-
const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
|
|
317
|
-
const l2Block = new L2Block(newArchive, header, body);
|
|
318
|
-
|
|
319
|
-
await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
|
|
436
|
+
await this.verifyBuiltBlockAgainstSyncedState(provingState);
|
|
320
437
|
|
|
321
|
-
|
|
322
|
-
provingState.block = l2Block;
|
|
438
|
+
return header;
|
|
323
439
|
}
|
|
324
440
|
|
|
325
441
|
// Flagged as protected to disable in certain unit tests
|
|
326
|
-
protected async verifyBuiltBlockAgainstSyncedState(
|
|
327
|
-
const
|
|
442
|
+
protected async verifyBuiltBlockAgainstSyncedState(provingState: BlockProvingState) {
|
|
443
|
+
const builtBlockHeader = provingState.getBuiltBlockHeader();
|
|
444
|
+
if (!builtBlockHeader) {
|
|
445
|
+
logger.debug('Block header not built yet, skipping header check.');
|
|
446
|
+
return;
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
const output = provingState.getBlockRootRollupOutput();
|
|
450
|
+
if (!output) {
|
|
451
|
+
logger.debug('Block root rollup proof not built yet, skipping header check.');
|
|
452
|
+
return;
|
|
453
|
+
}
|
|
454
|
+
const header = await buildHeaderFromCircuitOutputs(output);
|
|
455
|
+
|
|
456
|
+
if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
|
|
457
|
+
logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
|
|
458
|
+
provingState.reject(`Block header hash mismatch.`);
|
|
459
|
+
return;
|
|
460
|
+
}
|
|
461
|
+
|
|
462
|
+
// Get db for this block
|
|
463
|
+
const blockNumber = provingState.blockNumber;
|
|
464
|
+
const db = this.dbs.get(blockNumber)!;
|
|
465
|
+
|
|
466
|
+
const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
|
|
467
|
+
const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
|
|
328
468
|
if (!syncedArchive.equals(newArchive)) {
|
|
329
|
-
|
|
330
|
-
`Archive tree mismatch for block ${
|
|
469
|
+
logger.error(
|
|
470
|
+
`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(
|
|
331
471
|
syncedArchive,
|
|
332
472
|
)} but built ${inspect(newArchive)}`,
|
|
333
473
|
);
|
|
474
|
+
provingState.reject(`Archive tree mismatch.`);
|
|
475
|
+
return;
|
|
334
476
|
}
|
|
477
|
+
|
|
478
|
+
const circuitArchive = output.newArchive;
|
|
479
|
+
if (!newArchive.equals(circuitArchive)) {
|
|
480
|
+
logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
|
|
481
|
+
provingState.reject(`New archive mismatch.`);
|
|
482
|
+
return;
|
|
483
|
+
}
|
|
484
|
+
|
|
485
|
+
// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
|
|
486
|
+
// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
|
|
487
|
+
// but have to make sure it only runs once all operations are completed, otherwise some function here
|
|
488
|
+
// will attempt to access the fork after it was closed.
|
|
489
|
+
logger.debug(`Cleaning up world state fork for ${blockNumber}`);
|
|
490
|
+
void this.dbs
|
|
491
|
+
.get(blockNumber)
|
|
492
|
+
?.close()
|
|
493
|
+
.then(() => this.dbs.delete(blockNumber))
|
|
494
|
+
.catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
|
|
335
495
|
}
|
|
336
496
|
|
|
337
497
|
/**
|
|
@@ -348,9 +508,13 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
348
508
|
/**
|
|
349
509
|
* Returns the proof for the current epoch.
|
|
350
510
|
*/
|
|
351
|
-
public async
|
|
511
|
+
public async finalizeEpoch(): Promise<{
|
|
512
|
+
publicInputs: RootRollupPublicInputs;
|
|
513
|
+
proof: Proof;
|
|
514
|
+
batchedBlobInputs: BatchedBlob;
|
|
515
|
+
}> {
|
|
352
516
|
if (!this.provingState || !this.provingPromise) {
|
|
353
|
-
throw new Error(`Invalid proving state, an epoch must be proven before it can be
|
|
517
|
+
throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
|
|
354
518
|
}
|
|
355
519
|
|
|
356
520
|
const result = await this.provingPromise!;
|
|
@@ -358,6 +522,8 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
358
522
|
throw new Error(`Epoch proving failed: ${result.reason}`);
|
|
359
523
|
}
|
|
360
524
|
|
|
525
|
+
await this.provingState.finalizeBatchedBlob();
|
|
526
|
+
|
|
361
527
|
const epochProofResult = this.provingState.getEpochProofResult();
|
|
362
528
|
|
|
363
529
|
pushTestData('epochProofResult', {
|
|
@@ -368,20 +534,6 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
368
534
|
return epochProofResult;
|
|
369
535
|
}
|
|
370
536
|
|
|
371
|
-
/**
|
|
372
|
-
* Starts the proving process for the given transaction and adds it to our state
|
|
373
|
-
* @param tx - The transaction whose proving we wish to commence
|
|
374
|
-
* @param provingState - The proving state being worked on
|
|
375
|
-
*/
|
|
376
|
-
private async prepareTransaction(tx: ProcessedTx, provingState: BlockProvingState) {
|
|
377
|
-
const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
|
|
378
|
-
if (!txInputs) {
|
|
379
|
-
// This should not be possible
|
|
380
|
-
throw new Error(`Unable to add transaction, preparing base inputs failed`);
|
|
381
|
-
}
|
|
382
|
-
return txInputs;
|
|
383
|
-
}
|
|
384
|
-
|
|
385
537
|
/**
|
|
386
538
|
* Enqueue a job to be scheduled
|
|
387
539
|
* @param provingState - The proving state object being operated on
|
|
@@ -389,11 +541,11 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
389
541
|
* @param job - The actual job, returns a promise notifying of the job's completion
|
|
390
542
|
*/
|
|
391
543
|
private deferredProving<T>(
|
|
392
|
-
provingState: EpochProvingState |
|
|
544
|
+
provingState: EpochProvingState | CheckpointProvingState | BlockProvingState,
|
|
393
545
|
request: (signal: AbortSignal) => Promise<T>,
|
|
394
546
|
callback: (result: T) => void | Promise<void>,
|
|
395
547
|
) {
|
|
396
|
-
if (!provingState
|
|
548
|
+
if (!provingState.verifyState()) {
|
|
397
549
|
logger.debug(`Not enqueuing job, state no longer valid`);
|
|
398
550
|
return;
|
|
399
551
|
}
|
|
@@ -411,7 +563,7 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
411
563
|
}
|
|
412
564
|
|
|
413
565
|
const result = await request(controller.signal);
|
|
414
|
-
if (!provingState
|
|
566
|
+
if (!provingState.verifyState()) {
|
|
415
567
|
logger.debug(`State no longer valid, discarding result`);
|
|
416
568
|
return;
|
|
417
569
|
}
|
|
@@ -444,52 +596,59 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
444
596
|
setImmediate(() => void safeJob());
|
|
445
597
|
}
|
|
446
598
|
|
|
447
|
-
private async
|
|
448
|
-
const l1ToL2MessagesPadded = padArrayEnd(
|
|
599
|
+
private async updateL1ToL2MessageTree(l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations) {
|
|
600
|
+
const l1ToL2MessagesPadded = padArrayEnd<Fr, number>(
|
|
449
601
|
l1ToL2Messages,
|
|
450
602
|
Fr.ZERO,
|
|
451
603
|
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
|
|
452
604
|
'Too many L1 to L2 messages',
|
|
453
605
|
);
|
|
454
|
-
const baseParityInputs = times(NUM_BASE_PARITY_PER_ROOT_PARITY, i =>
|
|
455
|
-
BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()),
|
|
456
|
-
);
|
|
457
606
|
|
|
458
|
-
const
|
|
607
|
+
const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
|
|
608
|
+
const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(
|
|
459
609
|
await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
|
|
460
|
-
|
|
610
|
+
L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
|
|
461
611
|
);
|
|
462
612
|
|
|
463
613
|
// Update the local trees to include the new l1 to l2 messages
|
|
464
614
|
await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
|
|
465
|
-
|
|
615
|
+
|
|
616
|
+
const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
|
|
617
|
+
const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(
|
|
618
|
+
await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
|
|
619
|
+
L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
|
|
620
|
+
);
|
|
466
621
|
|
|
467
622
|
return {
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
623
|
+
lastL1ToL2MessageTreeSnapshot,
|
|
624
|
+
lastL1ToL2MessageSubtreeRootSiblingPath,
|
|
625
|
+
newL1ToL2MessageTreeSnapshot,
|
|
626
|
+
newL1ToL2MessageSubtreeRootSiblingPath,
|
|
471
627
|
};
|
|
472
628
|
}
|
|
473
629
|
|
|
474
630
|
// Updates the merkle trees for a transaction. The first enqueued job for a transaction
|
|
475
|
-
@trackSpan('ProvingOrchestrator.prepareBaseRollupInputs',
|
|
631
|
+
@trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', tx => ({
|
|
476
632
|
[Attributes.TX_HASH]: tx.hash.toString(),
|
|
477
633
|
}))
|
|
478
634
|
private async prepareBaseRollupInputs(
|
|
479
|
-
provingState: BlockProvingState,
|
|
480
635
|
tx: ProcessedTx,
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
const db = this.dbs.get(provingState.blockNumber)!;
|
|
488
|
-
|
|
636
|
+
lastArchive: AppendOnlyTreeSnapshot,
|
|
637
|
+
newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
|
|
638
|
+
startSpongeBlob: SpongeBlob,
|
|
639
|
+
db: MerkleTreeWriteOperations,
|
|
640
|
+
): Promise<[BaseRollupHints, TreeSnapshots]> {
|
|
489
641
|
// We build the base rollup inputs using a mock proof and verification key.
|
|
490
|
-
// These will be overwritten later once we have proven the
|
|
642
|
+
// These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
|
|
491
643
|
const [ms, hints] = await elapsed(
|
|
492
|
-
|
|
644
|
+
insertSideEffectsAndBuildBaseRollupHints(
|
|
645
|
+
tx,
|
|
646
|
+
lastArchive,
|
|
647
|
+
newL1ToL2MessageTreeSnapshot,
|
|
648
|
+
startSpongeBlob,
|
|
649
|
+
this.proverId.toField(),
|
|
650
|
+
db,
|
|
651
|
+
),
|
|
493
652
|
);
|
|
494
653
|
|
|
495
654
|
this.metrics.recordBaseRollupInputs(ms);
|
|
@@ -501,10 +660,6 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
501
660
|
);
|
|
502
661
|
const treeSnapshots: TreeSnapshots = new Map((await Promise.all(promises)).map(obj => [obj.key, obj.value]));
|
|
503
662
|
|
|
504
|
-
if (!provingState.verifyState()) {
|
|
505
|
-
logger.debug(`Discarding proving job, state no longer valid`);
|
|
506
|
-
return;
|
|
507
|
-
}
|
|
508
663
|
return [hints, treeSnapshots];
|
|
509
664
|
}
|
|
510
665
|
|
|
@@ -516,6 +671,11 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
516
671
|
return;
|
|
517
672
|
}
|
|
518
673
|
|
|
674
|
+
if (!provingState.tryStartProvingBase(txIndex)) {
|
|
675
|
+
logger.debug(`Base rollup for tx ${txIndex} already started.`);
|
|
676
|
+
return;
|
|
677
|
+
}
|
|
678
|
+
|
|
519
679
|
const txProvingState = provingState.getTxProvingState(txIndex);
|
|
520
680
|
const { processedTx } = txProvingState;
|
|
521
681
|
const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
|
|
@@ -527,70 +687,81 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
527
687
|
wrapCallbackInSpan(
|
|
528
688
|
this.tracer,
|
|
529
689
|
`ProvingOrchestrator.prover.${
|
|
530
|
-
inputs instanceof
|
|
690
|
+
inputs instanceof PrivateTxBaseRollupPrivateInputs
|
|
691
|
+
? 'getPrivateTxBaseRollupProof'
|
|
692
|
+
: 'getPublicTxBaseRollupProof'
|
|
531
693
|
}`,
|
|
532
694
|
{
|
|
533
695
|
[Attributes.TX_HASH]: processedTx.hash.toString(),
|
|
534
|
-
[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
|
|
535
696
|
[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
|
|
536
697
|
},
|
|
537
698
|
signal => {
|
|
538
|
-
if (inputs instanceof
|
|
539
|
-
return this.prover.
|
|
699
|
+
if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
|
|
700
|
+
return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
|
|
540
701
|
} else {
|
|
541
|
-
return this.prover.
|
|
702
|
+
return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
|
|
542
703
|
}
|
|
543
704
|
},
|
|
544
705
|
),
|
|
545
|
-
|
|
706
|
+
result => {
|
|
546
707
|
logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
|
|
547
|
-
validatePartialState(result.inputs.
|
|
708
|
+
validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
|
|
548
709
|
const leafLocation = provingState.setBaseRollupProof(txIndex, result);
|
|
549
710
|
if (provingState.totalNumTxs === 1) {
|
|
550
|
-
|
|
711
|
+
this.checkAndEnqueueBlockRootRollup(provingState);
|
|
551
712
|
} else {
|
|
552
|
-
|
|
713
|
+
this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
|
|
553
714
|
}
|
|
554
715
|
},
|
|
555
716
|
);
|
|
556
717
|
}
|
|
557
718
|
|
|
558
|
-
// Enqueues the
|
|
559
|
-
// Once completed, will enqueue the
|
|
560
|
-
private
|
|
719
|
+
// Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
|
|
720
|
+
// Once completed, will enqueue the the public tx base rollup.
|
|
721
|
+
private getOrEnqueueChonkVerifier(provingState: BlockProvingState, txIndex: number) {
|
|
561
722
|
if (!provingState.verifyState()) {
|
|
562
|
-
logger.debug('Not running
|
|
723
|
+
logger.debug('Not running chonk verifier circuit, state invalid');
|
|
563
724
|
return;
|
|
564
725
|
}
|
|
565
726
|
|
|
566
727
|
const txProvingState = provingState.getTxProvingState(txIndex);
|
|
567
728
|
const txHash = txProvingState.processedTx.hash.toString();
|
|
568
|
-
|
|
569
|
-
const handleResult = (
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
729
|
+
NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
|
|
730
|
+
const handleResult = (
|
|
731
|
+
result: PublicInputsAndRecursiveProof<
|
|
732
|
+
PublicChonkVerifierPublicInputs,
|
|
733
|
+
typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
|
|
734
|
+
>,
|
|
735
|
+
) => {
|
|
736
|
+
logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, { txHash });
|
|
737
|
+
txProvingState.setPublicChonkVerifierProof(result);
|
|
738
|
+
this.provingState?.cachedChonkVerifierProofs.delete(txHash);
|
|
739
|
+
this.checkAndEnqueueBaseRollup(provingState, txIndex);
|
|
574
740
|
};
|
|
575
741
|
|
|
576
|
-
if (this.provingState?.
|
|
577
|
-
logger.debug(`
|
|
578
|
-
void this.provingState!.
|
|
742
|
+
if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
|
|
743
|
+
logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, { txHash });
|
|
744
|
+
void this.provingState!.cachedChonkVerifierProofs.get(txHash)!.then(handleResult);
|
|
579
745
|
return;
|
|
580
746
|
}
|
|
581
747
|
|
|
582
|
-
logger.debug(`Enqueuing
|
|
583
|
-
this.
|
|
748
|
+
logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
|
|
749
|
+
this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
|
|
584
750
|
}
|
|
585
751
|
|
|
586
|
-
private
|
|
752
|
+
private doEnqueueChonkVerifier(
|
|
587
753
|
txHash: string,
|
|
588
|
-
inputs:
|
|
589
|
-
handler: (
|
|
754
|
+
inputs: PublicChonkVerifierPrivateInputs,
|
|
755
|
+
handler: (
|
|
756
|
+
result: PublicInputsAndRecursiveProof<
|
|
757
|
+
PublicChonkVerifierPublicInputs,
|
|
758
|
+
typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
|
|
759
|
+
>,
|
|
760
|
+
) => void,
|
|
590
761
|
provingState: EpochProvingState | BlockProvingState = this.provingState!,
|
|
591
762
|
) {
|
|
592
|
-
if (!provingState
|
|
593
|
-
logger.debug('Not running
|
|
763
|
+
if (!provingState.verifyState()) {
|
|
764
|
+
logger.debug('Not running chonk verifier circuit, state invalid');
|
|
594
765
|
return;
|
|
595
766
|
}
|
|
596
767
|
|
|
@@ -598,13 +769,12 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
598
769
|
provingState,
|
|
599
770
|
wrapCallbackInSpan(
|
|
600
771
|
this.tracer,
|
|
601
|
-
'ProvingOrchestrator.prover.
|
|
772
|
+
'ProvingOrchestrator.prover.getPublicChonkVerifierProof',
|
|
602
773
|
{
|
|
603
774
|
[Attributes.TX_HASH]: txHash,
|
|
604
|
-
[Attributes.
|
|
605
|
-
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'tube-circuit' satisfies CircuitName,
|
|
775
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public' satisfies CircuitName,
|
|
606
776
|
},
|
|
607
|
-
signal => this.prover.
|
|
777
|
+
signal => this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber),
|
|
608
778
|
),
|
|
609
779
|
handler,
|
|
610
780
|
);
|
|
@@ -618,40 +788,45 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
618
788
|
return;
|
|
619
789
|
}
|
|
620
790
|
|
|
791
|
+
if (!provingState.tryStartProvingMerge(location)) {
|
|
792
|
+
logger.debug('Merge rollup already started.');
|
|
793
|
+
return;
|
|
794
|
+
}
|
|
795
|
+
|
|
621
796
|
const inputs = provingState.getMergeRollupInputs(location);
|
|
622
797
|
|
|
623
798
|
this.deferredProving(
|
|
624
799
|
provingState,
|
|
625
800
|
wrapCallbackInSpan(
|
|
626
801
|
this.tracer,
|
|
627
|
-
'ProvingOrchestrator.prover.
|
|
802
|
+
'ProvingOrchestrator.prover.getTxMergeRollupProof',
|
|
628
803
|
{
|
|
629
|
-
[Attributes.
|
|
630
|
-
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'merge-rollup' satisfies CircuitName,
|
|
804
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge' satisfies CircuitName,
|
|
631
805
|
},
|
|
632
|
-
signal => this.prover.
|
|
806
|
+
signal => this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber),
|
|
633
807
|
),
|
|
634
|
-
|
|
808
|
+
result => {
|
|
635
809
|
provingState.setMergeRollupProof(location, result);
|
|
636
|
-
|
|
810
|
+
this.checkAndEnqueueNextMergeRollup(provingState, location);
|
|
637
811
|
},
|
|
638
812
|
);
|
|
639
813
|
}
|
|
640
814
|
|
|
641
815
|
// Executes the block root rollup circuit
|
|
642
|
-
private
|
|
816
|
+
private enqueueBlockRootRollup(provingState: BlockProvingState) {
|
|
643
817
|
if (!provingState.verifyState()) {
|
|
644
818
|
logger.debug('Not running block root rollup, state no longer valid');
|
|
645
819
|
return;
|
|
646
820
|
}
|
|
647
821
|
|
|
648
|
-
provingState.
|
|
822
|
+
if (!provingState.tryStartProvingBlockRoot()) {
|
|
823
|
+
logger.debug('Block root rollup already started.');
|
|
824
|
+
return;
|
|
825
|
+
}
|
|
649
826
|
|
|
650
|
-
const { rollupType, inputs } =
|
|
827
|
+
const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
|
|
651
828
|
|
|
652
|
-
logger.debug(
|
|
653
|
-
`Enqueuing ${rollupType} for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs.`,
|
|
654
|
-
);
|
|
829
|
+
logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
|
|
655
830
|
|
|
656
831
|
this.deferredProving(
|
|
657
832
|
provingState,
|
|
@@ -659,38 +834,35 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
659
834
|
this.tracer,
|
|
660
835
|
'ProvingOrchestrator.prover.getBlockRootRollupProof',
|
|
661
836
|
{
|
|
662
|
-
[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
|
|
663
837
|
[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
|
|
664
838
|
},
|
|
665
839
|
signal => {
|
|
666
|
-
if (inputs instanceof
|
|
667
|
-
return this.prover.
|
|
668
|
-
} else if (inputs instanceof
|
|
669
|
-
return this.prover.
|
|
840
|
+
if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
|
|
841
|
+
return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
|
|
842
|
+
} else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
|
|
843
|
+
return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
|
|
844
|
+
} else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
|
|
845
|
+
return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
|
|
846
|
+
} else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
|
|
847
|
+
return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
|
|
670
848
|
} else {
|
|
671
849
|
return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
|
|
672
850
|
}
|
|
673
851
|
},
|
|
674
852
|
),
|
|
675
853
|
async result => {
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
if (!(await header.hash()).equals(await provingState.block!.header.hash())) {
|
|
679
|
-
logger.error(
|
|
680
|
-
`Block header mismatch\nCircuit:${inspect(header)}\nComputed:${inspect(provingState.block!.header)}`,
|
|
681
|
-
);
|
|
682
|
-
provingState.reject(`Block header hash mismatch`);
|
|
683
|
-
}
|
|
854
|
+
// If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
|
|
855
|
+
await this.verifyBuiltBlockAgainstSyncedState(provingState);
|
|
684
856
|
|
|
685
|
-
logger.debug(`Completed ${rollupType} proof for block ${provingState.
|
|
686
|
-
// validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
|
|
857
|
+
logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
|
|
687
858
|
|
|
688
|
-
const
|
|
689
|
-
const
|
|
690
|
-
|
|
691
|
-
|
|
859
|
+
const leafLocation = provingState.setBlockRootRollupProof(result);
|
|
860
|
+
const checkpointProvingState = provingState.parentCheckpoint;
|
|
861
|
+
|
|
862
|
+
if (checkpointProvingState.totalNumBlocks === 1) {
|
|
863
|
+
this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
|
|
692
864
|
} else {
|
|
693
|
-
this.checkAndEnqueueNextBlockMergeRollup(
|
|
865
|
+
this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
|
|
694
866
|
}
|
|
695
867
|
},
|
|
696
868
|
);
|
|
@@ -698,25 +870,35 @@ export class ProvingOrchestrator implements EpochProver {
|
|
|
698
870
|
|
|
699
871
|
// Executes the base parity circuit and stores the intermediate state for the root parity circuit
|
|
700
872
|
// Enqueues the root parity circuit if all inputs are available
|
|
701
|
-
private enqueueBaseParityCircuit(
|
|
873
|
+
private enqueueBaseParityCircuit(
|
|
874
|
+
checkpointProvingState: CheckpointProvingState,
|
|
875
|
+
provingState: BlockProvingState,
|
|
876
|
+
baseParityIndex: number,
|
|
877
|
+
) {
|
|
702
878
|
if (!provingState.verifyState()) {
|
|
703
879
|
logger.debug('Not running base parity. State no longer valid.');
|
|
704
880
|
return;
|
|
705
881
|
}
|
|
706
882
|
|
|
883
|
+
if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
|
|
884
|
+
logger.warn(`Base parity ${baseParityIndex} already started.`);
|
|
885
|
+
return;
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
|
|
889
|
+
|
|
707
890
|
this.deferredProving(
|
|
708
891
|
provingState,
|
|
709
892
|
wrapCallbackInSpan(
|
|
710
893
|
this.tracer,
|
|
711
894
|
'ProvingOrchestrator.prover.getBaseParityProof',
|
|
712
895
|
{
|
|
713
|
-
[Attributes.
|
|
714
|
-
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity' satisfies CircuitName,
|
|
896
|
+
[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base' satisfies CircuitName,
|
|
715
897
|
},
|
|
716
898
|
signal => this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber),
|
|
717
899
|
),
|
|
718
900
|
provingOutput => {
|
|
719
|
-
provingState.setBaseParityProof(
|
|
901
|
+
provingState.setBaseParityProof(baseParityIndex, provingOutput);
|
|
720
902
|
this.checkAndEnqueueRootParityCircuit(provingState);
|
|
721
903
|
},
|
|
722
904
|
);
|
|
@@ -738,7 +920,12 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }
 
-
+    if (!provingState.tryStartProvingRootParity()) {
+      logger.debug('Root parity already started.');
+      return;
+    }
+
+    const inputs = provingState.getParityRootInputs();
 
     this.deferredProving(
       provingState,
@@ -746,36 +933,38 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getRootParityProof',
         {
-          [Attributes.
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-parity' satisfies CircuitName,
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root' satisfies CircuitName,
         },
         signal => this.prover.getRootParityProof(inputs, signal, provingState.epochNumber),
       ),
-
+      result => {
        provingState.setRootParityProof(result);
-
+        this.checkAndEnqueueBlockRootRollup(provingState);
       },
     );
   }
 
   // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
   // Enqueues the next level of merge if all inputs are available
-  private enqueueBlockMergeRollup(provingState:
+  private enqueueBlockMergeRollup(provingState: CheckpointProvingState, location: TreeNodeLocation) {
     if (!provingState.verifyState()) {
       logger.debug('Not running block merge rollup. State no longer valid.');
       return;
     }
 
-
+    if (!provingState.tryStartProvingBlockMerge(location)) {
+      logger.debug('Block merge rollup already started.');
+      return;
+    }
 
+    const inputs = provingState.getBlockMergeRollupInputs(location);
     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
         'ProvingOrchestrator.prover.getBlockMergeRollupProof',
         {
-          [Attributes.
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge-rollup' satisfies CircuitName,
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge' satisfies CircuitName,
         },
         signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber),
       ),
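enqueueBlockMergeRollup now addresses nodes of the checkpoint's block-merge tree by a TreeNodeLocation. Assuming that type is essentially a {level, index} pair in a binary merge tree (the real type is imported from another package), parent traversal and the level === 0 root test look roughly like this:

    interface TreeNodeLocation {
      level: number; // 0 is the root of the merge tree
      index: number; // position within the level
    }

    // In a binary merge tree, the parent of a node halves the index and moves one
    // level up; reaching level 0 means the next circuit is the tree root
    // (the checkpoint root rollup in this orchestrator).
    function getParentLocation(location: TreeNodeLocation): TreeNodeLocation {
      return { level: location.level - 1, index: Math.floor(location.index / 2) };
    }

    const leaf: TreeNodeLocation = { level: 2, index: 3 };
    console.log(getParentLocation(leaf)); // { level: 1, index: 1 }
    console.log(getParentLocation(getParentLocation(leaf))); // { level: 0, index: 0 } -> root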
@@ -786,30 +975,125 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }
 
-  private
+  private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint root rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointRoot()) {
+      logger.debug('Checkpoint root rollup already started.');
+      return;
+    }
+
+    const rollupType = provingState.getCheckpointRootRollupType();
+
+    logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+
+    const inputs = provingState.getCheckpointRootRollupInputs();
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointRootRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
+        },
+        signal => {
+          if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+            return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+          } else {
+            return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+          }
+        },
+      ),
+      result => {
+        const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator()!.toBlobAccumulator();
+        const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+        if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+          logger.error(
+            `Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(
+              computedEndBlobAccumulatorState,
+            )}`,
+          );
+          provingState.reject(`Blob accumulator state mismatch.`);
+          return;
+        }
+
+        logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+
+        const leafLocation = provingState.setCheckpointRootRollupProof(result);
+        const epochProvingState = provingState.parentEpoch;
+
+        if (epochProvingState.totalNumCheckpoints === 1) {
+          this.enqueueEpochPadding(epochProvingState);
+        } else {
+          this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+        }
+      },
+    );
+  }
+
+  private enqueueCheckpointMergeRollup(provingState: EpochProvingState, location: TreeNodeLocation) {
+    if (!provingState.verifyState()) {
+      logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+      return;
+    }
+
+    if (!provingState.tryStartProvingCheckpointMerge(location)) {
+      logger.debug('Checkpoint merge rollup already started.');
+      return;
+    }
+
+    const inputs = provingState.getCheckpointMergeRollupInputs(location);
+
+    this.deferredProving(
+      provingState,
+      wrapCallbackInSpan(
+        this.tracer,
+        'ProvingOrchestrator.prover.getCheckpointMergeRollupProof',
+        {
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge' satisfies CircuitName,
+        },
+        signal => this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber),
+      ),
+      result => {
+        logger.debug('Completed proof for checkpoint merge rollup.');
+        provingState.setCheckpointMergeRollupProof(location, result);
+        this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+      },
+    );
+  }
+
+  private enqueueEpochPadding(provingState: EpochProvingState) {
     if (!provingState.verifyState()) {
       logger.debug('Not running epoch padding. State no longer valid.');
       return;
     }
 
-
+    if (!provingState.tryStartProvingPaddingCheckpoint()) {
+      logger.debug('Padding checkpoint already started.');
+      return;
+    }
+
+    logger.debug('Padding epoch proof with a padding block root proof.');
 
-    const inputs =
+    const inputs = provingState.getPaddingCheckpointInputs();
 
     this.deferredProving(
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof',
         {
-          [Attributes.
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'empty-block-root-rollup' satisfies CircuitName,
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding' satisfies CircuitName,
        },
-        signal => this.prover.
+        signal => this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber),
       ),
       result => {
-        logger.debug('Completed proof for padding
-        provingState.
+        logger.debug('Completed proof for padding checkpoint.');
+        provingState.setCheckpointPaddingProof(result);
        this.checkAndEnqueueRootRollup(provingState);
       },
     );
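The new checkpoint-root callback cross-checks the circuit's end blob accumulator against the orchestrator's own computation and rejects the proving state on a mismatch. A reduced sketch of that consistency check, assuming only an equals()-style comparison rather than the package's real blob accumulator type:

    interface AccumulatorLike {
      equals(other: AccumulatorLike): boolean;
    }

    // Returns an error message when the circuit output and the locally computed
    // accumulator diverge (mirroring the reject() path above); null means consistent.
    function checkBlobAccumulator(circuit: AccumulatorLike, computed: AccumulatorLike): string | null {
      return circuit.equals(computed) ? null : 'Blob accumulator state mismatch.';
    }

    // Tiny concrete accumulator so the sketch runs end to end.
    class FieldAccumulator implements AccumulatorLike {
      constructor(private readonly value: bigint) {}
      equals(other: AccumulatorLike): boolean {
        return other instanceof FieldAccumulator && other.value === this.value;
      }
    }

    console.log(checkBlobAccumulator(new FieldAccumulator(7n), new FieldAccumulator(7n))); // null
    console.log(checkBlobAccumulator(new FieldAccumulator(7n), new FieldAccumulator(8n))); // mismatch message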
@@ -824,7 +1108,7 @@ export class ProvingOrchestrator implements EpochProver {
 
     logger.debug(`Preparing root rollup`);
 
-    const inputs = provingState.getRootRollupInputs(
+    const inputs = provingState.getRootRollupInputs();
 
     this.deferredProving(
       provingState,
@@ -832,8 +1116,7 @@ export class ProvingOrchestrator implements EpochProver {
         this.tracer,
         'ProvingOrchestrator.prover.getRootRollupProof',
         {
-          [Attributes.
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup' satisfies CircuitName,
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root' satisfies CircuitName,
         },
         signal => this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber),
       ),
@@ -845,46 +1128,51 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }
 
-  private
+  private checkAndEnqueueNextMergeRollup(provingState: BlockProvingState, currentLocation: TreeNodeLocation) {
     if (!provingState.isReadyForMergeRollup(currentLocation)) {
       return;
     }
 
     const parentLocation = provingState.getParentLocation(currentLocation);
     if (parentLocation.level === 0) {
-
+      this.checkAndEnqueueBlockRootRollup(provingState);
     } else {
       this.enqueueMergeRollup(provingState, parentLocation);
     }
   }
 
-  private
+  private checkAndEnqueueBlockRootRollup(provingState: BlockProvingState) {
     if (!provingState.isReadyForBlockRootRollup()) {
-      logger.debug('Not ready for root rollup');
+      logger.debug('Not ready for block root rollup');
       return;
     }
-
-
+
+    this.enqueueBlockRootRollup(provingState);
+  }
+
+  private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForBlockMerge(currentLocation)) {
       return;
     }
-    const blockNumber = provingState.blockNumber;
 
-
-
-
-
-
-
-
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    const parentLocation = provingState.getParentLocation(currentLocation);
+    if (parentLocation.level === 0) {
+      this.checkAndEnqueueCheckpointRootRollup(provingState);
+    } else {
+      this.enqueueBlockMergeRollup(provingState, parentLocation);
+    }
+  }
 
-
+  private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
+    if (!provingState.isReadyForCheckpointRoot()) {
+      return;
+    }
+
+    this.enqueueCheckpointRootRollup(provingState);
   }
 
-  private
-    if (!provingState.
+  private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) {
+    if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
       return;
     }
 
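The checkAndEnqueue* helpers above only move one level up the relevant tree once the readiness check for the current node passes; for a merge node that plausibly means both children already hold proofs. A sketch of that kind of readiness bookkeeping, assuming a flat map keyed by level and index (the real isReadyFor* implementations live in the proving-state classes, not in this file):

    interface TreeNodeLocation { level: number; index: number; }

    // Sketch: proofs stored by "level:index"; a parent merge is ready only when
    // both of its children have been proven.
    class MergeTreeState<TProof> {
      private readonly proofs = new Map<string, TProof>();

      private key(loc: TreeNodeLocation): string {
        return `${loc.level}:${loc.index}`;
      }

      setProof(loc: TreeNodeLocation, proof: TProof): void {
        this.proofs.set(this.key(loc), proof);
      }

      isReadyForParentMerge(child: TreeNodeLocation): boolean {
        const sibling = { level: child.level, index: child.index ^ 1 };
        return this.proofs.has(this.key(child)) && this.proofs.has(this.key(sibling));
      }
    }

    const tree = new MergeTreeState<string>();
    tree.setProof({ level: 2, index: 2 }, 'proof-a');
    console.log(tree.isReadyForParentMerge({ level: 2, index: 2 })); // false: sibling missing
    tree.setProof({ level: 2, index: 3 }, 'proof-b');
    console.log(tree.isReadyForParentMerge({ level: 2, index: 2 })); // true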
@@ -919,8 +1207,6 @@ export class ProvingOrchestrator implements EpochProver {
 
     const txProvingState = provingState.getTxProvingState(txIndex);
 
-    // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-    // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
     const doAvmProving = wrapCallbackInSpan(
       this.tracer,
       'ProvingOrchestrator.prover.getAvmProof',
@@ -929,41 +1215,24 @@ export class ProvingOrchestrator implements EpochProver {
       },
       async (signal: AbortSignal) => {
         const inputs = txProvingState.getAvmInputs();
-
-          return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
-        } catch (err) {
-          if (process.env.AVM_PROVING_STRICT) {
-            logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-            throw err;
-          } else {
-            logger.warn(
-              `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Faking AVM proof and carrying on. ${inspect(
-                err,
-              )}.`,
-            );
-            return {
-              proof: makeEmptyRecursiveProof(AVM_PROOF_LENGTH_IN_FIELDS),
-              verificationKey: VerificationKeyData.makeFake(AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS),
-            };
-          }
-        }
+        return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
       },
     );
 
-    this.deferredProving(provingState, doAvmProving,
+    this.deferredProving(provingState, doAvmProving, proof => {
       logger.debug(`Proven VM for tx index: ${txIndex}`);
-      txProvingState.setAvmProof(
-      this.
+      txProvingState.setAvmProof(proof);
+      this.checkAndEnqueueBaseRollup(provingState, txIndex);
     });
   }
 
-  private
+  private checkAndEnqueueBaseRollup(provingState: BlockProvingState, txIndex: number) {
     const txProvingState = provingState.getTxProvingState(txIndex);
     if (!txProvingState.ready()) {
       return;
     }
 
-    // We must have completed all proving (
+    // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
     logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
 
     this.enqueueBaseRollup(provingState, txIndex);