@aztec/prover-client 0.0.0-test.1 → 0.0.1-commit.03f7ef2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +2 -0
- package/dest/block-factory/index.d.ts.map +1 -0
- package/dest/block-factory/light.d.ts +38 -0
- package/dest/block-factory/light.d.ts.map +1 -0
- package/dest/block-factory/light.js +108 -0
- package/dest/config.d.ts +8 -8
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +12 -2
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +36 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +147 -0
- package/dest/mocks/fixtures.d.ts +8 -8
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +34 -16
- package/dest/mocks/test_context.d.ts +41 -32
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +138 -87
- package/dest/orchestrator/block-building-helpers.d.ts +37 -30
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +170 -189
- package/dest/orchestrator/block-proving-state.d.ts +70 -48
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +282 -177
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +210 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +41 -27
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +143 -73
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +36 -33
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +379 -250
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -1
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.d.ts +13 -11
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +23 -40
- package/dest/prover-client/factory.d.ts +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +4 -4
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +5 -4
- package/dest/prover-client/server-epoch-prover.d.ts +16 -12
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +11 -11
- package/dest/proving_broker/broker_prover_facade.d.ts +23 -16
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +59 -40
- package/dest/proving_broker/config.d.ts +19 -10
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +23 -6
- package/dest/proving_broker/factory.d.ts +2 -2
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +5 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/factory.js +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
- package/dest/proving_broker/proof_store/index.d.ts +2 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +4 -4
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +83 -47
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.d.ts +13 -4
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +39 -23
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +12 -10
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts +9 -9
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +87 -60
- package/dest/proving_broker/rpc.d.ts +4 -6
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -4
- package/dest/test/mock_proof_store.d.ts +9 -0
- package/dest/test/mock_proof_store.d.ts.map +1 -0
- package/dest/test/mock_proof_store.js +10 -0
- package/dest/test/mock_prover.d.ts +23 -17
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +37 -19
- package/package.json +34 -31
- package/src/block-factory/index.ts +1 -0
- package/src/block-factory/light.ts +137 -0
- package/src/config.ts +25 -9
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +198 -0
- package/src/mocks/fixtures.ts +44 -39
- package/src/mocks/test_context.ts +213 -116
- package/src/orchestrator/block-building-helpers.ts +258 -334
- package/src/orchestrator/block-proving-state.ts +325 -231
- package/src/orchestrator/checkpoint-proving-state.ts +303 -0
- package/src/orchestrator/epoch-proving-state.ts +191 -113
- package/src/orchestrator/orchestrator.ts +582 -314
- package/src/orchestrator/tx-proving-state.ts +49 -64
- package/src/prover-client/prover-client.ts +16 -14
- package/src/prover-client/server-epoch-prover.ts +40 -22
- package/src/proving_broker/broker_prover_facade.ts +205 -126
- package/src/proving_broker/config.ts +25 -7
- package/src/proving_broker/factory.ts +2 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proof_store/factory.ts +1 -1
- package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
- package/src/proving_broker/proving_agent.ts +89 -47
- package/src/proving_broker/proving_broker.ts +56 -33
- package/src/proving_broker/proving_broker_database/memory.ts +3 -2
- package/src/proving_broker/proving_broker_database/persisted.ts +14 -12
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_job_controller.ts +92 -81
- package/src/proving_broker/rpc.ts +1 -6
- package/src/test/mock_proof_store.ts +14 -0
- package/src/test/mock_prover.ts +158 -59
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/dest/block_builder/index.d.ts +0 -6
- package/dest/block_builder/index.d.ts.map +0 -1
- package/dest/block_builder/light.d.ts +0 -33
- package/dest/block_builder/light.d.ts.map +0 -1
- package/dest/block_builder/light.js +0 -82
- package/src/bin/get-proof-inputs.ts +0 -59
- package/src/block_builder/index.ts +0 -6
- package/src/block_builder/light.ts +0 -101
- package/dest/{block_builder → block-factory}/index.js +0 -0
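The bulk of this release reorganizes the proving orchestrator around checkpoints (see the new checkpoint-proving-state and light/lightweight_checkpoint_builder modules above, and the orchestrator.js diff below). As a rough orientation aid, the sketch below strings together the new public entry points in the order the diff implies they are called. The `OrchestratorLike` interface and every argument value are hand-written placeholders inferred from this diff; they are not the package's exported types, so treat this as a reading guide rather than working usage of `@aztec/prover-client`.

```ts
// Illustrative only: call order of the new checkpoint-based orchestrator API, inferred
// from the orchestrator.js diff below. Interface and parameter types are placeholders.
interface OrchestratorLike {
  startNewEpoch(epochNumber: number, totalNumCheckpoints: number, finalBlobBatchingChallenges: unknown): void;
  startNewCheckpoint(
    checkpointIndex: number,
    constants: unknown,
    l1ToL2Messages: unknown[],
    totalNumBlocks: number,
    headerOfLastBlockInPreviousCheckpoint: unknown,
  ): Promise<void>;
  startNewBlock(blockNumber: number, timestamp: unknown, totalNumTxs: number): Promise<void>;
  addTxs(txs: unknown[]): Promise<void>;
  setBlockCompleted(blockNumber: number, expectedHeader?: unknown): Promise<unknown>;
  finalizeEpoch(): Promise<unknown>;
}

// Prove an epoch made of one checkpoint containing one block.
async function proveSingleBlockEpoch(
  orchestrator: OrchestratorLike,
  args: {
    epochNumber: number;
    finalBlobBatchingChallenges: unknown;
    checkpointConstants: unknown;
    l1ToL2Messages: unknown[];
    previousBlockHeader: unknown; // header of the last block in the previous checkpoint
    blockNumber: number;
    timestamp: unknown;
    txs: unknown[];
  },
) {
  // Epochs are now sized in checkpoints rather than blocks.
  orchestrator.startNewEpoch(args.epochNumber, 1, args.finalBlobBatchingChallenges);
  // Each checkpoint forks world state and inserts the L1-to-L2 messages up front.
  await orchestrator.startNewCheckpoint(0, args.checkpointConstants, args.l1ToL2Messages, 1, args.previousBlockHeader);
  // Blocks are opened inside the checkpoint; base parity circuits are enqueued for the first one.
  await orchestrator.startNewBlock(args.blockNumber, args.timestamp, args.txs.length);
  // Adding txs enqueues chonk verifier / VM / base rollup proofs and absorbs the blob fields.
  await orchestrator.addTxs(args.txs);
  // Completing the block assembles its header and validates it against synced world state.
  const header = await orchestrator.setBlockCompleted(args.blockNumber);
  // finalizeEpoch resolves once the root rollup proof (and batched blob) is ready.
  const epochProof = await orchestrator.finalizeEpoch();
  return { header, epochProof };
}
```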
package/dest/orchestrator/orchestrator.js
@@ -4,26 +4,21 @@ function _ts_decorate(decorators, target, key, desc) {
 else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
 return c > 3 && r && Object.defineProperty(target, key, r), r;
 }
-import {
-import {
+import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
+import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
-import {
-import { L2Block } from '@aztec/stdlib/block';
-import { BaseParityInputs } from '@aztec/stdlib/parity';
-import { makeEmptyRecursiveProof } from '@aztec/stdlib/proofs';
-import { EmptyBlockRootRollupInputs, PrivateBaseRollupInputs, SingleTxBlockRootRollupInputs, TubeInputs } from '@aztec/stdlib/rollup';
+import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { toNumBlobFields } from '@aztec/stdlib/tx';
-import { VerificationKeyData } from '@aztec/stdlib/vks';
 import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
 import { inspect } from 'util';
-import {
+import { buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
 import { EpochProvingState } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -47,8 +42,9 @@ const logger = createLogger('prover-client:orchestrator');
 pendingProvingJobs;
 provingPromise;
 metrics;
+// eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
 dbs;
-constructor(dbProvider, prover, proverId
+constructor(dbProvider, prover, proverId, telemetryClient = getTelemetryClient()){
 this.dbProvider = dbProvider;
 this.prover = prover;
 this.proverId = proverId;
@@ -68,67 +64,115 @@ const logger = createLogger('prover-client:orchestrator');
 this.cancel();
 return Promise.resolve();
 }
-startNewEpoch(epochNumber,
+startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges) {
+if (this.provingState?.verifyState()) {
+throw new Error(`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`);
+}
 const { promise: _promise, resolve, reject } = promiseWithResolvers();
 const promise = _promise.catch((reason)=>({
 status: 'failure',
 reason
 }));
-
-
-}
-logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
-this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, resolve, reject);
+logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
+this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
 this.provingPromise = promise;
 }
+async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint) {
+if (!this.provingState) {
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+}
+if (!this.provingState.isAcceptingCheckpoints()) {
+throw new Error(`Epoch not accepting further checkpoints.`);
+}
+// Fork world state at the end of the immediately previous block.
+const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+const db = await this.dbProvider.fork(lastBlockNumber);
+const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
+this.dbs.set(firstBlockNumber, db);
+// Get archive sibling path before any block in this checkpoint lands.
+const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+// Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+const { lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+this.provingState.startNewCheckpoint(checkpointIndex, constants, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint, lastArchiveSiblingPath, l1ToL2Messages, lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath);
+}
 /**
 * Starts off a new block
-* @param
-* @param
-*
-
+* @param blockNumber - The block number
+* @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+* block that doesn't have any txs.
+* @param totalNumTxs - The total number of txs in the block
+*/ async startNewBlock(blockNumber, timestamp, totalNumTxs) {
 if (!this.provingState) {
-throw new Error(
-}
-
-
-
-
-
-
-
-
-
-//
-
-
-
-
-
-
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
+}
+const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+if (!checkpointProvingState) {
+throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
+}
+if (!checkpointProvingState.isAcceptingBlocks()) {
+throw new Error(`Checkpoint not accepting further blocks.`);
+}
+const constants = checkpointProvingState.constants;
+logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);
+// Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+if (!this.dbs.has(blockNumber)) {
+// Fork world state at the end of the immediately previous block
+const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
+this.dbs.set(blockNumber, db);
+}
+const db = this.dbs.get(blockNumber);
+// Get archive snapshot and sibling path before any txs in this block lands.
+const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+const blockProvingState = checkpointProvingState.startNewBlock(blockNumber, timestamp, totalNumTxs, lastArchiveTreeSnapshot, lastArchiveSiblingPath);
+// Enqueue base parity circuits for the first block in the checkpoint.
+if (blockProvingState.index === 0) {
+for(let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++){
+this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+}
+}
+// Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+// We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+if (totalNumTxs === 0) {
+const endState = await db.getStateReference();
+blockProvingState.setEndState(endState);
+const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+await endSpongeBlob.absorb(blockEndBlobFields);
+blockProvingState.setEndSpongeBlob(endSpongeBlob);
+// And also try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 }
 /**
 * The interface to add simulated transactions to the scheduler. This can only be called once per block.
 * @param txs - The transactions to be proven
 */ async addTxs(txs) {
+if (!this.provingState) {
+throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+}
 if (!txs.length) {
 // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
 // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
 logger.warn(`Provided no txs to orchestrator addTxs.`);
 return;
 }
-const blockNumber = txs[0].
-const provingState = this.provingState
+const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
+const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber);
 if (!provingState) {
-throw new Error(`
+throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
 }
-if (provingState.totalNumTxs) {
+if (provingState.totalNumTxs !== txs.length) {
+throw new Error(`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`);
+}
+if (!provingState.isAcceptingTxs()) {
 throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
 }
-
-
-
+logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+const db = this.dbs.get(blockNumber);
+const lastArchive = provingState.lastArchiveTreeSnapshot;
+const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+const spongeBlobState = provingState.getStartSpongeBlob().clone();
 for (const tx of txs){
 try {
 if (!provingState.verifyState()) {
@@ -136,13 +180,21 @@ const logger = createLogger('prover-client:orchestrator');
 }
 validateTx(tx);
 logger.info(`Received transaction: ${tx.hash}`);
-const
-const
+const startSpongeBlob = spongeBlobState.clone();
+const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db);
+if (!provingState.verifyState()) {
+throw new Error(`Unable to add transaction, preparing base inputs failed`);
+}
+await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
 const txIndex = provingState.addNewTx(txProvingState);
-this.getOrEnqueueTube(provingState, txIndex);
 if (txProvingState.requireAvmProof) {
+this.getOrEnqueueChonkVerifier(provingState, txIndex);
 logger.debug(`Enqueueing public VM for tx ${txIndex}`);
 this.enqueueVM(provingState, txIndex);
+} else {
+logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+this.enqueueBaseRollup(provingState, txIndex);
 }
 } catch (err) {
 throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -150,22 +202,33 @@ const logger = createLogger('prover-client:orchestrator');
 });
 }
 }
+const endState = await db.getStateReference();
+provingState.setEndState(endState);
+const blockEndBlobFields = provingState.getBlockEndBlobFields();
+await spongeBlobState.absorb(blockEndBlobFields);
+provingState.setEndSpongeBlob(spongeBlobState);
+// Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 /**
-* Kickstarts
-* Note that if the
-*/
+* Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+* Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
+*/ startChonkVerifierCircuits(txs) {
 if (!this.provingState?.verifyState()) {
-throw new Error(`
+throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
 }
-
-
-const
+const publicTxs = txs.filter((tx)=>tx.data.forPublic);
+for (const tx of publicTxs){
+const txHash = tx.getTxHash().toString();
+const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
 const tubeProof = promiseWithResolvers();
-logger.debug(`Starting
-this.
-
+logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+this.doEnqueueChonkVerifier(txHash, privateInputs, (proof)=>{
+tubeProof.resolve(proof);
+});
+this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
 }
+return Promise.resolve();
 }
 /**
 * Marks the block as completed.
@@ -175,55 +238,73 @@ const logger = createLogger('prover-client:orchestrator');
 if (!provingState) {
 throw new Error(`Block proving state for ${blockNumber} not found`);
 }
-if
-
-
-
+// Abort with specific error for the block if there's one.
+const error = provingState.getError();
+if (error) {
+throw new Error(`Block proving failed: ${error}`);
 }
+// Abort if the proving state is not valid due to errors occurred elsewhere.
 if (!provingState.verifyState()) {
-throw new Error(`
+throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
 }
-
-
-await this.buildBlock(provingState, expectedHeader);
-// If the proofs were faster than the block building, then we need to try the block root rollup again here
-await this.checkAndEnqueueBlockRootRollup(provingState);
-return provingState.block;
-}
-/** Returns the block as built for a given index. */ getBlock(index) {
-const block = this.provingState?.blocks[index]?.block;
-if (!block) {
-throw new Error(`Block at index ${index} not available`);
+if (provingState.isAcceptingTxs()) {
+throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
 }
-
-
-
-// Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-const txs = provingState.allTxs.map((a)=>a.processedTx);
-// Get db for this block
-const db = this.dbs.get(provingState.blockNumber);
-// Given we've applied every change from this block, now assemble the block header
-// and update the archive tree, so we're ready to start processing the next block
-const { header, body } = await buildHeaderAndBodyFromTxs(txs, provingState.globalVariables, provingState.newL1ToL2Messages, db);
+// Given we've applied every change from this block, now assemble the block header:
+logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
+const header = await provingState.buildBlockHeader();
 if (expectedHeader && !header.equals(expectedHeader)) {
 logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
 throw new Error('Block header mismatch');
 }
+// Get db for this block
+const db = this.dbs.get(provingState.blockNumber);
+// Update the archive tree, so we're ready to start processing the next block:
 logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
 await db.updateArchive(header);
-
-
-const l2Block = new L2Block(newArchive, header, body);
-await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
-logger.verbose(`Orchestrator finalised block ${l2Block.number}`);
-provingState.block = l2Block;
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+return header;
 }
 // Flagged as protected to disable in certain unit tests
-async verifyBuiltBlockAgainstSyncedState(
-const
+async verifyBuiltBlockAgainstSyncedState(provingState) {
+const builtBlockHeader = provingState.getBuiltBlockHeader();
+if (!builtBlockHeader) {
+logger.debug('Block header not built yet, skipping header check.');
+return;
+}
+const output = provingState.getBlockRootRollupOutput();
+if (!output) {
+logger.debug('Block root rollup proof not built yet, skipping header check.');
+return;
+}
+const header = await buildHeaderFromCircuitOutputs(output);
+if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+provingState.reject(`Block header hash mismatch.`);
+return;
+}
+// Get db for this block
+const blockNumber = provingState.blockNumber;
+const db = this.dbs.get(blockNumber);
+const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
 if (!syncedArchive.equals(newArchive)) {
-
+logger.error(`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+provingState.reject(`Archive tree mismatch.`);
+return;
+}
+const circuitArchive = output.newArchive;
+if (!newArchive.equals(circuitArchive)) {
+logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+provingState.reject(`New archive mismatch.`);
+return;
 }
+// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+// but have to make sure it only runs once all operations are completed, otherwise some function here
+// will attempt to access the fork after it was closed.
+logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
 }
 /**
 * Cancel any further proving
@@ -235,14 +316,15 @@ const logger = createLogger('prover-client:orchestrator');
 }
 /**
 * Returns the proof for the current epoch.
-*/ async
+*/ async finalizeEpoch() {
 if (!this.provingState || !this.provingPromise) {
-throw new Error(`Invalid proving state, an epoch must be proven before it can be
+throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
 }
 const result = await this.provingPromise;
 if (result.status === 'failure') {
 throw new Error(`Epoch proving failed: ${result.reason}`);
 }
+await this.provingState.finalizeBatchedBlob();
 const epochProofResult = this.provingState.getEpochProofResult();
 pushTestData('epochProofResult', {
 proof: epochProofResult.proof.toString(),
@@ -251,24 +333,12 @@ const logger = createLogger('prover-client:orchestrator');
 return epochProofResult;
 }
 /**
-* Starts the proving process for the given transaction and adds it to our state
-* @param tx - The transaction whose proving we wish to commence
-* @param provingState - The proving state being worked on
-*/ async prepareTransaction(tx, provingState) {
-const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-if (!txInputs) {
-// This should not be possible
-throw new Error(`Unable to add transaction, preparing base inputs failed`);
-}
-return txInputs;
-}
-/**
 * Enqueue a job to be scheduled
 * @param provingState - The proving state object being operated on
 * @param jobType - The type of job to be queued
 * @param job - The actual job, returns a promise notifying of the job's completion
 */ deferredProving(provingState, request, callback) {
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`Not enqueuing job, state no longer valid`);
 return;
 }
@@ -283,7 +353,7 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 const result = await request(controller.signal);
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`State no longer valid, discarding result`);
 return;
 }
@@ -311,29 +381,26 @@ const logger = createLogger('prover-client:orchestrator');
 // let the callstack unwind before adding the job to the queue
 setImmediate(()=>void safeJob());
 }
-async
+async updateL1ToL2MessageTree(l1ToL2Messages, db) {
 const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 'Too many L1 to L2 messages');
-const
-const
+const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 // Update the local trees to include the new l1 to l2 messages
 await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-const
+const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 return {
-
-
-
+lastL1ToL2MessageTreeSnapshot,
+lastL1ToL2MessageSubtreeRootSiblingPath,
+newL1ToL2MessageTreeSnapshot,
+newL1ToL2MessageSubtreeRootSiblingPath
 };
 }
 // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-async prepareBaseRollupInputs(
-if (!provingState.verifyState() || !provingState.spongeBlobState) {
-logger.debug('Not preparing base rollup inputs, state invalid');
-return;
-}
-const db = this.dbs.get(provingState.blockNumber);
+async prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db) {
 // We build the base rollup inputs using a mock proof and verification key.
-// These will be overwritten later once we have proven the
-const [ms, hints] = await elapsed(
+// These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
+const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, this.proverId.toField(), db));
 this.metrics.recordBaseRollupInputs(ms);
 const promises = [
 MerkleTreeId.NOTE_HASH_TREE,
@@ -349,10 +416,6 @@ const logger = createLogger('prover-client:orchestrator');
 obj.key,
 obj.value
 ]));
-if (!provingState.verifyState()) {
-logger.debug(`Discarding proving job, state no longer valid`);
-return;
-}
 return [
 hints,
 treeSnapshots
@@ -365,68 +428,71 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running base rollup, state invalid');
 return;
 }
+if (!provingState.tryStartProvingBase(txIndex)) {
+logger.debug(`Base rollup for tx ${txIndex} already started.`);
+return;
+}
 const txProvingState = provingState.getTxProvingState(txIndex);
 const { processedTx } = txProvingState;
 const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
 logger.debug(`Enqueuing deferred proving base rollup for ${processedTx.hash.toString()}`);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateTxBaseRollupPrivateInputs ? 'getPrivateTxBaseRollupProof' : 'getPublicTxBaseRollupProof'}`, {
 [Attributes.TX_HASH]: processedTx.hash.toString(),
-[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
+if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 } else {
-return this.prover.
+return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 }
-}),
+}), (result)=>{
 logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-validatePartialState(result.inputs.
+validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
 const leafLocation = provingState.setBaseRollupProof(txIndex, result);
 if (provingState.totalNumTxs === 1) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
-
+this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
 }
 });
 }
-// Enqueues the
-// Once completed, will enqueue the
-
+// Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+// Once completed, will enqueue the the public tx base rollup.
+getOrEnqueueChonkVerifier(provingState, txIndex) {
 if (!provingState.verifyState()) {
-logger.debug('Not running
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
 const txProvingState = provingState.getTxProvingState(txIndex);
 const txHash = txProvingState.processedTx.hash.toString();
+NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
 const handleResult = (result)=>{
-logger.debug(`Got
+logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, {
 txHash
 });
-txProvingState.
-this.provingState?.
-this.
+txProvingState.setPublicChonkVerifierProof(result);
+this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 };
-if (this.provingState?.
-logger.debug(`
+if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, {
 txHash
 });
-void this.provingState.
+void this.provingState.cachedChonkVerifierProofs.get(txHash).then(handleResult);
 return;
 }
-logger.debug(`Enqueuing
-this.
+logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
 }
-
-if (!provingState
-logger.debug('Not running
+doEnqueueChonkVerifier(txHash, inputs, handler, provingState = this.provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPublicChonkVerifierProof', {
 [Attributes.TX_HASH]: txHash,
-[Attributes.
-
-}, (signal)=>this.prover.getTubeProof(inputs, signal, this.provingState.epochNumber)), handler);
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public'
+}, (signal)=>this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber)), handler);
 }
 // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
 // Enqueues the next level of merge if all inputs are available
@@ -435,65 +501,73 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running merge rollup. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingMerge(location)) {
+logger.debug('Merge rollup already started.');
+return;
+}
 const inputs = provingState.getMergeRollupInputs(location);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
-[Attributes.
-
-}, (signal)=>this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTxMergeRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge'
+}, (signal)=>this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setMergeRollupProof(location, result);
-
+this.checkAndEnqueueNextMergeRollup(provingState, location);
 });
 }
 // Executes the block root rollup circuit
-
+enqueueBlockRootRollup(provingState) {
 if (!provingState.verifyState()) {
 logger.debug('Not running block root rollup, state no longer valid');
 return;
 }
-provingState.
-
-
+if (!provingState.tryStartProvingBlockRoot()) {
+logger.debug('Block root rollup already started.');
+return;
+}
+const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
+logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockRootRollupProof', {
-[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
-} else if (inputs instanceof
-return this.prover.
+if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
 } else {
 return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
 }
 }), async (result)=>{
-
-
-
-
-
-
-
-// validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
-const epochProvingState = this.provingState;
-const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
-if (epochProvingState.totalNumBlocks === 1) {
-await this.enqueueEpochPadding(epochProvingState);
+// If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
+const leafLocation = provingState.setBlockRootRollupProof(result);
+const checkpointProvingState = provingState.parentCheckpoint;
+if (checkpointProvingState.totalNumBlocks === 1) {
+this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
 } else {
-this.checkAndEnqueueNextBlockMergeRollup(
+this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
 }
 });
 }
 // Executes the base parity circuit and stores the intermediate state for the root parity circuit
 // Enqueues the root parity circuit if all inputs are available
-enqueueBaseParityCircuit(
+enqueueBaseParityCircuit(checkpointProvingState, provingState, baseParityIndex) {
 if (!provingState.verifyState()) {
 logger.debug('Not running base parity. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+logger.warn(`Base parity ${baseParityIndex} already started.`);
+return;
+}
+const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBaseParityProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base'
 }, (signal)=>this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber)), (provingOutput)=>{
-provingState.setBaseParityProof(
+provingState.setBaseParityProof(baseParityIndex, provingOutput);
 this.checkAndEnqueueRootParityCircuit(provingState);
 });
 }
@@ -510,13 +584,16 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running root parity. State no longer valid.');
 return;
 }
-
+if (!provingState.tryStartProvingRootParity()) {
+logger.debug('Root parity already started.');
+return;
+}
+const inputs = provingState.getParityRootInputs();
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootParityProof', {
-[Attributes.
-
-}, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root'
+}, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setRootParityProof(result);
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 });
 }
 // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
@@ -526,28 +603,90 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running block merge rollup. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingBlockMerge(location)) {
+logger.debug('Block merge rollup already started.');
+return;
+}
 const inputs = provingState.getBlockMergeRollupInputs(location);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockMergeRollupProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge-rollup'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge'
 }, (signal)=>this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setBlockMergeRollupProof(location, result);
 this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
 });
 }
-
+enqueueCheckpointRootRollup(provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running checkpoint root rollup. State no longer valid.');
+return;
+}
+if (!provingState.tryStartProvingCheckpointRoot()) {
+logger.debug('Checkpoint root rollup already started.');
+return;
+}
+const rollupType = provingState.getCheckpointRootRollupType();
+logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+const inputs = provingState.getCheckpointRootRollupInputs();
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointRootRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
+}, (signal)=>{
+if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+} else {
+return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+}
+}), (result)=>{
+const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator().toBlobAccumulator();
+const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(computedEndBlobAccumulatorState)}`);
+provingState.reject(`Blob accumulator state mismatch.`);
+return;
+}
+logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+const leafLocation = provingState.setCheckpointRootRollupProof(result);
+const epochProvingState = provingState.parentEpoch;
+if (epochProvingState.totalNumCheckpoints === 1) {
+this.enqueueEpochPadding(epochProvingState);
+} else {
+this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+}
+});
+}
+enqueueCheckpointMergeRollup(provingState, location) {
+if (!provingState.verifyState()) {
+logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+return;
+}
+if (!provingState.tryStartProvingCheckpointMerge(location)) {
+logger.debug('Checkpoint merge rollup already started.');
+return;
+}
+const inputs = provingState.getCheckpointMergeRollupInputs(location);
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointMergeRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge'
+}, (signal)=>this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+logger.debug('Completed proof for checkpoint merge rollup.');
+provingState.setCheckpointMergeRollupProof(location, result);
+this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+});
+}
+enqueueEpochPadding(provingState) {
 if (!provingState.verifyState()) {
 logger.debug('Not running epoch padding. State no longer valid.');
 return;
 }
-
-
-
-
-
-
-
-
+if (!provingState.tryStartProvingPaddingCheckpoint()) {
+logger.debug('Padding checkpoint already started.');
+return;
+}
+logger.debug('Padding epoch proof with a padding block root proof.');
+const inputs = provingState.getPaddingCheckpointInputs();
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding'
+}, (signal)=>this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+logger.debug('Completed proof for padding checkpoint.');
+provingState.setCheckpointPaddingProof(result);
 this.checkAndEnqueueRootRollup(provingState);
 });
 }
@@ -558,10 +697,9 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 logger.debug(`Preparing root rollup`);
-const inputs = provingState.getRootRollupInputs(
+const inputs = provingState.getRootRollupInputs();
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootRollupProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root'
 }, (signal)=>this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`);
 provingState.setRootRollupProof(result);
@@ -570,34 +708,23 @@ const logger = createLogger('prover-client:orchestrator');
 });
 });
 }
-
+checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
 if (!provingState.isReadyForMergeRollup(currentLocation)) {
 return;
 }
 const parentLocation = provingState.getParentLocation(currentLocation);
 if (parentLocation.level === 0) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
 this.enqueueMergeRollup(provingState, parentLocation);
 }
 }
-
+checkAndEnqueueBlockRootRollup(provingState) {
 if (!provingState.isReadyForBlockRootRollup()) {
-logger.debug('Not ready for root rollup');
-return;
-}
-if (provingState.blockRootRollupStarted) {
-logger.debug('Block root rollup already started');
+logger.debug('Not ready for block root rollup');
 return;
 }
-
-// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
-// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
-// but have to make sure it only runs once all operations are completed, otherwise some function here
-// will attempt to access the fork after it was closed.
-logger.debug(`Cleaning up world state fork for ${blockNumber}`);
-void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
-await this.enqueueBlockRootRollup(provingState);
+this.enqueueBlockRootRollup(provingState);
 }
 checkAndEnqueueNextBlockMergeRollup(provingState, currentLocation) {
 if (!provingState.isReadyForBlockMerge(currentLocation)) {
@@ -605,11 +732,28 @@ const logger = createLogger('prover-client:orchestrator');
 }
 const parentLocation = provingState.getParentLocation(currentLocation);
 if (parentLocation.level === 0) {
-this.
+this.checkAndEnqueueCheckpointRootRollup(provingState);
 } else {
 this.enqueueBlockMergeRollup(provingState, parentLocation);
 }
 }
+checkAndEnqueueCheckpointRootRollup(provingState) {
+if (!provingState.isReadyForCheckpointRoot()) {
+return;
+}
+this.enqueueCheckpointRootRollup(provingState);
+}
+checkAndEnqueueNextCheckpointMergeRollup(provingState, currentLocation) {
+if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
+return;
+}
+const parentLocation = provingState.getParentLocation(currentLocation);
+if (parentLocation.level === 0) {
+this.checkAndEnqueueRootRollup(provingState);
+} else {
+this.enqueueCheckpointMergeRollup(provingState, parentLocation);
+}
+}
 checkAndEnqueueRootRollup(provingState) {
 if (!provingState.isReadyForRootRollup()) {
 logger.debug('Not ready for root rollup');
@@ -628,46 +772,31 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 const txProvingState = provingState.getTxProvingState(txIndex);
-// This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-// Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
 const doAvmProving = wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getAvmProof', {
 [Attributes.TX_HASH]: txProvingState.processedTx.hash.toString()
 }, async (signal)=>{
 const inputs = txProvingState.getAvmInputs();
-
-return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
-} catch (err) {
-if (process.env.AVM_PROVING_STRICT) {
-logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-throw err;
-} else {
-logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Faking AVM proof and carrying on. ${inspect(err)}.`);
-return {
-proof: makeEmptyRecursiveProof(AVM_PROOF_LENGTH_IN_FIELDS),
-verificationKey: VerificationKeyData.makeFake(AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS)
-};
-}
-}
+return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
 });
 this.deferredProving(provingState, doAvmProving, (proofAndVk)=>{
 logger.debug(`Proven VM for tx index: ${txIndex}`);
 txProvingState.setAvmProof(proofAndVk);
-this.
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 });
 }
-
+checkAndEnqueueBaseRollup(provingState, txIndex) {
 const txProvingState = provingState.getTxProvingState(txIndex);
 if (!txProvingState.ready()) {
 return;
 }
-// We must have completed all proving (
+// We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
 logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
 this.enqueueBaseRollup(provingState, txIndex);
 }
 }
 _ts_decorate([
-trackSpan('ProvingOrchestrator.startNewBlock', (
-[Attributes.BLOCK_NUMBER]:
+trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
+[Attributes.BLOCK_NUMBER]: blockNumber
 }))
 ], ProvingOrchestrator.prototype, "startNewBlock", null);
 _ts_decorate([
@@ -676,15 +805,15 @@ _ts_decorate([
 }))
 ], ProvingOrchestrator.prototype, "addTxs", null);
 _ts_decorate([
-trackSpan('ProvingOrchestrator.
-], ProvingOrchestrator.prototype, "
+trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+], ProvingOrchestrator.prototype, "startChonkVerifierCircuits", null);
 _ts_decorate([
 trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
 [Attributes.BLOCK_NUMBER]: blockNumber
 }))
 ], ProvingOrchestrator.prototype, "setBlockCompleted", null);
 _ts_decorate([
-trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (
+trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (tx)=>({
 [Attributes.TX_HASH]: tx.hash.toString()
 }))
 ], ProvingOrchestrator.prototype, "prepareBaseRollupInputs", null);