@aztec/prover-client 0.0.0-test.1 → 0.0.1-commit.5476d83
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +2 -0
- package/dest/block-factory/index.d.ts.map +1 -0
- package/dest/block-factory/light.d.ts +38 -0
- package/dest/block-factory/light.d.ts.map +1 -0
- package/dest/block-factory/light.js +108 -0
- package/dest/config.d.ts +7 -7
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +11 -1
- package/dest/index.d.ts +1 -1
- package/dest/light/lightweight_checkpoint_builder.d.ts +28 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +107 -0
- package/dest/mocks/fixtures.d.ts +8 -8
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +32 -14
- package/dest/mocks/test_context.d.ts +40 -31
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +134 -86
- package/dest/orchestrator/block-building-helpers.d.ts +36 -29
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +168 -188
- package/dest/orchestrator/block-proving-state.d.ts +68 -47
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +281 -176
- package/dest/orchestrator/checkpoint-proving-state.d.ts +62 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +208 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +40 -26
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +143 -73
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +35 -32
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +389 -239
- package/dest/orchestrator/orchestrator_metrics.d.ts +3 -1
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +9 -0
- package/dest/orchestrator/tx-proving-state.d.ts +13 -11
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +23 -40
- package/dest/prover-client/factory.d.ts +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +4 -4
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +5 -4
- package/dest/prover-client/server-epoch-prover.d.ts +15 -11
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +11 -11
- package/dest/proving_broker/broker_prover_facade.d.ts +23 -16
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +67 -41
- package/dest/proving_broker/config.d.ts +18 -9
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +22 -5
- package/dest/proving_broker/factory.d.ts +2 -2
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +5 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +2 -1
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/factory.js +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
- package/dest/proving_broker/proof_store/index.d.ts +2 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +4 -4
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +83 -47
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.d.ts +13 -4
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +36 -23
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +12 -10
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts +9 -9
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +89 -61
- package/dest/proving_broker/rpc.d.ts +4 -6
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -4
- package/dest/test/mock_proof_store.d.ts +9 -0
- package/dest/test/mock_proof_store.d.ts.map +1 -0
- package/dest/test/mock_proof_store.js +10 -0
- package/dest/test/mock_prover.d.ts +23 -17
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +38 -20
- package/package.json +32 -31
- package/src/block-factory/index.ts +1 -0
- package/src/block-factory/light.ts +137 -0
- package/src/config.ts +24 -8
- package/src/light/lightweight_checkpoint_builder.ts +142 -0
- package/src/mocks/fixtures.ts +42 -37
- package/src/mocks/test_context.ts +207 -115
- package/src/orchestrator/block-building-helpers.ts +256 -333
- package/src/orchestrator/block-proving-state.ts +323 -230
- package/src/orchestrator/checkpoint-proving-state.ts +301 -0
- package/src/orchestrator/epoch-proving-state.ts +187 -112
- package/src/orchestrator/orchestrator.ts +592 -299
- package/src/orchestrator/orchestrator_metrics.ts +20 -1
- package/src/orchestrator/tx-proving-state.ts +50 -64
- package/src/prover-client/prover-client.ts +16 -14
- package/src/prover-client/server-epoch-prover.ts +39 -21
- package/src/proving_broker/broker_prover_facade.ts +214 -126
- package/src/proving_broker/config.ts +24 -6
- package/src/proving_broker/factory.ts +2 -1
- package/src/proving_broker/fixtures.ts +7 -2
- package/src/proving_broker/proof_store/factory.ts +1 -1
- package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
- package/src/proving_broker/proving_agent.ts +89 -47
- package/src/proving_broker/proving_broker.ts +53 -33
- package/src/proving_broker/proving_broker_database/memory.ts +3 -2
- package/src/proving_broker/proving_broker_database/persisted.ts +14 -12
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_job_controller.ts +94 -82
- package/src/proving_broker/rpc.ts +1 -6
- package/src/test/mock_proof_store.ts +14 -0
- package/src/test/mock_prover.ts +164 -60
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/dest/block_builder/index.d.ts +0 -6
- package/dest/block_builder/index.d.ts.map +0 -1
- package/dest/block_builder/light.d.ts +0 -33
- package/dest/block_builder/light.d.ts.map +0 -1
- package/dest/block_builder/light.js +0 -82
- package/src/bin/get-proof-inputs.ts +0 -59
- package/src/block_builder/index.ts +0 -6
- package/src/block_builder/light.ts +0 -101
- /package/dest/{block_builder → block-factory}/index.js +0 -0
@@ -4,8 +4,8 @@ function _ts_decorate(decorators, target, key, desc) {
 else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
 return c > 3 && r && Object.defineProperty(target, key, r), r;
 }
-import {
-import { padArrayEnd
+import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+import { padArrayEnd } from '@aztec/foundation/collection';
 import { AbortError } from '@aztec/foundation/error';
 import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
@@ -13,17 +13,12 @@ import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
-import {
-import {
-import { BaseParityInputs } from '@aztec/stdlib/parity';
-import { makeEmptyRecursiveProof } from '@aztec/stdlib/proofs';
-import { EmptyBlockRootRollupInputs, PrivateBaseRollupInputs, SingleTxBlockRootRollupInputs, TubeInputs } from '@aztec/stdlib/rollup';
+import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
+import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { toNumBlobFields } from '@aztec/stdlib/tx';
-import { VerificationKeyData } from '@aztec/stdlib/vks';
 import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
 import { inspect } from 'util';
-import {
+import { buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
 import { EpochProvingState } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -48,7 +43,7 @@ const logger = createLogger('prover-client:orchestrator');
 provingPromise;
 metrics;
 dbs;
-constructor(dbProvider, prover, proverId
+constructor(dbProvider, prover, proverId, telemetryClient = getTelemetryClient()){
 this.dbProvider = dbProvider;
 this.prover = prover;
 this.proverId = proverId;
@@ -68,67 +63,115 @@ const logger = createLogger('prover-client:orchestrator');
 this.cancel();
 return Promise.resolve();
 }
-startNewEpoch(epochNumber,
+startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges) {
+if (this.provingState?.verifyState()) {
+throw new Error(`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`);
+}
 const { promise: _promise, resolve, reject } = promiseWithResolvers();
 const promise = _promise.catch((reason)=>({
 status: 'failure',
 reason
 }));
-
-
-}
-logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
-this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, resolve, reject);
+logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
+this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
 this.provingPromise = promise;
 }
+async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint) {
+if (!this.provingState) {
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+}
+if (!this.provingState.isAcceptingCheckpoints()) {
+throw new Error(`Epoch not accepting further checkpoints.`);
+}
+// Fork world state at the end of the immediately previous block.
+const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+const db = await this.dbProvider.fork(lastBlockNumber);
+const firstBlockNumber = lastBlockNumber + 1;
+this.dbs.set(firstBlockNumber, db);
+// Get archive sibling path before any block in this checkpoint lands.
+const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+// Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+const { lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+this.provingState.startNewCheckpoint(checkpointIndex, constants, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint, lastArchiveSiblingPath, l1ToL2Messages, lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath);
+}
 /**
 * Starts off a new block
-* @param
-* @param
-*
-
+* @param blockNumber - The block number
+* @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+* block that doesn't have any txs.
+* @param totalNumTxs - The total number of txs in the block
+*/ async startNewBlock(blockNumber, timestamp, totalNumTxs) {
 if (!this.provingState) {
-throw new Error(
-}
-
-
-
-
-
-
-
-
-//
-
-
-
-
-
-
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
+}
+const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+if (!checkpointProvingState) {
+throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
+}
+if (!checkpointProvingState.isAcceptingBlocks()) {
+throw new Error(`Checkpoint not accepting further blocks.`);
+}
+const constants = checkpointProvingState.constants;
+logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);
+// Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+if (!this.dbs.has(blockNumber)) {
+// Fork world state at the end of the immediately previous block
+const db = await this.dbProvider.fork(blockNumber - 1);
+this.dbs.set(blockNumber, db);
+}
+const db = this.dbs.get(blockNumber);
+// Get archive snapshot and sibling path before any txs in this block lands.
+const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+const blockProvingState = checkpointProvingState.startNewBlock(blockNumber, timestamp, totalNumTxs, lastArchiveTreeSnapshot, lastArchiveSiblingPath);
+// Enqueue base parity circuits for the first block in the checkpoint.
+if (blockProvingState.index === 0) {
+for(let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++){
+this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+}
+}
+// Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+// We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+if (totalNumTxs === 0) {
+const endState = await db.getStateReference();
+blockProvingState.setEndState(endState);
+const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+await endSpongeBlob.absorb(blockEndBlobFields);
+blockProvingState.setEndSpongeBlob(endSpongeBlob);
+// And also try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 }
 /**
 * The interface to add simulated transactions to the scheduler. This can only be called once per block.
 * @param txs - The transactions to be proven
 */ async addTxs(txs) {
+if (!this.provingState) {
+throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+}
 if (!txs.length) {
 // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
 // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
 logger.warn(`Provided no txs to orchestrator addTxs.`);
 return;
 }
-const blockNumber = txs[0].
-const provingState = this.provingState
+const blockNumber = txs[0].globalVariables.blockNumber;
+const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber);
 if (!provingState) {
-throw new Error(`
+throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
 }
-if (provingState.totalNumTxs) {
+if (provingState.totalNumTxs !== txs.length) {
+throw new Error(`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`);
+}
+if (!provingState.isAcceptingTxs()) {
 throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
 }
-
-
-
+logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+const db = this.dbs.get(blockNumber);
+const lastArchive = provingState.lastArchiveTreeSnapshot;
+const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+const spongeBlobState = provingState.getStartSpongeBlob().clone();
 for (const tx of txs){
 try {
 if (!provingState.verifyState()) {
@@ -136,13 +179,21 @@ const logger = createLogger('prover-client:orchestrator');
 }
 validateTx(tx);
 logger.info(`Received transaction: ${tx.hash}`);
-const
-const
+const startSpongeBlob = spongeBlobState.clone();
+const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db);
+if (!provingState.verifyState()) {
+throw new Error(`Unable to add transaction, preparing base inputs failed`);
+}
+await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
 const txIndex = provingState.addNewTx(txProvingState);
-this.getOrEnqueueTube(provingState, txIndex);
 if (txProvingState.requireAvmProof) {
+this.getOrEnqueueChonkVerifier(provingState, txIndex);
 logger.debug(`Enqueueing public VM for tx ${txIndex}`);
 this.enqueueVM(provingState, txIndex);
+} else {
+logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+this.enqueueBaseRollup(provingState, txIndex);
 }
 } catch (err) {
 throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -150,22 +201,33 @@ const logger = createLogger('prover-client:orchestrator');
 });
 }
 }
+const endState = await db.getStateReference();
+provingState.setEndState(endState);
+const blockEndBlobFields = provingState.getBlockEndBlobFields();
+await spongeBlobState.absorb(blockEndBlobFields);
+provingState.setEndSpongeBlob(spongeBlobState);
+// Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 /**
-* Kickstarts
-* Note that if the
-*/
+* Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+* Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
+*/ startChonkVerifierCircuits(txs) {
 if (!this.provingState?.verifyState()) {
-throw new Error(`
+throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
 }
-
-
-const
+const publicTxs = txs.filter((tx)=>tx.data.forPublic);
+for (const tx of publicTxs){
+const txHash = tx.getTxHash().toString();
+const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
 const tubeProof = promiseWithResolvers();
-logger.debug(`Starting
-this.
-
+logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+this.doEnqueueChonkVerifier(txHash, privateInputs, (proof)=>{
+tubeProof.resolve(proof);
+});
+this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
 }
+return Promise.resolve();
 }
 /**
 * Marks the block as completed.
@@ -175,55 +237,73 @@ const logger = createLogger('prover-client:orchestrator');
 if (!provingState) {
 throw new Error(`Block proving state for ${blockNumber} not found`);
 }
-if
-
-
-
+// Abort with specific error for the block if there's one.
+const error = provingState.getError();
+if (error) {
+throw new Error(`Block proving failed: ${error}`);
 }
+// Abort if the proving state is not valid due to errors occurred elsewhere.
 if (!provingState.verifyState()) {
-throw new Error(`
+throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
 }
-
-
-await this.buildBlock(provingState, expectedHeader);
-// If the proofs were faster than the block building, then we need to try the block root rollup again here
-await this.checkAndEnqueueBlockRootRollup(provingState);
-return provingState.block;
-}
-/** Returns the block as built for a given index. */ getBlock(index) {
-const block = this.provingState?.blocks[index]?.block;
-if (!block) {
-throw new Error(`Block at index ${index} not available`);
+if (provingState.isAcceptingTxs()) {
+throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
 }
-
-
-
-// Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-const txs = provingState.allTxs.map((a)=>a.processedTx);
-// Get db for this block
-const db = this.dbs.get(provingState.blockNumber);
-// Given we've applied every change from this block, now assemble the block header
-// and update the archive tree, so we're ready to start processing the next block
-const { header, body } = await buildHeaderAndBodyFromTxs(txs, provingState.globalVariables, provingState.newL1ToL2Messages, db);
+// Given we've applied every change from this block, now assemble the block header:
+logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
+const header = await provingState.buildBlockHeader();
 if (expectedHeader && !header.equals(expectedHeader)) {
 logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
 throw new Error('Block header mismatch');
 }
+// Get db for this block
+const db = this.dbs.get(provingState.blockNumber);
+// Update the archive tree, so we're ready to start processing the next block:
 logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
 await db.updateArchive(header);
-
-
-const l2Block = new L2Block(newArchive, header, body);
-await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
-logger.verbose(`Orchestrator finalised block ${l2Block.number}`);
-provingState.block = l2Block;
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+return header;
 }
 // Flagged as protected to disable in certain unit tests
-async verifyBuiltBlockAgainstSyncedState(
-const
+async verifyBuiltBlockAgainstSyncedState(provingState) {
+const builtBlockHeader = provingState.getBuiltBlockHeader();
+if (!builtBlockHeader) {
+logger.debug('Block header not built yet, skipping header check.');
+return;
+}
+const output = provingState.getBlockRootRollupOutput();
+if (!output) {
+logger.debug('Block root rollup proof not built yet, skipping header check.');
+return;
+}
+const header = await buildHeaderFromCircuitOutputs(output);
+if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+provingState.reject(`Block header hash mismatch.`);
+return;
+}
+// Get db for this block
+const blockNumber = provingState.blockNumber;
+const db = this.dbs.get(blockNumber);
+const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
 if (!syncedArchive.equals(newArchive)) {
-
+logger.error(`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+provingState.reject(`Archive tree mismatch.`);
+return;
 }
+const circuitArchive = output.newArchive;
+if (!newArchive.equals(circuitArchive)) {
+logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+provingState.reject(`New archive mismatch.`);
+return;
+}
+// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+// but have to make sure it only runs once all operations are completed, otherwise some function here
+// will attempt to access the fork after it was closed.
+logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
 }
 /**
 * Cancel any further proving
@@ -235,14 +315,15 @@ const logger = createLogger('prover-client:orchestrator');
 }
 /**
 * Returns the proof for the current epoch.
-*/ async
+*/ async finalizeEpoch() {
 if (!this.provingState || !this.provingPromise) {
-throw new Error(`Invalid proving state, an epoch must be proven before it can be
+throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
 }
 const result = await this.provingPromise;
 if (result.status === 'failure') {
 throw new Error(`Epoch proving failed: ${result.reason}`);
 }
+await this.provingState.finalizeBatchedBlob();
 const epochProofResult = this.provingState.getEpochProofResult();
 pushTestData('epochProofResult', {
 proof: epochProofResult.proof.toString(),
@@ -251,24 +332,12 @@ const logger = createLogger('prover-client:orchestrator');
 return epochProofResult;
 }
 /**
-* Starts the proving process for the given transaction and adds it to our state
-* @param tx - The transaction whose proving we wish to commence
-* @param provingState - The proving state being worked on
-*/ async prepareTransaction(tx, provingState) {
-const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-if (!txInputs) {
-// This should not be possible
-throw new Error(`Unable to add transaction, preparing base inputs failed`);
-}
-return txInputs;
-}
-/**
 * Enqueue a job to be scheduled
 * @param provingState - The proving state object being operated on
 * @param jobType - The type of job to be queued
 * @param job - The actual job, returns a promise notifying of the job's completion
 */ deferredProving(provingState, request, callback) {
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`Not enqueuing job, state no longer valid`);
 return;
 }
@@ -283,7 +352,7 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 const result = await request(controller.signal);
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`State no longer valid, discarding result`);
 return;
 }
@@ -311,29 +380,26 @@ const logger = createLogger('prover-client:orchestrator');
 // let the callstack unwind before adding the job to the queue
 setImmediate(()=>void safeJob());
 }
-async
+async updateL1ToL2MessageTree(l1ToL2Messages, db) {
 const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 'Too many L1 to L2 messages');
-const
-const
+const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 // Update the local trees to include the new l1 to l2 messages
 await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-const
+const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 return {
-
-
-
+lastL1ToL2MessageTreeSnapshot,
+lastL1ToL2MessageSubtreeRootSiblingPath,
+newL1ToL2MessageTreeSnapshot,
+newL1ToL2MessageSubtreeRootSiblingPath
 };
 }
 // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-async prepareBaseRollupInputs(
-if (!provingState.verifyState() || !provingState.spongeBlobState) {
-logger.debug('Not preparing base rollup inputs, state invalid');
-return;
-}
-const db = this.dbs.get(provingState.blockNumber);
+async prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db) {
 // We build the base rollup inputs using a mock proof and verification key.
-// These will be overwritten later once we have proven the
-const [ms, hints] = await elapsed(
+// These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
+const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, this.proverId.toField(), db));
 this.metrics.recordBaseRollupInputs(ms);
 const promises = [
 MerkleTreeId.NOTE_HASH_TREE,
@@ -349,10 +415,6 @@ const logger = createLogger('prover-client:orchestrator');
 obj.key,
 obj.value
 ]));
-if (!provingState.verifyState()) {
-logger.debug(`Discarding proving job, state no longer valid`);
-return;
-}
 return [
 hints,
 treeSnapshots
@@ -365,68 +427,71 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running base rollup, state invalid');
 return;
 }
+if (!provingState.tryStartProvingBase(txIndex)) {
+logger.debug(`Base rollup for tx ${txIndex} already started.`);
+return;
+}
 const txProvingState = provingState.getTxProvingState(txIndex);
 const { processedTx } = txProvingState;
 const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
 logger.debug(`Enqueuing deferred proving base rollup for ${processedTx.hash.toString()}`);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateTxBaseRollupPrivateInputs ? 'getPrivateTxBaseRollupProof' : 'getPublicTxBaseRollupProof'}`, {
 [Attributes.TX_HASH]: processedTx.hash.toString(),
-[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
+if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 } else {
-return this.prover.
+return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 }
-}),
+}), (result)=>{
 logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-validatePartialState(result.inputs.
+validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
 const leafLocation = provingState.setBaseRollupProof(txIndex, result);
 if (provingState.totalNumTxs === 1) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
-
+this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
 }
 });
 }
-// Enqueues the
-// Once completed, will enqueue the
-
+// Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+// Once completed, will enqueue the the public tx base rollup.
+getOrEnqueueChonkVerifier(provingState, txIndex) {
 if (!provingState.verifyState()) {
-logger.debug('Not running
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
 const txProvingState = provingState.getTxProvingState(txIndex);
 const txHash = txProvingState.processedTx.hash.toString();
+NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
 const handleResult = (result)=>{
-logger.debug(`Got
+logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, {
 txHash
 });
-txProvingState.
-this.provingState?.
-this.
+txProvingState.setPublicChonkVerifierProof(result);
+this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 };
-if (this.provingState?.
-logger.debug(`
+if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, {
 txHash
 });
-void this.provingState.
+void this.provingState.cachedChonkVerifierProofs.get(txHash).then(handleResult);
 return;
 }
-logger.debug(`Enqueuing
-this.
+logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
 }
-
-if (!provingState
-logger.debug('Not running
+doEnqueueChonkVerifier(txHash, inputs, handler, provingState = this.provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPublicChonkVerifierProof', {
 [Attributes.TX_HASH]: txHash,
-[Attributes.
-
-}, (signal)=>this.prover.getTubeProof(inputs, signal, this.provingState.epochNumber)), handler);
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public'
+}, (signal)=>this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber)), handler);
 }
 // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
 // Enqueues the next level of merge if all inputs are available
@@ -435,65 +500,73 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running merge rollup. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingMerge(location)) {
+logger.debug('Merge rollup already started.');
+return;
+}
 const inputs = provingState.getMergeRollupInputs(location);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
-[Attributes.
-
-}, (signal)=>this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTxMergeRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge'
+}, (signal)=>this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setMergeRollupProof(location, result);
-
+this.checkAndEnqueueNextMergeRollup(provingState, location);
 });
 }
 // Executes the block root rollup circuit
-
+enqueueBlockRootRollup(provingState) {
 if (!provingState.verifyState()) {
 logger.debug('Not running block root rollup, state no longer valid');
 return;
 }
-provingState.
-
-
+if (!provingState.tryStartProvingBlockRoot()) {
+logger.debug('Block root rollup already started.');
+return;
+}
+const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
+logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockRootRollupProof', {
-[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
-} else if (inputs instanceof
-return this.prover.
+if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
 } else {
 return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
 }
 }), async (result)=>{
-
-
-
-
-
-
-
-// validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
-const epochProvingState = this.provingState;
-const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
-if (epochProvingState.totalNumBlocks === 1) {
-await this.enqueueEpochPadding(epochProvingState);
+// If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
+const leafLocation = provingState.setBlockRootRollupProof(result);
+const checkpointProvingState = provingState.parentCheckpoint;
+if (checkpointProvingState.totalNumBlocks === 1) {
+this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
 } else {
-this.checkAndEnqueueNextBlockMergeRollup(
+this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
 }
 });
 }
 // Executes the base parity circuit and stores the intermediate state for the root parity circuit
 // Enqueues the root parity circuit if all inputs are available
-enqueueBaseParityCircuit(
+enqueueBaseParityCircuit(checkpointProvingState, provingState, baseParityIndex) {
 if (!provingState.verifyState()) {
 logger.debug('Not running base parity. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+logger.warn(`Base parity ${baseParityIndex} already started.`);
+return;
+}
+const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBaseParityProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base'
 }, (signal)=>this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber)), (provingOutput)=>{
-provingState.setBaseParityProof(
+provingState.setBaseParityProof(baseParityIndex, provingOutput);
 this.checkAndEnqueueRootParityCircuit(provingState);
 });
 }
@@ -510,13 +583,16 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running root parity. State no longer valid.');
 return;
 }
-
+if (!provingState.tryStartProvingRootParity()) {
+logger.debug('Root parity already started.');
+return;
+}
+const inputs = provingState.getParityRootInputs();
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootParityProof', {
-[Attributes.
-
-}, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root'
+}, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setRootParityProof(result);
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 });
 }
 // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
@@ -526,28 +602,90 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running block merge rollup. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingBlockMerge(location)) {
+logger.debug('Block merge rollup already started.');
+return;
+}
 const inputs = provingState.getBlockMergeRollupInputs(location);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockMergeRollupProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge-rollup'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge'
 }, (signal)=>this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setBlockMergeRollupProof(location, result);
 this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
 });
 }
-
+enqueueCheckpointRootRollup(provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running checkpoint root rollup. State no longer valid.');
+return;
+}
+if (!provingState.tryStartProvingCheckpointRoot()) {
+logger.debug('Checkpoint root rollup already started.');
+return;
+}
+const rollupType = provingState.getCheckpointRootRollupType();
+logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+const inputs = provingState.getCheckpointRootRollupInputs();
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointRootRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
+}, (signal)=>{
+if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+} else {
+return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+}
+}), (result)=>{
+const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator().toBlobAccumulator();
+const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(computedEndBlobAccumulatorState)}`);
+provingState.reject(`Blob accumulator state mismatch.`);
+return;
+}
+logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+const leafLocation = provingState.setCheckpointRootRollupProof(result);
+const epochProvingState = provingState.parentEpoch;
+if (epochProvingState.totalNumCheckpoints === 1) {
+this.enqueueEpochPadding(epochProvingState);
+} else {
+this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+}
+});
+}
+enqueueCheckpointMergeRollup(provingState, location) {
+if (!provingState.verifyState()) {
+logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+return;
+}
+if (!provingState.tryStartProvingCheckpointMerge(location)) {
+logger.debug('Checkpoint merge rollup already started.');
+return;
+}
+const inputs = provingState.getCheckpointMergeRollupInputs(location);
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointMergeRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge'
+}, (signal)=>this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+logger.debug('Completed proof for checkpoint merge rollup.');
+provingState.setCheckpointMergeRollupProof(location, result);
+this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+});
+}
+enqueueEpochPadding(provingState) {
 if (!provingState.verifyState()) {
 logger.debug('Not running epoch padding. State no longer valid.');
 return;
 }
-
-
-
-
-
-
-
-
+if (!provingState.tryStartProvingPaddingCheckpoint()) {
+logger.debug('Padding checkpoint already started.');
+return;
+}
+logger.debug('Padding epoch proof with a padding block root proof.');
+const inputs = provingState.getPaddingCheckpointInputs();
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding'
+}, (signal)=>this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+logger.debug('Completed proof for padding checkpoint.');
+provingState.setCheckpointPaddingProof(result);
 this.checkAndEnqueueRootRollup(provingState);
 });
 }
@@ -558,10 +696,9 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 logger.debug(`Preparing root rollup`);
-const inputs = provingState.getRootRollupInputs(
+const inputs = provingState.getRootRollupInputs();
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootRollupProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root'
 }, (signal)=>this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`);
 provingState.setRootRollupProof(result);
@@ -570,34 +707,23 @@ const logger = createLogger('prover-client:orchestrator');
 });
 });
 }
-
+checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
 if (!provingState.isReadyForMergeRollup(currentLocation)) {
 return;
 }
 const parentLocation = provingState.getParentLocation(currentLocation);
 if (parentLocation.level === 0) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
 this.enqueueMergeRollup(provingState, parentLocation);
 }
 }
-
+checkAndEnqueueBlockRootRollup(provingState) {
 if (!provingState.isReadyForBlockRootRollup()) {
-logger.debug('Not ready for root rollup');
+logger.debug('Not ready for block root rollup');
 return;
 }
-
-logger.debug('Block root rollup already started');
-return;
-}
-const blockNumber = provingState.blockNumber;
-// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
-// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
-// but have to make sure it only runs once all operations are completed, otherwise some function here
-// will attempt to access the fork after it was closed.
-logger.debug(`Cleaning up world state fork for ${blockNumber}`);
-void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
-await this.enqueueBlockRootRollup(provingState);
+this.enqueueBlockRootRollup(provingState);
 }
 checkAndEnqueueNextBlockMergeRollup(provingState, currentLocation) {
 if (!provingState.isReadyForBlockMerge(currentLocation)) {
@@ -605,11 +731,28 @@ const logger = createLogger('prover-client:orchestrator');
 }
 const parentLocation = provingState.getParentLocation(currentLocation);
 if (parentLocation.level === 0) {
-this.
+this.checkAndEnqueueCheckpointRootRollup(provingState);
 } else {
 this.enqueueBlockMergeRollup(provingState, parentLocation);
 }
 }
+checkAndEnqueueCheckpointRootRollup(provingState) {
+if (!provingState.isReadyForCheckpointRoot()) {
+return;
+}
+this.enqueueCheckpointRootRollup(provingState);
+}
+checkAndEnqueueNextCheckpointMergeRollup(provingState, currentLocation) {
+if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
+return;
+}
+const parentLocation = provingState.getParentLocation(currentLocation);
+if (parentLocation.level === 0) {
+this.checkAndEnqueueRootRollup(provingState);
+} else {
+this.enqueueCheckpointMergeRollup(provingState, parentLocation);
+}
+}
 checkAndEnqueueRootRollup(provingState) {
 if (!provingState.isReadyForRootRollup()) {
 logger.debug('Not ready for root rollup');
@@ -635,39 +778,46 @@ const logger = createLogger('prover-client:orchestrator');
 }, async (signal)=>{
 const inputs = txProvingState.getAvmInputs();
 try {
-
+// TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
+// just keep the next line but removing the second argument (false).
+return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
 } catch (err) {
 if (process.env.AVM_PROVING_STRICT) {
 logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
 throw err;
 } else {
-logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off.
-
-
-
-
+logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
+AVM inputs and carrying on. ${inspect(err)}.`);
+try {
+this.metrics.incAvmFallback();
+const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
+return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
+} catch (err) {
+logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
+throw err;
+}
 }
 }
 });
 this.deferredProving(provingState, doAvmProving, (proofAndVk)=>{
 logger.debug(`Proven VM for tx index: ${txIndex}`);
 txProvingState.setAvmProof(proofAndVk);
-this.
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 });
 }
-
+checkAndEnqueueBaseRollup(provingState, txIndex) {
 const txProvingState = provingState.getTxProvingState(txIndex);
 if (!txProvingState.ready()) {
 return;
 }
-// We must have completed all proving (
+// We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
 logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
 this.enqueueBaseRollup(provingState, txIndex);
 }
 }
 _ts_decorate([
-trackSpan('ProvingOrchestrator.startNewBlock', (
-[Attributes.BLOCK_NUMBER]:
+trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
+[Attributes.BLOCK_NUMBER]: blockNumber
 }))
 ], ProvingOrchestrator.prototype, "startNewBlock", null);
 _ts_decorate([
@@ -676,15 +826,15 @@ _ts_decorate([
 }))
 ], ProvingOrchestrator.prototype, "addTxs", null);
 _ts_decorate([
-trackSpan('ProvingOrchestrator.
-], ProvingOrchestrator.prototype, "
+trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+], ProvingOrchestrator.prototype, "startChonkVerifierCircuits", null);
 _ts_decorate([
 trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
 [Attributes.BLOCK_NUMBER]: blockNumber
 }))
 ], ProvingOrchestrator.prototype, "setBlockCompleted", null);
 _ts_decorate([
-trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (
+trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (tx)=>({
 [Attributes.TX_HASH]: tx.hash.toString()
 }))
 ], ProvingOrchestrator.prototype, "prepareBaseRollupInputs", null);