@aztec/prover-client 0.0.0-test.1 → 0.0.1-fake-c83136db25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +2 -0
- package/dest/block-factory/index.d.ts.map +1 -0
- package/dest/block-factory/light.d.ts +38 -0
- package/dest/block-factory/light.d.ts.map +1 -0
- package/dest/block-factory/light.js +94 -0
- package/dest/config.d.ts +6 -6
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +11 -1
- package/dest/mocks/fixtures.d.ts +7 -4
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +32 -4
- package/dest/mocks/test_context.d.ts +43 -15
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +110 -48
- package/dest/orchestrator/block-building-helpers.d.ts +37 -28
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +156 -150
- package/dest/orchestrator/block-proving-state.d.ts +62 -46
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +223 -179
- package/dest/orchestrator/checkpoint-proving-state.d.ts +63 -0
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/checkpoint-proving-state.js +211 -0
- package/dest/orchestrator/epoch-proving-state.d.ts +37 -24
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +143 -73
- package/dest/orchestrator/orchestrator.d.ts +34 -31
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +392 -234
- package/dest/orchestrator/orchestrator_metrics.d.ts +2 -0
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +9 -0
- package/dest/orchestrator/tx-proving-state.d.ts +12 -10
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +30 -38
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +5 -4
- package/dest/prover-client/server-epoch-prover.d.ts +13 -10
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +11 -11
- package/dest/proving_broker/broker_prover_facade.d.ts +22 -15
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +64 -39
- package/dest/proving_broker/config.d.ts +9 -4
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +15 -4
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +5 -1
- package/dest/proving_broker/fixtures.js +1 -1
- package/dest/proving_broker/proof_store/factory.js +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.js +1 -0
- package/dest/proving_broker/proof_store/index.d.ts +1 -0
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +1 -0
- package/dest/proving_broker/proving_agent.d.ts +3 -3
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +83 -47
- package/dest/proving_broker/proving_broker.d.ts +11 -2
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +34 -22
- package/dest/proving_broker/proving_broker_database/memory.js +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +9 -8
- package/dest/proving_broker/proving_job_controller.d.ts +7 -8
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +89 -61
- package/dest/proving_broker/rpc.d.ts +3 -5
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -4
- package/dest/test/mock_proof_store.d.ts +9 -0
- package/dest/test/mock_proof_store.d.ts.map +1 -0
- package/dest/test/mock_proof_store.js +10 -0
- package/dest/test/mock_prover.d.ts +23 -16
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +38 -20
- package/package.json +29 -29
- package/src/block-factory/index.ts +1 -0
- package/src/block-factory/light.ts +140 -0
- package/src/config.ts +24 -8
- package/src/mocks/fixtures.ts +43 -15
- package/src/mocks/test_context.ts +201 -75
- package/src/orchestrator/block-building-helpers.ts +247 -243
- package/src/orchestrator/block-proving-state.ts +247 -231
- package/src/orchestrator/checkpoint-proving-state.ts +299 -0
- package/src/orchestrator/epoch-proving-state.ts +187 -111
- package/src/orchestrator/orchestrator.ts +590 -289
- package/src/orchestrator/orchestrator_metrics.ts +20 -1
- package/src/orchestrator/tx-proving-state.ts +60 -61
- package/src/prover-client/prover-client.ts +16 -14
- package/src/prover-client/server-epoch-prover.ts +40 -21
- package/src/proving_broker/broker_prover_facade.ts +200 -113
- package/src/proving_broker/config.ts +17 -6
- package/src/proving_broker/factory.ts +2 -1
- package/src/proving_broker/fixtures.ts +1 -1
- package/src/proving_broker/proof_store/factory.ts +1 -1
- package/src/proving_broker/proof_store/gcs_proof_store.ts +5 -1
- package/src/proving_broker/proof_store/index.ts +1 -0
- package/src/proving_broker/proof_store/inline_proof_store.ts +1 -1
- package/src/proving_broker/proving_agent.ts +89 -47
- package/src/proving_broker/proving_broker.ts +51 -32
- package/src/proving_broker/proving_broker_database/memory.ts +1 -1
- package/src/proving_broker/proving_broker_database/persisted.ts +9 -8
- package/src/proving_broker/proving_job_controller.ts +92 -81
- package/src/proving_broker/rpc.ts +1 -6
- package/src/test/mock_proof_store.ts +14 -0
- package/src/test/mock_prover.ts +164 -60
- package/dest/bin/get-proof-inputs.d.ts +0 -2
- package/dest/bin/get-proof-inputs.d.ts.map +0 -1
- package/dest/bin/get-proof-inputs.js +0 -51
- package/dest/block_builder/index.d.ts +0 -6
- package/dest/block_builder/index.d.ts.map +0 -1
- package/dest/block_builder/light.d.ts +0 -33
- package/dest/block_builder/light.d.ts.map +0 -1
- package/dest/block_builder/light.js +0 -82
- package/src/bin/get-proof-inputs.ts +0 -59
- package/src/block_builder/index.ts +0 -6
- package/src/block_builder/light.ts +0 -101
- /package/dest/{block_builder → block-factory}/index.js +0 -0
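
The biggest change in this release is the proving orchestrator: an epoch is no longer a flat list of blocks but a hierarchy of checkpoints, each containing blocks, and the tube circuit is replaced by the public "chonk verifier" plus per-level checkpoint rollups. The sketch below is a hypothetical driver showing the call order implied by the new method signatures in the diff that follows; the `EpochInput`/`CheckpointInput`/`BlockInput` shapes are illustrative placeholders, not the package's actual types.

    // Hypothetical usage sketch for the new epoch -> checkpoint -> block flow.
    // Method names and ordering come from the new signatures visible in the diff below;
    // the input interfaces are assumptions made for illustration only.
    interface BlockInput { number: number; timestamp: bigint; txs: any[] }
    interface CheckpointInput {
      constants: any;
      l1ToL2Messages: any[];
      totalNumBlobFields: number;
      previousBlockHeader: any;
      blocks: BlockInput[];
    }
    interface EpochInput { epochNumber: number; finalBlobBatchingChallenges: any; checkpoints: CheckpointInput[] }

    async function proveEpoch(orchestrator: any, epoch: EpochInput) {
      // One epoch now holds N checkpoints rather than N blocks.
      orchestrator.startNewEpoch(epoch.epochNumber, epoch.checkpoints.length, epoch.finalBlobBatchingChallenges);
      for (const [index, checkpoint] of epoch.checkpoints.entries()) {
        // Each checkpoint forks world state and inserts its L1-to-L2 messages up front.
        await orchestrator.startNewCheckpoint(
          index,
          checkpoint.constants,
          checkpoint.l1ToL2Messages,
          checkpoint.blocks.length,
          checkpoint.totalNumBlobFields,
          checkpoint.previousBlockHeader,
        );
        for (const block of checkpoint.blocks) {
          // A block is declared with its tx count, filled with one addTxs call, then completed.
          await orchestrator.startNewBlock(block.number, block.timestamp, block.txs.length);
          await orchestrator.addTxs(block.txs);
          await orchestrator.setBlockCompleted(block.number);
        }
      }
      // Resolves once the root rollup proof is ready and the batched blob is finalized.
      return orchestrator.finalizeEpoch();
    }

The hunks below are from the compiled orchestrator, package/dest/orchestrator/orchestrator.js.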
@@ -4,8 +4,8 @@ function _ts_decorate(decorators, target, key, desc) {
 else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
 return c > 3 && r && Object.defineProperty(target, key, r), r;
 }
-import {
-import { padArrayEnd
+import { L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+import { padArrayEnd } from '@aztec/foundation/collection';
 import { AbortError } from '@aztec/foundation/error';
 import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
@@ -13,17 +13,13 @@ import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
-import {
-import {
-import {
-import { makeEmptyRecursiveProof } from '@aztec/stdlib/proofs';
-import { EmptyBlockRootRollupInputs, PrivateBaseRollupInputs, SingleTxBlockRootRollupInputs, TubeInputs } from '@aztec/stdlib/rollup';
+import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
+import { createBlockEndMarker } from '@aztec/stdlib/block';
+import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { toNumBlobFields } from '@aztec/stdlib/tx';
-import { VerificationKeyData } from '@aztec/stdlib/vks';
 import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
 import { inspect } from 'util';
-import {
+import { buildBlockHeaderFromTxs, buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
 import { EpochProvingState } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -48,7 +44,7 @@ const logger = createLogger('prover-client:orchestrator');
 provingPromise;
 metrics;
 dbs;
-constructor(dbProvider, prover, proverId
+constructor(dbProvider, prover, proverId, telemetryClient = getTelemetryClient()){
 this.dbProvider = dbProvider;
 this.prover = prover;
 this.proverId = proverId;
@@ -68,67 +64,114 @@ const logger = createLogger('prover-client:orchestrator');
 this.cancel();
 return Promise.resolve();
 }
-startNewEpoch(epochNumber,
+startNewEpoch(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges) {
+if (this.provingState?.verifyState()) {
+throw new Error(`Cannot start epoch ${epochNumber} when epoch ${this.provingState.epochNumber} is still being processed.`);
+}
 const { promise: _promise, resolve, reject } = promiseWithResolvers();
 const promise = _promise.catch((reason)=>({
 status: 'failure',
 reason
 }));
-
-
-}
-logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`);
-this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, resolve, reject);
+logger.info(`Starting epoch ${epochNumber} with ${totalNumCheckpoints} checkpoints.`);
+this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
 this.provingPromise = promise;
 }
+async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, totalNumBlobFields, headerOfLastBlockInPreviousCheckpoint) {
+if (!this.provingState) {
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
+}
+if (!this.provingState.isAcceptingCheckpoints()) {
+throw new Error(`Epoch not accepting further checkpoints.`);
+}
+// Fork world state at the end of the immediately previous block.
+const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
+const db = await this.dbProvider.fork(lastBlockNumber);
+const firstBlockNumber = lastBlockNumber + 1;
+this.dbs.set(firstBlockNumber, db);
+// Get archive sibling path before any block in this checkpoint lands.
+const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
+// Insert all the l1 to l2 messages into the db. And get the states before and after the insertion.
+const { lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath } = await this.updateL1ToL2MessageTree(l1ToL2Messages, db);
+this.provingState.startNewCheckpoint(checkpointIndex, constants, totalNumBlocks, totalNumBlobFields, headerOfLastBlockInPreviousCheckpoint, lastArchiveSiblingPath, l1ToL2Messages, lastL1ToL2MessageTreeSnapshot, lastL1ToL2MessageSubtreeRootSiblingPath, newL1ToL2MessageTreeSnapshot, newL1ToL2MessageSubtreeRootSiblingPath);
+}
 /**
 * Starts off a new block
-* @param
-* @param
-*
-
+* @param blockNumber - The block number
+* @param timestamp - The timestamp of the block. This is only required for constructing the private inputs for the
+* block that doesn't have any txs.
+* @param totalNumTxs - The total number of txs in the block
+*/ async startNewBlock(blockNumber, timestamp, totalNumTxs) {
 if (!this.provingState) {
-throw new Error(
-}
-
-
-
-
-
-
-
-
-
-//
-
-
-
-
-
-
+throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
+}
+const checkpointProvingState = this.provingState.getCheckpointProvingStateByBlockNumber(blockNumber);
+if (!checkpointProvingState) {
+throw new Error(`Checkpoint not started. Call startNewCheckpoint first.`);
+}
+if (!checkpointProvingState.isAcceptingBlocks()) {
+throw new Error(`Checkpoint not accepting further blocks.`);
+}
+const constants = checkpointProvingState.constants;
+logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber.toNumber()}.`);
+// Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
+if (!this.dbs.has(blockNumber)) {
+// Fork world state at the end of the immediately previous block
+const db = await this.dbProvider.fork(blockNumber - 1);
+this.dbs.set(blockNumber, db);
+}
+const db = this.dbs.get(blockNumber);
+// Get archive snapshot and sibling path before any txs in this block lands.
+const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
+const blockProvingState = await checkpointProvingState.startNewBlock(blockNumber, timestamp, totalNumTxs, lastArchiveTreeSnapshot, lastArchiveSiblingPath);
+// Enqueue base parity circuits for the first block in the checkpoint.
+if (blockProvingState.index === 0) {
+for(let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++){
+this.enqueueBaseParityCircuit(checkpointProvingState, blockProvingState, i);
+}
+}
+// Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
+// We need to set its end sponge blob here, which will become the start sponge blob for the next block.
+if (totalNumTxs === 0) {
+const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
+await endSpongeBlob.absorb([
+createBlockEndMarker(0)
+]);
+blockProvingState.setEndSpongeBlob(endSpongeBlob);
+// And also try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 }
 /**
 * The interface to add simulated transactions to the scheduler. This can only be called once per block.
 * @param txs - The transactions to be proven
 */ async addTxs(txs) {
+if (!this.provingState) {
+throw new Error(`Empty epoch proving state. Call startNewEpoch before adding txs.`);
+}
 if (!txs.length) {
 // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
 // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
 logger.warn(`Provided no txs to orchestrator addTxs.`);
 return;
 }
-const blockNumber = txs[0].
-const provingState = this.provingState
+const blockNumber = txs[0].globalVariables.blockNumber;
+const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber);
 if (!provingState) {
-throw new Error(`
+throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
+}
+if (provingState.totalNumTxs !== txs.length) {
+throw new Error(`Block ${blockNumber} should be filled with ${provingState.totalNumTxs} txs. Received ${txs.length} txs.`);
 }
-if (provingState.
+if (!provingState.isAcceptingTxs()) {
 throw new Error(`Block ${blockNumber} has been initialized with transactions.`);
 }
-
-
-
+logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
+const db = this.dbs.get(blockNumber);
+const lastArchive = provingState.lastArchiveTreeSnapshot;
+const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
+const spongeBlobState = provingState.getStartSpongeBlob().clone();
 for (const tx of txs){
 try {
 if (!provingState.verifyState()) {
@@ -136,13 +179,21 @@ const logger = createLogger('prover-client:orchestrator');
 }
 validateTx(tx);
 logger.info(`Received transaction: ${tx.hash}`);
-const
-const
+const startSpongeBlob = spongeBlobState.clone();
+const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db);
+if (!provingState.verifyState()) {
+throw new Error(`Unable to add transaction, preparing base inputs failed`);
+}
+await spongeBlobState.absorb(tx.txEffect.toBlobFields());
+const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
 const txIndex = provingState.addNewTx(txProvingState);
-this.getOrEnqueueTube(provingState, txIndex);
 if (txProvingState.requireAvmProof) {
+this.getOrEnqueueChonkVerifier(provingState, txIndex);
 logger.debug(`Enqueueing public VM for tx ${txIndex}`);
 this.enqueueVM(provingState, txIndex);
+} else {
+logger.debug(`Enqueueing base rollup for private-only tx ${txIndex}`);
+this.enqueueBaseRollup(provingState, txIndex);
 }
 } catch (err) {
 throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
@@ -150,22 +201,32 @@ const logger = createLogger('prover-client:orchestrator');
 });
 }
 }
+await spongeBlobState.absorb([
+createBlockEndMarker(txs.length)
+]);
+provingState.setEndSpongeBlob(spongeBlobState);
+// Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+await this.provingState.setBlobAccumulators();
 }
 /**
-* Kickstarts
-* Note that if the
-*/
+* Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+* Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
+*/ startChonkVerifierCircuits(txs) {
 if (!this.provingState?.verifyState()) {
-throw new Error(`
+throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
 }
-
-
-const
+const publicTxs = txs.filter((tx)=>tx.data.forPublic);
+for (const tx of publicTxs){
+const txHash = tx.getTxHash().toString();
+const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
 const tubeProof = promiseWithResolvers();
-logger.debug(`Starting
-this.
-
+logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+this.doEnqueueChonkVerifier(txHash, privateInputs, (proof)=>{
+tubeProof.resolve(proof);
+});
+this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
 }
+return Promise.resolve();
 }
 /**
 * Marks the block as completed.
@@ -175,55 +236,82 @@ const logger = createLogger('prover-client:orchestrator');
 if (!provingState) {
 throw new Error(`Block proving state for ${blockNumber} not found`);
 }
-if
-
-
-
+// Abort with specific error for the block if there's one.
+const error = provingState.getError();
+if (error) {
+throw new Error(`Block proving failed: ${error}`);
 }
+// Abort if the proving state is not valid due to errors occurred elsewhere.
 if (!provingState.verifyState()) {
-throw new Error(`
+throw new Error(`Invalid proving state when completing block ${blockNumber}.`);
+}
+if (provingState.isAcceptingTxs()) {
+throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
 }
 // And build the block header
 logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-await this.
-
-
-
-
-
-const
-
-throw new Error(`Block at index ${index} not available`);
-}
-return block;
-}
-async buildBlock(provingState, expectedHeader) {
-// Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
-const txs = provingState.allTxs.map((a)=>a.processedTx);
+const header = await this.buildL2BlockHeader(provingState, expectedHeader);
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+return header;
+}
+async buildL2BlockHeader(provingState, expectedHeader) {
+// Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
+const txs = provingState.getProcessedTxs();
+const startSpongeBlob = provingState.getStartSpongeBlob();
 // Get db for this block
 const db = this.dbs.get(provingState.blockNumber);
 // Given we've applied every change from this block, now assemble the block header
 // and update the archive tree, so we're ready to start processing the next block
-const
+const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);
 if (expectedHeader && !header.equals(expectedHeader)) {
 logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
 throw new Error('Block header mismatch');
 }
 logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
 await db.updateArchive(header);
-
-
-const l2Block = new L2Block(newArchive, header, body);
-await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
-logger.verbose(`Orchestrator finalised block ${l2Block.number}`);
-provingState.block = l2Block;
+provingState.setBuiltBlockHeader(header);
+return header;
 }
 // Flagged as protected to disable in certain unit tests
-async verifyBuiltBlockAgainstSyncedState(
-const
+async verifyBuiltBlockAgainstSyncedState(provingState) {
+const builtBlockHeader = provingState.getBuiltBlockHeader();
+if (!builtBlockHeader) {
+logger.debug('Block header not built yet, skipping header check.');
+return;
+}
+const output = provingState.getBlockRootRollupOutput();
+if (!output) {
+logger.debug('Block root rollup proof not built yet, skipping header check.');
+return;
+}
+const header = await buildHeaderFromCircuitOutputs(output);
+if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
+logger.error(`Block header mismatch.\nCircuit: ${inspect(header)}\nComputed: ${inspect(builtBlockHeader)}`);
+provingState.reject(`Block header hash mismatch.`);
+return;
+}
+// Get db for this block
+const blockNumber = provingState.blockNumber;
+const db = this.dbs.get(blockNumber);
+const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
 if (!syncedArchive.equals(newArchive)) {
-
+logger.error(`Archive tree mismatch for block ${blockNumber}: world state synced to ${inspect(syncedArchive)} but built ${inspect(newArchive)}`);
+provingState.reject(`Archive tree mismatch.`);
+return;
 }
+const circuitArchive = output.newArchive;
+if (!newArchive.equals(circuitArchive)) {
+logger.error(`New archive mismatch.\nCircuit: ${output.newArchive}\nComputed: ${newArchive}`);
+provingState.reject(`New archive mismatch.`);
+return;
+}
+// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
+// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
+// but have to make sure it only runs once all operations are completed, otherwise some function here
+// will attempt to access the fork after it was closed.
+logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
 }
 /**
 * Cancel any further proving
@@ -235,14 +323,15 @@ const logger = createLogger('prover-client:orchestrator');
 }
 /**
 * Returns the proof for the current epoch.
-*/ async
+*/ async finalizeEpoch() {
 if (!this.provingState || !this.provingPromise) {
-throw new Error(`Invalid proving state, an epoch must be proven before it can be
+throw new Error(`Invalid proving state, an epoch must be proven before it can be finalized`);
 }
 const result = await this.provingPromise;
 if (result.status === 'failure') {
 throw new Error(`Epoch proving failed: ${result.reason}`);
 }
+await this.provingState.finalizeBatchedBlob();
 const epochProofResult = this.provingState.getEpochProofResult();
 pushTestData('epochProofResult', {
 proof: epochProofResult.proof.toString(),
@@ -251,24 +340,12 @@ const logger = createLogger('prover-client:orchestrator');
 return epochProofResult;
 }
 /**
-* Starts the proving process for the given transaction and adds it to our state
-* @param tx - The transaction whose proving we wish to commence
-* @param provingState - The proving state being worked on
-*/ async prepareTransaction(tx, provingState) {
-const txInputs = await this.prepareBaseRollupInputs(provingState, tx);
-if (!txInputs) {
-// This should not be possible
-throw new Error(`Unable to add transaction, preparing base inputs failed`);
-}
-return txInputs;
-}
-/**
 * Enqueue a job to be scheduled
 * @param provingState - The proving state object being operated on
 * @param jobType - The type of job to be queued
 * @param job - The actual job, returns a promise notifying of the job's completion
 */ deferredProving(provingState, request, callback) {
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`Not enqueuing job, state no longer valid`);
 return;
 }
@@ -283,7 +360,7 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 const result = await request(controller.signal);
-if (!provingState
+if (!provingState.verifyState()) {
 logger.debug(`State no longer valid, discarding result`);
 return;
 }
@@ -311,29 +388,26 @@ const logger = createLogger('prover-client:orchestrator');
 // let the callstack unwind before adding the job to the queue
 setImmediate(()=>void safeJob());
 }
-async
+async updateL1ToL2MessageTree(l1ToL2Messages, db) {
 const l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 'Too many L1 to L2 messages');
-const
-const
+const lastL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const lastL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 // Update the local trees to include the new l1 to l2 messages
 await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
-const
+const newL1ToL2MessageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+const newL1ToL2MessageSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db), L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 return {
-
-
-
+lastL1ToL2MessageTreeSnapshot,
+lastL1ToL2MessageSubtreeRootSiblingPath,
+newL1ToL2MessageTreeSnapshot,
+newL1ToL2MessageSubtreeRootSiblingPath
 };
 }
 // Updates the merkle trees for a transaction. The first enqueued job for a transaction
-async prepareBaseRollupInputs(
-if (!provingState.verifyState() || !provingState.spongeBlobState) {
-logger.debug('Not preparing base rollup inputs, state invalid');
-return;
-}
-const db = this.dbs.get(provingState.blockNumber);
+async prepareBaseRollupInputs(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, db) {
 // We build the base rollup inputs using a mock proof and verification key.
-// These will be overwritten later once we have proven the
-const [ms, hints] = await elapsed(
+// These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
+const [ms, hints] = await elapsed(insertSideEffectsAndBuildBaseRollupHints(tx, lastArchive, newL1ToL2MessageTreeSnapshot, startSpongeBlob, this.proverId.toField(), db));
 this.metrics.recordBaseRollupInputs(ms);
 const promises = [
 MerkleTreeId.NOTE_HASH_TREE,
@@ -349,10 +423,6 @@ const logger = createLogger('prover-client:orchestrator');
 obj.key,
 obj.value
 ]));
-if (!provingState.verifyState()) {
-logger.debug(`Discarding proving job, state no longer valid`);
-return;
-}
 return [
 hints,
 treeSnapshots
@@ -365,68 +435,71 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running base rollup, state invalid');
 return;
 }
+if (!provingState.tryStartProvingBase(txIndex)) {
+logger.debug(`Base rollup for tx ${txIndex} already started.`);
+return;
+}
 const txProvingState = provingState.getTxProvingState(txIndex);
 const { processedTx } = txProvingState;
 const { rollupType, inputs } = txProvingState.getBaseRollupTypeAndInputs();
 logger.debug(`Enqueuing deferred proving base rollup for ${processedTx.hash.toString()}`);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, `ProvingOrchestrator.prover.${inputs instanceof PrivateTxBaseRollupPrivateInputs ? 'getPrivateTxBaseRollupProof' : 'getPublicTxBaseRollupProof'}`, {
 [Attributes.TX_HASH]: processedTx.hash.toString(),
-[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
+if (inputs instanceof PrivateTxBaseRollupPrivateInputs) {
+return this.prover.getPrivateTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 } else {
-return this.prover.
+return this.prover.getPublicTxBaseRollupProof(inputs, signal, provingState.epochNumber);
 }
-}),
+}), (result)=>{
 logger.debug(`Completed proof for ${rollupType} for tx ${processedTx.hash.toString()}`);
-validatePartialState(result.inputs.
+validatePartialState(result.inputs.endTreeSnapshots, txProvingState.treeSnapshots);
 const leafLocation = provingState.setBaseRollupProof(txIndex, result);
 if (provingState.totalNumTxs === 1) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
-
+this.checkAndEnqueueNextMergeRollup(provingState, leafLocation);
 }
 });
 }
-// Enqueues the
-// Once completed, will enqueue the
-
+// Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
+// Once completed, will enqueue the the public tx base rollup.
+getOrEnqueueChonkVerifier(provingState, txIndex) {
 if (!provingState.verifyState()) {
-logger.debug('Not running
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
 const txProvingState = provingState.getTxProvingState(txIndex);
 const txHash = txProvingState.processedTx.hash.toString();
+NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
 const handleResult = (result)=>{
-logger.debug(`Got
+logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, {
 txHash
 });
-txProvingState.
-this.provingState?.
-this.
+txProvingState.setPublicChonkVerifierProof(result);
+this.provingState?.cachedChonkVerifierProofs.delete(txHash);
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 };
-if (this.provingState?.
-logger.debug(`
+if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, {
 txHash
 });
-void this.provingState.
+void this.provingState.cachedChonkVerifierProofs.get(txHash).then(handleResult);
 return;
 }
-logger.debug(`Enqueuing
-this.
+logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
 }
-
-if (!provingState
-logger.debug('Not running
+doEnqueueChonkVerifier(txHash, inputs, handler, provingState = this.provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running chonk verifier circuit, state invalid');
 return;
 }
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getPublicChonkVerifierProof', {
 [Attributes.TX_HASH]: txHash,
-[Attributes.
-
-}, (signal)=>this.prover.getTubeProof(inputs, signal, this.provingState.epochNumber)), handler);
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public'
+}, (signal)=>this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber)), handler);
 }
 // Executes the merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
 // Enqueues the next level of merge if all inputs are available
@@ -435,65 +508,73 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running merge rollup. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingMerge(location)) {
+logger.debug('Merge rollup already started.');
+return;
+}
 const inputs = provingState.getMergeRollupInputs(location);
-this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.
-[Attributes.
-
-}, (signal)=>this.prover.getMergeRollupProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getTxMergeRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-tx-merge'
+}, (signal)=>this.prover.getTxMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setMergeRollupProof(location, result);
-
+this.checkAndEnqueueNextMergeRollup(provingState, location);
 });
 }
 // Executes the block root rollup circuit
-
+enqueueBlockRootRollup(provingState) {
 if (!provingState.verifyState()) {
 logger.debug('Not running block root rollup, state no longer valid');
 return;
 }
-provingState.
-
-
+if (!provingState.tryStartProvingBlockRoot()) {
+logger.debug('Block root rollup already started.');
+return;
+}
+const { rollupType, inputs } = provingState.getBlockRootRollupTypeAndInputs();
+logger.debug(`Enqueuing ${rollupType} for block ${provingState.blockNumber}.`);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockRootRollupProof', {
-[Attributes.PROTOCOL_CIRCUIT_TYPE]: 'server',
 [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
 }, (signal)=>{
-if (inputs instanceof
-return this.prover.
-} else if (inputs instanceof
-return this.prover.
+if (inputs instanceof BlockRootFirstRollupPrivateInputs) {
+return this.prover.getBlockRootFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootSingleTxFirstRollupPrivateInputs) {
+return this.prover.getBlockRootSingleTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootEmptyTxFirstRollupPrivateInputs) {
+return this.prover.getBlockRootEmptyTxFirstRollupProof(inputs, signal, provingState.epochNumber);
+} else if (inputs instanceof BlockRootSingleTxRollupPrivateInputs) {
+return this.prover.getBlockRootSingleTxRollupProof(inputs, signal, provingState.epochNumber);
 } else {
 return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
 }
 }), async (result)=>{
-
-
-
-
-
-
-
-// validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
-const epochProvingState = this.provingState;
-const leafLocation = epochProvingState.setBlockRootRollupProof(provingState.index, result);
-if (epochProvingState.totalNumBlocks === 1) {
-await this.enqueueEpochPadding(epochProvingState);
+// If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
+logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
+const leafLocation = provingState.setBlockRootRollupProof(result);
+const checkpointProvingState = provingState.parentCheckpoint;
+if (checkpointProvingState.totalNumBlocks === 1) {
+this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
 } else {
-this.checkAndEnqueueNextBlockMergeRollup(
+this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
 }
 });
 }
 // Executes the base parity circuit and stores the intermediate state for the root parity circuit
 // Enqueues the root parity circuit if all inputs are available
-enqueueBaseParityCircuit(
+enqueueBaseParityCircuit(checkpointProvingState, provingState, baseParityIndex) {
 if (!provingState.verifyState()) {
 logger.debug('Not running base parity. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingBaseParity(baseParityIndex)) {
+logger.warn(`Base parity ${baseParityIndex} already started.`);
+return;
+}
+const inputs = checkpointProvingState.getBaseParityInputs(baseParityIndex);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBaseParityProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'base-parity'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-base'
 }, (signal)=>this.prover.getBaseParityProof(inputs, signal, provingState.epochNumber)), (provingOutput)=>{
-provingState.setBaseParityProof(
+provingState.setBaseParityProof(baseParityIndex, provingOutput);
 this.checkAndEnqueueRootParityCircuit(provingState);
 });
 }
@@ -510,13 +591,16 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running root parity. State no longer valid.');
 return;
 }
-
+if (!provingState.tryStartProvingRootParity()) {
+logger.debug('Root parity already started.');
+return;
+}
+const inputs = provingState.getParityRootInputs();
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootParityProof', {
-[Attributes.
-
-}, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), async (result)=>{
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'parity-root'
+}, (signal)=>this.prover.getRootParityProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setRootParityProof(result);
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 });
 }
 // Executes the block merge rollup circuit and stored the output as intermediate state for the parent merge/block root circuit
@@ -526,28 +610,90 @@ const logger = createLogger('prover-client:orchestrator');
 logger.debug('Not running block merge rollup. State no longer valid.');
 return;
 }
+if (!provingState.tryStartProvingBlockMerge(location)) {
+logger.debug('Block merge rollup already started.');
+return;
+}
 const inputs = provingState.getBlockMergeRollupInputs(location);
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getBlockMergeRollupProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'block-merge-rollup'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-block-merge'
 }, (signal)=>this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 provingState.setBlockMergeRollupProof(location, result);
 this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
 });
 }
-
+enqueueCheckpointRootRollup(provingState) {
+if (!provingState.verifyState()) {
+logger.debug('Not running checkpoint root rollup. State no longer valid.');
+return;
+}
+if (!provingState.tryStartProvingCheckpointRoot()) {
+logger.debug('Checkpoint root rollup already started.');
+return;
+}
+const rollupType = provingState.getCheckpointRootRollupType();
+logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
+const inputs = provingState.getCheckpointRootRollupInputs();
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointRootRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType
+}, (signal)=>{
+if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
+return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
+} else {
+return this.prover.getCheckpointRootRollupProof(inputs, signal, provingState.epochNumber);
+}
+}), (result)=>{
+const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator().toBlobAccumulator();
+const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
+if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
+logger.error(`Blob accumulator state mismatch.\nCircuit: ${inspect(circuitEndBlobAccumulatorState)}\nComputed: ${inspect(computedEndBlobAccumulatorState)}`);
+provingState.reject(`Blob accumulator state mismatch.`);
+return;
+}
+logger.debug(`Completed ${rollupType} proof for checkpoint ${provingState.index}.`);
+const leafLocation = provingState.setCheckpointRootRollupProof(result);
+const epochProvingState = provingState.parentEpoch;
+if (epochProvingState.totalNumCheckpoints === 1) {
+this.enqueueEpochPadding(epochProvingState);
+} else {
+this.checkAndEnqueueNextCheckpointMergeRollup(epochProvingState, leafLocation);
+}
+});
+}
+enqueueCheckpointMergeRollup(provingState, location) {
+if (!provingState.verifyState()) {
+logger.debug('Not running checkpoint merge rollup. State no longer valid.');
+return;
+}
+if (!provingState.tryStartProvingCheckpointMerge(location)) {
+logger.debug('Checkpoint merge rollup already started.');
+return;
+}
+const inputs = provingState.getCheckpointMergeRollupInputs(location);
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointMergeRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-merge'
+}, (signal)=>this.prover.getCheckpointMergeRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+logger.debug('Completed proof for checkpoint merge rollup.');
+provingState.setCheckpointMergeRollupProof(location, result);
+this.checkAndEnqueueNextCheckpointMergeRollup(provingState, location);
+});
+}
+enqueueEpochPadding(provingState) {
 if (!provingState.verifyState()) {
 logger.debug('Not running epoch padding. State no longer valid.');
 return;
 }
-
-
-
-
-
-
-
-
+if (!provingState.tryStartProvingPaddingCheckpoint()) {
+logger.debug('Padding checkpoint already started.');
+return;
+}
+logger.debug('Padding epoch proof with a padding block root proof.');
+const inputs = provingState.getPaddingCheckpointInputs();
+this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getCheckpointPaddingRollupProof', {
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-checkpoint-padding'
+}, (signal)=>this.prover.getCheckpointPaddingRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
+logger.debug('Completed proof for padding checkpoint.');
+provingState.setCheckpointPaddingProof(result);
 this.checkAndEnqueueRootRollup(provingState);
 });
 }
@@ -558,10 +704,9 @@ const logger = createLogger('prover-client:orchestrator');
 return;
 }
 logger.debug(`Preparing root rollup`);
-const inputs = provingState.getRootRollupInputs(
+const inputs = provingState.getRootRollupInputs();
 this.deferredProving(provingState, wrapCallbackInSpan(this.tracer, 'ProvingOrchestrator.prover.getRootRollupProof', {
-[Attributes.
-[Attributes.PROTOCOL_CIRCUIT_NAME]: 'root-rollup'
+[Attributes.PROTOCOL_CIRCUIT_NAME]: 'rollup-root'
 }, (signal)=>this.prover.getRootRollupProof(inputs, signal, provingState.epochNumber)), (result)=>{
 logger.verbose(`Orchestrator completed root rollup for epoch ${provingState.epochNumber}`);
 provingState.setRootRollupProof(result);
@@ -570,34 +715,23 @@ const logger = createLogger('prover-client:orchestrator');
 });
 });
 }
-
+checkAndEnqueueNextMergeRollup(provingState, currentLocation) {
 if (!provingState.isReadyForMergeRollup(currentLocation)) {
 return;
 }
 const parentLocation = provingState.getParentLocation(currentLocation);
 if (parentLocation.level === 0) {
-
+this.checkAndEnqueueBlockRootRollup(provingState);
 } else {
 this.enqueueMergeRollup(provingState, parentLocation);
 }
 }
-
+checkAndEnqueueBlockRootRollup(provingState) {
 if (!provingState.isReadyForBlockRootRollup()) {
-logger.debug('Not ready for root rollup');
-return;
-}
-if (provingState.blockRootRollupStarted) {
-logger.debug('Block root rollup already started');
+logger.debug('Not ready for block root rollup');
 return;
 }
-
-// TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
-// is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
-// but have to make sure it only runs once all operations are completed, otherwise some function here
-// will attempt to access the fork after it was closed.
-logger.debug(`Cleaning up world state fork for ${blockNumber}`);
-void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
-await this.enqueueBlockRootRollup(provingState);
+this.enqueueBlockRootRollup(provingState);
 }
 checkAndEnqueueNextBlockMergeRollup(provingState, currentLocation) {
 if (!provingState.isReadyForBlockMerge(currentLocation)) {
@@ -605,11 +739,28 @@ const logger = createLogger('prover-client:orchestrator');
 }
 const parentLocation = provingState.getParentLocation(currentLocation);
 if (parentLocation.level === 0) {
-this.
+this.checkAndEnqueueCheckpointRootRollup(provingState);
 } else {
 this.enqueueBlockMergeRollup(provingState, parentLocation);
 }
 }
+checkAndEnqueueCheckpointRootRollup(provingState) {
+if (!provingState.isReadyForCheckpointRoot()) {
+return;
+}
+this.enqueueCheckpointRootRollup(provingState);
+}
+checkAndEnqueueNextCheckpointMergeRollup(provingState, currentLocation) {
+if (!provingState.isReadyForCheckpointMerge(currentLocation)) {
+return;
+}
+const parentLocation = provingState.getParentLocation(currentLocation);
+if (parentLocation.level === 0) {
+this.checkAndEnqueueRootRollup(provingState);
+} else {
+this.enqueueCheckpointMergeRollup(provingState, parentLocation);
+}
+}
 checkAndEnqueueRootRollup(provingState) {
 if (!provingState.isReadyForRootRollup()) {
 logger.debug('Not ready for root rollup');
@@ -635,39 +786,46 @@ const logger = createLogger('prover-client:orchestrator');
 }, async (signal)=>{
 const inputs = txProvingState.getAvmInputs();
 try {
-
+// TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
+// just keep the next line but removing the second argument (false).
+return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
 } catch (err) {
 if (process.env.AVM_PROVING_STRICT) {
 logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
 throw err;
 } else {
-logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off.
-
-
-
-
+logger.warn(`Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
+AVM inputs and carrying on. ${inspect(err)}.`);
+try {
+this.metrics.incAvmFallback();
+const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
+return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
+} catch (err) {
+logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
+throw err;
+}
 }
 }
 });
 this.deferredProving(provingState, doAvmProving, (proofAndVk)=>{
 logger.debug(`Proven VM for tx index: ${txIndex}`);
 txProvingState.setAvmProof(proofAndVk);
-this.
+this.checkAndEnqueueBaseRollup(provingState, txIndex);
 });
 }
-
+checkAndEnqueueBaseRollup(provingState, txIndex) {
 const txProvingState = provingState.getTxProvingState(txIndex);
 if (!txProvingState.ready()) {
 return;
 }
-// We must have completed all proving (
+// We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
 logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
 this.enqueueBaseRollup(provingState, txIndex);
 }
 }
 _ts_decorate([
-trackSpan('ProvingOrchestrator.startNewBlock', (
-[Attributes.BLOCK_NUMBER]:
+trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
+[Attributes.BLOCK_NUMBER]: blockNumber
 }))
 ], ProvingOrchestrator.prototype, "startNewBlock", null);
 _ts_decorate([
@@ -676,15 +834,15 @@ _ts_decorate([
 }))
 ], ProvingOrchestrator.prototype, "addTxs", null);
 _ts_decorate([
-trackSpan('ProvingOrchestrator.
-], ProvingOrchestrator.prototype, "
+trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+], ProvingOrchestrator.prototype, "startChonkVerifierCircuits", null);
 _ts_decorate([
 trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber)=>({
 [Attributes.BLOCK_NUMBER]: blockNumber
 }))
 ], ProvingOrchestrator.prototype, "setBlockCompleted", null);
 _ts_decorate([
-trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (
+trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (tx)=>({
 [Attributes.TX_HASH]: tx.hash.toString()
 }))
 ], ProvingOrchestrator.prototype, "prepareBaseRollupInputs", null);