@aztec/prover-client 0.0.1-commit.9b94fc1 → 0.0.1-commit.bf2612ae
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +30 -14
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
- package/dest/light/lightweight_checkpoint_builder.js +109 -22
- package/dest/mocks/fixtures.js +4 -4
- package/dest/mocks/test_context.d.ts +5 -3
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +25 -11
- package/dest/orchestrator/block-building-helpers.d.ts +4 -4
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +5 -4
- package/dest/orchestrator/block-proving-state.d.ts +5 -4
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +1 -1
- package/dest/orchestrator/checkpoint-proving-state.d.ts +20 -6
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +40 -5
- package/dest/orchestrator/epoch-proving-state.d.ts +8 -7
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +36 -2
- package/dest/orchestrator/orchestrator.d.ts +21 -8
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +479 -77
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -15
- package/dest/orchestrator/tx-proving-state.d.ts +6 -5
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +8 -8
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +2 -2
- package/dest/prover-client/server-epoch-prover.d.ts +5 -5
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.d.ts +4 -3
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +3 -10
- package/dest/proving_broker/config.d.ts +6 -2
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +8 -2
- package/dest/proving_broker/fixtures.js +1 -1
- package/dest/proving_broker/proving_agent.d.ts +3 -8
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +1 -16
- package/dest/proving_broker/proving_broker.d.ts +1 -1
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +4 -10
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +389 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +11 -35
- package/dest/proving_broker/proving_job_controller.d.ts +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +2 -3
- package/dest/test/mock_proof_store.d.ts +3 -3
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +2 -2
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +4 -4
- package/package.json +20 -19
- package/src/config.ts +1 -1
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +161 -29
- package/src/mocks/fixtures.ts +4 -4
- package/src/mocks/test_context.ts +25 -10
- package/src/orchestrator/block-building-helpers.ts +5 -4
- package/src/orchestrator/block-proving-state.ts +3 -2
- package/src/orchestrator/checkpoint-proving-state.ts +56 -8
- package/src/orchestrator/epoch-proving-state.ts +63 -12
- package/src/orchestrator/orchestrator.ts +76 -60
- package/src/orchestrator/orchestrator_metrics.ts +2 -25
- package/src/orchestrator/tx-proving-state.ts +10 -14
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +11 -12
- package/src/prover-client/server-epoch-prover.ts +4 -4
- package/src/proving_broker/broker_prover_facade.ts +4 -14
- package/src/proving_broker/config.ts +10 -1
- package/src/proving_broker/fixtures.ts +1 -1
- package/src/proving_broker/proving_agent.ts +1 -17
- package/src/proving_broker/proving_broker.ts +4 -8
- package/src/proving_broker/proving_broker_database/persisted.ts +15 -1
- package/src/proving_broker/proving_broker_instrumentation.ts +10 -35
- package/src/proving_broker/proving_job_controller.ts +2 -3
- package/src/test/mock_prover.ts +2 -14
- package/dest/block-factory/index.d.ts +0 -2
- package/dest/block-factory/index.d.ts.map +0 -1
- package/dest/block-factory/index.js +0 -1
- package/dest/block-factory/light.d.ts +0 -38
- package/dest/block-factory/light.d.ts.map +0 -1
- package/dest/block-factory/light.js +0 -108
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/block-factory/index.ts +0 -1
- package/src/block-factory/light.ts +0 -137
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
package/src/orchestrator/checkpoint-proving-state.ts

@@ -11,12 +11,16 @@ import {
   type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
   type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NUM_MSGS_PER_BASE_PARITY,
+  OUT_HASH_TREE_HEIGHT,
 } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { BLS12Point
+import { BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+import { computeCheckpointOutHash } from '@aztec/stdlib/messaging';
 import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
 import {
   BlockMergeRollupPrivateInputs,
@@ -36,6 +40,11 @@ import { accumulateBlobs, buildBlobHints, toProofData } from './block-building-h
 import { BlockProvingState, type ProofState } from './block-proving-state.js';
 import type { EpochProvingState } from './epoch-proving-state.js';
 
+type OutHashHint = {
+  treeSnapshot: AppendOnlyTreeSnapshot;
+  siblingPath: Tuple<Fr, typeof OUT_HASH_TREE_HEIGHT>;
+};
+
 export class CheckpointProvingState {
   private blockProofs: UnbalancedTreeStore<
     ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
@@ -44,11 +53,16 @@ export class CheckpointProvingState {
     | ProofState<CheckpointRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
     | undefined;
   private blocks: (BlockProvingState | undefined)[] = [];
+  private previousOutHashHint: OutHashHint | undefined;
+  private outHash: Fr | undefined;
+  // The snapshot and sibling path after the checkpoint's out hash is inserted.
+  // Stored here to be retrieved for the next checkpoint when it's added.
+  private newOutHashHint: OutHashHint | undefined;
   private startBlobAccumulator: BatchedBlobAccumulator | undefined;
   private endBlobAccumulator: BatchedBlobAccumulator | undefined;
   private blobFields: Fr[] | undefined;
   private error: string | undefined;
-  public readonly firstBlockNumber:
+  public readonly firstBlockNumber: BlockNumber;
 
   constructor(
     public readonly index: number,
@@ -74,7 +88,7 @@ export class CheckpointProvingState {
     private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void,
   ) {
     this.blockProofs = new UnbalancedTreeStore(totalNumBlocks);
-    this.firstBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1;
+    this.firstBlockNumber = BlockNumber(headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1);
   }
 
   public get epochNumber(): number {
@@ -82,13 +96,13 @@ export class CheckpointProvingState {
   }
 
   public startNewBlock(
-    blockNumber:
+    blockNumber: BlockNumber,
     timestamp: UInt64,
     totalNumTxs: number,
     lastArchiveTreeSnapshot: AppendOnlyTreeSnapshot,
     lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
   ): BlockProvingState {
-    const index = blockNumber - this.firstBlockNumber;
+    const index = Number(blockNumber) - Number(this.firstBlockNumber);
     if (index >= this.totalNumBlocks) {
       throw new Error(`Unable to start a new block at index ${index}. Expected at most ${this.totalNumBlocks} blocks.`);
     }
@@ -193,6 +207,35 @@ export class CheckpointProvingState {
     return new ParityBasePrivateInputs(messages, this.constants.vkTreeRoot);
   }
 
+  public setOutHashHint(hint: OutHashHint) {
+    this.previousOutHashHint = hint;
+  }
+
+  public getOutHashHint() {
+    return this.previousOutHashHint;
+  }
+
+  public accumulateBlockOutHashes() {
+    if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
+      return;
+    }
+
+    if (!this.outHash) {
+      const messagesPerBlock = this.blocks.map(b => b!.getTxEffects().map(tx => tx.l2ToL1Msgs));
+      this.outHash = computeCheckpointOutHash(messagesPerBlock);
+    }
+
+    return this.outHash;
+  }
+
+  public setOutHashHintForNextCheckpoint(hint: OutHashHint) {
+    this.newOutHashHint = hint;
+  }
+
+  public getOutHashHintForNextCheckpoint() {
+    return this.newOutHashHint;
+  }
+
   public async accumulateBlobs(startBlobAccumulator: BatchedBlobAccumulator) {
     if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
       return;
@@ -234,6 +277,9 @@ export class CheckpointProvingState {
     if (proofs.length !== nonEmptyProofs.length) {
       throw new Error('At least one child is not ready for the checkpoint root rollup.');
     }
+    if (!this.previousOutHashHint) {
+      throw new Error('Out hash hint is not set.');
+    }
     if (!this.startBlobAccumulator) {
       throw new Error('Start blob accumulator is not set.');
     }
@@ -246,6 +292,8 @@ export class CheckpointProvingState {
     const hints = CheckpointRootRollupHints.from({
       previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
       previousArchiveSiblingPath: this.lastArchiveSiblingPath,
+      previousOutHash: this.previousOutHashHint.treeSnapshot,
+      newOutHashSiblingPath: this.previousOutHashHint.siblingPath,
       startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
       finalBlobChallenges: this.finalBlobBatchingChallenges,
       blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_CHECKPOINT),
@@ -260,8 +308,8 @@ export class CheckpointProvingState {
       : new CheckpointRootRollupPrivateInputs([left, right], hints);
   }
 
-  public getBlockProvingStateByBlockNumber(blockNumber:
-    const index = blockNumber - this.firstBlockNumber;
+  public getBlockProvingStateByBlockNumber(blockNumber: BlockNumber) {
+    const index = Number(blockNumber) - Number(this.firstBlockNumber);
     return this.blocks[index];
   }
 
@@ -271,7 +319,7 @@ export class CheckpointProvingState {
 
   public isReadyForCheckpointRoot() {
     const allChildProofsReady = this.#getChildProofsForRoot().every(p => !!p);
-    return allChildProofsReady && !!this.startBlobAccumulator;
+    return allChildProofsReady && !!this.previousOutHashHint && !!this.startBlobAccumulator;
   }
 
   public verifyState() {
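A change that recurs throughout these hunks, and again in the epoch state and orchestrator below, is that block numbers are now carried as the branded BlockNumber type from @aztec/foundation/branded-types: constructed values are wrapped in BlockNumber(...) and arithmetic is done on Number(...) conversions. A minimal, self-contained sketch of how such a brand typically works; the real definition may differ:

    // Hypothetical stand-in for BlockNumber from '@aztec/foundation/branded-types'.
    type BlockNumber = number & { readonly __brand: 'BlockNumber' };
    const BlockNumber = (value: number): BlockNumber => value as BlockNumber;

    // The brand keeps arbitrary numbers from being passed where a block number is expected;
    // arithmetic drops back to plain numbers and the result is re-branded, mirroring the diff.
    const firstBlockNumber = BlockNumber(11);
    const blockNumber = BlockNumber(13);
    const index = Number(blockNumber) - Number(firstBlockNumber); // 2
    const previousBlock = BlockNumber(Number(blockNumber) - 1); // BlockNumber 12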
package/src/orchestrator/epoch-proving-state.ts

@@ -1,14 +1,20 @@
 import { BatchedBlob, BatchedBlobAccumulator, type FinalBlobBatchingChallenges } from '@aztec/blob-lib';
-import
-  ARCHIVE_HEIGHT,
-  L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
-  NESTED_RECURSIVE_PROOF_LENGTH,
-  NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
+import {
+  type ARCHIVE_HEIGHT,
+  type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+  type NESTED_RECURSIVE_PROOF_LENGTH,
+  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
+  OUT_HASH_TREE_HEIGHT,
 } from '@aztec/constants';
-import { EpochNumber } from '@aztec/foundation/branded-types';
-import
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import type { Tuple } from '@aztec/foundation/serialize';
-import {
+import {
+  MerkleTreeCalculator,
+  type TreeNodeLocation,
+  UnbalancedTreeStore,
+  shaMerkleHash,
+} from '@aztec/foundation/trees';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
 import {
@@ -20,7 +26,7 @@ import {
   RootRollupPrivateInputs,
   type RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
-import
+import { AppendOnlyTreeSnapshot, type MerkleTreeId } from '@aztec/stdlib/trees';
 import type { BlockHeader } from '@aztec/stdlib/tx';
 
 import { toProofData } from './block-building-helpers.js';
@@ -126,13 +132,16 @@ export class EpochProvingState {
     return this.checkpoints[index];
   }
 
-  public getCheckpointProvingStateByBlockNumber(blockNumber:
+  public getCheckpointProvingStateByBlockNumber(blockNumber: BlockNumber) {
     return this.checkpoints.find(
-      c =>
+      c =>
+        c &&
+        Number(blockNumber) >= Number(c.firstBlockNumber) &&
+        Number(blockNumber) < Number(c.firstBlockNumber) + c.totalNumBlocks,
     );
   }
 
-  public getBlockProvingStateByBlockNumber(blockNumber:
+  public getBlockProvingStateByBlockNumber(blockNumber: BlockNumber) {
     return this.getCheckpointProvingStateByBlockNumber(blockNumber)?.getBlockProvingStateByBlockNumber(blockNumber);
   }
 
@@ -209,6 +218,48 @@ export class EpochProvingState {
     this.checkpointPaddingProof = { provingOutput };
   }
 
+  public async accumulateCheckpointOutHashes() {
+    const treeCalculator = await MerkleTreeCalculator.create(OUT_HASH_TREE_HEIGHT, undefined, (left, right) =>
+      Promise.resolve(shaMerkleHash(left, right)),
+    );
+
+    const computeOutHashHint = async (leaves: Fr[]) => {
+      const tree = await treeCalculator.computeTree(leaves.map(l => l.toBuffer()));
+      const nextAvailableLeafIndex = leaves.length;
+      return {
+        treeSnapshot: new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.root), nextAvailableLeafIndex),
+        siblingPath: tree.getSiblingPath(nextAvailableLeafIndex).map(Fr.fromBuffer) as Tuple<
+          Fr,
+          typeof OUT_HASH_TREE_HEIGHT
+        >,
+      };
+    };
+
+    let hint = this.checkpoints[0]?.getOutHashHint();
+    const outHashes = [];
+    for (let i = 0; i < this.totalNumCheckpoints; i++) {
+      const checkpoint = this.checkpoints[i];
+      if (!checkpoint) {
+        break;
+      }
+
+      // If hints are not set yet, it must be the first checkpoint. Compute the hints with an empty tree.
+      hint ??= await computeOutHashHint([]);
+      checkpoint.setOutHashHint(hint);
+
+      // Get the out hash for this checkpoint.
+      const outHash = checkpoint.accumulateBlockOutHashes();
+      if (!outHash) {
+        break;
+      }
+      outHashes.push(outHash);
+
+      // Get or create hints for the next checkpoint.
+      hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
+      checkpoint.setOutHashHintForNextCheckpoint(hint);
+    }
+  }
+
   public async setBlobAccumulators() {
     let previousAccumulator = this.startBlobAccumulator;
     // Accumulate blobs as far as we can for this epoch.
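The OutHashHint produced by computeOutHashHint pairs the out-hash tree snapshot with the sibling path at the next free leaf, which is what the checkpoint root rollup appears to consume (previousOutHash plus newOutHashSiblingPath in the checkpoint hunks above) to insert the next checkpoint's out hash without materializing the whole tree. A rough, self-contained illustration of that kind of append; this is generic Merkle arithmetic with a pluggable pair hash, not the package's implementation:

    // Recompute the root after placing `leaf` at `leafIndex`, walking up the
    // supplied sibling path. `hash` stands in for whatever pair hash is used
    // (shaMerkleHash in the imports above).
    type PairHash = (left: Buffer, right: Buffer) => Buffer;

    function rootAfterAppend(leaf: Buffer, leafIndex: number, siblingPath: Buffer[], hash: PairHash): Buffer {
      let node = leaf;
      let index = leafIndex;
      for (const sibling of siblingPath) {
        // Even index: the node is a left child and the sibling sits on the right; odd is the mirror case.
        node = index % 2 === 0 ? hash(node, sibling) : hash(sibling, node);
        index >>= 1;
      }
      return node;
    }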
package/src/orchestrator/orchestrator.ts

@@ -6,23 +6,23 @@ import {
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
 } from '@aztec/constants';
-import { EpochNumber } from '@aztec/foundation/branded-types';
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
 import { EthAddress } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
   MerkleTreeWriteOperations,
   PublicInputsAndRecursiveProof,
+  ReadonlyWorldStateAccess,
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
@@ -73,6 +73,11 @@ import { TxProvingState } from './tx-proving-state.js';
 
 const logger = createLogger('prover-client:orchestrator');
 
+type WorldStateFork = {
+  fork: MerkleTreeWriteOperations;
+  cleanupPromise: Promise<void> | undefined;
+};
+
 /**
  * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
  * 1. Transactions are provided to the scheduler post simulation.
@@ -93,12 +98,14 @@ export class ProvingOrchestrator implements EpochProver {
 
   private provingPromise: Promise<ProvingResult> | undefined = undefined;
   private metrics: ProvingOrchestratorMetrics;
-
+  // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
+  private dbs: Map<BlockNumber, WorldStateFork> = new Map();
 
   constructor(
-    private dbProvider: ForkMerkleTreeOperations,
+    private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
     private prover: ServerCircuitProver,
     private readonly proverId: EthAddress,
+    private readonly cancelJobsOnStop: boolean = false,
     telemetryClient: TelemetryClient = getTelemetryClient(),
   ) {
     this.metrics = new ProvingOrchestratorMetrics(telemetryClient, 'ProvingOrchestrator');
@@ -112,6 +119,10 @@ export class ProvingOrchestrator implements EpochProver {
     return this.proverId;
   }
 
+  public getNumActiveForks() {
+    return this.dbs.size;
+  }
+
   public stop(): Promise<void> {
     this.cancel();
     return Promise.resolve();
@@ -142,6 +153,14 @@ export class ProvingOrchestrator implements EpochProver {
     this.provingPromise = promise;
   }
 
+  /**
+   * Starts a new checkpoint.
+   * @param checkpointIndex - The index of the checkpoint in the epoch.
+   * @param constants - The constants for this checkpoint.
+   * @param l1ToL2Messages - The set of L1 to L2 messages to be inserted at the beginning of this checkpoint.
+   * @param totalNumBlocks - The total number of blocks expected in the checkpoint (must be at least one).
+   * @param headerOfLastBlockInPreviousCheckpoint - The header of the last block in the previous checkpoint.
+   */
   public async startNewCheckpoint(
     checkpointIndex: number,
     constants: CheckpointConstantData,
@@ -161,8 +180,8 @@ export class ProvingOrchestrator implements EpochProver {
     const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
     const db = await this.dbProvider.fork(lastBlockNumber);
 
-    const firstBlockNumber = lastBlockNumber + 1;
-    this.dbs.set(firstBlockNumber, db);
+    const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
+    this.dbs.set(firstBlockNumber, { fork: db, cleanupPromise: undefined });
 
     // Get archive sibling path before any block in this checkpoint lands.
     const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
@@ -199,7 +218,7 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async startNewBlock(blockNumber:
+  public async startNewBlock(blockNumber: BlockNumber, timestamp: UInt64, totalNumTxs: number) {
     if (!this.provingState) {
       throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
     }
@@ -219,10 +238,10 @@ export class ProvingOrchestrator implements EpochProver {
     // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
     if (!this.dbs.has(blockNumber)) {
       // Fork world state at the end of the immediately previous block
-      const db = await this.dbProvider.fork(blockNumber - 1);
-      this.dbs.set(blockNumber, db);
+      const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
+      this.dbs.set(blockNumber, { fork: db, cleanupPromise: undefined });
     }
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
 
     // Get archive snapshot and sibling path before any txs in this block lands.
     const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
@@ -254,7 +273,8 @@ export class ProvingOrchestrator implements EpochProver {
       await endSpongeBlob.absorb(blockEndBlobFields);
       blockProvingState.setEndSpongeBlob(endSpongeBlob);
 
-      //
+      // Try to accumulate the out hashes and blobs as far as we can:
+      await this.provingState.accumulateCheckpointOutHashes();
       await this.provingState.setBlobAccumulators();
     }
   }
@@ -278,7 +298,7 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }
 
-    const blockNumber = txs[0].globalVariables.blockNumber;
+    const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
     const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
     if (!provingState) {
       throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
@@ -296,7 +316,7 @@ export class ProvingOrchestrator implements EpochProver {
 
     logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
 
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
     const lastArchive = provingState.lastArchiveTreeSnapshot;
     const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
     const spongeBlobState = provingState.getStartSpongeBlob().clone();
@@ -309,7 +329,7 @@ export class ProvingOrchestrator implements EpochProver {
 
       validateTx(tx);
 
-      logger.
+      logger.debug(`Received transaction: ${tx.hash}`);
 
       const startSpongeBlob = spongeBlobState.clone();
       const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
@@ -351,7 +371,8 @@ export class ProvingOrchestrator implements EpochProver {
 
     provingState.setEndSpongeBlob(spongeBlobState);
 
-    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    // Txs have been added to the block. Now try to accumulate the out hashes and blobs as far as we can:
+    await this.provingState.accumulateCheckpointOutHashes();
    await this.provingState.setBlobAccumulators();
  }
 
@@ -388,10 +409,10 @@ export class ProvingOrchestrator implements EpochProver {
    * Marks the block as completed.
    * Computes the block header and updates the archive tree.
    */
-  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber:
+  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: BlockNumber) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber:
+  public async setBlockCompleted(blockNumber: BlockNumber, expectedHeader?: BlockHeader): Promise<BlockHeader> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
@@ -424,7 +445,7 @@ export class ProvingOrchestrator implements EpochProver {
     }
 
     // Get db for this block
-    const db = this.dbs.get(provingState.blockNumber)
+    const db = this.dbs.get(provingState.blockNumber)!.fork;
 
     // Update the archive tree, so we're ready to start processing the next block:
     logger.verbose(
@@ -460,7 +481,7 @@ export class ProvingOrchestrator implements EpochProver {
 
     // Get db for this block
     const blockNumber = provingState.blockNumber;
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
 
     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
@@ -485,20 +506,19 @@ export class ProvingOrchestrator implements EpochProver {
     // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
     // but have to make sure it only runs once all operations are completed, otherwise some function here
     // will attempt to access the fork after it was closed.
-
-    void this.dbs
-      .get(blockNumber)
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    void this.cleanupDBFork(blockNumber);
   }
 
   /**
-   * Cancel any further proving
+   * Cancel any further proving.
+   * If cancelJobsOnStop is true, aborts all pending jobs with the broker (which marks them as 'Aborted').
+   * If cancelJobsOnStop is false (default), jobs remain in the broker queue and can be reused on restart/reorg.
    */
   public cancel() {
-
-    controller.
+    if (this.cancelJobsOnStop) {
+      for (const controller of this.pendingProvingJobs) {
+        controller.abort();
+      }
     }
 
     this.provingState?.cancel();
@@ -533,6 +553,24 @@ export class ProvingOrchestrator implements EpochProver {
     return epochProofResult;
   }
 
+  private async cleanupDBFork(blockNumber: BlockNumber): Promise<void> {
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    const fork = this.dbs.get(blockNumber);
+    if (!fork) {
+      return;
+    }
+
+    try {
+      if (!fork.cleanupPromise) {
+        fork.cleanupPromise = fork.fork.close();
+      }
+      await fork.cleanupPromise;
+      this.dbs.delete(blockNumber);
+    } catch (err) {
+      logger.error(`Error closing db for block ${blockNumber}`, err);
+    }
+  }
+
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -850,19 +888,22 @@ export class ProvingOrchestrator implements EpochProver {
         },
       ),
      async result => {
-        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
-        await this.verifyBuiltBlockAgainstSyncedState(provingState);
-
        logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
 
        const leafLocation = provingState.setBlockRootRollupProof(result);
        const checkpointProvingState = provingState.parentCheckpoint;
 
+        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+        await this.verifyBuiltBlockAgainstSyncedState(provingState);
+
        if (checkpointProvingState.totalNumBlocks === 1) {
          this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
        } else {
          this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
        }
+
+        // We are finished with the block at this point, ensure the fork is cleaned up
+        void this.cleanupDBFork(provingState.blockNumber);
      },
    );
  }
@@ -1206,8 +1247,6 @@ export class ProvingOrchestrator implements EpochProver {
 
     const txProvingState = provingState.getTxProvingState(txIndex);
 
-    // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-    // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
     const doAvmProving = wrapCallbackInSpan(
       this.tracer,
       'ProvingOrchestrator.prover.getAvmProof',
@@ -1216,36 +1255,13 @@ export class ProvingOrchestrator implements EpochProver {
      },
      async (signal: AbortSignal) => {
        const inputs = txProvingState.getAvmInputs();
-
-        // TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
-        // just keep the next line but removing the second argument (false).
-          return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
-        } catch (err) {
-          if (process.env.AVM_PROVING_STRICT) {
-            logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-            throw err;
-          } else {
-            logger.warn(
-              `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
-              AVM inputs and carrying on. ${inspect(err)}.`,
-            );
-
-            try {
-              this.metrics.incAvmFallback();
-              const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
-              return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
-            } catch (err) {
-              logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
-              throw err;
-            }
-          }
-        }
+        return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
      },
    );
 
-    this.deferredProving(provingState, doAvmProving,
+    this.deferredProving(provingState, doAvmProving, proof => {
      logger.debug(`Proven VM for tx index: ${txIndex}`);
-      txProvingState.setAvmProof(
+      txProvingState.setAvmProof(proof);
      this.checkAndEnqueueBaseRollup(provingState, txIndex);
    });
  }
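cleanupDBFork is invoked from more than one place in the hunks above (replacing the old inline close, plus a new call once a block's root rollup proof completes), so the close() promise is memoized on the WorldStateFork entry to keep cleanup idempotent. The same pattern in isolation, with hypothetical names rather than the package's types:

    // Generic sketch of the memoized-cleanup idea; Closable, Tracked and cleanupOnce
    // are illustrative names, not exports of @aztec/prover-client.
    interface Closable {
      close(): Promise<void>;
    }

    type Tracked<T extends Closable> = { resource: T; cleanupPromise: Promise<void> | undefined };

    async function cleanupOnce<T extends Closable>(entries: Map<number, Tracked<T>>, key: number): Promise<void> {
      const entry = entries.get(key);
      if (!entry) {
        return; // Never created, or already removed.
      }
      // Reuse an in-flight close so concurrent callers cannot double-close the resource.
      entry.cleanupPromise ??= entry.resource.close();
      await entry.cleanupPromise;
      entries.delete(key);
    }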
package/src/orchestrator/orchestrator_metrics.ts

@@ -1,41 +1,18 @@
-import {
-  type Histogram,
-  Metrics,
-  type TelemetryClient,
-  type Tracer,
-  type UpDownCounter,
-  ValueType,
-} from '@aztec/telemetry-client';
+import { type Histogram, Metrics, type TelemetryClient, type Tracer } from '@aztec/telemetry-client';
 
 export class ProvingOrchestratorMetrics {
   public readonly tracer: Tracer;
 
   private baseRollupInputsDuration: Histogram;
-  private avmFallbackCount: UpDownCounter;
 
   constructor(client: TelemetryClient, name = 'ProvingOrchestrator') {
     this.tracer = client.getTracer(name);
     const meter = client.getMeter(name);
 
-    this.baseRollupInputsDuration = meter.createHistogram(Metrics.PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION
-      unit: 'ms',
-      description: 'Duration to build base rollup inputs',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount = meter.createUpDownCounter(Metrics.PROVING_ORCHESTRATOR_AVM_FALLBACK_COUNT, {
-      description: 'How many times the AVM fallback was used',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount.add(0);
+    this.baseRollupInputsDuration = meter.createHistogram(Metrics.PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION);
   }
 
   recordBaseRollupInputs(durationMs: number) {
     this.baseRollupInputsDuration.record(Math.ceil(durationMs));
   }
-
-  incAvmFallback() {
-    this.avmFallbackCount.add(1);
-  }
 }
package/src/orchestrator/tx-proving-state.ts

@@ -1,9 +1,9 @@
 import { AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants';
-import type { Fr } from '@aztec/foundation/
-import {
+import type { Fr } from '@aztec/foundation/curves/bn254';
+import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
 import type { AvmCircuitInputs } from '@aztec/stdlib/avm';
-import type {
-import { ProofData } from '@aztec/stdlib/proofs';
+import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+import { ProofData, ProofDataForFixedVk, RecursiveProof } from '@aztec/stdlib/proofs';
 import {
   type BaseRollupHints,
   PrivateBaseRollupHints,
@@ -32,7 +32,7 @@ export class TxProvingState {
     PublicChonkVerifierPublicInputs,
     typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
   >;
-  private
+  private avmProof?: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>;
 
   constructor(
     public readonly processedTx: ProcessedTx,
@@ -46,7 +46,7 @@ export class TxProvingState {
   }
 
   public ready() {
-    return !this.requireAvmProof || (!!this.
+    return !this.requireAvmProof || (!!this.avmProof && !!this.publicChonkVerifier);
   }
 
   public getAvmInputs(): AvmCircuitInputs {
@@ -80,8 +80,8 @@ export class TxProvingState {
     this.publicChonkVerifier = publicChonkVerifierProofAndVk;
   }
 
-  public setAvmProof(
-    this.
+  public setAvmProof(avmProof: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>) {
+    this.avmProof = avmProof;
   }
 
   #getPrivateBaseInputs() {
@@ -105,7 +105,7 @@ export class TxProvingState {
     if (!this.publicChonkVerifier) {
       throw new Error('Tx not ready for proving base rollup: public chonk verifier proof undefined');
     }
-    if (!this.
+    if (!this.avmProof) {
       throw new Error('Tx not ready for proving base rollup: avm proof undefined');
     }
     if (!(this.baseRollupHints instanceof PublicBaseRollupHints)) {
@@ -114,11 +114,7 @@ export class TxProvingState {
 
     const publicChonkVerifierProofData = toProofData(this.publicChonkVerifier);
 
-    const avmProofData = new
-      this.processedTx.avmProvingRequest.inputs.publicInputs,
-      this.avm.proof,
-      getAvmVkData(),
-    );
+    const avmProofData = new ProofDataForFixedVk(this.processedTx.avmProvingRequest.inputs.publicInputs, this.avmProof);
 
     return new PublicTxBaseRollupPrivateInputs(publicChonkVerifierProofData, avmProofData, this.baseRollupHints);
   }