@aztec/prover-client 3.0.0-devnet.6 → 3.0.0-devnet.6-patch.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/index.d.ts +1 -1
- package/dest/block-factory/light.d.ts +8 -8
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +37 -23
- package/dest/config.d.ts +1 -1
- package/dest/config.js +1 -1
- package/dest/index.d.ts +1 -1
- package/dest/light/lightweight_checkpoint_builder.d.ts +29 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +108 -0
- package/dest/mocks/fixtures.d.ts +1 -4
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +7 -17
- package/dest/mocks/test_context.d.ts +27 -46
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +102 -116
- package/dest/orchestrator/block-building-helpers.d.ts +19 -21
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +93 -118
- package/dest/orchestrator/block-proving-state.d.ts +17 -11
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +81 -20
- package/dest/orchestrator/checkpoint-proving-state.d.ts +8 -8
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +15 -16
- package/dest/orchestrator/epoch-proving-state.d.ts +10 -9
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +6 -6
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +13 -13
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +54 -60
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -1
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.d.ts +6 -6
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +14 -23
- package/dest/prover-client/factory.d.ts +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.d.ts +9 -8
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +4 -4
- package/dest/proving_broker/broker_prover_facade.d.ts +21 -21
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +7 -6
- package/dest/proving_broker/config.d.ts +12 -8
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +8 -2
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.d.ts +2 -2
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +4 -3
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts +3 -2
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +3 -3
- package/dest/proving_broker/rpc.d.ts +4 -4
- package/dest/test/mock_proof_store.d.ts +3 -3
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +5 -6
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +3 -3
- package/package.json +18 -17
- package/src/block-factory/light.ts +40 -43
- package/src/config.ts +1 -1
- package/src/light/lightweight_checkpoint_builder.ts +144 -0
- package/src/mocks/fixtures.ts +7 -30
- package/src/mocks/test_context.ts +145 -177
- package/src/orchestrator/block-building-helpers.ts +133 -215
- package/src/orchestrator/block-proving-state.ts +103 -25
- package/src/orchestrator/checkpoint-proving-state.ts +24 -20
- package/src/orchestrator/epoch-proving-state.ts +15 -11
- package/src/orchestrator/orchestrator.ts +76 -74
- package/src/orchestrator/tx-proving-state.ts +24 -33
- package/src/prover-client/server-epoch-prover.ts +8 -9
- package/src/proving_broker/broker_prover_facade.ts +32 -29
- package/src/proving_broker/config.ts +8 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proving_broker.ts +4 -3
- package/src/proving_broker/proving_broker_database/memory.ts +2 -1
- package/src/proving_broker/proving_broker_database/persisted.ts +5 -4
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_job_controller.ts +5 -4
- package/src/test/mock_prover.ts +9 -7
@@ -1,17 +1,23 @@
-import {
+import {
+  BatchedBlobAccumulator,
+  type FinalBlobBatchingChallenges,
+  SpongeBlob,
+  encodeCheckpointBlobDataFromBlocks,
+} from '@aztec/blob-lib';
 import {
   type ARCHIVE_HEIGHT,
-
+  BLOBS_PER_CHECKPOINT,
   FIELDS_PER_BLOB,
   type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
   type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NUM_MSGS_PER_BASE_PARITY,
 } from '@aztec/constants';
+import { BlockNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { BLS12Point
+import { BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
-import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
 import {
@@ -44,13 +50,12 @@ export class CheckpointProvingState {
   private endBlobAccumulator: BatchedBlobAccumulator | undefined;
   private blobFields: Fr[] | undefined;
   private error: string | undefined;
-  public readonly firstBlockNumber:
+  public readonly firstBlockNumber: BlockNumber;

   constructor(
     public readonly index: number,
     public readonly constants: CheckpointConstantData,
     public readonly totalNumBlocks: number,
-    private readonly totalNumBlobFields: number,
     private readonly finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
     private readonly headerOfLastBlockInPreviousCheckpoint: BlockHeader,
     private readonly lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
@@ -71,21 +76,21 @@ export class CheckpointProvingState {
     private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void,
   ) {
     this.blockProofs = new UnbalancedTreeStore(totalNumBlocks);
-    this.firstBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1;
+    this.firstBlockNumber = BlockNumber(headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1);
   }

   public get epochNumber(): number {
     return this.parentEpoch.epochNumber;
   }

-  public
-  blockNumber:
+  public startNewBlock(
+    blockNumber: BlockNumber,
     timestamp: UInt64,
     totalNumTxs: number,
     lastArchiveTreeSnapshot: AppendOnlyTreeSnapshot,
     lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
-  ):
-    const index = blockNumber - this.firstBlockNumber;
+  ): BlockProvingState {
+    const index = Number(blockNumber) - Number(this.firstBlockNumber);
     if (index >= this.totalNumBlocks) {
       throw new Error(`Unable to start a new block at index ${index}. Expected at most ${this.totalNumBlocks} blocks.`);
     }
@@ -98,8 +103,7 @@ export class CheckpointProvingState {
     const lastL1ToL2MessageSubtreeRootSiblingPath =
       index === 0 ? this.lastL1ToL2MessageSubtreeRootSiblingPath : this.newL1ToL2MessageSubtreeRootSiblingPath;

-    const startSpongeBlob =
-      index === 0 ? await SpongeBlob.init(this.totalNumBlobFields) : this.blocks[index - 1]?.getEndSpongeBlob();
+    const startSpongeBlob = index === 0 ? SpongeBlob.init() : this.blocks[index - 1]?.getEndSpongeBlob();
     if (!startSpongeBlob) {
       throw new Error(
         'Cannot start a new block before the trees have progressed from the tx effects in the previous block.',
@@ -192,12 +196,12 @@ export class CheckpointProvingState {
   }

   public async accumulateBlobs(startBlobAccumulator: BatchedBlobAccumulator) {
-    if (this.isAcceptingBlocks() || this.blocks.some(b => b
+    if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
       return;
     }

-    this.blobFields =
-    this.endBlobAccumulator = await accumulateBlobs(this.blobFields
+    this.blobFields = encodeCheckpointBlobDataFromBlocks(this.blocks.map(b => b!.getBlockBlobData()));
+    this.endBlobAccumulator = await accumulateBlobs(this.blobFields!, startBlobAccumulator);
     this.startBlobAccumulator = startBlobAccumulator;

     this.onBlobAccumulatorSet(this);
@@ -246,8 +250,8 @@ export class CheckpointProvingState {
       previousArchiveSiblingPath: this.lastArchiveSiblingPath,
       startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
       finalBlobChallenges: this.finalBlobBatchingChallenges,
-      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB *
-      blobCommitments: padArrayEnd(blobCommitments, BLS12Point.ZERO,
+      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_CHECKPOINT),
+      blobCommitments: padArrayEnd(blobCommitments, BLS12Point.ZERO, BLOBS_PER_CHECKPOINT),
       blobsHash,
     });

@@ -258,8 +262,8 @@ export class CheckpointProvingState {
       : new CheckpointRootRollupPrivateInputs([left, right], hints);
   }

-  public getBlockProvingStateByBlockNumber(blockNumber:
-    const index = blockNumber - this.firstBlockNumber;
+  public getBlockProvingStateByBlockNumber(blockNumber: BlockNumber) {
+    const index = Number(blockNumber) - Number(this.firstBlockNumber);
     return this.blocks[index];
   }

@@ -5,7 +5,8 @@ import type {
   NESTED_RECURSIVE_PROOF_LENGTH,
   NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
 } from '@aztec/constants';
-import
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
+import type { Fr } from '@aztec/foundation/curves/bn254';
 import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
@@ -15,7 +16,7 @@ import {
   CheckpointMergeRollupPrivateInputs,
   CheckpointPaddingRollupPrivateInputs,
   CheckpointRollupPublicInputs,
-
+  PublicChonkVerifierPublicInputs,
   RootRollupPrivateInputs,
   type RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
@@ -57,14 +58,16 @@ export class EpochProvingState {
   private finalBatchedBlob: BatchedBlob | undefined;
   private provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED;

-  // Map from tx hash to
-  public readonly
+  // Map from tx hash to chonk verifier proof promise. Used when kickstarting chonk verifier proofs before tx processing.
+  public readonly cachedChonkVerifierProofs = new Map<
     string,
-    Promise<
+    Promise<
+      PublicInputsAndRecursiveProof<PublicChonkVerifierPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
+    >
   >();

   constructor(
-    public readonly epochNumber:
+    public readonly epochNumber: EpochNumber,
     public readonly totalNumCheckpoints: number,
     private readonly finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
     private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void,
@@ -81,7 +84,6 @@ export class EpochProvingState {
     checkpointIndex: number,
     constants: CheckpointConstantData,
     totalNumBlocks: number,
-    totalNumBlobFields: number,
     previousBlockHeader: BlockHeader,
     lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
     l1ToL2Messages: Fr[],
@@ -100,7 +102,6 @@ export class EpochProvingState {
       checkpointIndex,
       constants,
       totalNumBlocks,
-      totalNumBlobFields,
       this.finalBlobBatchingChallenges,
       previousBlockHeader,
       lastArchiveSiblingPath,
@@ -125,13 +126,16 @@ export class EpochProvingState {
     return this.checkpoints[index];
   }

-  public getCheckpointProvingStateByBlockNumber(blockNumber:
+  public getCheckpointProvingStateByBlockNumber(blockNumber: BlockNumber) {
     return this.checkpoints.find(
-      c =>
+      c =>
+        c &&
+        Number(blockNumber) >= Number(c.firstBlockNumber) &&
+        Number(blockNumber) < Number(c.firstBlockNumber) + c.totalNumBlocks,
     );
   }

-  public getBlockProvingStateByBlockNumber(blockNumber:
+  public getBlockProvingStateByBlockNumber(blockNumber: BlockNumber) {
     return this.getCheckpointProvingStateByBlockNumber(blockNumber)?.getBlockProvingStateByBlockNumber(blockNumber);
   }

@@ -1,4 +1,4 @@
-import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
+import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib/types';
 import {
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
   L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
@@ -6,9 +6,10 @@ import {
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
 } from '@aztec/constants';
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
@@ -16,7 +17,7 @@ import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
 import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
-import { EthAddress
+import { EthAddress } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
@@ -34,8 +35,8 @@ import {
   CheckpointConstantData,
   CheckpointRootSingleBlockRollupPrivateInputs,
   PrivateTxBaseRollupPrivateInputs,
-
-
+  PublicChonkVerifierPrivateInputs,
+  PublicChonkVerifierPublicInputs,
   RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
@@ -54,10 +55,9 @@ import {
 import { inspect } from 'util';

 import {
-  buildBlockHeaderFromTxs,
   buildHeaderFromCircuitOutputs,
   getLastSiblingPath,
-
+  getPublicChonkVerifierPrivateInputsFromTx,
   getRootTreeSiblingPath,
   getSubtreeSiblingPath,
   getTreeSnapshot,
@@ -93,7 +93,8 @@ export class ProvingOrchestrator implements EpochProver {

   private provingPromise: Promise<ProvingResult> | undefined = undefined;
   private metrics: ProvingOrchestratorMetrics;
-
+  // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
+  private dbs: Map<BlockNumber, MerkleTreeWriteOperations> = new Map();

   constructor(
     private dbProvider: ForkMerkleTreeOperations,
@@ -118,7 +119,7 @@ export class ProvingOrchestrator implements EpochProver {
   }

   public startNewEpoch(
-    epochNumber:
+    epochNumber: EpochNumber,
     totalNumCheckpoints: number,
     finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
   ) {
@@ -147,7 +148,6 @@ export class ProvingOrchestrator implements EpochProver {
     constants: CheckpointConstantData,
     l1ToL2Messages: Fr[],
     totalNumBlocks: number,
-    totalNumBlobFields: number,
     headerOfLastBlockInPreviousCheckpoint: BlockHeader,
   ) {
     if (!this.provingState) {
@@ -162,7 +162,7 @@ export class ProvingOrchestrator implements EpochProver {
     const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
     const db = await this.dbProvider.fork(lastBlockNumber);

-    const firstBlockNumber = lastBlockNumber + 1;
+    const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
     this.dbs.set(firstBlockNumber, db);

     // Get archive sibling path before any block in this checkpoint lands.
@@ -180,7 +180,6 @@ export class ProvingOrchestrator implements EpochProver {
       checkpointIndex,
       constants,
       totalNumBlocks,
-      totalNumBlobFields,
       headerOfLastBlockInPreviousCheckpoint,
       lastArchiveSiblingPath,
       l1ToL2Messages,
@@ -201,7 +200,7 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async startNewBlock(blockNumber:
+  public async startNewBlock(blockNumber: BlockNumber, timestamp: UInt64, totalNumTxs: number) {
     if (!this.provingState) {
       throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
     }
@@ -216,12 +215,12 @@ export class ProvingOrchestrator implements EpochProver {
     }

     const constants = checkpointProvingState.constants;
-    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber
+    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);

     // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
     if (!this.dbs.has(blockNumber)) {
       // Fork world state at the end of the immediately previous block
-      const db = await this.dbProvider.fork(blockNumber - 1);
+      const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
       this.dbs.set(blockNumber, db);
     }
     const db = this.dbs.get(blockNumber)!;
@@ -230,7 +229,7 @@ export class ProvingOrchestrator implements EpochProver {
     const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);

-    const blockProvingState =
+    const blockProvingState = checkpointProvingState.startNewBlock(
       blockNumber,
       timestamp,
       totalNumTxs,
@@ -248,8 +247,12 @@ export class ProvingOrchestrator implements EpochProver {
     // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
     // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
     if (totalNumTxs === 0) {
+      const endState = await db.getStateReference();
+      blockProvingState.setEndState(endState);
+
       const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
-
+      const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+      await endSpongeBlob.absorb(blockEndBlobFields);
       blockProvingState.setEndSpongeBlob(endSpongeBlob);

       // And also try to accumulate the blobs as far as we can:
@@ -276,7 +279,7 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

-    const blockNumber = txs[0].globalVariables.blockNumber;
+    const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
     const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
     if (!provingState) {
       throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
@@ -327,7 +330,7 @@ export class ProvingOrchestrator implements EpochProver {
       const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
       const txIndex = provingState.addNewTx(txProvingState);
       if (txProvingState.requireAvmProof) {
-        this.
+        this.getOrEnqueueChonkVerifier(provingState, txIndex);
         logger.debug(`Enqueueing public VM for tx ${txIndex}`);
         this.enqueueVM(provingState, txIndex);
       } else {
@@ -341,7 +344,11 @@ export class ProvingOrchestrator implements EpochProver {
       }
     }

-    await
+    const endState = await db.getStateReference();
+    provingState.setEndState(endState);
+
+    const blockEndBlobFields = provingState.getBlockEndBlobFields();
+    await spongeBlobState.absorb(blockEndBlobFields);

     provingState.setEndSpongeBlob(spongeBlobState);

@@ -350,27 +357,30 @@ export class ProvingOrchestrator implements EpochProver {
   }

   /**
-   * Kickstarts
-   * Note that if the
+   * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+   * Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
    */
-  @trackSpan('ProvingOrchestrator.
-  public
+  @trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+  public startChonkVerifierCircuits(txs: Tx[]) {
     if (!this.provingState?.verifyState()) {
-      throw new Error(`Empty epoch proving state. call startNewEpoch before starting
+      throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
     }
     const publicTxs = txs.filter(tx => tx.data.forPublic);
     for (const tx of publicTxs) {
       const txHash = tx.getTxHash().toString();
-      const privateInputs =
+      const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
       const tubeProof =
         promiseWithResolvers<
-          PublicInputsAndRecursiveProof<
+          PublicInputsAndRecursiveProof<
+            PublicChonkVerifierPublicInputs,
+            typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+          >
         >();
-      logger.debug(`Starting
-      this.
+      logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+      this.doEnqueueChonkVerifier(txHash, privateInputs, proof => {
        tubeProof.resolve(proof);
      });
-      this.provingState.
+      this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
    }
    return Promise.resolve();
  }
@@ -379,10 +389,10 @@ export class ProvingOrchestrator implements EpochProver {
    * Marks the block as completed.
    * Computes the block header and updates the archive tree.
    */
-  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber:
+  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: BlockNumber) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber:
+  public async setBlockCompleted(blockNumber: BlockNumber, expectedHeader?: BlockHeader): Promise<BlockHeader> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
@@ -405,39 +415,25 @@ export class ProvingOrchestrator implements EpochProver {
       );
     }

-    //
+    // Given we've applied every change from this block, now assemble the block header:
     logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-    const header = await
-
-    await this.verifyBuiltBlockAgainstSyncedState(provingState);
-
-    return header;
-  }
-
-  private async buildL2BlockHeader(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
-    // Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
-    const txs = provingState.getProcessedTxs();
-
-    const startSpongeBlob = provingState.getStartSpongeBlob();
-
-    // Get db for this block
-    const db = this.dbs.get(provingState.blockNumber)!;
-
-    // Given we've applied every change from this block, now assemble the block header
-    // and update the archive tree, so we're ready to start processing the next block
-    const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);
+    const header = await provingState.buildBlockHeader();

     if (expectedHeader && !header.equals(expectedHeader)) {
       logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
       throw new Error('Block header mismatch');
     }

+    // Get db for this block
+    const db = this.dbs.get(provingState.blockNumber)!;
+
+    // Update the archive tree, so we're ready to start processing the next block:
     logger.verbose(
       `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`,
     );
     await db.updateArchive(header);

-
+    await this.verifyBuiltBlockAgainstSyncedState(provingState);

     return header;
   }
@@ -643,7 +639,7 @@ export class ProvingOrchestrator implements EpochProver {
     db: MerkleTreeWriteOperations,
   ): Promise<[BaseRollupHints, TreeSnapshots]> {
     // We build the base rollup inputs using a mock proof and verification key.
-    // These will be overwritten later once we have proven the
+    // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
     const [ms, hints] = await elapsed(
       insertSideEffectsAndBuildBaseRollupHints(
         tx,
@@ -720,11 +716,11 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  // Enqueues the public
+  // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
   // Once completed, will enqueue the the public tx base rollup.
-  private
+  private getOrEnqueueChonkVerifier(provingState: BlockProvingState, txIndex: number) {
     if (!provingState.verifyState()) {
-      logger.debug('Not running
+      logger.debug('Not running chonk verifier circuit, state invalid');
       return;
     }

@@ -732,34 +728,40 @@ export class ProvingOrchestrator implements EpochProver {
     const txHash = txProvingState.processedTx.hash.toString();
     NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
     const handleResult = (
-      result: PublicInputsAndRecursiveProof<
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
     ) => {
-      logger.debug(`Got
-      txProvingState.
-      this.provingState?.
+      logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, { txHash });
+      txProvingState.setPublicChonkVerifierProof(result);
+      this.provingState?.cachedChonkVerifierProofs.delete(txHash);
       this.checkAndEnqueueBaseRollup(provingState, txIndex);
     };

-    if (this.provingState?.
-      logger.debug(`
-      void this.provingState!.
+    if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+      logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, { txHash });
+      void this.provingState!.cachedChonkVerifierProofs.get(txHash)!.then(handleResult);
       return;
     }

-    logger.debug(`Enqueuing
-    this.
+    logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+    this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
   }

-  private
+  private doEnqueueChonkVerifier(
     txHash: string,
-    inputs:
+    inputs: PublicChonkVerifierPrivateInputs,
     handler: (
-      result: PublicInputsAndRecursiveProof<
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
     ) => void,
     provingState: EpochProvingState | BlockProvingState = this.provingState!,
   ) {
     if (!provingState.verifyState()) {
-      logger.debug('Not running
+      logger.debug('Not running chonk verifier circuit, state invalid');
       return;
     }

@@ -767,12 +769,12 @@ export class ProvingOrchestrator implements EpochProver {
       provingState,
       wrapCallbackInSpan(
         this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getPublicChonkVerifierProof',
         {
           [Attributes.TX_HASH]: txHash,
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: '
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public' satisfies CircuitName,
         },
-        signal => this.prover.
+        signal => this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber),
       ),
       handler,
     );
@@ -1255,7 +1257,7 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

-    // We must have completed all proving (
+    // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
     logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);

     this.enqueueBaseRollup(provingState, txIndex);