@aztec/prover-client 0.0.1-fake-c83136db25 → 0.0.1-fake-ceab37513c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/bin/get-proof-inputs.d.ts +2 -0
- package/dest/bin/get-proof-inputs.d.ts.map +1 -0
- package/dest/bin/get-proof-inputs.js +51 -0
- package/dest/block-factory/light.d.ts +3 -5
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +9 -16
- package/dest/config.js +1 -1
- package/dest/mocks/fixtures.d.ts +1 -4
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +3 -31
- package/dest/mocks/test_context.d.ts +9 -32
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +22 -78
- package/dest/orchestrator/block-building-helpers.d.ts +31 -33
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +137 -126
- package/dest/orchestrator/block-proving-state.d.ts +53 -60
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +187 -214
- package/dest/orchestrator/epoch-proving-state.d.ts +28 -34
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +84 -128
- package/dest/orchestrator/orchestrator.d.ts +30 -31
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +236 -368
- package/dest/orchestrator/tx-proving-state.d.ts +9 -11
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +23 -26
- package/dest/prover-client/server-epoch-prover.d.ts +8 -9
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +9 -9
- package/dest/proving_broker/broker_prover_facade.d.ts +15 -20
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +21 -36
- package/dest/proving_broker/fixtures.js +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +0 -1
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.js +0 -1
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +18 -29
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +18 -38
- package/dest/test/mock_prover.d.ts +17 -22
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +20 -35
- package/package.json +17 -16
- package/src/bin/get-proof-inputs.ts +59 -0
- package/src/block-factory/light.ts +9 -35
- package/src/config.ts +1 -1
- package/src/mocks/fixtures.ts +11 -39
- package/src/mocks/test_context.ts +31 -137
- package/src/orchestrator/block-building-helpers.ts +211 -211
- package/src/orchestrator/block-proving-state.ts +245 -235
- package/src/orchestrator/epoch-proving-state.ts +127 -172
- package/src/orchestrator/orchestrator.ts +303 -545
- package/src/orchestrator/tx-proving-state.ts +43 -49
- package/src/prover-client/server-epoch-prover.ts +18 -28
- package/src/proving_broker/broker_prover_facade.ts +86 -157
- package/src/proving_broker/fixtures.ts +1 -1
- package/src/proving_broker/proof_store/index.ts +0 -1
- package/src/proving_broker/proving_broker.ts +18 -36
- package/src/proving_broker/proving_job_controller.ts +18 -38
- package/src/test/mock_prover.ts +60 -142
- package/dest/orchestrator/checkpoint-proving-state.d.ts +0 -63
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +0 -1
- package/dest/orchestrator/checkpoint-proving-state.js +0 -211
- package/src/orchestrator/checkpoint-proving-state.ts +0 -299

package/src/orchestrator/block-proving-state.ts

@@ -1,157 +1,140 @@
-import
+import { BatchedBlobAccumulator, BlobAccumulatorPublicInputs, SpongeBlob } from '@aztec/blob-lib';
 import {
   type ARCHIVE_HEIGHT,
-
-
+  BLOBS_PER_BLOCK,
+  FIELDS_PER_BLOB,
+  type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
   type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
+  type RECURSIVE_PROOF_LENGTH,
 } from '@aztec/constants';
-import {
-import {
+import { padArrayEnd } from '@aztec/foundation/collection';
+import { BLS12Point, Fr } from '@aztec/foundation/fields';
+import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
+import { getVKIndex, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
+import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
+import type { EthAddress, L2Block } from '@aztec/stdlib/block';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
-import { type ParityPublicInputs,
-import type { RollupHonkProofData } from '@aztec/stdlib/proofs';
+import { type ParityPublicInputs, RootParityInput, RootParityInputs } from '@aztec/stdlib/parity';
 import {
-
-
-
-
-
-
-
-
-
+  type BaseOrMergeRollupPublicInputs,
+  BlockConstantData,
+  type BlockRootOrBlockMergePublicInputs,
+  BlockRootRollupBlobData,
+  BlockRootRollupData,
+  BlockRootRollupInputs,
+  EmptyBlockRootRollupInputs,
+  EpochConstantData,
+  MergeRollupInputs,
+  PaddingBlockRootRollupInputs,
+  PreviousRollupData,
+  SingleTxBlockRootRollupInputs,
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
-import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
-import { type BlockHeader, GlobalVariables } from '@aztec/stdlib/tx';
-import
+import { AppendOnlyTreeSnapshot, type MerkleTreeId } from '@aztec/stdlib/trees';
+import { type BlockHeader, type GlobalVariables, StateReference } from '@aztec/stdlib/tx';
+import { VkData } from '@aztec/stdlib/vks';

-import {
-
+import {
+  accumulateBlobs,
+  buildBlobHints,
+  buildHeaderFromCircuitOutputs,
+  getEmptyBlockBlobsHash,
+} from './block-building-helpers.js';
+import type { EpochProvingState } from './epoch-proving-state.js';
 import type { TxProvingState } from './tx-proving-state.js';

-export type
-  provingOutput?: PublicInputsAndRecursiveProof<T, PROOF_LENGTH>;
-  isProving?: boolean;
-};
+export type TreeSnapshots = Map<MerkleTreeId, AppendOnlyTreeSnapshot>;

 /**
  * The current state of the proving schedule for a given block. Managed by ProvingState.
  * Contains the raw inputs and intermediate state to generate every constituent proof in the tree.
  */
 export class BlockProvingState {
-  private
-
+  private baseOrMergeProvingOutputs: UnbalancedTreeStore<
+    PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
   > = new UnbalancedTreeStore(0);
-  private
-
-
-
-  private rootParityProof: ProofState<ParityPublicInputs, typeof NESTED_RECURSIVE_PROOF_LENGTH> | undefined;
-  private blockRootProof:
-    | ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
+  private baseParityProvingOutputs: (PublicInputsAndRecursiveProof<ParityPublicInputs> | undefined)[];
+  private rootParityProvingOutput: PublicInputsAndRecursiveProof<ParityPublicInputs> | undefined;
+  private blockRootProvingOutput:
+    | PublicInputsAndRecursiveProof<BlockRootOrBlockMergePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
     | undefined;
-
-
+  public blockRootRollupStarted: boolean = false;
+  public block: L2Block | undefined;
+  public spongeBlobState: SpongeBlob | undefined;
+  public startBlobAccumulator: BatchedBlobAccumulator | undefined;
+  public endBlobAccumulator: BatchedBlobAccumulator | undefined;
+  public blobsHash: Fr | undefined;
+  public totalNumTxs: number;
   private txs: TxProvingState[] = [];
-
-  private error: string | undefined;
+  public error: string | undefined;

   constructor(
     public readonly index: number,
-    public readonly
-    public readonly
-
-    private readonly
-    public readonly
+    public readonly globalVariables: GlobalVariables,
+    public readonly newL1ToL2Messages: Fr[],
+    public readonly l1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
+    private readonly l1ToL2MessageSubtreeSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
+    public readonly l1ToL2MessageTreeSnapshotAfterInsertion: AppendOnlyTreeSnapshot,
+    private readonly lastArchiveSnapshot: AppendOnlyTreeSnapshot,
     private readonly lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
-    private readonly
-    private readonly
-
-
-    >,
-    public readonly newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
-    private readonly headerOfLastBlockInPreviousCheckpoint: BlockHeader,
-    private readonly startSpongeBlob: SpongeBlob,
-    public parentCheckpoint: CheckpointProvingState,
+    private readonly newArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
+    private readonly previousBlockHeader: BlockHeader,
+    private readonly proverId: EthAddress,
+    private readonly parentEpoch: EpochProvingState,
   ) {
-    this.
-
-
+    this.baseParityProvingOutputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }).map(_ => undefined);
+    this.totalNumTxs = 0;
+    if (this.blockNumber == parentEpoch.firstBlockNumber) {
+      this.startBlobAccumulator = BatchedBlobAccumulator.newWithChallenges(parentEpoch.finalBlobBatchingChallenges);
     }
+  }

-
+  public get blockNumber() {
+    return this.globalVariables.blockNumber;
   }

-  public
-
+  public startNewBlock(numTxs: number, numBlobFields: number) {
+    if (this.spongeBlobState) {
+      throw new Error(`Block ${this.blockNumber} already initalised.`);
+    }
+
+    this.baseOrMergeProvingOutputs = new UnbalancedTreeStore(numTxs);
+    // Initialize the sponge which will eventually absorb all tx effects to be added to the blob.
+    // Like l1 to l2 messages, we need to know beforehand how many effects will be absorbed.
+    this.spongeBlobState = SpongeBlob.init(numBlobFields);
+    this.totalNumTxs = numTxs;
   }

   // Adds a transaction to the proving state, returns it's index
   public addNewTx(tx: TxProvingState) {
-    if (!this.
-      throw new Error(`
+    if (!this.spongeBlobState) {
+      throw new Error(`Invalid block proving state, call startNewBlock before adding transactions.`);
     }
     const txIndex = this.txs.length;
     this.txs[txIndex] = tx;
     return txIndex;
   }

-  public isAcceptingTxs() {
-    return this.txs.length < this.totalNumTxs;
-  }
-
-  public getProcessedTxs() {
-    return this.txs.map(t => t.processedTx);
-  }
-
-  public tryStartProvingBase(txIndex: number) {
-    if (this.baseOrMergeProofs.getLeaf(txIndex)?.isProving) {
-      return false;
-    } else {
-      this.baseOrMergeProofs.setLeaf(txIndex, { isProving: true });
-      return true;
-    }
-  }
-
   public setBaseRollupProof(
     txIndex: number,
     provingOutput: PublicInputsAndRecursiveProof<
-
+      BaseOrMergeRollupPublicInputs,
       typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
     >,
   ): TreeNodeLocation {
-    return this.
-  }
-
-  public tryStartProvingMerge(location: TreeNodeLocation) {
-    if (this.baseOrMergeProofs.getNode(location)?.isProving) {
-      return false;
-    } else {
-      this.baseOrMergeProofs.setNode(location, { isProving: true });
-      return true;
-    }
+    return this.baseOrMergeProvingOutputs.setLeaf(txIndex, provingOutput);
   }

   public setMergeRollupProof(
     location: TreeNodeLocation,
     provingOutput: PublicInputsAndRecursiveProof<
-
+      BaseOrMergeRollupPublicInputs,
       typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
     >,
   ) {
-    this.
-  }
-
-  public tryStartProvingBaseParity(index: number) {
-    if (this.baseParityProofs[index]?.isProving) {
-      return false;
-    } else {
-      this.baseParityProofs[index] = { isProving: true };
-      return true;
-    }
+    this.baseOrMergeProvingOutputs.setNode(location, provingOutput);
   }

   // Stores a set of root parity inputs at the given index
@@ -161,179 +144,144 @@ export class BlockProvingState {
         `Unable to set a base parity proofs at index ${index}. Expected at most ${NUM_BASE_PARITY_PER_ROOT_PARITY} proofs.`,
       );
     }
-    this.
-  }
-
-  public tryStartProvingRootParity() {
-    if (this.rootParityProof?.isProving) {
-      return false;
-    } else {
-      this.rootParityProof = { isProving: true };
-      return true;
-    }
+    this.baseParityProvingOutputs[index] = provingOutput;
   }

   public setRootParityProof(provingOutput: PublicInputsAndRecursiveProof<ParityPublicInputs>) {
-    this.
-  }
-
-  public tryStartProvingBlockRoot() {
-    if (this.blockRootProof?.isProving) {
-      return false;
-    } else {
-      this.blockRootProof = { isProving: true };
-      return true;
-    }
+    this.rootParityProvingOutput = provingOutput;
   }

   public setBlockRootRollupProof(
     provingOutput: PublicInputsAndRecursiveProof<
-
+      BlockRootOrBlockMergePublicInputs,
       typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
     >,
-  )
-    this.
-    return this.parentCheckpoint.setBlockRootRollupProof(this.index, provingOutput);
+  ) {
+    this.blockRootProvingOutput = provingOutput;
   }

-  public
-
+  public setBlock(block: L2Block) {
+    this.block = block;
   }

-  public
-    this.
+  public setStartBlobAccumulator(accumulator: BatchedBlobAccumulator) {
+    this.startBlobAccumulator = accumulator;
   }

-  public
-
+  public setEndBlobAccumulator(accumulator: BatchedBlobAccumulator) {
+    this.endBlobAccumulator = accumulator;
   }

-  public
-    if (this.
-
+  public async accumulateBlobs() {
+    if (!this.block || !this.startBlobAccumulator) {
+      // We only want to accumulate once we have all txs, so we wait until the block is set.
+      return;
     }
-
-
-
-
-
-      blockNumber: this.blockNumber,
-      slotNumber: constants.slotNumber,
-      timestamp: this.timestamp,
-      coinbase: constants.coinbase,
-      feeRecipient: constants.feeRecipient,
-      gasFees: constants.gasFees,
-    });
-  }
-
-  public getStartSpongeBlob() {
-    return this.startSpongeBlob;
-  }
-
-  public setEndSpongeBlob(endSpongeBlob: SpongeBlob) {
-    this.endSpongeBlob = endSpongeBlob;
+    const endBlobAccumulator = await accumulateBlobs(
+      this.allTxs.map(t => t.processedTx),
+      this.startBlobAccumulator,
+    );
+    this.setEndBlobAccumulator(endBlobAccumulator);
   }

-
-
+  // Returns the complete set of transaction proving state objects
+  public get allTxs() {
+    return this.txs;
   }

-
-
+  /** Returns the block number as an epoch number. Used for prioritizing proof requests. */
+  public get epochNumber(): number {
+    return this.parentEpoch.epochNumber;
   }

   public getParentLocation(location: TreeNodeLocation) {
-    return this.
+    return this.baseOrMergeProvingOutputs.getParentLocation(location);
   }

   public getMergeRollupInputs(mergeLocation: TreeNodeLocation) {
-    const [left, right] = this.
+    const [left, right] = this.baseOrMergeProvingOutputs.getChildren(mergeLocation);
     if (!left || !right) {
-      throw new Error('At
+      throw new Error('At lease one child is not ready.');
     }

-    return new
+    return new MergeRollupInputs([this.#getPreviousRollupData(left), this.#getPreviousRollupData(right)]);
   }

-  public getBlockRootRollupTypeAndInputs() {
-
-
-      throw new Error('At least one child is not ready for the block root rollup.');
+  public async getBlockRootRollupTypeAndInputs() {
+    if (!this.rootParityProvingOutput) {
+      throw new Error('Root parity is not ready.');
     }

-    const
-
-    if (
-
+    const proofs = this.#getChildProofsForBlockRoot();
+    const nonEmptyProofs = proofs.filter(p => !!p);
+    if (proofs.length !== nonEmptyProofs.length) {
+      throw new Error('At lease one child is not ready for the block root.');
     }

-    const
-
-
-
-
-
-
+    const data = this.#getBlockRootRollupData();
+
+    if (this.totalNumTxs === 0) {
+      const constants = BlockConstantData.from({
+        lastArchive: this.lastArchiveSnapshot,
+        newL1ToL2: this.l1ToL2MessageTreeSnapshotAfterInsertion,
+        globalVariables: this.globalVariables,
+        vkTreeRoot: getVKTreeRoot(),
+        protocolContractTreeRoot,
+      });
+
+      this.blobsHash = await getEmptyBlockBlobsHash();
+
       return {
-        rollupType: '
-        inputs:
+        rollupType: 'empty-block-root-rollup' satisfies CircuitName,
+        inputs: EmptyBlockRootRollupInputs.from({
+          data,
+          constants,
+        }),
       };
     }
-    }

-
-
-
-    }
-    const l1ToL2Roots = toProofData(this.rootParityProof.provingOutput);
+    const previousRollupData = await Promise.all(nonEmptyProofs.map(p => this.#getPreviousRollupData(p!)));
+    const blobData = await this.#getBlockRootRollupBlobData();
+    this.blobsHash = blobData.blobsHash;

-    if (
-      return {
-        rollupType: 'rollup-block-root-first-empty-tx' satisfies CircuitName,
-        inputs: new BlockRootEmptyTxFirstRollupPrivateInputs(
-          l1ToL2Roots,
-          this.lastArchiveTreeSnapshot,
-          this.headerOfLastBlockInPreviousCheckpoint.state,
-          this.constants,
-          this.startSpongeBlob,
-          this.timestamp,
-          this.lastL1ToL2MessageSubtreeRootSiblingPath,
-          this.lastArchiveSiblingPath,
-        ),
-      };
-    } else if (!rightRollup) {
+    if (previousRollupData.length === 1) {
       return {
-        rollupType: '
-        inputs: new
-          l1ToL2Roots,
-          leftRollup,
-          this.lastL1ToL2MessageTreeSnapshot,
-          this.lastL1ToL2MessageSubtreeRootSiblingPath,
-          this.lastArchiveSiblingPath,
-        ),
+        rollupType: 'single-tx-block-root-rollup' satisfies CircuitName,
+        inputs: new SingleTxBlockRootRollupInputs(previousRollupData as [PreviousRollupData], data, blobData),
       };
     } else {
       return {
-        rollupType: '
-        inputs: new
-
-
-
-          this.lastL1ToL2MessageSubtreeRootSiblingPath,
-          this.lastArchiveSiblingPath,
+        rollupType: 'block-root-rollup' satisfies CircuitName,
+        inputs: new BlockRootRollupInputs(
+          previousRollupData as [PreviousRollupData, PreviousRollupData],
+          data,
+          blobData,
         ),
       };
     }
   }

-  public
-    const
-
+  public getPaddingBlockRootInputs() {
+    const constants = EpochConstantData.from({
+      vkTreeRoot: getVKTreeRoot(),
+      protocolContractTreeRoot,
+      proverId: this.proverId.toField(),
+    });
+
+    return PaddingBlockRootRollupInputs.from({
+      constants,
+    });
+  }
+
+  public getRootParityInputs() {
+    if (!this.baseParityProvingOutputs.every(p => !!p)) {
       throw new Error('At lease one base parity is not ready.');
     }

-    const children = baseParityProvingOutputs.map(p =>
-    return new
+    const children = this.baseParityProvingOutputs.map(p => this.#getRootParityData(p!));
+    return new RootParityInputs(
+      children as Tuple<RootParityInput<typeof RECURSIVE_PROOF_LENGTH>, typeof NUM_BASE_PARITY_PER_ROOT_PARITY>,
+    );
   }

   // Returns a specific transaction proving state
@@ -342,46 +290,89 @@ export class BlockProvingState {
   }

   public async buildHeaderFromProvingOutputs() {
-
-
+    const previousRollupData =
+      this.totalNumTxs === 0
+        ? []
+        : await Promise.all(this.#getChildProofsForBlockRoot().map(p => this.#getPreviousRollupData(p!)));
+
+    let endPartialState = this.previousBlockHeader.state.partial;
+    if (this.totalNumTxs !== 0) {
+      const previousRollupData = this.#getChildProofsForBlockRoot();
+      const lastRollup = previousRollupData[previousRollupData.length - 1];
+      if (!lastRollup) {
+        throw new Error('End state of the block is not available. Last rollup is not ready yet.');
+      }
+      endPartialState = lastRollup.inputs.end;
     }
+    const endState = new StateReference(this.l1ToL2MessageTreeSnapshotAfterInsertion, endPartialState);

-    return
+    return buildHeaderFromCircuitOutputs(
+      previousRollupData.map(d => d.baseOrMergeRollupPublicInputs),
+      this.rootParityProvingOutput!.inputs,
+      this.blockRootProvingOutput!.inputs,
+      this.blobsHash!,
+      endState,
+    );
   }

   public isReadyForMergeRollup(location: TreeNodeLocation) {
-    return
+    return this.baseOrMergeProvingOutputs.getSibling(location) !== undefined;
   }

   // Returns true if we have sufficient inputs to execute the block root rollup
   public isReadyForBlockRootRollup() {
-    const childProofs = this.#
-    return (
+    const childProofs = this.#getChildProofsForBlockRoot();
+    return (
+      this.block !== undefined &&
+      this.rootParityProvingOutput !== undefined &&
+      this.endBlobAccumulator !== undefined &&
+      childProofs.every(p => !!p)
+    );
   }

   // Returns true if we have sufficient root parity inputs to execute the root parity circuit
   public isReadyForRootParity() {
-    return this.
+    return this.baseParityProvingOutputs.every(p => !!p);
   }

   public isComplete() {
-    return !!this.
+    return !!this.blockRootProvingOutput;
   }

+  // Returns whether the proving state is still valid
   public verifyState() {
-    return this.
-  }
-
-  public getError() {
-    return this.error;
+    return this.parentEpoch.verifyState();
   }

   public reject(reason: string) {
     this.error = reason;
-    this.
+    this.parentEpoch.reject(reason);
+  }
+
+  #getBlockRootRollupData() {
+    return BlockRootRollupData.from({
+      l1ToL2Roots: this.#getRootParityData(this.rootParityProvingOutput!),
+      l1ToL2MessageSubtreeSiblingPath: this.l1ToL2MessageSubtreeSiblingPath,
+      previousArchiveSiblingPath: this.lastArchiveSiblingPath,
+      newArchiveSiblingPath: this.newArchiveSiblingPath,
+      previousBlockHeader: this.previousBlockHeader,
+      startBlobAccumulator: BlobAccumulatorPublicInputs.fromBatchedBlobAccumulator(this.startBlobAccumulator!),
+      finalBlobChallenges: this.startBlobAccumulator!.finalBlobChallenges,
+      proverId: this.proverId.toField(),
+    });
+  }
+
+  async #getBlockRootRollupBlobData() {
+    const txEffects = this.txs.map(txProvingState => txProvingState.processedTx.txEffect);
+    const { blobFields, blobCommitments, blobsHash } = await buildBlobHints(txEffects);
+    return BlockRootRollupBlobData.from({
+      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
+      blobCommitments: padArrayEnd(blobCommitments, BLS12Point.ZERO, BLOBS_PER_BLOCK),
+      blobsHash,
+    });
   }

-  #
+  #getChildProofsForBlockRoot() {
     if (this.totalNumTxs === 0) {
       return [];
     }
@@ -389,7 +380,26 @@ export class BlockProvingState {
     const rootLocation = { level: 0, index: 0 };
     // If there's only 1 tx, its base rollup proof will be stored at the root.
     return this.totalNumTxs === 1
-      ? [this.
-      : this.
+      ? [this.baseOrMergeProvingOutputs.getNode(rootLocation)]
+      : this.baseOrMergeProvingOutputs.getChildren(rootLocation);
+  }
+
+  #getPreviousRollupData({
+    inputs,
+    proof,
+    verificationKey,
+  }: PublicInputsAndRecursiveProof<BaseOrMergeRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>) {
+    const leafIndex = getVKIndex(verificationKey.keyAsFields);
+    const vkData = new VkData(verificationKey, leafIndex, getVKSiblingPath(leafIndex));
+    return new PreviousRollupData(inputs, proof, vkData);
+  }
+
+  #getRootParityData({ inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<ParityPublicInputs>) {
+    return new RootParityInput(
+      proof,
+      verificationKey.keyAsFields,
+      getVKSiblingPath(getVKIndex(verificationKey)),
+      inputs,
+    );
   }
 }