@aztec/prover-client 3.0.0-nightly.20251026 → 3.0.0-nightly.20251030-2
This diff compares the contents of two publicly released versions of the package, exactly as they appear in their public registry, and is provided for informational purposes only.
- package/dest/block-factory/light.js +1 -1
- package/dest/config.js +1 -1
- package/dest/mocks/fixtures.js +1 -1
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +8 -9
- package/dest/orchestrator/block-building-helpers.d.ts +9 -11
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +35 -45
- package/dest/orchestrator/block-proving-state.d.ts +1 -1
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +2 -3
- package/dest/orchestrator/checkpoint-proving-state.d.ts +3 -2
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +12 -9
- package/dest/orchestrator/epoch-proving-state.d.ts +2 -2
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +4 -4
- package/dest/orchestrator/orchestrator.d.ts +5 -5
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +34 -35
- package/dest/orchestrator/tx-proving-state.d.ts +4 -4
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +12 -12
- package/dest/prover-client/server-epoch-prover.d.ts +1 -1
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +2 -2
- package/dest/proving_broker/broker_prover_facade.d.ts +2 -2
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +2 -2
- package/dest/proving_broker/proving_broker.js +2 -2
- package/dest/proving_broker/proving_job_controller.js +2 -2
- package/dest/test/mock_prover.d.ts +2 -2
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +3 -3
- package/package.json +15 -15
- package/src/block-factory/light.ts +1 -1
- package/src/config.ts +1 -1
- package/src/mocks/fixtures.ts +1 -1
- package/src/mocks/test_context.ts +8 -9
- package/src/orchestrator/block-building-helpers.ts +43 -52
- package/src/orchestrator/block-proving-state.ts +2 -3
- package/src/orchestrator/checkpoint-proving-state.ts +14 -10
- package/src/orchestrator/epoch-proving-state.ts +7 -5
- package/src/orchestrator/orchestrator.ts +50 -42
- package/src/orchestrator/tx-proving-state.ts +20 -16
- package/src/prover-client/server-epoch-prover.ts +2 -2
- package/src/proving_broker/broker_prover_facade.ts +9 -7
- package/src/proving_broker/proving_broker.ts +2 -2
- package/src/proving_broker/proving_job_controller.ts +2 -2
- package/src/test/mock_prover.ts +9 -7
--- package/src/mocks/test_context.ts
+++ package/src/mocks/test_context.ts
@@ -1,4 +1,5 @@
 import type { BBProverConfig } from '@aztec/bb-prover';
+import { TestCircuitProver } from '@aztec/bb-prover';
 import { SpongeBlob } from '@aztec/blob-lib';
 import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
 import { padArrayEnd, times, timesParallel } from '@aztec/foundation/collection';
@@ -13,7 +14,8 @@ import { SimpleContractDataSource } from '@aztec/simulator/public/fixtures';
 import { PublicProcessorFactory } from '@aztec/simulator/server';
 import { PublicDataWrite } from '@aztec/stdlib/avm';
 import { AztecAddress } from '@aztec/stdlib/aztec-address';
-import { EthAddress
+import { EthAddress } from '@aztec/stdlib/block';
+import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
 import type { ServerCircuitProver } from '@aztec/stdlib/interfaces/server';
 import type { CheckpointConstantData } from '@aztec/stdlib/rollup';
 import { makeBloatedProcessedTx } from '@aztec/stdlib/testing';
@@ -24,9 +26,6 @@ import { NativeWorldStateService } from '@aztec/world-state/native';
 
 import { promises as fs } from 'fs';
 
-// TODO(#12613) This means of sharing test code is not ideal.
-// eslint-disable-next-line import/no-relative-packages
-import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js';
 import { buildBlockWithCleanDB } from '../block-factory/light.js';
 import { getTreeSnapshot } from '../orchestrator/block-building-helpers.js';
 import type { BlockProvingState } from '../orchestrator/block-proving-state.js';
@@ -284,9 +283,8 @@ export class TestContext {
      );
    });
 
-    const
-    const
-    const spongeBlobState = SpongeBlob.init(totalNumBlobFields);
+    const blobFields = getCheckpointBlobFields(blockTxs.map(txs => txs.map(tx => tx.txEffect)));
+    const spongeBlobState = await SpongeBlob.init(blobFields.length);
 
    const blocks: { header: BlockHeader; txs: ProcessedTx[] }[] = [];
    for (let i = 0; i < numBlocks; i++) {
@@ -306,12 +304,13 @@ export class TestContext {
 
      await this.worldState.handleL2BlockAndMessages(block, blockMsgs, isFirstBlock);
 
-
+      const blockBlobFields = block.body.toBlobFields();
+      await spongeBlobState.absorb(blockBlobFields);
 
      blocks.push({ header, txs });
    }
 
-    return { blocks, l1ToL2Messages, blobFields
+    return { blocks, l1ToL2Messages, blobFields };
  }
 
  public async processPublicFunctions(
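The test_context change above switches sponge sizing from a precomputed total to the actual checkpoint blob fields. A minimal sketch of that flow, using only the helpers imported in the diff (the '@aztec/stdlib/tx' module path for ProcessedTx is an assumption):

import { SpongeBlob } from '@aztec/blob-lib';
import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
import type { ProcessedTx } from '@aztec/stdlib/tx'; // assumed module path

// One inner array of txs per block; the checkpoint's blob fields are derived
// from every block's tx effects, and the sponge is sized by the real count.
async function initCheckpointSponge(blockTxs: ProcessedTx[][]) {
  const blobFields = getCheckpointBlobFields(blockTxs.map(txs => txs.map(tx => tx.txEffect)));
  const spongeBlobState = await SpongeBlob.init(blobFields.length); // init is now async
  return { blobFields, spongeBlobState };
}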
--- package/src/orchestrator/block-building-helpers.ts
+++ package/src/orchestrator/block-building-helpers.ts
@@ -1,7 +1,14 @@
-import {
+import {
+  BatchedBlob,
+  BatchedBlobAccumulator,
+  SpongeBlob,
+  computeBlobsHashFromBlobs,
+  getBlobCommitmentsFromBlobs,
+  getBlobsPerL1Block,
+} from '@aztec/blob-lib';
 import {
   ARCHIVE_HEIGHT,
-
+  CHONK_PROOF_LENGTH,
   MAX_CONTRACT_CLASS_LOGS_PER_TX,
   MAX_NOTE_HASHES_PER_TX,
   MAX_NULLIFIERS_PER_TX,
@@ -15,8 +22,8 @@ import {
 } from '@aztec/constants';
 import { makeTuple } from '@aztec/foundation/array';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import {
-import {
+import { sha256Trunc } from '@aztec/foundation/crypto';
+import { Fr } from '@aztec/foundation/fields';
 import { type Bufferable, type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
 import {
   MembershipWitness,
@@ -27,6 +34,7 @@ import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
 import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
 import { Body, L2BlockHeader, getBlockBlobFields } from '@aztec/stdlib/block';
+import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
 import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import { ContractClassLogFields } from '@aztec/stdlib/logs';
 import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
@@ -35,7 +43,7 @@ import {
   BlockRollupPublicInputs,
   PrivateBaseRollupHints,
   PublicBaseRollupHints,
-
+  PublicChonkVerifierPrivateInputs,
   TreeSnapshotDiffHints,
 } from '@aztec/stdlib/rollup';
 import {
@@ -209,60 +217,53 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
  },
 );
 
-export function
-  const
-
-  const binaryProof = new Proof(
-
-
+export function getChonkProofFromTx(tx: Tx | ProcessedTx) {
+  const publicInputs = tx.data.publicInputs().toFields();
+
+  const binaryProof = new Proof(
+    Buffer.concat(tx.chonkProof.attachPublicInputs(publicInputs).fieldsWithPublicInputs.map(field => field.toBuffer())),
+    publicInputs.length,
+  );
+  return new RecursiveProof(tx.chonkProof.fields, binaryProof, true, CHONK_PROOF_LENGTH);
 }
 
-export function
+export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx, proverId: Fr) {
  const proofData = new ProofData(
    tx.data.toPrivateToPublicKernelCircuitPublicInputs(),
-
+    getChonkProofFromTx(tx),
    getVkData('HidingKernelToPublic'),
  );
-  return new
+  return new PublicChonkVerifierPrivateInputs(proofData, proverId);
 }
 
 // Build "hints" as the private inputs for the checkpoint root rollup circuit.
 // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
-// The `blobsHash` will be validated on L1 against the blob
-export const buildBlobHints =
-
-
-
-
-
-    // - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
-    // - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
-    // exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
-    // We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
-    const blobCommitments = blobs.map(b => BLS12Point.decompress(b.commitment));
-    const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
-    return { blobCommitments, blobs, blobsHash };
-  },
-);
+// The `blobsHash` will be validated on L1 against the submitted blob data.
+export const buildBlobHints = (blobFields: Fr[]) => {
+  const blobs = getBlobsPerL1Block(blobFields);
+  const blobCommitments = getBlobCommitmentsFromBlobs(blobs);
+  const blobsHash = computeBlobsHashFromBlobs(blobs);
+  return { blobCommitments, blobs, blobsHash };
+};
 
-// Build the data required to prove the txs in an epoch. Currently only used in tests.
+// Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
 export const buildBlobDataFromTxs = async (txsPerCheckpoint: ProcessedTx[][]) => {
-  const blobFields = txsPerCheckpoint.map(txs =>
+  const blobFields = txsPerCheckpoint.map(txs => getCheckpointBlobFields([txs.map(tx => tx.txEffect)]));
  const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
  return { blobFieldsLengths: blobFields.map(fields => fields.length), finalBlobChallenges };
 };
 
 export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
-  const blobs =
-  return await BatchedBlob.precomputeBatchedBlobChallenges(blobs
+  const blobs = blobFieldsPerCheckpoint.map(blobFields => getBlobsPerL1Block(blobFields));
+  return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
 };
 
 export const accumulateBlobs = runInSpan(
  'BlockBuilderHelpers',
  'accumulateBlobs',
  async (_span: Span, blobFields: Fr[], startBlobAccumulator: BatchedBlobAccumulator) => {
-    const blobs =
-    const endBlobAccumulator = startBlobAccumulator.accumulateBlobs(blobs);
+    const blobs = getBlobsPerL1Block(blobFields);
+    const endBlobAccumulator = await startBlobAccumulator.accumulateBlobs(blobs);
    return endBlobAccumulator;
  },
 );
@@ -326,16 +327,18 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
    const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeCompressedUnbalancedMerkleTreeRoot(txOutHashes));
 
    const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
-    const
-
+    const blockBlobFields = body.toBlobFields();
+    // TODO(#17027): This only works when there's one block per checkpoint.
+    const blobFields = [new Fr(blockBlobFields.length + 1)].concat(blockBlobFields);
+    const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
 
    const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
 
    const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
    const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
 
-    const endSpongeBlob = startSpongeBlob?.clone() ?? SpongeBlob.init(blobFields.length);
-    await endSpongeBlob.absorb(
+    const endSpongeBlob = startSpongeBlob?.clone() ?? (await SpongeBlob.init(blobFields.length));
+    await endSpongeBlob.absorb(blockBlobFields);
    const spongeBlobHash = await endSpongeBlob.squeeze();
 
    const header = new L2BlockHeader(
@@ -397,18 +400,6 @@ export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr
  return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
 }
 
-export function getBlobsHashFromBlobs(inputs: Blob[]): Fr {
-  return sha256ToField(inputs.map(b => b.getEthVersionedBlobHash()));
-}
-
-// Note: tested against the constant values in block_root/empty_block_root_rollup_inputs.nr, set by block_building_helpers.test.ts.
-// Having this separate fn hopefully makes it clear how we treat empty blocks and their blobs, and won't break if we decide to change how
-// getBlobsPerBlock() works on empty input.
-export async function getEmptyBlockBlobsHash(): Promise<Fr> {
-  const blobHash = (await Blob.getBlobsPerBlock([])).map(b => b.getEthVersionedBlobHash());
-  return sha256ToField(blobHash);
-}
-
 export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
  const { size } = await db.getTreeInfo(treeId);
  const path = await db.getSiblingPath(treeId, size - 1n);
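Two changes in block-building-helpers.ts are worth restating: buildBlobHints is now a plain function over a flat field list, and the header's blobsHash uses a length-prefixed framing of the block's blob fields. A minimal sketch of the single-block framing, assuming the blob-lib helpers imported above behave as their names suggest:

import { computeBlobsHashFromBlobs, getBlobsPerL1Block } from '@aztec/blob-lib';
import { Fr } from '@aztec/foundation/fields';

// Single-block checkpoint framing as in buildHeaderAndBodyFromTxs: prefix the
// block's blob fields with one length field that counts the block's fields
// plus the prefix itself, then hash the blobs built from that stream.
function singleBlockBlobsHash(blockBlobFields: Fr[]): Fr {
  const blobFields = [new Fr(blockBlobFields.length + 1)].concat(blockBlobFields);
  return computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
}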
--- package/src/orchestrator/block-proving-state.ts
+++ package/src/orchestrator/block-proving-state.ts
@@ -9,7 +9,6 @@ import {
 import { Fr } from '@aztec/foundation/fields';
 import { type Tuple, assertLength } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
-import { getBlockBlobFields } from '@aztec/stdlib/block';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import { type ParityPublicInputs, ParityRootPrivateInputs } from '@aztec/stdlib/parity';
 import type { RollupHonkProofData } from '@aztec/stdlib/proofs';
@@ -239,8 +238,8 @@ export class BlockProvingState {
    return this.endSpongeBlob;
  }
 
-  public
-    return
+  public getTxEffects() {
+    return this.txs.map(t => t.processedTx.txEffect);
  }
 
  public getParentLocation(location: TreeNodeLocation) {
--- package/src/orchestrator/checkpoint-proving-state.ts
+++ package/src/orchestrator/checkpoint-proving-state.ts
@@ -1,4 +1,4 @@
-import { BatchedBlobAccumulator,
+import { BatchedBlobAccumulator, type FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
 import {
   type ARCHIVE_HEIGHT,
   BLOBS_PER_BLOCK,
@@ -11,6 +11,7 @@ import { padArrayEnd } from '@aztec/foundation/collection';
 import { BLS12Point, Fr } from '@aztec/foundation/fields';
 import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
+import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
 import {
@@ -41,6 +42,7 @@ export class CheckpointProvingState {
  private blocks: (BlockProvingState | undefined)[] = [];
  private startBlobAccumulator: BatchedBlobAccumulator | undefined;
  private endBlobAccumulator: BatchedBlobAccumulator | undefined;
+  private blobFields: Fr[] | undefined;
  private error: string | undefined;
  public readonly firstBlockNumber: number;
 
@@ -76,13 +78,13 @@ export class CheckpointProvingState {
    return this.parentEpoch.epochNumber;
  }
 
-  public startNewBlock(
+  public async startNewBlock(
    blockNumber: number,
    timestamp: UInt64,
    totalNumTxs: number,
    lastArchiveTreeSnapshot: AppendOnlyTreeSnapshot,
    lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
-  ): BlockProvingState {
+  ): Promise<BlockProvingState> {
    const index = blockNumber - this.firstBlockNumber;
    if (index >= this.totalNumBlocks) {
      throw new Error(`Unable to start a new block at index ${index}. Expected at most ${this.totalNumBlocks} blocks.`);
@@ -97,7 +99,7 @@ export class CheckpointProvingState {
      index === 0 ? this.lastL1ToL2MessageSubtreeRootSiblingPath : this.newL1ToL2MessageSubtreeRootSiblingPath;
 
    const startSpongeBlob =
-      index === 0 ? SpongeBlob.init(this.totalNumBlobFields) : this.blocks[index - 1]?.getEndSpongeBlob();
+      index === 0 ? await SpongeBlob.init(this.totalNumBlobFields) : this.blocks[index - 1]?.getEndSpongeBlob();
    if (!startSpongeBlob) {
      throw new Error(
        'Cannot start a new block before the trees have progressed from the tx effects in the previous block.',
@@ -194,8 +196,8 @@ export class CheckpointProvingState {
      return;
    }
 
-
-    this.endBlobAccumulator = await accumulateBlobs(blobFields, startBlobAccumulator);
+    this.blobFields = getCheckpointBlobFields(this.blocks.map(b => b!.getTxEffects()));
+    this.endBlobAccumulator = await accumulateBlobs(this.blobFields, startBlobAccumulator);
    this.startBlobAccumulator = startBlobAccumulator;
 
    this.onBlobAccumulatorSet(this);
@@ -224,7 +226,7 @@ export class CheckpointProvingState {
    return this.totalNumBlocks === 1 ? 'rollup-checkpoint-root-single-block' : 'rollup-checkpoint-root';
  }
 
-  public
+  public getCheckpointRootRollupInputs() {
    const proofs = this.#getChildProofsForRoot();
    const nonEmptyProofs = proofs.filter(p => !!p);
    if (proofs.length !== nonEmptyProofs.length) {
@@ -234,13 +236,15 @@ export class CheckpointProvingState {
      throw new Error('Start blob accumulator is not set.');
    }
 
-
-    const
+    // `blobFields` must've been set if `startBlobAccumulator` is set (in `accumulateBlobs`).
+    const blobFields = this.blobFields!;
+
+    const { blobCommitments, blobsHash } = buildBlobHints(blobFields);
 
    const hints = CheckpointRootRollupHints.from({
      previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
      previousArchiveSiblingPath: this.lastArchiveSiblingPath,
-      startBlobAccumulator:
+      startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
      finalBlobChallenges: this.finalBlobBatchingChallenges,
      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
      blobCommitments: padArrayEnd(blobCommitments, BLS12Point.ZERO, BLOBS_PER_BLOCK),
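The net effect in checkpoint-proving-state.ts is an ordering invariant: the blob fields are computed and cached at the moment the accumulator is set, which is what lets getCheckpointRootRollupInputs stay synchronous. A hypothetical, stripped-down shape of that invariant (not the actual class):

import type { Fr } from '@aztec/foundation/fields';

// Hypothetical reduction: `fields` is always populated before accumulation
// completes, so readers of the cached value never need to await anything.
class CheckpointBlobFields {
  private fields: Fr[] | undefined;

  async accumulate(compute: () => Fr[], absorb: (fields: Fr[]) => Promise<void>) {
    this.fields = compute();
    await absorb(this.fields);
  }

  getForRootRollup(): Fr[] {
    if (!this.fields) {
      throw new Error('Blob fields not set; accumulate blobs first.');
    }
    return this.fields;
  }
}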
--- package/src/orchestrator/epoch-proving-state.ts
+++ package/src/orchestrator/epoch-proving-state.ts
@@ -15,7 +15,7 @@ import {
   CheckpointMergeRollupPrivateInputs,
   CheckpointPaddingRollupPrivateInputs,
   CheckpointRollupPublicInputs,
-
+  PublicChonkVerifierPublicInputs,
   RootRollupPrivateInputs,
   type RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
@@ -57,10 +57,12 @@ export class EpochProvingState {
  private finalBatchedBlob: BatchedBlob | undefined;
  private provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED;
 
-  // Map from tx hash to
-  public readonly
+  // Map from tx hash to chonk verifier proof promise. Used when kickstarting chonk verifier proofs before tx processing.
+  public readonly cachedChonkVerifierProofs = new Map<
    string,
-    Promise<
+    Promise<
+      PublicInputsAndRecursiveProof<PublicChonkVerifierPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
+    >
  >();
 
  constructor(
@@ -236,7 +238,7 @@ export class EpochProvingState {
    if (!this.endBlobAccumulator) {
      throw new Error('End blob accumulator not ready.');
    }
-    this.finalBatchedBlob = await this.endBlobAccumulator.finalize();
+    this.finalBatchedBlob = await this.endBlobAccumulator.finalize(true /* verifyProof */);
  }
 
  public getParentLocation(location: TreeNodeLocation) {
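The new map on EpochProvingState caches pending chonk verifier proofs by tx hash. Its shape, spelled out standalone (the '@aztec/constants' path for the proof-length constant is an assumption; the diff does not show that import):

import { NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants'; // assumed path
import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
import type { PublicChonkVerifierPublicInputs } from '@aztec/stdlib/rollup';

// tx hash (as string) -> pending proof, so proofs kicked off before tx
// processing can be awaited instead of being enqueued a second time.
type PendingChonkVerifierProof = Promise<
  PublicInputsAndRecursiveProof<PublicChonkVerifierPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
>;
const cachedChonkVerifierProofs = new Map<string, PendingChonkVerifierProof>();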
--- package/src/orchestrator/orchestrator.ts
+++ package/src/orchestrator/orchestrator.ts
@@ -1,4 +1,4 @@
-import { BatchedBlob,
+import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
 import {
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
   L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
@@ -34,8 +34,8 @@ import {
   CheckpointConstantData,
   CheckpointRootSingleBlockRollupPrivateInputs,
   PrivateTxBaseRollupPrivateInputs,
-
-
+  PublicChonkVerifierPrivateInputs,
+  PublicChonkVerifierPublicInputs,
   RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
@@ -57,7 +57,7 @@ import {
   buildBlockHeaderFromTxs,
   buildHeaderFromCircuitOutputs,
   getLastSiblingPath,
-
+  getPublicChonkVerifierPrivateInputsFromTx,
   getRootTreeSiblingPath,
   getSubtreeSiblingPath,
   getTreeSnapshot,
@@ -230,7 +230,7 @@ export class ProvingOrchestrator implements EpochProver {
    const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
    const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
 
-    const blockProvingState = checkpointProvingState.startNewBlock(
+    const blockProvingState = await checkpointProvingState.startNewBlock(
      blockNumber,
      timestamp,
      totalNumTxs,
@@ -327,7 +327,7 @@ export class ProvingOrchestrator implements EpochProver {
    const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
    const txIndex = provingState.addNewTx(txProvingState);
    if (txProvingState.requireAvmProof) {
-      this.
+      this.getOrEnqueueChonkVerifier(provingState, txIndex);
      logger.debug(`Enqueueing public VM for tx ${txIndex}`);
      this.enqueueVM(provingState, txIndex);
    } else {
@@ -350,27 +350,30 @@ export class ProvingOrchestrator implements EpochProver {
  }
 
  /**
-   * Kickstarts
-   * Note that if the
+   * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
+   * Note that if the chonk verifier circuits are not started this way, they will be started nontheless after processing.
   */
-  @trackSpan('ProvingOrchestrator.
-  public
+  @trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
+  public startChonkVerifierCircuits(txs: Tx[]) {
    if (!this.provingState?.verifyState()) {
-      throw new Error(`Empty epoch proving state. call startNewEpoch before starting
+      throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
    }
    const publicTxs = txs.filter(tx => tx.data.forPublic);
    for (const tx of publicTxs) {
      const txHash = tx.getTxHash().toString();
-      const privateInputs =
+      const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
      const tubeProof =
        promiseWithResolvers<
-          PublicInputsAndRecursiveProof<
+          PublicInputsAndRecursiveProof<
+            PublicChonkVerifierPublicInputs,
+            typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+          >
        >();
-      logger.debug(`Starting
-      this.
+      logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
+      this.doEnqueueChonkVerifier(txHash, privateInputs, proof => {
        tubeProof.resolve(proof);
      });
-      this.provingState.
+      this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
    }
    return Promise.resolve();
  }
@@ -643,7 +646,7 @@ export class ProvingOrchestrator implements EpochProver {
    db: MerkleTreeWriteOperations,
  ): Promise<[BaseRollupHints, TreeSnapshots]> {
    // We build the base rollup inputs using a mock proof and verification key.
-    // These will be overwritten later once we have proven the
+    // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
    const [ms, hints] = await elapsed(
      insertSideEffectsAndBuildBaseRollupHints(
        tx,
@@ -720,11 +723,11 @@ export class ProvingOrchestrator implements EpochProver {
    );
  }
 
-  // Enqueues the public
+  // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
  // Once completed, will enqueue the the public tx base rollup.
-  private
+  private getOrEnqueueChonkVerifier(provingState: BlockProvingState, txIndex: number) {
    if (!provingState.verifyState()) {
-      logger.debug('Not running
+      logger.debug('Not running chonk verifier circuit, state invalid');
      return;
    }
 
@@ -732,34 +735,40 @@ export class ProvingOrchestrator implements EpochProver {
    const txHash = txProvingState.processedTx.hash.toString();
    NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
    const handleResult = (
-      result: PublicInputsAndRecursiveProof<
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
    ) => {
-      logger.debug(`Got
-      txProvingState.
-      this.provingState?.
+      logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, { txHash });
+      txProvingState.setPublicChonkVerifierProof(result);
+      this.provingState?.cachedChonkVerifierProofs.delete(txHash);
      this.checkAndEnqueueBaseRollup(provingState, txIndex);
    };
 
-    if (this.provingState?.
-      logger.debug(`
-      void this.provingState!.
+    if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
+      logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, { txHash });
+      void this.provingState!.cachedChonkVerifierProofs.get(txHash)!.then(handleResult);
      return;
    }
 
-    logger.debug(`Enqueuing
-    this.
+    logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
+    this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
  }
 
-  private
+  private doEnqueueChonkVerifier(
    txHash: string,
-    inputs:
+    inputs: PublicChonkVerifierPrivateInputs,
    handler: (
-      result: PublicInputsAndRecursiveProof<
+      result: PublicInputsAndRecursiveProof<
+        PublicChonkVerifierPublicInputs,
+        typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
+      >,
    ) => void,
    provingState: EpochProvingState | BlockProvingState = this.provingState!,
  ) {
    if (!provingState.verifyState()) {
-      logger.debug('Not running
+      logger.debug('Not running chonk verifier circuit, state invalid');
      return;
    }
 
@@ -767,12 +776,12 @@ export class ProvingOrchestrator implements EpochProver {
      provingState,
      wrapCallbackInSpan(
        this.tracer,
-        'ProvingOrchestrator.prover.
+        'ProvingOrchestrator.prover.getPublicChonkVerifierProof',
        {
          [Attributes.TX_HASH]: txHash,
-          [Attributes.PROTOCOL_CIRCUIT_NAME]: '
+          [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public' satisfies CircuitName,
        },
-        signal => this.prover.
+        signal => this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber),
      ),
      handler,
    );
@@ -988,6 +997,8 @@ export class ProvingOrchestrator implements EpochProver {
 
    logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
 
+    const inputs = provingState.getCheckpointRootRollupInputs();
+
    this.deferredProving(
      provingState,
      wrapCallbackInSpan(
@@ -996,8 +1007,7 @@ export class ProvingOrchestrator implements EpochProver {
        {
          [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
        },
-
-          const inputs = await provingState.getCheckpointRootRollupInputs();
+        signal => {
          if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
            return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
          } else {
@@ -1006,9 +1016,7 @@ export class ProvingOrchestrator implements EpochProver {
        },
      ),
      result => {
-        const computedEndBlobAccumulatorState =
-          provingState.getEndBlobAccumulator()!,
-        );
+        const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator()!.toBlobAccumulator();
        const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
        if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
          logger.error(
@@ -1256,7 +1264,7 @@ export class ProvingOrchestrator implements EpochProver {
      return;
    }
 
-    // We must have completed all proving (
+    // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
    logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
 
    this.enqueueBaseRollup(provingState, txIndex);