@aztec/prover-client 3.0.0-nightly.20251026 → 3.0.0-nightly.20251031

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dest/block-factory/light.js +1 -1
  2. package/dest/config.js +1 -1
  3. package/dest/mocks/fixtures.js +1 -1
  4. package/dest/mocks/test_context.d.ts.map +1 -1
  5. package/dest/mocks/test_context.js +8 -9
  6. package/dest/orchestrator/block-building-helpers.d.ts +9 -11
  7. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  8. package/dest/orchestrator/block-building-helpers.js +35 -45
  9. package/dest/orchestrator/block-proving-state.d.ts +1 -1
  10. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  11. package/dest/orchestrator/block-proving-state.js +2 -3
  12. package/dest/orchestrator/checkpoint-proving-state.d.ts +3 -2
  13. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
  14. package/dest/orchestrator/checkpoint-proving-state.js +12 -9
  15. package/dest/orchestrator/epoch-proving-state.d.ts +2 -2
  16. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  17. package/dest/orchestrator/epoch-proving-state.js +4 -4
  18. package/dest/orchestrator/orchestrator.d.ts +5 -5
  19. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  20. package/dest/orchestrator/orchestrator.js +34 -35
  21. package/dest/orchestrator/tx-proving-state.d.ts +4 -4
  22. package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
  23. package/dest/orchestrator/tx-proving-state.js +12 -12
  24. package/dest/prover-client/server-epoch-prover.d.ts +1 -1
  25. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  26. package/dest/prover-client/server-epoch-prover.js +2 -2
  27. package/dest/proving_broker/broker_prover_facade.d.ts +2 -2
  28. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  29. package/dest/proving_broker/broker_prover_facade.js +2 -2
  30. package/dest/proving_broker/proving_broker.js +2 -2
  31. package/dest/proving_broker/proving_job_controller.js +2 -2
  32. package/dest/test/mock_prover.d.ts +2 -2
  33. package/dest/test/mock_prover.d.ts.map +1 -1
  34. package/dest/test/mock_prover.js +3 -3
  35. package/package.json +15 -15
  36. package/src/block-factory/light.ts +1 -1
  37. package/src/config.ts +1 -1
  38. package/src/mocks/fixtures.ts +1 -1
  39. package/src/mocks/test_context.ts +8 -9
  40. package/src/orchestrator/block-building-helpers.ts +43 -52
  41. package/src/orchestrator/block-proving-state.ts +2 -3
  42. package/src/orchestrator/checkpoint-proving-state.ts +14 -10
  43. package/src/orchestrator/epoch-proving-state.ts +7 -5
  44. package/src/orchestrator/orchestrator.ts +50 -42
  45. package/src/orchestrator/tx-proving-state.ts +20 -16
  46. package/src/prover-client/server-epoch-prover.ts +2 -2
  47. package/src/proving_broker/broker_prover_facade.ts +9 -7
  48. package/src/proving_broker/proving_broker.ts +2 -2
  49. package/src/proving_broker/proving_job_controller.ts +2 -2
  50. package/src/test/mock_prover.ts +9 -7
@@ -1,4 +1,5 @@
1
1
  import type { BBProverConfig } from '@aztec/bb-prover';
2
+ import { TestCircuitProver } from '@aztec/bb-prover';
2
3
  import { SpongeBlob } from '@aztec/blob-lib';
3
4
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
4
5
  import { padArrayEnd, times, timesParallel } from '@aztec/foundation/collection';
@@ -13,7 +14,8 @@ import { SimpleContractDataSource } from '@aztec/simulator/public/fixtures';
13
14
  import { PublicProcessorFactory } from '@aztec/simulator/server';
14
15
  import { PublicDataWrite } from '@aztec/stdlib/avm';
15
16
  import { AztecAddress } from '@aztec/stdlib/aztec-address';
16
- import { EthAddress, getBlockBlobFields } from '@aztec/stdlib/block';
17
+ import { EthAddress } from '@aztec/stdlib/block';
18
+ import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
17
19
  import type { ServerCircuitProver } from '@aztec/stdlib/interfaces/server';
18
20
  import type { CheckpointConstantData } from '@aztec/stdlib/rollup';
19
21
  import { makeBloatedProcessedTx } from '@aztec/stdlib/testing';
@@ -24,9 +26,6 @@ import { NativeWorldStateService } from '@aztec/world-state/native';
24
26
 
25
27
  import { promises as fs } from 'fs';
26
28
 
27
- // TODO(#12613) This means of sharing test code is not ideal.
28
- // eslint-disable-next-line import/no-relative-packages
29
- import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js';
30
29
  import { buildBlockWithCleanDB } from '../block-factory/light.js';
31
30
  import { getTreeSnapshot } from '../orchestrator/block-building-helpers.js';
32
31
  import type { BlockProvingState } from '../orchestrator/block-proving-state.js';
@@ -284,9 +283,8 @@ export class TestContext {
284
283
  );
285
284
  });
286
285
 
287
- const blockBlobFields = blockTxs.map(txs => getBlockBlobFields(txs.map(tx => tx.txEffect)));
288
- const totalNumBlobFields = blockBlobFields.reduce((acc, curr) => acc + curr.length, 0);
289
- const spongeBlobState = SpongeBlob.init(totalNumBlobFields);
286
+ const blobFields = getCheckpointBlobFields(blockTxs.map(txs => txs.map(tx => tx.txEffect)));
287
+ const spongeBlobState = await SpongeBlob.init(blobFields.length);
290
288
 
291
289
  const blocks: { header: BlockHeader; txs: ProcessedTx[] }[] = [];
292
290
  for (let i = 0; i < numBlocks; i++) {
@@ -306,12 +304,13 @@ export class TestContext {
306
304
 
307
305
  await this.worldState.handleL2BlockAndMessages(block, blockMsgs, isFirstBlock);
308
306
 
309
- await spongeBlobState.absorb(blockBlobFields[i]);
307
+ const blockBlobFields = block.body.toBlobFields();
308
+ await spongeBlobState.absorb(blockBlobFields);
310
309
 
311
310
  blocks.push({ header, txs });
312
311
  }
313
312
 
314
- return { blocks, l1ToL2Messages, blobFields: blockBlobFields.flat() };
313
+ return { blocks, l1ToL2Messages, blobFields };
315
314
  }
316
315
 
317
316
  public async processPublicFunctions(
@@ -1,7 +1,14 @@
1
- import { BatchedBlob, BatchedBlobAccumulator, Blob, SpongeBlob } from '@aztec/blob-lib';
1
+ import {
2
+ BatchedBlob,
3
+ BatchedBlobAccumulator,
4
+ SpongeBlob,
5
+ computeBlobsHashFromBlobs,
6
+ getBlobCommitmentsFromBlobs,
7
+ getBlobsPerL1Block,
8
+ } from '@aztec/blob-lib';
2
9
  import {
3
10
  ARCHIVE_HEIGHT,
4
- CIVC_PROOF_LENGTH,
11
+ CHONK_PROOF_LENGTH,
5
12
  MAX_CONTRACT_CLASS_LOGS_PER_TX,
6
13
  MAX_NOTE_HASHES_PER_TX,
7
14
  MAX_NULLIFIERS_PER_TX,
@@ -15,8 +22,8 @@ import {
15
22
  } from '@aztec/constants';
16
23
  import { makeTuple } from '@aztec/foundation/array';
17
24
  import { padArrayEnd } from '@aztec/foundation/collection';
18
- import { sha256ToField, sha256Trunc } from '@aztec/foundation/crypto';
19
- import { BLS12Point, Fr } from '@aztec/foundation/fields';
25
+ import { sha256Trunc } from '@aztec/foundation/crypto';
26
+ import { Fr } from '@aztec/foundation/fields';
20
27
  import { type Bufferable, type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
21
28
  import {
22
29
  MembershipWitness,
@@ -27,6 +34,7 @@ import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
27
34
  import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
28
35
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
29
36
  import { Body, L2BlockHeader, getBlockBlobFields } from '@aztec/stdlib/block';
37
+ import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
30
38
  import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
31
39
  import { ContractClassLogFields } from '@aztec/stdlib/logs';
32
40
  import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
@@ -35,7 +43,7 @@ import {
35
43
  BlockRollupPublicInputs,
36
44
  PrivateBaseRollupHints,
37
45
  PublicBaseRollupHints,
38
- PublicTubePrivateInputs,
46
+ PublicChonkVerifierPrivateInputs,
39
47
  TreeSnapshotDiffHints,
40
48
  } from '@aztec/stdlib/rollup';
41
49
  import {
@@ -209,60 +217,53 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
209
217
  },
210
218
  );
211
219
 
212
- export function getCivcProofFromTx(tx: Tx | ProcessedTx) {
213
- const proofFields = tx.clientIvcProof.proof;
214
- const numPublicInputs = proofFields.length - CIVC_PROOF_LENGTH;
215
- const binaryProof = new Proof(Buffer.concat(proofFields.map(field => field.toBuffer())), numPublicInputs);
216
- const proofFieldsWithoutPublicInputs = proofFields.slice(numPublicInputs);
217
- return new RecursiveProof(proofFieldsWithoutPublicInputs, binaryProof, true, CIVC_PROOF_LENGTH);
220
+ export function getChonkProofFromTx(tx: Tx | ProcessedTx) {
221
+ const publicInputs = tx.data.publicInputs().toFields();
222
+
223
+ const binaryProof = new Proof(
224
+ Buffer.concat(tx.chonkProof.attachPublicInputs(publicInputs).fieldsWithPublicInputs.map(field => field.toBuffer())),
225
+ publicInputs.length,
226
+ );
227
+ return new RecursiveProof(tx.chonkProof.fields, binaryProof, true, CHONK_PROOF_LENGTH);
218
228
  }
219
229
 
220
- export function getPublicTubePrivateInputsFromTx(tx: Tx | ProcessedTx, proverId: Fr) {
230
+ export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx, proverId: Fr) {
221
231
  const proofData = new ProofData(
222
232
  tx.data.toPrivateToPublicKernelCircuitPublicInputs(),
223
- getCivcProofFromTx(tx),
233
+ getChonkProofFromTx(tx),
224
234
  getVkData('HidingKernelToPublic'),
225
235
  );
226
- return new PublicTubePrivateInputs(proofData, proverId);
236
+ return new PublicChonkVerifierPrivateInputs(proofData, proverId);
227
237
  }
228
238
 
229
239
  // Build "hints" as the private inputs for the checkpoint root rollup circuit.
230
240
  // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
231
- // The `blobsHash` will be validated on L1 against the blob fields.
232
- export const buildBlobHints = runInSpan(
233
- 'BlockBuilderHelpers',
234
- 'buildBlobHints',
235
- async (_span: Span, blobFields: Fr[]) => {
236
- const blobs = await Blob.getBlobsPerBlock(blobFields);
237
- // TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
238
- // - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
239
- // - blobCommitmentsHash := sha256( ...sha256(sha256(C_0), C_1) ... C_n) = iteratively calculated hash of all blob commitments in an epoch with n+1 blobs (see calculateBlobCommitmentsHash()),
240
- // exists so we can validate injected commitments to the rollup circuits correspond to the correct real blobs.
241
- // We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
242
- const blobCommitments = blobs.map(b => BLS12Point.decompress(b.commitment));
243
- const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
244
- return { blobCommitments, blobs, blobsHash };
245
- },
246
- );
241
+ // The `blobsHash` will be validated on L1 against the submitted blob data.
242
+ export const buildBlobHints = (blobFields: Fr[]) => {
243
+ const blobs = getBlobsPerL1Block(blobFields);
244
+ const blobCommitments = getBlobCommitmentsFromBlobs(blobs);
245
+ const blobsHash = computeBlobsHashFromBlobs(blobs);
246
+ return { blobCommitments, blobs, blobsHash };
247
+ };
247
248
 
248
- // Build the data required to prove the txs in an epoch. Currently only used in tests.
249
+ // Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
249
250
  export const buildBlobDataFromTxs = async (txsPerCheckpoint: ProcessedTx[][]) => {
250
- const blobFields = txsPerCheckpoint.map(txs => getBlockBlobFields(txs.map(tx => tx.txEffect)));
251
+ const blobFields = txsPerCheckpoint.map(txs => getCheckpointBlobFields([txs.map(tx => tx.txEffect)]));
251
252
  const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
252
253
  return { blobFieldsLengths: blobFields.map(fields => fields.length), finalBlobChallenges };
253
254
  };
254
255
 
255
256
  export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
256
- const blobs = await Promise.all(blobFieldsPerCheckpoint.map(blobFields => Blob.getBlobsPerBlock(blobFields)));
257
- return await BatchedBlob.precomputeBatchedBlobChallenges(blobs.flat());
257
+ const blobs = blobFieldsPerCheckpoint.map(blobFields => getBlobsPerL1Block(blobFields));
258
+ return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
258
259
  };
259
260
 
260
261
  export const accumulateBlobs = runInSpan(
261
262
  'BlockBuilderHelpers',
262
263
  'accumulateBlobs',
263
264
  async (_span: Span, blobFields: Fr[], startBlobAccumulator: BatchedBlobAccumulator) => {
264
- const blobs = await Blob.getBlobsPerBlock(blobFields);
265
- const endBlobAccumulator = startBlobAccumulator.accumulateBlobs(blobs);
265
+ const blobs = getBlobsPerL1Block(blobFields);
266
+ const endBlobAccumulator = await startBlobAccumulator.accumulateBlobs(blobs);
266
267
  return endBlobAccumulator;
267
268
  },
268
269
  );
@@ -326,16 +327,18 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
326
327
  const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeCompressedUnbalancedMerkleTreeRoot(txOutHashes));
327
328
 
328
329
  const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
329
- const blobFields = body.toBlobFields();
330
- const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobsPerBlock(blobFields));
330
+ const blockBlobFields = body.toBlobFields();
331
+ // TODO(#17027): This only works when there's one block per checkpoint.
332
+ const blobFields = [new Fr(blockBlobFields.length + 1)].concat(blockBlobFields);
333
+ const blobsHash = computeBlobsHashFromBlobs(getBlobsPerL1Block(blobFields));
331
334
 
332
335
  const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
333
336
 
334
337
  const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
335
338
  const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
336
339
 
337
- const endSpongeBlob = startSpongeBlob?.clone() ?? SpongeBlob.init(blobFields.length);
338
- await endSpongeBlob.absorb(blobFields);
340
+ const endSpongeBlob = startSpongeBlob?.clone() ?? (await SpongeBlob.init(blobFields.length));
341
+ await endSpongeBlob.absorb(blockBlobFields);
339
342
  const spongeBlobHash = await endSpongeBlob.squeeze();
340
343
 
341
344
  const header = new L2BlockHeader(
@@ -397,18 +400,6 @@ export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr
397
400
  return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
398
401
  }
399
402
 
400
- export function getBlobsHashFromBlobs(inputs: Blob[]): Fr {
401
- return sha256ToField(inputs.map(b => b.getEthVersionedBlobHash()));
402
- }
403
-
404
- // Note: tested against the constant values in block_root/empty_block_root_rollup_inputs.nr, set by block_building_helpers.test.ts.
405
- // Having this separate fn hopefully makes it clear how we treat empty blocks and their blobs, and won't break if we decide to change how
406
- // getBlobsPerBlock() works on empty input.
407
- export async function getEmptyBlockBlobsHash(): Promise<Fr> {
408
- const blobHash = (await Blob.getBlobsPerBlock([])).map(b => b.getEthVersionedBlobHash());
409
- return sha256ToField(blobHash);
410
- }
411
-
412
403
  export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
413
404
  const { size } = await db.getTreeInfo(treeId);
414
405
  const path = await db.getSiblingPath(treeId, size - 1n);
@@ -9,7 +9,6 @@ import {
9
9
  import { Fr } from '@aztec/foundation/fields';
10
10
  import { type Tuple, assertLength } from '@aztec/foundation/serialize';
11
11
  import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
12
- import { getBlockBlobFields } from '@aztec/stdlib/block';
13
12
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
14
13
  import { type ParityPublicInputs, ParityRootPrivateInputs } from '@aztec/stdlib/parity';
15
14
  import type { RollupHonkProofData } from '@aztec/stdlib/proofs';
@@ -239,8 +238,8 @@ export class BlockProvingState {
239
238
  return this.endSpongeBlob;
240
239
  }
241
240
 
242
- public getBlockBlobFields() {
243
- return getBlockBlobFields(this.txs.map(t => t.processedTx.txEffect));
241
+ public getTxEffects() {
242
+ return this.txs.map(t => t.processedTx.txEffect);
244
243
  }
245
244
 
246
245
  public getParentLocation(location: TreeNodeLocation) {
@@ -1,4 +1,4 @@
1
- import { BatchedBlobAccumulator, BlobAccumulator, type FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
1
+ import { BatchedBlobAccumulator, type FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
2
2
  import {
3
3
  type ARCHIVE_HEIGHT,
4
4
  BLOBS_PER_BLOCK,
@@ -11,6 +11,7 @@ import { padArrayEnd } from '@aztec/foundation/collection';
11
11
  import { BLS12Point, Fr } from '@aztec/foundation/fields';
12
12
  import type { Tuple } from '@aztec/foundation/serialize';
13
13
  import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
14
+ import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
14
15
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
15
16
  import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
16
17
  import {
@@ -41,6 +42,7 @@ export class CheckpointProvingState {
41
42
  private blocks: (BlockProvingState | undefined)[] = [];
42
43
  private startBlobAccumulator: BatchedBlobAccumulator | undefined;
43
44
  private endBlobAccumulator: BatchedBlobAccumulator | undefined;
45
+ private blobFields: Fr[] | undefined;
44
46
  private error: string | undefined;
45
47
  public readonly firstBlockNumber: number;
46
48
 
@@ -76,13 +78,13 @@ export class CheckpointProvingState {
76
78
  return this.parentEpoch.epochNumber;
77
79
  }
78
80
 
79
- public startNewBlock(
81
+ public async startNewBlock(
80
82
  blockNumber: number,
81
83
  timestamp: UInt64,
82
84
  totalNumTxs: number,
83
85
  lastArchiveTreeSnapshot: AppendOnlyTreeSnapshot,
84
86
  lastArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
85
- ): BlockProvingState {
87
+ ): Promise<BlockProvingState> {
86
88
  const index = blockNumber - this.firstBlockNumber;
87
89
  if (index >= this.totalNumBlocks) {
88
90
  throw new Error(`Unable to start a new block at index ${index}. Expected at most ${this.totalNumBlocks} blocks.`);
@@ -97,7 +99,7 @@ export class CheckpointProvingState {
97
99
  index === 0 ? this.lastL1ToL2MessageSubtreeRootSiblingPath : this.newL1ToL2MessageSubtreeRootSiblingPath;
98
100
 
99
101
  const startSpongeBlob =
100
- index === 0 ? SpongeBlob.init(this.totalNumBlobFields) : this.blocks[index - 1]?.getEndSpongeBlob();
102
+ index === 0 ? await SpongeBlob.init(this.totalNumBlobFields) : this.blocks[index - 1]?.getEndSpongeBlob();
101
103
  if (!startSpongeBlob) {
102
104
  throw new Error(
103
105
  'Cannot start a new block before the trees have progressed from the tx effects in the previous block.',
@@ -194,8 +196,8 @@ export class CheckpointProvingState {
194
196
  return;
195
197
  }
196
198
 
197
- const blobFields = this.blocks.flatMap(b => b!.getBlockBlobFields());
198
- this.endBlobAccumulator = await accumulateBlobs(blobFields, startBlobAccumulator);
199
+ this.blobFields = getCheckpointBlobFields(this.blocks.map(b => b!.getTxEffects()));
200
+ this.endBlobAccumulator = await accumulateBlobs(this.blobFields, startBlobAccumulator);
199
201
  this.startBlobAccumulator = startBlobAccumulator;
200
202
 
201
203
  this.onBlobAccumulatorSet(this);
@@ -224,7 +226,7 @@ export class CheckpointProvingState {
224
226
  return this.totalNumBlocks === 1 ? 'rollup-checkpoint-root-single-block' : 'rollup-checkpoint-root';
225
227
  }
226
228
 
227
- public async getCheckpointRootRollupInputs() {
229
+ public getCheckpointRootRollupInputs() {
228
230
  const proofs = this.#getChildProofsForRoot();
229
231
  const nonEmptyProofs = proofs.filter(p => !!p);
230
232
  if (proofs.length !== nonEmptyProofs.length) {
@@ -234,13 +236,15 @@ export class CheckpointProvingState {
234
236
  throw new Error('Start blob accumulator is not set.');
235
237
  }
236
238
 
237
- const blobFields = this.blocks.flatMap(b => b!.getBlockBlobFields());
238
- const { blobCommitments, blobsHash } = await buildBlobHints(blobFields);
239
+ // `blobFields` must've been set if `startBlobAccumulator` is set (in `accumulateBlobs`).
240
+ const blobFields = this.blobFields!;
241
+
242
+ const { blobCommitments, blobsHash } = buildBlobHints(blobFields);
239
243
 
240
244
  const hints = CheckpointRootRollupHints.from({
241
245
  previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
242
246
  previousArchiveSiblingPath: this.lastArchiveSiblingPath,
243
- startBlobAccumulator: BlobAccumulator.fromBatchedBlobAccumulator(this.startBlobAccumulator),
247
+ startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
244
248
  finalBlobChallenges: this.finalBlobBatchingChallenges,
245
249
  blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
246
250
  blobCommitments: padArrayEnd(blobCommitments, BLS12Point.ZERO, BLOBS_PER_BLOCK),
@@ -15,7 +15,7 @@ import {
15
15
  CheckpointMergeRollupPrivateInputs,
16
16
  CheckpointPaddingRollupPrivateInputs,
17
17
  CheckpointRollupPublicInputs,
18
- PublicTubePublicInputs,
18
+ PublicChonkVerifierPublicInputs,
19
19
  RootRollupPrivateInputs,
20
20
  type RootRollupPublicInputs,
21
21
  } from '@aztec/stdlib/rollup';
@@ -57,10 +57,12 @@ export class EpochProvingState {
57
57
  private finalBatchedBlob: BatchedBlob | undefined;
58
58
  private provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_CREATED;
59
59
 
60
- // Map from tx hash to tube proof promise. Used when kickstarting tube proofs before tx processing.
61
- public readonly cachedTubeProofs = new Map<
60
+ // Map from tx hash to chonk verifier proof promise. Used when kickstarting chonk verifier proofs before tx processing.
61
+ public readonly cachedChonkVerifierProofs = new Map<
62
62
  string,
63
- Promise<PublicInputsAndRecursiveProof<PublicTubePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>>
63
+ Promise<
64
+ PublicInputsAndRecursiveProof<PublicChonkVerifierPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
65
+ >
64
66
  >();
65
67
 
66
68
  constructor(
@@ -236,7 +238,7 @@ export class EpochProvingState {
236
238
  if (!this.endBlobAccumulator) {
237
239
  throw new Error('End blob accumulator not ready.');
238
240
  }
239
- this.finalBatchedBlob = await this.endBlobAccumulator.finalize();
241
+ this.finalBatchedBlob = await this.endBlobAccumulator.finalize(true /* verifyProof */);
240
242
  }
241
243
 
242
244
  public getParentLocation(location: TreeNodeLocation) {
@@ -1,4 +1,4 @@
1
- import { BatchedBlob, BlobAccumulator, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
1
+ import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
2
2
  import {
3
3
  L1_TO_L2_MSG_SUBTREE_HEIGHT,
4
4
  L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
@@ -34,8 +34,8 @@ import {
34
34
  CheckpointConstantData,
35
35
  CheckpointRootSingleBlockRollupPrivateInputs,
36
36
  PrivateTxBaseRollupPrivateInputs,
37
- PublicTubePrivateInputs,
38
- PublicTubePublicInputs,
37
+ PublicChonkVerifierPrivateInputs,
38
+ PublicChonkVerifierPublicInputs,
39
39
  RootRollupPublicInputs,
40
40
  } from '@aztec/stdlib/rollup';
41
41
  import type { CircuitName } from '@aztec/stdlib/stats';
@@ -57,7 +57,7 @@ import {
57
57
  buildBlockHeaderFromTxs,
58
58
  buildHeaderFromCircuitOutputs,
59
59
  getLastSiblingPath,
60
- getPublicTubePrivateInputsFromTx,
60
+ getPublicChonkVerifierPrivateInputsFromTx,
61
61
  getRootTreeSiblingPath,
62
62
  getSubtreeSiblingPath,
63
63
  getTreeSnapshot,
@@ -230,7 +230,7 @@ export class ProvingOrchestrator implements EpochProver {
230
230
  const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
231
231
  const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
232
232
 
233
- const blockProvingState = checkpointProvingState.startNewBlock(
233
+ const blockProvingState = await checkpointProvingState.startNewBlock(
234
234
  blockNumber,
235
235
  timestamp,
236
236
  totalNumTxs,
@@ -327,7 +327,7 @@ export class ProvingOrchestrator implements EpochProver {
327
327
  const txProvingState = new TxProvingState(tx, hints, treeSnapshots, this.proverId.toField());
328
328
  const txIndex = provingState.addNewTx(txProvingState);
329
329
  if (txProvingState.requireAvmProof) {
330
- this.getOrEnqueueTube(provingState, txIndex);
330
+ this.getOrEnqueueChonkVerifier(provingState, txIndex);
331
331
  logger.debug(`Enqueueing public VM for tx ${txIndex}`);
332
332
  this.enqueueVM(provingState, txIndex);
333
333
  } else {
@@ -350,27 +350,30 @@ export class ProvingOrchestrator implements EpochProver {
350
350
  }
351
351
 
352
352
  /**
353
- * Kickstarts tube circuits for the specified txs. These will be used during epoch proving.
354
- * Note that if the tube circuits are not started this way, they will be started nontheless after processing.
353
+ * Kickstarts chonk verifier circuits for the specified txs. These will be used during epoch proving.
354
+ * Note that if the chonk verifier circuits are not started this way, they will be started nonetheless after processing.
355
355
  */
356
- @trackSpan('ProvingOrchestrator.startTubeCircuits')
357
- public startTubeCircuits(txs: Tx[]) {
356
+ @trackSpan('ProvingOrchestrator.startChonkVerifierCircuits')
357
+ public startChonkVerifierCircuits(txs: Tx[]) {
358
358
  if (!this.provingState?.verifyState()) {
359
- throw new Error(`Empty epoch proving state. call startNewEpoch before starting tube circuits.`);
359
+ throw new Error(`Empty epoch proving state. call startNewEpoch before starting chonk verifier circuits.`);
360
360
  }
361
361
  const publicTxs = txs.filter(tx => tx.data.forPublic);
362
362
  for (const tx of publicTxs) {
363
363
  const txHash = tx.getTxHash().toString();
364
- const privateInputs = getPublicTubePrivateInputsFromTx(tx, this.proverId.toField());
364
+ const privateInputs = getPublicChonkVerifierPrivateInputsFromTx(tx, this.proverId.toField());
365
365
  const tubeProof =
366
366
  promiseWithResolvers<
367
- PublicInputsAndRecursiveProof<PublicTubePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
367
+ PublicInputsAndRecursiveProof<
368
+ PublicChonkVerifierPublicInputs,
369
+ typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
370
+ >
368
371
  >();
369
- logger.debug(`Starting tube circuit for tx ${txHash}`);
370
- this.doEnqueueTube(txHash, privateInputs, proof => {
372
+ logger.debug(`Starting chonk verifier circuit for tx ${txHash}`);
373
+ this.doEnqueueChonkVerifier(txHash, privateInputs, proof => {
371
374
  tubeProof.resolve(proof);
372
375
  });
373
- this.provingState.cachedTubeProofs.set(txHash, tubeProof.promise);
376
+ this.provingState.cachedChonkVerifierProofs.set(txHash, tubeProof.promise);
374
377
  }
375
378
  return Promise.resolve();
376
379
  }
@@ -643,7 +646,7 @@ export class ProvingOrchestrator implements EpochProver {
643
646
  db: MerkleTreeWriteOperations,
644
647
  ): Promise<[BaseRollupHints, TreeSnapshots]> {
645
648
  // We build the base rollup inputs using a mock proof and verification key.
646
- // These will be overwritten later once we have proven the tube circuit and any public kernels
649
+ // These will be overwritten later once we have proven the chonk verifier circuit and any public kernels
647
650
  const [ms, hints] = await elapsed(
648
651
  insertSideEffectsAndBuildBaseRollupHints(
649
652
  tx,
@@ -720,11 +723,11 @@ export class ProvingOrchestrator implements EpochProver {
720
723
  );
721
724
  }
722
725
 
723
- // Enqueues the public tube circuit for a given transaction index, or reuses the one already enqueued.
726
+ // Enqueues the public chonk verifier circuit for a given transaction index, or reuses the one already enqueued.
724
727
  // Once completed, will enqueue the public tx base rollup.
725
- private getOrEnqueueTube(provingState: BlockProvingState, txIndex: number) {
728
+ private getOrEnqueueChonkVerifier(provingState: BlockProvingState, txIndex: number) {
726
729
  if (!provingState.verifyState()) {
727
- logger.debug('Not running tube circuit, state invalid');
730
+ logger.debug('Not running chonk verifier circuit, state invalid');
728
731
  return;
729
732
  }
730
733
 
@@ -732,34 +735,40 @@ export class ProvingOrchestrator implements EpochProver {
732
735
  const txHash = txProvingState.processedTx.hash.toString();
733
736
  NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH;
734
737
  const handleResult = (
735
- result: PublicInputsAndRecursiveProof<PublicTubePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>,
738
+ result: PublicInputsAndRecursiveProof<
739
+ PublicChonkVerifierPublicInputs,
740
+ typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
741
+ >,
736
742
  ) => {
737
- logger.debug(`Got tube proof for tx index: ${txIndex}`, { txHash });
738
- txProvingState.setPublicTubeProof(result);
739
- this.provingState?.cachedTubeProofs.delete(txHash);
743
+ logger.debug(`Got chonk verifier proof for tx index: ${txIndex}`, { txHash });
744
+ txProvingState.setPublicChonkVerifierProof(result);
745
+ this.provingState?.cachedChonkVerifierProofs.delete(txHash);
740
746
  this.checkAndEnqueueBaseRollup(provingState, txIndex);
741
747
  };
742
748
 
743
- if (this.provingState?.cachedTubeProofs.has(txHash)) {
744
- logger.debug(`Tube proof already enqueued for tx index: ${txIndex}`, { txHash });
745
- void this.provingState!.cachedTubeProofs.get(txHash)!.then(handleResult);
749
+ if (this.provingState?.cachedChonkVerifierProofs.has(txHash)) {
750
+ logger.debug(`Chonk verifier proof already enqueued for tx index: ${txIndex}`, { txHash });
751
+ void this.provingState!.cachedChonkVerifierProofs.get(txHash)!.then(handleResult);
746
752
  return;
747
753
  }
748
754
 
749
- logger.debug(`Enqueuing tube circuit for tx index: ${txIndex}`);
750
- this.doEnqueueTube(txHash, txProvingState.getPublicTubePrivateInputs(), handleResult);
755
+ logger.debug(`Enqueuing chonk verifier circuit for tx index: ${txIndex}`);
756
+ this.doEnqueueChonkVerifier(txHash, txProvingState.getPublicChonkVerifierPrivateInputs(), handleResult);
751
757
  }
752
758
 
753
- private doEnqueueTube(
759
+ private doEnqueueChonkVerifier(
754
760
  txHash: string,
755
- inputs: PublicTubePrivateInputs,
761
+ inputs: PublicChonkVerifierPrivateInputs,
756
762
  handler: (
757
- result: PublicInputsAndRecursiveProof<PublicTubePublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>,
763
+ result: PublicInputsAndRecursiveProof<
764
+ PublicChonkVerifierPublicInputs,
765
+ typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
766
+ >,
758
767
  ) => void,
759
768
  provingState: EpochProvingState | BlockProvingState = this.provingState!,
760
769
  ) {
761
770
  if (!provingState.verifyState()) {
762
- logger.debug('Not running tube circuit, state invalid');
771
+ logger.debug('Not running chonk verifier circuit, state invalid');
763
772
  return;
764
773
  }
765
774
 
@@ -767,12 +776,12 @@ export class ProvingOrchestrator implements EpochProver {
767
776
  provingState,
768
777
  wrapCallbackInSpan(
769
778
  this.tracer,
770
- 'ProvingOrchestrator.prover.getPublicTubeProof',
779
+ 'ProvingOrchestrator.prover.getPublicChonkVerifierProof',
771
780
  {
772
781
  [Attributes.TX_HASH]: txHash,
773
- [Attributes.PROTOCOL_CIRCUIT_NAME]: 'tube-public' satisfies CircuitName,
782
+ [Attributes.PROTOCOL_CIRCUIT_NAME]: 'chonk-verifier-public' satisfies CircuitName,
774
783
  },
775
- signal => this.prover.getPublicTubeProof(inputs, signal, provingState.epochNumber),
784
+ signal => this.prover.getPublicChonkVerifierProof(inputs, signal, provingState.epochNumber),
776
785
  ),
777
786
  handler,
778
787
  );
@@ -988,6 +997,8 @@ export class ProvingOrchestrator implements EpochProver {
988
997
 
989
998
  logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
990
999
 
1000
+ const inputs = provingState.getCheckpointRootRollupInputs();
1001
+
991
1002
  this.deferredProving(
992
1003
  provingState,
993
1004
  wrapCallbackInSpan(
@@ -996,8 +1007,7 @@ export class ProvingOrchestrator implements EpochProver {
996
1007
  {
997
1008
  [Attributes.PROTOCOL_CIRCUIT_NAME]: rollupType,
998
1009
  },
999
- async signal => {
1000
- const inputs = await provingState.getCheckpointRootRollupInputs();
1010
+ signal => {
1001
1011
  if (inputs instanceof CheckpointRootSingleBlockRollupPrivateInputs) {
1002
1012
  return this.prover.getCheckpointRootSingleBlockRollupProof(inputs, signal, provingState.epochNumber);
1003
1013
  } else {
@@ -1006,9 +1016,7 @@ export class ProvingOrchestrator implements EpochProver {
1006
1016
  },
1007
1017
  ),
1008
1018
  result => {
1009
- const computedEndBlobAccumulatorState = BlobAccumulator.fromBatchedBlobAccumulator(
1010
- provingState.getEndBlobAccumulator()!,
1011
- );
1019
+ const computedEndBlobAccumulatorState = provingState.getEndBlobAccumulator()!.toBlobAccumulator();
1012
1020
  const circuitEndBlobAccumulatorState = result.inputs.endBlobAccumulator;
1013
1021
  if (!circuitEndBlobAccumulatorState.equals(computedEndBlobAccumulatorState)) {
1014
1022
  logger.error(
@@ -1256,7 +1264,7 @@ export class ProvingOrchestrator implements EpochProver {
1256
1264
  return;
1257
1265
  }
1258
1266
 
1259
- // We must have completed all proving (tube proof and (if required) vm proof are generated), we now move to the base rollup.
1267
+ // We must have completed all proving (chonk verifier proof and (if required) vm proof are generated), we now move to the base rollup.
1260
1268
  logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`);
1261
1269
 
1262
1270
  this.enqueueBaseRollup(provingState, txIndex);