@aztec/prover-client 3.0.0-nightly.20250917 → 3.0.0-nightly.20250918

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dest/block-factory/light.d.ts +5 -3
  2. package/dest/block-factory/light.d.ts.map +1 -1
  3. package/dest/block-factory/light.js +16 -9
  4. package/dest/mocks/fixtures.d.ts +3 -1
  5. package/dest/mocks/fixtures.d.ts.map +1 -1
  6. package/dest/mocks/fixtures.js +19 -2
  7. package/dest/mocks/test_context.d.ts +30 -9
  8. package/dest/mocks/test_context.d.ts.map +1 -1
  9. package/dest/mocks/test_context.js +68 -15
  10. package/dest/orchestrator/block-building-helpers.d.ts +16 -14
  11. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  12. package/dest/orchestrator/block-building-helpers.js +69 -66
  13. package/dest/orchestrator/block-proving-state.d.ts +53 -46
  14. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  15. package/dest/orchestrator/block-proving-state.js +209 -172
  16. package/dest/orchestrator/checkpoint-proving-state.d.ts +62 -0
  17. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -0
  18. package/dest/orchestrator/checkpoint-proving-state.js +208 -0
  19. package/dest/orchestrator/epoch-proving-state.d.ts +32 -25
  20. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  21. package/dest/orchestrator/epoch-proving-state.js +132 -81
  22. package/dest/orchestrator/orchestrator.d.ts +25 -24
  23. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  24. package/dest/orchestrator/orchestrator.js +318 -190
  25. package/dest/prover-client/server-epoch-prover.d.ts +8 -7
  26. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  27. package/dest/prover-client/server-epoch-prover.js +7 -7
  28. package/dest/proving_broker/broker_prover_facade.d.ts +12 -7
  29. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  30. package/dest/proving_broker/broker_prover_facade.js +30 -15
  31. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  32. package/dest/proving_broker/proving_broker.js +18 -7
  33. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  34. package/dest/proving_broker/proving_job_controller.js +26 -6
  35. package/dest/test/mock_prover.d.ts +12 -7
  36. package/dest/test/mock_prover.d.ts.map +1 -1
  37. package/dest/test/mock_prover.js +25 -10
  38. package/package.json +15 -15
  39. package/src/block-factory/light.ts +33 -9
  40. package/src/mocks/fixtures.ts +25 -7
  41. package/src/mocks/test_context.ts +113 -21
  42. package/src/orchestrator/block-building-helpers.ts +107 -93
  43. package/src/orchestrator/block-proving-state.ts +225 -212
  44. package/src/orchestrator/checkpoint-proving-state.ts +294 -0
  45. package/src/orchestrator/epoch-proving-state.ts +169 -121
  46. package/src/orchestrator/orchestrator.ts +466 -247
  47. package/src/prover-client/server-epoch-prover.ts +30 -16
  48. package/src/proving_broker/broker_prover_facade.ts +145 -71
  49. package/src/proving_broker/proving_broker.ts +24 -6
  50. package/src/proving_broker/proving_job_controller.ts +26 -6
  51. package/src/test/mock_prover.ts +105 -28
@@ -5,10 +5,13 @@ import { EthAddress } from '@aztec/foundation/eth-address';
5
5
  import { Fr } from '@aztec/foundation/fields';
6
6
  import type { Logger } from '@aztec/foundation/log';
7
7
  import { fileURLToPath } from '@aztec/foundation/url';
8
+ import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
9
+ import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
8
10
  import { type CircuitSimulator, NativeACVMSimulator, WASMSimulatorWithBlobs } from '@aztec/simulator/server';
9
11
  import { AztecAddress } from '@aztec/stdlib/aztec-address';
10
12
  import { GasFees } from '@aztec/stdlib/gas';
11
13
  import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
14
+ import { CheckpointConstantData } from '@aztec/stdlib/rollup';
12
15
  import { MerkleTreeId } from '@aztec/stdlib/trees';
13
16
  import type { ProcessedTx } from '@aztec/stdlib/tx';
14
17
  import { GlobalVariables } from '@aztec/stdlib/tx';
@@ -103,15 +106,30 @@ export const updateExpectedTreesFromTxs = async (db: MerkleTreeWriteOperations,
103
106
  }
104
107
  };
105
108
 
106
- export const makeGlobals = (blockNumber: number) => {
109
+ export const makeGlobals = (blockNumber: number, slotNumber = blockNumber) => {
110
+ const checkpointConstants = makeCheckpointConstants(slotNumber);
107
111
  return new GlobalVariables(
108
- Fr.ZERO,
109
- Fr.ZERO,
112
+ checkpointConstants.chainId,
113
+ checkpointConstants.version,
110
114
  blockNumber /** block number */,
111
- new Fr(blockNumber) /** slot number */,
115
+ new Fr(slotNumber) /** slot number */,
112
116
  BigInt(blockNumber) /** block number as pseudo-timestamp for testing */,
113
- EthAddress.ZERO,
114
- AztecAddress.ZERO,
115
- GasFees.empty(),
117
+ checkpointConstants.coinbase,
118
+ checkpointConstants.feeRecipient,
119
+ checkpointConstants.gasFees,
116
120
  );
117
121
  };
122
+
123
+ export const makeCheckpointConstants = (slotNumber: number) => {
124
+ return CheckpointConstantData.from({
125
+ chainId: Fr.ZERO,
126
+ version: Fr.ZERO,
127
+ vkTreeRoot: getVKTreeRoot(),
128
+ protocolContractTreeRoot,
129
+ proverId: Fr.ZERO,
130
+ slotNumber: new Fr(slotNumber),
131
+ coinbase: EthAddress.ZERO,
132
+ feeRecipient: AztecAddress.ZERO,
133
+ gasFees: GasFees.empty(),
134
+ });
135
+ };
@@ -1,4 +1,5 @@
1
1
  import type { BBProverConfig } from '@aztec/bb-prover';
2
+ import { SpongeBlob } from '@aztec/blob-lib';
2
3
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
3
4
  import { padArrayEnd, times, timesParallel } from '@aztec/foundation/collection';
4
5
  import { Fr } from '@aztec/foundation/fields';
@@ -11,10 +12,11 @@ import { SimpleContractDataSource } from '@aztec/simulator/public/fixtures';
11
12
  import { PublicProcessorFactory } from '@aztec/simulator/server';
12
13
  import { PublicDataWrite } from '@aztec/stdlib/avm';
13
14
  import { AztecAddress } from '@aztec/stdlib/aztec-address';
14
- import { EthAddress, type L2Block } from '@aztec/stdlib/block';
15
+ import { EthAddress, getBlockBlobFields } from '@aztec/stdlib/block';
15
16
  import type { ServerCircuitProver } from '@aztec/stdlib/interfaces/server';
17
+ import type { CheckpointConstantData } from '@aztec/stdlib/rollup';
16
18
  import { makeBloatedProcessedTx } from '@aztec/stdlib/testing';
17
- import { type AppendOnlyTreeSnapshot, MerkleTreeId, PublicDataTreeLeaf } from '@aztec/stdlib/trees';
19
+ import { MerkleTreeId, PublicDataTreeLeaf } from '@aztec/stdlib/trees';
18
20
  import { type BlockHeader, type GlobalVariables, type ProcessedTx, TreeSnapshots, type Tx } from '@aztec/stdlib/tx';
19
21
  import type { MerkleTreeAdminDatabase } from '@aztec/world-state';
20
22
  import { NativeWorldStateService } from '@aztec/world-state/native';
@@ -26,10 +28,17 @@ import { promises as fs } from 'fs';
26
28
  import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js';
27
29
  import { buildBlockWithCleanDB } from '../block-factory/light.js';
28
30
  import { getTreeSnapshot } from '../orchestrator/block-building-helpers.js';
31
+ import type { BlockProvingState } from '../orchestrator/block-proving-state.js';
29
32
  import { ProvingOrchestrator } from '../orchestrator/index.js';
30
33
  import { BrokerCircuitProverFacade } from '../proving_broker/broker_prover_facade.js';
31
34
  import { TestBroker } from '../test/mock_prover.js';
32
- import { getEnvironmentConfig, getSimulator, makeGlobals, updateExpectedTreesFromTxs } from './fixtures.js';
35
+ import {
36
+ getEnvironmentConfig,
37
+ getSimulator,
38
+ makeCheckpointConstants,
39
+ makeGlobals,
40
+ updateExpectedTreesFromTxs,
41
+ } from './fixtures.js';
33
42
 
34
43
  export class TestContext {
35
44
  private headers: Map<number, BlockHeader> = new Map();
@@ -37,6 +46,7 @@ export class TestContext {
37
46
 
38
47
  constructor(
39
48
  public worldState: MerkleTreeAdminDatabase,
49
+ public firstCheckpointNumber: Fr,
40
50
  public globalVariables: GlobalVariables,
41
51
  public prover: ServerCircuitProver,
42
52
  public broker: TestBroker,
@@ -55,20 +65,27 @@ export class TestContext {
55
65
  return this.orchestrator;
56
66
  }
57
67
 
68
+ public getCheckpointConstants(checkpointIndex = 0): CheckpointConstantData {
69
+ return makeCheckpointConstants(this.firstCheckpointNumber.toNumber() + checkpointIndex);
70
+ }
71
+
58
72
  static async new(
59
73
  logger: Logger,
60
74
  {
61
75
  proverCount = 4,
62
76
  createProver = async (bbConfig: BBProverConfig) => new TestCircuitProver(await getSimulator(bbConfig, logger)),
77
+ slotNumber = 1,
63
78
  blockNumber = 1,
64
79
  }: {
65
80
  proverCount?: number;
66
81
  createProver?: (bbConfig: BBProverConfig) => Promise<ServerCircuitProver>;
82
+ slotNumber?: number;
67
83
  blockNumber?: number;
68
84
  } = {},
69
85
  ) {
70
86
  const directoriesToCleanup: string[] = [];
71
- const globalVariables = makeGlobals(blockNumber);
87
+ const firstCheckpointNumber = new Fr(slotNumber);
88
+ const globalVariables = makeGlobals(blockNumber, slotNumber);
72
89
 
73
90
  const feePayer = AztecAddress.fromNumber(42222);
74
91
  const initialFeePayerBalance = new Fr(10n ** 20n);
@@ -112,6 +129,7 @@ export class TestContext {
112
129
 
113
130
  return new this(
114
131
  ws,
132
+ firstCheckpointNumber,
115
133
  globalVariables,
116
134
  localProver,
117
135
  broker,
@@ -131,14 +149,10 @@ export class TestContext {
131
149
 
132
150
  public getBlockHeader(blockNumber: 0): BlockHeader;
133
151
  public getBlockHeader(blockNumber: number): BlockHeader | undefined;
134
- public getBlockHeader(blockNumber = 0) {
152
+ public getBlockHeader(blockNumber = 0): BlockHeader | undefined {
135
153
  return blockNumber === 0 ? this.worldState.getCommitted().getInitialHeader() : this.headers.get(blockNumber);
136
154
  }
137
155
 
138
- public setBlockHeader(header: BlockHeader, blockNumber: number) {
139
- this.headers.set(blockNumber, header);
140
- }
141
-
142
156
  public getPreviousBlockHeader(currentBlockNumber = this.blockNumber): BlockHeader {
143
157
  return this.getBlockHeader(currentBlockNumber - 1)!;
144
158
  }
@@ -155,9 +169,9 @@ export class TestContext {
155
169
  }
156
170
  }
157
171
 
158
- private async makeProcessedTx(opts?: Parameters<typeof makeBloatedProcessedTx>[0]): Promise<ProcessedTx> {
172
+ async makeProcessedTx(opts?: Parameters<typeof makeBloatedProcessedTx>[0]): Promise<ProcessedTx> {
159
173
  const blockNum = (opts?.globalVariables ?? this.globalVariables).blockNumber;
160
- const header = this.getBlockHeader(blockNum - 1);
174
+ const header = opts?.header ?? this.getBlockHeader(blockNum - 1);
161
175
  const tx = await makeBloatedProcessedTx({
162
176
  header,
163
177
  vkTreeRoot: getVKTreeRoot(),
@@ -177,11 +191,20 @@ export class TestContext {
177
191
  /** Creates a block with the given number of txs and adds it to world-state */
178
192
  public async makePendingBlock(
179
193
  numTxs: number,
180
- numL1ToL2Messages: number = 0,
181
- blockNumOrGlobals: GlobalVariables | number = this.globalVariables,
182
- makeProcessedTxOpts: (index: number) => Partial<Parameters<typeof makeBloatedProcessedTx>[0]> = () => ({}),
194
+ {
195
+ checkpointIndex = 0,
196
+ numL1ToL2Messages = 0,
197
+ blockNumber = this.blockNumber,
198
+ makeProcessedTxOpts = () => ({}),
199
+ }: {
200
+ checkpointIndex?: number;
201
+ numL1ToL2Messages?: number;
202
+ blockNumber?: number;
203
+ makeProcessedTxOpts?: (index: number) => Partial<Parameters<typeof makeBloatedProcessedTx>[0]>;
204
+ } = {},
183
205
  ) {
184
- const globalVariables = typeof blockNumOrGlobals === 'number' ? makeGlobals(blockNumOrGlobals) : blockNumOrGlobals;
206
+ const slotNumber = this.firstCheckpointNumber.toNumber() + checkpointIndex;
207
+ const globalVariables = makeGlobals(blockNumber, slotNumber);
185
208
  const blockNum = globalVariables.blockNumber;
186
209
  const db = await this.worldState.fork();
187
210
  const l1ToL2Messages = times(numL1ToL2Messages, i => new Fr(blockNum * 100 + i));
@@ -202,11 +225,83 @@ export class TestContext {
202
225
  await this.setTreeRoots(txs);
203
226
 
204
227
  const block = await buildBlockWithCleanDB(txs, globalVariables, l1ToL2Messages, db);
205
- this.headers.set(blockNum, block.header);
228
+ this.headers.set(blockNum, block.getBlockHeader());
206
229
  await this.worldState.handleL2BlockAndMessages(block, l1ToL2Messages);
207
230
  return { block, txs, l1ToL2Messages };
208
231
  }
209
232
 
233
+ public async makePendingBlocksInCheckpoint(
234
+ numBlocks: number,
235
+ {
236
+ checkpointIndex = 0,
237
+ numTxsPerBlock = 1,
238
+ numL1ToL2Messages = 0,
239
+ firstBlockNumber = this.blockNumber + checkpointIndex * numBlocks,
240
+ makeProcessedTxOpts = () => ({}),
241
+ }: {
242
+ checkpointIndex?: number;
243
+ numTxsPerBlock?: number | number[];
244
+ numL1ToL2Messages?: number;
245
+ firstBlockNumber?: number;
246
+ makeProcessedTxOpts?: (index: number) => Partial<Parameters<typeof makeBloatedProcessedTx>[0]>;
247
+ } = {},
248
+ ) {
249
+ const slotNumber = this.firstCheckpointNumber.toNumber() + checkpointIndex;
250
+ const l1ToL2Messages = times(numL1ToL2Messages, i => new Fr(slotNumber * 100 + i));
251
+ const merkleTrees = await this.worldState.fork();
252
+ await merkleTrees.appendLeaves(
253
+ MerkleTreeId.L1_TO_L2_MESSAGE_TREE,
254
+ padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
255
+ );
256
+ const newL1ToL2Snapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, merkleTrees);
257
+
258
+ const blockGlobalVariables = times(numBlocks, i => makeGlobals(firstBlockNumber + i, slotNumber));
259
+ let totalTxs = 0;
260
+ const blockTxs = await timesParallel(numBlocks, blockIndex => {
261
+ const txIndexOffset = totalTxs;
262
+ const numTxs = typeof numTxsPerBlock === 'number' ? numTxsPerBlock : numTxsPerBlock[blockIndex];
263
+ totalTxs += numTxs;
264
+ return timesParallel(numTxs, txIndex =>
265
+ this.makeProcessedTx({
266
+ seed: (txIndexOffset + txIndex + 1) * 321 + (checkpointIndex + 1) * 123456,
267
+ globalVariables: blockGlobalVariables[blockIndex],
268
+ header: this.getBlockHeader(firstBlockNumber - 1),
269
+ newL1ToL2Snapshot,
270
+ ...makeProcessedTxOpts(txIndexOffset + txIndex),
271
+ }),
272
+ );
273
+ });
274
+
275
+ const blockBlobFields = blockTxs.map(txs => getBlockBlobFields(txs.map(tx => tx.txEffect)));
276
+ const totalNumBlobFields = blockBlobFields.reduce((acc, curr) => acc + curr.length, 0);
277
+ const spongeBlobState = SpongeBlob.init(totalNumBlobFields);
278
+
279
+ const blocks: { header: BlockHeader; txs: ProcessedTx[] }[] = [];
280
+ for (let i = 0; i < numBlocks; i++) {
281
+ const isFirstBlock = i === 0;
282
+ const blockNumber = firstBlockNumber + i;
283
+ const globalVariables = blockGlobalVariables[i];
284
+ const txs = blockTxs[i];
285
+
286
+ await this.setTreeRoots(txs);
287
+
288
+ const fork = await this.worldState.fork();
289
+ const blockMsgs = isFirstBlock ? l1ToL2Messages : [];
290
+ const block = await buildBlockWithCleanDB(txs, globalVariables, blockMsgs, fork, spongeBlobState, isFirstBlock);
291
+
292
+ const header = block.getBlockHeader();
293
+ this.headers.set(blockNumber, header);
294
+
295
+ await this.worldState.handleL2BlockAndMessages(block, blockMsgs, isFirstBlock);
296
+
297
+ await spongeBlobState.absorb(blockBlobFields[i]);
298
+
299
+ blocks.push({ header, txs });
300
+ }
301
+
302
+ return { blocks, l1ToL2Messages, blobFields: blockBlobFields.flat() };
303
+ }
304
+
210
305
  public async processPublicFunctions(
211
306
  txs: Tx[],
212
307
  {
@@ -265,12 +360,9 @@ class TestProvingOrchestrator extends ProvingOrchestrator {
265
360
 
266
361
  // Disable this check by default, since it requires seeding world state with the block being built
267
362
  // This is only enabled in some tests with multiple blocks that populate the pending chain via makePendingBlock
268
- protected override verifyBuiltBlockAgainstSyncedState(
269
- l2Block: L2Block,
270
- newArchive: AppendOnlyTreeSnapshot,
271
- ): Promise<void> {
363
+ protected override verifyBuiltBlockAgainstSyncedState(provingState: BlockProvingState): Promise<void> {
272
364
  if (this.isVerifyBuiltBlockAgainstSyncedStateEnabled) {
273
- return super.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);
365
+ return super.verifyBuiltBlockAgainstSyncedState(provingState);
274
366
  }
275
367
  return Promise.resolve();
276
368
  }
@@ -1,4 +1,4 @@
1
- import { BatchedBlobAccumulator, Blob, type SpongeBlob } from '@aztec/blob-lib';
1
+ import { BatchedBlob, BatchedBlobAccumulator, Blob, SpongeBlob } from '@aztec/blob-lib';
2
2
  import {
3
3
  ARCHIVE_HEIGHT,
4
4
  CIVC_PROOF_LENGTH,
@@ -20,19 +20,16 @@ import { BLS12Point, Fr } from '@aztec/foundation/fields';
20
20
  import { type Bufferable, type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
21
21
  import { MembershipWitness, MerkleTreeCalculator, computeUnbalancedMerkleTreeRoot } from '@aztec/foundation/trees';
22
22
  import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
23
- import { getVKIndex, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
24
- import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
23
+ import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
25
24
  import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
26
25
  import { PublicDataHint } from '@aztec/stdlib/avm';
27
- import { Body } from '@aztec/stdlib/block';
26
+ import { Body, L2BlockHeader, getBlockBlobFields } from '@aztec/stdlib/block';
28
27
  import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
29
28
  import { ContractClassLogFields } from '@aztec/stdlib/logs';
30
- import type { ParityPublicInputs } from '@aztec/stdlib/parity';
31
29
  import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
32
30
  import {
33
- type BaseOrMergeRollupPublicInputs,
34
31
  BlockConstantData,
35
- type BlockRootOrBlockMergePublicInputs,
32
+ BlockRollupPublicInputs,
36
33
  PrivateBaseRollupHints,
37
34
  PrivateBaseStateDiffHints,
38
35
  PublicBaseRollupHints,
@@ -49,12 +46,11 @@ import {
49
46
  import {
50
47
  BlockHeader,
51
48
  ContentCommitment,
52
- type GlobalVariables,
49
+ GlobalVariables,
53
50
  PartialStateReference,
54
51
  type ProcessedTx,
55
52
  StateReference,
56
53
  Tx,
57
- TxEffect,
58
54
  } from '@aztec/stdlib/tx';
59
55
  import { VkData } from '@aztec/stdlib/vks';
60
56
  import { Attributes, type Span, runInSpan } from '@aztec/telemetry-client';
@@ -76,14 +72,14 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
76
72
  async (
77
73
  span: Span,
78
74
  tx: ProcessedTx,
79
- globalVariables: GlobalVariables,
75
+ lastArchive: AppendOnlyTreeSnapshot,
80
76
  newL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot,
81
- db: MerkleTreeWriteOperations,
82
77
  startSpongeBlob: SpongeBlob,
78
+ proverId: Fr,
79
+ db: MerkleTreeWriteOperations,
83
80
  ) => {
84
81
  span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
85
82
  // Get trees info before any changes hit
86
- const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
87
83
  const start = new PartialStateReference(
88
84
  await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
89
85
  await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
@@ -141,17 +137,13 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
141
137
  i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
142
138
  );
143
139
 
144
- // Append new data to startSpongeBlob
145
- const inputSpongeBlob = startSpongeBlob.clone();
146
- await startSpongeBlob.absorb(tx.txEffect.toBlobFields());
147
-
148
140
  const contractClassLogsFields = makeTuple(
149
141
  MAX_CONTRACT_CLASS_LOGS_PER_TX,
150
142
  i => tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty(),
151
143
  );
152
144
 
153
145
  if (tx.avmProvingRequest) {
154
- const blockHash = await tx.data.constants.historicalHeader.hash();
146
+ const blockHash = await tx.data.constants.anchorBlockHeader.hash();
155
147
  const archiveRootMembershipWitness = await getMembershipWitnessFor(
156
148
  blockHash,
157
149
  MerkleTreeId.ARCHIVE,
@@ -160,10 +152,11 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
160
152
  );
161
153
 
162
154
  return PublicBaseRollupHints.from({
163
- startSpongeBlob: inputSpongeBlob,
155
+ startSpongeBlob,
164
156
  lastArchive,
165
157
  archiveRootMembershipWitness,
166
158
  contractClassLogsFields,
159
+ proverId,
167
160
  });
168
161
  } else {
169
162
  if (
@@ -203,7 +196,7 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
203
196
  feeWriteSiblingPath,
204
197
  });
205
198
 
206
- const blockHash = await tx.data.constants.historicalHeader.hash();
199
+ const blockHash = await tx.data.constants.anchorBlockHeader.hash();
207
200
  const archiveRootMembershipWitness = await getMembershipWitnessFor(
208
201
  blockHash,
209
202
  MerkleTreeId.ARCHIVE,
@@ -213,15 +206,16 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
213
206
 
214
207
  const constants = BlockConstantData.from({
215
208
  lastArchive,
216
- newL1ToL2: newL1ToL2MessageTreeSnapshot,
217
- vkTreeRoot: getVKTreeRoot(),
218
- protocolContractTreeRoot,
219
- globalVariables,
209
+ l1ToL2TreeSnapshot: newL1ToL2MessageTreeSnapshot,
210
+ vkTreeRoot: tx.data.constants.vkTreeRoot,
211
+ protocolContractTreeRoot: tx.data.constants.protocolContractTreeRoot,
212
+ globalVariables: tx.globalVariables,
213
+ proverId,
220
214
  });
221
215
 
222
216
  return PrivateBaseRollupHints.from({
223
217
  start,
224
- startSpongeBlob: inputSpongeBlob,
218
+ startSpongeBlob,
225
219
  stateDiffHints,
226
220
  feePayerFeeJuiceBalanceReadHint,
227
221
  archiveRootMembershipWitness,
@@ -269,11 +263,13 @@ export function getPublicTubePrivateInputsFromTx(tx: Tx | ProcessedTx) {
269
263
  return new PublicTubePrivateInputs(proofData);
270
264
  }
271
265
 
266
+ // Build "hints" as the private inputs for the checkpoint root rollup circuit.
267
+ // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
268
+ // The `blobsHash` will be validated on L1 against the blob fields.
272
269
  export const buildBlobHints = runInSpan(
273
270
  'BlockBuilderHelpers',
274
271
  'buildBlobHints',
275
- async (_span: Span, txEffects: TxEffect[]) => {
276
- const blobFields = txEffects.flatMap(tx => tx.toBlobFields());
272
+ async (_span: Span, blobFields: Fr[]) => {
277
273
  const blobs = await Blob.getBlobsPerBlock(blobFields);
278
274
  // TODO(#13430): The blobsHash is confusingly similar to blobCommitmentsHash, calculated from below blobCommitments:
279
275
  // - blobsHash := sha256([blobhash_0, ..., blobhash_m]) = a hash of all blob hashes in a block with m+1 blobs inserted into the header, exists so a user can cross check blobs.
@@ -282,15 +278,26 @@ export const buildBlobHints = runInSpan(
282
278
  // We may be able to combine these values e.g. blobCommitmentsHash := sha256( ...sha256(sha256(blobshash_0), blobshash_1) ... blobshash_l) for an epoch with l+1 blocks.
283
279
  const blobCommitments = blobs.map(b => BLS12Point.decompress(b.commitment));
284
280
  const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
285
- return { blobFields, blobCommitments, blobs, blobsHash };
281
+ return { blobCommitments, blobs, blobsHash };
286
282
  },
287
283
  );
288
284
 
285
+ // Build the data required to prove the txs in an epoch. Currently only used in tests.
286
+ export const buildBlobDataFromTxs = async (txsPerCheckpoint: ProcessedTx[][]) => {
287
+ const blobFields = txsPerCheckpoint.map(txs => getBlockBlobFields(txs.map(tx => tx.txEffect)));
288
+ const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
289
+ return { blobFieldsLengths: blobFields.map(fields => fields.length), finalBlobChallenges };
290
+ };
291
+
292
+ export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
293
+ const blobs = await Promise.all(blobFieldsPerCheckpoint.map(blobFields => Blob.getBlobsPerBlock(blobFields)));
294
+ return await BatchedBlob.precomputeBatchedBlobChallenges(blobs.flat());
295
+ };
296
+
289
297
  export const accumulateBlobs = runInSpan(
290
298
  'BlockBuilderHelpers',
291
299
  'accumulateBlobs',
292
- async (_span: Span, txs: ProcessedTx[], startBlobAccumulator: BatchedBlobAccumulator) => {
293
- const blobFields = txs.flatMap(tx => tx.txEffect.toBlobFields());
300
+ async (_span: Span, blobFields: Fr[], startBlobAccumulator: BatchedBlobAccumulator) => {
294
301
  const blobs = await Blob.getBlobsPerBlock(blobFields);
295
302
  const endBlobAccumulator = startBlobAccumulator.accumulateBlobs(blobs);
296
303
  return endBlobAccumulator;
@@ -300,36 +307,28 @@ export const accumulateBlobs = runInSpan(
300
307
  export const buildHeaderFromCircuitOutputs = runInSpan(
301
308
  'BlockBuilderHelpers',
302
309
  'buildHeaderFromCircuitOutputs',
303
- (
304
- _span,
305
- previousRollupData: BaseOrMergeRollupPublicInputs[],
306
- parityPublicInputs: ParityPublicInputs,
307
- rootRollupOutputs: BlockRootOrBlockMergePublicInputs,
308
- blobsHash: Fr,
309
- endState: StateReference,
310
- ) => {
311
- if (previousRollupData.length > 2) {
312
- throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
313
- }
314
-
315
- const outHash =
316
- previousRollupData.length === 0
317
- ? Fr.ZERO
318
- : previousRollupData.length === 1
319
- ? previousRollupData[0].outHash
320
- : sha256ToField([previousRollupData[0].outHash, previousRollupData[1].outHash]);
321
- const contentCommitment = new ContentCommitment(blobsHash, parityPublicInputs.shaRoot, outHash);
310
+ async (_span, blockRootRollupOutput: BlockRollupPublicInputs) => {
311
+ const constants = blockRootRollupOutput.constants;
312
+ const globalVariables = GlobalVariables.from({
313
+ chainId: constants.chainId,
314
+ version: constants.version,
315
+ blockNumber: blockRootRollupOutput.previousArchive.nextAvailableLeafIndex,
316
+ timestamp: blockRootRollupOutput.endTimestamp,
317
+ slotNumber: constants.slotNumber,
318
+ coinbase: constants.coinbase,
319
+ feeRecipient: constants.feeRecipient,
320
+ gasFees: constants.gasFees,
321
+ });
322
322
 
323
- const accumulatedFees = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedFees), Fr.ZERO);
324
- const accumulatedManaUsed = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedManaUsed), Fr.ZERO);
323
+ const spongeBlobHash = await blockRootRollupOutput.endSpongeBlob.clone().squeeze();
325
324
 
326
325
  return new BlockHeader(
327
- rootRollupOutputs.previousArchive,
328
- contentCommitment,
329
- endState,
330
- rootRollupOutputs.endGlobalVariables,
331
- accumulatedFees,
332
- accumulatedManaUsed,
326
+ blockRootRollupOutput.previousArchive,
327
+ blockRootRollupOutput.endState,
328
+ spongeBlobHash,
329
+ globalVariables,
330
+ blockRootRollupOutput.accumulatedFees,
331
+ blockRootRollupOutput.accumulatedManaUsed,
333
332
  );
334
333
  },
335
334
  );
@@ -343,6 +342,7 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
343
342
  globalVariables: GlobalVariables,
344
343
  l1ToL2Messages: Fr[],
345
344
  db: MerkleTreeReadOperations,
345
+ startSpongeBlob?: SpongeBlob,
346
346
  ) => {
347
347
  span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
348
348
  const stateReference = new StateReference(
@@ -363,19 +363,67 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
363
363
  const outHash = txOutHashes.length === 0 ? Fr.ZERO : new Fr(computeUnbalancedMerkleTreeRoot(txOutHashes));
364
364
 
365
365
  const parityShaRoot = await computeInHashFromL1ToL2Messages(l1ToL2Messages);
366
- const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobsPerBlock(body.toBlobFields()));
366
+ const blobFields = body.toBlobFields();
367
+ const blobsHash = getBlobsHashFromBlobs(await Blob.getBlobsPerBlock(blobFields));
367
368
 
368
369
  const contentCommitment = new ContentCommitment(blobsHash, parityShaRoot, outHash);
369
370
 
370
371
  const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
371
372
  const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
372
373
 
373
- const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);
374
+ const endSpongeBlob = startSpongeBlob?.clone() ?? SpongeBlob.init(blobFields.length);
375
+ await endSpongeBlob.absorb(blobFields);
376
+ const spongeBlobHash = await endSpongeBlob.squeeze();
377
+
378
+ const header = new L2BlockHeader(
379
+ previousArchive,
380
+ contentCommitment,
381
+ stateReference,
382
+ globalVariables,
383
+ fees,
384
+ manaUsed,
385
+ spongeBlobHash,
386
+ );
374
387
 
375
388
  return { header, body };
376
389
  },
377
390
  );
378
391
 
392
+ export const buildBlockHeaderFromTxs = runInSpan(
393
+ 'BlockBuilderHelpers',
394
+ 'buildBlockHeaderFromTxs',
395
+ async (
396
+ span,
397
+ txs: ProcessedTx[],
398
+ globalVariables: GlobalVariables,
399
+ startSpongeBlob: SpongeBlob,
400
+ db: MerkleTreeReadOperations,
401
+ ) => {
402
+ span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
403
+ const stateReference = new StateReference(
404
+ await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
405
+ new PartialStateReference(
406
+ await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
407
+ await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
408
+ await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
409
+ ),
410
+ );
411
+
412
+ const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
413
+
414
+ const blobFields = getBlockBlobFields(txs.map(tx => tx.txEffect));
415
+ const endSpongeBlob = startSpongeBlob.clone();
416
+ await endSpongeBlob.absorb(blobFields);
417
+ const spongeBlobHash = await endSpongeBlob.squeeze();
418
+
419
+ const txEffects = txs.map(tx => tx.txEffect);
420
+ const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
421
+ const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
422
+
423
+ return new BlockHeader(previousArchive, stateReference, spongeBlobHash, globalVariables, fees, manaUsed);
424
+ },
425
+ );
426
+
379
427
  /** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
380
428
  export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Promise<Fr> {
381
429
  const l1ToL2Messages = padArrayEnd(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
@@ -398,40 +446,6 @@ export async function getEmptyBlockBlobsHash(): Promise<Fr> {
398
446
  return sha256ToField(blobHash);
399
447
  }
400
448
 
401
- // Validate that the roots of all local trees match the output of the root circuit simulation
402
- // TODO: does this get called?
403
- export async function validateBlockRootOutput(
404
- blockRootOutput: BlockRootOrBlockMergePublicInputs,
405
- blockHeader: BlockHeader,
406
- db: MerkleTreeReadOperations,
407
- ) {
408
- await Promise.all([
409
- validateState(blockHeader.state, db),
410
- validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), blockRootOutput.newArchive, 'Archive'),
411
- ]);
412
- }
413
-
414
- export const validateState = runInSpan(
415
- 'BlockBuilderHelpers',
416
- 'validateState',
417
- async (_span, state: StateReference, db: MerkleTreeReadOperations) => {
418
- const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map(
419
- async (id: MerkleTreeId) => {
420
- return { key: id, value: await getTreeSnapshot(id, db) };
421
- },
422
- );
423
- const snapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot> = new Map(
424
- (await Promise.all(promises)).map(obj => [obj.key, obj.value]),
425
- );
426
- validatePartialState(state.partial, snapshots);
427
- validateSimulatedTree(
428
- await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
429
- state.l1ToL2MessageTree,
430
- 'L1ToL2MessageTree',
431
- );
432
- },
433
- );
434
-
435
449
  export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
436
450
  const { size } = await db.getTreeInfo(treeId);
437
451
  const path = await db.getSiblingPath(treeId, size - 1n);
@@ -564,7 +578,7 @@ function validateSimulatedTree(
564
578
  }
565
579
 
566
580
  export function validateTx(tx: ProcessedTx) {
567
- const txHeader = tx.data.constants.historicalHeader;
581
+ const txHeader = tx.data.constants.anchorBlockHeader;
568
582
  if (txHeader.state.l1ToL2MessageTree.isEmpty()) {
569
583
  throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
570
584
  }