@aztec/prover-client 0.71.0 → 0.72.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dest/config.js +2 -2
  2. package/dest/mocks/test_context.d.ts +2 -1
  3. package/dest/mocks/test_context.d.ts.map +1 -1
  4. package/dest/mocks/test_context.js +6 -3
  5. package/dest/orchestrator/block-proving-state.d.ts +9 -10
  6. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  7. package/dest/orchestrator/block-proving-state.js +70 -52
  8. package/dest/orchestrator/epoch-proving-state.d.ts +5 -5
  9. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  10. package/dest/orchestrator/epoch-proving-state.js +8 -29
  11. package/dest/orchestrator/orchestrator.d.ts +3 -2
  12. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  13. package/dest/orchestrator/orchestrator.js +20 -33
  14. package/dest/prover-agent/rpc.d.ts +1 -1
  15. package/dest/prover-agent/rpc.d.ts.map +1 -1
  16. package/dest/prover-agent/rpc.js +6 -6
  17. package/dest/prover-client/server-epoch-prover.d.ts +1 -1
  18. package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
  19. package/dest/prover-client/server-epoch-prover.js +3 -3
  20. package/dest/proving_broker/rpc.d.ts +3 -3
  21. package/dest/proving_broker/rpc.d.ts.map +1 -1
  22. package/dest/proving_broker/rpc.js +7 -7
  23. package/dest/test/mock_prover.d.ts +1 -1
  24. package/package.json +11 -11
  25. package/src/config.ts +1 -1
  26. package/src/mocks/test_context.ts +5 -2
  27. package/src/orchestrator/block-proving-state.ts +81 -57
  28. package/src/orchestrator/epoch-proving-state.ts +16 -45
  29. package/src/orchestrator/orchestrator.ts +38 -64
  30. package/src/prover-agent/rpc.ts +5 -5
  31. package/src/prover-client/server-epoch-prover.ts +6 -2
  32. package/src/proving_broker/rpc.ts +13 -6
@@ -11,7 +11,6 @@ import {
  type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
  MembershipWitness,
  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
- type NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
  NUM_BASE_PARITY_PER_ROOT_PARITY,
  type ParityPublicInputs,
  type RECURSIVE_PROOF_LENGTH,
@@ -24,6 +23,7 @@ import { SpongeBlob } from '@aztec/circuits.js/blobs';
  import {
  type BaseOrMergeRollupPublicInputs,
  type BlockRootOrBlockMergePublicInputs,
+ BlockRootRollupBlobData,
  BlockRootRollupData,
  BlockRootRollupInputs,
  ConstantRollupData,
@@ -68,14 +68,12 @@ export class BlockProvingState {
  constructor(
  public readonly index: number,
  public readonly globalVariables: GlobalVariables,
- public readonly newL1ToL2Messages: Tuple<Fr, typeof NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP>,
- private readonly messageTreeSnapshot: AppendOnlyTreeSnapshot,
- private readonly messageTreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
- private readonly messageTreeSnapshotAfterInsertion: AppendOnlyTreeSnapshot,
- private readonly archiveTreeSnapshot: AppendOnlyTreeSnapshot,
- private readonly archiveTreeRootSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
+ public readonly newL1ToL2Messages: Fr[],
+ private readonly l1ToL2MessageSubtreeSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
+ private readonly l1ToL2MessageTreeSnapshotAfterInsertion: AppendOnlyTreeSnapshot,
+ private readonly lastArchiveSnapshot: AppendOnlyTreeSnapshot,
+ private readonly newArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
  private readonly previousBlockHeader: BlockHeader,
- private readonly previousBlockHash: Fr,
  private readonly parentEpoch: EpochProvingState,
  ) {
  this.baseParityProvingOutputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }).map(_ => undefined);
@@ -176,11 +174,8 @@ export class BlockProvingState {
  }

  public getBlockRootRollupTypeAndInputs(proverId: Fr) {
- if (this.totalNumTxs === 0) {
- return {
- rollupType: 'empty-block-root-rollup' satisfies CircuitName,
- inputs: this.#getEmptyBlockRootInputs(proverId),
- };
+ if (!this.rootParityProvingOutput) {
+ throw new Error('Root parity is not ready.');
  }

  const proofs = this.#getChildProofsForBlockRoot();
@@ -189,41 +184,88 @@
  throw new Error('At lease one child is not ready for the block root.');
  }

- const previousRollupData = nonEmptyProofs.map(p => this.#getPreviousRollupData(p!));
  const data = this.#getBlockRootRollupData(proverId);

+ if (this.totalNumTxs === 0) {
+ const constants = ConstantRollupData.from({
+ lastArchive: this.lastArchiveSnapshot,
+ globalVariables: this.globalVariables,
+ vkTreeRoot: getVKTreeRoot(),
+ protocolContractTreeRoot,
+ });
+
+ return {
+ rollupType: 'empty-block-root-rollup' satisfies CircuitName,
+ inputs: EmptyBlockRootRollupInputs.from({
+ data,
+ constants,
+ isPadding: false,
+ }),
+ };
+ }
+
+ const previousRollupData = nonEmptyProofs.map(p => this.#getPreviousRollupData(p!));
+ const blobData = this.#getBlockRootRollupBlobData();
+
  if (previousRollupData.length === 1) {
  return {
  rollupType: 'single-tx-block-root-rollup' satisfies CircuitName,
- inputs: new SingleTxBlockRootRollupInputs(previousRollupData as [PreviousRollupData], data),
+ inputs: new SingleTxBlockRootRollupInputs(previousRollupData as [PreviousRollupData], data, blobData),
  };
  } else {
  return {
  rollupType: 'block-root-rollup' satisfies CircuitName,
- inputs: new BlockRootRollupInputs(previousRollupData as [PreviousRollupData, PreviousRollupData], data),
+ inputs: new BlockRootRollupInputs(
+ previousRollupData as [PreviousRollupData, PreviousRollupData],
+ data,
+ blobData,
+ ),
  };
  }
  }

+ public getPaddingBlockRootInputs(proverId: Fr) {
+ if (!this.rootParityProvingOutput) {
+ throw new Error('Root parity is not ready.');
+ }
+
+ // Use the new block header and archive of the current block as the previous header and archiver of the next padding block.
+ const newBlockHeader = this.buildHeaderFromProvingOutputs();
+ const newArchive = this.blockRootProvingOutput!.inputs.newArchive;
+
+ const data = BlockRootRollupData.from({
+ l1ToL2Roots: this.#getRootParityData(this.rootParityProvingOutput!),
+ l1ToL2MessageSubtreeSiblingPath: this.l1ToL2MessageSubtreeSiblingPath,
+ newArchiveSiblingPath: this.newArchiveSiblingPath,
+ previousBlockHeader: newBlockHeader,
+ proverId,
+ });
+
+ const constants = ConstantRollupData.from({
+ lastArchive: newArchive,
+ globalVariables: this.globalVariables,
+ vkTreeRoot: getVKTreeRoot(),
+ protocolContractTreeRoot,
+ });
+
+ return EmptyBlockRootRollupInputs.from({
+ data,
+ constants,
+ isPadding: true,
+ });
+ }
+
  public getRootParityInputs() {
  if (!this.baseParityProvingOutputs.every(p => !!p)) {
  throw new Error('At lease one base parity is not ready.');
  }

- const children = this.baseParityProvingOutputs.map(p => this.#getRootParityInputFromProvingOutput(p!));
+ const children = this.baseParityProvingOutputs.map(p => this.#getRootParityData(p!));
  return new RootParityInputs(
  children as Tuple<RootParityInput<typeof RECURSIVE_PROOF_LENGTH>, typeof NUM_BASE_PARITY_PER_ROOT_PARITY>,
  );
  }

- public getL1ToL2Roots() {
- if (!this.rootParityProvingOutput) {
- throw new Error('Root parity is not ready.');
- }
-
- return this.#getRootParityInputFromProvingOutput(this.rootParityProvingOutput);
- }
-
  // Returns a specific transaction proving state
  public getTxProvingState(txIndex: number) {
  return this.txs[txIndex];
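The block root circuit inputs are now split between BlockRootRollupData (tree witnesses, archive sibling path, previous block header, prover id) and the new BlockRootRollupBlobData (blob fields, commitments and hash). A minimal type-level sketch of the split, where the field names come from the hunk above but every field type is a placeholder assumption rather than the package's real classes:

// Illustrative shapes only; names taken from this diff, types are hypothetical placeholders.
interface BlockRootRollupDataSketch {
  l1ToL2Roots: unknown; // root parity proof and public inputs
  l1ToL2MessageSubtreeSiblingPath: unknown[];
  newArchiveSiblingPath: unknown[];
  previousBlockHeader: unknown;
  proverId: bigint;
}

interface BlockRootRollupBlobDataSketch {
  blobFields: bigint[]; // padded to FIELDS_PER_BLOB * BLOBS_PER_BLOCK
  blobCommitments: [bigint, bigint][]; // padded to BLOBS_PER_BLOCK
  blobsHash: bigint;
}

// The two-tx and single-tx block root circuits take the blob data as an extra argument,
// while the empty and padding block root variants only need the shared `data` object.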
@@ -242,7 +284,7 @@ export class BlockProvingState {
  }
  endPartialState = lastRollup.inputs.end;
  }
- const endState = new StateReference(this.messageTreeSnapshotAfterInsertion, endPartialState);
+ const endState = new StateReference(this.l1ToL2MessageTreeSnapshotAfterInsertion, endPartialState);

  return buildHeaderFromCircuitOutputs(
  previousRollupData.map(d => d.baseOrMergeRollupPublicInputs),
@@ -268,6 +310,10 @@ export class BlockProvingState {
  return this.baseParityProvingOutputs.every(p => !!p);
  }

+ public isComplete() {
+ return !!this.blockRootProvingOutput;
+ }
+
  // Returns whether the proving state is still valid
  public verifyState() {
  return this.parentEpoch.verifyState();
@@ -278,38 +324,20 @@
  this.parentEpoch.reject(reason);
  }

- #getEmptyBlockRootInputs(proverId: Fr) {
- const l1ToL2Roots = this.getL1ToL2Roots();
- const constants = ConstantRollupData.from({
- lastArchive: this.archiveTreeSnapshot,
- globalVariables: this.globalVariables,
- vkTreeRoot: getVKTreeRoot(),
- protocolContractTreeRoot,
- });
-
- return EmptyBlockRootRollupInputs.from({
- l1ToL2Roots,
- newL1ToL2MessageTreeRootSiblingPath: this.messageTreeRootSiblingPath,
- startL1ToL2MessageTreeSnapshot: this.messageTreeSnapshot,
- newArchiveSiblingPath: this.archiveTreeRootSiblingPath,
- previousBlockHash: this.previousBlockHash,
- previousPartialState: this.previousBlockHeader.state.partial,
- constants,
+ #getBlockRootRollupData(proverId: Fr) {
+ return BlockRootRollupData.from({
+ l1ToL2Roots: this.#getRootParityData(this.rootParityProvingOutput!),
+ l1ToL2MessageSubtreeSiblingPath: this.l1ToL2MessageSubtreeSiblingPath,
+ newArchiveSiblingPath: this.newArchiveSiblingPath,
+ previousBlockHeader: this.previousBlockHeader,
  proverId,
- isPadding: false,
  });
  }

- #getBlockRootRollupData(proverId: Fr) {
+ #getBlockRootRollupBlobData() {
  const txEffects = this.txs.map(txProvingState => txProvingState.processedTx.txEffect);
  const { blobFields, blobCommitments, blobsHash } = buildBlobHints(txEffects);
- return BlockRootRollupData.from({
- l1ToL2Roots: this.getL1ToL2Roots(),
- newL1ToL2MessageTreeRootSiblingPath: this.messageTreeRootSiblingPath,
- startL1ToL2MessageTreeSnapshot: this.messageTreeSnapshot,
- newArchiveSiblingPath: this.archiveTreeRootSiblingPath,
- previousBlockHash: this.previousBlockHash,
- proverId,
+ return BlockRootRollupBlobData.from({
  blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
  blobCommitments: padArrayEnd(blobCommitments, [Fr.ZERO, Fr.ZERO], BLOBS_PER_BLOCK),
  blobsHash,
@@ -342,11 +370,7 @@
  );
  }

- #getRootParityInputFromProvingOutput({
- inputs,
- proof,
- verificationKey,
- }: PublicInputsAndRecursiveProof<ParityPublicInputs>) {
+ #getRootParityData({ inputs, proof, verificationKey }: PublicInputsAndRecursiveProof<ParityPublicInputs>) {
  return new RootParityInput(
  proof,
  verificationKey.keyAsFields,
@@ -4,33 +4,27 @@ import {
  type PublicInputsAndRecursiveProof,
  } from '@aztec/circuit-types';
  import {
- ARCHIVE_HEIGHT,
- AppendOnlyTreeSnapshot,
+ type ARCHIVE_HEIGHT,
+ type AppendOnlyTreeSnapshot,
  type BlockHeader,
- Fr,
+ type Fr,
  type GlobalVariables,
- L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
+ type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
  MembershipWitness,
  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
- NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
  type TUBE_PROOF_LENGTH,
  VK_TREE_HEIGHT,
  } from '@aztec/circuits.js';
  import {
  BlockMergeRollupInputs,
  type BlockRootOrBlockMergePublicInputs,
- ConstantRollupData,
- EmptyBlockRootRollupInputs,
  PreviousRollupBlockData,
  RootRollupInputs,
  type RootRollupPublicInputs,
  } from '@aztec/circuits.js/rollup';
- import { makeTuple } from '@aztec/foundation/array';
- import { padArrayEnd } from '@aztec/foundation/collection';
  import { type Tuple } from '@aztec/foundation/serialize';
  import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
- import { getVKIndex, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vks';
- import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
+ import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vks';

  import { BlockProvingState } from './block-proving-state.js';

@@ -81,26 +75,22 @@ export class EpochProvingState {
  public startNewBlock(
  globalVariables: GlobalVariables,
  l1ToL2Messages: Fr[],
- messageTreeSnapshot: AppendOnlyTreeSnapshot,
- messageTreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
- messageTreeSnapshotAfterInsertion: AppendOnlyTreeSnapshot,
- archiveTreeSnapshot: AppendOnlyTreeSnapshot,
- archiveTreeRootSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
+ l1ToL2MessageSubtreeSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
+ l1ToL2MessageTreeSnapshotAfterInsertion: AppendOnlyTreeSnapshot,
+ lastArchiveSnapshot: AppendOnlyTreeSnapshot,
+ newArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
  previousBlockHeader: BlockHeader,
- previousBlockHash: Fr,
  ): BlockProvingState {
  const index = globalVariables.blockNumber.toNumber() - this.firstBlockNumber;
  const block = new BlockProvingState(
  index,
  globalVariables,
- padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
- messageTreeSnapshot,
- messageTreeRootSiblingPath,
- messageTreeSnapshotAfterInsertion,
- archiveTreeSnapshot,
- archiveTreeRootSiblingPath,
+ l1ToL2Messages,
+ l1ToL2MessageSubtreeSiblingPath,
+ l1ToL2MessageTreeSnapshotAfterInsertion,
+ lastArchiveSnapshot,
+ newArchiveSiblingPath,
  previousBlockHeader,
- previousBlockHash,
  this,
  );
  this.blocks[index] = block;
@@ -182,30 +172,11 @@ export class EpochProvingState {
  }

  public getPaddingBlockRootInputs(proverId: Fr) {
- const { block } = this.blocks[0] ?? {};
- const l1ToL2Roots = this.blocks[0]?.getL1ToL2Roots();
- if (!block || !l1ToL2Roots) {
+ if (!this.blocks[0]?.isComplete()) {
  throw new Error('Epoch needs one completed block in order to be padded.');
  }

- const constants = ConstantRollupData.from({
- lastArchive: block.archive,
- globalVariables: block.header.globalVariables,
- vkTreeRoot: getVKTreeRoot(),
- protocolContractTreeRoot,
- });
-
- return EmptyBlockRootRollupInputs.from({
- l1ToL2Roots,
- newL1ToL2MessageTreeRootSiblingPath: makeTuple(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, Fr.zero),
- startL1ToL2MessageTreeSnapshot: AppendOnlyTreeSnapshot.zero(),
- newArchiveSiblingPath: makeTuple(ARCHIVE_HEIGHT, Fr.zero),
- previousBlockHash: block.header.hash(),
- previousPartialState: block.header.state.partial,
- constants,
- proverId,
- isPadding: true,
- });
+ return this.blocks[0].getPaddingBlockRootInputs(proverId);
  }

  // Returns a specific transaction proving state
@@ -18,16 +18,13 @@ import {
  AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS,
  type AppendOnlyTreeSnapshot,
  BaseParityInputs,
- BlockHeader,
- ContentCommitment,
+ type BlockHeader,
  Fr,
- GlobalVariables,
+ type GlobalVariables,
  L1_TO_L2_MSG_SUBTREE_HEIGHT,
  L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
  NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
  NUM_BASE_PARITY_PER_ROOT_PARITY,
- PartialStateReference,
- StateReference,
  type TUBE_PROOF_LENGTH,
  VerificationKeyData,
  makeEmptyRecursiveProof,
@@ -39,12 +36,11 @@ import {
  SingleTxBlockRootRollupInputs,
  TubeInputs,
  } from '@aztec/circuits.js/rollup';
- import { makeTuple } from '@aztec/foundation/array';
  import { padArrayEnd } from '@aztec/foundation/collection';
  import { AbortError } from '@aztec/foundation/error';
  import { createLogger } from '@aztec/foundation/log';
  import { promiseWithResolvers } from '@aztec/foundation/promise';
- import { type Tuple } from '@aztec/foundation/serialize';
+ import { assertLength } from '@aztec/foundation/serialize';
  import { pushTestData } from '@aztec/foundation/testing';
  import { elapsed } from '@aztec/foundation/timer';
  import { type TreeNodeLocation } from '@aztec/foundation/trees';
@@ -140,7 +136,7 @@ export class ProvingOrchestrator implements EpochProver {
  @trackSpan('ProvingOrchestrator.startNewBlock', globalVariables => ({
  [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber(),
  }))
- public async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]) {
+ public async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[], previousBlockHeader: BlockHeader) {
  if (!this.provingState) {
  throw new Error(`Invalid proving state, call startNewEpoch before starting a block`);
  }
@@ -158,70 +154,21 @@
  this.dbs.set(globalVariables.blockNumber.toNumber(), db);

  // we start the block by enqueueing all of the base parity circuits
- let baseParityInputs: BaseParityInputs[] = [];
- let l1ToL2MessagesPadded: Tuple<Fr, typeof NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP>;
- try {
- l1ToL2MessagesPadded = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
- } catch (err) {
- throw new Error('Too many L1 to L2 messages');
- }
- baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, (_, i) =>
- BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()),
- );
-
- const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
-
- const newL1ToL2MessageTreeRootSiblingPathArray = await getSubtreeSiblingPath(
- MerkleTreeId.L1_TO_L2_MESSAGE_TREE,
- L1_TO_L2_MSG_SUBTREE_HEIGHT,
- db,
- );
-
- const newL1ToL2MessageTreeRootSiblingPath = makeTuple(
- L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
- i =>
- i < newL1ToL2MessageTreeRootSiblingPathArray.length ? newL1ToL2MessageTreeRootSiblingPathArray[i] : Fr.ZERO,
- 0,
- );
-
- // Update the local trees to include the new l1 to l2 messages
- await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
- const messageTreeSnapshotAfterInsertion = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+ const { l1ToL2MessageSubtreeSiblingPath, l1ToL2MessageTreeSnapshotAfterInsertion, baseParityInputs } =
+ await this.prepareBaseParityInputs(l1ToL2Messages, db);

  // Get archive snapshot before this block lands
- const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+ const lastArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
  const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);
- const previousBlockHash = await db.getLeafValue(
- MerkleTreeId.ARCHIVE,
- BigInt(startArchiveSnapshot.nextAvailableLeafIndex - 1),
- );
-
- const partial = new PartialStateReference(
- await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
- await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
- await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
- );
- const state = new StateReference(messageTreeSnapshot, partial);
- // TODO: Construct the full previousBlockHeader.
- const previousBlockHeader = BlockHeader.from({
- lastArchive: startArchiveSnapshot,
- contentCommitment: ContentCommitment.empty(),
- state,
- globalVariables: GlobalVariables.empty(),
- totalFees: Fr.ZERO,
- totalManaUsed: Fr.ZERO,
- });

  const blockProvingState = this.provingState!.startNewBlock(
  globalVariables,
- l1ToL2MessagesPadded,
- messageTreeSnapshot,
- newL1ToL2MessageTreeRootSiblingPath,
- messageTreeSnapshotAfterInsertion,
- startArchiveSnapshot,
+ l1ToL2Messages,
+ l1ToL2MessageSubtreeSiblingPath,
+ l1ToL2MessageTreeSnapshotAfterInsertion,
+ lastArchive,
  newArchiveSiblingPath,
  previousBlockHeader,
- previousBlockHash!,
  );

  // Enqueue base parity circuits for the block
@@ -502,6 +449,33 @@ export class ProvingOrchestrator implements EpochProver {
  setImmediate(safeJob);
  }

+ private async prepareBaseParityInputs(l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations) {
+ const l1ToL2MessagesPadded = padArrayEnd(
+ l1ToL2Messages,
+ Fr.ZERO,
+ NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
+ 'Too many L1 to L2 messages',
+ );
+ const baseParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }, (_, i) =>
+ BaseParityInputs.fromSlice(l1ToL2MessagesPadded, i, getVKTreeRoot()),
+ );
+
+ const l1ToL2MessageSubtreeSiblingPath = assertLength(
+ await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, db),
+ L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
+ );
+
+ // Update the local trees to include the new l1 to l2 messages
+ await db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2MessagesPadded);
+ const l1ToL2MessageTreeSnapshotAfterInsertion = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db);
+
+ return {
+ l1ToL2MessageSubtreeSiblingPath,
+ l1ToL2MessageTreeSnapshotAfterInsertion,
+ baseParityInputs,
+ };
+ }
+
  // Updates the merkle trees for a transaction. The first enqueued job for a transaction
  @trackSpan('ProvingOrchestrator.prepareBaseRollupInputs', (_, tx) => ({
  [Attributes.TX_HASH]: tx.hash.toString(),
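The extracted prepareBaseParityInputs relies on padArrayEnd accepting an error message for oversized inputs and on assertLength enforcing the sibling path length, replacing the old try/catch plus makeTuple approach. A minimal sketch of that padding semantics, using a hypothetical local helper rather than the @aztec/foundation implementation:

// Hypothetical helper illustrating the behaviour relied on above: pad to a fixed
// length, and fail with the supplied message if the input is already too long.
function padToLength<T>(arr: T[], fill: T, length: number, errorMsg: string): T[] {
  if (arr.length > length) {
    throw new Error(errorMsg);
  }
  return arr.concat(Array.from({ length: length - arr.length }, () => fill));
}

// e.g. padding message hashes (plain numbers here) up to a fixed slot count
const padded = padToLength([1, 2, 3], 0, 8, 'Too many L1 to L2 messages');
// padded.length === 8; padToLength([1, 2, 3], 0, 2, '...') would throw instead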
@@ -1,14 +1,14 @@
  import { ProverAgentApiSchema, type ProvingJobSource, ProvingJobSourceSchema } from '@aztec/circuit-types';
- import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client';
- import { createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server';
+ import { createSafeJsonRpcClient } from '@aztec/foundation/json-rpc/client';
+ import { createTracedJsonRpcServer, makeTracedFetch } from '@aztec/telemetry-client';

  import { type ProverAgent } from './prover-agent.js';

  export function createProvingJobSourceServer(queue: ProvingJobSource) {
- return createSafeJsonRpcServer(queue, ProvingJobSourceSchema);
+ return createTracedJsonRpcServer(queue, ProvingJobSourceSchema);
  }

- export function createProvingJobSourceClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobSource {
+ export function createProvingJobSourceClient(url: string, fetch = makeTracedFetch([1, 2, 3], false)): ProvingJobSource {
  return createSafeJsonRpcClient(url, ProvingJobSourceSchema, false, 'provingJobSource', fetch);
  }

@@ -18,5 +18,5 @@ export function createProvingJobSourceClient(url: string, fetch = makeFetch([1,
  * @returns An JSON-RPC HTTP server
  */
  export function createProverAgentRpcServer(agent: ProverAgent) {
- return createSafeJsonRpcServer(agent, ProverAgentApiSchema);
+ return createTracedJsonRpcServer(agent, ProverAgentApiSchema);
  }
@@ -35,8 +35,12 @@ export class ServerEpochProver implements EpochProver {
  await this.facade.stop();
  await this.orchestrator.stop();
  }
- startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void> {
- return this.orchestrator.startNewBlock(globalVariables, l1ToL2Messages);
+ startNewBlock(
+ globalVariables: GlobalVariables,
+ l1ToL2Messages: Fr[],
+ previousBlockHeader: BlockHeader,
+ ): Promise<void> {
+ return this.orchestrator.startNewBlock(globalVariables, l1ToL2Messages, previousBlockHeader);
  }
  addTxs(txs: ProcessedTx[]): Promise<void> {
  return this.orchestrator.addTxs(txs);
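Both ServerEpochProver and the orchestrator now require the previous block header from the caller instead of reconstructing a partial header from tree snapshots. A caller-side sketch of the new signature, assuming only what the hunks above show (the StartsBlocks interface and variable names are illustrative, not part of the package):

import type { BlockHeader, Fr, GlobalVariables } from '@aztec/circuits.js';

// Illustrative stand-in for any EpochProver-style object exposing the new method.
interface StartsBlocks {
  startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[], previousBlockHeader: BlockHeader): Promise<void>;
}

async function beginBlock(prover: StartsBlocks, globals: GlobalVariables, messages: Fr[], previousHeader: BlockHeader) {
  // 0.71.x took only (globalVariables, l1ToL2Messages); from 0.72.x the caller
  // must also pass the header of the block this one builds on.
  await prover.startNewBlock(globals, messages, previousHeader);
}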
@@ -9,9 +9,10 @@ import {
  ProvingJobStatus,
  ProvingRequestType,
  } from '@aztec/circuit-types';
- import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client';
- import { type SafeJsonRpcServer, createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server';
+ import { createSafeJsonRpcClient } from '@aztec/foundation/json-rpc/client';
+ import { type SafeJsonRpcServer } from '@aztec/foundation/json-rpc/server';
  import { type ApiSchemaFor, optional } from '@aztec/foundation/schemas';
+ import { createTracedJsonRpcServer, makeTracedFetch } from '@aztec/telemetry-client';

  import { z } from 'zod';

@@ -47,17 +48,23 @@ export const ProvingJobBrokerSchema: ApiSchemaFor<ProvingJobBroker> = {
  };

  export function createProvingBrokerServer(broker: ProvingJobBroker): SafeJsonRpcServer {
- return createSafeJsonRpcServer(broker, ProvingJobBrokerSchema);
+ return createTracedJsonRpcServer(broker, ProvingJobBrokerSchema);
  }

- export function createProvingJobBrokerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobBroker {
+ export function createProvingJobBrokerClient(url: string, fetch = makeTracedFetch([1, 2, 3], false)): ProvingJobBroker {
  return createSafeJsonRpcClient(url, ProvingJobBrokerSchema, false, 'proverBroker', fetch);
  }

- export function createProvingJobProducerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobProducer {
+ export function createProvingJobProducerClient(
+ url: string,
+ fetch = makeTracedFetch([1, 2, 3], false),
+ ): ProvingJobProducer {
  return createSafeJsonRpcClient(url, ProvingJobProducerSchema, false, 'provingJobProducer', fetch);
  }

- export function createProvingJobConsumerClient(url: string, fetch = makeFetch([1, 2, 3], false)): ProvingJobConsumer {
+ export function createProvingJobConsumerClient(
+ url: string,
+ fetch = makeTracedFetch([1, 2, 3], false),
+ ): ProvingJobConsumer {
  return createSafeJsonRpcClient(url, ProvingJobConsumerSchema, false, 'provingJobConsumer', fetch);
  }
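Across the prover agent and proving broker RPC modules, servers and client fetchers now come from @aztec/telemetry-client (createTracedJsonRpcServer, makeTracedFetch) instead of the plain @aztec/foundation json-rpc helpers, so each RPC call carries tracing. A minimal usage sketch; the relative import path and the pre-existing broker instance are assumptions, only the helper names and defaults come from this diff:

import { createProvingBrokerServer, createProvingJobBrokerClient } from './proving_broker/rpc.js';

// Assumed to exist elsewhere; typed off the helper so no extra import is needed.
declare const broker: Parameters<typeof createProvingBrokerServer>[0];

// Server side: now backed by createTracedJsonRpcServer, so spans are emitted per RPC call.
const server = createProvingBrokerServer(broker);

// Client side: defaults to makeTracedFetch([1, 2, 3], false), i.e. a traced fetch with retries.
const client = createProvingJobBrokerClient('http://localhost:8080');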