@aztec/prover-client 0.0.1-commit.7d4e6cd → 0.0.1-commit.9372f48

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/dest/light/lightweight_checkpoint_builder.d.ts +11 -7
  2. package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
  3. package/dest/light/lightweight_checkpoint_builder.js +28 -11
  4. package/dest/mocks/fixtures.d.ts +1 -1
  5. package/dest/mocks/fixtures.d.ts.map +1 -1
  6. package/dest/mocks/fixtures.js +2 -1
  7. package/dest/mocks/test_context.d.ts +3 -2
  8. package/dest/mocks/test_context.d.ts.map +1 -1
  9. package/dest/mocks/test_context.js +6 -1
  10. package/dest/orchestrator/block-building-helpers.d.ts +1 -1
  11. package/dest/orchestrator/block-building-helpers.js +1 -1
  12. package/dest/orchestrator/checkpoint-proving-state.d.ts +15 -2
  13. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
  14. package/dest/orchestrator/checkpoint-proving-state.js +34 -1
  15. package/dest/orchestrator/epoch-proving-state.d.ts +5 -4
  16. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  17. package/dest/orchestrator/epoch-proving-state.js +35 -1
  18. package/dest/orchestrator/orchestrator.d.ts +18 -3
  19. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  20. package/dest/orchestrator/orchestrator.js +120 -78
  21. package/dest/prover-client/prover-client.d.ts +1 -1
  22. package/dest/prover-client/prover-client.d.ts.map +1 -1
  23. package/dest/prover-client/prover-client.js +7 -4
  24. package/dest/proving_broker/broker_prover_facade.d.ts +4 -3
  25. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  26. package/dest/proving_broker/broker_prover_facade.js +3 -3
  27. package/dest/proving_broker/config.d.ts +5 -1
  28. package/dest/proving_broker/config.d.ts.map +1 -1
  29. package/dest/proving_broker/config.js +7 -1
  30. package/dest/proving_broker/proving_agent.d.ts +4 -3
  31. package/dest/proving_broker/proving_agent.d.ts.map +1 -1
  32. package/dest/proving_broker/proving_agent.js +4 -4
  33. package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
  34. package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
  35. package/dest/proving_broker/proving_broker_instrumentation.js +11 -7
  36. package/dest/proving_broker/proving_job_controller.d.ts +4 -3
  37. package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
  38. package/dest/proving_broker/proving_job_controller.js +6 -3
  39. package/dest/test/mock_proof_store.d.ts +3 -3
  40. package/dest/test/mock_proof_store.d.ts.map +1 -1
  41. package/package.json +16 -17
  42. package/src/light/lightweight_checkpoint_builder.ts +52 -12
  43. package/src/mocks/fixtures.ts +2 -1
  44. package/src/mocks/test_context.ts +5 -0
  45. package/src/orchestrator/block-building-helpers.ts +1 -1
  46. package/src/orchestrator/checkpoint-proving-state.ts +47 -1
  47. package/src/orchestrator/epoch-proving-state.ts +56 -8
  48. package/src/orchestrator/orchestrator.ts +124 -83
  49. package/src/prover-client/prover-client.ts +23 -6
  50. package/src/proving_broker/broker_prover_facade.ts +6 -3
  51. package/src/proving_broker/config.ts +9 -0
  52. package/src/proving_broker/proving_agent.ts +5 -2
  53. package/src/proving_broker/proving_broker_instrumentation.ts +10 -6
  54. package/src/proving_broker/proving_job_controller.ts +9 -3
  55. package/dest/block-factory/index.d.ts +0 -2
  56. package/dest/block-factory/index.d.ts.map +0 -1
  57. package/dest/block-factory/index.js +0 -1
  58. package/dest/block-factory/light.d.ts +0 -38
  59. package/dest/block-factory/light.d.ts.map +0 -1
  60. package/dest/block-factory/light.js +0 -106
  61. package/src/block-factory/index.ts +0 -1
  62. package/src/block-factory/light.ts +0 -136
@@ -1,13 +1,17 @@
1
1
  import { SpongeBlob, computeBlobsHashFromBlobs, encodeCheckpointEndMarker, getBlobsPerL1Block } from '@aztec/blob-lib';
2
2
  import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/constants';
3
- import type { CheckpointNumber } from '@aztec/foundation/branded-types';
3
+ import { type CheckpointNumber, IndexWithinCheckpoint } from '@aztec/foundation/branded-types';
4
4
  import { padArrayEnd } from '@aztec/foundation/collection';
5
5
  import { Fr } from '@aztec/foundation/curves/bn254';
6
- import { createLogger } from '@aztec/foundation/log';
7
- import { L2BlockNew } from '@aztec/stdlib/block';
6
+ import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
7
+ import { L2Block } from '@aztec/stdlib/block';
8
8
  import { Checkpoint } from '@aztec/stdlib/checkpoint';
9
9
  import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
10
- import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
10
+ import {
11
+ accumulateCheckpointOutHashes,
12
+ computeCheckpointOutHash,
13
+ computeInHashFromL1ToL2Messages,
14
+ } from '@aztec/stdlib/messaging';
11
15
  import { CheckpointHeader, computeBlockHeadersHash } from '@aztec/stdlib/rollup';
12
16
  import { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
13
17
  import {
@@ -30,19 +34,25 @@ import {
30
34
  * Finally completes the checkpoint by computing its header.
31
35
  */
32
36
  export class LightweightCheckpointBuilder {
33
- private readonly logger = createLogger('lightweight-checkpoint-builder');
37
+ private readonly logger: Logger;
34
38
 
35
39
  private lastArchives: AppendOnlyTreeSnapshot[] = [];
36
40
  private spongeBlob: SpongeBlob;
37
- private blocks: L2BlockNew[] = [];
41
+ private blocks: L2Block[] = [];
38
42
  private blobFields: Fr[] = [];
39
43
 
40
44
  constructor(
41
45
  public readonly checkpointNumber: CheckpointNumber,
42
46
  public readonly constants: CheckpointGlobalVariables,
43
47
  public readonly l1ToL2Messages: Fr[],
48
+ private readonly previousCheckpointOutHashes: Fr[],
44
49
  public readonly db: MerkleTreeWriteOperations,
50
+ bindings?: LoggerBindings,
45
51
  ) {
52
+ this.logger = createLogger('checkpoint-builder', {
53
+ ...bindings,
54
+ instanceId: `checkpoint-${checkpointNumber}`,
55
+ });
46
56
  this.spongeBlob = SpongeBlob.init();
47
57
  this.logger.debug('Starting new checkpoint', { constants, l1ToL2Messages });
48
58
  }
@@ -51,7 +61,9 @@ export class LightweightCheckpointBuilder {
51
61
  checkpointNumber: CheckpointNumber,
52
62
  constants: CheckpointGlobalVariables,
53
63
  l1ToL2Messages: Fr[],
64
+ previousCheckpointOutHashes: Fr[],
54
65
  db: MerkleTreeWriteOperations,
66
+ bindings?: LoggerBindings,
55
67
  ): Promise<LightweightCheckpointBuilder> {
56
68
  // Insert l1-to-l2 messages into the tree.
57
69
  await db.appendLeaves(
@@ -59,7 +71,14 @@ export class LightweightCheckpointBuilder {
59
71
  padArrayEnd<Fr, number>(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
60
72
  );
61
73
 
62
- return new LightweightCheckpointBuilder(checkpointNumber, constants, l1ToL2Messages, db);
74
+ return new LightweightCheckpointBuilder(
75
+ checkpointNumber,
76
+ constants,
77
+ l1ToL2Messages,
78
+ previousCheckpointOutHashes,
79
+ db,
80
+ bindings,
81
+ );
63
82
  }
64
83
 
65
84
  /**
@@ -72,10 +91,19 @@ export class LightweightCheckpointBuilder {
72
91
  checkpointNumber: CheckpointNumber,
73
92
  constants: CheckpointGlobalVariables,
74
93
  l1ToL2Messages: Fr[],
94
+ previousCheckpointOutHashes: Fr[],
75
95
  db: MerkleTreeWriteOperations,
76
- existingBlocks: L2BlockNew[],
96
+ existingBlocks: L2Block[],
97
+ bindings?: LoggerBindings,
77
98
  ): Promise<LightweightCheckpointBuilder> {
78
- const builder = new LightweightCheckpointBuilder(checkpointNumber, constants, l1ToL2Messages, db);
99
+ const builder = new LightweightCheckpointBuilder(
100
+ checkpointNumber,
101
+ constants,
102
+ l1ToL2Messages,
103
+ previousCheckpointOutHashes,
104
+ db,
105
+ bindings,
106
+ );
79
107
 
80
108
  builder.logger.debug('Resuming checkpoint from existing blocks', {
81
109
  checkpointNumber,
@@ -115,6 +143,11 @@ export class LightweightCheckpointBuilder {
115
143
  return builder;
116
144
  }
117
145
 
146
+ /** Returns how many blocks have been added to this checkpoint so far */
147
+ public getBlockCount() {
148
+ return this.blocks.length;
149
+ }
150
+
118
151
  /**
119
152
  * Adds a new block to the checkpoint. The tx effects must have already been inserted into the db if
120
153
  * this is called after tx processing, if that's not the case, then set `insertTxsEffects` to true.
@@ -123,7 +156,7 @@ export class LightweightCheckpointBuilder {
123
156
  globalVariables: GlobalVariables,
124
157
  txs: ProcessedTx[],
125
158
  opts: { insertTxsEffects?: boolean; expectedEndState?: StateReference } = {},
126
- ): Promise<L2BlockNew> {
159
+ ): Promise<L2Block> {
127
160
  const isFirstBlock = this.blocks.length === 0;
128
161
 
129
162
  // Empty blocks are only allowed as the first block in a checkpoint
@@ -172,8 +205,8 @@ export class LightweightCheckpointBuilder {
172
205
  const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
173
206
  this.lastArchives.push(newArchive);
174
207
 
175
- const indexWithinCheckpoint = this.blocks.length;
176
- const block = new L2BlockNew(newArchive, header, body, this.checkpointNumber, indexWithinCheckpoint);
208
+ const indexWithinCheckpoint = IndexWithinCheckpoint(this.blocks.length);
209
+ const block = new L2Block(newArchive, header, body, this.checkpointNumber, indexWithinCheckpoint);
177
210
  this.blocks.push(block);
178
211
 
179
212
  await this.spongeBlob.absorb(blockBlobFields);
@@ -210,6 +243,10 @@ export class LightweightCheckpointBuilder {
210
243
  const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages);
211
244
 
212
245
  const { slotNumber, coinbase, feeRecipient, gasFees } = this.constants;
246
+ const checkpointOutHash = computeCheckpointOutHash(
247
+ blocks.map(block => block.body.txEffects.map(tx => tx.l2ToL1Msgs)),
248
+ );
249
+ const epochOutHash = accumulateCheckpointOutHashes([...this.previousCheckpointOutHashes, checkpointOutHash]);
213
250
 
214
251
  // TODO(palla/mbps): Should we source this from the constants instead?
215
252
  // timestamp of a checkpoint is the timestamp of the last block in the checkpoint.
@@ -221,6 +258,7 @@ export class LightweightCheckpointBuilder {
221
258
  lastArchiveRoot: this.lastArchives[0].root,
222
259
  blobsHash,
223
260
  inHash,
261
+ epochOutHash,
224
262
  blockHeadersHash,
225
263
  slotNumber,
226
264
  timestamp,
@@ -238,7 +276,9 @@ export class LightweightCheckpointBuilder {
238
276
  this.checkpointNumber,
239
277
  this.constants,
240
278
  [...this.l1ToL2Messages],
279
+ [...this.previousCheckpointOutHashes],
241
280
  this.db,
281
+ this.logger.getBindings(),
242
282
  );
243
283
  clone.lastArchives = [...this.lastArchives];
244
284
  clone.spongeBlob = this.spongeBlob.clone();
@@ -75,7 +75,8 @@ export async function getSimulator(
75
75
  logger?.info(
76
76
  `Using native ACVM at ${config.acvmBinaryPath} and working directory ${config.acvmWorkingDirectory}`,
77
77
  );
78
- return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath);
78
+ const acvmLogger = logger?.createChild('acvm-native');
79
+ return new NativeACVMSimulator(config.acvmWorkingDirectory, config.acvmBinaryPath, undefined, acvmLogger);
79
80
  } catch {
80
81
  logger?.warn(`Failed to access ACVM at ${config.acvmBinaryPath}, falling back to WASM`);
81
82
  }
@@ -44,6 +44,7 @@ import { getEnvironmentConfig, getSimulator, makeCheckpointConstants, makeGlobal
44
44
  export class TestContext {
45
45
  private headers: Map<number, BlockHeader> = new Map();
46
46
  private checkpoints: Checkpoint[] = [];
47
+ private checkpointOutHashes: Fr[] = [];
47
48
  private nextCheckpointIndex = 0;
48
49
  private nextCheckpointNumber = CheckpointNumber(1);
49
50
  private nextBlockNumber = 1;
@@ -151,6 +152,7 @@ export class TestContext {
151
152
 
152
153
  public startNewEpoch() {
153
154
  this.checkpoints = [];
155
+ this.checkpointOutHashes = [];
154
156
  this.nextCheckpointIndex = 0;
155
157
  this.epochNumber++;
156
158
  }
@@ -245,10 +247,12 @@ export class TestContext {
245
247
  });
246
248
 
247
249
  const cleanFork = await this.worldState.fork();
250
+ const previousCheckpointOutHashes = this.checkpointOutHashes;
248
251
  const builder = await LightweightCheckpointBuilder.startNewCheckpoint(
249
252
  checkpointNumber,
250
253
  constants,
251
254
  l1ToL2Messages,
255
+ previousCheckpointOutHashes,
252
256
  cleanFork,
253
257
  );
254
258
 
@@ -274,6 +278,7 @@ export class TestContext {
274
278
 
275
279
  const checkpoint = await builder.completeCheckpoint();
276
280
  this.checkpoints.push(checkpoint);
281
+ this.checkpointOutHashes.push(checkpoint.getCheckpointOutHash());
277
282
 
278
283
  return {
279
284
  constants,
@@ -99,7 +99,7 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
99
99
 
100
100
  const blockHash = await tx.data.constants.anchorBlockHeader.hash();
101
101
  const anchorBlockArchiveSiblingPath = (
102
- await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)
102
+ await getMembershipWitnessFor(blockHash.toFr(), MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)
103
103
  ).siblingPath;
104
104
 
105
105
  const contractClassLogsFields = makeTuple(
@@ -11,6 +11,7 @@ import {
11
11
  type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
12
12
  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
13
13
  NUM_MSGS_PER_BASE_PARITY,
14
+ OUT_HASH_TREE_HEIGHT,
14
15
  } from '@aztec/constants';
15
16
  import { BlockNumber } from '@aztec/foundation/branded-types';
16
17
  import { padArrayEnd } from '@aztec/foundation/collection';
@@ -19,6 +20,7 @@ import { Fr } from '@aztec/foundation/curves/bn254';
19
20
  import type { Tuple } from '@aztec/foundation/serialize';
20
21
  import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
21
22
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
23
+ import { computeCheckpointOutHash } from '@aztec/stdlib/messaging';
22
24
  import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
23
25
  import {
24
26
  BlockMergeRollupPrivateInputs,
@@ -38,6 +40,11 @@ import { accumulateBlobs, buildBlobHints, toProofData } from './block-building-h
38
40
  import { BlockProvingState, type ProofState } from './block-proving-state.js';
39
41
  import type { EpochProvingState } from './epoch-proving-state.js';
40
42
 
43
+ type OutHashHint = {
44
+ treeSnapshot: AppendOnlyTreeSnapshot;
45
+ siblingPath: Tuple<Fr, typeof OUT_HASH_TREE_HEIGHT>;
46
+ };
47
+
41
48
  export class CheckpointProvingState {
42
49
  private blockProofs: UnbalancedTreeStore<
43
50
  ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
@@ -46,6 +53,11 @@ export class CheckpointProvingState {
46
53
  | ProofState<CheckpointRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
47
54
  | undefined;
48
55
  private blocks: (BlockProvingState | undefined)[] = [];
56
+ private previousOutHashHint: OutHashHint | undefined;
57
+ private outHash: Fr | undefined;
58
+ // The snapshot and sibling path after the checkpoint's out hash is inserted.
59
+ // Stored here to be retrieved for the next checkpoint when it's added.
60
+ private newOutHashHint: OutHashHint | undefined;
49
61
  private startBlobAccumulator: BatchedBlobAccumulator | undefined;
50
62
  private endBlobAccumulator: BatchedBlobAccumulator | undefined;
51
63
  private blobFields: Fr[] | undefined;
@@ -195,6 +207,35 @@ export class CheckpointProvingState {
195
207
  return new ParityBasePrivateInputs(messages, this.constants.vkTreeRoot);
196
208
  }
197
209
 
210
+ public setOutHashHint(hint: OutHashHint) {
211
+ this.previousOutHashHint = hint;
212
+ }
213
+
214
+ public getOutHashHint() {
215
+ return this.previousOutHashHint;
216
+ }
217
+
218
+ public accumulateBlockOutHashes() {
219
+ if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
220
+ return;
221
+ }
222
+
223
+ if (!this.outHash) {
224
+ const messagesPerBlock = this.blocks.map(b => b!.getTxEffects().map(tx => tx.l2ToL1Msgs));
225
+ this.outHash = computeCheckpointOutHash(messagesPerBlock);
226
+ }
227
+
228
+ return this.outHash;
229
+ }
230
+
231
+ public setOutHashHintForNextCheckpoint(hint: OutHashHint) {
232
+ this.newOutHashHint = hint;
233
+ }
234
+
235
+ public getOutHashHintForNextCheckpoint() {
236
+ return this.newOutHashHint;
237
+ }
238
+
198
239
  public async accumulateBlobs(startBlobAccumulator: BatchedBlobAccumulator) {
199
240
  if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
200
241
  return;
@@ -236,6 +277,9 @@ export class CheckpointProvingState {
236
277
  if (proofs.length !== nonEmptyProofs.length) {
237
278
  throw new Error('At least one child is not ready for the checkpoint root rollup.');
238
279
  }
280
+ if (!this.previousOutHashHint) {
281
+ throw new Error('Out hash hint is not set.');
282
+ }
239
283
  if (!this.startBlobAccumulator) {
240
284
  throw new Error('Start blob accumulator is not set.');
241
285
  }
@@ -248,6 +292,8 @@ export class CheckpointProvingState {
248
292
  const hints = CheckpointRootRollupHints.from({
249
293
  previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
250
294
  previousArchiveSiblingPath: this.lastArchiveSiblingPath,
295
+ previousOutHash: this.previousOutHashHint.treeSnapshot,
296
+ newOutHashSiblingPath: this.previousOutHashHint.siblingPath,
251
297
  startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
252
298
  finalBlobChallenges: this.finalBlobBatchingChallenges,
253
299
  blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_CHECKPOINT),
@@ -273,7 +319,7 @@ export class CheckpointProvingState {
273
319
 
274
320
  public isReadyForCheckpointRoot() {
275
321
  const allChildProofsReady = this.#getChildProofsForRoot().every(p => !!p);
276
- return allChildProofsReady && !!this.startBlobAccumulator;
322
+ return allChildProofsReady && !!this.previousOutHashHint && !!this.startBlobAccumulator;
277
323
  }
278
324
 
279
325
  public verifyState() {
@@ -1,14 +1,20 @@
1
1
  import { BatchedBlob, BatchedBlobAccumulator, type FinalBlobBatchingChallenges } from '@aztec/blob-lib';
2
- import type {
3
- ARCHIVE_HEIGHT,
4
- L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
5
- NESTED_RECURSIVE_PROOF_LENGTH,
6
- NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
2
+ import {
3
+ type ARCHIVE_HEIGHT,
4
+ type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
5
+ type NESTED_RECURSIVE_PROOF_LENGTH,
6
+ type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
7
+ OUT_HASH_TREE_HEIGHT,
7
8
  } from '@aztec/constants';
8
9
  import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
9
- import type { Fr } from '@aztec/foundation/curves/bn254';
10
+ import { Fr } from '@aztec/foundation/curves/bn254';
10
11
  import type { Tuple } from '@aztec/foundation/serialize';
11
- import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
12
+ import {
13
+ MerkleTreeCalculator,
14
+ type TreeNodeLocation,
15
+ UnbalancedTreeStore,
16
+ shaMerkleHash,
17
+ } from '@aztec/foundation/trees';
12
18
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
13
19
  import type { Proof } from '@aztec/stdlib/proofs';
14
20
  import {
@@ -20,7 +26,7 @@ import {
20
26
  RootRollupPrivateInputs,
21
27
  type RootRollupPublicInputs,
22
28
  } from '@aztec/stdlib/rollup';
23
- import type { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
29
+ import { AppendOnlyTreeSnapshot, type MerkleTreeId } from '@aztec/stdlib/trees';
24
30
  import type { BlockHeader } from '@aztec/stdlib/tx';
25
31
 
26
32
  import { toProofData } from './block-building-helpers.js';
@@ -212,6 +218,48 @@ export class EpochProvingState {
212
218
  this.checkpointPaddingProof = { provingOutput };
213
219
  }
214
220
 
221
+ public async accumulateCheckpointOutHashes() {
222
+ const treeCalculator = await MerkleTreeCalculator.create(OUT_HASH_TREE_HEIGHT, undefined, (left, right) =>
223
+ Promise.resolve(shaMerkleHash(left, right)),
224
+ );
225
+
226
+ const computeOutHashHint = async (leaves: Fr[]) => {
227
+ const tree = await treeCalculator.computeTree(leaves.map(l => l.toBuffer()));
228
+ const nextAvailableLeafIndex = leaves.length;
229
+ return {
230
+ treeSnapshot: new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.root), nextAvailableLeafIndex),
231
+ siblingPath: tree.getSiblingPath(nextAvailableLeafIndex).map(Fr.fromBuffer) as Tuple<
232
+ Fr,
233
+ typeof OUT_HASH_TREE_HEIGHT
234
+ >,
235
+ };
236
+ };
237
+
238
+ let hint = this.checkpoints[0]?.getOutHashHint();
239
+ const outHashes = [];
240
+ for (let i = 0; i < this.totalNumCheckpoints; i++) {
241
+ const checkpoint = this.checkpoints[i];
242
+ if (!checkpoint) {
243
+ break;
244
+ }
245
+
246
+ // If hints are not set yet, it must be the first checkpoint. Compute the hints with an empty tree.
247
+ hint ??= await computeOutHashHint([]);
248
+ checkpoint.setOutHashHint(hint);
249
+
250
+ // Get the out hash for this checkpoint.
251
+ const outHash = checkpoint.accumulateBlockOutHashes();
252
+ if (!outHash) {
253
+ break;
254
+ }
255
+ outHashes.push(outHash);
256
+
257
+ // Get or create hints for the next checkpoint.
258
+ hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
259
+ checkpoint.setOutHashHintForNextCheckpoint(hint);
260
+ }
261
+ }
262
+
215
263
  public async setBlobAccumulators() {
216
264
  let previousAccumulator = this.startBlobAccumulator;
217
265
  // Accumulate blobs as far as we can for this epoch.