@aztec/prover-client 4.0.0-nightly.20260112 → 4.0.0-nightly.20260114

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -113,6 +113,10 @@ export class ProvingOrchestrator implements EpochProver {
     return this.proverId;
   }

+  public getNumActiveForks() {
+    return this.dbs.size;
+  }
+
   public stop(): Promise<void> {
     this.cancel();
     return Promise.resolve();
@@ -143,6 +147,14 @@ export class ProvingOrchestrator implements EpochProver {
     this.provingPromise = promise;
   }

+  /**
+   * Starts a new checkpoint.
+   * @param checkpointIndex - The index of the checkpoint in the epoch.
+   * @param constants - The constants for this checkpoint.
+   * @param l1ToL2Messages - The set of L1 to L2 messages to be inserted at the beginning of this checkpoint.
+   * @param totalNumBlocks - The total number of blocks expected in the checkpoint (must be at least one).
+   * @param headerOfLastBlockInPreviousCheckpoint - The header of the last block in the previous checkpoint.
+   */
   public async startNewCheckpoint(
     checkpointIndex: number,
     constants: CheckpointConstantData,
@@ -255,7 +267,8 @@ export class ProvingOrchestrator implements EpochProver {
       await endSpongeBlob.absorb(blockEndBlobFields);
       blockProvingState.setEndSpongeBlob(endSpongeBlob);

-      // And also try to accumulate the blobs as far as we can:
+      // Try to accumulate the out hashes and blobs as far as we can:
+      await this.provingState.accumulateCheckpointOutHashes();
       await this.provingState.setBlobAccumulators();
     }
   }
@@ -352,7 +365,8 @@ export class ProvingOrchestrator implements EpochProver {

     provingState.setEndSpongeBlob(spongeBlobState);

-    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    // Txs have been added to the block. Now try to accumulate the out hashes and blobs as far as we can:
+    await this.provingState.accumulateCheckpointOutHashes();
     await this.provingState.setBlobAccumulators();
   }

@@ -486,12 +500,7 @@ export class ProvingOrchestrator implements EpochProver {
     // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
     // but have to make sure it only runs once all operations are completed, otherwise some function here
     // will attempt to access the fork after it was closed.
-    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
-    void this.dbs
-      .get(blockNumber)
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    void this.cleanupDBFork(blockNumber);
   }

   /**
@@ -534,6 +543,21 @@ export class ProvingOrchestrator implements EpochProver {
     return epochProofResult;
   }

+  private async cleanupDBFork(blockNumber: BlockNumber): Promise<void> {
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    const fork = this.dbs.get(blockNumber);
+    if (!fork) {
+      return;
+    }
+
+    try {
+      await fork.close();
+      this.dbs.delete(blockNumber);
+    } catch (err) {
+      logger.error(`Error closing db for block ${blockNumber}`, err);
+    }
+  }
+
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -851,19 +875,22 @@ export class ProvingOrchestrator implements EpochProver {
         },
       ),
       async result => {
-        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
-        await this.verifyBuiltBlockAgainstSyncedState(provingState);
-
         logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);

         const leafLocation = provingState.setBlockRootRollupProof(result);
         const checkpointProvingState = provingState.parentCheckpoint;

+        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+        await this.verifyBuiltBlockAgainstSyncedState(provingState);
+
         if (checkpointProvingState.totalNumBlocks === 1) {
           this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
         } else {
           this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
         }
+
+        // We are finished with the block at this point, ensure the fork is cleaned up
+        void this.cleanupDBFork(provingState.blockNumber);
       },
     );
   }
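
The new version exposes the number of open world state forks via getNumActiveForks() and closes each block's fork through cleanupDBFork() once its block root rollup proof completes. A minimal usage sketch, assuming an already constructed ProvingOrchestrator instance named `orchestrator` (the variable name and the surrounding check are illustrative, not part of the package):

    // Hypothetical check after epoch proving finishes: every world state fork
    // opened for a block should have been closed by cleanupDBFork(), so the
    // count of active forks is expected to be zero.
    const activeForks = orchestrator.getNumActiveForks();
    if (activeForks > 0) {
      console.warn(`${activeForks} world state fork(s) still open after proving`);
    }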