@aztec/prover-client 0.0.1-commit.f2ce05ee → 0.0.1-commit.f81dbcf

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/dest/light/lightweight_checkpoint_builder.d.ts +6 -4
  2. package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
  3. package/dest/light/lightweight_checkpoint_builder.js +16 -11
  4. package/dest/mocks/test_context.js +5 -2
  5. package/dest/orchestrator/block-building-helpers.d.ts +3 -3
  6. package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
  7. package/dest/orchestrator/block-building-helpers.js +2 -2
  8. package/dest/orchestrator/block-proving-state.d.ts +4 -1
  9. package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
  10. package/dest/orchestrator/block-proving-state.js +7 -0
  11. package/dest/orchestrator/checkpoint-proving-state.d.ts +3 -3
  12. package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
  13. package/dest/orchestrator/checkpoint-proving-state.js +3 -3
  14. package/dest/orchestrator/epoch-proving-state.d.ts +2 -2
  15. package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
  16. package/dest/orchestrator/epoch-proving-state.js +5 -3
  17. package/dest/orchestrator/orchestrator.d.ts +5 -3
  18. package/dest/orchestrator/orchestrator.d.ts.map +1 -1
  19. package/dest/orchestrator/orchestrator.js +64 -57
  20. package/dest/prover-client/prover-client.d.ts +2 -2
  21. package/dest/prover-client/prover-client.d.ts.map +1 -1
  22. package/dest/prover-client/prover-client.js +1 -1
  23. package/dest/proving_broker/broker_prover_facade.d.ts +1 -1
  24. package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
  25. package/dest/proving_broker/broker_prover_facade.js +3 -3
  26. package/dest/proving_broker/proving_broker.d.ts +1 -1
  27. package/dest/proving_broker/proving_broker.d.ts.map +1 -1
  28. package/dest/proving_broker/proving_broker.js +7 -3
  29. package/dest/proving_broker/proving_broker_instrumentation.d.ts +3 -1
  30. package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
  31. package/dest/proving_broker/proving_broker_instrumentation.js +7 -0
  32. package/dest/test/mock_prover.d.ts +1 -1
  33. package/package.json +15 -16
  34. package/src/light/lightweight_checkpoint_builder.ts +14 -5
  35. package/src/mocks/test_context.ts +2 -2
  36. package/src/orchestrator/block-building-helpers.ts +2 -2
  37. package/src/orchestrator/block-proving-state.ts +9 -0
  38. package/src/orchestrator/checkpoint-proving-state.ts +4 -4
  39. package/src/orchestrator/epoch-proving-state.ts +6 -4
  40. package/src/orchestrator/orchestrator.ts +72 -63
  41. package/src/prover-client/prover-client.ts +2 -1
  42. package/src/proving_broker/broker_prover_facade.ts +9 -4
  43. package/src/proving_broker/proving_broker.ts +7 -2
  44. package/src/proving_broker/proving_broker_instrumentation.ts +9 -0
@@ -44,6 +44,7 @@ export class LightweightCheckpointBuilder {
44
44
  constructor(
45
45
  public readonly checkpointNumber: CheckpointNumber,
46
46
  public readonly constants: CheckpointGlobalVariables,
47
+ public feeAssetPriceModifier: bigint,
47
48
  public readonly l1ToL2Messages: Fr[],
48
49
  private readonly previousCheckpointOutHashes: Fr[],
49
50
  public readonly db: MerkleTreeWriteOperations,
@@ -54,7 +55,7 @@ export class LightweightCheckpointBuilder {
54
55
  instanceId: `checkpoint-${checkpointNumber}`,
55
56
  });
56
57
  this.spongeBlob = SpongeBlob.init();
57
- this.logger.debug('Starting new checkpoint', { constants, l1ToL2Messages });
58
+ this.logger.debug('Starting new checkpoint', { constants, l1ToL2Messages, feeAssetPriceModifier });
58
59
  }
59
60
 
60
61
  static async startNewCheckpoint(
@@ -64,6 +65,7 @@ export class LightweightCheckpointBuilder {
64
65
  previousCheckpointOutHashes: Fr[],
65
66
  db: MerkleTreeWriteOperations,
66
67
  bindings?: LoggerBindings,
68
+ feeAssetPriceModifier: bigint = 0n,
67
69
  ): Promise<LightweightCheckpointBuilder> {
68
70
  // Insert l1-to-l2 messages into the tree.
69
71
  await db.appendLeaves(
@@ -74,6 +76,7 @@ export class LightweightCheckpointBuilder {
74
76
  return new LightweightCheckpointBuilder(
75
77
  checkpointNumber,
76
78
  constants,
79
+ feeAssetPriceModifier,
77
80
  l1ToL2Messages,
78
81
  previousCheckpointOutHashes,
79
82
  db,
@@ -90,6 +93,7 @@ export class LightweightCheckpointBuilder {
90
93
  static async resumeCheckpoint(
91
94
  checkpointNumber: CheckpointNumber,
92
95
  constants: CheckpointGlobalVariables,
96
+ feeAssetPriceModifier: bigint,
93
97
  l1ToL2Messages: Fr[],
94
98
  previousCheckpointOutHashes: Fr[],
95
99
  db: MerkleTreeWriteOperations,
@@ -99,6 +103,7 @@ export class LightweightCheckpointBuilder {
99
103
  const builder = new LightweightCheckpointBuilder(
100
104
  checkpointNumber,
101
105
  constants,
106
+ feeAssetPriceModifier,
102
107
  l1ToL2Messages,
103
108
  previousCheckpointOutHashes,
104
109
  db,
@@ -148,6 +153,10 @@ export class LightweightCheckpointBuilder {
148
153
  return this.blocks.length;
149
154
  }
150
155
 
156
+ public getBlocks() {
157
+ return this.blocks;
158
+ }
159
+
151
160
  /**
152
161
  * Adds a new block to the checkpoint. The tx effects must have already been inserted into the db if
153
162
  * this is called after tx processing, if that's not the case, then set `insertTxsEffects` to true.
@@ -237,7 +246,7 @@ export class LightweightCheckpointBuilder {
237
246
 
238
247
  const newArchive = this.lastArchives[this.lastArchives.length - 1];
239
248
 
240
- const blobs = getBlobsPerL1Block(this.blobFields);
249
+ const blobs = await getBlobsPerL1Block(this.blobFields);
241
250
  const blobsHash = computeBlobsHashFromBlobs(blobs);
242
251
 
243
252
  const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages);
@@ -248,8 +257,7 @@ export class LightweightCheckpointBuilder {
248
257
  );
249
258
  const epochOutHash = accumulateCheckpointOutHashes([...this.previousCheckpointOutHashes, checkpointOutHash]);
250
259
 
251
- // TODO(palla/mbps): Should we source this from the constants instead?
252
- // timestamp of a checkpoint is the timestamp of the last block in the checkpoint.
260
+ // All blocks in the checkpoint have the same timestamp
253
261
  const timestamp = blocks[blocks.length - 1].timestamp;
254
262
 
255
263
  const totalManaUsed = blocks.reduce((acc, block) => acc.add(block.header.totalManaUsed), Fr.ZERO);
@@ -268,13 +276,14 @@ export class LightweightCheckpointBuilder {
268
276
  totalManaUsed,
269
277
  });
270
278
 
271
- return new Checkpoint(newArchive, header, blocks, this.checkpointNumber);
279
+ return new Checkpoint(newArchive, header, blocks, this.checkpointNumber, this.feeAssetPriceModifier);
272
280
  }
273
281
 
274
282
  clone() {
275
283
  const clone = new LightweightCheckpointBuilder(
276
284
  this.checkpointNumber,
277
285
  this.constants,
286
+ this.feeAssetPriceModifier,
278
287
  [...this.l1ToL2Messages],
279
288
  [...this.previousCheckpointOutHashes],
280
289
  this.db,
@@ -116,7 +116,7 @@ export class TestContext {
116
116
 
117
117
  const broker = new TestBroker(proverCount, localProver);
118
118
  const facade = new BrokerCircuitProverFacade(broker);
119
- const orchestrator = new TestProvingOrchestrator(ws, facade, EthAddress.ZERO);
119
+ const orchestrator = new TestProvingOrchestrator(ws, facade, EthAddress.ZERO, false, 10);
120
120
 
121
121
  await broker.start();
122
122
  facade.start();
@@ -250,7 +250,7 @@ export class TestContext {
250
250
  const previousCheckpointOutHashes = this.checkpointOutHashes;
251
251
  const builder = await LightweightCheckpointBuilder.startNewCheckpoint(
252
252
  checkpointNumber,
253
- constants,
253
+ { ...constants, timestamp },
254
254
  l1ToL2Messages,
255
255
  previousCheckpointOutHashes,
256
256
  cleanFork,
@@ -253,8 +253,8 @@ export function getPublicChonkVerifierPrivateInputsFromTx(tx: Tx | ProcessedTx,
253
253
  // Build "hints" as the private inputs for the checkpoint root rollup circuit.
254
254
  // The `blobCommitments` will be accumulated and checked in the root rollup against the `finalBlobChallenges`.
255
255
  // The `blobsHash` will be validated on L1 against the submitted blob data.
256
- export const buildBlobHints = (blobFields: Fr[]) => {
257
- const blobs = getBlobsPerL1Block(blobFields);
256
+ export const buildBlobHints = async (blobFields: Fr[]) => {
257
+ const blobs = await getBlobsPerL1Block(blobFields);
258
258
  const blobCommitments = getBlobCommitmentsFromBlobs(blobs);
259
259
  const blobsHash = computeBlobsHashFromBlobs(blobs);
260
260
  return { blobCommitments, blobs, blobsHash };
@@ -55,6 +55,7 @@ export class BlockProvingState {
55
55
  | ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
56
56
  | undefined;
57
57
  private builtBlockHeader: BlockHeader | undefined;
58
+ private builtArchive: AppendOnlyTreeSnapshot | undefined;
58
59
  private endState: StateReference | undefined;
59
60
  private endSpongeBlob: SpongeBlob | undefined;
60
61
  private txs: TxProvingState[] = [];
@@ -232,6 +233,14 @@ export class BlockProvingState {
232
233
  return this.builtBlockHeader;
233
234
  }
234
235
 
236
+ public setBuiltArchive(archive: AppendOnlyTreeSnapshot) {
237
+ this.builtArchive = archive;
238
+ }
239
+
240
+ public getBuiltArchive() {
241
+ return this.builtArchive;
242
+ }
243
+
235
244
  public getStartSpongeBlob() {
236
245
  return this.startSpongeBlob;
237
246
  }
@@ -85,7 +85,7 @@ export class CheckpointProvingState {
85
85
  typeof L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH
86
86
  >,
87
87
  public parentEpoch: EpochProvingState,
88
- private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void,
88
+ private onBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => Promise<void>,
89
89
  ) {
90
90
  this.blockProofs = new UnbalancedTreeStore(totalNumBlocks);
91
91
  this.firstBlockNumber = BlockNumber(headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber + 1);
@@ -245,7 +245,7 @@ export class CheckpointProvingState {
245
245
  this.endBlobAccumulator = await accumulateBlobs(this.blobFields!, startBlobAccumulator);
246
246
  this.startBlobAccumulator = startBlobAccumulator;
247
247
 
248
- this.onBlobAccumulatorSet(this);
248
+ await this.onBlobAccumulatorSet(this);
249
249
 
250
250
  return this.endBlobAccumulator;
251
251
  }
@@ -271,7 +271,7 @@ export class CheckpointProvingState {
271
271
  return this.totalNumBlocks === 1 ? 'rollup-checkpoint-root-single-block' : 'rollup-checkpoint-root';
272
272
  }
273
273
 
274
- public getCheckpointRootRollupInputs() {
274
+ public async getCheckpointRootRollupInputs() {
275
275
  const proofs = this.#getChildProofsForRoot();
276
276
  const nonEmptyProofs = proofs.filter(p => !!p);
277
277
  if (proofs.length !== nonEmptyProofs.length) {
@@ -287,7 +287,7 @@ export class CheckpointProvingState {
287
287
  // `blobFields` must've been set if `startBlobAccumulator` is set (in `accumulateBlobs`).
288
288
  const blobFields = this.blobFields!;
289
289
 
290
- const { blobCommitments, blobsHash } = buildBlobHints(blobFields);
290
+ const { blobCommitments, blobsHash } = await buildBlobHints(blobFields);
291
291
 
292
292
  const hints = CheckpointRootRollupHints.from({
293
293
  previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
@@ -76,7 +76,7 @@ export class EpochProvingState {
76
76
  public readonly epochNumber: EpochNumber,
77
77
  public readonly totalNumCheckpoints: number,
78
78
  private readonly finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
79
- private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => void,
79
+ private onCheckpointBlobAccumulatorSet: (checkpoint: CheckpointProvingState) => Promise<void>,
80
80
  private completionCallback: (result: ProvingResult) => void,
81
81
  private rejectionCallback: (reason: string) => void,
82
82
  ) {
@@ -254,9 +254,11 @@ export class EpochProvingState {
254
254
  }
255
255
  outHashes.push(outHash);
256
256
 
257
- // Get or create hints for the next checkpoint.
258
- hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
259
- checkpoint.setOutHashHintForNextCheckpoint(hint);
257
+ // If this is NOT the last checkpoint, get or create the hint for the next checkpoint.
258
+ if (i !== this.totalNumCheckpoints - 1) {
259
+ hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
260
+ checkpoint.setOutHashHintForNextCheckpoint(hint);
261
+ }
260
262
  }
261
263
  }
262
264
 
@@ -12,7 +12,9 @@ import { Fr } from '@aztec/foundation/curves/bn254';
12
12
  import { AbortError } from '@aztec/foundation/error';
13
13
  import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
14
14
  import { promiseWithResolvers } from '@aztec/foundation/promise';
15
+ import { SerialQueue } from '@aztec/foundation/queue';
15
16
  import { assertLength } from '@aztec/foundation/serialize';
17
+ import { sleep } from '@aztec/foundation/sleep';
16
18
  import { pushTestData } from '@aztec/foundation/testing';
17
19
  import { elapsed } from '@aztec/foundation/timer';
18
20
  import type { TreeNodeLocation } from '@aztec/foundation/trees';
@@ -71,11 +73,6 @@ import { EpochProvingState, type ProvingResult, type TreeSnapshots } from './epo
71
73
  import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
72
74
  import { TxProvingState } from './tx-proving-state.js';
73
75
 
74
- type WorldStateFork = {
75
- fork: MerkleTreeWriteOperations;
76
- cleanupPromise: Promise<void> | undefined;
77
- };
78
-
79
76
  /**
80
77
  * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
81
78
  * 1. Transactions are provided to the scheduler post simulation.
@@ -97,19 +94,22 @@ export class ProvingOrchestrator implements EpochProver {
97
94
  private provingPromise: Promise<ProvingResult> | undefined = undefined;
98
95
  private metrics: ProvingOrchestratorMetrics;
99
96
  // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
100
- private dbs: Map<BlockNumber, WorldStateFork> = new Map();
97
+ private dbs: Map<BlockNumber, MerkleTreeWriteOperations> = new Map();
101
98
  private logger: Logger;
99
+ private deferredJobQueue = new SerialQueue();
102
100
 
103
101
  constructor(
104
102
  private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
105
103
  private prover: ServerCircuitProver,
106
104
  private readonly proverId: EthAddress,
107
105
  private readonly cancelJobsOnStop: boolean = false,
106
+ private readonly enqueueConcurrency: number,
108
107
  telemetryClient: TelemetryClient = getTelemetryClient(),
109
108
  bindings?: LoggerBindings,
110
109
  ) {
111
110
  this.logger = createLogger('prover-client:orchestrator', bindings);
112
111
  this.metrics = new ProvingOrchestratorMetrics(telemetryClient, 'ProvingOrchestrator');
112
+ this.deferredJobQueue.start(this.enqueueConcurrency);
113
113
  }
114
114
 
115
115
  get tracer(): Tracer {
@@ -124,9 +124,11 @@ export class ProvingOrchestrator implements EpochProver {
124
124
  return this.dbs.size;
125
125
  }
126
126
 
127
- public stop(): Promise<void> {
127
+ public async stop(): Promise<void> {
128
+ // Grab the old queue before cancel() replaces it, so we can await its draining.
129
+ const oldQueue = this.deferredJobQueue;
128
130
  this.cancel();
129
- return Promise.resolve();
131
+ await oldQueue.cancel();
130
132
  }
131
133
 
132
134
  public startNewEpoch(
@@ -182,7 +184,7 @@ export class ProvingOrchestrator implements EpochProver {
182
184
  const db = await this.dbProvider.fork(lastBlockNumber);
183
185
 
184
186
  const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
185
- this.dbs.set(firstBlockNumber, { fork: db, cleanupPromise: undefined });
187
+ this.dbs.set(firstBlockNumber, db);
186
188
 
187
189
  // Get archive sibling path before any block in this checkpoint lands.
188
190
  const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
@@ -240,9 +242,9 @@ export class ProvingOrchestrator implements EpochProver {
240
242
  if (!this.dbs.has(blockNumber)) {
241
243
  // Fork world state at the end of the immediately previous block
242
244
  const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
243
- this.dbs.set(blockNumber, { fork: db, cleanupPromise: undefined });
245
+ this.dbs.set(blockNumber, db);
244
246
  }
245
- const db = this.dbs.get(blockNumber)!.fork;
247
+ const db = this.getDbForBlock(blockNumber);
246
248
 
247
249
  // Get archive snapshot and sibling path before any txs in this block lands.
248
250
  const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
@@ -317,7 +319,7 @@ export class ProvingOrchestrator implements EpochProver {
317
319
 
318
320
  this.logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
319
321
 
320
- const db = this.dbs.get(blockNumber)!.fork;
322
+ const db = this.getDbForBlock(blockNumber);
321
323
  const lastArchive = provingState.lastArchiveTreeSnapshot;
322
324
  const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
323
325
  const spongeBlobState = provingState.getStartSpongeBlob().clone();
@@ -445,14 +447,20 @@ export class ProvingOrchestrator implements EpochProver {
445
447
  throw new Error('Block header mismatch');
446
448
  }
447
449
 
448
- // Get db for this block
449
- const db = this.dbs.get(provingState.blockNumber)!.fork;
450
+ // Get db for this block and remove from map — no other code should use it after this point.
451
+ const db = this.getDbForBlock(provingState.blockNumber);
452
+ this.dbs.delete(provingState.blockNumber);
450
453
 
451
- // Update the archive tree, so we're ready to start processing the next block:
452
- this.logger.verbose(
453
- `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`,
454
- );
455
- await db.updateArchive(header);
454
+ // Update the archive tree, capture the snapshot, and close the fork deterministically.
455
+ try {
456
+ this.logger.verbose(
457
+ `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`,
458
+ );
459
+ await db.updateArchive(header);
460
+ provingState.setBuiltArchive(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db));
461
+ } finally {
462
+ await db.close();
463
+ }
456
464
 
457
465
  await this.verifyBuiltBlockAgainstSyncedState(provingState);
458
466
 
@@ -472,6 +480,13 @@ export class ProvingOrchestrator implements EpochProver {
472
480
  this.logger.debug('Block root rollup proof not built yet, skipping header check.');
473
481
  return;
474
482
  }
483
+
484
+ const newArchive = provingState.getBuiltArchive();
485
+ if (!newArchive) {
486
+ this.logger.debug('Archive snapshot not yet captured, skipping header check.');
487
+ return;
488
+ }
489
+
475
490
  const header = await buildHeaderFromCircuitOutputs(output);
476
491
 
477
492
  if (!(await header.hash()).equals(await builtBlockHeader.hash())) {
@@ -480,11 +495,7 @@ export class ProvingOrchestrator implements EpochProver {
480
495
  return;
481
496
  }
482
497
 
483
- // Get db for this block
484
498
  const blockNumber = provingState.blockNumber;
485
- const db = this.dbs.get(blockNumber)!.fork;
486
-
487
- const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
488
499
  const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
489
500
  if (!syncedArchive.equals(newArchive)) {
490
501
  this.logger.error(
@@ -502,12 +513,6 @@ export class ProvingOrchestrator implements EpochProver {
502
513
  provingState.reject(`New archive mismatch.`);
503
514
  return;
504
515
  }
505
-
506
- // TODO(palla/prover): This closes the fork only on the happy path. If this epoch orchestrator
507
- // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
508
- // but have to make sure it only runs once all operations are completed, otherwise some function here
509
- // will attempt to access the fork after it was closed.
510
- void this.cleanupDBFork(blockNumber);
511
516
  }
512
517
 
513
518
  /**
@@ -516,6 +521,11 @@ export class ProvingOrchestrator implements EpochProver {
516
521
  * If cancelJobsOnStop is false (default), jobs remain in the broker queue and can be reused on restart/reorg.
517
522
  */
518
523
  public cancel() {
524
+ void this.deferredJobQueue.cancel();
525
+ // Recreate the queue so it can accept jobs for subsequent epochs.
526
+ this.deferredJobQueue = new SerialQueue();
527
+ this.deferredJobQueue.start(this.enqueueConcurrency);
528
+
519
529
  if (this.cancelJobsOnStop) {
520
530
  for (const controller of this.pendingProvingJobs) {
521
531
  controller.abort();
@@ -523,6 +533,19 @@ export class ProvingOrchestrator implements EpochProver {
523
533
  }
524
534
 
525
535
  this.provingState?.cancel();
536
+
537
+ for (const [blockNumber, db] of this.dbs.entries()) {
538
+ void db.close().catch(err => this.logger.error(`Error closing db for block ${blockNumber}`, err));
539
+ }
540
+ this.dbs.clear();
541
+ }
542
+
543
+ private getDbForBlock(blockNumber: BlockNumber): MerkleTreeWriteOperations {
544
+ const db = this.dbs.get(blockNumber);
545
+ if (!db) {
546
+ throw new Error(`World state fork for block ${blockNumber} not found.`);
547
+ }
548
+ return db;
526
549
  }
527
550
 
528
551
  /**
@@ -554,24 +577,6 @@ export class ProvingOrchestrator implements EpochProver {
554
577
  return epochProofResult;
555
578
  }
556
579
 
557
- private async cleanupDBFork(blockNumber: BlockNumber): Promise<void> {
558
- this.logger.debug(`Cleaning up world state fork for ${blockNumber}`);
559
- const fork = this.dbs.get(blockNumber);
560
- if (!fork) {
561
- return;
562
- }
563
-
564
- try {
565
- if (!fork.cleanupPromise) {
566
- fork.cleanupPromise = fork.fork.close();
567
- }
568
- await fork.cleanupPromise;
569
- this.dbs.delete(blockNumber);
570
- } catch (err) {
571
- this.logger.error(`Error closing db for block ${blockNumber}`, err);
572
- }
573
- }
574
-
575
580
  /**
576
581
  * Enqueue a job to be scheduled
577
582
  * @param provingState - The proving state object being operated on
@@ -630,8 +635,11 @@ export class ProvingOrchestrator implements EpochProver {
630
635
  }
631
636
  };
632
637
 
633
- // let the callstack unwind before adding the job to the queue
634
- setImmediate(() => void safeJob());
638
+ void this.deferredJobQueue.put(async () => {
639
+ void safeJob();
640
+ // we yield to the macro-task queue here so that Node.js gets a chance to run other operations between enqueues
641
+ await sleep(0);
642
+ });
635
643
  }
636
644
 
637
645
  private async updateL1ToL2MessageTree(l1ToL2Messages: Fr[], db: MerkleTreeWriteOperations) {
@@ -894,17 +902,15 @@ export class ProvingOrchestrator implements EpochProver {
894
902
  const leafLocation = provingState.setBlockRootRollupProof(result);
895
903
  const checkpointProvingState = provingState.parentCheckpoint;
896
904
 
897
- // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
905
+ // Verification is called from both here and setBlockCompleted. Whichever runs last
906
+ // is the one that has all three pieces (header, proof output, archive) available and actually runs the checks.
898
907
  await this.verifyBuiltBlockAgainstSyncedState(provingState);
899
908
 
900
909
  if (checkpointProvingState.totalNumBlocks === 1) {
901
- this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
910
+ await this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
902
911
  } else {
903
- this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
912
+ await this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
904
913
  }
905
-
906
- // We are finished with the block at this point, ensure the fork is cleaned up
907
- void this.cleanupDBFork(provingState.blockNumber);
908
914
  },
909
915
  );
910
916
  }
@@ -1009,14 +1015,14 @@ export class ProvingOrchestrator implements EpochProver {
1009
1015
  },
1010
1016
  signal => this.prover.getBlockMergeRollupProof(inputs, signal, provingState.epochNumber),
1011
1017
  ),
1012
- result => {
1018
+ async result => {
1013
1019
  provingState.setBlockMergeRollupProof(location, result);
1014
- this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
1020
+ await this.checkAndEnqueueNextBlockMergeRollup(provingState, location);
1015
1021
  },
1016
1022
  );
1017
1023
  }
1018
1024
 
1019
- private enqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
1025
+ private async enqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
1020
1026
  if (!provingState.verifyState()) {
1021
1027
  this.logger.debug('Not running checkpoint root rollup. State no longer valid.');
1022
1028
  return;
@@ -1031,7 +1037,7 @@ export class ProvingOrchestrator implements EpochProver {
1031
1037
 
1032
1038
  this.logger.debug(`Enqueuing ${rollupType} for checkpoint ${provingState.index}.`);
1033
1039
 
1034
- const inputs = provingState.getCheckpointRootRollupInputs();
1040
+ const inputs = await provingState.getCheckpointRootRollupInputs();
1035
1041
 
1036
1042
  this.deferredProving(
1037
1043
  provingState,
@@ -1191,25 +1197,28 @@ export class ProvingOrchestrator implements EpochProver {
1191
1197
  this.enqueueBlockRootRollup(provingState);
1192
1198
  }
1193
1199
 
1194
- private checkAndEnqueueNextBlockMergeRollup(provingState: CheckpointProvingState, currentLocation: TreeNodeLocation) {
1200
+ private async checkAndEnqueueNextBlockMergeRollup(
1201
+ provingState: CheckpointProvingState,
1202
+ currentLocation: TreeNodeLocation,
1203
+ ) {
1195
1204
  if (!provingState.isReadyForBlockMerge(currentLocation)) {
1196
1205
  return;
1197
1206
  }
1198
1207
 
1199
1208
  const parentLocation = provingState.getParentLocation(currentLocation);
1200
1209
  if (parentLocation.level === 0) {
1201
- this.checkAndEnqueueCheckpointRootRollup(provingState);
1210
+ await this.checkAndEnqueueCheckpointRootRollup(provingState);
1202
1211
  } else {
1203
1212
  this.enqueueBlockMergeRollup(provingState, parentLocation);
1204
1213
  }
1205
1214
  }
1206
1215
 
1207
- private checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
1216
+ private async checkAndEnqueueCheckpointRootRollup(provingState: CheckpointProvingState) {
1208
1217
  if (!provingState.isReadyForCheckpointRoot()) {
1209
1218
  return;
1210
1219
  }
1211
1220
 
1212
- this.enqueueCheckpointRootRollup(provingState);
1221
+ await this.enqueueCheckpointRootRollup(provingState);
1213
1222
  }
1214
1223
 
1215
1224
  private checkAndEnqueueNextCheckpointMergeRollup(provingState: EpochProvingState, currentLocation: TreeNodeLocation) {
@@ -54,6 +54,7 @@ export class ProverClient implements EpochProverManager {
54
54
  facade,
55
55
  this.config.proverId,
56
56
  this.config.cancelJobsOnStop,
57
+ this.config.enqueueConcurrency,
57
58
  this.telemetry,
58
59
  bindings,
59
60
  );
@@ -156,7 +157,7 @@ export class ProverClient implements EpochProverManager {
156
157
  }
157
158
 
158
159
  export function buildServerCircuitProver(
159
- config: ActualProverConfig & ACVMConfig & BBConfig,
160
+ config: Omit<ActualProverConfig, 'enqueueConcurrency'> & ACVMConfig & BBConfig,
160
161
  telemetry: TelemetryClient,
161
162
  ): Promise<ServerCircuitProver> {
162
163
  if (config.realProofs) {
@@ -5,7 +5,6 @@ import type {
5
5
  RECURSIVE_PROOF_LENGTH,
6
6
  } from '@aztec/constants';
7
7
  import { EpochNumber } from '@aztec/foundation/branded-types';
8
- import { sha256 } from '@aztec/foundation/crypto/sha256';
9
8
  import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log';
10
9
  import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
11
10
  import { truncate } from '@aztec/foundation/string';
@@ -46,6 +45,8 @@ import type {
46
45
  TxRollupPublicInputs,
47
46
  } from '@aztec/stdlib/rollup';
48
47
 
48
+ import { createHash } from 'node:crypto';
49
+
49
50
  import { InlineProofStore, type ProofStore } from './proof_store/index.js';
50
51
 
51
52
  // Perform a snapshot sync every 30 seconds
@@ -659,8 +660,12 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {
659
660
  );
660
661
  }
661
662
 
662
- private generateId(type: ProvingRequestType, inputs: { toBuffer(): Buffer }, epochNumber = EpochNumber.ZERO) {
663
- const inputsHash = sha256(inputs.toBuffer());
664
- return makeProvingJobId(epochNumber, type, inputsHash.toString('hex'));
663
+ private generateId(
664
+ type: ProvingRequestType,
665
+ inputs: { toBuffer(): Buffer },
666
+ epochNumber = EpochNumber.ZERO,
667
+ ): ProvingJobId {
668
+ const inputsHash = createHash('sha256').update(inputs.toBuffer()).digest('hex');
669
+ return makeProvingJobId(epochNumber, type, inputsHash);
665
670
  }
666
671
  }
@@ -314,7 +314,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Pr
314
314
  // notify listeners of the cancellation
315
315
  if (!this.resultsCache.has(id)) {
316
316
  this.logger.info(`Cancelling job id=${id}`, { provingJobId: id });
317
- await this.#reportProvingJobError(id, 'Aborted', false);
317
+ await this.#reportProvingJobError(id, 'Aborted', false, undefined, true);
318
318
  }
319
319
  }
320
320
 
@@ -395,6 +395,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Pr
395
395
  err: string,
396
396
  retry = false,
397
397
  filter?: ProvingJobFilter,
398
+ aborted = false,
398
399
  ): Promise<GetProvingJobResponse | undefined> {
399
400
  const info = this.inProgress.get(id);
400
401
  const item = this.jobsCache.get(id);
@@ -455,7 +456,11 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Pr
455
456
  this.promises.get(id)!.resolve(result);
456
457
  this.completedJobNotifications.push(id);
457
458
 
458
- this.instrumentation.incRejectedJobs(item.type);
459
+ if (aborted) {
460
+ this.instrumentation.incAbortedJobs(item.type);
461
+ } else {
462
+ this.instrumentation.incRejectedJobs(item.type);
463
+ }
459
464
  if (info) {
460
465
  const duration = this.msTimeSource() - info.startedAt;
461
466
  this.instrumentation.recordJobDuration(item.type, duration);
@@ -18,6 +18,7 @@ export class ProvingBrokerInstrumentation {
18
18
  private activeJobs: ObservableGauge;
19
19
  private resolvedJobs: UpDownCounter;
20
20
  private rejectedJobs: UpDownCounter;
21
+ private abortedJobs: UpDownCounter;
21
22
  private timedOutJobs: UpDownCounter;
22
23
  private cachedJobs: UpDownCounter;
23
24
  private totalJobs: UpDownCounter;
@@ -39,6 +40,8 @@ export class ProvingBrokerInstrumentation {
39
40
 
40
41
  this.rejectedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_REJECTED_JOBS, provingJobAttrs);
41
42
 
43
+ this.abortedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_ABORTED_JOBS, provingJobAttrs);
44
+
42
45
  this.retriedJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_RETRIED_JOBS, provingJobAttrs);
43
46
 
44
47
  this.timedOutJobs = createUpDownCounterWithDefault(meter, Metrics.PROVING_QUEUE_TIMED_OUT_JOBS, provingJobAttrs);
@@ -72,6 +75,12 @@ export class ProvingBrokerInstrumentation {
72
75
  });
73
76
  }
74
77
 
78
+ incAbortedJobs(proofType: ProvingRequestType) {
79
+ this.abortedJobs.add(1, {
80
+ [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType],
81
+ });
82
+ }
83
+
75
84
  incRetriedJobs(proofType: ProvingRequestType) {
76
85
  this.retriedJobs.add(1, {
77
86
  [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType],