@aztec/prover-client 4.0.0-nightly.20260112 → 4.0.0-nightly.20260114

@@ -34,8 +34,17 @@ export declare class ProvingOrchestrator implements EpochProver {
34
34
  constructor(dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations, prover: ServerCircuitProver, proverId: EthAddress, telemetryClient?: TelemetryClient);
35
35
  get tracer(): Tracer;
36
36
  getProverId(): EthAddress;
37
+ getNumActiveForks(): number;
37
38
  stop(): Promise<void>;
38
39
  startNewEpoch(epochNumber: EpochNumber, totalNumCheckpoints: number, finalBlobBatchingChallenges: FinalBlobBatchingChallenges): void;
40
+ /**
41
+ * Starts a new checkpoint.
42
+ * @param checkpointIndex - The index of the checkpoint in the epoch.
43
+ * @param constants - The constants for this checkpoint.
44
+ * @param l1ToL2Messages - The set of L1 to L2 messages to be inserted at the beginning of this checkpoint.
45
+ * @param totalNumBlocks - The total number of blocks expected in the checkpoint (must be at least one).
46
+ * @param headerOfLastBlockInPreviousCheckpoint - The header of the last block in the previous checkpoint.
47
+ */
39
48
  startNewCheckpoint(checkpointIndex: number, constants: CheckpointConstantData, l1ToL2Messages: Fr[], totalNumBlocks: number, headerOfLastBlockInPreviousCheckpoint: BlockHeader): Promise<void>;
40
49
  /**
41
50
  * Starts off a new block
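A hypothetical call-site sketch based only on the declarations in this hunk. The OrchestratorLike interface and proveSingleCheckpointEpoch helper below are stand-ins for illustration (they are not exported by the package), and the constant, header, and challenge types are left as unknown because only their names appear here.

interface OrchestratorLike {
  startNewEpoch(epochNumber: number, totalNumCheckpoints: number, finalBlobBatchingChallenges: unknown): void;
  startNewCheckpoint(
    checkpointIndex: number,
    constants: unknown,
    l1ToL2Messages: unknown[],
    totalNumBlocks: number,
    headerOfLastBlockInPreviousCheckpoint: unknown,
  ): Promise<void>;
  getNumActiveForks(): number;
  stop(): Promise<void>;
}

async function proveSingleCheckpointEpoch(
  orchestrator: OrchestratorLike,
  epochNumber: number,
  checkpoint: { constants: unknown; l1ToL2Messages: unknown[]; totalNumBlocks: number; previousBlockHeader: unknown },
) {
  // One checkpoint in this epoch; totalNumBlocks must be at least one (see the JSDoc above).
  orchestrator.startNewEpoch(epochNumber, 1, /* finalBlobBatchingChallenges */ undefined);
  await orchestrator.startNewCheckpoint(
    0,
    checkpoint.constants,
    checkpoint.l1ToL2Messages,
    checkpoint.totalNumBlocks,
    checkpoint.previousBlockHeader,
  );
  // ...blocks and txs are added and the epoch is finalised elsewhere...
  // Once every block's world-state fork has been closed, the new counter should read zero.
  console.log(`active forks: ${orchestrator.getNumActiveForks()}`);
  await orchestrator.stop();
}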
@@ -73,6 +82,7 @@ export declare class ProvingOrchestrator implements EpochProver {
73
82
  proof: Proof;
74
83
  batchedBlobInputs: BatchedBlob;
75
84
  }>;
85
+ private cleanupDBFork;
76
86
  /**
77
87
  * Enqueue a job to be scheduled
78
88
  * @param provingState - The proving state object being operated on
@@ -110,4 +120,4 @@ export declare class ProvingOrchestrator implements EpochProver {
110
120
  private enqueueVM;
111
121
  private checkAndEnqueueBaseRollup;
112
122
  }
113
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3JjaGVzdHJhdG9yLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvb3JjaGVzdHJhdG9yL29yY2hlc3RyYXRvci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsV0FBVyxFQUFFLDJCQUEyQixFQUFjLE1BQU0sdUJBQXVCLENBQUM7QUFRN0YsT0FBTyxFQUFFLFdBQVcsRUFBRSxXQUFXLEVBQUUsTUFBTSxpQ0FBaUMsQ0FBQztBQUUzRSxPQUFPLEVBQUUsRUFBRSxFQUFFLE1BQU0sZ0NBQWdDLENBQUM7QUFRcEQsT0FBTyxFQUFFLFVBQVUsRUFBRSxNQUFNLHFCQUFxQixDQUFDO0FBQ2pELE9BQU8sS0FBSyxFQUNWLFdBQVcsRUFDWCx3QkFBd0IsRUFHeEIsd0JBQXdCLEVBQ3hCLG1CQUFtQixFQUNwQixNQUFNLGlDQUFpQyxDQUFDO0FBQ3pDLE9BQU8sS0FBSyxFQUFFLEtBQUssRUFBRSxNQUFNLHNCQUFzQixDQUFDO0FBQ2xELE9BQU8sRUFNTCxzQkFBc0IsRUFLdEIsc0JBQXNCLEVBQ3ZCLE1BQU0sc0JBQXNCLENBQUM7QUFHOUIsT0FBTyxLQUFLLEVBQUUsV0FBVyxFQUFFLFdBQVcsRUFBRSxFQUFFLEVBQUUsTUFBTSxrQkFBa0IsQ0FBQztBQUNyRSxPQUFPLEtBQUssRUFBRSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQztBQUNsRCxPQUFPLEVBRUwsS0FBSyxlQUFlLEVBQ3BCLEtBQUssTUFBTSxFQUlaLE1BQU0seUJBQXlCLENBQUM7QUFlakMsT0FBTyxLQUFLLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQVFsRTs7Ozs7Ozs7O0dBU0c7QUFFSDs7R0FFRztBQUNILHFCQUFhLG1CQUFvQixZQUFXLFdBQVc7SUFVbkQsT0FBTyxDQUFDLFVBQVU7SUFDbEIsT0FBTyxDQUFDLE1BQU07SUFDZCxPQUFPLENBQUMsUUFBUSxDQUFDLFFBQVE7SUFYM0IsT0FBTyxDQUFDLFlBQVksQ0FBNEM7SUFDaEUsT0FBTyxDQUFDLGtCQUFrQixDQUF5QjtJQUVuRCxPQUFPLENBQUMsY0FBYyxDQUFpRDtJQUN2RSxPQUFPLENBQUMsT0FBTyxDQUE2QjtJQUU1QyxPQUFPLENBQUMsR0FBRyxDQUEwRDtJQUVyRSxZQUNVLFVBQVUsRUFBRSx3QkFBd0IsR0FBRyx3QkFBd0IsRUFDL0QsTUFBTSxFQUFFLG1CQUFtQixFQUNsQixRQUFRLEVBQUUsVUFBVSxFQUNyQyxlQUFlLEdBQUUsZUFBc0MsRUFHeEQ7SUFFRCxJQUFJLE1BQU0sSUFBSSxNQUFNLENBRW5CO0lBRU0sV0FBVyxJQUFJLFVBQVUsQ0FFL0I7SUFFTSxJQUFJLElBQUksT0FBTyxDQUFDLElBQUksQ0FBQyxDQUczQjtJQUVNLGFBQWEsQ0FDbEIsV0FBVyxFQUFFLFdBQVcsRUFDeEIsbUJBQW1CLEVBQUUsTUFBTSxFQUMzQiwyQkFBMkIsRUFBRSwyQkFBMkIsUUFvQnpEO0lBRVksa0JBQWtCLENBQzdCLGVBQWUsRUFBRSxNQUFNLEVBQ3ZCLFNBQVMsRUFBRSxzQkFBc0IsRUFDakMsY0FBYyxFQUFFLEVBQUUsRUFBRSxFQUNwQixjQUFjLEVBQUUsTUFBTSxFQUN0QixxQ0FBcUMsRUFBRSxXQUFXLGlCQXdDbkQ7SUFFRDs7Ozs7O09BTUc7SUFJVSxhQUFhLENBQUMsV0FBVyxFQUFFLFdBQVcsRUFBRSxTQUFTLEVBQUUsTUFBTSxFQUFFLFdBQVcsRUFBRSxNQUFNLGlCQTBEMUY7SUFFRDs7O09BR0c7SUFJVSxNQUFNLENBQUMsR0FBRyxFQUFFLFdBQVcsRUFBRSxHQUFHLE9BQU8sQ0FBQyxJQUFJLENBQUMsQ0F1RnJEO0lBRUQ7OztPQUdHO0lBRUksMEJBQTBCLENBQUMsR0FBRyxFQUFFLEVBQUUsRUFBRSxpQkFzQjFDO0lBRUQ7OztPQUdHO0lBSVUsaUJBQWlCLENBQUMsV0FBVyxFQUFFLFdBQVcsRUFBRSxjQUFjLENBQUMsRUFBRSxXQUFXLEdBQUcsT0FBTyxDQUFDLFdBQVcsQ0FBQyxDQTRDM0c7SUFHRCxVQUFnQixrQ0FBa0MsQ0FBQyxZQUFZLEVBQUUsaUJBQWlCLGlCQXFEakY7SUFFRDs7T0FFRztJQUNJLE1BQU0sU0FNWjtJQUVEOztPQUVHO0lBQ1UsYUFBYSxJQUFJLE9BQU8sQ0FBQztRQUNwQyxZQUFZLEVBQUUsc0JBQXNCLENBQUM7UUFDckMsS0FBSyxFQUFFLEtBQUssQ0FBQztRQUNiLGlCQUFpQixFQUFFLFdBQVcsQ0FBQztLQUNoQyxDQUFDLENBb0JEO0lBRUQ7Ozs7O09BS0c7SUFDSCxPQUFPLENBQUMsZUFBZTtZQXdEVCx1QkFBdUI7WUFtQ3ZCLHVCQUF1QjtJQWtDckMsT0FBTyxDQUFDLGlCQUFpQjtJQXFEekIsT0FBTyxDQUFDLHlCQUF5QjtJQStCakMsT0FBTyxDQUFDLHNCQUFzQjtJQWlDOUIsT0FBTyxDQUFDLGtCQUFrQjtJQStCMUIsT0FBTyxDQUFDLHNCQUFzQjtJQXlEOUIsT0FBTyxDQUFDLHdCQUF3QjtJQWtDaEMsT0FBTyxDQUFDLGdDQUFnQztJQVV4QyxPQUFPLENBQUMsd0JBQXdCO0lBZ0NoQyxPQUFPLENBQUMsdUJBQXVCO0lBNkIvQixPQUFPLENBQUMsMkJBQTJCO0lBNERuQyxPQUFPLENBQUMsNEJBQTRCO0lBK0JwQyxPQUFPLENBQUMsbUJBQW1CO0lBa0MzQixPQUFPLENBQUMsaUJBQWlCO0lBNEJ6QixPQUFPLENBQUMsOEJBQThCO0lBYXRDLE9BQU8sQ0FBQyw4QkFBOEI7SUFTdEMsT0FBTyxDQUFDLG1DQUFtQztJQWEzQyxPQUFPLENBQUMsbUNBQW1DO0lBUTNDLE9BQU8sQ0FBQyx3Q0FBd0M7SUFhaEQsT0FBTyxDQUFDLHlCQUF5QjtJQVNqQzs7Ozs7T0FLRztJQUNILE9BQU8sQ0FBQyxTQUFTO0lBMkJqQixPQUFPLENBQUMseUJBQXlCO0NBV2xDIn0=
123
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoib3JjaGVzdHJhdG9yLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi9zcmMvb3JjaGVzdHJhdG9yL29yY2hlc3RyYXRvci50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSxPQUFPLEVBQUUsV0FBVyxFQUFFLDJCQUEyQixFQUFjLE1BQU0sdUJBQXVCLENBQUM7QUFRN0YsT0FBTyxFQUFFLFdBQVcsRUFBRSxXQUFXLEVBQUUsTUFBTSxpQ0FBaUMsQ0FBQztBQUUzRSxPQUFPLEVBQUUsRUFBRSxFQUFFLE1BQU0sZ0NBQWdDLENBQUM7QUFRcEQsT0FBTyxFQUFFLFVBQVUsRUFBRSxNQUFNLHFCQUFxQixDQUFDO0FBQ2pELE9BQU8sS0FBSyxFQUNWLFdBQVcsRUFDWCx3QkFBd0IsRUFHeEIsd0JBQXdCLEVBQ3hCLG1CQUFtQixFQUNwQixNQUFNLGlDQUFpQyxDQUFDO0FBQ3pDLE9BQU8sS0FBSyxFQUFFLEtBQUssRUFBRSxNQUFNLHNCQUFzQixDQUFDO0FBQ2xELE9BQU8sRUFNTCxzQkFBc0IsRUFLdEIsc0JBQXNCLEVBQ3ZCLE1BQU0sc0JBQXNCLENBQUM7QUFHOUIsT0FBTyxLQUFLLEVBQUUsV0FBVyxFQUFFLFdBQVcsRUFBRSxFQUFFLEVBQUUsTUFBTSxrQkFBa0IsQ0FBQztBQUNyRSxPQUFPLEtBQUssRUFBRSxNQUFNLEVBQUUsTUFBTSxxQkFBcUIsQ0FBQztBQUNsRCxPQUFPLEVBRUwsS0FBSyxlQUFlLEVBQ3BCLEtBQUssTUFBTSxFQUlaLE1BQU0seUJBQXlCLENBQUM7QUFlakMsT0FBTyxLQUFLLEVBQUUsaUJBQWlCLEVBQUUsTUFBTSwwQkFBMEIsQ0FBQztBQVFsRTs7Ozs7Ozs7O0dBU0c7QUFFSDs7R0FFRztBQUNILHFCQUFhLG1CQUFvQixZQUFXLFdBQVc7SUFVbkQsT0FBTyxDQUFDLFVBQVU7SUFDbEIsT0FBTyxDQUFDLE1BQU07SUFDZCxPQUFPLENBQUMsUUFBUSxDQUFDLFFBQVE7SUFYM0IsT0FBTyxDQUFDLFlBQVksQ0FBNEM7SUFDaEUsT0FBTyxDQUFDLGtCQUFrQixDQUF5QjtJQUVuRCxPQUFPLENBQUMsY0FBYyxDQUFpRDtJQUN2RSxPQUFPLENBQUMsT0FBTyxDQUE2QjtJQUU1QyxPQUFPLENBQUMsR0FBRyxDQUEwRDtJQUVyRSxZQUNVLFVBQVUsRUFBRSx3QkFBd0IsR0FBRyx3QkFBd0IsRUFDL0QsTUFBTSxFQUFFLG1CQUFtQixFQUNsQixRQUFRLEVBQUUsVUFBVSxFQUNyQyxlQUFlLEdBQUUsZUFBc0MsRUFHeEQ7SUFFRCxJQUFJLE1BQU0sSUFBSSxNQUFNLENBRW5CO0lBRU0sV0FBVyxJQUFJLFVBQVUsQ0FFL0I7SUFFTSxpQkFBaUIsV0FFdkI7SUFFTSxJQUFJLElBQUksT0FBTyxDQUFDLElBQUksQ0FBQyxDQUczQjtJQUVNLGFBQWEsQ0FDbEIsV0FBVyxFQUFFLFdBQVcsRUFDeEIsbUJBQW1CLEVBQUUsTUFBTSxFQUMzQiwyQkFBMkIsRUFBRSwyQkFBMkIsUUFvQnpEO0lBRUQ7Ozs7Ozs7T0FPRztJQUNVLGtCQUFrQixDQUM3QixlQUFlLEVBQUUsTUFBTSxFQUN2QixTQUFTLEVBQUUsc0JBQXNCLEVBQ2pDLGNBQWMsRUFBRSxFQUFFLEVBQUUsRUFDcEIsY0FBYyxFQUFFLE1BQU0sRUFDdEIscUNBQXFDLEVBQUUsV0FBVyxpQkF3Q25EO0lBRUQ7Ozs7OztPQU1HO0lBSVUsYUFBYSxDQUFDLFdBQVcsRUFBRSxXQUFXLEVBQUUsU0FBUyxFQUFFLE1BQU0sRUFBRSxXQUFXLEVBQUUsTUFBTSxpQkEyRDFGO0lBRUQ7OztPQUdHO0lBSVUsTUFBTSxDQUFDLEdBQUcsRUFBRSxXQUFXLEVBQUUsR0FBRyxPQUFPLENBQUMsSUFBSSxDQUFDLENBd0ZyRDtJQUVEOzs7T0FHRztJQUVJLDBCQUEwQixDQUFDLEdBQUcsRUFBRSxFQUFFLEVBQUUsaUJBc0IxQztJQUVEOzs7T0FHRztJQUlVLGlCQUFpQixDQUFDLFdBQVcsRUFBRSxXQUFXLEVBQUUsY0FBYyxDQUFDLEVBQUUsV0FBVyxHQUFHLE9BQU8sQ0FBQyxXQUFXLENBQUMsQ0E0QzNHO0lBR0QsVUFBZ0Isa0NBQWtDLENBQUMsWUFBWSxFQUFFLGlCQUFpQixpQkFnRGpGO0lBRUQ7O09BRUc7SUFDSSxNQUFNLFNBTVo7SUFFRDs7T0FFRztJQUNVLGFBQWEsSUFBSSxPQUFPLENBQUM7UUFDcEMsWUFBWSxFQUFFLHNCQUFzQixDQUFDO1FBQ3JDLEtBQUssRUFBRSxLQUFLLENBQUM7UUFDYixpQkFBaUIsRUFBRSxXQUFXLENBQUM7S0FDaEMsQ0FBQyxDQW9CRDtZQUVhLGFBQWE7SUFlM0I7Ozs7O09BS0c7SUFDSCxPQUFPLENBQUMsZUFBZTtZQXdEVCx1QkFBdUI7WUFtQ3ZCLHVCQUF1QjtJQWtDckMsT0FBTyxDQUFDLGlCQUFpQjtJQXFEekIsT0FBTyxDQUFDLHlCQUF5QjtJQStCakMsT0FBTyxDQUFDLHNCQUFzQjtJQWlDOUIsT0FBTyxDQUFDLGtCQUFrQjtJQStCMUIsT0FBTyxDQUFDLHNCQUFzQjtJQTREOUIsT0FBTyxDQUFDLHdCQUF3QjtJQWtDaEMsT0FBTyxDQUFDLGdDQUFnQztJQVV4QyxPQUFPLENBQUMsd0JBQXdCO0lBZ0NoQyxPQUFPLENBQUMsdUJBQXVCO0lBNkIvQixPQUFPLENBQUMsMkJBQTJCO0lBNERuQyxPQUFPLENBQUMsNEJBQTRCO0lBK0JwQyxPQUFPLENBQUMsbUJBQW1CO0lBa0MzQixPQUFPLENBQUMsaUJBQWlCO0lBNEJ6QixPQUFPLENBQUMsOEJBQThCO0lBYXRDLE9BQU8sQ0FBQyw4QkFBOEI7SUFTdEMsT0FBTyxDQUFDLG1DQUFtQztJQWEzQyxPQUFPLENBQUMsbUNBQW1DO0lBUTNDLE9BQU8sQ0FBQyx3Q0FBd0M7SUFhaEQsT0FBTyxDQUFDLHlCQUF5QjtJQVNqQzs7Ozs7T0FLRztJQUNILE9BQU8sQ0FBQyxTQUFTO0lBMkJqQixPQUFPLENBQUMseUJBQXlCO0NBV2xDIn0=
@@ -1 +1 @@
1
- {"version":3,"file":"orchestrator.d.ts","sourceRoot":"","sources":["../../src/orchestrator/orchestrator.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,2BAA2B,EAAc,MAAM,uBAAuB,CAAC;AAQ7F,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,iCAAiC,CAAC;AAE3E,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAQpD,OAAO,EAAE,UAAU,EAAE,MAAM,qBAAqB,CAAC;AACjD,OAAO,KAAK,EACV,WAAW,EACX,wBAAwB,EAGxB,wBAAwB,EACxB,mBAAmB,EACpB,MAAM,iCAAiC,CAAC;AACzC,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,sBAAsB,CAAC;AAClD,OAAO,EAML,sBAAsB,EAKtB,sBAAsB,EACvB,MAAM,sBAAsB,CAAC;AAG9B,OAAO,KAAK,EAAE,WAAW,EAAE,WAAW,EAAE,EAAE,EAAE,MAAM,kBAAkB,CAAC;AACrE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAEL,KAAK,eAAe,EACpB,KAAK,MAAM,EAIZ,MAAM,yBAAyB,CAAC;AAejC,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAQlE;;;;;;;;;GASG;AAEH;;GAEG;AACH,qBAAa,mBAAoB,YAAW,WAAW;IAUnD,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAX3B,OAAO,CAAC,YAAY,CAA4C;IAChE,OAAO,CAAC,kBAAkB,CAAyB;IAEnD,OAAO,CAAC,cAAc,CAAiD;IACvE,OAAO,CAAC,OAAO,CAA6B;IAE5C,OAAO,CAAC,GAAG,CAA0D;IAErE,YACU,UAAU,EAAE,wBAAwB,GAAG,wBAAwB,EAC/D,MAAM,EAAE,mBAAmB,EAClB,QAAQ,EAAE,UAAU,EACrC,eAAe,GAAE,eAAsC,EAGxD;IAED,IAAI,MAAM,IAAI,MAAM,CAEnB;IAEM,WAAW,IAAI,UAAU,CAE/B;IAEM,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAG3B;IAEM,aAAa,CAClB,WAAW,EAAE,WAAW,EACxB,mBAAmB,EAAE,MAAM,EAC3B,2BAA2B,EAAE,2BAA2B,QAoBzD;IAEY,kBAAkB,CAC7B,eAAe,EAAE,MAAM,EACvB,SAAS,EAAE,sBAAsB,EACjC,cAAc,EAAE,EAAE,EAAE,EACpB,cAAc,EAAE,MAAM,EACtB,qCAAqC,EAAE,WAAW,iBAwCnD;IAED;;;;;;OAMG;IAIU,aAAa,CAAC,WAAW,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,iBA0D1F;IAED;;;OAGG;IAIU,MAAM,CAAC,GAAG,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAuFrD;IAED;;;OAGG;IAEI,0BAA0B,CAAC,GAAG,EAAE,EAAE,EAAE,iBAsB1C;IAED;;;OAGG;IAIU,iBAAiB,CAAC,WAAW,EAAE,WAAW,EAAE,cAAc,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC,CA4C3G;IAGD,UAAgB,kCAAkC,CAAC,YAAY,EAAE,iBAAiB,iBAqDjF;IAED;;OAEG;IACI,MAAM,SAMZ;IAED;;OAEG;IACU,aAAa,IAAI,OAAO,CAAC;QACpC,YAAY,EAAE,sBAAsB,CAAC;QACrC,KAAK,EAAE,KAAK,CAAC;QACb,iBAAiB,EAAE,WAAW,CAAC;KAChC,CAAC,CAoBD;IAED;;;;;OAKG;IACH,OAAO,CAAC,eAAe;YAwDT,uBAAuB;YAmCvB,uBAAuB;IAkCrC,OAAO,CAAC,iBAAiB;IAqDzB,OAAO,CAAC,yBAAyB;IA+BjC,OAAO,CAAC,sBAAsB;IAiC9B,OAAO,CAAC,kBAAkB;IA+B1B,OAAO,CAAC,sBAAsB;IAyD9B,OAAO,CAAC,wBAAwB;IAkChC,OAAO,CAAC,gCAAgC;IAUxC,OAAO,CAAC,wBAAwB;IAgChC,OAAO,CAAC,uBAAuB;IA6B/B,OAAO,CAAC,2BAA2B;IA4DnC,OAAO,CAAC,4BAA4B;IA+BpC,OAAO,CAAC,mBAAmB;IAkC3B,OAAO,CAAC,iBAAiB;IA4BzB,OAAO,CAAC,8BAA8B;IAatC,OAAO,CAAC,8BAA8B;IAStC,OAAO,CAAC,mCAAmC;IAa3C,OAAO,CAAC,mCAAmC;IAQ3C,OAAO,CAAC,wCAAwC;IAahD,OAAO,CAAC,yBAAyB;IASjC;;;;;OAKG;IACH,OAAO,CAAC,SAAS;IA2BjB,OAAO,CAAC,yBAAyB;CAWlC"}
1
+ {"version":3,"file":"orchestrator.d.ts","sourceRoot":"","sources":["../../src/orchestrator/orchestrator.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,2BAA2B,EAAc,MAAM,uBAAuB,CAAC;AAQ7F,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,MAAM,iCAAiC,CAAC;AAE3E,OAAO,EAAE,EAAE,EAAE,MAAM,gCAAgC,CAAC;AAQpD,OAAO,EAAE,UAAU,EAAE,MAAM,qBAAqB,CAAC;AACjD,OAAO,KAAK,EACV,WAAW,EACX,wBAAwB,EAGxB,wBAAwB,EACxB,mBAAmB,EACpB,MAAM,iCAAiC,CAAC;AACzC,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,sBAAsB,CAAC;AAClD,OAAO,EAML,sBAAsB,EAKtB,sBAAsB,EACvB,MAAM,sBAAsB,CAAC;AAG9B,OAAO,KAAK,EAAE,WAAW,EAAE,WAAW,EAAE,EAAE,EAAE,MAAM,kBAAkB,CAAC;AACrE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAEL,KAAK,eAAe,EACpB,KAAK,MAAM,EAIZ,MAAM,yBAAyB,CAAC;AAejC,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAQlE;;;;;;;;;GASG;AAEH;;GAEG;AACH,qBAAa,mBAAoB,YAAW,WAAW;IAUnD,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,QAAQ,CAAC,QAAQ;IAX3B,OAAO,CAAC,YAAY,CAA4C;IAChE,OAAO,CAAC,kBAAkB,CAAyB;IAEnD,OAAO,CAAC,cAAc,CAAiD;IACvE,OAAO,CAAC,OAAO,CAA6B;IAE5C,OAAO,CAAC,GAAG,CAA0D;IAErE,YACU,UAAU,EAAE,wBAAwB,GAAG,wBAAwB,EAC/D,MAAM,EAAE,mBAAmB,EAClB,QAAQ,EAAE,UAAU,EACrC,eAAe,GAAE,eAAsC,EAGxD;IAED,IAAI,MAAM,IAAI,MAAM,CAEnB;IAEM,WAAW,IAAI,UAAU,CAE/B;IAEM,iBAAiB,WAEvB;IAEM,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAG3B;IAEM,aAAa,CAClB,WAAW,EAAE,WAAW,EACxB,mBAAmB,EAAE,MAAM,EAC3B,2BAA2B,EAAE,2BAA2B,QAoBzD;IAED;;;;;;;OAOG;IACU,kBAAkB,CAC7B,eAAe,EAAE,MAAM,EACvB,SAAS,EAAE,sBAAsB,EACjC,cAAc,EAAE,EAAE,EAAE,EACpB,cAAc,EAAE,MAAM,EACtB,qCAAqC,EAAE,WAAW,iBAwCnD;IAED;;;;;;OAMG;IAIU,aAAa,CAAC,WAAW,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,iBA2D1F;IAED;;;OAGG;IAIU,MAAM,CAAC,GAAG,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAwFrD;IAED;;;OAGG;IAEI,0BAA0B,CAAC,GAAG,EAAE,EAAE,EAAE,iBAsB1C;IAED;;;OAGG;IAIU,iBAAiB,CAAC,WAAW,EAAE,WAAW,EAAE,cAAc,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC,CA4C3G;IAGD,UAAgB,kCAAkC,CAAC,YAAY,EAAE,iBAAiB,iBAgDjF;IAED;;OAEG;IACI,MAAM,SAMZ;IAED;;OAEG;IACU,aAAa,IAAI,OAAO,CAAC;QACpC,YAAY,EAAE,sBAAsB,CAAC;QACrC,KAAK,EAAE,KAAK,CAAC;QACb,iBAAiB,EAAE,WAAW,CAAC;KAChC,CAAC,CAoBD;YAEa,aAAa;IAe3B;;;;;OAKG;IACH,OAAO,CAAC,eAAe;YAwDT,uBAAuB;YAmCvB,uBAAuB;IAkCrC,OAAO,CAAC,iBAAiB;IAqDzB,OAAO,CAAC,yBAAyB;IA+BjC,OAAO,CAAC,sBAAsB;IAiC9B,OAAO,CAAC,kBAAkB;IA+B1B,OAAO,CAAC,sBAAsB;IA4D9B,OAAO,CAAC,wBAAwB;IAkChC,OAAO,CAAC,gCAAgC;IAUxC,OAAO,CAAC,wBAAwB;IAgChC,OAAO,CAAC,uBAAuB;IA6B/B,OAAO,CAAC,2BAA2B;IA4DnC,OAAO,CAAC,4BAA4B;IA+BpC,OAAO,CAAC,mBAAmB;IAkC3B,OAAO,CAAC,iBAAiB;IA4BzB,OAAO,CAAC,8BAA8B;IAatC,OAAO,CAAC,8BAA8B;IAStC,OAAO,CAAC,mCAAmC;IAa3C,OAAO,CAAC,mCAAmC;IAQ3C,OAAO,CAAC,wCAAwC;IAahD,OAAO,CAAC,yBAAyB;IASjC;;;;;OAKG;IACH,OAAO,CAAC,SAAS;IA2BjB,OAAO,CAAC,yBAAyB;CAWlC"}
@@ -465,6 +465,9 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
465
465
  getProverId() {
466
466
  return this.proverId;
467
467
  }
468
+ getNumActiveForks() {
469
+ return this.dbs.size;
470
+ }
468
471
  stop() {
469
472
  this.cancel();
470
473
  return Promise.resolve();
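Illustrative only: with getNumActiveForks() exposing this.dbs.size, a test or health check can assert that no world-state forks were leaked once proving settles. The helper name and the minimal structural type are stand-ins, not part of the package.

function assertNoLeakedForks(orchestrator: { getNumActiveForks(): number }): void {
  const active = orchestrator.getNumActiveForks();
  if (active !== 0) {
    throw new Error(`expected all world-state forks to be closed, but ${active} remain open`);
  }
}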
@@ -482,7 +485,14 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
482
485
  this.provingState = new EpochProvingState(epochNumber, totalNumCheckpoints, finalBlobBatchingChallenges, (provingState)=>this.checkAndEnqueueCheckpointRootRollup(provingState), resolve, reject);
483
486
  this.provingPromise = promise;
484
487
  }
485
- async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint) {
488
+ /**
489
+ * Starts a new checkpoint.
490
+ * @param checkpointIndex - The index of the checkpoint in the epoch.
491
+ * @param constants - The constants for this checkpoint.
492
+ * @param l1ToL2Messages - The set of L1 to L2 messages to be inserted at the beginning of this checkpoint.
493
+ * @param totalNumBlocks - The total number of blocks expected in the checkpoint (must be at least one).
494
+ * @param headerOfLastBlockInPreviousCheckpoint - The header of the last block in the previous checkpoint.
495
+ */ async startNewCheckpoint(checkpointIndex, constants, l1ToL2Messages, totalNumBlocks, headerOfLastBlockInPreviousCheckpoint) {
486
496
  if (!this.provingState) {
487
497
  throw new Error('Empty epoch proving state. Call startNewEpoch before starting a checkpoint.');
488
498
  }
@@ -545,7 +555,8 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
545
555
  const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
546
556
  await endSpongeBlob.absorb(blockEndBlobFields);
547
557
  blockProvingState.setEndSpongeBlob(endSpongeBlob);
548
- // And also try to accumulate the blobs as far as we can:
558
+ // Try to accumulate the out hashes and blobs as far as we can:
559
+ await this.provingState.accumulateCheckpointOutHashes();
549
560
  await this.provingState.setBlobAccumulators();
550
561
  }
551
562
  }
@@ -612,7 +623,8 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
612
623
  const blockEndBlobFields = provingState.getBlockEndBlobFields();
613
624
  await spongeBlobState.absorb(blockEndBlobFields);
614
625
  provingState.setEndSpongeBlob(spongeBlobState);
615
- // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
626
+ // Txs have been added to the block. Now try to accumulate the out hashes and blobs as far as we can:
627
+ await this.provingState.accumulateCheckpointOutHashes();
616
628
  await this.provingState.setBlobAccumulators();
617
629
  }
618
630
  /**
@@ -708,8 +720,7 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
708
720
  // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
709
721
  // but have to make sure it only runs once all operations are completed, otherwise some function here
710
722
  // will attempt to access the fork after it was closed.
711
- logger.debug(`Cleaning up world state fork for ${blockNumber}`);
712
- void this.dbs.get(blockNumber)?.close().then(()=>this.dbs.delete(blockNumber)).catch((err)=>logger.error(`Error closing db for block ${blockNumber}`, err));
723
+ void this.cleanupDBFork(blockNumber);
713
724
  }
714
725
  /**
715
726
  * Cancel any further proving
@@ -737,6 +748,19 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
737
748
  });
738
749
  return epochProofResult;
739
750
  }
751
+ async cleanupDBFork(blockNumber) {
752
+ logger.debug(`Cleaning up world state fork for ${blockNumber}`);
753
+ const fork = this.dbs.get(blockNumber);
754
+ if (!fork) {
755
+ return;
756
+ }
757
+ try {
758
+ await fork.close();
759
+ this.dbs.delete(blockNumber);
760
+ } catch (err) {
761
+ logger.error(`Error closing db for block ${blockNumber}`, err);
762
+ }
763
+ }
740
764
  /**
741
765
  * Enqueue a job to be scheduled
742
766
  * @param provingState - The proving state object being operated on
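A minimal, self-contained sketch of the fire-and-forget cleanup pattern introduced above: the async helper never rejects (errors are caught and logged), so callers can safely detach it with `void`, exactly like `void this.cleanupDBFork(blockNumber)`. The Fork and logger shapes here are stand-ins.

interface Fork {
  close(): Promise<void>;
}

class ForkRegistry {
  private forks = new Map<number, Fork>();
  constructor(private log: (msg: string, err?: unknown) => void = console.error) {}

  register(blockNumber: number, fork: Fork) {
    this.forks.set(blockNumber, fork);
  }

  // Mirrors cleanupDBFork: look up, close, delete on success, and swallow (but log) any close error.
  async cleanup(blockNumber: number): Promise<void> {
    const fork = this.forks.get(blockNumber);
    if (!fork) {
      return; // already cleaned up, or never registered
    }
    try {
      await fork.close();
      this.forks.delete(blockNumber);
    } catch (err) {
      this.log(`Error closing fork for block ${blockNumber}`, err);
    }
  }
}

// Usage: detach without awaiting; any failure is handled inside cleanup().
// void registry.cleanup(blockNumber);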
@@ -945,16 +969,18 @@ _dec = trackSpan('ProvingOrchestrator.startNewBlock', (blockNumber)=>({
945
969
  return this.prover.getBlockRootRollupProof(inputs, signal, provingState.epochNumber);
946
970
  }
947
971
  }), async (result)=>{
948
- // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
949
- await this.verifyBuiltBlockAgainstSyncedState(provingState);
950
972
  logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
951
973
  const leafLocation = provingState.setBlockRootRollupProof(result);
952
974
  const checkpointProvingState = provingState.parentCheckpoint;
975
+ // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
976
+ await this.verifyBuiltBlockAgainstSyncedState(provingState);
953
977
  if (checkpointProvingState.totalNumBlocks === 1) {
954
978
  this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
955
979
  } else {
956
980
  this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
957
981
  }
982
+ // We are finished with the block at this point, ensure the fork is cleaned up
983
+ void this.cleanupDBFork(provingState.blockNumber);
958
984
  });
959
985
  }
960
986
  // Executes the base parity circuit and stores the intermediate state for the root parity circuit
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aztec/prover-client",
3
- "version": "4.0.0-nightly.20260112",
3
+ "version": "4.0.0-nightly.20260114",
4
4
  "type": "module",
5
5
  "exports": {
6
6
  ".": "./dest/index.js",
@@ -69,19 +69,19 @@
69
69
  ]
70
70
  },
71
71
  "dependencies": {
72
- "@aztec/bb-prover": "4.0.0-nightly.20260112",
73
- "@aztec/blob-lib": "4.0.0-nightly.20260112",
74
- "@aztec/constants": "4.0.0-nightly.20260112",
75
- "@aztec/ethereum": "4.0.0-nightly.20260112",
76
- "@aztec/foundation": "4.0.0-nightly.20260112",
77
- "@aztec/kv-store": "4.0.0-nightly.20260112",
78
- "@aztec/noir-protocol-circuits-types": "4.0.0-nightly.20260112",
79
- "@aztec/noir-types": "4.0.0-nightly.20260112",
80
- "@aztec/protocol-contracts": "4.0.0-nightly.20260112",
81
- "@aztec/simulator": "4.0.0-nightly.20260112",
82
- "@aztec/stdlib": "4.0.0-nightly.20260112",
83
- "@aztec/telemetry-client": "4.0.0-nightly.20260112",
84
- "@aztec/world-state": "4.0.0-nightly.20260112",
72
+ "@aztec/bb-prover": "4.0.0-nightly.20260114",
73
+ "@aztec/blob-lib": "4.0.0-nightly.20260114",
74
+ "@aztec/constants": "4.0.0-nightly.20260114",
75
+ "@aztec/ethereum": "4.0.0-nightly.20260114",
76
+ "@aztec/foundation": "4.0.0-nightly.20260114",
77
+ "@aztec/kv-store": "4.0.0-nightly.20260114",
78
+ "@aztec/noir-protocol-circuits-types": "4.0.0-nightly.20260114",
79
+ "@aztec/noir-types": "4.0.0-nightly.20260114",
80
+ "@aztec/protocol-contracts": "4.0.0-nightly.20260114",
81
+ "@aztec/simulator": "4.0.0-nightly.20260114",
82
+ "@aztec/stdlib": "4.0.0-nightly.20260114",
83
+ "@aztec/telemetry-client": "4.0.0-nightly.20260114",
84
+ "@aztec/world-state": "4.0.0-nightly.20260114",
85
85
  "@google-cloud/storage": "^7.15.0",
86
86
  "@iarna/toml": "^2.2.5",
87
87
  "commander": "^12.1.0",
@@ -91,7 +91,7 @@
91
91
  "zod": "^3.23.8"
92
92
  },
93
93
  "devDependencies": {
94
- "@aztec/noir-contracts.js": "4.0.0-nightly.20260112",
94
+ "@aztec/noir-contracts.js": "4.0.0-nightly.20260114",
95
95
  "@jest/globals": "^30.0.0",
96
96
  "@types/jest": "^30.0.0",
97
97
  "@types/node": "^22.15.17",
@@ -5,7 +5,11 @@ import { Fr } from '@aztec/foundation/curves/bn254';
5
5
  import { createLogger } from '@aztec/foundation/log';
6
6
  import { L2Block, L2BlockHeader } from '@aztec/stdlib/block';
7
7
  import type { IBlockFactory, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
8
- import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
8
+ import {
9
+ accumulateCheckpointOutHashes,
10
+ computeBlockOutHash,
11
+ computeInHashFromL1ToL2Messages,
12
+ } from '@aztec/stdlib/messaging';
9
13
  import { MerkleTreeId } from '@aztec/stdlib/trees';
10
14
  import type { GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
11
15
  import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
@@ -38,6 +42,7 @@ export class LightweightBlockFactory implements IBlockFactory {
38
42
  private readonly logger = createLogger('lightweight-block-factory');
39
43
 
40
44
  constructor(
45
+ private previousCheckpointOutHashes: Fr[],
41
46
  private db: MerkleTreeWriteOperations,
42
47
  private telemetry: TelemetryClient = getTelemetryClient(),
43
48
  ) {}
@@ -87,6 +92,10 @@ export class LightweightBlockFactory implements IBlockFactory {
87
92
  await this.db.updateArchive(header);
88
93
  const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
89
94
 
95
+ const blockOutHash = computeBlockOutHash(txs.map(tx => tx.txEffect.l2ToL1Msgs));
96
+ // There's only one block per checkpoint, so the checkpoint out hash equals the block out hash.
97
+ const checkpointOutHash = blockOutHash;
98
+ const epochOutHash = accumulateCheckpointOutHashes([...this.previousCheckpointOutHashes, checkpointOutHash]);
90
99
  const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages!);
91
100
  const numBlobFields = blockBlobFields.length + 1;
92
101
  const blobFields = blockBlobFields.concat([encodeCheckpointEndMarker({ numBlobFields })]);
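A sketch of how the three levels of out hashes relate, inferred only from the call sites in this diff: the argument shapes and the Fr return types are assumptions, while the function names and module paths come from the '@aztec/stdlib/messaging' import added above. Per the comment in this hunk, a single-block checkpoint's out hash coincides with its block out hash.

import { Fr } from '@aztec/foundation/curves/bn254';
import {
  accumulateCheckpointOutHashes,
  computeBlockOutHash,
  computeCheckpointOutHash,
} from '@aztec/stdlib/messaging';

// l2ToL1MsgsPerTx: one Fr[] of L2-to-L1 messages per tx in the block.
function outHashesForSingleBlockCheckpoint(l2ToL1MsgsPerTx: Fr[][], previousCheckpointOutHashes: Fr[]) {
  const blockOutHash = computeBlockOutHash(l2ToL1MsgsPerTx);
  // With exactly one block in the checkpoint, this should agree with blockOutHash.
  const checkpointOutHash = computeCheckpointOutHash([l2ToL1MsgsPerTx]);
  // The epoch out hash folds in every checkpoint produced so far, in order.
  const epochOutHash = accumulateCheckpointOutHashes([...previousCheckpointOutHashes, checkpointOutHash]);
  return { blockOutHash, checkpointOutHash, epochOutHash };
}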
@@ -97,6 +106,7 @@ export class LightweightBlockFactory implements IBlockFactory {
97
106
  blockHeadersHash: blockHeaderHash,
98
107
  blobsHash,
99
108
  inHash,
109
+ epochOutHash,
100
110
  });
101
111
 
102
112
  const block = new L2Block(newArchive, l2BlockHeader, body);
@@ -124,7 +134,7 @@ export async function buildBlockWithCleanDB(
124
134
  db: MerkleTreeWriteOperations,
125
135
  telemetry: TelemetryClient = getTelemetryClient(),
126
136
  ) {
127
- const builder = new LightweightBlockFactory(db, telemetry);
137
+ const builder = new LightweightBlockFactory([], db, telemetry);
128
138
  await builder.startNewBlock(globalVariables, l1ToL2Messages);
129
139
 
130
140
  for (const tx of txs) {
@@ -7,7 +7,11 @@ import { createLogger } from '@aztec/foundation/log';
7
7
  import { L2BlockNew } from '@aztec/stdlib/block';
8
8
  import { Checkpoint } from '@aztec/stdlib/checkpoint';
9
9
  import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
10
- import { computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
10
+ import {
11
+ accumulateCheckpointOutHashes,
12
+ computeCheckpointOutHash,
13
+ computeInHashFromL1ToL2Messages,
14
+ } from '@aztec/stdlib/messaging';
11
15
  import { CheckpointHeader, computeBlockHeadersHash } from '@aztec/stdlib/rollup';
12
16
  import { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
13
17
  import {
@@ -41,6 +45,7 @@ export class LightweightCheckpointBuilder {
41
45
  public readonly checkpointNumber: CheckpointNumber,
42
46
  public readonly constants: CheckpointGlobalVariables,
43
47
  public readonly l1ToL2Messages: Fr[],
48
+ private readonly previousCheckpointOutHashes: Fr[],
44
49
  public readonly db: MerkleTreeWriteOperations,
45
50
  ) {
46
51
  this.spongeBlob = SpongeBlob.init();
@@ -51,6 +56,7 @@ export class LightweightCheckpointBuilder {
51
56
  checkpointNumber: CheckpointNumber,
52
57
  constants: CheckpointGlobalVariables,
53
58
  l1ToL2Messages: Fr[],
59
+ previousCheckpointOutHashes: Fr[],
54
60
  db: MerkleTreeWriteOperations,
55
61
  ): Promise<LightweightCheckpointBuilder> {
56
62
  // Insert l1-to-l2 messages into the tree.
@@ -59,7 +65,73 @@ export class LightweightCheckpointBuilder {
59
65
  padArrayEnd<Fr, number>(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
60
66
  );
61
67
 
62
- return new LightweightCheckpointBuilder(checkpointNumber, constants, l1ToL2Messages, db);
68
+ return new LightweightCheckpointBuilder(
69
+ checkpointNumber,
70
+ constants,
71
+ l1ToL2Messages,
72
+ previousCheckpointOutHashes,
73
+ db,
74
+ );
75
+ }
76
+
77
+ /**
78
+ * Resumes building a checkpoint from existing blocks. This is used for validator re-execution
79
+ * where blocks have already been built and their effects are already in the database.
80
+ * Unlike startNewCheckpoint, this does NOT append l1ToL2Messages to the tree since they
81
+ * were already added when the blocks were originally built.
82
+ */
83
+ static async resumeCheckpoint(
84
+ checkpointNumber: CheckpointNumber,
85
+ constants: CheckpointGlobalVariables,
86
+ l1ToL2Messages: Fr[],
87
+ previousCheckpointOutHashes: Fr[],
88
+ db: MerkleTreeWriteOperations,
89
+ existingBlocks: L2BlockNew[],
90
+ ): Promise<LightweightCheckpointBuilder> {
91
+ const builder = new LightweightCheckpointBuilder(
92
+ checkpointNumber,
93
+ constants,
94
+ l1ToL2Messages,
95
+ previousCheckpointOutHashes,
96
+ db,
97
+ );
98
+
99
+ builder.logger.debug('Resuming checkpoint from existing blocks', {
100
+ checkpointNumber,
101
+ numExistingBlocks: existingBlocks.length,
102
+ blockNumbers: existingBlocks.map(b => b.header.getBlockNumber()),
103
+ });
104
+
105
+ // Validate block order and consistency
106
+ for (let i = 1; i < existingBlocks.length; i++) {
107
+ const prev = existingBlocks[i - 1];
108
+ const curr = existingBlocks[i];
109
+ if (curr.number !== prev.number + 1) {
110
+ throw new Error(`Non-sequential block numbers in resumeCheckpoint: ${prev.number} -> ${curr.number}`);
111
+ }
112
+ if (!prev.archive.root.equals(curr.header.lastArchive.root)) {
113
+ throw new Error(`Archive root mismatch between blocks ${prev.number} and ${curr.number}`);
114
+ }
115
+ }
116
+
117
+ for (let i = 0; i < existingBlocks.length; i++) {
118
+ const block = existingBlocks[i];
119
+ const isFirstBlock = i === 0;
120
+
121
+ if (isFirstBlock) {
122
+ builder.lastArchives.push(block.header.lastArchive);
123
+ }
124
+
125
+ builder.lastArchives.push(block.archive);
126
+
127
+ const blockBlobFields = block.toBlobFields();
128
+ await builder.spongeBlob.absorb(blockBlobFields);
129
+ builder.blobFields.push(...blockBlobFields);
130
+
131
+ builder.blocks.push(block);
132
+ }
133
+
134
+ return builder;
63
135
  }
64
136
 
65
137
  /**
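A sketch of choosing between the two entry points, assuming it lives alongside LightweightCheckpointBuilder in the same module (the class's export path is not shown in this diff); every parameter is simply forwarded from whatever the caller already holds.

async function getCheckpointBuilder(
  checkpointNumber: CheckpointNumber,
  constants: CheckpointGlobalVariables,
  l1ToL2Messages: Fr[],
  previousCheckpointOutHashes: Fr[],
  db: MerkleTreeWriteOperations,
  existingBlocks: L2BlockNew[],
) {
  if (existingBlocks.length > 0) {
    // Validator re-execution: the blocks (and their l1-to-l2 messages) are already in the db,
    // so resume without re-inserting the messages.
    return LightweightCheckpointBuilder.resumeCheckpoint(
      checkpointNumber, constants, l1ToL2Messages, previousCheckpointOutHashes, db, existingBlocks,
    );
  }
  // Fresh checkpoint: this path appends the l1-to-l2 messages to the tree first.
  return LightweightCheckpointBuilder.startNewCheckpoint(
    checkpointNumber, constants, l1ToL2Messages, previousCheckpointOutHashes, db,
  );
}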
@@ -157,6 +229,10 @@ export class LightweightCheckpointBuilder {
157
229
  const inHash = computeInHashFromL1ToL2Messages(this.l1ToL2Messages);
158
230
 
159
231
  const { slotNumber, coinbase, feeRecipient, gasFees } = this.constants;
232
+ const checkpointOutHash = computeCheckpointOutHash(
233
+ blocks.map(block => block.body.txEffects.map(tx => tx.l2ToL1Msgs)),
234
+ );
235
+ const epochOutHash = accumulateCheckpointOutHashes([...this.previousCheckpointOutHashes, checkpointOutHash]);
160
236
 
161
237
  // TODO(palla/mbps): Should we source this from the constants instead?
162
238
  // timestamp of a checkpoint is the timestamp of the last block in the checkpoint.
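A conceptual, self-contained illustration of the "root over leaves" shape behind an out hash: a fixed-height binary Merkle root built with a SHA-256 style pair hash. This is not the protocol's exact hashing or padding (the real code uses shaMerkleHash and OUT_HASH_TREE_HEIGHT, as the later hunks show); it only conveys the structure.

import { createHash } from 'node:crypto';

const pairHash = (l: Buffer, r: Buffer) => createHash('sha256').update(Buffer.concat([l, r])).digest();

function merkleRoot(leaves: Buffer[], height: number): Buffer {
  let zero = Buffer.alloc(32); // zero leaf
  let level = leaves.length ? [...leaves] : [zero];
  for (let h = 0; h < height; h++) {
    const next: Buffer[] = [];
    for (let i = 0; i < level.length; i += 2) {
      next.push(pairHash(level[i], level[i + 1] ?? zero));
    }
    zero = pairHash(zero, zero); // zero-subtree hash for the next level up
    level = next;
  }
  return level[0];
}

// e.g. a checkpoint-level root over per-block out hashes (the height here is illustrative):
// const checkpointOut = merkleRoot(blockOutHashes, 5);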
@@ -168,6 +244,7 @@ export class LightweightCheckpointBuilder {
168
244
  lastArchiveRoot: this.lastArchives[0].root,
169
245
  blobsHash,
170
246
  inHash,
247
+ epochOutHash,
171
248
  blockHeadersHash,
172
249
  slotNumber,
173
250
  timestamp,
@@ -185,6 +262,7 @@ export class LightweightCheckpointBuilder {
185
262
  this.checkpointNumber,
186
263
  this.constants,
187
264
  [...this.l1ToL2Messages],
265
+ [...this.previousCheckpointOutHashes],
188
266
  this.db,
189
267
  );
190
268
  clone.lastArchives = [...this.lastArchives];
@@ -44,6 +44,7 @@ import { getEnvironmentConfig, getSimulator, makeCheckpointConstants, makeGlobal
44
44
  export class TestContext {
45
45
  private headers: Map<number, BlockHeader> = new Map();
46
46
  private checkpoints: Checkpoint[] = [];
47
+ private checkpointOutHashes: Fr[] = [];
47
48
  private nextCheckpointIndex = 0;
48
49
  private nextCheckpointNumber = CheckpointNumber(1);
49
50
  private nextBlockNumber = 1;
@@ -151,6 +152,7 @@ export class TestContext {
151
152
 
152
153
  public startNewEpoch() {
153
154
  this.checkpoints = [];
155
+ this.checkpointOutHashes = [];
154
156
  this.nextCheckpointIndex = 0;
155
157
  this.epochNumber++;
156
158
  }
@@ -245,10 +247,12 @@ export class TestContext {
245
247
  });
246
248
 
247
249
  const cleanFork = await this.worldState.fork();
250
+ const previousCheckpointOutHashes = this.checkpointOutHashes;
248
251
  const builder = await LightweightCheckpointBuilder.startNewCheckpoint(
249
252
  checkpointNumber,
250
253
  constants,
251
254
  l1ToL2Messages,
255
+ previousCheckpointOutHashes,
252
256
  cleanFork,
253
257
  );
254
258
 
@@ -274,6 +278,7 @@ export class TestContext {
274
278
 
275
279
  const checkpoint = await builder.completeCheckpoint();
276
280
  this.checkpoints.push(checkpoint);
281
+ this.checkpointOutHashes.push(checkpoint.getCheckpointOutHash());
277
282
 
278
283
  return {
279
284
  constants,
@@ -11,6 +11,7 @@ import {
11
11
  type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
12
12
  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
13
13
  NUM_MSGS_PER_BASE_PARITY,
14
+ OUT_HASH_TREE_HEIGHT,
14
15
  } from '@aztec/constants';
15
16
  import { BlockNumber } from '@aztec/foundation/branded-types';
16
17
  import { padArrayEnd } from '@aztec/foundation/collection';
@@ -19,6 +20,7 @@ import { Fr } from '@aztec/foundation/curves/bn254';
19
20
  import type { Tuple } from '@aztec/foundation/serialize';
20
21
  import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
21
22
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
23
+ import { computeCheckpointOutHash } from '@aztec/stdlib/messaging';
22
24
  import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
23
25
  import {
24
26
  BlockMergeRollupPrivateInputs,
@@ -38,6 +40,11 @@ import { accumulateBlobs, buildBlobHints, toProofData } from './block-building-h
38
40
  import { BlockProvingState, type ProofState } from './block-proving-state.js';
39
41
  import type { EpochProvingState } from './epoch-proving-state.js';
40
42
 
43
+ type OutHashHint = {
44
+ treeSnapshot: AppendOnlyTreeSnapshot;
45
+ siblingPath: Tuple<Fr, typeof OUT_HASH_TREE_HEIGHT>;
46
+ };
47
+
41
48
  export class CheckpointProvingState {
42
49
  private blockProofs: UnbalancedTreeStore<
43
50
  ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
@@ -46,6 +53,11 @@ export class CheckpointProvingState {
46
53
  | ProofState<CheckpointRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
47
54
  | undefined;
48
55
  private blocks: (BlockProvingState | undefined)[] = [];
56
+ private previousOutHashHint: OutHashHint | undefined;
57
+ private outHash: Fr | undefined;
58
+ // The snapshot and sibling path after the checkpoint's out hash is inserted.
59
+ // Stored here to be retrieved for the next checkpoint when it's added.
60
+ private newOutHashHint: OutHashHint | undefined;
49
61
  private startBlobAccumulator: BatchedBlobAccumulator | undefined;
50
62
  private endBlobAccumulator: BatchedBlobAccumulator | undefined;
51
63
  private blobFields: Fr[] | undefined;
@@ -195,6 +207,35 @@ export class CheckpointProvingState {
195
207
  return new ParityBasePrivateInputs(messages, this.constants.vkTreeRoot);
196
208
  }
197
209
 
210
+ public setOutHashHint(hint: OutHashHint) {
211
+ this.previousOutHashHint = hint;
212
+ }
213
+
214
+ public getOutHashHint() {
215
+ return this.previousOutHashHint;
216
+ }
217
+
218
+ public accumulateBlockOutHashes() {
219
+ if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
220
+ return;
221
+ }
222
+
223
+ if (!this.outHash) {
224
+ const messagesPerBlock = this.blocks.map(b => b!.getTxEffects().map(tx => tx.l2ToL1Msgs));
225
+ this.outHash = computeCheckpointOutHash(messagesPerBlock);
226
+ }
227
+
228
+ return this.outHash;
229
+ }
230
+
231
+ public setOutHashHintForNextCheckpoint(hint: OutHashHint) {
232
+ this.newOutHashHint = hint;
233
+ }
234
+
235
+ public getOutHashHintForNextCheckpoint() {
236
+ return this.newOutHashHint;
237
+ }
238
+
198
239
  public async accumulateBlobs(startBlobAccumulator: BatchedBlobAccumulator) {
199
240
  if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
200
241
  return;
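A self-contained sketch of the gating pattern used by accumulateBlockOutHashes above: return nothing while any block's end state is missing, then compute once and memoise. The class and member names are generic stand-ins.

class CompleteOnceAccumulator<T> {
  private readonly parts: (T | undefined)[];
  private cached: T | undefined;

  constructor(expected: number, private readonly compute: (parts: T[]) => T) {
    this.parts = new Array<T | undefined>(expected).fill(undefined);
  }

  addPart(index: number, part: T) {
    this.parts[index] = part;
  }

  // Mirrors accumulateBlockOutHashes: undefined while any part is missing, a memoised value afterwards.
  accumulate(): T | undefined {
    if (this.parts.some(p => p === undefined)) {
      return undefined;
    }
    this.cached ??= this.compute(this.parts as T[]);
    return this.cached;
  }
}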
@@ -236,6 +277,9 @@ export class CheckpointProvingState {
236
277
  if (proofs.length !== nonEmptyProofs.length) {
237
278
  throw new Error('At least one child is not ready for the checkpoint root rollup.');
238
279
  }
280
+ if (!this.previousOutHashHint) {
281
+ throw new Error('Out hash hint is not set.');
282
+ }
239
283
  if (!this.startBlobAccumulator) {
240
284
  throw new Error('Start blob accumulator is not set.');
241
285
  }
@@ -248,6 +292,8 @@ export class CheckpointProvingState {
248
292
  const hints = CheckpointRootRollupHints.from({
249
293
  previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
250
294
  previousArchiveSiblingPath: this.lastArchiveSiblingPath,
295
+ previousOutHash: this.previousOutHashHint.treeSnapshot,
296
+ newOutHashSiblingPath: this.previousOutHashHint.siblingPath,
251
297
  startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
252
298
  finalBlobChallenges: this.finalBlobBatchingChallenges,
253
299
  blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_CHECKPOINT),
@@ -273,7 +319,7 @@ export class CheckpointProvingState {
273
319
 
274
320
  public isReadyForCheckpointRoot() {
275
321
  const allChildProofsReady = this.#getChildProofsForRoot().every(p => !!p);
276
- return allChildProofsReady && !!this.startBlobAccumulator;
322
+ return allChildProofsReady && !!this.previousOutHashHint && !!this.startBlobAccumulator;
277
323
  }
278
324
 
279
325
  public verifyState() {
@@ -1,14 +1,20 @@
1
1
  import { BatchedBlob, BatchedBlobAccumulator, type FinalBlobBatchingChallenges } from '@aztec/blob-lib';
2
- import type {
3
- ARCHIVE_HEIGHT,
4
- L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
5
- NESTED_RECURSIVE_PROOF_LENGTH,
6
- NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
2
+ import {
3
+ type ARCHIVE_HEIGHT,
4
+ type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
5
+ type NESTED_RECURSIVE_PROOF_LENGTH,
6
+ type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
7
+ OUT_HASH_TREE_HEIGHT,
7
8
  } from '@aztec/constants';
8
9
  import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
9
- import type { Fr } from '@aztec/foundation/curves/bn254';
10
+ import { Fr } from '@aztec/foundation/curves/bn254';
10
11
  import type { Tuple } from '@aztec/foundation/serialize';
11
- import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
12
+ import {
13
+ MerkleTreeCalculator,
14
+ type TreeNodeLocation,
15
+ UnbalancedTreeStore,
16
+ shaMerkleHash,
17
+ } from '@aztec/foundation/trees';
12
18
  import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
13
19
  import type { Proof } from '@aztec/stdlib/proofs';
14
20
  import {
@@ -20,7 +26,7 @@ import {
20
26
  RootRollupPrivateInputs,
21
27
  type RootRollupPublicInputs,
22
28
  } from '@aztec/stdlib/rollup';
23
- import type { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
29
+ import { AppendOnlyTreeSnapshot, type MerkleTreeId } from '@aztec/stdlib/trees';
24
30
  import type { BlockHeader } from '@aztec/stdlib/tx';
25
31
 
26
32
  import { toProofData } from './block-building-helpers.js';
@@ -212,6 +218,48 @@ export class EpochProvingState {
212
218
  this.checkpointPaddingProof = { provingOutput };
213
219
  }
214
220
 
221
+ public async accumulateCheckpointOutHashes() {
222
+ const treeCalculator = await MerkleTreeCalculator.create(OUT_HASH_TREE_HEIGHT, undefined, (left, right) =>
223
+ Promise.resolve(shaMerkleHash(left, right)),
224
+ );
225
+
226
+ const computeOutHashHint = async (leaves: Fr[]) => {
227
+ const tree = await treeCalculator.computeTree(leaves.map(l => l.toBuffer()));
228
+ const nextAvailableLeafIndex = leaves.length;
229
+ return {
230
+ treeSnapshot: new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.root), nextAvailableLeafIndex),
231
+ siblingPath: tree.getSiblingPath(nextAvailableLeafIndex).map(Fr.fromBuffer) as Tuple<
232
+ Fr,
233
+ typeof OUT_HASH_TREE_HEIGHT
234
+ >,
235
+ };
236
+ };
237
+
238
+ let hint = this.checkpoints[0]?.getOutHashHint();
239
+ const outHashes = [];
240
+ for (let i = 0; i < this.totalNumCheckpoints; i++) {
241
+ const checkpoint = this.checkpoints[i];
242
+ if (!checkpoint) {
243
+ break;
244
+ }
245
+
246
+ // If hints are not set yet, it must be the first checkpoint. Compute the hints with an empty tree.
247
+ hint ??= await computeOutHashHint([]);
248
+ checkpoint.setOutHashHint(hint);
249
+
250
+ // Get the out hash for this checkpoint.
251
+ const outHash = checkpoint.accumulateBlockOutHashes();
252
+ if (!outHash) {
253
+ break;
254
+ }
255
+ outHashes.push(outHash);
256
+
257
+ // Get or create hints for the next checkpoint.
258
+ hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
259
+ checkpoint.setOutHashHintForNextCheckpoint(hint);
260
+ }
261
+ }
262
+
215
263
  public async setBlobAccumulators() {
216
264
  let previousAccumulator = this.startBlobAccumulator;
217
265
  // Accumulate blobs as far as we can for this epoch.
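A simplified restatement of the hint-chaining loop introduced above, with generic stand-in types: each completed checkpoint is handed the out-hash tree snapshot and sibling path from before its own leaf is inserted, and the tree state after insertion becomes the next checkpoint's hint. The loop stops at the first checkpoint that is not complete yet and picks up from there on a later call.

type OutHashHintSketch<Leaf> = {
  treeSnapshot: { root: Leaf; nextAvailableLeafIndex: number };
  siblingPath: Leaf[];
};

async function chainCheckpointOutHashHints<Leaf>(
  checkpoints: Array<{
    getOutHashHint(): OutHashHintSketch<Leaf> | undefined;
    setOutHashHint(hint: OutHashHintSketch<Leaf>): void;
    accumulateBlockOutHashes(): Leaf | undefined;
    getOutHashHintForNextCheckpoint(): OutHashHintSketch<Leaf> | undefined;
    setOutHashHintForNextCheckpoint(hint: OutHashHintSketch<Leaf>): void;
  }>,
  computeOutHashHint: (leaves: Leaf[]) => Promise<OutHashHintSketch<Leaf>>,
): Promise<void> {
  let hint = checkpoints[0]?.getOutHashHint();
  const outHashes: Leaf[] = [];
  for (const checkpoint of checkpoints) {
    // The first checkpoint's hint is the empty out-hash tree; later ones were left by their predecessor.
    hint ??= await computeOutHashHint([]);
    checkpoint.setOutHashHint(hint);

    const outHash = checkpoint.accumulateBlockOutHashes();
    if (!outHash) {
      break; // this checkpoint is not complete yet
    }
    outHashes.push(outHash);

    // The tree state after inserting this checkpoint's out hash becomes the next checkpoint's hint.
    hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
    checkpoint.setOutHashHintForNextCheckpoint(hint);
  }
}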