@aztec/prover-client 0.0.1-commit.b655e406 → 0.0.1-commit.d1f2d6c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +1 -1
- package/dest/index.d.ts +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +44 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +194 -0
- package/dest/mocks/fixtures.d.ts +1 -4
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +7 -17
- package/dest/mocks/test_context.d.ts +29 -46
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +116 -116
- package/dest/orchestrator/block-building-helpers.d.ts +17 -19
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +87 -112
- package/dest/orchestrator/block-proving-state.d.ts +17 -11
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +81 -20
- package/dest/orchestrator/checkpoint-proving-state.d.ts +22 -9
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +49 -17
- package/dest/orchestrator/epoch-proving-state.d.ts +12 -10
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +38 -4
- package/dest/orchestrator/index.d.ts +1 -1
- package/dest/orchestrator/orchestrator.d.ts +24 -11
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +496 -102
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -15
- package/dest/orchestrator/tx-proving-state.d.ts +6 -5
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +7 -16
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/index.d.ts +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +2 -2
- package/dest/prover-client/server-epoch-prover.d.ts +8 -7
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/prover-client/server-epoch-prover.js +2 -2
- package/dest/proving_broker/broker_prover_facade.d.ts +22 -21
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +6 -12
- package/dest/proving_broker/config.d.ts +16 -8
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +14 -2
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/fixtures.d.ts +3 -2
- package/dest/proving_broker/fixtures.d.ts.map +1 -1
- package/dest/proving_broker/fixtures.js +3 -2
- package/dest/proving_broker/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/factory.d.ts +2 -2
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/index.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +1 -1
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -1
- package/dest/proving_broker/proof_store/proof_store.d.ts +1 -1
- package/dest/proving_broker/proving_agent.d.ts +3 -8
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +1 -16
- package/dest/proving_broker/proving_broker.d.ts +2 -2
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +6 -11
- package/dest/proving_broker/proving_broker_database/memory.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +5 -3
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +392 -3
- package/dest/proving_broker/proving_broker_database.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +11 -35
- package/dest/proving_broker/proving_job_controller.d.ts +3 -2
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +2 -3
- package/dest/proving_broker/rpc.d.ts +4 -4
- package/dest/test/mock_proof_store.d.ts +1 -1
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +3 -4
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +4 -4
- package/package.json +20 -18
- package/src/config.ts +1 -1
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +274 -0
- package/src/mocks/fixtures.ts +7 -30
- package/src/mocks/test_context.ts +158 -177
- package/src/orchestrator/block-building-helpers.ts +126 -206
- package/src/orchestrator/block-proving-state.ts +103 -25
- package/src/orchestrator/checkpoint-proving-state.ts +71 -21
- package/src/orchestrator/epoch-proving-state.ts +64 -14
- package/src/orchestrator/orchestrator.ts +97 -89
- package/src/orchestrator/orchestrator_metrics.ts +2 -25
- package/src/orchestrator/tx-proving-state.ts +10 -27
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +11 -12
- package/src/prover-client/server-epoch-prover.ts +6 -7
- package/src/proving_broker/broker_prover_facade.ts +25 -34
- package/src/proving_broker/config.ts +17 -1
- package/src/proving_broker/fixtures.ts +8 -3
- package/src/proving_broker/proving_agent.ts +1 -17
- package/src/proving_broker/proving_broker.ts +6 -9
- package/src/proving_broker/proving_broker_database/memory.ts +2 -1
- package/src/proving_broker/proving_broker_database/persisted.ts +20 -5
- package/src/proving_broker/proving_broker_database.ts +2 -1
- package/src/proving_broker/proving_broker_instrumentation.ts +10 -35
- package/src/proving_broker/proving_job_controller.ts +4 -4
- package/src/test/mock_prover.ts +2 -14
- package/dest/block-factory/index.d.ts +0 -2
- package/dest/block-factory/index.d.ts.map +0 -1
- package/dest/block-factory/index.js +0 -1
- package/dest/block-factory/light.d.ts +0 -38
- package/dest/block-factory/light.d.ts.map +0 -1
- package/dest/block-factory/light.js +0 -94
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/block-factory/index.ts +0 -1
- package/src/block-factory/light.ts +0 -140
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
package/src/orchestrator/orchestrator.ts

@@ -1,4 +1,4 @@
-import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib';
+import { BatchedBlob, FinalBlobBatchingChallenges, SpongeBlob } from '@aztec/blob-lib/types';
 import {
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
   L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
@@ -6,22 +6,23 @@ import {
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   NUM_BASE_PARITY_PER_ROOT_PARITY,
 } from '@aztec/constants';
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import {
-import { EthAddress, createBlockEndMarker } from '@aztec/stdlib/block';
+import { EthAddress } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
   MerkleTreeWriteOperations,
   PublicInputsAndRecursiveProof,
+  ReadonlyWorldStateAccess,
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
@@ -54,7 +55,6 @@ import {
 import { inspect } from 'util';

 import {
-  buildBlockHeaderFromTxs,
   buildHeaderFromCircuitOutputs,
   getLastSiblingPath,
   getPublicChonkVerifierPrivateInputsFromTx,
@@ -73,6 +73,11 @@ import { TxProvingState } from './tx-proving-state.js';

 const logger = createLogger('prover-client:orchestrator');

+type WorldStateFork = {
+  fork: MerkleTreeWriteOperations;
+  cleanupPromise: Promise<void> | undefined;
+};
+
 /**
  * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
  * 1. Transactions are provided to the scheduler post simulation.
@@ -93,12 +98,14 @@ export class ProvingOrchestrator implements EpochProver {

   private provingPromise: Promise<ProvingResult> | undefined = undefined;
   private metrics: ProvingOrchestratorMetrics;
-
+  // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
+  private dbs: Map<BlockNumber, WorldStateFork> = new Map();

   constructor(
-    private dbProvider: ForkMerkleTreeOperations,
+    private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
     private prover: ServerCircuitProver,
     private readonly proverId: EthAddress,
+    private readonly cancelJobsOnStop: boolean = false,
     telemetryClient: TelemetryClient = getTelemetryClient(),
   ) {
     this.metrics = new ProvingOrchestratorMetrics(telemetryClient, 'ProvingOrchestrator');
@@ -112,13 +119,17 @@ export class ProvingOrchestrator implements EpochProver {
     return this.proverId;
   }

+  public getNumActiveForks() {
+    return this.dbs.size;
+  }
+
   public stop(): Promise<void> {
     this.cancel();
     return Promise.resolve();
   }

   public startNewEpoch(
-    epochNumber:
+    epochNumber: EpochNumber,
     totalNumCheckpoints: number,
     finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
   ) {
@@ -142,12 +153,19 @@ export class ProvingOrchestrator implements EpochProver {
     this.provingPromise = promise;
   }

+  /**
+   * Starts a new checkpoint.
+   * @param checkpointIndex - The index of the checkpoint in the epoch.
+   * @param constants - The constants for this checkpoint.
+   * @param l1ToL2Messages - The set of L1 to L2 messages to be inserted at the beginning of this checkpoint.
+   * @param totalNumBlocks - The total number of blocks expected in the checkpoint (must be at least one).
+   * @param headerOfLastBlockInPreviousCheckpoint - The header of the last block in the previous checkpoint.
+   */
   public async startNewCheckpoint(
     checkpointIndex: number,
     constants: CheckpointConstantData,
     l1ToL2Messages: Fr[],
     totalNumBlocks: number,
-    totalNumBlobFields: number,
     headerOfLastBlockInPreviousCheckpoint: BlockHeader,
   ) {
     if (!this.provingState) {
@@ -162,8 +180,8 @@ export class ProvingOrchestrator implements EpochProver {
     const lastBlockNumber = headerOfLastBlockInPreviousCheckpoint.globalVariables.blockNumber;
     const db = await this.dbProvider.fork(lastBlockNumber);

-    const firstBlockNumber = lastBlockNumber + 1;
-    this.dbs.set(firstBlockNumber, db);
+    const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
+    this.dbs.set(firstBlockNumber, { fork: db, cleanupPromise: undefined });

     // Get archive sibling path before any block in this checkpoint lands.
     const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
@@ -180,7 +198,6 @@ export class ProvingOrchestrator implements EpochProver {
       checkpointIndex,
       constants,
       totalNumBlocks,
-      totalNumBlobFields,
       headerOfLastBlockInPreviousCheckpoint,
       lastArchiveSiblingPath,
       l1ToL2Messages,
@@ -201,7 +218,7 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.startNewBlock', blockNumber => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async startNewBlock(blockNumber:
+  public async startNewBlock(blockNumber: BlockNumber, timestamp: UInt64, totalNumTxs: number) {
     if (!this.provingState) {
       throw new Error('Empty epoch proving state. Call startNewEpoch before starting a block.');
     }
@@ -216,21 +233,21 @@ export class ProvingOrchestrator implements EpochProver {
     }

     const constants = checkpointProvingState.constants;
-    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber
+    logger.info(`Starting block ${blockNumber} for slot ${constants.slotNumber}.`);

     // Fork the db only when it's not already set. The db for the first block is set in `startNewCheckpoint`.
     if (!this.dbs.has(blockNumber)) {
       // Fork world state at the end of the immediately previous block
-      const db = await this.dbProvider.fork(blockNumber - 1);
-      this.dbs.set(blockNumber, db);
+      const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
+      this.dbs.set(blockNumber, { fork: db, cleanupPromise: undefined });
     }
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;

     // Get archive snapshot and sibling path before any txs in this block lands.
     const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const lastArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db);

-    const blockProvingState =
+    const blockProvingState = checkpointProvingState.startNewBlock(
       blockNumber,
       timestamp,
       totalNumTxs,
@@ -248,11 +265,16 @@ export class ProvingOrchestrator implements EpochProver {
     // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
     // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
     if (totalNumTxs === 0) {
+      const endState = await db.getStateReference();
+      blockProvingState.setEndState(endState);
+
       const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
-
+      const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+      await endSpongeBlob.absorb(blockEndBlobFields);
       blockProvingState.setEndSpongeBlob(endSpongeBlob);

-      //
+      // Try to accumulate the out hashes and blobs as far as we can:
+      await this.provingState.accumulateCheckpointOutHashes();
       await this.provingState.setBlobAccumulators();
     }
   }
@@ -276,7 +298,7 @@ export class ProvingOrchestrator implements EpochProver {
       return;
     }

-    const blockNumber = txs[0].globalVariables.blockNumber;
+    const blockNumber = BlockNumber(txs[0].globalVariables.blockNumber);
     const provingState = this.provingState.getBlockProvingStateByBlockNumber(blockNumber!);
     if (!provingState) {
       throw new Error(`Proving state for block ${blockNumber} not found. Call startNewBlock first.`);
@@ -294,7 +316,7 @@ export class ProvingOrchestrator implements EpochProver {

     logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);

-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
     const lastArchive = provingState.lastArchiveTreeSnapshot;
     const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
     const spongeBlobState = provingState.getStartSpongeBlob().clone();
@@ -307,7 +329,7 @@ export class ProvingOrchestrator implements EpochProver {

       validateTx(tx);

-      logger.
+      logger.debug(`Received transaction: ${tx.hash}`);

       const startSpongeBlob = spongeBlobState.clone();
       const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
@@ -341,11 +363,16 @@ export class ProvingOrchestrator implements EpochProver {
       }
     }

-    await
+    const endState = await db.getStateReference();
+    provingState.setEndState(endState);
+
+    const blockEndBlobFields = provingState.getBlockEndBlobFields();
+    await spongeBlobState.absorb(blockEndBlobFields);

     provingState.setEndSpongeBlob(spongeBlobState);

-    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    // Txs have been added to the block. Now try to accumulate the out hashes and blobs as far as we can:
+    await this.provingState.accumulateCheckpointOutHashes();
     await this.provingState.setBlobAccumulators();
   }

@@ -382,10 +409,10 @@ export class ProvingOrchestrator implements EpochProver {
   * Marks the block as completed.
   * Computes the block header and updates the archive tree.
   */
-  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber:
+  @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: BlockNumber) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber:
+  public async setBlockCompleted(blockNumber: BlockNumber, expectedHeader?: BlockHeader): Promise<BlockHeader> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
@@ -408,39 +435,25 @@ export class ProvingOrchestrator implements EpochProver {
       );
     }

-    //
+    // Given we've applied every change from this block, now assemble the block header:
     logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-    const header = await
-
-    await this.verifyBuiltBlockAgainstSyncedState(provingState);
-
-    return header;
-  }
-
-  private async buildL2BlockHeader(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
-    // Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
-    const txs = provingState.getProcessedTxs();
-
-    const startSpongeBlob = provingState.getStartSpongeBlob();
-
-    // Get db for this block
-    const db = this.dbs.get(provingState.blockNumber)!;
-
-    // Given we've applied every change from this block, now assemble the block header
-    // and update the archive tree, so we're ready to start processing the next block
-    const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);
+    const header = await provingState.buildBlockHeader();

     if (expectedHeader && !header.equals(expectedHeader)) {
       logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
       throw new Error('Block header mismatch');
     }

+    // Get db for this block
+    const db = this.dbs.get(provingState.blockNumber)!.fork;
+
+    // Update the archive tree, so we're ready to start processing the next block:
     logger.verbose(
       `Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`,
     );
     await db.updateArchive(header);

-
+    await this.verifyBuiltBlockAgainstSyncedState(provingState);

     return header;
   }
@@ -468,7 +481,7 @@ export class ProvingOrchestrator implements EpochProver {

     // Get db for this block
     const blockNumber = provingState.blockNumber;
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;

     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
@@ -493,20 +506,19 @@ export class ProvingOrchestrator implements EpochProver {
     // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
     // but have to make sure it only runs once all operations are completed, otherwise some function here
    // will attempt to access the fork after it was closed.
-
-    void this.dbs
-      .get(blockNumber)
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    void this.cleanupDBFork(blockNumber);
   }

   /**
-   * Cancel any further proving
+   * Cancel any further proving.
+   * If cancelJobsOnStop is true, aborts all pending jobs with the broker (which marks them as 'Aborted').
+   * If cancelJobsOnStop is false (default), jobs remain in the broker queue and can be reused on restart/reorg.
    */
   public cancel() {
-
-      controller.
+    if (this.cancelJobsOnStop) {
+      for (const controller of this.pendingProvingJobs) {
+        controller.abort();
+      }
     }

     this.provingState?.cancel();
@@ -541,6 +553,24 @@ export class ProvingOrchestrator implements EpochProver {
     return epochProofResult;
   }

+  private async cleanupDBFork(blockNumber: BlockNumber): Promise<void> {
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    const fork = this.dbs.get(blockNumber);
+    if (!fork) {
+      return;
+    }
+
+    try {
+      if (!fork.cleanupPromise) {
+        fork.cleanupPromise = fork.fork.close();
+      }
+      await fork.cleanupPromise;
+      this.dbs.delete(blockNumber);
+    } catch (err) {
+      logger.error(`Error closing db for block ${blockNumber}`, err);
+    }
+  }
+
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -858,19 +888,22 @@ export class ProvingOrchestrator implements EpochProver {
        },
      ),
      async result => {
-        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
-        await this.verifyBuiltBlockAgainstSyncedState(provingState);
-
        logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);

        const leafLocation = provingState.setBlockRootRollupProof(result);
        const checkpointProvingState = provingState.parentCheckpoint;

+        // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+        await this.verifyBuiltBlockAgainstSyncedState(provingState);
+
        if (checkpointProvingState.totalNumBlocks === 1) {
          this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
        } else {
          this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
        }
+
+        // We are finished with the block at this point, ensure the fork is cleaned up
+        void this.cleanupDBFork(provingState.blockNumber);
      },
    );
  }
@@ -1214,8 +1247,6 @@ export class ProvingOrchestrator implements EpochProver {

    const txProvingState = provingState.getTxProvingState(txIndex);

-    // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-    // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
    const doAvmProving = wrapCallbackInSpan(
      this.tracer,
      'ProvingOrchestrator.prover.getAvmProof',
@@ -1224,36 +1255,13 @@ export class ProvingOrchestrator implements EpochProver {
      },
      async (signal: AbortSignal) => {
        const inputs = txProvingState.getAvmInputs();
-
-        // TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
-        // just keep the next line but removing the second argument (false).
-        return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
-        } catch (err) {
-          if (process.env.AVM_PROVING_STRICT) {
-            logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-            throw err;
-          } else {
-            logger.warn(
-              `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
-              AVM inputs and carrying on. ${inspect(err)}.`,
-            );
-
-            try {
-              this.metrics.incAvmFallback();
-              const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
-              return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
-            } catch (err) {
-              logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
-              throw err;
-            }
-          }
-        }
+        return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
      },
    );

-    this.deferredProving(provingState, doAvmProving,
+    this.deferredProving(provingState, doAvmProving, proof => {
      logger.debug(`Proven VM for tx index: ${txIndex}`);
-      txProvingState.setAvmProof(
+      txProvingState.setAvmProof(proof);
      this.checkAndEnqueueBaseRollup(provingState, txIndex);
    });
  }
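The new fork bookkeeping above (the `WorldStateFork` entries in `dbs`, `cleanupDBFork`, and `getNumActiveForks`) makes fork cleanup idempotent: whichever code path finishes with a block first starts `close()`, and later callers await the same memoized promise instead of closing the fork twice. Below is a minimal standalone sketch of that pattern; `Fork` and `ForkRegistry` are illustrative names for this sketch, not types exported by the package.

// Sketch only: the memoized-cleanup idea behind WorldStateFork / cleanupDBFork.
// `Fork` stands in for MerkleTreeWriteOperations; only `close()` matters here.
interface Fork {
  close(): Promise<void>;
}

type TrackedFork = { fork: Fork; cleanupPromise: Promise<void> | undefined };

class ForkRegistry {
  private forks = new Map<number, TrackedFork>();

  register(blockNumber: number, fork: Fork) {
    this.forks.set(blockNumber, { fork, cleanupPromise: undefined });
  }

  // Safe to call from several places; the first caller starts close(), the rest await the same promise.
  async cleanup(blockNumber: number): Promise<void> {
    const entry = this.forks.get(blockNumber);
    if (!entry) {
      return;
    }
    try {
      if (!entry.cleanupPromise) {
        entry.cleanupPromise = entry.fork.close();
      }
      await entry.cleanupPromise;
      this.forks.delete(blockNumber);
    } catch (err) {
      console.error(`Error closing fork for block ${blockNumber}`, err);
    }
  }

  get size() {
    return this.forks.size;
  }
}

Because the map entry is only removed after `close()` resolves, a size-based check like `getNumActiveForks()` stays accurate while a cleanup is still in flight.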
package/src/orchestrator/orchestrator_metrics.ts

@@ -1,41 +1,18 @@
-import {
-  type Histogram,
-  Metrics,
-  type TelemetryClient,
-  type Tracer,
-  type UpDownCounter,
-  ValueType,
-} from '@aztec/telemetry-client';
+import { type Histogram, Metrics, type TelemetryClient, type Tracer } from '@aztec/telemetry-client';

 export class ProvingOrchestratorMetrics {
   public readonly tracer: Tracer;

   private baseRollupInputsDuration: Histogram;
-  private avmFallbackCount: UpDownCounter;

   constructor(client: TelemetryClient, name = 'ProvingOrchestrator') {
     this.tracer = client.getTracer(name);
     const meter = client.getMeter(name);

-    this.baseRollupInputsDuration = meter.createHistogram(Metrics.PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION
-      unit: 'ms',
-      description: 'Duration to build base rollup inputs',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount = meter.createUpDownCounter(Metrics.PROVING_ORCHESTRATOR_AVM_FALLBACK_COUNT, {
-      description: 'How many times the AVM fallback was used',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount.add(0);
+    this.baseRollupInputsDuration = meter.createHistogram(Metrics.PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION);
   }

   recordBaseRollupInputs(durationMs: number) {
     this.baseRollupInputsDuration.record(Math.ceil(durationMs));
   }
-
-  incAvmFallback() {
-    this.avmFallbackCount.add(1);
-  }
 }
package/src/orchestrator/tx-proving-state.ts

@@ -1,14 +1,9 @@
-import {
-
-  AVM_VK_INDEX,
-  NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
-} from '@aztec/constants';
-import type { Fr } from '@aztec/foundation/fields';
+import { AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants';
+import type { Fr } from '@aztec/foundation/curves/bn254';
 import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
-import { getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import type { AvmCircuitInputs } from '@aztec/stdlib/avm';
-import type {
-import { ProofData } from '@aztec/stdlib/proofs';
+import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+import { ProofData, ProofDataForFixedVk, RecursiveProof } from '@aztec/stdlib/proofs';
 import {
   type BaseRollupHints,
   PrivateBaseRollupHints,
@@ -20,7 +15,6 @@ import {
 import type { CircuitName } from '@aztec/stdlib/stats';
 import type { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
 import type { ProcessedTx } from '@aztec/stdlib/tx';
-import { VerificationKeyData, VkData } from '@aztec/stdlib/vks';

 import {
   getChonkProofFromTx,
@@ -38,7 +32,7 @@ export class TxProvingState {
     PublicChonkVerifierPublicInputs,
     typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
   >;
-  private
+  private avmProof?: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>;

   constructor(
     public readonly processedTx: ProcessedTx,
@@ -52,7 +46,7 @@ export class TxProvingState {
   }

   public ready() {
-    return !this.requireAvmProof || (!!this.
+    return !this.requireAvmProof || (!!this.avmProof && !!this.publicChonkVerifier);
   }

   public getAvmInputs(): AvmCircuitInputs {
@@ -86,8 +80,8 @@ export class TxProvingState {
     this.publicChonkVerifier = publicChonkVerifierProofAndVk;
   }

-  public setAvmProof(
-    this.
+  public setAvmProof(avmProof: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>) {
+    this.avmProof = avmProof;
   }

   #getPrivateBaseInputs() {
@@ -111,7 +105,7 @@ export class TxProvingState {
     if (!this.publicChonkVerifier) {
       throw new Error('Tx not ready for proving base rollup: public chonk verifier proof undefined');
     }
-    if (!this.
+    if (!this.avmProof) {
       throw new Error('Tx not ready for proving base rollup: avm proof undefined');
     }
     if (!(this.baseRollupHints instanceof PublicBaseRollupHints)) {
@@ -120,19 +114,8 @@ export class TxProvingState {

     const publicChonkVerifierProofData = toProofData(this.publicChonkVerifier);

-    const avmProofData = new
-      this.processedTx.avmProvingRequest.inputs.publicInputs,
-      this.avm.proof,
-      this.#getVkData(this.avm!.verificationKey, AVM_VK_INDEX),
-    );
+    const avmProofData = new ProofDataForFixedVk(this.processedTx.avmProvingRequest.inputs.publicInputs, this.avmProof);

     return new PublicTxBaseRollupPrivateInputs(publicChonkVerifierProofData, avmProofData, this.baseRollupHints);
   }
-
-  #getVkData(verificationKey: VerificationKeyData, vkIndex: number) {
-    // TODO(#17162): Add avm vk hash to the tree and call `getVkData('AVM')` instead.
-    // Below will return a path to an empty leaf.
-    const vkPath = getVKSiblingPath(vkIndex);
-    return new VkData(verificationKey, vkIndex, vkPath);
-  }
 }
package/src/prover-client/factory.ts

@@ -1,4 +1,8 @@
-import type {
+import type {
+  ForkMerkleTreeOperations,
+  ProvingJobBroker,
+  ReadonlyWorldStateAccess,
+} from '@aztec/stdlib/interfaces/server';
 import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';

 import type { ProverClientConfig } from '../config.js';
@@ -6,7 +10,7 @@ import { ProverClient } from './prover-client.js';

 export function createProverClient(
   config: ProverClientConfig,
-  worldState: ForkMerkleTreeOperations,
+  worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
   broker: ProvingJobBroker,
   telemetry: TelemetryClient = getTelemetryClient(),
 ) {
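With this change, the world-state handle passed to `createProverClient` (and on to `ProverClient` and the orchestrator) must satisfy both `ForkMerkleTreeOperations` and `ReadonlyWorldStateAccess`, since the orchestrator now calls `getSnapshot` for its synced-state checks in addition to `fork`. A hedged caller-side sketch of the stricter type follows; the relative import path for `createProverClient`, the placeholder values, and the return-value handling are illustrative, not taken from the package.

// Illustrative only: type-level view of the new createProverClient requirement.
import type {
  ForkMerkleTreeOperations,
  ProvingJobBroker,
  ReadonlyWorldStateAccess,
} from '@aztec/stdlib/interfaces/server';
import type { ProverClientConfig } from '../config.js';
import { createProverClient } from './factory.js'; // path as seen from inside src/prover-client; illustrative

declare const config: ProverClientConfig;
declare const broker: ProvingJobBroker;
// A plain ForkMerkleTreeOperations no longer type-checks; the intersection is required.
declare const worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess;

// The factory's return type is not shown in this diff; handle it as appropriate.
const proverClient = createProverClient(config, worldState, broker);
void proverClient;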
package/src/prover-client/prover-client.ts

@@ -11,6 +11,7 @@ import {
   type ProvingJobBroker,
   type ProvingJobConsumer,
   type ProvingJobProducer,
+  type ReadonlyWorldStateAccess,
   type ServerCircuitProver,
   tryStop,
 } from '@aztec/stdlib/interfaces/server';
@@ -33,7 +34,7 @@ export class ProverClient implements EpochProverManager {

   private constructor(
     private config: ProverClientConfig,
-    private worldState: ForkMerkleTreeOperations,
+    private worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
     private orchestratorClient: ProvingJobProducer,
     private agentClient?: ProvingJobConsumer,
     private telemetry: TelemetryClient = getTelemetryClient(),
@@ -45,7 +46,13 @@ export class ProverClient implements EpochProverManager {

   public createEpochProver(): EpochProver {
     const facade = new BrokerCircuitProverFacade(this.orchestratorClient, this.proofStore, this.failedProofStore);
-    const orchestrator = new ProvingOrchestrator(
+    const orchestrator = new ProvingOrchestrator(
+      this.worldState,
+      facade,
+      this.config.proverId,
+      this.config.cancelJobsOnStop,
+      this.telemetry,
+    );
     return new ServerEpochProver(facade, orchestrator);
   }

@@ -99,7 +106,7 @@ export class ProverClient implements EpochProverManager {
   */
   public static async new(
     config: ProverClientConfig,
-    worldState: ForkMerkleTreeOperations,
+    worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
     broker: ProvingJobBroker,
     telemetry: TelemetryClient = getTelemetryClient(),
   ) {
@@ -129,15 +136,7 @@ export class ProverClient implements EpochProverManager {
     const prover = await buildServerCircuitProver(this.config, this.telemetry);
     this.agents = times(
       this.config.proverAgentCount,
-      () =>
-        new ProvingAgent(
-          this.agentClient!,
-          proofStore,
-          prover,
-          [],
-          this.config.proverAgentPollIntervalMs,
-          this.telemetry,
-        ),
+      () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs),
     );

     await Promise.all(this.agents.map(agent => agent.start()));
package/src/prover-client/server-epoch-prover.ts

@@ -1,5 +1,6 @@
-import type { BatchedBlob, FinalBlobBatchingChallenges } from '@aztec/blob-lib';
-import
+import type { BatchedBlob, FinalBlobBatchingChallenges } from '@aztec/blob-lib/types';
+import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
+import type { Fr } from '@aztec/foundation/curves/bn254';
 import type { EthAddress } from '@aztec/stdlib/block';
 import type { EpochProver } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
@@ -18,7 +19,7 @@ export class ServerEpochProver implements EpochProver {
   ) {}

   startNewEpoch(
-    epochNumber:
+    epochNumber: EpochNumber,
     totalNumCheckpoints: number,
     finalBlobBatchingChallenges: FinalBlobBatchingChallenges,
   ): void {
@@ -30,7 +31,6 @@ export class ServerEpochProver implements EpochProver {
     constants: CheckpointConstantData,
     l1ToL2Messages: Fr[],
     totalNumBlocks: number,
-    totalNumBlobFields: number,
     headerOfLastBlockInPreviousCheckpoint: BlockHeader,
   ): Promise<void> {
     return this.orchestrator.startNewCheckpoint(
@@ -38,14 +38,13 @@ export class ServerEpochProver implements EpochProver {
       constants,
       l1ToL2Messages,
       totalNumBlocks,
-      totalNumBlobFields,
       headerOfLastBlockInPreviousCheckpoint,
     );
   }
   startChonkVerifierCircuits(txs: Tx[]): Promise<void> {
     return this.orchestrator.startChonkVerifierCircuits(txs);
   }
-  setBlockCompleted(blockNumber:
+  setBlockCompleted(blockNumber: BlockNumber, expectedBlockHeader?: BlockHeader): Promise<BlockHeader> {
     return this.orchestrator.setBlockCompleted(blockNumber, expectedBlockHeader);
   }
   finalizeEpoch(): Promise<{ publicInputs: RootRollupPublicInputs; proof: Proof; batchedBlobInputs: BatchedBlob }> {
@@ -61,7 +60,7 @@ export class ServerEpochProver implements EpochProver {
     await this.facade.stop();
     await this.orchestrator.stop();
   }
-  startNewBlock(blockNumber:
+  startNewBlock(blockNumber: BlockNumber, timestamp: UInt64, totalNumTxs: number): Promise<void> {
     return this.orchestrator.startNewBlock(blockNumber, timestamp, totalNumTxs);
   }
   addTxs(txs: ProcessedTx[]): Promise<void> {