@aztec/prover-client 0.0.1-commit.d3ec352c → 0.0.1-commit.e6bd8901
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/config.d.ts +2 -2
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +1 -1
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +30 -15
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
- package/dest/light/lightweight_checkpoint_builder.js +109 -22
- package/dest/mocks/fixtures.js +2 -2
- package/dest/mocks/test_context.d.ts +5 -3
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +21 -8
- package/dest/orchestrator/block-building-helpers.d.ts +3 -3
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +2 -2
- package/dest/orchestrator/block-proving-state.d.ts +2 -2
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +1 -1
- package/dest/orchestrator/checkpoint-proving-state.d.ts +16 -3
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +36 -2
- package/dest/orchestrator/epoch-proving-state.d.ts +5 -4
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +35 -1
- package/dest/orchestrator/orchestrator.d.ts +18 -5
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +474 -74
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -15
- package/dest/orchestrator/tx-proving-state.d.ts +6 -5
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +8 -8
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +2 -2
- package/dest/prover-client/server-epoch-prover.d.ts +2 -2
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.d.ts +4 -3
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +3 -10
- package/dest/proving_broker/config.d.ts +6 -2
- package/dest/proving_broker/config.d.ts.map +1 -1
- package/dest/proving_broker/config.js +8 -2
- package/dest/proving_broker/fixtures.js +1 -1
- package/dest/proving_broker/proving_agent.d.ts +3 -8
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +1 -16
- package/dest/proving_broker/proving_broker.d.ts +1 -1
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +4 -10
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +3 -2
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +389 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +11 -35
- package/dest/proving_broker/proving_job_controller.d.ts +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +2 -3
- package/dest/test/mock_proof_store.d.ts +3 -3
- package/dest/test/mock_proof_store.d.ts.map +1 -1
- package/dest/test/mock_prover.d.ts +2 -2
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +4 -4
- package/package.json +18 -17
- package/src/config.ts +1 -1
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +160 -29
- package/src/mocks/fixtures.ts +2 -2
- package/src/mocks/test_context.ts +18 -6
- package/src/orchestrator/block-building-helpers.ts +2 -2
- package/src/orchestrator/block-proving-state.ts +1 -1
- package/src/orchestrator/checkpoint-proving-state.ts +49 -2
- package/src/orchestrator/epoch-proving-state.ts +56 -8
- package/src/orchestrator/orchestrator.ts +68 -53
- package/src/orchestrator/orchestrator_metrics.ts +2 -25
- package/src/orchestrator/tx-proving-state.ts +10 -14
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +11 -12
- package/src/prover-client/server-epoch-prover.ts +1 -1
- package/src/proving_broker/broker_prover_facade.ts +4 -14
- package/src/proving_broker/config.ts +10 -1
- package/src/proving_broker/fixtures.ts +1 -1
- package/src/proving_broker/proving_agent.ts +1 -17
- package/src/proving_broker/proving_broker.ts +4 -8
- package/src/proving_broker/proving_broker_database/persisted.ts +15 -1
- package/src/proving_broker/proving_broker_instrumentation.ts +10 -35
- package/src/proving_broker/proving_job_controller.ts +2 -3
- package/src/test/mock_prover.ts +2 -14
- package/dest/block-factory/index.d.ts +0 -2
- package/dest/block-factory/index.d.ts.map +0 -1
- package/dest/block-factory/index.js +0 -1
- package/dest/block-factory/light.d.ts +0 -38
- package/dest/block-factory/light.d.ts.map +0 -1
- package/dest/block-factory/light.js +0 -108
- package/dest/proving_broker/proving_agent_instrumentation.d.ts +0 -8
- package/dest/proving_broker/proving_agent_instrumentation.d.ts.map +0 -1
- package/dest/proving_broker/proving_agent_instrumentation.js +0 -16
- package/src/block-factory/index.ts +0 -1
- package/src/block-factory/light.ts +0 -137
- package/src/proving_broker/proving_agent_instrumentation.ts +0 -21
package/src/orchestrator/checkpoint-proving-state.ts:

@@ -11,13 +11,16 @@ import {
   type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
   type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NUM_MSGS_PER_BASE_PARITY,
+  OUT_HASH_TREE_HEIGHT,
 } from '@aztec/constants';
 import { BlockNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { BLS12Point
+import { BLS12Point } from '@aztec/foundation/curves/bls12';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+import { computeCheckpointOutHash } from '@aztec/stdlib/messaging';
 import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
 import {
   BlockMergeRollupPrivateInputs,
@@ -37,6 +40,11 @@ import { accumulateBlobs, buildBlobHints, toProofData } from './block-building-h
 import { BlockProvingState, type ProofState } from './block-proving-state.js';
 import type { EpochProvingState } from './epoch-proving-state.js';
 
+type OutHashHint = {
+  treeSnapshot: AppendOnlyTreeSnapshot;
+  siblingPath: Tuple<Fr, typeof OUT_HASH_TREE_HEIGHT>;
+};
+
 export class CheckpointProvingState {
   private blockProofs: UnbalancedTreeStore<
     ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
@@ -45,6 +53,11 @@ export class CheckpointProvingState {
     | ProofState<CheckpointRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
     | undefined;
   private blocks: (BlockProvingState | undefined)[] = [];
+  private previousOutHashHint: OutHashHint | undefined;
+  private outHash: Fr | undefined;
+  // The snapshot and sibling path after the checkpoint's out hash is inserted.
+  // Stored here to be retrieved for the next checkpoint when it's added.
+  private newOutHashHint: OutHashHint | undefined;
   private startBlobAccumulator: BatchedBlobAccumulator | undefined;
   private endBlobAccumulator: BatchedBlobAccumulator | undefined;
   private blobFields: Fr[] | undefined;
@@ -194,6 +207,35 @@ export class CheckpointProvingState {
     return new ParityBasePrivateInputs(messages, this.constants.vkTreeRoot);
   }
 
+  public setOutHashHint(hint: OutHashHint) {
+    this.previousOutHashHint = hint;
+  }
+
+  public getOutHashHint() {
+    return this.previousOutHashHint;
+  }
+
+  public accumulateBlockOutHashes() {
+    if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
+      return;
+    }
+
+    if (!this.outHash) {
+      const messagesPerBlock = this.blocks.map(b => b!.getTxEffects().map(tx => tx.l2ToL1Msgs));
+      this.outHash = computeCheckpointOutHash(messagesPerBlock);
+    }
+
+    return this.outHash;
+  }
+
+  public setOutHashHintForNextCheckpoint(hint: OutHashHint) {
+    this.newOutHashHint = hint;
+  }
+
+  public getOutHashHintForNextCheckpoint() {
+    return this.newOutHashHint;
+  }
+
   public async accumulateBlobs(startBlobAccumulator: BatchedBlobAccumulator) {
     if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
       return;
@@ -235,6 +277,9 @@
     if (proofs.length !== nonEmptyProofs.length) {
       throw new Error('At least one child is not ready for the checkpoint root rollup.');
     }
+    if (!this.previousOutHashHint) {
+      throw new Error('Out hash hint is not set.');
+    }
     if (!this.startBlobAccumulator) {
       throw new Error('Start blob accumulator is not set.');
     }
@@ -247,6 +292,8 @@
     const hints = CheckpointRootRollupHints.from({
       previousBlockHeader: this.headerOfLastBlockInPreviousCheckpoint,
       previousArchiveSiblingPath: this.lastArchiveSiblingPath,
+      previousOutHash: this.previousOutHashHint.treeSnapshot,
+      newOutHashSiblingPath: this.previousOutHashHint.siblingPath,
       startBlobAccumulator: this.startBlobAccumulator.toBlobAccumulator(),
       finalBlobChallenges: this.finalBlobBatchingChallenges,
       blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_CHECKPOINT),
@@ -272,7 +319,7 @@
 
   public isReadyForCheckpointRoot() {
     const allChildProofsReady = this.#getChildProofsForRoot().every(p => !!p);
-    return allChildProofsReady && !!this.startBlobAccumulator;
+    return allChildProofsReady && !!this.previousOutHashHint && !!this.startBlobAccumulator;
   }
 
   public verifyState() {
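The change above threads an out-hash tree state between consecutive checkpoints: each `CheckpointProvingState` now records the state it starts from (`previousOutHashHint`) and the state left behind once its own out hash is inserted (`newOutHashHint`), and the checkpoint root rollup refuses to run until the former is present. Below is a minimal, hypothetical model of that hand-off; the `Hint`, `MiniCheckpoint`, and `append` names are illustrative and not part of the package.

```ts
// Hypothetical, stripped-down model of the hint hand-off; the real OutHashHint carries an
// AppendOnlyTreeSnapshot plus a sibling path and lives inside CheckpointProvingState.
type Hint = { root: string; nextAvailableLeafIndex: number };

class MiniCheckpoint {
  // Tree state *before* this checkpoint's out hash is inserted (needed by its root rollup).
  private previousHint?: Hint;
  // Tree state *after* insertion, cached so the next checkpoint can pick it up.
  private nextHint?: Hint;

  setOutHashHint(hint: Hint) {
    this.previousHint = hint;
  }
  getOutHashHint() {
    return this.previousHint;
  }
  setOutHashHintForNextCheckpoint(hint: Hint) {
    this.nextHint = hint;
  }
  getOutHashHintForNextCheckpoint() {
    return this.nextHint;
  }
  // Mirrors isReadyForCheckpointRoot(): child proofs alone are no longer enough.
  isReadyForRoot(allChildProofsReady: boolean) {
    return allChildProofsReady && !!this.previousHint;
  }
}

// Thread the tree state through consecutive checkpoints. `append` stands in for
// "insert this checkpoint's out hash into the tree" and is assumed, not provided by the package.
function threadHints(checkpoints: MiniCheckpoint[], emptyTree: Hint, append: (prev: Hint, i: number) => Hint) {
  let hint = emptyTree;
  checkpoints.forEach((checkpoint, i) => {
    checkpoint.setOutHashHint(hint);
    hint = checkpoint.getOutHashHintForNextCheckpoint() ?? append(hint, i);
    checkpoint.setOutHashHintForNextCheckpoint(hint);
  });
}
```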
package/src/orchestrator/epoch-proving-state.ts:

@@ -1,14 +1,20 @@
 import { BatchedBlob, BatchedBlobAccumulator, type FinalBlobBatchingChallenges } from '@aztec/blob-lib';
-import {
-  ARCHIVE_HEIGHT,
-  L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
-  NESTED_RECURSIVE_PROOF_LENGTH,
-  NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
+import {
+  type ARCHIVE_HEIGHT,
+  type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+  type NESTED_RECURSIVE_PROOF_LENGTH,
+  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
+  OUT_HASH_TREE_HEIGHT,
 } from '@aztec/constants';
 import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
-import
+import { Fr } from '@aztec/foundation/curves/bn254';
 import type { Tuple } from '@aztec/foundation/serialize';
-import {
+import {
+  MerkleTreeCalculator,
+  type TreeNodeLocation,
+  UnbalancedTreeStore,
+  shaMerkleHash,
+} from '@aztec/foundation/trees';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
 import {
@@ -20,7 +26,7 @@ import {
   RootRollupPrivateInputs,
   type RootRollupPublicInputs,
 } from '@aztec/stdlib/rollup';
-import
+import { AppendOnlyTreeSnapshot, type MerkleTreeId } from '@aztec/stdlib/trees';
 import type { BlockHeader } from '@aztec/stdlib/tx';
 
 import { toProofData } from './block-building-helpers.js';
@@ -212,6 +218,48 @@ export class EpochProvingState {
     this.checkpointPaddingProof = { provingOutput };
   }
 
+  public async accumulateCheckpointOutHashes() {
+    const treeCalculator = await MerkleTreeCalculator.create(OUT_HASH_TREE_HEIGHT, undefined, (left, right) =>
+      Promise.resolve(shaMerkleHash(left, right)),
+    );
+
+    const computeOutHashHint = async (leaves: Fr[]) => {
+      const tree = await treeCalculator.computeTree(leaves.map(l => l.toBuffer()));
+      const nextAvailableLeafIndex = leaves.length;
+      return {
+        treeSnapshot: new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.root), nextAvailableLeafIndex),
+        siblingPath: tree.getSiblingPath(nextAvailableLeafIndex).map(Fr.fromBuffer) as Tuple<
+          Fr,
+          typeof OUT_HASH_TREE_HEIGHT
+        >,
+      };
+    };
+
+    let hint = this.checkpoints[0]?.getOutHashHint();
+    const outHashes = [];
+    for (let i = 0; i < this.totalNumCheckpoints; i++) {
+      const checkpoint = this.checkpoints[i];
+      if (!checkpoint) {
+        break;
+      }
+
+      // If hints are not set yet, it must be the first checkpoint. Compute the hints with an empty tree.
+      hint ??= await computeOutHashHint([]);
+      checkpoint.setOutHashHint(hint);
+
+      // Get the out hash for this checkpoint.
+      const outHash = checkpoint.accumulateBlockOutHashes();
+      if (!outHash) {
+        break;
+      }
+      outHashes.push(outHash);
+
+      // Get or create hints for the next checkpoint.
+      hint = checkpoint.getOutHashHintForNextCheckpoint() ?? (await computeOutHashHint(outHashes));
+      checkpoint.setOutHashHintForNextCheckpoint(hint);
+    }
+  }
+
   public async setBlobAccumulators() {
     let previousAccumulator = this.startBlobAccumulator;
     // Accumulate blobs as far as we can for this epoch.
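`accumulateCheckpointOutHashes` produces, per checkpoint, a hint made of a tree snapshot and the sibling path at the next free leaf, using `MerkleTreeCalculator` with `shaMerkleHash`. The self-contained sketch below reproduces that shape with a naive fixed-height SHA-256 tree built from `node:crypto`; it is an assumption-laden illustration of the data flow only (zero-valued padding leaves, illustrative height of 4), not the package's hashing.

```ts
import { createHash } from 'node:crypto';

const sha256 = (l: Buffer, r: Buffer) => createHash('sha256').update(Buffer.concat([l, r])).digest();

// Build a fixed-height tree (leaves padded with 32-byte zero buffers, an assumption) and return
// the root together with the sibling path of one leaf index - the two pieces a hint carries.
function computeHint(leaves: Buffer[], height: number, leafIndex: number) {
  let level = [...leaves];
  while (level.length < 1 << height) {
    level.push(Buffer.alloc(32));
  }
  const siblingPath: Buffer[] = [];
  let index = leafIndex;
  for (let h = 0; h < height; h++) {
    siblingPath.push(level[index ^ 1]);
    const next: Buffer[] = [];
    for (let i = 0; i < level.length; i += 2) {
      next.push(sha256(level[i], level[i + 1]));
    }
    level = next;
    index >>= 1;
  }
  return { root: level[0], nextAvailableLeafIndex: leafIndex, siblingPath };
}

// Usage: the hint for the *next* checkpoint is taken at the next free leaf,
// i.e. at index `outHashes.length`, mirroring what accumulateCheckpointOutHashes does.
const outHashes: Buffer[] = []; // per-checkpoint out hashes accumulated so far
const hint = computeHint(outHashes, 4, outHashes.length);
```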
package/src/orchestrator/orchestrator.ts:

@@ -8,21 +8,21 @@ import {
 } from '@aztec/constants';
 import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
 import { padArrayEnd } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/curves/bn254';
 import { AbortError } from '@aztec/foundation/error';
-import { Fr } from '@aztec/foundation/fields';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
 import { EthAddress } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
   MerkleTreeWriteOperations,
   PublicInputsAndRecursiveProof,
+  ReadonlyWorldStateAccess,
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
@@ -73,6 +73,11 @@ import { TxProvingState } from './tx-proving-state.js';
 
 const logger = createLogger('prover-client:orchestrator');
 
+type WorldStateFork = {
+  fork: MerkleTreeWriteOperations;
+  cleanupPromise: Promise<void> | undefined;
+};
+
 /**
  * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
  * 1. Transactions are provided to the scheduler post simulation.
@@ -94,12 +99,13 @@ export class ProvingOrchestrator implements EpochProver {
   private provingPromise: Promise<ProvingResult> | undefined = undefined;
   private metrics: ProvingOrchestratorMetrics;
   // eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
-  private dbs: Map<BlockNumber,
+  private dbs: Map<BlockNumber, WorldStateFork> = new Map();
 
   constructor(
-    private dbProvider: ForkMerkleTreeOperations,
+    private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
     private prover: ServerCircuitProver,
     private readonly proverId: EthAddress,
+    private readonly cancelJobsOnStop: boolean = false,
     telemetryClient: TelemetryClient = getTelemetryClient(),
   ) {
     this.metrics = new ProvingOrchestratorMetrics(telemetryClient, 'ProvingOrchestrator');
@@ -113,6 +119,10 @@ export class ProvingOrchestrator implements EpochProver {
     return this.proverId;
   }
 
+  public getNumActiveForks() {
+    return this.dbs.size;
+  }
+
   public stop(): Promise<void> {
     this.cancel();
     return Promise.resolve();
@@ -143,6 +153,14 @@ export class ProvingOrchestrator implements EpochProver {
     this.provingPromise = promise;
   }
 
+  /**
+   * Starts a new checkpoint.
+   * @param checkpointIndex - The index of the checkpoint in the epoch.
+   * @param constants - The constants for this checkpoint.
+   * @param l1ToL2Messages - The set of L1 to L2 messages to be inserted at the beginning of this checkpoint.
+   * @param totalNumBlocks - The total number of blocks expected in the checkpoint (must be at least one).
+   * @param headerOfLastBlockInPreviousCheckpoint - The header of the last block in the previous checkpoint.
+   */
   public async startNewCheckpoint(
     checkpointIndex: number,
     constants: CheckpointConstantData,
@@ -163,7 +181,7 @@
     const db = await this.dbProvider.fork(lastBlockNumber);
 
     const firstBlockNumber = BlockNumber(lastBlockNumber + 1);
-    this.dbs.set(firstBlockNumber, db);
+    this.dbs.set(firstBlockNumber, { fork: db, cleanupPromise: undefined });
 
     // Get archive sibling path before any block in this checkpoint lands.
     const lastArchiveSiblingPath = await getLastSiblingPath(MerkleTreeId.ARCHIVE, db);
@@ -221,9 +239,9 @@
     if (!this.dbs.has(blockNumber)) {
       // Fork world state at the end of the immediately previous block
      const db = await this.dbProvider.fork(BlockNumber(blockNumber - 1));
-      this.dbs.set(blockNumber, db);
+      this.dbs.set(blockNumber, { fork: db, cleanupPromise: undefined });
     }
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
 
     // Get archive snapshot and sibling path before any txs in this block lands.
     const lastArchiveTreeSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
@@ -255,7 +273,8 @@
       await endSpongeBlob.absorb(blockEndBlobFields);
       blockProvingState.setEndSpongeBlob(endSpongeBlob);
 
-      //
+      // Try to accumulate the out hashes and blobs as far as we can:
+      await this.provingState.accumulateCheckpointOutHashes();
       await this.provingState.setBlobAccumulators();
     }
   }
@@ -297,7 +316,7 @@
 
     logger.info(`Adding ${txs.length} transactions to block ${blockNumber}`);
 
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
     const lastArchive = provingState.lastArchiveTreeSnapshot;
     const newL1ToL2MessageTreeSnapshot = provingState.newL1ToL2MessageTreeSnapshot;
     const spongeBlobState = provingState.getStartSpongeBlob().clone();
@@ -310,7 +329,7 @@
 
       validateTx(tx);
 
-      logger.
+      logger.debug(`Received transaction: ${tx.hash}`);
 
       const startSpongeBlob = spongeBlobState.clone();
       const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
@@ -352,7 +371,8 @@
 
     provingState.setEndSpongeBlob(spongeBlobState);
 
-    // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
+    // Txs have been added to the block. Now try to accumulate the out hashes and blobs as far as we can:
+    await this.provingState.accumulateCheckpointOutHashes();
    await this.provingState.setBlobAccumulators();
   }
 
@@ -425,7 +445,7 @@
     }
 
     // Get db for this block
-    const db = this.dbs.get(provingState.blockNumber)
+    const db = this.dbs.get(provingState.blockNumber)!.fork;
 
     // Update the archive tree, so we're ready to start processing the next block:
     logger.verbose(
@@ -461,7 +481,7 @@
 
     // Get db for this block
     const blockNumber = provingState.blockNumber;
-    const db = this.dbs.get(blockNumber)
+    const db = this.dbs.get(blockNumber)!.fork;
 
     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const syncedArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.dbProvider.getSnapshot(blockNumber));
@@ -486,20 +506,19 @@
     // is aborted and never reaches this point, it will leak the fork. We need to add a global cleanup,
     // but have to make sure it only runs once all operations are completed, otherwise some function here
     // will attempt to access the fork after it was closed.
-
-    void this.dbs
-      .get(blockNumber)
-      ?.close()
-      .then(() => this.dbs.delete(blockNumber))
-      .catch(err => logger.error(`Error closing db for block ${blockNumber}`, err));
+    void this.cleanupDBFork(blockNumber);
   }
 
   /**
-   * Cancel any further proving
+   * Cancel any further proving.
+   * If cancelJobsOnStop is true, aborts all pending jobs with the broker (which marks them as 'Aborted').
+   * If cancelJobsOnStop is false (default), jobs remain in the broker queue and can be reused on restart/reorg.
    */
   public cancel() {
-    for (const controller of this.pendingProvingJobs) {
-      controller.abort();
+    if (this.cancelJobsOnStop) {
+      for (const controller of this.pendingProvingJobs) {
+        controller.abort();
+      }
     }
 
     this.provingState?.cancel();
@@ -534,6 +553,24 @@
     return epochProofResult;
   }
 
+  private async cleanupDBFork(blockNumber: BlockNumber): Promise<void> {
+    logger.debug(`Cleaning up world state fork for ${blockNumber}`);
+    const fork = this.dbs.get(blockNumber);
+    if (!fork) {
+      return;
+    }
+
+    try {
+      if (!fork.cleanupPromise) {
+        fork.cleanupPromise = fork.fork.close();
+      }
+      await fork.cleanupPromise;
+      this.dbs.delete(blockNumber);
+    } catch (err) {
+      logger.error(`Error closing db for block ${blockNumber}`, err);
+    }
+  }
+
   /**
    * Enqueue a job to be scheduled
    * @param provingState - The proving state object being operated on
@@ -851,19 +888,22 @@
       },
     ),
     async result => {
-      // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
-      await this.verifyBuiltBlockAgainstSyncedState(provingState);
-
       logger.debug(`Completed ${rollupType} proof for block ${provingState.blockNumber}`);
 
       const leafLocation = provingState.setBlockRootRollupProof(result);
       const checkpointProvingState = provingState.parentCheckpoint;
 
+      // If the proofs were slower than the block header building, then we need to try validating the block header hashes here.
+      await this.verifyBuiltBlockAgainstSyncedState(provingState);
+
      if (checkpointProvingState.totalNumBlocks === 1) {
        this.checkAndEnqueueCheckpointRootRollup(checkpointProvingState);
      } else {
        this.checkAndEnqueueNextBlockMergeRollup(checkpointProvingState, leafLocation);
      }
+
+      // We are finished with the block at this point, ensure the fork is cleaned up
+      void this.cleanupDBFork(provingState.blockNumber);
     },
   );
 }
@@ -1207,8 +1247,6 @@
 
     const txProvingState = provingState.getTxProvingState(txIndex);
 
-    // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-    // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
     const doAvmProving = wrapCallbackInSpan(
       this.tracer,
       'ProvingOrchestrator.prover.getAvmProof',
@@ -1217,36 +1255,13 @@
       },
       async (signal: AbortSignal) => {
        const inputs = txProvingState.getAvmInputs();
-        try {
-          // TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
-          // just keep the next line but removing the second argument (false).
-          return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
-        } catch (err) {
-          if (process.env.AVM_PROVING_STRICT) {
-            logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-            throw err;
-          } else {
-            logger.warn(
-              `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
               AVM inputs and carrying on. ${inspect(err)}.`,
-            );
-
-            try {
-              this.metrics.incAvmFallback();
-              const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
-              return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
-            } catch (err) {
-              logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
-              throw err;
-            }
-          }
-        }
+        return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
       },
     );
 
-    this.deferredProving(provingState, doAvmProving,
+    this.deferredProving(provingState, doAvmProving, proof => {
      logger.debug(`Proven VM for tx index: ${txIndex}`);
-      txProvingState.setAvmProof(
+      txProvingState.setAvmProof(proof);
      this.checkAndEnqueueBaseRollup(provingState, txIndex);
     });
 }
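The new `cleanupDBFork` is invoked from two places (after verifying the built block and again once the block-root proof completes), so the fork entry memoises its `cleanupPromise` to keep `close()` from running more than once per fork. Below is a stripped-down sketch of that pattern, with a stand-in `Fork` interface and a plain class in place of `MerkleTreeWriteOperations` and the orchestrator; names are illustrative.

```ts
// Stand-in for MerkleTreeWriteOperations: the only capability cleanup cares about.
interface Fork {
  close(): Promise<void>;
}

type WorldStateFork = { fork: Fork; cleanupPromise: Promise<void> | undefined };

class ForkRegistry {
  private dbs = new Map<number, WorldStateFork>();

  register(blockNumber: number, fork: Fork) {
    this.dbs.set(blockNumber, { fork, cleanupPromise: undefined });
  }

  // Analogous to what getNumActiveForks() exposes on the orchestrator.
  get size() {
    return this.dbs.size;
  }

  // Safe to call from multiple code paths: close() is started at most once,
  // later callers just await the same memoised promise.
  async cleanup(blockNumber: number): Promise<void> {
    const entry = this.dbs.get(blockNumber);
    if (!entry) {
      return;
    }
    try {
      entry.cleanupPromise ??= entry.fork.close();
      await entry.cleanupPromise;
      this.dbs.delete(blockNumber);
    } catch (err) {
      console.error(`Error closing db for block ${blockNumber}`, err);
    }
  }
}
```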
package/src/orchestrator/orchestrator_metrics.ts:

@@ -1,41 +1,18 @@
-import {
-  type Histogram,
-  Metrics,
-  type TelemetryClient,
-  type Tracer,
-  type UpDownCounter,
-  ValueType,
-} from '@aztec/telemetry-client';
+import { type Histogram, Metrics, type TelemetryClient, type Tracer } from '@aztec/telemetry-client';
 
 export class ProvingOrchestratorMetrics {
   public readonly tracer: Tracer;
 
   private baseRollupInputsDuration: Histogram;
-  private avmFallbackCount: UpDownCounter;
 
   constructor(client: TelemetryClient, name = 'ProvingOrchestrator') {
     this.tracer = client.getTracer(name);
     const meter = client.getMeter(name);
 
-    this.baseRollupInputsDuration = meter.createHistogram(Metrics.PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION
-      unit: 'ms',
-      description: 'Duration to build base rollup inputs',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount = meter.createUpDownCounter(Metrics.PROVING_ORCHESTRATOR_AVM_FALLBACK_COUNT, {
-      description: 'How many times the AVM fallback was used',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount.add(0);
+    this.baseRollupInputsDuration = meter.createHistogram(Metrics.PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION);
   }
 
   recordBaseRollupInputs(durationMs: number) {
     this.baseRollupInputsDuration.record(Math.ceil(durationMs));
   }
-
-  incAvmFallback() {
-    this.avmFallbackCount.add(1);
-  }
 }
package/src/orchestrator/tx-proving-state.ts:

@@ -1,9 +1,9 @@
 import { AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED, NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants';
-import type { Fr } from '@aztec/foundation/
-import {
+import type { Fr } from '@aztec/foundation/curves/bn254';
+import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
 import type { AvmCircuitInputs } from '@aztec/stdlib/avm';
-import type {
-import { ProofData } from '@aztec/stdlib/proofs';
+import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
+import { ProofData, ProofDataForFixedVk, RecursiveProof } from '@aztec/stdlib/proofs';
 import {
   type BaseRollupHints,
   PrivateBaseRollupHints,
@@ -32,7 +32,7 @@ export class TxProvingState {
     PublicChonkVerifierPublicInputs,
     typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH
   >;
-  private
+  private avmProof?: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>;
 
   constructor(
     public readonly processedTx: ProcessedTx,
@@ -46,7 +46,7 @@
   }
 
   public ready() {
-    return !this.requireAvmProof || (!!this.
+    return !this.requireAvmProof || (!!this.avmProof && !!this.publicChonkVerifier);
   }
 
   public getAvmInputs(): AvmCircuitInputs {
@@ -80,8 +80,8 @@
     this.publicChonkVerifier = publicChonkVerifierProofAndVk;
   }
 
-  public setAvmProof(
-    this.
+  public setAvmProof(avmProof: RecursiveProof<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>) {
+    this.avmProof = avmProof;
   }
 
   #getPrivateBaseInputs() {
@@ -105,7 +105,7 @@
     if (!this.publicChonkVerifier) {
       throw new Error('Tx not ready for proving base rollup: public chonk verifier proof undefined');
     }
-    if (!this.
+    if (!this.avmProof) {
       throw new Error('Tx not ready for proving base rollup: avm proof undefined');
     }
     if (!(this.baseRollupHints instanceof PublicBaseRollupHints)) {
@@ -114,11 +114,7 @@
 
     const publicChonkVerifierProofData = toProofData(this.publicChonkVerifier);
 
-    const avmProofData = new
-      this.processedTx.avmProvingRequest.inputs.publicInputs,
-      this.avm.proof,
-      getAvmVkData(),
-    );
+    const avmProofData = new ProofDataForFixedVk(this.processedTx.avmProvingRequest.inputs.publicInputs, this.avmProof);
 
     return new PublicTxBaseRollupPrivateInputs(publicChonkVerifierProofData, avmProofData, this.baseRollupHints);
   }
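`TxProvingState` now stores the AVM proof directly as a `RecursiveProof` and gates `ready()` on both the AVM proof and the public chonk verifier output before the base rollup can be assembled. A minimal, hypothetical sketch of that two-flag gate follows; the stand-in types are illustrative, not the real `RecursiveProof` or `PublicInputsAndRecursiveProof`.

```ts
// Hypothetical stand-ins for the proof artifacts held by the tx proving state.
type VerifierOutput = { proof: Uint8Array };
type AvmProof = { fields: bigint[] };

class MiniTxState {
  private publicChonkVerifier?: VerifierOutput;
  private avmProof?: AvmProof;

  constructor(private readonly requireAvmProof: boolean) {}

  setPublicChonkVerifier(output: VerifierOutput) {
    this.publicChonkVerifier = output;
  }

  setAvmProof(proof: AvmProof) {
    this.avmProof = proof;
  }

  // Private-only txs are ready immediately; public txs wait for both artifacts.
  ready() {
    return !this.requireAvmProof || (!!this.avmProof && !!this.publicChonkVerifier);
  }
}
```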
package/src/prover-client/factory.ts:

@@ -1,4 +1,8 @@
-import type {
+import type {
+  ForkMerkleTreeOperations,
+  ProvingJobBroker,
+  ReadonlyWorldStateAccess,
+} from '@aztec/stdlib/interfaces/server';
 import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';
 
 import type { ProverClientConfig } from '../config.js';
@@ -6,7 +10,7 @@ import { ProverClient } from './prover-client.js';
 
 export function createProverClient(
   config: ProverClientConfig,
-  worldState: ForkMerkleTreeOperations,
+  worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
   broker: ProvingJobBroker,
   telemetry: TelemetryClient = getTelemetryClient(),
 ) {
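The world-state parameter becomes an intersection type, `ForkMerkleTreeOperations & ReadonlyWorldStateAccess`, so callers must now supply a single object that satisfies both interfaces. The sketch below shows how such an intersection-typed parameter composes the two capabilities; the member shapes are assumptions for illustration, not the real interfaces from `@aztec/stdlib/interfaces/server`.

```ts
// Illustrative member shapes only.
interface CanFork {
  fork(blockNumber: number): Promise<{ close(): Promise<void> }>;
}

interface CanReadSnapshots {
  getSnapshot(blockNumber: number): unknown;
}

// The intersection expresses "one handle that can both fork the trees and read historical state",
// which is what the orchestrator now requires of its dbProvider.
async function useWorldState(worldState: CanFork & CanReadSnapshots) {
  const snapshot = worldState.getSnapshot(1); // read-only access
  const fork = await worldState.fork(1); // mutable fork for proving
  await fork.close();
  return snapshot;
}
```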
package/src/prover-client/prover-client.ts:

@@ -11,6 +11,7 @@ import {
   type ProvingJobBroker,
   type ProvingJobConsumer,
   type ProvingJobProducer,
+  type ReadonlyWorldStateAccess,
   type ServerCircuitProver,
   tryStop,
 } from '@aztec/stdlib/interfaces/server';
@@ -33,7 +34,7 @@ export class ProverClient implements EpochProverManager {
 
   private constructor(
     private config: ProverClientConfig,
-    private worldState: ForkMerkleTreeOperations,
+    private worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
     private orchestratorClient: ProvingJobProducer,
     private agentClient?: ProvingJobConsumer,
     private telemetry: TelemetryClient = getTelemetryClient(),
@@ -45,7 +46,13 @@
 
   public createEpochProver(): EpochProver {
     const facade = new BrokerCircuitProverFacade(this.orchestratorClient, this.proofStore, this.failedProofStore);
-    const orchestrator = new ProvingOrchestrator(
+    const orchestrator = new ProvingOrchestrator(
+      this.worldState,
+      facade,
+      this.config.proverId,
+      this.config.cancelJobsOnStop,
+      this.telemetry,
+    );
     return new ServerEpochProver(facade, orchestrator);
   }
 
@@ -99,7 +106,7 @@
    */
   public static async new(
     config: ProverClientConfig,
-    worldState: ForkMerkleTreeOperations,
+    worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
     broker: ProvingJobBroker,
     telemetry: TelemetryClient = getTelemetryClient(),
   ) {
@@ -129,15 +136,7 @@
     const prover = await buildServerCircuitProver(this.config, this.telemetry);
     this.agents = times(
       this.config.proverAgentCount,
-      () =>
-        new ProvingAgent(
-          this.agentClient!,
-          proofStore,
-          prover,
-          [],
-          this.config.proverAgentPollIntervalMs,
-          this.telemetry,
-        ),
+      () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs),
     );
 
     await Promise.all(this.agents.map(agent => agent.start()));
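The new `cancelJobsOnStop` flag travels from `ProverClientConfig` through the `ProvingOrchestrator` constructor into `cancel()`. A minimal sketch of the resulting behaviour, using plain `AbortController`s in place of the proving-job controllers (all names here are illustrative):

```ts
class MiniOrchestrator {
  private pendingProvingJobs: AbortController[] = [];

  constructor(private readonly cancelJobsOnStop: boolean = false) {}

  track(job: AbortController) {
    this.pendingProvingJobs.push(job);
  }

  // With cancelJobsOnStop=false (the default) pending jobs stay with the broker and can be
  // reused after a restart/reorg; with true they are aborted on cancel.
  cancel() {
    if (this.cancelJobsOnStop) {
      for (const controller of this.pendingProvingJobs) {
        controller.abort();
      }
    }
  }
}

// Usage
const orchestrator = new MiniOrchestrator(true);
const job = new AbortController();
orchestrator.track(job);
orchestrator.cancel(); // job.signal.aborted === true
```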
package/src/prover-client/server-epoch-prover.ts:

@@ -1,6 +1,6 @@
 import type { BatchedBlob, FinalBlobBatchingChallenges } from '@aztec/blob-lib/types';
 import { BlockNumber, EpochNumber } from '@aztec/foundation/branded-types';
-import type { Fr } from '@aztec/foundation/
+import type { Fr } from '@aztec/foundation/curves/bn254';
 import type { EthAddress } from '@aztec/stdlib/block';
 import type { EpochProver } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';