@aztec/prover-client 3.0.0-rc.5 → 4.0.0-nightly.20260107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/light/index.d.ts +2 -0
- package/dest/light/index.d.ts.map +1 -0
- package/dest/light/index.js +1 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts +19 -12
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -1
- package/dest/light/lightweight_checkpoint_builder.js +52 -13
- package/dest/mocks/test_context.d.ts +2 -1
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +12 -3
- package/dest/orchestrator/block-building-helpers.js +1 -1
- package/dest/orchestrator/orchestrator.d.ts +3 -3
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +413 -54
- package/dest/orchestrator/orchestrator_metrics.d.ts +1 -3
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +0 -9
- package/dest/prover-client/factory.d.ts +3 -3
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/prover-client.d.ts +3 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.d.ts +2 -2
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +2 -9
- package/dest/proving_broker/proving_agent.js +383 -8
- package/dest/proving_broker/proving_broker.d.ts +1 -1
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +387 -10
- package/dest/proving_broker/proving_job_controller.d.ts +1 -1
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +1 -2
- package/dest/test/mock_prover.d.ts +2 -2
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +1 -1
- package/package.json +16 -15
- package/src/light/index.ts +1 -0
- package/src/light/lightweight_checkpoint_builder.ts +74 -20
- package/src/mocks/test_context.ts +10 -2
- package/src/orchestrator/block-building-helpers.ts +1 -1
- package/src/orchestrator/orchestrator.ts +4 -29
- package/src/orchestrator/orchestrator_metrics.ts +1 -20
- package/src/prover-client/factory.ts +6 -2
- package/src/prover-client/prover-client.ts +3 -2
- package/src/proving_broker/broker_prover_facade.ts +1 -10
- package/src/proving_broker/proving_broker.ts +3 -0
- package/src/proving_broker/proving_job_controller.ts +1 -2
- package/src/test/mock_prover.ts +1 -6
package/src/light/lightweight_checkpoint_builder.ts CHANGED

@@ -8,9 +8,15 @@ import { L2BlockNew } from '@aztec/stdlib/block';
 import { Checkpoint } from '@aztec/stdlib/checkpoint';
 import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
 import { computeCheckpointOutHash, computeInHashFromL1ToL2Messages } from '@aztec/stdlib/messaging';
-import {
+import { CheckpointHeader, computeBlockHeadersHash } from '@aztec/stdlib/rollup';
 import { AppendOnlyTreeSnapshot, MerkleTreeId } from '@aztec/stdlib/trees';
-import {
+import {
+  type CheckpointGlobalVariables,
+  ContentCommitment,
+  type GlobalVariables,
+  type ProcessedTx,
+  StateReference,
+} from '@aztec/stdlib/tx';

 import {
   buildHeaderAndBodyFromTxs,
@@ -21,29 +27,30 @@ import {
 /**
  * Builds a checkpoint and its header and the blocks in it from a set of processed tx without running any circuits.
  *
- * It updates the l1-to-l2 message tree when starting a new checkpoint,
- *
+ * It updates the l1-to-l2 message tree when starting a new checkpoint, and then updates the archive tree when each block is added.
+ * Finally completes the checkpoint by computing its header.
 */
 export class LightweightCheckpointBuilder {
   private readonly logger = createLogger('lightweight-checkpoint-builder');
+
   private lastArchives: AppendOnlyTreeSnapshot[] = [];
   private spongeBlob: SpongeBlob;
   private blocks: L2BlockNew[] = [];
   private blobFields: Fr[] = [];

   constructor(
-
-
-
-
+    public readonly checkpointNumber: CheckpointNumber,
+    public readonly constants: CheckpointGlobalVariables,
+    public readonly l1ToL2Messages: Fr[],
+    public readonly db: MerkleTreeWriteOperations,
   ) {
     this.spongeBlob = SpongeBlob.init();
-    this.logger.debug('Starting new checkpoint', { constants
+    this.logger.debug('Starting new checkpoint', { constants, l1ToL2Messages });
   }

   static async startNewCheckpoint(
     checkpointNumber: CheckpointNumber,
-    constants:
+    constants: CheckpointGlobalVariables,
     l1ToL2Messages: Fr[],
     db: MerkleTreeWriteOperations,
   ): Promise<LightweightCheckpointBuilder> {
@@ -56,16 +63,46 @@ export class LightweightCheckpointBuilder {
     return new LightweightCheckpointBuilder(checkpointNumber, constants, l1ToL2Messages, db);
   }

-
+  /**
+   * Adds a new block to the checkpoint. The tx effects must have already been inserted into the db if
+   * this is called after tx processing, if that's not the case, then set `insertTxsEffects` to true.
+   */
+  public async addBlock(
+    globalVariables: GlobalVariables,
+    txs: ProcessedTx[],
+    opts: { insertTxsEffects?: boolean; expectedEndState?: StateReference } = {},
+  ): Promise<L2BlockNew> {
     const isFirstBlock = this.blocks.length === 0;
+
+    // Empty blocks are only allowed as the first block in a checkpoint
+    if (!isFirstBlock && txs.length === 0) {
+      throw new Error('Cannot add empty block that is not the first block in the checkpoint.');
+    }
+
     if (isFirstBlock) {
       this.lastArchives.push(await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db));
     }

     const lastArchive = this.lastArchives.at(-1)!;

-
-
+    if (opts.insertTxsEffects) {
+      this.logger.debug(
+        `Inserting side effects for ${txs.length} txs for block ${globalVariables.blockNumber} into db`,
+        { txs: txs.map(tx => tx.hash.toString()) },
+      );
+      for (const tx of txs) {
+        await insertSideEffects(tx, this.db);
+      }
+    }
+
+    const endState = await this.db.getStateReference();
+    if (opts.expectedEndState && !endState.equals(opts.expectedEndState)) {
+      this.logger.error('End state after processing txs does not match expected end state', {
+        globalVariables: globalVariables.toInspect(),
+        expectedEndState: opts.expectedEndState.toInspect(),
+        actualEndState: endState.toInspect(),
+      });
+      throw new Error(`End state does not match expected end state when building block ${globalVariables.blockNumber}`);
     }

     const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(
@@ -77,6 +114,8 @@ export class LightweightCheckpointBuilder {
       isFirstBlock,
     );

+    header.state.validate();
+
     await this.db.updateArchive(header);
     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db);
     this.lastArchives.push(newArchive);
@@ -101,7 +140,7 @@ export class LightweightCheckpointBuilder {

   async completeCheckpoint(): Promise<Checkpoint> {
     if (!this.blocks.length) {
-      throw new Error('
+      throw new Error('Cannot complete a checkpoint with no blocks');
     }

     const numBlobFields = this.blobFields.length + 1; // +1 for the checkpoint end marker.
@@ -120,8 +159,9 @@ export class LightweightCheckpointBuilder {

     const outHash = computeCheckpointOutHash(blocks.map(block => block.body.txEffects.map(tx => tx.l2ToL1Msgs)));

-    const
+    const { slotNumber, coinbase, feeRecipient, gasFees } = this.constants;

+    // TODO(palla/mbps): Should we source this from the constants instead?
     // timestamp of a checkpoint is the timestamp of the last block in the checkpoint.
     const timestamp = blocks[blocks.length - 1].timestamp;

@@ -129,16 +169,30 @@ export class LightweightCheckpointBuilder {

     const header = CheckpointHeader.from({
       lastArchiveRoot: this.lastArchives[0].root,
-      blockHeadersHash,
       contentCommitment: new ContentCommitment(blobsHash, inHash, outHash),
-
+      blockHeadersHash,
+      slotNumber,
       timestamp,
-      coinbase
-      feeRecipient
-      gasFees
+      coinbase,
+      feeRecipient,
+      gasFees,
       totalManaUsed,
     });

     return new Checkpoint(newArchive, header, blocks, this.checkpointNumber);
   }
+
+  clone() {
+    const clone = new LightweightCheckpointBuilder(
+      this.checkpointNumber,
+      this.constants,
+      [...this.l1ToL2Messages],
+      this.db,
+    );
+    clone.lastArchives = [...this.lastArchives];
+    clone.spongeBlob = this.spongeBlob.clone();
+    clone.blocks = [...this.blocks];
+    clone.blobFields = [...this.blobFields];
+    return clone;
+  }
 }
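
The new LightweightCheckpointBuilder surface above (startNewCheckpoint, the opts-taking addBlock, completeCheckpoint, clone) can be driven roughly as in the following sketch. This is illustrative only and is based solely on the signatures visible in this diff; the '@aztec/prover-client/light' entry point is an assumption (the diff only shows that src/light/index.ts gains an export), and the checkpoint-number type is derived from the factory signature to avoid guessing its import path.

// Sketch only, not part of the package: builds a one-block checkpoint with the new API.
import type { Fr } from '@aztec/foundation/curves/bn254';
import type { MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
import type { CheckpointGlobalVariables, GlobalVariables, ProcessedTx } from '@aztec/stdlib/tx';
// Assumed entry point; the diff only shows a new export added to src/light/index.ts.
import { LightweightCheckpointBuilder } from '@aztec/prover-client/light';

// Take the checkpoint number type from the static factory itself rather than assuming where
// CheckpointNumber is exported from.
type CheckpointNo = Parameters<typeof LightweightCheckpointBuilder.startNewCheckpoint>[0];

async function buildSingleBlockCheckpoint(
  checkpointNumber: CheckpointNo,
  constants: CheckpointGlobalVariables,
  l1ToL2Messages: Fr[],
  db: MerkleTreeWriteOperations,
  globalVariables: GlobalVariables,
  txs: ProcessedTx[],
) {
  // Updates the l1-to-l2 message tree for the new checkpoint.
  const builder = await LightweightCheckpointBuilder.startNewCheckpoint(checkpointNumber, constants, l1ToL2Messages, db);

  // insertTxsEffects tells the builder to write the tx side effects into `db` itself; leave it
  // unset if the effects were already inserted during tx processing. expectedEndState would make
  // the builder fail fast if the db state diverges from what the caller expects.
  const block = await builder.addBlock(globalVariables, txs, { insertTxsEffects: true });

  // Throws 'Cannot complete a checkpoint with no blocks' if addBlock was never called.
  const checkpoint = await builder.completeCheckpoint();
  return { block, checkpoint };
}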

package/src/mocks/test_context.ts CHANGED

@@ -45,6 +45,7 @@ export class TestContext {
   private headers: Map<number, BlockHeader> = new Map();
   private checkpoints: Checkpoint[] = [];
   private nextCheckpointIndex = 0;
+  private nextCheckpointNumber = CheckpointNumber(1);
   private nextBlockNumber = 1;
   private epochNumber = 1;
   private feePayerBalance: Fr;
@@ -187,7 +188,8 @@ export class TestContext {
     }

     const checkpointIndex = this.nextCheckpointIndex++;
-    const checkpointNumber =
+    const checkpointNumber = this.nextCheckpointNumber;
+    this.nextCheckpointNumber++;
     const slotNumber = checkpointNumber * 15; // times an arbitrary number to make it different to the checkpoint number

     const constants = makeCheckpointConstants(slotNumber, constantOpts);
@@ -204,6 +206,8 @@ export class TestContext {

     const startBlockNumber = this.nextBlockNumber;
     const previousBlockHeader = this.getBlockHeader(BlockNumber(startBlockNumber - 1));
+    // All blocks in the same slot/checkpoint share the same timestamp.
+    const timestamp = BigInt(slotNumber * 26);

     // Build global variables.
     const blockGlobalVariables = times(numBlocks, i =>
@@ -211,6 +215,7 @@ export class TestContext {
         coinbase: constants.coinbase,
         feeRecipient: constants.feeRecipient,
         gasFees: constants.gasFees,
+        timestamp,
       }),
     );
     this.nextBlockNumber += numBlocks;
@@ -253,7 +258,10 @@ export class TestContext {
       const txs = blockTxs[i];
       const state = blockEndStates[i];

-      const block = await builder.addBlock(blockGlobalVariables[i],
+      const block = await builder.addBlock(blockGlobalVariables[i], txs, {
+        expectedEndState: state,
+        insertTxsEffects: true,
+      });

       const header = block.header;
       this.headers.set(block.number, header);

package/src/orchestrator/block-building-helpers.ts CHANGED

@@ -282,7 +282,7 @@ export const buildHeaderFromCircuitOutputs = runInSpan(
       chainId: constants.chainId,
       version: constants.version,
       blockNumber: BlockNumber(blockRootRollupOutput.previousArchive.nextAvailableLeafIndex),
-      timestamp: blockRootRollupOutput.
+      timestamp: blockRootRollupOutput.timestamp,
       slotNumber: constants.slotNumber,
       coinbase: constants.coinbase,
       feeRecipient: constants.feeRecipient,

package/src/orchestrator/orchestrator.ts CHANGED

@@ -16,13 +16,13 @@ import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import type { TreeNodeLocation } from '@aztec/foundation/trees';
-import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
 import { EthAddress } from '@aztec/stdlib/block';
 import type {
   EpochProver,
   ForkMerkleTreeOperations,
   MerkleTreeWriteOperations,
   PublicInputsAndRecursiveProof,
+  ReadonlyWorldStateAccess,
   ServerCircuitProver,
 } from '@aztec/stdlib/interfaces/server';
 import type { Proof } from '@aztec/stdlib/proofs';
@@ -97,7 +97,7 @@ export class ProvingOrchestrator implements EpochProver {
   private dbs: Map<BlockNumber, MerkleTreeWriteOperations> = new Map();

   constructor(
-    private dbProvider: ForkMerkleTreeOperations,
+    private dbProvider: ReadonlyWorldStateAccess & ForkMerkleTreeOperations,
     private prover: ServerCircuitProver,
     private readonly proverId: EthAddress,
     telemetryClient: TelemetryClient = getTelemetryClient(),
@@ -310,7 +310,7 @@ export class ProvingOrchestrator implements EpochProver {

     validateTx(tx);

-    logger.
+    logger.debug(`Received transaction: ${tx.hash}`);

     const startSpongeBlob = spongeBlobState.clone();
     const [hints, treeSnapshots] = await this.prepareBaseRollupInputs(
@@ -1207,8 +1207,6 @@ export class ProvingOrchestrator implements EpochProver {

     const txProvingState = provingState.getTxProvingState(txIndex);

-    // This function tries to do AVM proving. If there is a failure, it fakes the proof unless AVM_PROVING_STRICT is defined.
-    // Nothing downstream depends on the AVM proof yet. So having this mode lets us incrementally build the AVM circuit.
     const doAvmProving = wrapCallbackInSpan(
       this.tracer,
       'ProvingOrchestrator.prover.getAvmProof',
@@ -1217,30 +1215,7 @@ export class ProvingOrchestrator implements EpochProver {
       },
       async (signal: AbortSignal) => {
         const inputs = txProvingState.getAvmInputs();
-
-          // TODO(#14234)[Unconditional PIs validation]: Remove the whole try-catch logic and
-          // just keep the next line but removing the second argument (false).
-          return await this.prover.getAvmProof(inputs, false, signal, provingState.epochNumber);
-        } catch (err) {
-          if (process.env.AVM_PROVING_STRICT) {
-            logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
-            throw err;
-          } else {
-            logger.warn(
-              `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Use snapshotted
-              AVM inputs and carrying on. ${inspect(err)}.`,
-            );
-
-            try {
-              this.metrics.incAvmFallback();
-              const snapshotAvmPrivateInputs = readAvmMinimalPublicTxInputsFromFile();
-              return await this.prover.getAvmProof(snapshotAvmPrivateInputs, true, signal, provingState.epochNumber);
-            } catch (err) {
-              logger.error(`Error thrown when proving snapshotted AVM inputs.`, err);
-              throw err;
-            }
-          }
-        }
+        return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
       },
     );


package/src/orchestrator/orchestrator_metrics.ts CHANGED

@@ -1,17 +1,9 @@
-import {
-  type Histogram,
-  Metrics,
-  type TelemetryClient,
-  type Tracer,
-  type UpDownCounter,
-  ValueType,
-} from '@aztec/telemetry-client';
+import { type Histogram, Metrics, type TelemetryClient, type Tracer, ValueType } from '@aztec/telemetry-client';

 export class ProvingOrchestratorMetrics {
   public readonly tracer: Tracer;

   private baseRollupInputsDuration: Histogram;
-  private avmFallbackCount: UpDownCounter;

   constructor(client: TelemetryClient, name = 'ProvingOrchestrator') {
     this.tracer = client.getTracer(name);
@@ -22,20 +14,9 @@ export class ProvingOrchestratorMetrics {
       description: 'Duration to build base rollup inputs',
       valueType: ValueType.INT,
     });
-
-    this.avmFallbackCount = meter.createUpDownCounter(Metrics.PROVING_ORCHESTRATOR_AVM_FALLBACK_COUNT, {
-      description: 'How many times the AVM fallback was used',
-      valueType: ValueType.INT,
-    });
-
-    this.avmFallbackCount.add(0);
   }

   recordBaseRollupInputs(durationMs: number) {
     this.baseRollupInputsDuration.record(Math.ceil(durationMs));
   }
-
-  incAvmFallback() {
-    this.avmFallbackCount.add(1);
-  }
 }

package/src/prover-client/factory.ts CHANGED

@@ -1,4 +1,8 @@
-import type {
+import type {
+  ForkMerkleTreeOperations,
+  ProvingJobBroker,
+  ReadonlyWorldStateAccess,
+} from '@aztec/stdlib/interfaces/server';
 import { type TelemetryClient, getTelemetryClient } from '@aztec/telemetry-client';

 import type { ProverClientConfig } from '../config.js';
@@ -6,7 +10,7 @@ import { ProverClient } from './prover-client.js';

 export function createProverClient(
   config: ProverClientConfig,
-  worldState: ForkMerkleTreeOperations,
+  worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
   broker: ProvingJobBroker,
   telemetry: TelemetryClient = getTelemetryClient(),
 ) {

package/src/prover-client/prover-client.ts CHANGED

@@ -11,6 +11,7 @@ import {
   type ProvingJobBroker,
   type ProvingJobConsumer,
   type ProvingJobProducer,
+  type ReadonlyWorldStateAccess,
   type ServerCircuitProver,
   tryStop,
 } from '@aztec/stdlib/interfaces/server';
@@ -33,7 +34,7 @@ export class ProverClient implements EpochProverManager {

   private constructor(
     private config: ProverClientConfig,
-    private worldState: ForkMerkleTreeOperations,
+    private worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
     private orchestratorClient: ProvingJobProducer,
     private agentClient?: ProvingJobConsumer,
     private telemetry: TelemetryClient = getTelemetryClient(),
@@ -99,7 +100,7 @@ export class ProverClient implements EpochProverManager {
   */
   public static async new(
     config: ProverClientConfig,
-    worldState: ForkMerkleTreeOperations,
+    worldState: ForkMerkleTreeOperations & ReadonlyWorldStateAccess,
     broker: ProvingJobBroker,
     telemetry: TelemetryClient = getTelemetryClient(),
   ) {
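
Both the factory and ProverClient.new above now require the world state to satisfy ReadonlyWorldStateAccess in addition to ForkMerkleTreeOperations. A minimal sketch of what call sites have to provide follows; the createProverClient declaration below is a stand-in mirroring the new signature, since its real import specifier and ProverClientConfig's external export are not shown in this diff.

import type {
  ForkMerkleTreeOperations,
  ProvingJobBroker,
  ReadonlyWorldStateAccess,
} from '@aztec/stdlib/interfaces/server';

// The intersection callers must now satisfy; previously ForkMerkleTreeOperations alone was enough.
type ProverWorldState = ForkMerkleTreeOperations & ReadonlyWorldStateAccess;

// Stand-in declaration mirroring the new factory signature; config is left as unknown because
// ProverClientConfig comes from this package's internal config module.
declare function createProverClient(config: unknown, worldState: ProverWorldState, broker: ProvingJobBroker): unknown;

function startProver(config: unknown, worldState: ProverWorldState, broker: ProvingJobBroker) {
  // A value implementing only ForkMerkleTreeOperations no longer type-checks here.
  return createProverClient(config, worldState, broker);
}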

package/src/proving_broker/broker_prover_facade.ts CHANGED

@@ -6,7 +6,6 @@ import type {
 } from '@aztec/constants';
 import { EpochNumber } from '@aztec/foundation/branded-types';
 import { sha256 } from '@aztec/foundation/crypto/sha256';
-import { Fr } from '@aztec/foundation/curves/bn254';
 import { createLogger } from '@aztec/foundation/log';
 import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
 import { truncate } from '@aztec/foundation/string';
@@ -398,24 +397,16 @@ export class BrokerCircuitProverFacade implements ServerCircuitProver {

   getAvmProof(
     inputs: AvmCircuitInputs,
-    skipPublicInputsValidation?: boolean, // TODO(#14234)[Unconditional PIs validation]: remove this argument
     signal?: AbortSignal,
     epochNumber?: EpochNumber,
   ): Promise<ProofAndVerificationKey<typeof AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED>> {
-    this.log.info(`getAvmProof() called with skipPublicInputsValidation: ${skipPublicInputsValidation}`);
-
     return this.enqueueJob(
       this.generateId(ProvingRequestType.PUBLIC_VM, inputs, epochNumber),
       ProvingRequestType.PUBLIC_VM,
       inputs,
       epochNumber,
       signal,
-    )
-      // TODO(#14234)[Unconditional PIs validation]: Remove ".then()".
-      // Override the default value of skipPublicInputsValidation potentially set in BBNativeRollupProver.getAvmProof().
-      result.proof.proof[0] = skipPublicInputsValidation ? new Fr(1) : new Fr(0);
-      return result;
-    });
+    );
   }

   getBaseParityProof(

package/src/proving_broker/proving_broker.ts CHANGED

@@ -302,6 +302,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Tr
       this.resultsCache.delete(id);
       this.inProgress.delete(id);
       this.retries.delete(id);
+      this.enqueuedAt.delete(id);
     }
   }

@@ -354,6 +355,8 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer, Tr
     const enqueuedAt = this.enqueuedAt.get(job.id);
     if (enqueuedAt) {
       this.instrumentation.recordJobWait(job.type, enqueuedAt);
+      // we can clear this flag now.
+      this.enqueuedAt.delete(job.id);
     }

     return { job, time };

package/src/proving_broker/proving_job_controller.ts CHANGED

@@ -125,8 +125,7 @@ export class ProvingJobController {
     const signal = this.abortController.signal;
     switch (type) {
       case ProvingRequestType.PUBLIC_VM: {
-
-        return await this.circuitProver.getAvmProof(inputs, undefined, signal, this.epochNumber);
+        return await this.circuitProver.getAvmProof(inputs, signal, this.epochNumber);
       }

       case ProvingRequestType.PUBLIC_CHONK_VERIFIER: {
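
The controller change above, the BrokerCircuitProverFacade change, and the MockProver change below all track the same ServerCircuitProver.getAvmProof signature update: the skipPublicInputsValidation flag is gone, so the abort signal now follows the inputs directly. A small sketch of the call-site shape, with parameter types derived from the interface itself rather than assuming their import paths:

import type { ServerCircuitProver } from '@aztec/stdlib/interfaces/server';

// Derive the input and epoch types from the interface so the sketch stays in sync with
// whatever ServerCircuitProver actually declares.
type AvmInputs = Parameters<ServerCircuitProver['getAvmProof']>[0];
type AvmEpoch = Parameters<ServerCircuitProver['getAvmProof']>[2];

async function proveAvm(prover: ServerCircuitProver, inputs: AvmInputs, signal: AbortSignal, epoch: AvmEpoch) {
  // 3.0.0-rc.5 call shape: prover.getAvmProof(inputs, /* skipPublicInputsValidation */ false, signal, epoch)
  // 4.0.0-nightly call shape: the flag is removed and the remaining arguments shift left.
  return await prover.getAvmProof(inputs, signal, epoch);
}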

package/src/test/mock_prover.ts CHANGED

@@ -105,12 +105,7 @@ export class TestBroker implements ProvingJobProducer {
 export class MockProver implements ServerCircuitProver {
   constructor() {}

-  getAvmProof(
-    _inputs: AvmCircuitInputs,
-    _skipPublicInputsValidation?: boolean, // TODO(#14234)[Unconditional PIs validation]: Remove.
-    _signal?: AbortSignal,
-    _epochNumber?: number,
-  ) {
+  getAvmProof(_inputs: AvmCircuitInputs, _signal?: AbortSignal, _epochNumber?: number) {
     return Promise.resolve(
       makeProofAndVerificationKey(
         makeEmptyRecursiveProof(AVM_V2_PROOF_LENGTH_IN_FIELDS_PADDED),