@aztec/prover-client 0.66.0 → 0.67.1-devnet
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block_builder/light.d.ts +4 -3
- package/dest/block_builder/light.d.ts.map +1 -1
- package/dest/block_builder/light.js +30 -20
- package/dest/index.d.ts +0 -1
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -2
- package/dest/mocks/fixtures.d.ts +3 -3
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +2 -2
- package/dest/mocks/test_context.d.ts +10 -9
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +24 -13
- package/dest/orchestrator/block-building-helpers.d.ts +10 -6
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +27 -16
- package/dest/orchestrator/block-proving-state.d.ts +6 -5
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +16 -8
- package/dest/orchestrator/epoch-proving-state.d.ts +1 -1
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +3 -3
- package/dest/orchestrator/orchestrator.d.ts +11 -8
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +94 -58
- package/dest/orchestrator/orchestrator_metrics.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator_metrics.js +2 -5
- package/dest/prover-agent/memory-proving-queue.d.ts +2 -1
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -1
- package/dest/prover-agent/memory-proving-queue.js +241 -224
- package/dest/prover-agent/prover-agent.d.ts +11 -2
- package/dest/prover-agent/prover-agent.d.ts.map +1 -1
- package/dest/prover-agent/prover-agent.js +187 -160
- package/dest/prover-client/prover-client.d.ts +2 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +6 -9
- package/dest/proving_broker/broker_prover_facade.d.ts +26 -0
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -0
- package/dest/proving_broker/broker_prover_facade.js +107 -0
- package/dest/proving_broker/proving_agent.d.ts +4 -3
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +74 -65
- package/dest/proving_broker/proving_broker.d.ts +27 -7
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +405 -258
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +4 -8
- package/dest/proving_broker/proving_broker_instrumentation.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_instrumentation.js +2 -8
- package/dest/proving_broker/proving_job_controller.d.ts +2 -1
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +15 -14
- package/dest/proving_broker/rpc.js +2 -2
- package/dest/test/mock_prover.d.ts +6 -6
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +5 -5
- package/package.json +18 -13
- package/src/block_builder/light.ts +31 -22
- package/src/index.ts +0 -1
- package/src/mocks/fixtures.ts +4 -4
- package/src/mocks/test_context.ts +39 -24
- package/src/orchestrator/block-building-helpers.ts +33 -20
- package/src/orchestrator/block-proving-state.ts +17 -6
- package/src/orchestrator/epoch-proving-state.ts +0 -2
- package/src/orchestrator/orchestrator.ts +111 -62
- package/src/orchestrator/orchestrator_metrics.ts +1 -11
- package/src/prover-agent/memory-proving-queue.ts +12 -7
- package/src/prover-agent/prover-agent.ts +67 -48
- package/src/prover-client/prover-client.ts +5 -12
- package/src/proving_broker/{caching_broker_facade.ts → broker_prover_facade.ts} +62 -85
- package/src/proving_broker/proving_agent.ts +74 -78
- package/src/proving_broker/proving_broker.ts +240 -73
- package/src/proving_broker/proving_broker_database/persisted.ts +2 -8
- package/src/proving_broker/proving_broker_instrumentation.ts +0 -7
- package/src/proving_broker/proving_job_controller.ts +13 -12
- package/src/proving_broker/rpc.ts +1 -1
- package/src/test/mock_prover.ts +7 -3
- package/dest/proving_broker/caching_broker_facade.d.ts +0 -30
- package/dest/proving_broker/caching_broker_facade.d.ts.map +0 -1
- package/dest/proving_broker/caching_broker_facade.js +0 -150
- package/dest/proving_broker/prover_cache/memory.d.ts +0 -9
- package/dest/proving_broker/prover_cache/memory.d.ts.map +0 -1
- package/dest/proving_broker/prover_cache/memory.js +0 -16
- package/src/proving_broker/prover_cache/memory.ts +0 -20
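The main API change visible in the diff below is in the proving orchestrator: startNewBlock no longer takes a transaction count, transactions are handed over in bulk via the new addTxs, and setBlockCompleted accepts an optional expected BlockHeader and returns the built L2 block. A minimal driver sketch of that flow, assuming an orchestrator instance and already-processed txs; the narrowed interface below only restates the signatures that appear in this diff and is not the package's exported API:

    import { type L2Block, type ProcessedTx } from '@aztec/circuit-types';
    import { type BlockHeader, type Fr, type GlobalVariables } from '@aztec/circuits.js';

    // Narrowed, illustrative view of the orchestrator surface as it appears in this diff.
    interface BlockProvingApi {
      startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise<void>;
      addTxs(txs: ProcessedTx[]): Promise<void>;
      setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<L2Block>;
    }

    async function proveOneBlock(
      orchestrator: BlockProvingApi,
      globalVariables: GlobalVariables,
      l1ToL2Messages: Fr[],
      txs: ProcessedTx[],
    ): Promise<L2Block> {
      // 0.67.x: the tx count is no longer declared up front; it is derived from the txs themselves.
      await orchestrator.startNewBlock(globalVariables, l1ToL2Messages);
      // Transactions are added in bulk; the block size and blob field count come from the txs.
      await orchestrator.addTxs(txs);
      // Completing the block assembles the header and returns the built L2 block.
      return orchestrator.setBlockCompleted(globalVariables.blockNumber.toNumber());
    }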

package/src/orchestrator/orchestrator.ts

@@ -4,6 +4,7 @@ import {
   type ProcessedTx,
   type ServerCircuitProver,
   makeEmptyProcessedTx,
+  toNumBlobFields,
 } from '@aztec/circuit-types';
 import {
   type EpochProver,
@@ -16,15 +17,18 @@ import {
   AVM_PROOF_LENGTH_IN_FIELDS,
   AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS,
   type AppendOnlyTreeSnapshot,
+  BLOBS_PER_BLOCK,
   type BaseOrMergeRollupPublicInputs,
   BaseParityInputs,
   type BaseRollupHints,
+  BlobPublicInputs,
+  type BlockHeader,
   type BlockRootOrBlockMergePublicInputs,
   BlockRootRollupInputs,
   EmptyBlockRootRollupInputs,
+  FIELDS_PER_BLOB,
   Fr,
   type GlobalVariables,
-  type Header,
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
   L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
   type NESTED_RECURSIVE_PROOF_LENGTH,
@@ -40,9 +44,11 @@ import {
   makeEmptyRecursiveProof,
 } from '@aztec/circuits.js';
 import { makeTuple } from '@aztec/foundation/array';
+import { Blob } from '@aztec/foundation/blob';
 import { maxBy, padArrayEnd } from '@aztec/foundation/collection';
+import { sha256ToField } from '@aztec/foundation/crypto';
 import { AbortError } from '@aztec/foundation/error';
-import {
+import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
 import { type Tuple } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
@@ -77,7 +83,7 @@ import {
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';

-const logger =
+const logger = createLogger('prover-client:orchestrator');

 /**
  * Implements an event driven proving scheduler to build the recursive proof tree. The idea being:
@@ -139,17 +145,14 @@ export class ProvingOrchestrator implements EpochProver {

   /**
    * Starts off a new block
-   * @param numTxs - The total number of transactions in the block.
    * @param globalVariables - The global variables for the block
    * @param l1ToL2Messages - The l1 to l2 messages for the block
-   * @param verificationKeys - The private kernel verification keys
    * @returns A proving ticket, containing a promise notifying of proving completion
    */
-  @trackSpan('ProvingOrchestrator.startNewBlock',
-    [Attributes.BLOCK_SIZE]: numTxs,
+  @trackSpan('ProvingOrchestrator.startNewBlock', globalVariables => ({
     [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber(),
   }))
-  public async startNewBlock(
+  public async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]) {
     if (!this.provingState) {
       throw new Error(`Invalid proving state, call startNewEpoch before starting a block`);
     }
@@ -158,12 +161,8 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Epoch not accepting further blocks`);
     }

-    if (!Number.isInteger(numTxs) || numTxs < 2) {
-      throw new Error(`Invalid number of txs for block (got ${numTxs})`);
-    }
-
     logger.info(
-      `Starting block ${globalVariables.blockNumber.toNumber()} for slot ${globalVariables.slotNumber.toNumber()}
+      `Starting block ${globalVariables.blockNumber.toNumber()} for slot ${globalVariables.slotNumber.toNumber()}`,
     );

     // Fork world state at the end of the immediately previous block
@@ -210,7 +209,6 @@ export class ProvingOrchestrator implements EpochProver {
     );

     const blockProvingState = this.provingState!.startNewBlock(
-      numTxs,
       globalVariables,
       l1ToL2MessagesPadded,
       messageTreeSnapshot,
@@ -228,47 +226,56 @@ export class ProvingOrchestrator implements EpochProver {
   }

   /**
-   * The interface to add
-   * @param
+   * The interface to add simulated transactions to the scheduler
+   * @param txs - The transactions to be proven
    */
-  @trackSpan('ProvingOrchestrator.
-    [Attributes.
+  @trackSpan('ProvingOrchestrator.addTxs', txs => ({
+    [Attributes.BLOCK_TXS_COUNT]: txs.length,
   }))
-  public async
-
-
-
-
-
-
-
-
-
-  }
+  public async addTxs(txs: ProcessedTx[]): Promise<void> {
+    if (!txs.length) {
+      // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
+      // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
+      logger.warn(`Provided no txs to orchestrator addTxs.`);
+      return;
+    }
+    const blockNumber = txs[0].constants.globalVariables.blockNumber.toNumber();
+    const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber!);
+    if (!provingState) {
+      throw new Error(`Block proving state for ${blockNumber} not found`);
+    }

-
-
-  }
+    const numBlobFields = toNumBlobFields(txs);
+    provingState.startNewBlock(Math.max(2, txs.length), numBlobFields);

-
+    logger.info(
+      `Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState?.blockNumber}`,
+    );
+    for (const tx of txs) {
+      try {
+        if (!provingState.verifyState()) {
+          throw new Error(`Invalid proving state when adding a tx`);
+        }

-
+        validateTx(tx);

-
-        logger.warn(`Ignoring empty transaction ${tx.hash} - it will not be added to this block`);
-        return;
-      }
+        logger.info(`Received transaction: ${tx.hash}`);

-
-
+        if (tx.isEmpty) {
+          logger.warn(`Ignoring empty transaction ${tx.hash} - it will not be added to this block`);
+          continue;
+        }

-
-
+        const [hints, treeSnapshots] = await this.prepareTransaction(tx, provingState);
+        this.enqueueFirstProofs(hints, treeSnapshots, tx, provingState);
+      } catch (err: any) {
+        throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
+          cause: err,
+        });
       }
-    }
-
-
-    });
+    }
+    if (provingState.transactionsReceived === provingState.totalNumTxs) {
+      logger.verbose(`All transactions received for block ${provingState.globalVariables.blockNumber}.`);
     }
   }

@@ -279,12 +286,18 @@ export class ProvingOrchestrator implements EpochProver {
   @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: number) => ({
     [Attributes.BLOCK_NUMBER]: blockNumber,
   }))
-  public async setBlockCompleted(blockNumber: number, expectedHeader?:
+  public async setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<L2Block> {
     const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber);
     if (!provingState) {
       throw new Error(`Block proving state for ${blockNumber} not found`);
     }

+    if (!provingState.spongeBlobState) {
+      // If we are completing an empty block, initialise the provingState.
+      // We will have 2 padding txs, and => no blob fields.
+      provingState.startNewBlock(2, 0);
+    }
+
     if (!provingState.verifyState()) {
       throw new Error(`Block proving failed: ${provingState.error}`);
     }
@@ -326,7 +339,7 @@ export class ProvingOrchestrator implements EpochProver {
     }

     // And build the block header
-    logger.verbose(`Block ${
+    logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
     await this.buildBlock(provingState, expectedHeader);

     // If the proofs were faster than the block building, then we need to try the block root rollup again here
@@ -407,7 +420,7 @@ export class ProvingOrchestrator implements EpochProver {
     return Promise.resolve();
   }

-  private async buildBlock(provingState: BlockProvingState, expectedHeader?:
+  private async buildBlock(provingState: BlockProvingState, expectedHeader?: BlockHeader) {
     // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body
     const txs = provingState!.allTxs.map(a => a.processedTx);

@@ -435,14 +448,6 @@ export class ProvingOrchestrator implements EpochProver {
     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const l2Block = new L2Block(newArchive, header, body);

-    if (!l2Block.body.getTxsEffectsHash().equals(header.contentCommitment.txsEffectsHash)) {
-      throw new Error(
-        `Txs effects hash mismatch, ${l2Block.body
-          .getTxsEffectsHash()
-          .toString('hex')} == ${header.contentCommitment.txsEffectsHash.toString('hex')} `,
-      );
-    }
-
     await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);

     logger.verbose(`Orchestrator finalised block ${l2Block.number}`);
@@ -566,6 +571,22 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

+  /**
+   * Collect all new nullifiers, commitments, and contracts from all txs in a block
+   * @returns The array of non empty tx effects.
+   */
+  private extractTxEffects(provingState: BlockProvingState) {
+    // Note: this check should ensure that we have all txs and their effects ready.
+    if (!provingState.finalRootParityInput?.publicInputs.shaRoot) {
+      throw new Error(`Invalid proving state, a block must be ready to be proven before its effects can be extracted.`);
+    }
+    const nonEmptyTxEffects = provingState.allTxs
+      .map(txProvingState => txProvingState.processedTx.txEffect)
+      .filter(txEffect => !txEffect.isEmpty());
+
+    return nonEmptyTxEffects;
+  }
+
   /**
    * Returns the proof for the current epoch.
    */
@@ -692,7 +713,7 @@ export class ProvingOrchestrator implements EpochProver {
     provingState: BlockProvingState | undefined,
     tx: ProcessedTx,
   ): Promise<[BaseRollupHints, TreeSnapshots] | undefined> {
-    if (!provingState?.verifyState()) {
+    if (!provingState?.verifyState() || !provingState.spongeBlobState) {
       logger.debug('Not preparing base rollup inputs, state invalid');
       return;
     }
@@ -701,7 +722,9 @@ export class ProvingOrchestrator implements EpochProver {

     // We build the base rollup inputs using a mock proof and verification key.
     // These will be overwritten later once we have proven the tube circuit and any public kernels
-    const [ms, hints] = await elapsed(
+    const [ms, hints] = await elapsed(
+      buildBaseRollupHints(tx, provingState.globalVariables, db, provingState.spongeBlobState),
+    );

     if (!tx.isEmpty) {
       this.metrics.recordBaseRollupInputs(ms);
@@ -774,7 +797,7 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  // Enqueues the
+  // Enqueues the tube circuit for a given transaction index
   // Once completed, will enqueue the next circuit, either a public kernel or the base rollup
   private enqueueTube(provingState: BlockProvingState, txIndex: number) {
     if (!provingState?.verifyState()) {
@@ -856,9 +879,14 @@ export class ProvingOrchestrator implements EpochProver {
     provingState.blockRootRollupStarted = true;
     const mergeInputData = provingState.getMergeInputs(0);
     const rootParityInput = provingState.finalRootParityInput!;
+    const blobFields = this.extractTxEffects(provingState)
+      .map(tx => tx.toBlobFields())
+      .flat();
+    const blobs = Blob.getBlobs(blobFields);
+    const blobsHash = sha256ToField(blobs.map(b => b.getEthVersionedBlobHash()));

     logger.debug(
-      `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs
+      `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs and ${blobs.length} blobs.`,
     );

     const previousRollupData: BlockRootRollupInputs['previousRollupData'] = makeTuple(2, i =>
@@ -879,6 +907,13 @@ export class ProvingOrchestrator implements EpochProver {
       newArchiveSiblingPath: provingState.archiveTreeRootSiblingPath,
       previousBlockHash: provingState.previousBlockHash,
       proverId: this.proverId,
+      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
+      blobCommitments: padArrayEnd(
+        blobs.map(b => b.commitmentToFields()),
+        [Fr.ZERO, Fr.ZERO],
+        BLOBS_PER_BLOCK,
+      ),
+      blobsHash: blobsHash,
     });

     this.deferredProving(
@@ -903,6 +938,17 @@ export class ProvingOrchestrator implements EpochProver {

       provingState.blockRootRollupPublicInputs = result.inputs;
       provingState.finalProof = result.proof.binaryProof;
+      const blobOutputs = result.inputs.blobPublicInputs[0];
+      blobOutputs.inner.forEach((blobOutput, i) => {
+        if (!blobOutput.isEmpty() && !blobOutput.equals(BlobPublicInputs.fromBlob(blobs[i]))) {
+          throw new Error(
+            `Rollup circuits produced mismatched blob evaluation:
+            z: ${blobOutput.z} == ${blobs[i].challengeZ},
+            y: ${blobOutput.y.toString(16)} == ${blobs[i].evaluationY.toString('hex')},
+            C: ${blobOutput.kzgCommitment} == ${blobs[i].commitmentToFields()}`,
+          );
+        }
+      });

       logger.debug(`Completed proof for block root rollup for ${provingState.block?.number}`);
       // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
@@ -1196,10 +1242,13 @@ export class ProvingOrchestrator implements EpochProver {
       return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
     } catch (err) {
       if (process.env.AVM_PROVING_STRICT) {
+        logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
         throw err;
       } else {
         logger.warn(
-          `Error thrown when proving AVM circuit
+          `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Faking AVM proof and carrying on. ${inspect(
+            err,
+          )}.`,
         );
         return {
           proof: makeEmptyRecursiveProof(AVM_PROOF_LENGTH_IN_FIELDS),
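For reference, the blob wiring added above can be read as the following standalone sketch. It uses the same calls that appear in the diff; the TxEffect import path and the exact return shape are assumptions made for illustration.

    import { type TxEffect } from '@aztec/circuit-types';
    import { BLOBS_PER_BLOCK, FIELDS_PER_BLOB, Fr } from '@aztec/circuits.js';
    import { Blob } from '@aztec/foundation/blob';
    import { padArrayEnd } from '@aztec/foundation/collection';
    import { sha256ToField } from '@aztec/foundation/crypto';

    function buildBlobInputs(nonEmptyTxEffects: TxEffect[]) {
      // Flatten every non-empty tx effect into the field encoding that feeds the blob(s).
      const blobFields = nonEmptyTxEffects.map(effect => effect.toBlobFields()).flat();
      // Pack the fields into blobs and hash their versioned commitments into a single field.
      const blobs = Blob.getBlobs(blobFields);
      const blobsHash = sha256ToField(blobs.map(b => b.getEthVersionedBlobHash()));
      // Pad to the fixed sizes expected by the block root rollup inputs.
      return {
        blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
        blobCommitments: padArrayEnd(
          blobs.map(b => b.commitmentToFields()),
          [Fr.ZERO, Fr.ZERO],
          BLOBS_PER_BLOCK,
        ),
        blobsHash,
      };
    }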

package/src/orchestrator/orchestrator_metrics.ts

@@ -1,11 +1,4 @@
-import {
-  type Histogram,
-  Metrics,
-  type TelemetryClient,
-  type Tracer,
-  ValueType,
-  millisecondBuckets,
-} from '@aztec/telemetry-client';
+import { type Histogram, Metrics, type TelemetryClient, type Tracer, ValueType } from '@aztec/telemetry-client';

 export class ProvingOrchestratorMetrics {
   public readonly tracer: Tracer;
@@ -20,9 +13,6 @@ export class ProvingOrchestratorMetrics {
       unit: 'ms',
       description: 'Duration to build base rollup inputs',
       valueType: ValueType.INT,
-      advice: {
-        explicitBucketBoundaries: millisecondBuckets(1), // 10ms -> ~327s
-      },
     });
   }

package/src/prover-agent/memory-proving-queue.ts

@@ -32,10 +32,10 @@ import type
 } from '@aztec/circuits.js';
 import { randomBytes } from '@aztec/foundation/crypto';
 import { AbortError, TimeoutError } from '@aztec/foundation/error';
-import {
+import { createLogger } from '@aztec/foundation/log';
 import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
 import { PriorityMemoryQueue } from '@aztec/foundation/queue';
-import { type TelemetryClient } from '@aztec/telemetry-client';
+import { type TelemetryClient, type Tracer, trackSpan } from '@aztec/telemetry-client';

 import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js';
 import { ProvingQueueMetrics } from './queue_metrics.js';
@@ -57,7 +57,7 @@ const defaultTimeSource = () => Date.now();
  * The queue accumulates jobs and provides them to agents prioritized by block number.
  */
 export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource {
-  private log =
+  private log = createLogger('prover-client:prover-pool:queue');
   private queue = new PriorityMemoryQueue<ProvingJobWithResolvers>(
     (a, b) => (a.epochNumber ?? 0) - (b.epochNumber ?? 0),
   );
@@ -65,6 +65,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
   private runningPromise: RunningPromise;
   private metrics: ProvingQueueMetrics;

+  public readonly tracer: Tracer;
+
   constructor(
     client: TelemetryClient,
     /** Timeout the job if an agent doesn't report back in this time */
@@ -75,8 +77,9 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
     private timeSource = defaultTimeSource,
     private proofStore: ProofStore = new InlineProofStore(),
   ) {
+    this.tracer = client.getTracer('MemoryProvingQueue');
     this.metrics = new ProvingQueueMetrics(client, 'MemoryProvingQueue');
-    this.runningPromise = new RunningPromise(this.poll, pollingIntervalMs);
+    this.runningPromise = new RunningPromise(this.poll.bind(this), this.log, pollingIntervalMs);
   }

   public start() {
@@ -120,6 +123,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
         id: job.id,
         type: job.type,
         inputsUri: job.inputsUri,
+        epochNumber: job.epochNumber,
       };
     } catch (err) {
       if (err instanceof TimeoutError) {
@@ -201,7 +205,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
     return this.jobsInProgress.has(jobId);
   }

-
+  @trackSpan('MemoryProvingQueue.poll')
+  private poll() {
     const now = this.timeSource();
     this.metrics.recordQueueSize(this.queue.length());

@@ -219,7 +224,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
         this.queue.put(job);
       }
     }
-  }
+  }

   private async enqueue<T extends ProvingRequestType>(
     type: T,
@@ -244,7 +249,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
       reject,
       attempts: 1,
       heartbeat: 0,
-      epochNumber,
+      epochNumber: epochNumber ?? 0,
     };

     if (signal) {
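The queue change above follows the telemetry pattern used throughout this release: the class exposes a tracer obtained from the TelemetryClient so that the trackSpan decorator can wrap a method in a span. A minimal sketch of that pattern, with an illustrative class name:

    import { type TelemetryClient, type Tracer, trackSpan } from '@aztec/telemetry-client';

    class PollingWorker {
      // The decorator looks up `this.tracer` on the instance, so the class must expose one.
      public readonly tracer: Tracer;

      constructor(client: TelemetryClient) {
        this.tracer = client.getTracer('PollingWorker');
      }

      @trackSpan('PollingWorker.poll')
      poll() {
        // ...dequeue work, record metrics, hand jobs to agents...
      }
    }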
package/src/prover-agent/prover-agent.ts

@@ -8,29 +8,32 @@ import {
   type ServerCircuitProver,
   makeProvingRequestResult,
 } from '@aztec/circuit-types';
-import {
+import { createLogger } from '@aztec/foundation/log';
 import { RunningPromise } from '@aztec/foundation/running-promise';
 import { elapsed } from '@aztec/foundation/timer';
+import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';
+import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

 import { InlineProofStore } from '../proving_broker/proof_store.js';

 const PRINT_THRESHOLD_NS = 6e10; // 60 seconds

+type InFlightPromise = {
+  id: string;
+  type: ProvingRequestType;
+  promise: Promise<any>;
+};
+
 /**
  * A helper class that encapsulates a circuit prover and connects it to a job source.
  */
-export class ProverAgent implements ProverAgentApi {
-  private inFlightPromises = new Map<
-    string,
-    {
-      id: string;
-      type: ProvingRequestType;
-      promise: Promise<any>;
-    }
-  >();
+export class ProverAgent implements ProverAgentApi, Traceable {
+  private inFlightPromises = new Map<string, InFlightPromise>();
   private runningPromise?: RunningPromise;
   private proofInputsDatabase = new InlineProofStore();

+  public readonly tracer: Tracer;
+
   constructor(
     /** The prover implementation to defer jobs to */
     private circuitProver: ServerCircuitProver,
@@ -38,8 +41,13 @@ export class ProverAgent implements ProverAgentApi {
     private maxConcurrency = 1,
     /** How long to wait between jobs */
     private pollIntervalMs = 100,
-
-
+    /** Telemetry client */
+    private telemetry: TelemetryClient = new NoopTelemetryClient(),
+    /** Logger */
+    private log = createLogger('prover-client:prover-agent'),
+  ) {
+    this.tracer = telemetry.getTracer('ProverAgent');
+  }

   setMaxConcurrency(maxConcurrency: number): Promise<void> {
     if (maxConcurrency < 1) {
@@ -74,49 +82,53 @@ export class ProverAgent implements ProverAgentApi {

     let lastPrint = process.hrtime.bigint();

-    this.runningPromise = new RunningPromise(
-
-
-
-
-      const now = process.hrtime.bigint();
-
-      if (now - lastPrint >= PRINT_THRESHOLD_NS) {
-        // only log if we're actually doing work
-        if (this.inFlightPromises.size > 0) {
-          const jobs = Array.from(this.inFlightPromises.values())
-            .map(job => `id=${job.id},type=${ProvingRequestType[job.type]}`)
-            .join(' ');
-          this.log.info(`Agent is running with ${this.inFlightPromises.size} in-flight jobs: ${jobs}`);
+    this.runningPromise = new RunningPromise(
+      async () => {
+        for (const jobId of this.inFlightPromises.keys()) {
+          await jobSource.heartbeat(jobId);
         }
-        lastPrint = now;
-      }

-
-
-      if
-
-
+        const now = process.hrtime.bigint();
+
+        if (now - lastPrint >= PRINT_THRESHOLD_NS) {
+          // only log if we're actually doing work
+          if (this.inFlightPromises.size > 0) {
+            const jobs = Array.from(this.inFlightPromises.values())
+              .map(job => `id=${job.id},type=${ProvingRequestType[job.type]}`)
+              .join(' ');
+            this.log.info(`Agent is running with ${this.inFlightPromises.size} in-flight jobs: ${jobs}`);
           }
+          lastPrint = now;
+        }

+        while (this.inFlightPromises.size < this.maxConcurrency) {
           try {
-            const
-
-
-
-
-
+            const job = await jobSource.getProvingJob();
+            if (!job) {
+              // job source is fully drained, sleep for a bit and try again
+              return;
+            }
+
+            try {
+              const promise = this.work(jobSource, job).finally(() => this.inFlightPromises.delete(job.id));
+              this.inFlightPromises.set(job.id, {
+                id: job.id,
+                type: job.type,
+                promise,
+              });
+            } catch (err) {
+              this.log.warn(
+                `Error processing job! type=${ProvingRequestType[job.type]}: ${err}. ${(err as Error).stack}`,
+              );
+            }
           } catch (err) {
-            this.log.
-              `Error processing job! type=${ProvingRequestType[job.type]}: ${err}. ${(err as Error).stack}`,
-            );
+            this.log.error(`Error fetching job`, err);
           }
-        } catch (err) {
-          this.log.error(`Error fetching job`, err);
         }
-      }
-
+      },
+      this.log,
+      this.pollIntervalMs,
+    );

     this.runningPromise.start();
     this.log.info(`Agent started with concurrency=${this.maxConcurrency}`);
@@ -133,9 +145,16 @@ export class ProverAgent implements ProverAgentApi {
     this.log.info('Agent stopped');
   }

+  @trackSpan('ProverAgent.work', (_jobSoure, job) => ({
+    [Attributes.PROVING_JOB_ID]: job.id,
+    [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[job.type],
+  }))
   private async work(jobSource: ProvingJobSource, job: ProvingJob): Promise<void> {
     try {
-      this.log.debug(`Picked up proving job
+      this.log.debug(`Picked up proving job ${job.id} ${ProvingRequestType[job.type]}`, {
+        jobId: job.id,
+        jobType: ProvingRequestType[job.type],
+      });
       const type = job.type;
       const inputs = await this.proofInputsDatabase.getProofInput(job.inputsUri);
       const [time, result] = await elapsed(this.getProof(inputs));
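The reworked agent loop above heartbeats every in-flight job and then tops the pool up to maxConcurrency by pulling from the job source. Restated as a generic, self-contained sketch; the types and names here are illustrative and are not the package's API:

    type Job = { id: string };

    interface JobSource {
      getProvingJob(): Promise<Job | undefined>;
      heartbeat(jobId: string): Promise<void>;
    }

    async function pollOnce(
      jobSource: JobSource,
      inFlight: Map<string, Promise<void>>,
      maxConcurrency: number,
      work: (job: Job) => Promise<void>,
    ): Promise<void> {
      // Keep the broker aware that in-flight jobs are still alive.
      for (const jobId of inFlight.keys()) {
        await jobSource.heartbeat(jobId);
      }
      // Top up to the concurrency limit; an empty source ends this poll iteration.
      while (inFlight.size < maxConcurrency) {
        const job = await jobSource.getProvingJob();
        if (!job) {
          return;
        }
        const promise = work(job).finally(() => inFlight.delete(job.id));
        inFlight.set(job.id, promise);
      }
    }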
package/src/prover-client/prover-client.ts

@@ -4,7 +4,6 @@ import {
   type EpochProver,
   type EpochProverManager,
   type ForkMerkleTreeOperations,
-  type ProverCache,
   type ProvingJobBroker,
   type ProvingJobConsumer,
   type ProvingJobProducer,
@@ -12,17 +11,14 @@ import {
 } from '@aztec/circuit-types/interfaces';
 import { Fr } from '@aztec/circuits.js';
 import { times } from '@aztec/foundation/collection';
-import {
+import { createLogger } from '@aztec/foundation/log';
 import { NativeACVMSimulator } from '@aztec/simulator';
 import { type TelemetryClient } from '@aztec/telemetry-client';

-import { join } from 'path';
-
 import { type ProverClientConfig } from '../config.js';
 import { ProvingOrchestrator } from '../orchestrator/orchestrator.js';
-import {
+import { BrokerCircuitProverFacade } from '../proving_broker/broker_prover_facade.js';
 import { InlineProofStore } from '../proving_broker/proof_store.js';
-import { InMemoryProverCache } from '../proving_broker/prover_cache/memory.js';
 import { ProvingAgent } from '../proving_broker/proving_agent.js';

 /** Manages proving of epochs by orchestrating the proving of individual blocks relying on a pool of prover agents. */
@@ -30,25 +26,22 @@ export class ProverClient implements EpochProverManager {
   private running = false;
   private agents: ProvingAgent[] = [];

-  private cacheDir?: string;
-
   private constructor(
     private config: ProverClientConfig,
     private worldState: ForkMerkleTreeOperations,
     private telemetry: TelemetryClient,
     private orchestratorClient: ProvingJobProducer,
     private agentClient?: ProvingJobConsumer,
-    private log =
+    private log = createLogger('prover-client:tx-prover'),
   ) {
     // TODO(palla/prover-node): Cache the paddingTx here, and not in each proving orchestrator,
     // so it can be reused across multiple ones and not recomputed every time.
-    this.cacheDir = this.config.cacheDir ? join(this.config.cacheDir, `tx_prover_${this.config.proverId}`) : undefined;
   }

-  public createEpochProver(
+  public createEpochProver(): EpochProver {
     return new ProvingOrchestrator(
       this.worldState,
-      new
+      new BrokerCircuitProverFacade(this.orchestratorClient),
       this.telemetry,
       this.config.proverId,
     );
|