@aztec/prover-client 0.67.0 → 0.67.1-devnet
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block_builder/light.d.ts +4 -3
- package/dest/block_builder/light.d.ts.map +1 -1
- package/dest/block_builder/light.js +23 -17
- package/dest/index.d.ts +0 -1
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -2
- package/dest/mocks/test_context.d.ts +3 -2
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +20 -9
- package/dest/orchestrator/block-building-helpers.d.ts +6 -2
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +18 -6
- package/dest/orchestrator/block-proving-state.d.ts +6 -5
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +16 -8
- package/dest/orchestrator/epoch-proving-state.d.ts +1 -1
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +3 -3
- package/dest/orchestrator/orchestrator.d.ts +9 -6
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +92 -56
- package/dest/prover-agent/memory-proving-queue.d.ts +2 -1
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -1
- package/dest/prover-agent/memory-proving-queue.js +240 -224
- package/dest/prover-agent/prover-agent.d.ts +11 -2
- package/dest/prover-agent/prover-agent.d.ts.map +1 -1
- package/dest/prover-agent/prover-agent.js +186 -159
- package/dest/prover-client/prover-client.d.ts +2 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +4 -7
- package/dest/proving_broker/{caching_broker_facade.d.ts → broker_prover_facade.d.ts} +4 -8
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -0
- package/dest/proving_broker/broker_prover_facade.js +107 -0
- package/dest/proving_broker/proving_agent.d.ts +4 -3
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +73 -64
- package/dest/proving_broker/proving_broker.d.ts +4 -3
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +403 -324
- package/dest/proving_broker/proving_job_controller.d.ts +2 -1
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +15 -14
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -2
- package/dest/test/mock_prover.d.ts +4 -5
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +3 -6
- package/package.json +14 -13
- package/src/block_builder/light.ts +23 -17
- package/src/index.ts +0 -1
- package/src/mocks/test_context.ts +29 -14
- package/src/orchestrator/block-building-helpers.ts +20 -6
- package/src/orchestrator/block-proving-state.ts +17 -6
- package/src/orchestrator/epoch-proving-state.ts +0 -2
- package/src/orchestrator/orchestrator.ts +106 -57
- package/src/prover-agent/memory-proving-queue.ts +8 -4
- package/src/prover-agent/prover-agent.ts +65 -46
- package/src/prover-client/prover-client.ts +3 -10
- package/src/proving_broker/{caching_broker_facade.ts → broker_prover_facade.ts} +35 -74
- package/src/proving_broker/proving_agent.ts +72 -76
- package/src/proving_broker/proving_broker.ts +114 -36
- package/src/proving_broker/proving_job_controller.ts +13 -12
- package/src/proving_broker/rpc.ts +0 -1
- package/src/test/mock_prover.ts +5 -4
- package/dest/proving_broker/caching_broker_facade.d.ts.map +0 -1
- package/dest/proving_broker/caching_broker_facade.js +0 -153
- package/dest/proving_broker/prover_cache/memory.d.ts +0 -9
- package/dest/proving_broker/prover_cache/memory.d.ts.map +0 -1
- package/dest/proving_broker/prover_cache/memory.js +0 -16
- package/src/proving_broker/prover_cache/memory.ts +0 -20

package/src/orchestrator/block-proving-state.ts

@@ -14,6 +14,7 @@ import {
   type RECURSIVE_PROOF_LENGTH,
   type RecursiveProof,
   type RootParityInput,
+  SpongeBlob,
   type VerificationKeyAsFields,
 } from '@aztec/circuits.js';
 import { type Tuple } from '@aztec/foundation/serialize';

@@ -44,12 +45,13 @@ export class BlockProvingState {
   public blockRootRollupStarted: boolean = false;
   public finalProof: Proof | undefined;
   public block: L2Block | undefined;
+  public spongeBlobState: SpongeBlob | undefined;
+  public totalNumTxs: number;
   private txs: TxProvingState[] = [];
   public error: string | undefined;

   constructor(
     public readonly index: number,
-    public readonly totalNumTxs: number,
     public readonly globalVariables: GlobalVariables,
     public readonly newL1ToL2Messages: Tuple<Fr, typeof NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP>,
     public readonly messageTreeSnapshot: AppendOnlyTreeSnapshot,

@@ -61,6 +63,7 @@ export class BlockProvingState {
     private readonly parentEpoch: EpochProvingState,
   ) {
     this.rootParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }).map(_ => undefined);
+    this.totalNumTxs = 0;
   }

   public get blockNumber() {

@@ -98,8 +101,21 @@ export class BlockProvingState {
     return [mergeLevel - 1n, thisIndex >> 1n, thisIndex & 1n];
   }

+  public startNewBlock(numTxs: number, numBlobFields: number) {
+    if (this.spongeBlobState) {
+      throw new Error(`Block ${this.blockNumber} already initalised.`);
+    }
+    // Initialise the sponge which will eventually absorb all tx effects to be added to the blob.
+    // Like l1 to l2 messages, we need to know beforehand how many effects will be absorbed.
+    this.spongeBlobState = SpongeBlob.init(numBlobFields);
+    this.totalNumTxs = numTxs;
+  }
+
   // Adds a transaction to the proving state, returns it's index
   public addNewTx(tx: TxProvingState) {
+    if (!this.spongeBlobState) {
+      throw new Error(`Invalid block proving state, call startNewBlock before adding transactions.`);
+    }
     this.txs.push(tx);
     return this.txs.length - 1;
   }

@@ -199,11 +215,6 @@ export class BlockProvingState {
     return this.rootParityInputs.findIndex(p => !p) === -1;
   }

-  // Returns true if we are still able to accept transactions, false otherwise
-  public isAcceptingTransactions() {
-    return this.totalNumTxs > this.txs.length;
-  }
-
   // Returns whether the proving state is still valid
   public verifyState() {
     return this.parentEpoch.verifyState();
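
Net effect of the block-proving-state.ts hunks above: the tx count leaves the constructor, and a block must now be sized explicitly via startNewBlock(numTxs, numBlobFields), which also seeds the blob sponge, before any transaction is accepted. A minimal standalone sketch of that guard pattern (not the Aztec implementation; SpongeBlob is stubbed as a plain counter):

// Illustrative sketch only; mirrors the startNewBlock/addNewTx guards added above.
class BlockStateSketch {
  public totalNumTxs = 0;
  private spongeFields: number | undefined; // stands in for spongeBlobState: SpongeBlob | undefined
  private txs: string[] = [];

  startNewBlock(numTxs: number, numBlobFields: number) {
    if (this.spongeFields !== undefined) {
      throw new Error('Block already initialised.');
    }
    this.spongeFields = numBlobFields; // real code: this.spongeBlobState = SpongeBlob.init(numBlobFields)
    this.totalNumTxs = numTxs;
  }

  addNewTx(txHash: string) {
    if (this.spongeFields === undefined) {
      throw new Error('Call startNewBlock before adding transactions.');
    }
    this.txs.push(txHash);
    return this.txs.length - 1; // index of the tx within the block
  }
}

const sketch = new BlockStateSketch();
sketch.startNewBlock(2, 128); // expect 2 txs, 128 blob fields
const index = sketch.addNewTx('0x01'); // 0; calling addNewTx before startNewBlock would throw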

package/src/orchestrator/epoch-proving-state.ts

@@ -97,7 +97,6 @@ export class EpochProvingState {
   // Adds a block to the proving state, returns its index
   // Will update the proving life cycle if this is the last block
   public startNewBlock(
-    numTxs: number,
     globalVariables: GlobalVariables,
     l1ToL2Messages: Fr[],
     messageTreeSnapshot: AppendOnlyTreeSnapshot,

@@ -110,7 +109,6 @@ export class EpochProvingState {
     const index = globalVariables.blockNumber.toNumber() - this.firstBlockNumber;
     const block = new BlockProvingState(
       index,
-      numTxs,
       globalVariables,
       padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
       messageTreeSnapshot,

package/src/orchestrator/orchestrator.ts

@@ -4,6 +4,7 @@ import {
   type ProcessedTx,
   type ServerCircuitProver,
   makeEmptyProcessedTx,
+  toNumBlobFields,
 } from '@aztec/circuit-types';
 import {
   type EpochProver,

@@ -16,13 +17,16 @@ import {
   AVM_PROOF_LENGTH_IN_FIELDS,
   AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS,
   type AppendOnlyTreeSnapshot,
+  BLOBS_PER_BLOCK,
   type BaseOrMergeRollupPublicInputs,
   BaseParityInputs,
   type BaseRollupHints,
+  BlobPublicInputs,
   type BlockHeader,
   type BlockRootOrBlockMergePublicInputs,
   BlockRootRollupInputs,
   EmptyBlockRootRollupInputs,
+  FIELDS_PER_BLOB,
   Fr,
   type GlobalVariables,
   L1_TO_L2_MSG_SUBTREE_HEIGHT,

@@ -40,7 +44,9 @@ import {
   makeEmptyRecursiveProof,
 } from '@aztec/circuits.js';
 import { makeTuple } from '@aztec/foundation/array';
+import { Blob } from '@aztec/foundation/blob';
 import { maxBy, padArrayEnd } from '@aztec/foundation/collection';
+import { sha256ToField } from '@aztec/foundation/crypto';
 import { AbortError } from '@aztec/foundation/error';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';

@@ -139,17 +145,14 @@ export class ProvingOrchestrator implements EpochProver {

   /**
    * Starts off a new block
-   * @param numTxs - The total number of transactions in the block.
    * @param globalVariables - The global variables for the block
    * @param l1ToL2Messages - The l1 to l2 messages for the block
-   * @param verificationKeys - The private kernel verification keys
    * @returns A proving ticket, containing a promise notifying of proving completion
    */
-  @trackSpan('ProvingOrchestrator.startNewBlock',
-    [Attributes.BLOCK_SIZE]: numTxs,
+  @trackSpan('ProvingOrchestrator.startNewBlock', globalVariables => ({
     [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber(),
   }))
-  public async startNewBlock(
+  public async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]) {
     if (!this.provingState) {
       throw new Error(`Invalid proving state, call startNewEpoch before starting a block`);
     }

@@ -158,12 +161,8 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Epoch not accepting further blocks`);
     }

-    if (!Number.isInteger(numTxs) || numTxs < 2) {
-      throw new Error(`Invalid number of txs for block (got ${numTxs})`);
-    }
-
     logger.info(
-      `Starting block ${globalVariables.blockNumber.toNumber()} for slot ${globalVariables.slotNumber.toNumber()}
+      `Starting block ${globalVariables.blockNumber.toNumber()} for slot ${globalVariables.slotNumber.toNumber()}`,
     );

     // Fork world state at the end of the immediately previous block

@@ -210,7 +209,6 @@ export class ProvingOrchestrator implements EpochProver {
     );

     const blockProvingState = this.provingState!.startNewBlock(
-      numTxs,
       globalVariables,
       l1ToL2MessagesPadded,
       messageTreeSnapshot,
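
These three hunks drop numTxs from the orchestrator's startNewBlock path, and the old "at least 2 txs" validation does not reappear verbatim. Instead, the addTxs hunk below sizes the block when the batch of transactions arrives, preserving the two-transaction minimum by padding rather than by rejecting the call. A hedged sketch of that sizing logic, reusing the identifiers from the next hunk:

// Illustrative only; txs, provingState and toNumBlobFields are the names used in the addTxs hunk below.
const numTxs = Math.max(2, txs.length);     // a single real tx still yields a 2-tx block (padded with an empty tx)
const numBlobFields = toNumBlobFields(txs); // total tx-effect fields the blob sponge must absorb
provingState.startNewBlock(numTxs, numBlobFields);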

@@ -228,47 +226,56 @@ export class ProvingOrchestrator implements EpochProver {
   }

   /**
-   * The interface to add
-   * @param
+   * The interface to add simulated transactions to the scheduler
+   * @param txs - The transactions to be proven
    */
-  @trackSpan('ProvingOrchestrator.
-    [Attributes.
+  @trackSpan('ProvingOrchestrator.addTxs', txs => ({
+    [Attributes.BLOCK_TXS_COUNT]: txs.length,
   }))
-  public async
-
-
-
-
-
-
-
-
-
-  }
+  public async addTxs(txs: ProcessedTx[]): Promise<void> {
+    if (!txs.length) {
+      // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
+      // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
+      logger.warn(`Provided no txs to orchestrator addTxs.`);
+      return;
+    }
+    const blockNumber = txs[0].constants.globalVariables.blockNumber.toNumber();
+    const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber!);
+    if (!provingState) {
+      throw new Error(`Block proving state for ${blockNumber} not found`);
+    }

-
-
-  }
+    const numBlobFields = toNumBlobFields(txs);
+    provingState.startNewBlock(Math.max(2, txs.length), numBlobFields);

-
+    logger.info(
+      `Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState?.blockNumber}`,
+    );
+    for (const tx of txs) {
+      try {
+        if (!provingState.verifyState()) {
+          throw new Error(`Invalid proving state when adding a tx`);
+        }

-
+        validateTx(tx);

-
-        logger.warn(`Ignoring empty transaction ${tx.hash} - it will not be added to this block`);
-        return;
-      }
+        logger.info(`Received transaction: ${tx.hash}`);

-
-
+        if (tx.isEmpty) {
+          logger.warn(`Ignoring empty transaction ${tx.hash} - it will not be added to this block`);
+          continue;
+        }

-
-
+        const [hints, treeSnapshots] = await this.prepareTransaction(tx, provingState);
+        this.enqueueFirstProofs(hints, treeSnapshots, tx, provingState);
+      } catch (err: any) {
+        throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
+          cause: err,
+        });
       }
-      }
-
-
-    });
+    }
+    if (provingState.transactionsReceived === provingState.totalNumTxs) {
+      logger.verbose(`All transactions received for block ${provingState.globalVariables.blockNumber}.`);
     }
   }

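
Read together with the startNewBlock hunks above, the caller-facing flow becomes: open the block with only its global variables and L1-to-L2 messages, hand over the whole batch of processed txs (which sizes the block and its blob sponge), then complete the block. A hedged usage sketch; the setBlockCompleted signature is inferred from the hunks that follow, and the orchestrator, globalVariables, l1ToL2Messages and processedTxs variables are assumed to exist:

await orchestrator.startNewBlock(globalVariables, l1ToL2Messages); // no tx count any more
await orchestrator.addTxs(processedTxs);                           // sizes the block: max(2, txs.length) txs
await orchestrator.setBlockCompleted(globalVariables.blockNumber.toNumber());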

@@ -285,6 +292,12 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Block proving state for ${blockNumber} not found`);
     }

+    if (!provingState.spongeBlobState) {
+      // If we are completing an empty block, initialise the provingState.
+      // We will have 2 padding txs, and => no blob fields.
+      provingState.startNewBlock(2, 0);
+    }
+
     if (!provingState.verifyState()) {
       throw new Error(`Block proving failed: ${provingState.error}`);
     }

@@ -326,7 +339,7 @@ export class ProvingOrchestrator implements EpochProver {
     }

     // And build the block header
-    logger.verbose(`Block ${
+    logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
     await this.buildBlock(provingState, expectedHeader);

     // If the proofs were faster than the block building, then we need to try the block root rollup again here
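
One consequence spelled out by the two hunks above: an empty block no longer needs an addTxs call at all, because setBlockCompleted initialises the proving state itself with two padding txs and zero blob fields. Hedged sketch, same assumptions as the previous example:

await orchestrator.startNewBlock(globalVariables, /* l1ToL2Messages */ []);
await orchestrator.setBlockCompleted(globalVariables.blockNumber.toNumber()); // runs startNewBlock(2, 0) internally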

@@ -435,14 +448,6 @@ export class ProvingOrchestrator implements EpochProver {
     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const l2Block = new L2Block(newArchive, header, body);

-    if (!l2Block.body.getTxsEffectsHash().equals(header.contentCommitment.txsEffectsHash)) {
-      throw new Error(
-        `Txs effects hash mismatch, ${l2Block.body
-          .getTxsEffectsHash()
-          .toString('hex')} == ${header.contentCommitment.txsEffectsHash.toString('hex')} `,
-      );
-    }
-
     await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);

     logger.verbose(`Orchestrator finalised block ${l2Block.number}`);

@@ -566,6 +571,22 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

+  /**
+   * Collect all new nullifiers, commitments, and contracts from all txs in a block
+   * @returns The array of non empty tx effects.
+   */
+  private extractTxEffects(provingState: BlockProvingState) {
+    // Note: this check should ensure that we have all txs and their effects ready.
+    if (!provingState.finalRootParityInput?.publicInputs.shaRoot) {
+      throw new Error(`Invalid proving state, a block must be ready to be proven before its effects can be extracted.`);
+    }
+    const nonEmptyTxEffects = provingState.allTxs
+      .map(txProvingState => txProvingState.processedTx.txEffect)
+      .filter(txEffect => !txEffect.isEmpty());
+
+    return nonEmptyTxEffects;
+  }
+
   /**
    * Returns the proof for the current epoch.
    */

@@ -692,7 +713,7 @@ export class ProvingOrchestrator implements EpochProver {
     provingState: BlockProvingState | undefined,
     tx: ProcessedTx,
   ): Promise<[BaseRollupHints, TreeSnapshots] | undefined> {
-    if (!provingState?.verifyState()) {
+    if (!provingState?.verifyState() || !provingState.spongeBlobState) {
       logger.debug('Not preparing base rollup inputs, state invalid');
       return;
     }

@@ -701,7 +722,9 @@ export class ProvingOrchestrator implements EpochProver {

     // We build the base rollup inputs using a mock proof and verification key.
     // These will be overwritten later once we have proven the tube circuit and any public kernels
-    const [ms, hints] = await elapsed(
+    const [ms, hints] = await elapsed(
+      buildBaseRollupHints(tx, provingState.globalVariables, db, provingState.spongeBlobState),
+    );

     if (!tx.isEmpty) {
       this.metrics.recordBaseRollupInputs(ms);

@@ -774,7 +797,7 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  // Enqueues the
+  // Enqueues the tube circuit for a given transaction index
   // Once completed, will enqueue the next circuit, either a public kernel or the base rollup
   private enqueueTube(provingState: BlockProvingState, txIndex: number) {
     if (!provingState?.verifyState()) {

@@ -856,9 +879,14 @@ export class ProvingOrchestrator implements EpochProver {
     provingState.blockRootRollupStarted = true;
     const mergeInputData = provingState.getMergeInputs(0);
     const rootParityInput = provingState.finalRootParityInput!;
+    const blobFields = this.extractTxEffects(provingState)
+      .map(tx => tx.toBlobFields())
+      .flat();
+    const blobs = Blob.getBlobs(blobFields);
+    const blobsHash = sha256ToField(blobs.map(b => b.getEthVersionedBlobHash()));

     logger.debug(
-      `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs
+      `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs and ${blobs.length} blobs.`,
     );

     const previousRollupData: BlockRootRollupInputs['previousRollupData'] = makeTuple(2, i =>

@@ -879,6 +907,13 @@ export class ProvingOrchestrator implements EpochProver {
       newArchiveSiblingPath: provingState.archiveTreeRootSiblingPath,
       previousBlockHash: provingState.previousBlockHash,
       proverId: this.proverId,
+      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
+      blobCommitments: padArrayEnd(
+        blobs.map(b => b.commitmentToFields()),
+        [Fr.ZERO, Fr.ZERO],
+        BLOBS_PER_BLOCK,
+      ),
+      blobsHash: blobsHash,
     });

     this.deferredProving(
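
The two hunks above are where the blob data handed to the block root rollup is derived. The pipeline, restated as a hedged sketch using the same values and imports the diff adds (Blob, sha256ToField, padArrayEnd, FIELDS_PER_BLOB, BLOBS_PER_BLOCK):

// 1. Flatten the block's non-empty tx effects into field elements.
const blobFields = this.extractTxEffects(provingState).map(txEffect => txEffect.toBlobFields()).flat();
// 2. Pack the fields into blobs and hash their versioned hashes into a single field.
const blobs = Blob.getBlobs(blobFields);
const blobsHash = sha256ToField(blobs.map(b => b.getEthVersionedBlobHash()));
// 3. Pad to the circuit's fixed capacity: BLOBS_PER_BLOCK blobs of FIELDS_PER_BLOB fields each.
const paddedFields = padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK);
const paddedCommitments = padArrayEnd(blobs.map(b => b.commitmentToFields()), [Fr.ZERO, Fr.ZERO], BLOBS_PER_BLOCK);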

@@ -903,6 +938,17 @@ export class ProvingOrchestrator implements EpochProver {

     provingState.blockRootRollupPublicInputs = result.inputs;
     provingState.finalProof = result.proof.binaryProof;
+    const blobOutputs = result.inputs.blobPublicInputs[0];
+    blobOutputs.inner.forEach((blobOutput, i) => {
+      if (!blobOutput.isEmpty() && !blobOutput.equals(BlobPublicInputs.fromBlob(blobs[i]))) {
+        throw new Error(
+          `Rollup circuits produced mismatched blob evaluation:
+          z: ${blobOutput.z} == ${blobs[i].challengeZ},
+          y: ${blobOutput.y.toString(16)} == ${blobs[i].evaluationY.toString('hex')},
+          C: ${blobOutput.kzgCommitment} == ${blobs[i].commitmentToFields()}`,
+        );
+      }
+    });

     logger.debug(`Completed proof for block root rollup for ${provingState.block?.number}`);
     // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)

@@ -1196,10 +1242,13 @@ export class ProvingOrchestrator implements EpochProver {
       return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
     } catch (err) {
       if (process.env.AVM_PROVING_STRICT) {
+        logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
         throw err;
       } else {
         logger.warn(
-          `Error thrown when proving AVM circuit
+          `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Faking AVM proof and carrying on. ${inspect(
+            err,
+          )}.`,
         );
         return {
           proof: makeEmptyRecursiveProof(AVM_PROOF_LENGTH_IN_FIELDS),

package/src/prover-agent/memory-proving-queue.ts

@@ -35,7 +35,7 @@ import { AbortError, TimeoutError } from '@aztec/foundation/error';
 import { createLogger } from '@aztec/foundation/log';
 import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
 import { PriorityMemoryQueue } from '@aztec/foundation/queue';
-import { type TelemetryClient } from '@aztec/telemetry-client';
+import { type TelemetryClient, type Tracer, trackSpan } from '@aztec/telemetry-client';

 import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js';
 import { ProvingQueueMetrics } from './queue_metrics.js';

@@ -65,6 +65,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
   private runningPromise: RunningPromise;
   private metrics: ProvingQueueMetrics;

+  public readonly tracer: Tracer;
+
   constructor(
     client: TelemetryClient,
     /** Timeout the job if an agent doesn't report back in this time */

@@ -75,8 +77,9 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
     private timeSource = defaultTimeSource,
     private proofStore: ProofStore = new InlineProofStore(),
   ) {
+    this.tracer = client.getTracer('MemoryProvingQueue');
     this.metrics = new ProvingQueueMetrics(client, 'MemoryProvingQueue');
-    this.runningPromise = new RunningPromise(this.poll, pollingIntervalMs);
+    this.runningPromise = new RunningPromise(this.poll.bind(this), this.log, pollingIntervalMs);
   }

   public start() {

@@ -202,7 +205,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
     return this.jobsInProgress.has(jobId);
   }

-
+  @trackSpan('MemoryProvingQueue.poll')
+  private poll() {
     const now = this.timeSource();
     this.metrics.recordQueueSize(this.queue.length());

@@ -220,7 +224,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
         this.queue.put(job);
       }
     }
-  }
+  }

   private async enqueue<T extends ProvingRequestType>(
     type: T,
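
The queue changes above (and the ProverAgent changes below) follow a single pattern from @aztec/telemetry-client: @trackSpan reads this.tracer, so a class adopting it obtains a Tracer from the TelemetryClient in its constructor and declares Traceable. A hedged, self-contained sketch of that shape (the class name is made up; the imports are the ones this diff itself adds):

import { type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';

class PollingLoopSketch implements Traceable {
  public readonly tracer: Tracer;

  constructor(client: TelemetryClient) {
    // Same wiring as MemoryProvingQueue above and ProverAgent below.
    this.tracer = client.getTracer('PollingLoopSketch');
  }

  @trackSpan('PollingLoopSketch.poll')
  private poll() {
    // periodic work, now recorded as a span
  }
}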

package/src/prover-agent/prover-agent.ts

@@ -11,26 +11,29 @@ import {
 import { createLogger } from '@aztec/foundation/log';
 import { RunningPromise } from '@aztec/foundation/running-promise';
 import { elapsed } from '@aztec/foundation/timer';
+import { Attributes, type TelemetryClient, type Traceable, type Tracer, trackSpan } from '@aztec/telemetry-client';
+import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

 import { InlineProofStore } from '../proving_broker/proof_store.js';

 const PRINT_THRESHOLD_NS = 6e10; // 60 seconds

+type InFlightPromise = {
+  id: string;
+  type: ProvingRequestType;
+  promise: Promise<any>;
+};
+
 /**
  * A helper class that encapsulates a circuit prover and connects it to a job source.
  */
-export class ProverAgent implements ProverAgentApi {
-  private inFlightPromises = new Map<
-    string,
-    {
-      id: string;
-      type: ProvingRequestType;
-      promise: Promise<any>;
-    }
-  >();
+export class ProverAgent implements ProverAgentApi, Traceable {
+  private inFlightPromises = new Map<string, InFlightPromise>();
   private runningPromise?: RunningPromise;
   private proofInputsDatabase = new InlineProofStore();

+  public readonly tracer: Tracer;
+
   constructor(
     /** The prover implementation to defer jobs to */
     private circuitProver: ServerCircuitProver,

@@ -38,8 +41,13 @@ export class ProverAgent implements ProverAgentApi {
     private maxConcurrency = 1,
     /** How long to wait between jobs */
     private pollIntervalMs = 100,
+    /** Telemetry client */
+    private telemetry: TelemetryClient = new NoopTelemetryClient(),
+    /** Logger */
     private log = createLogger('prover-client:prover-agent'),
-  ) {
+  ) {
+    this.tracer = telemetry.getTracer('ProverAgent');
+  }

   setMaxConcurrency(maxConcurrency: number): Promise<void> {
     if (maxConcurrency < 1) {
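
For callers, the visible change from the two hunks above is one new optional constructor parameter: a TelemetryClient, inserted before the logger and defaulting to a NoopTelemetryClient. A hedged construction sketch (circuitProver and client are assumed to exist in the caller):

// Defaults: maxConcurrency = 1, pollIntervalMs = 100, telemetry = new NoopTelemetryClient().
const agent = new ProverAgent(circuitProver);
// With telemetry enabled: four concurrent jobs, 100 ms polling, spans exported via client.
const tracedAgent = new ProverAgent(circuitProver, 4, 100, client);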

@@ -74,49 +82,53 @@ export class ProverAgent implements ProverAgentApi {

     let lastPrint = process.hrtime.bigint();

-    this.runningPromise = new RunningPromise(
-
-
-
-
-      const now = process.hrtime.bigint();
-
-      if (now - lastPrint >= PRINT_THRESHOLD_NS) {
-        // only log if we're actually doing work
-        if (this.inFlightPromises.size > 0) {
-          const jobs = Array.from(this.inFlightPromises.values())
-            .map(job => `id=${job.id},type=${ProvingRequestType[job.type]}`)
-            .join(' ');
-          this.log.info(`Agent is running with ${this.inFlightPromises.size} in-flight jobs: ${jobs}`);
+    this.runningPromise = new RunningPromise(
+      async () => {
+        for (const jobId of this.inFlightPromises.keys()) {
+          await jobSource.heartbeat(jobId);
         }
-        lastPrint = now;
-      }

-
-
-
-      if
-
-
+        const now = process.hrtime.bigint();
+
+        if (now - lastPrint >= PRINT_THRESHOLD_NS) {
+          // only log if we're actually doing work
+          if (this.inFlightPromises.size > 0) {
+            const jobs = Array.from(this.inFlightPromises.values())
+              .map(job => `id=${job.id},type=${ProvingRequestType[job.type]}`)
+              .join(' ');
+            this.log.info(`Agent is running with ${this.inFlightPromises.size} in-flight jobs: ${jobs}`);
           }
+          lastPrint = now;
+        }

+        while (this.inFlightPromises.size < this.maxConcurrency) {
           try {
-            const
-
-
-
-
-
+            const job = await jobSource.getProvingJob();
+            if (!job) {
+              // job source is fully drained, sleep for a bit and try again
+              return;
+            }
+
+            try {
+              const promise = this.work(jobSource, job).finally(() => this.inFlightPromises.delete(job.id));
+              this.inFlightPromises.set(job.id, {
+                id: job.id,
+                type: job.type,
+                promise,
+              });
+            } catch (err) {
+              this.log.warn(
+                `Error processing job! type=${ProvingRequestType[job.type]}: ${err}. ${(err as Error).stack}`,
+              );
+            }
           } catch (err) {
-            this.log.
-              `Error processing job! type=${ProvingRequestType[job.type]}: ${err}. ${(err as Error).stack}`,
-            );
+            this.log.error(`Error fetching job`, err);
           }
-          } catch (err) {
-            this.log.error(`Error fetching job`, err);
           }
-        }
-
+        },
+      this.log,
+      this.pollIntervalMs,
+    );

     this.runningPromise.start();
     this.log.info(`Agent started with concurrency=${this.maxConcurrency}`);
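
Both this hunk and the MemoryProvingQueue constructor hunk earlier settle on the same RunningPromise construction: the polling callback first, then a logger, then the interval in milliseconds. A hedged sketch of that wiring in isolation (createLogger and RunningPromise are the @aztec/foundation imports already present in these files; stop() is assumed and is not shown in this diff):

const log = createLogger('prover-client:prover-agent');
const loop = new RunningPromise(
  async () => {
    // heartbeat in-flight jobs, then pull new work up to maxConcurrency (see the hunk above)
  },
  log,
  /* pollIntervalMs */ 100,
);
loop.start();
// later, on shutdown: await loop.stop();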

@@ -133,9 +145,16 @@ export class ProverAgent implements ProverAgentApi {
     this.log.info('Agent stopped');
   }

+  @trackSpan('ProverAgent.work', (_jobSoure, job) => ({
+    [Attributes.PROVING_JOB_ID]: job.id,
+    [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[job.type],
+  }))
   private async work(jobSource: ProvingJobSource, job: ProvingJob): Promise<void> {
     try {
-      this.log.debug(`Picked up proving job
+      this.log.debug(`Picked up proving job ${job.id} ${ProvingRequestType[job.type]}`, {
+        jobId: job.id,
+        jobType: ProvingRequestType[job.type],
+      });
       const type = job.type;
       const inputs = await this.proofInputsDatabase.getProofInput(job.inputsUri);
       const [time, result] = await elapsed(this.getProof(inputs));

package/src/prover-client/prover-client.ts

@@ -4,7 +4,6 @@ import {
   type EpochProver,
   type EpochProverManager,
   type ForkMerkleTreeOperations,
-  type ProverCache,
   type ProvingJobBroker,
   type ProvingJobConsumer,
   type ProvingJobProducer,

@@ -16,13 +15,10 @@ import { createLogger } from '@aztec/foundation/log';
 import { NativeACVMSimulator } from '@aztec/simulator';
 import { type TelemetryClient } from '@aztec/telemetry-client';

-import { join } from 'path';
-
 import { type ProverClientConfig } from '../config.js';
 import { ProvingOrchestrator } from '../orchestrator/orchestrator.js';
-import {
+import { BrokerCircuitProverFacade } from '../proving_broker/broker_prover_facade.js';
 import { InlineProofStore } from '../proving_broker/proof_store.js';
-import { InMemoryProverCache } from '../proving_broker/prover_cache/memory.js';
 import { ProvingAgent } from '../proving_broker/proving_agent.js';

 /** Manages proving of epochs by orchestrating the proving of individual blocks relying on a pool of prover agents. */

@@ -30,8 +26,6 @@ export class ProverClient implements EpochProverManager {
   private running = false;
   private agents: ProvingAgent[] = [];

-  private cacheDir?: string;
-
   private constructor(
     private config: ProverClientConfig,
     private worldState: ForkMerkleTreeOperations,

@@ -42,13 +36,12 @@ export class ProverClient implements EpochProverManager {
   ) {
     // TODO(palla/prover-node): Cache the paddingTx here, and not in each proving orchestrator,
     // so it can be reused across multiple ones and not recomputed every time.
-    this.cacheDir = this.config.cacheDir ? join(this.config.cacheDir, `tx_prover_${this.config.proverId}`) : undefined;
   }

-  public createEpochProver(
+  public createEpochProver(): EpochProver {
     return new ProvingOrchestrator(
       this.worldState,
-      new
+      new BrokerCircuitProverFacade(this.orchestratorClient),
       this.telemetry,
       this.config.proverId,
     );