@aztec/prover-client 0.67.1 → 0.68.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block_builder/light.d.ts +4 -3
- package/dest/block_builder/light.d.ts.map +1 -1
- package/dest/block_builder/light.js +23 -16
- package/dest/index.d.ts +0 -1
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -2
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +3 -3
- package/dest/mocks/test_context.d.ts +3 -2
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +22 -11
- package/dest/orchestrator/block-building-helpers.d.ts +8 -2
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +20 -7
- package/dest/orchestrator/block-proving-state.d.ts +8 -5
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +16 -7
- package/dest/orchestrator/epoch-proving-state.d.ts +3 -2
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +3 -3
- package/dest/orchestrator/orchestrator.d.ts +10 -7
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +94 -56
- package/dest/orchestrator/tx-proving-state.d.ts +2 -1
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +3 -2
- package/dest/prover-agent/memory-proving-queue.d.ts +4 -2
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -1
- package/dest/prover-agent/memory-proving-queue.js +240 -224
- package/dest/prover-agent/prover-agent.d.ts +11 -2
- package/dest/prover-agent/prover-agent.d.ts.map +1 -1
- package/dest/prover-agent/prover-agent.js +186 -159
- package/dest/prover-client/prover-client.d.ts +2 -3
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +4 -7
- package/dest/proving_broker/{caching_broker_facade.d.ts → broker_prover_facade.d.ts} +6 -9
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -0
- package/dest/proving_broker/broker_prover_facade.js +107 -0
- package/dest/proving_broker/proving_agent.d.ts +4 -3
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +73 -64
- package/dest/proving_broker/proving_broker.d.ts +4 -3
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +403 -324
- package/dest/proving_broker/proving_job_controller.d.ts +2 -1
- package/dest/proving_broker/proving_job_controller.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +15 -14
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +1 -2
- package/dest/test/mock_prover.d.ts +6 -6
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +3 -6
- package/package.json +16 -15
- package/src/block_builder/light.ts +23 -16
- package/src/index.ts +0 -1
- package/src/mocks/fixtures.ts +2 -2
- package/src/mocks/test_context.ts +31 -16
- package/src/orchestrator/block-building-helpers.ts +34 -18
- package/src/orchestrator/block-proving-state.ts +18 -8
- package/src/orchestrator/epoch-proving-state.ts +1 -4
- package/src/orchestrator/orchestrator.ts +113 -62
- package/src/orchestrator/tx-proving-state.ts +6 -4
- package/src/prover-agent/memory-proving-queue.ts +21 -15
- package/src/prover-agent/prover-agent.ts +65 -46
- package/src/prover-client/prover-client.ts +3 -10
- package/src/proving_broker/{caching_broker_facade.ts → broker_prover_facade.ts} +46 -83
- package/src/proving_broker/proving_agent.ts +72 -76
- package/src/proving_broker/proving_broker.ts +114 -36
- package/src/proving_broker/proving_job_controller.ts +13 -12
- package/src/proving_broker/rpc.ts +0 -1
- package/src/test/mock_prover.ts +17 -14
- package/dest/proving_broker/caching_broker_facade.d.ts.map +0 -1
- package/dest/proving_broker/caching_broker_facade.js +0 -153
- package/dest/proving_broker/prover_cache/memory.d.ts +0 -9
- package/dest/proving_broker/prover_cache/memory.d.ts.map +0 -1
- package/dest/proving_broker/prover_cache/memory.js +0 -16
- package/src/proving_broker/prover_cache/memory.ts +0 -20
package/src/orchestrator/block-building-helpers.ts

@@ -9,11 +9,7 @@ import {
 import {
   ARCHIVE_HEIGHT,
   AppendOnlyTreeSnapshot,
-  type BaseOrMergeRollupPublicInputs,
   BlockHeader,
-  BlockMergeRollupInputs,
-  type BlockRootOrBlockMergePublicInputs,
-  ConstantRollupData,
   ContentCommitment,
   Fr,
   type GlobalVariables,
@@ -21,7 +17,6 @@ import {
   MAX_NULLIFIERS_PER_TX,
   MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
   MembershipWitness,
-  MergeRollupInputs,
   MerkleTreeCalculator,
   type NESTED_RECURSIVE_PROOF_LENGTH,
   NOTE_HASH_SUBTREE_HEIGHT,
@@ -34,26 +29,35 @@ import {
   PUBLIC_DATA_TREE_HEIGHT,
   type ParityPublicInputs,
   PartialStateReference,
-  PreviousRollupBlockData,
-  PreviousRollupData,
-  PrivateBaseRollupHints,
-  PrivateBaseStateDiffHints,
-  PublicBaseRollupHints,
-  PublicBaseStateDiffHints,
   PublicDataHint,
   PublicDataTreeLeaf,
   PublicDataTreeLeafPreimage,
   type RecursiveProof,
-  RootRollupInputs,
   StateReference,
   VK_TREE_HEIGHT,
   type VerificationKeyAsFields,
 } from '@aztec/circuits.js';
+import { type SpongeBlob } from '@aztec/circuits.js/blobs';
+import {
+  type BaseOrMergeRollupPublicInputs,
+  BlockMergeRollupInputs,
+  type BlockRootOrBlockMergePublicInputs,
+  ConstantRollupData,
+  MergeRollupInputs,
+  PreviousRollupBlockData,
+  PreviousRollupData,
+  PrivateBaseRollupHints,
+  PrivateBaseStateDiffHints,
+  PublicBaseRollupHints,
+  PublicBaseStateDiffHints,
+  RootRollupInputs,
+} from '@aztec/circuits.js/rollup';
 import { makeTuple } from '@aztec/foundation/array';
+import { Blob } from '@aztec/foundation/blob';
 import { padArrayEnd } from '@aztec/foundation/collection';
 import { sha256Trunc } from '@aztec/foundation/crypto';
 import { type Logger } from '@aztec/foundation/log';
-import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+import { type Tuple, assertLength, serializeToBuffer, toFriendlyJSON } from '@aztec/foundation/serialize';
 import { computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
 import { getVKIndex, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types';
 import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
@@ -76,6 +80,7 @@ export async function buildBaseRollupHints(
   tx: ProcessedTx,
   globalVariables: GlobalVariables,
   db: MerkleTreeWriteOperations,
+  startSpongeBlob: SpongeBlob,
 ) {
   // Get trees info before any changes hit
   const constants = await getConstantRollupData(globalVariables, db);
@@ -132,6 +137,10 @@ export async function buildBaseRollupHints(
     i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
   );

+  // Append new data to startSpongeBlob
+  const inputSpongeBlob = startSpongeBlob.clone();
+  startSpongeBlob.absorb(tx.txEffect.toBlobFields());
+
   if (tx.avmProvingRequest) {
     // Build public base rollup hints
     const stateDiffHints = PublicBaseStateDiffHints.from({
@@ -176,6 +185,7 @@ export async function buildBaseRollupHints(

     return PublicBaseRollupHints.from({
       start,
+      startSpongeBlob: inputSpongeBlob,
       stateDiffHints,
       archiveRootMembershipWitness,
       constants,
@@ -235,6 +245,7 @@ export async function buildBaseRollupHints(

     return PrivateBaseRollupHints.from({
       start,
+      startSpongeBlob: inputSpongeBlob,
       stateDiffHints,
       feePayerFeeJuiceBalanceReadHint: feePayerFeeJuiceBalanceReadHint,
       archiveRootMembershipWitness,
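For orientation, here is a hedged sketch of how a caller threads the new `startSpongeBlob` argument through a block's transactions, based only on the hunks above: one sponge is created per block, and `buildBaseRollupHints` clones it into the current tx's hints before absorbing that tx's effects into the shared instance. The wrapper function and its import paths are illustrative assumptions, not code from the package.

```ts
// Illustrative sketch only. SpongeBlob.init, toNumBlobFields and the 4-argument
// buildBaseRollupHints signature come from this diff; everything else is assumed.
import { toNumBlobFields, type MerkleTreeWriteOperations, type ProcessedTx } from '@aztec/circuit-types';
import { type GlobalVariables } from '@aztec/circuits.js';
import { SpongeBlob } from '@aztec/circuits.js/blobs';
import { buildBaseRollupHints } from './block-building-helpers.js';

async function buildHintsForBlock(txs: ProcessedTx[], globalVariables: GlobalVariables, db: MerkleTreeWriteOperations) {
  // One sponge per block, sized up-front for every blob field the block will absorb.
  const spongeBlob = SpongeBlob.init(toNumBlobFields(txs));
  const hints = [];
  for (const tx of txs) {
    // buildBaseRollupHints clones the sponge into this tx's hints as startSpongeBlob,
    // then absorbs tx.txEffect.toBlobFields() into the shared instance ready for the next tx.
    hints.push(await buildBaseRollupHints(tx, globalVariables, db, spongeBlob));
  }
  return hints;
}
```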
@@ -300,11 +311,10 @@ export function buildHeaderFromCircuitOutputs(
   updatedL1ToL2TreeSnapshot: AppendOnlyTreeSnapshot,
   logger?: Logger,
 ) {
+  const blobsHash = rootRollupOutputs.blobPublicInputs[0].getBlobsHash();
   const contentCommitment = new ContentCommitment(
     new Fr(previousMergeData[0].numTxs + previousMergeData[1].numTxs),
-
-      Buffer.concat([previousMergeData[0].txsEffectsHash.toBuffer(), previousMergeData[1].txsEffectsHash.toBuffer()]),
-    ),
+    blobsHash,
     parityPublicInputs.shaRoot.toBuffer(),
     sha256Trunc(Buffer.concat([previousMergeData[0].outHash.toBuffer(), previousMergeData[1].outHash.toBuffer()])),
   );
@@ -359,10 +369,11 @@ export async function buildHeaderAndBodyFromTxs(
   const parityShaRoot = new MerkleTreeCalculator(parityHeight, Fr.ZERO.toBuffer(), hasher).computeTreeRoot(
     l1ToL2Messages.map(msg => msg.toBuffer()),
   );
+  const blobsHash = getBlobsHashFromBlobs(Blob.getBlobs(body.toBlobFields()));

   const contentCommitment = new ContentCommitment(
     new Fr(body.numberOfTxsIncludingPadded),
-
+    blobsHash,
     parityShaRoot,
     outHash,
   );
@@ -375,6 +386,11 @@ export async function buildHeaderAndBodyFromTxs(
   return { header, body };
 }

+export function getBlobsHashFromBlobs(inputs: Blob[]): Buffer {
+  const blobHashes = serializeToBuffer(inputs.map(b => b.getEthVersionedBlobHash()));
+  return sha256Trunc(serializeToBuffer(blobHashes));
+}
+
 // Validate that the roots of all local trees match the output of the root circuit simulation
 export async function validateBlockRootOutput(
   blockRootOutput: BlockRootOrBlockMergePublicInputs,
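A brief usage sketch for the new `getBlobsHashFromBlobs` helper introduced above, which now feeds the header's content commitment in place of the old txs-effects hash; the wrapper function below is hypothetical.

```ts
// Sketch: deriving a block's blobsHash from its flattened tx-effect fields.
// Blob.getBlobs and getBlobsHashFromBlobs appear in this diff; computeBlobsHash is hypothetical.
import { type Fr } from '@aztec/circuits.js';
import { Blob } from '@aztec/foundation/blob';
import { getBlobsHashFromBlobs } from './block-building-helpers.js';

function computeBlobsHash(blobFields: Fr[]): Buffer {
  const blobs = Blob.getBlobs(blobFields); // pack the block's tx-effect fields into blobs
  return getBlobsHashFromBlobs(blobs);     // sha256Trunc over the serialized versioned blob hashes
}
```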
@@ -547,7 +563,7 @@ export async function getMembershipWitnessFor<N extends number>(
     return makeEmptyMembershipWitness(height);
   }

-  const index = await db.
+  const index = (await db.findLeafIndices(treeId, [value.toBuffer()]))[0];
   if (index === undefined) {
     throw new Error(`Leaf with value ${value} not found in tree ${MerkleTreeId[treeId]}`);
   }
package/src/orchestrator/block-proving-state.ts

@@ -2,8 +2,6 @@ import { type L2Block, type MerkleTreeId } from '@aztec/circuit-types';
 import {
   type ARCHIVE_HEIGHT,
   type AppendOnlyTreeSnapshot,
-  type BaseOrMergeRollupPublicInputs,
-  type BlockRootOrBlockMergePublicInputs,
   type Fr,
   type GlobalVariables,
   type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
@@ -16,6 +14,8 @@ import {
   type RootParityInput,
   type VerificationKeyAsFields,
 } from '@aztec/circuits.js';
+import { SpongeBlob } from '@aztec/circuits.js/blobs';
+import { type BaseOrMergeRollupPublicInputs, type BlockRootOrBlockMergePublicInputs } from '@aztec/circuits.js/rollup';
 import { type Tuple } from '@aztec/foundation/serialize';

 import { type EpochProvingState } from './epoch-proving-state.js';
@@ -44,12 +44,13 @@ export class BlockProvingState {
   public blockRootRollupStarted: boolean = false;
   public finalProof: Proof | undefined;
   public block: L2Block | undefined;
+  public spongeBlobState: SpongeBlob | undefined;
+  public totalNumTxs: number;
   private txs: TxProvingState[] = [];
   public error: string | undefined;

   constructor(
     public readonly index: number,
-    public readonly totalNumTxs: number,
     public readonly globalVariables: GlobalVariables,
     public readonly newL1ToL2Messages: Tuple<Fr, typeof NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP>,
     public readonly messageTreeSnapshot: AppendOnlyTreeSnapshot,
@@ -61,6 +62,7 @@ export class BlockProvingState {
     private readonly parentEpoch: EpochProvingState,
   ) {
     this.rootParityInputs = Array.from({ length: NUM_BASE_PARITY_PER_ROOT_PARITY }).map(_ => undefined);
+    this.totalNumTxs = 0;
   }

   public get blockNumber() {
@@ -98,8 +100,21 @@ export class BlockProvingState {
     return [mergeLevel - 1n, thisIndex >> 1n, thisIndex & 1n];
   }

+  public startNewBlock(numTxs: number, numBlobFields: number) {
+    if (this.spongeBlobState) {
+      throw new Error(`Block ${this.blockNumber} already initalised.`);
+    }
+    // Initialise the sponge which will eventually absorb all tx effects to be added to the blob.
+    // Like l1 to l2 messages, we need to know beforehand how many effects will be absorbed.
+    this.spongeBlobState = SpongeBlob.init(numBlobFields);
+    this.totalNumTxs = numTxs;
+  }
+
   // Adds a transaction to the proving state, returns it's index
   public addNewTx(tx: TxProvingState) {
+    if (!this.spongeBlobState) {
+      throw new Error(`Invalid block proving state, call startNewBlock before adding transactions.`);
+    }
     this.txs.push(tx);
     return this.txs.length - 1;
   }
@@ -199,11 +214,6 @@ export class BlockProvingState {
     return this.rootParityInputs.findIndex(p => !p) === -1;
   }

-  // Returns true if we are still able to accept transactions, false otherwise
-  public isAcceptingTransactions() {
-    return this.totalNumTxs > this.txs.length;
-  }
-
   // Returns whether the proving state is still valid
   public verifyState() {
     return this.parentEpoch.verifyState();
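In short, `BlockProvingState` now receives its transaction count and blob-sponge size through `startNewBlock` rather than the constructor, and `addNewTx` refuses to run before it. A minimal ordering sketch, with `makeTxProvingState` standing in for the orchestrator's own wiring:

```ts
// Ordering sketch only: startNewBlock must size the blob sponge before any tx is added.
// makeTxProvingState is a hypothetical stand-in; the other names appear in this diff.
import { toNumBlobFields, type ProcessedTx } from '@aztec/circuit-types';
import { type BlockProvingState } from './block-proving-state.js';
import { type TxProvingState } from './tx-proving-state.js';

function seedBlock(
  block: BlockProvingState,
  txs: ProcessedTx[],
  makeTxProvingState: (tx: ProcessedTx) => TxProvingState,
) {
  // A block always proves at least two (possibly padding) txs; the sponge is sized for all blob fields.
  block.startNewBlock(Math.max(2, txs.length), toNumBlobFields(txs));
  for (const tx of txs) {
    block.addNewTx(makeTxProvingState(tx)); // throws if startNewBlock was not called first
  }
}
```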
package/src/orchestrator/epoch-proving-state.ts

@@ -2,7 +2,6 @@ import { type MerkleTreeId } from '@aztec/circuit-types';
 import {
   type ARCHIVE_HEIGHT,
   type AppendOnlyTreeSnapshot,
-  type BlockRootOrBlockMergePublicInputs,
   Fr,
   type GlobalVariables,
   type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
@@ -10,9 +9,9 @@ import {
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   type Proof,
   type RecursiveProof,
-  type RootRollupPublicInputs,
   type VerificationKeyAsFields,
 } from '@aztec/circuits.js';
+import { type BlockRootOrBlockMergePublicInputs, type RootRollupPublicInputs } from '@aztec/circuits.js/rollup';
 import { padArrayEnd } from '@aztec/foundation/collection';
 import { type Tuple } from '@aztec/foundation/serialize';

@@ -97,7 +96,6 @@ export class EpochProvingState {
   // Adds a block to the proving state, returns its index
   // Will update the proving life cycle if this is the last block
   public startNewBlock(
-    numTxs: number,
     globalVariables: GlobalVariables,
     l1ToL2Messages: Fr[],
     messageTreeSnapshot: AppendOnlyTreeSnapshot,
@@ -110,7 +108,6 @@ export class EpochProvingState {
     const index = globalVariables.blockNumber.toNumber() - this.firstBlockNumber;
     const block = new BlockProvingState(
       index,
-      numTxs,
       globalVariables,
       padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP),
       messageTreeSnapshot,
package/src/orchestrator/orchestrator.ts

@@ -4,6 +4,7 @@ import {
   type ProcessedTx,
   type ServerCircuitProver,
   makeEmptyProcessedTx,
+  toNumBlobFields,
 } from '@aztec/circuit-types';
 import {
   type EpochProver,
@@ -16,13 +17,10 @@ import {
   AVM_PROOF_LENGTH_IN_FIELDS,
   AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS,
   type AppendOnlyTreeSnapshot,
-
+  BLOBS_PER_BLOCK,
   BaseParityInputs,
-  type BaseRollupHints,
   type BlockHeader,
-
-  BlockRootRollupInputs,
-  EmptyBlockRootRollupInputs,
+  FIELDS_PER_BLOB,
   Fr,
   type GlobalVariables,
   L1_TO_L2_MSG_SUBTREE_HEIGHT,
@@ -39,8 +37,18 @@ import {
   VerificationKeyData,
   makeEmptyRecursiveProof,
 } from '@aztec/circuits.js';
+import { BlobPublicInputs } from '@aztec/circuits.js/blobs';
+import {
+  type BaseOrMergeRollupPublicInputs,
+  type BaseRollupHints,
+  type BlockRootOrBlockMergePublicInputs,
+  BlockRootRollupInputs,
+  EmptyBlockRootRollupInputs,
+} from '@aztec/circuits.js/rollup';
 import { makeTuple } from '@aztec/foundation/array';
+import { Blob } from '@aztec/foundation/blob';
 import { maxBy, padArrayEnd } from '@aztec/foundation/collection';
+import { sha256ToField } from '@aztec/foundation/crypto';
 import { AbortError } from '@aztec/foundation/error';
 import { createLogger } from '@aztec/foundation/log';
 import { promiseWithResolvers } from '@aztec/foundation/promise';
@@ -139,17 +147,14 @@ export class ProvingOrchestrator implements EpochProver {

   /**
    * Starts off a new block
-   * @param numTxs - The total number of transactions in the block.
    * @param globalVariables - The global variables for the block
    * @param l1ToL2Messages - The l1 to l2 messages for the block
-   * @param verificationKeys - The private kernel verification keys
    * @returns A proving ticket, containing a promise notifying of proving completion
    */
-  @trackSpan('ProvingOrchestrator.startNewBlock',
-    [Attributes.BLOCK_SIZE]: numTxs,
+  @trackSpan('ProvingOrchestrator.startNewBlock', globalVariables => ({
     [Attributes.BLOCK_NUMBER]: globalVariables.blockNumber.toNumber(),
   }))
-  public async startNewBlock(
+  public async startNewBlock(globalVariables: GlobalVariables, l1ToL2Messages: Fr[]) {
     if (!this.provingState) {
       throw new Error(`Invalid proving state, call startNewEpoch before starting a block`);
     }
@@ -158,12 +163,8 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Epoch not accepting further blocks`);
     }

-    if (!Number.isInteger(numTxs) || numTxs < 2) {
-      throw new Error(`Invalid number of txs for block (got ${numTxs})`);
-    }
-
     logger.info(
-      `Starting block ${globalVariables.blockNumber.toNumber()} for slot ${globalVariables.slotNumber.toNumber()}
+      `Starting block ${globalVariables.blockNumber.toNumber()} for slot ${globalVariables.slotNumber.toNumber()}`,
     );

     // Fork world state at the end of the immediately previous block
@@ -210,7 +211,6 @@ export class ProvingOrchestrator implements EpochProver {
     );

     const blockProvingState = this.provingState!.startNewBlock(
-      numTxs,
       globalVariables,
       l1ToL2MessagesPadded,
       messageTreeSnapshot,
@@ -228,47 +228,56 @@ export class ProvingOrchestrator implements EpochProver {
   }

   /**
-   * The interface to add
-   * @param
+   * The interface to add simulated transactions to the scheduler
+   * @param txs - The transactions to be proven
    */
-  @trackSpan('ProvingOrchestrator.
-    [Attributes.
+  @trackSpan('ProvingOrchestrator.addTxs', txs => ({
+    [Attributes.BLOCK_TXS_COUNT]: txs.length,
   }))
-  public async
-
-
-
-
-
-
-
-
-
-  }
+  public async addTxs(txs: ProcessedTx[]): Promise<void> {
+    if (!txs.length) {
+      // To avoid an ugly throw below. If we require an empty block, we can just call setBlockCompleted
+      // on a block with no txs. We cannot do that here because we cannot find the blockNumber without any txs.
+      logger.warn(`Provided no txs to orchestrator addTxs.`);
+      return;
+    }
+    const blockNumber = txs[0].constants.globalVariables.blockNumber.toNumber();
+    const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber!);
+    if (!provingState) {
+      throw new Error(`Block proving state for ${blockNumber} not found`);
+    }

-
-
-  }
+    const numBlobFields = toNumBlobFields(txs);
+    provingState.startNewBlock(Math.max(2, txs.length), numBlobFields);

-
+    logger.info(
+      `Adding ${txs.length} transactions with ${numBlobFields} blob fields to block ${provingState?.blockNumber}`,
+    );
+    for (const tx of txs) {
+      try {
+        if (!provingState.verifyState()) {
+          throw new Error(`Invalid proving state when adding a tx`);
+        }

-
+        validateTx(tx);

-
-        logger.warn(`Ignoring empty transaction ${tx.hash} - it will not be added to this block`);
-        return;
-      }
+        logger.info(`Received transaction: ${tx.hash}`);

-
-
+        if (tx.isEmpty) {
+          logger.warn(`Ignoring empty transaction ${tx.hash} - it will not be added to this block`);
+          continue;
+        }

-
-
+        const [hints, treeSnapshots] = await this.prepareTransaction(tx, provingState);
+        this.enqueueFirstProofs(hints, treeSnapshots, tx, provingState);
+      } catch (err: any) {
+        throw new Error(`Error adding transaction ${tx.hash.toString()} to block ${blockNumber}: ${err.message}`, {
+          cause: err,
+        });
       }
-  }
-
-
-    });
+    }
+    if (provingState.transactionsReceived === provingState.totalNumTxs) {
+      logger.verbose(`All transactions received for block ${provingState.globalVariables.blockNumber}.`);
     }
   }

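Taken together with the `startNewBlock` changes above, the caller-facing flow now looks roughly like the sketch below. Only the `startNewBlock` and `addTxs` signatures are taken from this diff; the `setBlockCompleted` call and its argument are assumptions inferred from the surrounding hunks.

```ts
// Hedged sketch of the revised proving flow; setBlockCompleted's exact signature is assumed.
import { type ProcessedTx } from '@aztec/circuit-types';
import { type Fr, type GlobalVariables } from '@aztec/circuits.js';
import { type ProvingOrchestrator } from './orchestrator.js';

async function proveBlock(
  orchestrator: ProvingOrchestrator,
  globalVariables: GlobalVariables,
  l1ToL2Messages: Fr[],
  txs: ProcessedTx[],
) {
  // numTxs is no longer passed up-front; the orchestrator derives it when the txs arrive.
  await orchestrator.startNewBlock(globalVariables, l1ToL2Messages);
  // All txs for the block are handed over in one call, which sizes the blob sponge via
  // toNumBlobFields(txs) and enqueues the first proofs for each tx.
  await orchestrator.addTxs(txs);
  // Assumed call: completing the block pads an empty one to 2 txs and zero blob fields (next hunk).
  await orchestrator.setBlockCompleted(globalVariables.blockNumber.toNumber());
}
```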
@@ -285,6 +294,12 @@ export class ProvingOrchestrator implements EpochProver {
       throw new Error(`Block proving state for ${blockNumber} not found`);
     }

+    if (!provingState.spongeBlobState) {
+      // If we are completing an empty block, initialise the provingState.
+      // We will have 2 padding txs, and => no blob fields.
+      provingState.startNewBlock(2, 0);
+    }
+
     if (!provingState.verifyState()) {
       throw new Error(`Block proving failed: ${provingState.error}`);
     }
@@ -326,7 +341,7 @@ export class ProvingOrchestrator implements EpochProver {
     }

     // And build the block header
-    logger.verbose(`Block ${
+    logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
     await this.buildBlock(provingState, expectedHeader);

     // If the proofs were faster than the block building, then we need to try the block root rollup again here
@@ -435,14 +450,6 @@ export class ProvingOrchestrator implements EpochProver {
     const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
     const l2Block = new L2Block(newArchive, header, body);

-    if (!l2Block.body.getTxsEffectsHash().equals(header.contentCommitment.txsEffectsHash)) {
-      throw new Error(
-        `Txs effects hash mismatch, ${l2Block.body
-          .getTxsEffectsHash()
-          .toString('hex')} == ${header.contentCommitment.txsEffectsHash.toString('hex')} `,
-      );
-    }
-
     await this.verifyBuiltBlockAgainstSyncedState(l2Block, newArchive);

     logger.verbose(`Orchestrator finalised block ${l2Block.number}`);
@@ -566,6 +573,22 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

+  /**
+   * Collect all new nullifiers, commitments, and contracts from all txs in a block
+   * @returns The array of non empty tx effects.
+   */
+  private extractTxEffects(provingState: BlockProvingState) {
+    // Note: this check should ensure that we have all txs and their effects ready.
+    if (!provingState.finalRootParityInput?.publicInputs.shaRoot) {
+      throw new Error(`Invalid proving state, a block must be ready to be proven before its effects can be extracted.`);
+    }
+    const nonEmptyTxEffects = provingState.allTxs
+      .map(txProvingState => txProvingState.processedTx.txEffect)
+      .filter(txEffect => !txEffect.isEmpty());
+
+    return nonEmptyTxEffects;
+  }
+
   /**
    * Returns the proof for the current epoch.
    */
@@ -692,7 +715,7 @@ export class ProvingOrchestrator implements EpochProver {
     provingState: BlockProvingState | undefined,
     tx: ProcessedTx,
   ): Promise<[BaseRollupHints, TreeSnapshots] | undefined> {
-    if (!provingState?.verifyState()) {
+    if (!provingState?.verifyState() || !provingState.spongeBlobState) {
       logger.debug('Not preparing base rollup inputs, state invalid');
       return;
     }
@@ -701,7 +724,9 @@ export class ProvingOrchestrator implements EpochProver {

     // We build the base rollup inputs using a mock proof and verification key.
     // These will be overwritten later once we have proven the tube circuit and any public kernels
-    const [ms, hints] = await elapsed(
+    const [ms, hints] = await elapsed(
+      buildBaseRollupHints(tx, provingState.globalVariables, db, provingState.spongeBlobState),
+    );

     if (!tx.isEmpty) {
       this.metrics.recordBaseRollupInputs(ms);
@@ -774,7 +799,7 @@ export class ProvingOrchestrator implements EpochProver {
     );
   }

-  // Enqueues the
+  // Enqueues the tube circuit for a given transaction index
   // Once completed, will enqueue the next circuit, either a public kernel or the base rollup
   private enqueueTube(provingState: BlockProvingState, txIndex: number) {
     if (!provingState?.verifyState()) {
@@ -856,9 +881,14 @@ export class ProvingOrchestrator implements EpochProver {
     provingState.blockRootRollupStarted = true;
     const mergeInputData = provingState.getMergeInputs(0);
     const rootParityInput = provingState.finalRootParityInput!;
+    const blobFields = this.extractTxEffects(provingState)
+      .map(tx => tx.toBlobFields())
+      .flat();
+    const blobs = Blob.getBlobs(blobFields);
+    const blobsHash = sha256ToField(blobs.map(b => b.getEthVersionedBlobHash()));

     logger.debug(
-      `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs
+      `Enqueuing block root rollup for block ${provingState.blockNumber} with ${provingState.newL1ToL2Messages.length} l1 to l2 msgs and ${blobs.length} blobs.`,
     );

     const previousRollupData: BlockRootRollupInputs['previousRollupData'] = makeTuple(2, i =>
@@ -879,6 +909,13 @@ export class ProvingOrchestrator implements EpochProver {
       newArchiveSiblingPath: provingState.archiveTreeRootSiblingPath,
       previousBlockHash: provingState.previousBlockHash,
       proverId: this.proverId,
+      blobFields: padArrayEnd(blobFields, Fr.ZERO, FIELDS_PER_BLOB * BLOBS_PER_BLOCK),
+      blobCommitments: padArrayEnd(
+        blobs.map(b => b.commitmentToFields()),
+        [Fr.ZERO, Fr.ZERO],
+        BLOBS_PER_BLOCK,
+      ),
+      blobsHash: blobsHash,
     });

     this.deferredProving(
@@ -903,6 +940,17 @@ export class ProvingOrchestrator implements EpochProver {

         provingState.blockRootRollupPublicInputs = result.inputs;
         provingState.finalProof = result.proof.binaryProof;
+        const blobOutputs = result.inputs.blobPublicInputs[0];
+        blobOutputs.inner.forEach((blobOutput, i) => {
+          if (!blobOutput.isEmpty() && !blobOutput.equals(BlobPublicInputs.fromBlob(blobs[i]))) {
+            throw new Error(
+              `Rollup circuits produced mismatched blob evaluation:
+              z: ${blobOutput.z} == ${blobs[i].challengeZ},
+              y: ${blobOutput.y.toString(16)} == ${blobs[i].evaluationY.toString('hex')},
+              C: ${blobOutput.kzgCommitment} == ${blobs[i].commitmentToFields()}`,
+            );
+          }
+        });

         logger.debug(`Completed proof for block root rollup for ${provingState.block?.number}`);
         // validatePartialState(result.inputs.end, tx.treeSnapshots); // TODO(palla/prover)
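The check above compares each circuit-produced blob output against a locally computed `BlobPublicInputs`. A short sketch of those expected values, using only names that appear in this diff (the wrapper function is hypothetical):

```ts
// Sketch: the locally computed values each circuit blob output is compared against.
import { type Fr } from '@aztec/circuits.js';
import { BlobPublicInputs } from '@aztec/circuits.js/blobs';
import { Blob } from '@aztec/foundation/blob';

function expectedBlobOutputs(blobFields: Fr[]): BlobPublicInputs[] {
  // Each blob carries its own KZG challenge z, evaluation y and commitment C.
  return Blob.getBlobs(blobFields).map(blob => BlobPublicInputs.fromBlob(blob));
}
```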
@@ -1196,10 +1244,13 @@ export class ProvingOrchestrator implements EpochProver {
       return await this.prover.getAvmProof(inputs, signal, provingState.epochNumber);
     } catch (err) {
       if (process.env.AVM_PROVING_STRICT) {
+        logger.error(`Error thrown when proving AVM circuit with AVM_PROVING_STRICT on`, err);
         throw err;
       } else {
         logger.warn(
-          `Error thrown when proving AVM circuit
+          `Error thrown when proving AVM circuit but AVM_PROVING_STRICT is off. Faking AVM proof and carrying on. ${inspect(
+            err,
+          )}.`,
         );
         return {
           proof: makeEmptyRecursiveProof(AVM_PROOF_LENGTH_IN_FIELDS),
package/src/orchestrator/tx-proving-state.ts

@@ -3,6 +3,11 @@ import {
   type AVM_PROOF_LENGTH_IN_FIELDS,
   AVM_VK_INDEX,
   type AppendOnlyTreeSnapshot,
+  type TUBE_PROOF_LENGTH,
+  TUBE_VK_INDEX,
+  VkWitnessData,
+} from '@aztec/circuits.js';
+import {
   AvmProofData,
   type BaseRollupHints,
   PrivateBaseRollupHints,
@@ -11,11 +16,8 @@ import {
   PublicBaseRollupHints,
   PublicBaseRollupInputs,
   PublicTubeData,
-  type TUBE_PROOF_LENGTH,
-  TUBE_VK_INDEX,
   TubeInputs,
-
-} from '@aztec/circuits.js';
+} from '@aztec/circuits.js/rollup';
 import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types';

 /**
package/src/prover-agent/memory-proving-queue.ts

@@ -11,31 +11,33 @@ import {
 import type {
   AVM_PROOF_LENGTH_IN_FIELDS,
   AvmCircuitInputs,
-  BaseOrMergeRollupPublicInputs,
   BaseParityInputs,
-  BlockMergeRollupInputs,
-  BlockRootOrBlockMergePublicInputs,
-  BlockRootRollupInputs,
-  EmptyBlockRootRollupInputs,
   KernelCircuitPublicInputs,
-  MergeRollupInputs,
   NESTED_RECURSIVE_PROOF_LENGTH,
   ParityPublicInputs,
-  PrivateBaseRollupInputs,
   PrivateKernelEmptyInputData,
-  PublicBaseRollupInputs,
   RECURSIVE_PROOF_LENGTH,
   RootParityInputs,
-  RootRollupInputs,
-  RootRollupPublicInputs,
-  TubeInputs,
 } from '@aztec/circuits.js';
+import {
+  type BaseOrMergeRollupPublicInputs,
+  type BlockMergeRollupInputs,
+  type BlockRootOrBlockMergePublicInputs,
+  type BlockRootRollupInputs,
+  type EmptyBlockRootRollupInputs,
+  type MergeRollupInputs,
+  type PrivateBaseRollupInputs,
+  type PublicBaseRollupInputs,
+  type RootRollupInputs,
+  type RootRollupPublicInputs,
+  type TubeInputs,
+} from '@aztec/circuits.js/rollup';
 import { randomBytes } from '@aztec/foundation/crypto';
 import { AbortError, TimeoutError } from '@aztec/foundation/error';
 import { createLogger } from '@aztec/foundation/log';
 import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise';
 import { PriorityMemoryQueue } from '@aztec/foundation/queue';
-import { type TelemetryClient } from '@aztec/telemetry-client';
+import { type TelemetryClient, type Tracer, trackSpan } from '@aztec/telemetry-client';

 import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js';
 import { ProvingQueueMetrics } from './queue_metrics.js';
@@ -65,6 +67,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
   private runningPromise: RunningPromise;
   private metrics: ProvingQueueMetrics;

+  public readonly tracer: Tracer;
+
   constructor(
     client: TelemetryClient,
     /** Timeout the job if an agent doesn't report back in this time */
@@ -75,8 +79,9 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
     private timeSource = defaultTimeSource,
     private proofStore: ProofStore = new InlineProofStore(),
   ) {
+    this.tracer = client.getTracer('MemoryProvingQueue');
     this.metrics = new ProvingQueueMetrics(client, 'MemoryProvingQueue');
-    this.runningPromise = new RunningPromise(this.poll, pollingIntervalMs);
+    this.runningPromise = new RunningPromise(this.poll.bind(this), this.log, pollingIntervalMs);
   }

   public start() {
@@ -202,7 +207,8 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
     return this.jobsInProgress.has(jobId);
   }

-
+  @trackSpan('MemoryProvingQueue.poll')
+  private poll() {
     const now = this.timeSource();
     this.metrics.recordQueueSize(this.queue.length());

@@ -220,7 +226,7 @@ export class MemoryProvingQueue implements ServerCircuitProver, ProvingJobSource
         this.queue.put(job);
       }
     }
-  }
+  }

   private async enqueue<T extends ProvingRequestType>(
     type: T,