@aztec/prover-client 3.0.0-nightly.20251113 → 3.0.0-nightly.20251115
This diff compares the contents of two publicly released versions of this package as they appear in the supported public registries. It is provided for informational purposes only.
- package/dest/block-factory/light.d.ts +6 -6
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +35 -22
- package/dest/light/lightweight_checkpoint_builder.d.ts +29 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +107 -0
- package/dest/mocks/fixtures.d.ts +0 -3
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +1 -12
- package/dest/mocks/test_context.d.ts +26 -44
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +101 -112
- package/dest/orchestrator/block-building-helpers.d.ts +12 -14
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +81 -105
- package/dest/orchestrator/block-proving-state.d.ts +9 -4
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +80 -19
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +6 -4
- package/dest/orchestrator/orchestrator.d.ts +0 -1
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +15 -23
- package/package.json +15 -15
- package/src/block-factory/light.ts +43 -42
- package/src/light/lightweight_checkpoint_builder.ts +141 -0
- package/src/mocks/fixtures.ts +1 -25
- package/src/mocks/test_context.ts +141 -174
- package/src/orchestrator/block-building-helpers.ts +120 -198
- package/src/orchestrator/block-proving-state.ts +100 -22
- package/src/orchestrator/checkpoint-proving-state.ts +12 -5
- package/src/orchestrator/orchestrator.ts +18 -25
package/src/orchestrator/block-building-helpers.ts

@@ -3,6 +3,7 @@ import {
   BatchedBlobAccumulator,
   SpongeBlob,
   computeBlobsHashFromBlobs,
+  encodeBlockBlobData,
   getBlobCommitmentsFromBlobs,
   getBlobsPerL1Block,
 } from '@aztec/blob-lib';
@@ -17,24 +18,17 @@ import {
   NULLIFIER_SUBTREE_HEIGHT,
   NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
   NULLIFIER_TREE_HEIGHT,
-  NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
   PUBLIC_DATA_TREE_HEIGHT,
 } from '@aztec/constants';
 import { makeTuple } from '@aztec/foundation/array';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { sha256Trunc } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
-import { type Bufferable,
-import {
-  MembershipWitness,
-  MerkleTreeCalculator,
-  computeCompressedUnbalancedMerkleTreeRoot,
-} from '@aztec/foundation/trees';
+import { type Bufferable, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+import { MembershipWitness } from '@aztec/foundation/trees';
 import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
 import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
-import { Body
-import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
+import { Body } from '@aztec/stdlib/block';
 import type { MerkleTreeWriteOperations, PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import { ContractClassLogFields } from '@aztec/stdlib/logs';
 import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
@@ -50,13 +44,11 @@ import {
   AppendOnlyTreeSnapshot,
   MerkleTreeId,
   NullifierLeafPreimage,
-  PublicDataTreeLeaf,
   PublicDataTreeLeafPreimage,
   getTreeHeight,
 } from '@aztec/stdlib/trees';
 import {
   BlockHeader,
-  ContentCommitment,
   GlobalVariables,
   PartialStateReference,
   type ProcessedTx,
@@ -103,30 +95,7 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
       NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
     );

-
-    // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
-    const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
-    await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
-
-    // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
-    // All reads that refer to writes in the same tx are transient and can be simplified out.
-    const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
-
-    // Update the nullifier tree, capturing the low nullifier info for each individual operation
-    const {
-      lowLeavesWitnessData: nullifierWitnessLeaves,
-      newSubtreeSiblingPath: nullifiersSubtreeRootSiblingPath,
-      sortedNewLeaves: sortedNullifiers,
-      sortedNewLeavesIndexes,
-    } = await db.batchInsert(
-      MerkleTreeId.NULLIFIER_TREE,
-      padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
-      NULLIFIER_SUBTREE_HEIGHT,
-    );
-
-    if (nullifierWitnessLeaves === undefined) {
-      throw new Error(`Could not craft nullifier batch insertion proofs`);
-    }
+    const { nullifierInsertionResult, publicDataInsertionResult } = await insertSideEffects(tx, db);

     const blockHash = await tx.data.constants.anchorBlockHeader.hash();
     const anchorBlockArchiveSiblingPath = (
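This hunk collapses roughly twenty lines of inline tree mutation into one call: the insertions now live in the new `insertSideEffects` helper added later in this file, leaving this function to assemble circuit hints from the returned witness data. A minimal sketch of the resulting two-phase shape, with all types reduced to stand-ins (not the package's actual signatures):

```ts
// Sketch only: the two-phase flow after this refactor. The real types live in
// @aztec/stdlib; `InsertionResult` here is a simplified stand-in.
type InsertionResult = {
  lowLeavesWitnessData?: unknown[];
  sortedNewLeaves: Buffer[];
  sortedNewLeavesIndexes: number[];
};

declare function insertSideEffects(
  tx: unknown,
  db: unknown,
): Promise<{ nullifierInsertionResult: InsertionResult; publicDataInsertionResult: InsertionResult }>;

async function insertAndBuildHints(tx: unknown, db: unknown) {
  // Phase 1: mutate the note hash, nullifier, and public data trees.
  const { nullifierInsertionResult, publicDataInsertionResult } = await insertSideEffects(tx, db);
  // Phase 2: fold the captured low-leaf witnesses into base rollup hints
  // (see the TreeSnapshotDiffHints assembly in the next hunk).
  return { nullifierInsertionResult, publicDataInsertionResult };
}
```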
@@ -146,52 +115,59 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
         contractClassLogsFields,
       });
     } else {
-      if (
-        txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 ||
-        txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 ||
-        txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1
-      ) {
+      if (tx.txEffect.publicDataWrites.length > 1) {
         throw new Error(`More than one public data write in a private only tx`);
       }

       // Get hints for reading fee payer's balance in the public data tree.
-      const
-      const
+      const feePayerBalanceLeafWitnessData = publicDataInsertionResult.lowLeavesWitnessData[0];
+      const feePayerBalanceMembershipWitness = MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
+        feePayerBalanceLeafWitnessData.index,
+        assertLength(feePayerBalanceLeafWitnessData.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
+      );
+      const feePayerBalanceLeafPreimage = feePayerBalanceLeafWitnessData.leafPreimage as PublicDataTreeLeafPreimage;
       const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
-      if (!
+      if (!leafSlot.equals(feePayerBalanceLeafPreimage.leaf.slot)) {
         throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
       }

-      //
-      const
-
-
+      // Get hints for inserting the nullifiers.
+      const nullifierLowLeavesWitnessData = nullifierInsertionResult.lowLeavesWitnessData!;
+      const nullifierPredecessorPreimages = padArrayEnd(
+        nullifierLowLeavesWitnessData.map(l => l.leafPreimage as NullifierLeafPreimage),
+        NullifierLeafPreimage.empty(),
+        MAX_NULLIFIERS_PER_TX,
+      );
+      const nullifierPredecessorMembershipWitnesses = padArrayEnd(
+        nullifierLowLeavesWitnessData.map(l =>
+          MembershipWitness.fromBufferArray<typeof NULLIFIER_TREE_HEIGHT>(
            l.index,
            assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT),
          ),
-      )
+        ),
+        makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+        MAX_NULLIFIERS_PER_TX,
+      );
+      const sortedNullifiers = assertLength(
+        nullifierInsertionResult.sortedNewLeaves.map(n => Fr.fromBuffer(n)),
+        MAX_NULLIFIERS_PER_TX,
+      );
+      const sortedNullifierIndexes = assertLength(
+        nullifierInsertionResult.sortedNewLeavesIndexes,
+        MAX_NULLIFIERS_PER_TX,
+      );
+      const nullifierSubtreeRootSiblingPath = assertLength(
+        nullifierInsertionResult.newSubtreeSiblingPath.toFields(),
+        NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
+      );

       const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
         noteHashSubtreeRootSiblingPath,
-        nullifierPredecessorPreimages
-
-
-
-
-        nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-          i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
-            ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
-            : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
-        ),
-        sortedNullifiers: assertLength(
-          sortedNullifiers.map(n => Fr.fromBuffer(n)),
-          MAX_NULLIFIERS_PER_TX,
-        ),
-        sortedNullifierIndexes: assertLength(sortedNewLeavesIndexes, MAX_NULLIFIERS_PER_TX),
-        nullifierSubtreeRootSiblingPath: assertLength(
-          nullifiersSubtreeRootSiblingPath.toFields(),
-          NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
-        ),
+        nullifierPredecessorPreimages,
+        nullifierPredecessorMembershipWitnesses,
+        sortedNullifiers,
+        sortedNullifierIndexes,
+        nullifierSubtreeRootSiblingPath,
         feePayerBalanceMembershipWitness,
       });
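The hint assembly above leans on `padArrayEnd` and `assertLength` from `@aztec/foundation`: the base rollup circuit expects fixed-size inputs, so witness lists are padded out to `MAX_NULLIFIERS_PER_TX` and then length-checked. Simplified re-implementations (not the package's actual code) to illustrate the semantics:

```ts
// Illustrative re-implementations of the two fixed-length helpers.
function padArrayEnd<T>(arr: T[], fill: T, length: number): T[] {
  if (arr.length > length) {
    throw new Error(`Array of length ${arr.length} does not fit in ${length}`);
  }
  return [...arr, ...new Array<T>(length - arr.length).fill(fill)];
}

function assertLength<T>(arr: T[], length: number): T[] {
  if (arr.length !== length) {
    throw new Error(`Expected length ${length}, got ${arr.length}`);
  }
  return arr;
}

// A tx with only two nullifiers still yields fixed-size hint arrays.
const MAX_NULLIFIERS_PER_TX = 64; // illustrative value; the real constant lives in @aztec/constants
const hints = padArrayEnd(['n1', 'n2'], 'empty', MAX_NULLIFIERS_PER_TX);
console.log(assertLength(hints, MAX_NULLIFIERS_PER_TX).length); // 64
```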
@@ -217,6 +193,44 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan(
   },
 );

+export const insertSideEffects = runInSpan(
+  'BlockBuilderHelpers',
+  'buildBaseRollupHints',
+  async (span: Span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
+    span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
+
+    // Insert the note hashes. Padded with zeros to the max number of note hashes per tx.
+    const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
+    await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
+
+    // Insert the nullifiers. Padded with zeros to the max number of nullifiers per tx.
+    // Capturing the low nullifier info for each individual operation.
+    const nullifierInsertionResult = await db.batchInsert(
+      MerkleTreeId.NULLIFIER_TREE,
+      padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
+      NULLIFIER_SUBTREE_HEIGHT,
+    );
+    if (nullifierInsertionResult.lowLeavesWitnessData === undefined) {
+      throw new Error(`Failed to batch insert nullifiers.`);
+    }
+
+    if (tx.txEffect.publicDataWrites.some(write => write.isEmpty())) {
+      throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}.`);
+    }
+    // Insert the public data writes sequentially. No need to pad them to the max array size.
+    // Capturing the low leaf info for each individual operation.
+    const publicDataInsertionResult = await db.sequentialInsert(
+      MerkleTreeId.PUBLIC_DATA_TREE,
+      tx.txEffect.publicDataWrites.map(write => write.toBuffer()),
+    );
+
+    return {
+      nullifierInsertionResult,
+      publicDataInsertionResult,
+    };
+  },
+);
+
 export function getChonkProofFromTx(tx: Tx | ProcessedTx) {
   const publicInputs = tx.data.publicInputs().toFields();

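The new helper also pins down how each kind of side effect hits the world state: note hashes are appended (padded), nullifiers go in as one batch-inserted subtree, and public data writes are inserted sequentially and unpadded. A sketch of that ordering against a hypothetical db interface (`WorldStateDb` is a stand-in for the subset of `MerkleTreeWriteOperations` used above):

```ts
// Hypothetical stand-in for the db operations used by insertSideEffects.
interface WorldStateDb {
  appendLeaves(treeId: string, leaves: unknown[]): Promise<void>;
  batchInsert(treeId: string, leaves: Buffer[], subtreeHeight: number): Promise<{ lowLeavesWitnessData?: unknown[] }>;
  sequentialInsert(treeId: string, leaves: Buffer[]): Promise<{ lowLeavesWitnessData: unknown[] }>;
}

async function applySideEffects(
  db: WorldStateDb,
  effects: { noteHashes: unknown[]; nullifiers: Buffer[]; publicDataWrites: Buffer[] },
) {
  // 1. Note hashes: append-only, so no low-leaf witnesses are needed.
  await db.appendLeaves('NOTE_HASH_TREE', effects.noteHashes);
  // 2. Nullifiers: batch subtree insertion, capturing low-leaf witnesses for the circuit.
  const nullifierInsertionResult = await db.batchInsert('NULLIFIER_TREE', effects.nullifiers, 6 /* illustrative height */);
  // 3. Public data: sequential insertion, one low-leaf witness per write.
  const publicDataInsertionResult = await db.sequentialInsert('PUBLIC_DATA_TREE', effects.publicDataWrites);
  return { nullifierInsertionResult, publicDataInsertionResult };
}
```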
@@ -246,13 +260,6 @@ export const buildBlobHints = (blobFields: Fr[]) => {
   return { blobCommitments, blobs, blobsHash };
 };

-// Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
-export const buildBlobDataFromTxs = async (txsPerCheckpoint: ProcessedTx[][]) => {
-  const blobFields = txsPerCheckpoint.map(txs => getCheckpointBlobFields([txs.map(tx => tx.txEffect)]));
-  const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
-  return { blobFieldsLengths: blobFields.map(fields => fields.length), finalBlobChallenges };
-};
-
 export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint: Fr[][]) => {
   const blobs = blobFieldsPerCheckpoint.map(blobFields => getBlobsPerL1Block(blobFields));
   return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
@@ -303,103 +310,60 @@ export const buildHeaderAndBodyFromTxs = runInSpan(
   async (
     span,
     txs: ProcessedTx[],
+    lastArchive: AppendOnlyTreeSnapshot,
+    endState: StateReference,
     globalVariables: GlobalVariables,
-
-
-    startSpongeBlob?: SpongeBlob,
+    startSpongeBlob: SpongeBlob,
+    isFirstBlock: boolean,
   ) => {
     span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
-    const stateReference = new StateReference(
-      await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
-      new PartialStateReference(
-        await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
-        await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
-        await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
-      ),
-    );
-
-    const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);

     const txEffects = txs.map(tx => tx.txEffect);
     const body = new Body(txEffects);

-    const
-    const
-
-    const
-
-
-
-
-
-
-
-
+    const totalFees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+    const totalManaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+
+    const { l1ToL2MessageTree, partial } = endState;
+
+    const blockBlobFields = encodeBlockBlobData({
+      blockEndMarker: {
+        timestamp: globalVariables.timestamp,
+        blockNumber: globalVariables.blockNumber,
+        numTxs: txs.length,
+      },
+      blockEndStateField: {
+        l1ToL2MessageNextAvailableLeafIndex: l1ToL2MessageTree.nextAvailableLeafIndex,
+        noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+        nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+        publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+        totalManaUsed: totalManaUsed.toBigInt(),
+      },
+      lastArchiveRoot: lastArchive.root,
+      noteHashRoot: partial.noteHashTree.root,
+      nullifierRoot: partial.nullifierTree.root,
+      publicDataRoot: partial.publicDataTree.root,
+      l1ToL2MessageRoot: isFirstBlock ? l1ToL2MessageTree.root : undefined,
+      txs: body.toTxBlobData(),
+    });

-    const endSpongeBlob = startSpongeBlob
+    const endSpongeBlob = startSpongeBlob.clone();
     await endSpongeBlob.absorb(blockBlobFields);
     const spongeBlobHash = await endSpongeBlob.squeeze();

-    const header =
-
-
-      stateReference,
-      globalVariables,
-      fees,
-      manaUsed,
+    const header = BlockHeader.from({
+      lastArchive,
+      state: endState,
       spongeBlobHash,
-
-
-
-
-    );
-
-export const buildBlockHeaderFromTxs = runInSpan(
-  'BlockBuilderHelpers',
-  'buildBlockHeaderFromTxs',
-  async (
-    span,
-    txs: ProcessedTx[],
-    globalVariables: GlobalVariables,
-    startSpongeBlob: SpongeBlob,
-    db: MerkleTreeReadOperations,
-  ) => {
-    span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
-    const stateReference = new StateReference(
-      await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
-      new PartialStateReference(
-        await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
-        await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
-        await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
-      ),
-    );
-
-    const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-
-    const blobFields = getBlockBlobFields(txs.map(tx => tx.txEffect));
-    const endSpongeBlob = startSpongeBlob.clone();
-    await endSpongeBlob.absorb(blobFields);
-    const spongeBlobHash = await endSpongeBlob.squeeze();
-
-    const txEffects = txs.map(tx => tx.txEffect);
-    const fees = txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
-    const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+      globalVariables,
+      totalFees,
+      totalManaUsed,
+    });

-    return
+    return { header, body, blockBlobFields };
   },
 );

-/** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */
-export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages: Fr[]): Promise<Fr> {
-  const l1ToL2Messages = padArrayEnd<Fr, number>(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-  const hasher = (left: Buffer, right: Buffer) =>
-    Promise.resolve(sha256Trunc(Buffer.concat([left, right])) as Buffer<ArrayBuffer>);
-  const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
-  const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
-  return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map(msg => msg.toBuffer())));
-}
-
 export async function getLastSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
   const { size } = await db.getTreeInfo(treeId);
   const path = await db.getSiblingPath(treeId, size - 1n);
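With `buildBlockHeaderFromTxs` and the inline tree reads removed, `buildHeaderAndBodyFromTxs` is now a pure function of its inputs: the caller supplies the previous archive snapshot and the post-block `StateReference` instead of a db handle. A hedged call-site sketch (assuming, as with the other helpers in this file, that `runInSpan` injects the `span` argument so callers omit it):

```ts
// Hypothetical call site; `ctx` stands in for whatever state the orchestrator tracks.
import { buildHeaderAndBodyFromTxs } from './block-building-helpers.js';

async function buildBlock(ctx: any) {
  const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(
    ctx.txs,             // ProcessedTx[]
    ctx.lastArchive,     // AppendOnlyTreeSnapshot: archive before this block
    ctx.endState,        // StateReference: world state after all txs applied
    ctx.globalVariables, // GlobalVariables
    ctx.startSpongeBlob, // SpongeBlob: now required; cloned internally before absorbing
    ctx.isFirstBlock,    // boolean: gates l1ToL2MessageRoot in the blob data
  );
  return { header, body, blockBlobFields };
}
```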
@@ -425,48 +389,6 @@ export function makeEmptyMembershipWitness<N extends number>(height: N) {
   );
 }

-const processPublicDataUpdateRequests = runInSpan(
-  'BlockBuilderHelpers',
-  'processPublicDataUpdateRequests',
-  async (span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
-    span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
-    const allPublicDataWrites = tx.txEffect.publicDataWrites.map(
-      ({ leafSlot, value }) => new PublicDataTreeLeaf(leafSlot, value),
-    );
-
-    const { lowLeavesWitnessData, insertionWitnessData } = await db.sequentialInsert(
-      MerkleTreeId.PUBLIC_DATA_TREE,
-      allPublicDataWrites.map(write => {
-        if (write.isEmpty()) {
-          throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}`);
-        }
-        return write.toBuffer();
-      }),
-    );
-
-    const lowPublicDataWritesPreimages = lowLeavesWitnessData.map(
-      lowLeafWitness => lowLeafWitness.leafPreimage as PublicDataTreeLeafPreimage,
-    );
-    const lowPublicDataWritesMembershipWitnesses = lowLeavesWitnessData.map(lowLeafWitness =>
-      MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
-        lowLeafWitness.index,
-        assertLength(lowLeafWitness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
-      ),
-    );
-    const publicDataWritesSiblingPaths = insertionWitnessData.map(w => {
-      const insertionSiblingPath = w.siblingPath.toFields();
-      assertLength(insertionSiblingPath, PUBLIC_DATA_TREE_HEIGHT);
-      return insertionSiblingPath as Tuple<Fr, typeof PUBLIC_DATA_TREE_HEIGHT>;
-    });
-
-    return {
-      lowPublicDataWritesPreimages,
-      lowPublicDataWritesMembershipWitnesses,
-      publicDataWritesSiblingPaths,
-    };
-  },
-);
-
 export async function getSubtreeSiblingPath(
   treeId: MerkleTreeId,
   subtreeHeight: number,
package/src/orchestrator/block-proving-state.ts

@@ -1,4 +1,4 @@
-import type
+import { type BlockBlobData, type BlockEndBlobData, type SpongeBlob, encodeBlockEndBlobData } from '@aztec/blob-lib';
 import {
   type ARCHIVE_HEIGHT,
   type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH,
@@ -25,7 +25,7 @@ import {
 } from '@aztec/stdlib/rollup';
 import type { CircuitName } from '@aztec/stdlib/stats';
 import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
-import {
+import { BlockHeader, GlobalVariables, StateReference } from '@aztec/stdlib/tx';
 import type { UInt64 } from '@aztec/stdlib/types';

 import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-helpers.js';
@@ -54,6 +54,7 @@ export class BlockProvingState {
     | ProofState<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>
     | undefined;
   private builtBlockHeader: BlockHeader | undefined;
+  private endState: StateReference | undefined;
   private endSpongeBlob: SpongeBlob | undefined;
   private txs: TxProvingState[] = [];
   private isFirstBlock: boolean;
@@ -200,30 +201,34 @@ export class BlockProvingState {
     return this.blockRootProof?.provingOutput?.inputs;
   }

-  public
-    this.
-
+  public async buildBlockHeader() {
+    if (this.isAcceptingTxs()) {
+      throw new Error('All txs must be added to the block before building the header.');
+    }
+    if (!this.endState) {
+      throw new Error('Call `setEndState` first.');
+    }
+    if (!this.endSpongeBlob) {
+      throw new Error('Call `setEndSpongeBlob` first.');
+    }
+
+    const endSpongeBlob = this.endSpongeBlob.clone();
+    const endSpongeBlobHash = await endSpongeBlob.squeeze();
+
+    this.builtBlockHeader = new BlockHeader(
+      this.lastArchiveTreeSnapshot,
+      this.endState,
+      endSpongeBlobHash,
+      this.#getGlobalVariables(),
+      this.#getTotalFees(),
+      new Fr(this.#getTotalManaUsed()),
+    );

-  public getBuiltBlockHeader() {
     return this.builtBlockHeader;
   }

-  public
-
-    return this.txs[0].processedTx.globalVariables;
-  }
-
-    const constants = this.constants;
-    return GlobalVariables.from({
-      chainId: constants.chainId,
-      version: constants.version,
-      blockNumber: this.blockNumber,
-      slotNumber: constants.slotNumber,
-      timestamp: this.timestamp,
-      coinbase: constants.coinbase,
-      feeRecipient: constants.feeRecipient,
-      gasFees: constants.gasFees,
-    });
+  public getBuiltBlockHeader() {
+    return this.builtBlockHeader;
   }

   public getStartSpongeBlob() {
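`buildBlockHeader` is now async and enforces an explicit lifecycle on `BlockProvingState`. A sketch of the assumed call order: `setEndState` is introduced in the next hunk, and `setEndSpongeBlob` is inferred from the guard message above; the wrapper function itself is hypothetical.

```ts
// Assumed lifecycle for the new header-building flow on a BlockProvingState.
async function finalizeHeader(block: any, endState: any, endSpongeBlob: any) {
  // 1. All txs must already be added: isAcceptingTxs() has to return false.
  // 2. Provide the post-block state and the sponge that absorbed the block's blob fields.
  block.setEndState(endState);
  block.setEndSpongeBlob(endSpongeBlob);
  // 3. Squeeze the sponge and assemble the BlockHeader.
  const header = await block.buildBlockHeader();
  return header; // also cached: getBuiltBlockHeader() returns the same object
}
```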
@@ -238,6 +243,52 @@ export class BlockProvingState {
     return this.endSpongeBlob;
   }

+  public setEndState(endState: StateReference) {
+    this.endState = endState;
+  }
+
+  public hasEndState() {
+    return !!this.endState;
+  }
+
+  public getBlockEndBlobFields(): Fr[] {
+    return encodeBlockEndBlobData(this.getBlockEndBlobData());
+  }
+
+  getBlockEndBlobData(): BlockEndBlobData {
+    if (!this.endState) {
+      throw new Error('Call `setEndState` first.');
+    }
+
+    const partial = this.endState.partial;
+    return {
+      blockEndMarker: {
+        numTxs: this.totalNumTxs,
+        timestamp: this.timestamp,
+        blockNumber: this.blockNumber,
+      },
+      blockEndStateField: {
+        l1ToL2MessageNextAvailableLeafIndex: this.newL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex,
+        noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+        nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+        publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+        totalManaUsed: this.#getTotalManaUsed(),
+      },
+      lastArchiveRoot: this.lastArchiveTreeSnapshot.root,
+      noteHashRoot: partial.noteHashTree.root,
+      nullifierRoot: partial.nullifierTree.root,
+      publicDataRoot: partial.publicDataTree.root,
+      l1ToL2MessageRoot: this.isFirstBlock ? this.newL1ToL2MessageTreeSnapshot.root : undefined,
+    };
+  }
+
+  public getBlockBlobData(): BlockBlobData {
+    return {
+      ...this.getBlockEndBlobData(),
+      txs: this.getTxEffects().map(t => t.toTxBlobData()),
+    };
+  }
+
   public getTxEffects() {
     return this.txs.map(t => t.processedTx.txEffect);
   }
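The three new getters compose: `getBlockEndBlobData` carries the end-of-block marker, end-state leaf indices, and tree roots; `getBlockBlobData` is the same plus per-tx blob data; and `getBlockEndBlobFields` is the encoded field form. A shape sketch with simplified field types (the real `BlockEndBlobData`/`BlockBlobData` types live in `@aztec/blob-lib`):

```ts
// Simplified shape sketch of the blob data returned by the new getters.
type BlockEndBlobDataSketch = {
  blockEndMarker: { numTxs: number; timestamp: unknown; blockNumber: number };
  blockEndStateField: {
    l1ToL2MessageNextAvailableLeafIndex: number;
    noteHashNextAvailableLeafIndex: number;
    nullifierNextAvailableLeafIndex: number;
    publicDataNextAvailableLeafIndex: number;
    totalManaUsed: bigint;
  };
  lastArchiveRoot: unknown;
  noteHashRoot: unknown;
  nullifierRoot: unknown;
  publicDataRoot: unknown;
  l1ToL2MessageRoot?: unknown; // only set for the first block of a checkpoint
};
type BlockBlobDataSketch = BlockEndBlobDataSketch & { txs: unknown[] };
```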
@@ -392,4 +443,31 @@ export class BlockProvingState {
       ? [this.baseOrMergeProofs.getNode(rootLocation)?.provingOutput]
       : this.baseOrMergeProofs.getChildren(rootLocation).map(c => c?.provingOutput);
   }
+
+  #getGlobalVariables() {
+    if (this.txs.length) {
+      return this.txs[0].processedTx.globalVariables;
+    }
+
+    const constants = this.constants;
+
+    return GlobalVariables.from({
+      chainId: constants.chainId,
+      version: constants.version,
+      blockNumber: this.blockNumber,
+      slotNumber: constants.slotNumber,
+      timestamp: this.timestamp,
+      coinbase: constants.coinbase,
+      feeRecipient: constants.feeRecipient,
+      gasFees: constants.gasFees,
+    });
+  }
+
+  #getTotalFees() {
+    return this.txs.reduce((acc, tx) => acc.add(tx.processedTx.txEffect.transactionFee), Fr.ZERO);
+  }
+
+  #getTotalManaUsed() {
+    return this.txs.reduce((acc, tx) => acc + BigInt(tx.processedTx.gasUsed.billedGas.l2Gas), 0n);
+  }
 }
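Note the accumulator types in these private helpers: `#getTotalFees` folds field elements, while `#getTotalManaUsed` accumulates a native `bigint` and is wrapped in `Fr` only at the header boundary (see `buildBlockHeader` above). In isolation:

```ts
// Minimal illustration of the bigint-first mana accumulation.
const billedL2Gas = [1_000, 2_500, 400];
const totalManaUsed = billedL2Gas.reduce((acc, g) => acc + BigInt(g), 0n);
console.log(totalManaUsed); // 3900n — converted to a field element only where required
```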
package/src/orchestrator/checkpoint-proving-state.ts

@@ -1,4 +1,9 @@
-import {
+import {
+  BatchedBlobAccumulator,
+  type FinalBlobBatchingChallenges,
+  SpongeBlob,
+  encodeCheckpointBlobData,
+} from '@aztec/blob-lib';
 import {
   type ARCHIVE_HEIGHT,
   BLOBS_PER_BLOCK,
@@ -11,7 +16,6 @@ import { padArrayEnd } from '@aztec/foundation/collection';
 import { BLS12Point, Fr } from '@aztec/foundation/fields';
 import type { Tuple } from '@aztec/foundation/serialize';
 import { type TreeNodeLocation, UnbalancedTreeStore } from '@aztec/foundation/trees';
-import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
 import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/server';
 import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
 import {
@@ -192,12 +196,15 @@ export class CheckpointProvingState {
   }

   public async accumulateBlobs(startBlobAccumulator: BatchedBlobAccumulator) {
-    if (this.isAcceptingBlocks() || this.blocks.some(b => b
+    if (this.isAcceptingBlocks() || this.blocks.some(b => !b?.hasEndState())) {
       return;
     }

-    this.blobFields =
-
+    this.blobFields = encodeCheckpointBlobData({
+      totalNumBlobFields: this.totalNumBlobFields,
+      blocks: this.blocks.map(b => b!.getBlockBlobData()),
+    });
+    this.endBlobAccumulator = await accumulateBlobs(this.blobFields!, startBlobAccumulator);
     this.startBlobAccumulator = startBlobAccumulator;

     this.onBlobAccumulatorSet(this);