@aztec/prover-client 3.0.0-nightly.20251113 → 3.0.0-nightly.20251115
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/block-factory/light.d.ts +6 -6
- package/dest/block-factory/light.d.ts.map +1 -1
- package/dest/block-factory/light.js +35 -22
- package/dest/light/lightweight_checkpoint_builder.d.ts +29 -0
- package/dest/light/lightweight_checkpoint_builder.d.ts.map +1 -0
- package/dest/light/lightweight_checkpoint_builder.js +107 -0
- package/dest/mocks/fixtures.d.ts +0 -3
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +1 -12
- package/dest/mocks/test_context.d.ts +26 -44
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +101 -112
- package/dest/orchestrator/block-building-helpers.d.ts +12 -14
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +81 -105
- package/dest/orchestrator/block-proving-state.d.ts +9 -4
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +80 -19
- package/dest/orchestrator/checkpoint-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/checkpoint-proving-state.js +6 -4
- package/dest/orchestrator/orchestrator.d.ts +0 -1
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +15 -23
- package/package.json +15 -15
- package/src/block-factory/light.ts +43 -42
- package/src/light/lightweight_checkpoint_builder.ts +141 -0
- package/src/mocks/fixtures.ts +1 -25
- package/src/mocks/test_context.ts +141 -174
- package/src/orchestrator/block-building-helpers.ts +120 -198
- package/src/orchestrator/block-proving-state.ts +100 -22
- package/src/orchestrator/checkpoint-proving-state.ts +12 -5
- package/src/orchestrator/orchestrator.ts +18 -25
@@ -1,21 +1,19 @@
-import { BatchedBlob,
-import { ARCHIVE_HEIGHT, CHONK_PROOF_LENGTH, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT,
+import { BatchedBlob, computeBlobsHashFromBlobs, encodeBlockBlobData, getBlobCommitmentsFromBlobs, getBlobsPerL1Block } from '@aztec/blob-lib';
+import { ARCHIVE_HEIGHT, CHONK_PROOF_LENGTH, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH, NULLIFIER_TREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT } from '@aztec/constants';
 import { makeTuple } from '@aztec/foundation/array';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { sha256Trunc } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
 import { assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
-import { MembershipWitness
+import { MembershipWitness } from '@aztec/foundation/trees';
 import { getVkData } from '@aztec/noir-protocol-circuits-types/server/vks';
 import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { computeFeePayerBalanceLeafSlot } from '@aztec/protocol-contracts/fee-juice';
-import { Body
-import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
+import { Body } from '@aztec/stdlib/block';
 import { ContractClassLogFields } from '@aztec/stdlib/logs';
 import { Proof, ProofData, RecursiveProof } from '@aztec/stdlib/proofs';
 import { BlockConstantData, PrivateBaseRollupHints, PublicBaseRollupHints, PublicChonkVerifierPrivateInputs, TreeSnapshotDiffHints } from '@aztec/stdlib/rollup';
-import { AppendOnlyTreeSnapshot, MerkleTreeId, NullifierLeafPreimage,
-import { BlockHeader,
+import { AppendOnlyTreeSnapshot, MerkleTreeId, NullifierLeafPreimage, getTreeHeight } from '@aztec/stdlib/trees';
+import { BlockHeader, GlobalVariables, PartialStateReference } from '@aztec/stdlib/tx';
 import { VkData } from '@aztec/stdlib/vks';
 import { Attributes, runInSpan } from '@aztec/telemetry-client';
 // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
@@ -25,18 +23,7 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan('BlockBuilderH
 const start = new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db));
 // Get the note hash subtree root sibling path for insertion.
 const noteHashSubtreeRootSiblingPath = assertLength(await getSubtreeSiblingPath(MerkleTreeId.NOTE_HASH_TREE, NOTE_HASH_SUBTREE_HEIGHT, db), NOTE_HASH_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
-
-// that will be used by the next iteration of the base rollup circuit, skipping the empty ones
-const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
-await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
-// The read witnesses for a given TX should be generated before the writes of the same TX are applied.
-// All reads that refer to writes in the same tx are transient and can be simplified out.
-const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
-// Update the nullifier tree, capturing the low nullifier info for each individual operation
-const { lowLeavesWitnessData: nullifierWitnessLeaves, newSubtreeSiblingPath: nullifiersSubtreeRootSiblingPath, sortedNewLeaves: sortedNullifiers, sortedNewLeavesIndexes } = await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map((n)=>n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT);
-if (nullifierWitnessLeaves === undefined) {
-throw new Error(`Could not craft nullifier batch insertion proofs`);
-}
+const { nullifierInsertionResult, publicDataInsertionResult } = await insertSideEffects(tx, db);
 const blockHash = await tx.data.constants.anchorBlockHeader.hash();
 const anchorBlockArchiveSiblingPath = (await getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db)).siblingPath;
 const contractClassLogsFields = makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_TX, (i)=>tx.txEffect.contractClassLogs[i]?.fields || ContractClassLogFields.empty());
@@ -48,25 +35,31 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan('BlockBuilderH
 contractClassLogsFields
 });
 } else {
-if (
+if (tx.txEffect.publicDataWrites.length > 1) {
 throw new Error(`More than one public data write in a private only tx`);
 }
 // Get hints for reading fee payer's balance in the public data tree.
-const
-const
+const feePayerBalanceLeafWitnessData = publicDataInsertionResult.lowLeavesWitnessData[0];
+const feePayerBalanceMembershipWitness = MembershipWitness.fromBufferArray(feePayerBalanceLeafWitnessData.index, assertLength(feePayerBalanceLeafWitnessData.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT));
+const feePayerBalanceLeafPreimage = feePayerBalanceLeafWitnessData.leafPreimage;
 const leafSlot = await computeFeePayerBalanceLeafSlot(tx.data.feePayer);
-if (!
+if (!leafSlot.equals(feePayerBalanceLeafPreimage.leaf.slot)) {
 throw new Error(`Cannot find the public data tree leaf for the fee payer's balance`);
 }
-//
-const
+// Get hints for inserting the nullifiers.
+const nullifierLowLeavesWitnessData = nullifierInsertionResult.lowLeavesWitnessData;
+const nullifierPredecessorPreimages = padArrayEnd(nullifierLowLeavesWitnessData.map((l)=>l.leafPreimage), NullifierLeafPreimage.empty(), MAX_NULLIFIERS_PER_TX);
+const nullifierPredecessorMembershipWitnesses = padArrayEnd(nullifierLowLeavesWitnessData.map((l)=>MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT))), makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), MAX_NULLIFIERS_PER_TX);
+const sortedNullifiers = assertLength(nullifierInsertionResult.sortedNewLeaves.map((n)=>Fr.fromBuffer(n)), MAX_NULLIFIERS_PER_TX);
+const sortedNullifierIndexes = assertLength(nullifierInsertionResult.sortedNewLeavesIndexes, MAX_NULLIFIERS_PER_TX);
+const nullifierSubtreeRootSiblingPath = assertLength(nullifierInsertionResult.newSubtreeSiblingPath.toFields(), NULLIFIER_SUBTREE_ROOT_SIBLING_PATH_LENGTH);
 const treeSnapshotDiffHints = TreeSnapshotDiffHints.from({
 noteHashSubtreeRootSiblingPath,
-nullifierPredecessorPreimages
-nullifierPredecessorMembershipWitnesses
-sortedNullifiers
-sortedNullifierIndexes
-nullifierSubtreeRootSiblingPath
+nullifierPredecessorPreimages,
+nullifierPredecessorMembershipWitnesses,
+sortedNullifiers,
+sortedNullifierIndexes,
+nullifierSubtreeRootSiblingPath,
 feePayerBalanceMembershipWitness
 });
 const constants = BlockConstantData.from({
@@ -88,6 +81,28 @@ export const insertSideEffectsAndBuildBaseRollupHints = runInSpan('BlockBuilderH
 });
 }
 });
+export const insertSideEffects = runInSpan('BlockBuilderHelpers', 'buildBaseRollupHints', async (span, tx, db)=>{
+span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
+// Insert the note hashes. Padded with zeros to the max number of note hashes per tx.
+const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
+await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
+// Insert the nullifiers. Padded with zeros to the max number of nullifiers per tx.
+// Capturing the low nullifier info for each individual operation.
+const nullifierInsertionResult = await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map((n)=>n.toBuffer()), NULLIFIER_SUBTREE_HEIGHT);
+if (nullifierInsertionResult.lowLeavesWitnessData === undefined) {
+throw new Error(`Failed to batch insert nullifiers.`);
+}
+if (tx.txEffect.publicDataWrites.some((write)=>write.isEmpty())) {
+throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}.`);
+}
+// Insert the public data writes sequentially. No need to pad them to the max array size.
+// Capturing the low leaf info for each individual operation.
+const publicDataInsertionResult = await db.sequentialInsert(MerkleTreeId.PUBLIC_DATA_TREE, tx.txEffect.publicDataWrites.map((write)=>write.toBuffer()));
+return {
+nullifierInsertionResult,
+publicDataInsertionResult
+};
+});
 export function getChonkProofFromTx(tx) {
 const publicInputs = tx.data.publicInputs().toFields();
 const binaryProof = new Proof(Buffer.concat(tx.chonkProof.attachPublicInputs(publicInputs).fieldsWithPublicInputs.map((field)=>field.toBuffer())), publicInputs.length);
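The hunks above restructure base rollup hint building: the tree updates (note hash appends, nullifier batch insertion, and the public data writes previously handled by processPublicDataUpdateRequests, which is removed further down) now live in the new exported insertSideEffects helper, and the hint code consumes its witness results. The sketch below shows the consumer side using only identifiers visible in this diff; the loose typing of tx and db and the package-internal import path are assumptions, not the package's declared API.

// Assumed package-internal import path, mirroring how orchestrator.js imports these helpers.
import { insertSideEffects } from './block-building-helpers.js';

async function collectInsertionWitnesses(tx: any, db: any) {
  // Appends note hashes, batch-inserts nullifiers (db.batchInsert) and
  // sequentially inserts public data writes (db.sequentialInsert).
  const { nullifierInsertionResult, publicDataInsertionResult } = await insertSideEffects(tx, db);
  return {
    // Low-leaf witnesses feeding the nullifier predecessor hints.
    nullifierLowLeaves: nullifierInsertionResult.lowLeavesWitnessData,
    // For a private-only tx, the single public data low leaf is the fee payer's balance leaf.
    feePayerBalanceLeaf: publicDataInsertionResult.lowLeavesWitnessData[0],
  };
}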
@@ -110,17 +125,6 @@ export const buildBlobHints = (blobFields)=>{
 blobsHash
 };
 };
-// Build the data required to prove the txs in an epoch. Currently only used in tests. It assumes 1 block per checkpoint.
-export const buildBlobDataFromTxs = async (txsPerCheckpoint)=>{
-const blobFields = txsPerCheckpoint.map((txs)=>getCheckpointBlobFields([
-txs.map((tx)=>tx.txEffect)
-]));
-const finalBlobChallenges = await buildFinalBlobChallenges(blobFields);
-return {
-blobFieldsLengths: blobFields.map((fields)=>fields.length),
-finalBlobChallenges
-};
-};
 export const buildFinalBlobChallenges = async (blobFieldsPerCheckpoint)=>{
 const blobs = blobFieldsPerCheckpoint.map((blobFields)=>getBlobsPerL1Block(blobFields));
 return await BatchedBlob.precomputeBatchedBlobChallenges(blobs);
@@ -145,56 +149,50 @@ export const buildHeaderFromCircuitOutputs = runInSpan('BlockBuilderHelpers', 'b
 const spongeBlobHash = await blockRootRollupOutput.endSpongeBlob.clone().squeeze();
 return new BlockHeader(blockRootRollupOutput.previousArchive, blockRootRollupOutput.endState, spongeBlobHash, globalVariables, blockRootRollupOutput.accumulatedFees, blockRootRollupOutput.accumulatedManaUsed);
 });
-export const buildHeaderAndBodyFromTxs = runInSpan('BlockBuilderHelpers', 'buildHeaderAndBodyFromTxs', async (span, txs,
+export const buildHeaderAndBodyFromTxs = runInSpan('BlockBuilderHelpers', 'buildHeaderAndBodyFromTxs', async (span, txs, lastArchive, endState, globalVariables, startSpongeBlob, isFirstBlock)=>{
 span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
-const stateReference = new StateReference(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db)));
-const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
 const txEffects = txs.map((tx)=>tx.txEffect);
 const body = new Body(txEffects);
-const
-const
-const
-const blockBlobFields =
-
-
-
-
-
-
-
-
-
+const totalFees = txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
+const totalManaUsed = txs.reduce((acc, tx)=>acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
+const { l1ToL2MessageTree, partial } = endState;
+const blockBlobFields = encodeBlockBlobData({
+blockEndMarker: {
+timestamp: globalVariables.timestamp,
+blockNumber: globalVariables.blockNumber,
+numTxs: txs.length
+},
+blockEndStateField: {
+l1ToL2MessageNextAvailableLeafIndex: l1ToL2MessageTree.nextAvailableLeafIndex,
+noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+totalManaUsed: totalManaUsed.toBigInt()
+},
+lastArchiveRoot: lastArchive.root,
+noteHashRoot: partial.noteHashTree.root,
+nullifierRoot: partial.nullifierTree.root,
+publicDataRoot: partial.publicDataTree.root,
+l1ToL2MessageRoot: isFirstBlock ? l1ToL2MessageTree.root : undefined,
+txs: body.toTxBlobData()
+});
+const endSpongeBlob = startSpongeBlob.clone();
 await endSpongeBlob.absorb(blockBlobFields);
 const spongeBlobHash = await endSpongeBlob.squeeze();
-const header =
+const header = BlockHeader.from({
+lastArchive,
+state: endState,
+spongeBlobHash,
+globalVariables,
+totalFees,
+totalManaUsed
+});
 return {
 header,
-body
+body,
+blockBlobFields
 };
 });
-export const buildBlockHeaderFromTxs = runInSpan('BlockBuilderHelpers', 'buildBlockHeaderFromTxs', async (span, txs, globalVariables, startSpongeBlob, db)=>{
-span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber);
-const stateReference = new StateReference(await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), new PartialStateReference(await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db)));
-const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
-const blobFields = getBlockBlobFields(txs.map((tx)=>tx.txEffect));
-const endSpongeBlob = startSpongeBlob.clone();
-await endSpongeBlob.absorb(blobFields);
-const spongeBlobHash = await endSpongeBlob.squeeze();
-const txEffects = txs.map((tx)=>tx.txEffect);
-const fees = txEffects.reduce((acc, tx)=>acc.add(tx.transactionFee), Fr.ZERO);
-const manaUsed = txs.reduce((acc, tx)=>acc.add(new Fr(tx.gasUsed.billedGas.l2Gas)), Fr.ZERO);
-return new BlockHeader(previousArchive, stateReference, spongeBlobHash, globalVariables, fees, manaUsed);
-});
-/** Computes the inHash for a block's ContentCommitment given its l1 to l2 messages. */ export async function computeInHashFromL1ToL2Messages(unpaddedL1ToL2Messages) {
-const l1ToL2Messages = padArrayEnd(unpaddedL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-const hasher = (left, right)=>Promise.resolve(sha256Trunc(Buffer.concat([
-left,
-right
-])));
-const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
-const parityCalculator = await MerkleTreeCalculator.create(parityHeight, Fr.ZERO.toBuffer(), hasher);
-return new Fr(await parityCalculator.computeTreeRoot(l1ToL2Messages.map((msg)=>msg.toBuffer())));
-}
 export async function getLastSiblingPath(treeId, db) {
 const { size } = await db.getTreeInfo(treeId);
 const path = await db.getSiblingPath(treeId, size - 1n);
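The hunk above also changes how block headers are produced: buildHeaderAndBodyFromTxs no longer reads tree snapshots from a merkle db, taking the last archive snapshot, the end state reference, global variables, the starting sponge blob and an isFirstBlock flag instead, and it now returns the encoded blockBlobFields alongside the header and body; buildBlockHeaderFromTxs and computeInHashFromL1ToL2Messages are removed outright. A hedged sketch of the new call shape follows; the parameter types are inferred from how the values are used in the compiled output, and the element type of txs is left loose because it is not visible here.

import type { SpongeBlob } from '@aztec/blob-lib';
import type { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
import type { GlobalVariables, StateReference } from '@aztec/stdlib/tx';
// Assumed package-internal import path, mirroring orchestrator.js.
import { buildHeaderAndBodyFromTxs } from './block-building-helpers.js';

async function assembleBlock(
  txs: any[],                        // ProcessedTx-like objects with txEffect and gasUsed (assumption)
  lastArchive: AppendOnlyTreeSnapshot,
  endState: StateReference,          // supplies l1ToL2MessageTree and the partial state trees
  globalVariables: GlobalVariables,
  startSpongeBlob: SpongeBlob,
  isFirstBlock: boolean,
) {
  // The span argument appears to be injected by runInSpan, so callers pass only these six values.
  const { header, body, blockBlobFields } = await buildHeaderAndBodyFromTxs(
    txs, lastArchive, endState, globalVariables, startSpongeBlob, isFirstBlock,
  );
  return { header, body, blockBlobFields };
}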
@@ -212,28 +210,6 @@ export async function getTreeSnapshot(id, db) {
 export function makeEmptyMembershipWitness(height) {
 return new MembershipWitness(height, 0n, makeTuple(height, ()=>Fr.ZERO));
 }
-const processPublicDataUpdateRequests = runInSpan('BlockBuilderHelpers', 'processPublicDataUpdateRequests', async (span, tx, db)=>{
-span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
-const allPublicDataWrites = tx.txEffect.publicDataWrites.map(({ leafSlot, value })=>new PublicDataTreeLeaf(leafSlot, value));
-const { lowLeavesWitnessData, insertionWitnessData } = await db.sequentialInsert(MerkleTreeId.PUBLIC_DATA_TREE, allPublicDataWrites.map((write)=>{
-if (write.isEmpty()) {
-throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}`);
-}
-return write.toBuffer();
-}));
-const lowPublicDataWritesPreimages = lowLeavesWitnessData.map((lowLeafWitness)=>lowLeafWitness.leafPreimage);
-const lowPublicDataWritesMembershipWitnesses = lowLeavesWitnessData.map((lowLeafWitness)=>MembershipWitness.fromBufferArray(lowLeafWitness.index, assertLength(lowLeafWitness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT)));
-const publicDataWritesSiblingPaths = insertionWitnessData.map((w)=>{
-const insertionSiblingPath = w.siblingPath.toFields();
-assertLength(insertionSiblingPath, PUBLIC_DATA_TREE_HEIGHT);
-return insertionSiblingPath;
-});
-return {
-lowPublicDataWritesPreimages,
-lowPublicDataWritesMembershipWitnesses,
-publicDataWritesSiblingPaths
-};
-});
 export async function getSubtreeSiblingPath(treeId, subtreeHeight, db) {
 const nextAvailableLeafIndex = await db.getTreeInfo(treeId).then((t)=>t.size);
 const fullSiblingPath = await db.getSiblingPath(treeId, nextAvailableLeafIndex);
@@ -1,4 +1,4 @@
-import type
+import { type BlockBlobData, type BlockEndBlobData, type SpongeBlob } from '@aztec/blob-lib';
 import { type ARCHIVE_HEIGHT, type L1_TO_L2_MSG_SUBTREE_ROOT_SIBLING_PATH_LENGTH, type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH } from '@aztec/constants';
 import { Fr } from '@aztec/foundation/fields';
 import { type Tuple } from '@aztec/foundation/serialize';
@@ -7,7 +7,7 @@ import type { PublicInputsAndRecursiveProof } from '@aztec/stdlib/interfaces/ser
 import { type ParityPublicInputs, ParityRootPrivateInputs } from '@aztec/stdlib/parity';
 import { BlockRollupPublicInputs, BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointConstantData, TxMergeRollupPrivateInputs, type TxRollupPublicInputs } from '@aztec/stdlib/rollup';
 import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
-import {
+import { BlockHeader, StateReference } from '@aztec/stdlib/tx';
 import type { UInt64 } from '@aztec/stdlib/types';
 import type { CheckpointProvingState } from './checkpoint-proving-state.js';
 import type { TxProvingState } from './tx-proving-state.js';
@@ -39,6 +39,7 @@ export declare class BlockProvingState {
 private rootParityProof;
 private blockRootProof;
 private builtBlockHeader;
+private endState;
 private endSpongeBlob;
 private txs;
 private isFirstBlock;
@@ -59,12 +60,16 @@ export declare class BlockProvingState {
 tryStartProvingBlockRoot(): boolean;
 setBlockRootRollupProof(provingOutput: PublicInputsAndRecursiveProof<BlockRollupPublicInputs, typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>): TreeNodeLocation;
 getBlockRootRollupOutput(): BlockRollupPublicInputs | undefined;
-
+buildBlockHeader(): Promise<BlockHeader>;
 getBuiltBlockHeader(): BlockHeader | undefined;
-getGlobalVariables(): GlobalVariables;
 getStartSpongeBlob(): SpongeBlob;
 setEndSpongeBlob(endSpongeBlob: SpongeBlob): void;
 getEndSpongeBlob(): SpongeBlob | undefined;
+setEndState(endState: StateReference): void;
+hasEndState(): boolean;
+getBlockEndBlobFields(): Fr[];
+getBlockEndBlobData(): BlockEndBlobData;
+getBlockBlobData(): BlockBlobData;
 getTxEffects(): import("@aztec/stdlib/tx").TxEffect[];
 getParentLocation(location: TreeNodeLocation): TreeNodeLocation;
 getMergeRollupInputs(mergeLocation: TreeNodeLocation): TxMergeRollupPrivateInputs;
@@ -1,9 +1,11 @@
+import { encodeBlockEndBlobData } from '@aztec/blob-lib';
 import { NUM_BASE_PARITY_PER_ROOT_PARITY } from '@aztec/constants';
+import { Fr } from '@aztec/foundation/fields';
 import { assertLength } from '@aztec/foundation/serialize';
 import { UnbalancedTreeStore } from '@aztec/foundation/trees';
 import { ParityRootPrivateInputs } from '@aztec/stdlib/parity';
 import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, TxMergeRollupPrivateInputs } from '@aztec/stdlib/rollup';
-import { GlobalVariables } from '@aztec/stdlib/tx';
+import { BlockHeader, GlobalVariables } from '@aztec/stdlib/tx';
 import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-helpers.js';
 /**
 * The current state of the proving schedule for a given block. Managed by ProvingState.
@@ -27,6 +29,7 @@ import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-hel
 rootParityProof;
 blockRootProof;
 builtBlockHeader;
+endState;
 endSpongeBlob;
 txs;
 isFirstBlock;
@@ -157,28 +160,24 @@ import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-hel
 getBlockRootRollupOutput() {
 return this.blockRootProof?.provingOutput?.inputs;
 }
-
-this.
+async buildBlockHeader() {
+if (this.isAcceptingTxs()) {
+throw new Error('All txs must be added to the block before building the header.');
+}
+if (!this.endState) {
+throw new Error('Call `setEndState` first.');
+}
+if (!this.endSpongeBlob) {
+throw new Error('Call `setEndSpongeBlob` first.');
+}
+const endSpongeBlob = this.endSpongeBlob.clone();
+const endSpongeBlobHash = await endSpongeBlob.squeeze();
+this.builtBlockHeader = new BlockHeader(this.lastArchiveTreeSnapshot, this.endState, endSpongeBlobHash, this.#getGlobalVariables(), this.#getTotalFees(), new Fr(this.#getTotalManaUsed()));
+return this.builtBlockHeader;
 }
 getBuiltBlockHeader() {
 return this.builtBlockHeader;
 }
-getGlobalVariables() {
-if (this.txs.length) {
-return this.txs[0].processedTx.globalVariables;
-}
-const constants = this.constants;
-return GlobalVariables.from({
-chainId: constants.chainId,
-version: constants.version,
-blockNumber: this.blockNumber,
-slotNumber: constants.slotNumber,
-timestamp: this.timestamp,
-coinbase: constants.coinbase,
-feeRecipient: constants.feeRecipient,
-gasFees: constants.gasFees
-});
-}
 getStartSpongeBlob() {
 return this.startSpongeBlob;
 }
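The hunk above replaces the public getGlobalVariables accessor with an async buildBlockHeader that assembles the header entirely from state the proving state already holds; its guard clauses define the required call order. The sketch below illustrates that order; provingState is typed loosely because BlockProvingState instances are constructed internally by the orchestrator, and the helper name here is hypothetical.

import type { SpongeBlob } from '@aztec/blob-lib';
import type { StateReference } from '@aztec/stdlib/tx';

// Call order implied by the guards in buildBlockHeader() above.
async function finalizeBlockHeader(provingState: any, endState: StateReference, endSpongeBlob: SpongeBlob) {
  // All txs must already be added, i.e. provingState.isAcceptingTxs() === false.
  provingState.setEndState(endState);            // otherwise: "Call `setEndState` first."
  provingState.setEndSpongeBlob(endSpongeBlob);  // otherwise: "Call `setEndSpongeBlob` first."
  const header = await provingState.buildBlockHeader(); // also cached for getBuiltBlockHeader()
  return header;
}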
@@ -188,6 +187,46 @@ import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-hel
 getEndSpongeBlob() {
 return this.endSpongeBlob;
 }
+setEndState(endState) {
+this.endState = endState;
+}
+hasEndState() {
+return !!this.endState;
+}
+getBlockEndBlobFields() {
+return encodeBlockEndBlobData(this.getBlockEndBlobData());
+}
+getBlockEndBlobData() {
+if (!this.endState) {
+throw new Error('Call `setEndState` first.');
+}
+const partial = this.endState.partial;
+return {
+blockEndMarker: {
+numTxs: this.totalNumTxs,
+timestamp: this.timestamp,
+blockNumber: this.blockNumber
+},
+blockEndStateField: {
+l1ToL2MessageNextAvailableLeafIndex: this.newL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex,
+noteHashNextAvailableLeafIndex: partial.noteHashTree.nextAvailableLeafIndex,
+nullifierNextAvailableLeafIndex: partial.nullifierTree.nextAvailableLeafIndex,
+publicDataNextAvailableLeafIndex: partial.publicDataTree.nextAvailableLeafIndex,
+totalManaUsed: this.#getTotalManaUsed()
+},
+lastArchiveRoot: this.lastArchiveTreeSnapshot.root,
+noteHashRoot: partial.noteHashTree.root,
+nullifierRoot: partial.nullifierTree.root,
+publicDataRoot: partial.publicDataTree.root,
+l1ToL2MessageRoot: this.isFirstBlock ? this.newL1ToL2MessageTreeSnapshot.root : undefined
+};
+}
+getBlockBlobData() {
+return {
+...this.getBlockEndBlobData(),
+txs: this.getTxEffects().map((t)=>t.toTxBlobData())
+};
+}
 getTxEffects() {
 return this.txs.map((t)=>t.processedTx.txEffect);
 }
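The new getters above expose the block's blob data in two layers: getBlockEndBlobData returns the end-of-block marker, tree roots and next-available leaf indices, getBlockEndBlobFields encodes that via encodeBlockEndBlobData, and getBlockBlobData adds the per-tx blob data. A small sketch of how the three relate; the BlockEndBlobData and BlockBlobData types come from @aztec/blob-lib as shown in the .d.ts hunk earlier, provingState is typed loosely, and the wrapper function is hypothetical.

import { encodeBlockEndBlobData, type BlockBlobData, type BlockEndBlobData } from '@aztec/blob-lib';
import type { Fr } from '@aztec/foundation/fields';

function blobViewsOfBlock(provingState: any) {
  const endData: BlockEndBlobData = provingState.getBlockEndBlobData();  // marker + roots + next leaf indices
  const endFields: Fr[] = encodeBlockEndBlobData(endData);               // equals provingState.getBlockEndBlobFields()
  const fullData: BlockBlobData = provingState.getBlockBlobData();       // endData plus txs: txEffect.toTxBlobData()
  return { endData, endFields, fullData };
}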
@@ -310,4 +349,26 @@ import { buildHeaderFromCircuitOutputs, toProofData } from './block-building-hel
 this.baseOrMergeProofs.getNode(rootLocation)?.provingOutput
 ] : this.baseOrMergeProofs.getChildren(rootLocation).map((c)=>c?.provingOutput);
 }
+#getGlobalVariables() {
+if (this.txs.length) {
+return this.txs[0].processedTx.globalVariables;
+}
+const constants = this.constants;
+return GlobalVariables.from({
+chainId: constants.chainId,
+version: constants.version,
+blockNumber: this.blockNumber,
+slotNumber: constants.slotNumber,
+timestamp: this.timestamp,
+coinbase: constants.coinbase,
+feeRecipient: constants.feeRecipient,
+gasFees: constants.gasFees
+});
+}
+#getTotalFees() {
+return this.txs.reduce((acc, tx)=>acc.add(tx.processedTx.txEffect.transactionFee), Fr.ZERO);
+}
+#getTotalManaUsed() {
+return this.txs.reduce((acc, tx)=>acc + BigInt(tx.processedTx.gasUsed.billedGas.l2Gas), 0n);
+}
 }
@@ -1,9 +1,8 @@
-import { SpongeBlob } from '@aztec/blob-lib';
+import { SpongeBlob, encodeCheckpointBlobData } from '@aztec/blob-lib';
 import { BLOBS_PER_BLOCK, FIELDS_PER_BLOB, NUM_MSGS_PER_BASE_PARITY } from '@aztec/constants';
 import { padArrayEnd } from '@aztec/foundation/collection';
 import { BLS12Point, Fr } from '@aztec/foundation/fields';
 import { UnbalancedTreeStore } from '@aztec/foundation/trees';
-import { getCheckpointBlobFields } from '@aztec/stdlib/checkpoint';
 import { ParityBasePrivateInputs } from '@aztec/stdlib/parity';
 import { BlockMergeRollupPrivateInputs, CheckpointRootRollupHints, CheckpointRootRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { accumulateBlobs, buildBlobHints, toProofData } from './block-building-helpers.js';
@@ -118,10 +117,13 @@ export class CheckpointProvingState {
 return new ParityBasePrivateInputs(messages, this.constants.vkTreeRoot);
 }
 async accumulateBlobs(startBlobAccumulator) {
-if (this.isAcceptingBlocks() || this.blocks.some((b)
+if (this.isAcceptingBlocks() || this.blocks.some((b)=>!b?.hasEndState())) {
 return;
 }
-this.blobFields =
+this.blobFields = encodeCheckpointBlobData({
+totalNumBlobFields: this.totalNumBlobFields,
+blocks: this.blocks.map((b)=>b.getBlockBlobData())
+});
 this.endBlobAccumulator = await accumulateBlobs(this.blobFields, startBlobAccumulator);
 this.startBlobAccumulator = startBlobAccumulator;
 this.onBlobAccumulatorSet(this);
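accumulateBlobs now waits until every block in the checkpoint has an end state (the new hasEndState check) and builds the checkpoint's blob fields with encodeCheckpointBlobData rather than the removed getCheckpointBlobFields. The sketch below mirrors that encoding call; the numeric type of totalNumBlobFields is an assumption, since only the field name is visible in this diff, and the wrapper function is hypothetical.

import { encodeCheckpointBlobData, type BlockBlobData } from '@aztec/blob-lib';

// Mirrors the call inside accumulateBlobs() above.
function encodeCheckpointFields(
  totalNumBlobFields: number,                            // assumed numeric
  blocks: Array<{ getBlockBlobData(): BlockBlobData }>,  // BlockProvingState-like
) {
  return encodeCheckpointBlobData({
    totalNumBlobFields,
    blocks: blocks.map(b => b.getBlockBlobData()),
  });
}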
@@ -59,7 +59,6 @@ export declare class ProvingOrchestrator implements EpochProver {
 * Computes the block header and updates the archive tree.
 */
 setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<BlockHeader>;
-private buildL2BlockHeader;
 protected verifyBuiltBlockAgainstSyncedState(provingState: BlockProvingState): Promise<void>;
 /**
 * Cancel any further proving
@@ -14,12 +14,11 @@ import { assertLength } from '@aztec/foundation/serialize';
 import { pushTestData } from '@aztec/foundation/testing';
 import { elapsed } from '@aztec/foundation/timer';
 import { readAvmMinimalPublicTxInputsFromFile } from '@aztec/simulator/public/fixtures';
-import { createBlockEndMarker } from '@aztec/stdlib/block';
 import { BlockRootEmptyTxFirstRollupPrivateInputs, BlockRootFirstRollupPrivateInputs, BlockRootSingleTxFirstRollupPrivateInputs, BlockRootSingleTxRollupPrivateInputs, CheckpointRootSingleBlockRollupPrivateInputs, PrivateTxBaseRollupPrivateInputs } from '@aztec/stdlib/rollup';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
 import { Attributes, getTelemetryClient, trackSpan, wrapCallbackInSpan } from '@aztec/telemetry-client';
 import { inspect } from 'util';
-import {
+import { buildHeaderFromCircuitOutputs, getLastSiblingPath, getPublicChonkVerifierPrivateInputsFromTx, getRootTreeSiblingPath, getSubtreeSiblingPath, getTreeSnapshot, insertSideEffectsAndBuildBaseRollupHints, validatePartialState, validateTx } from './block-building-helpers.js';
 import { EpochProvingState } from './epoch-proving-state.js';
 import { ProvingOrchestratorMetrics } from './orchestrator_metrics.js';
 import { TxProvingState } from './tx-proving-state.js';
@@ -134,10 +133,11 @@ const logger = createLogger('prover-client:orchestrator');
 // Because `addTxs` won't be called for a block without txs, and that's where the sponge blob state is computed.
 // We need to set its end sponge blob here, which will become the start sponge blob for the next block.
 if (totalNumTxs === 0) {
+const endState = await db.getStateReference();
+blockProvingState.setEndState(endState);
 const endSpongeBlob = blockProvingState.getStartSpongeBlob().clone();
-
-
-]);
+const blockEndBlobFields = blockProvingState.getBlockEndBlobFields();
+await endSpongeBlob.absorb(blockEndBlobFields);
 blockProvingState.setEndSpongeBlob(endSpongeBlob);
 // And also try to accumulate the blobs as far as we can:
 await this.provingState.setBlobAccumulators();
@@ -201,9 +201,10 @@ const logger = createLogger('prover-client:orchestrator');
 });
 }
 }
-await
-
-
+const endState = await db.getStateReference();
+provingState.setEndState(endState);
+const blockEndBlobFields = provingState.getBlockEndBlobFields();
+await spongeBlobState.absorb(blockEndBlobFields);
 provingState.setEndSpongeBlob(spongeBlobState);
 // Txs have been added to the block. Now try to accumulate the blobs as far as we can:
 await this.provingState.setBlobAccumulators();
@@ -248,28 +249,19 @@ const logger = createLogger('prover-client:orchestrator');
 if (provingState.isAcceptingTxs()) {
 throw new Error(`Block ${blockNumber} is still accepting txs. Call setBlockCompleted after all txs have been added.`);
 }
-//
+// Given we've applied every change from this block, now assemble the block header:
 logger.verbose(`Block ${blockNumber} completed. Assembling header.`);
-const header = await
-await this.verifyBuiltBlockAgainstSyncedState(provingState);
-return header;
-}
-async buildL2BlockHeader(provingState, expectedHeader) {
-// Collect all txs in this block to build the header. The function calling this has made sure that all txs have been added.
-const txs = provingState.getProcessedTxs();
-const startSpongeBlob = provingState.getStartSpongeBlob();
-// Get db for this block
-const db = this.dbs.get(provingState.blockNumber);
-// Given we've applied every change from this block, now assemble the block header
-// and update the archive tree, so we're ready to start processing the next block
-const header = await buildBlockHeaderFromTxs(txs, provingState.getGlobalVariables(), startSpongeBlob, db);
+const header = await provingState.buildBlockHeader();
 if (expectedHeader && !header.equals(expectedHeader)) {
 logger.error(`Block header mismatch: header=${header} expectedHeader=${expectedHeader}`);
 throw new Error('Block header mismatch');
 }
+// Get db for this block
+const db = this.dbs.get(provingState.blockNumber);
+// Update the archive tree, so we're ready to start processing the next block:
 logger.verbose(`Updating archive tree with block ${provingState.blockNumber} header ${(await header.hash()).toString()}`);
 await db.updateArchive(header);
-
+await this.verifyBuiltBlockAgainstSyncedState(provingState);
 return header;
 }
 // Flagged as protected to disable in certain unit tests