@aztec/prover-client 0.69.1 → 0.71.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/bin/get-proof-inputs.d.ts +2 -0
- package/dest/bin/get-proof-inputs.d.ts.map +1 -0
- package/dest/bin/get-proof-inputs.js +50 -0
- package/dest/block_builder/light.d.ts +3 -5
- package/dest/block_builder/light.d.ts.map +1 -1
- package/dest/block_builder/light.js +9 -22
- package/dest/config.d.ts +2 -1
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +3 -2
- package/dest/mocks/fixtures.d.ts +1 -1
- package/dest/mocks/fixtures.d.ts.map +1 -1
- package/dest/mocks/fixtures.js +2 -2
- package/dest/mocks/test_context.d.ts +1 -1
- package/dest/mocks/test_context.d.ts.map +1 -1
- package/dest/mocks/test_context.js +11 -12
- package/dest/orchestrator/block-building-helpers.d.ts +15 -29
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -1
- package/dest/orchestrator/block-building-helpers.js +51 -58
- package/dest/orchestrator/block-proving-state.d.ts +40 -44
- package/dest/orchestrator/block-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/block-proving-state.js +149 -85
- package/dest/orchestrator/epoch-proving-state.d.ts +23 -30
- package/dest/orchestrator/epoch-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/epoch-proving-state.js +92 -65
- package/dest/orchestrator/orchestrator.d.ts +17 -48
- package/dest/orchestrator/orchestrator.d.ts.map +1 -1
- package/dest/orchestrator/orchestrator.js +208 -351
- package/dest/orchestrator/tx-proving-state.d.ts +10 -6
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -1
- package/dest/orchestrator/tx-proving-state.js +57 -46
- package/dest/prover-agent/memory-proving-queue.d.ts +4 -4
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -1
- package/dest/prover-agent/memory-proving-queue.js +5 -5
- package/dest/prover-agent/prover-agent.d.ts +0 -2
- package/dest/prover-agent/prover-agent.d.ts.map +1 -1
- package/dest/prover-agent/prover-agent.js +7 -9
- package/dest/prover-client/factory.d.ts.map +1 -1
- package/dest/prover-client/factory.js +3 -3
- package/dest/prover-client/prover-client.d.ts +4 -2
- package/dest/prover-client/prover-client.d.ts.map +1 -1
- package/dest/prover-client/prover-client.js +16 -15
- package/dest/prover-client/server-epoch-prover.d.ts +25 -0
- package/dest/prover-client/server-epoch-prover.d.ts.map +1 -0
- package/dest/prover-client/server-epoch-prover.js +40 -0
- package/dest/proving_broker/broker_prover_facade.d.ts +19 -7
- package/dest/proving_broker/broker_prover_facade.d.ts.map +1 -1
- package/dest/proving_broker/broker_prover_facade.js +271 -49
- package/dest/proving_broker/config.d.ts +61 -0
- package/dest/proving_broker/config.d.ts.map +1 -0
- package/dest/proving_broker/config.js +83 -0
- package/dest/proving_broker/factory.d.ts +1 -1
- package/dest/proving_broker/factory.d.ts.map +1 -1
- package/dest/proving_broker/factory.js +4 -7
- package/dest/proving_broker/fixtures.d.ts +5 -0
- package/dest/proving_broker/fixtures.d.ts.map +1 -0
- package/dest/proving_broker/fixtures.js +12 -0
- package/dest/proving_broker/index.d.ts +2 -1
- package/dest/proving_broker/index.d.ts.map +1 -1
- package/dest/proving_broker/index.js +3 -2
- package/dest/proving_broker/proof_store/factory.d.ts +6 -0
- package/dest/proving_broker/proof_store/factory.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/factory.js +39 -0
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts +13 -0
- package/dest/proving_broker/proof_store/gcs_proof_store.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/gcs_proof_store.js +46 -0
- package/dest/proving_broker/proof_store/index.d.ts +4 -0
- package/dest/proving_broker/proof_store/index.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/index.js +4 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts +14 -0
- package/dest/proving_broker/proof_store/inline_proof_store.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/inline_proof_store.js +37 -0
- package/dest/proving_broker/{proof_store.d.ts → proof_store/proof_store.d.ts} +1 -12
- package/dest/proving_broker/proof_store/proof_store.d.ts.map +1 -0
- package/dest/proving_broker/proof_store/proof_store.js +2 -0
- package/dest/proving_broker/proving_agent.d.ts +4 -4
- package/dest/proving_broker/proving_agent.d.ts.map +1 -1
- package/dest/proving_broker/proving_agent.js +5 -5
- package/dest/proving_broker/proving_broker.d.ts +16 -12
- package/dest/proving_broker/proving_broker.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker.js +307 -274
- package/dest/proving_broker/proving_broker_database/memory.d.ts +4 -2
- package/dest/proving_broker/proving_broker_database/memory.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/memory.js +17 -4
- package/dest/proving_broker/proving_broker_database/persisted.d.ts +10 -6
- package/dest/proving_broker/proving_broker_database/persisted.d.ts.map +1 -1
- package/dest/proving_broker/proving_broker_database/persisted.js +106 -14
- package/dest/proving_broker/proving_broker_database.d.ts +7 -3
- package/dest/proving_broker/proving_broker_database.d.ts.map +1 -1
- package/dest/proving_broker/proving_job_controller.js +4 -4
- package/dest/proving_broker/rpc.d.ts.map +1 -1
- package/dest/proving_broker/rpc.js +4 -4
- package/dest/test/mock_prover.d.ts +8 -8
- package/dest/test/mock_prover.d.ts.map +1 -1
- package/dest/test/mock_prover.js +9 -10
- package/package.json +14 -12
- package/src/bin/get-proof-inputs.ts +60 -0
- package/src/block_builder/light.ts +7 -31
- package/src/config.ts +4 -4
- package/src/mocks/fixtures.ts +1 -1
- package/src/mocks/test_context.ts +9 -11
- package/src/orchestrator/block-building-helpers.ts +360 -402
- package/src/orchestrator/block-proving-state.ts +251 -121
- package/src/orchestrator/epoch-proving-state.ts +159 -88
- package/src/orchestrator/orchestrator.ts +262 -542
- package/src/orchestrator/tx-proving-state.ts +30 -18
- package/src/prover-agent/memory-proving-queue.ts +12 -16
- package/src/prover-agent/prover-agent.ts +14 -8
- package/src/prover-client/factory.ts +2 -3
- package/src/prover-client/prover-client.ts +17 -20
- package/src/prover-client/server-epoch-prover.ts +44 -0
- package/src/proving_broker/broker_prover_facade.ts +347 -67
- package/src/proving_broker/config.ts +93 -0
- package/src/proving_broker/factory.ts +11 -10
- package/src/proving_broker/fixtures.ts +14 -0
- package/src/proving_broker/index.ts +2 -1
- package/src/proving_broker/proof_store/factory.ts +42 -0
- package/src/proving_broker/proof_store/gcs_proof_store.ts +72 -0
- package/src/proving_broker/proof_store/index.ts +3 -0
- package/src/proving_broker/{proof_store.ts → proof_store/inline_proof_store.ts} +1 -44
- package/src/proving_broker/proof_store/proof_store.ts +54 -0
- package/src/proving_broker/proving_agent.ts +11 -5
- package/src/proving_broker/proving_broker.ts +122 -73
- package/src/proving_broker/proving_broker_database/memory.ts +24 -4
- package/src/proving_broker/proving_broker_database/persisted.ts +142 -20
- package/src/proving_broker/proving_broker_database.ts +8 -3
- package/src/proving_broker/proving_job_controller.ts +5 -5
- package/src/proving_broker/rpc.ts +2 -3
- package/src/test/mock_prover.ts +12 -18
- package/dest/proving_broker/proof_store.d.ts.map +0 -1
- package/dest/proving_broker/proof_store.js +0 -37
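
The largest non-generated change in this release is in package/src/orchestrator/block-building-helpers.ts (diffed below), where the block-building helpers are rewritten from plain exported functions into helpers wrapped by runInSpan from @aztec/telemetry-client, so each helper records a tracing span and can tag attributes such as the tx hash or block number. The sketch below is illustrative only: it uses a minimal, hypothetical stand-in for runInSpan to show the call shape visible in the diff (a scope string, a span name, then a callback that receives a span as its first argument); it is not the actual @aztec/telemetry-client implementation, and buildExampleHints and the 'tx.hash' attribute key are made-up names.

```ts
// Hypothetical minimal Span interface, mirroring only what the diffed code uses.
interface Span {
  setAttribute(key: string, value: string | number): void;
}

// Illustrative stand-in for runInSpan(scope, name, fn): returns a function with the
// same arguments as fn minus the injected span parameter.
function runInSpan<Args extends unknown[], R>(
  scope: string,
  name: string,
  fn: (span: Span, ...args: Args) => R,
): (...args: Args) => R {
  return (...args: Args) => {
    const span: Span = {
      setAttribute: (key, value) => console.log(`[${scope}.${name}] ${key}=${value}`),
    };
    return fn(span, ...args);
  };
}

// Callers keep their old signatures: the span is injected by the wrapper, which is why
// helpers like buildBaseRollupHints are still invoked with the same arguments as before.
const buildExampleHints = runInSpan('BlockBuilderHelpers', 'buildExampleHints', async (span, txHash: string) => {
  span.setAttribute('tx.hash', txHash);
  return { txHash };
});

void buildExampleHints('0x1234');
```
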
package/src/orchestrator/block-building-helpers.ts

@@ -18,7 +18,6 @@ import {
   MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
   MembershipWitness,
   MerkleTreeCalculator,
-  type NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH,
   NOTE_HASH_SUBTREE_HEIGHT,
   NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH,
   NULLIFIER_SUBTREE_HEIGHT,
@@ -32,25 +31,17 @@ import {
   PublicDataHint,
   PublicDataTreeLeaf,
   PublicDataTreeLeafPreimage,
-  type RecursiveProof,
   StateReference,
-  VK_TREE_HEIGHT,
-  type VerificationKeyAsFields,
 } from '@aztec/circuits.js';
 import { type SpongeBlob } from '@aztec/circuits.js/blobs';
 import {
   type BaseOrMergeRollupPublicInputs,
-  BlockMergeRollupInputs,
   type BlockRootOrBlockMergePublicInputs,
   ConstantRollupData,
-  MergeRollupInputs,
-  PreviousRollupBlockData,
-  PreviousRollupData,
   PrivateBaseRollupHints,
   PrivateBaseStateDiffHints,
   PublicBaseRollupHints,
   PublicBaseStateDiffHints,
-  RootRollupInputs,
 } from '@aztec/circuits.js/rollup';
 import { makeTuple } from '@aztec/foundation/array';
 import { Blob } from '@aztec/foundation/blob';
@@ -59,9 +50,10 @@ import { sha256Trunc } from '@aztec/foundation/crypto';
 import { type Logger } from '@aztec/foundation/log';
 import { type Tuple, assertLength, serializeToBuffer, toFriendlyJSON } from '@aztec/foundation/serialize';
 import { computeUnbalancedMerkleRoot } from '@aztec/foundation/trees';
-import {
+import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vks';
 import { protocolContractTreeRoot } from '@aztec/protocol-contracts';
-import { computeFeePayerBalanceLeafSlot } from '@aztec/simulator';
+import { computeFeePayerBalanceLeafSlot } from '@aztec/simulator/server';
+import { Attributes, type Span, runInSpan } from '@aztec/telemetry-client';
 import { type MerkleTreeReadOperations } from '@aztec/world-state';

 import { inspect } from 'util';
@@ -76,185 +68,191 @@ type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'Public
 export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive';

 // Builds the hints for base rollup. Updating the contract, nullifier, and data trees in the process.
-export
-  // Update the note hash trees with the new items being inserted to get the new roots
-  // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
-  const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
-  await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
-
-  // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
-  // All reads that refer to writes in the same tx are transient and can be simplified out.
-  const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
-
-  // Update the nullifier tree, capturing the low nullifier info for each individual operation
-  const {
-    lowLeavesWitnessData: nullifierWitnessLeaves,
-    newSubtreeSiblingPath: nullifiersSubtreeSiblingPath,
-    sortedNewLeaves: sortednullifiers,
-    sortedNewLeavesIndexes,
-  } = await db.batchInsert(
-    MerkleTreeId.NULLIFIER_TREE,
-    padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
-    NULLIFIER_SUBTREE_HEIGHT,
-  );
-
-  if (nullifierWitnessLeaves === undefined) {
-    throw new Error(`Could not craft nullifier batch insertion proofs`);
-  }
-
-  // Extract witness objects from returned data
-  const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
-    nullifierWitnessLeaves.map(l =>
-      MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)),
+export const buildBaseRollupHints = runInSpan(
+  'BlockBuilderHelpers',
+  'buildBaseRollupHints',
+  async (
+    span: Span,
+    tx: ProcessedTx,
+    globalVariables: GlobalVariables,
+    db: MerkleTreeWriteOperations,
+    startSpongeBlob: SpongeBlob,
+  ) => {
+    span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
+    // Get trees info before any changes hit
+    const constants = await getConstantRollupData(globalVariables, db);
+    const start = new PartialStateReference(
+      await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
+      await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
+      await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
+    );
+    // Get the subtree sibling paths for the circuit
+    const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath(
+      MerkleTreeId.NOTE_HASH_TREE,
+      NOTE_HASH_SUBTREE_HEIGHT,
+      db,
     );

-      i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
-  );
-
-  // Append new data to startSpongeBlob
-  const inputSpongeBlob = startSpongeBlob.clone();
-  startSpongeBlob.absorb(tx.txEffect.toBlobFields());
-
-  if (tx.avmProvingRequest) {
-    // Build public base rollup hints
-    const stateDiffHints = PublicBaseStateDiffHints.from({
-      nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-        i < nullifierWitnessLeaves.length
-          ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
-          : NullifierLeafPreimage.empty(),
-      ),
-      nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-        i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
-          ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
-          : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
-      ),
-      sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])),
-      sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
-      noteHashSubtreeSiblingPath,
-      nullifierSubtreeSiblingPath,
-      lowPublicDataWritesPreimages: padArrayEnd(
-        txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages,
-        PublicDataTreeLeafPreimage.empty(),
-        MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
-      ),
-      lowPublicDataWritesMembershipWitnesses: padArrayEnd(
-        txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses,
-        MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT),
-        MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
-      ),
-      publicDataTreeSiblingPaths: padArrayEnd(
-        txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths,
-        makeTuple(PUBLIC_DATA_TREE_HEIGHT, () => Fr.ZERO),
-        MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
-      ),
-    });
+    const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, i =>
+      i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO,
+    );

+    // Update the note hash trees with the new items being inserted to get the new roots
+    // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
+    const noteHashes = padArrayEnd(tx.txEffect.noteHashes, Fr.ZERO, MAX_NOTE_HASHES_PER_TX);
+    await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, noteHashes);
+
+    // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
+    // All reads that refer to writes in the same tx are transient and can be simplified out.
+    const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
+
+    // Update the nullifier tree, capturing the low nullifier info for each individual operation
+    const {
+      lowLeavesWitnessData: nullifierWitnessLeaves,
+      newSubtreeSiblingPath: nullifiersSubtreeSiblingPath,
+      sortedNewLeaves: sortednullifiers,
+      sortedNewLeavesIndexes,
+    } = await db.batchInsert(
+      MerkleTreeId.NULLIFIER_TREE,
+      padArrayEnd(tx.txEffect.nullifiers, Fr.ZERO, MAX_NULLIFIERS_PER_TX).map(n => n.toBuffer()),
+      NULLIFIER_SUBTREE_HEIGHT,
     );

-    startSpongeBlob: inputSpongeBlob,
-    stateDiffHints,
-    archiveRootMembershipWitness,
-    constants,
-    });
-  } else {
-    if (
-      txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 ||
-      txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 ||
-      txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1
-    ) {
-      throw new Error(`More than one public data write in a private only tx`);
+    if (nullifierWitnessLeaves === undefined) {
+      throw new Error(`Could not craft nullifier batch insertion proofs`);
     }

-    //
-      : await getPublicDataHint(db, leafSlot.toBigInt());
-
-    const feeWriteLowLeafPreimage =
-      txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0] || PublicDataTreeLeafPreimage.empty();
-    const feeWriteLowLeafMembershipWitness =
-      txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0] ||
-      MembershipWitness.empty<typeof PUBLIC_DATA_TREE_HEIGHT>(PUBLIC_DATA_TREE_HEIGHT);
-    const feeWriteSiblingPath =
-      txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths[0] ||
-      makeTuple(PUBLIC_DATA_TREE_HEIGHT, () => Fr.ZERO);
-
-    const stateDiffHints = PrivateBaseStateDiffHints.from({
-      nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-        i < nullifierWitnessLeaves.length
-          ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
-          : NullifierLeafPreimage.empty(),
-      ),
-      nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
-        i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
-          ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
-          : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
-      ),
-      sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])),
-      sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
-      noteHashSubtreeSiblingPath,
-      nullifierSubtreeSiblingPath,
-      feeWriteLowLeafPreimage,
-      feeWriteLowLeafMembershipWitness,
-      feeWriteSiblingPath,
-    });
+    // Extract witness objects from returned data
+    const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
+      nullifierWitnessLeaves.map(l =>
+        MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)),
+      );

-    const
-      ARCHIVE_HEIGHT,
-      db,
+    const nullifierSubtreeSiblingPathArray = nullifiersSubtreeSiblingPath.toFields();
+
+    const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, i =>
+      i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
     );

+    // Append new data to startSpongeBlob
+    const inputSpongeBlob = startSpongeBlob.clone();
+    startSpongeBlob.absorb(tx.txEffect.toBlobFields());
+
+    if (tx.avmProvingRequest) {
+      // Build public base rollup hints
+      const stateDiffHints = PublicBaseStateDiffHints.from({
+        nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
+          i < nullifierWitnessLeaves.length
+            ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
+            : NullifierLeafPreimage.empty(),
+        ),
+        nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
+          i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
+            ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
+            : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+        ),
+        sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])),
+        sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
+        noteHashSubtreeSiblingPath,
+        nullifierSubtreeSiblingPath,
+        lowPublicDataWritesPreimages: padArrayEnd(
+          txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages,
+          PublicDataTreeLeafPreimage.empty(),
+          MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+        ),
+        lowPublicDataWritesMembershipWitnesses: padArrayEnd(
+          txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses,
+          MembershipWitness.empty(PUBLIC_DATA_TREE_HEIGHT),
+          MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+        ),
+        publicDataTreeSiblingPaths: padArrayEnd(
+          txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths,
+          makeTuple(PUBLIC_DATA_TREE_HEIGHT, () => Fr.ZERO),
+          MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+        ),
+      });
+
+      const blockHash = tx.constants.historicalHeader.hash();
+      const archiveRootMembershipWitness = await getMembershipWitnessFor(
+        blockHash,
+        MerkleTreeId.ARCHIVE,
+        ARCHIVE_HEIGHT,
+        db,
+      );
+
+      return PublicBaseRollupHints.from({
+        start,
+        startSpongeBlob: inputSpongeBlob,
+        stateDiffHints,
+        archiveRootMembershipWitness,
+        constants,
+      });
+    } else {
+      if (
+        txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses.length > 1 ||
+        txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages.length > 1 ||
+        txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths.length > 1
+      ) {
+        throw new Error(`More than one public data write in a private only tx`);
+      }
+
+      // Create data hint for reading fee payer initial balance in Fee Juice
+      // If no fee payer is set, read hint should be empty
+      const leafSlot = computeFeePayerBalanceLeafSlot(tx.data.feePayer);
+      const feePayerFeeJuiceBalanceReadHint = tx.data.feePayer.isZero()
+        ? PublicDataHint.empty()
+        : await getPublicDataHint(db, leafSlot.toBigInt());
+
+      const feeWriteLowLeafPreimage =
+        txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages[0] || PublicDataTreeLeafPreimage.empty();
+      const feeWriteLowLeafMembershipWitness =
+        txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses[0] ||
+        MembershipWitness.empty<typeof PUBLIC_DATA_TREE_HEIGHT>(PUBLIC_DATA_TREE_HEIGHT);
+      const feeWriteSiblingPath =
+        txPublicDataUpdateRequestInfo.publicDataWritesSiblingPaths[0] ||
+        makeTuple(PUBLIC_DATA_TREE_HEIGHT, () => Fr.ZERO);
+
+      const stateDiffHints = PrivateBaseStateDiffHints.from({
+        nullifierPredecessorPreimages: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
+          i < nullifierWitnessLeaves.length
+            ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
+            : NullifierLeafPreimage.empty(),
+        ),
+        nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NULLIFIERS_PER_TX, i =>
+          i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
+            ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
+            : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+        ),
+        sortedNullifiers: makeTuple(MAX_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortednullifiers[i])),
+        sortedNullifierIndexes: makeTuple(MAX_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
+        noteHashSubtreeSiblingPath,
+        nullifierSubtreeSiblingPath,
+        feeWriteLowLeafPreimage,
+        feeWriteLowLeafMembershipWitness,
+        feeWriteSiblingPath,
+      });
+
+      const blockHash = tx.constants.historicalHeader.hash();
+      const archiveRootMembershipWitness = await getMembershipWitnessFor(
+        blockHash,
+        MerkleTreeId.ARCHIVE,
+        ARCHIVE_HEIGHT,
+        db,
+      );
+
+      return PrivateBaseRollupHints.from({
+        start,
+        startSpongeBlob: inputSpongeBlob,
+        stateDiffHints,
+        feePayerFeeJuiceBalanceReadHint: feePayerFeeJuiceBalanceReadHint,
+        archiveRootMembershipWitness,
+        constants,
+      });
+    }
+  },
+);

-async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint) {
+export async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint) {
   const { index } = (await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot)) ?? {};
   if (index === undefined) {
     throw new Error(`Cannot find the previous value index for public data ${leafSlot}.`);
@@ -274,125 +272,126 @@ async function getPublicDataHint(db: MerkleTreeWriteOperations, leafSlot: bigint
   return new PublicDataHint(new Fr(leafSlot), value, membershipWitness, leafPreimage);
 }

-export
-    BlockRootOrBlockMergePublicInputs,
-    RecursiveProof<typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>,
-    VerificationKeyAsFields,
-  ],
-) {
-  const mergeInputs = new BlockMergeRollupInputs([
-    getPreviousRollupBlockDataFromPublicInputs(left[0], left[1], left[2]),
-    getPreviousRollupBlockDataFromPublicInputs(right[0], right[1], right[2]),
-  ]);
-  return mergeInputs;
-}
+export const buildBlobHints = runInSpan(
+  'BlockBuilderHelpers',
+  'buildBlobHints',
+  (_span: Span, txEffects: TxEffect[]) => {
+    const blobFields = txEffects.flatMap(tx => tx.toBlobFields());
+    const blobs = Blob.getBlobs(blobFields);
+    const blobCommitments = blobs.map(b => b.commitmentToFields());
+    const blobsHash = new Fr(getBlobsHashFromBlobs(blobs));
+    return { blobFields, blobCommitments, blobs, blobsHash };
+  },
+);
+
+export const buildHeaderFromCircuitOutputs = runInSpan(
+  'BlockBuilderHelpers',
+  'buildHeaderFromCircuitOutputs',
+  (
+    _span,
+    previousRollupData: BaseOrMergeRollupPublicInputs[],
+    parityPublicInputs: ParityPublicInputs,
+    rootRollupOutputs: BlockRootOrBlockMergePublicInputs,
+    endState: StateReference,
+    logger?: Logger,
+  ) => {
+    if (previousRollupData.length > 2) {
+      throw new Error(`There can't be more than 2 previous rollups. Received ${previousRollupData.length}.`);
+    }

-  )
-  const header = new BlockHeader(
-    rootRollupOutputs.previousArchive,
-    contentCommitment,
-    state,
-    previousMergeData[0].constants.globalVariables,
-    previousMergeData[0].accumulatedFees.add(previousMergeData[1].accumulatedFees),
-    previousMergeData[0].accumulatedManaUsed.add(previousMergeData[1].accumulatedManaUsed),
-  );
-  if (!header.hash().equals(rootRollupOutputs.endBlockHash)) {
-    logger?.error(
-      `Block header mismatch when building header from circuit outputs.` +
-        `\n\nHeader: ${inspect(header)}` +
-        `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`,
+    const blobsHash = rootRollupOutputs.blobPublicInputs[0].getBlobsHash();
+    const numTxs = previousRollupData.reduce((sum, d) => sum + d.numTxs, 0);
+    const outHash =
+      previousRollupData.length === 0
+        ? Fr.ZERO.toBuffer()
+        : previousRollupData.length === 1
+        ? previousRollupData[0].outHash.toBuffer()
+        : sha256Trunc(
+            Buffer.concat([previousRollupData[0].outHash.toBuffer(), previousRollupData[1].outHash.toBuffer()]),
+          );
+    const contentCommitment = new ContentCommitment(
+      new Fr(numTxs),
+      blobsHash,
+      parityPublicInputs.shaRoot.toBuffer(),
+      outHash,
     );
-    throw new Error(`Block header mismatch when building from circuit outputs`);
-  }
-  return header;
-}
-
-export async function buildHeaderAndBodyFromTxs(
-  txs: ProcessedTx[],
-  globalVariables: GlobalVariables,
-  l1ToL2Messages: Fr[],
-  db: MerkleTreeReadOperations,
-) {
-  const stateReference = new StateReference(
-    await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
-    new PartialStateReference(
-      await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
-      await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
-      await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
-    ),
-  );
-
-  const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);

+    const accumulatedFees = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedFees), Fr.ZERO);
+    const accumulatedManaUsed = previousRollupData.reduce((sum, d) => sum.add(d.accumulatedManaUsed), Fr.ZERO);
+    const header = new BlockHeader(
+      rootRollupOutputs.previousArchive,
+      contentCommitment,
+      endState,
+      rootRollupOutputs.endGlobalVariables,
+      accumulatedFees,
+      accumulatedManaUsed,
+    );
+    if (!header.hash().equals(rootRollupOutputs.endBlockHash)) {
+      logger?.error(
+        `Block header mismatch when building header from circuit outputs.` +
+          `\n\nHeader: ${inspect(header)}` +
+          `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`,
+      );
+      throw new Error(`Block header mismatch when building from circuit outputs`);
+    }
+    return header;
+  },
+);
+
+export const buildHeaderAndBodyFromTxs = runInSpan(
+  'BlockBuilderHelpers',
+  'buildHeaderAndBodyFromTxs',
+  async (
+    span,
+    txs: ProcessedTx[],
+    globalVariables: GlobalVariables,
+    l1ToL2Messages: Fr[],
+    db: MerkleTreeReadOperations,
+  ) => {
+    span.setAttribute(Attributes.BLOCK_NUMBER, globalVariables.blockNumber.toNumber());
+    const stateReference = new StateReference(
+      await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
+      new PartialStateReference(
+        await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
+        await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
+        await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
+      ),
+    );

+    const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+
+    const txEffects = txs.map(tx => tx.txEffect);
+    const body = new Body(txEffects);
+
+    const numTxs = body.txEffects.length;
+    const outHash =
+      numTxs === 0
+        ? Fr.ZERO.toBuffer()
+        : numTxs === 1
+        ? body.txEffects[0].txOutHash()
+        : computeUnbalancedMerkleRoot(
+            body.txEffects.map(tx => tx.txOutHash()),
+            TxEffect.empty().txOutHash(),
+          );
+
+    l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
+    const hasher = (left: Buffer, right: Buffer) => sha256Trunc(Buffer.concat([left, right]));
+    const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
+    const parityShaRoot = new MerkleTreeCalculator(parityHeight, Fr.ZERO.toBuffer(), hasher).computeTreeRoot(
+      l1ToL2Messages.map(msg => msg.toBuffer()),
+    );
+    const blobsHash = getBlobsHashFromBlobs(Blob.getBlobs(body.toBlobFields()));

-  const hasher = (left: Buffer, right: Buffer) => sha256Trunc(Buffer.concat([left, right]));
-  const parityHeight = Math.ceil(Math.log2(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
-  const parityShaRoot = new MerkleTreeCalculator(parityHeight, Fr.ZERO.toBuffer(), hasher).computeTreeRoot(
-    l1ToL2Messages.map(msg => msg.toBuffer()),
-  );
-  const blobsHash = getBlobsHashFromBlobs(Blob.getBlobs(body.toBlobFields()));
+    const contentCommitment = new ContentCommitment(new Fr(numTxs), blobsHash, parityShaRoot, outHash);

-    new Fr(
-    blobsHash,
-    parityShaRoot,
-    outHash,
-  );
+    const fees = body.txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO);
+    const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.totalGas.l2Gas)), Fr.ZERO);

-
-  const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.totalGas.l2Gas)), Fr.ZERO);
+    const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed);

-}
+    return { header, body };
+  },
+);

 export function getBlobsHashFromBlobs(inputs: Blob[]): Buffer {
   const blobHashes = serializeToBuffer(inputs.map(b => b.getEthVersionedBlobHash()));
@@ -411,22 +410,26 @@ export async function validateBlockRootOutput(
   ]);
 }

-export
-  state.
+export const validateState = runInSpan(
+  'BlockBuilderHelpers',
+  'validateState',
+  async (_span, state: StateReference, db: MerkleTreeReadOperations) => {
+    const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map(
+      async (id: MerkleTreeId) => {
+        return { key: id, value: await getTreeSnapshot(id, db) };
+      },
+    );
+    const snapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot> = new Map(
+      (await Promise.all(promises)).map(obj => [obj.key, obj.value]),
+    );
+    validatePartialState(state.partial, snapshots);
+    validateSimulatedTree(
+      await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
+      state.l1ToL2MessageTree,
+      'L1ToL2MessageTree',
+    );
+  },
+);

 export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: TID, db: MerkleTreeReadOperations) {
   const { size } = await db.getTreeInfo(treeId);
@@ -434,68 +437,18 @@ export async function getRootTreeSiblingPath<TID extends MerkleTreeId>(treeId: T
   return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId));
 }

-  )
-    getPreviousRollupBlockDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight),
-  ];
-
-  return RootRollupInputs.from({
-    previousRollupData,
-    proverId,
-  });
-}
-
-export function getPreviousRollupDataFromPublicInputs(
-  rollupOutput: BaseOrMergeRollupPublicInputs,
-  rollupProof: RecursiveProof<typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>,
-  vk: VerificationKeyAsFields,
-) {
-  const leafIndex = getVKIndex(vk);
-
-  return new PreviousRollupData(
-    rollupOutput,
-    rollupProof,
-    vk,
-    new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)),
-  );
-}
-
-export function getPreviousRollupBlockDataFromPublicInputs(
-  rollupOutput: BlockRootOrBlockMergePublicInputs,
-  rollupProof: RecursiveProof<typeof NESTED_RECURSIVE_ROLLUP_HONK_PROOF_LENGTH>,
-  vk: VerificationKeyAsFields,
-) {
-  const leafIndex = getVKIndex(vk);
-
-  return new PreviousRollupBlockData(
-    rollupOutput,
-    rollupProof,
-    vk,
-    new MembershipWitness(VK_TREE_HEIGHT, BigInt(leafIndex), getVKSiblingPath(leafIndex)),
-  );
-}
-
-export async function getConstantRollupData(
-  globalVariables: GlobalVariables,
-  db: MerkleTreeReadOperations,
-): Promise<ConstantRollupData> {
-  return ConstantRollupData.from({
-    vkTreeRoot: getVKTreeRoot(),
-    protocolContractTreeRoot,
-    lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
-    globalVariables,
-  });
-}
+export const getConstantRollupData = runInSpan(
+  'BlockBuilderHelpers',
+  'getConstantRollupData',
+  async (_span, globalVariables: GlobalVariables, db: MerkleTreeReadOperations): Promise<ConstantRollupData> => {
+    return ConstantRollupData.from({
+      vkTreeRoot: getVKTreeRoot(),
+      protocolContractTreeRoot,
+      lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
+      globalVariables,
+    });
+  },
+);

 export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeReadOperations): Promise<AppendOnlyTreeSnapshot> {
   const treeInfo = await db.getTreeInfo(id);
@@ -510,42 +463,47 @@ export function makeEmptyMembershipWitness<N extends number>(height: N)
   );
 }

-  )
+const processPublicDataUpdateRequests = runInSpan(
+  'BlockBuilderHelpers',
+  'processPublicDataUpdateRequests',
+  async (span, tx: ProcessedTx, db: MerkleTreeWriteOperations) => {
+    span.setAttribute(Attributes.TX_HASH, tx.hash.toString());
+    const allPublicDataWrites = tx.txEffect.publicDataWrites.map(
+      ({ leafSlot, value }) => new PublicDataTreeLeaf(leafSlot, value),
+    );

+    const { lowLeavesWitnessData, insertionWitnessData } = await db.sequentialInsert(
+      MerkleTreeId.PUBLIC_DATA_TREE,
+      allPublicDataWrites.map(write => {
+        if (write.isEmpty()) {
+          throw new Error(`Empty public data write in tx: ${toFriendlyJSON(tx)}`);
+        }
+        return write.toBuffer();
+      }),
+    );

-}
+    const lowPublicDataWritesPreimages = lowLeavesWitnessData.map(
+      lowLeafWitness => lowLeafWitness.leafPreimage as PublicDataTreeLeafPreimage,
+    );
+    const lowPublicDataWritesMembershipWitnesses = lowLeavesWitnessData.map(lowLeafWitness =>
+      MembershipWitness.fromBufferArray<typeof PUBLIC_DATA_TREE_HEIGHT>(
+        lowLeafWitness.index,
+        assertLength(lowLeafWitness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
+      ),
+    );
+    const publicDataWritesSiblingPaths = insertionWitnessData.map(w => {
+      const insertionSiblingPath = w.siblingPath.toFields();
+      assertLength(insertionSiblingPath, PUBLIC_DATA_TREE_HEIGHT);
+      return insertionSiblingPath as Tuple<Fr, typeof PUBLIC_DATA_TREE_HEIGHT>;
+    });
+
+    return {
+      lowPublicDataWritesPreimages,
+      lowPublicDataWritesMembershipWitnesses,
+      publicDataWritesSiblingPaths,
+    };
+  },
+);

 export async function getSubtreeSiblingPath(
   treeId: MerkleTreeId,