@aztec/prover-client 0.42.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dest/config.d.ts +21 -0
- package/dest/config.d.ts.map +1 -0
- package/dest/config.js +31 -0
- package/dest/index.d.ts +4 -0
- package/dest/index.d.ts.map +1 -0
- package/dest/index.js +3 -0
- package/dest/mocks/fixtures.d.ts +22 -0
- package/dest/mocks/fixtures.d.ts.map +1 -0
- package/dest/mocks/fixtures.js +95 -0
- package/dest/mocks/test_context.d.ts +32 -0
- package/dest/mocks/test_context.d.ts.map +1 -0
- package/dest/mocks/test_context.js +116 -0
- package/dest/orchestrator/block-building-helpers.d.ts +36 -0
- package/dest/orchestrator/block-building-helpers.d.ts.map +1 -0
- package/dest/orchestrator/block-building-helpers.js +236 -0
- package/dest/orchestrator/orchestrator.d.ts +113 -0
- package/dest/orchestrator/orchestrator.d.ts.map +1 -0
- package/dest/orchestrator/orchestrator.js +574 -0
- package/dest/orchestrator/proving-state.d.ts +68 -0
- package/dest/orchestrator/proving-state.d.ts.map +1 -0
- package/dest/orchestrator/proving-state.js +142 -0
- package/dest/orchestrator/tx-proving-state.d.ts +35 -0
- package/dest/orchestrator/tx-proving-state.d.ts.map +1 -0
- package/dest/orchestrator/tx-proving-state.js +92 -0
- package/dest/prover-agent/index.d.ts +4 -0
- package/dest/prover-agent/index.d.ts.map +1 -0
- package/dest/prover-agent/index.js +4 -0
- package/dest/prover-agent/memory-proving-queue.d.ts +64 -0
- package/dest/prover-agent/memory-proving-queue.d.ts.map +1 -0
- package/dest/prover-agent/memory-proving-queue.js +187 -0
- package/dest/prover-agent/prover-agent.d.ts +30 -0
- package/dest/prover-agent/prover-agent.d.ts.map +1 -0
- package/dest/prover-agent/prover-agent.js +115 -0
- package/dest/prover-agent/proving-error.d.ts +5 -0
- package/dest/prover-agent/proving-error.d.ts.map +1 -0
- package/dest/prover-agent/proving-error.js +9 -0
- package/dest/prover-agent/rpc.d.ts +5 -0
- package/dest/prover-agent/rpc.d.ts.map +1 -0
- package/dest/prover-agent/rpc.js +53 -0
- package/dest/tx-prover/tx-prover.d.ts +65 -0
- package/dest/tx-prover/tx-prover.d.ts.map +1 -0
- package/dest/tx-prover/tx-prover.js +122 -0
- package/package.json +87 -0
- package/src/config.ts +59 -0
- package/src/index.ts +4 -0
- package/src/mocks/fixtures.ts +182 -0
- package/src/mocks/test_context.ts +217 -0
- package/src/orchestrator/block-building-helpers.ts +470 -0
- package/src/orchestrator/orchestrator.ts +883 -0
- package/src/orchestrator/proving-state.ts +210 -0
- package/src/orchestrator/tx-proving-state.ts +139 -0
- package/src/prover-agent/index.ts +3 -0
- package/src/prover-agent/memory-proving-queue.ts +303 -0
- package/src/prover-agent/prover-agent.ts +144 -0
- package/src/prover-agent/proving-error.ts +9 -0
- package/src/prover-agent/rpc.ts +91 -0
- package/src/tx-prover/tx-prover.ts +171 -0
package/src/orchestrator/block-building-helpers.ts
@@ -0,0 +1,470 @@
+import { MerkleTreeId, type ProcessedTx } from '@aztec/circuit-types';
+import {
+  ARCHIVE_HEIGHT,
+  AppendOnlyTreeSnapshot,
+  type BaseOrMergeRollupPublicInputs,
+  BaseRollupInputs,
+  ConstantRollupData,
+  Fr,
+  type GlobalVariables,
+  KernelData,
+  type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
+  MAX_NEW_NULLIFIERS_PER_TX,
+  MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+  MembershipWitness,
+  MergeRollupInputs,
+  NESTED_RECURSIVE_PROOF_LENGTH,
+  NOTE_HASH_SUBTREE_HEIGHT,
+  NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH,
+  NULLIFIER_SUBTREE_HEIGHT,
+  NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH,
+  NULLIFIER_TREE_HEIGHT,
+  type NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
+  NullifierLeafPreimage,
+  PUBLIC_DATA_SUBTREE_HEIGHT,
+  PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH,
+  PUBLIC_DATA_TREE_HEIGHT,
+  PartialStateReference,
+  PreviousRollupData,
+  PublicDataHint,
+  PublicDataTreeLeaf,
+  type PublicDataTreeLeafPreimage,
+  PublicDataUpdateRequest,
+  ROLLUP_VK_TREE_HEIGHT,
+  type RecursiveProof,
+  type RootParityInput,
+  RootRollupInputs,
+  type RootRollupPublicInputs,
+  StateDiffHints,
+  type StateReference,
+  VK_TREE_HEIGHT,
+  type VerificationKeyAsFields,
+  type VerificationKeyData,
+  makeRecursiveProofFromBinary,
+} from '@aztec/circuits.js';
+import { assertPermutation, makeTuple } from '@aztec/foundation/array';
+import { padArrayEnd } from '@aztec/foundation/collection';
+import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize';
+import { HintsBuilder, computeFeePayerBalanceLeafSlot } from '@aztec/simulator';
+import { type MerkleTreeOperations } from '@aztec/world-state';
+
+// Denotes fields that are not used now, but will be in the future
+const FUTURE_FR = new Fr(0n);
+const FUTURE_NUM = 0;
+
+// Denotes fields that should be deleted
+const DELETE_FR = new Fr(0n);
+
+/**
+ * Type representing the names of the trees for the base rollup.
+ */
+type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree' | 'PublicDataTree';
+/**
+ * Type representing the names of the trees.
+ */
+export type TreeNames = BaseTreeNames | 'L1ToL2MessageTree' | 'Archive';
+
+// Builds the base rollup inputs, updating the contract, nullifier, and data trees in the process
+export async function buildBaseRollupInput(
+  tx: ProcessedTx,
+  globalVariables: GlobalVariables,
+  db: MerkleTreeOperations,
+  kernelVk: VerificationKeyData,
+) {
+  // Get trees info before any changes hit
+  const constants = await getConstantRollupData(globalVariables, db);
+  const start = new PartialStateReference(
+    await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db),
+    await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db),
+    await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db),
+  );
+  // Get the subtree sibling paths for the circuit
+  const noteHashSubtreeSiblingPathArray = await getSubtreeSiblingPath(
+    MerkleTreeId.NOTE_HASH_TREE,
+    NOTE_HASH_SUBTREE_HEIGHT,
+    db,
+  );
+
+  const noteHashSubtreeSiblingPath = makeTuple(NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, i =>
+    i < noteHashSubtreeSiblingPathArray.length ? noteHashSubtreeSiblingPathArray[i] : Fr.ZERO,
+  );
+
+  // Create data hint for reading fee payer initial balance in gas tokens
+  // If no fee payer is set, read hint should be empty
+  // If there is already a public data write for this slot, also skip the read hint
+  const hintsBuilder = new HintsBuilder(db);
+  const leafSlot = computeFeePayerBalanceLeafSlot(tx.data.feePayer);
+  const existingBalanceWrite = tx.data.end.publicDataUpdateRequests.find(write => write.leafSlot.equals(leafSlot));
+  const feePayerGasTokenBalanceReadHint =
+    leafSlot.isZero() || existingBalanceWrite
+      ? PublicDataHint.empty()
+      : await hintsBuilder.getPublicDataHint(leafSlot.toBigInt());
+
+  // Update the note hash trees with the new items being inserted to get the new roots
+  // that will be used by the next iteration of the base rollup circuit, skipping the empty ones
+  const newNoteHashes = tx.data.end.newNoteHashes;
+  await db.appendLeaves(MerkleTreeId.NOTE_HASH_TREE, newNoteHashes);
+
+  // The read witnesses for a given TX should be generated before the writes of the same TX are applied.
+  // All reads that refer to writes in the same tx are transient and can be simplified out.
+  const txPublicDataUpdateRequestInfo = await processPublicDataUpdateRequests(tx, db);
+
+  // Update the nullifier tree, capturing the low nullifier info for each individual operation
+  const {
+    lowLeavesWitnessData: nullifierWitnessLeaves,
+    newSubtreeSiblingPath: newNullifiersSubtreeSiblingPath,
+    sortedNewLeaves: sortedNewNullifiers,
+    sortedNewLeavesIndexes,
+  } = await db.batchInsert(
+    MerkleTreeId.NULLIFIER_TREE,
+    tx.data.end.newNullifiers.map(n => n.toBuffer()),
+    NULLIFIER_SUBTREE_HEIGHT,
+  );
+  if (nullifierWitnessLeaves === undefined) {
+    throw new Error(`Could not craft nullifier batch insertion proofs`);
+  }
+
+  // Extract witness objects from returned data
+  const nullifierPredecessorMembershipWitnessesWithoutPadding: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[] =
+    nullifierWitnessLeaves.map(l =>
+      MembershipWitness.fromBufferArray(l.index, assertLength(l.siblingPath.toBufferArray(), NULLIFIER_TREE_HEIGHT)),
+    );
+
+  const nullifierSubtreeSiblingPathArray = newNullifiersSubtreeSiblingPath.toFields();
+
+  const nullifierSubtreeSiblingPath = makeTuple(NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, i =>
+    i < nullifierSubtreeSiblingPathArray.length ? nullifierSubtreeSiblingPathArray[i] : Fr.ZERO,
+  );
+
+  const publicDataSiblingPath = txPublicDataUpdateRequestInfo.newPublicDataSubtreeSiblingPath;
+
+  const stateDiffHints = StateDiffHints.from({
+    nullifierPredecessorPreimages: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i =>
+      i < nullifierWitnessLeaves.length
+        ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage)
+        : NullifierLeafPreimage.empty(),
+    ),
+    nullifierPredecessorMembershipWitnesses: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i =>
+      i < nullifierPredecessorMembershipWitnessesWithoutPadding.length
+        ? nullifierPredecessorMembershipWitnessesWithoutPadding[i]
+        : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
+    ),
+    sortedNullifiers: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => Fr.fromBuffer(sortedNewNullifiers[i])),
+    sortedNullifierIndexes: makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i => sortedNewLeavesIndexes[i]),
+    noteHashSubtreeSiblingPath,
+    nullifierSubtreeSiblingPath,
+    publicDataSiblingPath,
+  });
+
+  const blockHash = tx.data.constants.historicalHeader.hash();
+  const archiveRootMembershipWitness = await getMembershipWitnessFor(
+    blockHash,
+    MerkleTreeId.ARCHIVE,
+    ARCHIVE_HEIGHT,
+    db,
+  );
+
+  return BaseRollupInputs.from({
+    kernelData: getKernelDataFor(tx, kernelVk),
+    start,
+    stateDiffHints,
+    feePayerGasTokenBalanceReadHint,
+    sortedPublicDataWrites: txPublicDataUpdateRequestInfo.sortedPublicDataWrites,
+    sortedPublicDataWritesIndexes: txPublicDataUpdateRequestInfo.sortedPublicDataWritesIndexes,
+    lowPublicDataWritesPreimages: txPublicDataUpdateRequestInfo.lowPublicDataWritesPreimages,
+    lowPublicDataWritesMembershipWitnesses: txPublicDataUpdateRequestInfo.lowPublicDataWritesMembershipWitnesses,
+
+    archiveRootMembershipWitness,
+
+    constants,
+  });
+}
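
// --- Illustrative usage sketch (editor's addition, not part of the published package) ---
// The orchestrator is expected to call buildBaseRollupInput once per processed tx against the
// same world-state fork; each call mutates the trees in `db`, so txs must be handled
// sequentially and in order. The `prover` parameter and its getBaseRollupProof method are
// assumptions for illustration only.
async function proveBaseRollupsExample(
  txs: ProcessedTx[],
  globalVariables: GlobalVariables,
  db: MerkleTreeOperations,
  kernelVk: VerificationKeyData,
  prover: { getBaseRollupProof(inputs: BaseRollupInputs): Promise<unknown> },
) {
  const results = [];
  for (const tx of txs) {
    const inputs = await buildBaseRollupInput(tx, globalVariables, db, kernelVk);
    results.push(await prover.getBaseRollupProof(inputs));
  }
  return results;
}
// --- end of illustrative sketch ---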
+
+export function createMergeRollupInputs(
+  left: [BaseOrMergeRollupPublicInputs, RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>, VerificationKeyAsFields],
+  right: [BaseOrMergeRollupPublicInputs, RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>, VerificationKeyAsFields],
+) {
+  const mergeInputs = new MergeRollupInputs([
+    getPreviousRollupDataFromPublicInputs(left[0], left[1], left[2]),
+    getPreviousRollupDataFromPublicInputs(right[0], right[1], right[2]),
+  ]);
+  return mergeInputs;
+}
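
// --- Illustrative usage sketch (editor's addition, not part of the published package) ---
// Merge rollups combine two sibling nodes of the proving tree. Given each child's
// (public inputs, recursive proof, verification key) triple, whether it came from a base or a
// merge rollup, the next level's inputs are built as below; the function name is hypothetical.
function mergeTwoChildrenExample(
  leftChild: [BaseOrMergeRollupPublicInputs, RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>, VerificationKeyAsFields],
  rightChild: [BaseOrMergeRollupPublicInputs, RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>, VerificationKeyAsFields],
) {
  // Each child's triple is wrapped into PreviousRollupData and the pair becomes MergeRollupInputs.
  return createMergeRollupInputs(leftChild, rightChild);
}
// --- end of illustrative sketch ---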
+
+// Validate that the roots of all local trees match the output of the root circuit simulation
+export async function validateRootOutput(rootOutput: RootRollupPublicInputs, db: MerkleTreeOperations) {
+  await Promise.all([
+    validateState(rootOutput.header.state, db),
+    validateSimulatedTree(await getTreeSnapshot(MerkleTreeId.ARCHIVE, db), rootOutput.archive, 'Archive'),
+  ]);
+}
+
+export async function validateState(state: StateReference, db: MerkleTreeOperations) {
+  const promises = [MerkleTreeId.NOTE_HASH_TREE, MerkleTreeId.NULLIFIER_TREE, MerkleTreeId.PUBLIC_DATA_TREE].map(
+    async (id: MerkleTreeId) => {
+      return { key: id, value: await getTreeSnapshot(id, db) };
+    },
+  );
+  const snapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot> = new Map(
+    (await Promise.all(promises)).map(obj => [obj.key, obj.value]),
+  );
+  validatePartialState(state.partial, snapshots);
+  validateSimulatedTree(
+    await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db),
+    state.l1ToL2MessageTree,
+    'L1ToL2MessageTree',
+  );
+}
+
+// Builds the inputs for the root rollup circuit, without making any changes to trees
+export async function getRootRollupInput(
+  rollupOutputLeft: BaseOrMergeRollupPublicInputs,
+  rollupProofLeft: RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>,
+  verificationKeyLeft: VerificationKeyAsFields,
+  rollupOutputRight: BaseOrMergeRollupPublicInputs,
+  rollupProofRight: RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>,
+  verificationKeyRight: VerificationKeyAsFields,
+  l1ToL2Roots: RootParityInput<typeof NESTED_RECURSIVE_PROOF_LENGTH>,
+  newL1ToL2Messages: Tuple<Fr, typeof NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP>,
+  messageTreeSnapshot: AppendOnlyTreeSnapshot,
+  messageTreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
+  db: MerkleTreeOperations,
+) {
+  const previousRollupData: RootRollupInputs['previousRollupData'] = [
+    getPreviousRollupDataFromPublicInputs(rollupOutputLeft, rollupProofLeft, verificationKeyLeft),
+    getPreviousRollupDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight),
+  ];
+
+  const getRootTreeSiblingPath = async (treeId: MerkleTreeId) => {
+    const { size } = await db.getTreeInfo(treeId);
+    const path = await db.getSiblingPath(treeId, size);
+    return path.toFields();
+  };
+
+  // Get blocks tree
+  const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db);
+  const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE);
+
+  const newArchiveSiblingPath = makeTuple(
+    ARCHIVE_HEIGHT,
+    i => (i < newArchiveSiblingPathArray.length ? newArchiveSiblingPathArray[i] : Fr.ZERO),
+    0,
+  );
+
+  return RootRollupInputs.from({
+    previousRollupData,
+    l1ToL2Roots,
+    newL1ToL2Messages,
+    newL1ToL2MessageTreeRootSiblingPath: messageTreeRootSiblingPath,
+    startL1ToL2MessageTreeSnapshot: messageTreeSnapshot,
+    startArchiveSnapshot,
+    newArchiveSiblingPath,
+  });
+}
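
// --- Illustrative usage sketch (editor's addition, not part of the published package) ---
// At the top of the proving tree, the two remaining (base or merge) outputs are combined with
// the L1-to-L2 message parity root and the message tree data to form the root rollup inputs.
// All parameter values are assumed to have been produced earlier in the orchestration.
async function buildRootInputsExample(
  left: [BaseOrMergeRollupPublicInputs, RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>, VerificationKeyAsFields],
  right: [BaseOrMergeRollupPublicInputs, RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>, VerificationKeyAsFields],
  l1ToL2Roots: RootParityInput<typeof NESTED_RECURSIVE_PROOF_LENGTH>,
  newL1ToL2Messages: Tuple<Fr, typeof NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP>,
  messageTreeSnapshot: AppendOnlyTreeSnapshot,
  messageTreeRootSiblingPath: Tuple<Fr, typeof L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH>,
  db: MerkleTreeOperations,
) {
  return getRootRollupInput(
    left[0], left[1], left[2],
    right[0], right[1], right[2],
    l1ToL2Roots, newL1ToL2Messages, messageTreeSnapshot, messageTreeRootSiblingPath, db,
  );
}
// --- end of illustrative sketch ---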
+
+export function getPreviousRollupDataFromPublicInputs(
+  rollupOutput: BaseOrMergeRollupPublicInputs,
+  rollupProof: RecursiveProof<typeof NESTED_RECURSIVE_PROOF_LENGTH>,
+  vk: VerificationKeyAsFields,
+) {
+  return new PreviousRollupData(
+    rollupOutput,
+    rollupProof,
+    vk,
+
+    // MembershipWitness for a VK tree to be implemented in the future
+    FUTURE_NUM,
+    new MembershipWitness(
+      ROLLUP_VK_TREE_HEIGHT,
+      BigInt(FUTURE_NUM),
+      makeTuple(ROLLUP_VK_TREE_HEIGHT, () => FUTURE_FR),
+    ),
+  );
+}
+
+export async function getConstantRollupData(
+  globalVariables: GlobalVariables,
+  db: MerkleTreeOperations,
+): Promise<ConstantRollupData> {
+  return ConstantRollupData.from({
+    baseRollupVkHash: DELETE_FR,
+    mergeRollupVkHash: DELETE_FR,
+    privateKernelVkTreeRoot: FUTURE_FR,
+    publicKernelVkTreeRoot: FUTURE_FR,
+    lastArchive: await getTreeSnapshot(MerkleTreeId.ARCHIVE, db),
+    globalVariables,
+  });
+}
+
+export async function getTreeSnapshot(id: MerkleTreeId, db: MerkleTreeOperations): Promise<AppendOnlyTreeSnapshot> {
+  const treeInfo = await db.getTreeInfo(id);
+  return new AppendOnlyTreeSnapshot(Fr.fromBuffer(treeInfo.root), Number(treeInfo.size));
+}
+
+export function getKernelDataFor(tx: ProcessedTx, vk: VerificationKeyData): KernelData {
+  const recursiveProof = makeRecursiveProofFromBinary(tx.proof, NESTED_RECURSIVE_PROOF_LENGTH);
+  return new KernelData(
+    tx.data,
+    recursiveProof,
+
+    // VK for the kernel circuit
+    vk,
+
+    // MembershipWitness for a VK tree to be implemented in the future
+    FUTURE_NUM,
+    assertLength(Array(VK_TREE_HEIGHT).fill(FUTURE_FR), VK_TREE_HEIGHT),
+  );
+}
+
+export function makeEmptyMembershipWitness<N extends number>(height: N) {
+  return new MembershipWitness(
+    height,
+    0n,
+    makeTuple(height, () => Fr.ZERO),
+  );
+}
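
// --- Illustrative note (editor's addition, not part of the published package) ---
// The rollup circuits expect fixed-length tuples, so variable-length witness arrays are padded
// with "empty" entries. The same pattern used for nullifier witnesses in buildBaseRollupInput
// looks like this in isolation; `realWitnesses` is a hypothetical array that may be shorter
// than MAX_NEW_NULLIFIERS_PER_TX.
function padNullifierWitnessesExample(realWitnesses: MembershipWitness<typeof NULLIFIER_TREE_HEIGHT>[]) {
  return makeTuple(MAX_NEW_NULLIFIERS_PER_TX, i =>
    i < realWitnesses.length ? realWitnesses[i] : makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT),
  );
}
// --- end of illustrative note ---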
+
+export async function processPublicDataUpdateRequests(tx: ProcessedTx, db: MerkleTreeOperations) {
+  const allPublicDataUpdateRequests = padArrayEnd(
+    tx.finalPublicDataUpdateRequests,
+    PublicDataUpdateRequest.empty(),
+    MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
+  );
+
+  const allPublicDataWrites = allPublicDataUpdateRequests.map(
+    ({ leafSlot, newValue }) => new PublicDataTreeLeaf(leafSlot, newValue),
+  );
+  const { lowLeavesWitnessData, newSubtreeSiblingPath, sortedNewLeaves, sortedNewLeavesIndexes } = await db.batchInsert(
+    MerkleTreeId.PUBLIC_DATA_TREE,
+    allPublicDataWrites.map(x => x.toBuffer()),
+    // TODO(#3675) remove oldValue from update requests
+    PUBLIC_DATA_SUBTREE_HEIGHT,
+  );
+
+  if (lowLeavesWitnessData === undefined) {
+    throw new Error(`Could not craft public data batch insertion proofs`);
+  }
+
+  const sortedPublicDataWrites = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => {
+    return PublicDataTreeLeaf.fromBuffer(sortedNewLeaves[i]);
+  });
+
+  const sortedPublicDataWritesIndexes = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => {
+    return sortedNewLeavesIndexes[i];
+  });
+
+  const subtreeSiblingPathAsFields = newSubtreeSiblingPath.toFields();
+  const newPublicDataSubtreeSiblingPath = makeTuple(PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, i => {
+    return subtreeSiblingPathAsFields[i];
+  });
+
+  const lowPublicDataWritesMembershipWitnesses: Tuple<
+    MembershipWitness<typeof PUBLIC_DATA_TREE_HEIGHT>,
+    typeof MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX
+  > = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => {
+    const witness = lowLeavesWitnessData[i];
+    return MembershipWitness.fromBufferArray(
+      witness.index,
+      assertLength(witness.siblingPath.toBufferArray(), PUBLIC_DATA_TREE_HEIGHT),
+    );
+  });
+
+  const lowPublicDataWritesPreimages: Tuple<
+    PublicDataTreeLeafPreimage,
+    typeof MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX
+  > = makeTuple(MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, i => {
+    return lowLeavesWitnessData[i].leafPreimage as PublicDataTreeLeafPreimage;
+  });
+
+  // validate that the sortedPublicDataWrites and sortedPublicDataWritesIndexes are in the correct order
+  // otherwise it will just fail in the circuit
+  assertPermutation(allPublicDataWrites, sortedPublicDataWrites, sortedPublicDataWritesIndexes, (a, b) => a.equals(b));
+
+  return {
+    lowPublicDataWritesPreimages,
+    lowPublicDataWritesMembershipWitnesses,
+    newPublicDataSubtreeSiblingPath,
+    sortedPublicDataWrites,
+    sortedPublicDataWritesIndexes,
+  };
+}
+
+export async function getSubtreeSiblingPath(
+  treeId: MerkleTreeId,
+  subtreeHeight: number,
+  db: MerkleTreeOperations,
+): Promise<Fr[]> {
+  const nextAvailableLeafIndex = await db.getTreeInfo(treeId).then(t => t.size);
+  const fullSiblingPath = await db.getSiblingPath(treeId, nextAvailableLeafIndex);
+
+  // Drop the first subtreeHeight items since we only care about the path to the subtree root
+  return fullSiblingPath.getSubtreeSiblingPath(subtreeHeight).toFields();
+}
+
+// Scan a tree searching for a specific value and return a membership witness proof for it
+export async function getMembershipWitnessFor<N extends number>(
+  value: Fr,
+  treeId: MerkleTreeId,
+  height: N,
+  db: MerkleTreeOperations,
+): Promise<MembershipWitness<N>> {
+  // If this is an empty tx, then just return zeroes
+  if (value.isZero()) {
+    return makeEmptyMembershipWitness(height);
+  }
+
+  const index = await db.findLeafIndex(treeId, value.toBuffer());
+  if (index === undefined) {
+    throw new Error(`Leaf with value ${value} not found in tree ${MerkleTreeId[treeId]}`);
+  }
+  const path = await db.getSiblingPath(treeId, index);
+  return new MembershipWitness(height, index, assertLength(path.toFields(), height));
+}
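
// --- Illustrative usage sketch (editor's addition, not part of the published package) ---
// As used in buildBaseRollupInput above: prove that the historical block header a tx was built
// against is a member of the archive tree. A zero value (empty tx) yields an all-zero witness
// instead of a lookup; the function name here is hypothetical.
async function archiveWitnessExample(tx: ProcessedTx, db: MerkleTreeOperations) {
  const blockHash = tx.data.constants.historicalHeader.hash();
  return getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT, db);
}
// --- end of illustrative sketch ---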
+
+export function validatePartialState(
+  partialState: PartialStateReference,
+  treeSnapshots: Map<MerkleTreeId, AppendOnlyTreeSnapshot>,
+) {
+  validateSimulatedTree(treeSnapshots.get(MerkleTreeId.NOTE_HASH_TREE)!, partialState.noteHashTree, 'NoteHashTree');
+  validateSimulatedTree(treeSnapshots.get(MerkleTreeId.NULLIFIER_TREE)!, partialState.nullifierTree, 'NullifierTree');
+  validateSimulatedTree(
+    treeSnapshots.get(MerkleTreeId.PUBLIC_DATA_TREE)!,
+    partialState.publicDataTree,
+    'PublicDataTree',
+  );
+}
+
+// Helper for comparing two trees snapshots
+export function validateSimulatedTree(
+  localTree: AppendOnlyTreeSnapshot,
+  simulatedTree: AppendOnlyTreeSnapshot,
+  name: TreeNames,
+  label?: string,
+) {
+  if (!simulatedTree.root.toBuffer().equals(localTree.root.toBuffer())) {
+    throw new Error(`${label ?? name} tree root mismatch (local ${localTree.root}, simulated ${simulatedTree.root})`);
+  }
+  if (simulatedTree.nextAvailableLeafIndex !== localTree.nextAvailableLeafIndex) {
+    throw new Error(
+      `${label ?? name} tree next available leaf index mismatch (local ${localTree.nextAvailableLeafIndex}, simulated ${
+        simulatedTree.nextAvailableLeafIndex
+      })`,
+    );
+  }
+}
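
// --- Illustrative usage sketch (editor's addition, not part of the published package) ---
// Typical call: compare a freshly read local snapshot against the snapshot the circuit
// simulation claims for a named tree; a root or next-leaf-index mismatch throws. The function
// name below is hypothetical.
async function checkNoteHashTreeExample(simulated: AppendOnlyTreeSnapshot, db: MerkleTreeOperations) {
  const local = await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db);
  validateSimulatedTree(local, simulated, 'NoteHashTree');
}
// --- end of illustrative sketch ---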
+
+export function validateTx(tx: ProcessedTx) {
+  const txHeader = tx.data.constants.historicalHeader;
+  if (txHeader.state.l1ToL2MessageTree.isZero()) {
+    throw new Error(`Empty L1 to L2 messages tree in tx: ${toFriendlyJSON(tx)}`);
+  }
+  if (txHeader.state.partial.noteHashTree.isZero()) {
+    throw new Error(`Empty note hash tree in tx: ${toFriendlyJSON(tx)}`);
+  }
+  if (txHeader.state.partial.nullifierTree.isZero()) {
+    throw new Error(`Empty nullifier tree in tx: ${toFriendlyJSON(tx)}`);
+  }
+  if (txHeader.state.partial.publicDataTree.isZero()) {
+    throw new Error(`Empty public data tree in tx: ${toFriendlyJSON(tx)}`);
+  }
+}