@aztec/merkle-tree 0.1.0-alpha10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.cjs +1 -0
- package/.tsbuildinfo +1 -0
- package/README.md +41 -0
- package/dest/hasher.d.ts +11 -0
- package/dest/hasher.d.ts.map +1 -0
- package/dest/hasher.js +2 -0
- package/dest/index.d.ts +14 -0
- package/dest/index.d.ts.map +1 -0
- package/dest/index.js +14 -0
- package/dest/interfaces/append_only_tree.d.ts +13 -0
- package/dest/interfaces/append_only_tree.d.ts.map +1 -0
- package/dest/interfaces/append_only_tree.js +2 -0
- package/dest/interfaces/indexed_tree.d.ts +63 -0
- package/dest/interfaces/indexed_tree.d.ts.map +1 -0
- package/dest/interfaces/indexed_tree.js +2 -0
- package/dest/interfaces/merkle_tree.d.ts +47 -0
- package/dest/interfaces/merkle_tree.d.ts.map +1 -0
- package/dest/interfaces/merkle_tree.js +2 -0
- package/dest/interfaces/update_only_tree.d.ts +15 -0
- package/dest/interfaces/update_only_tree.d.ts.map +1 -0
- package/dest/interfaces/update_only_tree.js +2 -0
- package/dest/load_tree.d.ts +13 -0
- package/dest/load_tree.d.ts.map +1 -0
- package/dest/load_tree.js +17 -0
- package/dest/new_tree.d.ts +15 -0
- package/dest/new_tree.d.ts.map +1 -0
- package/dest/new_tree.js +16 -0
- package/dest/pedersen.d.ts +42 -0
- package/dest/pedersen.d.ts.map +1 -0
- package/dest/pedersen.js +49 -0
- package/dest/sibling_path/sibling_path.d.ts +92 -0
- package/dest/sibling_path/sibling_path.d.ts.map +1 -0
- package/dest/sibling_path/sibling_path.js +120 -0
- package/dest/sparse_tree/sparse_tree.d.ts +15 -0
- package/dest/sparse_tree/sparse_tree.d.ts.map +1 -0
- package/dest/sparse_tree/sparse_tree.js +31 -0
- package/dest/sparse_tree/sparse_tree.test.d.ts +2 -0
- package/dest/sparse_tree/sparse_tree.test.d.ts.map +1 -0
- package/dest/sparse_tree/sparse_tree.test.js +132 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.d.ts +230 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.d.ts.map +1 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.js +497 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.test.d.ts +2 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.test.d.ts.map +1 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.test.js +316 -0
- package/dest/standard_tree/standard_tree.d.ts +25 -0
- package/dest/standard_tree/standard_tree.d.ts.map +1 -0
- package/dest/standard_tree/standard_tree.js +50 -0
- package/dest/standard_tree/standard_tree.test.d.ts +2 -0
- package/dest/standard_tree/standard_tree.test.d.ts.map +1 -0
- package/dest/standard_tree/standard_tree.test.js +58 -0
- package/dest/test/standard_based_test_suite.d.ts +6 -0
- package/dest/test/standard_based_test_suite.d.ts.map +1 -0
- package/dest/test/standard_based_test_suite.js +86 -0
- package/dest/test/test_suite.d.ts +6 -0
- package/dest/test/test_suite.d.ts.map +1 -0
- package/dest/test/test_suite.js +118 -0
- package/dest/test/utils/append_leaves.d.ts +5 -0
- package/dest/test/utils/append_leaves.d.ts.map +1 -0
- package/dest/test/utils/append_leaves.js +14 -0
- package/dest/test/utils/create_mem_down.d.ts +3 -0
- package/dest/test/utils/create_mem_down.d.ts.map +1 -0
- package/dest/test/utils/create_mem_down.js +3 -0
- package/dest/test/utils/pedersen_with_counter.d.ts +24 -0
- package/dest/test/utils/pedersen_with_counter.d.ts.map +1 -0
- package/dest/test/utils/pedersen_with_counter.js +31 -0
- package/dest/tree_base.d.ts +118 -0
- package/dest/tree_base.d.ts.map +1 -0
- package/dest/tree_base.js +214 -0
- package/package.json +14 -0
- package/package.local.json +3 -0
- package/src/hasher.ts +9 -0
- package/src/index.ts +13 -0
- package/src/interfaces/append_only_tree.ts +12 -0
- package/src/interfaces/indexed_tree.ts +78 -0
- package/src/interfaces/merkle_tree.ts +52 -0
- package/src/interfaces/update_only_tree.ts +15 -0
- package/src/load_tree.ts +24 -0
- package/src/new_tree.ts +26 -0
- package/src/pedersen.ts +58 -0
- package/src/sibling_path/sibling_path.ts +139 -0
- package/src/sparse_tree/sparse_tree.test.ts +177 -0
- package/src/sparse_tree/sparse_tree.ts +32 -0
- package/src/standard_indexed_tree/standard_indexed_tree.test.ts +450 -0
- package/src/standard_indexed_tree/standard_indexed_tree.ts +591 -0
- package/src/standard_tree/standard_tree.test.ts +74 -0
- package/src/standard_tree/standard_tree.ts +54 -0
- package/src/test/standard_based_test_suite.ts +139 -0
- package/src/test/test_suite.ts +162 -0
- package/src/test/utils/append_leaves.ts +15 -0
- package/src/test/utils/create_mem_down.ts +3 -0
- package/src/test/utils/pedersen_with_counter.ts +30 -0
- package/src/tree_base.ts +242 -0
- package/tsconfig.json +17 -0
|
@@ -0,0 +1,591 @@
|
|
|
1
|
+
import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer';
|
|
2
|
+
import { createLogger } from '@aztec/foundation/log';
|
|
3
|
+
import { Hasher } from '../hasher.js';
|
|
4
|
+
import { IndexedTree, LeafData } from '../interfaces/indexed_tree.js';
|
|
5
|
+
import { TreeBase } from '../tree_base.js';
|
|
6
|
+
import { SiblingPath } from '../index.js';
|
|
7
|
+
|
|
8
|
+
// Module-scoped logger for this tree implementation.
const log = createLogger('aztec:standard-indexed-tree');
|
|
9
|
+
|
|
10
|
+
const indexToKeyLeaf = (name: string, index: bigint) => {
|
|
11
|
+
return `${name}:leaf:${index}`;
|
|
12
|
+
};
|
|
13
|
+
|
|
14
|
+
// Preimage of an empty (zero) indexed-tree leaf: value 0 with null next-pointers.
const zeroLeaf: LeafData = {
  value: 0n,
  nextValue: 0n,
  nextIndex: 0n,
};
|
|
19
|
+
|
|
20
|
+
/**
 * All of the data returned for a single insertion during batch insertion.
 */
export interface LowLeafWitnessData<N extends number> {
  /**
   * Preimage of the low nullifier that proves non membership.
   */
  leafData: LeafData;
  /**
   * Sibling path to prove membership of low nullifier.
   */
  siblingPath: SiblingPath<N>;
  /**
   * The index of low nullifier.
   */
  index: bigint;
}
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* Pre-compute empty witness.
|
|
40
|
+
* @param treeHeight - Height of tree for sibling path.
|
|
41
|
+
* @returns An empty witness.
|
|
42
|
+
*/
|
|
43
|
+
function getEmptyLowLeafWitness<N extends number>(treeHeight: N): LowLeafWitnessData<N> {
|
|
44
|
+
return {
|
|
45
|
+
leafData: zeroLeaf,
|
|
46
|
+
index: 0n,
|
|
47
|
+
siblingPath: new SiblingPath(treeHeight, Array(treeHeight).fill(toBufferBE(0n, 32))),
|
|
48
|
+
};
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
52
|
+
const encodeTreeValue = (leafData: LeafData) => {
|
|
53
|
+
const valueAsBuffer = toBufferBE(leafData.value, 32);
|
|
54
|
+
const indexAsBuffer = toBufferBE(leafData.nextIndex, 32);
|
|
55
|
+
const nextValueAsBuffer = toBufferBE(leafData.nextValue, 32);
|
|
56
|
+
return Buffer.concat([valueAsBuffer, indexAsBuffer, nextValueAsBuffer]);
|
|
57
|
+
};
|
|
58
|
+
|
|
59
|
+
const hashEncodedTreeValue = (leaf: LeafData, hasher: Hasher) => {
|
|
60
|
+
return hasher.compressInputs([leaf.value, leaf.nextIndex, leaf.nextValue].map(val => toBufferBE(val, 32)));
|
|
61
|
+
};
|
|
62
|
+
|
|
63
|
+
const decodeTreeValue = (buf: Buffer) => {
|
|
64
|
+
const value = toBigIntBE(buf.subarray(0, 32));
|
|
65
|
+
const nextIndex = toBigIntBE(buf.subarray(32, 64));
|
|
66
|
+
const nextValue = toBigIntBE(buf.subarray(64, 96));
|
|
67
|
+
return {
|
|
68
|
+
value,
|
|
69
|
+
nextIndex,
|
|
70
|
+
nextValue,
|
|
71
|
+
} as LeafData;
|
|
72
|
+
};
|
|
73
|
+
|
|
74
|
+
// Preimage of the pre-populated first leaf. Content is identical to zeroLeaf but kept as
// a distinct object so the two are never aliased.
const initialLeaf: LeafData = {
  value: 0n,
  nextIndex: 0n,
  nextValue: 0n,
};
|
|
79
|
+
|
|
80
|
+
/**
 * Indexed merkle tree. Each leaf preimage stores its value plus the index and value of
 * the next-larger leaf, forming a linked list over leaves that supports the low-leaf
 * (predecessor) lookups used for non-membership proofs.
 */
export class StandardIndexedTree extends TreeBase implements IndexedTree {
  // Committed leaf preimages, indexed by leaf index.
  private leaves: LeafData[] = [];
  // Pending (uncommitted) leaf preimages keyed by leaf index; cleared on commit/rollback.
  private cachedLeaves: { [key: number]: LeafData } = {};
|
|
86
|
+
|
|
87
|
+
/**
|
|
88
|
+
* Appends the given leaves to the tree.
|
|
89
|
+
* @param leaves - The leaves to append.
|
|
90
|
+
* @returns Empty promise.
|
|
91
|
+
*/
|
|
92
|
+
public async appendLeaves(leaves: Buffer[]): Promise<void> {
|
|
93
|
+
for (const leaf of leaves) {
|
|
94
|
+
await this.appendLeaf(leaf);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Commits the changes to the database.
|
|
100
|
+
* @returns Empty promise.
|
|
101
|
+
*/
|
|
102
|
+
public async commit(): Promise<void> {
|
|
103
|
+
await super.commit();
|
|
104
|
+
await this.commitLeaves();
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Rolls back the not-yet-committed changes.
|
|
109
|
+
* @returns Empty promise.
|
|
110
|
+
*/
|
|
111
|
+
public async rollback(): Promise<void> {
|
|
112
|
+
await super.rollback();
|
|
113
|
+
this.clearCachedLeaves();
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
/**
|
|
117
|
+
* Gets the value of the leaf at the given index.
|
|
118
|
+
* @param index - Index of the leaf of which to obtain the value.
|
|
119
|
+
* @param includeUncommitted - Indicates whether to include uncommitted leaves in the computation.
|
|
120
|
+
* @returns The value of the leaf at the given index or undefined if the leaf is empty.
|
|
121
|
+
*/
|
|
122
|
+
public getLeafValue(index: bigint, includeUncommitted: boolean): Promise<Buffer | undefined> {
|
|
123
|
+
const leaf = this.getLatestLeafDataCopy(Number(index), includeUncommitted);
|
|
124
|
+
if (!leaf) return Promise.resolve(undefined);
|
|
125
|
+
return Promise.resolve(toBufferBE(leaf.value, 32));
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
/**
|
|
129
|
+
* Finds the index of the largest leaf whose value is less than or equal to the provided value.
|
|
130
|
+
* @param newValue - The new value to be inserted into the tree.
|
|
131
|
+
* @param includeUncommitted - If true, the uncommitted changes are included in the search.
|
|
132
|
+
* @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`.
|
|
133
|
+
*/
|
|
134
|
+
findIndexOfPreviousValue(
|
|
135
|
+
newValue: bigint,
|
|
136
|
+
includeUncommitted: boolean,
|
|
137
|
+
): {
|
|
138
|
+
/**
|
|
139
|
+
* The index of the found leaf.
|
|
140
|
+
*/
|
|
141
|
+
index: number;
|
|
142
|
+
/**
|
|
143
|
+
* A flag indicating if the corresponding leaf's value is equal to `newValue`.
|
|
144
|
+
*/
|
|
145
|
+
alreadyPresent: boolean;
|
|
146
|
+
} {
|
|
147
|
+
const numLeaves = this.getNumLeaves(includeUncommitted);
|
|
148
|
+
const diff: bigint[] = [];
|
|
149
|
+
|
|
150
|
+
for (let i = 0; i < numLeaves; i++) {
|
|
151
|
+
const storedLeaf = this.getLatestLeafDataCopy(i, includeUncommitted)!;
|
|
152
|
+
|
|
153
|
+
// The stored leaf can be undefined if it addresses an empty leaf
|
|
154
|
+
// If the leaf is empty we do the same as if the leaf was larger
|
|
155
|
+
if (storedLeaf === undefined) {
|
|
156
|
+
diff.push(newValue);
|
|
157
|
+
} else if (storedLeaf.value > newValue) {
|
|
158
|
+
diff.push(newValue);
|
|
159
|
+
} else if (storedLeaf.value === newValue) {
|
|
160
|
+
return { index: i, alreadyPresent: true };
|
|
161
|
+
} else {
|
|
162
|
+
diff.push(newValue - storedLeaf.value);
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
const minIndex = this.findMinIndex(diff);
|
|
166
|
+
return { index: minIndex, alreadyPresent: false };
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
/**
|
|
170
|
+
* Gets the latest LeafData copy.
|
|
171
|
+
* @param index - Index of the leaf of which to obtain the LeafData copy.
|
|
172
|
+
* @param includeUncommitted - If true, the uncommitted changes are included in the search.
|
|
173
|
+
* @returns A copy of the leaf data at the given index or undefined if the leaf was not found.
|
|
174
|
+
*/
|
|
175
|
+
public getLatestLeafDataCopy(index: number, includeUncommitted: boolean): LeafData | undefined {
|
|
176
|
+
const leaf = !includeUncommitted ? this.leaves[index] : this.cachedLeaves[index] ?? this.leaves[index];
|
|
177
|
+
return leaf
|
|
178
|
+
? ({
|
|
179
|
+
value: leaf.value,
|
|
180
|
+
nextIndex: leaf.nextIndex,
|
|
181
|
+
nextValue: leaf.nextValue,
|
|
182
|
+
} as LeafData)
|
|
183
|
+
: undefined;
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
/**
|
|
187
|
+
* Appends the given leaf to the tree.
|
|
188
|
+
* @param leaf - The leaf to append.
|
|
189
|
+
* @returns Empty promise.
|
|
190
|
+
*/
|
|
191
|
+
private async appendLeaf(leaf: Buffer): Promise<void> {
|
|
192
|
+
const newValue = toBigIntBE(leaf);
|
|
193
|
+
|
|
194
|
+
// Special case when appending zero
|
|
195
|
+
if (newValue === 0n) {
|
|
196
|
+
const newSize = (this.cachedSize ?? this.size) + 1n;
|
|
197
|
+
if (newSize - 1n > this.maxIndex) {
|
|
198
|
+
throw Error(`Can't append beyond max index. Max index: ${this.maxIndex}`);
|
|
199
|
+
}
|
|
200
|
+
this.cachedSize = newSize;
|
|
201
|
+
return;
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
const indexOfPrevious = this.findIndexOfPreviousValue(newValue, true);
|
|
205
|
+
const previousLeafCopy = this.getLatestLeafDataCopy(indexOfPrevious.index, true);
|
|
206
|
+
|
|
207
|
+
if (previousLeafCopy === undefined) {
|
|
208
|
+
throw new Error(`Previous leaf not found!`);
|
|
209
|
+
}
|
|
210
|
+
const newLeaf = {
|
|
211
|
+
value: newValue,
|
|
212
|
+
nextIndex: previousLeafCopy.nextIndex,
|
|
213
|
+
nextValue: previousLeafCopy.nextValue,
|
|
214
|
+
} as LeafData;
|
|
215
|
+
if (indexOfPrevious.alreadyPresent) {
|
|
216
|
+
return;
|
|
217
|
+
}
|
|
218
|
+
// insert a new leaf at the highest index and update the values of our previous leaf copy
|
|
219
|
+
const currentSize = this.getNumLeaves(true);
|
|
220
|
+
previousLeafCopy.nextIndex = BigInt(currentSize);
|
|
221
|
+
previousLeafCopy.nextValue = newLeaf.value;
|
|
222
|
+
this.cachedLeaves[Number(currentSize)] = newLeaf;
|
|
223
|
+
this.cachedLeaves[Number(indexOfPrevious.index)] = previousLeafCopy;
|
|
224
|
+
await this._updateLeaf(hashEncodedTreeValue(previousLeafCopy, this.hasher), BigInt(indexOfPrevious.index));
|
|
225
|
+
await this._updateLeaf(hashEncodedTreeValue(newLeaf, this.hasher), this.getNumLeaves(true));
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
/**
|
|
229
|
+
* Finds the index of the minimum value in an array.
|
|
230
|
+
* @param values - The collection of values to be searched.
|
|
231
|
+
* @returns The index of the minimum value in the array.
|
|
232
|
+
*/
|
|
233
|
+
private findMinIndex(values: bigint[]) {
|
|
234
|
+
if (!values.length) {
|
|
235
|
+
return 0;
|
|
236
|
+
}
|
|
237
|
+
let minIndex = 0;
|
|
238
|
+
for (let i = 1; i < values.length; i++) {
|
|
239
|
+
if (values[minIndex] > values[i]) {
|
|
240
|
+
minIndex = i;
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
return minIndex;
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
/**
|
|
247
|
+
* Initializes the tree.
|
|
248
|
+
* @param prefilledSize - A number of leaves that are prefilled with values.
|
|
249
|
+
* @returns Empty promise.
|
|
250
|
+
*/
|
|
251
|
+
public async init(prefilledSize: number): Promise<void> {
|
|
252
|
+
this.leaves.push(initialLeaf);
|
|
253
|
+
await this._updateLeaf(hashEncodedTreeValue(initialLeaf, this.hasher), 0n);
|
|
254
|
+
|
|
255
|
+
for (let i = 1; i < prefilledSize; i++) {
|
|
256
|
+
await this.appendLeaf(Buffer.from([i]));
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
await this.commit();
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
/**
|
|
263
|
+
* Loads Merkle tree data from a database and assigns them to this object.
|
|
264
|
+
*/
|
|
265
|
+
public async initFromDb(): Promise<void> {
|
|
266
|
+
const startingIndex = 0n;
|
|
267
|
+
const values: LeafData[] = [];
|
|
268
|
+
const promise = new Promise<void>((resolve, reject) => {
|
|
269
|
+
this.db
|
|
270
|
+
.createReadStream({
|
|
271
|
+
gte: indexToKeyLeaf(this.getName(), startingIndex),
|
|
272
|
+
lte: indexToKeyLeaf(this.getName(), 2n ** BigInt(this.getDepth())),
|
|
273
|
+
})
|
|
274
|
+
.on('data', function (data) {
|
|
275
|
+
const index = Number(data.key);
|
|
276
|
+
values[index] = decodeTreeValue(data.value);
|
|
277
|
+
})
|
|
278
|
+
.on('close', function () {})
|
|
279
|
+
.on('end', function () {
|
|
280
|
+
resolve();
|
|
281
|
+
})
|
|
282
|
+
.on('error', function () {
|
|
283
|
+
log('stream error');
|
|
284
|
+
reject();
|
|
285
|
+
});
|
|
286
|
+
});
|
|
287
|
+
await promise;
|
|
288
|
+
this.leaves = values;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
/**
|
|
292
|
+
* Commits all the leaves to the database and removes them from a cache.
|
|
293
|
+
*/
|
|
294
|
+
private async commitLeaves(): Promise<void> {
|
|
295
|
+
const batch = this.db.batch();
|
|
296
|
+
const keys = Object.getOwnPropertyNames(this.cachedLeaves);
|
|
297
|
+
for (const key of keys) {
|
|
298
|
+
const index = Number(key);
|
|
299
|
+
batch.put(key, this.cachedLeaves[index]);
|
|
300
|
+
this.leaves[index] = this.cachedLeaves[index];
|
|
301
|
+
}
|
|
302
|
+
await batch.write();
|
|
303
|
+
this.clearCachedLeaves();
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
  /**
   * Drops all uncommitted leaf preimages from the cache.
   */
  private clearCachedLeaves() {
    this.cachedLeaves = {};
  }
|
|
312
|
+
|
|
313
|
+
/**
|
|
314
|
+
* Updates a leaf in the tree.
|
|
315
|
+
* @param leaf - New contents of the leaf.
|
|
316
|
+
* @param index - Index of the leaf to be updated.
|
|
317
|
+
*/
|
|
318
|
+
// TODO: rename back to updateLeaf once the old updateLeaf is removed
|
|
319
|
+
private async _updateLeaf(leaf: Buffer, index: bigint) {
|
|
320
|
+
if (index > this.maxIndex) {
|
|
321
|
+
throw Error(`Index out of bounds. Index ${index}, max index: ${this.maxIndex}.`);
|
|
322
|
+
}
|
|
323
|
+
await this.addLeafToCacheAndHashToRoot(leaf, index);
|
|
324
|
+
const numLeaves = this.getNumLeaves(true);
|
|
325
|
+
if (index >= numLeaves) {
|
|
326
|
+
this.cachedSize = index + 1n;
|
|
327
|
+
}
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
/**
|
|
331
|
+
* Exposes the underlying tree's update leaf method.
|
|
332
|
+
* @param leaf - The hash to set at the leaf.
|
|
333
|
+
* @param index - The index of the element.
|
|
334
|
+
*/
|
|
335
|
+
// TODO: remove once the batch insertion functionality is moved here from circuit_block_builder.ts
|
|
336
|
+
public async updateLeaf(leaf: LeafData, index: bigint): Promise<void> {
|
|
337
|
+
let encodedLeaf;
|
|
338
|
+
if (leaf.value == 0n) {
|
|
339
|
+
encodedLeaf = toBufferBE(0n, 32);
|
|
340
|
+
} else {
|
|
341
|
+
encodedLeaf = hashEncodedTreeValue(leaf, this.hasher);
|
|
342
|
+
}
|
|
343
|
+
this.cachedLeaves[Number(index)] = leaf;
|
|
344
|
+
await this._updateLeaf(encodedLeaf, index);
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
/* eslint-disable jsdoc/require-description-complete-sentence */
|
|
348
|
+
/* The following doc block messes up with complete-sentence, so we just disable it */
|
|
349
|
+
|
|
350
|
+
/**
|
|
351
|
+
*
|
|
352
|
+
* Each base rollup needs to provide non membership / inclusion proofs for each of the nullifier.
|
|
353
|
+
* This method will return membership proofs and perform partial node updates that will
|
|
354
|
+
* allow the circuit to incrementally update the tree and perform a batch insertion.
|
|
355
|
+
*
|
|
356
|
+
* This offers massive circuit performance savings over doing incremental insertions.
|
|
357
|
+
*
|
|
358
|
+
* A description of the algorithm can be found here: https://colab.research.google.com/drive/1A0gizduSi4FIiIJZ8OylwIpO9-OTqV-R
|
|
359
|
+
*
|
|
360
|
+
* WARNING: This function has side effects, it will insert values into the tree.
|
|
361
|
+
*
|
|
362
|
+
* Assumptions:
|
|
363
|
+
* 1. There are 8 nullifiers provided and they are either unique or empty. (denoted as 0)
|
|
364
|
+
* 2. If kc 0 has 1 nullifier, and kc 1 has 3 nullifiers the layout will assume to be the sparse
|
|
365
|
+
* nullifier layout: [kc0-0, 0, 0, 0, kc1-0, kc1-1, kc1-2, 0]
|
|
366
|
+
*
|
|
367
|
+
* Algorithm overview
|
|
368
|
+
*
|
|
369
|
+
* In general, if we want to batch insert items, we first update their low nullifiers to point to them,
|
|
370
|
+
* then batch insert all of the values as at once in the final step.
|
|
371
|
+
* To update a low nullifier, we provide an insertion proof that the low nullifier currently exists to the
|
|
372
|
+
* circuit, then update the low nullifier.
|
|
373
|
+
* Updating this low nullifier will in turn change the root of the tree. Therefore future low nullifier insertion proofs
|
|
374
|
+
* must be given against this new root.
|
|
375
|
+
* As a result, each low nullifier membership proof will be provided against an intermediate tree state, each with differing
|
|
376
|
+
* roots.
|
|
377
|
+
*
|
|
378
|
+
* This becomes tricky when two items that are being batch inserted need to update the same low nullifier, or need to use
|
|
379
|
+
* a value that is part of the same batch insertion as their low nullifier. In this case a zero low nullifier path is given
|
|
380
|
+
* to the circuit, and it must determine from the set of batch inserted values if the insertion is valid.
|
|
381
|
+
*
|
|
382
|
+
* The following example will illustrate attempting to insert 2,3,20,19 into a tree already containing 0,5,10,15
|
|
383
|
+
*
|
|
384
|
+
* The example will explore two cases. In each case the values low nullifier will exist within the batch insertion,
|
|
385
|
+
* One where the low nullifier comes before the item in the set (2,3), and one where it comes after (20,19).
|
|
386
|
+
*
|
|
387
|
+
* The original tree: Pending insertion subtree
|
|
388
|
+
*
|
|
389
|
+
* index 0 2 3 4 - - - -
|
|
390
|
+
* ------------------------------------- ----------------------------
|
|
391
|
+
* val 0 5 10 15 - - - -
|
|
392
|
+
* nextIdx 1 2 3 0 - - - -
|
|
393
|
+
* nextVal 5 10 15 0 - - - -
|
|
394
|
+
*
|
|
395
|
+
*
|
|
396
|
+
* Inserting 2: (happy path)
|
|
397
|
+
* 1. Find the low nullifier (0) - provide inclusion proof
|
|
398
|
+
* 2. Update its pointers
|
|
399
|
+
* 3. Insert 2 into the pending subtree
|
|
400
|
+
*
|
|
401
|
+
* index 0 2 3 4 5 - - -
|
|
402
|
+
* ------------------------------------- ----------------------------
|
|
403
|
+
* val 0 5 10 15 2 - - -
|
|
404
|
+
* nextIdx 5 2 3 0 2 - - -
|
|
405
|
+
* nextVal 2 10 15 0 5 - - -
|
|
406
|
+
*
|
|
407
|
+
* Inserting 3: The low nullifier exists within the insertion current subtree
|
|
408
|
+
* 1. When looking for the low nullifier for 3, we will receive 0 again as we have not inserted 2 into the main tree
|
|
409
|
+
* This is problematic, as we cannot use either 0 or 2 as our inclusion proof.
|
|
410
|
+
* Why cant we?
|
|
411
|
+
* - Index 0 has a val 0 and nextVal of 2. This is NOT enough to prove non inclusion of 2.
|
|
412
|
+
* - Our existing tree is in a state where we cannot prove non inclusion of 3.
|
|
413
|
+
* We do not provide a non inclusion proof to our circuit, but prompt it to look within the insertion subtree.
|
|
414
|
+
* 2. Update pending insertion subtree
|
|
415
|
+
* 3. Insert 3 into pending subtree
|
|
416
|
+
*
|
|
417
|
+
* (no inclusion proof provided)
|
|
418
|
+
* index 0 2 3 4 5 6 - -
|
|
419
|
+
* ------------------------------------- ----------------------------
|
|
420
|
+
* val 0 5 10 15 2 3 - -
|
|
421
|
+
* nextIdx 5 2 3 0 6 2 - -
|
|
422
|
+
* nextVal 2 10 15 0 3 5 - -
|
|
423
|
+
*
|
|
424
|
+
* Inserting 20: (happy path)
|
|
425
|
+
* 1. Find the low nullifier (15) - provide inclusion proof
|
|
426
|
+
* 2. Update its pointers
|
|
427
|
+
* 3. Insert 20 into the pending subtree
|
|
428
|
+
*
|
|
429
|
+
* index 0 2 3 4 5 6 7 -
|
|
430
|
+
* ------------------------------------- ----------------------------
|
|
431
|
+
* val 0 5 10 15 2 3 20 -
|
|
432
|
+
* nextIdx 5 2 3 7 6 2 0 -
|
|
433
|
+
* nextVal 2 10 15 20 3 5 0 -
|
|
434
|
+
*
|
|
435
|
+
* Inserting 19:
|
|
436
|
+
* 1. In this case we can find a low nullifier, but we are updating a low nullifier that has already been updated
|
|
437
|
+
* We can provide an inclusion proof of this intermediate tree state.
|
|
438
|
+
* 2. Update its pointers
|
|
439
|
+
* 3. Insert 19 into the pending subtree
|
|
440
|
+
*
|
|
441
|
+
* index 0 2 3 4 5 6 7 8
|
|
442
|
+
* ------------------------------------- ----------------------------
|
|
443
|
+
* val 0 5 10 15 2 3 20 19
|
|
444
|
+
* nextIdx 5 2 3 8 6 2 0 7
|
|
445
|
+
* nextVal 2 10 15 19 3 5 0 20
|
|
446
|
+
*
|
|
447
|
+
* Perform subtree insertion
|
|
448
|
+
*
|
|
449
|
+
* index 0 2 3 4 5 6 7 8
|
|
450
|
+
* ---------------------------------------------------------------------
|
|
451
|
+
* val 0 5 10 15 2 3 20 19
|
|
452
|
+
* nextIdx 5 2 3 8 6 2 0 7
|
|
453
|
+
* nextVal 2 10 15 19 3 5 0 20
|
|
454
|
+
*
|
|
455
|
+
* TODO: this implementation will change once the zero value is changed from h(0,0,0). Changes incoming over the next sprint
|
|
456
|
+
* @param leaves - Values to insert into the tree.
|
|
457
|
+
* @param treeHeight - Height of the tree.
|
|
458
|
+
* @param subtreeHeight - Height of the subtree.
|
|
459
|
+
* @returns The data for the leaves to be updated when inserting the new ones.
|
|
460
|
+
*/
|
|
461
|
+
  public async batchInsert<
    TreeHeight extends number,
    SubtreeHeight extends number,
    SubtreeSiblingPathHeight extends number,
  >(
    leaves: Buffer[],
    treeHeight: TreeHeight,
    subtreeHeight: SubtreeHeight,
  ): Promise<
    | [LowLeafWitnessData<TreeHeight>[], SiblingPath<SubtreeSiblingPathHeight>]
    | [undefined, SiblingPath<SubtreeSiblingPathHeight>]
  > {
    // Tracks, per low-leaf index, which pending values have already re-pointed that leaf.
    const touched = new Map<number, bigint[]>();

    const emptyLowLeafWitness = getEmptyLowLeafWitness(treeHeight);
    // Accumulators: one witness and one pending-subtree leaf per input value.
    const lowLeavesWitnesses: LowLeafWitnessData<TreeHeight>[] = [];
    const pendingInsertionSubtree: LeafData[] = [];

    // Index at which the pending subtree will be appended.
    const startInsertionIndex = this.getNumLeaves(true);

    // Get insertion path for each leaf
    for (let i = 0; i < leaves.length; i++) {
      const newValue = toBigIntBE(leaves[i]);

      // Zero values only reserve a slot: zero leaf + empty witness.
      if (newValue === 0n) {
        pendingInsertionSubtree.push(zeroLeaf);
        lowLeavesWitnesses.push(emptyLowLeafWitness);
        continue;
      }

      const indexOfPrevious = this.findIndexOfPreviousValue(newValue, true);

      // If this low leaf was already re-pointed at a smaller pending value, the on-tree
      // witness is stale; the true low leaf lives in the pending subtree instead.
      const prevNodes = touched.get(indexOfPrevious.index);
      if (prevNodes && prevNodes.some(v => v < newValue)) {
        // Search the pending low nullifiers for one that brackets newValue.
        for (let j = 0; j < pendingInsertionSubtree.length; j++) {
          if (pendingInsertionSubtree[j].value === 0n) continue;

          if (
            pendingInsertionSubtree[j].value < newValue &&
            (pendingInsertionSubtree[j].nextValue > newValue || pendingInsertionSubtree[j].nextValue === 0n)
          ) {
            // The new leaf inherits the pending low leaf's next-pointers.
            const currentLowLeaf: LeafData = {
              value: newValue,
              nextValue: pendingInsertionSubtree[j].nextValue,
              nextIndex: pendingInsertionSubtree[j].nextIndex,
            };

            pendingInsertionSubtree.push(currentLowLeaf);

            // Re-point the pending low leaf at the new value.
            pendingInsertionSubtree[j].nextValue = newValue;
            pendingInsertionSubtree[j].nextIndex = startInsertionIndex + BigInt(i);

            break;
          }
        }

        // An empty witness tells the circuit to resolve the low leaf from the batch itself.
        lowLeavesWitnesses.push(emptyLowLeafWitness);
      } else {
        // Record that this low-leaf index has now been claimed by newValue.
        if (prevNodes) {
          prevNodes.push(newValue);
          touched.set(indexOfPrevious.index, prevNodes);
        } else {
          touched.set(indexOfPrevious.index, [newValue]);
        }

        // Read the current (possibly intermediate) low leaf from the tree.
        const lowLeaf = this.getLatestLeafDataCopy(indexOfPrevious.index, true);
        if (lowLeaf === undefined) {
          return [undefined, await this.getSubtreeSiblingPath(subtreeHeight, true)];
        }
        const siblingPath = await this.getSiblingPath<TreeHeight>(BigInt(indexOfPrevious.index), true);

        // Witness against the tree state *before* this update is applied.
        const witness: LowLeafWitnessData<TreeHeight> = {
          leafData: { ...lowLeaf },
          index: BigInt(indexOfPrevious.index),
          siblingPath,
        };

        lowLeavesWitnesses.push(witness);

        // The new leaf inherits the low leaf's next-pointers.
        const currentLowLeaf: LeafData = {
          value: newValue,
          nextValue: lowLeaf.nextValue,
          nextIndex: lowLeaf.nextIndex,
        };

        pendingInsertionSubtree.push(currentLowLeaf);

        // Re-point the low leaf at the new value and write it (side effect on the tree).
        lowLeaf.nextValue = newValue;
        lowLeaf.nextIndex = startInsertionIndex + BigInt(i);

        await this.updateLeaf(lowLeaf, BigInt(indexOfPrevious.index));
      }
    }

    // Sibling path of the (still empty) subtree slot, after all low-leaf updates.
    const newSubtreeSiblingPath = await this.getSubtreeSiblingPath<SubtreeHeight, SubtreeSiblingPathHeight>(
      subtreeHeight,
      true,
    );

    // Perform batch insertion of new pending values
    for (let i = 0; i < pendingInsertionSubtree.length; i++) {
      await this.updateLeaf(pendingInsertionSubtree[i], startInsertionIndex + BigInt(i));
    }

    return [lowLeavesWitnesses, newSubtreeSiblingPath];
  }
|
|
580
|
+
|
|
581
|
+
async getSubtreeSiblingPath<SubtreeHeight extends number, SubtreeSiblingPathHeight extends number>(
|
|
582
|
+
subtreeHeight: SubtreeHeight,
|
|
583
|
+
includeUncommitted: boolean,
|
|
584
|
+
): Promise<SiblingPath<SubtreeSiblingPathHeight>> {
|
|
585
|
+
const nextAvailableLeafIndex = this.getNumLeaves(includeUncommitted);
|
|
586
|
+
const fullSiblingPath = await this.getSiblingPath(nextAvailableLeafIndex, includeUncommitted);
|
|
587
|
+
|
|
588
|
+
// Drop the first subtreeHeight items since we only care about the path to the subtree root
|
|
589
|
+
return fullSiblingPath.getSubtreeSiblingPath(subtreeHeight);
|
|
590
|
+
}
|
|
591
|
+
}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import { CircuitsWasm } from '@aztec/circuits.js';
|
|
2
|
+
import { IWasmModule } from '@aztec/foundation/wasm';
|
|
3
|
+
import { default as levelup } from 'levelup';
|
|
4
|
+
import { Hasher } from '../hasher.js';
|
|
5
|
+
import { loadTree } from '../load_tree.js';
|
|
6
|
+
import { newTree } from '../new_tree.js';
|
|
7
|
+
import { standardBasedTreeTestSuite } from '../test/standard_based_test_suite.js';
|
|
8
|
+
import { treeTestSuite } from '../test/test_suite.js';
|
|
9
|
+
import { PedersenWithCounter } from '../test/utils/pedersen_with_counter.js';
|
|
10
|
+
import { StandardTree } from './standard_tree.js';
|
|
11
|
+
import { createMemDown } from '../test/utils/create_mem_down.js';
|
|
12
|
+
import { randomBytes } from '@aztec/foundation/crypto';
|
|
13
|
+
import { INITIAL_LEAF } from '../tree_base.js';
|
|
14
|
+
|
|
15
|
+
const createDb = async (levelUp: levelup.LevelUp, hasher: Hasher, name: string, depth: number) => {
|
|
16
|
+
return await newTree(StandardTree, levelUp, hasher, name, depth);
|
|
17
|
+
};
|
|
18
|
+
|
|
19
|
+
const createFromName = async (levelUp: levelup.LevelUp, hasher: Hasher, name: string) => {
|
|
20
|
+
return await loadTree(StandardTree, levelUp, hasher, name);
|
|
21
|
+
};
|
|
22
|
+
|
|
23
|
+
// Register the shared merkle-tree test suites against the StandardTree implementation.
treeTestSuite('StandardTree', createDb, createFromName);
standardBasedTreeTestSuite('StandardTree', createDb);
|
|
25
|
+
|
|
26
|
+
describe('StandardTree_batchAppend', () => {
  let wasm: IWasmModule;
  let pedersen: PedersenWithCounter;

  beforeAll(async () => {
    wasm = await CircuitsWasm.get();
    pedersen = new PedersenWithCounter(wasm);
  });

  afterEach(() => {
    // Reset the hash-call counter so each test starts from zero.
    pedersen.resetCounter();
  });

  it('correctly computes root when batch appending and calls compress function expected num times', async () => {
    const db = levelup(createMemDown());
    const tree = await createDb(db, pedersen, 'test', 3);
    const leaves = Array.from({ length: 5 }, _ => randomBytes(32));

    pedersen.resetCounter();
    await tree.appendLeaves(leaves);

    // Appending 5 leaves to a depth-3 tree requires the following hashing per level:
    //              level2Node0            level2Node1          level2Node2
    // LEVEL2: [newLeaf0, newLeaf1], [newLeaf2, newLeaf3], [newLeaf4, INITIAL_LEAF].
    //               level1Node0                 level1Node1
    // LEVEL1: [level2Node0, level2Node1], [level2Node2, level2ZeroHash].
    //              ROOT
    // LEVEL0: [level1Node0, level1Node1].
    const level2NumHashing = 3;
    const level1NumHashing = 2;
    const level0NumHashing = 1;
    const expectedNumHashing = level2NumHashing + level1NumHashing + level0NumHashing;

    expect(pedersen.compressCounter).toEqual(expectedNumHashing);

    // Recompute the root by hand and compare with the tree's root.
    const level2Node0 = pedersen.compress(leaves[0], leaves[1]);
    const level2Node1 = pedersen.compress(leaves[2], leaves[3]);
    const level2Node2 = pedersen.compress(leaves[4], INITIAL_LEAF);

    const level2ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF);

    const level1Node0 = pedersen.compress(level2Node0, level2Node1);
    const level1Node1 = pedersen.compress(level2Node2, level2ZeroHash);

    const root = pedersen.compress(level1Node0, level1Node1);

    expect(tree.getRoot(true)).toEqual(root);
  });
});
|