@aztec/merkle-tree 0.1.0-alpha10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.cjs +1 -0
- package/.tsbuildinfo +1 -0
- package/README.md +41 -0
- package/dest/hasher.d.ts +11 -0
- package/dest/hasher.d.ts.map +1 -0
- package/dest/hasher.js +2 -0
- package/dest/index.d.ts +14 -0
- package/dest/index.d.ts.map +1 -0
- package/dest/index.js +14 -0
- package/dest/interfaces/append_only_tree.d.ts +13 -0
- package/dest/interfaces/append_only_tree.d.ts.map +1 -0
- package/dest/interfaces/append_only_tree.js +2 -0
- package/dest/interfaces/indexed_tree.d.ts +63 -0
- package/dest/interfaces/indexed_tree.d.ts.map +1 -0
- package/dest/interfaces/indexed_tree.js +2 -0
- package/dest/interfaces/merkle_tree.d.ts +47 -0
- package/dest/interfaces/merkle_tree.d.ts.map +1 -0
- package/dest/interfaces/merkle_tree.js +2 -0
- package/dest/interfaces/update_only_tree.d.ts +15 -0
- package/dest/interfaces/update_only_tree.d.ts.map +1 -0
- package/dest/interfaces/update_only_tree.js +2 -0
- package/dest/load_tree.d.ts +13 -0
- package/dest/load_tree.d.ts.map +1 -0
- package/dest/load_tree.js +17 -0
- package/dest/new_tree.d.ts +15 -0
- package/dest/new_tree.d.ts.map +1 -0
- package/dest/new_tree.js +16 -0
- package/dest/pedersen.d.ts +42 -0
- package/dest/pedersen.d.ts.map +1 -0
- package/dest/pedersen.js +49 -0
- package/dest/sibling_path/sibling_path.d.ts +92 -0
- package/dest/sibling_path/sibling_path.d.ts.map +1 -0
- package/dest/sibling_path/sibling_path.js +120 -0
- package/dest/sparse_tree/sparse_tree.d.ts +15 -0
- package/dest/sparse_tree/sparse_tree.d.ts.map +1 -0
- package/dest/sparse_tree/sparse_tree.js +31 -0
- package/dest/sparse_tree/sparse_tree.test.d.ts +2 -0
- package/dest/sparse_tree/sparse_tree.test.d.ts.map +1 -0
- package/dest/sparse_tree/sparse_tree.test.js +132 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.d.ts +230 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.d.ts.map +1 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.js +497 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.test.d.ts +2 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.test.d.ts.map +1 -0
- package/dest/standard_indexed_tree/standard_indexed_tree.test.js +316 -0
- package/dest/standard_tree/standard_tree.d.ts +25 -0
- package/dest/standard_tree/standard_tree.d.ts.map +1 -0
- package/dest/standard_tree/standard_tree.js +50 -0
- package/dest/standard_tree/standard_tree.test.d.ts +2 -0
- package/dest/standard_tree/standard_tree.test.d.ts.map +1 -0
- package/dest/standard_tree/standard_tree.test.js +58 -0
- package/dest/test/standard_based_test_suite.d.ts +6 -0
- package/dest/test/standard_based_test_suite.d.ts.map +1 -0
- package/dest/test/standard_based_test_suite.js +86 -0
- package/dest/test/test_suite.d.ts +6 -0
- package/dest/test/test_suite.d.ts.map +1 -0
- package/dest/test/test_suite.js +118 -0
- package/dest/test/utils/append_leaves.d.ts +5 -0
- package/dest/test/utils/append_leaves.d.ts.map +1 -0
- package/dest/test/utils/append_leaves.js +14 -0
- package/dest/test/utils/create_mem_down.d.ts +3 -0
- package/dest/test/utils/create_mem_down.d.ts.map +1 -0
- package/dest/test/utils/create_mem_down.js +3 -0
- package/dest/test/utils/pedersen_with_counter.d.ts +24 -0
- package/dest/test/utils/pedersen_with_counter.d.ts.map +1 -0
- package/dest/test/utils/pedersen_with_counter.js +31 -0
- package/dest/tree_base.d.ts +118 -0
- package/dest/tree_base.d.ts.map +1 -0
- package/dest/tree_base.js +214 -0
- package/package.json +14 -0
- package/package.local.json +3 -0
- package/src/hasher.ts +9 -0
- package/src/index.ts +13 -0
- package/src/interfaces/append_only_tree.ts +12 -0
- package/src/interfaces/indexed_tree.ts +78 -0
- package/src/interfaces/merkle_tree.ts +52 -0
- package/src/interfaces/update_only_tree.ts +15 -0
- package/src/load_tree.ts +24 -0
- package/src/new_tree.ts +26 -0
- package/src/pedersen.ts +58 -0
- package/src/sibling_path/sibling_path.ts +139 -0
- package/src/sparse_tree/sparse_tree.test.ts +177 -0
- package/src/sparse_tree/sparse_tree.ts +32 -0
- package/src/standard_indexed_tree/standard_indexed_tree.test.ts +450 -0
- package/src/standard_indexed_tree/standard_indexed_tree.ts +591 -0
- package/src/standard_tree/standard_tree.test.ts +74 -0
- package/src/standard_tree/standard_tree.ts +54 -0
- package/src/test/standard_based_test_suite.ts +139 -0
- package/src/test/test_suite.ts +162 -0
- package/src/test/utils/append_leaves.ts +15 -0
- package/src/test/utils/create_mem_down.ts +3 -0
- package/src/test/utils/pedersen_with_counter.ts +30 -0
- package/src/tree_base.ts +242 -0
- package/tsconfig.json +17 -0
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { AppendOnlyTree } from '../interfaces/append_only_tree.js';
|
|
2
|
+
import { TreeBase, indexToKeyHash } from '../tree_base.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* A Merkle tree implementation that uses a LevelDB database to store the tree.
|
|
6
|
+
*/
|
|
7
|
+
export class StandardTree extends TreeBase implements AppendOnlyTree {
|
|
8
|
+
/**
|
|
9
|
+
* Appends the given leaves to the tree.
|
|
10
|
+
* @param leaves - The leaves to append.
|
|
11
|
+
* @returns Empty promise.
|
|
12
|
+
*
|
|
13
|
+
* @remarks The batch insertion algorithm works as follows:
|
|
14
|
+
* 1. Insert all the leaves,
|
|
15
|
+
* 2. start iterating over levels from the bottom up,
|
|
16
|
+
* 3. on each level iterate over all the affected nodes (i.e. nodes whose preimages have changed),
|
|
17
|
+
* 4. fetch the preimage, hash it and insert the updated value.
|
|
18
|
+
* @remarks This algorithm is optimal when it comes to the number of hashing operations. It might not be optimal when
|
|
19
|
+
* it comes to the number of database reads, but that should be irrelevant given that most of the time
|
|
20
|
+
* `getLatestValueAtIndex` will return a value from cache (because at least one of the 2 children was
|
|
21
|
+
* touched in previous iteration).
|
|
22
|
+
*/
|
|
23
|
+
public async appendLeaves(leaves: Buffer[]): Promise<void> {
|
|
24
|
+
const numLeaves = this.getNumLeaves(true);
|
|
25
|
+
if (numLeaves + BigInt(leaves.length) - 1n > this.maxIndex) {
|
|
26
|
+
throw Error(`Can't append beyond max index. Max index: ${this.maxIndex}`);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
// 1. Insert all the leaves
|
|
30
|
+
let firstIndex = numLeaves;
|
|
31
|
+
let level = this.depth;
|
|
32
|
+
for (let i = 0; i < leaves.length; i++) {
|
|
33
|
+
const cacheKey = indexToKeyHash(this.name, level, firstIndex + BigInt(i));
|
|
34
|
+
this.cache[cacheKey] = leaves[i];
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
let lastIndex = firstIndex + BigInt(leaves.length);
|
|
38
|
+
// 2. Iterate over all the levels from the bottom up
|
|
39
|
+
while (level > 0) {
|
|
40
|
+
firstIndex >>= 1n;
|
|
41
|
+
lastIndex >>= 1n;
|
|
42
|
+
// 3.Iterate over all the affected nodes at this level and update them
|
|
43
|
+
for (let index = firstIndex; index <= lastIndex; index++) {
|
|
44
|
+
const lhs = await this.getLatestValueAtIndex(level, index * 2n, true);
|
|
45
|
+
const rhs = await this.getLatestValueAtIndex(level, index * 2n + 1n, true);
|
|
46
|
+
const cacheKey = indexToKeyHash(this.name, level - 1, index);
|
|
47
|
+
this.cache[cacheKey] = this.hasher.compress(lhs, rhs);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
level -= 1;
|
|
51
|
+
}
|
|
52
|
+
this.cachedSize = numLeaves + BigInt(leaves.length);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
import { default as levelup } from 'levelup';
|
|
2
|
+
import { Hasher, INITIAL_LEAF, Pedersen, SiblingPath } from '../index.js';
|
|
3
|
+
import { appendLeaves } from './utils/append_leaves.js';
|
|
4
|
+
import { createMemDown } from './utils/create_mem_down.js';
|
|
5
|
+
import { randomBytes } from 'crypto';
|
|
6
|
+
import { AppendOnlyTree } from '../interfaces/append_only_tree.js';
|
|
7
|
+
import { UpdateOnlyTree } from '../interfaces/update_only_tree.js';
|
|
8
|
+
import { IWasmModule } from '@aztec/foundation/wasm';
|
|
9
|
+
import { CircuitsWasm } from '@aztec/circuits.js';
|
|
10
|
+
|
|
11
|
+
const TEST_TREE_DEPTH = 2;
|
|
12
|
+
|
|
13
|
+
export const standardBasedTreeTestSuite = (
|
|
14
|
+
testName: string,
|
|
15
|
+
createDb: (
|
|
16
|
+
levelup: levelup.LevelUp,
|
|
17
|
+
hasher: Hasher,
|
|
18
|
+
name: string,
|
|
19
|
+
depth: number,
|
|
20
|
+
) => Promise<AppendOnlyTree | UpdateOnlyTree>,
|
|
21
|
+
) => {
|
|
22
|
+
describe(testName, () => {
|
|
23
|
+
let wasm: IWasmModule;
|
|
24
|
+
let pedersen: Pedersen;
|
|
25
|
+
const values: Buffer[] = [];
|
|
26
|
+
|
|
27
|
+
beforeAll(async () => {
|
|
28
|
+
wasm = await CircuitsWasm.get();
|
|
29
|
+
pedersen = new Pedersen(wasm);
|
|
30
|
+
|
|
31
|
+
for (let i = 0; i < 4; ++i) {
|
|
32
|
+
const v = Buffer.alloc(32, i + 1);
|
|
33
|
+
v.writeUInt32BE(i, 28);
|
|
34
|
+
values[i] = v;
|
|
35
|
+
}
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
it('should have correct empty tree root for depth 32', async () => {
|
|
39
|
+
const db = levelup(createMemDown());
|
|
40
|
+
const tree = await createDb(db, pedersen, 'test', 32);
|
|
41
|
+
const root = tree.getRoot(false);
|
|
42
|
+
expect(root.toString('hex')).toEqual('20efbe2c7b675f26ab71689279908bbab33a6963e7e0dcb80e4c46583d094113');
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
it('should throw when appending beyond max index', async () => {
|
|
46
|
+
const db = levelup(createMemDown());
|
|
47
|
+
const tree = await createDb(db, pedersen, 'test', 2);
|
|
48
|
+
const leaves = Array.from({ length: 5 }, _ => randomBytes(32));
|
|
49
|
+
await expect(appendLeaves(tree, leaves)).rejects.toThrow();
|
|
50
|
+
});
|
|
51
|
+
|
|
52
|
+
it('should have correct root and sibling paths', async () => {
|
|
53
|
+
const db = levelup(createMemDown());
|
|
54
|
+
const tree = await createDb(db, pedersen, 'test', 2);
|
|
55
|
+
|
|
56
|
+
const level1ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF);
|
|
57
|
+
expect(tree.getNumLeaves(false)).toEqual(0n);
|
|
58
|
+
expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash));
|
|
59
|
+
expect(await tree.getSiblingPath(0n, false)).toEqual(
|
|
60
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]),
|
|
61
|
+
);
|
|
62
|
+
|
|
63
|
+
await appendLeaves(tree, [values[0]]);
|
|
64
|
+
expect(tree.getNumLeaves(true)).toEqual(1n);
|
|
65
|
+
expect(tree.getNumLeaves(false)).toEqual(0n);
|
|
66
|
+
expect(tree.getRoot(true)).toEqual(pedersen.compress(pedersen.compress(values[0], INITIAL_LEAF), level1ZeroHash));
|
|
67
|
+
expect(await tree.getSiblingPath(0n, true)).toEqual(
|
|
68
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]),
|
|
69
|
+
);
|
|
70
|
+
expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash));
|
|
71
|
+
expect(await tree.getSiblingPath(0n, false)).toEqual(
|
|
72
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]),
|
|
73
|
+
);
|
|
74
|
+
|
|
75
|
+
await appendLeaves(tree, [values[1]]);
|
|
76
|
+
expect(tree.getNumLeaves(true)).toEqual(2n);
|
|
77
|
+
expect(tree.getRoot(true)).toEqual(pedersen.compress(pedersen.compress(values[0], values[1]), level1ZeroHash));
|
|
78
|
+
expect(await tree.getSiblingPath(1n, true)).toEqual(
|
|
79
|
+
new SiblingPath(TEST_TREE_DEPTH, [values[0], level1ZeroHash]),
|
|
80
|
+
);
|
|
81
|
+
expect(tree.getNumLeaves(false)).toEqual(0n);
|
|
82
|
+
expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash));
|
|
83
|
+
expect(await tree.getSiblingPath(1n, false)).toEqual(
|
|
84
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]),
|
|
85
|
+
);
|
|
86
|
+
|
|
87
|
+
await appendLeaves(tree, [values[2]]);
|
|
88
|
+
expect(tree.getNumLeaves(true)).toEqual(3n);
|
|
89
|
+
expect(tree.getRoot(true)).toEqual(
|
|
90
|
+
pedersen.compress(pedersen.compress(values[0], values[1]), pedersen.compress(values[2], INITIAL_LEAF)),
|
|
91
|
+
);
|
|
92
|
+
expect(await tree.getSiblingPath(2n, true)).toEqual(
|
|
93
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, pedersen.compress(values[0], values[1])]),
|
|
94
|
+
);
|
|
95
|
+
expect(tree.getNumLeaves(false)).toEqual(0n);
|
|
96
|
+
expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash));
|
|
97
|
+
expect(await tree.getSiblingPath(2n, false)).toEqual(
|
|
98
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]),
|
|
99
|
+
);
|
|
100
|
+
|
|
101
|
+
await appendLeaves(tree, [values[3]]);
|
|
102
|
+
expect(tree.getNumLeaves(true)).toEqual(4n);
|
|
103
|
+
expect(tree.getRoot(true)).toEqual(
|
|
104
|
+
pedersen.compress(pedersen.compress(values[0], values[1]), pedersen.compress(values[2], values[3])),
|
|
105
|
+
);
|
|
106
|
+
expect(await tree.getSiblingPath(3n, true)).toEqual(
|
|
107
|
+
new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.compress(values[0], values[1])]),
|
|
108
|
+
);
|
|
109
|
+
expect(tree.getNumLeaves(false)).toEqual(0n);
|
|
110
|
+
expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash));
|
|
111
|
+
expect(await tree.getSiblingPath(3n, false)).toEqual(
|
|
112
|
+
new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]),
|
|
113
|
+
);
|
|
114
|
+
// Lifted from memory_tree.test.cpp to ensure consistency.
|
|
115
|
+
//expect(root.toString('hex')).toEqual('0bf2e78afd70f72b0e6eafb03c41faef167a82441b05e517cdf35d813302061f');
|
|
116
|
+
expect(await tree.getSiblingPath(0n, true)).toEqual(
|
|
117
|
+
new SiblingPath(TEST_TREE_DEPTH, [values[1], pedersen.compress(values[2], values[3])]),
|
|
118
|
+
);
|
|
119
|
+
expect(await tree.getSiblingPath(1n, true)).toEqual(
|
|
120
|
+
new SiblingPath(TEST_TREE_DEPTH, [values[0], pedersen.compress(values[2], values[3])]),
|
|
121
|
+
);
|
|
122
|
+
expect(await tree.getSiblingPath(2n, true)).toEqual(
|
|
123
|
+
new SiblingPath(TEST_TREE_DEPTH, [values[3], pedersen.compress(values[0], values[1])]),
|
|
124
|
+
);
|
|
125
|
+
expect(await tree.getSiblingPath(3n, true)).toEqual(
|
|
126
|
+
new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.compress(values[0], values[1])]),
|
|
127
|
+
);
|
|
128
|
+
|
|
129
|
+
await tree.commit();
|
|
130
|
+
// now committed state should equal uncommitted state
|
|
131
|
+
expect(await tree.getSiblingPath(0n, false)).toEqual(await tree.getSiblingPath(0n, true));
|
|
132
|
+
expect(await tree.getSiblingPath(1n, false)).toEqual(await tree.getSiblingPath(1n, true));
|
|
133
|
+
expect(await tree.getSiblingPath(2n, false)).toEqual(await tree.getSiblingPath(2n, true));
|
|
134
|
+
expect(await tree.getSiblingPath(3n, false)).toEqual(await tree.getSiblingPath(3n, true));
|
|
135
|
+
expect(tree.getNumLeaves(false)).toEqual(tree.getNumLeaves(true));
|
|
136
|
+
expect(tree.getRoot(false)).toEqual(tree.getRoot(true));
|
|
137
|
+
});
|
|
138
|
+
});
|
|
139
|
+
};
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
import { IWasmModule } from '@aztec/foundation/wasm';
|
|
2
|
+
import { default as levelup } from 'levelup';
|
|
3
|
+
import { Hasher, Pedersen, SiblingPath } from '../index.js';
|
|
4
|
+
import { appendLeaves } from './utils/append_leaves.js';
|
|
5
|
+
import { createMemDown } from './utils/create_mem_down.js';
|
|
6
|
+
import { AppendOnlyTree } from '../interfaces/append_only_tree.js';
|
|
7
|
+
import { UpdateOnlyTree } from '../interfaces/update_only_tree.js';
|
|
8
|
+
import { CircuitsWasm } from '@aztec/circuits.js';
|
|
9
|
+
|
|
10
|
+
const expectSameTrees = async (
|
|
11
|
+
tree1: AppendOnlyTree | UpdateOnlyTree,
|
|
12
|
+
tree2: AppendOnlyTree | UpdateOnlyTree,
|
|
13
|
+
includeUncommitted = true,
|
|
14
|
+
) => {
|
|
15
|
+
const size = tree1.getNumLeaves(includeUncommitted);
|
|
16
|
+
expect(size).toBe(tree2.getNumLeaves(includeUncommitted));
|
|
17
|
+
expect(tree1.getRoot(includeUncommitted).toString('hex')).toBe(tree2.getRoot(includeUncommitted).toString('hex'));
|
|
18
|
+
|
|
19
|
+
for (let i = 0; i < size; ++i) {
|
|
20
|
+
const siblingPath1 = await tree1.getSiblingPath(BigInt(i), includeUncommitted);
|
|
21
|
+
const siblingPath2 = await tree2.getSiblingPath(BigInt(i), includeUncommitted);
|
|
22
|
+
expect(siblingPath2).toStrictEqual(siblingPath1);
|
|
23
|
+
}
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
export const treeTestSuite = (
|
|
27
|
+
testName: string,
|
|
28
|
+
createDb: (
|
|
29
|
+
levelup: levelup.LevelUp,
|
|
30
|
+
hasher: Hasher,
|
|
31
|
+
name: string,
|
|
32
|
+
depth: number,
|
|
33
|
+
) => Promise<AppendOnlyTree | UpdateOnlyTree>,
|
|
34
|
+
createFromName: (levelup: levelup.LevelUp, hasher: Hasher, name: string) => Promise<AppendOnlyTree | UpdateOnlyTree>,
|
|
35
|
+
) => {
|
|
36
|
+
describe(testName, () => {
|
|
37
|
+
const values: Buffer[] = [];
|
|
38
|
+
let wasm: IWasmModule;
|
|
39
|
+
let pedersen: Pedersen;
|
|
40
|
+
|
|
41
|
+
beforeAll(() => {
|
|
42
|
+
for (let i = 0; i < 32; ++i) {
|
|
43
|
+
const v = Buffer.alloc(32, i + 1);
|
|
44
|
+
v.writeUInt32BE(i, 28);
|
|
45
|
+
values[i] = v;
|
|
46
|
+
}
|
|
47
|
+
});
|
|
48
|
+
|
|
49
|
+
beforeEach(async () => {
|
|
50
|
+
wasm = await CircuitsWasm.get();
|
|
51
|
+
pedersen = new Pedersen(wasm);
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
it('should revert changes on rollback', async () => {
|
|
55
|
+
const levelDownEmpty = createMemDown();
|
|
56
|
+
const dbEmpty = levelup(levelDownEmpty);
|
|
57
|
+
const emptyTree = await createDb(dbEmpty, pedersen, 'test', 10);
|
|
58
|
+
|
|
59
|
+
const levelDown = createMemDown();
|
|
60
|
+
const db = levelup(levelDown);
|
|
61
|
+
const tree = await createDb(db, pedersen, 'test2', 10);
|
|
62
|
+
await appendLeaves(tree, values.slice(0, 4));
|
|
63
|
+
|
|
64
|
+
const firstRoot = tree.getRoot(true);
|
|
65
|
+
expect(firstRoot).not.toEqual(emptyTree.getRoot(true));
|
|
66
|
+
// committed root should still be the empty root
|
|
67
|
+
expect(tree.getRoot(false)).toEqual(emptyTree.getRoot(false));
|
|
68
|
+
|
|
69
|
+
await tree.rollback();
|
|
70
|
+
|
|
71
|
+
// both committed and uncommitted trees should be equal to the empty tree
|
|
72
|
+
await expectSameTrees(tree, emptyTree, true);
|
|
73
|
+
await expectSameTrees(tree, emptyTree, false);
|
|
74
|
+
|
|
75
|
+
// append the leaves again
|
|
76
|
+
await appendLeaves(tree, values.slice(0, 4));
|
|
77
|
+
|
|
78
|
+
expect(tree.getRoot(true)).toEqual(firstRoot);
|
|
79
|
+
// committed root should still be the empty root
|
|
80
|
+
expect(tree.getRoot(false)).toEqual(emptyTree.getRoot(false));
|
|
81
|
+
|
|
82
|
+
expect(firstRoot).not.toEqual(emptyTree.getRoot(true));
|
|
83
|
+
|
|
84
|
+
await tree.rollback();
|
|
85
|
+
|
|
86
|
+
// both committed and uncommitted trees should be equal to the empty tree
|
|
87
|
+
await expectSameTrees(tree, emptyTree, true);
|
|
88
|
+
await expectSameTrees(tree, emptyTree, false);
|
|
89
|
+
});
|
|
90
|
+
|
|
91
|
+
it('should not revert changes after commit', async () => {
|
|
92
|
+
const levelDownEmpty = createMemDown();
|
|
93
|
+
const dbEmpty = levelup(levelDownEmpty);
|
|
94
|
+
const emptyTree = await createDb(dbEmpty, pedersen, 'test', 10);
|
|
95
|
+
|
|
96
|
+
const levelDown = createMemDown();
|
|
97
|
+
const db = levelup(levelDown);
|
|
98
|
+
const tree = await createDb(db, pedersen, 'test2', 10);
|
|
99
|
+
await appendLeaves(tree, values.slice(0, 4));
|
|
100
|
+
|
|
101
|
+
expect(tree.getRoot(true)).not.toEqual(emptyTree.getRoot(true));
|
|
102
|
+
// committed root should still be the empty root
|
|
103
|
+
expect(tree.getRoot(false)).toEqual(emptyTree.getRoot(false));
|
|
104
|
+
|
|
105
|
+
await tree.commit();
|
|
106
|
+
await tree.rollback();
|
|
107
|
+
|
|
108
|
+
expect(tree.getRoot(true)).not.toEqual(emptyTree.getRoot(true));
|
|
109
|
+
expect(tree.getRoot(false)).not.toEqual(emptyTree.getRoot(true));
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
it('should be able to restore from previous committed data', async () => {
|
|
113
|
+
const levelDown = createMemDown();
|
|
114
|
+
const db = levelup(levelDown);
|
|
115
|
+
const tree = await createDb(db, pedersen, 'test', 10);
|
|
116
|
+
await appendLeaves(tree, values.slice(0, 4));
|
|
117
|
+
await tree.commit();
|
|
118
|
+
|
|
119
|
+
const db2 = levelup(levelDown);
|
|
120
|
+
const tree2 = await createFromName(db2, pedersen, 'test');
|
|
121
|
+
|
|
122
|
+
// both committed and uncommitted should be equal to the restored data
|
|
123
|
+
expect(tree.getRoot(true)).toEqual(tree2.getRoot(true));
|
|
124
|
+
expect(tree.getRoot(false)).toEqual(tree2.getRoot(false));
|
|
125
|
+
for (let i = 0; i < 4; ++i) {
|
|
126
|
+
expect(await tree.getSiblingPath(BigInt(i), true)).toEqual(await tree2.getSiblingPath(BigInt(i), true));
|
|
127
|
+
expect(await tree.getSiblingPath(BigInt(i), false)).toEqual(await tree2.getSiblingPath(BigInt(i), false));
|
|
128
|
+
}
|
|
129
|
+
});
|
|
130
|
+
|
|
131
|
+
it('should throw an error if previous data does not exist for the given name', async () => {
|
|
132
|
+
const db = levelup(createMemDown());
|
|
133
|
+
await expect(
|
|
134
|
+
(async () => {
|
|
135
|
+
await createFromName(db, pedersen, 'a_whole_new_tree');
|
|
136
|
+
})(),
|
|
137
|
+
).rejects.toThrow();
|
|
138
|
+
});
|
|
139
|
+
|
|
140
|
+
it('should serialize sibling path data to a buffer and be able to deserialize it back', async () => {
|
|
141
|
+
const db = levelup(createMemDown());
|
|
142
|
+
const tree = await createDb(db, pedersen, 'test', 10);
|
|
143
|
+
await appendLeaves(tree, values.slice(0, 1));
|
|
144
|
+
|
|
145
|
+
const siblingPath = await tree.getSiblingPath(0n, true);
|
|
146
|
+
const buf = siblingPath.toBuffer();
|
|
147
|
+
const recovered = SiblingPath.fromBuffer(buf);
|
|
148
|
+
expect(recovered).toEqual(siblingPath);
|
|
149
|
+
const deserialized = SiblingPath.deserialize(buf);
|
|
150
|
+
expect(deserialized.elem).toEqual(siblingPath);
|
|
151
|
+
expect(deserialized.adv).toBe(4 + 10 * 32);
|
|
152
|
+
|
|
153
|
+
const dummyData = Buffer.alloc(23, 1);
|
|
154
|
+
const paddedBuf = Buffer.concat([dummyData, buf]);
|
|
155
|
+
const recovered2 = SiblingPath.fromBuffer(paddedBuf, 23);
|
|
156
|
+
expect(recovered2).toEqual(siblingPath);
|
|
157
|
+
const deserialized2 = SiblingPath.deserialize(buf);
|
|
158
|
+
expect(deserialized2.elem).toEqual(siblingPath);
|
|
159
|
+
expect(deserialized2.adv).toBe(4 + 10 * 32);
|
|
160
|
+
});
|
|
161
|
+
});
|
|
162
|
+
};
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { AppendOnlyTree } from '../../interfaces/append_only_tree.js';
|
|
2
|
+
import { UpdateOnlyTree } from '../../interfaces/update_only_tree.js';
|
|
3
|
+
|
|
4
|
+
export const appendLeaves = async (tree: AppendOnlyTree | UpdateOnlyTree, leaves: Buffer[]) => {
|
|
5
|
+
if ('appendLeaves' in tree) {
|
|
6
|
+
// This branch is used by the standard tree test suite, which implements appendLeaves
|
|
7
|
+
await tree.appendLeaves(leaves);
|
|
8
|
+
} else {
|
|
9
|
+
// This branch is used by the sparse tree test suite, which does not implement appendLeaves
|
|
10
|
+
for (const value of leaves) {
|
|
11
|
+
const index = tree.getNumLeaves(true);
|
|
12
|
+
await tree.updateLeaf(value, index);
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
};
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { Pedersen } from '../../index.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* A test utility allowing us to count the number of times the compress function has been called.
|
|
5
|
+
*/
|
|
6
|
+
export class PedersenWithCounter extends Pedersen {
|
|
7
|
+
/**
|
|
8
|
+
* The number of times the compress function has been called.
|
|
9
|
+
*/
|
|
10
|
+
public compressCounter = 0;
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Compresses two 32-byte hashes.
|
|
14
|
+
* @param lhs - The first hash.
|
|
15
|
+
* @param rhs - The second hash.
|
|
16
|
+
* @returns The new 32-byte hash.
|
|
17
|
+
*/
|
|
18
|
+
public compress(lhs: Uint8Array, rhs: Uint8Array): Buffer {
|
|
19
|
+
this.compressCounter++;
|
|
20
|
+
return super.compress(lhs, rhs);
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Resets the compress counter.
|
|
25
|
+
* @returns void
|
|
26
|
+
*/
|
|
27
|
+
public resetCounter() {
|
|
28
|
+
this.compressCounter = 0;
|
|
29
|
+
}
|
|
30
|
+
}
|
package/src/tree_base.ts
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
import { LevelUp, LevelUpChain } from 'levelup';
|
|
2
|
+
import { SiblingPath } from './sibling_path/sibling_path.js';
|
|
3
|
+
import { Hasher } from './hasher.js';
|
|
4
|
+
import { MerkleTree } from './interfaces/merkle_tree.js';
|
|
5
|
+
import { toBigIntLE, toBufferLE } from '@aztec/foundation/bigint-buffer';
|
|
6
|
+
|
|
7
|
+
const MAX_DEPTH = 254;
|
|
8
|
+
|
|
9
|
+
export const indexToKeyHash = (name: string, level: number, index: bigint) => `${name}:${level}:${index}`;
|
|
10
|
+
const encodeMeta = (root: Buffer, depth: number, size: bigint) => {
|
|
11
|
+
const data = Buffer.alloc(36);
|
|
12
|
+
root.copy(data);
|
|
13
|
+
data.writeUInt32LE(depth, 32);
|
|
14
|
+
return Buffer.concat([data, toBufferLE(size, 32)]);
|
|
15
|
+
};
|
|
16
|
+
export const decodeMeta = (meta: Buffer) => {
|
|
17
|
+
const root = meta.subarray(0, 32);
|
|
18
|
+
const depth = meta.readUInt32LE(32);
|
|
19
|
+
const size = toBigIntLE(meta.subarray(36));
|
|
20
|
+
return {
|
|
21
|
+
root,
|
|
22
|
+
depth,
|
|
23
|
+
size,
|
|
24
|
+
};
|
|
25
|
+
};
|
|
26
|
+
|
|
27
|
+
export const INITIAL_LEAF = Buffer.from('0000000000000000000000000000000000000000000000000000000000000000', 'hex');
|
|
28
|
+
|
|
29
|
+
/**
 * A Merkle tree implementation that uses a LevelDB database to store the tree.
 * Uncommitted updates live in an in-memory cache keyed by `name:level:index`;
 * `commit` flushes the cache to the DB and `rollback` discards it.
 */
export abstract class TreeBase implements MerkleTree {
  // Highest leaf index this tree can hold: 2^depth - 1.
  protected readonly maxIndex: bigint;
  // Tree size including uncommitted appends; undefined when there are none.
  protected cachedSize?: bigint;
  // Committed root; the uncommitted root (if any) lives in the cache at level 0, index 0.
  private root!: Buffer;
  // zeroHashes[l - 1] is the hash of an entirely empty subtree rooted at level l
  // (so zeroHashes[depth - 1] === INITIAL_LEAF).
  private zeroHashes: Buffer[] = [];
  // Uncommitted node values, keyed by indexToKeyHash(name, level, index).
  protected cache: { [key: string]: Buffer } = {};

  public constructor(
    protected db: LevelUp,
    protected hasher: Hasher,
    protected name: string,
    protected depth: number,
    protected size: bigint = 0n,
    root?: Buffer,
  ) {
    if (!(depth >= 1 && depth <= MAX_DEPTH)) {
      throw Error('Invalid depth');
    }

    // Compute the zero values at each layer.
    let current = INITIAL_LEAF;
    for (let i = depth - 1; i >= 0; --i) {
      this.zeroHashes[i] = current;
      current = hasher.compress(current, current);
    }

    // After the loop, `current` holds the root of a fully empty tree; use it
    // unless a previously persisted root was supplied.
    this.root = root ? root : current;
    this.maxIndex = 2n ** BigInt(depth) - 1n;
  }

  /**
   * Returns the root of the tree.
   * @param includeUncommitted - If true, root incorporating uncommitted changes is returned.
   * @returns The root of the tree.
   */
  public getRoot(includeUncommitted: boolean): Buffer {
    // The uncommitted root, when present, is cached as the node at level 0, index 0.
    return !includeUncommitted ? this.root : this.cache[indexToKeyHash(this.name, 0, 0n)] ?? this.root;
  }

  /**
   * Returns the number of leaves in the tree.
   * @param includeUncommitted - If true, the returned number of leaves includes uncommitted changes.
   * @returns The number of leaves in the tree.
   */
  public getNumLeaves(includeUncommitted: boolean) {
    return !includeUncommitted ? this.size : this.cachedSize ?? this.size;
  }

  /**
   * Returns the name of the tree.
   * @returns The name of the tree.
   */
  public getName(): string {
    return this.name;
  }

  /**
   * Returns the depth of the tree.
   * @returns The depth of the tree.
   */
  public getDepth(): number {
    return this.depth;
  }

  /**
   * Returns a sibling path for the element at the given index.
   * @param index - The index of the element.
   * @param includeUncommitted - Indicates whether to get a sibling path incorporating uncommitted changes.
   * @returns A sibling path for the element at the given index.
   * Note: The sibling path is an array of sibling hashes, with the lowest hash (leaf hash) first, and the highest hash last.
   */
  public async getSiblingPath<N extends number>(index: bigint, includeUncommitted: boolean): Promise<SiblingPath<N>> {
    const path: Buffer[] = [];
    let level = this.depth;
    // Walk from the leaf level to the root, collecting the sibling at each level.
    while (level > 0) {
      const isRight = index & 0x01n;
      const sibling = await this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, includeUncommitted);
      path.push(sibling);
      level -= 1;
      index >>= 1n;
    }
    return new SiblingPath<N>(this.depth as N, path);
  }

  /**
   * Commits the changes to the database.
   * Flushes every cached node, updates committed size/root and metadata, then clears the cache.
   * @returns Empty promise.
   */
  public async commit(): Promise<void> {
    const batch = this.db.batch();
    const keys = Object.getOwnPropertyNames(this.cache);
    for (const key of keys) {
      batch.put(key, this.cache[key]);
    }
    // Promote the uncommitted size/root to committed state before persisting metadata.
    this.size = this.getNumLeaves(true);
    this.root = this.getRoot(true);
    await this.writeMeta(batch);
    await batch.write();
    this.clearCache();
  }

  /**
   * Rolls back the not-yet-committed changes.
   * @returns Empty promise.
   */
  public rollback(): Promise<void> {
    this.clearCache();
    return Promise.resolve();
  }

  /**
   * Gets the value at the given index.
   * @param index - The index of the leaf.
   * @param includeUncommitted - Indicates whether to include uncommitted changes.
   * @returns Leaf value at the given index or undefined.
   */
  public getLeafValue(index: bigint, includeUncommitted: boolean): Promise<Buffer | undefined> {
    return this.getLatestValueAtIndex(this.depth, index, includeUncommitted);
  }

  /**
   * Clears the cache, discarding any uncommitted node values and size.
   */
  private clearCache() {
    this.cache = {};
    this.cachedSize = undefined;
  }

  /**
   * Adds a leaf and all the hashes above it to the cache.
   * @param leaf - Leaf to add to cache.
   * @param index - Index of the leaf (used to derive the cache key).
   */
  protected async addLeafToCacheAndHashToRoot(leaf: Buffer, index: bigint) {
    const key = indexToKeyHash(this.name, this.depth, index);
    let current = leaf;
    this.cache[key] = current;
    let level = this.depth;
    // Recompute every ancestor up to (and including) the root, caching each.
    while (level > 0) {
      const isRight = index & 0x01n;
      const sibling = await this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, true);
      const lhs = isRight ? sibling : current;
      const rhs = isRight ? current : sibling;
      current = this.hasher.compress(lhs, rhs);
      level -= 1;
      index >>= 1n;
      const cacheKey = indexToKeyHash(this.name, level, index);
      this.cache[cacheKey] = current;
    }
  }

  /**
   * Returns the latest value at the given index.
   * Lookup order: uncommitted cache (when requested), then the database, then
   * the appropriate zero-hash for an empty node at that level.
   * @param level - The level of the tree.
   * @param index - The index of the element.
   * @param includeUncommitted - Indicates, whether to get include uncommitted changes.
   * @returns The latest value at the given index.
   * Note: If the value is not in the cache, it will be fetched from the database.
   */
  protected async getLatestValueAtIndex(level: number, index: bigint, includeUncommitted: boolean): Promise<Buffer> {
    const key = indexToKeyHash(this.name, level, index);
    if (includeUncommitted && this.cache[key] !== undefined) {
      return this.cache[key];
    }
    const committed = await this.dbGet(key);
    if (committed !== undefined) {
      return committed;
    }
    return this.zeroHashes[level - 1];
  }

  /**
   * Gets a value from db by key.
   * @param key - The key to by which to get the value.
   * @returns A value from the db based on the key, or undefined when the lookup fails.
   * NOTE(review): the catch swallows ALL errors, not just key-not-found — real DB
   * failures are also masked as undefined; confirm this is intended.
   */
  private async dbGet(key: string): Promise<Buffer | undefined> {
    return await this.db.get(key).catch(() => {});
  }

  /**
   * Initializes the tree.
   * @param prefilledSize - A number of leaves that are prefilled with values.
   * @returns Empty promise.
   */
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  public async init(prefilledSize: number): Promise<void> {
    // prefilledSize is used only by Indexed Tree.
    await this.writeMeta();
  }

  /**
   * Initializes the tree from the database.
   */
  public async initFromDb(): Promise<void> {
    // Implemented only by Indexed Tree to populate the leaf cache.
  }

  /**
   * Writes meta data to the provided batch, or directly to the db when no batch is given.
   * @param batch - The batch to which to write the meta data.
   */
  protected async writeMeta(batch?: LevelUpChain<string, Buffer>) {
    // Metadata always reflects the uncommitted view; it is persisted atomically
    // with the node values when called from commit().
    const data = encodeMeta(this.getRoot(true), this.depth, this.getNumLeaves(true));
    if (batch) {
      batch.put(this.name, data);
    } else {
      await this.db.put(this.name, data);
    }
  }
}
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
{
|
|
2
|
+
"extends": "..",
|
|
3
|
+
"compilerOptions": {
|
|
4
|
+
"outDir": "dest",
|
|
5
|
+
"rootDir": "src",
|
|
6
|
+
"tsBuildInfoFile": ".tsbuildinfo"
|
|
7
|
+
},
|
|
8
|
+
"references": [
|
|
9
|
+
{
|
|
10
|
+
"path": "../circuits.js"
|
|
11
|
+
},
|
|
12
|
+
{
|
|
13
|
+
"path": "../foundation"
|
|
14
|
+
}
|
|
15
|
+
],
|
|
16
|
+
"include": ["src"]
|
|
17
|
+
}
|