@aztec/archiver 0.0.0-test.1 → 0.0.1-commit.03f7ef2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -6
- package/dest/archiver/archiver.d.ts +201 -94
- package/dest/archiver/archiver.d.ts.map +1 -1
- package/dest/archiver/archiver.js +1141 -396
- package/dest/archiver/archiver_store.d.ts +171 -83
- package/dest/archiver/archiver_store.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts +1 -1
- package/dest/archiver/archiver_store_test_suite.d.ts.map +1 -1
- package/dest/archiver/archiver_store_test_suite.js +2389 -393
- package/dest/archiver/config.d.ts +7 -22
- package/dest/archiver/config.d.ts.map +1 -1
- package/dest/archiver/config.js +30 -14
- package/dest/archiver/errors.d.ts +33 -1
- package/dest/archiver/errors.d.ts.map +1 -1
- package/dest/archiver/errors.js +49 -0
- package/dest/archiver/index.d.ts +3 -4
- package/dest/archiver/index.d.ts.map +1 -1
- package/dest/archiver/index.js +1 -2
- package/dest/archiver/instrumentation.d.ts +14 -6
- package/dest/archiver/instrumentation.d.ts.map +1 -1
- package/dest/archiver/instrumentation.js +69 -17
- package/dest/archiver/kv_archiver_store/block_store.d.ts +91 -21
- package/dest/archiver/kv_archiver_store/block_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/block_store.js +476 -86
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts +4 -4
- package/dest/archiver/kv_archiver_store/contract_class_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_class_store.js +13 -19
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts +12 -9
- package/dest/archiver/kv_archiver_store/contract_instance_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/contract_instance_store.js +30 -16
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts +80 -75
- package/dest/archiver/kv_archiver_store/kv_archiver_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/kv_archiver_store.js +142 -83
- package/dest/archiver/kv_archiver_store/log_store.d.ts +12 -16
- package/dest/archiver/kv_archiver_store/log_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/log_store.js +153 -113
- package/dest/archiver/kv_archiver_store/message_store.d.ts +25 -18
- package/dest/archiver/kv_archiver_store/message_store.d.ts.map +1 -1
- package/dest/archiver/kv_archiver_store/message_store.js +152 -49
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts +3 -0
- package/dest/archiver/l1/bin/retrieve-calldata.d.ts.map +1 -0
- package/dest/archiver/l1/bin/retrieve-calldata.js +149 -0
- package/dest/archiver/l1/calldata_retriever.d.ts +112 -0
- package/dest/archiver/l1/calldata_retriever.d.ts.map +1 -0
- package/dest/archiver/l1/calldata_retriever.js +471 -0
- package/dest/archiver/l1/data_retrieval.d.ts +90 -0
- package/dest/archiver/l1/data_retrieval.d.ts.map +1 -0
- package/dest/archiver/l1/data_retrieval.js +331 -0
- package/dest/archiver/l1/debug_tx.d.ts +19 -0
- package/dest/archiver/l1/debug_tx.d.ts.map +1 -0
- package/dest/archiver/l1/debug_tx.js +73 -0
- package/dest/archiver/l1/spire_proposer.d.ts +70 -0
- package/dest/archiver/l1/spire_proposer.d.ts.map +1 -0
- package/dest/archiver/l1/spire_proposer.js +157 -0
- package/dest/archiver/l1/trace_tx.d.ts +97 -0
- package/dest/archiver/l1/trace_tx.d.ts.map +1 -0
- package/dest/archiver/l1/trace_tx.js +91 -0
- package/dest/archiver/l1/types.d.ts +12 -0
- package/dest/archiver/l1/types.d.ts.map +1 -0
- package/dest/archiver/l1/types.js +3 -0
- package/dest/archiver/l1/validate_trace.d.ts +29 -0
- package/dest/archiver/l1/validate_trace.d.ts.map +1 -0
- package/dest/archiver/l1/validate_trace.js +150 -0
- package/dest/archiver/structs/data_retrieval.d.ts +1 -1
- package/dest/archiver/structs/inbox_message.d.ts +15 -0
- package/dest/archiver/structs/inbox_message.d.ts.map +1 -0
- package/dest/archiver/structs/inbox_message.js +39 -0
- package/dest/archiver/structs/published.d.ts +2 -11
- package/dest/archiver/structs/published.d.ts.map +1 -1
- package/dest/archiver/structs/published.js +1 -1
- package/dest/archiver/validation.d.ts +17 -0
- package/dest/archiver/validation.d.ts.map +1 -0
- package/dest/archiver/validation.js +98 -0
- package/dest/factory.d.ts +9 -14
- package/dest/factory.d.ts.map +1 -1
- package/dest/factory.js +22 -52
- package/dest/index.d.ts +2 -2
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -1
- package/dest/rpc/index.d.ts +2 -3
- package/dest/rpc/index.d.ts.map +1 -1
- package/dest/rpc/index.js +1 -4
- package/dest/test/index.d.ts +1 -1
- package/dest/test/mock_archiver.d.ts +16 -8
- package/dest/test/mock_archiver.d.ts.map +1 -1
- package/dest/test/mock_archiver.js +19 -14
- package/dest/test/mock_l1_to_l2_message_source.d.ts +9 -6
- package/dest/test/mock_l1_to_l2_message_source.d.ts.map +1 -1
- package/dest/test/mock_l1_to_l2_message_source.js +21 -7
- package/dest/test/mock_l2_block_source.d.ts +52 -13
- package/dest/test/mock_l2_block_source.d.ts.map +1 -1
- package/dest/test/mock_l2_block_source.js +140 -15
- package/dest/test/mock_structs.d.ts +10 -0
- package/dest/test/mock_structs.d.ts.map +1 -0
- package/dest/test/mock_structs.js +38 -0
- package/package.json +29 -30
- package/src/archiver/archiver.ts +1477 -501
- package/src/archiver/archiver_store.ts +197 -88
- package/src/archiver/archiver_store_test_suite.ts +2403 -350
- package/src/archiver/config.ts +38 -46
- package/src/archiver/errors.ts +85 -0
- package/src/archiver/index.ts +2 -3
- package/src/archiver/instrumentation.ts +91 -22
- package/src/archiver/kv_archiver_store/block_store.ts +640 -101
- package/src/archiver/kv_archiver_store/contract_class_store.ts +14 -24
- package/src/archiver/kv_archiver_store/contract_instance_store.ts +36 -28
- package/src/archiver/kv_archiver_store/kv_archiver_store.ts +193 -113
- package/src/archiver/kv_archiver_store/log_store.ts +205 -127
- package/src/archiver/kv_archiver_store/message_store.ts +213 -54
- package/src/archiver/l1/README.md +98 -0
- package/src/archiver/l1/bin/retrieve-calldata.ts +182 -0
- package/src/archiver/l1/calldata_retriever.ts +641 -0
- package/src/archiver/l1/data_retrieval.ts +512 -0
- package/src/archiver/l1/debug_tx.ts +99 -0
- package/src/archiver/l1/spire_proposer.ts +160 -0
- package/src/archiver/l1/trace_tx.ts +128 -0
- package/src/archiver/l1/types.ts +13 -0
- package/src/archiver/l1/validate_trace.ts +211 -0
- package/src/archiver/structs/inbox_message.ts +41 -0
- package/src/archiver/structs/published.ts +1 -11
- package/src/archiver/validation.ts +124 -0
- package/src/factory.ts +28 -69
- package/src/index.ts +1 -1
- package/src/rpc/index.ts +1 -5
- package/src/test/fixtures/debug_traceTransaction-multicall3.json +88 -0
- package/src/test/fixtures/debug_traceTransaction-multiplePropose.json +153 -0
- package/src/test/fixtures/debug_traceTransaction-proxied.json +122 -0
- package/src/test/fixtures/trace_transaction-multicall3.json +65 -0
- package/src/test/fixtures/trace_transaction-multiplePropose.json +319 -0
- package/src/test/fixtures/trace_transaction-proxied.json +128 -0
- package/src/test/fixtures/trace_transaction-randomRevert.json +216 -0
- package/src/test/mock_archiver.ts +22 -16
- package/src/test/mock_l1_to_l2_message_source.ts +20 -8
- package/src/test/mock_l2_block_source.ts +186 -21
- package/src/test/mock_structs.ts +50 -0
- package/dest/archiver/data_retrieval.d.ts +0 -74
- package/dest/archiver/data_retrieval.d.ts.map +0 -1
- package/dest/archiver/data_retrieval.js +0 -283
- package/dest/archiver/kv_archiver_store/nullifier_store.d.ts +0 -12
- package/dest/archiver/kv_archiver_store/nullifier_store.d.ts.map +0 -1
- package/dest/archiver/kv_archiver_store/nullifier_store.js +0 -73
- package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts +0 -23
- package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.d.ts.map +0 -1
- package/dest/archiver/memory_archiver_store/l1_to_l2_message_store.js +0 -49
- package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts +0 -175
- package/dest/archiver/memory_archiver_store/memory_archiver_store.d.ts.map +0 -1
- package/dest/archiver/memory_archiver_store/memory_archiver_store.js +0 -636
- package/src/archiver/data_retrieval.ts +0 -422
- package/src/archiver/kv_archiver_store/nullifier_store.ts +0 -97
- package/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts +0 -61
- package/src/archiver/memory_archiver_store/memory_archiver_store.ts +0 -801
|
@@ -1,88 +1,282 @@
|
|
|
1
|
-
import { INITIAL_L2_BLOCK_NUM,
|
|
1
|
+
import { INITIAL_CHECKPOINT_NUMBER, INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, PRIVATE_LOG_SIZE_IN_FIELDS } from '@aztec/constants';
|
|
2
|
+
import { makeTuple } from '@aztec/foundation/array';
|
|
3
|
+
import { BlockNumber, CheckpointNumber, EpochNumber } from '@aztec/foundation/branded-types';
|
|
4
|
+
import { Buffer16, Buffer32 } from '@aztec/foundation/buffer';
|
|
2
5
|
import { times, timesParallel } from '@aztec/foundation/collection';
|
|
3
|
-
import { randomInt } from '@aztec/foundation/crypto';
|
|
4
|
-
import { Fr } from '@aztec/foundation/
|
|
6
|
+
import { randomInt } from '@aztec/foundation/crypto/random';
|
|
7
|
+
import { Fr } from '@aztec/foundation/curves/bn254';
|
|
8
|
+
import { toArray } from '@aztec/foundation/iterable';
|
|
9
|
+
import { sleep } from '@aztec/foundation/sleep';
|
|
5
10
|
import { AztecAddress } from '@aztec/stdlib/aztec-address';
|
|
6
|
-
import {
|
|
11
|
+
import { CommitteeAttestation, EthAddress, L2BlockHash, L2BlockNew, randomBlockInfo } from '@aztec/stdlib/block';
|
|
12
|
+
import { Checkpoint, L1PublishedData, PublishedCheckpoint } from '@aztec/stdlib/checkpoint';
|
|
7
13
|
import { SerializableContractInstance, computePublicBytecodeCommitment } from '@aztec/stdlib/contract';
|
|
8
|
-
import { LogId, PrivateLog, PublicLog } from '@aztec/stdlib/logs';
|
|
14
|
+
import { ContractClassLog, LogId, PrivateLog, PublicLog, SiloedTag, Tag } from '@aztec/stdlib/logs';
|
|
9
15
|
import { InboxLeaf } from '@aztec/stdlib/messaging';
|
|
10
|
-
import {
|
|
16
|
+
import { CheckpointHeader } from '@aztec/stdlib/rollup';
|
|
17
|
+
import { makeContractClassPublic, makeExecutablePrivateFunctionWithMembershipProof, makeUtilityFunctionWithMembershipProof } from '@aztec/stdlib/testing';
|
|
11
18
|
import '@aztec/stdlib/testing/jest';
|
|
12
|
-
import {
|
|
19
|
+
import { AppendOnlyTreeSnapshot } from '@aztec/stdlib/trees';
|
|
20
|
+
import { PartialStateReference, StateReference, TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
21
|
+
import { makeInboxMessage, makeInboxMessages } from '../test/mock_structs.js';
|
|
22
|
+
import { BlockArchiveNotConsistentError, BlockIndexNotSequentialError, BlockNumberNotSequentialError, CheckpointNumberNotConsistentError, CheckpointNumberNotSequentialError, InitialBlockNumberNotSequentialError, InitialCheckpointNumberNotSequentialError } from './errors.js';
|
|
23
|
+
import { MessageStoreError } from './kv_archiver_store/message_store.js';
|
|
13
24
|
/**
|
|
14
25
|
* @param testName - The name of the test suite.
|
|
15
26
|
* @param getStore - Returns an instance of a store that's already been initialized.
|
|
16
27
|
*/ export function describeArchiverDataStore(testName, getStore) {
|
|
17
28
|
describe(testName, ()=>{
|
|
18
29
|
let store;
|
|
19
|
-
let
|
|
20
|
-
const
|
|
30
|
+
let publishedCheckpoints;
|
|
31
|
+
const blockNumberTests = [
|
|
21
32
|
[
|
|
22
33
|
1,
|
|
23
|
-
|
|
24
|
-
()=>blocks.slice(0, 1)
|
|
25
|
-
],
|
|
26
|
-
[
|
|
27
|
-
10,
|
|
28
|
-
1,
|
|
29
|
-
()=>blocks.slice(9, 10)
|
|
34
|
+
()=>publishedCheckpoints[0].checkpoint.blocks[0]
|
|
30
35
|
],
|
|
31
36
|
[
|
|
32
|
-
1,
|
|
33
37
|
10,
|
|
34
|
-
()=>blocks
|
|
35
|
-
],
|
|
36
|
-
[
|
|
37
|
-
2,
|
|
38
|
-
5,
|
|
39
|
-
()=>blocks.slice(1, 6)
|
|
38
|
+
()=>publishedCheckpoints[9].checkpoint.blocks[0]
|
|
40
39
|
],
|
|
41
40
|
[
|
|
42
41
|
5,
|
|
43
|
-
|
|
44
|
-
()=>blocks.slice(4, 6)
|
|
42
|
+
()=>publishedCheckpoints[4].checkpoint.blocks[0]
|
|
45
43
|
]
|
|
46
44
|
];
|
|
47
|
-
const
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
45
|
+
const makeBlockHash = (blockNumber)=>`0x${blockNumber.toString(16).padStart(64, '0')}`;
|
|
46
|
+
// Create a state reference with properly calculated noteHashTree.nextAvailableLeafIndex
|
|
47
|
+
// This is needed because the log store calculates dataStartIndexForBlock as:
|
|
48
|
+
// noteHashTree.nextAvailableLeafIndex - txEffects.length * MAX_NOTE_HASHES_PER_TX
|
|
49
|
+
// If nextAvailableLeafIndex is too small (random values 0-1000), this becomes negative
|
|
50
|
+
const makeStateForBlock = (blockNumber, txsPerBlock)=>{
|
|
51
|
+
// Ensure nextAvailableLeafIndex is large enough for all blocks up to this point
|
|
52
|
+
const noteHashIndex = blockNumber * txsPerBlock * MAX_NOTE_HASHES_PER_TX;
|
|
53
|
+
return new StateReference(AppendOnlyTreeSnapshot.random(), new PartialStateReference(new AppendOnlyTreeSnapshot(Fr.random(), noteHashIndex), AppendOnlyTreeSnapshot.random(), AppendOnlyTreeSnapshot.random()));
|
|
54
|
+
};
|
|
55
|
+
const makePublishedCheckpoint = (checkpoint, l1BlockNumber)=>{
|
|
56
|
+
return new PublishedCheckpoint(checkpoint, new L1PublishedData(BigInt(l1BlockNumber), BigInt(l1BlockNumber * 1000), makeBlockHash(l1BlockNumber)), times(3, CommitteeAttestation.random));
|
|
57
|
+
};
|
|
58
|
+
const expectCheckpointedBlockEquals = (actual, expectedBlock, expectedCheckpoint)=>{
|
|
59
|
+
expect(actual.l1).toEqual(expectedCheckpoint.l1);
|
|
60
|
+
expect(actual.block.header.equals(expectedBlock.header)).toBe(true);
|
|
61
|
+
expect(actual.checkpointNumber).toEqual(expectedCheckpoint.checkpoint.number);
|
|
62
|
+
expect(actual.attestations.every((a, i)=>a.equals(expectedCheckpoint.attestations[i]))).toBe(true);
|
|
63
|
+
};
|
|
55
64
|
beforeEach(async ()=>{
|
|
56
65
|
store = await getStore();
|
|
57
|
-
|
|
66
|
+
// Create checkpoints sequentially to ensure archive roots are chained properly.
|
|
67
|
+
// Each block's header.lastArchive must equal the previous block's archive.
|
|
68
|
+
publishedCheckpoints = [];
|
|
69
|
+
const txsPerBlock = 4;
|
|
70
|
+
for(let i = 0; i < 10; i++){
|
|
71
|
+
const blockNumber = i + 1;
|
|
72
|
+
const previousArchive = i > 0 ? publishedCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
73
|
+
const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
|
|
74
|
+
numBlocks: 1,
|
|
75
|
+
startBlockNumber: blockNumber,
|
|
76
|
+
previousArchive,
|
|
77
|
+
txsPerBlock,
|
|
78
|
+
state: makeStateForBlock(blockNumber, txsPerBlock),
|
|
79
|
+
// Ensure each tx has public logs for getPublicLogs tests
|
|
80
|
+
txOptions: {
|
|
81
|
+
numPublicCallsPerTx: 2,
|
|
82
|
+
numPublicLogsPerCall: 2
|
|
83
|
+
}
|
|
84
|
+
});
|
|
85
|
+
publishedCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
|
|
86
|
+
}
|
|
58
87
|
});
|
|
59
|
-
describe('
|
|
60
|
-
it('returns success when adding
|
|
61
|
-
await expect(store.
|
|
88
|
+
describe('addCheckpoints', ()=>{
|
|
89
|
+
it('returns success when adding checkpoints', async ()=>{
|
|
90
|
+
await expect(store.addCheckpoints(publishedCheckpoints)).resolves.toBe(true);
|
|
91
|
+
});
|
|
92
|
+
it('throws on duplicate checkpoints', async ()=>{
|
|
93
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
94
|
+
await expect(store.addCheckpoints(publishedCheckpoints)).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
95
|
+
});
|
|
96
|
+
it('throws an error if the previous block does not exist in the store', async ()=>{
|
|
97
|
+
const checkpoint = await Checkpoint.random(CheckpointNumber(2), {
|
|
98
|
+
numBlocks: 1,
|
|
99
|
+
startBlockNumber: 2
|
|
100
|
+
});
|
|
101
|
+
const block = makePublishedCheckpoint(checkpoint, 2);
|
|
102
|
+
await expect(store.addCheckpoints([
|
|
103
|
+
block
|
|
104
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
105
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
106
|
+
});
|
|
107
|
+
it('throws an error if there is a gap in the blocks being added', async ()=>{
|
|
108
|
+
const checkpoint1 = await Checkpoint.random(CheckpointNumber(1), {
|
|
109
|
+
numBlocks: 1,
|
|
110
|
+
startBlockNumber: 1
|
|
111
|
+
});
|
|
112
|
+
const checkpoint3 = await Checkpoint.random(CheckpointNumber(3), {
|
|
113
|
+
numBlocks: 1,
|
|
114
|
+
startBlockNumber: 3
|
|
115
|
+
});
|
|
116
|
+
const checkpoints = [
|
|
117
|
+
makePublishedCheckpoint(checkpoint1, 1),
|
|
118
|
+
makePublishedCheckpoint(checkpoint3, 3)
|
|
119
|
+
];
|
|
120
|
+
await expect(store.addCheckpoints(checkpoints)).rejects.toThrow(CheckpointNumberNotSequentialError);
|
|
121
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
122
|
+
});
|
|
123
|
+
it('throws an error if blocks within a checkpoint are not sequential', async ()=>{
|
|
124
|
+
// Create a checkpoint with non-sequential block numbers (block 1 and block 3, skipping block 2)
|
|
125
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
126
|
+
checkpointNumber: CheckpointNumber(1)
|
|
127
|
+
});
|
|
128
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
129
|
+
checkpointNumber: CheckpointNumber(1)
|
|
130
|
+
});
|
|
131
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
132
|
+
block1,
|
|
133
|
+
block3
|
|
134
|
+
], CheckpointNumber(1));
|
|
135
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
136
|
+
await expect(store.addCheckpoints([
|
|
137
|
+
publishedCheckpoint
|
|
138
|
+
])).rejects.toThrow(BlockNumberNotSequentialError);
|
|
139
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
140
|
+
});
|
|
141
|
+
it('throws an error if blocks within a checkpoint do not have sequential indexes', async ()=>{
|
|
142
|
+
// Create a checkpoint with non-sequential indexes
|
|
143
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
144
|
+
checkpointNumber: CheckpointNumber(1),
|
|
145
|
+
indexWithinCheckpoint: 0
|
|
146
|
+
});
|
|
147
|
+
const block3 = await L2BlockNew.random(BlockNumber(2), {
|
|
148
|
+
checkpointNumber: CheckpointNumber(1),
|
|
149
|
+
indexWithinCheckpoint: 2
|
|
150
|
+
});
|
|
151
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
152
|
+
block1,
|
|
153
|
+
block3
|
|
154
|
+
], CheckpointNumber(1));
|
|
155
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
156
|
+
await expect(store.addCheckpoints([
|
|
157
|
+
publishedCheckpoint
|
|
158
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
159
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
160
|
+
});
|
|
161
|
+
it('throws an error if blocks within a checkpoint do not start from index 0', async ()=>{
|
|
162
|
+
// Create a checkpoint with non-sequential indexes
|
|
163
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
164
|
+
checkpointNumber: CheckpointNumber(1),
|
|
165
|
+
indexWithinCheckpoint: 1
|
|
166
|
+
});
|
|
167
|
+
const block3 = await L2BlockNew.random(BlockNumber(2), {
|
|
168
|
+
checkpointNumber: CheckpointNumber(1),
|
|
169
|
+
indexWithinCheckpoint: 2
|
|
170
|
+
});
|
|
171
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
172
|
+
block1,
|
|
173
|
+
block3
|
|
174
|
+
], CheckpointNumber(1));
|
|
175
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
176
|
+
await expect(store.addCheckpoints([
|
|
177
|
+
publishedCheckpoint
|
|
178
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
179
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
180
|
+
});
|
|
181
|
+
it('throws an error if block has invalid checkpoint index', async ()=>{
|
|
182
|
+
// Create a block wit an invalid checkpoint index
|
|
183
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
184
|
+
checkpointNumber: CheckpointNumber(1),
|
|
185
|
+
indexWithinCheckpoint: -1
|
|
186
|
+
});
|
|
187
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
188
|
+
block1
|
|
189
|
+
], CheckpointNumber(1));
|
|
190
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
191
|
+
await expect(store.addCheckpoints([
|
|
192
|
+
publishedCheckpoint
|
|
193
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
194
|
+
await expect(store.getCheckpointedBlock(1)).resolves.toBeUndefined();
|
|
195
|
+
});
|
|
196
|
+
it('throws an error if checkpoint has invalid initial number', async ()=>{
|
|
197
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
198
|
+
checkpointNumber: CheckpointNumber(2),
|
|
199
|
+
indexWithinCheckpoint: 0
|
|
200
|
+
});
|
|
201
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
202
|
+
block1
|
|
203
|
+
], CheckpointNumber(2));
|
|
204
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
205
|
+
await expect(store.addCheckpoints([
|
|
206
|
+
publishedCheckpoint
|
|
207
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
208
|
+
});
|
|
209
|
+
it('allows the correct initial checkpoint', async ()=>{
|
|
210
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
211
|
+
checkpointNumber: CheckpointNumber(1),
|
|
212
|
+
indexWithinCheckpoint: 0
|
|
213
|
+
});
|
|
214
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
215
|
+
block1
|
|
216
|
+
], CheckpointNumber(1));
|
|
217
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
218
|
+
await expect(store.addCheckpoints([
|
|
219
|
+
publishedCheckpoint
|
|
220
|
+
])).resolves.toBe(true);
|
|
62
221
|
});
|
|
63
|
-
it('
|
|
64
|
-
await
|
|
65
|
-
|
|
222
|
+
it('throws on duplicate initial checkpoint', async ()=>{
|
|
223
|
+
const block1 = await L2BlockNew.random(BlockNumber(1), {
|
|
224
|
+
checkpointNumber: CheckpointNumber(1),
|
|
225
|
+
indexWithinCheckpoint: 0
|
|
226
|
+
});
|
|
227
|
+
const block2 = await L2BlockNew.random(BlockNumber(1), {
|
|
228
|
+
checkpointNumber: CheckpointNumber(1),
|
|
229
|
+
indexWithinCheckpoint: 0
|
|
230
|
+
});
|
|
231
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
232
|
+
block1
|
|
233
|
+
], CheckpointNumber(1));
|
|
234
|
+
const publishedCheckpoint = makePublishedCheckpoint(checkpoint, 10);
|
|
235
|
+
const checkpoint2 = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
236
|
+
block2
|
|
237
|
+
], CheckpointNumber(1));
|
|
238
|
+
const publishedCheckpoint2 = makePublishedCheckpoint(checkpoint2, 10);
|
|
239
|
+
await expect(store.addCheckpoints([
|
|
240
|
+
publishedCheckpoint
|
|
241
|
+
])).resolves.toBe(true);
|
|
242
|
+
await expect(store.addCheckpoints([
|
|
243
|
+
publishedCheckpoint2
|
|
244
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
66
245
|
});
|
|
67
246
|
});
|
|
68
|
-
describe('
|
|
69
|
-
it('unwinding
|
|
70
|
-
await store.
|
|
71
|
-
const
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
await store.
|
|
76
|
-
expect(
|
|
77
|
-
expect(
|
|
247
|
+
describe('unwindcheckpoints', ()=>{
|
|
248
|
+
it('unwinding checkpoints will remove checkpoints from the chain', async ()=>{
|
|
249
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
250
|
+
const checkpointNumber = await store.getSynchedCheckpointNumber();
|
|
251
|
+
const lastCheckpoint = publishedCheckpoints.at(-1);
|
|
252
|
+
const lastBlockNumber = lastCheckpoint.checkpoint.blocks[0].number;
|
|
253
|
+
// Verify block exists before unwinding
|
|
254
|
+
const retrievedBlock = await store.getCheckpointedBlock(lastBlockNumber);
|
|
255
|
+
expect(retrievedBlock).toBeDefined();
|
|
256
|
+
expect(retrievedBlock.block.header.equals(lastCheckpoint.checkpoint.blocks[0].header)).toBe(true);
|
|
257
|
+
expect(retrievedBlock.checkpointNumber).toEqual(checkpointNumber);
|
|
258
|
+
await store.unwindCheckpoints(checkpointNumber, 1);
|
|
259
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(checkpointNumber - 1);
|
|
260
|
+
await expect(store.getCheckpointedBlock(lastBlockNumber)).resolves.toBeUndefined();
|
|
78
261
|
});
|
|
79
262
|
it('can unwind multiple empty blocks', async ()=>{
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
263
|
+
// Create checkpoints sequentially to chain archive roots
|
|
264
|
+
const emptyCheckpoints = [];
|
|
265
|
+
for(let i = 0; i < 10; i++){
|
|
266
|
+
const previousArchive = i > 0 ? emptyCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
267
|
+
const checkpoint = await Checkpoint.random(CheckpointNumber(i + 1), {
|
|
268
|
+
numBlocks: 1,
|
|
269
|
+
startBlockNumber: i + 1,
|
|
270
|
+
txsPerBlock: 0,
|
|
271
|
+
previousArchive
|
|
272
|
+
});
|
|
273
|
+
emptyCheckpoints.push(makePublishedCheckpoint(checkpoint, i + 10));
|
|
274
|
+
}
|
|
275
|
+
await store.addCheckpoints(emptyCheckpoints);
|
|
276
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(10);
|
|
277
|
+
await store.unwindCheckpoints(CheckpointNumber(10), 3);
|
|
278
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(7);
|
|
279
|
+
expect((await store.getRangeOfCheckpoints(CheckpointNumber(1), 10)).map((b)=>b.checkpointNumber)).toEqual([
|
|
86
280
|
1,
|
|
87
281
|
2,
|
|
88
282
|
3,
|
|
@@ -92,35 +286,1337 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
92
286
|
7
|
|
93
287
|
]);
|
|
94
288
|
});
|
|
95
|
-
it('refuses to unwind
|
|
96
|
-
await store.
|
|
97
|
-
await expect(store.
|
|
289
|
+
it('refuses to unwind checkpoints if the tip is not the last checkpoint', async ()=>{
|
|
290
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
291
|
+
await expect(store.unwindCheckpoints(CheckpointNumber(5), 1)).rejects.toThrow(/can only unwind checkpoints from the tip/i);
|
|
292
|
+
});
|
|
293
|
+
it('unwound blocks and headers cannot be retrieved by hash or archive', async ()=>{
|
|
294
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
295
|
+
const lastCheckpoint = publishedCheckpoints[publishedCheckpoints.length - 1];
|
|
296
|
+
const lastBlock = lastCheckpoint.checkpoint.blocks[0];
|
|
297
|
+
const blockHash = await lastBlock.header.hash();
|
|
298
|
+
const archive = lastBlock.archive.root;
|
|
299
|
+
// Verify block and header exist before unwinding
|
|
300
|
+
const retrievedByHash = await store.getCheckpointedBlockByHash(blockHash);
|
|
301
|
+
expect(retrievedByHash).toBeDefined();
|
|
302
|
+
expect(retrievedByHash.block.header.equals(lastBlock.header)).toBe(true);
|
|
303
|
+
const retrievedByArchive = await store.getCheckpointedBlockByArchive(archive);
|
|
304
|
+
expect(retrievedByArchive).toBeDefined();
|
|
305
|
+
expect(retrievedByArchive.block.header.equals(lastBlock.header)).toBe(true);
|
|
306
|
+
const headerByHash = await store.getBlockHeaderByHash(blockHash);
|
|
307
|
+
expect(headerByHash).toBeDefined();
|
|
308
|
+
expect(headerByHash.equals(lastBlock.header)).toBe(true);
|
|
309
|
+
const headerByArchive = await store.getBlockHeaderByArchive(archive);
|
|
310
|
+
expect(headerByArchive).toBeDefined();
|
|
311
|
+
expect(headerByArchive.equals(lastBlock.header)).toBe(true);
|
|
312
|
+
// Unwind the checkpoint
|
|
313
|
+
await store.unwindCheckpoints(lastCheckpoint.checkpoint.number, 1);
|
|
314
|
+
// Verify neither block nor header can be retrieved after unwinding
|
|
315
|
+
expect(await store.getCheckpointedBlockByHash(blockHash)).toBeUndefined();
|
|
316
|
+
expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
|
|
317
|
+
expect(await store.getBlockHeaderByHash(blockHash)).toBeUndefined();
|
|
318
|
+
expect(await store.getBlockHeaderByArchive(archive)).toBeUndefined();
|
|
319
|
+
});
|
|
320
|
+
});
|
|
321
|
+
describe('multi-block checkpoints', ()=>{
|
|
322
|
+
it('block number increases correctly when adding checkpoints with multiple blocks', async ()=>{
|
|
323
|
+
// Create 3 checkpoints: first with 2 blocks, second with 3 blocks, third with 1 block
|
|
324
|
+
// Total blocks: 6, spanning block numbers 1-6
|
|
325
|
+
// Chain archive roots across checkpoints
|
|
326
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
327
|
+
numBlocks: 2,
|
|
328
|
+
startBlockNumber: 1
|
|
329
|
+
});
|
|
330
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
331
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
332
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
333
|
+
numBlocks: 3,
|
|
334
|
+
startBlockNumber: 3,
|
|
335
|
+
previousArchive: previousArchive1
|
|
336
|
+
});
|
|
337
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
338
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
339
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
340
|
+
numBlocks: 1,
|
|
341
|
+
startBlockNumber: 6,
|
|
342
|
+
previousArchive: previousArchive2
|
|
343
|
+
});
|
|
344
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
345
|
+
await store.addCheckpoints([
|
|
346
|
+
checkpoint1,
|
|
347
|
+
checkpoint2,
|
|
348
|
+
checkpoint3
|
|
349
|
+
]);
|
|
350
|
+
// Checkpoint number should be 3 (the last checkpoint number)
|
|
351
|
+
expect(await store.getSynchedCheckpointNumber()).toBe(3);
|
|
352
|
+
// Block number should be 6 (the last block number across all checkpoints)
|
|
353
|
+
expect(await store.getLatestBlockNumber()).toBe(6);
|
|
354
|
+
});
|
|
355
|
+
it('block number decreases correctly when unwinding checkpoints with multiple blocks', async () => {
    // Three chained checkpoints: 1 -> blocks 1-2, 2 -> blocks 3-5, 3 -> blocks 6-7.
    const specs = [
        { cpNumber: 1, numBlocks: 2, startBlockNumber: 1, l1BlockNumber: 10 },
        { cpNumber: 2, numBlocks: 3, startBlockNumber: 3, l1BlockNumber: 11 },
        { cpNumber: 3, numBlocks: 2, startBlockNumber: 6, l1BlockNumber: 12 },
    ];
    const published = [];
    let previousArchive;
    for (const { cpNumber, numBlocks, startBlockNumber, l1BlockNumber } of specs) {
        const opts = { numBlocks, startBlockNumber };
        if (previousArchive) {
            opts.previousArchive = previousArchive;
        }
        const cp = await Checkpoint.random(CheckpointNumber(cpNumber), opts);
        previousArchive = cp.blocks.at(-1).archive;
        published.push(makePublishedCheckpoint(cp, l1BlockNumber));
    }
    await store.addCheckpoints(published);
    expect(await store.getSynchedCheckpointNumber()).toBe(3);
    expect(await store.getLatestBlockNumber()).toBe(7);
    // Unwind the last checkpoint (which has 2 blocks).
    await store.unwindCheckpoints(CheckpointNumber(3), 1);
    expect(await store.getSynchedCheckpointNumber()).toBe(2);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Unwind another checkpoint (which has 3 blocks).
    await store.unwindCheckpoints(CheckpointNumber(2), 1);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(2);
});
it('unwinding multiple checkpoints with multiple blocks in one go', async () => {
    // Four chained checkpoints totalling 10 blocks:
    // 1 -> blocks 1-2, 2 -> blocks 3-5, 3 -> blocks 6-7, 4 -> blocks 8-10.
    const specs = [
        { cpNumber: 1, numBlocks: 2, startBlockNumber: 1, l1BlockNumber: 10 },
        { cpNumber: 2, numBlocks: 3, startBlockNumber: 3, l1BlockNumber: 11 },
        { cpNumber: 3, numBlocks: 2, startBlockNumber: 6, l1BlockNumber: 12 },
        { cpNumber: 4, numBlocks: 3, startBlockNumber: 8, l1BlockNumber: 13 },
    ];
    const published = [];
    let previousArchive;
    for (const { cpNumber, numBlocks, startBlockNumber, l1BlockNumber } of specs) {
        const opts = { numBlocks, startBlockNumber };
        if (previousArchive) {
            opts.previousArchive = previousArchive;
        }
        const cp = await Checkpoint.random(CheckpointNumber(cpNumber), opts);
        previousArchive = cp.blocks.at(-1).archive;
        published.push(makePublishedCheckpoint(cp, l1BlockNumber));
    }
    await store.addCheckpoints(published);
    expect(await store.getSynchedCheckpointNumber()).toBe(4);
    expect(await store.getLatestBlockNumber()).toBe(10);
    // Unwind 2 checkpoints at once (checkpoints 3 and 4, which together have 5 blocks).
    await store.unwindCheckpoints(CheckpointNumber(4), 2);
    expect(await store.getSynchedCheckpointNumber()).toBe(2);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Blocks 1-5 (checkpoints 1 and 2) survive; blocks 6-10 (checkpoints 3 and 4) are gone.
    for (let blockNumber = 1; blockNumber <= 10; blockNumber++) {
        const retrieved = await store.getCheckpointedBlock(blockNumber);
        if (blockNumber <= 5) {
            expect(retrieved).toBeDefined();
        } else {
            expect(retrieved).toBeUndefined();
        }
    }
    // Unwind the remaining 2 checkpoints at once (together another 5 blocks), emptying the store.
    await store.unwindCheckpoints(CheckpointNumber(2), 2);
    expect(await store.getSynchedCheckpointNumber()).toBe(0);
    expect(await store.getLatestBlockNumber()).toBe(0);
    // No block should survive.
    for (let blockNumber = 1; blockNumber <= 10; blockNumber++) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
});
|
|
454
|
+
it('getCheckpointedBlock returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
    // Checkpoint 1 holds blocks 1-3; checkpoint 2 holds blocks 4-5, chained via archive roots.
    const cp1 = await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 });
    const published1 = makePublishedCheckpoint(cp1, 10);
    const cp2 = await Checkpoint.random(CheckpointNumber(2), {
        numBlocks: 2,
        startBlockNumber: 4,
        previousArchive: cp1.blocks.at(-1).archive,
    });
    const published2 = makePublishedCheckpoint(cp2, 11);
    await store.addCheckpoints([published1, published2]);
    // A retrieved block must carry its own number plus its checkpoint's metadata.
    const verify = async (blockNumber, cpNumber, published) => {
        const retrieved = await store.getCheckpointedBlock(blockNumber);
        expect(retrieved).toBeDefined();
        expect(retrieved.checkpointNumber).toBe(cpNumber);
        expect(retrieved.block.number).toBe(blockNumber);
        expect(retrieved.l1).toEqual(published.l1);
        expect(retrieved.attestations.every((att, idx) => att.equals(published.attestations[idx]))).toBe(true);
    };
    // Blocks 1-3 map to checkpoint 1, blocks 4-5 to checkpoint 2.
    for (const blockNumber of [1, 2, 3]) {
        await verify(blockNumber, 1, published1);
    }
    for (const blockNumber of [4, 5]) {
        await verify(blockNumber, 2, published2);
    }
});
it('getCheckpointedBlockByHash returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
    const published = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published]);
    // Every block in the checkpoint must be retrievable by its header hash.
    for (const [idx, block] of published.checkpoint.blocks.entries()) {
        const retrieved = await store.getCheckpointedBlockByHash(await block.header.hash());
        expect(retrieved).toBeDefined();
        expect(retrieved.checkpointNumber).toBe(1);
        expect(retrieved.block.number).toBe(idx + 1);
        expect(retrieved.l1).toEqual(published.l1);
    }
});
it('getCheckpointedBlockByArchive returns correct checkpoint info for blocks within multi-block checkpoints', async () => {
    const published = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published]);
    // Every block in the checkpoint must be retrievable by its archive root.
    for (const [idx, block] of published.checkpoint.blocks.entries()) {
        const retrieved = await store.getCheckpointedBlockByArchive(block.archive.root);
        expect(retrieved).toBeDefined();
        expect(retrieved.checkpointNumber).toBe(1);
        expect(retrieved.block.number).toBe(idx + 1);
        expect(retrieved.l1).toEqual(published.l1);
    }
});
|
|
533
|
+
it('unwinding a multi-block checkpoint removes all its blocks', async () => {
    const published = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published]);
    // All 3 blocks are present before the unwind.
    for (const blockNumber of [1, 2, 3]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeDefined();
    }
    await store.unwindCheckpoints(CheckpointNumber(1), 1);
    // ...and gone afterwards, leaving the store empty.
    for (const blockNumber of [1, 2, 3]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
    expect(await store.getSynchedCheckpointNumber()).toBe(0);
    expect(await store.getLatestBlockNumber()).toBe(0);
});
|
|
554
|
+
});
|
|
555
|
+
describe('uncheckpointed blocks', ()=>{
|
|
556
|
+
it('can add blocks independently before a checkpoint arrives', async () => {
    // Establish checkpointed blocks 1-3 via checkpoint 1.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(3);
    // Add blocks 4-6 for the upcoming checkpoint 2 without the checkpoint itself,
    // chaining each archive root from the previous block.
    let lastArchive = published1.checkpoint.blocks.at(-1).archive;
    const pending = [];
    for (let idx = 0; idx < 3; idx++) {
        const block = await L2BlockNew.random(BlockNumber(4 + idx), {
            checkpointNumber: CheckpointNumber(2),
            indexWithinCheckpoint: idx,
            lastArchive,
        });
        lastArchive = block.archive;
        pending.push(block);
    }
    await store.addBlocks(pending);
    // No new checkpoint was added, but the block tip advanced to 6.
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(6);
});
it('getBlock retrieves uncheckpointed blocks', async () => {
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    // Blocks 3-4 arrive without a checkpoint, chained via archive roots.
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: block3.archive,
    });
    await store.addBlocks([block3, block4]);
    // getBlock serves checkpointed and uncheckpointed blocks alike.
    expect((await store.getBlock(1))?.number).toBe(1);
    expect((await store.getBlock(2))?.number).toBe(2);
    expect((await store.getBlock(3))?.equals(block3)).toBe(true);
    expect((await store.getBlock(4))?.equals(block4)).toBe(true);
    expect(await store.getBlock(5)).toBeUndefined();
    // Extend the pending checkpoint with one more block.
    const block5 = await L2BlockNew.random(BlockNumber(5), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 2,
        lastArchive: block4.archive,
    });
    await store.addBlocks([block5]);
    // The uncheckpointed blocks round-trip with the right contents.
    for (const [blockNumber, original] of [[3, block3], [4, block4], [5, block5]]) {
        const retrieved = await store.getBlock(blockNumber);
        expect(retrieved.number).toBe(blockNumber);
        expect(retrieved.equals(original)).toBe(true);
    }
});
|
|
645
|
+
it('getBlockByHash retrieves uncheckpointed blocks', async () => {
    // Two chained blocks for the initial checkpoint, with no checkpoint stored at all.
    const block1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const block2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 1,
        lastArchive: block1.archive,
    });
    await store.addBlocks([block1, block2]);
    // Lookup by header hash works even though the blocks are uncheckpointed.
    for (const block of [block1, block2]) {
        const retrieved = await store.getBlockByHash(await block.header.hash());
        expect(retrieved.equals(block)).toBe(true);
    }
});
it('getBlockByArchive retrieves uncheckpointed blocks', async () => {
    // Two chained blocks for the initial checkpoint, with no checkpoint stored at all.
    const block1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const block2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 1,
        lastArchive: block1.archive,
    });
    await store.addBlocks([block1, block2]);
    // Lookup by archive root works even though the blocks are uncheckpointed.
    for (const block of [block1, block2]) {
        const retrieved = await store.getBlockByArchive(block.archive.root);
        expect(retrieved.equals(block)).toBe(true);
    }
});
|
|
691
|
+
it('getCheckpointedBlock returns undefined for uncheckpointed blocks', async () => {
    // Checkpoint 1 covers blocks 1-2; blocks 3-4 then arrive without a checkpoint,
    // chained via archive roots.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: block3.archive,
    });
    await store.addBlocks([block3, block4]);
    // Checkpointed lookups only cover blocks 1-2...
    expect((await store.getCheckpointedBlock(1))?.block.number).toBe(1);
    expect((await store.getCheckpointedBlock(2))?.block.number).toBe(2);
    expect(await store.getCheckpointedBlock(3)).toBeUndefined();
    expect(await store.getCheckpointedBlock(4)).toBeUndefined();
    // ...while plain getBlock still serves every block.
    expect((await store.getBlock(3))?.equals(block3)).toBe(true);
    expect((await store.getBlock(4))?.equals(block4)).toBe(true);
});
it('getCheckpointedBlockByHash returns undefined for uncheckpointed blocks', async () => {
    // A single uncheckpointed block for the initial checkpoint.
    const block1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    await store.addBlocks([block1]);
    const hash = await block1.header.hash();
    // The checkpointed variant misses, the plain variant hits.
    expect(await store.getCheckpointedBlockByHash(hash)).toBeUndefined();
    expect((await store.getBlockByHash(hash))?.equals(block1)).toBe(true);
});
it('getCheckpointedBlockByArchive returns undefined for uncheckpointed blocks', async () => {
    // A single uncheckpointed block for the initial checkpoint.
    const block1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    await store.addBlocks([block1]);
    const archive = block1.archive.root;
    // The checkpointed variant misses, the plain variant hits.
    expect(await store.getCheckpointedBlockByArchive(archive)).toBeUndefined();
    expect((await store.getBlockByArchive(archive))?.equals(block1)).toBe(true);
});
|
|
756
|
+
it('checkpoint adopts previously added uncheckpointed blocks', async () => {
    // Blocks 1-3 arrive for the initial checkpoint before any checkpoint does,
    // each chaining its archive root from its predecessor.
    let lastArchive;
    const pending = [];
    for (let idx = 0; idx < 3; idx++) {
        const opts = { checkpointNumber: CheckpointNumber(1), indexWithinCheckpoint: idx };
        if (lastArchive) {
            opts.lastArchive = lastArchive;
        }
        const block = await L2BlockNew.random(BlockNumber(idx + 1), opts);
        lastArchive = block.archive;
        pending.push(block);
    }
    await store.addBlocks(pending);
    expect(await store.getSynchedCheckpointNumber()).toBe(0);
    expect(await store.getLatestBlockNumber()).toBe(3);
    // Without a checkpoint, none of the blocks count as checkpointed.
    for (const blockNumber of [1, 2, 3]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
    // Adding checkpoint 1 (covering blocks 1-3) adopts them.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 3, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(3);
    // Now every block reports checkpoint 1; the first also exposes its L1 metadata.
    const checkpointed1 = await store.getCheckpointedBlock(1);
    expect(checkpointed1).toBeDefined();
    expect(checkpointed1.checkpointNumber).toBe(1);
    expect(checkpointed1.l1).toEqual(published1.l1);
    for (const blockNumber of [2, 3]) {
        const checkpointed = await store.getCheckpointedBlock(blockNumber);
        expect(checkpointed).toBeDefined();
        expect(checkpointed.checkpointNumber).toBe(1);
    }
});
|
|
805
|
+
it('can add more uncheckpointed blocks after a checkpoint and then checkpoint them', async () => {
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    // Blocks 3-5 arrive for the upcoming checkpoint 2, chained via archive roots.
    const checkpoint1Archive = published1.checkpoint.blocks.at(-1).archive;
    let lastArchive = checkpoint1Archive;
    const pending = [];
    for (let idx = 0; idx < 3; idx++) {
        const block = await L2BlockNew.random(BlockNumber(3 + idx), {
            checkpointNumber: CheckpointNumber(2),
            indexWithinCheckpoint: idx,
            lastArchive,
        });
        lastArchive = block.archive;
        pending.push(block);
    }
    await store.addBlocks(pending);
    expect(await store.getSynchedCheckpointNumber()).toBe(1);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Blocks 3-5 are not checkpointed yet.
    for (const blockNumber of [3, 4, 5]) {
        expect(await store.getCheckpointedBlock(blockNumber)).toBeUndefined();
    }
    // Checkpoint 2 (covering blocks 3-5) chains from checkpoint 1 and adopts them.
    const published2 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(2), {
            numBlocks: 3,
            startBlockNumber: 3,
            previousArchive: checkpoint1Archive,
        }),
        11,
    );
    await store.addCheckpoints([published2]);
    expect(await store.getSynchedCheckpointNumber()).toBe(2);
    expect(await store.getLatestBlockNumber()).toBe(5);
    // Blocks 3-5 now report checkpoint 2; the first also exposes its L1 metadata.
    const checkpointed3 = await store.getCheckpointedBlock(3);
    expect(checkpointed3).toBeDefined();
    expect(checkpointed3.checkpointNumber).toBe(2);
    expect(checkpointed3.l1).toEqual(published2.l1);
    for (const blockNumber of [4, 5]) {
        const checkpointed = await store.getCheckpointedBlock(blockNumber);
        expect(checkpointed).toBeDefined();
        expect(checkpointed.checkpointNumber).toBe(2);
    }
});
|
|
865
|
+
it('getBlocks retrieves both checkpointed and uncheckpointed blocks', async () => {
    // Checkpoint 1 covers blocks 1-2; blocks 3-4 are added without a checkpoint,
    // chained via archive roots.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: block3.archive,
    });
    await store.addBlocks([block3, block4]);
    // A ranged query past the tip returns all four blocks in order.
    const allBlocks = await store.getBlocks(1, 10);
    expect(allBlocks.length).toBe(4);
    expect(allBlocks.map((b) => b.number)).toEqual([1, 2, 3, 4]);
});
|
|
900
|
+
});
|
|
901
|
+
describe('addBlocks validation', ()=>{
|
|
902
|
+
it('throws if blocks have different checkpoint numbers', async () => {
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    // Blocks 3-4 chain their archives correctly but disagree on the checkpoint
    // number, isolating the checkpoint-number validation.
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(3),
        indexWithinCheckpoint: 1,
        lastArchive: block3.archive,
    });
    await expect(store.addBlocks([block3, block4])).rejects.toThrow(CheckpointNumberNotConsistentError);
});
it('throws if checkpoint number is not the current checkpoint', async () => {
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    // Blocks for checkpoint 3 skip over checkpoint 2 and must be rejected.
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(3),
        indexWithinCheckpoint: 0,
    });
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(3),
        indexWithinCheckpoint: 1,
    });
    await expect(store.addBlocks([block3, block4])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
});
|
|
952
|
+
it('allows blocks with the same checkpoint number for the current checkpoint', async () => {
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    // Blocks 3-4 both target checkpoint 2 and chain archive roots correctly.
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: block3.archive,
    });
    await expect(store.addBlocks([block3, block4])).resolves.toBe(true);
    // Both blocks landed in the store.
    expect((await store.getBlock(3))?.equals(block3)).toBe(true);
    expect((await store.getBlock(4))?.equals(block4)).toBe(true);
});
it('allows blocks for the initial checkpoint when store is empty', async () => {
    // Chained blocks 1-2 for checkpoint 1 are accepted into an empty store.
    const block1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const block2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 1,
        lastArchive: block1.archive,
    });
    await expect(store.addBlocks([block1, block2])).resolves.toBe(true);
    // Both blocks landed and the tip advanced.
    expect((await store.getBlock(1))?.equals(block1)).toBe(true);
    expect((await store.getBlock(2))?.equals(block2)).toBe(true);
    expect(await store.getLatestBlockNumber()).toBe(2);
});
|
|
1001
|
+
it('throws if initial block is duplicated across calls', async () => {
    // Two distinct random blocks that both claim block number 1, index 0.
    const first = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    const duplicate = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(1),
        indexWithinCheckpoint: 0,
    });
    await expect(store.addBlocks([first])).resolves.toBe(true);
    // Re-adding block number 1 in a later call must be rejected.
    await expect(store.addBlocks([duplicate])).rejects.toThrow(InitialBlockNumberNotSequentialError);
});
it('throws if first block has wrong checkpoint number when store is empty', async () => {
    // An empty store expects checkpoint 1; blocks for checkpoint 2 must be rejected.
    const block1 = await L2BlockNew.random(BlockNumber(1), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
    });
    const block2 = await L2BlockNew.random(BlockNumber(2), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
    });
    await expect(store.addBlocks([block1, block2])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
});
|
|
1033
|
+
it('allows adding more blocks to the same checkpoint in separate calls', async () => {
    // Checkpoint 1 covers blocks 1-2.
    const published1 = makePublishedCheckpoint(
        await Checkpoint.random(CheckpointNumber(1), { numBlocks: 2, startBlockNumber: 1 }),
        10,
    );
    await store.addCheckpoints([published1]);
    // Block 3 opens checkpoint 2, chaining from the checkpoint's last block.
    const block3 = await L2BlockNew.random(BlockNumber(3), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 0,
        lastArchive: published1.checkpoint.blocks.at(-1).archive,
    });
    await expect(store.addBlocks([block3])).resolves.toBe(true);
    // Block 4 continues the same checkpoint in a separate call.
    const block4 = await L2BlockNew.random(BlockNumber(4), {
        checkpointNumber: CheckpointNumber(2),
        indexWithinCheckpoint: 1,
        lastArchive: block3.archive,
    });
    await expect(store.addBlocks([block4])).resolves.toBe(true);
    expect(await store.getLatestBlockNumber()).toBe(4);
});
|
|
1063
|
+
it('throws if adding blocks in separate calls with non-consecutive indexes', async ()=>{
|
|
1064
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1065
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1066
|
+
numBlocks: 2,
|
|
1067
|
+
startBlockNumber: 1
|
|
1068
|
+
}), 10);
|
|
1069
|
+
await store.addCheckpoints([
|
|
1070
|
+
checkpoint1
|
|
1071
|
+
]);
|
|
1072
|
+
// Add block 3 for checkpoint 2, chaining archive roots
|
|
1073
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1074
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1075
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1076
|
+
indexWithinCheckpoint: 0,
|
|
1077
|
+
lastArchive: lastBlockArchive
|
|
1078
|
+
});
|
|
1079
|
+
await expect(store.addBlocks([
|
|
1080
|
+
block3
|
|
1081
|
+
])).resolves.toBe(true);
|
|
1082
|
+
// Add block 4 for the same checkpoint 2 in a separate call but with a missing index
|
|
1083
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1084
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1085
|
+
indexWithinCheckpoint: 2,
|
|
1086
|
+
lastArchive: block3.archive
|
|
1087
|
+
});
|
|
1088
|
+
await expect(store.addBlocks([
|
|
1089
|
+
block4
|
|
1090
|
+
])).rejects.toThrow(BlockIndexNotSequentialError);
|
|
1091
|
+
expect(await store.getLatestBlockNumber()).toBe(3);
|
|
1092
|
+
});
|
|
1093
|
+
it('throws if second batch of blocks has different checkpoint number than first batch', async ()=>{
|
|
1094
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1095
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1096
|
+
numBlocks: 2,
|
|
1097
|
+
startBlockNumber: 1
|
|
1098
|
+
}), 10);
|
|
1099
|
+
await store.addCheckpoints([
|
|
1100
|
+
checkpoint1
|
|
1101
|
+
]);
|
|
1102
|
+
// Add block 3 for checkpoint 2, chaining archive roots
|
|
1103
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1104
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1105
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1106
|
+
indexWithinCheckpoint: 0,
|
|
1107
|
+
lastArchive: lastBlockArchive
|
|
1108
|
+
});
|
|
1109
|
+
await store.addBlocks([
|
|
1110
|
+
block3
|
|
1111
|
+
]);
|
|
1112
|
+
// Try to add block 4 for checkpoint 3 (should fail because current checkpoint is still 2)
|
|
1113
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1114
|
+
checkpointNumber: CheckpointNumber(3),
|
|
1115
|
+
indexWithinCheckpoint: 0,
|
|
1116
|
+
lastArchive: block3.archive
|
|
1117
|
+
});
|
|
1118
|
+
await expect(store.addBlocks([
|
|
1119
|
+
block4
|
|
1120
|
+
])).rejects.toThrow(InitialCheckpointNumberNotSequentialError);
|
|
1121
|
+
});
|
|
1122
|
+
it('force option bypasses checkpoint number validation', async ()=>{
|
|
1123
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1124
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1125
|
+
numBlocks: 2,
|
|
1126
|
+
startBlockNumber: 1
|
|
1127
|
+
}), 10);
|
|
1128
|
+
await store.addCheckpoints([
|
|
1129
|
+
checkpoint1
|
|
1130
|
+
]);
|
|
1131
|
+
// Add blocks with different checkpoint numbers using force option, chaining archive roots
|
|
1132
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1133
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1134
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1135
|
+
indexWithinCheckpoint: 0,
|
|
1136
|
+
lastArchive: lastBlockArchive
|
|
1137
|
+
});
|
|
1138
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1139
|
+
checkpointNumber: CheckpointNumber(5),
|
|
1140
|
+
indexWithinCheckpoint: 0,
|
|
1141
|
+
lastArchive: block3.archive
|
|
1142
|
+
});
|
|
1143
|
+
await expect(store.addBlocks([
|
|
1144
|
+
block3,
|
|
1145
|
+
block4
|
|
1146
|
+
], {
|
|
1147
|
+
force: true
|
|
1148
|
+
})).resolves.toBe(true);
|
|
1149
|
+
});
|
|
1150
|
+
it('force option bypasses blockindex number validation', async ()=>{
|
|
1151
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1152
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1153
|
+
numBlocks: 2,
|
|
1154
|
+
startBlockNumber: 1
|
|
1155
|
+
}), 10);
|
|
1156
|
+
await store.addCheckpoints([
|
|
1157
|
+
checkpoint1
|
|
1158
|
+
]);
|
|
1159
|
+
// Add blocks with different checkpoint numbers using force option, chaining archive roots
|
|
1160
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1161
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1162
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1163
|
+
indexWithinCheckpoint: 0,
|
|
1164
|
+
lastArchive: lastBlockArchive
|
|
1165
|
+
});
|
|
1166
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1167
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1168
|
+
indexWithinCheckpoint: 2,
|
|
1169
|
+
lastArchive: block3.archive
|
|
1170
|
+
});
|
|
1171
|
+
await expect(store.addBlocks([
|
|
1172
|
+
block3,
|
|
1173
|
+
block4
|
|
1174
|
+
], {
|
|
1175
|
+
force: true
|
|
1176
|
+
})).resolves.toBe(true);
|
|
1177
|
+
});
|
|
1178
|
+
it('throws if adding blocks with non-consecutive archives', async ()=>{
|
|
1179
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1180
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1181
|
+
numBlocks: 2,
|
|
1182
|
+
startBlockNumber: 1
|
|
1183
|
+
}), 10);
|
|
1184
|
+
await store.addCheckpoints([
|
|
1185
|
+
checkpoint1
|
|
1186
|
+
]);
|
|
1187
|
+
// Add block 3 for checkpoint 2 with incorrect archive
|
|
1188
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1189
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1190
|
+
indexWithinCheckpoint: 0
|
|
1191
|
+
});
|
|
1192
|
+
await expect(store.addBlocks([
|
|
1193
|
+
block3
|
|
1194
|
+
])).rejects.toThrow(BlockArchiveNotConsistentError);
|
|
1195
|
+
expect(await store.getLatestBlockNumber()).toBe(2);
|
|
1196
|
+
});
|
|
1197
|
+
it('throws if adding blocks with non-consecutive archives across calls', async ()=>{
|
|
1198
|
+
// First, establish checkpoint 1 with blocks 1-2
|
|
1199
|
+
const checkpoint1 = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1200
|
+
numBlocks: 2,
|
|
1201
|
+
startBlockNumber: 1
|
|
1202
|
+
}), 10);
|
|
1203
|
+
await store.addCheckpoints([
|
|
1204
|
+
checkpoint1
|
|
1205
|
+
]);
|
|
1206
|
+
// Add block 3 for checkpoint 2 with correct archive
|
|
1207
|
+
const lastBlockArchive = checkpoint1.checkpoint.blocks.at(-1).archive;
|
|
1208
|
+
const block3 = await L2BlockNew.random(BlockNumber(3), {
|
|
1209
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1210
|
+
indexWithinCheckpoint: 0,
|
|
1211
|
+
lastArchive: lastBlockArchive
|
|
1212
|
+
});
|
|
1213
|
+
await expect(store.addBlocks([
|
|
1214
|
+
block3
|
|
1215
|
+
])).resolves.toBe(true);
|
|
1216
|
+
// Add block 4 with incorrect archive (should fail)
|
|
1217
|
+
const block4 = await L2BlockNew.random(BlockNumber(4), {
|
|
1218
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1219
|
+
indexWithinCheckpoint: 1,
|
|
1220
|
+
lastArchive: AppendOnlyTreeSnapshot.random()
|
|
1221
|
+
});
|
|
1222
|
+
await expect(store.addBlocks([
|
|
1223
|
+
block4
|
|
1224
|
+
])).rejects.toThrow(BlockArchiveNotConsistentError);
|
|
1225
|
+
expect(await store.getLatestBlockNumber()).toBe(3);
|
|
1226
|
+
});
|
|
1227
|
+
});
|
|
1228
|
+
describe('getBlocksForCheckpoint', ()=>{
|
|
1229
|
+
it('returns blocks for a single-block checkpoint', async ()=>{
|
|
1230
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1231
|
+
numBlocks: 1,
|
|
1232
|
+
startBlockNumber: 1
|
|
1233
|
+
}), 10);
|
|
1234
|
+
await store.addCheckpoints([
|
|
1235
|
+
checkpoint
|
|
1236
|
+
]);
|
|
1237
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1238
|
+
expect(blocks).toBeDefined();
|
|
1239
|
+
expect(blocks.length).toBe(1);
|
|
1240
|
+
expect(blocks[0].number).toBe(1);
|
|
1241
|
+
});
|
|
1242
|
+
it('returns all blocks for a multi-block checkpoint', async ()=>{
|
|
1243
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1244
|
+
numBlocks: 4,
|
|
1245
|
+
startBlockNumber: 1
|
|
1246
|
+
}), 10);
|
|
1247
|
+
await store.addCheckpoints([
|
|
1248
|
+
checkpoint
|
|
1249
|
+
]);
|
|
1250
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1251
|
+
expect(blocks).toBeDefined();
|
|
1252
|
+
expect(blocks.length).toBe(4);
|
|
1253
|
+
expect(blocks.map((b)=>b.number)).toEqual([
|
|
1254
|
+
1,
|
|
1255
|
+
2,
|
|
1256
|
+
3,
|
|
1257
|
+
4
|
|
1258
|
+
]);
|
|
1259
|
+
});
|
|
1260
|
+
it('returns correct blocks for different checkpoints', async ()=>{
|
|
1261
|
+
// Create checkpoints with chained archive roots
|
|
1262
|
+
// Checkpoint 1: blocks 1-2
|
|
1263
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1264
|
+
numBlocks: 2,
|
|
1265
|
+
startBlockNumber: 1
|
|
1266
|
+
});
|
|
1267
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1268
|
+
// Checkpoint 2: blocks 3-5
|
|
1269
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1270
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1271
|
+
numBlocks: 3,
|
|
1272
|
+
startBlockNumber: 3,
|
|
1273
|
+
previousArchive: previousArchive1
|
|
1274
|
+
});
|
|
1275
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1276
|
+
// Checkpoint 3: blocks 6-7
|
|
1277
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1278
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1279
|
+
numBlocks: 2,
|
|
1280
|
+
startBlockNumber: 6,
|
|
1281
|
+
previousArchive: previousArchive2
|
|
1282
|
+
});
|
|
1283
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1284
|
+
await store.addCheckpoints([
|
|
1285
|
+
checkpoint1,
|
|
1286
|
+
checkpoint2,
|
|
1287
|
+
checkpoint3
|
|
1288
|
+
]);
|
|
1289
|
+
const blocks1 = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1290
|
+
expect(blocks1).toBeDefined();
|
|
1291
|
+
expect(blocks1.map((b)=>b.number)).toEqual([
|
|
1292
|
+
1,
|
|
1293
|
+
2
|
|
1294
|
+
]);
|
|
1295
|
+
const blocks2 = await store.getBlocksForCheckpoint(CheckpointNumber(2));
|
|
1296
|
+
expect(blocks2).toBeDefined();
|
|
1297
|
+
expect(blocks2.map((b)=>b.number)).toEqual([
|
|
1298
|
+
3,
|
|
1299
|
+
4,
|
|
1300
|
+
5
|
|
1301
|
+
]);
|
|
1302
|
+
const blocks3 = await store.getBlocksForCheckpoint(CheckpointNumber(3));
|
|
1303
|
+
expect(blocks3).toBeDefined();
|
|
1304
|
+
expect(blocks3.map((b)=>b.number)).toEqual([
|
|
1305
|
+
6,
|
|
1306
|
+
7
|
|
1307
|
+
]);
|
|
1308
|
+
});
|
|
1309
|
+
it('returns undefined for non-existent checkpoint', async ()=>{
|
|
1310
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1311
|
+
numBlocks: 2,
|
|
1312
|
+
startBlockNumber: 1
|
|
1313
|
+
}), 10);
|
|
1314
|
+
await store.addCheckpoints([
|
|
1315
|
+
checkpoint
|
|
1316
|
+
]);
|
|
1317
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(5));
|
|
1318
|
+
expect(blocks).toBeUndefined();
|
|
1319
|
+
});
|
|
1320
|
+
it('returns undefined when no checkpoints exist', async ()=>{
|
|
1321
|
+
const blocks = await store.getBlocksForCheckpoint(CheckpointNumber(1));
|
|
1322
|
+
expect(blocks).toBeUndefined();
|
|
1323
|
+
});
|
|
1324
|
+
});
|
|
1325
|
+
describe('getRangeOfCheckpoints', ()=>{
|
|
1326
|
+
it('returns empty array when no checkpoints exist', async ()=>{
|
|
1327
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1328
|
+
expect(checkpoints).toEqual([]);
|
|
1329
|
+
});
|
|
1330
|
+
it('returns single checkpoint', async ()=>{
|
|
1331
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1332
|
+
numBlocks: 2,
|
|
1333
|
+
startBlockNumber: 1
|
|
1334
|
+
}), 10);
|
|
1335
|
+
await store.addCheckpoints([
|
|
1336
|
+
checkpoint
|
|
1337
|
+
]);
|
|
1338
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1339
|
+
expect(checkpoints.length).toBe(1);
|
|
1340
|
+
expect(checkpoints[0].checkpointNumber).toBe(1);
|
|
1341
|
+
expect(checkpoints[0].startBlock).toBe(1);
|
|
1342
|
+
expect(checkpoints[0].numBlocks).toBe(2);
|
|
1343
|
+
});
|
|
1344
|
+
it('returns multiple checkpoints in order', async ()=>{
|
|
1345
|
+
// Create checkpoints with chained archive roots
|
|
1346
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1347
|
+
numBlocks: 2,
|
|
1348
|
+
startBlockNumber: 1
|
|
1349
|
+
});
|
|
1350
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1351
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1352
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1353
|
+
numBlocks: 3,
|
|
1354
|
+
startBlockNumber: 3,
|
|
1355
|
+
previousArchive: previousArchive1
|
|
1356
|
+
});
|
|
1357
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1358
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1359
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1360
|
+
numBlocks: 1,
|
|
1361
|
+
startBlockNumber: 6,
|
|
1362
|
+
previousArchive: previousArchive2
|
|
1363
|
+
});
|
|
1364
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1365
|
+
await store.addCheckpoints([
|
|
1366
|
+
checkpoint1,
|
|
1367
|
+
checkpoint2,
|
|
1368
|
+
checkpoint3
|
|
1369
|
+
]);
|
|
1370
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1371
|
+
expect(checkpoints.length).toBe(3);
|
|
1372
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1373
|
+
1,
|
|
1374
|
+
2,
|
|
1375
|
+
3
|
|
1376
|
+
]);
|
|
1377
|
+
expect(checkpoints.map((c)=>c.startBlock)).toEqual([
|
|
1378
|
+
1,
|
|
1379
|
+
3,
|
|
1380
|
+
6
|
|
1381
|
+
]);
|
|
1382
|
+
expect(checkpoints.map((c)=>c.numBlocks)).toEqual([
|
|
1383
|
+
2,
|
|
1384
|
+
3,
|
|
1385
|
+
1
|
|
1386
|
+
]);
|
|
1387
|
+
});
|
|
1388
|
+
it('respects the from parameter', async ()=>{
|
|
1389
|
+
// Create checkpoints with chained archive roots
|
|
1390
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1391
|
+
numBlocks: 2,
|
|
1392
|
+
startBlockNumber: 1
|
|
1393
|
+
});
|
|
1394
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1395
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1396
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1397
|
+
numBlocks: 2,
|
|
1398
|
+
startBlockNumber: 3,
|
|
1399
|
+
previousArchive: previousArchive1
|
|
1400
|
+
});
|
|
1401
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1402
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1403
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1404
|
+
numBlocks: 2,
|
|
1405
|
+
startBlockNumber: 5,
|
|
1406
|
+
previousArchive: previousArchive2
|
|
1407
|
+
});
|
|
1408
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1409
|
+
await store.addCheckpoints([
|
|
1410
|
+
checkpoint1,
|
|
1411
|
+
checkpoint2,
|
|
1412
|
+
checkpoint3
|
|
1413
|
+
]);
|
|
1414
|
+
// Start from checkpoint 2
|
|
1415
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(2), 10);
|
|
1416
|
+
expect(checkpoints.length).toBe(2);
|
|
1417
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1418
|
+
2,
|
|
1419
|
+
3
|
|
1420
|
+
]);
|
|
1421
|
+
});
|
|
1422
|
+
it('respects the limit parameter', async ()=>{
|
|
1423
|
+
// Create checkpoints with chained archive roots
|
|
1424
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1425
|
+
numBlocks: 1,
|
|
1426
|
+
startBlockNumber: 1
|
|
1427
|
+
});
|
|
1428
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1429
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1430
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1431
|
+
numBlocks: 1,
|
|
1432
|
+
startBlockNumber: 2,
|
|
1433
|
+
previousArchive: previousArchive1
|
|
1434
|
+
});
|
|
1435
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1436
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1437
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1438
|
+
numBlocks: 1,
|
|
1439
|
+
startBlockNumber: 3,
|
|
1440
|
+
previousArchive: previousArchive2
|
|
1441
|
+
});
|
|
1442
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1443
|
+
const previousArchive3 = checkpoint3Cp.blocks.at(-1).archive;
|
|
1444
|
+
const checkpoint4Cp = await Checkpoint.random(CheckpointNumber(4), {
|
|
1445
|
+
numBlocks: 1,
|
|
1446
|
+
startBlockNumber: 4,
|
|
1447
|
+
previousArchive: previousArchive3
|
|
1448
|
+
});
|
|
1449
|
+
const checkpoint4 = makePublishedCheckpoint(checkpoint4Cp, 13);
|
|
1450
|
+
await store.addCheckpoints([
|
|
1451
|
+
checkpoint1,
|
|
1452
|
+
checkpoint2,
|
|
1453
|
+
checkpoint3,
|
|
1454
|
+
checkpoint4
|
|
1455
|
+
]);
|
|
1456
|
+
// Only get 2 checkpoints
|
|
1457
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 2);
|
|
1458
|
+
expect(checkpoints.length).toBe(2);
|
|
1459
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1460
|
+
1,
|
|
1461
|
+
2
|
|
1462
|
+
]);
|
|
1463
|
+
});
|
|
1464
|
+
it('returns correct checkpoint data including L1 info', async ()=>{
|
|
1465
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1466
|
+
numBlocks: 3,
|
|
1467
|
+
startBlockNumber: 1
|
|
1468
|
+
}), 42);
|
|
1469
|
+
await store.addCheckpoints([
|
|
1470
|
+
checkpoint
|
|
1471
|
+
]);
|
|
1472
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 1);
|
|
1473
|
+
expect(checkpoints.length).toBe(1);
|
|
1474
|
+
const data = checkpoints[0];
|
|
1475
|
+
expect(data.checkpointNumber).toBe(1);
|
|
1476
|
+
expect(data.startBlock).toBe(1);
|
|
1477
|
+
expect(data.numBlocks).toBe(3);
|
|
1478
|
+
expect(data.l1.blockNumber).toBe(42n);
|
|
1479
|
+
expect(data.header.equals(checkpoint.checkpoint.header)).toBe(true);
|
|
1480
|
+
expect(data.archive.equals(checkpoint.checkpoint.archive)).toBe(true);
|
|
1481
|
+
});
|
|
1482
|
+
it('returns empty array when from is beyond available checkpoints', async ()=>{
|
|
1483
|
+
const checkpoint = makePublishedCheckpoint(await Checkpoint.random(CheckpointNumber(1), {
|
|
1484
|
+
numBlocks: 2,
|
|
1485
|
+
startBlockNumber: 1
|
|
1486
|
+
}), 10);
|
|
1487
|
+
await store.addCheckpoints([
|
|
1488
|
+
checkpoint
|
|
1489
|
+
]);
|
|
1490
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(5), 10);
|
|
1491
|
+
expect(checkpoints).toEqual([]);
|
|
1492
|
+
});
|
|
1493
|
+
it('works correctly after unwinding checkpoints', async ()=>{
|
|
1494
|
+
// Create checkpoints with chained archive roots
|
|
1495
|
+
const checkpoint1Cp = await Checkpoint.random(CheckpointNumber(1), {
|
|
1496
|
+
numBlocks: 2,
|
|
1497
|
+
startBlockNumber: 1
|
|
1498
|
+
});
|
|
1499
|
+
const checkpoint1 = makePublishedCheckpoint(checkpoint1Cp, 10);
|
|
1500
|
+
const previousArchive1 = checkpoint1Cp.blocks.at(-1).archive;
|
|
1501
|
+
const checkpoint2Cp = await Checkpoint.random(CheckpointNumber(2), {
|
|
1502
|
+
numBlocks: 2,
|
|
1503
|
+
startBlockNumber: 3,
|
|
1504
|
+
previousArchive: previousArchive1
|
|
1505
|
+
});
|
|
1506
|
+
const checkpoint2 = makePublishedCheckpoint(checkpoint2Cp, 11);
|
|
1507
|
+
const previousArchive2 = checkpoint2Cp.blocks.at(-1).archive;
|
|
1508
|
+
const checkpoint3Cp = await Checkpoint.random(CheckpointNumber(3), {
|
|
1509
|
+
numBlocks: 2,
|
|
1510
|
+
startBlockNumber: 5,
|
|
1511
|
+
previousArchive: previousArchive2
|
|
1512
|
+
});
|
|
1513
|
+
const checkpoint3 = makePublishedCheckpoint(checkpoint3Cp, 12);
|
|
1514
|
+
await store.addCheckpoints([
|
|
1515
|
+
checkpoint1,
|
|
1516
|
+
checkpoint2,
|
|
1517
|
+
checkpoint3
|
|
1518
|
+
]);
|
|
1519
|
+
// Unwind checkpoint 3
|
|
1520
|
+
await store.unwindCheckpoints(CheckpointNumber(3), 1);
|
|
1521
|
+
const checkpoints = await store.getRangeOfCheckpoints(CheckpointNumber(1), 10);
|
|
1522
|
+
expect(checkpoints.length).toBe(2);
|
|
1523
|
+
expect(checkpoints.map((c)=>c.checkpointNumber)).toEqual([
|
|
1524
|
+
1,
|
|
1525
|
+
2
|
|
1526
|
+
]);
|
|
1527
|
+
});
|
|
1528
|
+
});
|
|
1529
|
+
describe('getCheckpointedBlock', ()=>{
|
|
1530
|
+
beforeEach(async ()=>{
|
|
1531
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1532
|
+
});
|
|
1533
|
+
it.each(blockNumberTests)('retrieves previously stored block %i', async (blockNumber, getExpectedBlock)=>{
|
|
1534
|
+
const retrievedBlock = await store.getCheckpointedBlock(blockNumber);
|
|
1535
|
+
const expectedBlock = getExpectedBlock();
|
|
1536
|
+
const expectedCheckpoint = publishedCheckpoints[blockNumber - 1];
|
|
1537
|
+
expect(retrievedBlock).toBeDefined();
|
|
1538
|
+
expectCheckpointedBlockEquals(retrievedBlock, expectedBlock, expectedCheckpoint);
|
|
1539
|
+
});
|
|
1540
|
+
it('returns undefined if block is not found', async ()=>{
|
|
1541
|
+
await expect(store.getCheckpointedBlock(12)).resolves.toBeUndefined();
|
|
1542
|
+
});
|
|
1543
|
+
it('returns undefined for block number 0', async ()=>{
|
|
1544
|
+
await expect(store.getCheckpointedBlock(0)).resolves.toBeUndefined();
|
|
98
1545
|
});
|
|
99
1546
|
});
|
|
100
|
-
describe('
|
|
1547
|
+
describe('getCheckpointedBlockByHash', ()=>{
|
|
101
1548
|
beforeEach(async ()=>{
|
|
102
|
-
await store.
|
|
1549
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
103
1550
|
});
|
|
104
|
-
it
|
|
105
|
-
|
|
1551
|
+
it('retrieves a block by its hash', async ()=>{
|
|
1552
|
+
const expectedCheckpoint = publishedCheckpoints[5];
|
|
1553
|
+
const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
|
|
1554
|
+
const blockHash = await expectedBlock.header.hash();
|
|
1555
|
+
const retrievedBlock = await store.getCheckpointedBlockByHash(blockHash);
|
|
1556
|
+
expect(retrievedBlock).toBeDefined();
|
|
1557
|
+
expectCheckpointedBlockEquals(retrievedBlock, expectedBlock, expectedCheckpoint);
|
|
106
1558
|
});
|
|
107
|
-
it('returns
|
|
108
|
-
|
|
1559
|
+
it('returns undefined for non-existent block hash', async ()=>{
|
|
1560
|
+
const nonExistentHash = Fr.random();
|
|
1561
|
+
await expect(store.getCheckpointedBlockByHash(nonExistentHash)).resolves.toBeUndefined();
|
|
109
1562
|
});
|
|
110
|
-
|
|
111
|
-
|
|
1563
|
+
});
|
|
1564
|
+
describe('getCheckpointedBlockByArchive', ()=>{
|
|
1565
|
+
beforeEach(async ()=>{
|
|
1566
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1567
|
+
});
|
|
1568
|
+
it('retrieves a block by its archive root', async ()=>{
|
|
1569
|
+
const expectedCheckpoint = publishedCheckpoints[3];
|
|
1570
|
+
const expectedBlock = expectedCheckpoint.checkpoint.blocks[0];
|
|
1571
|
+
const archive = expectedBlock.archive.root;
|
|
1572
|
+
const retrievedBlock = await store.getCheckpointedBlockByArchive(archive);
|
|
1573
|
+
expect(retrievedBlock).toBeDefined();
|
|
1574
|
+
expectCheckpointedBlockEquals(retrievedBlock, expectedBlock, expectedCheckpoint);
|
|
1575
|
+
});
|
|
1576
|
+
it('returns undefined for non-existent archive root', async ()=>{
|
|
1577
|
+
const nonExistentArchive = Fr.random();
|
|
1578
|
+
await expect(store.getCheckpointedBlockByArchive(nonExistentArchive)).resolves.toBeUndefined();
|
|
1579
|
+
});
|
|
1580
|
+
});
|
|
1581
|
+
describe('getBlockHeaderByHash', ()=>{
|
|
1582
|
+
beforeEach(async ()=>{
|
|
1583
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1584
|
+
});
|
|
1585
|
+
it('retrieves a block header by its hash', async ()=>{
|
|
1586
|
+
const expectedBlock = publishedCheckpoints[7].checkpoint.blocks[0];
|
|
1587
|
+
const blockHash = await expectedBlock.header.hash();
|
|
1588
|
+
const retrievedHeader = await store.getBlockHeaderByHash(blockHash);
|
|
1589
|
+
expect(retrievedHeader).toBeDefined();
|
|
1590
|
+
expect(retrievedHeader.equals(expectedBlock.header)).toBe(true);
|
|
112
1591
|
});
|
|
113
|
-
it('
|
|
114
|
-
|
|
1592
|
+
it('returns undefined for non-existent block hash', async ()=>{
|
|
1593
|
+
const nonExistentHash = Fr.random();
|
|
1594
|
+
await expect(store.getBlockHeaderByHash(nonExistentHash)).resolves.toBeUndefined();
|
|
115
1595
|
});
|
|
116
1596
|
});
|
|
117
|
-
describe('
|
|
118
|
-
|
|
119
|
-
await
|
|
1597
|
+
describe('getBlockHeaderByArchive', ()=>{
|
|
1598
|
+
beforeEach(async ()=>{
|
|
1599
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1600
|
+
});
|
|
1601
|
+
it('retrieves a block header by its archive root', async ()=>{
|
|
1602
|
+
const expectedBlock = publishedCheckpoints[2].checkpoint.blocks[0];
|
|
1603
|
+
const archive = expectedBlock.archive.root;
|
|
1604
|
+
const retrievedHeader = await store.getBlockHeaderByArchive(archive);
|
|
1605
|
+
expect(retrievedHeader).toBeDefined();
|
|
1606
|
+
expect(retrievedHeader.equals(expectedBlock.header)).toBe(true);
|
|
1607
|
+
});
|
|
1608
|
+
it('returns undefined for non-existent archive root', async ()=>{
|
|
1609
|
+
const nonExistentArchive = Fr.random();
|
|
1610
|
+
await expect(store.getBlockHeaderByArchive(nonExistentArchive)).resolves.toBeUndefined();
|
|
1611
|
+
});
|
|
1612
|
+
});
|
|
1613
|
+
describe('getSynchedCheckpointNumber', ()=>{
|
|
1614
|
+
it('returns the checkpoint number before INITIAL_CHECKPOINT_NUMBER if no checkpoints have been added', async ()=>{
|
|
1615
|
+
await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(INITIAL_CHECKPOINT_NUMBER - 1);
|
|
120
1616
|
});
|
|
121
|
-
it(
|
|
122
|
-
await store.
|
|
123
|
-
await expect(store.
|
|
1617
|
+
it('returns the most recently added checkpoint number', async ()=>{
|
|
1618
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
1619
|
+
await expect(store.getSynchedCheckpointNumber()).resolves.toEqual(publishedCheckpoints.at(-1).checkpoint.number);
|
|
124
1620
|
});
|
|
125
1621
|
});
|
|
126
1622
|
describe('getSynchPoint', ()=>{
|
|
@@ -131,139 +1627,385 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
131
1627
|
});
|
|
132
1628
|
});
|
|
133
1629
|
it('returns the L1 block number in which the most recent L2 block was published', async ()=>{
|
|
134
|
-
await store.
|
|
1630
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
135
1631
|
await expect(store.getSynchPoint()).resolves.toEqual({
|
|
136
1632
|
blocksSynchedTo: 19n,
|
|
137
1633
|
messagesSynchedTo: undefined
|
|
138
1634
|
});
|
|
139
1635
|
});
|
|
140
1636
|
it('returns the L1 block number that most recently added messages from inbox', async ()=>{
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
1637
|
+
const l1BlockHash = Buffer32.random();
|
|
1638
|
+
const l1BlockNumber = 10n;
|
|
1639
|
+
await store.setMessageSynchedL1Block({
|
|
1640
|
+
l1BlockNumber: 5n,
|
|
1641
|
+
l1BlockHash: Buffer32.random()
|
|
1642
|
+
});
|
|
1643
|
+
await store.addL1ToL2Messages([
|
|
1644
|
+
makeInboxMessage(Buffer16.ZERO, {
|
|
1645
|
+
l1BlockNumber,
|
|
1646
|
+
l1BlockHash
|
|
1647
|
+
})
|
|
1648
|
+
]);
|
|
1649
|
+
await expect(store.getSynchPoint()).resolves.toEqual({
|
|
1650
|
+
blocksSynchedTo: undefined,
|
|
1651
|
+
messagesSynchedTo: {
|
|
1652
|
+
l1BlockHash,
|
|
1653
|
+
l1BlockNumber
|
|
1654
|
+
}
|
|
1655
|
+
});
|
|
1656
|
+
});
|
|
1657
|
+
it('returns the latest syncpoint if latest message is behind', async ()=>{
|
|
1658
|
+
const l1BlockHash = Buffer32.random();
|
|
1659
|
+
const l1BlockNumber = 10n;
|
|
1660
|
+
await store.setMessageSynchedL1Block({
|
|
1661
|
+
l1BlockNumber,
|
|
1662
|
+
l1BlockHash
|
|
1663
|
+
});
|
|
1664
|
+
const msg = makeInboxMessage(Buffer16.ZERO, {
|
|
1665
|
+
l1BlockNumber: 5n,
|
|
1666
|
+
l1BlockHash: Buffer32.random()
|
|
146
1667
|
});
|
|
1668
|
+
await store.addL1ToL2Messages([
|
|
1669
|
+
msg
|
|
1670
|
+
]);
|
|
147
1671
|
await expect(store.getSynchPoint()).resolves.toEqual({
|
|
148
1672
|
blocksSynchedTo: undefined,
|
|
149
|
-
messagesSynchedTo:
|
|
1673
|
+
messagesSynchedTo: {
|
|
1674
|
+
l1BlockHash,
|
|
1675
|
+
l1BlockNumber
|
|
1676
|
+
}
|
|
150
1677
|
});
|
|
151
1678
|
});
|
|
152
1679
|
});
|
|
153
1680
|
describe('addLogs', ()=>{
|
|
154
1681
|
it('adds private & public logs', async ()=>{
|
|
155
|
-
const
|
|
156
|
-
await
|
|
157
|
-
|
|
158
|
-
])).resolves.toEqual(true);
|
|
159
|
-
});
|
|
160
|
-
});
|
|
161
|
-
describe('deleteLogs', ()=>{
|
|
162
|
-
it('deletes private & public logs', async ()=>{
|
|
163
|
-
const block = blocks[0].data;
|
|
164
|
-
await store.addBlocks([
|
|
165
|
-
blocks[0]
|
|
1682
|
+
const checkpoint = publishedCheckpoints[0];
|
|
1683
|
+
await store.addCheckpoints([
|
|
1684
|
+
checkpoint
|
|
166
1685
|
]);
|
|
167
|
-
await expect(store.addLogs(
|
|
168
|
-
block
|
|
169
|
-
])).resolves.toEqual(true);
|
|
170
|
-
expect((await store.getPrivateLogs(1, 1)).length).toEqual(block.body.txEffects.map((txEffect)=>txEffect.privateLogs).flat().length);
|
|
171
|
-
expect((await store.getPublicLogs({
|
|
172
|
-
fromBlock: 1
|
|
173
|
-
})).logs.length).toEqual(block.body.txEffects.map((txEffect)=>txEffect.publicLogs).flat().length);
|
|
174
|
-
// This one is a pain for memory as we would never want to just delete memory in the middle.
|
|
175
|
-
await store.deleteLogs([
|
|
176
|
-
block
|
|
177
|
-
]);
|
|
178
|
-
expect((await store.getPrivateLogs(1, 1)).length).toEqual(0);
|
|
179
|
-
expect((await store.getPublicLogs({
|
|
180
|
-
fromBlock: 1
|
|
181
|
-
})).logs.length).toEqual(0);
|
|
1686
|
+
await expect(store.addLogs(checkpoint.checkpoint.blocks)).resolves.toEqual(true);
|
|
182
1687
|
});
|
|
183
1688
|
});
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
1689
|
+
it('deleteLogs', async ()=>{
|
|
1690
|
+
const block = publishedCheckpoints[0].checkpoint.blocks[0];
|
|
1691
|
+
await store.addBlocks([
|
|
1692
|
+
block
|
|
1693
|
+
]);
|
|
1694
|
+
await expect(store.addLogs([
|
|
1695
|
+
block
|
|
1696
|
+
])).resolves.toEqual(true);
|
|
1697
|
+
expect((await store.getPublicLogs({
|
|
1698
|
+
fromBlock: BlockNumber(1)
|
|
1699
|
+
})).logs.length).toEqual(block.body.txEffects.map((txEffect)=>txEffect.publicLogs).flat().length);
|
|
1700
|
+
// This one is a pain for memory as we would never want to just delete memory in the middle.
|
|
1701
|
+
await store.deleteLogs([
|
|
1702
|
+
block
|
|
1703
|
+
]);
|
|
1704
|
+
expect((await store.getPublicLogs({
|
|
1705
|
+
fromBlock: BlockNumber(1)
|
|
1706
|
+
})).logs.length).toEqual(0);
|
|
196
1707
|
});
|
|
197
1708
|
describe('getTxEffect', ()=>{
|
|
1709
|
+
const getBlock = (i)=>publishedCheckpoints[i].checkpoint.blocks[0];
|
|
198
1710
|
beforeEach(async ()=>{
|
|
199
|
-
await store.addLogs(
|
|
200
|
-
await store.
|
|
1711
|
+
await store.addLogs(publishedCheckpoints.flatMap((x)=>x.checkpoint.blocks));
|
|
1712
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
201
1713
|
});
|
|
202
1714
|
it.each([
|
|
203
|
-
()=>
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
1715
|
+
()=>({
|
|
1716
|
+
data: getBlock(0).body.txEffects[0],
|
|
1717
|
+
block: getBlock(0),
|
|
1718
|
+
txIndexInBlock: 0
|
|
1719
|
+
}),
|
|
1720
|
+
()=>({
|
|
1721
|
+
data: getBlock(9).body.txEffects[3],
|
|
1722
|
+
block: getBlock(9),
|
|
1723
|
+
txIndexInBlock: 3
|
|
1724
|
+
}),
|
|
1725
|
+
()=>({
|
|
1726
|
+
data: getBlock(3).body.txEffects[1],
|
|
1727
|
+
block: getBlock(3),
|
|
1728
|
+
txIndexInBlock: 1
|
|
1729
|
+
}),
|
|
1730
|
+
()=>({
|
|
1731
|
+
data: getBlock(5).body.txEffects[2],
|
|
1732
|
+
block: getBlock(5),
|
|
1733
|
+
txIndexInBlock: 2
|
|
1734
|
+
}),
|
|
1735
|
+
()=>({
|
|
1736
|
+
data: getBlock(1).body.txEffects[0],
|
|
1737
|
+
block: getBlock(1),
|
|
1738
|
+
txIndexInBlock: 0
|
|
1739
|
+
})
|
|
208
1740
|
])('retrieves a previously stored transaction', async (getExpectedTx)=>{
|
|
209
|
-
const
|
|
210
|
-
const
|
|
1741
|
+
const { data, block, txIndexInBlock } = getExpectedTx();
|
|
1742
|
+
const expectedTx = {
|
|
1743
|
+
data,
|
|
1744
|
+
l2BlockNumber: block.number,
|
|
1745
|
+
l2BlockHash: L2BlockHash.fromField(await block.header.hash()),
|
|
1746
|
+
txIndexInBlock
|
|
1747
|
+
};
|
|
1748
|
+
const actualTx = await store.getTxEffect(data.txHash);
|
|
211
1749
|
expect(actualTx).toEqual(expectedTx);
|
|
212
1750
|
});
|
|
213
1751
|
it('returns undefined if tx is not found', async ()=>{
|
|
214
1752
|
await expect(store.getTxEffect(TxHash.random())).resolves.toBeUndefined();
|
|
215
1753
|
});
|
|
216
1754
|
it.each([
|
|
217
|
-
()=>
|
|
218
|
-
()=>
|
|
219
|
-
()=>
|
|
220
|
-
()=>
|
|
221
|
-
()=>
|
|
222
|
-
])('tries to retrieves a previously stored transaction after deleted', async (
|
|
223
|
-
await store.
|
|
224
|
-
const
|
|
225
|
-
const actualTx = await store.getTxEffect(
|
|
1755
|
+
()=>getBlock(0).body.txEffects[0],
|
|
1756
|
+
()=>getBlock(9).body.txEffects[3],
|
|
1757
|
+
()=>getBlock(3).body.txEffects[1],
|
|
1758
|
+
()=>getBlock(5).body.txEffects[2],
|
|
1759
|
+
()=>getBlock(1).body.txEffects[0]
|
|
1760
|
+
])('tries to retrieves a previously stored transaction after deleted', async (getTxEffect)=>{
|
|
1761
|
+
await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
|
|
1762
|
+
const txEffect = getTxEffect();
|
|
1763
|
+
const actualTx = await store.getTxEffect(txEffect.txHash);
|
|
226
1764
|
expect(actualTx).toEqual(undefined);
|
|
227
1765
|
});
|
|
228
1766
|
it('returns undefined if tx is not found', async ()=>{
|
|
229
1767
|
await expect(store.getTxEffect(TxHash.random())).resolves.toBeUndefined();
|
|
230
1768
|
});
|
|
1769
|
+
it('does not fail if the block is unwound while requesting a tx', async ()=>{
|
|
1770
|
+
const txEffect = getBlock(1).body.txEffects[0];
|
|
1771
|
+
let done = false;
|
|
1772
|
+
void (async ()=>{
|
|
1773
|
+
while(!done){
|
|
1774
|
+
void store.getTxEffect(txEffect.txHash);
|
|
1775
|
+
await sleep(1);
|
|
1776
|
+
}
|
|
1777
|
+
})();
|
|
1778
|
+
await store.unwindCheckpoints(CheckpointNumber(publishedCheckpoints.length), publishedCheckpoints.length);
|
|
1779
|
+
done = true;
|
|
1780
|
+
expect(await store.getTxEffect(txEffect.txHash)).toEqual(undefined);
|
|
1781
|
+
});
|
|
231
1782
|
});
|
|
232
1783
|
describe('L1 to L2 Messages', ()=>{
|
|
233
|
-
const
|
|
234
|
-
const
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
await store.addL1ToL2Messages(
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
1784
|
+
const initialCheckpointNumber = CheckpointNumber(13);
|
|
1785
|
+
const checkMessages = async (msgs)=>{
|
|
1786
|
+
expect(await store.getLastL1ToL2Message()).toEqual(msgs.at(-1));
|
|
1787
|
+
expect(await toArray(store.iterateL1ToL2Messages())).toEqual(msgs);
|
|
1788
|
+
expect(await store.getTotalL1ToL2MessageCount()).toEqual(BigInt(msgs.length));
|
|
1789
|
+
};
|
|
1790
|
+
const makeInboxMessagesWithFullBlocks = (blockCount, opts = {})=>makeInboxMessages(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * blockCount, {
|
|
1791
|
+
overrideFn: (msg, i)=>{
|
|
1792
|
+
const checkpointNumber = CheckpointNumber((opts.initialCheckpointNumber ?? initialCheckpointNumber) + Math.floor(i / NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP));
|
|
1793
|
+
const index = InboxLeaf.smallestIndexForCheckpoint(checkpointNumber) + BigInt(i % NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1794
|
+
return {
|
|
1795
|
+
...msg,
|
|
1796
|
+
checkpointNumber,
|
|
1797
|
+
index
|
|
1798
|
+
};
|
|
1799
|
+
}
|
|
1800
|
+
});
|
|
1801
|
+
it('stores first message ever', async ()=>{
|
|
1802
|
+
const msg = makeInboxMessage(Buffer16.ZERO, {
|
|
1803
|
+
index: 0n,
|
|
1804
|
+
checkpointNumber: CheckpointNumber(1)
|
|
1805
|
+
});
|
|
1806
|
+
await store.addL1ToL2Messages([
|
|
1807
|
+
msg
|
|
1808
|
+
]);
|
|
1809
|
+
await checkMessages([
|
|
1810
|
+
msg
|
|
1811
|
+
]);
|
|
1812
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([
|
|
1813
|
+
msg.leaf
|
|
1814
|
+
]);
|
|
1815
|
+
});
|
|
1816
|
+
it('stores single message', async ()=>{
|
|
1817
|
+
const msg = makeInboxMessage(Buffer16.ZERO, {
|
|
1818
|
+
checkpointNumber: CheckpointNumber(2)
|
|
1819
|
+
});
|
|
1820
|
+
await store.addL1ToL2Messages([
|
|
1821
|
+
msg
|
|
1822
|
+
]);
|
|
1823
|
+
await checkMessages([
|
|
1824
|
+
msg
|
|
1825
|
+
]);
|
|
1826
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toEqual([
|
|
1827
|
+
msg.leaf
|
|
1828
|
+
]);
|
|
1829
|
+
});
|
|
1830
|
+
it('stores and returns messages across different blocks', async ()=>{
|
|
1831
|
+
const msgs = makeInboxMessages(5, {
|
|
1832
|
+
initialCheckpointNumber
|
|
1833
|
+
});
|
|
1834
|
+
await store.addL1ToL2Messages(msgs);
|
|
1835
|
+
await checkMessages(msgs);
|
|
1836
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual([
|
|
1837
|
+
msgs[2]
|
|
1838
|
+
].map((m)=>m.leaf));
|
|
1839
|
+
});
|
|
1840
|
+
it('stores the same messages again', async ()=>{
|
|
1841
|
+
const msgs = makeInboxMessages(5, {
|
|
1842
|
+
initialCheckpointNumber
|
|
1843
|
+
});
|
|
1844
|
+
await store.addL1ToL2Messages(msgs);
|
|
1845
|
+
await store.addL1ToL2Messages(msgs.slice(2));
|
|
1846
|
+
await checkMessages(msgs);
|
|
1847
|
+
});
|
|
1848
|
+
it('stores and returns messages across different blocks with gaps', async ()=>{
|
|
1849
|
+
const msgs1 = makeInboxMessages(3, {
|
|
1850
|
+
initialCheckpointNumber: CheckpointNumber(1)
|
|
1851
|
+
});
|
|
1852
|
+
const msgs2 = makeInboxMessages(3, {
|
|
1853
|
+
initialCheckpointNumber: CheckpointNumber(20),
|
|
1854
|
+
initialHash: msgs1.at(-1).rollingHash
|
|
1855
|
+
});
|
|
1856
|
+
await store.addL1ToL2Messages(msgs1);
|
|
1857
|
+
await store.addL1ToL2Messages(msgs2);
|
|
1858
|
+
await checkMessages([
|
|
1859
|
+
...msgs1,
|
|
1860
|
+
...msgs2
|
|
1861
|
+
]);
|
|
1862
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toEqual([
|
|
1863
|
+
msgs1[0].leaf
|
|
1864
|
+
]);
|
|
1865
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toEqual([]);
|
|
1866
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(20))).toEqual([
|
|
1867
|
+
msgs2[0].leaf
|
|
1868
|
+
]);
|
|
1869
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(24))).toEqual([]);
|
|
1870
|
+
});
|
|
1871
|
+
it('stores and returns messages with block numbers larger than a byte', async ()=>{
|
|
1872
|
+
const msgs = makeInboxMessages(5, {
|
|
1873
|
+
initialCheckpointNumber: CheckpointNumber(1000)
|
|
1874
|
+
});
|
|
1875
|
+
await store.addL1ToL2Messages(msgs);
|
|
1876
|
+
await checkMessages(msgs);
|
|
1877
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1002))).toEqual([
|
|
1878
|
+
msgs[2]
|
|
1879
|
+
].map((m)=>m.leaf));
|
|
1880
|
+
});
|
|
1881
|
+
it('stores and returns multiple messages per block', async ()=>{
|
|
1882
|
+
const msgs = makeInboxMessagesWithFullBlocks(4);
|
|
1883
|
+
await store.addL1ToL2Messages(msgs);
|
|
1884
|
+
await checkMessages(msgs);
|
|
1885
|
+
const blockMessages = await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 1));
|
|
1886
|
+
expect(blockMessages).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1887
|
+
expect(blockMessages).toEqual(msgs.slice(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2).map((m)=>m.leaf));
|
|
1888
|
+
});
|
|
1889
|
+
it('stores messages in multiple operations', async ()=>{
|
|
1890
|
+
const msgs = makeInboxMessages(20, {
|
|
1891
|
+
initialCheckpointNumber
|
|
1892
|
+
});
|
|
1893
|
+
await store.addL1ToL2Messages(msgs.slice(0, 10));
|
|
1894
|
+
await store.addL1ToL2Messages(msgs.slice(10, 20));
|
|
1895
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 2))).toEqual([
|
|
1896
|
+
msgs[2]
|
|
1897
|
+
].map((m)=>m.leaf));
|
|
1898
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(initialCheckpointNumber + 12))).toEqual([
|
|
1899
|
+
msgs[12]
|
|
1900
|
+
].map((m)=>m.leaf));
|
|
1901
|
+
await checkMessages(msgs);
|
|
1902
|
+
});
|
|
1903
|
+
it('iterates over messages from start index', async ()=>{
|
|
1904
|
+
const msgs = makeInboxMessages(10, {
|
|
1905
|
+
initialCheckpointNumber
|
|
1906
|
+
});
|
|
1907
|
+
await store.addL1ToL2Messages(msgs);
|
|
1908
|
+
const iterated = await toArray(store.iterateL1ToL2Messages({
|
|
1909
|
+
start: msgs[3].index
|
|
1910
|
+
}));
|
|
1911
|
+
expect(iterated).toEqual(msgs.slice(3));
|
|
1912
|
+
});
|
|
1913
|
+
it('iterates over messages in reverse', async ()=>{
|
|
1914
|
+
const msgs = makeInboxMessages(10, {
|
|
1915
|
+
initialCheckpointNumber
|
|
1916
|
+
});
|
|
1917
|
+
await store.addL1ToL2Messages(msgs);
|
|
1918
|
+
initialCheckpointNumber;
|
|
1919
|
+
const iterated = await toArray(store.iterateL1ToL2Messages({
|
|
1920
|
+
reverse: true,
|
|
1921
|
+
end: msgs[3].index
|
|
1922
|
+
}));
|
|
1923
|
+
expect(iterated).toEqual(msgs.slice(0, 4).reverse());
|
|
1924
|
+
});
|
|
1925
|
+
it('throws if messages are added out of order', async ()=>{
|
|
1926
|
+
const msgs = makeInboxMessages(5, {
|
|
1927
|
+
overrideFn: (msg, i)=>({
|
|
1928
|
+
...msg,
|
|
1929
|
+
index: BigInt(10 - i)
|
|
1930
|
+
})
|
|
1931
|
+
});
|
|
1932
|
+
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
1933
|
+
});
|
|
1934
|
+
it('throws if block number for the first message is out of order', async ()=>{
|
|
1935
|
+
const msgs = makeInboxMessages(4, {
|
|
1936
|
+
initialCheckpointNumber
|
|
1937
|
+
});
|
|
1938
|
+
msgs[2].checkpointNumber = CheckpointNumber(initialCheckpointNumber - 1);
|
|
1939
|
+
await store.addL1ToL2Messages(msgs.slice(0, 2));
|
|
1940
|
+
await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
|
|
1941
|
+
});
|
|
1942
|
+
it('throws if rolling hash is not correct', async ()=>{
|
|
1943
|
+
const msgs = makeInboxMessages(5);
|
|
1944
|
+
msgs[1].rollingHash = Buffer16.random();
|
|
1945
|
+
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
1946
|
+
});
|
|
1947
|
+
it('throws if rolling hash for first message is not correct', async ()=>{
|
|
1948
|
+
const msgs = makeInboxMessages(4);
|
|
1949
|
+
msgs[2].rollingHash = Buffer16.random();
|
|
1950
|
+
await store.addL1ToL2Messages(msgs.slice(0, CheckpointNumber(2)));
|
|
1951
|
+
await expect(store.addL1ToL2Messages(msgs.slice(2, 4))).rejects.toThrow(MessageStoreError);
|
|
1952
|
+
});
|
|
1953
|
+
it('throws if index is not in the correct range', async ()=>{
|
|
1954
|
+
const msgs = makeInboxMessages(5, {
|
|
1955
|
+
initialCheckpointNumber
|
|
1956
|
+
});
|
|
1957
|
+
msgs.at(-1).index += 100n;
|
|
1958
|
+
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
1959
|
+
});
|
|
1960
|
+
it('throws if first index in block has gaps', async ()=>{
|
|
1961
|
+
const msgs = makeInboxMessages(4, {
|
|
1962
|
+
initialCheckpointNumber
|
|
1963
|
+
});
|
|
1964
|
+
msgs[2].index++;
|
|
1965
|
+
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
1966
|
+
});
|
|
1967
|
+
it('throws if index does not follow previous one', async ()=>{
|
|
1968
|
+
const msgs = makeInboxMessages(2, {
|
|
1969
|
+
initialCheckpointNumber,
|
|
1970
|
+
overrideFn: (msg, i)=>({
|
|
1971
|
+
...msg,
|
|
1972
|
+
checkpointNumber: CheckpointNumber(2),
|
|
1973
|
+
index: BigInt(i + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2)
|
|
1974
|
+
})
|
|
1975
|
+
});
|
|
1976
|
+
msgs[1].index++;
|
|
1977
|
+
await expect(store.addL1ToL2Messages(msgs)).rejects.toThrow(MessageStoreError);
|
|
1978
|
+
});
|
|
1979
|
+
it('removes messages up to the given block number', async ()=>{
|
|
1980
|
+
const msgs = makeInboxMessagesWithFullBlocks(4, {
|
|
1981
|
+
initialCheckpointNumber: CheckpointNumber(1)
|
|
1982
|
+
});
|
|
1983
|
+
await store.addL1ToL2Messages(msgs);
|
|
1984
|
+
await checkMessages(msgs);
|
|
1985
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1986
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1987
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1988
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1989
|
+
await store.rollbackL1ToL2MessagesToCheckpoint(CheckpointNumber(2));
|
|
1990
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(1))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1991
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(2))).toHaveLength(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
|
|
1992
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(3))).toHaveLength(0);
|
|
1993
|
+
expect(await store.getL1ToL2Messages(CheckpointNumber(4))).toHaveLength(0);
|
|
1994
|
+
await checkMessages(msgs.slice(0, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP * 2));
|
|
1995
|
+
});
|
|
1996
|
+
it('removes messages starting with the given index', async ()=>{
|
|
1997
|
+
const msgs = makeInboxMessagesWithFullBlocks(4, {
|
|
1998
|
+
initialCheckpointNumber: CheckpointNumber(1)
|
|
1999
|
+
});
|
|
2000
|
+
await store.addL1ToL2Messages(msgs);
|
|
2001
|
+
await store.removeL1ToL2Messages(msgs[13].index);
|
|
2002
|
+
await checkMessages(msgs.slice(0, 13));
|
|
262
2003
|
});
|
|
263
2004
|
});
|
|
264
2005
|
describe('contractInstances', ()=>{
|
|
265
2006
|
let contractInstance;
|
|
266
2007
|
const blockNum = 10;
|
|
2008
|
+
const timestamp = 3600n;
|
|
267
2009
|
beforeEach(async ()=>{
|
|
268
2010
|
const classId = Fr.random();
|
|
269
2011
|
const randomInstance = await SerializableContractInstance.random({
|
|
@@ -276,19 +2018,106 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
276
2018
|
};
|
|
277
2019
|
await store.addContractInstances([
|
|
278
2020
|
contractInstance
|
|
279
|
-
], blockNum);
|
|
2021
|
+
], BlockNumber(blockNum));
|
|
280
2022
|
});
|
|
281
2023
|
it('returns previously stored contract instances', async ()=>{
|
|
282
|
-
await expect(store.getContractInstance(contractInstance.address)).resolves.toMatchObject(contractInstance);
|
|
2024
|
+
await expect(store.getContractInstance(contractInstance.address, timestamp)).resolves.toMatchObject(contractInstance);
|
|
283
2025
|
});
|
|
284
2026
|
it('returns undefined if contract instance is not found', async ()=>{
|
|
285
|
-
await expect(store.getContractInstance(await AztecAddress.random())).resolves.toBeUndefined();
|
|
2027
|
+
await expect(store.getContractInstance(await AztecAddress.random(), timestamp)).resolves.toBeUndefined();
|
|
286
2028
|
});
|
|
287
2029
|
it('returns undefined if previously stored contract instances was deleted', async ()=>{
|
|
288
2030
|
await store.deleteContractInstances([
|
|
289
2031
|
contractInstance
|
|
290
|
-
], blockNum);
|
|
291
|
-
await expect(store.getContractInstance(contractInstance.address)).resolves.toBeUndefined();
|
|
2032
|
+
], BlockNumber(blockNum));
|
|
2033
|
+
await expect(store.getContractInstance(contractInstance.address, timestamp)).resolves.toBeUndefined();
|
|
2034
|
+
});
|
|
2035
|
+
});
|
|
2036
|
+
describe('contractInstanceUpdates', ()=>{
|
|
2037
|
+
let contractInstance;
|
|
2038
|
+
let classId;
|
|
2039
|
+
let nextClassId;
|
|
2040
|
+
const timestampOfChange = 3600n;
|
|
2041
|
+
beforeEach(async ()=>{
|
|
2042
|
+
classId = Fr.random();
|
|
2043
|
+
nextClassId = Fr.random();
|
|
2044
|
+
const randomInstance = await SerializableContractInstance.random({
|
|
2045
|
+
currentContractClassId: classId,
|
|
2046
|
+
originalContractClassId: classId
|
|
2047
|
+
});
|
|
2048
|
+
contractInstance = {
|
|
2049
|
+
...randomInstance,
|
|
2050
|
+
address: await AztecAddress.random()
|
|
2051
|
+
};
|
|
2052
|
+
await store.addContractInstances([
|
|
2053
|
+
contractInstance
|
|
2054
|
+
], BlockNumber(1));
|
|
2055
|
+
await store.addContractInstanceUpdates([
|
|
2056
|
+
{
|
|
2057
|
+
prevContractClassId: classId,
|
|
2058
|
+
newContractClassId: nextClassId,
|
|
2059
|
+
timestampOfChange,
|
|
2060
|
+
address: contractInstance.address
|
|
2061
|
+
}
|
|
2062
|
+
], timestampOfChange - 1n);
|
|
2063
|
+
});
|
|
2064
|
+
it('gets the correct current class id for a contract not updated yet', async ()=>{
|
|
2065
|
+
const fetchedInstance = await store.getContractInstance(contractInstance.address, timestampOfChange - 1n);
|
|
2066
|
+
expect(fetchedInstance?.originalContractClassId).toEqual(classId);
|
|
2067
|
+
expect(fetchedInstance?.currentContractClassId).toEqual(classId);
|
|
2068
|
+
});
|
|
2069
|
+
it('gets the correct current class id for a contract that has just been updated', async ()=>{
|
|
2070
|
+
const fetchedInstance = await store.getContractInstance(contractInstance.address, timestampOfChange);
|
|
2071
|
+
expect(fetchedInstance?.originalContractClassId).toEqual(classId);
|
|
2072
|
+
expect(fetchedInstance?.currentContractClassId).toEqual(nextClassId);
|
|
2073
|
+
});
|
|
2074
|
+
it('gets the correct current class id for a contract that was updated in the past', async ()=>{
|
|
2075
|
+
const fetchedInstance = await store.getContractInstance(contractInstance.address, timestampOfChange + 1n);
|
|
2076
|
+
expect(fetchedInstance?.originalContractClassId).toEqual(classId);
|
|
2077
|
+
expect(fetchedInstance?.currentContractClassId).toEqual(nextClassId);
|
|
2078
|
+
});
|
|
2079
|
+
it('ignores updates for the wrong contract', async ()=>{
|
|
2080
|
+
const otherClassId = Fr.random();
|
|
2081
|
+
const randomInstance = await SerializableContractInstance.random({
|
|
2082
|
+
currentContractClassId: otherClassId,
|
|
2083
|
+
originalContractClassId: otherClassId
|
|
2084
|
+
});
|
|
2085
|
+
const otherContractInstance = {
|
|
2086
|
+
...randomInstance,
|
|
2087
|
+
address: await AztecAddress.random()
|
|
2088
|
+
};
|
|
2089
|
+
await store.addContractInstances([
|
|
2090
|
+
otherContractInstance
|
|
2091
|
+
], BlockNumber(1));
|
|
2092
|
+
const fetchedInstance = await store.getContractInstance(otherContractInstance.address, timestampOfChange + 1n);
|
|
2093
|
+
expect(fetchedInstance?.originalContractClassId).toEqual(otherClassId);
|
|
2094
|
+
expect(fetchedInstance?.currentContractClassId).toEqual(otherClassId);
|
|
2095
|
+
});
|
|
2096
|
+
it('bounds its search to the right contract if more than than one update exists', async ()=>{
|
|
2097
|
+
const otherClassId = Fr.random();
|
|
2098
|
+
const otherNextClassId = Fr.random();
|
|
2099
|
+
const randomInstance = await SerializableContractInstance.random({
|
|
2100
|
+
currentContractClassId: otherClassId,
|
|
2101
|
+
originalContractClassId: otherNextClassId
|
|
2102
|
+
});
|
|
2103
|
+
const otherContractInstance = {
|
|
2104
|
+
...randomInstance,
|
|
2105
|
+
address: await AztecAddress.random()
|
|
2106
|
+
};
|
|
2107
|
+
await store.addContractInstances([
|
|
2108
|
+
otherContractInstance
|
|
2109
|
+
], BlockNumber(1));
|
|
2110
|
+
await store.addContractInstanceUpdates([
|
|
2111
|
+
{
|
|
2112
|
+
prevContractClassId: otherClassId,
|
|
2113
|
+
newContractClassId: otherNextClassId,
|
|
2114
|
+
timestampOfChange,
|
|
2115
|
+
address: otherContractInstance.address
|
|
2116
|
+
}
|
|
2117
|
+
], timestampOfChange - 1n);
|
|
2118
|
+
const fetchedInstance = await store.getContractInstance(contractInstance.address, timestampOfChange + 1n);
|
|
2119
|
+
expect(fetchedInstance?.originalContractClassId).toEqual(classId);
|
|
2120
|
+
expect(fetchedInstance?.currentContractClassId).toEqual(nextClassId);
|
|
292
2121
|
});
|
|
293
2122
|
});
|
|
294
2123
|
describe('contractClasses', ()=>{
|
|
@@ -300,7 +2129,7 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
300
2129
|
contractClass
|
|
301
2130
|
], [
|
|
302
2131
|
await computePublicBytecodeCommitment(contractClass.packedBytecode)
|
|
303
|
-
], blockNum);
|
|
2132
|
+
], BlockNumber(blockNum));
|
|
304
2133
|
});
|
|
305
2134
|
it('returns previously stored contract class', async ()=>{
|
|
306
2135
|
await expect(store.getContractClass(contractClass.id)).resolves.toMatchObject(contractClass);
|
|
@@ -308,7 +2137,7 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
308
2137
|
it('returns undefined if the initial deployed contract class was deleted', async ()=>{
|
|
309
2138
|
await store.deleteContractClasses([
|
|
310
2139
|
contractClass
|
|
311
|
-
], blockNum);
|
|
2140
|
+
], BlockNumber(blockNum));
|
|
312
2141
|
await expect(store.getContractClass(contractClass.id)).resolves.toBeUndefined();
|
|
313
2142
|
});
|
|
314
2143
|
it('returns contract class if later "deployment" class was deleted', async ()=>{
|
|
@@ -316,10 +2145,10 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
316
2145
|
contractClass
|
|
317
2146
|
], [
|
|
318
2147
|
await computePublicBytecodeCommitment(contractClass.packedBytecode)
|
|
319
|
-
], blockNum + 1);
|
|
2148
|
+
], BlockNumber(blockNum + 1));
|
|
320
2149
|
await store.deleteContractClasses([
|
|
321
2150
|
contractClass
|
|
322
|
-
], blockNum + 1);
|
|
2151
|
+
], BlockNumber(blockNum + 1));
|
|
323
2152
|
await expect(store.getContractClass(contractClass.id)).resolves.toMatchObject(contractClass);
|
|
324
2153
|
});
|
|
325
2154
|
it('returns undefined if contract class is not found', async ()=>{
|
|
@@ -338,121 +2167,218 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
338
2167
|
const stored = await store.getContractClass(contractClass.id);
|
|
339
2168
|
expect(stored?.privateFunctions).toEqual(fns);
|
|
340
2169
|
});
|
|
341
|
-
it('adds new
|
|
342
|
-
const fns = times(3,
|
|
2170
|
+
it('adds new utility functions', async ()=>{
|
|
2171
|
+
const fns = times(3, makeUtilityFunctionWithMembershipProof);
|
|
343
2172
|
await store.addFunctions(contractClass.id, [], fns);
|
|
344
2173
|
const stored = await store.getContractClass(contractClass.id);
|
|
345
|
-
expect(stored?.
|
|
2174
|
+
expect(stored?.utilityFunctions).toEqual(fns);
|
|
346
2175
|
});
|
|
347
|
-
it('does not duplicate
|
|
348
|
-
const fns = times(3,
|
|
2176
|
+
it('does not duplicate utility functions', async ()=>{
|
|
2177
|
+
const fns = times(3, makeUtilityFunctionWithMembershipProof);
|
|
349
2178
|
await store.addFunctions(contractClass.id, [], fns.slice(0, 1));
|
|
350
2179
|
await store.addFunctions(contractClass.id, [], fns);
|
|
351
2180
|
const stored = await store.getContractClass(contractClass.id);
|
|
352
|
-
expect(stored?.
|
|
2181
|
+
expect(stored?.utilityFunctions).toEqual(fns);
|
|
353
2182
|
});
|
|
354
2183
|
});
|
|
355
|
-
describe('
|
|
356
|
-
const
|
|
2184
|
+
describe('getPrivateLogsByTags', ()=>{
|
|
2185
|
+
const numBlocksForLogs = 3;
|
|
357
2186
|
const numTxsPerBlock = 4;
|
|
358
2187
|
const numPrivateLogsPerTx = 3;
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
buf.writeUint16BE(publicValuesLen, 27);
|
|
367
|
-
buf.writeUint16BE(privateValuesLen, 30);
|
|
368
|
-
return Fr.fromBuffer(buf);
|
|
369
|
-
};
|
|
370
|
-
const makePrivateLog = (tag)=>PrivateLog.fromFields([
|
|
371
|
-
tag,
|
|
372
|
-
...times(PRIVATE_LOG_SIZE_IN_FIELDS - 1, (i)=>new Fr(tag.toNumber() + i))
|
|
373
|
-
]);
|
|
374
|
-
// The tag lives in field 1, not 0, of a public log
|
|
375
|
-
// See extractTaggedLogsFromPublic and noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr -> emit_log
|
|
376
|
-
const makePublicLog = (tag)=>PublicLog.fromFields([
|
|
377
|
-
AztecAddress.fromNumber(1).toField(),
|
|
378
|
-
makeLengthsField(2, PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 3),
|
|
379
|
-
tag,
|
|
380
|
-
...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, (i)=>new Fr(tag.toNumber() + i))
|
|
381
|
-
]);
|
|
2188
|
+
let logsCheckpoints;
|
|
2189
|
+
const makePrivateLogTag = (blockNumber, txIndex, logIndex)=>new SiloedTag(blockNumber === 1 && txIndex === 0 && logIndex === 0 ? Fr.ZERO // Shared tag
|
|
2190
|
+
: new Fr(blockNumber * 100 + txIndex * 10 + logIndex));
|
|
2191
|
+
const makePrivateLog = (tag)=>PrivateLog.from({
|
|
2192
|
+
fields: makeTuple(PRIVATE_LOG_SIZE_IN_FIELDS, (i)=>!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i))),
|
|
2193
|
+
emittedLength: PRIVATE_LOG_SIZE_IN_FIELDS
|
|
2194
|
+
});
|
|
382
2195
|
const mockPrivateLogs = (blockNumber, txIndex)=>{
|
|
383
2196
|
return times(numPrivateLogsPerTx, (logIndex)=>{
|
|
384
|
-
const tag =
|
|
2197
|
+
const tag = makePrivateLogTag(blockNumber, txIndex, logIndex);
|
|
385
2198
|
return makePrivateLog(tag);
|
|
386
2199
|
});
|
|
387
2200
|
};
|
|
388
|
-
const
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
2201
|
+
const mockCheckpointWithLogs = async (blockNumber, previousArchive)=>{
|
|
2202
|
+
const block = await L2BlockNew.random(BlockNumber(blockNumber), {
|
|
2203
|
+
checkpointNumber: CheckpointNumber(blockNumber),
|
|
2204
|
+
indexWithinCheckpoint: 0,
|
|
2205
|
+
state: makeStateForBlock(blockNumber, numTxsPerBlock),
|
|
2206
|
+
...previousArchive ? {
|
|
2207
|
+
lastArchive: previousArchive
|
|
2208
|
+
} : {}
|
|
392
2209
|
});
|
|
393
|
-
|
|
394
|
-
const mockBlockWithLogs = async (blockNumber)=>{
|
|
395
|
-
const block = await L2Block.random(blockNumber);
|
|
396
|
-
block.header.globalVariables.blockNumber = new Fr(blockNumber);
|
|
2210
|
+
block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
|
|
397
2211
|
block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex)=>{
|
|
398
2212
|
const txEffect = await TxEffect.random();
|
|
399
2213
|
txEffect.privateLogs = mockPrivateLogs(blockNumber, txIndex);
|
|
400
|
-
txEffect.publicLogs =
|
|
2214
|
+
txEffect.publicLogs = []; // No public logs needed for private log tests
|
|
401
2215
|
return txEffect;
|
|
402
2216
|
});
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
blockHash: `0x${blockNumber}`,
|
|
408
|
-
timestamp: BigInt(blockNumber)
|
|
409
|
-
}
|
|
410
|
-
};
|
|
2217
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
2218
|
+
block
|
|
2219
|
+
], CheckpointNumber(blockNumber));
|
|
2220
|
+
return makePublishedCheckpoint(checkpoint, blockNumber);
|
|
411
2221
|
};
|
|
412
2222
|
beforeEach(async ()=>{
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
2223
|
+
// Create checkpoints sequentially to chain archive roots
|
|
2224
|
+
logsCheckpoints = [];
|
|
2225
|
+
for(let i = 0; i < numBlocksForLogs; i++){
|
|
2226
|
+
const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
2227
|
+
logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
|
|
2228
|
+
}
|
|
2229
|
+
await store.addCheckpoints(logsCheckpoints);
|
|
2230
|
+
await store.addLogs(logsCheckpoints.flatMap((p)=>p.checkpoint.blocks));
|
|
416
2231
|
});
|
|
417
2232
|
it('is possible to batch request private logs via tags', async ()=>{
|
|
418
2233
|
const tags = [
|
|
419
|
-
|
|
420
|
-
|
|
2234
|
+
makePrivateLogTag(2, 1, 2),
|
|
2235
|
+
makePrivateLogTag(1, 2, 0)
|
|
421
2236
|
];
|
|
422
|
-
const logsByTags = await store.
|
|
2237
|
+
const logsByTags = await store.getPrivateLogsByTags(tags);
|
|
423
2238
|
expect(logsByTags).toEqual([
|
|
424
2239
|
[
|
|
425
2240
|
expect.objectContaining({
|
|
426
|
-
blockNumber:
|
|
427
|
-
|
|
2241
|
+
blockNumber: 2,
|
|
2242
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[2 - 1].checkpoint.blocks[0].header.hash()),
|
|
2243
|
+
log: makePrivateLog(tags[0]),
|
|
428
2244
|
isFromPublic: false
|
|
429
2245
|
})
|
|
430
2246
|
],
|
|
431
2247
|
[
|
|
432
2248
|
expect.objectContaining({
|
|
433
|
-
blockNumber:
|
|
434
|
-
|
|
2249
|
+
blockNumber: 1,
|
|
2250
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()),
|
|
2251
|
+
log: makePrivateLog(tags[1]),
|
|
435
2252
|
isFromPublic: false
|
|
436
2253
|
})
|
|
437
2254
|
]
|
|
438
2255
|
]);
|
|
439
2256
|
});
|
|
440
|
-
it('is possible to batch request
|
|
441
|
-
// Tag(0, 0, 0) is shared with the first private log and the first public log.
|
|
2257
|
+
it('is possible to batch request logs that have the same tag but different content', async ()=>{
|
|
442
2258
|
const tags = [
|
|
443
|
-
|
|
2259
|
+
makePrivateLogTag(1, 2, 1)
|
|
444
2260
|
];
|
|
445
|
-
|
|
2261
|
+
// Create a checkpoint containing logs that have the same tag as the checkpoints before.
|
|
2262
|
+
// Chain from the last checkpoint's archive
|
|
2263
|
+
const newBlockNumber = numBlocksForLogs + 1;
|
|
2264
|
+
const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
|
|
2265
|
+
const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
|
|
2266
|
+
const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1];
|
|
2267
|
+
newLog.fields[0] = tags[0].value;
|
|
2268
|
+
newCheckpoint.checkpoint.blocks[0].body.txEffects[1].privateLogs[1] = newLog;
|
|
2269
|
+
await store.addCheckpoints([
|
|
2270
|
+
newCheckpoint
|
|
2271
|
+
]);
|
|
2272
|
+
await store.addLogs([
|
|
2273
|
+
newCheckpoint.checkpoint.blocks[0]
|
|
2274
|
+
]);
|
|
2275
|
+
const logsByTags = await store.getPrivateLogsByTags(tags);
|
|
446
2276
|
expect(logsByTags).toEqual([
|
|
447
2277
|
[
|
|
448
2278
|
expect.objectContaining({
|
|
449
|
-
blockNumber:
|
|
450
|
-
|
|
2279
|
+
blockNumber: 1,
|
|
2280
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()),
|
|
2281
|
+
log: makePrivateLog(tags[0]),
|
|
451
2282
|
isFromPublic: false
|
|
452
2283
|
}),
|
|
453
2284
|
expect.objectContaining({
|
|
454
|
-
blockNumber:
|
|
455
|
-
|
|
2285
|
+
blockNumber: newBlockNumber,
|
|
2286
|
+
blockHash: L2BlockHash.fromField(await newCheckpoint.checkpoint.blocks[0].header.hash()),
|
|
2287
|
+
log: newLog,
|
|
2288
|
+
isFromPublic: false
|
|
2289
|
+
})
|
|
2290
|
+
]
|
|
2291
|
+
]);
|
|
2292
|
+
});
|
|
2293
|
+
it('is possible to request logs for non-existing tags and determine their position', async ()=>{
|
|
2294
|
+
const tags = [
|
|
2295
|
+
makePrivateLogTag(99, 88, 77),
|
|
2296
|
+
makePrivateLogTag(1, 1, 1)
|
|
2297
|
+
];
|
|
2298
|
+
const logsByTags = await store.getPrivateLogsByTags(tags);
|
|
2299
|
+
expect(logsByTags).toEqual([
|
|
2300
|
+
[],
|
|
2301
|
+
[
|
|
2302
|
+
expect.objectContaining({
|
|
2303
|
+
blockNumber: 1,
|
|
2304
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()),
|
|
2305
|
+
log: makePrivateLog(tags[1]),
|
|
2306
|
+
isFromPublic: false
|
|
2307
|
+
})
|
|
2308
|
+
]
|
|
2309
|
+
]);
|
|
2310
|
+
});
|
|
2311
|
+
});
|
|
2312
|
+
describe('getPublicLogsByTagsFromContract', ()=>{
|
|
2313
|
+
const numBlocksForLogs = 3;
|
|
2314
|
+
const numTxsPerBlock = 4;
|
|
2315
|
+
const numPublicLogsPerTx = 2;
|
|
2316
|
+
const contractAddress = AztecAddress.fromNumber(543254);
|
|
2317
|
+
let logsCheckpoints;
|
|
2318
|
+
const makePublicLogTag = (blockNumber, txIndex, logIndex)=>new Tag(blockNumber === 1 && txIndex === 0 && logIndex === 0 ? Fr.ZERO // Shared tag
|
|
2319
|
+
: new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * 123));
|
|
2320
|
+
const makePublicLog = (tag)=>PublicLog.from({
|
|
2321
|
+
contractAddress: contractAddress,
|
|
2322
|
+
// Arbitrary length
|
|
2323
|
+
fields: new Array(10).fill(null).map((_, i)=>!i ? tag.value : new Fr(tag.value.toBigInt() + BigInt(i)))
|
|
2324
|
+
});
|
|
2325
|
+
const mockPublicLogs = (blockNumber, txIndex)=>{
|
|
2326
|
+
return times(numPublicLogsPerTx, (logIndex)=>{
|
|
2327
|
+
const tag = makePublicLogTag(blockNumber, txIndex, logIndex);
|
|
2328
|
+
return makePublicLog(tag);
|
|
2329
|
+
});
|
|
2330
|
+
};
|
|
2331
|
+
const mockCheckpointWithLogs = async (blockNumber, previousArchive)=>{
|
|
2332
|
+
const block = await L2BlockNew.random(BlockNumber(blockNumber), {
|
|
2333
|
+
checkpointNumber: CheckpointNumber(blockNumber),
|
|
2334
|
+
indexWithinCheckpoint: 0,
|
|
2335
|
+
state: makeStateForBlock(blockNumber, numTxsPerBlock),
|
|
2336
|
+
...previousArchive ? {
|
|
2337
|
+
lastArchive: previousArchive
|
|
2338
|
+
} : {}
|
|
2339
|
+
});
|
|
2340
|
+
block.header.globalVariables.blockNumber = BlockNumber(blockNumber);
|
|
2341
|
+
block.body.txEffects = await timesParallel(numTxsPerBlock, async (txIndex)=>{
|
|
2342
|
+
const txEffect = await TxEffect.random();
|
|
2343
|
+
txEffect.privateLogs = []; // No private logs needed for public log tests
|
|
2344
|
+
txEffect.publicLogs = mockPublicLogs(blockNumber, txIndex);
|
|
2345
|
+
return txEffect;
|
|
2346
|
+
});
|
|
2347
|
+
const checkpoint = new Checkpoint(AppendOnlyTreeSnapshot.random(), CheckpointHeader.random(), [
|
|
2348
|
+
block
|
|
2349
|
+
], CheckpointNumber(blockNumber));
|
|
2350
|
+
return makePublishedCheckpoint(checkpoint, blockNumber);
|
|
2351
|
+
};
|
|
2352
|
+
beforeEach(async ()=>{
|
|
2353
|
+
// Create checkpoints sequentially to chain archive roots
|
|
2354
|
+
logsCheckpoints = [];
|
|
2355
|
+
for(let i = 0; i < numBlocksForLogs; i++){
|
|
2356
|
+
const previousArchive = i > 0 ? logsCheckpoints[i - 1].checkpoint.blocks[0].archive : undefined;
|
|
2357
|
+
logsCheckpoints.push(await mockCheckpointWithLogs(i + 1, previousArchive));
|
|
2358
|
+
}
|
|
2359
|
+
await store.addCheckpoints(logsCheckpoints);
|
|
2360
|
+
await store.addLogs(logsCheckpoints.flatMap((p)=>p.checkpoint.blocks));
|
|
2361
|
+
});
|
|
2362
|
+
it('is possible to batch request public logs via tags', async ()=>{
|
|
2363
|
+
const tags = [
|
|
2364
|
+
makePublicLogTag(2, 1, 1),
|
|
2365
|
+
makePublicLogTag(1, 2, 0)
|
|
2366
|
+
];
|
|
2367
|
+
const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
2368
|
+
expect(logsByTags).toEqual([
|
|
2369
|
+
[
|
|
2370
|
+
expect.objectContaining({
|
|
2371
|
+
blockNumber: 2,
|
|
2372
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[2 - 1].checkpoint.blocks[0].header.hash()),
|
|
2373
|
+
log: makePublicLog(tags[0]),
|
|
2374
|
+
isFromPublic: true
|
|
2375
|
+
})
|
|
2376
|
+
],
|
|
2377
|
+
[
|
|
2378
|
+
expect.objectContaining({
|
|
2379
|
+
blockNumber: 1,
|
|
2380
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()),
|
|
2381
|
+
log: makePublicLog(tags[1]),
|
|
456
2382
|
isFromPublic: true
|
|
457
2383
|
})
|
|
458
2384
|
]
|
|
@@ -460,114 +2386,83 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
460
2386
|
});
|
|
461
2387
|
it('is possible to batch request logs that have the same tag but different content', async ()=>{
|
|
462
2388
|
const tags = [
|
|
463
|
-
|
|
2389
|
+
makePublicLogTag(1, 2, 1)
|
|
464
2390
|
];
|
|
465
|
-
// Create a
|
|
466
|
-
|
|
467
|
-
const
|
|
468
|
-
const
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
2391
|
+
// Create a checkpoint containing logs that have the same tag as the checkpoints before.
|
|
2392
|
+
// Chain from the last checkpoint's archive
|
|
2393
|
+
const newBlockNumber = numBlocksForLogs + 1;
|
|
2394
|
+
const previousArchive = logsCheckpoints[logsCheckpoints.length - 1].checkpoint.blocks[0].archive;
|
|
2395
|
+
const newCheckpoint = await mockCheckpointWithLogs(newBlockNumber, previousArchive);
|
|
2396
|
+
const newLog = newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1];
|
|
2397
|
+
newLog.fields[0] = tags[0].value;
|
|
2398
|
+
newCheckpoint.checkpoint.blocks[0].body.txEffects[1].publicLogs[1] = newLog;
|
|
2399
|
+
await store.addCheckpoints([
|
|
2400
|
+
newCheckpoint
|
|
473
2401
|
]);
|
|
474
2402
|
await store.addLogs([
|
|
475
|
-
|
|
2403
|
+
newCheckpoint.checkpoint.blocks[0]
|
|
476
2404
|
]);
|
|
477
|
-
const logsByTags = await store.
|
|
2405
|
+
const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
478
2406
|
expect(logsByTags).toEqual([
|
|
479
2407
|
[
|
|
480
2408
|
expect.objectContaining({
|
|
481
2409
|
blockNumber: 1,
|
|
482
|
-
|
|
483
|
-
|
|
2410
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()),
|
|
2411
|
+
log: makePublicLog(tags[0]),
|
|
2412
|
+
isFromPublic: true
|
|
484
2413
|
}),
|
|
485
2414
|
expect.objectContaining({
|
|
486
2415
|
blockNumber: newBlockNumber,
|
|
487
|
-
|
|
488
|
-
|
|
2416
|
+
blockHash: L2BlockHash.fromField(await newCheckpoint.checkpoint.blocks[0].header.hash()),
|
|
2417
|
+
log: newLog,
|
|
2418
|
+
isFromPublic: true
|
|
489
2419
|
})
|
|
490
2420
|
]
|
|
491
2421
|
]);
|
|
492
2422
|
});
|
|
493
2423
|
it('is possible to request logs for non-existing tags and determine their position', async ()=>{
|
|
494
2424
|
const tags = [
|
|
495
|
-
|
|
496
|
-
|
|
2425
|
+
makePublicLogTag(99, 88, 77),
|
|
2426
|
+
makePublicLogTag(1, 1, 0)
|
|
497
2427
|
];
|
|
498
|
-
const logsByTags = await store.
|
|
2428
|
+
const logsByTags = await store.getPublicLogsByTagsFromContract(contractAddress, tags);
|
|
499
2429
|
expect(logsByTags).toEqual([
|
|
500
2430
|
[],
|
|
501
2431
|
[
|
|
502
2432
|
expect.objectContaining({
|
|
503
2433
|
blockNumber: 1,
|
|
504
|
-
|
|
505
|
-
|
|
2434
|
+
blockHash: L2BlockHash.fromField(await logsCheckpoints[1 - 1].checkpoint.blocks[0].header.hash()),
|
|
2435
|
+
log: makePublicLog(tags[1]),
|
|
2436
|
+
isFromPublic: true
|
|
506
2437
|
})
|
|
507
2438
|
]
|
|
508
2439
|
]);
|
|
509
2440
|
});
|
|
510
|
-
it('is not possible to add public logs by tag if they are invalid', async ()=>{
|
|
511
|
-
const tag = makeTag(99, 88, 77);
|
|
512
|
-
const invalidLogs = [
|
|
513
|
-
PublicLog.fromFields([
|
|
514
|
-
AztecAddress.fromNumber(1).toField(),
|
|
515
|
-
makeLengthsField(2, 3),
|
|
516
|
-
tag,
|
|
517
|
-
...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, (i)=>new Fr(tag.toNumber() + i))
|
|
518
|
-
]),
|
|
519
|
-
PublicLog.fromFields([
|
|
520
|
-
AztecAddress.fromNumber(1).toField(),
|
|
521
|
-
makeLengthsField(2, PUBLIC_LOG_DATA_SIZE_IN_FIELDS),
|
|
522
|
-
tag,
|
|
523
|
-
...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, (i)=>new Fr(tag.toNumber() + i))
|
|
524
|
-
])
|
|
525
|
-
];
|
|
526
|
-
// Create a block containing these invalid logs
|
|
527
|
-
const newBlockNumber = numBlocks;
|
|
528
|
-
const newBlock = await mockBlockWithLogs(newBlockNumber);
|
|
529
|
-
newBlock.data.body.txEffects[0].publicLogs = invalidLogs;
|
|
530
|
-
await store.addBlocks([
|
|
531
|
-
newBlock
|
|
532
|
-
]);
|
|
533
|
-
await store.addLogs([
|
|
534
|
-
newBlock.data
|
|
535
|
-
]);
|
|
536
|
-
const logsByTags = await store.getLogsByTags([
|
|
537
|
-
tag
|
|
538
|
-
]);
|
|
539
|
-
// Neither of the logs should have been added:
|
|
540
|
-
expect(logsByTags).toEqual([
|
|
541
|
-
[]
|
|
542
|
-
]);
|
|
543
|
-
});
|
|
544
2441
|
});
|
|
545
2442
|
describe('getPublicLogs', ()=>{
|
|
546
|
-
const
|
|
547
|
-
|
|
548
|
-
const
|
|
549
|
-
|
|
550
|
-
|
|
2443
|
+
const numBlocksForPublicLogs = 10;
|
|
2444
|
+
// Helper to get total public logs per tx from a block
|
|
2445
|
+
const getPublicLogsPerTx = (block, txIndex)=>block.body.txEffects[txIndex].publicLogs.length;
|
|
2446
|
+
// Helper to get number of txs in a block
|
|
2447
|
+
const getTxsPerBlock = (block)=>block.body.txEffects.length;
|
|
551
2448
|
beforeEach(async ()=>{
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
}));
|
|
560
|
-
await store.addBlocks(blocks);
|
|
561
|
-
await store.addLogs(blocks.map((b)=>b.data));
|
|
2449
|
+
// Use the outer publishedCheckpoints for log tests
|
|
2450
|
+
for(let i = 0; i < numBlocksForPublicLogs; i++){
|
|
2451
|
+
await store.addCheckpoints([
|
|
2452
|
+
publishedCheckpoints[i]
|
|
2453
|
+
]);
|
|
2454
|
+
await store.addLogs(publishedCheckpoints[i].checkpoint.blocks);
|
|
2455
|
+
}
|
|
562
2456
|
});
|
|
563
2457
|
it('no logs returned if deleted ("txHash" filter param is respected variant)', async ()=>{
|
|
564
2458
|
// get random tx
|
|
565
|
-
const targetBlockIndex = randomInt(
|
|
566
|
-
const
|
|
567
|
-
const
|
|
2459
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2460
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2461
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2462
|
+
const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;
|
|
568
2463
|
await Promise.all([
|
|
569
|
-
store.
|
|
570
|
-
store.deleteLogs(
|
|
2464
|
+
store.unwindCheckpoints(CheckpointNumber(numBlocksForPublicLogs), numBlocksForPublicLogs),
|
|
2465
|
+
store.deleteLogs(publishedCheckpoints.slice(0, numBlocksForPublicLogs).flatMap((b)=>b.checkpoint.blocks))
|
|
571
2466
|
]);
|
|
572
2467
|
const response = await store.getPublicLogs({
|
|
573
2468
|
txHash: targetTxHash
|
|
@@ -578,15 +2473,16 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
578
2473
|
});
|
|
579
2474
|
it('"txHash" filter param is respected', async ()=>{
|
|
580
2475
|
// get random tx
|
|
581
|
-
const targetBlockIndex = randomInt(
|
|
582
|
-
const
|
|
583
|
-
const
|
|
2476
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2477
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2478
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2479
|
+
const targetTxHash = targetBlock.body.txEffects[targetTxIndex].txHash;
|
|
584
2480
|
const response = await store.getPublicLogs({
|
|
585
2481
|
txHash: targetTxHash
|
|
586
2482
|
});
|
|
587
2483
|
const logs = response.logs;
|
|
588
2484
|
expect(response.maxLogsHit).toBeFalsy();
|
|
589
|
-
const expectedNumLogs =
|
|
2485
|
+
const expectedNumLogs = getPublicLogsPerTx(targetBlock, targetTxIndex);
|
|
590
2486
|
expect(logs.length).toEqual(expectedNumLogs);
|
|
591
2487
|
const targeBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM;
|
|
592
2488
|
for (const log of logs){
|
|
@@ -594,6 +2490,16 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
594
2490
|
expect(log.id.txIndex).toEqual(targetTxIndex);
|
|
595
2491
|
}
|
|
596
2492
|
});
|
|
2493
|
+
it('returns block hash on public log ids', async ()=>{
|
|
2494
|
+
const targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
|
|
2495
|
+
const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2496
|
+
const logs = (await store.getPublicLogs({
|
|
2497
|
+
fromBlock: targetBlock.number,
|
|
2498
|
+
toBlock: targetBlock.number + 1
|
|
2499
|
+
})).logs;
|
|
2500
|
+
expect(logs.length).toBeGreaterThan(0);
|
|
2501
|
+
expect(logs.every((log)=>log.id.blockHash.equals(expectedBlockHash))).toBe(true);
|
|
2502
|
+
});
|
|
597
2503
|
it('"fromBlock" and "toBlock" filter params are respected', async ()=>{
|
|
598
2504
|
// Set "fromBlock" and "toBlock"
|
|
599
2505
|
const fromBlock = 3;
|
|
@@ -604,7 +2510,12 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
604
2510
|
});
|
|
605
2511
|
const logs = response.logs;
|
|
606
2512
|
expect(response.maxLogsHit).toBeFalsy();
|
|
607
|
-
|
|
2513
|
+
// Compute expected logs from the blocks in range
|
|
2514
|
+
let expectedNumLogs = 0;
|
|
2515
|
+
for(let i = fromBlock - 1; i < toBlock - 1; i++){
|
|
2516
|
+
const block = publishedCheckpoints[i].checkpoint.blocks[0];
|
|
2517
|
+
expectedNumLogs += block.body.txEffects.reduce((sum, tx)=>sum + tx.publicLogs.length, 0);
|
|
2518
|
+
}
|
|
608
2519
|
expect(logs.length).toEqual(expectedNumLogs);
|
|
609
2520
|
for (const log of logs){
|
|
610
2521
|
const blockNumber = log.id.blockNumber;
|
|
@@ -614,10 +2525,11 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
614
2525
|
});
|
|
615
2526
|
it('"contractAddress" filter param is respected', async ()=>{
|
|
616
2527
|
// Get a random contract address from the logs
|
|
617
|
-
const targetBlockIndex = randomInt(
|
|
618
|
-
const
|
|
619
|
-
const
|
|
620
|
-
const
|
|
2528
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2529
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2530
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2531
|
+
const targetLogIndex = randomInt(getPublicLogsPerTx(targetBlock, targetTxIndex));
|
|
2532
|
+
const targetContractAddress = targetBlock.body.txEffects[targetTxIndex].publicLogs[targetLogIndex].contractAddress;
|
|
621
2533
|
const response = await store.getPublicLogs({
|
|
622
2534
|
contractAddress: targetContractAddress
|
|
623
2535
|
});
|
|
@@ -628,10 +2540,13 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
628
2540
|
});
|
|
629
2541
|
it('"afterLog" filter param is respected', async ()=>{
|
|
630
2542
|
// Get a random log as reference
|
|
631
|
-
const targetBlockIndex = randomInt(
|
|
632
|
-
const
|
|
633
|
-
const
|
|
634
|
-
const
|
|
2543
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2544
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2545
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2546
|
+
const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
|
|
2547
|
+
const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
|
|
2548
|
+
const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2549
|
+
const afterLog = new LogId(BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM), targetBlockHash, targetTxIndex, targetLogIndex);
|
|
635
2550
|
const response = await store.getPublicLogs({
|
|
636
2551
|
afterLog
|
|
637
2552
|
});
|
|
@@ -651,7 +2566,7 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
651
2566
|
it('"txHash" filter param is ignored when "afterLog" is set', async ()=>{
|
|
652
2567
|
// Get random txHash
|
|
653
2568
|
const txHash = TxHash.random();
|
|
654
|
-
const afterLog = new LogId(1, 0, 0);
|
|
2569
|
+
const afterLog = new LogId(BlockNumber(1), L2BlockHash.random(), 0, 0);
|
|
655
2570
|
const response = await store.getPublicLogs({
|
|
656
2571
|
txHash,
|
|
657
2572
|
afterLog
|
|
@@ -667,7 +2582,7 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
667
2582
|
// "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive
|
|
668
2583
|
logs = (await store.getPublicLogs({
|
|
669
2584
|
fromBlock: -10,
|
|
670
|
-
toBlock: 5
|
|
2585
|
+
toBlock: BlockNumber(5)
|
|
671
2586
|
})).logs;
|
|
672
2587
|
let blockNumbers = new Set(logs.map((log)=>log.id.blockNumber));
|
|
673
2588
|
expect(blockNumbers).toEqual(new Set([
|
|
@@ -678,13 +2593,13 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
678
2593
|
]));
|
|
679
2594
|
// "toBlock" should be exclusive
|
|
680
2595
|
logs = (await store.getPublicLogs({
|
|
681
|
-
fromBlock: 1,
|
|
682
|
-
toBlock: 1
|
|
2596
|
+
fromBlock: BlockNumber(1),
|
|
2597
|
+
toBlock: BlockNumber(1)
|
|
683
2598
|
})).logs;
|
|
684
2599
|
expect(logs.length).toBe(0);
|
|
685
2600
|
logs = (await store.getPublicLogs({
|
|
686
|
-
fromBlock: 10,
|
|
687
|
-
toBlock: 5
|
|
2601
|
+
fromBlock: BlockNumber(10),
|
|
2602
|
+
toBlock: BlockNumber(5)
|
|
688
2603
|
})).logs;
|
|
689
2604
|
expect(logs.length).toBe(0);
|
|
690
2605
|
// both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned
|
|
@@ -693,35 +2608,38 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
693
2608
|
toBlock: +100
|
|
694
2609
|
})).logs;
|
|
695
2610
|
blockNumbers = new Set(logs.map((log)=>log.id.blockNumber));
|
|
696
|
-
expect(blockNumbers.size).toBe(
|
|
2611
|
+
expect(blockNumbers.size).toBe(numBlocksForPublicLogs);
|
|
697
2612
|
// intersecting with "afterLog" works
|
|
698
2613
|
logs = (await store.getPublicLogs({
|
|
699
|
-
fromBlock: 2,
|
|
700
|
-
toBlock: 5,
|
|
701
|
-
afterLog: new LogId(4, 0, 0)
|
|
2614
|
+
fromBlock: BlockNumber(2),
|
|
2615
|
+
toBlock: BlockNumber(5),
|
|
2616
|
+
afterLog: new LogId(BlockNumber(4), L2BlockHash.random(), 0, 0)
|
|
702
2617
|
})).logs;
|
|
703
2618
|
blockNumbers = new Set(logs.map((log)=>log.id.blockNumber));
|
|
704
2619
|
expect(blockNumbers).toEqual(new Set([
|
|
705
2620
|
4
|
|
706
2621
|
]));
|
|
707
2622
|
logs = (await store.getPublicLogs({
|
|
708
|
-
toBlock: 5,
|
|
709
|
-
afterLog: new LogId(5, 1, 0)
|
|
2623
|
+
toBlock: BlockNumber(5),
|
|
2624
|
+
afterLog: new LogId(BlockNumber(5), L2BlockHash.random(), 1, 0)
|
|
710
2625
|
})).logs;
|
|
711
2626
|
expect(logs.length).toBe(0);
|
|
712
2627
|
logs = (await store.getPublicLogs({
|
|
713
|
-
fromBlock: 2,
|
|
714
|
-
toBlock: 5,
|
|
715
|
-
afterLog: new LogId(100, 0, 0)
|
|
2628
|
+
fromBlock: BlockNumber(2),
|
|
2629
|
+
toBlock: BlockNumber(5),
|
|
2630
|
+
afterLog: new LogId(BlockNumber(100), L2BlockHash.random(), 0, 0)
|
|
716
2631
|
})).logs;
|
|
717
2632
|
expect(logs.length).toBe(0);
|
|
718
2633
|
});
|
|
719
2634
|
it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async ()=>{
|
|
720
2635
|
// Get a random log as reference
|
|
721
|
-
const targetBlockIndex = randomInt(
|
|
722
|
-
const
|
|
723
|
-
const
|
|
724
|
-
const
|
|
2636
|
+
const targetBlockIndex = randomInt(numBlocksForPublicLogs);
|
|
2637
|
+
const targetBlock = publishedCheckpoints[targetBlockIndex].checkpoint.blocks[0];
|
|
2638
|
+
const targetTxIndex = randomInt(getTxsPerBlock(targetBlock));
|
|
2639
|
+
const numLogsInTx = targetBlock.body.txEffects[targetTxIndex].publicLogs.length;
|
|
2640
|
+
const targetLogIndex = numLogsInTx > 0 ? randomInt(numLogsInTx) : 0;
|
|
2641
|
+
const targetBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2642
|
+
const afterLog = new LogId(BlockNumber(targetBlockIndex + INITIAL_L2_BLOCK_NUM), targetBlockHash, targetTxIndex, targetLogIndex);
|
|
725
2643
|
const response = await store.getPublicLogs({
|
|
726
2644
|
afterLog,
|
|
727
2645
|
fromBlock: afterLog.blockNumber
|
|
@@ -740,54 +2658,132 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
|
|
|
740
2658
|
}
|
|
741
2659
|
});
|
|
742
2660
|
});
|
|
743
|
-
describe('
|
|
744
|
-
let
|
|
745
|
-
|
|
746
|
-
const nullifiersPerBlock = new Map();
|
|
2661
|
+
describe('getContractClassLogs', ()=>{
|
|
2662
|
+
let targetBlock;
|
|
2663
|
+
let expectedContractClassLog;
|
|
747
2664
|
beforeEach(async ()=>{
|
|
748
|
-
|
|
749
|
-
blocks
|
|
750
|
-
|
|
2665
|
+
await store.addCheckpoints(publishedCheckpoints);
|
|
2666
|
+
targetBlock = publishedCheckpoints[0].checkpoint.blocks[0];
|
|
2667
|
+
expectedContractClassLog = await ContractClassLog.random();
|
|
2668
|
+
targetBlock.body.txEffects.forEach((txEffect, index)=>{
|
|
2669
|
+
txEffect.contractClassLogs = index === 0 ? [
|
|
2670
|
+
expectedContractClassLog
|
|
2671
|
+
] : [];
|
|
751
2672
|
});
|
|
2673
|
+
await store.addLogs([
|
|
2674
|
+
targetBlock
|
|
2675
|
+
]);
|
|
752
2676
|
});
|
|
753
|
-
it('returns
|
|
754
|
-
await store.
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
...nullifiersPerBlock.get(5),
|
|
758
|
-
Fr.random()
|
|
759
|
-
];
|
|
760
|
-
const blockScopedNullifiers = await store.findNullifiersIndexesWithBlock(10, nullifiersToRetrieve);
|
|
761
|
-
expect(blockScopedNullifiers).toHaveLength(nullifiersToRetrieve.length);
|
|
762
|
-
const [undefinedNullifier] = blockScopedNullifiers.slice(-1);
|
|
763
|
-
const realNullifiers = blockScopedNullifiers.slice(0, -1);
|
|
764
|
-
realNullifiers.forEach((blockScopedNullifier, index)=>{
|
|
765
|
-
expect(blockScopedNullifier).not.toBeUndefined();
|
|
766
|
-
const { data, l2BlockNumber } = blockScopedNullifier;
|
|
767
|
-
expect(data).toEqual(expect.any(BigInt));
|
|
768
|
-
expect(l2BlockNumber).toEqual(index < MAX_NULLIFIERS_PER_TX ? 1 : 6);
|
|
769
|
-
});
|
|
770
|
-
expect(undefinedNullifier).toBeUndefined();
|
|
771
|
-
});
|
|
772
|
-
it('returns wrapped nullifiers filtering by blockNumber', async ()=>{
|
|
773
|
-
await store.addNullifiers(blocks);
|
|
774
|
-
const nullifiersToRetrieve = [
|
|
775
|
-
...nullifiersPerBlock.get(0),
|
|
776
|
-
...nullifiersPerBlock.get(5)
|
|
777
|
-
];
|
|
778
|
-
const blockScopedNullifiers = await store.findNullifiersIndexesWithBlock(5, nullifiersToRetrieve);
|
|
779
|
-
expect(blockScopedNullifiers).toHaveLength(nullifiersToRetrieve.length);
|
|
780
|
-
const undefinedNullifiers = blockScopedNullifiers.slice(-MAX_NULLIFIERS_PER_TX);
|
|
781
|
-
const realNullifiers = blockScopedNullifiers.slice(0, -MAX_NULLIFIERS_PER_TX);
|
|
782
|
-
realNullifiers.forEach((blockScopedNullifier)=>{
|
|
783
|
-
expect(blockScopedNullifier).not.toBeUndefined();
|
|
784
|
-
const { data, l2BlockNumber } = blockScopedNullifier;
|
|
785
|
-
expect(data).toEqual(expect.any(BigInt));
|
|
786
|
-
expect(l2BlockNumber).toEqual(1);
|
|
787
|
-
});
|
|
788
|
-
undefinedNullifiers.forEach((undefinedNullifier)=>{
|
|
789
|
-
expect(undefinedNullifier).toBeUndefined();
|
|
2677
|
+
it('returns block hash on contract class log ids', async ()=>{
|
|
2678
|
+
const result = await store.getContractClassLogs({
|
|
2679
|
+
fromBlock: targetBlock.number,
|
|
2680
|
+
toBlock: targetBlock.number + 1
|
|
790
2681
|
});
|
|
2682
|
+
expect(result.maxLogsHit).toBeFalsy();
|
|
2683
|
+
expect(result.logs).toHaveLength(1);
|
|
2684
|
+
const [{ id, log }] = result.logs;
|
|
2685
|
+
const expectedBlockHash = L2BlockHash.fromField(await targetBlock.header.hash());
|
|
2686
|
+
expect(id.blockHash.equals(expectedBlockHash)).toBe(true);
|
|
2687
|
+
expect(id.blockNumber).toEqual(targetBlock.number);
|
|
2688
|
+
expect(log).toEqual(expectedContractClassLog);
|
|
2689
|
+
});
|
|
2690
|
+
});
|
|
2691
|
+
describe('pendingChainValidationStatus', ()=>{
|
|
2692
|
+
it('should return undefined when no status is set', async ()=>{
|
|
2693
|
+
const status = await store.getPendingChainValidationStatus();
|
|
2694
|
+
expect(status).toBeUndefined();
|
|
2695
|
+
});
|
|
2696
|
+
it('should store and retrieve a valid validation status', async ()=>{
|
|
2697
|
+
const validStatus = {
|
|
2698
|
+
valid: true
|
|
2699
|
+
};
|
|
2700
|
+
await store.setPendingChainValidationStatus(validStatus);
|
|
2701
|
+
const retrievedStatus = await store.getPendingChainValidationStatus();
|
|
2702
|
+
expect(retrievedStatus).toEqual(validStatus);
|
|
2703
|
+
});
|
|
2704
|
+
it('should store and retrieve an invalid validation status with insufficient attestations', async ()=>{
|
|
2705
|
+
const invalidStatus = {
|
|
2706
|
+
valid: false,
|
|
2707
|
+
block: randomBlockInfo(1),
|
|
2708
|
+
committee: [
|
|
2709
|
+
EthAddress.random(),
|
|
2710
|
+
EthAddress.random()
|
|
2711
|
+
],
|
|
2712
|
+
epoch: EpochNumber(123),
|
|
2713
|
+
seed: 456n,
|
|
2714
|
+
attestors: [
|
|
2715
|
+
EthAddress.random()
|
|
2716
|
+
],
|
|
2717
|
+
attestations: [
|
|
2718
|
+
CommitteeAttestation.random()
|
|
2719
|
+
],
|
|
2720
|
+
reason: 'insufficient-attestations'
|
|
2721
|
+
};
|
|
2722
|
+
await store.setPendingChainValidationStatus(invalidStatus);
|
|
2723
|
+
const retrievedStatus = await store.getPendingChainValidationStatus();
|
|
2724
|
+
expect(retrievedStatus).toEqual(invalidStatus);
|
|
2725
|
+
});
|
|
2726
|
+
it('should store and retrieve an invalid validation status with invalid attestation', async ()=>{
|
|
2727
|
+
const invalidStatus = {
|
|
2728
|
+
valid: false,
|
|
2729
|
+
block: randomBlockInfo(2),
|
|
2730
|
+
committee: [
|
|
2731
|
+
EthAddress.random()
|
|
2732
|
+
],
|
|
2733
|
+
attestors: [
|
|
2734
|
+
EthAddress.random()
|
|
2735
|
+
],
|
|
2736
|
+
epoch: EpochNumber(789),
|
|
2737
|
+
seed: 101n,
|
|
2738
|
+
attestations: [
|
|
2739
|
+
CommitteeAttestation.random()
|
|
2740
|
+
],
|
|
2741
|
+
reason: 'invalid-attestation',
|
|
2742
|
+
invalidIndex: 5
|
|
2743
|
+
};
|
|
2744
|
+
await store.setPendingChainValidationStatus(invalidStatus);
|
|
2745
|
+
const retrievedStatus = await store.getPendingChainValidationStatus();
|
|
2746
|
+
expect(retrievedStatus).toEqual(invalidStatus);
|
|
2747
|
+
});
|
|
2748
|
+
it('should overwrite existing status when setting a new one', async ()=>{
|
|
2749
|
+
const firstStatus = {
|
|
2750
|
+
valid: true
|
|
2751
|
+
};
|
|
2752
|
+
const secondStatus = {
|
|
2753
|
+
valid: false,
|
|
2754
|
+
block: randomBlockInfo(3),
|
|
2755
|
+
committee: [
|
|
2756
|
+
EthAddress.random()
|
|
2757
|
+
],
|
|
2758
|
+
epoch: EpochNumber(999),
|
|
2759
|
+
seed: 888n,
|
|
2760
|
+
attestors: [
|
|
2761
|
+
EthAddress.random()
|
|
2762
|
+
],
|
|
2763
|
+
attestations: [
|
|
2764
|
+
CommitteeAttestation.random()
|
|
2765
|
+
],
|
|
2766
|
+
reason: 'insufficient-attestations'
|
|
2767
|
+
};
|
|
2768
|
+
await store.setPendingChainValidationStatus(firstStatus);
|
|
2769
|
+
await store.setPendingChainValidationStatus(secondStatus);
|
|
2770
|
+
const retrievedStatus = await store.getPendingChainValidationStatus();
|
|
2771
|
+
expect(retrievedStatus).toEqual(secondStatus);
|
|
2772
|
+
});
|
|
2773
|
+
it('should handle empty committee and attestations arrays', async ()=>{
|
|
2774
|
+
const statusWithEmptyArrays = {
|
|
2775
|
+
valid: false,
|
|
2776
|
+
block: randomBlockInfo(4),
|
|
2777
|
+
committee: [],
|
|
2778
|
+
epoch: EpochNumber(0),
|
|
2779
|
+
seed: 0n,
|
|
2780
|
+
attestors: [],
|
|
2781
|
+
attestations: [],
|
|
2782
|
+
reason: 'insufficient-attestations'
|
|
2783
|
+
};
|
|
2784
|
+
await store.setPendingChainValidationStatus(statusWithEmptyArrays);
|
|
2785
|
+
const retrievedStatus = await store.getPendingChainValidationStatus();
|
|
2786
|
+
expect(retrievedStatus).toEqual(statusWithEmptyArrays);
|
|
791
2787
|
});
|
|
792
2788
|
});
|
|
793
2789
|
});
|